├── .env.local.example ├── .eslintrc.json ├── .gitignore ├── README.md ├── app ├── actions.tsx ├── api │ └── elevenlabs │ │ └── speech │ │ └── route.ts ├── favicon.ico ├── globals.css ├── layout.tsx └── page.tsx ├── chrome_extension ├── README.md ├── docs │ └── usage-enable.png └── src │ ├── content.js │ ├── icon128.png │ ├── icon16.png │ ├── icon32.png │ ├── icon48.png │ ├── manifest.json │ ├── popup.html │ └── popup.js ├── components.json ├── components ├── classifier.tsx ├── dui │ ├── clothing.tsx │ ├── politics.tsx │ ├── reply.tsx │ ├── stocks.tsx │ └── weather.tsx ├── dynamicskeleton.tsx ├── filteredimage.tsx ├── message.tsx ├── replies.tsx ├── theme-provider.tsx ├── tweet.tsx ├── tweetskeleton.tsx ├── twitter-list.tsx └── ui │ ├── avatar.tsx │ ├── button.tsx │ ├── card.tsx │ ├── carousel.tsx │ ├── linechart.tsx │ ├── skeleton.tsx │ ├── spinner.tsx │ ├── switch.tsx │ └── tabs.tsx ├── lib ├── data.ts ├── elevenlabs.client.ts ├── filters.ts ├── test-data.ts ├── tool-definition.ts └── utils.ts ├── next.config.mjs ├── package-lock.json ├── package.json ├── postcss.config.js ├── prettier.config.js ├── public ├── demo.png ├── next.svg ├── placeholder-avatar.jpg ├── suncloud.mp4 └── vercel.svg ├── tailwind.config.ts ├── tsconfig.json └── types └── tweets.ts /.env.local.example: -------------------------------------------------------------------------------- 1 | OPENAI_API_KEY= 2 | ELEVENLABS_API_KEY= -------------------------------------------------------------------------------- /.eslintrc.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": ["next", "next/core-web-vitals"], 3 | "rules": {} 4 | } 5 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # See https://help.github.com/articles/ignoring-files/ for more about ignoring files. 2 | 3 | # dependencies 4 | /node_modules 5 | /.pnp 6 | .pnp.js 7 | .yarn/install-state.gz 8 | 9 | # testing 10 | /coverage 11 | 12 | # next.js 13 | /.next/ 14 | /out/ 15 | 16 | # production 17 | /build 18 | 19 | # misc 20 | .DS_Store 21 | *.pem 22 | 23 | # debug 24 | npm-debug.log* 25 | yarn-debug.log* 26 | yarn-error.log* 27 | 28 | # local env files 29 | .env*.local 30 | 31 | # vercel 32 | .vercel 33 | 34 | # typescript 35 | *.tsbuildinfo 36 | next-env.d.ts 37 | 38 | # Others 39 | .idea/ 40 | .env -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Generative-X 2 | 3 | Generative-X (twitter) augments your twitter timeline with AI using image filters, text-to-speech, auto replies, and dynamic UI components that pop in to give more context to tweets! 4 | 5 | Built during the SPCxOpenAI Hackathon 6 | 7 | ## Demo Video 8 | [Watch the video on X](https://x.com/ultrasoundchad/status/1764464890960638099?s=20) 9 | 10 | 11 | 12 | 13 | ## Getting Started 14 | Under the hood, there's a nextjs application and a chrome extension used to pull tweets off of your feed and inject the nextjs app as an iframe into X 15 | 16 | Let's start by running the nextjs app wwhich will use a sample twitter feed 17 | 1. Add your OPENAI_API_KEY and ELEVENLABS_API_KEY (used for tts) 18 | `cp .env.local.example .env.local` 19 | 2. Run the application 20 | `npm run dev` 21 | 3. Try out the image filters (these will be snappy as they're cached) 22 | 4. 
Try out the dynamic UI switch 23 | 24 | ## Chrome Extension 25 | You'll need to load the extension into your browser to use it 26 | 27 | ### Installation 28 | 29 | 1. Download this repo - the extension is in the `chrome_extension/src` folder 30 | 2. Open Chrome > Go to Extensions 31 | 3. Enable Developer mode (switch top-right) 32 | 4. Click on "Load unpacked" and select the `chrome_extension/src` folder 33 | 34 | ### Usage 35 | 36 | 1. Browse to https://twitter.com 37 | 2. Click on the extension icon in the toolbar (heart-shaped for now) 38 | 3. Select "X Timeline" from the menu 39 | ![Extension Menu](chrome_extension/docs/usage-enable.png) 40 | 41 | 42 | ## Dynamic User Interfaces 43 | There are currently 5 dynamic components that can be rendered based on tweet context. We use GPT-3.5 with function calling to determine which component to render. 44 | 45 | Dynamic User Interfaces (DUIs) can be found in `/components/dui` 46 | 47 | 1. `weather.tsx` 48 | Renders live weather data if a location and "weather" are mentioned in a tweet 49 | 50 | 2. `stocks.tsx` 51 | Renders live stock data if a ticker symbol, e.g. $TSLA, is mentioned in a tweet 52 | 53 | 3. `politics.tsx` 54 | Renders a political scale with reference links (generated from Perplexity Sonar) if a tweet is political 55 | 56 | 4. `clothing.tsx` 57 | Tries to match the clothing items in a tweet image to items in the Nordstrom Rack catalog. For the demo it will only render for tweets from [@TechBroDrip](https://twitter.com/TechBroDrip0) 58 | 59 | 5. `reply.tsx` 60 | Renders a few suggested replies with TTS in a reply component. This is the default component if no other component is rendered. 61 | 62 | ## Adding New Components 63 | This application gets better with more components. If you have ideas for components that could augment the X experience, open a PR. A minimal sketch of a new component and how it plugs into the router is shown below. 64 | 65 | Docs on the full flow for adding new components are coming soon.
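In the meantime, here is a minimal sketch of the two pieces a new component needs. Everything named `Movies`, `MovieProps`, `get_movie_info`, and `getMovieApi` below is hypothetical and only illustrates the pattern; the real wiring points (the zod `parameters` schema, `completion.onFunctionCall`, and `onUpdateDynamic`) are the ones already used by the router in `app/actions.tsx`.

```tsx
// components/dui/movies.tsx — hypothetical example of a new DUI component.
// Like the existing DUIs, it is a plain renderer: it receives already-fetched
// data through `props` and never calls an external API itself.
import { CardTitle, CardHeader, CardContent, Card } from "@/components/ui/card";

type MovieProps = {
  title: string;
  rating: number;
  synopsis: string;
};

export const Movies = ({ props }: { props: MovieProps }) => (
  <Card>
    <CardHeader>
      <CardTitle>{props.title}</CardTitle>
    </CardHeader>
    <CardContent>
      <p>Rated {props.rating}/10</p>
      <p>{props.synopsis}</p>
    </CardContent>
  </Card>
);
```

```tsx
// app/actions.tsx (sketch) — teach the router about the new component.

// 1. Add a function definition so GPT-3.5 can pick it for matching tweets:
{
  name: "get_movie_info",
  description: "Get details about a movie mentioned in the tweet.",
  parameters: z.object({
    title: z.string().describe("The movie title, e.g. Oppenheimer"),
  }),
},

// 2. Handle the call: fetch the data (getMovieApi is a hypothetical helper
//    you would add to lib/data.ts) and hand the rendered component back.
completion.onFunctionCall("get_movie_info", async ({ title }) => {
  const movieData = await getMovieApi(title);
  onUpdateDynamic(<Movies props={movieData} />, true);
});
```

Keeping data fetching in the router and rendering in the component mirrors the existing `weather.tsx`/`stocks.tsx` split, so a new DUI only has to define how its card looks.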
66 | 67 | ## TODO 68 | - [ ] move function calling router out of `actions/tsx` and into it's own api (there is currently an issue where server action calls are not parallelized in production see https://github.com/vercel/next.js/discussions/50743) 69 | -------------------------------------------------------------------------------- /app/actions.tsx: -------------------------------------------------------------------------------- 1 | "use server"; 2 | import * as React from "react"; 3 | import { z } from "zod"; 4 | import OpenAI from "openai"; 5 | 6 | import { runOpenAICompletion } from "@/lib/utils"; 7 | import { getStockData, getPoliticalLeaning } from "@/lib/test-data"; 8 | 9 | import { 10 | getWeatherApi, 11 | getPoliticalApi, 12 | getStockApi, 13 | getClothingApi, 14 | getReplyApi, 15 | } from "@/lib/data"; 16 | 17 | import { Weather } from "@/components/dui/weather"; 18 | import { Stocks } from "@/components/dui/stocks"; 19 | import { Politics } from "@/components/dui/politics"; 20 | import { Clothing } from "@/components/dui/clothing"; 21 | import { Reply } from "@/components/dui/reply"; 22 | 23 | import type { Tweet } from "@/types/tweets"; 24 | import { 25 | FilterId, 26 | getFilterVoiceId, 27 | getResponseAdjectives, 28 | } from "@/lib/filters"; 29 | 30 | const openai = new OpenAI({ 31 | apiKey: process.env.OPENAI_API_KEY || "", 32 | }); 33 | 34 | // async function generateReply(tweet: Tweet, filterId: FilterId | null): Promise { 35 | // const adjectives = getResponseAdjectives(filterId) || "brief"; 36 | // const replyData = await getReplyApi(tweet.content, adjectives); 37 | // if (replyData) { 38 | // // if no community notes, remove. 39 | // if (replyData.community_note == "None") { 40 | // delete replyData.community_note; 41 | // } 42 | // return ; 43 | // } 44 | // return <>; 45 | // } 46 | 47 | interface Classification {} 48 | 49 | export async function classifyTweetByContent( 50 | tweet: Tweet, 51 | filterId: FilterId | null, 52 | ) { 53 | const promise = new Promise<{ component: React.ReactNode, isDone: boolean, isReply: boolean }>((resolve, _reject) => { 54 | classify(tweet, filterId, 55 | (component, isDone, isReply) => resolve({component, isDone, isReply: isReply ?? false}), 56 | ); 57 | }); 58 | 59 | // Weird behavior of server components: 60 | // They run sequentially unless wrapped in a promise 61 | // Ref: https://www.youtube.com/watch?v=CDZg3maL9q0 62 | // 63 | // This fixes it on `npm run dev` but not on production `npm run build && npm run start` 64 | return { promise: promise }; 65 | } 66 | 67 | async function classify( 68 | tweet: Tweet, 69 | filterId: FilterId | null, 70 | onUpdateDynamic: ( 71 | component: React.ReactNode, 72 | isDone: boolean, 73 | isReply?: boolean, 74 | ) => void, 75 | ) { 76 | const tweetContent = tweet.content; 77 | 78 | // Hardcode Clothing UI for now 79 | // Since routing requires GPT4V to interpret 80 | // The image as containing clothing 81 | if (tweet.user.username === "@TechBroDrip") { 82 | if (tweet.media && tweet.media.length && tweet.media[0].url) { 83 | let clothingData = await getClothingApi(tweet.media[0].url); 84 | onUpdateDynamic(, true); 85 | } 86 | } 87 | 88 | const completion = runOpenAICompletion(openai, { 89 | model: "gpt-3.5-turbo", 90 | stream: true, 91 | messages: [ 92 | { 93 | role: "system", 94 | content: `\ 95 | You are a generative twitter bot. You will receive raw tweet data and manipulate the data to fit certain criterias. 
96 | 97 | If the tweet mentions the weather of a location, call \`get_current_weather\` to show the weather UI. 98 | If the tweet mentions the stocks and/or has a ticket symbol like $AAPL, $TSLA, $META, $MSFT, $GOOG call \`get_stock_price\` to show the stock price UI. 99 | If the tweet mentions deep political issues, call the \`get_political_stance\` to show a political stance UI. 100 | 101 | Otherwise just say 'reply' and we will show a reply UI. 102 | `, 103 | }, 104 | { 105 | role: "user", 106 | content: tweetContent, 107 | }, 108 | ], 109 | functions: [ 110 | { 111 | name: "get_current_weather", 112 | description: "Get the current weather in a given location", 113 | parameters: z.object({ 114 | location: z 115 | .string() 116 | .describe("The city and state, e.g. San Francisco, London, Hawaii"), 117 | unit: z.string().describe("The unit of the temperature, e.g. C or F"), 118 | }), 119 | }, 120 | { 121 | name: "get_stock_price", 122 | description: 123 | "Get the current stock price of a given stock or currency based on it's ticker symbol. Use this to show the price to the user.", 124 | parameters: z.object({ 125 | ticker: z 126 | .string() 127 | .describe( 128 | "The name or symbol of the stock or currency. e.g. DOGE/AAPL/USD.", 129 | ), 130 | }), 131 | }, 132 | { 133 | name: "get_political_stance", 134 | description: 135 | "Summarize the tweet and any political references. Use this to show a political UI to the user.", 136 | parameters: z.object({ 137 | summary: z.string().describe("The political summary of the tweet"), 138 | }), 139 | }, 140 | ] as const, 141 | temperature: 0, 142 | }); 143 | 144 | completion.onTextContent((content: string, isFinal: boolean) => { 145 | onUpdateDynamic(<>, true, true); 146 | }); 147 | 148 | completion.onFunctionCall( 149 | "get_political_stance", 150 | async ({ summary }: { summary: string }) => { 151 | const polticalData = await getPoliticalApi(tweet.content); 152 | onUpdateDynamic(, true); 153 | }, 154 | ); 155 | 156 | completion.onFunctionCall( 157 | "get_stock_price", 158 | async ({ ticker }: { ticker: string }) => { 159 | const stockData = await getStockApi(ticker); 160 | const isStockDataBroken = 161 | !stockData || !stockData.current_price || !stockData.ticker; 162 | if (isStockDataBroken) { 163 | console.error("Stock data is broken", ticker, stockData); 164 | onUpdateDynamic(<>, true, true); 165 | } else onUpdateDynamic(, true); 166 | }, 167 | ); 168 | 169 | completion.onFunctionCall( 170 | "get_current_weather", 171 | async ({ location, unit }) => { 172 | const weatherData = await getWeatherApi(location); 173 | onUpdateDynamic(, true); 174 | }, 175 | ); 176 | } 177 | -------------------------------------------------------------------------------- /app/api/elevenlabs/speech/route.ts: -------------------------------------------------------------------------------- 1 | /* This code has been lifted (with permission) from the open-source MIT licensed big-AGI project. 
2 | * https://github.com/enricoros/big-AGI 3 | */ 4 | import { NextRequest } from "next/server"; 5 | import { z } from "zod"; 6 | 7 | function elevenlabsVoiceId(voiceId?: string): string { 8 | return voiceId?.trim() || "21m00Tcm4TlvDq8ikWAM"; 9 | } 10 | 11 | function elevenlabsAccess( 12 | elevenKey: string | undefined, 13 | apiPath: string, 14 | ): { headers: HeadersInit; url: string } { 15 | // API key 16 | elevenKey = (elevenKey || process.env.ELEVENLABS_API_KEY || "").trim(); 17 | if (!elevenKey) throw new Error("Missing ElevenLabs API key."); 18 | 19 | // API host 20 | const host = "https://api.elevenlabs.io"; 21 | 22 | return { 23 | headers: { 24 | "Content-Type": "application/json", 25 | "xi-api-key": elevenKey, 26 | }, 27 | url: host + apiPath, 28 | }; 29 | } 30 | 31 | const speechInputSchema = z.object({ 32 | elevenKey: z.string().optional(), 33 | text: z.string(), 34 | voiceId: z.string().optional(), 35 | nonEnglish: z.boolean(), 36 | streaming: z.boolean().optional(), 37 | streamOptimization: z.number().optional(), 38 | }); 39 | 40 | export type SpeechInputSchema = z.infer; 41 | 42 | export namespace ElevenlabsWire { 43 | export interface TTSRequest { 44 | text: string; 45 | model_id?: "eleven_monolingual_v1" | string; 46 | voice_settings?: { 47 | stability: number; 48 | similarity_boost: number; 49 | }; 50 | } 51 | } 52 | 53 | function createEmptyReadableStream(): ReadableStream { 54 | return new ReadableStream({ 55 | start: (controller) => controller.close(), 56 | }); 57 | } 58 | 59 | async function elevenLabsHandler(req: NextRequest) { 60 | try { 61 | // construct the upstream request 62 | const { 63 | elevenKey, 64 | text, 65 | voiceId, 66 | nonEnglish, 67 | streaming, 68 | streamOptimization, 69 | } = speechInputSchema.parse(await req.json()); 70 | const path = 71 | `/v1/text-to-speech/${elevenlabsVoiceId(voiceId)}` + 72 | (streaming 73 | ? `/stream?optimize_streaming_latency=${streamOptimization || 1}` 74 | : ""); 75 | const { headers, url } = elevenlabsAccess(elevenKey, path); 76 | const body: ElevenlabsWire.TTSRequest = { 77 | text: text, 78 | ...(nonEnglish && { model_id: "eleven_multilingual_v1" }), 79 | }; 80 | 81 | // elevenlabs POST 82 | const upstreamResponse: Response = await fetch(url, { 83 | method: "POST", 84 | headers, 85 | body: JSON.stringify(body), 86 | }); 87 | 88 | // Throws an error if the response is not ok 89 | // Use in server-side code, and not tRPC code (which has utility functions in trpc.serverutils.ts) 90 | if (!upstreamResponse.ok) { 91 | const errorPayload: object | null = await upstreamResponse 92 | .json() 93 | .catch(() => null); 94 | // noinspection ExceptionCaughtLocallyJS 95 | throw new Error( 96 | `${upstreamResponse.statusText} (${upstreamResponse.status})${errorPayload ? 
" · " + JSON.stringify(errorPayload) : ""}`, 97 | ); 98 | } 99 | 100 | // NOTE: this is disabled, as we pass-through what we get upstream for speed, as it is not worthy 101 | // to wait for the entire audio to be downloaded before we send it to the client 102 | // if (!streaming) { 103 | // const audioArrayBuffer = await upstreamResponse.arrayBuffer(); 104 | // return new NextResponse(audioArrayBuffer, { status: 200, headers: { 'Content-Type': 'audio/mpeg' } }); 105 | // } 106 | 107 | // stream the data to the client 108 | const audioReadableStream = 109 | upstreamResponse.body || createEmptyReadableStream(); 110 | return new Response(audioReadableStream, { 111 | status: 200, 112 | headers: { "Content-Type": "audio/mpeg" }, 113 | }); 114 | } catch (error: any) { 115 | const fetchOrVendorError = 116 | (error?.message || error?.error || "unknown error") + 117 | (error?.cause ? " · " + error.cause : ""); 118 | console.log(`api/elevenlabs/speech: fetch issue: ${fetchOrVendorError}`); 119 | return new Response(`[Issue] elevenlabs: ${fetchOrVendorError}`, { 120 | status: 500, 121 | }); 122 | } 123 | } 124 | 125 | export const runtime = "edge"; 126 | export { elevenLabsHandler as POST }; 127 | -------------------------------------------------------------------------------- /app/favicon.ico: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gmchad/generative-x/66b2e4ef63ef44de3596258c8d594224a06c13fa/app/favicon.ico -------------------------------------------------------------------------------- /app/globals.css: -------------------------------------------------------------------------------- 1 | @tailwind base; 2 | @tailwind components; 3 | @tailwind utilities; 4 | 5 | @layer base { 6 | :root { 7 | --background: 0 0% 100%; 8 | --foreground: 222.2 84% 4.9%; 9 | 10 | --card: 0 0% 100%; 11 | --card-foreground: 222.2 84% 4.9%; 12 | 13 | --popover: 0 0% 100%; 14 | --popover-foreground: 222.2 84% 4.9%; 15 | 16 | --primary: 222.2 47.4% 11.2%; 17 | --primary-foreground: 210 40% 98%; 18 | 19 | --secondary: 210 40% 96.1%; 20 | --secondary-foreground: 222.2 47.4% 11.2%; 21 | 22 | --muted: 210 40% 96.1%; 23 | --muted-foreground: 215.4 16.3% 46.9%; 24 | 25 | --accent: 210 40% 96.1%; 26 | --accent-foreground: 222.2 47.4% 11.2%; 27 | 28 | --destructive: 0 84.2% 60.2%; 29 | --destructive-foreground: 210 40% 98%; 30 | 31 | --border: 214.3 31.8% 91.4%; 32 | --input: 214.3 31.8% 91.4%; 33 | --ring: 222.2 84% 4.9%; 34 | 35 | --radius: 0.5rem; 36 | } 37 | 38 | .dark { 39 | --background: 222.2 84% 4.9%; 40 | --foreground: 210 40% 98%; 41 | 42 | --card: 222.2 84% 4.9%; 43 | --card-foreground: 210 40% 98%; 44 | 45 | --popover: 222.2 84% 4.9%; 46 | --popover-foreground: 210 40% 98%; 47 | 48 | --primary: 210 40% 98%; 49 | --primary-foreground: 222.2 47.4% 11.2%; 50 | 51 | --secondary: 217.2 32.6% 17.5%; 52 | --secondary-foreground: 210 40% 98%; 53 | 54 | --muted: 217.2 32.6% 17.5%; 55 | --muted-foreground: 215 20.2% 65.1%; 56 | 57 | --accent: 217.2 32.6% 17.5%; 58 | --accent-foreground: 210 40% 98%; 59 | 60 | --destructive: 0 62.8% 30.6%; 61 | --destructive-foreground: 210 40% 98%; 62 | 63 | --border: 217.2 32.6% 17.5%; 64 | --input: 217.2 32.6% 17.5%; 65 | --ring: 212.7 26.8% 83.9%; 66 | } 67 | } 68 | 69 | @layer base { 70 | * { 71 | @apply border-border; 72 | } 73 | body { 74 | @apply bg-background text-foreground; 75 | } 76 | } 77 | -------------------------------------------------------------------------------- /app/layout.tsx: 
-------------------------------------------------------------------------------- 1 | import type { Metadata } from "next"; 2 | import { Inter } from "next/font/google"; 3 | import "./globals.css"; 4 | import { ThemeProvider } from "@/components/theme-provider"; 5 | 6 | const inter = Inter({ subsets: ["latin"] }); 7 | 8 | export const metadata: Metadata = { 9 | title: "Generative Utility", 10 | description: "Generative Utility", 11 | }; 12 | 13 | export default function RootLayout({ 14 | children, 15 | }: Readonly<{ 16 | children: React.ReactNode; 17 | }>) { 18 | return ( 19 | 20 | 21 | 27 | {children} 28 | 29 | 30 | 31 | ); 32 | } 33 | -------------------------------------------------------------------------------- /app/page.tsx: -------------------------------------------------------------------------------- 1 | import { Suspense } from "react"; 2 | import TwitterList from "@/components/twitter-list"; 3 | 4 | export default function Home() { 5 | return ( 6 |
7 | 8 | 9 | 10 |
11 | ); 12 | } 13 | -------------------------------------------------------------------------------- /chrome_extension/README.md: -------------------------------------------------------------------------------- 1 | # Chrome Extension 2 | 3 | Why: parse the content of selected pages, as other approaches (official/unofficial APIs) do not seem to work. 4 | 5 | This extension can be activated by clicking on the icon in the toolbar, and then selecting the kind of page to parse. 6 | 7 | ## Installation 8 | 9 | 1. Download this repo - the extension is in the `chrome_extension/src` folder (where this file is) 10 | 2. Open Chrome > Go to Extensions 11 | 3. Enable Developer mode (switch top-right) 12 | 4. Click on "Load unpacked" and select the `src` folder at the same level as this file 13 | 14 | ## Usage 15 | 16 | 1. Browse to https://twitter.com 17 | 2. Click on the extension icon in the toolbar (heart-shaped for now) 18 | 3. Select "X Timeline" from the menu 19 | ![Extension Menu](docs/usage-enable.png) 20 | -------------------------------------------------------------------------------- /chrome_extension/docs/usage-enable.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gmchad/generative-x/66b2e4ef63ef44de3596258c8d594224a06c13fa/chrome_extension/docs/usage-enable.png -------------------------------------------------------------------------------- /chrome_extension/src/content.js: -------------------------------------------------------------------------------- 1 | /** 2 | * Reconstruct Tweets (per the TypeScript definition) from the timeline HTML element 3 | */ 4 | function parseTweetsFromTimeline(eTimeline) { 5 | // @Tweets - note that the nth-child is 2, because the first sibling is "Name.. reposted.." (if present) 6 | const eItems = Array.from( 7 | eTimeline.querySelectorAll("div > article > div > div"), 8 | ); 9 | if (!eItems.length) { 10 | console.log("Tweets not found"); 11 | return null; 12 | } 13 | 14 | return eItems 15 | .map((eItem) => { 16 | const [_eReposted, eTweetC] = eItem.children; 17 | 18 | // image is first child of eTweet, deep down to the img tag 19 | const [eImageC, eBodyC] = eTweetC.children; 20 | const avatar = eImageC.querySelector("img"); 21 | const userAvatarURL = avatar ? avatar.src ?? null : null; 22 | 23 | // body of the tweet 24 | const [eUserAndHandle, eTweetText, ...eRest] = eBodyC.children; 25 | const eStats = eRest.pop(); // last child is the stats 26 | const eEmbed = eRest.length ?
eRest.pop() : null; // if there's a 3rd child, it's the embed 27 | 28 | // Body > 0:User, Handle, Time 29 | const [eUserParts, eHandleParts] = eUserAndHandle.querySelectorAll( 30 | '[data-testid="User-Name"]', 31 | )[0].children; 32 | const [eUserName, _eUserSymbols] = eUserParts.querySelectorAll( 33 | "div > a > div > div", 34 | ); 35 | const [eHandle, _spacer, eTimeC] = eHandleParts.children[0].children; 36 | const userName = eUserName.innerText; 37 | const userHandle = eHandle.querySelectorAll("a > div")[0].innerText; 38 | if (!eTimeC) { 39 | console.log("Time not found", eHandleParts, eUserAndHandle, eItems); 40 | return null; 41 | } 42 | const eTweetTime = eTimeC.querySelectorAll("a > time")[0]; 43 | const tweetISOTime = eTweetTime.getAttribute("datetime"); 44 | 45 | // to find the ID of the tweet, we parse it from the time link 46 | const eTweetTimeLink = eTweetTime.parentElement; 47 | const tweetLink = eTweetTimeLink.getAttribute("href"); 48 | const tweetID = tweetLink.split("/").pop(); 49 | 50 | // Body > 1:Tweet Text 51 | // NOTE: only use the first part, because the rest are 'show more...' 52 | const eTextParts = eTweetText.children[0] || null; 53 | const tweetText = eTextParts ? eTextParts.innerText : null; 54 | 55 | // Body > (next?):Embeds 56 | let tweetImageURLs = []; 57 | if (eEmbed) { 58 | // NOTE: using img with alt="Image", which may be restrictive, but it's the best we can do atm 59 | let imgSelector = eEmbed.querySelectorAll('div > img[alt="Image"]'); 60 | // if empty (no img with alt="Image"), try with alt="Embedded video" 61 | if (!imgSelector.length) 62 | imgSelector = eEmbed.querySelectorAll( 63 | 'div > img[alt="Embedded video"]', 64 | ); 65 | if (imgSelector.length) 66 | tweetImageURLs = Array.from(imgSelector) 67 | .filter((e) => e.src && e.src.indexOf("/emoji/v2/") === -1) 68 | .map((e) => e.src); 69 | } 70 | 71 | // Body > (last):Stats 72 | const [eStatsReply, eStatsRepost, eStatsLike, eStatsView] = 73 | eStats.querySelectorAll("& > div > div > div"); 74 | const tweetEngagement = { 75 | replies: eStatsReply.innerText?.trim() || "", 76 | reposts: eStatsRepost.innerText?.trim() || "", 77 | likes: eStatsLike.innerText?.trim() || "", 78 | views: eStatsView.innerText?.trim() || "", 79 | }; 80 | 81 | // make sure this stays in sync with the 'Tweet' type in the frontend 82 | return { 83 | id: tweetID, 84 | link: tweetLink, 85 | user: { 86 | username: userHandle || "@elonmusk", 87 | displayName: userName || "Elon Musk", 88 | avatarUrl: 89 | userAvatarURL || 90 | "https://pbs.twimg.com/profile_images/1683325380441128960/yRsRRjGO_400x400.jpg", 91 | verified: false, // TODO 92 | }, 93 | content: tweetText || "", 94 | media: 95 | tweetImageURLs.length >= 1 96 | ? 
tweetImageURLs.map((url) => ({ 97 | type: "image", 98 | url: url, 99 | altText: undefined, 100 | })) 101 | : undefined, 102 | engagement: tweetEngagement, 103 | timestamp: tweetISOTime, 104 | }; 105 | }) 106 | .filter(Boolean); 107 | } 108 | 109 | chrome.runtime.onMessage.addListener(function (request, sender, sendResponse) { 110 | if (request.action !== "parseXTimeline") return; 111 | const openInNewWindow = request.openInNewWindow; 112 | const demoData = request.demoData; 113 | 114 | // @Timeline: parse and then replace 115 | let eTimeline = document.querySelector( 116 | '[aria-label="Timeline: Your Home Timeline"]', 117 | ); 118 | if (!eTimeline) { 119 | // retry with a per-person timeline 120 | eTimeline = document.querySelector( 121 | 'section[aria-labelledby^="accessible-list"] > div:nth-child(2)', 122 | ); 123 | console.log("Timeline not found, trying the per-person timeline"); 124 | } 125 | if (!eTimeline) return console.log("Timeline not found"); 126 | console.log("Timeline found:", eTimeline); 127 | 128 | // Scrape Tweets from the timeline 129 | const tweets = demoData ? [] : parseTweetsFromTimeline(eTimeline); 130 | console.log("Tweets:", tweets); 131 | const queryString = demoData 132 | ? "" 133 | : encodeURIComponent(JSON.stringify(tweets)); 134 | console.log("Encoded length:", queryString.length); 135 | 136 | if (openInNewWindow) { 137 | // Example of opening a new window with a URL including the query string. 138 | // You might want to adjust this to fit your specific requirements. 139 | window.open(request.url + `?tweets=${queryString}`, "_blank"); 140 | return; 141 | } 142 | 143 | // IFrame to our Frontend, passing Tweets as Query 144 | const iFrameHeight = Math.max(600, eTimeline.offsetHeight); 145 | const iFrontend = document.createElement("iframe"); 146 | iFrontend.src = demoData ? 
request.url : request.url + `?tweets=${queryString}`; 147 | 148 | iFrontend.frameBorder = "0"; 149 | // iFrontend.scrolling = 'no'; 150 | iFrontend.style.width = "100%"; 151 | iFrontend.style.height = `${iFrameHeight}px`; 152 | iFrontend.style.zIndex = "1"; 153 | eTimeline.parentNode.insertBefore(iFrontend, eTimeline); 154 | 155 | // Hide the timeline 156 | // NOTE: not doing it because otherwise the "end of page refresher" will run continuously, fetching more and more tweets 157 | // eTimeline.style.display = 'none'; 158 | 159 | // update the timeline to be below and invisible 160 | eTimeline.parentNode.style.position = "relative"; 161 | eTimeline.style.position = "absolute"; 162 | eTimeline.style.left = "0"; 163 | eTimeline.style.right = "0"; 164 | eTimeline.style.top = "0"; 165 | // eTimeline.style.zIndex = '-1'; 166 | // eTimeline.style.visibility = 'hidden'; 167 | 168 | // @Home: hide the 'new tweet' blocks 169 | const eHome = document.querySelector('[aria-label="Home timeline"]'); 170 | if (eHome) { 171 | // Check if eHome has exactly 5 div children 172 | const divChildren = Array.from(eHome.children).filter( 173 | (child) => child.tagName === "DIV", 174 | ); 175 | if (divChildren.length === 5) { 176 | divChildren[0].style.display = "none"; // Hides the 1st child 177 | divChildren[2].style.display = "none"; // Hides the 3rd child 178 | } else if (divChildren.length === 3) { 179 | // person timeline 180 | if (divChildren[2]?.children?.[0]?.children?.[0]?.children?.[0]) 181 | divChildren[2].children[0].children[0].children[0].style.display = 182 | "none"; 183 | } else 184 | console.log( 185 | `eHome does not have exactly 5 div children, it has ${divChildren.length}.`, 186 | ); 187 | } else console.log("Home not found"); 188 | 189 | // Optionally, send data back to your popup or background script 190 | sendResponse({ data: "some data" }); 191 | }); 192 | -------------------------------------------------------------------------------- /chrome_extension/src/icon128.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gmchad/generative-x/66b2e4ef63ef44de3596258c8d594224a06c13fa/chrome_extension/src/icon128.png -------------------------------------------------------------------------------- /chrome_extension/src/icon16.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gmchad/generative-x/66b2e4ef63ef44de3596258c8d594224a06c13fa/chrome_extension/src/icon16.png -------------------------------------------------------------------------------- /chrome_extension/src/icon32.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gmchad/generative-x/66b2e4ef63ef44de3596258c8d594224a06c13fa/chrome_extension/src/icon32.png -------------------------------------------------------------------------------- /chrome_extension/src/icon48.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gmchad/generative-x/66b2e4ef63ef44de3596258c8d594224a06c13fa/chrome_extension/src/icon48.png -------------------------------------------------------------------------------- /chrome_extension/src/manifest.json: -------------------------------------------------------------------------------- 1 | { 2 | "manifest_version": 3, 3 | "name": "Generative X", 4 | "version": "1.1", 5 | "description": "Make Twitter ✨ Again", 6 | "permissions": ["activeTab", 
"scripting"], 7 | "action": { 8 | "default_popup": "popup.html", 9 | "default_icon": { 10 | "16": "icon16.png", 11 | "48": "icon48.png", 12 | "128": "icon128.png" 13 | } 14 | }, 15 | "content_scripts": [ 16 | { 17 | "matches": ["*://*.twitter.com/*"], 18 | "js": ["content.js"] 19 | } 20 | ], 21 | "icons": { 22 | "16": "icon16.png", 23 | "48": "icon48.png", 24 | "128": "icon128.png" 25 | } 26 | } 27 | -------------------------------------------------------------------------------- /chrome_extension/src/popup.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | Sparkles Menu 5 | 6 | 7 | 8 | 12 | 196 | 197 | 198 | 227 | 228 | 229 | 230 | 231 | -------------------------------------------------------------------------------- /chrome_extension/src/popup.js: -------------------------------------------------------------------------------- 1 | document.getElementById("xTimelineButton").addEventListener("click", () => { 2 | const openInNewWindow = document.getElementById("newWindowCheckbox").checked; 3 | const demoData = document.getElementById("demoDataCheckbox").checked; 4 | // Get the state of the custom URL checkbox and input 5 | const useCustomUrl = document.getElementById("customUrlCheckbox").checked; 6 | const customUrlInput = document.getElementById("customUrlInput").value; 7 | // Determine the URL to use 8 | const targetUrl = useCustomUrl && customUrlInput ? customUrlInput : "http://localhost:3000"; 9 | chrome.tabs.query({ active: true, currentWindow: true }, function (tabs) { 10 | chrome.tabs.sendMessage(tabs[0].id, { 11 | action: "parseXTimeline", 12 | openInNewWindow, 13 | demoData, 14 | url: targetUrl, // Add the URL to the message 15 | }); 16 | }); 17 | }); 18 | 19 | // Code to toggle the visibility of the custom URL input field based on the checkbox state 20 | document.getElementById("customUrlCheckbox").addEventListener("change", function() { 21 | if(this.checked) { 22 | document.getElementById("customUrlInput").style.display = "block"; 23 | } else { 24 | document.getElementById("customUrlInput").style.display = "none"; 25 | } 26 | }); 27 | 28 | // const listenerTwitter = () => { 29 | // chrome.tabs.query({active: true, currentWindow: true}, function (tabs) { 30 | // chrome.scripting.executeScript({target: {tabId: tabs[0].id}, function: filterTwitterTimeline}); 31 | // }); 32 | // }; 33 | 34 | // const listenerNotImplemented = () => { 35 | // chrome.tabs.query({ active: true, currentWindow: true }, function (tabs) { 36 | // chrome.scripting.executeScript({ 37 | // target: { tabId: tabs[0].id }, 38 | // function: () => alert("Not implemented yet"), 39 | // }); 40 | // }); 41 | // }; 42 | // document 43 | // .getElementById("fbFilterButton") 44 | // .addEventListener("click", listenerNotImplemented); 45 | // document 46 | // .getElementById("moreFilterButton") 47 | // .addEventListener("click", listenerNotImplemented); 48 | -------------------------------------------------------------------------------- /components.json: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "https://ui.shadcn.com/schema.json", 3 | "style": "default", 4 | "rsc": true, 5 | "tsx": true, 6 | "tailwind": { 7 | "config": "tailwind.config.ts", 8 | "css": "app/globals.css", 9 | "baseColor": "slate", 10 | "cssVariables": true, 11 | "prefix": "" 12 | }, 13 | "aliases": { 14 | "components": "@/components", 15 | "utils": "@/lib/utils" 16 | } 17 | } 18 | 
-------------------------------------------------------------------------------- /components/classifier.tsx: -------------------------------------------------------------------------------- 1 | import * as React from "react"; 2 | import { z } from "zod"; 3 | import OpenAI from "openai"; 4 | 5 | import { runOpenAICompletion } from "@/lib/utils"; 6 | import { getStockData, getPoliticalLeaning } from "@/lib/test-data"; 7 | 8 | import { 9 | getWeatherApi, 10 | getPoliticalApi, 11 | getStockApi, 12 | getClothingApi, 13 | } from "@/lib/data"; 14 | 15 | import { Weather } from "@/components/dui/weather"; 16 | import { Stocks } from "@/components/dui/stocks"; 17 | import { Politics } from "@/components/dui/politics"; 18 | import { Clothing } from "./dui/clothing"; 19 | 20 | import type { Tweet } from "@/types/tweets"; 21 | 22 | const openai = new OpenAI({ 23 | apiKey: 24 | process.env.OPENAI_API_KEY || process.env.NEXT_PUBLIC_OPENAI_API_KEY || "", 25 | dangerouslyAllowBrowser: true, 26 | }); 27 | 28 | function classifyTweetByContent( 29 | tweet: Tweet, 30 | onUpdateDynamic: (component: React.ReactNode, isDone: boolean) => void, 31 | onReplyToText: (newText: string) => void, 32 | onReplaceTweetText: (newText: string) => void, 33 | ) { 34 | const tweetContent = tweet.content; 35 | 36 | // Hardcode Clothing UI for now 37 | // TODO: @Dhruv 38 | if (tweet.user.username === "@TechBroDrip") { 39 | if (tweet.media && tweet.media.length && tweet.media[0].url) { 40 | getClothingApi(tweet.media[0].url) 41 | .then((clothingData) => { 42 | onUpdateDynamic(, true); 43 | }) 44 | .catch((err) => { 45 | onReplyToText(tweetContent); 46 | console.error(err); 47 | }); 48 | } else onReplyToText(tweetContent); 49 | return; 50 | } 51 | 52 | const completion = runOpenAICompletion(openai, { 53 | model: "gpt-3.5-turbo", 54 | stream: true, 55 | messages: [ 56 | { 57 | role: "system", 58 | content: `\ 59 | You are a generative twitter bot. You will receive raw tweet data and manipulate the data to fit certain criterias. 60 | 61 | If the tweet mentions the weather of a location, call \`get_current_weather\` to show the weather UI. 62 | If the tweet mentions the stocks and/or has a ticket symbol like $AAPL, $TSLA, $META, $MSFT, $GOOG call \`get_stock_price\` to show the stock price UI. 63 | If the tweet mentions deep political issues, call the \`get_political_stance\` to show a political stance UI. 64 | 65 | Otherwise just say 'reply' and we will show a reply UI. 66 | `, 67 | }, 68 | { 69 | role: "user", 70 | content: tweetContent, 71 | }, 72 | ], 73 | functions: [ 74 | { 75 | name: "get_current_weather", 76 | description: "Get the current weather in a given location", 77 | parameters: z.object({ 78 | location: z 79 | .string() 80 | .describe("The city and state, e.g. San Francisco, London, Hawaii"), 81 | unit: z.string().describe("The unit of the temperature, e.g. C or F"), 82 | }), 83 | }, 84 | { 85 | name: "get_stock_price", 86 | description: 87 | "Get the current stock price of a given stock or currency based on it's ticker symbol. Use this to show the price to the user.", 88 | parameters: z.object({ 89 | ticker: z 90 | .string() 91 | .describe( 92 | "The name or symbol of the stock or currency. e.g. DOGE/AAPL/USD.", 93 | ), 94 | }), 95 | }, 96 | { 97 | name: "get_political_stance", 98 | description: 99 | "Summarize the tweet and any political references. 
Use this to show a political UI to the user.", 100 | parameters: z.object({ 101 | summary: z.string().describe("The political summary of the tweet"), 102 | }), 103 | }, 104 | ] as const, 105 | temperature: 0, 106 | }); 107 | 108 | completion.onTextContent((content: string, isFinal: boolean) => { 109 | // onUpdateDynamic(isFinal ? : content, isFinal); 110 | isFinal && onReplyToText(tweetContent); 111 | }); 112 | 113 | completion.onFunctionCall( 114 | "get_political_stance", 115 | async ({ summary }: { summary: string }) => { 116 | const polticalData = await getPoliticalApi(tweet.content); 117 | onUpdateDynamic(, true); 118 | }, 119 | ); 120 | 121 | completion.onFunctionCall( 122 | "get_stock_price", 123 | async ({ ticker }: { ticker: string }) => { 124 | const stockData = await getStockApi(ticker); 125 | const isStockDataBroken = 126 | !stockData || !stockData.current_price || !stockData.ticker; 127 | if (isStockDataBroken) { 128 | console.error("Stock data is broken", ticker, stockData); 129 | onReplyToText(tweetContent); 130 | } else onUpdateDynamic(, true); 131 | }, 132 | ); 133 | 134 | completion.onFunctionCall( 135 | "get_current_weather", 136 | async ({ location, unit }) => { 137 | const weatherData = await getWeatherApi(location); 138 | onUpdateDynamic(, true); 139 | }, 140 | ); 141 | } 142 | 143 | export function useClassifiedTweet( 144 | initialTweet: Tweet, 145 | enabled: boolean, 146 | ): { 147 | isClassified: boolean; 148 | isReply: boolean; 149 | tweetComponent: React.ReactNode; 150 | replacedTweetText: string; 151 | } { 152 | // local state 153 | const [isClassified, setIsClassified] = React.useState(false); 154 | const [isReply, setIsReply] = React.useState(false); 155 | const [tweetComponent, setTweetComponent] = 156 | React.useState(null); 157 | const [replacementTweetText, setReplacementTweetText] = React.useState< 158 | string | null 159 | >(null); 160 | 161 | React.useEffect(() => { 162 | if (!enabled) { 163 | setIsClassified(false); 164 | setTweetComponent(null); 165 | setReplacementTweetText(null); 166 | setIsReply(false); 167 | return; 168 | } 169 | 170 | async function doClassify() { 171 | setIsClassified(false); 172 | classifyTweetByContent( 173 | initialTweet, 174 | (component, isDone) => { 175 | setTweetComponent(component); 176 | isDone && setIsClassified(isDone); 177 | }, 178 | () => { 179 | setIsReply(true); 180 | setIsClassified(true); 181 | }, 182 | (newText) => setReplacementTweetText(newText), 183 | ); 184 | } 185 | 186 | const timeoutId = setTimeout(doClassify, 1000); 187 | return () => clearTimeout(timeoutId); 188 | }, [enabled, initialTweet]); 189 | 190 | return { 191 | isClassified, 192 | isReply, 193 | tweetComponent, 194 | replacedTweetText: 195 | replacementTweetText !== null 196 | ? 
replacementTweetText 197 | : initialTweet.content, 198 | }; 199 | } 200 | -------------------------------------------------------------------------------- /components/dui/clothing.tsx: -------------------------------------------------------------------------------- 1 | import { CardTitle, CardHeader, CardContent, Card } from "@/components/ui/card"; 2 | import { 3 | CarouselItem, 4 | CarouselContent, 5 | CarouselPrevious, 6 | CarouselNext, 7 | Carousel, 8 | } from "@/components/ui/carousel"; 9 | 10 | // type ClothingProps = { 11 | // img_src: string; 12 | // item_link: string; 13 | // description: string; 14 | // item_type: string; 15 | // }; 16 | 17 | interface TwitterCardProps { 18 | img_src: string; 19 | item_link: string; 20 | description: string; 21 | item_type: string; 22 | } 23 | 24 | const TwitterCard: React.FC = ({ 25 | img_src, 26 | item_link, 27 | description, 28 | item_type, 29 | }) => { 30 | return ( 31 |
32 | 33 | {item_type} 38 | 39 |
40 | {/*

{description}

*/} 41 |

Type: {item_type}

42 |
43 |
44 | ); 45 | }; 46 | 47 | export interface ClothingProps { 48 | data: { 49 | [category: string]: { 50 | img_src: string; 51 | item_link: string; 52 | description: string; 53 | item_type: string; 54 | }[]; 55 | }; 56 | } 57 | 58 | // TODO: @Dhruv 59 | export const Clothing = ({ props }: { props: ClothingProps }) => { 60 | return ( 61 | 62 | 63 | Get This Outfit 64 | 65 | 66 |
67 | {Object.entries(props) 68 | .filter(([category, items]) => items.length > 0) 69 | .map(([category, items], index) => ( 70 |
71 |
72 |
73 |

{category}

74 |
75 | {items.map((item: any, index: number) => ( 76 | 77 | ))} 78 |
79 |
80 | ))} 81 |
82 |
83 |
84 | ); 85 | }; 86 | -------------------------------------------------------------------------------- /components/dui/politics.tsx: -------------------------------------------------------------------------------- 1 | import { CardTitle, CardHeader, CardContent, Card } from "@/components/ui/card"; 2 | 3 | type Article = { 4 | link: string; 5 | title: string; 6 | }; 7 | 8 | type PoliticalProps = { 9 | articles: Article[]; 10 | party: string; 11 | }; 12 | 13 | export const Politics = ({ props }: { props: PoliticalProps }) => { 14 | const colorClass = { 15 | right: "text-red-600", 16 | left: "text-blue-600", 17 | center: "text-purple-600", 18 | }[props.party]; 19 | 20 | return ( 21 | 22 | 23 | 24 | Political Leaning 25 | 26 | 27 | 28 |
29 | {/* Political Leaning: */} 30 | {/* {props.party} */} 31 |
32 |
33 | {props.party === "Left" ? ( 34 |
35 |
36 | Left 37 |
38 |
Center
39 |
Right
40 |
41 | ) : props.party === "Right" ? ( 42 |
43 |
Left
44 |
Center
45 |
46 | Right 47 |
48 |
49 | ) : ( 50 |
51 |
Left
52 |
53 | Center 54 |
55 |
Right
56 |
57 | )} 58 |
59 |
60 | 61 |
62 | Related Sources: 63 | {props.articles.map((article, index) => ( 64 | 78 | ))} 79 |
80 |
81 |
82 | ); 83 | }; 84 | -------------------------------------------------------------------------------- /components/dui/reply.tsx: -------------------------------------------------------------------------------- 1 | "use client"; 2 | import { AvatarImage, AvatarFallback, Avatar } from "@/components/ui/avatar"; 3 | import { 4 | CarouselItem, 5 | CarouselContent, 6 | CarouselPrevious, 7 | CarouselNext, 8 | Carousel, 9 | } from "@/components/ui/carousel"; 10 | import { 11 | CardTitle, 12 | CardDescription, 13 | CardHeader, 14 | CardContent, 15 | CardFooter, 16 | Card, 17 | } from "@/components/ui/card"; 18 | import { Button } from "../ui/button"; 19 | import { EXPERIMENTAL_speakTextStream } from "@/lib/elevenlabs.client"; 20 | import { SpeakerWaveIcon } from "@heroicons/react/20/solid"; 21 | 22 | export function Reply({ 23 | replies, 24 | voiceId, 25 | }: { 26 | replies: Record; 27 | voiceId: string | undefined; 28 | }) { 29 | return ( 30 | 31 | 32 | 33 | ✨ Suggested Replies ✨ 34 | 35 | 36 | 37 | 38 | 39 | {Object.entries(replies).map(([id, reply]) => ( 40 | 41 |
42 |
43 |
44 | {reply} 45 |
46 | 49 | EXPERIMENTAL_speakTextStream(reply, voiceId) 50 | } 51 | /> 52 |
53 |
54 |
55 | ))} 56 |
57 | 58 | 59 |
60 |
61 |
62 | ); 63 | } 64 | -------------------------------------------------------------------------------- /components/dui/stocks.tsx: -------------------------------------------------------------------------------- 1 | import { 2 | CardTitle, 3 | CardDescription, 4 | CardHeader, 5 | CardContent, 6 | CardFooter, 7 | Card, 8 | } from "@/components/ui/card"; 9 | import { Button } from "@/components/ui/button"; 10 | import LineChart from "../ui/linechart"; 11 | 12 | type ClosePrices = { 13 | [key: string]: number; 14 | }; 15 | 16 | type StockProps = { 17 | amount_today: number; 18 | close_prices: ClosePrices; 19 | current_price: number; 20 | high: number; 21 | low: number; 22 | percent_today: number; 23 | ticker: string; 24 | volume: number; 25 | }; 26 | 27 | export const Stocks = ({ props }: { props: StockProps }) => { 28 | // Format volume with commas and prevent overflow 29 | const formattedVolume = props.volume?.toLocaleString(); 30 | const prices = props.close_prices || {}; 31 | const pricesKeys = Object.keys(prices); 32 | const hasClosePrices = pricesKeys.length > 0; 33 | let color; 34 | if (hasClosePrices) { 35 | if (props.current_price < prices[pricesKeys[0]]) { 36 | color = "text-red-500"; // Using Tailwind CSS for red color 37 | } else { 38 | color = "text-green-500"; // Using Tailwind CSS for green color 39 | } 40 | } 41 | return ( 42 | 43 | 44 | {props.ticker} 45 | 46 | 47 | {hasClosePrices && ( 48 |
49 | 50 |
51 | )} 52 |
53 | 54 | ${props.current_price?.toFixed(2)} 55 | 56 | 57 | +${props.amount_today?.toFixed(2)} ( 58 | {props.percent_today?.toFixed(2)}%) 59 | 60 |
61 |
62 | High 63 | ${props.high?.toFixed(2)} 64 |
65 |
66 | Low 67 | ${props.low?.toFixed(2)} 68 |
69 |
70 | Volume 71 | {formattedVolume} 72 |
73 |
74 |
75 | ); 76 | }; 77 | -------------------------------------------------------------------------------- /components/dui/weather.tsx: -------------------------------------------------------------------------------- 1 | /** 2 | * v0 by Vercel. 3 | * @see https://v0.dev/t/X8iksundfgI 4 | * Documentation: https://v0.dev/docs#integrating-generated-code-into-your-nextjs-app 5 | */ 6 | import { CardContent, Card } from "@/components/ui/card"; 7 | 8 | interface WeatherProps { 9 | location?: string; 10 | temperature: number; 11 | description: string; 12 | feels_like?: number; 13 | humidity?: number; 14 | wind_speed?: number; 15 | wind_direction?: string; 16 | icon?: string; 17 | } 18 | 19 | export const Weather = ({ props }: { props: WeatherProps }) => { 20 | return ( 21 | 22 | 31 | 32 |
33 |
34 |

35 | {props.location} 36 |

37 |

38 | {props.description} 39 |

40 |
41 | alt text 47 |
48 |
49 |

50 | {props.temperature}°F 51 |

52 |

53 | Feels like {props.feels_like}°F 54 |

55 |
56 |
57 |

58 | 59 | {props.humidity}% 60 |

61 |

62 | 63 | {props.wind_speed} mph 64 | 68 |

69 |
70 |
71 |
72 | ); 73 | }; 74 | 75 | function DropletIcon(props: any) { 76 | return ( 77 | 89 | 90 | 91 | ); 92 | } 93 | 94 | function SunIcon(props: any) { 95 | return ( 96 | 108 | 109 | 110 | 111 | 112 | 113 | 114 | 115 | 116 | 117 | 118 | ); 119 | } 120 | 121 | function ArrowUpIcon(props: any) { 122 | return ( 123 | 135 | 136 | 137 | 138 | ); 139 | } 140 | 141 | function WindIcon(props: any) { 142 | return ( 143 | 155 | 156 | 157 | 158 | 159 | ); 160 | } 161 | -------------------------------------------------------------------------------- /components/dynamicskeleton.tsx: -------------------------------------------------------------------------------- 1 | import { Skeleton } from "@/components/ui/skeleton"; 2 | 3 | export default function DynamicSkeleton() { 4 | return ( 5 |
6 | {/*
*/} 7 | {/*
*/} 8 | {/* */} 9 | {/*
*/} 10 |
11 | 12 |
13 | {/*
*/} 14 |
15 | ); 16 | } 17 | -------------------------------------------------------------------------------- /components/filteredimage.tsx: -------------------------------------------------------------------------------- 1 | import React from "react"; 2 | import { FilterId, getImageFilter } from "@/lib/filters"; 3 | 4 | // Initialize a cache outside of the component 5 | const clientSideImageCache = new Map(); 6 | 7 | export function FilteredImage(props: { 8 | imageUrl: string; 9 | imageText?: string; 10 | filterId: FilterId | null; 11 | altText: string; 12 | className: string; 13 | }) { 14 | // find the style 15 | const { imageUrl } = props; 16 | const styleText = getImageFilter(props.filterId); 17 | 18 | // state 19 | const [loading, setLoading] = React.useState(false); 20 | const [filteredImageUrl, setFilteredImageUrl] = React.useState( 21 | null, 22 | ); 23 | 24 | React.useEffect(() => { 25 | if (!imageUrl || !styleText) { 26 | setLoading(false); 27 | return; 28 | } 29 | 30 | // retrieve from cache, if available 31 | const cacheKey = `${imageUrl}_${styleText}`; 32 | const cachedUrl = clientSideImageCache.get(cacheKey); 33 | if (cachedUrl) { 34 | // If the image is found in the cache, use it and skip fetching 35 | setFilteredImageUrl(cachedUrl); 36 | return; 37 | } 38 | 39 | const _ac = new AbortController(); 40 | 41 | async function getFilteredImage( 42 | srcUrl: string, 43 | dstStyle: string, 44 | ac: AbortController, 45 | ) { 46 | // Set loading to true to show the skeleton 47 | setLoading(true); 48 | setFilteredImageUrl(null); 49 | 50 | // Construct the endpoint URL with provided imageUrl and filterId 51 | const endpoint = `https://spc-openai-hackathon-backend.onrender.com/filter_image?image_url=${encodeURIComponent(srcUrl)}&new_filter=${encodeURIComponent(dstStyle)}&text=${props.imageText || ""}`; 52 | 53 | // Call the endpoint to get the filtered image 54 | try { 55 | const response = await fetch(endpoint); 56 | const data = await response.json(); 57 | 58 | if (data.url && data.url?.startsWith("http")) { 59 | // Save the fetched URL to the cache 60 | clientSideImageCache.set(cacheKey, data.url); 61 | 62 | // replace the image in the UI only if the client hasn't moved on since 63 | if (!ac.signal.aborted) setFilteredImageUrl(data.url); 64 | } else if (data.image) { 65 | // Assuming data.image contains the base64-encoded image data 66 | // and assuming the MIME type is known, e.g., 'image/png'. 67 | // You might need to adjust the MIME type based on your actual data or API response. 
68 | const base64ImageUrl = `data:image/jpeg;base64,${data.image}`; 69 | 70 | // Save the base64 URL to the cache 71 | clientSideImageCache.set(cacheKey, base64ImageUrl); 72 | 73 | // Replace the image in the UI only if the client hasn't moved on since 74 | if (!ac.signal.aborted) setFilteredImageUrl(base64ImageUrl); 75 | } else { 76 | // If the response doesn't contain a valid URL or base64 image data, log the error 77 | console.error("FilteredImage: Invalid response:", data); 78 | } 79 | } catch (error) { 80 | console.error("Error fetching filtered image:", error); 81 | } 82 | 83 | // disable the loading indicator 84 | if (!ac.signal.aborted) setLoading(false); 85 | } 86 | 87 | const timeoutId = setTimeout( 88 | () => getFilteredImage(imageUrl, styleText, _ac), 89 | 0, 90 | ); 91 | return () => { 92 | _ac.abort(); 93 | setLoading(false); 94 | clearTimeout(timeoutId); 95 | }; 96 | }, [imageUrl, styleText]); 97 | 98 | // No style: pic as-is 99 | if (!styleText) 100 | return ( 101 | 102 | {props.altText} 103 | 104 | ); 105 | 106 | // Has filtered image url: render that 107 | if (filteredImageUrl) 108 | return ( 109 | 110 | {props.altText} 115 | 116 | ); 117 | 118 | return ( 119 |
120 | {loading ? ( 121 | // 122 | 123 | {props.altText} 128 | 129 | ) : ( 130 | // `Issue filtering image ${styleText}` 131 | 132 | {props.altText} 133 | 134 | )} 135 |
136 | ); 137 | } 138 | -------------------------------------------------------------------------------- /components/message.tsx: -------------------------------------------------------------------------------- 1 | "use client"; 2 | import { cn } from "@/lib/utils"; 3 | import { UserIcon } from "@heroicons/react/20/solid"; 4 | 5 | export function BotMessage({ 6 | children, 7 | className, 8 | }: { 9 | children: React.ReactNode; 10 | className?: string; 11 | }) { 12 | return ( 13 |
14 |
15 | 16 |
17 |
18 | {children} 19 |
20 |
21 | ); 22 | } 23 | -------------------------------------------------------------------------------- /components/replies.tsx: -------------------------------------------------------------------------------- 1 | import * as React from "react"; 2 | import { useQuery } from "react-query"; 3 | import { FilterId, getResponseAdjectives } from "@/lib/filters"; 4 | import { Reply } from "@/components/dui/reply"; 5 | import { AvatarImage, AvatarFallback, Avatar } from "@/components/ui/avatar"; 6 | 7 | const RENDER_ENDPOINT = "https://spc-openai-hackathon-backend.onrender.com/"; 8 | 9 | export function Replies(props: { 10 | tweetContent: string; 11 | tweetId: string; 12 | filterId: FilterId | null; 13 | voiceId: string | undefined; 14 | className: string; 15 | }) { 16 | const adjectives = getResponseAdjectives(props.filterId) || "brief"; 17 | 18 | // reactive query 19 | const { data: replies } = useQuery( 20 | `replies-${props.tweetId}-${props.filterId || "null"}`, 21 | { 22 | enabled: !!props.tweetId, 23 | queryFn: async () => { 24 | const endpoint = `${RENDER_ENDPOINT}/question_replies?text=${encodeURIComponent(props.tweetContent)}&adjective=${encodeURIComponent(adjectives)}`; 25 | const response = await fetch(endpoint); 26 | return response.json(); 27 | }, 28 | refetchOnWindowFocus: false, 29 | refetchOnMount: false, 30 | staleTime: Infinity, 31 | }, 32 | ); 33 | 34 | console.log(replies); 35 | 36 | if (replies) { 37 | // if no community notes, remove. 38 | if (replies.community_note == "None") { 39 | delete replies.community_note; 40 | } 41 | return ( 42 |
43 | 44 |
45 | ); 46 | } 47 | } 48 | -------------------------------------------------------------------------------- /components/theme-provider.tsx: -------------------------------------------------------------------------------- 1 | "use client"; 2 | 3 | import * as React from "react"; 4 | import { ThemeProvider as NextThemesProvider } from "next-themes"; 5 | import { type ThemeProviderProps } from "next-themes/dist/types"; 6 | 7 | export function ThemeProvider({ children, ...props }: ThemeProviderProps) { 8 | return {children}; 9 | } 10 | -------------------------------------------------------------------------------- /components/tweet.tsx: -------------------------------------------------------------------------------- 1 | import * as React from "react"; 2 | import { Avatar, AvatarImage } from "@/components/ui/avatar"; 3 | import { 4 | UserIcon, 5 | CheckBadgeIcon, 6 | SpeakerWaveIcon, 7 | } from "@heroicons/react/20/solid"; 8 | import { Tweet } from "@/types/tweets"; 9 | import { formatDate } from "@/lib/utils"; 10 | import { FilteredImage } from "@/components/filteredimage"; 11 | import { FilterId, getFilterVoiceId } from "@/lib/filters"; 12 | import DynamicSkeleton from "./dynamicskeleton"; 13 | import { Replies } from "@/components/replies"; 14 | import { EXPERIMENTAL_speakTextStream } from "@/lib/elevenlabs.client"; 15 | import { classifyTweetByContent } from "@/app/actions"; 16 | 17 | // set to false to always show the original picture 18 | const FILTER_ON_AVATARS = true; 19 | 20 | type Classification = { 21 | component: React.ReactNode; 22 | isDone: boolean; 23 | isReply?: boolean; 24 | }; 25 | 26 | export default function TweetComponent({ 27 | tweet, 28 | filterId, 29 | isDynamic, 30 | }: { 31 | tweet: Tweet; 32 | filterId: FilterId | null; 33 | isDynamic: boolean; 34 | }) { 35 | const reallyDynamic = isDynamic && tweet.content?.length > 5; 36 | 37 | // const DynamicComponent = isClassified ? tweetComponent : reallyDynamic ? : null; 38 | //const {isClassified, isReply, tweetComponent, replacedTweetText} = useClassifiedTweet(tweet, reallyDynamic); 39 | 40 | const [classification, setClassification] = React.useState({ 41 | component: <>, 42 | isDone: false, 43 | }); 44 | 45 | const DynamicComponent = reallyDynamic ? ( 46 | classification.isDone ? ( 47 | classification.component 48 | ) : ( 49 | 50 | ) 51 | ) : null; 52 | 53 | let replacedTweetText = tweet.content; 54 | 55 | React.useEffect(() => { 56 | // async function doClassify() { 57 | // setClassification(await classifyTweetByContent(tweet, filterId)); 58 | // } 59 | if (reallyDynamic) { 60 | classifyTweetByContent(tweet, filterId).then((result) => 61 | result.promise.then((res) => { 62 | setClassification({ 63 | component: res.component, 64 | isDone: res.isDone, 65 | isReply: res.isReply, 66 | }); 67 | }), 68 | ); 69 | } 70 | 71 | // if (reallyDynamic) { 72 | // doClassify(); 73 | // } 74 | }, [tweet, reallyDynamic, filterId]); 75 | 76 | const voiceId = getFilterVoiceId(filterId) || undefined; 77 | 78 | return ( 79 |
80 |
81 | 82 | {FILTER_ON_AVATARS && ( 83 | 89 | )} 90 | 91 | {!FILTER_ON_AVATARS && ( 92 | 96 | )} 97 | {/* Fallback icon in case Avatar component doesn't handle missing images */} 98 | {!FILTER_ON_AVATARS && ( 99 | 100 | )} 101 | 102 |
103 |
104 |
105 |

106 | {tweet.user.displayName} 107 |

108 | {tweet.user.verified && ( 109 | 113 | )} 114 | 115 | {tweet.user.username /* has @ */} · {formatDate(tweet.timestamp)} 116 | 117 | 120 | EXPERIMENTAL_speakTextStream(replacedTweetText, voiceId) 121 | } 122 | /> 123 |
124 |
125 |

{replacedTweetText}

126 |
127 | {/* Optionally render media if exists */} 128 | {tweet.media?.map((media, index) => ( 129 |
134 | {media.type === "image" ? ( 135 | 142 | ) : ( 143 | 156 | )} 157 |
158 | ))} 159 | {/* Render DynamicComponent if it exists */} 160 | {DynamicComponent && ( 161 |
162 | {DynamicComponent} 163 |
164 | )} 165 | {/* Replies */} 166 | {classification.isReply && ( 167 | 174 | )} 175 | {/* Displaying some engagement metrics */} 176 |
177 | Likes: {tweet.engagement.likes} · 178 | Replies: {tweet.engagement.replies} · 179 | Reposts: {tweet.engagement.reposts} 180 |
181 |
182 |
183 | ); 184 | } 185 | -------------------------------------------------------------------------------- /components/tweetskeleton.tsx: -------------------------------------------------------------------------------- 1 | import { Skeleton } from "@/components/ui/skeleton"; 2 | 3 | export default function TweetComponentSkeleton() { 4 | return ( 5 |
6 |
7 | 8 |
9 |
10 |
11 | 12 | 13 | 14 |
15 |
16 | 17 | 18 | 19 |
20 | {/* Placeholder for media */} 21 |
22 | 23 |
24 | {/* Placeholder for engagement metrics */} 25 |
26 | 27 |
28 |
29 |
30 | ); 31 | } 32 | -------------------------------------------------------------------------------- /components/twitter-list.tsx: -------------------------------------------------------------------------------- 1 | "use client"; 2 | import React from "react"; 3 | import { useState, useRef } from "react"; 4 | import { QueryClient, QueryClientProvider } from "react-query"; 5 | import { ReadonlyURLSearchParams, useSearchParams } from "next/navigation"; 6 | 7 | import TweetComponent from "@/components/tweet"; 8 | import { Tabs, TabsContent, TabsList, TabsTrigger } from "@/components/ui/tabs"; 9 | import { Switch } from "./ui/switch"; 10 | import { 11 | Tweet as TweetType, 12 | TwitterUser, 13 | TweetMedia, 14 | TweetEngagement, 15 | } from "@/types/tweets"; 16 | import { tweetData as hardcodedTweets } from "@/lib/test-data"; 17 | import { FilterId, FiltersList } from "@/lib/filters"; 18 | 19 | const queryClient = new QueryClient(); 20 | 21 | function getQueryTweets( 22 | queryParams: ReadonlyURLSearchParams, 23 | ): TweetType[] | null { 24 | const queryTweetsString = queryParams.get("tweets") || null; 25 | if (queryTweetsString) { 26 | try { 27 | return JSON.parse(queryTweetsString) as TweetType[]; 28 | } catch (error) { 29 | console.error("Error parsing query tweets", error, queryTweetsString); 30 | } 31 | } 32 | return null; 33 | } 34 | 35 | export default function TwitterList() { 36 | // take tweets passed as Query Params or use the initialTweets as fallback 37 | const initialTweets = getQueryTweets(useSearchParams()) || hardcodedTweets; 38 | 39 | // state 40 | const [displayedTweets, setDisplayedTweets] = useState(initialTweets); 41 | const endOfListRef = useRef(null); 42 | 43 | const [filterId, setFilterId] = useState(null); 44 | const [isDynamic, setDynamic] = useState(false); 45 | 46 | // useEffect(() => { 47 | // // Scroll to the bottom of the list whenever displayedTweets changes 48 | // // NOTE: disabled because if does on the first scroll when embedding the page on Twitter 49 | // // endOfListRef.current?.scrollIntoView({ behavior: "smooth" }); 50 | // }, [displayedTweets, filterId]); 51 | 52 | return ( 53 | 54 |
55 | {/* Options Switcher */} 56 |
57 | 58 | 59 | {FiltersList.map((filter, index) => ( 60 | setFilterId(filter.id)} 64 | > 65 | {filter.name} 66 | 67 | ))} 68 | 69 | 70 |
71 |
Dynamic UI
72 | setDynamic(!isDynamic)} /> 73 |
74 |
75 | 76 | {displayedTweets.length > 0 ? ( 77 | displayedTweets.map((tweet, index) => ( 78 | 84 | )) 85 | ) : ( 86 |

87 | No tweets to display 88 |

89 | )} 90 | {/*
91 | 92 |
*/} 93 |
94 | {/*
95 | 96 |
*/} 97 |
98 | 99 | ); 100 | } 101 | -------------------------------------------------------------------------------- /components/ui/avatar.tsx: -------------------------------------------------------------------------------- 1 | "use client"; 2 | 3 | import * as React from "react"; 4 | import * as AvatarPrimitive from "@radix-ui/react-avatar"; 5 | 6 | import { cn } from "@/lib/utils"; 7 | 8 | const Avatar = React.forwardRef< 9 | React.ElementRef, 10 | React.ComponentPropsWithoutRef 11 | >(({ className, ...props }, ref) => ( 12 | 20 | )); 21 | Avatar.displayName = AvatarPrimitive.Root.displayName; 22 | 23 | const AvatarImage = React.forwardRef< 24 | React.ElementRef, 25 | React.ComponentPropsWithoutRef 26 | >(({ className, ...props }, ref) => ( 27 | 32 | )); 33 | AvatarImage.displayName = AvatarPrimitive.Image.displayName; 34 | 35 | const AvatarFallback = React.forwardRef< 36 | React.ElementRef, 37 | React.ComponentPropsWithoutRef 38 | >(({ className, ...props }, ref) => ( 39 | 47 | )); 48 | AvatarFallback.displayName = AvatarPrimitive.Fallback.displayName; 49 | 50 | export { Avatar, AvatarImage, AvatarFallback }; 51 | -------------------------------------------------------------------------------- /components/ui/button.tsx: -------------------------------------------------------------------------------- 1 | import * as React from "react"; 2 | import { Slot } from "@radix-ui/react-slot"; 3 | import { cva, type VariantProps } from "class-variance-authority"; 4 | 5 | import { cn } from "@/lib/utils"; 6 | 7 | const buttonVariants = cva( 8 | "inline-flex items-center justify-center whitespace-nowrap rounded-md text-sm font-medium ring-offset-background transition-colors focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-ring focus-visible:ring-offset-2 disabled:pointer-events-none disabled:opacity-50", 9 | { 10 | variants: { 11 | variant: { 12 | default: "bg-primary text-primary-foreground hover:bg-primary/90", 13 | destructive: 14 | "bg-destructive text-destructive-foreground hover:bg-destructive/90", 15 | outline: 16 | "border border-input bg-background hover:bg-accent hover:text-accent-foreground", 17 | secondary: 18 | "bg-secondary text-secondary-foreground hover:bg-secondary/80", 19 | ghost: "hover:bg-accent hover:text-accent-foreground", 20 | link: "text-primary underline-offset-4 hover:underline", 21 | }, 22 | size: { 23 | default: "h-10 px-4 py-2", 24 | sm: "h-9 rounded-md px-3", 25 | lg: "h-11 rounded-md px-8", 26 | icon: "h-10 w-10", 27 | }, 28 | }, 29 | defaultVariants: { 30 | variant: "default", 31 | size: "default", 32 | }, 33 | }, 34 | ); 35 | 36 | export interface ButtonProps 37 | extends React.ButtonHTMLAttributes, 38 | VariantProps { 39 | asChild?: boolean; 40 | } 41 | 42 | const Button = React.forwardRef( 43 | ({ className, variant, size, asChild = false, ...props }, ref) => { 44 | const Comp = asChild ? Slot : "button"; 45 | return ( 46 | 51 | ); 52 | }, 53 | ); 54 | Button.displayName = "Button"; 55 | 56 | export { Button, buttonVariants }; 57 | -------------------------------------------------------------------------------- /components/ui/card.tsx: -------------------------------------------------------------------------------- 1 | import * as React from "react"; 2 | 3 | import { cn } from "@/lib/utils"; 4 | 5 | const Card = React.forwardRef< 6 | HTMLDivElement, 7 | React.HTMLAttributes 8 | >(({ className, ...props }, ref) => ( 9 |
17 | )); 18 | Card.displayName = "Card"; 19 | 20 | const CardHeader = React.forwardRef< 21 | HTMLDivElement, 22 | React.HTMLAttributes 23 | >(({ className, ...props }, ref) => ( 24 |
29 | )); 30 | CardHeader.displayName = "CardHeader"; 31 | 32 | const CardTitle = React.forwardRef< 33 | HTMLParagraphElement, 34 | React.HTMLAttributes 35 | >(({ className, ...props }, ref) => ( 36 |

44 | )); 45 | CardTitle.displayName = "CardTitle"; 46 | 47 | const CardDescription = React.forwardRef< 48 | HTMLParagraphElement, 49 | React.HTMLAttributes 50 | >(({ className, ...props }, ref) => ( 51 |

56 | )); 57 | CardDescription.displayName = "CardDescription"; 58 | 59 | const CardContent = React.forwardRef< 60 | HTMLDivElement, 61 | React.HTMLAttributes 62 | >(({ className, ...props }, ref) => ( 63 |

64 | )); 65 | CardContent.displayName = "CardContent"; 66 | 67 | const CardFooter = React.forwardRef< 68 | HTMLDivElement, 69 | React.HTMLAttributes 70 | >(({ className, ...props }, ref) => ( 71 |
76 | )); 77 | CardFooter.displayName = "CardFooter"; 78 | 79 | export { 80 | Card, 81 | CardHeader, 82 | CardFooter, 83 | CardTitle, 84 | CardDescription, 85 | CardContent, 86 | }; 87 | -------------------------------------------------------------------------------- /components/ui/carousel.tsx: -------------------------------------------------------------------------------- 1 | "use client"; 2 | 3 | import * as React from "react"; 4 | import useEmblaCarousel, { 5 | type UseEmblaCarouselType, 6 | } from "embla-carousel-react"; 7 | import { ArrowLeft, ArrowRight } from "lucide-react"; 8 | 9 | import { cn } from "@/lib/utils"; 10 | import { Button } from "@/components/ui/button"; 11 | 12 | type CarouselApi = UseEmblaCarouselType[1]; 13 | type UseCarouselParameters = Parameters; 14 | type CarouselOptions = UseCarouselParameters[0]; 15 | type CarouselPlugin = UseCarouselParameters[1]; 16 | 17 | type CarouselProps = { 18 | opts?: CarouselOptions; 19 | plugins?: CarouselPlugin; 20 | orientation?: "horizontal" | "vertical"; 21 | setApi?: (api: CarouselApi) => void; 22 | }; 23 | 24 | type CarouselContextProps = { 25 | carouselRef: ReturnType[0]; 26 | api: ReturnType[1]; 27 | scrollPrev: () => void; 28 | scrollNext: () => void; 29 | canScrollPrev: boolean; 30 | canScrollNext: boolean; 31 | } & CarouselProps; 32 | 33 | const CarouselContext = React.createContext(null); 34 | 35 | function useCarousel() { 36 | const context = React.useContext(CarouselContext); 37 | 38 | if (!context) { 39 | throw new Error("useCarousel must be used within a "); 40 | } 41 | 42 | return context; 43 | } 44 | 45 | const Carousel = React.forwardRef< 46 | HTMLDivElement, 47 | React.HTMLAttributes & CarouselProps 48 | >( 49 | ( 50 | { 51 | orientation = "horizontal", 52 | opts, 53 | setApi, 54 | plugins, 55 | className, 56 | children, 57 | ...props 58 | }, 59 | ref, 60 | ) => { 61 | const [carouselRef, api] = useEmblaCarousel( 62 | { 63 | ...opts, 64 | axis: orientation === "horizontal" ? "x" : "y", 65 | }, 66 | plugins, 67 | ); 68 | const [canScrollPrev, setCanScrollPrev] = React.useState(false); 69 | const [canScrollNext, setCanScrollNext] = React.useState(false); 70 | 71 | const onSelect = React.useCallback((api: CarouselApi) => { 72 | if (!api) { 73 | return; 74 | } 75 | 76 | setCanScrollPrev(api.canScrollPrev()); 77 | setCanScrollNext(api.canScrollNext()); 78 | }, []); 79 | 80 | const scrollPrev = React.useCallback(() => { 81 | api?.scrollPrev(); 82 | }, [api]); 83 | 84 | const scrollNext = React.useCallback(() => { 85 | api?.scrollNext(); 86 | }, [api]); 87 | 88 | const handleKeyDown = React.useCallback( 89 | (event: React.KeyboardEvent) => { 90 | if (event.key === "ArrowLeft") { 91 | event.preventDefault(); 92 | scrollPrev(); 93 | } else if (event.key === "ArrowRight") { 94 | event.preventDefault(); 95 | scrollNext(); 96 | } 97 | }, 98 | [scrollPrev, scrollNext], 99 | ); 100 | 101 | React.useEffect(() => { 102 | if (!api || !setApi) { 103 | return; 104 | } 105 | 106 | setApi(api); 107 | }, [api, setApi]); 108 | 109 | React.useEffect(() => { 110 | if (!api) { 111 | return; 112 | } 113 | 114 | onSelect(api); 115 | api.on("reInit", onSelect); 116 | api.on("select", onSelect); 117 | 118 | return () => { 119 | api?.off("select", onSelect); 120 | }; 121 | }, [api, onSelect]); 122 | 123 | return ( 124 | 137 |
145 | {children} 146 |
147 |
148 | ); 149 | }, 150 | ); 151 | Carousel.displayName = "Carousel"; 152 | 153 | const CarouselContent = React.forwardRef< 154 | HTMLDivElement, 155 | React.HTMLAttributes 156 | >(({ className, ...props }, ref) => { 157 | const { carouselRef, orientation } = useCarousel(); 158 | 159 | return ( 160 |
161 |
170 |
171 | ); 172 | }); 173 | CarouselContent.displayName = "CarouselContent"; 174 | 175 | const CarouselItem = React.forwardRef< 176 | HTMLDivElement, 177 | React.HTMLAttributes 178 | >(({ className, ...props }, ref) => { 179 | const { orientation } = useCarousel(); 180 | 181 | return ( 182 |
193 | ); 194 | }); 195 | CarouselItem.displayName = "CarouselItem"; 196 | 197 | const CarouselPrevious = React.forwardRef< 198 | HTMLButtonElement, 199 | React.ComponentProps 200 | >(({ className, variant = "outline", size = "icon", ...props }, ref) => { 201 | const { orientation, scrollPrev, canScrollPrev } = useCarousel(); 202 | 203 | return ( 204 | 222 | ); 223 | }); 224 | CarouselPrevious.displayName = "CarouselPrevious"; 225 | 226 | const CarouselNext = React.forwardRef< 227 | HTMLButtonElement, 228 | React.ComponentProps 229 | >(({ className, variant = "outline", size = "icon", ...props }, ref) => { 230 | const { orientation, scrollNext, canScrollNext } = useCarousel(); 231 | 232 | return ( 233 | 251 | ); 252 | }); 253 | CarouselNext.displayName = "CarouselNext"; 254 | 255 | export { 256 | type CarouselApi, 257 | Carousel, 258 | CarouselContent, 259 | CarouselItem, 260 | CarouselPrevious, 261 | CarouselNext, 262 | }; 263 | -------------------------------------------------------------------------------- /components/ui/linechart.tsx: -------------------------------------------------------------------------------- 1 | import React from "react"; 2 | 3 | interface LineChartData { 4 | [key: string]: number; 5 | } 6 | 7 | interface LineChartProps { 8 | data: LineChartData; 9 | width?: number; 10 | height?: number; 11 | } 12 | 13 | // Helper function to map data points to SVG coordinates 14 | function scale(data: LineChartData, width: number, height: number) { 15 | const values = Object.values(data); 16 | const dates = Object.keys(data).map((date) => new Date(date).getTime()); 17 | const minX = Math.min(...dates); 18 | const maxX = Math.max(...dates); 19 | const minY = Math.min(...values); 20 | const maxY = Math.max(...values); 21 | 22 | // Scale the dates and values to fit the SVG dimensions 23 | const xScale = (date: string) => 24 | ((new Date(date).getTime() - minX) / (maxX - minX)) * width; 25 | const yScale = (value: number) => 26 | height - ((value - minY) / (maxY - minY)) * height; 27 | 28 | return { xScale, yScale }; 29 | } 30 | 31 | const LineChart: React.FC = ({ 32 | data, 33 | width = 600, 34 | height = 300, 35 | }) => { 36 | const { xScale, yScale } = scale(data, width, height); 37 | const dates = Object.keys(data); 38 | const firstValue = data[dates[0]]; 39 | const lastValue = data[dates[dates.length - 1]]; 40 | 41 | // Determine line color based on comparison of first and last data points 42 | const lineColor = lastValue > firstValue ? "green" : "red"; 43 | 44 | // Create the 'd' attribute for the element 45 | let pathD = `M ${xScale(dates[0])} ${yScale(data[dates[0]])}`; 46 | for (let i = 1; i < dates.length; i++) { 47 | pathD += ` L ${xScale(dates[i])} ${yScale(data[dates[i]])}`; 48 | } 49 | 50 | return ( 51 | 52 | 53 | 54 | ); 55 | }; 56 | 57 | export default LineChart; 58 | -------------------------------------------------------------------------------- /components/ui/skeleton.tsx: -------------------------------------------------------------------------------- 1 | import { cn } from "@/lib/utils"; 2 | 3 | function Skeleton({ 4 | className, 5 | ...props 6 | }: React.HTMLAttributes) { 7 | return ( 8 |
12 | ); 13 | } 14 | 15 | export { Skeleton }; 16 | -------------------------------------------------------------------------------- /components/ui/spinner.tsx: -------------------------------------------------------------------------------- 1 | "use client"; 2 | 3 | export const spinner = ( 4 | 14 | 15 | 16 | ); 17 | -------------------------------------------------------------------------------- /components/ui/switch.tsx: -------------------------------------------------------------------------------- 1 | "use client"; 2 | 3 | import * as React from "react"; 4 | import * as SwitchPrimitives from "@radix-ui/react-switch"; 5 | 6 | import { cn } from "@/lib/utils"; 7 | 8 | const Switch = React.forwardRef< 9 | React.ElementRef, 10 | React.ComponentPropsWithoutRef 11 | >(({ className, ...props }, ref) => ( 12 | 20 | 25 | 26 | )); 27 | Switch.displayName = SwitchPrimitives.Root.displayName; 28 | 29 | export { Switch }; 30 | -------------------------------------------------------------------------------- /components/ui/tabs.tsx: -------------------------------------------------------------------------------- 1 | "use client"; 2 | 3 | import * as React from "react"; 4 | import * as TabsPrimitive from "@radix-ui/react-tabs"; 5 | 6 | import { cn } from "@/lib/utils"; 7 | 8 | const Tabs = TabsPrimitive.Root; 9 | 10 | const TabsList = React.forwardRef< 11 | React.ElementRef, 12 | React.ComponentPropsWithoutRef 13 | >(({ className, ...props }, ref) => ( 14 | 22 | )); 23 | TabsList.displayName = TabsPrimitive.List.displayName; 24 | 25 | const TabsTrigger = React.forwardRef< 26 | React.ElementRef, 27 | React.ComponentPropsWithoutRef 28 | >(({ className, ...props }, ref) => ( 29 | 37 | )); 38 | TabsTrigger.displayName = TabsPrimitive.Trigger.displayName; 39 | 40 | const TabsContent = React.forwardRef< 41 | React.ElementRef, 42 | React.ComponentPropsWithoutRef 43 | >(({ className, ...props }, ref) => ( 44 | 52 | )); 53 | TabsContent.displayName = TabsPrimitive.Content.displayName; 54 | 55 | export { Tabs, TabsList, TabsTrigger, TabsContent }; 56 | -------------------------------------------------------------------------------- /lib/data.ts: -------------------------------------------------------------------------------- 1 | const RENDER_ENDPOINT = "https://spc-openai-hackathon-backend.onrender.com/"; 2 | 3 | export async function getWeatherApi(location: string) { 4 | const endpoint = `${RENDER_ENDPOINT}/text_to_weather?location=${encodeURIComponent(location)}`; 5 | const response = await fetch(endpoint); 6 | return response.json(); 7 | } 8 | 9 | export async function getStockApi(ticker: string) { 10 | const endpoint = `${RENDER_ENDPOINT}/text_to_finance_data?ticker=${encodeURIComponent(ticker)}`; 11 | const response = await fetch(endpoint); 12 | return response.json(); 13 | } 14 | 15 | export async function getPoliticalApi(content: string) { 16 | const endpoint = `${RENDER_ENDPOINT}/text_to_politics?text=${encodeURIComponent(content)}`; 17 | const response = await fetch(endpoint); 18 | return response.json(); 19 | } 20 | 21 | export async function getClothingApi(content: string) { 22 | const endpoint = `${RENDER_ENDPOINT}/get_clothing?url=${encodeURIComponent(content)}`; 23 | const response = await fetch(endpoint); 24 | return response.json(); 25 | } 26 | 27 | export async function getReplyApi(content: string, adjectives: string) { 28 | const endpoint = `${RENDER_ENDPOINT}/question_replies?text=${encodeURIComponent(content)}&adjective=${encodeURIComponent(adjectives)}`; 29 | const response = await 
fetch(endpoint); 30 | return response.json(); 31 | } 32 | -------------------------------------------------------------------------------- /lib/elevenlabs.client.ts: -------------------------------------------------------------------------------- 1 | /* This code has been lifted (with permission) from the open-source MIT licensed big-AGI project. 2 | * https://github.com/enricoros/big-AGI 3 | */ 4 | import type { SpeechInputSchema } from "@/app/api/elevenlabs/speech/route"; 5 | 6 | export class AudioLivePlayer { 7 | private readonly audioContext: AudioContext; 8 | private readonly audioElement: HTMLAudioElement; 9 | private readonly mediaSource: MediaSource; 10 | private readonly bufferSizeLimit: number; 11 | private readonly onStart: (() => void) | null; 12 | private readonly onStop: (() => void) | null; 13 | private reader: ReadableStreamDefaultReader | null; 14 | 15 | constructor() { 16 | this.audioContext = new AudioContext(); 17 | this.audioElement = new Audio(); 18 | this.mediaSource = new MediaSource(); 19 | this.bufferSizeLimit = 5; // in seconds 20 | this.onStart = null; 21 | this.onStop = null; 22 | this.reader = null; 23 | } 24 | 25 | async EXPERIMENTAL_playStream(edgeResponse: Response) { 26 | if (this.reader) { 27 | await this.stop(); 28 | } 29 | 30 | if (!edgeResponse.body) { 31 | return; 32 | } 33 | const esgeReadableStream = edgeResponse.body; 34 | 35 | const sourceNode = this.audioContext.createMediaElementSource( 36 | this.audioElement, 37 | ); 38 | sourceNode.connect(this.audioContext.destination); 39 | 40 | const mimeType = "audio/mpeg"; 41 | this.mediaSource.addEventListener("sourceopen", async () => { 42 | const sourceBuffer: SourceBuffer = 43 | this.mediaSource.addSourceBuffer(mimeType); 44 | this.reader = esgeReadableStream.getReader(); 45 | 46 | if (this.onStart) { 47 | this.onStart(); 48 | } 49 | 50 | while (true) { 51 | const { done, value } = await this.reader.read(); 52 | if (done) { 53 | sourceBuffer.onupdateend = () => this.mediaSource.endOfStream(); 54 | break; 55 | } 56 | 57 | await new Promise((resolve) => { 58 | if (!sourceBuffer.updating) { 59 | resolve(null); 60 | } else { 61 | sourceBuffer.addEventListener("updateend", () => resolve(null), { 62 | once: true, 63 | }); 64 | } 65 | }); 66 | 67 | if (this.audioElement.buffered.length > 0) { 68 | const currentTime = this.audioElement.currentTime; 69 | const bufferedEnd = this.audioElement.buffered.end( 70 | this.audioElement.buffered.length - 1, 71 | ); 72 | const remainingBuffer = bufferedEnd - currentTime; 73 | 74 | if (remainingBuffer > this.bufferSizeLimit) { 75 | // E: just made this a bit more resilient, but not much 76 | try { 77 | // Remove old data from the buffer 78 | sourceBuffer.remove(0, currentTime - 1); 79 | await new Promise((resolve) => { 80 | sourceBuffer.addEventListener( 81 | "updateend", 82 | () => resolve(null), 83 | { once: true }, 84 | ); 85 | }); 86 | } catch (e) { 87 | console.warn("Error removing old data from the buffer:", e); 88 | } 89 | } 90 | } 91 | 92 | // Wait for the sourceBuffer to finish updating before appending new data 93 | await new Promise((resolve) => { 94 | if (!sourceBuffer.updating) { 95 | resolve(null); 96 | } else { 97 | sourceBuffer.addEventListener("updateend", () => resolve(null), { 98 | once: true, 99 | }); 100 | } 101 | }); 102 | 103 | // Append new data to the buffer 104 | sourceBuffer.appendBuffer(value); 105 | } 106 | 107 | if (this.onStop) { 108 | this.onStop(); 109 | } 110 | }); 111 | 112 | this.audioElement.src = 
URL.createObjectURL(this.mediaSource); 113 | this.audioElement.autoplay = true; 114 | } 115 | 116 | async stop() { 117 | if (this.reader) { 118 | await this.reader.cancel(); 119 | this.reader = null; 120 | this.mediaSource.endOfStream(); 121 | this.audioElement.pause(); 122 | } 123 | } 124 | 125 | // setOnStart(callback) { 126 | // this.onStart = callback; 127 | // } 128 | // 129 | // setOnStop(callback) { 130 | // this.onStop = callback; 131 | // } 132 | } 133 | 134 | export async function EXPERIMENTAL_speakTextStream( 135 | text: string, 136 | voiceId?: string, 137 | ) { 138 | if (!text?.trim()) return; 139 | 140 | try { 141 | const edgeResponse = await frontendFetchAPIElevenLabsSpeech( 142 | text, 143 | voiceId, 144 | false, 145 | true, 146 | ); 147 | 148 | // if (!liveAudioPlayer) 149 | const liveAudioPlayer = new AudioLivePlayer(); 150 | // fire/forget 151 | void liveAudioPlayer.EXPERIMENTAL_playStream(edgeResponse); 152 | } catch (error) { 153 | // has happened once in months of testing, not sure what was the cause 154 | console.error("EXPERIMENTAL_speakTextStream:", error); 155 | } 156 | } 157 | 158 | async function frontendFetchAPIElevenLabsSpeech( 159 | text: string, 160 | voiceId: string | undefined, 161 | nonEnglish: boolean, 162 | streaming: boolean, 163 | ): Promise { 164 | // NOTE: hardcoded 1000 as a failsafe, since the API will take very long and consume lots of credits for longer texts 165 | const speechInput: SpeechInputSchema = { 166 | text: text.slice(0, 100), 167 | ...(voiceId && { voiceId }), 168 | nonEnglish, 169 | ...(streaming && { streaming: true, streamOptimization: 4 }), 170 | }; 171 | 172 | const response = await fetch("/api/elevenlabs/speech", { 173 | method: "POST", 174 | headers: { "Content-Type": "application/json" }, 175 | body: JSON.stringify(speechInput), 176 | }); 177 | 178 | if (!response.ok) { 179 | const errorData = await response.json(); 180 | throw new Error(errorData.error || errorData.message || "Unknown error"); 181 | } 182 | 183 | return response; 184 | } 185 | -------------------------------------------------------------------------------- /lib/filters.ts: -------------------------------------------------------------------------------- 1 | export type FilterId = 2 | | "anime" 3 | | "cyberpunk" 4 | | "cheese" 5 | | "candy" 6 | | "disgusting"; 7 | 8 | // Reimagine the following prompt if it were filtered like ${filter}: ${prompt} 9 | export const Filters: { 10 | [id in FilterId]: { 11 | name: string; 12 | imageFilter: string; 13 | responseAdjectives: string; 14 | voiceId?: string; 15 | }; 16 | } = { 17 | anime: { 18 | name: "🌸 Anime", // 🌸🍙😸 19 | // imageFilter: 'Anime, like Demon Slayer', 20 | imageFilter: 21 | "anime with immersive visuals and emotional richness, preserving the subject", 22 | // imageFilter: 'shojo anime style, in soft colors, flowery details, and use of color to evoke emotion', 23 | responseAdjectives: "kawaii, cute", 24 | voiceId: "MF3mGyEYCl7XYWbV9V6O", 25 | }, 26 | cyberpunk: { 27 | name: "🌆 Cyberpunk", 28 | // imageFilter: 'Cyberpunk, like Blade Runner', 29 | imageFilter: 30 | "cyberpunk style with neon-drenched aesthetic, mirroring Blade Runner' dystopian future", 31 | responseAdjectives: "gen-z, braindead", 32 | voiceId: "Pqfiihpuz4Fl2QsVF9rg", // Elon Musk 33 | }, 34 | candy: { 35 | // Perfect 36 | name: "🦄", 37 | imageFilter: 38 | "unicorns in a dreamy world made of pink cotton candy, preserving the subjects", 39 | responseAdjectives: "enchanting, whimsical, fluffy", 40 | voiceId: "jBpfuIE2acCO8z3wKNLl", 41 | }, 42 | 
cheese: { 43 | name: "🧀", 44 | imageFilter: 45 | "cheese fantasy with subjects preserved, set against richly textured, colorful cheese-themed backdrops", 46 | responseAdjectives: "cheesy, cheesy, as if spoken by a cheese lover", 47 | voiceId: "ZQe5CZNOzWyzPSCn5a3c", 48 | }, 49 | disgusting: { 50 | // Perfect 51 | name: "🤮", 52 | imageFilter: "disgusting, gruesome, repulsive, icky", 53 | responseAdjectives: "disgusting, gross, repulsive", 54 | voiceId: "N2lVS1w4EtoT3dr4eOWO", 55 | }, 56 | }; 57 | 58 | // used by the UI to display the filters list up top 59 | export const FiltersList: { name: string; id: FilterId | null }[] = Object.keys( 60 | Filters, 61 | ).map((key) => ({ 62 | name: Filters[key as FilterId].name, 63 | id: key as FilterId, 64 | })); 65 | FiltersList.unshift({ name: "Serious", id: null }); 66 | 67 | export function getImageFilter(filterId: FilterId | null): string | null { 68 | return filterId ? Filters[filterId].imageFilter ?? null : null; 69 | } 70 | 71 | export function getResponseAdjectives( 72 | filterId: FilterId | null, 73 | ): string | null { 74 | return filterId ? Filters[filterId].responseAdjectives ?? null : null; 75 | } 76 | 77 | export function getFilterVoiceId(filterId: FilterId | null): string | null { 78 | return filterId ? Filters[filterId].voiceId ?? null : null; 79 | } 80 | -------------------------------------------------------------------------------- /lib/test-data.ts: -------------------------------------------------------------------------------- 1 | import { Tweet } from "@/types/tweets"; 2 | 3 | export async function getWeather(location: string, unit: string) { 4 | // Dummy data for demonstration 5 | const weatherData: any = { 6 | "San Francisco": { temperature: 13, description: "Rainy" }, 7 | "New York": { temperature: 25, description: "Sunny" }, 8 | London: { temperature: 15, description: "Cloudy" }, 9 | Tokyo: { temperature: 20, description: "Cloudy" }, 10 | }; 11 | 12 | // Default to 'San Francisco' if the location is not in the dummy data 13 | const weather = weatherData[location] || weatherData["San Francisco"]; 14 | 15 | // Convert temperature to Fahrenheit if unit is 'F' 16 | if (unit === "F") { 17 | weather.temperature = (weather.temperature * 9) / 5 + 32; 18 | } 19 | 20 | return { 21 | temperature: weather.temperature, 22 | description: weather.description, 23 | }; 24 | } 25 | 26 | type PoliticalStance = "right" | "left" | "center"; 27 | 28 | interface PoliticalLeaningData { 29 | stance: PoliticalStance; 30 | references: string[]; 31 | } 32 | 33 | export const getPoliticalLeaning = async ( 34 | summary: string, 35 | ): Promise => { 36 | const stances: PoliticalStance[] = ["right", "left", "center"]; 37 | const randomStance = stances[Math.floor(Math.random() * stances.length)]; // Randomly selects a stance 38 | 39 | const references = [ 40 | `https://example.com/source${Math.floor(Math.random() * 10) + 1}`, 41 | `https://anotherexample.com/source${Math.floor(Math.random() * 10) + 1}`, 42 | ]; // Generates two random references 43 | 44 | // Simulating a delay to mimic asynchronous API call 45 | return new Promise((resolve) => { 46 | setTimeout(() => { 47 | resolve({ 48 | stance: randomStance, 49 | references: references, 50 | }); 51 | }, 500); // Simulate a 500ms delay 52 | }); 53 | }; 54 | 55 | interface StocksProps { 56 | ticker: string; 57 | amount_today: number; 58 | percent_today: number; 59 | current_price: number; 60 | high: number; 61 | low: number; 62 | volume: number; 63 | close_prices: number; 64 | } 65 | 66 | // Mock function to 
simulate fetching stock data 67 | export const getStockData = (ticker: string): Promise => { 68 | // Simulating a delay to mimic asynchronous API call 69 | return new Promise((resolve) => { 70 | setTimeout(() => { 71 | resolve({ 72 | ticker, 73 | amount_today: Math.random() * 10, // Random change in price for today 74 | percent_today: Math.random() * 5, // Random percent change for today 75 | current_price: 100 + Math.random() * 50, // Random current price 76 | high: 150 + Math.random() * 10, // Random high price 77 | low: 90 + Math.random() * 10, // Random low price 78 | volume: 1000000 + Math.floor(Math.random() * 1000000), // Random volume 79 | close_prices: 100 + Math.random() * 50, 80 | }); 81 | }, 500); // Simulate a 500ms delay 82 | }); 83 | }; 84 | 85 | export const tweetData: Tweet[] = [ 86 | // Weather in SF 87 | { 88 | id: "1756212083501502859", 89 | link: "/BasedBeffJezos/status/1756212083501502859", 90 | user: { 91 | username: "@BasedBeffJezos", 92 | displayName: "Beff Jezos — e/acc ", 93 | avatarUrl: 94 | "https://pbs.twimg.com/profile_images/1663808077693427716/w8CIR0hM_x96.jpg", 95 | verified: false, 96 | }, 97 | content: 98 | "SF is low key goated when low-variance ideal weather year round is the vibe", 99 | engagement: { replies: "13", reposts: "2", likes: "90", views: "12K" }, 100 | timestamp: "2024-02-10T07:02:44.000Z", 101 | }, 102 | // { 103 | // "id": "1719952696869003550", 104 | // "link": "/Scobleizer/status/1763801233339634089", 105 | // "user": { 106 | // "username": "@Scobleizer", 107 | // "displayName": "Robert Scoble", 108 | // "avatarUrl": "https://pbs.twimg.com/profile_images/1719781327527133184/oOgZZpVK_x96.jpg", 109 | // "verified": false 110 | // }, 111 | // "content": "The streets of San Francisco are glistening under the steady drizzle today. Rainy days like this bring a certain charm to the city, making every coffee shop corner a cozy refuge. ☔🌁 #SanFranciscoRain", 112 | // "engagement": { 113 | // "replies": "12", 114 | // "reposts": "5", 115 | // "likes": "87", 116 | // "views": "10K" 117 | // }, 118 | // "timestamp": "2024-03-02T10:15:30.000Z" 119 | // }, 120 | 121 | // Stock - Tesla 122 | { 123 | id: "1763801233339634099", 124 | link: "/Scobleizer/status/1763801233339634099", 125 | user: { 126 | username: "@Scobleizer", 127 | displayName: "Robert Scoble", 128 | avatarUrl: 129 | "https://pbs.twimg.com/profile_images/1719781327527133184/oOgZZpVK_x96.jpg", 130 | verified: false, 131 | }, 132 | content: 133 | "Watching $TSLA stock closely today as the market reacts to the latest electric vehicle innovations announced. The future of transportation is electric, and it's thrilling to see how Tesla leads the charge. 🔌🚗 #Tesla #StockMarket", 134 | engagement: { 135 | replies: "25", 136 | reposts: "15", 137 | likes: "130", 138 | views: "15K", 139 | }, 140 | timestamp: "2024-03-02T12:30:45.000Z", 141 | }, 142 | 143 | // Image + Replies 144 | { 145 | id: "1763656445545849004", 146 | link: "/USFWS/status/1763656445545849004", 147 | user: { 148 | username: "@USFWS", 149 | displayName: "U.S. Fish and Wildlife Service", 150 | avatarUrl: 151 | "https://pbs.twimg.com/profile_images/877224343352348672/TIrrbqtT_x96.jpg", 152 | verified: false, 153 | }, 154 | content: 155 | "Do you see it? Not the great blue heron. 
\n\nPhotos: used with permission by Jacob Hall", 156 | media: [ 157 | { 158 | type: "image", 159 | url: "https://pbs.twimg.com/media/GHnDnYOWIAACs-7?format=jpg&name=medium", 160 | }, 161 | ], 162 | engagement: { 163 | replies: "55", 164 | reposts: "312", 165 | likes: "1.9K", 166 | views: "144K", 167 | }, 168 | timestamp: "2024-03-01T20:03:58.000Z", 169 | }, 170 | 171 | // Politics 172 | { 173 | id: "1764318863255097531", 174 | link: "/elonmusk/status/1764318863255097531", 175 | user: { 176 | username: "@elonmusk", 177 | displayName: "Elon Musk", 178 | avatarUrl: 179 | "https://pbs.twimg.com/profile_images/1683325380441128960/yRsRRjGO_x96.jpg", 180 | verified: false, 181 | }, 182 | content: 183 | "Three things America needs:\n\n- Secure borders\n- Safe cities\n- Sensible spending", 184 | media: [ 185 | { 186 | type: "image", 187 | url: "https://pbs.twimg.com/media/GHwPy20XUAAQhOy?format=jpg&name=medium", 188 | }, 189 | ], 190 | engagement: { replies: "10K", reposts: "17K", likes: "95K", views: "13M" }, 191 | timestamp: "2024-03-03T15:56:11.000Z", 192 | }, 193 | 194 | // Beer machine 195 | { 196 | id: "1763813991023853587", 197 | link: "/Rainmaker1973/status/1763813991023853587", 198 | user: { 199 | username: "@Rainmaker1973", 200 | displayName: "Massimo", 201 | avatarUrl: 202 | "https://pbs.twimg.com/profile_images/914888589670043654/KVvwjcWA_x96.jpg", 203 | verified: false, 204 | }, 205 | content: "Self pouring beer machine \n\n[ superscientific]", 206 | media: [ 207 | { 208 | type: "image", 209 | url: "https://pbs.twimg.com/amplify_video_thumb/1763058979414896640/img/fvPXvIdZvozkK8Gt?format=jpg&name=large", 210 | }, 211 | ], 212 | engagement: { 213 | replies: "46", 214 | reposts: "62", 215 | likes: "529", 216 | views: "107K", 217 | }, 218 | timestamp: "2024-03-02T06:30:00.000Z", 219 | }, 220 | 221 | // fillers 222 | { 223 | id: "1763703079411531868", 224 | link: "/EMostaque/status/1763703079411531868", 225 | user: { 226 | username: "@EMostaque", 227 | displayName: "Emad", 228 | avatarUrl: 229 | "https://pbs.twimg.com/profile_images/1762879891060473856/9DqabWPQ_x96.jpg", 230 | verified: false, 231 | }, 232 | content: "", 233 | media: [ 234 | { 235 | type: "image", 236 | url: "https://pbs.twimg.com/media/GHnui_FXUAA57HB?format=jpg&name=medium", 237 | }, 238 | ], 239 | engagement: { 240 | replies: "6", 241 | reposts: "12", 242 | likes: "122", 243 | views: "9.6K", 244 | }, 245 | timestamp: "2024-03-01T23:09:16.000Z", 246 | }, 247 | { 248 | id: "1763413922483150970", 249 | link: "/BUILD_OR_DIE/status/1763413922483150970", 250 | user: { 251 | username: "@BUILD_OR_DIE", 252 | displayName: "BUILD OR DIE", 253 | avatarUrl: 254 | "https://pbs.twimg.com/profile_images/1736573607613014016/fSsKNAmm_x96.jpg", 255 | verified: false, 256 | }, 257 | content: 258 | ".\n@elonmusk\n's Engineering Design Process\n\n1. Make requirements less dumb \n2. Delete the part or process\n3. Simplify or optimize\n4. Accelerate cycle-time\n5. 
Automate\n\n\"The most common error of a smart engineer is to optimise a thing that should not exist...”", 259 | media: [ 260 | { 261 | type: "image", 262 | url: "https://pbs.twimg.com/ext_tw_video_thumb/1763413856976605185/pu/img/92QZumlU_wQ2oz-m?format=jpg&name=medium&eeid=3", 263 | }, 264 | ], 265 | engagement: { 266 | replies: "19", 267 | reposts: "389", 268 | likes: "2K", 269 | views: "202K", 270 | }, 271 | timestamp: "2024-03-01T04:00:16.000Z", 272 | }, 273 | 274 | // { 275 | // "id": "1726679121126883442", 276 | // "link": "/TechBroDrip/status/1726679121126883442", 277 | // "user": { 278 | // "username": "@TechBroDrip", 279 | // "displayName": "Dripped Out Technology Brothers", 280 | // "avatarUrl": "https://pbs.twimg.com/profile_images/1529556000465793024/wiaHs-BI_x96.jpg", 281 | // "verified": false 282 | // }, 283 | // "content": "Emmett Shear (CEO of OpenAI + co-founder and former CEO of Twitch)", 284 | // "media": [{"type": "image", "url": "https://pbs.twimg.com/media/F_ZlVK3asAAw81i?format=jpg&name=medium"}], 285 | // "engagement": {"replies": "17", "reposts": "4", "likes": "160", "views": "22K"}, 286 | // "timestamp": "2023-11-20T19:09:17.000Z" 287 | // }, 288 | // { 289 | // "id": "1726385797656379782", 290 | // "link": "/TechBroDrip/status/1726385797656379782", 291 | // "user": { 292 | // "username": "@TechBroDrip", 293 | // "displayName": "Dripped Out Technology Brothers", 294 | // "avatarUrl": "https://pbs.twimg.com/profile_images/1529556000465793024/wiaHs-BI_x96.jpg", 295 | // "verified": false 296 | // }, 297 | // "content": "Sam Altman (co-founder of OpenAI and former president of Y Combinator)", 298 | // "media": [{"type": "image", "url": "https://pbs.twimg.com/media/F_VauwbXYAA-o3Z?format=jpg&name=medium"}], 299 | // "engagement": {"replies": "", "reposts": "8", "likes": "257", "views": "26K"}, 300 | // "timestamp": "2023-11-19T23:43:43.000Z" 301 | // }, 302 | 303 | { 304 | id: "1763801233339634110", 305 | link: "/frank_smith1111/status/1719952696869003550", 306 | user: { 307 | username: "@frank_smith1111", 308 | displayName: "Frank Smith", 309 | avatarUrl: 310 | "https://pbs.twimg.com/profile_images/1617740523456040961/LCMDvOCY_x96.jpg", 311 | verified: false, 312 | }, 313 | content: 314 | "As we look to the future, it's clear that policies focusing on renewable energy and climate change are not just necessary, but urgent. We must support leaders who prioritize our planet and future generations over short-term gains. 
🌍 #ClimateAction #RenewableEnergy", 315 | engagement: { 316 | replies: "45", 317 | reposts: "30", 318 | likes: "250", 319 | views: "20K", 320 | }, 321 | timestamp: "2024-03-02T15:47:22.000Z", 322 | }, 323 | 324 | { 325 | id: "1763595049881968661", 326 | link: "/IroncladDev/status/1763595049881968661", 327 | user: { 328 | username: "@IroncladDev", 329 | displayName: "IroncladDev ", 330 | avatarUrl: 331 | "https://pbs.twimg.com/profile_images/1587110787277103104/3jOe4QcP_x96.png", 332 | verified: false, 333 | }, 334 | content: "it's friday let's get it", 335 | media: [ 336 | { 337 | type: "image", 338 | url: "https://pbs.twimg.com/media/GHh_dRrXIAAGjdI?format=png&name=small", 339 | }, 340 | ], 341 | engagement: { 342 | replies: "3", 343 | reposts: "3", 344 | likes: "43", 345 | views: "1.1K", 346 | }, 347 | timestamp: "2024-03-01T16:00:00.000Z", 348 | }, 349 | { 350 | id: "1763723097440403931", 351 | link: "/thenetrunna/status/1763723097440403931", 352 | user: { 353 | username: "@thenetrunna", 354 | displayName: "Netrunner — e/acc", 355 | avatarUrl: 356 | "https://pbs.twimg.com/profile_images/1762929887642333184/rJSX-5t__x96.jpg", 357 | verified: false, 358 | }, 359 | content: 360 | "what did ilya see? what did Karpathy see? what did Elon see? what did Logan see? what did roon see? what did jimmy apples see? what did netrunner see? what did the AGI see?", 361 | engagement: { 362 | replies: "50", 363 | reposts: "22", 364 | likes: "253", 365 | views: "34K", 366 | }, 367 | timestamp: "2024-03-02T00:28:49.000Z", 368 | }, 369 | { 370 | id: "1761945745119224130", 371 | link: "/BasedBeffJezos/status/1761945745119224130", 372 | user: { 373 | username: "@BasedBeffJezos", 374 | displayName: "Beff Jezos — e/acc ", 375 | avatarUrl: 376 | "https://pbs.twimg.com/profile_images/1663808077693427716/w8CIR0hM_x96.jpg", 377 | verified: false, 378 | }, 379 | content: "SF weather is unreal these days tbh.", 380 | engagement: { replies: "9", reposts: "1", likes: "161", views: "18K" }, 381 | timestamp: "2024-02-26T02:46:15.000Z", 382 | }, 383 | { 384 | id: "1763666653748105404", 385 | link: "/AravSrinivas/status/1763666653748105404", 386 | user: { 387 | username: "@AravSrinivas", 388 | displayName: "Aravind Srinivas", 389 | avatarUrl: 390 | "https://pbs.twimg.com/profile_images/1735494334471086080/dQ90FPoq_x96.jpg", 391 | verified: false, 392 | }, 393 | content: 394 | '“The problem that Google faces today relative to the likes of OpenAI and Perplexity is very similar to what we did to Microsoft 20 years ago. 
What ChatGPT in particular has made us realize is that many of the tasks that we have so far labeled "search" and where we click on blue…', 395 | engagement: { 396 | replies: "31", 397 | reposts: "52", 398 | likes: "475", 399 | views: "223K", 400 | }, 401 | timestamp: "2024-03-01T20:44:32.000Z", 402 | }, 403 | 404 | // Clothing 405 | { 406 | id: "1762494379540001174", 407 | link: "/TechBroDrip/status/1762494379540001174", 408 | user: { 409 | username: "@TechBroDrip", 410 | displayName: "Dripped Out Technology Brothers", 411 | avatarUrl: 412 | "https://pbs.twimg.com/profile_images/1529556000465793024/wiaHs-BI_x96.jpg", 413 | verified: false, 414 | }, 415 | content: "Mark Zuckerberg (founder of Meta)", 416 | media: [ 417 | { 418 | type: "image", 419 | url: "https://pbs.twimg.com/media/GHWjTBPbwAEBT0S?format=jpg&name=medium", 420 | }, 421 | ], 422 | engagement: { replies: "10", reposts: "30", likes: "927", views: "63K" }, 423 | timestamp: "2024-02-27T15:06:20.000Z", 424 | }, 425 | { 426 | id: "1726516611962826894", 427 | link: "/TechBroDrip/status/1726516611962826894", 428 | user: { 429 | username: "@TechBroDrip", 430 | displayName: "Dripped Out Technology Brothers", 431 | avatarUrl: 432 | "https://pbs.twimg.com/profile_images/1529556000465793024/wiaHs-BI_x96.jpg", 433 | verified: false, 434 | }, 435 | content: "Satya Nadella (CEO of Microsoft)", 436 | media: [ 437 | { 438 | type: "image", 439 | url: "https://pbs.twimg.com/media/F_XRtQBb0AAwi24?format=jpg&name=medium", 440 | }, 441 | ], 442 | engagement: { replies: "5", reposts: "13", likes: "492", views: "26K" }, 443 | timestamp: "2023-11-20T08:23:32.000Z", 444 | }, 445 | 446 | { 447 | id: "1763726146183844008", 448 | link: "/jachiam0/status/1763726146183844008", 449 | user: { 450 | username: "@jachiam0", 451 | displayName: "Joshua Achiam ", 452 | avatarUrl: 453 | "https://pbs.twimg.com/profile_images/967329395080744960/O-MKd6Nx_x96.jpg", 454 | verified: false, 455 | }, 456 | content: 457 | '(Silicon Valley HBO 2024) Hooli\'s new flagship chat model, "Bighead," is unexpectedly political. 
Gavin Belson frantically rewrites the system prompts manually in an attempt to "fix" it.', 458 | engagement: { 459 | replies: "7", 460 | reposts: "18", 461 | likes: "245", 462 | views: "17K", 463 | }, 464 | timestamp: "2024-03-02T00:40:56.000Z", 465 | }, 466 | { 467 | id: "1763721070765342914", 468 | link: "/morqon/status/1763721070765342914", 469 | user: { 470 | username: "@morqon", 471 | displayName: "morgan —", 472 | avatarUrl: 473 | "https://pbs.twimg.com/profile_images/1712182984084803584/AnClMwYr_x96.jpg", 474 | verified: false, 475 | }, 476 | content: 477 | "openai memo to staff: elon’s allegations “do not reflect the reality of our work or mission”\n\naltman: “the attacks will keep coming”", 478 | engagement: { 479 | replies: "3", 480 | reposts: "4", 481 | likes: "71", 482 | views: "12K", 483 | }, 484 | timestamp: "2024-03-02T00:20:46.000Z", 485 | }, 486 | { 487 | id: "1763535656993608057", 488 | link: "/DeniTechh/status/1763535656993608057", 489 | user: { 490 | username: "@DeniTechh", 491 | displayName: "Deni | denitech.dev", 492 | avatarUrl: 493 | "https://pbs.twimg.com/profile_images/1727760312932139008/4tzWgbyn_x96.jpg", 494 | verified: false, 495 | }, 496 | content: 497 | "Learn to code.\n\nEven if NVIDIA CEO tells you that you don’t need to, because of AI.", 498 | media: [ 499 | { 500 | type: "image", 501 | url: "https://pbs.twimg.com/media/GHlWVQXXQAEd3ta?format=jpg&name=900x900", 502 | }, 503 | ], 504 | engagement: { 505 | replies: "129", 506 | reposts: "86", 507 | likes: "603", 508 | views: "41K", 509 | }, 510 | timestamp: "2024-03-01T12:04:00.000Z", 511 | }, 512 | ]; 513 | 514 | export const trimmedTweetData = tweetData.slice(0, 3); 515 | -------------------------------------------------------------------------------- /lib/tool-definition.ts: -------------------------------------------------------------------------------- 1 | import { z } from "zod"; 2 | 3 | /** 4 | * A tool definition contains all information required for a language model to generate tool calls. 5 | */ 6 | export interface ToolDefinition { 7 | /** 8 | * The name of the tool. 9 | * Should be understandable for language models and unique among the tools that they know. 10 | * 11 | * Note: Using generics to enable result type inference when there are multiple tool calls. 12 | */ 13 | name: NAME; 14 | 15 | /** 16 | * A optional description of what the tool does. Will be used by the language model to decide whether to use the tool. 17 | */ 18 | description?: string; 19 | 20 | /** 21 | * The schema of the input that the tool expects. The language model will use this to generate the input. 22 | * Use descriptions to make the input understandable for the language model. 
23 | */ 24 | parameters: z.Schema; 25 | } 26 | -------------------------------------------------------------------------------- /lib/utils.ts: -------------------------------------------------------------------------------- 1 | import { type ClassValue, clsx } from "clsx"; 2 | import { twMerge } from "tailwind-merge"; 3 | import { ToolDefinition } from "@/lib/tool-definition"; 4 | import { OpenAIStream } from "ai"; 5 | import type OpenAI from "openai"; 6 | import zodToJsonSchema from "zod-to-json-schema"; 7 | 8 | export function cn(...inputs: ClassValue[]) { 9 | return twMerge(clsx(inputs)); 10 | } 11 | 12 | export const formatDate = (isoString: string) => { 13 | // Placeholder for date formatting function 14 | return new Date(isoString).toLocaleDateString("en-US", { 15 | day: "numeric", 16 | month: "short", 17 | year: "numeric", 18 | }); 19 | }; 20 | 21 | const consumeStream = async (stream: ReadableStream) => { 22 | const reader = stream.getReader(); 23 | while (true) { 24 | const { done } = await reader.read(); 25 | if (done) break; 26 | } 27 | }; 28 | 29 | export function runOpenAICompletion< 30 | T extends Omit< 31 | Parameters[0], 32 | "functions" 33 | > & { 34 | functions: ToolDefinition[]; 35 | }, 36 | >(openai: OpenAI, params: T) { 37 | let text = ""; 38 | let hasFunction = false; 39 | 40 | type FunctionNames = 41 | T["functions"] extends Array ? T["functions"][number]["name"] : never; 42 | 43 | let onTextContent: (text: string, isFinal: boolean) => void = () => {}; 44 | 45 | let onFunctionCall: Record) => void> = {}; 46 | 47 | const { functions, ...rest } = params; 48 | 49 | (async () => { 50 | consumeStream( 51 | OpenAIStream( 52 | (await openai.chat.completions.create({ 53 | ...rest, 54 | stream: true, 55 | functions: functions.map((fn) => ({ 56 | name: fn.name, 57 | description: fn.description, 58 | parameters: zodToJsonSchema(fn.parameters) as Record< 59 | string, 60 | unknown 61 | >, 62 | })), 63 | })) as any, 64 | { 65 | async experimental_onFunctionCall(functionCallPayload) { 66 | hasFunction = true; 67 | onFunctionCall[ 68 | functionCallPayload.name as keyof typeof onFunctionCall 69 | ]?.(functionCallPayload.arguments as Record); 70 | }, 71 | onToken(token) { 72 | text += token; 73 | if (text.startsWith("{")) return; 74 | onTextContent(text, false); 75 | }, 76 | onFinal() { 77 | if (hasFunction) return; 78 | onTextContent(text, true); 79 | }, 80 | }, 81 | ), 82 | ); 83 | })(); 84 | 85 | return { 86 | onTextContent: ( 87 | callback: (text: string, isFinal: boolean) => void | Promise, 88 | ) => { 89 | onTextContent = callback; 90 | }, 91 | onFunctionCall: ( 92 | name: FunctionNames, 93 | callback: (args: any) => void | Promise, 94 | ) => { 95 | onFunctionCall[name] = callback; 96 | }, 97 | }; 98 | } 99 | 100 | export const runAsyncFnWithoutBlocking = ( 101 | fn: (...args: any) => Promise, 102 | ) => { 103 | fn(); 104 | }; 105 | -------------------------------------------------------------------------------- /next.config.mjs: -------------------------------------------------------------------------------- 1 | /** @type {import('next').NextConfig} */ 2 | const nextConfig = {}; 3 | 4 | export default nextConfig; 5 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "spc-openai-hackathon", 3 | "version": "0.1.0", 4 | "private": true, 5 | "scripts": { 6 | "dev": "next dev", 7 | "build": "next build", 8 | "start": "next start", 9 | 
"lint": "next lint", 10 | "prettier": "prettier --write --ignore-unknown ." 11 | }, 12 | "dependencies": { 13 | "@heroicons/react": "^2.1.1", 14 | "@radix-ui/react-avatar": "^1.0.4", 15 | "@radix-ui/react-slot": "^1.0.2", 16 | "@radix-ui/react-switch": "^1.0.3", 17 | "@radix-ui/react-tabs": "^1.0.4", 18 | "ai": "^3.0.2", 19 | "class-variance-authority": "^0.7.0", 20 | "clsx": "^2.1.0", 21 | "embla-carousel-react": "^8.0.0", 22 | "lucide-react": "^0.344.0", 23 | "next": "14.1.1", 24 | "next-themes": "^0.2.1", 25 | "openai": "^4.28.4", 26 | "react": "^18", 27 | "react-dom": "^18", 28 | "react-query": "^3.39.3", 29 | "tailwind-merge": "^2.2.1", 30 | "tailwindcss-animate": "^1.0.7", 31 | "zod": "^3.22.4" 32 | }, 33 | "devDependencies": { 34 | "@types/node": "^20", 35 | "@types/react": "^18", 36 | "@types/react-dom": "^18", 37 | "autoprefixer": "^10.0.1", 38 | "eslint": "^8", 39 | "eslint-config-next": "14.1.1", 40 | "postcss": "^8", 41 | "prettier": "^3.2.5", 42 | "prettier-plugin-tailwindcss": "^0.5.12", 43 | "tailwindcss": "^3.3.0", 44 | "typescript": "^5" 45 | } 46 | } 47 | -------------------------------------------------------------------------------- /postcss.config.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | plugins: { 3 | tailwindcss: {}, 4 | autoprefixer: {}, 5 | }, 6 | }; 7 | -------------------------------------------------------------------------------- /prettier.config.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | plugins: ["prettier-plugin-tailwindcss"], 3 | }; 4 | -------------------------------------------------------------------------------- /public/demo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gmchad/generative-x/66b2e4ef63ef44de3596258c8d594224a06c13fa/public/demo.png -------------------------------------------------------------------------------- /public/next.svg: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /public/placeholder-avatar.jpg: -------------------------------------------------------------------------------- 1 | v0 by Vercel -------------------------------------------------------------------------------- /public/suncloud.mp4: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/gmchad/generative-x/66b2e4ef63ef44de3596258c8d594224a06c13fa/public/suncloud.mp4 -------------------------------------------------------------------------------- /public/vercel.svg: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tailwind.config.ts: -------------------------------------------------------------------------------- 1 | import type { Config } from "tailwindcss"; 2 | 3 | const config = { 4 | darkMode: ["class"], 5 | content: [ 6 | "./pages/**/*.{ts,tsx}", 7 | "./components/**/*.{ts,tsx}", 8 | "./app/**/*.{ts,tsx}", 9 | "./src/**/*.{ts,tsx}", 10 | ], 11 | prefix: "", 12 | theme: { 13 | container: { 14 | center: true, 15 | padding: "2rem", 16 | screens: { 17 | "2xl": "1400px", 18 | }, 19 | }, 20 | extend: { 21 | colors: { 22 | border: "hsl(var(--border))", 23 | input: "hsl(var(--input))", 24 | ring: "hsl(var(--ring))", 25 | background: "hsl(var(--background))", 26 | 
foreground: "hsl(var(--foreground))", 27 | primary: { 28 | DEFAULT: "hsl(var(--primary))", 29 | foreground: "hsl(var(--primary-foreground))", 30 | }, 31 | secondary: { 32 | DEFAULT: "hsl(var(--secondary))", 33 | foreground: "hsl(var(--secondary-foreground))", 34 | }, 35 | destructive: { 36 | DEFAULT: "hsl(var(--destructive))", 37 | foreground: "hsl(var(--destructive-foreground))", 38 | }, 39 | muted: { 40 | DEFAULT: "hsl(var(--muted))", 41 | foreground: "hsl(var(--muted-foreground))", 42 | }, 43 | accent: { 44 | DEFAULT: "hsl(var(--accent))", 45 | foreground: "hsl(var(--accent-foreground))", 46 | }, 47 | popover: { 48 | DEFAULT: "hsl(var(--popover))", 49 | foreground: "hsl(var(--popover-foreground))", 50 | }, 51 | card: { 52 | DEFAULT: "hsl(var(--card))", 53 | foreground: "hsl(var(--card-foreground))", 54 | }, 55 | }, 56 | borderRadius: { 57 | lg: "var(--radius)", 58 | md: "calc(var(--radius) - 2px)", 59 | sm: "calc(var(--radius) - 4px)", 60 | }, 61 | keyframes: { 62 | "accordion-down": { 63 | from: { height: "0" }, 64 | to: { height: "var(--radix-accordion-content-height)" }, 65 | }, 66 | "accordion-up": { 67 | from: { height: "var(--radix-accordion-content-height)" }, 68 | to: { height: "0" }, 69 | }, 70 | }, 71 | animation: { 72 | "accordion-down": "accordion-down 0.2s ease-out", 73 | "accordion-up": "accordion-up 0.2s ease-out", 74 | }, 75 | }, 76 | }, 77 | plugins: [require("tailwindcss-animate")], 78 | } satisfies Config; 79 | 80 | export default config; 81 | -------------------------------------------------------------------------------- /tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "lib": ["dom", "dom.iterable", "esnext"], 4 | "allowJs": true, 5 | "skipLibCheck": true, 6 | "strict": true, 7 | "noEmit": true, 8 | "esModuleInterop": true, 9 | "module": "esnext", 10 | "moduleResolution": "bundler", 11 | "resolveJsonModule": true, 12 | "isolatedModules": true, 13 | "jsx": "preserve", 14 | "incremental": true, 15 | "plugins": [ 16 | { 17 | "name": "next" 18 | } 19 | ], 20 | "paths": { 21 | "@/*": ["./*"] 22 | } 23 | }, 24 | "include": ["next-env.d.ts", "**/*.ts", "**/*.tsx", ".next/types/**/*.ts"], 25 | "exclude": ["node_modules"] 26 | } 27 | -------------------------------------------------------------------------------- /types/tweets.ts: -------------------------------------------------------------------------------- 1 | export type TwitterUser = { 2 | username: string; 3 | displayName: string; 4 | verified: boolean; 5 | avatarUrl: string; 6 | }; 7 | 8 | export type TweetMedia = { 9 | type: "image" | "video"; 10 | url: string; 11 | altText?: string; 12 | }; 13 | 14 | // NOTE: updated strings, to just render as-is (with ...K, or ...M) 15 | export type TweetEngagement = { 16 | replies: string; 17 | reposts: string; 18 | likes: string; 19 | views: string; 20 | // bookmarks: string; 21 | }; 22 | 23 | export type Tweet = { 24 | id: string; 25 | link: string; 26 | user: TwitterUser; 27 | content: string; 28 | media?: TweetMedia[]; 29 | engagement: TweetEngagement; 30 | timestamp: string; // ISO 8601 format 31 | // quotedTweet?: Tweet; // For quoted tweets - NOTE: removed for simplicity 32 | }; 33 | 34 | // query: ?tweets= 35 | --------------------------------------------------------------------------------