├── .env.template ├── .github └── workflows │ └── master_live-ai-interview.yml ├── .gitignore ├── .npmrc ├── README.md ├── chatEngine.js ├── index.html ├── package-lock.json ├── package.json ├── public ├── animations │ ├── Asking Question.fbx │ ├── Having A Meeting.fbx │ ├── Hip Hop Dancing.fbx │ ├── Idle.fbx │ ├── Seated Idle.fbx │ ├── Sit To Stand.fbx │ ├── Sit To Type.fbx │ ├── Sitting Clap.fbx │ ├── Sitting Disapproval.fbx │ ├── Sitting Idle.fbx │ ├── Sitting Talking.fbx │ ├── Sitting Thumbs Up.fbx │ ├── Sitting Victory.fbx │ ├── Stand To Sit.fbx │ ├── Standing Greeting.fbx │ ├── Type To Sit.fbx │ ├── Typing.fbx │ ├── Victory.fbx │ ├── Wave Hip Hop Dance.fbx │ └── Waving.fbx ├── audios │ ├── pizzas.json │ ├── pizzas.mp3 │ ├── pizzas.ogg │ ├── welcome.json │ ├── welcome.mp3 │ └── welcome.ogg ├── models │ ├── 6505ad3b7a4b5e00b4da04e8.glb │ └── office_desk.glb ├── temp │ └── resume.pdf ├── textures │ ├── house.jpg │ ├── youtubeBackground.jpg │ └── youtubeBackgroundHawaii.jpg └── vite.svg ├── server.js ├── src ├── App.jsx ├── assets │ └── react.svg ├── components │ ├── Avatar.jsx │ ├── Desk.jsx │ ├── ErrorDisplay.jsx │ ├── Experience.jsx │ ├── SettingsDisplay.jsx │ ├── UserInput.jsx │ └── hooks │ │ ├── chatbotService.jsx │ │ ├── useChatbot.jsx │ │ └── useSpeechRecognition.jsx ├── index.css └── main.jsx └── vite.config.js /.env.template: -------------------------------------------------------------------------------- 1 | OPENAI_API_KEY = "" 2 | AZURE_SPEECH_KEY = "" 3 | VITE_AZURE_SPEECH_KEY = "" 4 | AZURE_SPEECH_REGION = "eastus" 5 | VITE_AZURE_SPEECH_REGION = "eastus" 6 | NODE_ENV = "development" -------------------------------------------------------------------------------- /.github/workflows/master_live-ai-interview.yml: -------------------------------------------------------------------------------- 1 | # Docs for the Azure Web Apps Deploy action: https://github.com/Azure/webapps-deploy 2 | # More GitHub Actions for Azure: https://github.com/Azure/actions 3 | 4 | name: Build and deploy Node.js app to Azure Web App - live-ai-interview 5 | 6 | on: 7 | push: 8 | branches: 9 | - master 10 | workflow_dispatch: 11 | 12 | jobs: 13 | build: 14 | runs-on: ubuntu-latest 15 | 16 | steps: 17 | - uses: actions/checkout@v2 18 | 19 | - name: Set up Node.js version 20 | uses: actions/setup-node@v1 21 | with: 22 | node-version: '16.x' 23 | 24 | - name: Replace values 25 | shell: bash 26 | env: 27 | FONTAWESOME_TOKEN: ${{ secrets.FONTAWESOME }} 28 | run: | 29 | sed -i.back "s|MY_FONTAWESOME_TOKEN|${FONTAWESOME_TOKEN}|g" ./.npmrc 30 | 31 | - name: npm install, build, and test 32 | run: | 33 | npm install 34 | npm run build --if-present 35 | npm run test --if-present 36 | 37 | - name: Upload artifact for deployment job 38 | uses: actions/upload-artifact@v2 39 | with: 40 | name: node-app 41 | path: . 42 | 43 | deploy: 44 | runs-on: ubuntu-latest 45 | needs: build 46 | environment: 47 | name: 'Production' 48 | url: ${{ steps.deploy-to-webapp.outputs.webapp-url }} 49 | 50 | steps: 51 | - name: Download artifact from build job 52 | uses: actions/download-artifact@v2 53 | with: 54 | name: node-app 55 | 56 | - name: 'Deploy to Azure Web App' 57 | id: deploy-to-webapp 58 | uses: azure/webapps-deploy@v2 59 | with: 60 | app-name: 'live-ai-interview' 61 | slot-name: 'Production' 62 | publish-profile: ${{ secrets.AZUREAPPSERVICE_PUBLISHPROFILE_DA16E37CB8B64DB0A22C79AC674AF6B0 }} 63 | package: . 
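Note: the "Replace values" step above injects the FontAwesome Pro registry token into .npmrc before npm install runs. With a hypothetical token value, the substitution works like this:

# Illustration only — "abc123" is a made-up token, not a real secret
export FONTAWESOME_TOKEN="abc123"
sed -i.back "s|MY_FONTAWESOME_TOKEN|${FONTAWESOME_TOKEN}|g" ./.npmrc
# .npmrc afterwards: //npm.fontawesome.com/:_authToken=abc123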
-------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Logs 2 | logs 3 | *.log 4 | npm-debug.log* 5 | yarn-debug.log* 6 | yarn-error.log* 7 | pnpm-debug.log* 8 | lerna-debug.log* 9 | 10 | node_modules 11 | dist 12 | dist-ssr 13 | *.local 14 | 15 | # Editor directories and files 16 | .vscode/* 17 | !.vscode/extensions.json 18 | .idea 19 | .DS_Store 20 | *.suo 21 | *.ntvs* 22 | *.njsproj 23 | *.sln 24 | *.sw? 25 | 26 | .env 27 | .azure 28 | .npmrc -------------------------------------------------------------------------------- /.npmrc: -------------------------------------------------------------------------------- 1 | @fortawesome:registry=https://npm.fontawesome.com/ 2 | //npm.fontawesome.com/:_authToken=MY_FONTAWESOME_TOKEN -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Live Interview AI 2 | ![a6d97ad2-e696-4349-991d-42db7c8a9082](https://github.com/user-attachments/assets/22ea43c4-52bb-4ef3-b9b9-a6585308be2e) 3 | 4 | ## Introduction 5 | This is a live interview AI that can take part in job interviews in your place. 6 | It is a simple chatbot that answers questions based on your resume, asks the interviewer questions on your behalf, and saves them for later use. 7 | 8 | It's powered by GPT-3.5 and uses your own avatar model from Ready Player Me. 9 | 10 | ## License 11 | 12 | This project is licensed under the Creative Commons Attribution-NonCommercial 4.0 International (CC BY-NC 4.0) License for non-commercial use. 13 | 14 | For commercial use, please contact me at [contact@juledz.com] to obtain a commercial license.
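## Quick Start

A minimal local setup, sketched from package.json, .env.template, and server.js (running the backend and the Vite dev server as two processes is an assumption about the intended dev workflow):

```bash
cp .env.template .env        # fill in OPENAI_API_KEY and the Azure Speech keys
npm install                  # requires a valid FontAwesome Pro token in .npmrc
node server.js               # Express + Socket.IO backend on port 5000
npm run dev                  # Vite dev server for the React front end
```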
-------------------------------------------------------------------------------- /chatEngine.js: -------------------------------------------------------------------------------- 1 | import OpenAI from "openai"; 2 | import * as sdk from "microsoft-cognitiveservices-speech-sdk"; 3 | import dotenv from "dotenv"; 4 | import { LocalStorage } from "node-localstorage"; 5 | // Allow use of require function 6 | import { createRequire } from "module"; 7 | import path from "path"; 8 | 9 | const require = createRequire(import.meta.url); 10 | const PDFExtract = require("pdf.js-extract").PDFExtract; 11 | const pdfExtract = new PDFExtract(); 12 | 13 | const fs = require("fs"); 14 | const request = require("request"); 15 | 16 | export default class Chatbot { 17 | constructor(public_path) { 18 | dotenv.config(); 19 | this.socket_id = null; 20 | this.openai = new OpenAI({ 21 | apiKey: process.env.OPENAI_API_KEY, 22 | }); 23 | 24 | global.localStorage = new LocalStorage(`./${public_path}/temp`); // relative path, so storage lands inside the served assets folder 25 | global.localStorage.clear(); 26 | 27 | this.openaiHistory = []; 28 | this.messages = []; 29 | 30 | this.speechConfig = sdk.SpeechConfig.fromSubscription(process.env.AZURE_SPEECH_KEY, process.env.AZURE_SPEECH_REGION); 31 | 32 | // Get the localstorage path 33 | this.publicDir = path.join(process.cwd(), public_path); 34 | 35 | // Create temp folder 36 | if (!fs.existsSync(this.publicDir + "/temp")) { 37 | fs.mkdirSync(this.publicDir + "/temp"); 38 | } 39 | 40 | // Create audio folder 41 | if (!fs.existsSync(this.publicDir + "/temp/audio")) { 42 | fs.mkdirSync(this.publicDir + "/temp/audio"); 43 | } 44 | 45 | // Create chats folder 46 | if (!fs.existsSync(this.publicDir + "/temp/chats")) { 47 | fs.mkdirSync(this.publicDir + "/temp/chats"); 48 | } 49 | } 50 | 51 | async initialize(settings, socket_id) { 52 | this.settings = settings; 53 | this.socket_id = socket_id; 54 | 55 | this.speechConfig.speechSynthesisVoiceName = settings.tts_voice; 56 | this.speechConfig.speechRecognitionLanguage = settings.speechLanguage; 57 | this.audioFilePaths = []; 58 | 59 | this.speechAudioConfig = sdk.AudioConfig.fromDefaultMicrophoneInput(); 60 | this.speechRecognizer = new sdk.SpeechRecognizer(this.speechConfig, this.speechAudioConfig); 61 | 62 | const resumeText = await this.downloadResume(); 63 | 64 | // Clear history 65 | this.openaiHistory = []; 66 | this.messages = []; 67 | 68 | this.messages.push({ 69 | role: "system", 70 | content: `You're an interview clone taking part in a job interview: Rules: 1) Never mention you're an AI 2) Always give logical answers based only on the resume and the Identity information, never invent fake information 3) Always answer in the language you're asked in 4) Always answer in a human way, never use technical terms 5) Always answer in a way that is coherent with the previous answers but NEVER repeat yourself 6) NEVER repeat yourself. Identity: your name is ${this.settings.my_name} ${this.settings.my_surname} and you're applying for a ${this.settings.job_title} position at ${this.settings.company_name}. The interviewer's name is ${this.settings.interviewer_name} ${this.settings.interviewer_surname}. From now on answer in ${this.settings.language} NEVER MENTION THAT YOU'RE AN AI.
Here's the resume text to help you answer the questions: ${resumeText}`, 71 | }); 72 | 73 | for (const [input_text, completion_text] of this.openaiHistory) { 74 | this.messages.push({ 75 | role: "user", 76 | content: input_text, 77 | }); 78 | this.messages.push({ 79 | role: "assistant", 80 | content: completion_text, 81 | }); 82 | } 83 | } 84 | 85 | async downloadResume() { 86 | return new Promise((resolve, reject) => { 87 | let resume_text = ""; 88 | request(this.settings.link_to_resume, { encoding: null }, (err, res, body) => { 89 | if (err) return reject(err); 90 | fs.writeFileSync(this.publicDir + "/temp/resume.pdf", body); 91 | const buffer = fs.readFileSync(this.publicDir + "/temp/resume.pdf"); 92 | const options = {}; 93 | pdfExtract.extractBuffer(buffer, options, (err, data) => { 94 | if (err) return reject(err); 95 | // Concatenate the text items from every page, not just the first 96 | for (const page of data.pages) { 97 | for (const item of page.content) { 98 | resume_text += item.str + " "; 99 | } 100 | } 101 | resolve(resume_text); 102 | }); 103 | }); 104 | }); 105 | } 106 | 107 | async chat(userInput) { 108 | this.messages.push({ 109 | role: "user", 110 | content: userInput, 111 | }); 112 | 113 | try { 114 | const completion = await this.openai.chat.completions.create({ 115 | model: "gpt-3.5-turbo", 116 | messages: this.messages, 117 | }); 118 | 119 | this.messages.push({ 120 | role: "assistant", 121 | content: completion.choices[0].message.content, 122 | }); 123 | 124 | //console.log(`ANSWER: ${completion.choices[0].message.content}`); 125 | await this.exportChat(); 126 | 127 | return completion.choices[0].message.content; 128 | } catch (error) { 129 | console.log(error); // Print error 130 | 131 | return { 132 | error: error, 133 | }; 134 | } 135 | } 136 | 137 | async exportChat() { 138 | console.log("Exporting chat..."); 139 | const chat = []; 140 | for (let i = 0; i < this.messages.length; i++) { 141 | if (this.messages[i].role == "user" || this.messages[i].role == "assistant") { 142 | chat.push({ 143 | role: this.messages[i].role, 144 | content: this.messages[i].content, 145 | audio: this.audioFilePaths[i], 146 | }); 147 | } 148 | } 149 | const chat_path = path.join(this.publicDir, "temp/chats", `${this.socket_id}.json`); 150 | console.log(`Chat path: ${chat_path}`); 151 | // Save chat to file 152 | let data = JSON.stringify(chat); 153 | 154 | // Write to file and wait for it to finish 155 | console.log(`Writing to file: ${chat_path}`); 156 | await fs.promises.writeFile(chat_path, data); 157 | console.log("Chat saved to file."); 158 | 159 | return chat_path; 160 | } 161 | 162 | async textToSpeech(text) { 163 | let visemes = []; 164 | 165 | const fileName = `${Math.random().toString(36).substring(7)}.wav`; 166 | const audioFilePath = path.join(this.publicDir, "temp/audio", fileName); 167 | 168 | const audioConfig = sdk.AudioConfig.fromAudioFileOutput(audioFilePath); 169 | 170 | const synthesizer = new sdk.SpeechSynthesizer(this.speechConfig, audioConfig); 171 | 172 | synthesizer.visemeReceived = (s, e) => { 173 | visemes.push({ visemeId: e.visemeId, audioOffset: e.audioOffset / 10000 }); 174 | }; 175 | 176 | // Wrap the text in a minimal SSML envelope for speakSsmlAsync 177 | const ssml = `<speak version="1.0" xmlns="http://www.w3.org/2001/10/synthesis" xml:lang="${this.settings.speechLanguage}"><voice name="${this.settings.tts_voice}">${text}</voice></speak>`; 178 | 179 | await new Promise((resolve, reject) => { 180 | synthesizer.speakSsmlAsync(ssml, (result) => { 181 | if (result.reason === sdk.ResultReason.SynthesizingAudioCompleted) { 182 | resolve(); 183 | } else { 184 | reject(result); 185 | } 186 | }); 187 | }); 188 | 189 | // Close synthesizer 190 | synthesizer.close(); 191 | 192 | // Track the file so close() can delete it later 193 | this.audioFilePaths.push(audioFilePath); 194 | 195 | // Return audio file path and visemes
196 | return { audioFilePath: audioFilePath, visemes: visemes }; 197 | } 198 | 199 | async speechToText() { 200 | return new Promise((resolve, reject) => { 201 | try { 202 | console.log("[SYSTEM]: Speak into your microphone."); 203 | 204 | let text = ""; 205 | this.speechRecognizer.recognized = (s, e) => { 206 | try { 207 | const res = e.result; 208 | console.log(`recognized: ${res.text}`); 209 | } catch (error) { 210 | console.log(error); 211 | } 212 | }; 213 | 214 | this.speechRecognizer.sessionStarted = (s, e) => { 215 | console.log(`SESSION STARTED: ${e.sessionId}`); 216 | }; 217 | 218 | console.log("Starting recognition..."); 219 | try { 220 | this.speechRecognizer.recognizeOnceAsync( 221 | (result) => { 222 | console.log(`RECOGNIZED: Text=${result.text}`); 223 | text = result.text; 224 | resolve(text); 225 | }, 226 | (error) => { 227 | console.log(error); 228 | reject(error); 229 | } 230 | ); 231 | } catch (err) { 232 | console.log(err); 233 | reject(err); 234 | } 235 | 236 | process.stdin.on("keypress", (str, key) => { 237 | if (key.name === "space") { 238 | stopRecognition(); 239 | } 240 | }); 241 | 242 | const stopRecognition = async () => { 243 | try { 244 | console.log("Stopping recognition..."); 245 | this.speechRecognizer.stopContinuousRecognitionAsync(); 246 | resolve(text); 247 | } catch (error) { 248 | console.log(error); 249 | } 250 | }; 251 | } catch (error) { 252 | console.log(error); 253 | reject(error); 254 | } 255 | }); 256 | } 257 | 258 | async close() { 259 | console.log("Closing chatbot..."); 260 | // Guard: initialize() may never have run for this connection 261 | if (this.speechRecognizer) { 262 | this.speechRecognizer.close(); 263 | } 264 | 265 | for (const audioFilePath of this.audioFilePaths || []) { 266 | fs.unlinkSync(audioFilePath); 267 | } 268 | } 269 | }
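A sketch of how server.js drives this class end to end — initialize() once, then chat() and textToSpeech() per question. The standalone wrapper and the sample settings values below are illustrative assumptions, not defaults shipped with the project:

// Hypothetical driver mirroring the "init" and "message" flow in server.js
import Chatbot from "./chatEngine.js";

const chatbot = new Chatbot("public");
await chatbot.initialize(
  {
    my_name: "Jane", my_surname: "Doe",
    job_title: "Software Engineer", company_name: "Acme",
    interviewer_name: "John", interviewer_surname: "Smith",
    language: "English", speechLanguage: "en-US",
    tts_voice: "en-US-JennyNeural", // any Azure neural voice name
    link_to_resume: "https://example.com/resume.pdf",
  },
  "local-test" // stands in for a socket.id
);
const answer = await chatbot.chat("Tell me about yourself.");
const { audioFilePath, visemes } = await chatbot.textToSpeech(answer);
console.log(answer, audioFilePath, visemes.length);
await chatbot.close();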
-------------------------------------------------------------------------------- /index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | Live AI Interview - by Juled Zaganjori 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "live-interview", 3 | "private": true, 4 | "version": "1.0.0", 5 | "type": "module", 6 | "scripts": { 7 | "dev": "vite", 8 | "build": "vite build", 9 | "preview": "vite preview" 10 | }, 11 | "dependencies": { 12 | "@fortawesome/fontawesome-pro": "^6.4.2", 13 | "@react-three/drei": "9.75.0", 14 | "@react-three/fiber": "8.13.3", 15 | "@types/three": "0.152.1", 16 | "dotenv": "^16.3.1", 17 | "express": "^4.18.2", 18 | "leva": "^0.9.35", 19 | "lodash.debounce": "^4.0.8", 20 | "microsoft-cognitiveservices-speech-sdk": "^1.32.0", 21 | "module": "^1.0.0", 22 | "node-localstorage": "^3.0.5", 23 | "openai": "^4.8.0", 24 | "path": "^0.12.7", 25 | "pdf.js-extract": "^0.2.1", 26 | "process": "^0.11.10", 27 | "react": "^18.2.0", 28 | "react-dom": "^18.2.0", 29 | "request": "^2.88.2", 30 | "socket.io": "^4.7.2", 31 | "socket.io-client": "^4.7.2", 32 | "three": "0.153.0" 33 | }, 34 | "devDependencies": { 35 | "@types/react": "^18.0.27", 36 | "@types/react-dom": "^18.0.10", 37 | "@vitejs/plugin-react": "^3.1.0", 38 | "depcheck": "^1.4.6", 39 | "vite": "^4.5.2" 40 | } 41 | } 42 | -------------------------------------------------------------------------------- /public/animations/Asking Question.fbx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/0Shark/live-interview/af3b064a826c0a24fa2f76b809eec83964718d20/public/animations/Asking Question.fbx -------------------------------------------------------------------------------- /public/animations/Having A Meeting.fbx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/0Shark/live-interview/af3b064a826c0a24fa2f76b809eec83964718d20/public/animations/Having A Meeting.fbx -------------------------------------------------------------------------------- /public/animations/Hip Hop Dancing.fbx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/0Shark/live-interview/af3b064a826c0a24fa2f76b809eec83964718d20/public/animations/Hip Hop Dancing.fbx -------------------------------------------------------------------------------- /public/animations/Idle.fbx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/0Shark/live-interview/af3b064a826c0a24fa2f76b809eec83964718d20/public/animations/Idle.fbx -------------------------------------------------------------------------------- /public/animations/Seated Idle.fbx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/0Shark/live-interview/af3b064a826c0a24fa2f76b809eec83964718d20/public/animations/Seated Idle.fbx -------------------------------------------------------------------------------- /public/animations/Sit To Stand.fbx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/0Shark/live-interview/af3b064a826c0a24fa2f76b809eec83964718d20/public/animations/Sit To Stand.fbx -------------------------------------------------------------------------------- /public/animations/Sit To Type.fbx: --------------------------------------------------------------------------------
https://raw.githubusercontent.com/0Shark/live-interview/af3b064a826c0a24fa2f76b809eec83964718d20/public/animations/Sit To Type.fbx -------------------------------------------------------------------------------- /public/animations/Sitting Clap.fbx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/0Shark/live-interview/af3b064a826c0a24fa2f76b809eec83964718d20/public/animations/Sitting Clap.fbx -------------------------------------------------------------------------------- /public/animations/Sitting Disapproval.fbx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/0Shark/live-interview/af3b064a826c0a24fa2f76b809eec83964718d20/public/animations/Sitting Disapproval.fbx -------------------------------------------------------------------------------- /public/animations/Sitting Idle.fbx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/0Shark/live-interview/af3b064a826c0a24fa2f76b809eec83964718d20/public/animations/Sitting Idle.fbx -------------------------------------------------------------------------------- /public/animations/Sitting Talking.fbx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/0Shark/live-interview/af3b064a826c0a24fa2f76b809eec83964718d20/public/animations/Sitting Talking.fbx -------------------------------------------------------------------------------- /public/animations/Sitting Thumbs Up.fbx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/0Shark/live-interview/af3b064a826c0a24fa2f76b809eec83964718d20/public/animations/Sitting Thumbs Up.fbx -------------------------------------------------------------------------------- /public/animations/Sitting Victory.fbx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/0Shark/live-interview/af3b064a826c0a24fa2f76b809eec83964718d20/public/animations/Sitting Victory.fbx -------------------------------------------------------------------------------- /public/animations/Stand To Sit.fbx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/0Shark/live-interview/af3b064a826c0a24fa2f76b809eec83964718d20/public/animations/Stand To Sit.fbx -------------------------------------------------------------------------------- /public/animations/Standing Greeting.fbx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/0Shark/live-interview/af3b064a826c0a24fa2f76b809eec83964718d20/public/animations/Standing Greeting.fbx -------------------------------------------------------------------------------- /public/animations/Type To Sit.fbx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/0Shark/live-interview/af3b064a826c0a24fa2f76b809eec83964718d20/public/animations/Type To Sit.fbx -------------------------------------------------------------------------------- /public/animations/Typing.fbx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/0Shark/live-interview/af3b064a826c0a24fa2f76b809eec83964718d20/public/animations/Typing.fbx -------------------------------------------------------------------------------- 
/public/animations/Victory.fbx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/0Shark/live-interview/af3b064a826c0a24fa2f76b809eec83964718d20/public/animations/Victory.fbx -------------------------------------------------------------------------------- /public/animations/Wave Hip Hop Dance.fbx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/0Shark/live-interview/af3b064a826c0a24fa2f76b809eec83964718d20/public/animations/Wave Hip Hop Dance.fbx -------------------------------------------------------------------------------- /public/animations/Waving.fbx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/0Shark/live-interview/af3b064a826c0a24fa2f76b809eec83964718d20/public/animations/Waving.fbx -------------------------------------------------------------------------------- /public/audios/pizzas.json: -------------------------------------------------------------------------------- 1 | { 2 | "metadata": { 3 | "soundFile": "/Users/wawa/Documents/Projects/wawasensei/r3f-lipsync-tutorial/Rhubarb-Lip-Sync-1.13.0-macOS/../public/audios/pizzas.ogg", 4 | "duration": 6.92 5 | }, 6 | "mouthCues": [ 7 | { "start": 0.00, "end": 0.04, "value": "X" }, 8 | { "start": 0.04, "end": 0.26, "value": "F" }, 9 | { "start": 0.26, "end": 0.33, "value": "C" }, 10 | { "start": 0.33, "end": 0.41, "value": "A" }, 11 | { "start": 0.41, "end": 0.66, "value": "B" }, 12 | { "start": 0.66, "end": 0.73, "value": "C" }, 13 | { "start": 0.73, "end": 0.87, "value": "B" }, 14 | { "start": 0.87, "end": 1.19, "value": "X" }, 15 | { "start": 1.19, "end": 1.26, "value": "B" }, 16 | { "start": 1.26, "end": 1.33, "value": "A" }, 17 | { "start": 1.33, "end": 1.50, "value": "B" }, 18 | { "start": 1.50, "end": 1.95, "value": "X" }, 19 | { "start": 1.95, "end": 2.28, "value": "B" }, 20 | { "start": 2.28, "end": 2.36, "value": "A" }, 21 | { "start": 2.36, "end": 2.63, "value": "B" }, 22 | { "start": 2.63, "end": 2.70, "value": "C" }, 23 | { "start": 2.70, "end": 2.84, "value": "B" }, 24 | { "start": 2.84, "end": 2.90, "value": "A" }, 25 | { "start": 2.90, "end": 2.96, "value": "C" }, 26 | { "start": 2.96, "end": 3.04, "value": "A" }, 27 | { "start": 3.04, "end": 3.21, "value": "E" }, 28 | { "start": 3.21, "end": 3.28, "value": "F" }, 29 | { "start": 3.28, "end": 3.75, "value": "B" }, 30 | { "start": 3.75, "end": 3.82, "value": "D" }, 31 | { "start": 3.82, "end": 4.02, "value": "B" }, 32 | { "start": 4.02, "end": 4.16, "value": "C" }, 33 | { "start": 4.16, "end": 4.23, "value": "B" }, 34 | { "start": 4.23, "end": 4.31, "value": "A" }, 35 | { "start": 4.31, "end": 4.51, "value": "C" }, 36 | { "start": 4.51, "end": 4.58, "value": "B" }, 37 | { "start": 4.58, "end": 4.65, "value": "C" }, 38 | { "start": 4.65, "end": 4.75, "value": "A" }, 39 | { "start": 4.75, "end": 4.88, "value": "B" }, 40 | { "start": 4.88, "end": 5.28, "value": "X" }, 41 | { "start": 5.28, "end": 5.41, "value": "F" }, 42 | { "start": 5.41, "end": 5.62, "value": "C" }, 43 | { "start": 5.62, "end": 5.90, "value": "B" }, 44 | { "start": 5.90, "end": 5.97, "value": "E" }, 45 | { "start": 5.97, "end": 6.04, "value": "F" }, 46 | { "start": 6.04, "end": 6.11, "value": "C" }, 47 | { "start": 6.11, "end": 6.18, "value": "B" }, 48 | { "start": 6.18, "end": 6.26, "value": "A" }, 49 | { "start": 6.26, "end": 6.51, "value": "B" }, 50 | { "start": 6.51, "end": 6.65, "value": "C" }, 51 | { 
"start": 6.65, "end": 6.92, "value": "X" } 52 | ] 53 | } 54 | -------------------------------------------------------------------------------- /public/audios/pizzas.mp3: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/0Shark/live-interview/af3b064a826c0a24fa2f76b809eec83964718d20/public/audios/pizzas.mp3 -------------------------------------------------------------------------------- /public/audios/pizzas.ogg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/0Shark/live-interview/af3b064a826c0a24fa2f76b809eec83964718d20/public/audios/pizzas.ogg -------------------------------------------------------------------------------- /public/audios/welcome.json: -------------------------------------------------------------------------------- 1 | { 2 | "metadata": { 3 | "soundFile": "/Users/wawa/Documents/Projects/wawasensei/r3f-lipsync-tutorial/Rhubarb-Lip-Sync-1.13.0-macOS/../public/audios/welcome.ogg", 4 | "duration": 6.21 5 | }, 6 | "mouthCues": [ 7 | { "start": 0.00, "end": 0.05, "value": "X" }, 8 | { "start": 0.05, "end": 0.27, "value": "D" }, 9 | { "start": 0.27, "end": 0.41, "value": "B" }, 10 | { "start": 0.41, "end": 0.49, "value": "A" }, 11 | { "start": 0.49, "end": 0.59, "value": "F" }, 12 | { "start": 0.59, "end": 0.66, "value": "B" }, 13 | { "start": 0.66, "end": 0.73, "value": "F" }, 14 | { "start": 0.73, "end": 0.80, "value": "D" }, 15 | { "start": 0.80, "end": 0.94, "value": "B" }, 16 | { "start": 0.94, "end": 1.01, "value": "C" }, 17 | { "start": 1.01, "end": 1.15, "value": "B" }, 18 | { "start": 1.15, "end": 1.29, "value": "C" }, 19 | { "start": 1.29, "end": 1.40, "value": "B" }, 20 | { "start": 1.40, "end": 1.47, "value": "F" }, 21 | { "start": 1.47, "end": 1.54, "value": "C" }, 22 | { "start": 1.54, "end": 1.61, "value": "B" }, 23 | { "start": 1.61, "end": 1.68, "value": "A" }, 24 | { "start": 1.68, "end": 1.79, "value": "F" }, 25 | { "start": 1.79, "end": 1.87, "value": "A" }, 26 | { "start": 1.87, "end": 1.93, "value": "C" }, 27 | { "start": 1.93, "end": 2.27, "value": "F" }, 28 | { "start": 2.27, "end": 2.37, "value": "A" }, 29 | { "start": 2.37, "end": 2.44, "value": "B" }, 30 | { "start": 2.44, "end": 2.58, "value": "C" }, 31 | { "start": 2.58, "end": 2.84, "value": "B" }, 32 | { "start": 2.84, "end": 2.90, "value": "H" }, 33 | { "start": 2.90, "end": 3.10, "value": "B" }, 34 | { "start": 3.10, "end": 3.24, "value": "E" }, 35 | { "start": 3.24, "end": 3.38, "value": "C" }, 36 | { "start": 3.38, "end": 3.52, "value": "F" }, 37 | { "start": 3.52, "end": 3.59, "value": "C" }, 38 | { "start": 3.59, "end": 3.80, "value": "B" }, 39 | { "start": 3.80, "end": 3.88, "value": "A" }, 40 | { "start": 3.88, "end": 4.10, "value": "B" }, 41 | { "start": 4.10, "end": 4.38, "value": "F" }, 42 | { "start": 4.38, "end": 4.45, "value": "D" }, 43 | { "start": 4.45, "end": 4.66, "value": "B" }, 44 | { "start": 4.66, "end": 4.73, "value": "C" }, 45 | { "start": 4.73, "end": 5.08, "value": "B" }, 46 | { "start": 5.08, "end": 5.15, "value": "G" }, 47 | { "start": 5.15, "end": 5.22, "value": "C" }, 48 | { "start": 5.22, "end": 5.30, "value": "A" }, 49 | { "start": 5.30, "end": 5.41, "value": "E" }, 50 | { "start": 5.41, "end": 5.50, "value": "A" }, 51 | { "start": 5.50, "end": 5.56, "value": "B" }, 52 | { "start": 5.56, "end": 5.69, "value": "D" }, 53 | { "start": 5.69, "end": 5.76, "value": "B" }, 54 | { "start": 5.76, "end": 5.83, "value": "C" }, 55 | { "start": 5.83, 
"end": 6.11, "value": "B" }, 56 | { "start": 6.11, "end": 6.21, "value": "X" } 57 | ] 58 | } 59 | -------------------------------------------------------------------------------- /public/audios/welcome.mp3: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/0Shark/live-interview/af3b064a826c0a24fa2f76b809eec83964718d20/public/audios/welcome.mp3 -------------------------------------------------------------------------------- /public/audios/welcome.ogg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/0Shark/live-interview/af3b064a826c0a24fa2f76b809eec83964718d20/public/audios/welcome.ogg -------------------------------------------------------------------------------- /public/models/6505ad3b7a4b5e00b4da04e8.glb: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/0Shark/live-interview/af3b064a826c0a24fa2f76b809eec83964718d20/public/models/6505ad3b7a4b5e00b4da04e8.glb -------------------------------------------------------------------------------- /public/models/office_desk.glb: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/0Shark/live-interview/af3b064a826c0a24fa2f76b809eec83964718d20/public/models/office_desk.glb -------------------------------------------------------------------------------- /public/temp/resume.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/0Shark/live-interview/af3b064a826c0a24fa2f76b809eec83964718d20/public/temp/resume.pdf -------------------------------------------------------------------------------- /public/textures/house.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/0Shark/live-interview/af3b064a826c0a24fa2f76b809eec83964718d20/public/textures/house.jpg -------------------------------------------------------------------------------- /public/textures/youtubeBackground.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/0Shark/live-interview/af3b064a826c0a24fa2f76b809eec83964718d20/public/textures/youtubeBackground.jpg -------------------------------------------------------------------------------- /public/textures/youtubeBackgroundHawaii.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/0Shark/live-interview/af3b064a826c0a24fa2f76b809eec83964718d20/public/textures/youtubeBackgroundHawaii.jpg -------------------------------------------------------------------------------- /public/vite.svg: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /server.js: -------------------------------------------------------------------------------- 1 | import express from "express"; 2 | import dotenv from "dotenv"; 3 | import { Server } from "socket.io"; 4 | import { createServer } from "http"; 5 | import Chatbot from "./chatEngine.js"; 6 | import process from "process"; 7 | 8 | dotenv.config(); 9 | 10 | // Express 11 | const app = express(); 12 | 13 | app.use(express.static("dist")); 14 | 15 | // Socket.io 16 | const server = createServer(app); 17 | let io = null; 18 | // Development 19 | if (process.env.NODE_ENV === "production") { 20 
-------------------------------------------------------------------------------- /src/App.jsx: -------------------------------------------------------------------------------- 1 | import { Canvas } from "@react-three/fiber"; 2 | import { Experience } from "./components/Experience"; 3 | import UserInput from "./components/UserInput"; 4 | import { useState } from "react"; 5 | 6 | function App() { 7 | const [response, setResponse] = useState({ 8 | response: "Hello, thank you for having me here today. I'm excited to learn more about this opportunity.", 9 | speechData: { 10 | audioFilePath: "", 11 | visemes: null, 12 | }, 13 | }); 14 | 15 | const [isChatbotReady, setIsChatbotReady] = useState(false); 16 | 17 | return (
18 | /* Markup reconstructed: the tags, class names and icon below are inferred from index.css and the components' props — treat them as approximations */ 19 | <div className="main-container" data-chatbot-ready={isChatbotReady}> 20 | <div className="canvas-wrapper"> 21 | <Canvas shadows> 22 | <Experience response={response} /> 23 | </Canvas> 24 | </div> 25 | 26 | <UserInput setResponse={setResponse} isChatbotReady={isChatbotReady} setIsChatbotReady={setIsChatbotReady} /> 27 | <div className="loading"> 28 | <i className="fa-solid fa-spinner fa-spin"></i> 29 | <span>Joining the room...</span> 30 | </div> 31 | </div>
32 | ); 33 | } 34 | 35 | export default App; 36 | -------------------------------------------------------------------------------- /src/assets/react.svg: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /src/components/Avatar.jsx: -------------------------------------------------------------------------------- 1 | import { useAnimations, useFBX, useGLTF } from "@react-three/drei"; 2 | import { useFrame, useLoader } from "@react-three/fiber"; 3 | import { useControls } from "leva"; 4 | import React, { useEffect, useMemo, useRef, useState } from "react"; 5 | import * as THREE from "three"; 6 | 7 | const animationFiles = { 8 | "Type To Sit": "/animations/Type To Sit.fbx", 9 | Typing: "/animations/Typing.fbx", 10 | Victory: "/animations/Victory.fbx", 11 | "Wave Hip Hop Dance": "/animations/Wave Hip Hop Dance.fbx", 12 | Waving: "/animations/Waving.fbx", 13 | "Asking Question": "/animations/Asking Question.fbx", 14 | "Having A Meeting": "/animations/Having A Meeting.fbx", 15 | "Hip Hop Dancing": "/animations/Hip Hop Dancing.fbx", 16 | Idle: "/animations/Idle.fbx", 17 | "Seated Idle": "/animations/Seated Idle.fbx", 18 | "Sit To Stand": "/animations/Sit To Stand.fbx", 19 | "Sit To Type": "/animations/Sit To Type.fbx", 20 | "Sitting Clap": "/animations/Sitting Clap.fbx", 21 | "Sitting Disapproval": "/animations/Sitting Disapproval.fbx", 22 | "Sitting Idle": "/animations/Sitting Idle.fbx", 23 | "Sitting Talking": "/animations/Sitting Talking.fbx", 24 | "Sitting Thumbs Up": "/animations/Sitting Thumbs Up.fbx", 25 | "Sitting Victory": "/animations/Sitting Victory.fbx", 26 | "Stand To Sit": "/animations/Stand To Sit.fbx", 27 | "Standing Greeting": "/animations/Standing Greeting.fbx", 28 | }; 29 | 30 | const sittingTalkingAnimations = ["Sitting Talking", "Sitting Idle", "Having A Meeting"]; 31 | 32 | // Preload animations 33 | Object.values(animationFiles).forEach((url) => { 34 | useFBX.preload(url); 35 | }); 36 | 37 | // ReadyPlayerMe visemes map 38 | const azureToOculusVisemes = { 39 | 0: "viseme_sil", 40 | 1: "viseme_PP", 41 | 2: "viseme_aa", 42 | 3: "viseme_aa", 43 | 4: "viseme_E", 44 | 5: "viseme_RR", 45 | 6: "viseme_I", 46 | 7: "viseme_U", 47 | 8: "viseme_O", 48 | 9: "viseme_aa", 49 | 10: "viseme_O", 50 | 11: "viseme_I", 51 | 12: "viseme_sil", 52 | 13: "viseme_RR", 53 | 14: "viseme_nn", 54 | 15: "viseme_SS", 55 | 16: "viseme_CH", 56 | 17: "viseme_TH", 57 | 18: "viseme_FF", 58 | 19: "viseme_DD", 59 | 20: "viseme_kk", 60 | 21: "viseme_PP", 61 | }; 62 | 63 | export function Avatar(props) { 64 | // Development 65 | // const { playAudio, headFollow, smoothMorphTarget, morphTargetSmoothing, animationName } = useControls({ 66 | // playAudio: true, 67 | // headFollow: true, 68 | // smoothMorphTarget: true, 69 | // morphTargetSmoothing: 0.5, 70 | // animationName: { 71 | // value: "Having A Meeting", 72 | // options: Object.keys(animationFiles), 73 | // }, 74 | // }); 75 | // Production 76 | const { playAudio, headFollow, smoothMorphTarget, morphTargetSmoothing, animationName } = { 77 | playAudio: true, 78 | headFollow: true, 79 | smoothMorphTarget: true, 80 | morphTargetSmoothing: 0.5, 81 | animationName: { 82 | value: "Having A Meeting", 83 | options: Object.keys(animationFiles), 84 | }, 85 | }; 86 | 87 | let audio = useMemo(() => { 88 | let audioPath = props.response.speechData.audioFilePath; 89 | if (!audioPath) { 90 | audioPath = ""; 91 | } 92 | // turn to path to URL which is inside 
the public/temp/audio folder 93 | audioPath = audioPath.replace(/\\/g, "/"); 94 | // Get audio file name 95 | audioPath = audioPath.split("/").pop(); 96 | // Add URL to audio file 97 | audioPath = `/temp/audio/${audioPath}`; 98 | 99 | console.log("Received response: ", props.response.response); 100 | 101 | return new Audio(audioPath); 102 | }, [props.response]); 103 | 104 | let lipsync = useMemo(() => { 105 | let lipsync = props.response.speechData.visemes; 106 | if (lipsync) { 107 | return lipsync; 108 | } else { 109 | return []; 110 | } 111 | }, [props.response]); 112 | 113 | useFrame(() => { 114 | let currentAudioTime = audio.currentTime; 115 | if (audio.paused || audio.ended) { 116 | setAnimation("Sitting Idle"); 117 | return; 118 | } 119 | 120 | Object.values(azureToOculusVisemes).forEach((value) => { 121 | if (!smoothMorphTarget) { 122 | nodes.Wolf3D_Head.morphTargetInfluences[nodes.Wolf3D_Head.morphTargetDictionary[value]] = 0; 123 | nodes.Wolf3D_Teeth.morphTargetInfluences[nodes.Wolf3D_Teeth.morphTargetDictionary[value]] = 0; 124 | } else { 125 | nodes.Wolf3D_Head.morphTargetInfluences[nodes.Wolf3D_Head.morphTargetDictionary[value]] = THREE.MathUtils.lerp( 126 | nodes.Wolf3D_Head.morphTargetInfluences[nodes.Wolf3D_Head.morphTargetDictionary[value]], 127 | 0, 128 | morphTargetSmoothing 129 | ); 130 | 131 | nodes.Wolf3D_Teeth.morphTargetInfluences[nodes.Wolf3D_Teeth.morphTargetDictionary[value]] = THREE.MathUtils.lerp( 132 | nodes.Wolf3D_Teeth.morphTargetInfluences[nodes.Wolf3D_Teeth.morphTargetDictionary[value]], 133 | 0, 134 | morphTargetSmoothing 135 | ); 136 | } 137 | }); 138 | 139 | for (let i = 0; i < lipsync.length; i++) { 140 | let visemeId = lipsync[i].visemeId; 141 | // lipsync[i].audioOffset is in milliseconds, so divide by 1000 to get seconds 142 | let visemeOffsetTime = lipsync[i].audioOffset / 1000; 143 | let nextVisemeOffsetTime = lipsync[i + 1] ? 
lipsync[i + 1].audioOffset / 1000 : Infinity; // no next cue: keep the final viseme active until the audio ends 144 | 145 | if (currentAudioTime >= visemeOffsetTime && currentAudioTime < nextVisemeOffsetTime) { 146 | if (!smoothMorphTarget) { 147 | nodes.Wolf3D_Head.morphTargetInfluences[nodes.Wolf3D_Head.morphTargetDictionary[azureToOculusVisemes[visemeId]]] = 1; 148 | nodes.Wolf3D_Teeth.morphTargetInfluences[nodes.Wolf3D_Teeth.morphTargetDictionary[azureToOculusVisemes[visemeId]]] = 1; 149 | } else { 150 | nodes.Wolf3D_Head.morphTargetInfluences[nodes.Wolf3D_Head.morphTargetDictionary[azureToOculusVisemes[visemeId]]] = THREE.MathUtils.lerp( 151 | nodes.Wolf3D_Head.morphTargetInfluences[nodes.Wolf3D_Head.morphTargetDictionary[azureToOculusVisemes[visemeId]]], 152 | 1, 153 | morphTargetSmoothing 154 | ); 155 | nodes.Wolf3D_Teeth.morphTargetInfluences[nodes.Wolf3D_Teeth.morphTargetDictionary[azureToOculusVisemes[visemeId]]] = THREE.MathUtils.lerp( 156 | nodes.Wolf3D_Teeth.morphTargetInfluences[nodes.Wolf3D_Teeth.morphTargetDictionary[azureToOculusVisemes[visemeId]]], 157 | 1, 158 | morphTargetSmoothing 159 | ); 160 | } 161 | 162 | // Blink sometimes 163 | if (Math.random() < 0.1) { 164 | nodes.EyeLeft.morphTargetInfluences[nodes.EyeLeft.morphTargetDictionary["blink"]] = 1; 165 | nodes.EyeRight.morphTargetInfluences[nodes.EyeRight.morphTargetDictionary["blink"]] = 1; 166 | } 167 | break; 168 | } 169 | } 170 | }); // useFrame's second argument is a render priority, not a dependency array 171 | 172 | const { nodes, materials } = useGLTF("/models/6505ad3b7a4b5e00b4da04e8.glb"); 173 | const { animations: idleAnimation } = useFBX("/animations/Sitting Idle.fbx"); 174 | 175 | const [animation, setAnimation] = useState("Sitting Idle"); 176 | idleAnimation[0].name = "Sitting Idle"; 177 | 178 | const group = useRef(); 179 | 180 | // Load all custom animations 181 | let animationFilesArray = Object.values(animationFiles); 182 | let customAnimations = []; 183 | for (let i = 0; i < animationFilesArray.length; i++) { 184 | let { animations } = useFBX(animationFilesArray[i]); 185 | animations[0].name = Object.keys(animationFiles)[i]; 186 | customAnimations.push(animations[0]); 187 | } 188 | const { actions } = useAnimations([idleAnimation[0], ...customAnimations], group); 189 | 190 | useEffect(() => { 191 | nodes.Wolf3D_Head.morphTargetInfluences[nodes.Wolf3D_Head.morphTargetDictionary["viseme_I"]] = 1; 192 | nodes.Wolf3D_Teeth.morphTargetInfluences[nodes.Wolf3D_Teeth.morphTargetDictionary["viseme_I"]] = 1; 193 | if (playAudio) { 194 | audio.play(); 195 | // Choose one of the animations in the array sittingTalkingAnimations randomly 196 | setAnimation(sittingTalkingAnimations[Math.floor(Math.random() * sittingTalkingAnimations.length)]); 197 | } else { 198 | setAnimation("Sitting Idle"); 199 | audio.pause(); 200 | } 201 | }, [props.response, audio, playAudio]); 202 | 203 | useEffect(() => { 204 | if (actions[animation]) { 205 | actions[animation].reset().fadeIn(0.5).play(); 206 | 207 | return () => actions[animation].fadeOut(0.5); 208 | } else { 209 | console.error(`Animation "${animation}" not found.`); 210 | } 211 | }, [animation]); 212 | 213 | useFrame((state) => { 214 | if (headFollow) { 215 | group.current.getObjectByName("Head").lookAt(state.camera.position); 216 | } 217 | }); 218 | 219 | return ( 220 | 221 | 222 | 223 | 224 | 229 | 230 | 231 | 239 | 247 | 255 | 263 | 264 | ); 265 | } 266 | 267 | useGLTF.preload("/models/6505ad3b7a4b5e00b4da04e8.glb"); 268 |
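The useFrame loop above maps Azure viseme IDs onto Ready Player Me (Oculus) morph targets and eases them with lerp. A self-contained sketch of the same time-window lookup, outside React — the data shape follows textToSpeech() and azureToOculusVisemes, but the sample cues are made up:

// Find which viseme should be active at a given playback time.
// Mirrors the search in Avatar's useFrame; the cues are hypothetical samples.
const cues = [
  { visemeId: 0, audioOffset: 0 },   // offsets in ms, as emitted by textToSpeech()
  { visemeId: 2, audioOffset: 120 },
  { visemeId: 14, audioOffset: 300 },
];

function activeViseme(cues, tSeconds) {
  for (let i = 0; i < cues.length; i++) {
    const start = cues[i].audioOffset / 1000;
    const end = cues[i + 1] ? cues[i + 1].audioOffset / 1000 : Infinity;
    if (tSeconds >= start && tSeconds < end) return cues[i].visemeId;
  }
  return 0; // silence
}

console.log(activeViseme(cues, 0.2)); // -> 2, i.e. "viseme_aa"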
-------------------------------------------------------------------------------- /src/components/Desk.jsx: -------------------------------------------------------------------------------- 1 | /* 2 | Auto-generated by: https://github.com/pmndrs/gltfjsx 3 | Command: npx gltfjsx@6.2.13 public/models/office_desk.glb -o src/components/Desk.jsx -r public 4 | Author: saeed khalili (https://sketchfab.com/saeedkhalili.ir) 5 | License: CC-BY-4.0 (http://creativecommons.org/licenses/by/4.0/) 6 | Source: https://sketchfab.com/3d-models/office-desk-b7a7bf47bdb241d1ba52acd7ecf2f0e8 7 | Title: Office Desk 8 | */ 9 | 10 | import React, { useRef } from 'react' 11 | import { useGLTF } from '@react-three/drei' 12 | 13 | export function Desk(props) { 14 | const { nodes, materials } = useGLTF('/models/office_desk.glb') 15 | return ( 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | 42 | 43 | 44 | 45 | 46 | 47 | 48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57 | 58 | 59 | ) 60 | } 61 | 62 | useGLTF.preload('/models/office_desk.glb') 63 | -------------------------------------------------------------------------------- /src/components/ErrorDisplay.jsx: -------------------------------------------------------------------------------- 1 | import React from "react"; 2 | 3 | const ErrorDisplay = ({ error }) => { 4 | if (!error) return null; 5 | 6 | return (
7 | <div className="errorDisplay"> {/* tags and class name are reconstructed approximations */} 8 | <h1>Ooops!</h1> 9 | <p>Something went wrong. Please try again later or contact me at zaganjorijuled [at] gmail.com with the error below.</p> 10 | <p>Error: {error.message}</p> 11 | </div>
12 | ); 13 | }; 14 | 15 | export default ErrorDisplay; -------------------------------------------------------------------------------- /src/components/Experience.jsx: -------------------------------------------------------------------------------- 1 | import { Environment, OrbitControls, useTexture } from "@react-three/drei"; 2 | import { useThree } from "@react-three/fiber"; 3 | import { Avatar } from "./Avatar"; 4 | import { Desk } from "./Desk"; 5 | import { useEffect, useRef, useState } from "react"; 6 | 7 | export const Experience = ({response}) => { 8 | const controls = useRef(); 9 | 10 | // useEffect(() => { 11 | // const handleMouseMove = (event) => { 12 | // controls.current.setAzimuthalAngle((event.clientX / window.innerWidth) * Math.PI - Math.PI / 2); 13 | // controls.current.setPolarAngle((event.clientY / window.innerHeight) * Math.PI - Math.PI / 2); 14 | // }; 15 | 16 | // window.addEventListener("mousemove", handleMouseMove); 17 | // return () => { 18 | // window.removeEventListener("mousemove", handleMouseMove); 19 | // }; 20 | // }, []); 21 | 22 | const texture = useTexture("textures/youtubeBackground.jpg"); 23 | const viewport = useThree((state) => state.viewport); 24 | 25 | // Change the width and height of the plane to match the viewport 26 | const newWidth = viewport.width * 3.5; 27 | const newHeight = viewport.height * 3; 28 | 29 | return ( 30 | <> 31 | 42 | 43 | 44 | 45 | 46 | 47 | 48 | 49 | 50 | ); 51 | }; 52 | -------------------------------------------------------------------------------- /src/components/SettingsDisplay.jsx: -------------------------------------------------------------------------------- 1 | import React, { useState, useEffect, useRef } from "react"; 2 | 3 | const SettingsDisplay = ({ settings, setSettings, visible, setVisible }) => { 4 | const formRef = useRef(null); 5 | const [newSettings, setNewSettings] = useState(settings); 6 | 7 | const updateSettings = (e) => { 8 | e.preventDefault(); 9 | const formData = new FormData(formRef.current); 10 | const newSettings = Object.fromEntries(formData.entries()); 11 | 12 | if (validateUrl(e.target.link_to_resume.value)) { 13 | setSettings(newSettings); 14 | setVisible(false); 15 | } else { 16 | console.log("Invalid settings"); 17 | formRef.current.classList.add("invalid"); 18 | } 19 | }; 20 | 21 | function validateUrl(url) { 22 | try { 23 | new URL(url); 24 | // Check if url ends with .pdf 25 | if (url.slice(-4) !== ".pdf") { 26 | console.log("Invalid url"); 27 | return false; 28 | } 29 | 30 | return true; 31 | } catch (_) { 32 | console.log("Invalid url"); 33 | return false; 34 | } 35 | } 36 | 37 | // Render the settings 38 | return ( 39 |
40 |
setVisible(false)}> 41 | 42 |
43 |

Settings

44 |

Updating the settings will restart the chatbot.

45 |
46 |
47 | 48 | setNewSettings({ ...newSettings, job_title: e.target.value })} 55 | /> 56 |
57 |
58 | 59 | setNewSettings({ ...newSettings, company_name: e.target.value })} 66 | /> 67 |
68 |
69 | 70 | setNewSettings({ ...newSettings, interviewer_name: e.target.value })} 77 | /> 78 |
79 |
80 | 81 | setNewSettings({ ...newSettings, interviewer_surname: e.target.value })} 88 | /> 89 |
90 |
91 | 92 | setNewSettings({ ...newSettings, my_name: e.target.value })} 99 | /> 100 |
101 |
102 | 103 | setNewSettings({ ...newSettings, my_surname: e.target.value })} 110 | /> 111 |
112 |
113 | 114 | 119 |
120 |
121 | 122 | 131 |
132 |
133 | 134 | 144 |
145 |
146 | 147 | setNewSettings({ ...newSettings, link_to_resume: e.target.value })} 153 | /> 154 |
155 | 156 |
157 | 160 |
161 |
162 |
163 | ); 164 | }; 165 | 166 | export default SettingsDisplay; 167 | -------------------------------------------------------------------------------- /src/components/UserInput.jsx: -------------------------------------------------------------------------------- 1 | import React, { useState, useEffect, useRef } from "react"; 2 | import { useSpeechRecognition } from "./hooks/useSpeechRecognition"; 3 | import { useChatbot } from "./hooks/useChatbot"; 4 | import debounce from "lodash.debounce"; 5 | import ErrorDisplay from "./ErrorDisplay"; 6 | import SettingsDisplay from "./SettingsDisplay"; 7 | 8 | const UserInput = ({ setResponse, isChatbotReady, setIsChatbotReady }) => { 9 | const urlParams = new URLSearchParams(window.location.search); 10 | const showSettings = urlParams.get("showSettings") !== "false"; // query params are strings, so only an explicit "false" hides the panel 11 | 12 | const [visible, setVisible] = useState(showSettings); 13 | const [settings, setSettings] = useState({ 14 | job_title: urlParams.get("job_title") || "Software Engineer", 15 | company_name: urlParams.get("company_name") || "Google", 16 | interviewer_name: urlParams.get("interviewer_name") || "John", 17 | interviewer_surname: urlParams.get("interviewer_surname") || "Doe", 18 | my_name: "You Led", 19 | my_surname: "Zaganyori", 20 | language: "English", 21 | tts_voice: "en-US-RogerNeural", 22 | speechLanguage: "en-US", 23 | link_to_resume: "https://juledz.com/resume.pdf", 24 | }); 25 | 26 | const { initChatbot, sendMessage, error } = useChatbot(setResponse, settings, setIsChatbotReady); 27 | 28 | useEffect(() => { 29 | initChatbot().then((ready) => { 30 | setIsChatbotReady(ready); 31 | }); 32 | 33 | updateSpeechConfig(settings.speechLanguage, settings.tts_voice); 34 | }, [settings]); 35 | 36 | const [speechText, setSpeechText] = useState(""); 37 | const [listening, setListening] = useState(false); 38 | 39 | const { startListening, stopListening, updateSpeechConfig } = useSpeechRecognition( 40 | settings.speechLanguage, 41 | settings.tts_voice, 42 | speechText, 43 | setSpeechText, 44 | setListening 45 | ); 46 | 47 | const debouncedSendMessage = debounce((message) => { 48 | if (!message) return; 49 | if (listening) { 50 | stopListening(); 51 | } 52 | sendMessage(message); 53 | }, 500); 54 | 55 | const toggleListening = () => { 56 | if (listening) { 57 | console.log("stop listening"); 58 | stopListening(); 59 | } else { 60 | console.log("start listening"); 61 | startListening(); 62 | } 63 | }; 64 | 65 | const inputRef = useRef(null); 66 | 67 | useEffect(() => { 68 | if (listening) { 69 | inputRef.current.focus(); 70 | } 71 | }, [listening]); 72 | 73 | // When user presses enter, send message 74 | useEffect(() => { 75 | const handleKeyDown = (e) => { 76 | if (e.key === "Enter") { 77 | if (speechText !== "") { 78 | debouncedSendMessage(speechText); 79 | setSpeechText(""); 80 | } 81 | } 82 | }; 83 | 84 | document.addEventListener("keydown", handleKeyDown); 85 | 86 | return () => { 87 | document.removeEventListener("keydown", handleKeyDown); 88 | }; 89 | }, [speechText]); 90 | 91 | return ( 92 |
93 | {isChatbotReady ? ( 94 |
95 |
96 |
97 |
e.preventDefault()}> 98 | setSpeechText(e.target.value)} placeholder="Type a message..." /> 99 | 100 | {/* */} 103 | 104 | 107 | 108 |
setVisible(true)}> 109 | 110 |
111 |
112 |
113 |
114 | 115 |
116 |
117 | ) : ( 118 | 119 | )} 120 |
121 | ); 122 | }; 123 | 124 | export default UserInput; 125 | -------------------------------------------------------------------------------- /src/components/hooks/chatbotService.jsx: -------------------------------------------------------------------------------- 1 | import { io } from "socket.io-client"; 2 | 3 | class ChatbotService { 4 | constructor() { 5 | // Production 6 | this.socket = io(); 7 | 8 | // Development 9 | //this.socket = io("localhost:5000"); 10 | } 11 | 12 | async init(settings) { 13 | this.socket.emit("init", settings); 14 | 15 | let response = await new Promise((resolve, reject) => { 16 | // once() registers a single self-removing listener, so repeated calls don't stack handlers 17 | this.socket.once("responseInit", (response) => { 18 | if (response) { 19 | resolve(response); 20 | } else { 21 | reject(response); 22 | } 23 | }); 24 | }); 25 | 26 | return response; 27 | } 28 | 29 | async sendMessage(message) { 30 | this.socket.emit("message", { question: message }); 31 | 32 | let response = await new Promise((resolve, reject) => { 33 | this.socket.once("responseMessage", (response) => { 34 | if (response) { 35 | resolve(response); 36 | } else { 37 | reject(response); 38 | } 39 | }); 40 | }); 41 | 42 | return response; 43 | } 44 | } 45 | 46 | export const chatbotService = new ChatbotService(); 47 | -------------------------------------------------------------------------------- /src/components/hooks/useChatbot.jsx: -------------------------------------------------------------------------------- 1 | import { useState } from "react"; 2 | import { chatbotService } from "./chatbotService"; 3 | 4 | export const useChatbot = (setResponse, settings, setIsChatbotReady) => { 5 | const [error, setError] = useState(null); 6 | 7 | const initChatbot = async () => { 8 | try { 9 | await chatbotService.init(settings); 10 | setIsChatbotReady(true); 11 | return true; 12 | } catch (error) { 13 | setError(error); 14 | setIsChatbotReady(false); 15 | return false; 16 | } 17 | }; 18 | 19 | const sendMessage = async (message) => { 20 | console.log("Sending message: ", message); 21 | try { 22 | const response = await chatbotService.sendMessage(message); 23 | setResponse(response); 24 | } catch (error) { 25 | setError(error); 26 | } 27 | }; 28 | 29 | return { 30 | initChatbot, 31 | sendMessage, 32 | error, 33 | }; 34 | };
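The promise-per-event pattern in chatbotService never settles if the server drops a reply. A hedged variant that adds a timeout — the helper name and the 10-second budget are my own, not part of the project:

// Hypothetical helper: wait for one socket event, or fail after timeoutMs.
function waitForEvent(socket, event, timeoutMs = 10000) {
  return new Promise((resolve, reject) => {
    const timer = setTimeout(() => {
      socket.off(event, onEvent);
      reject(new Error(`Timed out waiting for "${event}"`));
    }, timeoutMs);
    const onEvent = (payload) => {
      clearTimeout(timer);
      payload ? resolve(payload) : reject(payload);
    };
    socket.once(event, onEvent);
  });
}
// e.g. inside sendMessage: return waitForEvent(this.socket, "responseMessage");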
-------------------------------------------------------------------------------- /src/components/hooks/useSpeechRecognition.jsx: -------------------------------------------------------------------------------- 1 | import { useState, useEffect } from "react"; 2 | import * as sdk from "microsoft-cognitiveservices-speech-sdk"; 3 | 4 | export const useSpeechRecognition = (speechLanguage, tts_voice, speechText, setSpeechText, setListening) => { 5 | let speechConfig = sdk.SpeechConfig.fromSubscription(import.meta.env.VITE_AZURE_SPEECH_KEY, import.meta.env.VITE_AZURE_SPEECH_REGION); 6 | 7 | speechConfig.speechSynthesisVoiceName = tts_voice; 8 | speechConfig.speechRecognitionLanguage = speechLanguage; 9 | 10 | let speechAudioConfig = sdk.AudioConfig.fromDefaultMicrophoneInput(); 11 | let speechRecognizer = new sdk.SpeechRecognizer(speechConfig, speechAudioConfig); 12 | 13 | const updateSpeechConfig = (newSpeechLanguage, newTtsVoice) => { 14 | speechConfig.speechSynthesisVoiceName = newTtsVoice; 15 | speechConfig.speechRecognitionLanguage = newSpeechLanguage; 16 | speechRecognizer.close(); 17 | speechRecognizer = new sdk.SpeechRecognizer(speechConfig, speechAudioConfig); 18 | }; 19 | 20 | const startListening = () => { 21 | setListening(true); 22 | speechRecognizer = new sdk.SpeechRecognizer(speechConfig, speechAudioConfig); 23 | 24 | // Stream partial hypotheses into the input while recognition is in progress 25 | speechRecognizer.recognizing = (_, event) => { 26 | setSpeechText(event.result.text); 27 | }; 28 | 29 | speechRecognizer.recognizeOnceAsync((result) => { 30 | if (result.reason === sdk.ResultReason.RecognizedSpeech) { 31 | setSpeechText(result.text); 32 | setListening(false); 33 | } 34 | }); 35 | }; 36 | 37 | const stopListening = () => { 38 | speechRecognizer.recognizing = null; 39 | speechRecognizer.close(); 40 | setListening(false); 41 | }; 42 | 43 | return { 44 | startListening, 45 | stopListening, 46 | updateSpeechConfig 47 | }; 48 | }; 49 | -------------------------------------------------------------------------------- /src/index.css: -------------------------------------------------------------------------------- 1 | @import "/node_modules/@fortawesome/fontawesome-pro/css/all.css"; 2 | 3 | #root { 4 | width: 100vw; 5 | height: 100vh; 6 | } 7 | 8 | body { 9 | -webkit-font-smoothing: antialiased; 10 | -moz-osx-font-smoothing: grayscale; 11 | font-family: "Space Mono", monospace; 12 | margin: 0; 13 | font-size: 1em; 14 | } 15 | 16 | html { 17 | font-size: 13px; 18 | } 19 | 20 | @media screen and (min-width: 1000px) { 21 | html { 22 | /* font-size: calc([minimum size] + ([maximum size] - [minimum size]) * ((100vw - [minimum viewport width]) / ([maximum viewport width] - [minimum viewport width]))); */ 23 | font-size: calc(13px + 3 * ((100vw - 1000px) / 900)); 24 | } 25 | } 26 | 27 | @media screen and (min-width: 1900px) { 28 | html { 29 | font-size: 20px; 30 | } 31 | } 32 | 33 | .main-container { 34 | position: relative; 35 | display: grid; 36 | width: 100%; 37 | height: 100%; 38 | background: #141313; 39 | } 40 | 41 | .main-container .loading { 42 | position: absolute; 43 | background: #141313; 44 | width: 100%; 45 | height: 100%; 46 | font-size: 2em; 47 | color: #fff; 48 | display: flex; 49 | justify-content: center; 50 | align-items: center; 51 | flex-direction: column; 52 | gap: 1rem; 53 | z-index: 9999; 54 | opacity: 0; 55 | pointer-events: none; 56 | transition: all 0.3s ease-in-out; 57 | } 58 | 59 | .main-container[data-chatbot-ready="false"] .loading { 60 | opacity: 1; 61 | pointer-events: all; 62 | } 63 | 64 | .main-container .loading i { 65 | height: fit-content; 66 | width: fit-content; 67 | } 68 | 69 | .main-container .loading span { 70 | font-size: 1.2rem; 71 | } 72 | 73 | .canvas-wrapper { 74 | position: relative; 75 | display: grid; 76 | height: 100vh; 77 | } 78 | 79 | .chatbotInputContainer { 80 | position: absolute; 81 | pointer-events: all; 82 | bottom: 0; 83 | width: 90%; 84 | height: fit-content; 85 | background-color: #ffffffa9; 86 | border-top: 1px solid #ccc; 87 | display: flex; 88 | align-items: center; 89 | justify-content: center; 90 | left: 0; 91 | right: 0; 92 | margin: 0 auto; 93 | bottom: 2em; 94 | padding: 1em 0; 95 | border-radius: 20em; 96 | } 97 | 98 | .chatbotSettings { 99 | position: fixed; 100 | top: 10vh; 101 | width: calc(65% - 8em); 102 | height: calc(70vh - 8em); 103 | bottom: 10vh; 104 | overflow-y: scroll; 105 | left: 0; 106 | right: 0; 107 | margin: auto; 108 | background: #181c20; 109 | color: #fff; 110 | border-radius: 2.4em; 111 | padding: 4em; 112 | transition: all 0.3s ease-in-out; 113 | -ms-overflow-style: none; /* IE and Edge */ 114 | scrollbar-width: none; /* Firefox */ 115 | } 116 | 117 | .chatbotSettings::-webkit-scrollbar { 118 | display: none; 119 | } 120 | 121 | .chatbotSettings[data-visible="false"] { 122 | transform: scale(0.5); 123 | opacity: 0; 124 | pointer-events: none; 125 | } 126 | 127 | .chatbotInput { 128 | width: 100%; 129
| padding: 0 3em 0 1.2em; 130 | } 131 | 132 | .chatbotInput .chatbotInput_container { 133 | display: grid; 134 | grid-template-columns: auto max-content; 135 | align-items: center; 136 | gap: 2em; 137 | } 138 | 139 | .chatbotInput .chatbotInput_container i { 140 | cursor: pointer; 141 | color: #3f100c; 142 | font-size: 1.6em; 143 | } 144 | 145 | .chatbotInput .chatbotInput_container i:hover { 146 | transform: scale(1.3); 147 | } 148 | 149 | .chatbotSettings .closeButton { 150 | position: absolute; 151 | top: 3em; 152 | right: 3em; 153 | cursor: pointer; 154 | font-size: 1.5rem; 155 | transition: all 0.3s ease-in-out; 156 | width: 0.8em; 157 | height: 0.8em; 158 | border-radius: 50%; 159 | padding: 0.5rem; 160 | border: 1px solid #fff; 161 | display: grid; 162 | justify-items: center; 163 | align-items: center; 164 | } 165 | 166 | .chatbotSettings .closeButton:hover i { 167 | transform: scale(1.2); 168 | color: #181c20; 169 | } 170 | 171 | .chatbotSettings .closeButton i { 172 | font-size: 0.8rem; 173 | line-height: 0; 174 | } 175 | 176 | .chatbotSettings .closeButton:hover { 177 | background: #fff; 178 | } 179 | 180 | .settingsContainer { 181 | display: flex; 182 | height: 100%; 183 | flex-direction: column; 184 | margin-bottom: 8em; 185 | } 186 | 187 | .settingsContainer form.settings { 188 | display: grid; 189 | gap: 0.5em; 190 | grid-template-columns: 1fr 1fr; 191 | height: 100%; 192 | column-gap: 2em; 193 | } 194 | 195 | .settingsContainer form.settings .setting { 196 | display: flex; 197 | color: #464c5f; 198 | flex-direction: column; 199 | gap: 0.5em; 200 | } 201 | 202 | .settingsContainer form.settings .setting input[type="text"], 203 | .settingsContainer form.settings .setting select { 204 | font-size: 1em; 205 | padding: 0.4rem 1em; 206 | border: none; 207 | background: #373c4b; 208 | border-radius: 0.3em; 209 | color: #ffffff63; 210 | } 211 | 212 | .settingsContainer form.settings button.btn_outline { 213 | background: #373c4b; 214 | border: 1px solid #fff; 215 | color: #fff; 216 | padding: 1em 2em; 217 | border-radius: 30em; 218 | margin-top: 2em; 219 | cursor: pointer; 220 | } 221 | 222 | .settingsContainer form.settings.invalid #link_to_resume { 223 | border: 1px solid #ff0000; 224 | } 225 | 226 | .chatbotInput .chatbotInput_container form { 227 | display: grid; 228 | width: 100%; 229 | grid-template-columns: auto max-content max-content; 230 | gap: 1.5em; 231 | } 232 | 233 | .chatbotInput .chatbotInput_container form input { 234 | border-radius: 25em; 235 | background: #fff; 236 | border: none; 237 | padding: 0.4em 2em; 238 | font-size: 1em; 239 | } 240 | 241 | .chatbotInput .chatbotInput_container form button { 242 | display: grid; 243 | cursor: pointer; 244 | justify-items: center; 245 | align-items: center; 246 | width: 4rem; 247 | height: 4rem; 248 | border: none; 249 | border-radius: 50%; 250 | padding: 0; 251 | margin: 0; 252 | font-size: 0.8em; 253 | transition: all 0.3s ease-in-out; 254 | } 255 | 256 | .chatbotInput .chatbotInput_container form button:hover { 257 | background: #3f100c; 258 | color: #fff; 259 | } 260 | 261 | .chatbotInput .chatbotInput_container form button i { 262 | color: inherit; 263 | } 264 | 265 | .chatbotInput .chatbotInput_container form input:focus { 266 | outline: none; 267 | } 268 | 269 | .chatbotInputWrap { 270 | position: absolute; 271 | top: 0; 272 | left: 0; 273 | width: 100%; 274 | height: 100%; 275 | pointer-events: none; 276 | } 277 | -------------------------------------------------------------------------------- /src/main.jsx: 
-------------------------------------------------------------------------------- 1 | import React from 'react' 2 | import ReactDOM from 'react-dom/client' 3 | import App from './App' 4 | import './index.css' 5 | 6 | ReactDOM.createRoot(document.getElementById('root')).render( 7 | 8 | 9 | , 10 | ) 11 | -------------------------------------------------------------------------------- /vite.config.js: -------------------------------------------------------------------------------- 1 | import { defineConfig } from 'vite' 2 | import react from '@vitejs/plugin-react' 3 | 4 | // https://vitejs.dev/config/ 5 | export default defineConfig({ 6 | plugins: [react()] 7 | }) 8 | --------------------------------------------------------------------------------
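For a production run, server.js serves the built front end from dist and reads the port from PORT — a sketch of the equivalent of what the Azure workflow above deploys (the exact commands are inferred from server.js and package.json, not documented by the project):

npm run build                        # Vite emits the dist/ folder
NODE_ENV=production node server.js   # serves dist/ and the Socket.IO API on $PORT (default 5000)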