├── .DS_Store ├── .github └── FUNDING.yml ├── README.md ├── SpeechManager.js ├── assets ├── Stan.gltf └── siteOGImage.jpg ├── audioManager.js ├── game.js ├── index.html ├── main.js └── styles.css /.DS_Store: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/collidingScopes/3d-model-playground/d20877f2790e28681dab8a40fc51fa52fecfe73b/.DS_Store -------------------------------------------------------------------------------- /.github/FUNDING.yml: -------------------------------------------------------------------------------- 1 | # These are supported funding model platforms 2 | 3 | github: [collidingScopes] 4 | patreon: # Replace with a single Patreon username 5 | open_collective: # Replace with a single Open Collective username 6 | ko_fi: # Replace with a single Ko-fi username 7 | tidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel 8 | community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry 9 | liberapay: # Replace with a single Liberapay username 10 | issuehunt: # Replace with a single IssueHunt username 11 | lfx_crowdfunding: # Replace with a single LFX Crowdfunding project-name e.g., cloud-foundry 12 | polar: # Replace with a single Polar username 13 | buy_me_a_coffee: # Replace with a single Buy Me a Coffee username 14 | thanks_dev: # Replace with a single thanks.dev username 15 | custom: # Replace with up to 4 custom sponsorship URLs e.g., ['link1', 'link2'] 16 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # 3D Model Playground 2 | 3 | Control 3D models using hand gestures and voice commands in real-time. 4 | 5 | An interactive web app built with threejs, mediapipe computer vision, web speech API, and rosebud AI. 6 | 7 | - Say "drag", "rotate", "scale", or "animate" to change the interaction mode 8 | - Pinch fingers to control the 3D model 9 | - Drag/drop a new 3D model onto the page to import it (GLTF format only for now) 10 | 11 | [Video](https://x.com/measure_plan/status/1929900748235550912) | [Live Demo](https://collidingscopes.github.io/3d-model-playground/) 12 | 13 | ## Requirements 14 | 15 | - Modern web browser with WebGL support 16 | - Camera / microphone access 17 | 18 | ## Technologies 19 | 20 | - **Three.js** for 3D rendering 21 | - **MediaPipe** for hand tracking and gesture recognition 22 | - **Web Speech API** for speech recognition 23 | - **HTML5 Canvas** for visual feedback 24 | - **JavaScript** for real-time interaction 25 | 26 | ## Setup for Development 27 | 28 | ```bash 29 | # Clone this repository 30 | git clone https://github.com/collidingScopes/3d-model-playground 31 | 32 | # Navigate to the project directory 33 | cd 3d-model-playground 34 | 35 | # Serve with your preferred method (example using Python) 36 | python -m http.server 37 | ``` 38 | 39 | Then navigate to `http://localhost:8000` in your browser. 
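The recognized voice commands ("drag", "rotate", "scale", "animate", plus aliases such as "rotation", "size", "zoom", and "animation") are mapped to interaction modes inside `SpeechManager.js`. If you want to reuse that wiring outside of this app, a minimal standalone sketch might look like the following (hypothetical usage, assuming the script is loaded as an ES module; in the app itself, `game.js` constructs the `SpeechManager` in `_setupSpeechRecognition()` and uses the command callback to switch modes):

```js
import { SpeechManager } from './SpeechManager.js';

// Hypothetical standalone wiring: log transcripts and react to the
// recognized mode commands ("drag", "rotate", "scale", "animate").
const speech = new SpeechManager(
  (finalText, interimText) => console.log('Heard:', finalText || interimText), // onTranscript
  (active) => console.log('Recognition active:', active),                      // onRecognitionActive
  (mode) => console.log('Switch interaction mode to:', mode)                   // onCommandRecognized
);

// Call from a user gesture so the browser can prompt for microphone access.
document.addEventListener('click', () => speech.requestPermissionAndStart(), { once: true });
```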
40 | 41 | ## License 42 | 43 | MIT License 44 | 45 | ## Credits 46 | 47 | - Three.js - https://threejs.org/ 48 | - MediaPipe - https://mediapipe.dev/ 49 | - Rosebud AI - https://rosebud.ai/ 50 | - Quaternius 3D models - https://quaternius.com/ 51 | 52 | ## Related Projects 53 | 54 | You might also like some of my other open source projects: 55 | 56 | - [Threejs hand tracking tutorial](https://collidingScopes.github.io/threejs-handtracking-101) - Basic hand tracking setup with threejs and MediaPipe computer vision 57 | - [Particular Drift](https://collidingScopes.github.io/particular-drift) - Turn photos into flowing particle animations 58 | - [Liquid Logo](https://collidingScopes.github.io/liquid-logo) - Transform logos and icons into liquid metal animations 59 | - [Video-to-ASCII](https://collidingScopes.github.io/ascii) - Convert videos into ASCII pixel art 60 | 61 | ## Contact 62 | 63 | - Instagram: [@stereo.drift](https://www.instagram.com/stereo.drift/) 64 | - Twitter/X: [@measure_plan](https://x.com/measure_plan) 65 | - Email: [stereodriftvisuals@gmail.com](mailto:stereodriftvisuals@gmail.com) 66 | - GitHub: [collidingScopes](https://github.com/collidingScopes) 67 | 68 | ## Donations 69 | 70 | If you found this tool useful, feel free to buy me a coffee. 71 | 72 | My name is Alan, and I enjoy building open source software for computer vision, games, and more. This would be much appreciated during late-night coding sessions! 73 | 74 | [](https://www.buymeacoffee.com/stereoDrift) -------------------------------------------------------------------------------- /SpeechManager.js: -------------------------------------------------------------------------------- 1 | function asyncGeneratorStep(gen, resolve, reject, _next, _throw, key, arg) { 2 | try { 3 | var info = gen[key](arg); 4 | var value = info.value; 5 | } catch (error) { 6 | reject(error); 7 | return; 8 | } 9 | if (info.done) { 10 | resolve(value); 11 | } else { 12 | Promise.resolve(value).then(_next, _throw); 13 | } 14 | } 15 | function _async_to_generator(fn) { 16 | return function() { 17 | var self = this, args = arguments; 18 | return new Promise(function(resolve, reject) { 19 | var gen = fn.apply(self, args); 20 | function _next(value) { 21 | asyncGeneratorStep(gen, resolve, reject, _next, _throw, "next", value); 22 | } 23 | function _throw(err) { 24 | asyncGeneratorStep(gen, resolve, reject, _next, _throw, "throw", err); 25 | } 26 | _next(undefined); 27 | }); 28 | }; 29 | } 30 | function _class_call_check(instance, Constructor) { 31 | if (!(instance instanceof Constructor)) { 32 | throw new TypeError("Cannot call a class as a function"); 33 | } 34 | } 35 | function _defineProperties(target, props) { 36 | for(var i = 0; i < props.length; i++){ 37 | var descriptor = props[i]; 38 | descriptor.enumerable = descriptor.enumerable || false; 39 | descriptor.configurable = true; 40 | if ("value" in descriptor) descriptor.writable = true; 41 | Object.defineProperty(target, descriptor.key, descriptor); 42 | } 43 | } 44 | function _create_class(Constructor, protoProps, staticProps) { 45 | if (protoProps) _defineProperties(Constructor.prototype, protoProps); 46 | if (staticProps) _defineProperties(Constructor, staticProps); 47 | return Constructor; 48 | } 49 | function _ts_generator(thisArg, body) { 50 | var f, y, t, g, _ = { 51 | label: 0, 52 | sent: function() { 53 | if (t[0] & 1) throw t[1]; 54 | return t[1]; 55 | }, 56 | trys: [], 57 | ops: [] 58 | }; 59 | return g = { 60 | next: verb(0), 61 | "throw": verb(1), 62 | "return": verb(2) 
63 | }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { 64 | return this; 65 | }), g; 66 | function verb(n) { 67 | return function(v) { 68 | return step([ 69 | n, 70 | v 71 | ]); 72 | }; 73 | } 74 | function step(op) { 75 | if (f) throw new TypeError("Generator is already executing."); 76 | while(_)try { 77 | if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t; 78 | if (y = 0, t) op = [ 79 | op[0] & 2, 80 | t.value 81 | ]; 82 | switch(op[0]){ 83 | case 0: 84 | case 1: 85 | t = op; 86 | break; 87 | case 4: 88 | _.label++; 89 | return { 90 | value: op[1], 91 | done: false 92 | }; 93 | case 5: 94 | _.label++; 95 | y = op[1]; 96 | op = [ 97 | 0 98 | ]; 99 | continue; 100 | case 7: 101 | op = _.ops.pop(); 102 | _.trys.pop(); 103 | continue; 104 | default: 105 | if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { 106 | _ = 0; 107 | continue; 108 | } 109 | if (op[0] === 3 && (!t || op[1] > t[0] && op[1] < t[3])) { 110 | _.label = op[1]; 111 | break; 112 | } 113 | if (op[0] === 6 && _.label < t[1]) { 114 | _.label = t[1]; 115 | t = op; 116 | break; 117 | } 118 | if (t && _.label < t[2]) { 119 | _.label = t[2]; 120 | _.ops.push(op); 121 | break; 122 | } 123 | if (t[2]) _.ops.pop(); 124 | _.trys.pop(); 125 | continue; 126 | } 127 | op = body.call(thisArg, _); 128 | } catch (e) { 129 | op = [ 130 | 6, 131 | e 132 | ]; 133 | y = 0; 134 | } finally{ 135 | f = t = 0; 136 | } 137 | if (op[0] & 5) throw op[1]; 138 | return { 139 | value: op[0] ? op[1] : void 0, 140 | done: true 141 | }; 142 | } 143 | } 144 | export var SpeechManager = /*#__PURE__*/ function() { 145 | "use strict"; 146 | function SpeechManager(onTranscript, onRecognitionActive, onCommandRecognized) { 147 | var _this = this; 148 | _class_call_check(this, SpeechManager); 149 | this.onTranscript = onTranscript; 150 | this.onRecognitionActive = onRecognitionActive; // Callback for recognition state 151 | this.onCommandRecognized = onCommandRecognized; // Callback for recognized commands 152 | this.recognition = null; 153 | this.isRecognizing = false; 154 | this.finalTranscript = ''; 155 | this.interimTranscript = ''; 156 | var SpeechRecognition = window.SpeechRecognition || window.webkitSpeechRecognition; 157 | if (SpeechRecognition) { 158 | this.recognition = new SpeechRecognition(); 159 | this.recognition.continuous = true; // Keep listening even after a pause 160 | this.recognition.interimResults = true; // Get results while speaking 161 | this.recognition.onstart = function() { 162 | _this.isRecognizing = true; 163 | console.log('Speech recognition started.'); 164 | if (_this.onRecognitionActive) _this.onRecognitionActive(true); 165 | }; 166 | this.recognition.onresult = function(event) { 167 | _this.interimTranscript = ''; 168 | for(var i = event.resultIndex; i < event.results.length; ++i){ 169 | if (event.results[i].isFinal) { 170 | // Append to finalTranscript and then clear it for the next utterance 171 | // This way, `finalTranscript` holds the *current complete* utterance. 
172 | var currentFinalTranscript = event.results[i][0].transcript.trim().toLowerCase(); 173 | _this.finalTranscript += currentFinalTranscript; // Append to potentially longer session transcript if needed, though we process per utterance 174 | if (_this.onTranscript) { 175 | // Display the raw transcript before processing as command 176 | _this.onTranscript(event.results[i][0].transcript, ''); // Send final, clear interim 177 | } 178 | // Check for commands 179 | var commandMap = { 180 | 'drag': 'drag', 181 | 'rotate': 'rotate', 182 | 'rotation': 'rotate', 183 | 'scale': 'scale', 184 | 'size': 'scale', 185 | 'zoom': 'scale', 186 | 'animate': 'animate', 187 | 'anime': 'animate', 188 | 'animation': 'animate' // Alias for animate 189 | }; 190 | var spokenCommands = Object.keys(commandMap); 191 | var _iteratorNormalCompletion = true, _didIteratorError = false, _iteratorError = undefined; 192 | try { 193 | for(var _iterator = spokenCommands[Symbol.iterator](), _step; !(_iteratorNormalCompletion = (_step = _iterator.next()).done); _iteratorNormalCompletion = true){ 194 | var spokenCmd = _step.value; 195 | if (currentFinalTranscript.includes(spokenCmd)) { 196 | var actualCommand = commandMap[spokenCmd]; 197 | if (_this.onCommandRecognized) { 198 | _this.onCommandRecognized(actualCommand); 199 | } 200 | break; // Process the first command found (and its alias) 201 | } 202 | } 203 | } catch (err) { 204 | _didIteratorError = true; 205 | _iteratorError = err; 206 | } finally{ 207 | try { 208 | if (!_iteratorNormalCompletion && _iterator.return != null) { 209 | _iterator.return(); 210 | } 211 | } finally{ 212 | if (_didIteratorError) { 213 | throw _iteratorError; 214 | } 215 | } 216 | } 217 | // Reset finalTranscript for the next full utterance if you are processing utterance by utterance 218 | // If you want to accumulate, then don't reset here. 219 | // For command processing, resetting per utterance is usually best. 
220 | _this.finalTranscript = ''; 221 | } else { 222 | _this.interimTranscript += event.results[i][0].transcript; 223 | if (_this.onTranscript) { 224 | _this.onTranscript(null, _this.interimTranscript); 225 | } 226 | } 227 | } 228 | // If only interim results were processed in this event batch, ensure onTranscript is called 229 | if (_this.interimTranscript && !event.results[event.results.length - 1].isFinal) { 230 | if (_this.onTranscript) { 231 | _this.onTranscript(null, _this.interimTranscript); 232 | } 233 | } 234 | }; 235 | this.recognition.onerror = function(event) { 236 | console.error('Speech recognition error:', event.error); 237 | var oldIsRecognizing = _this.isRecognizing; 238 | _this.isRecognizing = false; 239 | _this.finalTranscript = ''; // Clear transcript on error 240 | _this.interimTranscript = ''; 241 | if (_this.onTranscript) _this.onTranscript('', ''); // Clear display 242 | if (oldIsRecognizing && _this.onRecognitionActive) _this.onRecognitionActive(false); 243 | // Automatically restart if it's an 'aborted' or 'no-speech' error 244 | if (event.error === 'aborted' || event.error === 'no-speech') { 245 | console.log('Restarting speech recognition due to inactivity or abort.'); 246 | // Don't call startRecognition directly, let onend handle it if continuous 247 | } 248 | }; 249 | this.recognition.onend = function() { 250 | var oldIsRecognizing = _this.isRecognizing; 251 | _this.isRecognizing = false; 252 | console.log('Speech recognition ended.'); 253 | _this.finalTranscript = ''; // Clear transcript on end 254 | _this.interimTranscript = ''; 255 | if (_this.onTranscript) _this.onTranscript('', ''); // Clear display 256 | if (oldIsRecognizing && _this.onRecognitionActive) _this.onRecognitionActive(false); 257 | // If it ended and continuous is true, restart it. 258 | // This handles cases where the browser might stop it. 259 | if (_this.recognition.continuous) { 260 | console.log('Continuous mode: Restarting speech recognition.'); 261 | _this.startRecognition(); // startRecognition already resets transcripts 262 | } 263 | }; 264 | } else { 265 | console.warn('Web Speech API is not supported in this browser.'); 266 | } 267 | } 268 | _create_class(SpeechManager, [ 269 | { 270 | key: "startRecognition", 271 | value: function startRecognition() { 272 | var _this = this; 273 | if (this.recognition && !this.isRecognizing) { 274 | try { 275 | this.finalTranscript = ''; // Reset transcript 276 | this.interimTranscript = ''; 277 | this.recognition.start(); 278 | } catch (e) { 279 | console.error("Error starting speech recognition:", e); 280 | // This can happen if it's already started or due to permissions 281 | if (e.name === 'InvalidStateError' && this.isRecognizing) { 282 | // Already started, do nothing 283 | } else { 284 | // Attempt to restart if it fails for other reasons (e.g. 
after an error) 285 | setTimeout(function() { 286 | return _this.startRecognition(); 287 | }, 500); 288 | } 289 | } 290 | } 291 | } 292 | }, 293 | { 294 | key: "stopRecognition", 295 | value: function stopRecognition() { 296 | if (this.recognition && this.isRecognizing) { 297 | this.recognition.stop(); 298 | } 299 | } 300 | }, 301 | { 302 | key: "requestPermissionAndStart", 303 | value: // Call this on user interaction to request microphone permission 304 | function requestPermissionAndStart() { 305 | var _this = this; 306 | return _async_to_generator(function() { 307 | var err; 308 | return _ts_generator(this, function(_state) { 309 | switch(_state.label){ 310 | case 0: 311 | if (!_this.recognition) { 312 | console.log("Speech recognition not supported."); 313 | return [ 314 | 2 315 | ]; 316 | } 317 | _state.label = 1; 318 | case 1: 319 | _state.trys.push([ 320 | 1, 321 | 3, 322 | , 323 | 4 324 | ]); 325 | // Attempt to get microphone access (this might prompt the user) 326 | return [ 327 | 4, 328 | navigator.mediaDevices.getUserMedia({ 329 | audio: true 330 | }) 331 | ]; 332 | case 2: 333 | _state.sent(); 334 | console.log("Microphone permission granted."); 335 | _this.startRecognition(); 336 | return [ 337 | 3, 338 | 4 339 | ]; 340 | case 3: 341 | err = _state.sent(); 342 | console.error("Microphone permission denied or error:", err); 343 | if (_this.onTranscript) { 344 | _this.onTranscript("Microphone access denied. Please allow microphone access in your browser settings.", ""); 345 | } 346 | return [ 347 | 3, 348 | 4 349 | ]; 350 | case 4: 351 | return [ 352 | 2 353 | ]; 354 | } 355 | }); 356 | })(); 357 | } 358 | } 359 | ]); 360 | return SpeechManager; 361 | }(); -------------------------------------------------------------------------------- /assets/siteOGImage.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/collidingScopes/3d-model-playground/d20877f2790e28681dab8a40fc51fa52fecfe73b/assets/siteOGImage.jpg -------------------------------------------------------------------------------- /audioManager.js: -------------------------------------------------------------------------------- 1 | // Basic Web Audio API Sound Manager 2 | function _class_call_check(instance, Constructor) { 3 | if (!(instance instanceof Constructor)) { 4 | throw new TypeError("Cannot call a class as a function"); 5 | } 6 | } 7 | function _defineProperties(target, props) { 8 | for(var i = 0; i < props.length; i++){ 9 | var descriptor = props[i]; 10 | descriptor.enumerable = descriptor.enumerable || false; 11 | descriptor.configurable = true; 12 | if ("value" in descriptor) descriptor.writable = true; 13 | Object.defineProperty(target, descriptor.key, descriptor); 14 | } 15 | } 16 | function _create_class(Constructor, protoProps, staticProps) { 17 | if (protoProps) _defineProperties(Constructor.prototype, protoProps); 18 | if (staticProps) _defineProperties(Constructor, staticProps); 19 | return Constructor; 20 | } 21 | export var AudioManager = /*#__PURE__*/ function() { 22 | "use strict"; 23 | function AudioManager() { 24 | _class_call_check(this, AudioManager); 25 | // Use '||' for broader browser compatibility, though 'webkit' is largely legacy 26 | var AudioContext = window.AudioContext || window.webkitAudioContext; 27 | this.audioCtx = null; 28 | this.isInitialized = false; 29 | this.lastClickTime = 0; 30 | this.clickInterval = 200; // Milliseconds between clicks for rhythm 31 | if (AudioContext) { 32 | try { 33 | this.audioCtx = new 
AudioContext(); 34 | this.isInitialized = true; 35 | console.log("AudioContext created successfully."); 36 | } catch (e) { 37 | console.error("Error creating AudioContext:", e); 38 | } 39 | } else { 40 | console.warn("Web Audio API is not supported in this browser."); 41 | } 42 | } 43 | _create_class(AudioManager, [ 44 | { 45 | // Resume audio context after user interaction (required by many browsers) 46 | key: "resumeContext", 47 | value: function resumeContext() { 48 | if (this.audioCtx && this.audioCtx.state === 'suspended') { 49 | this.audioCtx.resume().then(function() { 50 | console.log("AudioContext resumed successfully."); 51 | }).catch(function(e) { 52 | return console.error("Error resuming AudioContext:", e); 53 | }); 54 | } 55 | } 56 | }, 57 | { 58 | key: "playInteractionClickSound", 59 | value: function playInteractionClickSound() { 60 | if (!this.isInitialized || !this.audioCtx || this.audioCtx.state !== 'running') return; 61 | var internalCurrentTime = this.audioCtx.currentTime; 62 | // Check if enough time has passed since the last click 63 | if (internalCurrentTime - this.lastClickTime < this.clickInterval / 1000) { 64 | return; // Too soon for the next click 65 | } 66 | this.lastClickTime = internalCurrentTime; 67 | var oscillator = this.audioCtx.createOscillator(); 68 | var gainNode = this.audioCtx.createGain(); 69 | oscillator.connect(gainNode); 70 | gainNode.connect(this.audioCtx.destination); 71 | oscillator.type = 'sine'; // Softer waveform for a 'tic' 72 | oscillator.frequency.setValueAtTime(1200, this.audioCtx.currentTime); // Lowered base pitch 73 | // A very quick pitch drop can make it sound more 'clicky' 74 | oscillator.frequency.exponentialRampToValueAtTime(600, this.audioCtx.currentTime + 0.01); // Lowered pitch drop target 75 | var clickVolume = 0.08; // Increased volume slightly 76 | gainNode.gain.setValueAtTime(0, this.audioCtx.currentTime); // Start silent for a clean attack 77 | gainNode.gain.linearRampToValueAtTime(clickVolume, this.audioCtx.currentTime + 0.003); // Very fast attack 78 | gainNode.gain.exponentialRampToValueAtTime(0.0001, this.audioCtx.currentTime + 0.005); // Keep decay short for 'tic' 79 | oscillator.start(this.audioCtx.currentTime); 80 | oscillator.stop(this.audioCtx.currentTime + 0.005); // Match decay duration 81 | } 82 | } 83 | ]); 84 | return AudioManager; 85 | }(); 86 | -------------------------------------------------------------------------------- /game.js: -------------------------------------------------------------------------------- 1 | function asyncGeneratorStep(gen, resolve, reject, _next, _throw, key, arg) { 2 | try { 3 | var info = gen[key](arg); 4 | var value = info.value; 5 | } catch (error) { 6 | reject(error); 7 | return; 8 | } 9 | if (info.done) { 10 | resolve(value); 11 | } else { 12 | Promise.resolve(value).then(_next, _throw); 13 | } 14 | } 15 | function _async_to_generator(fn) { 16 | return function() { 17 | var self = this, args = arguments; 18 | return new Promise(function(resolve, reject) { 19 | var gen = fn.apply(self, args); 20 | function _next(value) { 21 | asyncGeneratorStep(gen, resolve, reject, _next, _throw, "next", value); 22 | } 23 | function _throw(err) { 24 | asyncGeneratorStep(gen, resolve, reject, _next, _throw, "throw", err); 25 | } 26 | _next(undefined); 27 | }); 28 | }; 29 | } 30 | function _class_call_check(instance, Constructor) { 31 | if (!(instance instanceof Constructor)) { 32 | throw new TypeError("Cannot call a class as a function"); 33 | } 34 | } 35 | function _defineProperties(target, 
props) { 36 | for(var i = 0; i < props.length; i++){ 37 | var descriptor = props[i]; 38 | descriptor.enumerable = descriptor.enumerable || false; 39 | descriptor.configurable = true; 40 | if ("value" in descriptor) descriptor.writable = true; 41 | Object.defineProperty(target, descriptor.key, descriptor); 42 | } 43 | } 44 | function _create_class(Constructor, protoProps, staticProps) { 45 | if (protoProps) _defineProperties(Constructor.prototype, protoProps); 46 | if (staticProps) _defineProperties(Constructor, staticProps); 47 | return Constructor; 48 | } 49 | function _define_property(obj, key, value) { 50 | if (key in obj) { 51 | Object.defineProperty(obj, key, { 52 | value: value, 53 | enumerable: true, 54 | configurable: true, 55 | writable: true 56 | }); 57 | } else { 58 | obj[key] = value; 59 | } 60 | return obj; 61 | } 62 | function _object_spread(target) { 63 | for(var i = 1; i < arguments.length; i++){ 64 | var source = arguments[i] != null ? arguments[i] : {}; 65 | var ownKeys = Object.keys(source); 66 | if (typeof Object.getOwnPropertySymbols === "function") { 67 | ownKeys = ownKeys.concat(Object.getOwnPropertySymbols(source).filter(function(sym) { 68 | return Object.getOwnPropertyDescriptor(source, sym).enumerable; 69 | })); 70 | } 71 | ownKeys.forEach(function(key) { 72 | _define_property(target, key, source[key]); 73 | }); 74 | } 75 | return target; 76 | } 77 | function _ts_generator(thisArg, body) { 78 | var f, y, t, g, _ = { 79 | label: 0, 80 | sent: function() { 81 | if (t[0] & 1) throw t[1]; 82 | return t[1]; 83 | }, 84 | trys: [], 85 | ops: [] 86 | }; 87 | return g = { 88 | next: verb(0), 89 | "throw": verb(1), 90 | "return": verb(2) 91 | }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { 92 | return this; 93 | }), g; 94 | function verb(n) { 95 | return function(v) { 96 | return step([ 97 | n, 98 | v 99 | ]); 100 | }; 101 | } 102 | function step(op) { 103 | if (f) throw new TypeError("Generator is already executing."); 104 | while(_)try { 105 | if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t; 106 | if (y = 0, t) op = [ 107 | op[0] & 2, 108 | t.value 109 | ]; 110 | switch(op[0]){ 111 | case 0: 112 | case 1: 113 | t = op; 114 | break; 115 | case 4: 116 | _.label++; 117 | return { 118 | value: op[1], 119 | done: false 120 | }; 121 | case 5: 122 | _.label++; 123 | y = op[1]; 124 | op = [ 125 | 0 126 | ]; 127 | continue; 128 | case 7: 129 | op = _.ops.pop(); 130 | _.trys.pop(); 131 | continue; 132 | default: 133 | if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { 134 | _ = 0; 135 | continue; 136 | } 137 | if (op[0] === 3 && (!t || op[1] > t[0] && op[1] < t[3])) { 138 | _.label = op[1]; 139 | break; 140 | } 141 | if (op[0] === 6 && _.label < t[1]) { 142 | _.label = t[1]; 143 | t = op; 144 | break; 145 | } 146 | if (t && _.label < t[2]) { 147 | _.label = t[2]; 148 | _.ops.push(op); 149 | break; 150 | } 151 | if (t[2]) _.ops.pop(); 152 | _.trys.pop(); 153 | continue; 154 | } 155 | op = body.call(thisArg, _); 156 | } catch (e) { 157 | op = [ 158 | 6, 159 | e 160 | ]; 161 | y = 0; 162 | } finally{ 163 | f = t = 0; 164 | } 165 | if (op[0] & 5) throw op[1]; 166 | return { 167 | value: op[0] ? 
op[1] : void 0, 168 | done: true 169 | }; 170 | } 171 | } 172 | import * as THREE from 'three'; 173 | import { GLTFLoader } from 'three/loaders/GLTFLoader.js'; 174 | import { HandLandmarker, FilesetResolver } from 'https://esm.sh/@mediapipe/tasks-vision@0.10.14'; 175 | import { AudioManager } from './audioManager.js'; // Import the AudioManager 176 | import { SpeechManager } from './SpeechManager.js'; // Import SpeechManager 177 | export var Game = /*#__PURE__*/ function() { 178 | "use strict"; 179 | function Game(renderDiv) { 180 | var _this = this; 181 | _class_call_check(this, Game); 182 | this.renderDiv = renderDiv; 183 | this.scene = null; 184 | this.camera = null; 185 | this.renderer = null; 186 | this.videoElement = null; 187 | this.handLandmarker = null; 188 | this.lastVideoTime = -1; 189 | this.hands = []; // Stores data about detected hands (landmarks, anchor position, line group) 190 | this.handLineMaterial = null; // Material for hand lines 191 | this.fingertipMaterialHand1 = null; // Material for first hand's fingertip circles (blue) 192 | this.fingertipMaterialHand2 = null; // Material for second hand's fingertip circles (green) 193 | this.fingertipLandmarkIndices = [ 194 | 0, 195 | 4, 196 | 8, 197 | 12, 198 | 16, 199 | 20 200 | ]; // WRIST + TIP landmarks 201 | this.handConnections = null; // Landmark connection definitions 202 | // this.handCollisionRadius = 30; // Conceptual radius for hand collision, was 25 (sphere radius) - Not needed for template 203 | this.gameState = 'loading'; // loading, ready, tracking, error 204 | this.gameOverText = null; // Will be repurposed or simplified 205 | this.clock = new THREE.Clock(); 206 | this.audioManager = new AudioManager(); // Create an instance of AudioManager 207 | this.lastLandmarkPositions = [ 208 | [], 209 | [] 210 | ]; // Store last known smoothed positions for each hand's landmarks 211 | this.smoothingFactor = 0.4; // Alpha for exponential smoothing (0 < alpha <= 1). Smaller = more smoothing. 
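// Per frame, each landmark coordinate is blended as: smoothed = smoothingFactor * raw + (1 - smoothingFactor) * previous (see _updateHands).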
212 | this.loadedModels = {}; 213 | this.pandaModel = null; // Add reference for the Panda model 214 | this.animationMixer = null; // For Stan model animations 215 | this.animationClips = []; // To store all animation clips from the model 216 | this.animationActions = {}; // To store animation actions by name or index 217 | this.currentAction = null; // To keep track of the currently playing animation action 218 | this.speechManager = null; 219 | this.speechBubble = null; 220 | this.speechBubbleTimeout = null; 221 | this.isSpeechActive = false; // Track if speech recognition is active for styling 222 | this.grabbingHandIndex = -1; // -1: no hand, 0: first hand, 1: second hand grabbing 223 | this.pickedUpModel = null; // Reference to the model being dragged 224 | this.modelDragOffset = new THREE.Vector3(); // Offset between model and pinch point in 3D 225 | this.modelGrabStartDepth = 0; // To store the model's Z depth when grabbed 226 | this.interactionMode = 'drag'; // 'drag', 'rotate', 'scale', 'animate' - Default to drag 227 | this.interactionModeButtons = {}; // To store references to mode buttons 228 | this.loadedDroppedModelData = null; // To temporarily store parsed GLTF data 229 | this.interactionModeColors = { 230 | drag: { 231 | base: '#00FFFF', 232 | text: '#000000', 233 | hand: new THREE.Color('#00FFFF') 234 | }, 235 | rotate: { 236 | base: '#FF00FF', 237 | text: '#FFFFFF', 238 | hand: new THREE.Color('#FF00FF') 239 | }, 240 | scale: { 241 | base: '#FFFF00', 242 | text: '#000000', 243 | hand: new THREE.Color('#FFFF00') 244 | }, 245 | animate: { 246 | base: '#FFA500', 247 | text: '#000000', 248 | hand: new THREE.Color('#FFA500') 249 | } // Orange 250 | }; 251 | this.rotateLastHandX = null; // Stores the last hand X position for rotation calculation 252 | this.rotateSensitivity = 0.02; // Adjust for faster/slower rotation 253 | this.scaleInitialPinchDistance = null; // Stores the initial distance between two pinching hands 254 | this.scaleInitialModelScale = null; // Stores the model's scale when scaling starts 255 | this.scaleSensitivity = 0.05; // Adjust for faster/slower scaling - Increased from 0.02 to 0.05 256 | this.grabbingPulseSpeed = 8; // Speed of the grab pulse animation 257 | this.grabbingPulseAmplitude = 0.5; // How much the scale increases (e.g., 0.5 means 50% bigger at peak) 258 | this.pulseBaseScale = 1.0; // Base scale for non-pulsing and start of pulse 259 | this.fingertipDefaultOpacity = 0.3; // Default opacity for hand landmarks (Reduced from 0.6) 260 | this.fingertipGrabOpacity = 1.0; // Opacity when hand is actively grabbing/interacting 261 | this.instructionTextElement = document.querySelector("#instruction-text"); // DOM element for instruction text 262 | this.interactionModeInstructions = { 263 | drag: "Pinch to grab and move the model", 264 | rotate: "Pinch and move hand left/right to rotate", 265 | scale: "Use two hands. 
Pinch with both and move hands closer/farther", 266 | animate: "Pinch and move hand up/down to cycle animations" // Updated instruction 267 | }; 268 | this.animationControlHandIndex = -1; // Index of the hand controlling animation scrolling 269 | this.animationControlInitialPinchY = null; // Initial Y position of the pinch for animation scrolling 270 | this.animationScrollThreshold = 40; // Pixels of vertical movement to trigger an animation change (Reduced from 50) 271 | // Initialize asynchronously 272 | this._init().catch(function(error) { 273 | console.error("Initialization failed:", error); 274 | _this._showError("Initialization failed. Check console."); 275 | }); 276 | } 277 | _create_class(Game, [ 278 | { 279 | key: "_init", 280 | value: function _init() { 281 | var _this = this; 282 | return _async_to_generator(function() { 283 | return _ts_generator(this, function(_state) { 284 | switch(_state.label){ 285 | case 0: 286 | _this._setupDOM(); // Sets up basic DOM, including speech bubble container 287 | _this._setupThree(); 288 | _this._setupSpeechRecognition(); // Initialize SpeechManager 289 | return [ 290 | 4, 291 | _this._loadAssets() 292 | ]; 293 | case 1: 294 | _state.sent(); // Add asset loading step 295 | return [ 296 | 4, 297 | _this._setupHandTracking() 298 | ]; 299 | case 2: 300 | _state.sent(); // This needs to complete before we can proceed 301 | // Ensure webcam is playing before starting game logic dependent on it 302 | return [ 303 | 4, 304 | _this.videoElement.play() 305 | ]; 306 | case 3: 307 | _state.sent(); 308 | _this.audioManager.resumeContext(); // Resume audio context as game starts automatically 309 | _this.speechManager.requestPermissionAndStart(); // Start speech recognition 310 | _this.clock.start(); // Start the main clock as game starts automatically 311 | window.addEventListener('resize', _this._onResize.bind(_this)); 312 | _this.gameState = 'tracking'; // Change state to tracking to start immediately 313 | _this._animate(); // Start the animation loop (it will check state) 314 | return [ 315 | 2 316 | ]; 317 | } 318 | }); 319 | })(); 320 | } 321 | }, 322 | { 323 | key: "_setupDOM", 324 | value: function _setupDOM() { 325 | var _this = this; 326 | this.renderDiv.style.position = 'relative'; 327 | this.renderDiv.style.width = '100vw'; // Use viewport units for fullscreen 328 | this.renderDiv.style.height = '100vh'; 329 | this.renderDiv.style.overflow = 'hidden'; 330 | this.renderDiv.style.background = '#111'; // Fallback background 331 | // Start Screen Overlay and related DOM elements (title, instructions, loading text) removed. 
332 | // --- End Start Screen Overlay --- 333 | this.videoElement = document.createElement('video'); 334 | this.videoElement.style.position = 'absolute'; 335 | this.videoElement.style.top = '0'; 336 | this.videoElement.style.left = '0'; 337 | this.videoElement.style.width = '100%'; 338 | this.videoElement.style.height = '100%'; 339 | this.videoElement.style.objectFit = 'cover'; 340 | this.videoElement.style.transform = 'scaleX(-1)'; // Mirror view for intuitive control 341 | this.videoElement.autoplay = true; 342 | this.videoElement.muted = true; // Mute video to avoid feedback loops if audio was captured 343 | this.videoElement.playsInline = true; 344 | this.videoElement.style.zIndex = '0'; // Ensure video is behind THREE canvas 345 | this.renderDiv.appendChild(this.videoElement); 346 | // Container for Status text (formerly Game Over) and restart hint 347 | this.gameOverContainer = document.createElement('div'); 348 | this.gameOverContainer.style.position = 'absolute'; 349 | this.gameOverContainer.style.top = '50%'; 350 | this.gameOverContainer.style.left = '50%'; 351 | this.gameOverContainer.style.transform = 'translate(-50%, -50%)'; 352 | this.gameOverContainer.style.zIndex = '10'; 353 | this.gameOverContainer.style.display = 'none'; // Hidden initially 354 | this.gameOverContainer.style.pointerEvents = 'none'; // Don't block clicks 355 | this.gameOverContainer.style.textAlign = 'center'; // Center text elements within 356 | this.gameOverContainer.style.color = 'white'; // Default color, can be changed by _showError 357 | // this.gameOverContainer.style.textShadow = '2px 2px 4px black'; // Removed for flatter look 358 | this.gameOverContainer.style.fontFamily = '"Arial", "Helvetica Neue", Helvetica, sans-serif'; // Cleaner, modern sans-serif 359 | // Main Status Text (formerly Game Over Text) 360 | this.gameOverText = document.createElement('div'); // Will be 'gameOverText' internally 361 | this.gameOverText.innerText = 'STATUS'; // Generic placeholder 362 | this.gameOverText.style.fontSize = 'clamp(36px, 10vw, 72px)'; // Responsive font size 363 | this.gameOverText.style.fontWeight = 'bold'; 364 | this.gameOverText.style.marginBottom = '10px'; // Space below main text 365 | this.gameOverContainer.appendChild(this.gameOverText); 366 | // Restart Hint Text (may or may not be shown depending on context) 367 | this.restartHintText = document.createElement('div'); 368 | this.restartHintText.innerText = '(click to restart tracking)'; 369 | this.restartHintText.style.fontSize = 'clamp(16px, 3vw, 24px)'; 370 | this.restartHintText.style.fontWeight = 'normal'; 371 | this.restartHintText.style.opacity = '0.8'; // Slightly faded 372 | this.gameOverContainer.appendChild(this.restartHintText); 373 | this.renderDiv.appendChild(this.gameOverContainer); 374 | // --- Speech Bubble --- 375 | this.speechBubble = document.createElement('div'); 376 | this.speechBubble.id = 'speech-bubble'; 377 | this.speechBubble.style.position = 'absolute'; 378 | this.speechBubble.style.top = '10px'; // Changed from 20px to 10px 379 | this.speechBubble.style.left = '50%'; 380 | this.speechBubble.style.transform = 'translateX(-50%)'; 381 | this.speechBubble.style.padding = '15px 25px'; 382 | this.speechBubble.style.backgroundColor = 'rgba(255, 255, 255, 0.9)'; 383 | this.speechBubble.style.border = '2px solid black'; // Solid black border 384 | this.speechBubble.style.borderRadius = '4px'; // Sharper corners 385 | this.speechBubble.style.boxShadow = '4px 4px 0px rgba(0,0,0,1)'; // Hard shadow 386 | 
this.speechBubble.style.color = '#333'; 387 | this.speechBubble.style.fontFamily = '"Arial", "Helvetica Neue", Helvetica, sans-serif'; // Consistent modern sans-serif 388 | this.speechBubble.style.fontSize = 'clamp(16px, 3vw, 22px)'; 389 | this.speechBubble.style.maxWidth = '80%'; 390 | this.speechBubble.style.textAlign = 'center'; 391 | this.speechBubble.style.zIndex = '25'; // Above most things but below modal popups if any 392 | this.speechBubble.style.opacity = '0'; // Hidden initially, fade in 393 | // Added boxShadow, border, padding, fontSize, top to transition for smooth active state changes 394 | this.speechBubble.style.transition = 'opacity 0.5s ease-in-out, transform 0.3s ease-in-out, box-shadow 0.3s ease-in-out, border 0.3s ease-in-out, padding 0.3s ease-in-out, font-size 0.3s ease-in-out, top 0.3s ease-in-out'; 395 | this.speechBubble.style.pointerEvents = 'none'; // Not interactive 396 | this.speechBubble.innerHTML = "..."; // Default text 397 | this.renderDiv.appendChild(this.speechBubble); 398 | // Animation buttons container 399 | this.animationButtonsContainer = document.createElement('div'); 400 | this.animationButtonsContainer.id = 'animation-buttons-container'; 401 | this.animationButtonsContainer.style.position = 'absolute'; 402 | this.animationButtonsContainer.style.bottom = 'auto'; // Remove bottom positioning 403 | this.animationButtonsContainer.style.top = '10px'; // Position from the top, changed from 20px 404 | this.animationButtonsContainer.style.left = '10px'; // Position from the left, changed from 20px 405 | this.animationButtonsContainer.style.transform = 'none'; // Remove centering transform 406 | this.animationButtonsContainer.style.zIndex = '30'; // Above speech bubble 407 | this.animationButtonsContainer.style.display = 'flex'; 408 | this.animationButtonsContainer.style.flexDirection = 'column'; // Arrange buttons in a column 409 | this.animationButtonsContainer.style.gap = '4px'; // Reduced gap for tighter vertical layout 410 | this.animationButtonsContainer.style.opacity = '0'; // Start fully transparent for fade-in 411 | this.animationButtonsContainer.style.transition = 'opacity 0.3s ease-in-out'; // Smooth fade transition 412 | this.animationButtonsContainer.style.display = 'none'; // Initially hidden (will be set to flex by logic) 413 | this.renderDiv.appendChild(this.animationButtonsContainer); 414 | // Interaction Mode UI Container 415 | this.interactionModeContainer = document.createElement('div'); 416 | this.interactionModeContainer.id = 'interaction-mode-container'; 417 | this.interactionModeContainer.style.position = 'absolute'; 418 | this.interactionModeContainer.style.top = '10px'; // Changed from 20px 419 | this.interactionModeContainer.style.right = '10px'; // Changed from 20px 420 | this.interactionModeContainer.style.zIndex = '30'; 421 | this.interactionModeContainer.style.display = 'flex'; 422 | this.interactionModeContainer.style.flexDirection = 'column'; 423 | this.interactionModeContainer.style.gap = '4px'; 424 | this.renderDiv.appendChild(this.interactionModeContainer); 425 | // Create interaction mode buttons 426 | [ 427 | 'Drag', 428 | 'Rotate', 429 | 'Scale', 430 | 'Animate' 431 | ].forEach(function(mode) { 432 | var button = document.createElement('button'); 433 | button.innerText = mode; 434 | button.id = "interaction-mode-".concat(mode.toLowerCase()); 435 | button.style.padding = '10px 22px'; // Increased padding 436 | button.style.fontSize = '18px'; // Increased font size further 437 | button.style.border = '2px solid 
black'; // Consistent black border 438 | button.style.borderRadius = '4px'; // Sharper corners 439 | button.style.cursor = 'pointer'; 440 | button.style.fontWeight = 'bold'; // Always bold 441 | button.style.transition = 'background-color 0.2s ease, color 0.2s ease, box-shadow 0.2s ease'; // Faster transition, added shadow 442 | button.style.boxShadow = '2px 2px 0px black'; // Default shadow for inactive 443 | button.addEventListener('click', function() { 444 | return _this._setInteractionMode(mode.toLowerCase()); 445 | }); 446 | _this.interactionModeContainer.appendChild(button); 447 | _this.interactionModeButtons[mode.toLowerCase()] = button; // Store button reference 448 | }); 449 | this._updateInteractionModeButtonStyles(); // Apply initial styles 450 | this._updateInstructionText(); // Set initial instruction text 451 | this._setupDragAndDrop(); // Add drag and drop listeners 452 | } 453 | }, 454 | { 455 | key: "_setupThree", 456 | value: function _setupThree() { 457 | var _this_interactionModeColors_this_interactionMode; 458 | var width = this.renderDiv.clientWidth; 459 | var height = this.renderDiv.clientHeight; 460 | this.scene = new THREE.Scene(); 461 | // Using OrthographicCamera for a 2D-like overlay effect 462 | this.camera = new THREE.OrthographicCamera(width / -2, width / 2, height / 2, height / -2, 1, 2000); // Increased far plane 463 | this.camera.position.z = 100; // Position along Z doesn't change scale in Ortho 464 | this.renderer = new THREE.WebGLRenderer({ 465 | alpha: true, 466 | antialias: true 467 | }); 468 | this.renderer.setSize(width, height); 469 | this.renderer.setPixelRatio(window.devicePixelRatio); 470 | this.renderer.domElement.style.position = 'absolute'; 471 | this.renderer.domElement.style.top = '0'; 472 | this.renderer.domElement.style.left = '0'; 473 | this.renderer.domElement.style.zIndex = '1'; // Canvas on top of video 474 | this.renderDiv.appendChild(this.renderer.domElement); 475 | var ambientLight = new THREE.AmbientLight(0xffffff, 1.5); // Increased intensity 476 | this.scene.add(ambientLight); 477 | var directionalLight = new THREE.DirectionalLight(0xffffff, 1.8); // Increased intensity 478 | directionalLight.position.set(0, 0, 100); // Pointing from behind camera 479 | this.scene.add(directionalLight); 480 | // Setup hand visualization (palm circles removed, lines will be added later) 481 | for(var i = 0; i < 2; i++){ 482 | var lineGroup = new THREE.Group(); 483 | lineGroup.visible = false; 484 | this.scene.add(lineGroup); 485 | this.hands.push({ 486 | landmarks: null, 487 | anchorPos: new THREE.Vector3(), 488 | lineGroup: lineGroup, 489 | isPinching: false, 490 | pinchPointScreen: new THREE.Vector2(), 491 | isFist: false // True if hand is detected as a fist 492 | }); 493 | } 494 | this.handLineMaterial = new THREE.LineBasicMaterial({ 495 | color: 0x00ccff, 496 | linewidth: 8 497 | }); // Kept line material default for now 498 | var initialModeHandColor = ((_this_interactionModeColors_this_interactionMode = this.interactionModeColors[this.interactionMode]) === null || _this_interactionModeColors_this_interactionMode === void 0 ? 
void 0 : _this_interactionModeColors_this_interactionMode.hand) || new THREE.Color(0x00ccff); 499 | this.fingertipMaterialHand1 = new THREE.MeshBasicMaterial({ 500 | color: initialModeHandColor.clone(), 501 | side: THREE.DoubleSide, 502 | transparent: true, 503 | opacity: this.fingertipDefaultOpacity 504 | }); 505 | this.fingertipMaterialHand2 = new THREE.MeshBasicMaterial({ 506 | color: initialModeHandColor.clone(), 507 | side: THREE.DoubleSide, 508 | transparent: true, 509 | opacity: this.fingertipDefaultOpacity 510 | }); 511 | // Define connections for MediaPipe hand landmarks 512 | // See: https://developers.google.com/mediapipe/solutions/vision/hand_landmarker#hand_landmarks 513 | this.handConnections = [ 514 | // Thumb 515 | [ 516 | 0, 517 | 1 518 | ], 519 | [ 520 | 1, 521 | 2 522 | ], 523 | [ 524 | 2, 525 | 3 526 | ], 527 | [ 528 | 3, 529 | 4 530 | ], 531 | // Index finger 532 | [ 533 | 0, 534 | 5 535 | ], 536 | [ 537 | 5, 538 | 6 539 | ], 540 | [ 541 | 6, 542 | 7 543 | ], 544 | [ 545 | 7, 546 | 8 547 | ], 548 | // Middle finger 549 | [ 550 | 0, 551 | 9 552 | ], 553 | [ 554 | 9, 555 | 10 556 | ], 557 | [ 558 | 10, 559 | 11 560 | ], 561 | [ 562 | 11, 563 | 12 564 | ], 565 | // Ring finger 566 | [ 567 | 0, 568 | 13 569 | ], 570 | [ 571 | 13, 572 | 14 573 | ], 574 | [ 575 | 14, 576 | 15 577 | ], 578 | [ 579 | 15, 580 | 16 581 | ], 582 | // Pinky 583 | [ 584 | 0, 585 | 17 586 | ], 587 | [ 588 | 17, 589 | 18 590 | ], 591 | [ 592 | 18, 593 | 19 594 | ], 595 | [ 596 | 19, 597 | 20 598 | ], 599 | // Palm 600 | [ 601 | 5, 602 | 9 603 | ], 604 | [ 605 | 9, 606 | 13 607 | ], 608 | [ 609 | 13, 610 | 17 611 | ] // Connect base of fingers 612 | ]; 613 | } 614 | }, 615 | { 616 | key: "_loadAssets", 617 | value: function _loadAssets() { 618 | var _this = this; 619 | return _async_to_generator(function() { 620 | var gltfLoader, error; 621 | return _ts_generator(this, function(_state) { 622 | switch(_state.label){ 623 | case 0: 624 | console.log("Loading assets..."); 625 | gltfLoader = new GLTFLoader(); // Changed from FBXLoader 626 | _state.label = 1; 627 | case 1: 628 | _state.trys.push([ 629 | 1, 630 | 3, 631 | , 632 | 4 633 | ]); 634 | return [ 635 | 4, 636 | new Promise(function(resolve, reject) { 637 | gltfLoader.load('assets/Stan.gltf', function(gltf) { 638 | _this.pandaModel = gltf.scene; // GLTFLoader returns an object with a 'scene' property 639 | _this.animationMixer = new THREE.AnimationMixer(_this.pandaModel); 640 | _this.animationClips = gltf.animations; 641 | if (_this.animationClips && _this.animationClips.length) { 642 | _this.animationClips.forEach(function(clip, index) { 643 | var action = _this.animationMixer.clipAction(clip); 644 | var actionName = clip.name || "Animation ".concat(index + 1); 645 | _this.animationActions[actionName] = action; 646 | // Create a button for this animation 647 | var button = document.createElement('button'); 648 | button.innerText = actionName; 649 | button.style.padding = '5px 10px'; // Adjusted padding 650 | button.style.fontSize = '13px'; // Consistent font size 651 | button.style.backgroundColor = '#f0f0f0'; // Light grey default 652 | button.style.color = 'black'; 653 | button.style.border = '2px solid black'; // Black border 654 | button.style.borderRadius = '4px'; // Sharper corners 655 | button.style.cursor = 'pointer'; 656 | button.style.transition = 'background-color 0.2s ease, box-shadow 0.2s ease'; 657 | button.style.boxShadow = '2px 2px 0px black'; // Default shadow 658 | button.addEventListener('click', function() { 659 | return 
_this._playAnimation(actionName); 660 | }); 661 | _this.animationButtonsContainer.appendChild(button); 662 | console.log("Loaded animation and created button for: ".concat(actionName)); 663 | }); 664 | // Play the first animation by default 665 | // Try to find and play an "idle" animation by default 666 | var defaultActionName = Object.keys(_this.animationActions)[0]; // Fallback to the first animation 667 | var idleActionKey = Object.keys(_this.animationActions).find(function(name) { 668 | return name.toLowerCase().includes('idle'); 669 | }); 670 | if (idleActionKey) { 671 | defaultActionName = idleActionKey; 672 | console.log("Found idle animation: ".concat(defaultActionName)); 673 | } else if (defaultActionName) { 674 | console.log("No specific idle animation found, defaulting to first animation: ".concat(defaultActionName)); 675 | } 676 | if (defaultActionName && _this.animationActions[defaultActionName]) { 677 | _this.currentAction = _this.animationActions[defaultActionName]; 678 | _this.currentAction.play(); 679 | console.log("Playing default animation: ".concat(defaultActionName)); 680 | _this._updateButtonStyles(defaultActionName); 681 | } else { 682 | console.log("No animations found or default animation could not be played."); 683 | } 684 | } else { 685 | console.log("Stan model has no embedded animations."); 686 | } 687 | // Scale and position the model 688 | // These values might need adjustment based on the model's original size and pivot 689 | var scale = 80; // This scale might need adjustment for Stan model 690 | _this.pandaModel.scale.set(scale, scale, scale); 691 | // Position the model: X=center, Y=roughly bottom, Z=in front of hands 692 | var sceneHeight = _this.renderDiv.clientHeight; 693 | _this.pandaModel.position.set(0, sceneHeight * -0.45, -1000); // Updated Z to -1000 694 | _this.scene.add(_this.pandaModel); 695 | console.log("Stan GLTF model loaded and added to scene."); 696 | resolve(); 697 | }, undefined, function(error) { 698 | console.error('An error occurred while loading the Stan GLTF model:', error); // Updated log 699 | reject(error); 700 | }); 701 | }) 702 | ]; 703 | case 2: 704 | _state.sent(); 705 | console.log("All specified assets loaded."); 706 | return [ 707 | 3, 708 | 4 709 | ]; 710 | case 3: 711 | error = _state.sent(); 712 | console.error("Error loading assets:", error); 713 | _this._showError("Failed to load 3D model."); 714 | throw error; // Stop initialization 715 | case 4: 716 | return [ 717 | 2 718 | ]; 719 | } 720 | }); 721 | })(); 722 | } 723 | }, 724 | { 725 | key: "_setupHandTracking", 726 | value: function _setupHandTracking() { 727 | var _this = this; 728 | return _async_to_generator(function() { 729 | var vision, stream, error; 730 | return _ts_generator(this, function(_state) { 731 | switch(_state.label){ 732 | case 0: 733 | _state.trys.push([ 734 | 0, 735 | 4, 736 | , 737 | 5 738 | ]); 739 | console.log("Setting up Hand Tracking..."); 740 | return [ 741 | 4, 742 | FilesetResolver.forVisionTasks('https://cdn.jsdelivr.net/npm/@mediapipe/tasks-vision@0.10.14/wasm') 743 | ]; 744 | case 1: 745 | vision = _state.sent(); 746 | return [ 747 | 4, 748 | HandLandmarker.createFromOptions(vision, { 749 | baseOptions: { 750 | modelAssetPath: "https://storage.googleapis.com/mediapipe-models/hand_landmarker/hand_landmarker/float16/1/hand_landmarker.task", 751 | delegate: 'GPU' 752 | }, 753 | numHands: 2, 754 | runningMode: 'VIDEO' 755 | }) 756 | ]; 757 | case 2: 758 | _this.handLandmarker = _state.sent(); 759 | console.log("HandLandmarker 
created."); 760 | console.log("Requesting webcam access..."); 761 | return [ 762 | 4, 763 | navigator.mediaDevices.getUserMedia({ 764 | video: { 765 | facingMode: 'user', 766 | width: { 767 | ideal: 1920 768 | }, 769 | height: { 770 | ideal: 1080 771 | } // Request Full HD height 772 | }, 773 | audio: false 774 | }) 775 | ]; 776 | case 3: 777 | stream = _state.sent(); 778 | _this.videoElement.srcObject = stream; 779 | console.log("Webcam stream obtained."); 780 | // Wait for video metadata to load to ensure dimensions are available 781 | return [ 782 | 2, 783 | new Promise(function(resolve) { 784 | _this.videoElement.onloadedmetadata = function() { 785 | console.log("Webcam metadata loaded."); 786 | // Adjust video size slightly after metadata is loaded if needed, but CSS handles most 787 | _this.videoElement.style.width = _this.renderDiv.clientWidth + 'px'; 788 | _this.videoElement.style.height = _this.renderDiv.clientHeight + 'px'; 789 | resolve(); 790 | }; 791 | }) 792 | ]; 793 | case 4: 794 | error = _state.sent(); 795 | console.error('Error setting up Hand Tracking or Webcam:', error); 796 | _this._showError("Webcam/Hand Tracking Error: ".concat(error.message, ". Please allow camera access.")); 797 | throw error; // Re-throw to stop initialization 798 | case 5: 799 | return [ 800 | 2 801 | ]; 802 | } 803 | }); 804 | })(); 805 | } 806 | }, 807 | { 808 | key: "_updateHands", 809 | value: function _updateHands() { 810 | var _this = this; 811 | if (!this.handLandmarker || !this.videoElement.srcObject || this.videoElement.readyState < 2 || this.videoElement.videoWidth === 0) return; 812 | // this.isAnyHandHovering = false; // Reset hover state each frame - REMOVED 813 | var videoTime = this.videoElement.currentTime; 814 | if (videoTime > this.lastVideoTime) { 815 | this.lastVideoTime = videoTime; 816 | try { 817 | var _this1, _loop = function(i) { 818 | var hand = _this1.hands[i]; 819 | if (results.landmarks && results.landmarks[i]) { 820 | var currentRawLandmarks = results.landmarks[i]; 821 | if (!_this1.lastLandmarkPositions[i] || _this1.lastLandmarkPositions[i].length !== currentRawLandmarks.length) { 822 | _this1.lastLandmarkPositions[i] = currentRawLandmarks.map(function(lm) { 823 | return _object_spread({}, lm); 824 | }); 825 | } 826 | var smoothedLandmarks = currentRawLandmarks.map(function(lm, lmIndex) { 827 | var prevLm = _this.lastLandmarkPositions[i][lmIndex]; 828 | return { 829 | x: _this.smoothingFactor * lm.x + (1 - _this.smoothingFactor) * prevLm.x, 830 | y: _this.smoothingFactor * lm.y + (1 - _this.smoothingFactor) * prevLm.y, 831 | z: _this.smoothingFactor * lm.z + (1 - _this.smoothingFactor) * prevLm.z 832 | }; 833 | }); 834 | _this1.lastLandmarkPositions[i] = smoothedLandmarks.map(function(lm) { 835 | return _object_spread({}, lm); 836 | }); // Update last positions with new smoothed ones 837 | hand.landmarks = smoothedLandmarks; 838 | var palm = smoothedLandmarks[9]; // MIDDLE_FINGER_MCP 839 | var lmOriginalX = palm.x * videoParams.videoNaturalWidth; 840 | var lmOriginalY = palm.y * videoParams.videoNaturalHeight; 841 | var normX_visible = (lmOriginalX - videoParams.offsetX) / videoParams.visibleWidth; 842 | var normY_visible = (lmOriginalY - videoParams.offsetY) / videoParams.visibleHeight; 843 | var handX = (1 - normX_visible) * canvasWidth - canvasWidth / 2; 844 | var handY = (1 - normY_visible) * canvasHeight - canvasHeight / 2; 845 | hand.anchorPos.set(handX, handY, 1); 846 | // Hover detection logic REMOVED 847 | var prevIsPinching = hand.isPinching; // Store 
previous pinch state 848 | // Pinch detection logic 849 | var thumbTipLm = smoothedLandmarks[4]; // THUMB_TIP landmark index 850 | var indexTipLm = smoothedLandmarks[8]; // INDEX_FINGER_TIP landmark index 851 | if (thumbTipLm && indexTipLm) { 852 | // Convert landmark coordinates to screen space for pinch detection 853 | var convertToScreenSpace = function(lm) { 854 | var originalX = lm.x * videoParams.videoNaturalWidth; 855 | var originalY = lm.y * videoParams.videoNaturalHeight; 856 | var normX_visible = (originalX - videoParams.offsetX) / videoParams.visibleWidth; 857 | var normY_visible = (originalY - videoParams.offsetY) / videoParams.visibleHeight; 858 | return { 859 | x: (1 - normX_visible) * canvasWidth - canvasWidth / 2, 860 | y: (1 - normY_visible) * canvasHeight - canvasHeight / 2 861 | }; 862 | }; 863 | var thumbTipScreen = convertToScreenSpace(thumbTipLm); 864 | var indexTipScreen = convertToScreenSpace(indexTipLm); 865 | var distanceX = thumbTipScreen.x - indexTipScreen.x; 866 | var distanceY = thumbTipScreen.y - indexTipScreen.y; 867 | var pinchDistance = Math.sqrt(distanceX * distanceX + distanceY * distanceY); 868 | var pinchThreshold = 45; // Increased from 35. Distance in screen pixels to consider a pinch. 869 | if (pinchDistance < pinchThreshold) { 870 | hand.isPinching = true; 871 | hand.pinchPointScreen.set((thumbTipScreen.x + indexTipScreen.x) / 2, (thumbTipScreen.y + indexTipScreen.y) / 2); 872 | } else { 873 | hand.isPinching = false; 874 | } 875 | } else { 876 | hand.isPinching = false; 877 | } 878 | // Fist detection logic (simple version based on finger curl) 879 | // This is a basic fist detection. More robust methods might involve checking distances 880 | // of all fingertips to the palm or wrist. 881 | var isTipNearMCP = function(tipLandmark, mcpLandmark) { 882 | var threshold = arguments.length > 2 && arguments[2] !== void 0 ? arguments[2] : 0.1; 883 | if (!tipLandmark || !mcpLandmark) return false; 884 | // Using 3D distance, but could simplify to 2D if performance is an issue 885 | // and Z-depth isn't significantly varying for this gesture. 
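// Note: only dx and dy are used below (dz is commented out), so this is effectively a 2D distance check in normalized landmark space.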
886 | var dx = tipLandmark.x - mcpLandmark.x; 887 | var dy = tipLandmark.y - mcpLandmark.y; 888 | // const dz = tipLandmark.z - mcpLandmark.z; // Can include Z if needed 889 | var distance = Math.sqrt(dx * dx + dy * dy /* + dz*dz */ ); 890 | return distance < threshold; 891 | }; 892 | var indexFingerTip = smoothedLandmarks[8]; 893 | var indexFingerMcp = smoothedLandmarks[5]; 894 | var middleFingerTip = smoothedLandmarks[12]; 895 | var middleFingerMcp = smoothedLandmarks[9]; 896 | var ringFingerTip = smoothedLandmarks[16]; 897 | var ringFingerMcp = smoothedLandmarks[13]; 898 | var pinkyTip = smoothedLandmarks[20]; 899 | var pinkyMcp = smoothedLandmarks[17]; 900 | // Check if at least 3 fingers are curled (tip near MCP joint) 901 | var curledFingers = 0; 902 | if (isTipNearMCP(indexFingerTip, indexFingerMcp, 0.08)) curledFingers++; 903 | if (isTipNearMCP(middleFingerTip, middleFingerMcp, 0.08)) curledFingers++; 904 | if (isTipNearMCP(ringFingerTip, ringFingerMcp, 0.08)) curledFingers++; 905 | if (isTipNearMCP(pinkyTip, pinkyMcp, 0.08)) curledFingers++; 906 | var prevIsFist = hand.isFist; 907 | hand.isFist = curledFingers >= 3; // Requires at least 3 fingers to be curled 908 | // Interaction Logic 909 | if (_this1.interactionMode === 'animate') { 910 | // Release any model grab from other modes 911 | if (_this1.grabbingHandIndex !== -1 && _this1.pickedUpModel) { 912 | // console.log(`Switched to Animate mode or model grab was active. Releasing.`); 913 | _this1.grabbingHandIndex = -1; 914 | _this1.pickedUpModel = null; 915 | // if (this.grabMarker && this.pandaModel) this.grabMarker.visible = true; // Grab marker removed 916 | // Reset other mode-specific states 917 | _this1.rotateLastHandX = null; 918 | _this1.scaleInitialPinchDistance = null; 919 | _this1.scaleInitialModelScale = null; 920 | } 921 | if (hand.isPinching) { 922 | if (!prevIsPinching && _this1.animationControlHandIndex === -1) { 923 | _this1.animationControlHandIndex = i; 924 | _this1.animationControlInitialPinchY = hand.pinchPointScreen.y; 925 | console.log("Hand ".concat(i, " started pinch for animation control at Y: ").concat(_this1.animationControlInitialPinchY)); 926 | } else if (_this1.animationControlHandIndex === i && _this1.animationControlInitialPinchY !== null) { 927 | // Pinch continues with the controlling hand 928 | var deltaY = hand.pinchPointScreen.y - _this1.animationControlInitialPinchY; 929 | if (Math.abs(deltaY) > _this1.animationScrollThreshold) { 930 | var animationNames = Object.keys(_this1.animationActions); 931 | if (animationNames.length > 0) { 932 | var currentIndex = -1; 933 | // Find the index of the currently playing animation action 934 | if (_this1.currentAction) { 935 | for(var j = 0; j < animationNames.length; j++){ 936 | if (_this1.animationActions[animationNames[j]] === _this1.currentAction) { 937 | currentIndex = j; 938 | break; 939 | } 940 | } 941 | } 942 | var nextIndex = currentIndex; 943 | if (deltaY < 0) { 944 | nextIndex = (currentIndex + 1) % animationNames.length; // Now scrolls to next 945 | console.log("Scrolling animation UP (to next)"); 946 | } else { 947 | nextIndex = (currentIndex - 1 + animationNames.length) % animationNames.length; // Now scrolls to previous 948 | console.log("Scrolling animation DOWN (to previous)"); 949 | } 950 | if (nextIndex !== currentIndex) { 951 | _this1._playAnimation(animationNames[nextIndex]); 952 | } 953 | } 954 | // Reset initial Y to require another full threshold movement 955 | _this1.animationControlInitialPinchY = hand.pinchPointScreen.y; 956 
| } 957 | } 958 | } else { 959 | if (prevIsPinching && _this1.animationControlHandIndex === i) { 960 | console.log("Hand ".concat(i, " ended pinch for animation control.")); 961 | _this1.animationControlHandIndex = -1; 962 | _this1.animationControlInitialPinchY = null; 963 | } 964 | } 965 | } else if (_this1.interactionMode === 'drag') { 966 | if (hand.isPinching) { 967 | if (!prevIsPinching && _this1.grabbingHandIndex === -1 && _this1.pandaModel) { 968 | // REMOVED: Bounding box check - drag can be initiated from anywhere if not scaling 969 | _this1.grabbingHandIndex = i; 970 | _this1.pickedUpModel = _this1.pandaModel; 971 | // Convert 2D screen pinch point to 3D world point on a plane 972 | // The plane is at the model's current Z depth 973 | _this1.modelGrabStartDepth = _this1.pickedUpModel.position.z; // Store initial depth 974 | var pinchX = hand.pinchPointScreen.x; 975 | var pinchY = hand.pinchPointScreen.y; 976 | // Convert 2D screen pinch point (origin center) to NDC (Normalized Device Coords, -1 to 1) 977 | var ndcX = pinchX / (_this1.renderDiv.clientWidth / 2); 978 | var ndcY = pinchY / (_this1.renderDiv.clientHeight / 2); 979 | var pinchPoint3DWorld = new THREE.Vector3(ndcX, ndcY, 0.5); // Start with a neutral NDC Z 980 | pinchPoint3DWorld.unproject(_this1.camera); 981 | pinchPoint3DWorld.z = _this1.modelGrabStartDepth; // Force Z to the grab depth 982 | console.log("Grab screen: (".concat(pinchX.toFixed(2), ", ").concat(pinchY.toFixed(2), "), NDC: (").concat(ndcX.toFixed(2), ", ").concat(ndcY.toFixed(2), ")")); 983 | console.log("Grab 3D World (pre-offset): ".concat(pinchPoint3DWorld.x.toFixed(2), ", ").concat(pinchPoint3DWorld.y.toFixed(2), ", ").concat(pinchPoint3DWorld.z.toFixed(2))); 984 | _this1.modelDragOffset.subVectors(_this1.pickedUpModel.position, pinchPoint3DWorld); 985 | console.log("Hand ".concat(i, " GRABBED model for DRAG at depth ").concat(_this1.modelGrabStartDepth, ". 
Offset:"), _this1.modelDragOffset.x.toFixed(2), _this1.modelDragOffset.y.toFixed(2), _this1.modelDragOffset.z.toFixed(2)); 986 | } else if (_this1.grabbingHandIndex === i && _this1.pickedUpModel) { 987 | // Update model position based on pinch 988 | var currentPinchX = hand.pinchPointScreen.x; 989 | var currentPinchY = hand.pinchPointScreen.y; 990 | var currentNdcX = currentPinchX / (_this1.renderDiv.clientWidth / 2); 991 | var currentNdcY = currentPinchY / (_this1.renderDiv.clientHeight / 2); 992 | var newPinchPoint3DWorld = new THREE.Vector3(currentNdcX, currentNdcY, 0.5); 993 | newPinchPoint3DWorld.unproject(_this1.camera); 994 | newPinchPoint3DWorld.z = _this1.modelGrabStartDepth; // Force Z to the original grab depth plane 995 | _this1.pickedUpModel.position.addVectors(newPinchPoint3DWorld, _this1.modelDragOffset); 996 | var minZ = -200; 997 | var maxZ = 50; 998 | _this1.pickedUpModel.position.z = Math.max(minZ, Math.min(maxZ, _this1.pickedUpModel.position.z)); 999 | } 1000 | } else { 1001 | if (prevIsPinching && _this1.grabbingHandIndex === i) { 1002 | console.log("Hand ".concat(i, " RELEASED Stan model (Drag mode) at position:"), _this1.pickedUpModel.position); 1003 | _this1.grabbingHandIndex = -1; 1004 | _this1.pickedUpModel = null; 1005 | // if (this.grabMarker && this.pandaModel) this.grabMarker.visible = true; // Show marker when released - Grab marker removed 1006 | } 1007 | } 1008 | } else if (_this1.interactionMode === 'rotate') { 1009 | if (hand.isPinching) { 1010 | if (!prevIsPinching && _this1.grabbingHandIndex === -1 && _this1.pandaModel) { 1011 | // REMOVED: Bounding box check - rotate can be initiated from anywhere if not scaling 1012 | _this1.grabbingHandIndex = i; 1013 | _this1.pickedUpModel = _this1.pandaModel; 1014 | _this1.rotateLastHandX = hand.pinchPointScreen.x; // Store initial pinch X for delta calculation 1015 | console.log("Hand ".concat(i, " INITIATED ROTATION on model via pinch from anywhere.")); 1016 | } else if (_this1.grabbingHandIndex === i && _this1.pickedUpModel && _this1.rotateLastHandX !== null) { 1017 | var currentHandX = hand.pinchPointScreen.x; // Use pinch point X for delta 1018 | var deltaX = currentHandX - _this1.rotateLastHandX; 1019 | if (_this1.pickedUpModel && Math.abs(deltaX) > 0.5) { 1020 | _this1.pickedUpModel.rotation.y -= deltaX * _this1.rotateSensitivity; 1021 | } 1022 | _this1.rotateLastHandX = currentHandX; 1023 | } 1024 | } else { 1025 | if (prevIsPinching && _this1.grabbingHandIndex === i) { 1026 | console.log("Hand ".concat(i, " RELEASED ROTATION on model (pinch ended).")); 1027 | _this1.grabbingHandIndex = -1; 1028 | _this1.pickedUpModel = null; 1029 | _this1.rotateLastHandX = null; 1030 | // if (this.grabMarker && this.pandaModel) this.grabMarker.visible = true; // Grab marker removed 1031 | } 1032 | } 1033 | } else if (_this1.interactionMode === 'scale') { 1034 | var hand0 = _this1.hands[0]; 1035 | var hand1 = _this1.hands[1]; 1036 | if (hand0 && hand1 && hand0.landmarks && hand1.landmarks && hand0.isPinching && hand1.isPinching) { 1037 | // Both hands are visible and pinching 1038 | var dist = hand0.pinchPointScreen.distanceTo(hand1.pinchPointScreen); 1039 | if (_this1.scaleInitialPinchDistance === null || _this1.scaleInitialModelScale === null) { 1040 | // Start of scaling gesture 1041 | _this1.scaleInitialPinchDistance = dist; 1042 | _this1.scaleInitialModelScale = _this1.pandaModel.scale.clone(); // Store initial scale vector 1043 | _this1.grabbingHandIndex = 0; // Mark as "grabbing" for scaling (using hand 0 as primary) 
1044 | _this1.pickedUpModel = _this1.pandaModel; // Indicate model is being interacted with 1045 | // if(this.grabMarker) this.grabMarker.visible = false; // Grab marker removed 1046 | console.log("Scaling initiated. Initial pinch dist: ".concat(dist.toFixed(2), ", Initial scale: ").concat(_this1.scaleInitialModelScale.x.toFixed(2))); 1047 | } else { 1048 | // Continue scaling 1049 | var deltaDistance = dist - _this1.scaleInitialPinchDistance; 1050 | var scaleFactorChange = deltaDistance * _this1.scaleSensitivity; 1051 | var newScaleValue = _this1.scaleInitialModelScale.x + scaleFactorChange; 1052 | // Clamp scale to prevent extreme sizes or inversion 1053 | var minScale = 10; // Example min scale (adjust based on model's base size) 1054 | var maxScale = 300; // Example max scale 1055 | newScaleValue = Math.max(minScale, Math.min(maxScale, newScaleValue)); 1056 | _this1.pandaModel.scale.set(newScaleValue, newScaleValue, newScaleValue); 1057 | // console.log(`Scaling: Current pinch dist: ${dist.toFixed(2)}, Scale change: ${scaleFactorChange.toFixed(3)}, New scale value: ${newScaleValue.toFixed(2)}`); 1058 | } 1059 | } else { 1060 | // One or both hands are not pinching or not visible, or scaling was active 1061 | if (_this1.scaleInitialPinchDistance !== null) { 1062 | console.log("Scaling gesture ended."); 1063 | _this1.scaleInitialPinchDistance = null; 1064 | _this1.scaleInitialModelScale = null; 1065 | _this1.grabbingHandIndex = -1; 1066 | _this1.pickedUpModel = null; 1067 | // if(this.grabMarker && this.pandaModel) this.grabMarker.visible = true; // Grab marker removed 1068 | } 1069 | } 1070 | } 1071 | _this1._updateHandLines(i, smoothedLandmarks, videoParams, canvasWidth, canvasHeight); 1072 | } else { 1073 | if (hand.isPinching && _this1.grabbingHandIndex === i && _this1.interactionMode === 'drag') { 1074 | console.log("Hand ".concat(i, " (which was grabbing for drag) disappeared. Releasing model.")); 1075 | _this1.grabbingHandIndex = -1; 1076 | _this1.pickedUpModel = null; 1077 | // if (this.grabMarker && this.pandaModel) this.grabMarker.visible = true; // Grab marker removed 1078 | } else if (_this1.hands[i].isPinching && _this1.grabbingHandIndex === i && _this1.interactionMode === 'rotate') { 1079 | console.log("Hand ".concat(i, " (which was pinching for rotate) disappeared. Releasing model.")); 1080 | _this1.grabbingHandIndex = -1; 1081 | _this1.pickedUpModel = null; 1082 | _this1.rotateLastHandX = null; 1083 | // if (this.grabMarker && this.pandaModel) this.grabMarker.visible = true; // Grab marker removed 1084 | } else if (_this1.interactionMode === 'scale' && _this1.scaleInitialPinchDistance !== null && (i === 0 || i === 1)) { 1085 | var _this_hands_, _this_hands_1; 1086 | var hand0Exists = (_this_hands_ = _this1.hands[0]) === null || _this_hands_ === void 0 ? void 0 : _this_hands_.landmarks; 1087 | var hand1Exists = (_this_hands_1 = _this1.hands[1]) === null || _this_hands_1 === void 0 ? 
void 0 : _this_hands_1.landmarks; 1088 | if (!hand0Exists || !hand1Exists) { 1089 | console.log("Scaling gesture ended due to hand disappearance."); 1090 | _this1.scaleInitialPinchDistance = null; 1091 | _this1.scaleInitialModelScale = null; 1092 | _this1.grabbingHandIndex = -1; 1093 | _this1.pickedUpModel = null; 1094 | // if(this.grabMarker && this.pandaModel) this.grabMarker.visible = true; // Grab marker removed 1095 | } 1096 | } 1097 | hand.landmarks = null; 1098 | hand.isPinching = false; 1099 | hand.isFist = false; 1100 | if (hand.lineGroup) hand.lineGroup.visible = false; 1101 | } 1102 | // Play interaction click sound for this hand if applicable (not for scale, handled after loop) 1103 | var isThisHandActivelyInteractingForSound = false; 1104 | if (_this1.interactionMode === 'drag' || _this1.interactionMode === 'rotate') { 1105 | isThisHandActivelyInteractingForSound = _this1.grabbingHandIndex === i && _this1.pickedUpModel === _this1.pandaModel; 1106 | } else if (_this1.interactionMode === 'animate') { 1107 | isThisHandActivelyInteractingForSound = _this1.animationControlHandIndex === i; 1108 | } 1109 | if (hand.isPinching && isThisHandActivelyInteractingForSound && _this1.interactionMode !== 'scale') { 1110 | _this1.audioManager.playInteractionClickSound(); 1111 | } 1112 | }; 1113 | var results = this.handLandmarker.detectForVideo(this.videoElement, performance.now()); 1114 | var videoParams = this._getVisibleVideoParameters(); 1115 | if (!videoParams) return; 1116 | var canvasWidth = this.renderDiv.clientWidth; 1117 | var canvasHeight = this.renderDiv.clientHeight; 1118 | for(var i = 0; i < this.hands.length; i++)_this1 = this, _loop(i); 1119 | // End of hand loop 1120 | // After processing both hands, if in scale mode and one hand stops pinching, explicitly stop scaling. 
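// A minimal standalone sketch of the two-hand scale mapping handled in the loop above
// (helper name and parameters are hypothetical): the pinch-point distance and the model scale
// are sampled once when both hands start pinching, and each later frame adds
// (currentDistance - initialDistance) * sensitivity to that initial scale, clamped so the
// model can neither invert nor grow without bound.
function pinchDistanceToScale(initialScale, initialDistance, currentDistance, sensitivity, minScale, maxScale) {
    var next = initialScale + (currentDistance - initialDistance) * sensitivity;
    return Math.max(minScale, Math.min(maxScale, next)); // clamp to [minScale, maxScale]
}
// e.g. var s = pinchDistanceToScale(s0, d0, pinch0.distanceTo(pinch1), sensitivity, 10, 300); model.scale.set(s, s, s);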
1121 | if (this.interactionMode === 'scale' && this.scaleInitialPinchDistance !== null) { 1122 | var hand0 = this.hands[0]; 1123 | var hand1 = this.hands[1]; 1124 | var hand0PinchingAndVisible = hand0 && hand0.landmarks && hand0.isPinching; 1125 | var hand1PinchingAndVisible = hand1 && hand1.landmarks && hand1.isPinching; 1126 | if (hand0PinchingAndVisible && hand1PinchingAndVisible) { 1127 | // If scaling is active and both hands are pinching, play sound 1128 | this.audioManager.playInteractionClickSound(); 1129 | } else { 1130 | // If scaling was active but one hand stopped pinching or disappeared 1131 | if (this.scaleInitialPinchDistance !== null) { 1132 | console.log("Scaling gesture ended (one hand stopped pinching/disappeared - post-loop check)."); 1133 | this.scaleInitialPinchDistance = null; 1134 | this.scaleInitialModelScale = null; 1135 | this.grabbingHandIndex = -1; 1136 | this.pickedUpModel = null; 1137 | // if(this.grabMarker && this.pandaModel) this.grabMarker.visible = true; // Grab marker removed 1138 | } 1139 | } 1140 | } 1141 | } catch (error) { 1142 | console.error("Error during hand detection:", error); 1143 | } 1144 | } 1145 | } 1146 | }, 1147 | { 1148 | key: "_getModelScreenBoundingBox", 1149 | value: function _getModelScreenBoundingBox() { 1150 | var _this = this; 1151 | if (!this.pandaModel || !this.camera || !this.renderer) { 1152 | return null; 1153 | } 1154 | // Ensure the model's world matrix is up to date 1155 | this.pandaModel.updateMatrixWorld(true); 1156 | var box = new THREE.Box3().setFromObject(this.pandaModel); 1157 | if (box.isEmpty()) { 1158 | return null; // Model might not be loaded or has no geometry 1159 | } 1160 | var corners = [ 1161 | new THREE.Vector3(box.min.x, box.min.y, box.min.z), 1162 | new THREE.Vector3(box.min.x, box.min.y, box.max.z), 1163 | new THREE.Vector3(box.min.x, box.max.y, box.min.z), 1164 | new THREE.Vector3(box.min.x, box.max.y, box.max.z), 1165 | new THREE.Vector3(box.max.x, box.min.y, box.min.z), 1166 | new THREE.Vector3(box.max.x, box.min.y, box.max.z), 1167 | new THREE.Vector3(box.max.x, box.max.y, box.min.z), 1168 | new THREE.Vector3(box.max.x, box.max.y, box.max.z) 1169 | ]; 1170 | var minX = Infinity, maxX = -Infinity, minY = Infinity, maxY = -Infinity; 1171 | var canvasWidth = this.renderDiv.clientWidth; 1172 | var canvasHeight = this.renderDiv.clientHeight; 1173 | corners.forEach(function(corner) { 1174 | // Apply model's world transformation to the local bounding box corners 1175 | corner.applyMatrix4(_this.pandaModel.matrixWorld); 1176 | // Project to Normalized Device Coordinates (NDC) 1177 | corner.project(_this.camera); 1178 | // Convert NDC to screen coordinates (origin at center of screen) 1179 | // This matches the coordinate system of pinchPointScreen 1180 | var screenX = corner.x * (canvasWidth / 2); 1181 | var screenY = corner.y * (canvasHeight / 2); // In Three.js NDC, +Y is up 1182 | minX = Math.min(minX, screenX); 1183 | maxX = Math.max(maxX, screenX); 1184 | minY = Math.min(minY, screenY); 1185 | maxY = Math.max(maxY, screenY); 1186 | }); 1187 | if (minX === Infinity) return null; // All points were behind camera or some other issue 1188 | return { 1189 | minX: minX, 1190 | minY: minY, 1191 | maxX: maxX, 1192 | maxY: maxY 1193 | }; 1194 | } 1195 | }, 1196 | { 1197 | key: "_getVisibleVideoParameters", 1198 | value: function _getVisibleVideoParameters() { 1199 | if (!this.videoElement || this.videoElement.videoWidth === 0 || this.videoElement.videoHeight === 0) { 1200 | return null; 1201 | } 1202 | var 
vNatW = this.videoElement.videoWidth; 1203 | var vNatH = this.videoElement.videoHeight; 1204 | var rW = this.renderDiv.clientWidth; 1205 | var rH = this.renderDiv.clientHeight; 1206 | if (vNatW === 0 || vNatH === 0 || rW === 0 || rH === 0) return null; 1207 | var videoAR = vNatW / vNatH; 1208 | var renderDivAR = rW / rH; 1209 | var finalVideoPixelX, finalVideoPixelY; 1210 | var visibleVideoPixelWidth, visibleVideoPixelHeight; 1211 | if (videoAR > renderDivAR) { 1212 | // Video is wider than renderDiv, scaled to fit renderDiv height, cropped horizontally. 1213 | var scale = rH / vNatH; // Scale factor based on height. 1214 | var scaledVideoWidth = vNatW * scale; // Width of video if scaled to fit renderDiv height. 1215 | // Total original video pixels cropped horizontally (from both sides combined). 1216 | var totalCroppedPixelsX = (scaledVideoWidth - rW) / scale; 1217 | finalVideoPixelX = totalCroppedPixelsX / 2; // Pixels cropped from the left of original video. 1218 | finalVideoPixelY = 0; // No vertical cropping. 1219 | visibleVideoPixelWidth = vNatW - totalCroppedPixelsX; // Width of the visible part in original video pixels. 1220 | visibleVideoPixelHeight = vNatH; // Full height is visible. 1221 | } else { 1222 | // Video is taller than renderDiv (or same AR), scaled to fit renderDiv width, cropped vertically. 1223 | var scale1 = rW / vNatW; // Scale factor based on width. 1224 | var scaledVideoHeight = vNatH * scale1; // Height of video if scaled to fit renderDiv width. 1225 | // Total original video pixels cropped vertically (from top and bottom combined). 1226 | var totalCroppedPixelsY = (scaledVideoHeight - rH) / scale1; 1227 | finalVideoPixelX = 0; // No horizontal cropping. 1228 | finalVideoPixelY = totalCroppedPixelsY / 2; // Pixels cropped from the top of original video. 1229 | visibleVideoPixelWidth = vNatW; // Full width is visible. 1230 | visibleVideoPixelHeight = vNatH - totalCroppedPixelsY; // Height of the visible part in original video pixels. 1231 | } 1232 | // Safety check for degenerate cases (e.g., extreme aspect ratios leading to zero visible dimension) 1233 | if (visibleVideoPixelWidth <= 0 || visibleVideoPixelHeight <= 0) { 1234 | // Fallback or log error, this shouldn't happen in normal scenarios 1235 | console.warn("Calculated visible video dimension is zero or negative.", { 1236 | visibleVideoPixelWidth: visibleVideoPixelWidth, 1237 | visibleVideoPixelHeight: visibleVideoPixelHeight 1238 | }); 1239 | return { 1240 | offsetX: 0, 1241 | offsetY: 0, 1242 | visibleWidth: vNatW, 1243 | visibleHeight: vNatH, 1244 | videoNaturalWidth: vNatW, 1245 | videoNaturalHeight: vNatH 1246 | }; 1247 | } 1248 | return { 1249 | offsetX: finalVideoPixelX, 1250 | offsetY: finalVideoPixelY, 1251 | visibleWidth: visibleVideoPixelWidth, 1252 | visibleHeight: visibleVideoPixelHeight, 1253 | videoNaturalWidth: vNatW, 1254 | videoNaturalHeight: vNatH 1255 | }; 1256 | } 1257 | }, 1258 | { 1259 | // _updateGhosts method removed. 1260 | key: "_showStatusScreen", 1261 | value: function _showStatusScreen(message) { 1262 | var color = arguments.length > 1 && arguments[1] !== void 0 ? arguments[1] : 'white', showRestartHint = arguments.length > 2 && arguments[2] !== void 0 ? arguments[2] : false; 1263 | this.gameOverContainer.style.display = 'block'; 1264 | this.gameOverText.innerText = message; 1265 | this.gameOverText.style.color = color; 1266 | this.restartHintText.style.display = showRestartHint ? 
'block' : 'none'; 1267 | // No spawning to stop for template 1268 | } 1269 | }, 1270 | { 1271 | key: "_showError", 1272 | value: function _showError(message) { 1273 | this.gameOverContainer.style.display = 'block'; 1274 | this.gameOverText.innerText = "ERROR: ".concat(message); 1275 | this.gameOverText.style.color = 'orange'; 1276 | this.restartHintText.style.display = 'block'; // Show restart hint on error 1277 | this.gameState = 'error'; 1278 | // No spawning to stop 1279 | this.hands.forEach(function(hand) { 1280 | if (hand.lineGroup) hand.lineGroup.visible = false; 1281 | }); 1282 | } 1283 | }, 1284 | { 1285 | key: "_restartGame", 1286 | value: function _restartGame() { 1287 | console.log("Restarting tracking..."); 1288 | this.gameOverContainer.style.display = 'none'; 1289 | this.hands.forEach(function(hand) { 1290 | if (hand.lineGroup) { 1291 | hand.lineGroup.visible = false; 1292 | } 1293 | }); 1294 | // Ghost removal removed 1295 | // Score reset removed 1296 | // Visibility of game elements removed 1297 | this.gameState = 'tracking'; // Changed from 'playing' 1298 | this.lastVideoTime = -1; 1299 | this.clock.start(); 1300 | // Removed _startSpawning() 1301 | } 1302 | }, 1303 | { 1304 | // _updateScoreDisplay method removed. 1305 | key: "_onResize", 1306 | value: function _onResize() { 1307 | var width = this.renderDiv.clientWidth; 1308 | var height = this.renderDiv.clientHeight; 1309 | // Update orthographic camera frustum 1310 | this.camera.left = width / -2; 1311 | this.camera.right = width / 2; 1312 | this.camera.top = height / 2; 1313 | this.camera.bottom = height / -2; 1314 | this.camera.updateProjectionMatrix(); 1315 | // Update renderer size 1316 | this.renderer.setSize(width, height); 1317 | // Update video element size 1318 | this.videoElement.style.width = width + 'px'; 1319 | this.videoElement.style.height = height + 'px'; 1320 | // Watermelon, Chad, GroundLine updates removed. 1321 | } 1322 | }, 1323 | { 1324 | key: "_updateHandLines", 1325 | value: function _updateHandLines(handIndex, landmarks, videoParams, canvasWidth, canvasHeight) { 1326 | var _this = this; 1327 | var hand = this.hands[handIndex]; 1328 | var lineGroup = hand.lineGroup; 1329 | // Determine if this specific hand is currently involved in a grab/scale interaction 1330 | var isThisHandActivelyInteracting = false; 1331 | if (this.interactionMode === 'drag' || this.interactionMode === 'rotate') { 1332 | isThisHandActivelyInteracting = this.grabbingHandIndex === handIndex && this.pickedUpModel === this.pandaModel; 1333 | } else if (this.interactionMode === 'scale') { 1334 | // For scale, both hands involved show the effect if scaling is active 1335 | isThisHandActivelyInteracting = this.scaleInitialPinchDistance !== null && (handIndex === 0 || handIndex === 1); 1336 | } else if (this.interactionMode === 'animate') { 1337 | // For animate, the hand controlling animation scrolling (via pinch) shows the effect 1338 | isThisHandActivelyInteracting = this.animationControlHandIndex === handIndex; 1339 | } 1340 | var currentHandMaterial = handIndex === 0 ? this.fingertipMaterialHand1 : this.fingertipMaterialHand2; 1341 | if (currentHandMaterial) { 1342 | currentHandMaterial.opacity = isThisHandActivelyInteracting ? 
this.fingertipGrabOpacity : this.fingertipDefaultOpacity; 1343 | } 1344 | while(lineGroup.children.length){ 1345 | var child = lineGroup.children[0]; 1346 | lineGroup.remove(child); 1347 | if (child.geometry) child.geometry.dispose(); 1348 | // Materials are shared, no need to dispose them here unless they are unique per line/circle 1349 | } 1350 | if (!landmarks || landmarks.length === 0 || !videoParams) { 1351 | lineGroup.visible = false; 1352 | return; 1353 | } 1354 | var isAnyLandmarkOffScreen = false; 1355 | var _iteratorNormalCompletion = true, _didIteratorError = false, _iteratorError = undefined; 1356 | try { 1357 | // First, check if any landmark is off-screen based on unclamped normalized coordinates 1358 | for(var _iterator = landmarks[Symbol.iterator](), _step; !(_iteratorNormalCompletion = (_step = _iterator.next()).done); _iteratorNormalCompletion = true){ 1359 | var lm = _step.value; 1360 | var lmOriginalX = lm.x * videoParams.videoNaturalWidth; 1361 | var lmOriginalY = lm.y * videoParams.videoNaturalHeight; 1362 | var normX_visible = (lmOriginalX - videoParams.offsetX) / videoParams.visibleWidth; 1363 | var normY_visible = (lmOriginalY - videoParams.offsetY) / videoParams.visibleHeight; 1364 | if (normX_visible < 0 || normX_visible > 1 || normY_visible < 0 || normY_visible > 1) { 1365 | isAnyLandmarkOffScreen = true; 1366 | break; 1367 | } 1368 | } 1369 | } catch (err) { 1370 | _didIteratorError = true; 1371 | _iteratorError = err; 1372 | } finally{ 1373 | try { 1374 | if (!_iteratorNormalCompletion && _iterator.return != null) { 1375 | _iterator.return(); 1376 | } 1377 | } finally{ 1378 | if (_didIteratorError) { 1379 | throw _iteratorError; 1380 | } 1381 | } 1382 | } 1383 | if (isAnyLandmarkOffScreen) { 1384 | lineGroup.visible = false; 1385 | return; 1386 | } 1387 | // If all landmarks are on-screen (or would be, before clamping), proceed to calculate points3D for drawing. 1388 | // These points will use clamped coordinates to ensure they are drawn within canvas bounds if very close to edge. 
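// A minimal standalone sketch of the landmark-to-screen mapping implemented just below
// (hypothetical helper name; videoParams is shaped like the object returned by
// _getVisibleVideoParameters): the normalized MediaPipe landmark is re-normalized against the
// visible, centre-cropped part of the video, clamped for drawing, and converted to the centred
// coordinates of the orthographic scene, where the (1 - n) terms mirror X for the selfie view
// and flip Y so that "up" on screen is +Y.
function landmarkToCenteredScreen(lm, videoParams, canvasWidth, canvasHeight) {
    var px = lm.x * videoParams.videoNaturalWidth;
    var py = lm.y * videoParams.videoNaturalHeight;
    var nx = Math.max(0, Math.min(1, (px - videoParams.offsetX) / videoParams.visibleWidth));
    var ny = Math.max(0, Math.min(1, (py - videoParams.offsetY) / videoParams.visibleHeight));
    return {
        x: (1 - nx) * canvasWidth - canvasWidth / 2,
        y: (1 - ny) * canvasHeight - canvasHeight / 2
    };
}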
1389 | var points3D = landmarks.map(function(lm) { 1390 | var lmOriginalX = lm.x * videoParams.videoNaturalWidth; 1391 | var lmOriginalY = lm.y * videoParams.videoNaturalHeight; 1392 | var normX_visible = (lmOriginalX - videoParams.offsetX) / videoParams.visibleWidth; 1393 | var normY_visible = (lmOriginalY - videoParams.offsetY) / videoParams.visibleHeight; 1394 | // Clamp values FOR DRAWING purposes 1395 | normX_visible = Math.max(0, Math.min(1, normX_visible)); 1396 | normY_visible = Math.max(0, Math.min(1, normY_visible)); 1397 | var x = (1 - normX_visible) * canvasWidth - canvasWidth / 2; 1398 | var y = (1 - normY_visible) * canvasHeight - canvasHeight / 2; 1399 | return new THREE.Vector3(x, y, 1.1); // Z for fingertip circles, slightly in front of lines 1400 | }); 1401 | var lineZ = 1; // Z for connection lines 1402 | this.handConnections.forEach(function(conn) { 1403 | var p1 = points3D[conn[0]]; 1404 | var p2 = points3D[conn[1]]; 1405 | if (p1 && p2) { 1406 | // Create points for the line with the correct Z 1407 | var lineP1 = p1.clone().setZ(lineZ); 1408 | var lineP2 = p2.clone().setZ(lineZ); 1409 | var geometry = new THREE.BufferGeometry().setFromPoints([ 1410 | lineP1, 1411 | lineP2 1412 | ]); 1413 | var line = new THREE.Line(geometry, _this.handLineMaterial); 1414 | lineGroup.add(line); 1415 | } 1416 | }); 1417 | // Draw fingertip circles 1418 | var fingertipRadius = 8; // Radius of the circle for fingertips 1419 | var wristRadius = 12; // Larger radius for the wrist 1420 | var circleSegments = 16; // Smoothness of the circle 1421 | this.fingertipLandmarkIndices.forEach(function(index) { 1422 | var landmarkPosition = points3D[index]; 1423 | if (landmarkPosition) { 1424 | var radius = index === 0 ? wristRadius : fingertipRadius; // Use wristRadius for landmark 0 1425 | var circleGeometry = new THREE.CircleGeometry(radius, circleSegments); 1426 | // The 'currentHandMaterial' (fetched and opacity-updated above) is used here. 1427 | var landmarkCircle = new THREE.Mesh(circleGeometry, currentHandMaterial); 1428 | landmarkCircle.position.copy(landmarkPosition); // Already has Z=1.1 1429 | // Pulse scaling also depends on 'isThisHandActivelyInteracting' 1430 | if (isThisHandActivelyInteracting) { 1431 | // Apply pulsing effect to scale 1432 | // (1 + sin) / 2 gives a 0-1 range, perfect for modulating amplitude 1433 | var currentPulseProgress = (1 + Math.sin(_this.clock.elapsedTime * _this.grabbingPulseSpeed)) / 2; 1434 | var scaleValue = _this.pulseBaseScale + currentPulseProgress * _this.grabbingPulseAmplitude; 1435 | landmarkCircle.scale.set(scaleValue, scaleValue, 1); 1436 | } else { 1437 | landmarkCircle.scale.set(_this.pulseBaseScale, _this.pulseBaseScale, 1); // Reset scale 1438 | } 1439 | lineGroup.add(landmarkCircle); 1440 | } 1441 | }); 1442 | lineGroup.visible = true; 1443 | } 1444 | }, 1445 | { 1446 | key: "_animate", 1447 | value: function _animate() { 1448 | requestAnimationFrame(this._animate.bind(this)); 1449 | var deltaTime = this.clock.getDelta(); 1450 | // Update hands if tracking 1451 | if (this.gameState === 'tracking') { 1452 | this._updateHands(); 1453 | } 1454 | // Update animation mixer 1455 | if (this.animationMixer) { 1456 | this.animationMixer.update(deltaTime); 1457 | } 1458 | // Bounding box helper visibility logic REMOVED 1459 | // _updateGhosts and _updateParticles calls removed. 
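// A minimal standalone sketch of the pulse applied to the fingertip circles above (hypothetical
// helper name and parameters): the raw sine of elapsedTime * pulseSpeed (range -1..1) is
// remapped with (1 + sin) / 2 into 0..1 and then scaled by an amplitude on top of a base scale.
function pulseScale(elapsedTime, pulseSpeed, baseScale, amplitude) {
    var progress = (1 + Math.sin(elapsedTime * pulseSpeed)) / 2; // 0..1 pulse progress
    return baseScale + progress * amplitude;
}
// e.g. circle.scale.set(pulseScale(t, speed, 1, 0.3), pulseScale(t, speed, 1, 0.3), 1);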
1460 | // Always render the scene 1461 | this.renderer.render(this.scene, this.camera); 1462 | } 1463 | }, 1464 | { 1465 | key: "start", 1466 | value: function start() { 1467 | var _this = this; 1468 | // Add click listener for resuming audio context and potentially restarting on error 1469 | this.renderDiv.addEventListener('click', function() { 1470 | _this.audioManager.resumeContext(); 1471 | if (_this.gameState === 'error' || _this.gameState === 'paused') { 1472 | _this._restartGame(); // Restart tracking 1473 | } 1474 | }); 1475 | console.log('Game setup initiated. Waiting for async operations...'); 1476 | // Note: Game interaction now starts automatically after _init completes. 1477 | } 1478 | }, 1479 | { 1480 | key: "_updateSpeechBubbleAppearance", 1481 | value: function _updateSpeechBubbleAppearance() { 1482 | if (!this.speechBubble) return; 1483 | var isPlaceholder = this.speechBubble.innerHTML === "..." || this.speechBubble.innerText === "..."; 1484 | // Apply active styling only if recognition is generally active AND we are not displaying the placeholder. 1485 | // This means interim/final text will get the active style, but the "..." placeholder will not, 1486 | // even if the recognition service itself is still running in the background. 1487 | var showActiveStyling = this.isSpeechActive && !isPlaceholder; 1488 | var translateY = isPlaceholder ? '-5px' : '0px'; 1489 | var scale = showActiveStyling ? '1.15' : '1.0'; 1490 | this.speechBubble.style.transform = "translateX(-50%) translateY(".concat(translateY, ") scale(").concat(scale, ")"); 1491 | if (showActiveStyling) { 1492 | // Cyan glow, blue drop shadow, enhanced original shadow 1493 | // Active speech bubble: brighter color, stronger shadow 1494 | this.speechBubble.style.boxShadow = '5px 5px 0px #007bff'; // Active blue shadow 1495 | this.speechBubble.style.border = '2px solid black'; // Keep black border 1496 | this.speechBubble.style.padding = '18px 28px'; // Slightly larger padding 1497 | this.speechBubble.style.fontSize = 'clamp(20px, 3.5vw, 26px)'; // Larger font when active 1498 | this.speechBubble.style.top = '15px'; // Increased top margin when active, reduced from 30px to complement base 10px 1499 | } else { 1500 | // Default/inactive styling 1501 | // Default/inactive speech bubble styling 1502 | this.speechBubble.style.boxShadow = '4px 4px 0px rgba(0,0,0,1)'; // Hard black shadow 1503 | this.speechBubble.style.border = '2px solid black'; // Black border 1504 | this.speechBubble.style.padding = '15px 25px'; 1505 | this.speechBubble.style.fontSize = 'clamp(16px, 3vw, 22px)'; // Original font size 1506 | this.speechBubble.style.top = '10px'; // Original top margin, changed from 20px 1507 | } 1508 | } 1509 | }, 1510 | { 1511 | key: "_setupSpeechRecognition", 1512 | value: function _setupSpeechRecognition() { 1513 | var _this = this; 1514 | this.speechManager = new SpeechManager(function(finalTranscript, interimTranscript) { 1515 | if (_this.speechBubble) { 1516 | clearTimeout(_this.speechBubbleTimeout); 1517 | if (finalTranscript) { 1518 | _this.speechBubble.innerHTML = finalTranscript; 1519 | _this.speechBubble.style.opacity = '1'; 1520 | _this.speechBubbleTimeout = setTimeout(function() { 1521 | _this.speechBubble.innerHTML = "..."; 1522 | _this.speechBubble.style.opacity = '0.7'; 1523 | _this._updateSpeechBubbleAppearance(); // Update appearance for "..." 
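// How SpeechManager (see SpeechManager.js) feeds the transcript, active-state, and command
// callbacks that this method passes to it is not shown in this file; a typical, assumed wiring
// of the standard Web Speech API onto such callbacks might look like the sketch below. This is
// an assumption for illustration, not a transcription of SpeechManager.js, and all names here
// are hypothetical.
function createAssumedRecognizer(onTranscript, onActiveChange) {
    var Recognition = window.SpeechRecognition || window.webkitSpeechRecognition;
    if (!Recognition) return null; // Web Speech API unavailable in this browser
    var rec = new Recognition();
    rec.continuous = true;     // keep listening across utterances
    rec.interimResults = true; // deliver partial transcripts while the user is still speaking
    rec.onstart = function() { onActiveChange(true); };
    rec.onend = function() { onActiveChange(false); };
    rec.onresult = function(event) {
        var finalText = '', interimText = '';
        for (var i = event.resultIndex; i < event.results.length; i++) {
            var chunk = event.results[i][0].transcript;
            if (event.results[i].isFinal) finalText += chunk;
            else interimText += chunk;
        }
        onTranscript(finalText, interimText);
    };
    return rec; // the caller would invoke rec.start() after a user gesture / permission grant
}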
1524 | }, 2000); 1525 | } else if (interimTranscript) { 1526 | _this.speechBubble.innerHTML = ''.concat(interimTranscript, ""); 1527 | _this.speechBubble.style.opacity = '1'; 1528 | } else { 1529 | _this.speechBubbleTimeout = setTimeout(function() { 1530 | if (_this.speechBubble.innerHTML !== "...") { 1531 | _this.speechBubble.innerHTML = "..."; 1532 | } 1533 | _this.speechBubble.style.opacity = '0.7'; 1534 | _this._updateSpeechBubbleAppearance(); // Update appearance for "..." 1535 | }, 500); 1536 | } 1537 | _this._updateSpeechBubbleAppearance(); 1538 | } 1539 | }, function(isActive) { 1540 | _this.isSpeechActive = isActive; 1541 | _this._updateSpeechBubbleAppearance(); 1542 | }, function(command) { 1543 | console.log("Game received command: ".concat(command)); 1544 | var validCommands = [ 1545 | 'drag', 1546 | 'rotate', 1547 | 'scale', 1548 | 'animate' 1549 | ]; 1550 | if (validCommands.includes(command.toLowerCase())) { 1551 | _this._setInteractionMode(command.toLowerCase()); 1552 | } else { 1553 | console.warn("Unrecognized command via speech: ".concat(command)); 1554 | } 1555 | }); 1556 | // Initialize speech bubble with "..." and apply initial appearance 1557 | if (this.speechBubble) { 1558 | this.speechBubble.innerHTML = "..."; 1559 | this.speechBubble.style.opacity = '0.7'; 1560 | this._updateSpeechBubbleAppearance(); // Apply initial styles (isSpeechActive will be false) 1561 | } 1562 | // We will call requestPermissionAndStart() on user interaction (e.g., start button) 1563 | } 1564 | }, 1565 | { 1566 | key: "_playAnimation", 1567 | value: function _playAnimation(name) { 1568 | if (!this.animationActions[name]) { 1569 | console.warn('Animation "'.concat(name, '" not found.')); 1570 | return; 1571 | } 1572 | var newAction = this.animationActions[name]; 1573 | if (this.currentAction === newAction && newAction.isRunning()) { 1574 | console.log('Animation "'.concat(name, '" is already playing.')); 1575 | return; // Already playing this animation 1576 | } 1577 | if (this.currentAction) { 1578 | this.currentAction.fadeOut(0.5); // Fade out current animation over 0.5 seconds 1579 | } 1580 | newAction.reset().fadeIn(0.5).play(); // Reset, fade in and play new animation 1581 | this.currentAction = newAction; 1582 | console.log("Playing animation: ".concat(name)); 1583 | this._updateButtonStyles(name); 1584 | } 1585 | }, 1586 | { 1587 | key: "_updateButtonStyles", 1588 | value: function _updateButtonStyles(activeAnimationName) { 1589 | var buttons = this.animationButtonsContainer.children; 1590 | for(var i = 0; i < buttons.length; i++){ 1591 | var button = buttons[i]; 1592 | var isActive = button.innerText === activeAnimationName; 1593 | button.style.backgroundColor = isActive ? '#007bff' : '#f0f0f0'; // Blue if active, light grey if not 1594 | button.style.color = isActive ? 'white' : 'black'; 1595 | button.style.fontWeight = isActive ? 'bold' : 'normal'; 1596 | // Active button has its shadow "pressed" 1597 | button.style.boxShadow = isActive ? '1px 1px 0px black' : '2px 2px 0px black'; 1598 | } 1599 | } 1600 | }, 1601 | { 1602 | key: "_setInteractionMode", 1603 | value: function _setInteractionMode(mode) { 1604 | var _this = this; 1605 | if (this.interactionMode === mode) return; // No change 1606 | console.log("Setting interaction mode to: ".concat(mode)); 1607 | this.interactionMode = mode; 1608 | // If currently grabbing, release the model 1609 | if (this.grabbingHandIndex !== -1 && this.pickedUpModel) { 1610 | console.log("Interaction mode changed while grabbing. 
Releasing model from hand ".concat(this.grabbingHandIndex, ".")); 1611 | this.grabbingHandIndex = -1; 1612 | this.pickedUpModel = null; 1613 | this.rotateLastHandX = null; 1614 | this.scaleInitialPinchDistance = null; // Reset scaling variables 1615 | this.scaleInitialModelScale = null; 1616 | // if (this.grabMarker && this.pandaModel) this.grabMarker.visible = true; // Grab marker removed 1617 | } 1618 | this._updateHandMaterialsForMode(mode); // Update hand colors for new mode 1619 | this._updateInteractionModeButtonStyles(); 1620 | // Show/hide animation buttons container based on mode 1621 | if (this.animationButtonsContainer) { 1622 | if (mode === 'animate') { 1623 | this.animationButtonsContainer.style.display = 'flex'; 1624 | requestAnimationFrame(function() { 1625 | _this.animationButtonsContainer.style.opacity = '1'; 1626 | }); 1627 | } else { 1628 | this.animationButtonsContainer.style.opacity = '0'; 1629 | // Wait for transition to complete before setting display to none 1630 | setTimeout(function() { 1631 | if (_this.interactionMode !== 'animate') { 1632 | _this.animationButtonsContainer.style.display = 'none'; 1633 | } 1634 | }, 300); // Corresponds to transition duration 1635 | } 1636 | } 1637 | this._updateInstructionText(); // Update instruction text when mode changes 1638 | } 1639 | }, 1640 | { 1641 | key: "_updateInstructionText", 1642 | value: function _updateInstructionText() { 1643 | if (this.instructionTextElement) { 1644 | var instruction = this.interactionModeInstructions[this.interactionMode] || "Use hand gestures to interact."; 1645 | this.instructionTextElement.innerText = instruction; 1646 | // The instruction text should always be 10px from the bottom. 1647 | // The animation buttons are positioned from the top-left and should not affect this. 1648 | this.instructionTextElement.style.bottom = '10px'; // Decreased bottom margin 1649 | } 1650 | } 1651 | }, 1652 | { 1653 | key: "_updateHandMaterialsForMode", 1654 | value: function _updateHandMaterialsForMode(mode) { 1655 | var modeConfig = this.interactionModeColors[mode]; 1656 | var colorToSet = modeConfig ? 
modeConfig.hand : new THREE.Color(0x00ccff); // Fallback color 1657 | if (this.fingertipMaterialHand1) { 1658 | this.fingertipMaterialHand1.color.set(colorToSet); 1659 | } 1660 | if (this.fingertipMaterialHand2) { 1661 | this.fingertipMaterialHand2.color.set(colorToSet); 1662 | } 1663 | } 1664 | }, 1665 | { 1666 | key: "_updateInteractionModeButtonStyles", 1667 | value: function _updateInteractionModeButtonStyles() { 1668 | var _this = this; 1669 | for(var modeKey in this.interactionModeButtons){ 1670 | var button = this.interactionModeButtons[modeKey]; 1671 | var modeConfig = this.interactionModeColors[modeKey]; 1672 | var fallbackColor = '#6c757d'; 1673 | var fallbackTextColor = 'white'; 1674 | if (modeKey === this.interactionMode) { 1675 | button.style.border = '2px solid black'; // All buttons have black border 1676 | if (modeConfig) { 1677 | button.style.backgroundColor = modeConfig.base; 1678 | button.style.color = modeConfig.text; 1679 | } else { 1680 | button.style.backgroundColor = fallbackColor; 1681 | button.style.color = fallbackTextColor; 1682 | } 1683 | button.style.fontWeight = 'bold'; // Already bold from initial setup, but ensure it stays 1684 | button.style.boxShadow = '1px 1px 0px black'; // "Pressed" shadow for active button 1685 | } else { 1686 | button.style.backgroundColor = 'rgba(255, 255, 255, 0.5)'; // More opaque transparent white background 1687 | button.style.border = '2px solid black'; // Black border for inactive 1688 | if (modeConfig) { 1689 | button.style.color = modeConfig.base; // Neon text color 1690 | } else { 1691 | button.style.color = fallbackColor; // Fallback text color for inactive 1692 | } 1693 | button.style.fontWeight = 'bold'; // Always bold 1694 | button.style.boxShadow = '2px 2px 0px black'; // Default shadow for inactive 1695 | } 1696 | } 1697 | // Explicitly set display for animationButtonsContainer based on current mode 1698 | // This ensures it's correct even on initial load if default mode isn't 'animate' 1699 | if (this.animationButtonsContainer) { 1700 | if (this.interactionMode === 'animate') { 1701 | this.animationButtonsContainer.style.display = 'flex'; 1702 | requestAnimationFrame(function() { 1703 | _this.animationButtonsContainer.style.opacity = '1'; 1704 | }); 1705 | } else { 1706 | this.animationButtonsContainer.style.opacity = '0'; 1707 | this.animationButtonsContainer.style.display = 'none'; // Set display none immediately if not animate 1708 | } 1709 | } 1710 | this._updateInstructionText(); // Also call here to adjust position if animation buttons are shown/hidden 1711 | } 1712 | }, 1713 | { 1714 | key: "_setupDragAndDrop", 1715 | value: function _setupDragAndDrop() { 1716 | var _this = this; 1717 | this.renderDiv.addEventListener('dragover', function(event) { 1718 | event.preventDefault(); // Prevent default behavior to allow drop 1719 | event.dataTransfer.dropEffect = 'copy'; // Show a copy icon 1720 | _this.renderDiv.style.border = '2px dashed #007bff'; // Visual feedback 1721 | }); 1722 | this.renderDiv.addEventListener('dragleave', function(event) { 1723 | _this.renderDiv.style.border = 'none'; // Remove visual feedback 1724 | }); 1725 | this.renderDiv.addEventListener('drop', function(event) { 1726 | event.preventDefault(); 1727 | _this.renderDiv.style.border = 'none'; // Remove visual feedback 1728 | if (event.dataTransfer.files && event.dataTransfer.files.length > 0) { 1729 | var file = event.dataTransfer.files[0]; 1730 | var fileName = file.name.toLowerCase(); 1731 | var fileType = file.type.toLowerCase(); 1732 | 
if (fileName.endsWith('.gltf') || fileName.endsWith('.glb') || fileType === 'model/gltf+json' || fileType === 'model/gltf-binary') { 1733 | console.log("GLTF file dropped: ".concat(file.name), file); 1734 | // Next step: Process and load this file. 1735 | _this._loadDroppedModel(file); 1736 | } else { 1737 | console.warn('Dropped file is not a recognized GLTF format:', file.name, file.type); 1738 | _this._showStatusScreen('"'.concat(file.name, '" is not a GLTF model.'), 'orange', false); 1739 | setTimeout(function() { 1740 | if (_this.gameOverContainer.style.display === 'block' && _this.gameOverText.innerText.includes(file.name)) { 1741 | _this.gameOverContainer.style.display = 'none'; 1742 | } 1743 | }, 3000); 1744 | } 1745 | event.dataTransfer.clearData(); 1746 | } 1747 | }); 1748 | } 1749 | }, 1750 | { 1751 | key: "_loadDroppedModel", 1752 | value: function _loadDroppedModel(file) { 1753 | var _this = this; 1754 | console.log("Processing dropped model:", file.name, file.type); 1755 | var reader = new FileReader(); 1756 | reader.onload = function(e) { 1757 | // Pass file.type as well, it might be useful for _parseAndLoadGltf context 1758 | _this._parseAndLoadGltf(e.target.result, file.name, file.type); 1759 | }; 1760 | reader.onerror = function(error) { 1761 | console.error("FileReader error for ".concat(file.name, ":"), error); 1762 | _this._showError("Error reading file ".concat(file.name, ".")); 1763 | // Ensure loading message is hidden if it was shown by this function 1764 | if (_this.gameOverContainer.style.display === 'block' && _this.gameOverText.innerText.startsWith('Loading "'.concat(file.name, '"'))) { 1765 | _this.gameOverContainer.style.display = 'none'; 1766 | } 1767 | }; 1768 | var fileNameLower = file.name.toLowerCase(); 1769 | var fileTypeLower = file.type ? file.type.toLowerCase() : ''; 1770 | if (fileNameLower.endsWith('.glb') || fileTypeLower === 'model/gltf-binary') { 1771 | console.log("Reading ".concat(file.name, " as ArrayBuffer.")); 1772 | reader.readAsArrayBuffer(file); 1773 | } else if (fileNameLower.endsWith('.gltf') || fileTypeLower === 'model/gltf+json') { 1774 | console.log("Reading ".concat(file.name, " as text.")); 1775 | reader.readAsText(file); 1776 | } else { 1777 | var message = file.type ? "Unsupported file type: ".concat(file.type) : 'Cannot determine file type.'; 1778 | console.warn("Unknown file format for GLTF loader: ".concat(file.name, ", Type: ").concat(file.type)); 1779 | this._showError("".concat(message, " for ").concat(file.name, ". Please drop a .gltf or .glb file.")); 1780 | // Ensure loading message is hidden 1781 | if (this.gameOverContainer.style.display === 'block' && this.gameOverText.innerText.startsWith('Loading "'.concat(file.name, '"'))) { 1782 | this.gameOverContainer.style.display = 'none'; 1783 | } 1784 | } 1785 | } 1786 | }, 1787 | { 1788 | key: "_parseAndLoadGltf", 1789 | value: function _parseAndLoadGltf(content, fileName, fileType) { 1790 | var _this = this; 1791 | var loader = new GLTFLoader(); // GLTFLoader is already imported at the top 1792 | try { 1793 | // The 'path' argument is for resolving relative paths for external resources like .bin or textures. 1794 | // For a single file drop, this is typically empty. If it's a .gltf with external files, 1795 | // those files would need to be handled separately (e.g., by being dropped together and identified). 1796 | // This setup works best for self-contained .glb files or .gltf files using data URIs. 
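// A condensed standalone sketch combining the FileReader handling from _loadDroppedModel above
// with the GLTFLoader.parse call just below, using the GLTFLoader already imported by this file
// (the helper name and callbacks are hypothetical): .glb files are read as an ArrayBuffer,
// .gltf files as text, and the result is parsed with an empty resource path, so only
// self-contained .glb files or .gltf files with embedded/data-URI resources resolve fully.
function parseDroppedGltfFile(file, onScene, onFail) {
    var reader = new FileReader();
    reader.onerror = function(err) { onFail(err); };
    reader.onload = function(e) {
        new GLTFLoader().parse(
            e.target.result, // ArrayBuffer for .glb, JSON string for .gltf
            '',              // empty base path: external .bin/texture references will not resolve
            function(gltf) { onScene(gltf.scene, gltf.animations); },
            function(error) { onFail(error); }
        );
    };
    if (file.name.toLowerCase().endsWith('.glb')) reader.readAsArrayBuffer(file);
    else reader.readAsText(file);
}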
1797 | loader.parse(content, '', function(gltf) { 1798 | console.log("Successfully parsed GLTF model: ".concat(fileName), gltf); 1799 | // 1. If a previous model exists, remove it and clean up its animations 1800 | if (_this.pandaModel) { 1801 | _this.scene.remove(_this.pandaModel); 1802 | // Consider disposing geometry/materials of this.pandaModel here for memory management in a larger app 1803 | console.log("Removed previous model from scene."); 1804 | if (_this.animationMixer) { 1805 | _this.animationMixer.stopAllAction(); 1806 | _this.currentAction = null; 1807 | } 1808 | // Clear out old animation buttons 1809 | while(_this.animationButtonsContainer.firstChild){ 1810 | _this.animationButtonsContainer.removeChild(_this.animationButtonsContainer.firstChild); 1811 | } 1812 | _this.animationActions = {}; 1813 | _this.animationClips = []; 1814 | } 1815 | // 2. Set the new model as the current model 1816 | _this.pandaModel = gltf.scene; 1817 | // 3. Scale and position the new model 1818 | var scale = 80; 1819 | _this.pandaModel.scale.set(scale, scale, scale); 1820 | var sceneHeight = _this.renderDiv.clientHeight; 1821 | _this.pandaModel.position.set(0, sceneHeight * -0.45, -1000); 1822 | // 4. Add the new model to the scene 1823 | _this.scene.add(_this.pandaModel); 1824 | console.log('Added new model "'.concat(fileName, '" to scene.')); 1825 | // 5. Setup animations for the new model 1826 | _this.animationMixer = new THREE.AnimationMixer(_this.pandaModel); 1827 | _this.animationClips = gltf.animations; 1828 | _this.animationActions = {}; // Ensure it's clean for new actions 1829 | if (_this.animationClips && _this.animationClips.length) { 1830 | _this.animationClips.forEach(function(clip, index) { 1831 | var action = _this.animationMixer.clipAction(clip); 1832 | var actionName = clip.name || "Animation ".concat(index + 1); 1833 | _this.animationActions[actionName] = action; 1834 | var button = document.createElement('button'); 1835 | button.innerText = actionName; 1836 | button.style.padding = '5px 10px'; 1837 | button.style.fontSize = '13px'; 1838 | button.style.backgroundColor = '#f0f0f0'; 1839 | button.style.color = 'black'; 1840 | button.style.border = '2px solid black'; 1841 | button.style.borderRadius = '4px'; 1842 | button.style.cursor = 'pointer'; 1843 | button.style.transition = 'background-color 0.2s ease, box-shadow 0.2s ease'; 1844 | button.style.boxShadow = '2px 2px 0px black'; 1845 | button.addEventListener('click', function() { 1846 | return _this._playAnimation(actionName); 1847 | }); 1848 | _this.animationButtonsContainer.appendChild(button); 1849 | }); 1850 | var defaultActionName = Object.keys(_this.animationActions)[0]; 1851 | var idleActionKey = Object.keys(_this.animationActions).find(function(name) { 1852 | return name.toLowerCase().includes('idle'); 1853 | }); 1854 | if (idleActionKey) { 1855 | defaultActionName = idleActionKey; 1856 | } 1857 | if (defaultActionName && _this.animationActions[defaultActionName]) { 1858 | _this.currentAction = _this.animationActions[defaultActionName]; 1859 | _this.currentAction.reset().play(); 1860 | _this._updateButtonStyles(defaultActionName); 1861 | } else { 1862 | _this.currentAction = null; 1863 | } 1864 | } else { 1865 | console.log('New model "'.concat(fileName, '" has no embedded animations.')); 1866 | _this.currentAction = null; 1867 | } 1868 | // 6. 
Reset interaction states 1869 | _this.grabbingHandIndex = -1; 1870 | _this.pickedUpModel = null; 1871 | _this.rotateLastHandX = null; 1872 | _this.scaleInitialPinchDistance = null; 1873 | _this.scaleInitialModelScale = null; 1874 | _this.animationControlHandIndex = -1; 1875 | _this.animationControlInitialPinchY = null; 1876 | // This will ensure animation buttons are shown/hidden correctly based on current mode 1877 | _this._updateInteractionModeButtonStyles(); 1878 | _this.loadedDroppedModelData = null; // Clear the temp storage 1879 | }, function(error) { 1880 | console.error("Error parsing GLTF model ".concat(fileName, ":"), error); 1881 | _this._showError('Failed to parse "'.concat(fileName, '". Model might be corrupt or unsupported. Check console.')); 1882 | }); 1883 | } catch (e) { 1884 | // This catch is for synchronous errors during loader.parse() setup, though most errors are async. 1885 | console.error("Critical error during GLTF parsing setup for ".concat(fileName, ":"), e); 1886 | this._showError('Error setting up parser for "'.concat(fileName, '".')); 1887 | } 1888 | } 1889 | } 1890 | ]); 1891 | return Game; 1892 | }(); -------------------------------------------------------------------------------- /index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 |
4 | 5 | 6 |
46 | Instagram 47 |