/**
 * Deprecated stub kept for API compatibility.
 * @throws {Error} Always — use computeMikkTSpaceTangents instead.
 */
function computeTangents() {

  throw new Error( 'BufferGeometryUtils: computeTangents renamed to computeMikkTSpaceTangents.' );

}

/**
 * Computes MikkTSpace tangents for a geometry.
 *
 * @param {BufferGeometry} geometry - Must carry 'position', 'normal' and 'uv' attributes.
 * @param {Object} MikkTSpace - An initialized MikkTSpace library instance (isReady === true).
 * @param {boolean} [negateSign=true] - Flip the .w component to match glTF's UV convention.
 * @returns {BufferGeometry} The input geometry with a 4-component 'tangent' attribute set.
 * @throws {Error} If MikkTSpace is missing/uninitialized or a required attribute is absent.
 */
function computeMikkTSpaceTangents( geometry, MikkTSpace, negateSign = true ) {

  if ( ! MikkTSpace || ! MikkTSpace.isReady ) {

    throw new Error( 'BufferGeometryUtils: Initialized MikkTSpace library required.' );

  }

  const hasRequired =
    geometry.hasAttribute( 'position' ) &&
    geometry.hasAttribute( 'normal' ) &&
    geometry.hasAttribute( 'uv' );

  if ( ! hasRequired ) {

    throw new Error( 'BufferGeometryUtils: Tangents require "position", "normal", and "uv" attributes.' );

  }

  // Normalized or interleaved data must be unpacked into a plain Float32Array
  // before it can be handed to MikkTSpace.
  const toFloat32 = ( attribute ) => {

    if ( attribute.normalized || attribute.isInterleavedBufferAttribute ) {

      const unpacked = new Float32Array( attribute.getCount() * attribute.itemSize );
      let write = 0;

      for ( let i = 0; i < attribute.getCount(); i ++ ) {

        unpacked[ write ++ ] = attribute.getX( i );
        unpacked[ write ++ ] = attribute.getY( i );
        if ( attribute.itemSize > 2 ) unpacked[ write ++ ] = attribute.getZ( i );

      }

      return unpacked;

    }

    return attribute.array instanceof Float32Array
      ? attribute.array
      : new Float32Array( attribute.array );

  };

  // The MikkTSpace algorithm requires non-indexed input.
  const workGeometry = geometry.index ? geometry.toNonIndexed() : geometry;

  const tangents = MikkTSpace.generateTangents(
    toFloat32( workGeometry.attributes.position ),
    toFloat32( workGeometry.attributes.normal ),
    toFloat32( workGeometry.attributes.uv )
  );

  // Texture coordinate convention of glTF differs from the apparent default
  // of the MikkTSpace library; the .w (sign) component must be flipped.
  if ( negateSign ) {

    for ( let i = 3; i < tangents.length; i += 4 ) tangents[ i ] *= - 1;

  }

  workGeometry.setAttribute( 'tangent', new BufferAttribute( tangents, 4 ) );

  if ( geometry !== workGeometry ) geometry.copy( workGeometry );

  return geometry;

}
/**
 * Merges a list of BufferGeometries into a single BufferGeometry.
 *
 * All inputs must be either indexed or non-indexed, expose the same set of
 * attributes and morph attributes, and agree on morphTargetsRelative.
 *
 * @param {Array} geometries - Geometries to merge.
 * @param {Boolean} useGroups - When true, one group per source geometry is added.
 * @return {BufferGeometry} The merged geometry, or null on incompatible input.
 */
function mergeBufferGeometries( geometries, useGroups = false ) {

  const first = geometries[ 0 ];
  const isIndexed = first.index !== null;
  const attributesUsed = new Set( Object.keys( first.attributes ) );
  const morphAttributesUsed = new Set( Object.keys( first.morphAttributes ) );
  const morphTargetsRelative = first.morphTargetsRelative;

  const attributes = {};
  const morphAttributes = {};
  const mergedGeometry = new BufferGeometry();

  let groupOffset = 0;

  for ( let i = 0; i < geometries.length; ++ i ) {

    const geometry = geometries[ i ];

    // All geometries must be indexed, or none of them.
    if ( isIndexed !== ( geometry.index !== null ) ) {

      console.error( `THREE.BufferGeometryUtils: .mergeBufferGeometries() failed with geometry at index ${ i }. All geometries must have compatible attributes; make sure index attribute exists among all geometries, or in none of them.` );
      return null;

    }

    // Collect attributes; bail out when the attribute sets differ.
    let attributesCount = 0;

    for ( const name in geometry.attributes ) {

      if ( ! attributesUsed.has( name ) ) {

        console.error( `THREE.BufferGeometryUtils: .mergeBufferGeometries() failed with geometry at index ${ i }. All geometries must have compatible attributes; make sure "${ name }" attribute exists among all geometries, or in none of them.` );
        return null;

      }

      if ( attributes[ name ] === undefined ) attributes[ name ] = [];
      attributes[ name ].push( geometry.attributes[ name ] );
      attributesCount ++;

    }

    if ( attributesCount !== attributesUsed.size ) {

      console.error( `THREE.BufferGeometryUtils: .mergeBufferGeometries() failed with geometry at index ${ i }. Make sure all geometries have the same number of attributes.` );
      return null;

    }

    if ( morphTargetsRelative !== geometry.morphTargetsRelative ) {

      console.error( `THREE.BufferGeometryUtils: .mergeBufferGeometries() failed with geometry at index ${ i }. .morphTargetsRelative must be consistent throughout all geometries.` );
      return null;

    }

    // Collect morph attributes, likewise requiring a consistent set.
    for ( const name in geometry.morphAttributes ) {

      if ( ! morphAttributesUsed.has( name ) ) {

        console.error( `THREE.BufferGeometryUtils: .mergeBufferGeometries() failed with geometry at index ${ i }. .morphAttributes must be consistent throughout all geometries.` );
        return null;

      }

      if ( morphAttributes[ name ] === undefined ) morphAttributes[ name ] = [];
      morphAttributes[ name ].push( geometry.morphAttributes[ name ] );

    }

    if ( useGroups ) {

      let count;

      if ( isIndexed ) {

        count = geometry.index.count;

      } else if ( geometry.attributes.position !== undefined ) {

        count = geometry.attributes.position.count;

      } else {

        console.error( `THREE.BufferGeometryUtils: .mergeBufferGeometries() failed with geometry at index ${ i }. The geometry must have either an index or a position attribute` );
        return null;

      }

      mergedGeometry.addGroup( groupOffset, count, i );
      groupOffset += count;

    }

  }

  // Merge indices, offsetting each geometry's indices by the vertex count of
  // everything merged before it.
  if ( isIndexed ) {

    let indexOffset = 0;
    const mergedIndex = [];

    for ( const geometry of geometries ) {

      const index = geometry.index;

      for ( let j = 0; j < index.count; ++ j ) {

        mergedIndex.push( index.getX( j ) + indexOffset );

      }

      indexOffset += geometry.attributes.position.count;

    }

    mergedGeometry.setIndex( mergedIndex );

  }

  // Merge attributes.
  for ( const name in attributes ) {

    const mergedAttribute = mergeBufferAttributes( attributes[ name ] );

    if ( ! mergedAttribute ) {

      console.error( `THREE.BufferGeometryUtils: .mergeBufferGeometries() failed while trying to merge the ${ name } attribute.` );
      return null;

    }

    mergedGeometry.setAttribute( name, mergedAttribute );

  }

  // Merge morph attributes.
  for ( const name in morphAttributes ) {

    const numMorphTargets = morphAttributes[ name ][ 0 ].length;

    // NOTE(review): `break` (not `continue`) aborts merging of all remaining
    // morph names when one has zero targets — preserved from the original.
    if ( numMorphTargets === 0 ) break;

    mergedGeometry.morphAttributes = mergedGeometry.morphAttributes || {};
    mergedGeometry.morphAttributes[ name ] = [];

    for ( let i = 0; i < numMorphTargets; ++ i ) {

      const morphAttributesToMerge = [];

      for ( let j = 0; j < morphAttributes[ name ].length; ++ j ) {

        morphAttributesToMerge.push( morphAttributes[ name ][ j ][ i ] );

      }

      const mergedMorphAttribute = mergeBufferAttributes( morphAttributesToMerge );

      if ( ! mergedMorphAttribute ) {

        console.error( `THREE.BufferGeometryUtils: .mergeBufferGeometries() failed while trying to merge the ${ name } morphAttribute.` );
        return null;

      }

      mergedGeometry.morphAttributes[ name ].push( mergedMorphAttribute );

    }

  }

  return mergedGeometry;

}
/**
 * Concatenates a list of (non-interleaved) BufferAttributes into a single one.
 * All inputs must share array type, itemSize and normalized flag.
 *
 * @param {Array} attributes
 * @return {BufferAttribute} The merged attribute, or null on incompatible input.
 */
function mergeBufferAttributes( attributes ) {

  let TypedArray;
  let itemSize;
  let normalized;
  let totalLength = 0;

  for ( const attribute of attributes ) {

    if ( attribute.isInterleavedBufferAttribute ) {

      console.error( 'THREE.BufferGeometryUtils: .mergeBufferAttributes() failed. InterleavedBufferAttributes are not supported.' );
      return null;

    }

    if ( TypedArray === undefined ) TypedArray = attribute.array.constructor;
    if ( TypedArray !== attribute.array.constructor ) {

      console.error( 'THREE.BufferGeometryUtils: .mergeBufferAttributes() failed. BufferAttribute.array must be of consistent array types across matching attributes.' );
      return null;

    }

    if ( itemSize === undefined ) itemSize = attribute.itemSize;
    if ( itemSize !== attribute.itemSize ) {

      console.error( 'THREE.BufferGeometryUtils: .mergeBufferAttributes() failed. BufferAttribute.itemSize must be consistent across matching attributes.' );
      return null;

    }

    if ( normalized === undefined ) normalized = attribute.normalized;
    if ( normalized !== attribute.normalized ) {

      console.error( 'THREE.BufferGeometryUtils: .mergeBufferAttributes() failed. BufferAttribute.normalized must be consistent across matching attributes.' );
      return null;

    }

    totalLength += attribute.array.length;

  }

  // Copy every source array back-to-back into one typed array.
  const merged = new TypedArray( totalLength );
  let writeOffset = 0;

  for ( const attribute of attributes ) {

    merged.set( attribute.array, writeOffset );
    writeOffset += attribute.array.length;

  }

  return new BufferAttribute( merged, itemSize, normalized );

}

/**
 * Deep-clones a BufferAttribute, de-interleaving interleaved inputs into a
 * standalone attribute of the matching (instanced or plain) type.
 *
 * @param {BufferAttribute} attribute
 * @return {BufferAttribute}
 */
export function deepCloneAttribute( attribute ) {

  if ( attribute.isInstancedInterleavedBufferAttribute || attribute.isInterleavedBufferAttribute ) {

    return deinterleaveAttribute( attribute );

  }

  if ( attribute.isInstancedBufferAttribute ) {

    return new InstancedBufferAttribute().copy( attribute );

  }

  return new BufferAttribute().copy( attribute );

}
/**
 * Interleaves the provided attributes into a single InterleavedBuffer and
 * returns a matching InterleavedBufferAttribute for each input.
 * All inputs must share the same typed-array type.
 *
 * @param {Array} attributes
 * @return {Array} The InterleavedBufferAttributes, or null on mixed array types.
 */
function interleaveAttributes( attributes ) {

  const getters = [ 'getX', 'getY', 'getZ', 'getW' ];
  const setters = [ 'setX', 'setY', 'setZ', 'setW' ];

  // Determine the combined length, stride and array type of the shared buffer.
  let TypedArray;
  let totalLength = 0;
  let stride = 0;

  for ( const attribute of attributes ) {

    if ( TypedArray === undefined ) TypedArray = attribute.array.constructor;
    if ( TypedArray !== attribute.array.constructor ) {

      console.error( 'AttributeBuffers of different types cannot be interleaved' );
      return null;

    }

    totalLength += attribute.array.length;
    stride += attribute.itemSize;

  }

  // Build one InterleavedBufferAttribute per source attribute and copy the
  // source data into the shared buffer at the appropriate offset.
  const interleavedBuffer = new InterleavedBuffer( new TypedArray( totalLength ), stride );
  const result = [];
  let offset = 0;

  for ( const attribute of attributes ) {

    const itemSize = attribute.itemSize;
    const iba = new InterleavedBufferAttribute( interleavedBuffer, itemSize, offset, attribute.normalized );
    result.push( iba );

    offset += itemSize;

    for ( let c = 0; c < attribute.count; c ++ ) {

      for ( let k = 0; k < itemSize; k ++ ) {

        iba[ setters[ k ] ]( c, attribute[ getters[ k ] ]( c ) );

      }

    }

  }

  return result;

}
// Returns a new, non-interleaved copy of the provided (interleaved) attribute,
// preserving the instanced flavour and meshPerAttribute when present.
export function deinterleaveAttribute( attribute ) {

  const ArrayType = attribute.data.array.constructor;
  const count = attribute.count;
  const itemSize = attribute.itemSize;
  const normalized = attribute.normalized;

  const array = new ArrayType( count * itemSize );

  const result = attribute.isInstancedInterleavedBufferAttribute
    ? new InstancedBufferAttribute( array, itemSize, normalized, attribute.meshPerAttribute )
    : new BufferAttribute( array, itemSize, normalized );

  for ( let i = 0; i < count; i ++ ) {

    result.setX( i, attribute.getX( i ) );
    if ( itemSize >= 2 ) result.setY( i, attribute.getY( i ) );
    if ( itemSize >= 3 ) result.setZ( i, attribute.getZ( i ) );
    if ( itemSize >= 4 ) result.setW( i, attribute.getW( i ) );

  }

  return result;

}

// De-interleaves every interleaved attribute (and morph target) on the
// geometry in place, reusing one de-interleaved copy per shared source
// attribute so aliasing between slots is preserved.
export function deinterleaveGeometry( geometry ) {

  const deinterleaved = new Map();

  const replaceAll = ( collection ) => {

    for ( const key in collection ) {

      const attr = collection[ key ];
      if ( ! attr.isInterleavedBufferAttribute ) continue;

      if ( ! deinterleaved.has( attr ) ) {

        deinterleaved.set( attr, deinterleaveAttribute( attr ) );

      }

      collection[ key ] = deinterleaved.get( attr );

    }

  };

  replaceAll( geometry.attributes );
  replaceAll( geometry.morphTargets );

}

/**
 * Estimates the memory used by a geometry, in bytes.
 * Calculated from count * itemSize * BYTES_PER_ELEMENT so interleaved
 * attributes are counted by the portion they actually reference.
 *
 * @param {BufferGeometry} geometry
 * @return {number}
 */
function estimateBytesUsed( geometry ) {

  let bytes = 0;

  for ( const name in geometry.attributes ) {

    const attr = geometry.getAttribute( name );
    bytes += attr.count * attr.itemSize * attr.array.BYTES_PER_ELEMENT;

  }

  const index = geometry.getIndex();
  if ( index ) bytes += index.count * index.itemSize * index.array.BYTES_PER_ELEMENT;

  return bytes;

}
/**
 * Welds vertices that are identical (within `tolerance`) across every
 * attribute, returning a new, indexed geometry.
 *
 * @param {BufferGeometry} geometry
 * @param {number} tolerance - Max per-component delta for two vertices to merge.
 * @return {BufferGeometry}
 */
function mergeVertices( geometry, tolerance = 1e-4 ) {

  tolerance = Math.max( tolerance, Number.EPSILON );

  const getters = [ 'getX', 'getY', 'getZ', 'getW' ];
  const setters = [ 'setX', 'setY', 'setZ', 'setW' ];

  const index = geometry.getIndex();
  const positions = geometry.getAttribute( 'position' );
  const vertexCount = index ? index.count : positions.count;
  const attributeNames = Object.keys( geometry.attributes );

  // Maps a vertex hash to its index in the deduplicated attribute arrays.
  const hashToIndex = {};
  const newIndices = [];
  let nextIndex = 0;

  // Conservatively sized scratch attributes; trimmed in the final step.
  const tmpAttributes = {};
  const tmpMorphAttributes = {};

  for ( const name of attributeNames ) {

    const attr = geometry.attributes[ name ];

    tmpAttributes[ name ] = new BufferAttribute(
      new attr.array.constructor( attr.count * attr.itemSize ),
      attr.itemSize,
      attr.normalized
    );

    const morphAttr = geometry.morphAttributes[ name ];
    if ( morphAttr ) {

      // NOTE(review): morphAttributes[name] is normally an *array* of
      // attributes in three.js; this mirrors the original code, which reads
      // it as a single attribute here — confirm before relying on morph
      // support in this function.
      tmpMorphAttributes[ name ] = new BufferAttribute(
        new morphAttr.array.constructor( morphAttr.count * morphAttr.itemSize ),
        morphAttr.itemSize,
        morphAttr.normalized
      );

    }

  }

  // Convert the tolerance to a number of decimal places to truncate to, so
  // that values within tolerance hash identically.
  const decimalShift = Math.log10( 1 / tolerance );
  const shiftMultiplier = Math.pow( 10, decimalShift );

  for ( let i = 0; i < vertexCount; i ++ ) {

    const vertexIndex = index ? index.getX( i ) : i;

    // Hash the full attribute tuple of this vertex.
    let hash = '';
    for ( const name of attributeNames ) {

      const attribute = geometry.getAttribute( name );

      for ( let k = 0; k < attribute.itemSize; k ++ ) {

        // ~~ truncates the decimal value toward zero.
        hash += `${ ~ ~ ( attribute[ getters[ k ] ]( vertexIndex ) * shiftMultiplier ) },`;

      }

    }

    if ( hash in hashToIndex ) {

      // Vertex already emitted — just reference it again.
      newIndices.push( hashToIndex[ hash ] );

    } else {

      // First occurrence: copy the vertex data into the scratch attributes.
      for ( const name of attributeNames ) {

        const attribute = geometry.getAttribute( name );
        const morphAttr = geometry.morphAttributes[ name ];
        const itemSize = attribute.itemSize;
        const newAttribute = tmpAttributes[ name ];
        const newMorphAttributes = tmpMorphAttributes[ name ];

        for ( let k = 0; k < itemSize; k ++ ) {

          const getterFunc = getters[ k ];
          const setterFunc = setters[ k ];
          newAttribute[ setterFunc ]( nextIndex, attribute[ getterFunc ]( vertexIndex ) );

          if ( morphAttr ) {

            for ( let m = 0, ml = morphAttr.length; m < ml; m ++ ) {

              newMorphAttributes[ m ][ setterFunc ]( nextIndex, morphAttr[ m ][ getterFunc ]( vertexIndex ) );

            }

          }

        }

      }

      hashToIndex[ hash ] = nextIndex;
      newIndices.push( nextIndex );
      nextIndex ++;

    }

  }

  // Build the result from the used portion of each scratch attribute.
  const result = geometry.clone();

  for ( const name in geometry.attributes ) {

    const tmpAttribute = tmpAttributes[ name ];

    result.setAttribute( name, new BufferAttribute(
      tmpAttribute.array.slice( 0, nextIndex * tmpAttribute.itemSize ),
      tmpAttribute.itemSize,
      tmpAttribute.normalized,
    ) );

    if ( ! ( name in tmpMorphAttributes ) ) continue;

    for ( let j = 0; j < tmpMorphAttributes[ name ].length; j ++ ) {

      const tmpMorphAttribute = tmpMorphAttributes[ name ][ j ];

      result.morphAttributes[ name ][ j ] = new BufferAttribute(
        tmpMorphAttribute.array.slice( 0, nextIndex * tmpMorphAttribute.itemSize ),
        tmpMorphAttribute.itemSize,
        tmpMorphAttribute.normalized,
      );

    }

  }

  result.setIndex( newIndices );

  return result;

}
/**
 * Converts a geometry drawn as a triangle fan or strip into plain triangles.
 *
 * @param {BufferGeometry} geometry
 * @param {number} drawMode - TrianglesDrawMode, TriangleFanDrawMode or TriangleStripDrawMode.
 * @return {BufferGeometry} A new triangles geometry, or the input on no-op/error.
 */
function toTrianglesDrawMode( geometry, drawMode ) {

  if ( drawMode === TrianglesDrawMode ) {

    console.warn( 'THREE.BufferGeometryUtils.toTrianglesDrawMode(): Geometry already defined as triangles.' );
    return geometry;

  }

  if ( drawMode !== TriangleFanDrawMode && drawMode !== TriangleStripDrawMode ) {

    console.error( 'THREE.BufferGeometryUtils.toTrianglesDrawMode(): Unknown draw mode:', drawMode );
    return geometry;

  }

  let index = geometry.getIndex();

  // The conversion walks an index; synthesize a sequential one if absent.
  if ( index === null ) {

    const position = geometry.getAttribute( 'position' );

    if ( position === undefined ) {

      console.error( 'THREE.BufferGeometryUtils.toTrianglesDrawMode(): Undefined position attribute. Processing not possible.' );
      return geometry;

    }

    const indices = [];
    for ( let i = 0; i < position.count; i ++ ) indices.push( i );

    geometry.setIndex( indices );
    index = geometry.getIndex();

  }

  const numberOfTriangles = index.count - 2;
  const newIndices = [];

  if ( drawMode === TriangleFanDrawMode ) {

    // gl.TRIANGLE_FAN: every triangle shares the first vertex.
    for ( let i = 1; i <= numberOfTriangles; i ++ ) {

      newIndices.push( index.getX( 0 ), index.getX( i ), index.getX( i + 1 ) );

    }

  } else {

    // gl.TRIANGLE_STRIP: winding order alternates with each triangle.
    for ( let i = 0; i < numberOfTriangles; i ++ ) {

      if ( i % 2 === 0 ) {

        newIndices.push( index.getX( i ), index.getX( i + 1 ), index.getX( i + 2 ) );

      } else {

        newIndices.push( index.getX( i + 2 ), index.getX( i + 1 ), index.getX( i ) );

      }

    }

  }

  if ( ( newIndices.length / 3 ) !== numberOfTriangles ) {

    console.error( 'THREE.BufferGeometryUtils.toTrianglesDrawMode(): Unable to generate correct amount of triangles.' );

  }

  // Build the final geometry.
  const newGeometry = geometry.clone();
  newGeometry.setIndex( newIndices );
  newGeometry.clearGroups();

  return newGeometry;

}
/**
 * Calculates the morphed attributes of a morphed/skinned BufferGeometry.
 * Helpful for Raytracing or Decals.
 *
 * @param {Mesh | Line | Points} object An instance of Mesh, Line or Points.
 * @return {Object} An Object with the original position/normal attributes and
 *   their morphed/skinned counterparts (Float32BufferAttribute), or null when
 *   the object's geometry is not a BufferGeometry.
 */
function computeMorphedAttributes( object ) {

  if ( object.geometry.isBufferGeometry !== true ) {

    console.error( 'THREE.BufferGeometryUtils: Geometry is not of type BufferGeometry.' );
    return null;

  }

  // Scratch vectors reused across every triangle to avoid per-vertex allocation.
  const _vA = new Vector3();
  const _vB = new Vector3();
  const _vC = new Vector3();

  const _tempA = new Vector3();
  const _tempB = new Vector3();
  const _tempC = new Vector3();

  const _morphA = new Vector3();
  const _morphB = new Vector3();
  const _morphC = new Vector3();

  // Applies morph targets and (for SkinnedMesh) bone transforms to the
  // triangle (a, b, c) of `attribute`, writing results into
  // `modifiedAttributeArray` at the same vertex indices.
  function _calculateMorphedAttributeData(
    object,
    attribute,
    morphAttribute,
    morphTargetsRelative,
    a,
    b,
    c,
    modifiedAttributeArray
  ) {

    _vA.fromBufferAttribute( attribute, a );
    _vB.fromBufferAttribute( attribute, b );
    _vC.fromBufferAttribute( attribute, c );

    const morphInfluences = object.morphTargetInfluences;

    if ( morphAttribute && morphInfluences ) {

      _morphA.set( 0, 0, 0 );
      _morphB.set( 0, 0, 0 );
      _morphC.set( 0, 0, 0 );

      for ( let i = 0, il = morphAttribute.length; i < il; i ++ ) {

        const influence = morphInfluences[ i ];
        const morph = morphAttribute[ i ];

        // Morph targets with zero influence contribute nothing — skip.
        if ( influence === 0 ) continue;

        _tempA.fromBufferAttribute( morph, a );
        _tempB.fromBufferAttribute( morph, b );
        _tempC.fromBufferAttribute( morph, c );

        if ( morphTargetsRelative ) {

          _morphA.addScaledVector( _tempA, influence );
          _morphB.addScaledVector( _tempB, influence );
          _morphC.addScaledVector( _tempC, influence );

        } else {

          // Absolute morph targets store final values; apply the delta from
          // the base attribute instead.
          _morphA.addScaledVector( _tempA.sub( _vA ), influence );
          _morphB.addScaledVector( _tempB.sub( _vB ), influence );
          _morphC.addScaledVector( _tempC.sub( _vC ), influence );

        }

      }

      _vA.add( _morphA );
      _vB.add( _morphB );
      _vC.add( _morphC );

    }

    if ( object.isSkinnedMesh ) {

      object.boneTransform( a, _vA );
      object.boneTransform( b, _vB );
      object.boneTransform( c, _vC );

    }

    modifiedAttributeArray[ a * 3 + 0 ] = _vA.x;
    modifiedAttributeArray[ a * 3 + 1 ] = _vA.y;
    modifiedAttributeArray[ a * 3 + 2 ] = _vA.z;
    modifiedAttributeArray[ b * 3 + 0 ] = _vB.x;
    modifiedAttributeArray[ b * 3 + 1 ] = _vB.y;
    modifiedAttributeArray[ b * 3 + 2 ] = _vB.z;
    modifiedAttributeArray[ c * 3 + 0 ] = _vC.x;
    modifiedAttributeArray[ c * 3 + 1 ] = _vC.y;
    modifiedAttributeArray[ c * 3 + 2 ] = _vC.z;

  }

  const geometry = object.geometry;
  const material = object.material;

  let a, b, c;
  const index = geometry.index;
  const positionAttribute = geometry.attributes.position;
  const morphPosition = geometry.morphAttributes.position;
  const morphTargetsRelative = geometry.morphTargetsRelative;
  const normalAttribute = geometry.attributes.normal;
  // FIX: this previously read geometry.morphAttributes.position, which
  // morphed the normals with the *position* morph targets.
  const morphNormal = geometry.morphAttributes.normal;

  const groups = geometry.groups;
  const drawRange = geometry.drawRange;
  let i, j, il, jl;
  let group;
  let start, end;

  const modifiedPosition = new Float32Array( positionAttribute.count * positionAttribute.itemSize );
  const modifiedNormal = new Float32Array( normalAttribute.count * normalAttribute.itemSize );

  if ( index !== null ) {

    // Indexed buffer geometry.

    if ( Array.isArray( material ) ) {

      // Multi-material: only triangles referenced by a group are processed,
      // clipped against the draw range.
      for ( i = 0, il = groups.length; i < il; i ++ ) {

        group = groups[ i ];

        start = Math.max( group.start, drawRange.start );
        end = Math.min( ( group.start + group.count ), ( drawRange.start + drawRange.count ) );

        for ( j = start, jl = end; j < jl; j += 3 ) {

          a = index.getX( j );
          b = index.getX( j + 1 );
          c = index.getX( j + 2 );

          _calculateMorphedAttributeData( object, positionAttribute, morphPosition, morphTargetsRelative, a, b, c, modifiedPosition );
          _calculateMorphedAttributeData( object, normalAttribute, morphNormal, morphTargetsRelative, a, b, c, modifiedNormal );

        }

      }

    } else {

      start = Math.max( 0, drawRange.start );
      end = Math.min( index.count, ( drawRange.start + drawRange.count ) );

      for ( i = start, il = end; i < il; i += 3 ) {

        a = index.getX( i );
        b = index.getX( i + 1 );
        c = index.getX( i + 2 );

        _calculateMorphedAttributeData( object, positionAttribute, morphPosition, morphTargetsRelative, a, b, c, modifiedPosition );
        _calculateMorphedAttributeData( object, normalAttribute, morphNormal, morphTargetsRelative, a, b, c, modifiedNormal );

      }

    }

  } else {

    // Non-indexed buffer geometry: vertex indices are sequential.

    if ( Array.isArray( material ) ) {

      for ( i = 0, il = groups.length; i < il; i ++ ) {

        group = groups[ i ];

        start = Math.max( group.start, drawRange.start );
        end = Math.min( ( group.start + group.count ), ( drawRange.start + drawRange.count ) );

        for ( j = start, jl = end; j < jl; j += 3 ) {

          a = j;
          b = j + 1;
          c = j + 2;

          _calculateMorphedAttributeData( object, positionAttribute, morphPosition, morphTargetsRelative, a, b, c, modifiedPosition );
          _calculateMorphedAttributeData( object, normalAttribute, morphNormal, morphTargetsRelative, a, b, c, modifiedNormal );

        }

      }

    } else {

      start = Math.max( 0, drawRange.start );
      end = Math.min( positionAttribute.count, ( drawRange.start + drawRange.count ) );

      for ( i = start, il = end; i < il; i += 3 ) {

        a = i;
        b = i + 1;
        c = i + 2;

        _calculateMorphedAttributeData( object, positionAttribute, morphPosition, morphTargetsRelative, a, b, c, modifiedPosition );
        _calculateMorphedAttributeData( object, normalAttribute, morphNormal, morphTargetsRelative, a, b, c, modifiedNormal );

      }

    }

  }

  const morphedPositionAttribute = new Float32BufferAttribute( modifiedPosition, 3 );
  const morphedNormalAttribute = new Float32BufferAttribute( modifiedNormal, 3 );

  return {

    positionAttribute: positionAttribute,
    normalAttribute: normalAttribute,
    morphedPositionAttribute: morphedPositionAttribute,
    morphedNormalAttribute: morphedNormalAttribute

  };

}
); 1131 | return geometry; 1132 | 1133 | } 1134 | 1135 | let groups = geometry.groups; 1136 | 1137 | // sort groups by material index 1138 | 1139 | groups = groups.sort( ( a, b ) => { 1140 | 1141 | if ( a.materialIndex !== b.materialIndex ) return a.materialIndex - b.materialIndex; 1142 | 1143 | return a.start - b.start; 1144 | 1145 | } ); 1146 | 1147 | // create index for non-indexed geometries 1148 | 1149 | if ( geometry.getIndex() === null ) { 1150 | 1151 | const positionAttribute = geometry.getAttribute( 'position' ); 1152 | const indices = []; 1153 | 1154 | for ( let i = 0; i < positionAttribute.count; i += 3 ) { 1155 | 1156 | indices.push( i, i + 1, i + 2 ); 1157 | 1158 | } 1159 | 1160 | geometry.setIndex( indices ); 1161 | 1162 | } 1163 | 1164 | // sort index 1165 | 1166 | const index = geometry.getIndex(); 1167 | 1168 | const newIndices = []; 1169 | 1170 | for ( let i = 0; i < groups.length; i ++ ) { 1171 | 1172 | const group = groups[ i ]; 1173 | 1174 | const groupStart = group.start; 1175 | const groupLength = groupStart + group.count; 1176 | 1177 | for ( let j = groupStart; j < groupLength; j ++ ) { 1178 | 1179 | newIndices.push( index.getX( j ) ); 1180 | 1181 | } 1182 | 1183 | } 1184 | 1185 | geometry.dispose(); // Required to force buffer recreation 1186 | geometry.setIndex( newIndices ); 1187 | 1188 | // update groups indices 1189 | 1190 | let start = 0; 1191 | 1192 | for ( let i = 0; i < groups.length; i ++ ) { 1193 | 1194 | const group = groups[ i ]; 1195 | 1196 | group.start = start; 1197 | start += group.count; 1198 | 1199 | } 1200 | 1201 | // merge groups 1202 | 1203 | let currentGroup = groups[ 0 ]; 1204 | 1205 | geometry.groups = [ currentGroup ]; 1206 | 1207 | for ( let i = 1; i < groups.length; i ++ ) { 1208 | 1209 | const group = groups[ i ]; 1210 | 1211 | if ( currentGroup.materialIndex === group.materialIndex ) { 1212 | 1213 | currentGroup.count += group.count; 1214 | 1215 | } else { 1216 | 1217 | currentGroup = group; 1218 | 
geometry.groups.push( currentGroup ); 1219 | 1220 | } 1221 | 1222 | } 1223 | 1224 | return geometry; 1225 | 1226 | } 1227 | 1228 | 1229 | // Creates a new, non-indexed geometry with smooth normals everywhere except faces that meet at 1230 | // an angle greater than the crease angle. 1231 | function toCreasedNormals( geometry, creaseAngle = Math.PI / 3 /* 60 degrees */ ) { 1232 | 1233 | const creaseDot = Math.cos( creaseAngle ); 1234 | const hashMultiplier = ( 1 + 1e-10 ) * 1e2; 1235 | 1236 | // reusable vertors 1237 | const verts = [ new Vector3(), new Vector3(), new Vector3() ]; 1238 | const tempVec1 = new Vector3(); 1239 | const tempVec2 = new Vector3(); 1240 | const tempNorm = new Vector3(); 1241 | const tempNorm2 = new Vector3(); 1242 | 1243 | // hashes a vector 1244 | function hashVertex( v ) { 1245 | 1246 | const x = ~ ~ ( v.x * hashMultiplier ); 1247 | const y = ~ ~ ( v.y * hashMultiplier ); 1248 | const z = ~ ~ ( v.z * hashMultiplier ); 1249 | return `${x},${y},${z}`; 1250 | 1251 | } 1252 | 1253 | const resultGeometry = geometry.toNonIndexed(); 1254 | const posAttr = resultGeometry.attributes.position; 1255 | const vertexMap = {}; 1256 | 1257 | // find all the normals shared by commonly located vertices 1258 | for ( let i = 0, l = posAttr.count / 3; i < l; i ++ ) { 1259 | 1260 | const i3 = 3 * i; 1261 | const a = verts[ 0 ].fromBufferAttribute( posAttr, i3 + 0 ); 1262 | const b = verts[ 1 ].fromBufferAttribute( posAttr, i3 + 1 ); 1263 | const c = verts[ 2 ].fromBufferAttribute( posAttr, i3 + 2 ); 1264 | 1265 | tempVec1.subVectors( c, b ); 1266 | tempVec2.subVectors( a, b ); 1267 | 1268 | // add the normal to the map for all vertices 1269 | const normal = new Vector3().crossVectors( tempVec1, tempVec2 ).normalize(); 1270 | for ( let n = 0; n < 3; n ++ ) { 1271 | 1272 | const vert = verts[ n ]; 1273 | const hash = hashVertex( vert ); 1274 | if ( ! 
( hash in vertexMap ) ) { 1275 | 1276 | vertexMap[ hash ] = []; 1277 | 1278 | } 1279 | 1280 | vertexMap[ hash ].push( normal ); 1281 | 1282 | } 1283 | 1284 | } 1285 | 1286 | // average normals from all vertices that share a common location if they are within the 1287 | // provided crease threshold 1288 | const normalArray = new Float32Array( posAttr.count * 3 ); 1289 | const normAttr = new BufferAttribute( normalArray, 3, false ); 1290 | for ( let i = 0, l = posAttr.count / 3; i < l; i ++ ) { 1291 | 1292 | // get the face normal for this vertex 1293 | const i3 = 3 * i; 1294 | const a = verts[ 0 ].fromBufferAttribute( posAttr, i3 + 0 ); 1295 | const b = verts[ 1 ].fromBufferAttribute( posAttr, i3 + 1 ); 1296 | const c = verts[ 2 ].fromBufferAttribute( posAttr, i3 + 2 ); 1297 | 1298 | tempVec1.subVectors( c, b ); 1299 | tempVec2.subVectors( a, b ); 1300 | 1301 | tempNorm.crossVectors( tempVec1, tempVec2 ).normalize(); 1302 | 1303 | // average all normals that meet the threshold and set the normal value 1304 | for ( let n = 0; n < 3; n ++ ) { 1305 | 1306 | const vert = verts[ n ]; 1307 | const hash = hashVertex( vert ); 1308 | const otherNormals = vertexMap[ hash ]; 1309 | tempNorm2.set( 0, 0, 0 ); 1310 | 1311 | for ( let k = 0, lk = otherNormals.length; k < lk; k ++ ) { 1312 | 1313 | const otherNorm = otherNormals[ k ]; 1314 | if ( tempNorm.dot( otherNorm ) > creaseDot ) { 1315 | 1316 | tempNorm2.add( otherNorm ); 1317 | 1318 | } 1319 | 1320 | } 1321 | 1322 | tempNorm2.normalize(); 1323 | normAttr.setXYZ( i3 + n, tempNorm2.x, tempNorm2.y, tempNorm2.z ); 1324 | 1325 | } 1326 | 1327 | } 1328 | 1329 | resultGeometry.setAttribute( 'normal', normAttr ); 1330 | return resultGeometry; 1331 | 1332 | } 1333 | 1334 | export { 1335 | computeTangents, 1336 | computeMikkTSpaceTangents, 1337 | mergeBufferGeometries, 1338 | mergeBufferAttributes, 1339 | interleaveAttributes, 1340 | estimateBytesUsed, 1341 | mergeVertices, 1342 | toTrianglesDrawMode, 1343 | computeMorphedAttributes, 
1344 | mergeGroups, 1345 | toCreasedNormals 1346 | }; 1347 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2023 josephrocca 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /OpenCharacters/README.md: -------------------------------------------------------------------------------- 1 | This folder is just for OpenCharacters-specific code. Please ignore if OpenCharacters is not relevant to you. 
2 | -------------------------------------------------------------------------------- /OpenCharacters/animations/README.md: -------------------------------------------------------------------------------- 1 | 2 | -------------------------------------------------------------------------------- /OpenCharacters/animations/angry.fbx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/josephrocca/ChatVRM-js/cb6eacf7bd8dc137e967a4c9d84dd5af22f98714/OpenCharacters/animations/angry.fbx -------------------------------------------------------------------------------- /OpenCharacters/animations/silly_dancing.fbx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/josephrocca/ChatVRM-js/cb6eacf7bd8dc137e967a4c9d84dd5af22f98714/OpenCharacters/animations/silly_dancing.fbx -------------------------------------------------------------------------------- /OpenCharacters/dummy-audio/12.mp3: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/josephrocca/ChatVRM-js/cb6eacf7bd8dc137e967a4c9d84dd5af22f98714/OpenCharacters/dummy-audio/12.mp3 -------------------------------------------------------------------------------- /OpenCharacters/dummy-audio/24.mp3: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/josephrocca/ChatVRM-js/cb6eacf7bd8dc137e967a4c9d84dd5af22f98714/OpenCharacters/dummy-audio/24.mp3 -------------------------------------------------------------------------------- /OpenCharacters/dummy-audio/3.mp3: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/josephrocca/ChatVRM-js/cb6eacf7bd8dc137e967a4c9d84dd5af22f98714/OpenCharacters/dummy-audio/3.mp3 -------------------------------------------------------------------------------- /OpenCharacters/dummy-audio/6.mp3: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/josephrocca/ChatVRM-js/cb6eacf7bd8dc137e967a4c9d84dd5af22f98714/OpenCharacters/dummy-audio/6.mp3 -------------------------------------------------------------------------------- /OpenCharacters/dummy-audio/README.md: -------------------------------------------------------------------------------- 1 | 2 | -------------------------------------------------------------------------------- /OpenCharacters/plugin1.js: -------------------------------------------------------------------------------- 1 | let [ 2 | Viewer, 3 | prompt2, 4 | ] = await Promise.all([ 5 | import("https://cdn.jsdelivr.net/gh/josephrocca/ChatVRM-js@v0.0.27/features/vrmViewer/viewer.js").then(m => m.Viewer), 6 | import("https://cdn.jsdelivr.net/gh/josephrocca/prompt2@v0.0.8/mod.js").then(m => m.default), 7 | ]); 8 | 9 | window.onerror = async function(errorMsg, url, lineNumber, columnNumber, errorObj) { 10 | let result = await prompt2({ 11 | blah: {type:"none", html:`An error occurred:
${errorMsg}\n\nstack: ${errorObj?.stack}\n\nline: ${lineNumber}
`}, 12 | }, {cancelButtonText:"okay"}); 13 | return false; 14 | } 15 | 16 | window.viewer = new Viewer(); 17 | 18 | document.body.style.cssText = "margin:0; padding:0;"; 19 | 20 | oc.window.show(); 21 | 22 | await new Promise(r => { 23 | setTimeout(r, 1000); 24 | //window.addEventListener("DOMContentLoaded", r); 25 | }); 26 | 27 | const canvas = document.createElement("canvas"); 28 | canvas.height = window.innerHeight; 29 | canvas.width = window.innerWidth; 30 | canvas.style.cssText = "width:100vw; height:100vh; display:block;"; 31 | document.body.appendChild(canvas); 32 | 33 | let defaultVrmUrl = "https://raw.githubusercontent.com/josephrocca/ChatVRM-js/main/avatars/AvatarSample_B.vrm"; 34 | 35 | viewer.setup(canvas); 36 | await viewer.loadVrm(oc.character.customData.vrmUrl || defaultVrmUrl); 37 | 38 | canvas.addEventListener("dragover", function (event) { 39 | event.preventDefault(); 40 | }); 41 | 42 | canvas.addEventListener("drop", function (event) { 43 | event.preventDefault(); 44 | 45 | const files = event.dataTransfer?.files; 46 | if(!files) { 47 | return; 48 | } 49 | 50 | const file = files[0]; 51 | if(!file) { 52 | return; 53 | } 54 | 55 | const file_type = file.name.split(".").pop(); 56 | if(file_type === "vrm") { 57 | const blob = new Blob([file], { type: "application/octet-stream" }); 58 | const url = window.URL.createObjectURL(blob); 59 | viewer.loadVrm(url); 60 | } 61 | }); 62 | 63 | async function pluginSettings() { 64 | let originalVrmUrl = oc.character.customData.vrmUrl; 65 | let result = await prompt2({ 66 | vrmUrl: {type:"textLine", label: "Upload a VRM file to catbox.moe and put the URL here (url should end in .vrm). You can make a VRM character here, or you can download one here.", defaultValue:oc.character.customData.vrmUrl || defaultVrmUrl}, 67 | voiceAudioEnabled: {type:"select", label: "Voice audio enabled? 
(requires ElevenLabs API key)", options:[{value:"no"}, {value:"yes"}], defaultValue:oc.character.customData.voiceAudioEnabled || "no"}, 68 | elevenLabsVoiceId: {show:d=>d.voiceAudioEnabled=="yes", type:"textLine", label: "Enter an ElevenLabs voice ID:", defaultValue:oc.character.customData.elevenLabsVoiceId || "21m00Tcm4TlvDq8ikWAM"}, 69 | elevenLabsApiKey: {show:d=>d.voiceAudioEnabled=="yes", type:"textLine", label: "Enter your ElevenLabs API key (see user settings):", defaultValue:oc.character.customData.elevenLabsApiKey || ""}, 70 | }, {cancelButtonText:null}); 71 | 72 | oc.character.customData.vrmUrl = result.vrmUrl; 73 | oc.character.customData.voiceAudioEnabled = result.voiceAudioEnabled; 74 | oc.character.customData.elevenLabsVoiceId = result.elevenLabsVoiceId; 75 | oc.character.customData.elevenLabsApiKey = result.elevenLabsApiKey; 76 | 77 | if(oc.character.customData.vrmUrl !== originalVrmUrl) { 78 | viewer.loadVrm(oc.character.customData.vrmUrl); 79 | } 80 | } 81 | 82 | // button that hovers in bottom right: 83 | let settingsButton = document.createElement("button"); 84 | settingsButton.style.cssText = ` 85 | position: fixed; 86 | bottom: 0.5rem; 87 | right: 0.5rem; 88 | z-index: 100; 89 | `; 90 | settingsButton.textContent = "⚙️ settings"; 91 | settingsButton.onclick = pluginSettings; 92 | document.body.appendChild(settingsButton); 93 | 94 | // polyfill for navigator.userActivation 95 | if(!navigator.userActivation) { 96 | navigator.userActivation = {hasBeenActive:false}; 97 | let pageActivationClickHandler = (e) => { 98 | if(e.isTrusted) { 99 | navigator.userActivation.hasBeenActive = true; 100 | window.removeEventListener("click", pageActivationClickHandler); 101 | } 102 | } 103 | window.addEventListener("click", pageActivationClickHandler); 104 | } 105 | 106 | if(oc.character.customData.voiceAudioEnabled === undefined) { 107 | await pluginSettings(); 108 | } 109 | 110 | if(!navigator.userActivation.hasBeenActive) { 111 | let result = await prompt2({ 
112 | blah: {type:"none", html:"Click start to initialize character (this is needed for technical reasons)."}, 113 | }, {cancelButtonText:null, submitButtonText:"start"}); 114 | } 115 | 116 | let sentence = ""; 117 | oc.thread.on("MessageAdded", async function () { 118 | let lastMessage = oc.thread.messages.at(-1); 119 | if(lastMessage.author !== "ai") return; 120 | await textToSpeechAndActions(lastMessage.content); 121 | }); 122 | 123 | function parseSpeechActionText(text) { 124 | const regex = /(\[@(?:expression|action)=[^\]]+\])/g; 125 | const matches = text.split(regex); 126 | 127 | const chunks = []; 128 | 129 | matches.forEach(match => { 130 | if (match.startsWith("[@")) { 131 | const [property, value] = match.slice(2, -1).split("="); 132 | 133 | const chunk = {}; 134 | chunk[property] = value; 135 | chunks.push(chunk); 136 | } else { 137 | chunks.push({ text: match }); 138 | } 139 | }); 140 | 141 | return chunks; 142 | } 143 | 144 | async function textToSpeechAndActions(text) { 145 | let chunks = parseSpeechActionText(text); 146 | 147 | let volume = oc.character.customData.voiceAudioEnabled === "yes" ? 
1 : 0; 148 | let lastExpression = "neutral"; 149 | let speechActionChunks = []; 150 | for(let chunk of chunks) { 151 | if(chunk.expression) { 152 | lastExpression = chunk.expression; 153 | continue; 154 | } 155 | if(chunk.text) { 156 | let bufferPromise; 157 | if(oc.character.customData.voiceAudioEnabled === "yes") { 158 | bufferPromise = fetch(`https://api.elevenlabs.io/v1/text-to-speech/${oc.character.customData.elevenLabsVoiceId}?optimize_streaming_latency=0`, { 159 | method: "POST", 160 | headers: { 161 | "Content-Type": "application/json", 162 | "accept": "audio/mpeg", 163 | "xi-api-key": oc.character.customData.elevenLabsApiKey, 164 | }, 165 | body: JSON.stringify({ 166 | text: chunk.text, 167 | model_id: "eleven_monolingual_v1", 168 | voice_settings: { 169 | "stability": 0, 170 | "similarity_boost": 0, 171 | }, 172 | }), 173 | }).then(res => res.arrayBuffer()); 174 | } else { 175 | let numWords = chunk.text.split(" ").length / 2; 176 | let filename = roundToNearestOfSet(numWords, [3, 6, 12, 24]) + ".mp3"; 177 | bufferPromise = fetch(`https://cdn.jsdelivr.net/gh/josephrocca/ChatVRM-js@v0.0.22/OpenCharacters/dummy-audio/${filename}`).then(r => r.arrayBuffer()); 178 | } 179 | speechActionChunks.push({bufferPromise, expression:lastExpression, text:chunk.text, volume}); 180 | } 181 | } 182 | let buffers = await Promise.all(speechActionChunks.map(c => c.bufferPromise)); 183 | for(let i = 0; i < speechActionChunks.length; i++) { 184 | let chunk = speechActionChunks[i]; 185 | let buffer = buffers[i]; 186 | await viewer.model.speak(buffer, {expression:chunk.expression, volume:chunk.volume}); 187 | } 188 | 189 | // viewer.model.emoteController.playEmotion("neutral"); 190 | } 191 | 192 | function roundToNearestOfSet(num, options) { 193 | return options.reduce((a, b) => Math.abs(num - a) < Math.abs(num - b) ? 
a : b); 194 | } 195 | 196 | 197 | oc.messageRenderingPipeline.push(function({message, reader}) { 198 | if(reader === "user") message.content = message.content.replace(/(\[@(?:expression|action)=[^\]]+\])/g, ""); 199 | }); 200 | -------------------------------------------------------------------------------- /OrbitControls.js: -------------------------------------------------------------------------------- 1 | import { 2 | EventDispatcher, 3 | MOUSE, 4 | Quaternion, 5 | Spherical, 6 | TOUCH, 7 | Vector2, 8 | Vector3 9 | } from 'https://cdn.jsdelivr.net/npm/three@0.149.0/+esm'; 10 | 11 | // This set of controls performs orbiting, dollying (zooming), and panning. 12 | // Unlike TrackballControls, it maintains the "up" direction object.up (+Y by default). 13 | // 14 | // Orbit - left mouse / touch: one-finger move 15 | // Zoom - middle mouse, or mousewheel / touch: two-finger spread or squish 16 | // Pan - right mouse, or left mouse + ctrl/meta/shiftKey, or arrow keys / touch: two-finger move 17 | 18 | const _changeEvent = { type: 'change' }; 19 | const _startEvent = { type: 'start' }; 20 | const _endEvent = { type: 'end' }; 21 | 22 | class OrbitControls extends EventDispatcher { 23 | 24 | constructor( object, domElement ) { 25 | 26 | super(); 27 | 28 | this.object = object; 29 | this.domElement = domElement; 30 | this.domElement.style.touchAction = 'none'; // disable touch scroll 31 | 32 | // Set to false to disable this control 33 | this.enabled = true; 34 | 35 | // "target" sets the location of focus, where the object orbits around 36 | this.target = new Vector3(); 37 | 38 | // How far you can dolly in and out ( PerspectiveCamera only ) 39 | this.minDistance = 0; 40 | this.maxDistance = Infinity; 41 | 42 | // How far you can zoom in and out ( OrthographicCamera only ) 43 | this.minZoom = 0; 44 | this.maxZoom = Infinity; 45 | 46 | // How far you can orbit vertically, upper and lower limits. 47 | // Range is 0 to Math.PI radians. 
48 | this.minPolarAngle = 0; // radians 49 | this.maxPolarAngle = Math.PI; // radians 50 | 51 | // How far you can orbit horizontally, upper and lower limits. 52 | // If set, the interval [ min, max ] must be a sub-interval of [ - 2 PI, 2 PI ], with ( max - min < 2 PI ) 53 | this.minAzimuthAngle = - Infinity; // radians 54 | this.maxAzimuthAngle = Infinity; // radians 55 | 56 | // Set to true to enable damping (inertia) 57 | // If damping is enabled, you must call controls.update() in your animation loop 58 | this.enableDamping = false; 59 | this.dampingFactor = 0.05; 60 | 61 | // This option actually enables dollying in and out; left as "zoom" for backwards compatibility. 62 | // Set to false to disable zooming 63 | this.enableZoom = true; 64 | this.zoomSpeed = 1.0; 65 | 66 | // Set to false to disable rotating 67 | this.enableRotate = true; 68 | this.rotateSpeed = 1.0; 69 | 70 | // Set to false to disable panning 71 | this.enablePan = true; 72 | this.panSpeed = 1.0; 73 | this.screenSpacePanning = true; // if false, pan orthogonal to world-space direction camera.up 74 | this.keyPanSpeed = 7.0; // pixels moved per arrow key push 75 | 76 | // Set to true to automatically rotate around the target 77 | // If auto-rotate is enabled, you must call controls.update() in your animation loop 78 | this.autoRotate = false; 79 | this.autoRotateSpeed = 2.0; // 30 seconds per orbit when fps is 60 80 | 81 | // The four arrow keys 82 | this.keys = { LEFT: 'ArrowLeft', UP: 'ArrowUp', RIGHT: 'ArrowRight', BOTTOM: 'ArrowDown' }; 83 | 84 | // Mouse buttons 85 | this.mouseButtons = { LEFT: MOUSE.ROTATE, MIDDLE: MOUSE.DOLLY, RIGHT: MOUSE.PAN }; 86 | 87 | // Touch fingers 88 | this.touches = { ONE: TOUCH.ROTATE, TWO: TOUCH.DOLLY_PAN }; 89 | 90 | // for reset 91 | this.target0 = this.target.clone(); 92 | this.position0 = this.object.position.clone(); 93 | this.zoom0 = this.object.zoom; 94 | 95 | // the target DOM element for key events 96 | this._domElementKeyEvents = null; 97 | 98 | // 
99 | // public methods 100 | // 101 | 102 | this.getPolarAngle = function () { 103 | 104 | return spherical.phi; 105 | 106 | }; 107 | 108 | this.getAzimuthalAngle = function () { 109 | 110 | return spherical.theta; 111 | 112 | }; 113 | 114 | this.getDistance = function () { 115 | 116 | return this.object.position.distanceTo( this.target ); 117 | 118 | }; 119 | 120 | this.listenToKeyEvents = function ( domElement ) { 121 | 122 | domElement.addEventListener( 'keydown', onKeyDown ); 123 | this._domElementKeyEvents = domElement; 124 | 125 | }; 126 | 127 | this.saveState = function () { 128 | 129 | scope.target0.copy( scope.target ); 130 | scope.position0.copy( scope.object.position ); 131 | scope.zoom0 = scope.object.zoom; 132 | 133 | }; 134 | 135 | this.reset = function () { 136 | 137 | scope.target.copy( scope.target0 ); 138 | scope.object.position.copy( scope.position0 ); 139 | scope.object.zoom = scope.zoom0; 140 | 141 | scope.object.updateProjectionMatrix(); 142 | scope.dispatchEvent( _changeEvent ); 143 | 144 | scope.update(); 145 | 146 | state = STATE.NONE; 147 | 148 | }; 149 | 150 | // this method is exposed, but perhaps it would be better if we can make it private... 
151 | this.update = function () { 152 | 153 | const offset = new Vector3(); 154 | 155 | // so camera.up is the orbit axis 156 | const quat = new Quaternion().setFromUnitVectors( object.up, new Vector3( 0, 1, 0 ) ); 157 | const quatInverse = quat.clone().invert(); 158 | 159 | const lastPosition = new Vector3(); 160 | const lastQuaternion = new Quaternion(); 161 | 162 | const twoPI = 2 * Math.PI; 163 | 164 | return function update() { 165 | 166 | const position = scope.object.position; 167 | 168 | offset.copy( position ).sub( scope.target ); 169 | 170 | // rotate offset to "y-axis-is-up" space 171 | offset.applyQuaternion( quat ); 172 | 173 | // angle from z-axis around y-axis 174 | spherical.setFromVector3( offset ); 175 | 176 | if ( scope.autoRotate && state === STATE.NONE ) { 177 | 178 | rotateLeft( getAutoRotationAngle() ); 179 | 180 | } 181 | 182 | if ( scope.enableDamping ) { 183 | 184 | spherical.theta += sphericalDelta.theta * scope.dampingFactor; 185 | spherical.phi += sphericalDelta.phi * scope.dampingFactor; 186 | 187 | } else { 188 | 189 | spherical.theta += sphericalDelta.theta; 190 | spherical.phi += sphericalDelta.phi; 191 | 192 | } 193 | 194 | // restrict theta to be between desired limits 195 | 196 | let min = scope.minAzimuthAngle; 197 | let max = scope.maxAzimuthAngle; 198 | 199 | if ( isFinite( min ) && isFinite( max ) ) { 200 | 201 | if ( min < - Math.PI ) min += twoPI; else if ( min > Math.PI ) min -= twoPI; 202 | 203 | if ( max < - Math.PI ) max += twoPI; else if ( max > Math.PI ) max -= twoPI; 204 | 205 | if ( min <= max ) { 206 | 207 | spherical.theta = Math.max( min, Math.min( max, spherical.theta ) ); 208 | 209 | } else { 210 | 211 | spherical.theta = ( spherical.theta > ( min + max ) / 2 ) ? 
212 | Math.max( min, spherical.theta ) : 213 | Math.min( max, spherical.theta ); 214 | 215 | } 216 | 217 | } 218 | 219 | // restrict phi to be between desired limits 220 | spherical.phi = Math.max( scope.minPolarAngle, Math.min( scope.maxPolarAngle, spherical.phi ) ); 221 | 222 | spherical.makeSafe(); 223 | 224 | 225 | spherical.radius *= scale; 226 | 227 | // restrict radius to be between desired limits 228 | spherical.radius = Math.max( scope.minDistance, Math.min( scope.maxDistance, spherical.radius ) ); 229 | 230 | // move target to panned location 231 | 232 | if ( scope.enableDamping === true ) { 233 | 234 | scope.target.addScaledVector( panOffset, scope.dampingFactor ); 235 | 236 | } else { 237 | 238 | scope.target.add( panOffset ); 239 | 240 | } 241 | 242 | offset.setFromSpherical( spherical ); 243 | 244 | // rotate offset back to "camera-up-vector-is-up" space 245 | offset.applyQuaternion( quatInverse ); 246 | 247 | position.copy( scope.target ).add( offset ); 248 | 249 | scope.object.lookAt( scope.target ); 250 | 251 | if ( scope.enableDamping === true ) { 252 | 253 | sphericalDelta.theta *= ( 1 - scope.dampingFactor ); 254 | sphericalDelta.phi *= ( 1 - scope.dampingFactor ); 255 | 256 | panOffset.multiplyScalar( 1 - scope.dampingFactor ); 257 | 258 | } else { 259 | 260 | sphericalDelta.set( 0, 0, 0 ); 261 | 262 | panOffset.set( 0, 0, 0 ); 263 | 264 | } 265 | 266 | scale = 1; 267 | 268 | // update condition is: 269 | // min(camera displacement, camera rotation in radians)^2 > EPS 270 | // using small-angle approximation cos(x/2) = 1 - x^2 / 8 271 | 272 | if ( zoomChanged || 273 | lastPosition.distanceToSquared( scope.object.position ) > EPS || 274 | 8 * ( 1 - lastQuaternion.dot( scope.object.quaternion ) ) > EPS ) { 275 | 276 | scope.dispatchEvent( _changeEvent ); 277 | 278 | lastPosition.copy( scope.object.position ); 279 | lastQuaternion.copy( scope.object.quaternion ); 280 | zoomChanged = false; 281 | 282 | return true; 283 | 284 | } 285 | 286 | return 
false; 287 | 288 | }; 289 | 290 | }(); 291 | 292 | this.dispose = function () { 293 | 294 | scope.domElement.removeEventListener( 'contextmenu', onContextMenu ); 295 | 296 | scope.domElement.removeEventListener( 'pointerdown', onPointerDown ); 297 | scope.domElement.removeEventListener( 'pointercancel', onPointerCancel ); 298 | scope.domElement.removeEventListener( 'wheel', onMouseWheel ); 299 | 300 | scope.domElement.removeEventListener( 'pointermove', onPointerMove ); 301 | scope.domElement.removeEventListener( 'pointerup', onPointerUp ); 302 | 303 | 304 | if ( scope._domElementKeyEvents !== null ) { 305 | 306 | scope._domElementKeyEvents.removeEventListener( 'keydown', onKeyDown ); 307 | 308 | } 309 | 310 | //scope.dispatchEvent( { type: 'dispose' } ); // should this be added here? 311 | 312 | }; 313 | 314 | // 315 | // internals 316 | // 317 | 318 | const scope = this; 319 | 320 | const STATE = { 321 | NONE: - 1, 322 | ROTATE: 0, 323 | DOLLY: 1, 324 | PAN: 2, 325 | TOUCH_ROTATE: 3, 326 | TOUCH_PAN: 4, 327 | TOUCH_DOLLY_PAN: 5, 328 | TOUCH_DOLLY_ROTATE: 6 329 | }; 330 | 331 | let state = STATE.NONE; 332 | 333 | const EPS = 0.000001; 334 | 335 | // current position in spherical coordinates 336 | const spherical = new Spherical(); 337 | const sphericalDelta = new Spherical(); 338 | 339 | let scale = 1; 340 | const panOffset = new Vector3(); 341 | let zoomChanged = false; 342 | 343 | const rotateStart = new Vector2(); 344 | const rotateEnd = new Vector2(); 345 | const rotateDelta = new Vector2(); 346 | 347 | const panStart = new Vector2(); 348 | const panEnd = new Vector2(); 349 | const panDelta = new Vector2(); 350 | 351 | const dollyStart = new Vector2(); 352 | const dollyEnd = new Vector2(); 353 | const dollyDelta = new Vector2(); 354 | 355 | const pointers = []; 356 | const pointerPositions = {}; 357 | 358 | function getAutoRotationAngle() { 359 | 360 | return 2 * Math.PI / 60 / 60 * scope.autoRotateSpeed; 361 | 362 | } 363 | 364 | function getZoomScale() { 365 
| 366 | return Math.pow( 0.95, scope.zoomSpeed ); 367 | 368 | } 369 | 370 | function rotateLeft( angle ) { 371 | 372 | sphericalDelta.theta -= angle; 373 | 374 | } 375 | 376 | function rotateUp( angle ) { 377 | 378 | sphericalDelta.phi -= angle; 379 | 380 | } 381 | 382 | const panLeft = function () { 383 | 384 | const v = new Vector3(); 385 | 386 | return function panLeft( distance, objectMatrix ) { 387 | 388 | v.setFromMatrixColumn( objectMatrix, 0 ); // get X column of objectMatrix 389 | v.multiplyScalar( - distance ); 390 | 391 | panOffset.add( v ); 392 | 393 | }; 394 | 395 | }(); 396 | 397 | const panUp = function () { 398 | 399 | const v = new Vector3(); 400 | 401 | return function panUp( distance, objectMatrix ) { 402 | 403 | if ( scope.screenSpacePanning === true ) { 404 | 405 | v.setFromMatrixColumn( objectMatrix, 1 ); 406 | 407 | } else { 408 | 409 | v.setFromMatrixColumn( objectMatrix, 0 ); 410 | v.crossVectors( scope.object.up, v ); 411 | 412 | } 413 | 414 | v.multiplyScalar( distance ); 415 | 416 | panOffset.add( v ); 417 | 418 | }; 419 | 420 | }(); 421 | 422 | // deltaX and deltaY are in pixels; right and down are positive 423 | const pan = function () { 424 | 425 | const offset = new Vector3(); 426 | 427 | return function pan( deltaX, deltaY ) { 428 | 429 | const element = scope.domElement; 430 | 431 | if ( scope.object.isPerspectiveCamera ) { 432 | 433 | // perspective 434 | const position = scope.object.position; 435 | offset.copy( position ).sub( scope.target ); 436 | let targetDistance = offset.length(); 437 | 438 | // half of the fov is center to top of screen 439 | targetDistance *= Math.tan( ( scope.object.fov / 2 ) * Math.PI / 180.0 ); 440 | 441 | // we use only clientHeight here so aspect ratio does not distort speed 442 | panLeft( 2 * deltaX * targetDistance / element.clientHeight, scope.object.matrix ); 443 | panUp( 2 * deltaY * targetDistance / element.clientHeight, scope.object.matrix ); 444 | 445 | } else if ( 
scope.object.isOrthographicCamera ) { 446 | 447 | // orthographic 448 | panLeft( deltaX * ( scope.object.right - scope.object.left ) / scope.object.zoom / element.clientWidth, scope.object.matrix ); 449 | panUp( deltaY * ( scope.object.top - scope.object.bottom ) / scope.object.zoom / element.clientHeight, scope.object.matrix ); 450 | 451 | } else { 452 | 453 | // camera neither orthographic nor perspective 454 | console.warn( 'WARNING: OrbitControls.js encountered an unknown camera type - pan disabled.' ); 455 | scope.enablePan = false; 456 | 457 | } 458 | 459 | }; 460 | 461 | }(); 462 | 463 | function dollyOut( dollyScale ) { 464 | 465 | if ( scope.object.isPerspectiveCamera ) { 466 | 467 | scale /= dollyScale; 468 | 469 | } else if ( scope.object.isOrthographicCamera ) { 470 | 471 | scope.object.zoom = Math.max( scope.minZoom, Math.min( scope.maxZoom, scope.object.zoom * dollyScale ) ); 472 | scope.object.updateProjectionMatrix(); 473 | zoomChanged = true; 474 | 475 | } else { 476 | 477 | console.warn( 'WARNING: OrbitControls.js encountered an unknown camera type - dolly/zoom disabled.' ); 478 | scope.enableZoom = false; 479 | 480 | } 481 | 482 | } 483 | 484 | function dollyIn( dollyScale ) { 485 | 486 | if ( scope.object.isPerspectiveCamera ) { 487 | 488 | scale *= dollyScale; 489 | 490 | } else if ( scope.object.isOrthographicCamera ) { 491 | 492 | scope.object.zoom = Math.max( scope.minZoom, Math.min( scope.maxZoom, scope.object.zoom / dollyScale ) ); 493 | scope.object.updateProjectionMatrix(); 494 | zoomChanged = true; 495 | 496 | } else { 497 | 498 | console.warn( 'WARNING: OrbitControls.js encountered an unknown camera type - dolly/zoom disabled.' 
); 499 | scope.enableZoom = false; 500 | 501 | } 502 | 503 | } 504 | 505 | // 506 | // event callbacks - update the object state 507 | // 508 | 509 | function handleMouseDownRotate( event ) { 510 | 511 | rotateStart.set( event.clientX, event.clientY ); 512 | 513 | } 514 | 515 | function handleMouseDownDolly( event ) { 516 | 517 | dollyStart.set( event.clientX, event.clientY ); 518 | 519 | } 520 | 521 | function handleMouseDownPan( event ) { 522 | 523 | panStart.set( event.clientX, event.clientY ); 524 | 525 | } 526 | 527 | function handleMouseMoveRotate( event ) { 528 | 529 | rotateEnd.set( event.clientX, event.clientY ); 530 | 531 | rotateDelta.subVectors( rotateEnd, rotateStart ).multiplyScalar( scope.rotateSpeed ); 532 | 533 | const element = scope.domElement; 534 | 535 | rotateLeft( 2 * Math.PI * rotateDelta.x / element.clientHeight ); // yes, height 536 | 537 | rotateUp( 2 * Math.PI * rotateDelta.y / element.clientHeight ); 538 | 539 | rotateStart.copy( rotateEnd ); 540 | 541 | scope.update(); 542 | 543 | } 544 | 545 | function handleMouseMoveDolly( event ) { 546 | 547 | dollyEnd.set( event.clientX, event.clientY ); 548 | 549 | dollyDelta.subVectors( dollyEnd, dollyStart ); 550 | 551 | if ( dollyDelta.y > 0 ) { 552 | 553 | dollyOut( getZoomScale() ); 554 | 555 | } else if ( dollyDelta.y < 0 ) { 556 | 557 | dollyIn( getZoomScale() ); 558 | 559 | } 560 | 561 | dollyStart.copy( dollyEnd ); 562 | 563 | scope.update(); 564 | 565 | } 566 | 567 | function handleMouseMovePan( event ) { 568 | 569 | panEnd.set( event.clientX, event.clientY ); 570 | 571 | panDelta.subVectors( panEnd, panStart ).multiplyScalar( scope.panSpeed ); 572 | 573 | pan( panDelta.x, panDelta.y ); 574 | 575 | panStart.copy( panEnd ); 576 | 577 | scope.update(); 578 | 579 | } 580 | 581 | function handleMouseWheel( event ) { 582 | 583 | if ( event.deltaY < 0 ) { 584 | 585 | dollyIn( getZoomScale() ); 586 | 587 | } else if ( event.deltaY > 0 ) { 588 | 589 | dollyOut( getZoomScale() ); 590 | 591 | } 
592 | 593 | scope.update(); 594 | 595 | } 596 | 597 | function handleKeyDown( event ) { 598 | 599 | let needsUpdate = false; 600 | 601 | switch ( event.code ) { 602 | 603 | case scope.keys.UP: 604 | 605 | if ( event.ctrlKey || event.metaKey || event.shiftKey ) { 606 | 607 | rotateUp( 2 * Math.PI * scope.rotateSpeed / scope.domElement.clientHeight ); 608 | 609 | } else { 610 | 611 | pan( 0, scope.keyPanSpeed ); 612 | 613 | } 614 | 615 | needsUpdate = true; 616 | break; 617 | 618 | case scope.keys.BOTTOM: 619 | 620 | if ( event.ctrlKey || event.metaKey || event.shiftKey ) { 621 | 622 | rotateUp( - 2 * Math.PI * scope.rotateSpeed / scope.domElement.clientHeight ); 623 | 624 | } else { 625 | 626 | pan( 0, - scope.keyPanSpeed ); 627 | 628 | } 629 | 630 | needsUpdate = true; 631 | break; 632 | 633 | case scope.keys.LEFT: 634 | 635 | if ( event.ctrlKey || event.metaKey || event.shiftKey ) { 636 | 637 | rotateLeft( 2 * Math.PI * scope.rotateSpeed / scope.domElement.clientHeight ); 638 | 639 | } else { 640 | 641 | pan( scope.keyPanSpeed, 0 ); 642 | 643 | } 644 | 645 | needsUpdate = true; 646 | break; 647 | 648 | case scope.keys.RIGHT: 649 | 650 | if ( event.ctrlKey || event.metaKey || event.shiftKey ) { 651 | 652 | rotateLeft( - 2 * Math.PI * scope.rotateSpeed / scope.domElement.clientHeight ); 653 | 654 | } else { 655 | 656 | pan( - scope.keyPanSpeed, 0 ); 657 | 658 | } 659 | 660 | needsUpdate = true; 661 | break; 662 | 663 | } 664 | 665 | if ( needsUpdate ) { 666 | 667 | // prevent the browser from scrolling on cursor keys 668 | event.preventDefault(); 669 | 670 | scope.update(); 671 | 672 | } 673 | 674 | 675 | } 676 | 677 | function handleTouchStartRotate() { 678 | 679 | if ( pointers.length === 1 ) { 680 | 681 | rotateStart.set( pointers[ 0 ].pageX, pointers[ 0 ].pageY ); 682 | 683 | } else { 684 | 685 | const x = 0.5 * ( pointers[ 0 ].pageX + pointers[ 1 ].pageX ); 686 | const y = 0.5 * ( pointers[ 0 ].pageY + pointers[ 1 ].pageY ); 687 | 688 | rotateStart.set( x, y ); 
689 | 690 | } 691 | 692 | } 693 | 694 | function handleTouchStartPan() { 695 | 696 | if ( pointers.length === 1 ) { 697 | 698 | panStart.set( pointers[ 0 ].pageX, pointers[ 0 ].pageY ); 699 | 700 | } else { 701 | 702 | const x = 0.5 * ( pointers[ 0 ].pageX + pointers[ 1 ].pageX ); 703 | const y = 0.5 * ( pointers[ 0 ].pageY + pointers[ 1 ].pageY ); 704 | 705 | panStart.set( x, y ); 706 | 707 | } 708 | 709 | } 710 | 711 | function handleTouchStartDolly() { 712 | 713 | const dx = pointers[ 0 ].pageX - pointers[ 1 ].pageX; 714 | const dy = pointers[ 0 ].pageY - pointers[ 1 ].pageY; 715 | 716 | const distance = Math.sqrt( dx * dx + dy * dy ); 717 | 718 | dollyStart.set( 0, distance ); 719 | 720 | } 721 | 722 | function handleTouchStartDollyPan() { 723 | 724 | if ( scope.enableZoom ) handleTouchStartDolly(); 725 | 726 | if ( scope.enablePan ) handleTouchStartPan(); 727 | 728 | } 729 | 730 | function handleTouchStartDollyRotate() { 731 | 732 | if ( scope.enableZoom ) handleTouchStartDolly(); 733 | 734 | if ( scope.enableRotate ) handleTouchStartRotate(); 735 | 736 | } 737 | 738 | function handleTouchMoveRotate( event ) { 739 | 740 | if ( pointers.length == 1 ) { 741 | 742 | rotateEnd.set( event.pageX, event.pageY ); 743 | 744 | } else { 745 | 746 | const position = getSecondPointerPosition( event ); 747 | 748 | const x = 0.5 * ( event.pageX + position.x ); 749 | const y = 0.5 * ( event.pageY + position.y ); 750 | 751 | rotateEnd.set( x, y ); 752 | 753 | } 754 | 755 | rotateDelta.subVectors( rotateEnd, rotateStart ).multiplyScalar( scope.rotateSpeed ); 756 | 757 | const element = scope.domElement; 758 | 759 | rotateLeft( 2 * Math.PI * rotateDelta.x / element.clientHeight ); // yes, height 760 | 761 | rotateUp( 2 * Math.PI * rotateDelta.y / element.clientHeight ); 762 | 763 | rotateStart.copy( rotateEnd ); 764 | 765 | } 766 | 767 | function handleTouchMovePan( event ) { 768 | 769 | if ( pointers.length === 1 ) { 770 | 771 | panEnd.set( event.pageX, event.pageY ); 772 | 773 
| } else { 774 | 775 | const position = getSecondPointerPosition( event ); 776 | 777 | const x = 0.5 * ( event.pageX + position.x ); 778 | const y = 0.5 * ( event.pageY + position.y ); 779 | 780 | panEnd.set( x, y ); 781 | 782 | } 783 | 784 | panDelta.subVectors( panEnd, panStart ).multiplyScalar( scope.panSpeed ); 785 | 786 | pan( panDelta.x, panDelta.y ); 787 | 788 | panStart.copy( panEnd ); 789 | 790 | } 791 | 792 | function handleTouchMoveDolly( event ) { 793 | 794 | const position = getSecondPointerPosition( event ); 795 | 796 | const dx = event.pageX - position.x; 797 | const dy = event.pageY - position.y; 798 | 799 | const distance = Math.sqrt( dx * dx + dy * dy ); 800 | 801 | dollyEnd.set( 0, distance ); 802 | 803 | dollyDelta.set( 0, Math.pow( dollyEnd.y / dollyStart.y, scope.zoomSpeed ) ); 804 | 805 | dollyOut( dollyDelta.y ); 806 | 807 | dollyStart.copy( dollyEnd ); 808 | 809 | } 810 | 811 | function handleTouchMoveDollyPan( event ) { 812 | 813 | if ( scope.enableZoom ) handleTouchMoveDolly( event ); 814 | 815 | if ( scope.enablePan ) handleTouchMovePan( event ); 816 | 817 | } 818 | 819 | function handleTouchMoveDollyRotate( event ) { 820 | 821 | if ( scope.enableZoom ) handleTouchMoveDolly( event ); 822 | 823 | if ( scope.enableRotate ) handleTouchMoveRotate( event ); 824 | 825 | } 826 | 827 | // 828 | // event handlers - FSM: listen for events and reset state 829 | // 830 | 831 | function onPointerDown( event ) { 832 | 833 | if ( scope.enabled === false ) return; 834 | 835 | if ( pointers.length === 0 ) { 836 | 837 | scope.domElement.setPointerCapture( event.pointerId ); 838 | 839 | scope.domElement.addEventListener( 'pointermove', onPointerMove ); 840 | scope.domElement.addEventListener( 'pointerup', onPointerUp ); 841 | 842 | } 843 | 844 | // 845 | 846 | addPointer( event ); 847 | 848 | if ( event.pointerType === 'touch' ) { 849 | 850 | onTouchStart( event ); 851 | 852 | } else { 853 | 854 | onMouseDown( event ); 855 | 856 | } 857 | 858 | } 859 | 860 
| function onPointerMove( event ) { 861 | 862 | if ( scope.enabled === false ) return; 863 | 864 | if ( event.pointerType === 'touch' ) { 865 | 866 | onTouchMove( event ); 867 | 868 | } else { 869 | 870 | onMouseMove( event ); 871 | 872 | } 873 | 874 | } 875 | 876 | function onPointerUp( event ) { 877 | 878 | removePointer( event ); 879 | 880 | if ( pointers.length === 0 ) { 881 | 882 | scope.domElement.releasePointerCapture( event.pointerId ); 883 | 884 | scope.domElement.removeEventListener( 'pointermove', onPointerMove ); 885 | scope.domElement.removeEventListener( 'pointerup', onPointerUp ); 886 | 887 | } 888 | 889 | scope.dispatchEvent( _endEvent ); 890 | 891 | state = STATE.NONE; 892 | 893 | } 894 | 895 | function onPointerCancel( event ) { 896 | 897 | removePointer( event ); 898 | 899 | } 900 | 901 | function onMouseDown( event ) { 902 | 903 | let mouseAction; 904 | 905 | switch ( event.button ) { 906 | 907 | case 0: 908 | 909 | mouseAction = scope.mouseButtons.LEFT; 910 | break; 911 | 912 | case 1: 913 | 914 | mouseAction = scope.mouseButtons.MIDDLE; 915 | break; 916 | 917 | case 2: 918 | 919 | mouseAction = scope.mouseButtons.RIGHT; 920 | break; 921 | 922 | default: 923 | 924 | mouseAction = - 1; 925 | 926 | } 927 | 928 | switch ( mouseAction ) { 929 | 930 | case MOUSE.DOLLY: 931 | 932 | if ( scope.enableZoom === false ) return; 933 | 934 | handleMouseDownDolly( event ); 935 | 936 | state = STATE.DOLLY; 937 | 938 | break; 939 | 940 | case MOUSE.ROTATE: 941 | 942 | if ( event.ctrlKey || event.metaKey || event.shiftKey ) { 943 | 944 | if ( scope.enablePan === false ) return; 945 | 946 | handleMouseDownPan( event ); 947 | 948 | state = STATE.PAN; 949 | 950 | } else { 951 | 952 | if ( scope.enableRotate === false ) return; 953 | 954 | handleMouseDownRotate( event ); 955 | 956 | state = STATE.ROTATE; 957 | 958 | } 959 | 960 | break; 961 | 962 | case MOUSE.PAN: 963 | 964 | if ( event.ctrlKey || event.metaKey || event.shiftKey ) { 965 | 966 | if ( 
scope.enableRotate === false ) return; 967 | 968 | handleMouseDownRotate( event ); 969 | 970 | state = STATE.ROTATE; 971 | 972 | } else { 973 | 974 | if ( scope.enablePan === false ) return; 975 | 976 | handleMouseDownPan( event ); 977 | 978 | state = STATE.PAN; 979 | 980 | } 981 | 982 | break; 983 | 984 | default: 985 | 986 | state = STATE.NONE; 987 | 988 | } 989 | 990 | if ( state !== STATE.NONE ) { 991 | 992 | scope.dispatchEvent( _startEvent ); 993 | 994 | } 995 | 996 | } 997 | 998 | function onMouseMove( event ) { 999 | 1000 | switch ( state ) { 1001 | 1002 | case STATE.ROTATE: 1003 | 1004 | if ( scope.enableRotate === false ) return; 1005 | 1006 | handleMouseMoveRotate( event ); 1007 | 1008 | break; 1009 | 1010 | case STATE.DOLLY: 1011 | 1012 | if ( scope.enableZoom === false ) return; 1013 | 1014 | handleMouseMoveDolly( event ); 1015 | 1016 | break; 1017 | 1018 | case STATE.PAN: 1019 | 1020 | if ( scope.enablePan === false ) return; 1021 | 1022 | handleMouseMovePan( event ); 1023 | 1024 | break; 1025 | 1026 | } 1027 | 1028 | } 1029 | 1030 | function onMouseWheel( event ) { 1031 | 1032 | if ( scope.enabled === false || scope.enableZoom === false || state !== STATE.NONE ) return; 1033 | 1034 | event.preventDefault(); 1035 | 1036 | scope.dispatchEvent( _startEvent ); 1037 | 1038 | handleMouseWheel( event ); 1039 | 1040 | scope.dispatchEvent( _endEvent ); 1041 | 1042 | } 1043 | 1044 | function onKeyDown( event ) { 1045 | 1046 | if ( scope.enabled === false || scope.enablePan === false ) return; 1047 | 1048 | handleKeyDown( event ); 1049 | 1050 | } 1051 | 1052 | function onTouchStart( event ) { 1053 | 1054 | trackPointer( event ); 1055 | 1056 | switch ( pointers.length ) { 1057 | 1058 | case 1: 1059 | 1060 | switch ( scope.touches.ONE ) { 1061 | 1062 | case TOUCH.ROTATE: 1063 | 1064 | if ( scope.enableRotate === false ) return; 1065 | 1066 | handleTouchStartRotate(); 1067 | 1068 | state = STATE.TOUCH_ROTATE; 1069 | 1070 | break; 1071 | 1072 | case TOUCH.PAN: 1073 
| 1074 | if ( scope.enablePan === false ) return; 1075 | 1076 | handleTouchStartPan(); 1077 | 1078 | state = STATE.TOUCH_PAN; 1079 | 1080 | break; 1081 | 1082 | default: 1083 | 1084 | state = STATE.NONE; 1085 | 1086 | } 1087 | 1088 | break; 1089 | 1090 | case 2: 1091 | 1092 | switch ( scope.touches.TWO ) { 1093 | 1094 | case TOUCH.DOLLY_PAN: 1095 | 1096 | if ( scope.enableZoom === false && scope.enablePan === false ) return; 1097 | 1098 | handleTouchStartDollyPan(); 1099 | 1100 | state = STATE.TOUCH_DOLLY_PAN; 1101 | 1102 | break; 1103 | 1104 | case TOUCH.DOLLY_ROTATE: 1105 | 1106 | if ( scope.enableZoom === false && scope.enableRotate === false ) return; 1107 | 1108 | handleTouchStartDollyRotate(); 1109 | 1110 | state = STATE.TOUCH_DOLLY_ROTATE; 1111 | 1112 | break; 1113 | 1114 | default: 1115 | 1116 | state = STATE.NONE; 1117 | 1118 | } 1119 | 1120 | break; 1121 | 1122 | default: 1123 | 1124 | state = STATE.NONE; 1125 | 1126 | } 1127 | 1128 | if ( state !== STATE.NONE ) { 1129 | 1130 | scope.dispatchEvent( _startEvent ); 1131 | 1132 | } 1133 | 1134 | } 1135 | 1136 | function onTouchMove( event ) { 1137 | 1138 | trackPointer( event ); 1139 | 1140 | switch ( state ) { 1141 | 1142 | case STATE.TOUCH_ROTATE: 1143 | 1144 | if ( scope.enableRotate === false ) return; 1145 | 1146 | handleTouchMoveRotate( event ); 1147 | 1148 | scope.update(); 1149 | 1150 | break; 1151 | 1152 | case STATE.TOUCH_PAN: 1153 | 1154 | if ( scope.enablePan === false ) return; 1155 | 1156 | handleTouchMovePan( event ); 1157 | 1158 | scope.update(); 1159 | 1160 | break; 1161 | 1162 | case STATE.TOUCH_DOLLY_PAN: 1163 | 1164 | if ( scope.enableZoom === false && scope.enablePan === false ) return; 1165 | 1166 | handleTouchMoveDollyPan( event ); 1167 | 1168 | scope.update(); 1169 | 1170 | break; 1171 | 1172 | case STATE.TOUCH_DOLLY_ROTATE: 1173 | 1174 | if ( scope.enableZoom === false && scope.enableRotate === false ) return; 1175 | 1176 | handleTouchMoveDollyRotate( event ); 1177 | 1178 | 
scope.update(); 1179 | 1180 | break; 1181 | 1182 | default: 1183 | 1184 | state = STATE.NONE; 1185 | 1186 | } 1187 | 1188 | } 1189 | 1190 | function onContextMenu( event ) { 1191 | 1192 | if ( scope.enabled === false ) return; 1193 | 1194 | event.preventDefault(); 1195 | 1196 | } 1197 | 1198 | function addPointer( event ) { 1199 | 1200 | pointers.push( event ); 1201 | 1202 | } 1203 | 1204 | function removePointer( event ) { 1205 | 1206 | delete pointerPositions[ event.pointerId ]; 1207 | 1208 | for ( let i = 0; i < pointers.length; i ++ ) { 1209 | 1210 | if ( pointers[ i ].pointerId == event.pointerId ) { 1211 | 1212 | pointers.splice( i, 1 ); 1213 | return; 1214 | 1215 | } 1216 | 1217 | } 1218 | 1219 | } 1220 | 1221 | function trackPointer( event ) { 1222 | 1223 | let position = pointerPositions[ event.pointerId ]; 1224 | 1225 | if ( position === undefined ) { 1226 | 1227 | position = new Vector2(); 1228 | pointerPositions[ event.pointerId ] = position; 1229 | 1230 | } 1231 | 1232 | position.set( event.pageX, event.pageY ); 1233 | 1234 | } 1235 | 1236 | function getSecondPointerPosition( event ) { 1237 | 1238 | const pointer = ( event.pointerId === pointers[ 0 ].pointerId ) ? pointers[ 1 ] : pointers[ 0 ]; 1239 | 1240 | return pointerPositions[ pointer.pointerId ]; 1241 | 1242 | } 1243 | 1244 | // 1245 | 1246 | scope.domElement.addEventListener( 'contextmenu', onContextMenu ); 1247 | 1248 | scope.domElement.addEventListener( 'pointerdown', onPointerDown ); 1249 | scope.domElement.addEventListener( 'pointercancel', onPointerCancel ); 1250 | scope.domElement.addEventListener( 'wheel', onMouseWheel, { passive: false } ); 1251 | 1252 | // force an update at start 1253 | 1254 | this.update(); 1255 | 1256 | } 1257 | 1258 | } 1259 | 1260 | 1261 | // This set of controls performs orbiting, dollying (zooming), and panning. 1262 | // Unlike TrackballControls, it maintains the "up" direction object.up (+Y by default). 
1263 | // This is very similar to OrbitControls, another set of touch behavior 1264 | // 1265 | // Orbit - right mouse, or left mouse + ctrl/meta/shiftKey / touch: two-finger rotate 1266 | // Zoom - middle mouse, or mousewheel / touch: two-finger spread or squish 1267 | // Pan - left mouse, or arrow keys / touch: one-finger move 1268 | 1269 | class MapControls extends OrbitControls { 1270 | 1271 | constructor( object, domElement ) { 1272 | 1273 | super( object, domElement ); 1274 | 1275 | this.screenSpacePanning = false; // pan orthogonal to world-space direction camera.up 1276 | 1277 | this.mouseButtons.LEFT = MOUSE.PAN; 1278 | this.mouseButtons.RIGHT = MOUSE.ROTATE; 1279 | 1280 | this.touches.ONE = TOUCH.PAN; 1281 | this.touches.TWO = TOUCH.DOLLY_ROTATE; 1282 | 1283 | } 1284 | 1285 | } 1286 | 1287 | export { OrbitControls, MapControls }; 1288 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # ChatVRM Viewer (JS) 2 | A JS conversion/adaptation of parts of the [ChatVRM](https://github.com/pixiv/ChatVRM) (TypeScript) code for use in [OpenCharacters](https://github.com/josephrocca/OpenCharacters), but can be used outside of OpenCharacters as shown with the code below. I've extracted just the VRM "speaking character" viewer from the web app. 
3 | 4 | ```js 5 | // setup viewer: 6 | const Viewer = await import("https://cdn.jsdelivr.net/gh/josephrocca/ChatVRM-js@v0.0.28/features/vrmViewer/viewer.js").then(m => m.Viewer); 7 | window.viewer = new Viewer(); 8 | 9 | // add canvas 10 | const canvas = document.createElement("canvas"); 11 | canvas.height = window.innerHeight; 12 | canvas.width = window.innerWidth; 13 | document.body.appendChild(canvas); 14 | canvas.style.cssText = "width:100vw; height:100vh; display:block;"; 15 | document.body.style.cssText = "margin:0; padding:0;"; 16 | 17 | // link viewer to canvas: 18 | viewer.setup(canvas); 19 | 20 | // load VRM character file: 21 | await viewer.loadVrm("https://raw.githubusercontent.com/josephrocca/ChatVRM-js/main/avatars/AvatarSample_B.vrm"); 22 | 23 | // Change emotion / facial expression: 24 | viewer.model.emoteController.playEmotion("happy"); // Valid expressions: neutral, happy, angry, sad, relaxed 25 | 26 | // Play animation (download FBX animations from Mixamo.com): 27 | await viewer.model.loadAnimation("https://cdn.jsdelivr.net/gh/josephrocca/ChatVRM-js@v0.0.28/OpenCharacters/animations/silly_dancing.fbx"); 28 | 29 | // Wait for user to interact with the page before trying to play audio 30 | if(!navigator.userActivation?.hasBeenActive) { 31 | await new Promise(resolve => window.addEventListener("click", resolve, {once:true})); 32 | } 33 | 34 | // Speak: 35 | let arrayBuffer = await fetch("https://cdn.jsdelivr.net/gh/josephrocca/ChatVRM-js@v0.0.22/OpenCharacters/dummy-audio/12.mp3").then(r => r.arrayBuffer()); 36 | await viewer.model.speak(arrayBuffer, {expression:"happy", volume:0}); // here i set volume to zero because this is just dummy audio - but you can e.g. 
use elevenlabs, or whatever, of course 37 | ``` 38 | 39 | Example of loading a new VRM file when dragged-and-dropped on the page: 40 | ```js 41 | canvas.addEventListener("dragover", function (event) { 42 | event.preventDefault(); 43 | }); 44 | canvas.addEventListener("drop", async function (event) { 45 | event.preventDefault(); 46 | const file = event.dataTransfer?.files?.[0]; 47 | if(!file) return; 48 | const file_type = file.name.split(".").pop(); 49 | if(file_type === "vrm") { 50 | const blob = new Blob([file], { type: "application/octet-stream" }); 51 | const url = window.URL.createObjectURL(blob); 52 | await viewer.loadVrm(url); 53 | } 54 | }); 55 | ``` 56 | -------------------------------------------------------------------------------- /avatars/5575583359519712199.vrm: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/josephrocca/ChatVRM-js/cb6eacf7bd8dc137e967a4c9d84dd5af22f98714/avatars/5575583359519712199.vrm -------------------------------------------------------------------------------- /avatars/6246813290354663411.vrm: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/josephrocca/ChatVRM-js/cb6eacf7bd8dc137e967a4c9d84dd5af22f98714/avatars/6246813290354663411.vrm -------------------------------------------------------------------------------- /avatars/AvatarSample_B.vrm: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/josephrocca/ChatVRM-js/cb6eacf7bd8dc137e967a4c9d84dd5af22f98714/avatars/AvatarSample_B.vrm -------------------------------------------------------------------------------- /avatars/README.md: -------------------------------------------------------------------------------- 1 | This folder just contains a couple of example VRM files. 
You can download more here: 2 | 3 | https://hub.vroid.com/en/models?is_other_users_available=1&is_downloadable=1 4 | 5 | And you can make your own VRM characters here: 6 | 7 | https://vroid.com/en/studio 8 | -------------------------------------------------------------------------------- /features/emoteController/autoBlink.js: -------------------------------------------------------------------------------- 1 | import { VRMExpressionManager } from "https://cdn.jsdelivr.net/npm/@pixiv/three-vrm@1.0.9/+esm"; 2 | import { BLINK_CLOSE_MAX, BLINK_OPEN_MAX } from "./emoteConstants.js"; 3 | 4 | /** 5 | * 自動瞬きを制御するクラス 6 | */ 7 | export class AutoBlink { 8 | // private _expressionManager: VRMExpressionManager; 9 | // private _remainingTime: number; 10 | // private _isOpen: boolean; 11 | // private _isAutoBlink: boolean; 12 | 13 | constructor(expressionManager) { 14 | this._expressionManager = expressionManager; 15 | this._remainingTime = 0; 16 | this._isAutoBlink = true; 17 | this._isOpen = true; 18 | } 19 | 20 | /** 21 | * 自動瞬きをON/OFFする。 22 | * 23 | * 目を閉じている(blinkが1の)時に感情表現を入れてしまうと不自然になるので、 24 | * 目が開くまでの秒を返し、その時間待ってから感情表現を適用する。 25 | * @param isAuto 26 | * @returns 目が開くまでの秒 27 | */ 28 | setEnable(isAuto) { 29 | this._isAutoBlink = isAuto; 30 | 31 | // 目が閉じている場合、目が開くまでの時間を返す 32 | if (!this._isOpen) { 33 | return this._remainingTime; 34 | } 35 | 36 | return 0; 37 | } 38 | 39 | update(delta) { 40 | if (this._remainingTime > 0) { 41 | this._remainingTime -= delta; 42 | return; 43 | } 44 | 45 | if (this._isOpen && this._isAutoBlink) { 46 | this.close(); 47 | return; 48 | } 49 | 50 | this.open(); 51 | } 52 | 53 | close() { 54 | this._isOpen = false; 55 | this._remainingTime = BLINK_CLOSE_MAX; 56 | this._expressionManager.setValue("blink", 1); 57 | } 58 | 59 | open() { 60 | this._isOpen = true; 61 | this._remainingTime = BLINK_OPEN_MAX; 62 | this._expressionManager.setValue("blink", 0); 63 | } 64 | } 65 | 
-------------------------------------------------------------------------------- /features/emoteController/autoLookAt.js: -------------------------------------------------------------------------------- 1 | import * as THREE from "https://cdn.jsdelivr.net/npm/three@0.149.0/+esm"; 2 | 3 | /** 4 | * 目線を制御するクラス 5 | * 6 | * サッケードはVRMLookAtSmootherの中でやっているので、 7 | * より目線を大きく動かしたい場合はここに実装する。 8 | */ 9 | export class AutoLookAt { 10 | // private _lookAtTarget: THREE.Object3D; 11 | constructor(vrm, camera) { 12 | this._lookAtTarget = new THREE.Object3D(); 13 | camera.add(this._lookAtTarget); 14 | 15 | if (vrm.lookAt) vrm.lookAt.target = this._lookAtTarget; 16 | } 17 | } 18 | -------------------------------------------------------------------------------- /features/emoteController/emoteConstants.js: -------------------------------------------------------------------------------- 1 | // 瞬きで目を閉じている時間(sec) 2 | export const BLINK_CLOSE_MAX = 0.12; 3 | // 瞬きで目を開いている時間(sec) 4 | export const BLINK_OPEN_MAX = 5; 5 | -------------------------------------------------------------------------------- /features/emoteController/emoteController.js: -------------------------------------------------------------------------------- 1 | import { ExpressionController } from "./expressionController.js"; 2 | 3 | /** 4 | * 感情表現としてExpressionとMotionを操作する為のクラス 5 | * デモにはExpressionのみが含まれています 6 | */ 7 | export class EmoteController { 8 | // private _expressionController: ExpressionController; 9 | 10 | constructor(vrm, camera) { 11 | this._expressionController = new ExpressionController(vrm, camera); 12 | } 13 | 14 | playEmotion(preset) { 15 | this._expressionController.playEmotion(preset); 16 | } 17 | 18 | lipSync(preset, value) { 19 | this._expressionController.lipSync(preset, value); 20 | } 21 | 22 | update(delta) { 23 | this._expressionController.update(delta); 24 | } 25 | } 26 | -------------------------------------------------------------------------------- 
/features/emoteController/expressionController.js: -------------------------------------------------------------------------------- 1 | import { AutoLookAt } from "./autoLookAt.js"; 2 | import { AutoBlink } from "./autoBlink.js"; 3 | 4 | /** 5 | * Expressionを管理するクラス 6 | * 7 | * 主に前の表情を保持しておいて次の表情を適用する際に0に戻す作業や、 8 | * 前の表情が終わるまで待ってから表情適用する役割を持っている。 9 | */ 10 | export class ExpressionController { 11 | // private _autoLookAt: AutoLookAt; 12 | // private _autoBlink?: AutoBlink; 13 | // private _expressionManager?: VRMExpressionManager; 14 | // private _currentEmotion: VRMExpressionPresetName; 15 | // private _currentLipSync: { 16 | // preset: VRMExpressionPresetName; 17 | // value: number; 18 | // } | null; 19 | constructor(vrm, camera) { 20 | this._autoLookAt = new AutoLookAt(vrm, camera); 21 | this._currentEmotion = "neutral"; 22 | this._currentLipSync = null; 23 | if (vrm.expressionManager) { 24 | this._expressionManager = vrm.expressionManager; 25 | this._autoBlink = new AutoBlink(vrm.expressionManager); 26 | } 27 | } 28 | 29 | playEmotion(preset) { 30 | if (this._currentEmotion != "neutral") { 31 | this._expressionManager?.setValue(this._currentEmotion, 0); 32 | } 33 | 34 | if (preset == "neutral") { 35 | this._autoBlink?.setEnable(true); 36 | this._currentEmotion = preset; 37 | return; 38 | } 39 | 40 | const t = this._autoBlink?.setEnable(false) || 0; 41 | this._currentEmotion = preset; 42 | setTimeout(() => { 43 | this._expressionManager?.setValue(preset, 1); 44 | }, t * 1000); 45 | } 46 | 47 | lipSync(preset, value) { 48 | if (this._currentLipSync) { 49 | this._expressionManager?.setValue(this._currentLipSync.preset, 0); 50 | } 51 | this._currentLipSync = { 52 | preset, 53 | value, 54 | }; 55 | } 56 | 57 | update(delta) { 58 | if (this._autoBlink) { 59 | this._autoBlink.update(delta); 60 | } 61 | 62 | if (this._currentLipSync) { 63 | const weight = 64 | this._currentEmotion === "neutral" 65 | ? 
this._currentLipSync.value * 0.5 66 | : this._currentLipSync.value * 0.25; 67 | this._expressionManager?.setValue(this._currentLipSync.preset, weight); 68 | } 69 | } 70 | } 71 | -------------------------------------------------------------------------------- /features/lipSync/lipSync.js: -------------------------------------------------------------------------------- 1 | const TIME_DOMAIN_DATA_LENGTH = 2048; 2 | 3 | export class LipSync { 4 | // public readonly audio: AudioContext; 5 | // public readonly analyser: AnalyserNode; 6 | // public readonly timeDomainData: Float32Array; 7 | 8 | constructor(audio) { 9 | this.audio = audio; 10 | 11 | this.analyser = audio.createAnalyser(); 12 | this.timeDomainData = new Float32Array(TIME_DOMAIN_DATA_LENGTH); 13 | } 14 | 15 | update() { 16 | this.analyser.getFloatTimeDomainData(this.timeDomainData); 17 | 18 | let volume = 0.0; 19 | for (let i = 0; i < TIME_DOMAIN_DATA_LENGTH; i++) { 20 | volume = Math.max(volume, Math.abs(this.timeDomainData[i])); 21 | } 22 | 23 | // cook 24 | volume = 1 / (1 + Math.exp(-45 * volume + 5)); 25 | if (volume < 0.1) volume = 0; 26 | 27 | return { 28 | volume, 29 | }; 30 | } 31 | 32 | async playFromArrayBuffer(buffer, onEnded, {volume=1}={}) { 33 | const audioBuffer = await this.audio.decodeAudioData(buffer); 34 | 35 | const bufferSource = this.audio.createBufferSource(); 36 | bufferSource.buffer = audioBuffer; 37 | 38 | // Create a gain node to control the volume 39 | const gainNode = this.audio.createGain(); 40 | gainNode.gain.value = volume; 41 | 42 | // Connect the bufferSource to the gainNode and the gainNode to the destination 43 | bufferSource.connect(gainNode); 44 | gainNode.connect(this.audio.destination); 45 | 46 | // Connect the bufferSource directly to the analyser to get raw audio data 47 | bufferSource.connect(this.analyser); 48 | 49 | bufferSource.start(); 50 | if (onEnded) { 51 | bufferSource.addEventListener("ended", onEnded); 52 | } 53 | } 54 | 55 | async playFromURL(url, 
onEnded) { 56 | const res = await fetch(url); 57 | const buffer = await res.arrayBuffer(); 58 | this.playFromArrayBuffer(buffer, onEnded); 59 | } 60 | } 61 | -------------------------------------------------------------------------------- /features/vrmViewer/model.js: -------------------------------------------------------------------------------- 1 | import * as THREE from "https://cdn.jsdelivr.net/npm/three@0.149.0/+esm"; 2 | import { VRM, VRMLoaderPlugin, VRMUtils } from "https://cdn.jsdelivr.net/npm/@pixiv/three-vrm@1.0.9/+esm"; 3 | import { GLTFLoader } from "../../GLTFLoader/GLTFLoader.js"; 4 | import { loadMixamoAnimation } from '../../mixamo/loadMixamoAnimation.js'; 5 | import { VRMLookAtSmootherLoaderPlugin } from "../../lib/VRMLookAtSmootherLoaderPlugin/VRMLookAtSmootherLoaderPlugin.js"; 6 | import { LipSync } from "../lipSync/lipSync.js"; 7 | import { EmoteController } from "../emoteController/emoteController.js"; 8 | import { loadVRMAnimation } from "../../lib/VRMAnimation/loadVRMAnimation.js"; 9 | 10 | 11 | /** 12 | * 3Dキャラクターを管理するクラス 13 | */ 14 | export class Model { 15 | // public vrm?: VRM | null; 16 | // public mixer?: THREE.AnimationMixer; 17 | // public emoteController?: EmoteController; 18 | 19 | // private _lookAtTargetParent: THREE.Object3D; 20 | // private _lipSync?: LipSync; 21 | 22 | constructor(lookAtTargetParent) { 23 | this._lookAtTargetParent = lookAtTargetParent; 24 | this._lipSync = new LipSync(new AudioContext()); 25 | } 26 | 27 | async loadVRM(url) { 28 | const loader = new GLTFLoader(); 29 | loader.register( 30 | (parser) => 31 | new VRMLoaderPlugin(parser, { 32 | lookAtPlugin: new VRMLookAtSmootherLoaderPlugin(parser), 33 | }) 34 | ); 35 | 36 | const gltf = await loader.loadAsync(url); 37 | 38 | const vrm = (this.vrm = gltf.userData.vrm); 39 | vrm.scene.name = "VRMRoot"; 40 | 41 | VRMUtils.rotateVRM0(vrm); 42 | this.mixer = new THREE.AnimationMixer(vrm.scene); 43 | 44 | this.emoteController = new EmoteController(vrm, 
this._lookAtTargetParent); 45 | } 46 | 47 | unLoadVrm() { 48 | if (this.vrm) { 49 | VRMUtils.deepDispose(this.vrm.scene); 50 | this.vrm = null; 51 | } 52 | } 53 | 54 | /** 55 | * VRMアニメーションを読み込む 56 | * 57 | * https://github.com/vrm-c/vrm-specification/blob/master/specification/VRMC_vrm_animation-1.0/README.ja.md 58 | */ 59 | async loadAnimation(input) { 60 | const { vrm, mixer } = this; 61 | if (vrm == null || mixer == null) { 62 | throw new Error("You have to load VRM first"); 63 | } 64 | 65 | let clip; 66 | if(input.expressionTracks) { // <-- crudely detect if it's a VRMAnimation object 67 | clip = input.createAnimationClip(vrm); 68 | } else if(typeof input === "string") { // <-- it's a URL 69 | if(input.endsWith(".vrma")) { 70 | const vrma = await loadVRMAnimation(input); 71 | if (vrma) { 72 | this.loadAnimation(vrma); 73 | return; 74 | } 75 | } else { 76 | // assume it's an fbx file 77 | clip = await loadMixamoAnimation(input, vrm); 78 | } 79 | } 80 | const action = mixer.clipAction(clip); 81 | action.play(); 82 | } 83 | 84 | /** 85 | * 音声を再生し、リップシンクを行う 86 | */ 87 | async speak(buffer, opts={}) { // opts={expression, volume} 88 | this.emoteController?.playEmotion(opts.expression); 89 | await new Promise((resolve) => { 90 | this._lipSync?.playFromArrayBuffer(buffer, () => { 91 | resolve(true); 92 | }, opts); 93 | }); 94 | } 95 | 96 | update(delta) { 97 | if (this._lipSync) { 98 | const { volume } = this._lipSync.update(); 99 | this.emoteController?.lipSync("aa", volume); 100 | } 101 | 102 | this.emoteController?.update(delta); 103 | this.mixer?.update(delta); 104 | this.vrm?.update(delta); 105 | } 106 | } 107 | -------------------------------------------------------------------------------- /features/vrmViewer/viewer.js: -------------------------------------------------------------------------------- 1 | import * as THREE from "https://cdn.jsdelivr.net/npm/three@0.149.0/+esm"; 2 | import { OrbitControls } from "../../OrbitControls.js"; 3 | import { 
/**
 * Owns the three.js scene, renderer, camera and the loaded VRM model,
 * and drives the render loop.
 */
export class Viewer {
  constructor() {
    this.isReady = false;

    // scene
    const scene = new THREE.Scene();
    this._scene = scene;

    // light
    const directionalLight = new THREE.DirectionalLight(0xffffff, 0.6);
    directionalLight.position.set(1.0, 1.0, 1.0).normalize();
    scene.add(directionalLight);

    const ambientLight = new THREE.AmbientLight(0xffffff, 0.4);
    scene.add(ambientLight);

    // animate
    this._clock = new THREE.Clock();
    this._clock.start();
  }

  /**
   * Load a VRM from a URL, add it to the scene and start the idle animation.
   * Replaces any previously loaded model.
   * @param {string} url
   */
  async loadVrm(url) {
    if (this.model?.vrm) {
      this.unloadVRM();
    }

    // gltf and vrm
    this.model = new Model(this._camera || new THREE.Object3D());
    await this.model.loadVRM(url);
    if (!this.model?.vrm) return;

    // Disable frustum culling so avatar parts never vanish at screen edges.
    this.model.vrm.scene.traverse((obj) => {
      obj.frustumCulled = false;
    });

    this._scene.add(this.model.vrm.scene);

    const vrma = await loadVRMAnimation("https://raw.githubusercontent.com/josephrocca/ChatVRM-js/main/idle_loop.vrma");
    if (vrma) this.model.loadAnimation(vrma);

    // HACK: the animation origin is offset, so adjust the camera position
    // on the next frame, after playback has started.
    requestAnimationFrame(() => {
      this.resetCamera();
    });
  }

  /** Remove the current VRM from the scene and dispose it. */
  unloadVRM() {
    if (this.model?.vrm) {
      this._scene.remove(this.model.vrm.scene);
      this.model?.unLoadVrm();
    }
  }

  /**
   * Attach the renderer to a canvas that is managed elsewhere (e.g. by React).
   *
   * FIX: the camera-controls target used to be set *before* the controls were
   * created, so `target.set(0, 1.3, 0)` was silently skipped on the undefined
   * controls. The controls are now created first, then targeted and updated.
   * @param {HTMLCanvasElement} canvas
   */
  setup(canvas) {
    const parentElement = canvas.parentElement;
    const width = parentElement?.clientWidth || canvas.width;
    const height = parentElement?.clientHeight || canvas.height;

    // renderer
    this._renderer = new THREE.WebGLRenderer({
      canvas: canvas,
      alpha: true,
      antialias: true,
    });
    this._renderer.outputEncoding = THREE.sRGBEncoding;
    this._renderer.setSize(width, height);
    this._renderer.setPixelRatio(window.devicePixelRatio);

    // camera
    this._camera = new THREE.PerspectiveCamera(20.0, width / height, 0.1, 20.0);
    this._camera.position.set(0, 1.3, 1.5);

    // camera controls (must exist before the target can be set)
    this._cameraControls = new OrbitControls(
      this._camera,
      this._renderer.domElement
    );
    this._cameraControls.screenSpacePanning = true;
    this._cameraControls.target.set(0, 1.3, 0);
    this._cameraControls.update();

    window.addEventListener("resize", () => {
      this.resize();
    });
    this.isReady = true;
    this.update();
  }

  /**
   * Resize the renderer and camera to match the canvas's parent element.
   */
  resize() {
    if (!this._renderer) return;

    const parentElement = this._renderer.domElement.parentElement;
    if (!parentElement) return;

    this._renderer.setPixelRatio(window.devicePixelRatio);
    this._renderer.setSize(
      parentElement.clientWidth,
      parentElement.clientHeight
    );

    if (!this._camera) return;
    this._camera.aspect =
      parentElement.clientWidth / parentElement.clientHeight;
    this._camera.updateProjectionMatrix();
  }

  /**
   * Look up the VRM's head bone and point the camera/controls at it.
   */
  resetCamera() {
    const headNode = this.model?.vrm?.humanoid.getNormalizedBoneNode("head");

    if (headNode) {
      const headWPos = headNode.getWorldPosition(new THREE.Vector3());
      this._camera?.position.set(
        this._camera.position.x,
        headWPos.y,
        this._camera.position.z
      );
      this._cameraControls?.target.set(headWPos.x, headWPos.y, headWPos.z);
      this._cameraControls?.update();
    }
  }

  /** Render loop: advances the model and renders every animation frame. */
  update() {
    requestAnimationFrame(this.update.bind(this));
    const delta = this._clock.getDelta();
    // update vrm components
    if (this.model) {
      this.model.update(delta);
    }

    if (this._renderer && this._camera) {
      this._renderer.render(this._scene, this._camera);
    }
  }
}
metaVersion = vrm.meta.metaVersion; 51 | const tracks = []; 52 | 53 | for (const [name, origTrack] of this.humanoidTracks.rotation.entries()) { 54 | const nodeName = humanoid.getNormalizedBoneNode(name)?.name; 55 | 56 | if (nodeName != null) { 57 | const track = new THREE.VectorKeyframeTrack( 58 | `${nodeName}.quaternion`, 59 | origTrack.times, 60 | origTrack.values.map((v, i) => 61 | metaVersion === "0" && i % 2 === 0 ? -v : v 62 | ) 63 | ); 64 | tracks.push(track); 65 | } 66 | } 67 | 68 | for (const [name, origTrack] of this.humanoidTracks.translation.entries()) { 69 | const nodeName = humanoid.getNormalizedBoneNode(name)?.name; 70 | 71 | if (nodeName != null) { 72 | const animationY = this.restHipsPosition.y; 73 | const humanoidY = humanoid.getNormalizedAbsolutePose().hips.position[1]; 74 | const scale = humanoidY / animationY; 75 | 76 | const track = origTrack.clone(); 77 | track.values = track.values.map( 78 | (v, i) => (metaVersion === "0" && i % 3 !== 1 ? -v : v) * scale 79 | ); 80 | track.name = `${nodeName}.position`; 81 | tracks.push(track); 82 | } 83 | } 84 | 85 | return tracks; 86 | } 87 | 88 | createExpressionTracks(expressionManager) { 89 | const tracks = []; 90 | 91 | for (const [name, origTrack] of this.expressionTracks.entries()) { 92 | const trackName = expressionManager.getExpressionTrackName(name); 93 | 94 | if (trackName != null) { 95 | const track = origTrack.clone(); 96 | track.name = trackName; 97 | tracks.push(track); 98 | } 99 | } 100 | 101 | return tracks; 102 | } 103 | 104 | createLookAtTrack(trackName) { 105 | if(this.lookAtTrack == null) { 106 | return null; 107 | } 108 | 109 | const track = this.lookAtTrack.clone(); 110 | track.name = trackName; 111 | return track; 112 | } 113 | } 114 | -------------------------------------------------------------------------------- /lib/VRMAnimation/VRMAnimationLoaderPlugin.js: -------------------------------------------------------------------------------- 1 | import * as THREE from 
/**
 * GLTFLoader plugin that parses the `VRMC_vrm_animation` glTF extension and
 * exposes the parsed clips as `gltf.userData.vrmAnimations`
 * (an array of VRMAnimation instances).
 */
export class VRMAnimationLoaderPlugin {
  // public readonly parser: GLTFParser;

  constructor(
    parser,
    options
  ) {
    this.parser = parser;
  }

  // glTF extension name handled by this plugin.
  get name() {
    return "VRMC_vrm_animation";
  }

  /**
   * GLTFLoader hook, invoked after the scene graph has been built.
   * Does nothing when the file does not declare the extension; otherwise
   * retargets every animation clip and stores the results on
   * `gltf.userData.vrmAnimations`.
   */
  async afterRoot(gltf) {
    const defGltf = gltf.parser.json;
    const defExtensionsUsed = defGltf.extensionsUsed;

    // Bail out silently when the extension is not declared or not present.
    if (
      defExtensionsUsed == null ||
      defExtensionsUsed.indexOf(this.name) == -1
    ) {
      return;
    }

    const defExtension = defGltf.extensions?.[this.name];

    if (defExtension == null) {
      return;
    }

    const nodeMap = this._createNodeMap(defExtension);
    const worldMatrixMap = await this._createBoneWorldMatrixMap(
      gltf,
      defExtension
    );

    // Rest-pose world position of the hips; used to scale hip translations
    // to the target model's proportions (see VRMAnimation.createHumanoidTracks).
    const hipsNode = defExtension.humanoid.humanBones["hips"].node;
    const hips = (await gltf.parser.getDependency(
      "node",
      hipsNode
    ));
    const restHipsPosition = hips.getWorldPosition(new THREE.Vector3());

    const clips = gltf.animations;
    const animations = clips.map((clip, iAnimation) => {
      const defAnimation = defGltf.animations[iAnimation];

      const animation = this._parseAnimation(
        clip,
        defAnimation,
        nodeMap,
        worldMatrixMap
      );
      animation.restHipsPosition = restHipsPosition;

      return animation;
    });

    gltf.userData.vrmAnimations = animations;
  }

  /**
   * Build lookup tables from glTF node index to humanoid bone name and to
   * expression name, plus the lookAt node index (null when absent).
   */
  _createNodeMap(defExtension) {
    const humanoidIndexToName = new Map();
    const expressionsIndexToName = new Map();
    let lookAtIndex;

    // humanoid
    const humanBones = defExtension.humanoid?.humanBones;

    if (humanBones) {
      Object.entries(humanBones).forEach(([name, bone]) => {
        const { node } = bone;
        humanoidIndexToName.set(node, name);
      });
    }

    // expressions: presets first, then customs (a custom sharing a node
    // index would overwrite the preset entry)
    const preset = defExtension.expressions?.preset;

    if (preset) {
      Object.entries(preset).forEach(([name, expression]) => {
        const { node } = expression;
        expressionsIndexToName.set(node, name);
      });
    }

    const custom = defExtension.expressions?.custom;

    if (custom) {
      Object.entries(custom).forEach(([name, expression]) => {
        const { node } = expression;
        expressionsIndexToName.set(node, name);
      });
    }

    // lookAt
    lookAtIndex = defExtension.lookAt?.node ?? null;

    return { humanoidIndexToName, expressionsIndexToName, lookAtIndex };
  }

  /**
   * Map every humanoid bone name to its node's world matrix, plus a
   * "hipsParent" entry (identity when the hips node has no parent).
   */
  async _createBoneWorldMatrixMap(gltf, defExtension) {
    // update the entire hierarchy first
    gltf.scene.updateWorldMatrix(false, true);

    const threeNodes = (await gltf.parser.getDependencies(
      "node"
    ));

    const worldMatrixMap = new Map();

    for (const [boneName, { node }] of Object.entries(
      defExtension.humanoid.humanBones
    )) {
      const threeNode = threeNodes[node];
      worldMatrixMap.set(boneName, threeNode.matrixWorld);

      if (boneName === "hips") {
        worldMatrixMap.set(
          "hipsParent",
          threeNode.parent?.matrixWorld ?? MAT4_IDENTITY
        );
      }
    }

    return worldMatrixMap;
  }

  /**
   * Convert one THREE.AnimationClip into a VRMAnimation: humanoid channels
   * are retargeted into bone-local space, expression channels carry their
   * weight in the translation X component, and the lookAt channel is kept
   * as a rotation track.
   * @throws {Error} when a channel uses an unsupported target path
   */
  _parseAnimation(
    animationClip,
    defAnimation,
    nodeMap,
    worldMatrixMap
  ) {
    const tracks = animationClip.tracks;
    const defChannels = defAnimation.channels;

    const result = new VRMAnimation();

    result.duration = animationClip.duration;

    defChannels.forEach((channel, iChannel) => {
      const { node, path } = channel.target;
      const origTrack = tracks[iChannel];

      if (node == null) {
        return;
      }

      // humanoid
      const boneName = nodeMap.humanoidIndexToName.get(node);
      if (boneName != null) {
        // Walk up the VRM bone hierarchy until a parent that exists in this
        // file is found; fall back to the synthetic "hipsParent" entry.
        let parentBoneName = VRMHumanBoneParentMap[boneName];
        while (
          parentBoneName != null &&
          worldMatrixMap.get(parentBoneName) == null
        ) {
          parentBoneName = VRMHumanBoneParentMap[parentBoneName];
        }
        parentBoneName ??= "hipsParent";

        if (path === "translation") {
          const hipsParentWorldMatrix = worldMatrixMap.get("hipsParent");

          // Transform each keyframe position into hips-parent space.
          const trackValues = arrayChunk(origTrack.values, 3).flatMap((v) =>
            _v3A.fromArray(v).applyMatrix4(hipsParentWorldMatrix).toArray()
          );

          const track = origTrack.clone();
          track.values = new Float32Array(trackValues);

          result.humanoidTracks.translation.set(boneName, track);
        } else if (path === "rotation") {
          // a = p^-1 * a' * p * c
          // a' = p * p^-1 * a' * p * c * c^-1 * p^-1
          //    = p * a * c^-1 * p^-1

          const worldMatrix = worldMatrixMap.get(boneName);
          const parentWorldMatrix = worldMatrixMap.get(parentBoneName);

          _quatA.setFromRotationMatrix(worldMatrix).normalize().invert();
          _quatB.setFromRotationMatrix(parentWorldMatrix).normalize();

          // Re-express each keyframe quaternion per the identity above.
          const trackValues = arrayChunk(origTrack.values, 4).flatMap((q) =>
            _quatC.fromArray(q).premultiply(_quatB).multiply(_quatA).toArray()
          );

          const track = origTrack.clone();
          track.values = new Float32Array(trackValues);

          result.humanoidTracks.rotation.set(boneName, track);
        } else {
          throw new Error(`Invalid path "${path}"`);
        }
        return;
      }

      // expressions
      const expressionName = nodeMap.expressionsIndexToName.get(node);
      if (expressionName != null) {
        if (path === "translation") {
          // The expression weight is encoded in the X component of the
          // translation; extract every third value.
          const times = origTrack.times;
          const values = new Float32Array(origTrack.values.length / 3);
          for (let i = 0; i < values.length; i++) {
            values[i] = origTrack.values[3 * i];
          }

          const newTrack = new THREE.NumberKeyframeTrack(
            `${expressionName}.weight`,
            times,
            values
          );
          result.expressionTracks.set(expressionName, newTrack);
        } else {
          throw new Error(`Invalid path "${path}"`);
        }
        return;
      }

      // lookAt
      if (node === nodeMap.lookAtIndex) {
        if (path === "rotation") {
          result.lookAtTrack = origTrack;
        } else {
          throw new Error(`Invalid path "${path}"`);
        }
      }
    });

    return result;
  }
}
/**
 * Split a (typed or plain) array into consecutive chunks of `every` elements;
 * the final chunk may be shorter.
 *
 * ```js
 * arrayChunk( [ 1, 2, 3, 4, 5, 6 ], 2 )
 * // will be
 * [ [ 1, 2 ], [ 3, 4 ], [ 5, 6 ] ]
 * ```
 */
export function arrayChunk(array, every) {
  const result = [];

  let bucket = null;
  let slotsLeft = 0;

  for (const item of array) {
    // Start a fresh bucket whenever the current one is full (or none exists).
    if (slotsLeft <= 0) {
      slotsLeft = every;
      bucket = [];
      result.push(bucket);
    }

    bucket.push(item);
    slotsLeft -= 1;
  }

  return result;
}
/**
 * Extends `VRMLookAt` with the following:
 *
 * - when `userTarget` is assigned, turn toward the user with smoothing
 * - turn with head rotation, not just the eyes
 * - add saccadic eye movement
 */
export class VRMLookAtSmoother extends VRMLookAt {
  // Smoothing coefficient for the damped yaw/pitch.
  smoothFactor = 4.0;

  // Maximum angle (in degrees) up to which the character faces the user.
  userLimitAngle = 90.0;

  // Direction toward the user; the inherited `target` is used by the animation.
  userTarget;

  // Set to `false` to disable saccades.
  enableSaccade;

  // Current saccade yaw offset (degrees).
  #_saccadeYaw = 0.0;

  // Current saccade pitch offset (degrees).
  #_saccadePitch = 0.0;

  // Once this timer exceeds SACCADE_MIN_INTERVAL, a saccade fires with
  // probability SACCADE_PROC each frame.
  #_saccadeTimer = 0.0;

  // Smoothed (damped) yaw.
  #_yawDamped = 0.0;

  // Smoothed (damped) pitch.
  #_pitchDamped = 0.0;

  // Temporarily stores the firstPersonBone rotation so it can be restored.
  #_tempFirstPersonBoneQuat = new THREE.Quaternion();

  constructor(humanoid, applier) {
    super(humanoid, applier);

    this.enableSaccade = true;
  }

  /**
   * Per-frame update: blends the animation-driven gaze with the smoothed
   * user-facing gaze, rotates the head, applies saccades, then hands the
   * final yaw/pitch to the applier.
   * @param {number} delta - elapsed seconds since last frame
   */
  update(delta) {
    if (this.target && this.autoUpdate) {
      // Animation-driven gaze: updates `_yaw` and `_pitch`.
      this.lookAt(this.target.getWorldPosition(_v3A));

      // Yaw/pitch specified by the animation; invariant within this function.
      const yawAnimation = this._yaw;
      const pitchAnimation = this._pitch;

      // Yaw/pitch that will finally be used this frame.
      let yawFrame = yawAnimation;
      let pitchFrame = pitchAnimation;

      // Facing the user.
      if (this.userTarget) {
        // Updates `_yaw` and `_pitch` toward the user.
        this.lookAt(this.userTarget.getWorldPosition(_v3A));

        // Angle limit: beyond `userLimitAngle`, fall back to the direction
        // specified by the animation.
        if (
          this.userLimitAngle < Math.abs(this._yaw) ||
          this.userLimitAngle < Math.abs(this._pitch)
        ) {
          this._yaw = yawAnimation;
          this._pitch = pitchAnimation;
        }

        // Exponentially smooth yawDamped / pitchDamped.
        const k = 1.0 - Math.exp(-this.smoothFactor * delta);
        this.#_yawDamped += (this._yaw - this.#_yawDamped) * k;
        this.#_pitchDamped += (this._pitch - this.#_pitchDamped) * k;

        // Blend with the animation; when the animation looks far to the
        // side, respect the animation instead of the user direction.
        const userRatio =
          1.0 -
          THREE.MathUtils.smoothstep(
            Math.sqrt(
              yawAnimation * yawAnimation + pitchAnimation * pitchAnimation
            ),
            30.0,
            90.0
          );

        // Write the blended result into yawFrame / pitchFrame.
        yawFrame = THREE.MathUtils.lerp(
          yawAnimation,
          0.6 * this.#_yawDamped,
          userRatio
        );
        pitchFrame = THREE.MathUtils.lerp(
          pitchAnimation,
          0.6 * this.#_pitchDamped,
          userRatio
        );

        // Rotate the head toward the user as well.
        _eulerA.set(
          -this.#_pitchDamped * THREE.MathUtils.DEG2RAD,
          this.#_yawDamped * THREE.MathUtils.DEG2RAD,
          0.0,
          VRMLookAt.EULER_ORDER
        );
        _quatA.setFromEuler(_eulerA);

        const head = this.humanoid.getRawBoneNode("head");
        this.#_tempFirstPersonBoneQuat.copy(head.quaternion);
        head.quaternion.slerp(_quatA, 0.4);
        head.updateMatrixWorld();
      }

      if (this.enableSaccade) {
        // Decide whether to fire a new saccade and pick its direction.
        if (
          SACCADE_MIN_INTERVAL < this.#_saccadeTimer &&
          Math.random() < SACCADE_PROC
        ) {
          this.#_saccadeYaw = (2.0 * Math.random() - 1.0) * SACCADE_RADIUS;
          this.#_saccadePitch = (2.0 * Math.random() - 1.0) * SACCADE_RADIUS;
          this.#_saccadeTimer = 0.0;
        }

        this.#_saccadeTimer += delta;

        // Add the saccade offset.
        yawFrame += this.#_saccadeYaw;
        pitchFrame += this.#_saccadePitch;

        // Hand off to the applier.
        this.applier.applyYawPitch(yawFrame, pitchFrame);
      }

      // Already applied; no further update needed within this frame.
      this._needsUpdate = false;
    }

    // When lookAt is not controlled via `target`.
    if (this._needsUpdate) {
      this._needsUpdate = false;
      this.applier.applyYawPitch(this._yaw, this._pitch);
    }
  }

  /** Call after rendering to restore the head rotation. */
  revertFirstPersonBoneQuat() {
    if (this.userTarget) {
      const head = this.humanoid.getNormalizedBoneNode("head");
      head.quaternion.copy(this.#_tempFirstPersonBoneQuat);
    }
  }
}
/**
 * NURBS curve object
 *
 * Derives from Curve, overriding getPoint and getTangent.
 *
 * Implementation is based on (x, y [, z=0 [, w=1]]) control points with w=weight.
 *
 **/

class NURBSCurve extends Curve {

	/**
	 * @param {number} degree - degree of the curve
	 * @param {Array<number>} knots - knot vector
	 * @param {Array} controlPoints - array of Vector(2|3|4); converted to Vector4 internally
	 * @param {number} [startKnot] - index in knots; used by periodic NURBS to remove hidden spans
	 * @param {number} [endKnot] - index in knots
	 */
	constructor(
		degree,
		knots /* array of reals */,
		controlPoints /* array of Vector(2|3|4) */,
		startKnot /* index in knots */,
		endKnot /* index in knots */
	) {

		super();

		this.degree = degree;
		this.knots = knots;
		this.controlPoints = [];
		// Used by periodic NURBS to remove hidden spans
		this.startKnot = startKnot || 0;
		this.endKnot = endKnot || ( this.knots.length - 1 );

		for ( let i = 0; i < controlPoints.length; ++ i ) {

			// ensure Vector4 for control points
			const point = controlPoints[ i ];
			this.controlPoints[ i ] = new Vector4( point.x, point.y, point.z, point.w );

		}

	}

	/**
	 * Evaluate the curve position at parameter t in [0, 1].
	 * @param {number} t
	 * @param {Vector3} [optionalTarget] - receives the result
	 * @returns {Vector3} the evaluated point (same object as optionalTarget)
	 */
	getPoint( t, optionalTarget = new Vector3() ) {

		const point = optionalTarget;

		const u = this.knots[ this.startKnot ] + t * ( this.knots[ this.endKnot ] - this.knots[ this.startKnot ] ); // linear mapping t->u

		// following results in (wx, wy, wz, w) homogeneous point
		const hpoint = NURBSUtils.calcBSplinePoint( this.degree, this.knots, this.controlPoints, u );

		if ( hpoint.w !== 1.0 ) {

			// project to 3D space: (wx, wy, wz, w) -> (x, y, z, 1)
			hpoint.divideScalar( hpoint.w );

		}

		return point.set( hpoint.x, hpoint.y, hpoint.z );

	}

	/**
	 * Evaluate the normalized tangent of the curve at parameter t in [0, 1].
	 * @param {number} t
	 * @param {Vector3} [optionalTarget] - receives the result
	 * @returns {Vector3} the normalized tangent (same object as optionalTarget)
	 */
	getTangent( t, optionalTarget = new Vector3() ) {

		const tangent = optionalTarget;

		const u = this.knots[ 0 ] + t * ( this.knots[ this.knots.length - 1 ] - this.knots[ 0 ] );
		const ders = NURBSUtils.calcNURBSDerivatives( this.degree, this.knots, this.controlPoints, u, 1 );
		tangent.copy( ders[ 1 ] ).normalize();

		return tangent;

	}

}

export { NURBSCurve };
/*
	Finds knot vector span.

	p : degree
	u : parametric value
	U : knot vector

	returns the span
*/
function findSpan( p, u, U ) {

	const n = U.length - p - 1;

	// Clamp to the boundary spans before searching.
	if ( u >= U[ n ] ) {

		return n - 1;

	}

	if ( u <= U[ p ] ) {

		return p;

	}

	// Binary search for the span satisfying U[mid] <= u < U[mid + 1].
	let lo = p;
	let hi = n;
	let mid = ( lo + hi ) >> 1;

	while ( ! ( U[ mid ] <= u && u < U[ mid + 1 ] ) ) {

		if ( u < U[ mid ] ) {

			hi = mid;

		} else {

			lo = mid;

		}

		mid = ( lo + hi ) >> 1;

	}

	return mid;

}
/*
	Calculate basis functions. See The NURBS Book, page 70, algorithm A2.2

	span : span in which u lies
	u : parametric point
	p : degree
	U : knot vector

	returns array[p+1] with basis functions values.
*/
function calcBasisFunctions( span, u, p, U ) {

	// Degree-0 basis is identically 1; higher degrees build on it below.
	const basis = [ 1.0 ];
	const distLeft = [];
	const distRight = [];

	for ( let deg = 1; deg <= p; ++ deg ) {

		distLeft[ deg ] = u - U[ span + 1 - deg ];
		distRight[ deg ] = U[ span + deg ] - u;

		let carry = 0.0;

		// Cox–de Boor recurrence: lift each basis value from degree deg-1 to deg.
		for ( let r = 0; r < deg; ++ r ) {

			const right = distRight[ r + 1 ];
			const left = distLeft[ deg - r ];
			const ratio = basis[ r ] / ( right + left );
			basis[ r ] = carry + right * ratio;
			carry = left * ratio;

		}

		basis[ deg ] = carry;

	}

	return basis;

}
/*
	Calculate basis functions derivatives. See The NURBS Book, page 72, algorithm A2.3.

	span : span in which u lies
	u : parametric point
	p : degree
	n : number of derivatives to calculate
	U : knot vector

	returns array[n+1][p+1] with basis functions derivatives
*/
function calcBasisFunctionDerivatives( span, u, p, n, U ) {

	// Reusable row of zeros for initializing the 2D work arrays.
	const zeroArr = [];
	for ( let i = 0; i <= p; ++ i )
		zeroArr[ i ] = 0.0;

	// ders[k][j] will hold the k-th derivative of basis function j.
	const ders = [];

	for ( let i = 0; i <= n; ++ i )
		ders[ i ] = zeroArr.slice( 0 );

	// ndu stores basis function values (upper triangle) and knot
	// differences (lower triangle), per algorithm A2.3.
	const ndu = [];

	for ( let i = 0; i <= p; ++ i )
		ndu[ i ] = zeroArr.slice( 0 );

	ndu[ 0 ][ 0 ] = 1.0;

	const left = zeroArr.slice( 0 );
	const right = zeroArr.slice( 0 );

	// Build the ndu table with the same recurrence as calcBasisFunctions (A2.2).
	for ( let j = 1; j <= p; ++ j ) {

		left[ j ] = u - U[ span + 1 - j ];
		right[ j ] = U[ span + j ] - u;

		let saved = 0.0;

		for ( let r = 0; r < j; ++ r ) {

			const rv = right[ r + 1 ];
			const lv = left[ j - r ];
			ndu[ j ][ r ] = rv + lv;

			const temp = ndu[ r ][ j - 1 ] / ndu[ j ][ r ];
			ndu[ r ][ j ] = saved + rv * temp;
			saved = lv * temp;

		}

		ndu[ j ][ j ] = saved;

	}

	// Row 0 of the result: the basis function values themselves.
	for ( let j = 0; j <= p; ++ j ) {

		ders[ 0 ][ j ] = ndu[ j ][ p ];

	}

	// Compute the derivative rows; `a` alternates between two rows (s1, s2).
	for ( let r = 0; r <= p; ++ r ) {

		let s1 = 0;
		let s2 = 1;

		const a = [];
		for ( let i = 0; i <= p; ++ i ) {

			a[ i ] = zeroArr.slice( 0 );

		}

		a[ 0 ][ 0 ] = 1.0;

		for ( let k = 1; k <= n; ++ k ) {

			let d = 0.0;
			const rk = r - k;
			const pk = p - k;

			if ( r >= k ) {

				a[ s2 ][ 0 ] = a[ s1 ][ 0 ] / ndu[ pk + 1 ][ rk ];
				d = a[ s2 ][ 0 ] * ndu[ rk ][ pk ];

			}

			// Valid index bounds for the inner accumulation.
			const j1 = ( rk >= - 1 ) ? 1 : - rk;
			const j2 = ( r - 1 <= pk ) ? k - 1 : p - r;

			for ( let j = j1; j <= j2; ++ j ) {

				a[ s2 ][ j ] = ( a[ s1 ][ j ] - a[ s1 ][ j - 1 ] ) / ndu[ pk + 1 ][ rk + j ];
				d += a[ s2 ][ j ] * ndu[ rk + j ][ pk ];

			}

			if ( r <= pk ) {

				a[ s2 ][ k ] = - a[ s1 ][ k - 1 ] / ndu[ pk + 1 ][ r ];
				d += a[ s2 ][ k ] * ndu[ r ][ pk ];

			}

			ders[ k ][ r ] = d;

			// Swap the two alternating rows.
			const j = s1;
			s1 = s2;
			s2 = j;

		}

	}

	// Multiply each derivative row by the appropriate falling factorial of p.
	let r = p;

	for ( let k = 1; k <= n; ++ k ) {

		for ( let j = 0; j <= p; ++ j ) {

			ders[ k ][ j ] *= r;

		}

		r *= p - k;

	}

	return ders;

}
/*
	Calculate "K over I"

	returns k!/(i!(k-i)!)
*/
function calcKoverI( k, i ) {

	// Binomial coefficient via factorials; inputs are small derivative orders.
	const factorial = ( m ) => {

		let f = 1;

		for ( let j = 2; j <= m; ++ j ) {

			f *= j;

		}

		return f;

	};

	return factorial( k ) / ( factorial( i ) * factorial( k - i ) );

}
/*
	Calculate derivatives (0-nd) of rational curve. See The NURBS Book, page 127, algorithm A4.2.

	Pders : result of function calcBSplineDerivatives

	returns array with derivatives for rational curve.
*/
function calcRationalCurveDerivatives( Pders ) {

	const nd = Pders.length;

	// Split the homogeneous derivatives into Euclidean parts and weights.
	const Aders = Pders.map( ( hp ) => new Vector3( hp.x, hp.y, hp.z ) );
	const wders = Pders.map( ( hp ) => hp.w );

	const CK = [];

	for ( let k = 0; k < nd; ++ k ) {

		const v = Aders[ k ].clone();

		// Subtract the contributions of lower-order derivatives (Leibniz rule).
		for ( let i = 1; i <= k; ++ i ) {

			v.sub( CK[ k - i ].clone().multiplyScalar( calcKoverI( k, i ) * wders[ i ] ) );

		}

		CK[ k ] = v.divideScalar( wders[ 0 ] );

	}

	return CK;

}


/*
	Calculate NURBS curve derivatives. See The NURBS Book, page 127, algorithm A4.2.

	p  : degree
	U  : knot vector
	P  : control points in homogeneous space
	u  : parametric points
	nd : number of derivatives

	returns array with derivatives.
*/
function calcNURBSDerivatives( p, U, P, u, nd ) {

	// Homogeneous derivatives first, then project onto the rational curve.
	return calcRationalCurveDerivatives( calcBSplineDerivatives( p, U, P, u, nd ) );

}
431 | 432 | p1, p2 : degrees of B-Spline surface 433 | U1, U2 : knot vectors 434 | P : control points (x, y, z, w) 435 | u, v : parametric values 436 | 437 | returns point for given (u, v) 438 | */ 439 | function calcSurfacePoint( p, q, U, V, P, u, v, target ) { 440 | 441 | const uspan = findSpan( p, u, U ); 442 | const vspan = findSpan( q, v, V ); 443 | const Nu = calcBasisFunctions( uspan, u, p, U ); 444 | const Nv = calcBasisFunctions( vspan, v, q, V ); 445 | const temp = []; 446 | 447 | for ( let l = 0; l <= q; ++ l ) { 448 | 449 | temp[ l ] = new Vector4( 0, 0, 0, 0 ); 450 | for ( let k = 0; k <= p; ++ k ) { 451 | 452 | const point = P[ uspan - p + k ][ vspan - q + l ].clone(); 453 | const w = point.w; 454 | point.x *= w; 455 | point.y *= w; 456 | point.z *= w; 457 | temp[ l ].add( point.multiplyScalar( Nu[ k ] ) ); 458 | 459 | } 460 | 461 | } 462 | 463 | const Sw = new Vector4( 0, 0, 0, 0 ); 464 | for ( let l = 0; l <= q; ++ l ) { 465 | 466 | Sw.add( temp[ l ].multiplyScalar( Nv[ l ] ) ); 467 | 468 | } 469 | 470 | Sw.divideScalar( Sw.w ); 471 | target.set( Sw.x, Sw.y, Sw.z ); 472 | 473 | } 474 | 475 | 476 | 477 | export { 478 | findSpan, 479 | calcBasisFunctions, 480 | calcBSplinePoint, 481 | calcBasisFunctionDerivatives, 482 | calcBSplineDerivatives, 483 | calcKoverI, 484 | calcRationalCurveDerivatives, 485 | calcNURBSDerivatives, 486 | calcSurfacePoint, 487 | }; -------------------------------------------------------------------------------- /mixamo/loadMixamoAnimation.js: -------------------------------------------------------------------------------- 1 | import * as THREE from 'https://cdn.jsdelivr.net/npm/three@0.149.0/+esm'; 2 | import { FBXLoader } from './FBXLoader.js'; 3 | import { mixamoVRMRigMap } from './mixamoVRMRigMap.js'; 4 | 5 | /** 6 | * Load Mixamo animation, convert for three-vrm use, and return it. 
/**
 * Load Mixamo animation, convert for three-vrm use, and return it.
 *
 * Retargets each keyframe track of the Mixamo FBX clip onto the VRM's
 * normalized humanoid bones, scaling hips translation to the model's height.
 *
 * @param {string} url A url of mixamo animation data
 * @param {VRM} vrm A target VRM
 * @returns {Promise} The converted AnimationClip
 */
export function loadMixamoAnimation( url, vrm ) {

	const loader = new FBXLoader(); // A loader which loads FBX
	return loader.loadAsync( url ).then( ( asset ) => {

		// Mixamo exports always name their clip 'mixamo.com'.
		const clip = THREE.AnimationClip.findByName( asset.animations, 'mixamo.com' ); // extract the AnimationClip

		const tracks = []; // KeyframeTracks compatible with VRM will be added here

		// Scratch objects reused across the loop to avoid per-track allocation.
		const restRotationInverse = new THREE.Quaternion();
		const parentRestWorldRotation = new THREE.Quaternion();
		const _quatA = new THREE.Quaternion();
		const _vec3 = new THREE.Vector3();

		// Adjust with reference to hips height: ratio between the VRM's hips
		// height and the Mixamo rig's hips height scales root translation.
		const motionHipsHeight = asset.getObjectByName( 'mixamorigHips' ).position.y;
		const vrmHipsY = vrm.humanoid?.getNormalizedBoneNode( 'hips' ).getWorldPosition( _vec3 ).y;
		const vrmRootY = vrm.scene.getWorldPosition( _vec3 ).y;
		const vrmHipsHeight = Math.abs( vrmHipsY - vrmRootY );
		const hipsPositionScale = vrmHipsHeight / motionHipsHeight;

		clip.tracks.forEach( ( track ) => {

			// Convert each tracks for VRM use, and push to `tracks`.
			// Track names look like "<nodeName>.<propertyName>".
			const trackSplitted = track.name.split( '.' );
			const mixamoRigName = trackSplitted[ 0 ];
			const vrmBoneName = mixamoVRMRigMap[ mixamoRigName ];
			const vrmNodeName = vrm.humanoid?.getNormalizedBoneNode( vrmBoneName )?.name;
			const mixamoRigNode = asset.getObjectByName( mixamoRigName );

			// Skip tracks whose bone has no mapping or no VRM counterpart.
			if ( vrmNodeName != null ) {

				const propertyName = trackSplitted[ 1 ];

				// Store rotations of rest-pose.
				mixamoRigNode.getWorldQuaternion( restRotationInverse ).invert();
				mixamoRigNode.parent.getWorldQuaternion( parentRestWorldRotation );

				if ( track instanceof THREE.QuaternionKeyframeTrack ) {

					// Retarget rotation of mixamoRig to NormalizedBone.
					for ( let i = 0; i < track.values.length; i += 4 ) {

						const flatQuaternion = track.values.slice( i, i + 4 );

						_quatA.fromArray( flatQuaternion );

						// parent's rest world rotation * track rotation * inverse of rest world rotation
						_quatA
							.premultiply( parentRestWorldRotation )
							.multiply( restRotationInverse );

						_quatA.toArray( flatQuaternion );

						// Write the retargeted quaternion back in place.
						flatQuaternion.forEach( ( v, index ) => {

							track.values[ index + i ] = v;

						} );

					}

					tracks.push(
						new THREE.QuaternionKeyframeTrack(
							`${vrmNodeName}.${propertyName}`,
							track.times,
							// Negates the x and z quaternion components (indices 0 and 2)
							// for VRM 0.x models — presumably because VRM 0.x uses a
							// different forward-axis convention; confirm against three-vrm docs.
							track.values.map( ( v, i ) => ( vrm.meta?.metaVersion === '0' && i % 2 === 0 ? - v : v ) ),
						),
					);

				} else if ( track instanceof THREE.VectorKeyframeTrack ) {

					// Same VRM 0.x convention flip for the x and z position
					// components, plus hips-height scaling of all components.
					const value = track.values.map( ( v, i ) => ( vrm.meta?.metaVersion === '0' && i % 3 !== 1 ? - v : v ) * hipsPositionScale );
					tracks.push( new THREE.VectorKeyframeTrack( `${vrmNodeName}.${propertyName}`, track.times, value ) );

				}

			}

		} );

		return new THREE.AnimationClip( 'vrmAnimation', clip.duration, tracks );

	} );

}

/**
 * A map from Mixamo rig name to VRM Humanoid bone name
 */
export const mixamoVRMRigMap = {
	mixamorigHips: 'hips',
	mixamorigSpine: 'spine',
	mixamorigSpine1: 'chest',
	mixamorigSpine2: 'upperChest',
	mixamorigNeck: 'neck',
	mixamorigHead: 'head',
	mixamorigLeftShoulder: 'leftShoulder',
	mixamorigLeftArm: 'leftUpperArm',
	mixamorigLeftForeArm: 'leftLowerArm',
	mixamorigLeftHand: 'leftHand',
	mixamorigLeftHandThumb1: 'leftThumbMetacarpal',
	mixamorigLeftHandThumb2: 'leftThumbProximal',
	mixamorigLeftHandThumb3: 'leftThumbDistal',
	mixamorigLeftHandIndex1: 'leftIndexProximal',
	mixamorigLeftHandIndex2: 'leftIndexIntermediate',
	mixamorigLeftHandIndex3: 'leftIndexDistal',
	mixamorigLeftHandMiddle1: 'leftMiddleProximal',
	mixamorigLeftHandMiddle2: 'leftMiddleIntermediate',
	mixamorigLeftHandMiddle3: 'leftMiddleDistal',
	mixamorigLeftHandRing1: 'leftRingProximal',
	mixamorigLeftHandRing2: 'leftRingIntermediate',
	mixamorigLeftHandRing3: 'leftRingDistal',
	mixamorigLeftHandPinky1: 'leftLittleProximal',
	mixamorigLeftHandPinky2: 'leftLittleIntermediate',
	mixamorigLeftHandPinky3: 'leftLittleDistal',
	mixamorigRightShoulder: 'rightShoulder',
	mixamorigRightArm: 'rightUpperArm',
	mixamorigRightForeArm: 'rightLowerArm',
	mixamorigRightHand: 'rightHand',
	mixamorigRightHandPinky1: 'rightLittleProximal',
	mixamorigRightHandPinky2: 'rightLittleIntermediate',
	mixamorigRightHandPinky3: 'rightLittleDistal',
	mixamorigRightHandRing1: 'rightRingProximal',
	mixamorigRightHandRing2: 'rightRingIntermediate',
	mixamorigRightHandRing3: 'rightRingDistal',
	mixamorigRightHandMiddle1: 'rightMiddleProximal',
	mixamorigRightHandMiddle2: 'rightMiddleIntermediate',
	mixamorigRightHandMiddle3: 'rightMiddleDistal',
	mixamorigRightHandIndex1: 'rightIndexProximal',
	mixamorigRightHandIndex2: 'rightIndexIntermediate',
	mixamorigRightHandIndex3: 'rightIndexDistal',
	mixamorigRightHandThumb1: 'rightThumbMetacarpal',
	mixamorigRightHandThumb2: 'rightThumbProximal',
	mixamorigRightHandThumb3: 'rightThumbDistal',
	mixamorigLeftUpLeg: 'leftUpperLeg',
	mixamorigLeftLeg: 'leftLowerLeg',
	mixamorigLeftFoot: 'leftFoot',
	mixamorigLeftToeBase: 'leftToes',
	mixamorigRightUpLeg: 'rightUpperLeg',
	mixamorigRightLeg: 'rightLowerLeg',
	mixamorigRightFoot: 'rightFoot',
	mixamorigRightToeBase: 'rightToes',
};