├── .editorconfig ├── .eslintignore ├── .eslintrc ├── .github └── workflows │ └── deploy-github-pages.yml ├── .gitignore ├── .prettierignore ├── .prettierrc ├── LICENSE ├── ProjectedMaterial.d.ts ├── README.md ├── build ├── ProjectedMaterial.js └── ProjectedMaterial.module.js ├── examples ├── 3d-model.html ├── basic.html ├── css │ └── style.css ├── envmap.html ├── images │ ├── bigbucksbunny.mp4 │ ├── black-spot.png │ ├── charles-unsplash.jpg │ ├── kandao3_blurred.jpg │ ├── lukasz-szmigiel-unsplash.jpg │ ├── source.svg │ ├── three-projected-material-1.png │ ├── three-projected-material-2.png │ ├── three-projected-material-3.png │ ├── three-projected-material-4.png │ ├── three-projected-material-5.png │ ├── three-projected-material-6.png │ └── uv.jpg ├── index.html ├── instancing.html ├── lib │ ├── Controls.js │ ├── ProjectedMaterial.module.js │ ├── WebGLApp.js │ ├── controls-gui.module.js │ ├── controls-state.module.js │ ├── math-utils.js │ └── three-utils.js ├── models │ ├── cinema_screen.glb │ └── suzanne.gltf ├── multiple-projections-instancing.html ├── multiple-projections.html ├── orthographic-camera.html ├── same-camera.html ├── screenshots │ ├── 3d-model.png │ ├── basic.png │ ├── envmap.png │ ├── instancing.png │ ├── multiple-projections-instancing.png │ ├── multiple-projections.png │ ├── orthographic-camera.png │ ├── same-camera.png │ ├── transparency.png │ └── video.png ├── transparency.html └── video.html ├── package-lock.json ├── package.json ├── rollup.config.js ├── screenshot.png ├── src ├── ProjectedMaterial.js └── three-utils.js └── yarn.lock /.editorconfig: -------------------------------------------------------------------------------- 1 | # editorconfig.org 2 | root = true 3 | 4 | [*] 5 | indent_style = space 6 | indent_size = 2 7 | end_of_line = lf 8 | charset = utf-8 9 | trim_trailing_whitespace = true 10 | insert_final_newline = true 11 | 12 | [*.md] 13 | trim_trailing_whitespace = false 14 | 
-------------------------------------------------------------------------------- /.eslintignore: -------------------------------------------------------------------------------- 1 | node_modules/ 2 | build/ 3 | examples/lib/controls-state.module.js 4 | examples/lib/controls-gui.module.js 5 | examples/lib/ProjectedMaterial.module.js -------------------------------------------------------------------------------- /.eslintrc: -------------------------------------------------------------------------------- 1 | { 2 | "extends": "eslint-config-accurapp" 3 | } 4 | -------------------------------------------------------------------------------- /.github/workflows/deploy-github-pages.yml: -------------------------------------------------------------------------------- 1 | name: 'Deploy to GitHub Pages' 2 | 3 | on: 4 | push: 5 | branches: 6 | - master 7 | 8 | jobs: 9 | build: 10 | runs-on: ubuntu-latest 11 | steps: 12 | - uses: actions/checkout@v2 13 | 14 | - name: Deploy 15 | uses: peaceiris/actions-gh-pages@v3 16 | with: 17 | github_token: ${{ secrets.GITHUB_TOKEN }} 18 | publish_dir: ./examples 19 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | node_modules/ 2 | *.log 3 | -------------------------------------------------------------------------------- /.prettierignore: -------------------------------------------------------------------------------- 1 | package.json 2 | .* 3 | node_modules/ 4 | build/ -------------------------------------------------------------------------------- /.prettierrc: -------------------------------------------------------------------------------- 1 | { 2 | "printWidth": 100, 3 | "semi": false, 4 | "singleQuote": true, 5 | "trailingComma": "es5" 6 | } 7 | -------------------------------------------------------------------------------- /LICENSE: 
-------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2022 Marco Fugaro 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
22 | -------------------------------------------------------------------------------- /ProjectedMaterial.d.ts: -------------------------------------------------------------------------------- 1 | declare module 'three-projected-material' { 2 | import { 3 | MeshPhysicalMaterial, 4 | PerspectiveCamera, 5 | Texture, 6 | Vector2, 7 | Matrix4, 8 | Vector3, 9 | BufferGeometry, 10 | InstancedMesh, 11 | MeshPhysicalMaterialParameters, 12 | OrthographicCamera, 13 | Mesh, 14 | } from 'three' 15 | 16 | interface ProjectedMaterialParameters extends MeshPhysicalMaterialParameters { 17 | camera?: PerspectiveCamera | OrthographicCamera 18 | texture?: Texture 19 | textureScale?: number 20 | textureOffset?: Vector2 21 | cover?: boolean 22 | } 23 | 24 | export default class ProjectedMaterial extends MeshPhysicalMaterial { 25 | camera: PerspectiveCamera | OrthographicCamera 26 | texture: Texture 27 | textureScale: number 28 | textureOffset: Vector2 29 | cover: boolean 30 | 31 | uniforms: { 32 | projectedTexture: { 33 | value: Texture 34 | } 35 | isTextureLoaded: { 36 | value: boolean 37 | } 38 | isTextureProjected: { 39 | value: boolean 40 | } 41 | backgroundOpacity: { 42 | value: number 43 | } 44 | viewMatrixCamera: { 45 | value: Matrix4 46 | } 47 | projectionMatrixCamera: { 48 | value: Matrix4 49 | } 50 | projPosition: { 51 | value: Vector3 52 | } 53 | projDirection: { 54 | value: Vector3 55 | } 56 | savedModelMatrix: { 57 | value: Matrix4 58 | } 59 | widthScaled: { 60 | value: number 61 | } 62 | heightScaled: { 63 | value: number 64 | } 65 | textureOffset: { 66 | value: Vector2 67 | } 68 | } 69 | 70 | readonly isProjectedMaterial = true 71 | 72 | constructor({ 73 | camera, 74 | texture, 75 | textureScale, 76 | textureOffset, 77 | cover, 78 | ...options 79 | }?: ProjectedMaterialParameters) 80 | 81 | project(mesh: Mesh): void 82 | 83 | projectInstanceAt( 84 | index: number, 85 | instancedMesh: InstancedMesh, 86 | matrixWorld: Matrix4, 87 | { 88 | forceCameraSave, 89 | }?: { 90 | 
forceCameraSave?: boolean | undefined 91 | } 92 | ): void 93 | } 94 | 95 | export function allocateProjectionData(geometry: BufferGeometry, instancesCount: number): void 96 | } 97 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # three-projected-material 2 | 3 | > Three.js Material which lets you do [Texture Projection](https://en.wikipedia.org/wiki/Projective_texture_mapping) on a 3d Model. 4 | 5 |

6 | 7 |

8 | 9 | ## Installation 10 | 11 | After having installed three.js, install it from npm with: 12 | 13 | ``` 14 | npm install three-projected-material 15 | ``` 16 | 17 | or 18 | 19 | ``` 20 | yarn add three-projected-material 21 | ``` 22 | 23 | You can also use it from the CDN, just make sure to put this after the three.js script: 24 | 25 | ```html 26 | 27 | ``` 28 | 29 | ## Getting started 30 | 31 | You can import it like this 32 | 33 | ```js 34 | import ProjectedMaterial from 'three-projected-material' 35 | ``` 36 | 37 | or, if you're using CommonJS 38 | 39 | ```js 40 | const ProjectedMaterial = require('three-projected-material').default 41 | ``` 42 | 43 | Instead, if you install it from the CDN, its exposed under `window.projectedMaterial`, and you use it like this 44 | 45 | ```js 46 | const ProjectedMaterial = window.projectedMaterial.default 47 | ``` 48 | 49 | Then, you can use it like this: 50 | 51 | ```js 52 | const geometry = new THREE.BoxGeometry(1, 1, 1) 53 | const material = new ProjectedMaterial({ 54 | camera, // the camera that acts as a projector 55 | texture, // the texture being projected 56 | textureScale: 0.8, // scale down the texture a bit 57 | textureOffset: new THREE.Vector2(0.1, 0.1), // you can translate the texture if you want 58 | cover: true, // enable background-size: cover behaviour, by default it's like background-size: contain 59 | color: '#ccc', // the color of the object if it's not projected on 60 | roughness: 0.3, // you can pass any other option that belongs to MeshPhysicalMaterial 61 | }) 62 | const box = new THREE.Mesh(geometry, material) 63 | webgl.scene.add(box) 64 | 65 | // move the mesh any way you want! 66 | box.rotation.y = -Math.PI / 4 67 | 68 | // and when you're ready project the texture on the box! 
69 | material.project(box) 70 | ``` 71 | 72 | ProjectedMaterial also supports **instanced meshes** via three.js' [InstancedMesh](https://threejs.org/docs/index.html#api/en/objects/InstancedMesh), and even **multiple projections**. Check out the examples below for a detailed guide! 73 | 74 | ## [Examples](https://three-projected-material.netlify.app/) 75 | 76 |

77 | 78 | 79 | 80 | 81 | 82 | 83 | 84 | 85 | 86 |

87 | 88 | ## API Reference 89 | 90 | ### new ProjectedMaterial({ camera, texture, ...others }) 91 | 92 | Create a new material to later use for a mesh. 93 | 94 | | Option | Default | Description | 95 | | --------------- | --------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | 96 | | `camera` | | The [PerspectiveCamera](https://threejs.org/docs/#api/en/cameras/PerspectiveCamera) the texture will be projected from. | 97 | | `texture` | | The [Texture](https://threejs.org/docs/#api/en/textures/Texture) being projected. | 98 | | `textureScale` | 1 | Make the texture bigger or smaller. | 99 | | `textureOffset` | `new THREE.Vector2()` | Offset the texture in a x or y direction. The unit system goes from 0 to 1, from the bottom left corner to the top right corner of the projector camera frustum. | 100 | | `cover` | false | Wheter the texture should act like [`background-size: cover`](https://css-tricks.com/almanac/properties/b/background-size/) on the projector frustum. By default it works like [`background-size: contain`](https://css-tricks.com/almanac/properties/b/background-size/). | 101 | | `...options` | | Other options you pass to any three.js material like `color`, `opacity`, `envMap` and so on. The material is built from a [MeshPhysicalMaterial](https://threejs.org/docs/index.html#api/en/materials/MeshPhysicalMaterial), so you can pass any property of that material and of its parent [MeshStandardMaterial](https://threejs.org/docs/index.html#api/en/materials/MeshStandardMaterial). | 102 | 103 | These properties are exposed as properties of the material, so you can change them later. 
104 | 105 | For example, to update the material texture and change its scale: 106 | 107 | ```js 108 | material.texture = newTexture 109 | material.textureScale = 0.8 110 | ``` 111 | 112 | ### material.project(mesh) 113 | 114 | Project the texture from the camera on the mesh. With this method we "take a snaphot" of the current mesh and camera position in space. The 115 | After calling this method, you can move the mesh or the camera freely. 116 | 117 | | Option | Description | 118 | | ------ | ---------------------------------------------------- | 119 | | `mesh` | The mesh that has a `ProjectedMaterial` as material. | 120 | 121 | ### allocateProjectionData(geometry, instancesCount) 122 | 123 | Allocate the data that will be used when projecting on an [InstancedMesh](https://threejs.org/docs/#api/en/objects/InstancedMesh). Use this on the geometry that will be used in pair with a `ProjectedMaterial` when initializing `InstancedMesh`. 124 | 125 | This needs to be called before `.projectInstanceAt()`. 126 | 127 | | Option | Description | 128 | | ---------------- | ----------------------------------------------------------------------------- | 129 | | `geometry` | The geometry that will be passed to the `InstancedMesh`. | 130 | | `instancesCount` | The number of instances, the same that will be passed to the `InstancedMesh`. | 131 | 132 | ### material.projectInstanceAt(index, instancedMesh, matrix) 133 | 134 | Do the projection for an [InstancedMesh](https://threejs.org/docs/#api/en/objects/InstancedMesh). Don't forget to call `updateMatrix()` like you do before calling `InstancedMesh.setMatrixAt()`. 135 | 136 | To do projection an an instanced mesh, the geometry needs to be prepared with `allocateProjectionData()` beforehand. 137 | 138 | ```js 139 | dummy.updateMatrix() 140 | projectInstanceAt(i, instancedMesh, dummy.matrix) 141 | ``` 142 | 143 | [Link to the full example about instancing](https://three-projected-material.netlify.app/instancing). 
144 | 145 | | Option | Description | 146 | | --------------- | ------------------------------------------------------------------------------------------------------------------------ | 147 | | `index` | The index of the instanced element to project. | 148 | | `instancedMesh` | The [InstancedMesh](https://threejs.org/docs/#api/en/objects/InstancedMesh) with a projected material. | 149 | | `matrix` | The `matrix` of the dummy you used to position the instanced mesh element. Be sure to call `.updateMatrix()` beforehand. | 150 | -------------------------------------------------------------------------------- /build/ProjectedMaterial.js: -------------------------------------------------------------------------------- 1 | (function (global, factory) { 2 | typeof exports === 'object' && typeof module !== 'undefined' ? factory(exports, require('three')) : 3 | typeof define === 'function' && define.amd ? define(['exports', 'three'], factory) : 4 | (global = typeof globalThis !== 'undefined' ? globalThis : global || self, factory(global.projectedMaterial = {}, global.THREE)); 5 | })(this, (function (exports, THREE) { 'use strict'; 6 | 7 | function _interopNamespace(e) { 8 | if (e && e.__esModule) return e; 9 | var n = Object.create(null); 10 | if (e) { 11 | Object.keys(e).forEach(function (k) { 12 | if (k !== 'default') { 13 | var d = Object.getOwnPropertyDescriptor(e, k); 14 | Object.defineProperty(n, k, d.get ? 
d : { 15 | enumerable: true, 16 | get: function () { return e[k]; } 17 | }); 18 | } 19 | }); 20 | } 21 | n["default"] = e; 22 | return Object.freeze(n); 23 | } 24 | 25 | var THREE__namespace = /*#__PURE__*/_interopNamespace(THREE); 26 | 27 | var id = 0; 28 | 29 | function _classPrivateFieldLooseKey(name) { 30 | return "__private_" + id++ + "_" + name; 31 | } 32 | 33 | function _classPrivateFieldLooseBase(receiver, privateKey) { 34 | if (!Object.prototype.hasOwnProperty.call(receiver, privateKey)) { 35 | throw new TypeError("attempted to use private field on non-instance"); 36 | } 37 | 38 | return receiver; 39 | } 40 | 41 | function monkeyPatch(shader, _ref) { 42 | let { 43 | defines = '', 44 | header = '', 45 | main = '', 46 | ...replaces 47 | } = _ref; 48 | let patchedShader = shader; 49 | 50 | const replaceAll = (str, find, rep) => str.split(find).join(rep); 51 | 52 | Object.keys(replaces).forEach(key => { 53 | patchedShader = replaceAll(patchedShader, key, replaces[key]); 54 | }); 55 | patchedShader = patchedShader.replace('void main() {', ` 56 | ${header} 57 | void main() { 58 | ${main} 59 | `); 60 | const stringDefines = Object.keys(defines).map(d => `#define ${d} ${defines[d]}`).join('\n'); 61 | return ` 62 | ${stringDefines} 63 | ${patchedShader} 64 | `; 65 | } // run the callback when the image will be loaded 66 | 67 | function addLoadListener(texture, callback) { 68 | // return if it's already loaded 69 | if (texture.image && texture.image.videoWidth !== 0 && texture.image.videoHeight !== 0) { 70 | return; 71 | } 72 | 73 | const interval = setInterval(() => { 74 | if (texture.image && texture.image.videoWidth !== 0 && texture.image.videoHeight !== 0) { 75 | clearInterval(interval); 76 | return callback(texture); 77 | } 78 | }, 16); 79 | } 80 | 81 | var _camera = /*#__PURE__*/_classPrivateFieldLooseKey("camera"); 82 | 83 | var _cover = /*#__PURE__*/_classPrivateFieldLooseKey("cover"); 84 | 85 | var _textureScale = 
/*#__PURE__*/_classPrivateFieldLooseKey("textureScale"); 86 | 87 | var _saveCameraProjectionMatrix = /*#__PURE__*/_classPrivateFieldLooseKey("saveCameraProjectionMatrix"); 88 | 89 | var _saveDimensions = /*#__PURE__*/_classPrivateFieldLooseKey("saveDimensions"); 90 | 91 | var _saveCameraMatrices = /*#__PURE__*/_classPrivateFieldLooseKey("saveCameraMatrices"); 92 | 93 | class ProjectedMaterial extends THREE__namespace.MeshPhysicalMaterial { 94 | // internal values... they are exposed via getters 95 | get camera() { 96 | return _classPrivateFieldLooseBase(this, _camera)[_camera]; 97 | } 98 | 99 | set camera(camera) { 100 | if (!camera || !camera.isCamera) { 101 | throw new Error('Invalid camera set to the ProjectedMaterial'); 102 | } 103 | 104 | _classPrivateFieldLooseBase(this, _camera)[_camera] = camera; 105 | 106 | _classPrivateFieldLooseBase(this, _saveDimensions)[_saveDimensions](); 107 | } 108 | 109 | get texture() { 110 | return this.uniforms.projectedTexture.value; 111 | } 112 | 113 | set texture(texture) { 114 | if (!(texture != null && texture.isTexture)) { 115 | throw new Error('Invalid texture set to the ProjectedMaterial'); 116 | } 117 | 118 | this.uniforms.projectedTexture.value = texture; 119 | this.uniforms.isTextureLoaded.value = Boolean(texture.image); 120 | 121 | if (!this.uniforms.isTextureLoaded.value) { 122 | addLoadListener(texture, () => { 123 | this.uniforms.isTextureLoaded.value = true; 124 | 125 | _classPrivateFieldLooseBase(this, _saveDimensions)[_saveDimensions](); 126 | }); 127 | } else { 128 | _classPrivateFieldLooseBase(this, _saveDimensions)[_saveDimensions](); 129 | } 130 | } 131 | 132 | get textureScale() { 133 | return _classPrivateFieldLooseBase(this, _textureScale)[_textureScale]; 134 | } 135 | 136 | set textureScale(textureScale) { 137 | _classPrivateFieldLooseBase(this, _textureScale)[_textureScale] = textureScale; 138 | 139 | _classPrivateFieldLooseBase(this, _saveDimensions)[_saveDimensions](); 140 | } 141 | 142 | get 
textureOffset() { 143 | return this.uniforms.textureOffset.value; 144 | } 145 | 146 | set textureOffset(textureOffset) { 147 | this.uniforms.textureOffset.value = textureOffset; 148 | } 149 | 150 | get cover() { 151 | return _classPrivateFieldLooseBase(this, _cover)[_cover]; 152 | } 153 | 154 | set cover(cover) { 155 | _classPrivateFieldLooseBase(this, _cover)[_cover] = cover; 156 | 157 | _classPrivateFieldLooseBase(this, _saveDimensions)[_saveDimensions](); 158 | } 159 | 160 | constructor(_temp) { 161 | let { 162 | camera = new THREE__namespace.PerspectiveCamera(), 163 | texture = new THREE__namespace.Texture(), 164 | textureScale = 1, 165 | textureOffset = new THREE__namespace.Vector2(), 166 | cover = false, 167 | ...options 168 | } = _temp === void 0 ? {} : _temp; 169 | 170 | if (!texture.isTexture) { 171 | throw new Error('Invalid texture passed to the ProjectedMaterial'); 172 | } 173 | 174 | if (!camera.isCamera) { 175 | throw new Error('Invalid camera passed to the ProjectedMaterial'); 176 | } 177 | 178 | super(options); 179 | Object.defineProperty(this, _saveCameraMatrices, { 180 | value: _saveCameraMatrices2 181 | }); 182 | Object.defineProperty(this, _saveDimensions, { 183 | value: _saveDimensions2 184 | }); 185 | Object.defineProperty(this, _camera, { 186 | writable: true, 187 | value: void 0 188 | }); 189 | Object.defineProperty(this, _cover, { 190 | writable: true, 191 | value: void 0 192 | }); 193 | Object.defineProperty(this, _textureScale, { 194 | writable: true, 195 | value: void 0 196 | }); 197 | Object.defineProperty(this, _saveCameraProjectionMatrix, { 198 | writable: true, 199 | value: () => { 200 | this.uniforms.projectionMatrixCamera.value.copy(this.camera.projectionMatrix); 201 | 202 | _classPrivateFieldLooseBase(this, _saveDimensions)[_saveDimensions](); 203 | } 204 | }); 205 | Object.defineProperty(this, 'isProjectedMaterial', { 206 | value: true 207 | }); // save the private variables 208 | 209 | _classPrivateFieldLooseBase(this, 
_camera)[_camera] = camera; 210 | _classPrivateFieldLooseBase(this, _cover)[_cover] = cover; 211 | _classPrivateFieldLooseBase(this, _textureScale)[_textureScale] = textureScale; // scale to keep the image proportions and apply textureScale 212 | 213 | const [_widthScaled, _heightScaled] = computeScaledDimensions(texture, camera, textureScale, cover); 214 | this.uniforms = { 215 | projectedTexture: { 216 | value: texture 217 | }, 218 | // this avoids rendering black if the texture 219 | // hasn't loaded yet 220 | isTextureLoaded: { 221 | value: Boolean(texture.image) 222 | }, 223 | // don't show the texture if we haven't called project() 224 | isTextureProjected: { 225 | value: false 226 | }, 227 | // if we have multiple materials we want to show the 228 | // background only of the first material 229 | backgroundOpacity: { 230 | value: 1 231 | }, 232 | // these will be set on project() 233 | viewMatrixCamera: { 234 | value: new THREE__namespace.Matrix4() 235 | }, 236 | projectionMatrixCamera: { 237 | value: new THREE__namespace.Matrix4() 238 | }, 239 | projPosition: { 240 | value: new THREE__namespace.Vector3() 241 | }, 242 | projDirection: { 243 | value: new THREE__namespace.Vector3(0, 0, -1) 244 | }, 245 | // we will set this later when we will have positioned the object 246 | savedModelMatrix: { 247 | value: new THREE__namespace.Matrix4() 248 | }, 249 | widthScaled: { 250 | value: _widthScaled 251 | }, 252 | heightScaled: { 253 | value: _heightScaled 254 | }, 255 | textureOffset: { 256 | value: textureOffset 257 | } 258 | }; 259 | 260 | this.onBeforeCompile = shader => { 261 | // expose also the material's uniforms 262 | Object.assign(this.uniforms, shader.uniforms); 263 | shader.uniforms = this.uniforms; 264 | 265 | if (this.camera.isOrthographicCamera) { 266 | shader.defines.ORTHOGRAPHIC = ''; 267 | } 268 | 269 | shader.vertexShader = monkeyPatch(shader.vertexShader, { 270 | header: 271 | /* glsl */ 272 | ` 273 | uniform mat4 viewMatrixCamera; 274 | uniform 
mat4 projectionMatrixCamera; 275 | 276 | #ifdef USE_INSTANCING 277 | attribute vec4 savedModelMatrix0; 278 | attribute vec4 savedModelMatrix1; 279 | attribute vec4 savedModelMatrix2; 280 | attribute vec4 savedModelMatrix3; 281 | #else 282 | uniform mat4 savedModelMatrix; 283 | #endif 284 | 285 | varying vec3 vSavedNormal; 286 | varying vec4 vTexCoords; 287 | #ifndef ORTHOGRAPHIC 288 | varying vec4 vWorldPosition; 289 | #endif 290 | `, 291 | main: 292 | /* glsl */ 293 | ` 294 | #ifdef USE_INSTANCING 295 | mat4 savedModelMatrix = mat4( 296 | savedModelMatrix0, 297 | savedModelMatrix1, 298 | savedModelMatrix2, 299 | savedModelMatrix3 300 | ); 301 | #endif 302 | 303 | vSavedNormal = mat3(savedModelMatrix) * normal; 304 | vTexCoords = projectionMatrixCamera * viewMatrixCamera * savedModelMatrix * vec4(position, 1.0); 305 | #ifndef ORTHOGRAPHIC 306 | vWorldPosition = savedModelMatrix * vec4(position, 1.0); 307 | #endif 308 | ` 309 | }); 310 | shader.fragmentShader = monkeyPatch(shader.fragmentShader, { 311 | header: 312 | /* glsl */ 313 | ` 314 | uniform sampler2D projectedTexture; 315 | uniform bool isTextureLoaded; 316 | uniform bool isTextureProjected; 317 | uniform float backgroundOpacity; 318 | uniform vec3 projPosition; 319 | uniform vec3 projDirection; 320 | uniform float widthScaled; 321 | uniform float heightScaled; 322 | uniform vec2 textureOffset; 323 | 324 | varying vec3 vSavedNormal; 325 | varying vec4 vTexCoords; 326 | #ifndef ORTHOGRAPHIC 327 | varying vec4 vWorldPosition; 328 | #endif 329 | 330 | float mapRange(float value, float min1, float max1, float min2, float max2) { 331 | return min2 + (value - min1) * (max2 - min2) / (max1 - min1); 332 | } 333 | `, 334 | 'vec4 diffuseColor = vec4( diffuse, opacity );': 335 | /* glsl */ 336 | ` 337 | // clamp the w to make sure we don't project behind 338 | float w = max(vTexCoords.w, 0.0); 339 | 340 | vec2 uv = (vTexCoords.xy / w) * 0.5 + 0.5; 341 | 342 | uv += textureOffset; 343 | 344 | // apply the corrected 
width and height 345 | uv.x = mapRange(uv.x, 0.0, 1.0, 0.5 - widthScaled / 2.0, 0.5 + widthScaled / 2.0); 346 | uv.y = mapRange(uv.y, 0.0, 1.0, 0.5 - heightScaled / 2.0, 0.5 + heightScaled / 2.0); 347 | 348 | // this makes sure we don't sample out of the texture 349 | bool isInTexture = (max(uv.x, uv.y) <= 1.0 && min(uv.x, uv.y) >= 0.0); 350 | 351 | // this makes sure we don't render also the back of the object 352 | #ifdef ORTHOGRAPHIC 353 | vec3 projectorDirection = projDirection; 354 | #else 355 | vec3 projectorDirection = normalize(projPosition - vWorldPosition.xyz); 356 | #endif 357 | float dotProduct = dot(vSavedNormal, projectorDirection); 358 | bool isFacingProjector = dotProduct > 0.0000001; 359 | 360 | 361 | vec4 diffuseColor = vec4(diffuse, opacity * backgroundOpacity); 362 | 363 | if (isFacingProjector && isInTexture && isTextureLoaded && isTextureProjected) { 364 | vec4 textureColor = texture2D(projectedTexture, uv); 365 | 366 | // apply the material opacity 367 | textureColor.a *= opacity; 368 | 369 | // https://learnopengl.com/Advanced-OpenGL/Blending 370 | diffuseColor = textureColor * textureColor.a + diffuseColor * (1.0 - textureColor.a); 371 | } 372 | ` 373 | }); 374 | }; // Listen on resize if the camera used for the projection 375 | // is the same used to render. 376 | // We do this on window resize because there is no way to 377 | // listen for the resize of the renderer 378 | 379 | 380 | window.addEventListener('resize', _classPrivateFieldLooseBase(this, _saveCameraProjectionMatrix)[_saveCameraProjectionMatrix]); // If the image texture passed hasn't loaded yet, 381 | // wait for it to load and compute the correct proportions. 
382 | // This avoids rendering black while the texture is loading 383 | 384 | addLoadListener(texture, () => { 385 | this.uniforms.isTextureLoaded.value = true; 386 | 387 | _classPrivateFieldLooseBase(this, _saveDimensions)[_saveDimensions](); 388 | }); 389 | } 390 | 391 | project(mesh) { 392 | if (!(Array.isArray(mesh.material) ? mesh.material.every(m => m.isProjectedMaterial) : mesh.material.isProjectedMaterial)) { 393 | throw new Error(`The mesh material must be a ProjectedMaterial`); 394 | } 395 | 396 | if (!(Array.isArray(mesh.material) ? mesh.material.some(m => m === this) : mesh.material === this)) { 397 | throw new Error(`The provided mesh doesn't have the same material as where project() has been called from`); 398 | } // make sure the matrix is updated 399 | 400 | 401 | mesh.updateWorldMatrix(true, false); // we save the object model matrix so it's projected relative 402 | // to that position, like a snapshot 403 | 404 | this.uniforms.savedModelMatrix.value.copy(mesh.matrixWorld); // if the material is not the first, output just the texture 405 | 406 | if (Array.isArray(mesh.material)) { 407 | const materialIndex = mesh.material.indexOf(this); 408 | 409 | if (!mesh.material[materialIndex].transparent) { 410 | throw new Error(`You have to pass "transparent: true" to the ProjectedMaterial if you're working with multiple materials.`); 411 | } 412 | 413 | if (materialIndex > 0) { 414 | this.uniforms.backgroundOpacity.value = 0; 415 | } 416 | } // persist also the current camera position and matrices 417 | 418 | 419 | _classPrivateFieldLooseBase(this, _saveCameraMatrices)[_saveCameraMatrices](); 420 | } 421 | 422 | projectInstanceAt(index, instancedMesh, matrixWorld, _temp2) { 423 | let { 424 | forceCameraSave = false 425 | } = _temp2 === void 0 ? {} : _temp2; 426 | 427 | if (!instancedMesh.isInstancedMesh) { 428 | throw new Error(`The provided mesh is not an InstancedMesh`); 429 | } 430 | 431 | if (!(Array.isArray(instancedMesh.material) ? 
instancedMesh.material.every(m => m.isProjectedMaterial) : instancedMesh.material.isProjectedMaterial)) { 432 | throw new Error(`The InstancedMesh material must be a ProjectedMaterial`); 433 | } 434 | 435 | if (!(Array.isArray(instancedMesh.material) ? instancedMesh.material.some(m => m === this) : instancedMesh.material === this)) { 436 | throw new Error(`The provided InstancedMesh doesn't have the same material as where project() has been called from`); 437 | } 438 | 439 | if (!instancedMesh.geometry.attributes[`savedModelMatrix0`] || !instancedMesh.geometry.attributes[`savedModelMatrix1`] || !instancedMesh.geometry.attributes[`savedModelMatrix2`] || !instancedMesh.geometry.attributes[`savedModelMatrix3`]) { 440 | throw new Error(`No allocated data found on the geometry, please call 'allocateProjectionData(geometry, instancesCount)'`); 441 | } 442 | 443 | instancedMesh.geometry.attributes[`savedModelMatrix0`].setXYZW(index, matrixWorld.elements[0], matrixWorld.elements[1], matrixWorld.elements[2], matrixWorld.elements[3]); 444 | instancedMesh.geometry.attributes[`savedModelMatrix1`].setXYZW(index, matrixWorld.elements[4], matrixWorld.elements[5], matrixWorld.elements[6], matrixWorld.elements[7]); 445 | instancedMesh.geometry.attributes[`savedModelMatrix2`].setXYZW(index, matrixWorld.elements[8], matrixWorld.elements[9], matrixWorld.elements[10], matrixWorld.elements[11]); 446 | instancedMesh.geometry.attributes[`savedModelMatrix3`].setXYZW(index, matrixWorld.elements[12], matrixWorld.elements[13], matrixWorld.elements[14], matrixWorld.elements[15]); // if the material is not the first, output just the texture 447 | 448 | if (Array.isArray(instancedMesh.material)) { 449 | const materialIndex = instancedMesh.material.indexOf(this); 450 | 451 | if (!instancedMesh.material[materialIndex].transparent) { 452 | throw new Error(`You have to pass "transparent: true" to the ProjectedMaterial if you're working with multiple materials.`); 453 | } 454 | 455 | if 
(materialIndex > 0) { 456 | this.uniforms.backgroundOpacity.value = 0; 457 | } 458 | } // persist the current camera position and matrices 459 | // only if it's the first instance since most surely 460 | // in all other instances the camera won't change 461 | 462 | 463 | if (index === 0 || forceCameraSave) { 464 | _classPrivateFieldLooseBase(this, _saveCameraMatrices)[_saveCameraMatrices](); 465 | } 466 | } 467 | 468 | copy(source) { 469 | super.copy(source); 470 | this.camera = source.camera; 471 | this.texture = source.texture; 472 | this.textureScale = source.textureScale; 473 | this.textureOffset = source.textureOffset; 474 | this.cover = source.cover; 475 | return this; 476 | } 477 | 478 | dispose() { 479 | super.dispose(); 480 | window.removeEventListener('resize', _classPrivateFieldLooseBase(this, _saveCameraProjectionMatrix)[_saveCameraProjectionMatrix]); 481 | } 482 | 483 | } // get camera ratio from different types of cameras 484 | 485 | function _saveDimensions2() { 486 | const [widthScaled, heightScaled] = computeScaledDimensions(this.texture, this.camera, this.textureScale, this.cover); 487 | this.uniforms.widthScaled.value = widthScaled; 488 | this.uniforms.heightScaled.value = heightScaled; 489 | } 490 | 491 | function _saveCameraMatrices2() { 492 | // make sure the camera matrices are updated 493 | this.camera.updateProjectionMatrix(); 494 | this.camera.updateMatrixWorld(); 495 | this.camera.updateWorldMatrix(); // update the uniforms from the camera so they're 496 | // fixed in the camera's position at the projection time 497 | 498 | const viewMatrixCamera = this.camera.matrixWorldInverse; 499 | const projectionMatrixCamera = this.camera.projectionMatrix; 500 | const modelMatrixCamera = this.camera.matrixWorld; 501 | this.uniforms.viewMatrixCamera.value.copy(viewMatrixCamera); 502 | this.uniforms.projectionMatrixCamera.value.copy(projectionMatrixCamera); 503 | this.uniforms.projPosition.value.copy(this.camera.position); 504 | 
this.uniforms.projDirection.value.set(0, 0, 1).applyMatrix4(modelMatrixCamera); // tell the shader we've projected 505 | 506 | this.uniforms.isTextureProjected.value = true; 507 | } 508 | 509 | function getCameraRatio(camera) { 510 | switch (camera.type) { 511 | case 'PerspectiveCamera': 512 | { 513 | return camera.aspect; 514 | } 515 | 516 | case 'OrthographicCamera': 517 | { 518 | const width = Math.abs(camera.right - camera.left); 519 | const height = Math.abs(camera.top - camera.bottom); 520 | return width / height; 521 | } 522 | 523 | default: 524 | { 525 | throw new Error(`${camera.type} is currently not supported in ProjectedMaterial`); 526 | } 527 | } 528 | } // scale to keep the image proportions and apply textureScale 529 | 530 | 531 | function computeScaledDimensions(texture, camera, textureScale, cover) { 532 | // return some default values if the image hasn't loaded yet 533 | if (!texture.image) { 534 | return [1, 1]; 535 | } // return if it's a video and if the video hasn't loaded yet 536 | 537 | 538 | if (texture.image.videoWidth === 0 && texture.image.videoHeight === 0) { 539 | return [1, 1]; 540 | } 541 | 542 | const sourceWidth = texture.image.naturalWidth || texture.image.videoWidth || texture.image.clientWidth; 543 | const sourceHeight = texture.image.naturalHeight || texture.image.videoHeight || texture.image.clientHeight; 544 | const ratio = sourceWidth / sourceHeight; 545 | const ratioCamera = getCameraRatio(camera); 546 | const widthCamera = 1; 547 | const heightCamera = widthCamera * (1 / ratioCamera); 548 | let widthScaled; 549 | let heightScaled; 550 | 551 | if (cover ? 
ratio > ratioCamera : ratio < ratioCamera) { 552 | const width = heightCamera * ratio; 553 | widthScaled = 1 / (width / widthCamera * textureScale); 554 | heightScaled = 1 / textureScale; 555 | } else { 556 | const height = widthCamera * (1 / ratio); 557 | heightScaled = 1 / (height / heightCamera * textureScale); 558 | widthScaled = 1 / textureScale; 559 | } 560 | 561 | return [widthScaled, heightScaled]; 562 | } 563 | 564 | function allocateProjectionData(geometry, instancesCount) { 565 | geometry.setAttribute(`savedModelMatrix0`, new THREE__namespace.InstancedBufferAttribute(new Float32Array(instancesCount * 4), 4)); 566 | geometry.setAttribute(`savedModelMatrix1`, new THREE__namespace.InstancedBufferAttribute(new Float32Array(instancesCount * 4), 4)); 567 | geometry.setAttribute(`savedModelMatrix2`, new THREE__namespace.InstancedBufferAttribute(new Float32Array(instancesCount * 4), 4)); 568 | geometry.setAttribute(`savedModelMatrix3`, new THREE__namespace.InstancedBufferAttribute(new Float32Array(instancesCount * 4), 4)); 569 | } 570 | 571 | exports.allocateProjectionData = allocateProjectionData; 572 | exports["default"] = ProjectedMaterial; 573 | 574 | Object.defineProperty(exports, '__esModule', { value: true }); 575 | 576 | })); 577 | -------------------------------------------------------------------------------- /build/ProjectedMaterial.module.js: -------------------------------------------------------------------------------- 1 | import * as THREE from 'three'; 2 | 3 | var id = 0; 4 | 5 | function _classPrivateFieldLooseKey(name) { 6 | return "__private_" + id++ + "_" + name; 7 | } 8 | 9 | function _classPrivateFieldLooseBase(receiver, privateKey) { 10 | if (!Object.prototype.hasOwnProperty.call(receiver, privateKey)) { 11 | throw new TypeError("attempted to use private field on non-instance"); 12 | } 13 | 14 | return receiver; 15 | } 16 | 17 | function monkeyPatch(shader, _ref) { 18 | let { 19 | defines = '', 20 | header = '', 21 | main = '', 22 | 
...replaces 23 | } = _ref; 24 | let patchedShader = shader; 25 | 26 | const replaceAll = (str, find, rep) => str.split(find).join(rep); 27 | 28 | Object.keys(replaces).forEach(key => { 29 | patchedShader = replaceAll(patchedShader, key, replaces[key]); 30 | }); 31 | patchedShader = patchedShader.replace('void main() {', ` 32 | ${header} 33 | void main() { 34 | ${main} 35 | `); 36 | const stringDefines = Object.keys(defines).map(d => `#define ${d} ${defines[d]}`).join('\n'); 37 | return ` 38 | ${stringDefines} 39 | ${patchedShader} 40 | `; 41 | } // run the callback when the image will be loaded 42 | 43 | function addLoadListener(texture, callback) { 44 | // return if it's already loaded 45 | if (texture.image && texture.image.videoWidth !== 0 && texture.image.videoHeight !== 0) { 46 | return; 47 | } 48 | 49 | const interval = setInterval(() => { 50 | if (texture.image && texture.image.videoWidth !== 0 && texture.image.videoHeight !== 0) { 51 | clearInterval(interval); 52 | return callback(texture); 53 | } 54 | }, 16); 55 | } 56 | 57 | var _camera = /*#__PURE__*/_classPrivateFieldLooseKey("camera"); 58 | 59 | var _cover = /*#__PURE__*/_classPrivateFieldLooseKey("cover"); 60 | 61 | var _textureScale = /*#__PURE__*/_classPrivateFieldLooseKey("textureScale"); 62 | 63 | var _saveCameraProjectionMatrix = /*#__PURE__*/_classPrivateFieldLooseKey("saveCameraProjectionMatrix"); 64 | 65 | var _saveDimensions = /*#__PURE__*/_classPrivateFieldLooseKey("saveDimensions"); 66 | 67 | var _saveCameraMatrices = /*#__PURE__*/_classPrivateFieldLooseKey("saveCameraMatrices"); 68 | 69 | class ProjectedMaterial extends THREE.MeshPhysicalMaterial { 70 | // internal values... 
they are exposed via getters 71 | get camera() { 72 | return _classPrivateFieldLooseBase(this, _camera)[_camera]; 73 | } 74 | 75 | set camera(camera) { 76 | if (!camera || !camera.isCamera) { 77 | throw new Error('Invalid camera set to the ProjectedMaterial'); 78 | } 79 | 80 | _classPrivateFieldLooseBase(this, _camera)[_camera] = camera; 81 | 82 | _classPrivateFieldLooseBase(this, _saveDimensions)[_saveDimensions](); 83 | } 84 | 85 | get texture() { 86 | return this.uniforms.projectedTexture.value; 87 | } 88 | 89 | set texture(texture) { 90 | if (!(texture != null && texture.isTexture)) { 91 | throw new Error('Invalid texture set to the ProjectedMaterial'); 92 | } 93 | 94 | this.uniforms.projectedTexture.value = texture; 95 | this.uniforms.isTextureLoaded.value = Boolean(texture.image); 96 | 97 | if (!this.uniforms.isTextureLoaded.value) { 98 | addLoadListener(texture, () => { 99 | this.uniforms.isTextureLoaded.value = true; 100 | 101 | _classPrivateFieldLooseBase(this, _saveDimensions)[_saveDimensions](); 102 | }); 103 | } else { 104 | _classPrivateFieldLooseBase(this, _saveDimensions)[_saveDimensions](); 105 | } 106 | } 107 | 108 | get textureScale() { 109 | return _classPrivateFieldLooseBase(this, _textureScale)[_textureScale]; 110 | } 111 | 112 | set textureScale(textureScale) { 113 | _classPrivateFieldLooseBase(this, _textureScale)[_textureScale] = textureScale; 114 | 115 | _classPrivateFieldLooseBase(this, _saveDimensions)[_saveDimensions](); 116 | } 117 | 118 | get textureOffset() { 119 | return this.uniforms.textureOffset.value; 120 | } 121 | 122 | set textureOffset(textureOffset) { 123 | this.uniforms.textureOffset.value = textureOffset; 124 | } 125 | 126 | get cover() { 127 | return _classPrivateFieldLooseBase(this, _cover)[_cover]; 128 | } 129 | 130 | set cover(cover) { 131 | _classPrivateFieldLooseBase(this, _cover)[_cover] = cover; 132 | 133 | _classPrivateFieldLooseBase(this, _saveDimensions)[_saveDimensions](); 134 | } 135 | 136 | constructor(_temp) 
{ 137 | let { 138 | camera = new THREE.PerspectiveCamera(), 139 | texture = new THREE.Texture(), 140 | textureScale = 1, 141 | textureOffset = new THREE.Vector2(), 142 | cover = false, 143 | ...options 144 | } = _temp === void 0 ? {} : _temp; 145 | 146 | if (!texture.isTexture) { 147 | throw new Error('Invalid texture passed to the ProjectedMaterial'); 148 | } 149 | 150 | if (!camera.isCamera) { 151 | throw new Error('Invalid camera passed to the ProjectedMaterial'); 152 | } 153 | 154 | super(options); 155 | Object.defineProperty(this, _saveCameraMatrices, { 156 | value: _saveCameraMatrices2 157 | }); 158 | Object.defineProperty(this, _saveDimensions, { 159 | value: _saveDimensions2 160 | }); 161 | Object.defineProperty(this, _camera, { 162 | writable: true, 163 | value: void 0 164 | }); 165 | Object.defineProperty(this, _cover, { 166 | writable: true, 167 | value: void 0 168 | }); 169 | Object.defineProperty(this, _textureScale, { 170 | writable: true, 171 | value: void 0 172 | }); 173 | Object.defineProperty(this, _saveCameraProjectionMatrix, { 174 | writable: true, 175 | value: () => { 176 | this.uniforms.projectionMatrixCamera.value.copy(this.camera.projectionMatrix); 177 | 178 | _classPrivateFieldLooseBase(this, _saveDimensions)[_saveDimensions](); 179 | } 180 | }); 181 | Object.defineProperty(this, 'isProjectedMaterial', { 182 | value: true 183 | }); // save the private variables 184 | 185 | _classPrivateFieldLooseBase(this, _camera)[_camera] = camera; 186 | _classPrivateFieldLooseBase(this, _cover)[_cover] = cover; 187 | _classPrivateFieldLooseBase(this, _textureScale)[_textureScale] = textureScale; // scale to keep the image proportions and apply textureScale 188 | 189 | const [_widthScaled, _heightScaled] = computeScaledDimensions(texture, camera, textureScale, cover); 190 | this.uniforms = { 191 | projectedTexture: { 192 | value: texture 193 | }, 194 | // this avoids rendering black if the texture 195 | // hasn't loaded yet 196 | isTextureLoaded: { 197 | 
value: Boolean(texture.image) 198 | }, 199 | // don't show the texture if we haven't called project() 200 | isTextureProjected: { 201 | value: false 202 | }, 203 | // if we have multiple materials we want to show the 204 | // background only of the first material 205 | backgroundOpacity: { 206 | value: 1 207 | }, 208 | // these will be set on project() 209 | viewMatrixCamera: { 210 | value: new THREE.Matrix4() 211 | }, 212 | projectionMatrixCamera: { 213 | value: new THREE.Matrix4() 214 | }, 215 | projPosition: { 216 | value: new THREE.Vector3() 217 | }, 218 | projDirection: { 219 | value: new THREE.Vector3(0, 0, -1) 220 | }, 221 | // we will set this later when we will have positioned the object 222 | savedModelMatrix: { 223 | value: new THREE.Matrix4() 224 | }, 225 | widthScaled: { 226 | value: _widthScaled 227 | }, 228 | heightScaled: { 229 | value: _heightScaled 230 | }, 231 | textureOffset: { 232 | value: textureOffset 233 | } 234 | }; 235 | 236 | this.onBeforeCompile = shader => { 237 | // expose also the material's uniforms 238 | Object.assign(this.uniforms, shader.uniforms); 239 | shader.uniforms = this.uniforms; 240 | 241 | if (this.camera.isOrthographicCamera) { 242 | shader.defines.ORTHOGRAPHIC = ''; 243 | } 244 | 245 | shader.vertexShader = monkeyPatch(shader.vertexShader, { 246 | header: 247 | /* glsl */ 248 | ` 249 | uniform mat4 viewMatrixCamera; 250 | uniform mat4 projectionMatrixCamera; 251 | 252 | #ifdef USE_INSTANCING 253 | attribute vec4 savedModelMatrix0; 254 | attribute vec4 savedModelMatrix1; 255 | attribute vec4 savedModelMatrix2; 256 | attribute vec4 savedModelMatrix3; 257 | #else 258 | uniform mat4 savedModelMatrix; 259 | #endif 260 | 261 | varying vec3 vSavedNormal; 262 | varying vec4 vTexCoords; 263 | #ifndef ORTHOGRAPHIC 264 | varying vec4 vWorldPosition; 265 | #endif 266 | `, 267 | main: 268 | /* glsl */ 269 | ` 270 | #ifdef USE_INSTANCING 271 | mat4 savedModelMatrix = mat4( 272 | savedModelMatrix0, 273 | savedModelMatrix1, 274 | 
savedModelMatrix2, 275 | savedModelMatrix3 276 | ); 277 | #endif 278 | 279 | vSavedNormal = mat3(savedModelMatrix) * normal; 280 | vTexCoords = projectionMatrixCamera * viewMatrixCamera * savedModelMatrix * vec4(position, 1.0); 281 | #ifndef ORTHOGRAPHIC 282 | vWorldPosition = savedModelMatrix * vec4(position, 1.0); 283 | #endif 284 | ` 285 | }); 286 | shader.fragmentShader = monkeyPatch(shader.fragmentShader, { 287 | header: 288 | /* glsl */ 289 | ` 290 | uniform sampler2D projectedTexture; 291 | uniform bool isTextureLoaded; 292 | uniform bool isTextureProjected; 293 | uniform float backgroundOpacity; 294 | uniform vec3 projPosition; 295 | uniform vec3 projDirection; 296 | uniform float widthScaled; 297 | uniform float heightScaled; 298 | uniform vec2 textureOffset; 299 | 300 | varying vec3 vSavedNormal; 301 | varying vec4 vTexCoords; 302 | #ifndef ORTHOGRAPHIC 303 | varying vec4 vWorldPosition; 304 | #endif 305 | 306 | float mapRange(float value, float min1, float max1, float min2, float max2) { 307 | return min2 + (value - min1) * (max2 - min2) / (max1 - min1); 308 | } 309 | `, 310 | 'vec4 diffuseColor = vec4( diffuse, opacity );': 311 | /* glsl */ 312 | ` 313 | // clamp the w to make sure we don't project behind 314 | float w = max(vTexCoords.w, 0.0); 315 | 316 | vec2 uv = (vTexCoords.xy / w) * 0.5 + 0.5; 317 | 318 | uv += textureOffset; 319 | 320 | // apply the corrected width and height 321 | uv.x = mapRange(uv.x, 0.0, 1.0, 0.5 - widthScaled / 2.0, 0.5 + widthScaled / 2.0); 322 | uv.y = mapRange(uv.y, 0.0, 1.0, 0.5 - heightScaled / 2.0, 0.5 + heightScaled / 2.0); 323 | 324 | // this makes sure we don't sample out of the texture 325 | bool isInTexture = (max(uv.x, uv.y) <= 1.0 && min(uv.x, uv.y) >= 0.0); 326 | 327 | // this makes sure we don't render also the back of the object 328 | #ifdef ORTHOGRAPHIC 329 | vec3 projectorDirection = projDirection; 330 | #else 331 | vec3 projectorDirection = normalize(projPosition - vWorldPosition.xyz); 332 | #endif 333 | 
float dotProduct = dot(vSavedNormal, projectorDirection); 334 | bool isFacingProjector = dotProduct > 0.0000001; 335 | 336 | 337 | vec4 diffuseColor = vec4(diffuse, opacity * backgroundOpacity); 338 | 339 | if (isFacingProjector && isInTexture && isTextureLoaded && isTextureProjected) { 340 | vec4 textureColor = texture2D(projectedTexture, uv); 341 | 342 | // apply the material opacity 343 | textureColor.a *= opacity; 344 | 345 | // https://learnopengl.com/Advanced-OpenGL/Blending 346 | diffuseColor = textureColor * textureColor.a + diffuseColor * (1.0 - textureColor.a); 347 | } 348 | ` 349 | }); 350 | }; // Listen on resize if the camera used for the projection 351 | // is the same used to render. 352 | // We do this on window resize because there is no way to 353 | // listen for the resize of the renderer 354 | 355 | 356 | window.addEventListener('resize', _classPrivateFieldLooseBase(this, _saveCameraProjectionMatrix)[_saveCameraProjectionMatrix]); // If the image texture passed hasn't loaded yet, 357 | // wait for it to load and compute the correct proportions. 358 | // This avoids rendering black while the texture is loading 359 | 360 | addLoadListener(texture, () => { 361 | this.uniforms.isTextureLoaded.value = true; 362 | 363 | _classPrivateFieldLooseBase(this, _saveDimensions)[_saveDimensions](); 364 | }); 365 | } 366 | 367 | project(mesh) { 368 | if (!(Array.isArray(mesh.material) ? mesh.material.every(m => m.isProjectedMaterial) : mesh.material.isProjectedMaterial)) { 369 | throw new Error(`The mesh material must be a ProjectedMaterial`); 370 | } 371 | 372 | if (!(Array.isArray(mesh.material) ? 
mesh.material.some(m => m === this) : mesh.material === this)) { 373 | throw new Error(`The provided mesh doesn't have the same material as where project() has been called from`); 374 | } // make sure the matrix is updated 375 | 376 | 377 | mesh.updateWorldMatrix(true, false); // we save the object model matrix so it's projected relative 378 | // to that position, like a snapshot 379 | 380 | this.uniforms.savedModelMatrix.value.copy(mesh.matrixWorld); // if the material is not the first, output just the texture 381 | 382 | if (Array.isArray(mesh.material)) { 383 | const materialIndex = mesh.material.indexOf(this); 384 | 385 | if (!mesh.material[materialIndex].transparent) { 386 | throw new Error(`You have to pass "transparent: true" to the ProjectedMaterial if you're working with multiple materials.`); 387 | } 388 | 389 | if (materialIndex > 0) { 390 | this.uniforms.backgroundOpacity.value = 0; 391 | } 392 | } // persist also the current camera position and matrices 393 | 394 | 395 | _classPrivateFieldLooseBase(this, _saveCameraMatrices)[_saveCameraMatrices](); 396 | } 397 | 398 | projectInstanceAt(index, instancedMesh, matrixWorld, _temp2) { 399 | let { 400 | forceCameraSave = false 401 | } = _temp2 === void 0 ? {} : _temp2; 402 | 403 | if (!instancedMesh.isInstancedMesh) { 404 | throw new Error(`The provided mesh is not an InstancedMesh`); 405 | } 406 | 407 | if (!(Array.isArray(instancedMesh.material) ? instancedMesh.material.every(m => m.isProjectedMaterial) : instancedMesh.material.isProjectedMaterial)) { 408 | throw new Error(`The InstancedMesh material must be a ProjectedMaterial`); 409 | } 410 | 411 | if (!(Array.isArray(instancedMesh.material) ? 
instancedMesh.material.some(m => m === this) : instancedMesh.material === this)) { 412 | throw new Error(`The provided InstancedMeshhave't i samenclude thas e material where project() has been called from`); 413 | } 414 | 415 | if (!instancedMesh.geometry.attributes[`savedModelMatrix0`] || !instancedMesh.geometry.attributes[`savedModelMatrix1`] || !instancedMesh.geometry.attributes[`savedModelMatrix2`] || !instancedMesh.geometry.attributes[`savedModelMatrix3`]) { 416 | throw new Error(`No allocated data found on the geometry, please call 'allocateProjectionData(geometry, instancesCount)'`); 417 | } 418 | 419 | instancedMesh.geometry.attributes[`savedModelMatrix0`].setXYZW(index, matrixWorld.elements[0], matrixWorld.elements[1], matrixWorld.elements[2], matrixWorld.elements[3]); 420 | instancedMesh.geometry.attributes[`savedModelMatrix1`].setXYZW(index, matrixWorld.elements[4], matrixWorld.elements[5], matrixWorld.elements[6], matrixWorld.elements[7]); 421 | instancedMesh.geometry.attributes[`savedModelMatrix2`].setXYZW(index, matrixWorld.elements[8], matrixWorld.elements[9], matrixWorld.elements[10], matrixWorld.elements[11]); 422 | instancedMesh.geometry.attributes[`savedModelMatrix3`].setXYZW(index, matrixWorld.elements[12], matrixWorld.elements[13], matrixWorld.elements[14], matrixWorld.elements[15]); // if the material is not the first, output just the texture 423 | 424 | if (Array.isArray(instancedMesh.material)) { 425 | const materialIndex = instancedMesh.material.indexOf(this); 426 | 427 | if (!instancedMesh.material[materialIndex].transparent) { 428 | throw new Error(`You have to pass "transparent: true" to the ProjectedMaterial if you're working with multiple materials.`); 429 | } 430 | 431 | if (materialIndex > 0) { 432 | this.uniforms.backgroundOpacity.value = 0; 433 | } 434 | } // persist the current camera position and matrices 435 | // only if it's the first instance since most surely 436 | // in all other instances the camera won't change 437 | 438 | 
439 | if (index === 0 || forceCameraSave) { 440 | _classPrivateFieldLooseBase(this, _saveCameraMatrices)[_saveCameraMatrices](); 441 | } 442 | } 443 | 444 | copy(source) { 445 | super.copy(source); 446 | this.camera = source.camera; 447 | this.texture = source.texture; 448 | this.textureScale = source.textureScale; 449 | this.textureOffset = source.textureOffset; 450 | this.cover = source.cover; 451 | return this; 452 | } 453 | 454 | dispose() { 455 | super.dispose(); 456 | window.removeEventListener('resize', _classPrivateFieldLooseBase(this, _saveCameraProjectionMatrix)[_saveCameraProjectionMatrix]); 457 | } 458 | 459 | } // get camera ratio from different types of cameras 460 | 461 | function _saveDimensions2() { 462 | const [widthScaled, heightScaled] = computeScaledDimensions(this.texture, this.camera, this.textureScale, this.cover); 463 | this.uniforms.widthScaled.value = widthScaled; 464 | this.uniforms.heightScaled.value = heightScaled; 465 | } 466 | 467 | function _saveCameraMatrices2() { 468 | // make sure the camera matrices are updated 469 | this.camera.updateProjectionMatrix(); 470 | this.camera.updateMatrixWorld(); 471 | this.camera.updateWorldMatrix(); // update the uniforms from the camera so they're 472 | // fixed in the camera's position at the projection time 473 | 474 | const viewMatrixCamera = this.camera.matrixWorldInverse; 475 | const projectionMatrixCamera = this.camera.projectionMatrix; 476 | const modelMatrixCamera = this.camera.matrixWorld; 477 | this.uniforms.viewMatrixCamera.value.copy(viewMatrixCamera); 478 | this.uniforms.projectionMatrixCamera.value.copy(projectionMatrixCamera); 479 | this.uniforms.projPosition.value.copy(this.camera.position); 480 | this.uniforms.projDirection.value.set(0, 0, 1).applyMatrix4(modelMatrixCamera); // tell the shader we've projected 481 | 482 | this.uniforms.isTextureProjected.value = true; 483 | } 484 | 485 | function getCameraRatio(camera) { 486 | switch (camera.type) { 487 | case 'PerspectiveCamera': 
488 | { 489 | return camera.aspect; 490 | } 491 | 492 | case 'OrthographicCamera': 493 | { 494 | const width = Math.abs(camera.right - camera.left); 495 | const height = Math.abs(camera.top - camera.bottom); 496 | return width / height; 497 | } 498 | 499 | default: 500 | { 501 | throw new Error(`${camera.type} is currently not supported in ProjectedMaterial`); 502 | } 503 | } 504 | } // scale to keep the image proportions and apply textureScale 505 | 506 | 507 | function computeScaledDimensions(texture, camera, textureScale, cover) { 508 | // return some default values if the image hasn't loaded yet 509 | if (!texture.image) { 510 | return [1, 1]; 511 | } // return if it's a video and if the video hasn't loaded yet 512 | 513 | 514 | if (texture.image.videoWidth === 0 && texture.image.videoHeight === 0) { 515 | return [1, 1]; 516 | } 517 | 518 | const sourceWidth = texture.image.naturalWidth || texture.image.videoWidth || texture.image.clientWidth; 519 | const sourceHeight = texture.image.naturalHeight || texture.image.videoHeight || texture.image.clientHeight; 520 | const ratio = sourceWidth / sourceHeight; 521 | const ratioCamera = getCameraRatio(camera); 522 | const widthCamera = 1; 523 | const heightCamera = widthCamera * (1 / ratioCamera); 524 | let widthScaled; 525 | let heightScaled; 526 | 527 | if (cover ? 
ratio > ratioCamera : ratio < ratioCamera) { 528 | const width = heightCamera * ratio; 529 | widthScaled = 1 / (width / widthCamera * textureScale); 530 | heightScaled = 1 / textureScale; 531 | } else { 532 | const height = widthCamera * (1 / ratio); 533 | heightScaled = 1 / (height / heightCamera * textureScale); 534 | widthScaled = 1 / textureScale; 535 | } 536 | 537 | return [widthScaled, heightScaled]; 538 | } 539 | 540 | function allocateProjectionData(geometry, instancesCount) { 541 | geometry.setAttribute(`savedModelMatrix0`, new THREE.InstancedBufferAttribute(new Float32Array(instancesCount * 4), 4)); 542 | geometry.setAttribute(`savedModelMatrix1`, new THREE.InstancedBufferAttribute(new Float32Array(instancesCount * 4), 4)); 543 | geometry.setAttribute(`savedModelMatrix2`, new THREE.InstancedBufferAttribute(new Float32Array(instancesCount * 4), 4)); 544 | geometry.setAttribute(`savedModelMatrix3`, new THREE.InstancedBufferAttribute(new Float32Array(instancesCount * 4), 4)); 545 | } 546 | 547 | export { allocateProjectionData, ProjectedMaterial as default }; 548 | -------------------------------------------------------------------------------- /examples/3d-model.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 3D Model example - three-projected-material 7 | 8 | 9 | 10 | 11 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 31 | 32 | 100 | 101 | 102 | -------------------------------------------------------------------------------- /examples/basic.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | Basic example - three-projected-material 7 | 8 | 9 | 10 | 11 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 31 | 32 | 92 | 93 | 94 | -------------------------------------------------------------------------------- /examples/css/style.css: -------------------------------------------------------------------------------- 1 | body { 2 | margin: 0; 3 | padding: 0; 
4 | } 5 | 6 | .source-fab { 7 | display: block; 8 | position: fixed; 9 | bottom: 1.5rem; 10 | right: 1.5rem; 11 | padding: 0.75rem; 12 | border-radius: 50%; 13 | background-color: #f1f1f1; 14 | z-index: 999; 15 | box-shadow: 0 3px 5px rgba(0, 0, 0, 0.2); 16 | cursor: pointer; 17 | } 18 | .source-fab img { 19 | display: block; 20 | width: 20px; 21 | } 22 | 23 | canvas { 24 | outline: none; 25 | display: block; 26 | } 27 | 28 | .title { 29 | position: fixed; 30 | top: 0; 31 | left: 50%; 32 | transform: translateX(-50%); 33 | max-width: 90vw; 34 | font-family: monospace; 35 | text-align: center; 36 | } 37 | 38 | .title a { 39 | color: #05f; 40 | } 41 | 42 | .title.white { 43 | color: white; 44 | } 45 | .title.white a { 46 | color: #0f0; 47 | } 48 | -------------------------------------------------------------------------------- /examples/envmap.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | Envmap example - three-projected-material 7 | 8 | 9 | 10 | 11 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 31 | 32 | 94 | 95 | 96 | -------------------------------------------------------------------------------- /examples/images/bigbucksbunny.mp4: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/wb-ts/three-js-examples/b59e795c353df9c24b635e2718add0106d415f3a/examples/images/bigbucksbunny.mp4 -------------------------------------------------------------------------------- /examples/images/black-spot.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/wb-ts/three-js-examples/b59e795c353df9c24b635e2718add0106d415f3a/examples/images/black-spot.png -------------------------------------------------------------------------------- /examples/images/charles-unsplash.jpg: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/wb-ts/three-js-examples/b59e795c353df9c24b635e2718add0106d415f3a/examples/images/charles-unsplash.jpg -------------------------------------------------------------------------------- /examples/images/kandao3_blurred.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/wb-ts/three-js-examples/b59e795c353df9c24b635e2718add0106d415f3a/examples/images/kandao3_blurred.jpg -------------------------------------------------------------------------------- /examples/images/lukasz-szmigiel-unsplash.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/wb-ts/three-js-examples/b59e795c353df9c24b635e2718add0106d415f3a/examples/images/lukasz-szmigiel-unsplash.jpg -------------------------------------------------------------------------------- /examples/images/source.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | -------------------------------------------------------------------------------- /examples/images/three-projected-material-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/wb-ts/three-js-examples/b59e795c353df9c24b635e2718add0106d415f3a/examples/images/three-projected-material-1.png -------------------------------------------------------------------------------- /examples/images/three-projected-material-2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/wb-ts/three-js-examples/b59e795c353df9c24b635e2718add0106d415f3a/examples/images/three-projected-material-2.png -------------------------------------------------------------------------------- /examples/images/three-projected-material-3.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/wb-ts/three-js-examples/b59e795c353df9c24b635e2718add0106d415f3a/examples/images/three-projected-material-3.png -------------------------------------------------------------------------------- /examples/images/three-projected-material-4.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/wb-ts/three-js-examples/b59e795c353df9c24b635e2718add0106d415f3a/examples/images/three-projected-material-4.png -------------------------------------------------------------------------------- /examples/images/three-projected-material-5.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/wb-ts/three-js-examples/b59e795c353df9c24b635e2718add0106d415f3a/examples/images/three-projected-material-5.png -------------------------------------------------------------------------------- /examples/images/three-projected-material-6.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/wb-ts/three-js-examples/b59e795c353df9c24b635e2718add0106d415f3a/examples/images/three-projected-material-6.png -------------------------------------------------------------------------------- /examples/images/uv.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/wb-ts/three-js-examples/b59e795c353df9c24b635e2718add0106d415f3a/examples/images/uv.jpg -------------------------------------------------------------------------------- /examples/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | three-projected-material 7 | 11 | 12 | 13 | 14 | 132 | 133 | 134 | 135 | 136 | 137 | 144 | 145 | 194 | 195 | 325 | 326 | 327 |
328 | 329 |

three-projected-material

330 |

331 | Designed by Dennis Lee 332 |

333 |
334 |
335 |
336 |
337 | 338 |
339 | 340 |
341 |
342 | 343 | 344 | -------------------------------------------------------------------------------- /examples/instancing.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | Instancing example - three-projected-material 7 | 8 | 9 | 10 | 11 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 31 | 32 | 142 | 143 | 144 | -------------------------------------------------------------------------------- /examples/lib/Controls.js: -------------------------------------------------------------------------------- 1 | import State from './controls-state.module.js' 2 | import wrapGUI from './controls-gui.module.js' 3 | 4 | let controls 5 | 6 | function mapValues(obj, fn) { 7 | return Object.fromEntries(Object.entries(obj).map(([k, v], i) => [k, fn(v, k, i)])) 8 | } 9 | 10 | function fromObjectToSlider(object) { 11 | return State.Slider(object.value, { 12 | min: object.min, 13 | max: object.max, 14 | step: object.step || 0.01, 15 | ...(object.scale === 'exp' && { 16 | min: object.min || 0.01, 17 | mapping: (x) => Math.pow(10, x), 18 | inverseMapping: Math.log10, 19 | }), 20 | }) 21 | } 22 | 23 | export function initControls(object, options = {}) { 24 | const stateObject = mapValues(object, (value) => { 25 | if ( 26 | typeof value === 'object' && 27 | (value.hasOwnProperty('value') || 28 | value.hasOwnProperty('max') || 29 | value.hasOwnProperty('min') || 30 | value.hasOwnProperty('step')) 31 | ) { 32 | return fromObjectToSlider(value) 33 | } 34 | 35 | if (typeof value === 'object') { 36 | return mapValues(value, (v) => { 37 | if ( 38 | typeof v === 'object' && 39 | (v.hasOwnProperty('value') || 40 | v.hasOwnProperty('max') || 41 | v.hasOwnProperty('min') || 42 | v.hasOwnProperty('step')) 43 | ) { 44 | return fromObjectToSlider(v) 45 | } 46 | 47 | return value 48 | }) 49 | } 50 | 51 | return value 52 | }) 53 | 54 | const controlsState = State(stateObject) 55 | const controlsInstance = options.hideControls 56 | ? 
controlsState 57 | : wrapGUI(controlsState, { expanded: !options.closeControls }) 58 | 59 | // add the custom controls-gui styles 60 | if (!options.hideControls) { 61 | const styles = ` 62 | [class^="controlPanel-"] [class*="__field"]::before { 63 | content: initial !important; 64 | } 65 | [class^="controlPanel-"] [class*="__labelText"] { 66 | text-indent: 6px !important; 67 | } 68 | [class^="controlPanel-"] [class*="__field--button"] > button::before { 69 | content: initial !important; 70 | } 71 | ` 72 | const style = document.createElement('style') 73 | style.type = 'text/css' 74 | style.innerHTML = styles 75 | document.head.appendChild(style) 76 | } 77 | 78 | controls = controlsInstance 79 | return controlsInstance 80 | } 81 | 82 | function extractAccessor(fnString) { 83 | if (fnString.slice(-1) === '}') { 84 | fnString = fnString.slice(0, -1) 85 | } 86 | 87 | const accessorStart = fnString.indexOf('.controls.') + '.controls.'.length 88 | fnString = fnString.slice(accessorStart) 89 | 90 | return fnString.trim() 91 | } 92 | 93 | export function wireValue(object, fn) { 94 | const fnString = fn.toString() 95 | const accessor = extractAccessor(fnString) 96 | 97 | controls.$onChanges((cons) => { 98 | if (cons[accessor]) { 99 | object[accessor] = cons[accessor].value 100 | } 101 | }) 102 | 103 | return fn() 104 | } 105 | 106 | export function wireUniform(object, fn) { 107 | const fnString = fn.toString() 108 | const accessor = extractAccessor(fnString) 109 | 110 | const key = accessor.includes('.') ? 
// Babel "loose" private-field brand check: returns `receiver` only if it
// owns the given private key, otherwise throws like a real private-field
// access on a non-instance would.
function _classPrivateFieldLooseBase(receiver, privateKey) {
  const ownsKey = Object.prototype.hasOwnProperty.call(receiver, privateKey);

  if (ownsKey) {
    return receiver;
  }

  throw new TypeError("attempted to use private field on non-instance");
}
callback(texture); 53 | } 54 | }, 16); 55 | } 56 | 57 | var _camera = /*#__PURE__*/_classPrivateFieldLooseKey("camera"); 58 | 59 | var _cover = /*#__PURE__*/_classPrivateFieldLooseKey("cover"); 60 | 61 | var _textureScale = /*#__PURE__*/_classPrivateFieldLooseKey("textureScale"); 62 | 63 | var _saveCameraProjectionMatrix = /*#__PURE__*/_classPrivateFieldLooseKey("saveCameraProjectionMatrix"); 64 | 65 | var _saveDimensions = /*#__PURE__*/_classPrivateFieldLooseKey("saveDimensions"); 66 | 67 | var _saveCameraMatrices = /*#__PURE__*/_classPrivateFieldLooseKey("saveCameraMatrices"); 68 | 69 | class ProjectedMaterial extends THREE.MeshPhysicalMaterial { 70 | // internal values... they are exposed via getters 71 | get camera() { 72 | return _classPrivateFieldLooseBase(this, _camera)[_camera]; 73 | } 74 | 75 | set camera(camera) { 76 | if (!camera || !camera.isCamera) { 77 | throw new Error('Invalid camera set to the ProjectedMaterial'); 78 | } 79 | 80 | _classPrivateFieldLooseBase(this, _camera)[_camera] = camera; 81 | 82 | _classPrivateFieldLooseBase(this, _saveDimensions)[_saveDimensions](); 83 | } 84 | 85 | get texture() { 86 | return this.uniforms.projectedTexture.value; 87 | } 88 | 89 | set texture(texture) { 90 | if (!(texture != null && texture.isTexture)) { 91 | throw new Error('Invalid texture set to the ProjectedMaterial'); 92 | } 93 | 94 | this.uniforms.projectedTexture.value = texture; 95 | this.uniforms.isTextureLoaded.value = Boolean(texture.image); 96 | 97 | if (!this.uniforms.isTextureLoaded.value) { 98 | addLoadListener(texture, () => { 99 | this.uniforms.isTextureLoaded.value = true; 100 | 101 | _classPrivateFieldLooseBase(this, _saveDimensions)[_saveDimensions](); 102 | }); 103 | } else { 104 | _classPrivateFieldLooseBase(this, _saveDimensions)[_saveDimensions](); 105 | } 106 | } 107 | 108 | get textureScale() { 109 | return _classPrivateFieldLooseBase(this, _textureScale)[_textureScale]; 110 | } 111 | 112 | set textureScale(textureScale) { 113 | 
_classPrivateFieldLooseBase(this, _textureScale)[_textureScale] = textureScale; 114 | 115 | _classPrivateFieldLooseBase(this, _saveDimensions)[_saveDimensions](); 116 | } 117 | 118 | get textureOffset() { 119 | return this.uniforms.textureOffset.value; 120 | } 121 | 122 | set textureOffset(textureOffset) { 123 | this.uniforms.textureOffset.value = textureOffset; 124 | } 125 | 126 | get cover() { 127 | return _classPrivateFieldLooseBase(this, _cover)[_cover]; 128 | } 129 | 130 | set cover(cover) { 131 | _classPrivateFieldLooseBase(this, _cover)[_cover] = cover; 132 | 133 | _classPrivateFieldLooseBase(this, _saveDimensions)[_saveDimensions](); 134 | } 135 | 136 | constructor(_temp) { 137 | let { 138 | camera = new THREE.PerspectiveCamera(), 139 | texture = new THREE.Texture(), 140 | textureScale = 1, 141 | textureOffset = new THREE.Vector2(), 142 | cover = false, 143 | ...options 144 | } = _temp === void 0 ? {} : _temp; 145 | 146 | if (!texture.isTexture) { 147 | throw new Error('Invalid texture passed to the ProjectedMaterial'); 148 | } 149 | 150 | if (!camera.isCamera) { 151 | throw new Error('Invalid camera passed to the ProjectedMaterial'); 152 | } 153 | 154 | super(options); 155 | Object.defineProperty(this, _saveCameraMatrices, { 156 | value: _saveCameraMatrices2 157 | }); 158 | Object.defineProperty(this, _saveDimensions, { 159 | value: _saveDimensions2 160 | }); 161 | Object.defineProperty(this, _camera, { 162 | writable: true, 163 | value: void 0 164 | }); 165 | Object.defineProperty(this, _cover, { 166 | writable: true, 167 | value: void 0 168 | }); 169 | Object.defineProperty(this, _textureScale, { 170 | writable: true, 171 | value: void 0 172 | }); 173 | Object.defineProperty(this, _saveCameraProjectionMatrix, { 174 | writable: true, 175 | value: () => { 176 | this.uniforms.projectionMatrixCamera.value.copy(this.camera.projectionMatrix); 177 | 178 | _classPrivateFieldLooseBase(this, _saveDimensions)[_saveDimensions](); 179 | } 180 | }); 181 | 
Object.defineProperty(this, 'isProjectedMaterial', { 182 | value: true 183 | }); // save the private variables 184 | 185 | _classPrivateFieldLooseBase(this, _camera)[_camera] = camera; 186 | _classPrivateFieldLooseBase(this, _cover)[_cover] = cover; 187 | _classPrivateFieldLooseBase(this, _textureScale)[_textureScale] = textureScale; // scale to keep the image proportions and apply textureScale 188 | 189 | const [_widthScaled, _heightScaled] = computeScaledDimensions(texture, camera, textureScale, cover); 190 | this.uniforms = { 191 | projectedTexture: { 192 | value: texture 193 | }, 194 | // this avoids rendering black if the texture 195 | // hasn't loaded yet 196 | isTextureLoaded: { 197 | value: Boolean(texture.image) 198 | }, 199 | // don't show the texture if we haven't called project() 200 | isTextureProjected: { 201 | value: false 202 | }, 203 | // if we have multiple materials we want to show the 204 | // background only of the first material 205 | backgroundOpacity: { 206 | value: 1 207 | }, 208 | // these will be set on project() 209 | viewMatrixCamera: { 210 | value: new THREE.Matrix4() 211 | }, 212 | projectionMatrixCamera: { 213 | value: new THREE.Matrix4() 214 | }, 215 | projPosition: { 216 | value: new THREE.Vector3() 217 | }, 218 | projDirection: { 219 | value: new THREE.Vector3(0, 0, -1) 220 | }, 221 | // we will set this later when we will have positioned the object 222 | savedModelMatrix: { 223 | value: new THREE.Matrix4() 224 | }, 225 | widthScaled: { 226 | value: _widthScaled 227 | }, 228 | heightScaled: { 229 | value: _heightScaled 230 | }, 231 | textureOffset: { 232 | value: textureOffset 233 | } 234 | }; 235 | 236 | this.onBeforeCompile = shader => { 237 | // expose also the material's uniforms 238 | Object.assign(this.uniforms, shader.uniforms); 239 | shader.uniforms = this.uniforms; 240 | 241 | if (this.camera.isOrthographicCamera) { 242 | shader.defines.ORTHOGRAPHIC = ''; 243 | } 244 | 245 | shader.vertexShader = 
monkeyPatch(shader.vertexShader, { 246 | header: 247 | /* glsl */ 248 | ` 249 | uniform mat4 viewMatrixCamera; 250 | uniform mat4 projectionMatrixCamera; 251 | 252 | #ifdef USE_INSTANCING 253 | attribute vec4 savedModelMatrix0; 254 | attribute vec4 savedModelMatrix1; 255 | attribute vec4 savedModelMatrix2; 256 | attribute vec4 savedModelMatrix3; 257 | #else 258 | uniform mat4 savedModelMatrix; 259 | #endif 260 | 261 | varying vec3 vSavedNormal; 262 | varying vec4 vTexCoords; 263 | #ifndef ORTHOGRAPHIC 264 | varying vec4 vWorldPosition; 265 | #endif 266 | `, 267 | main: 268 | /* glsl */ 269 | ` 270 | #ifdef USE_INSTANCING 271 | mat4 savedModelMatrix = mat4( 272 | savedModelMatrix0, 273 | savedModelMatrix1, 274 | savedModelMatrix2, 275 | savedModelMatrix3 276 | ); 277 | #endif 278 | 279 | vSavedNormal = mat3(savedModelMatrix) * normal; 280 | vTexCoords = projectionMatrixCamera * viewMatrixCamera * savedModelMatrix * vec4(position, 1.0); 281 | #ifndef ORTHOGRAPHIC 282 | vWorldPosition = savedModelMatrix * vec4(position, 1.0); 283 | #endif 284 | ` 285 | }); 286 | shader.fragmentShader = monkeyPatch(shader.fragmentShader, { 287 | header: 288 | /* glsl */ 289 | ` 290 | uniform sampler2D projectedTexture; 291 | uniform bool isTextureLoaded; 292 | uniform bool isTextureProjected; 293 | uniform float backgroundOpacity; 294 | uniform vec3 projPosition; 295 | uniform vec3 projDirection; 296 | uniform float widthScaled; 297 | uniform float heightScaled; 298 | uniform vec2 textureOffset; 299 | 300 | varying vec3 vSavedNormal; 301 | varying vec4 vTexCoords; 302 | #ifndef ORTHOGRAPHIC 303 | varying vec4 vWorldPosition; 304 | #endif 305 | 306 | float mapRange(float value, float min1, float max1, float min2, float max2) { 307 | return min2 + (value - min1) * (max2 - min2) / (max1 - min1); 308 | } 309 | `, 310 | 'vec4 diffuseColor = vec4( diffuse, opacity );': 311 | /* glsl */ 312 | ` 313 | // clamp the w to make sure we don't project behind 314 | float w = max(vTexCoords.w, 0.0); 
315 | 316 | vec2 uv = (vTexCoords.xy / w) * 0.5 + 0.5; 317 | 318 | uv += textureOffset; 319 | 320 | // apply the corrected width and height 321 | uv.x = mapRange(uv.x, 0.0, 1.0, 0.5 - widthScaled / 2.0, 0.5 + widthScaled / 2.0); 322 | uv.y = mapRange(uv.y, 0.0, 1.0, 0.5 - heightScaled / 2.0, 0.5 + heightScaled / 2.0); 323 | 324 | // this makes sure we don't sample out of the texture 325 | bool isInTexture = (max(uv.x, uv.y) <= 1.0 && min(uv.x, uv.y) >= 0.0); 326 | 327 | // this makes sure we don't render also the back of the object 328 | #ifdef ORTHOGRAPHIC 329 | vec3 projectorDirection = projDirection; 330 | #else 331 | vec3 projectorDirection = normalize(projPosition - vWorldPosition.xyz); 332 | #endif 333 | float dotProduct = dot(vSavedNormal, projectorDirection); 334 | bool isFacingProjector = dotProduct > 0.0000001; 335 | 336 | 337 | vec4 diffuseColor = vec4(diffuse, opacity * backgroundOpacity); 338 | 339 | if (isFacingProjector && isInTexture && isTextureLoaded && isTextureProjected) { 340 | vec4 textureColor = texture2D(projectedTexture, uv); 341 | 342 | // apply the material opacity 343 | textureColor.a *= opacity; 344 | 345 | // https://learnopengl.com/Advanced-OpenGL/Blending 346 | diffuseColor = textureColor * textureColor.a + diffuseColor * (1.0 - textureColor.a); 347 | } 348 | ` 349 | }); 350 | }; // Listen on resize if the camera used for the projection 351 | // is the same used to render. 352 | // We do this on window resize because there is no way to 353 | // listen for the resize of the renderer 354 | 355 | 356 | window.addEventListener('resize', _classPrivateFieldLooseBase(this, _saveCameraProjectionMatrix)[_saveCameraProjectionMatrix]); // If the image texture passed hasn't loaded yet, 357 | // wait for it to load and compute the correct proportions. 
358 | // This avoids rendering black while the texture is loading 359 | 360 | addLoadListener(texture, () => { 361 | this.uniforms.isTextureLoaded.value = true; 362 | 363 | _classPrivateFieldLooseBase(this, _saveDimensions)[_saveDimensions](); 364 | }); 365 | } 366 | 367 | project(mesh) { 368 | if (!(Array.isArray(mesh.material) ? mesh.material.every(m => m.isProjectedMaterial) : mesh.material.isProjectedMaterial)) { 369 | throw new Error(`The mesh material must be a ProjectedMaterial`); 370 | } 371 | 372 | if (!(Array.isArray(mesh.material) ? mesh.material.some(m => m === this) : mesh.material === this)) { 373 | throw new Error(`The provided mesh doesn't have the same material as where project() has been called from`); 374 | } // make sure the matrix is updated 375 | 376 | 377 | mesh.updateWorldMatrix(true, false); // we save the object model matrix so it's projected relative 378 | // to that position, like a snapshot 379 | 380 | this.uniforms.savedModelMatrix.value.copy(mesh.matrixWorld); // if the material is not the first, output just the texture 381 | 382 | if (Array.isArray(mesh.material)) { 383 | const materialIndex = mesh.material.indexOf(this); 384 | 385 | if (!mesh.material[materialIndex].transparent) { 386 | throw new Error(`You have to pass "transparent: true" to the ProjectedMaterial if you're working with multiple materials.`); 387 | } 388 | 389 | if (materialIndex > 0) { 390 | this.uniforms.backgroundOpacity.value = 0; 391 | } 392 | } // persist also the current camera position and matrices 393 | 394 | 395 | _classPrivateFieldLooseBase(this, _saveCameraMatrices)[_saveCameraMatrices](); 396 | } 397 | 398 | projectInstanceAt(index, instancedMesh, matrixWorld, _temp2) { 399 | let { 400 | forceCameraSave = false 401 | } = _temp2 === void 0 ? {} : _temp2; 402 | 403 | if (!instancedMesh.isInstancedMesh) { 404 | throw new Error(`The provided mesh is not an InstancedMesh`); 405 | } 406 | 407 | if (!(Array.isArray(instancedMesh.material) ? 
instancedMesh.material.every(m => m.isProjectedMaterial) : instancedMesh.material.isProjectedMaterial)) { 408 | throw new Error(`The InstancedMesh material must be a ProjectedMaterial`); 409 | } 410 | 411 | if (!(Array.isArray(instancedMesh.material) ? instancedMesh.material.some(m => m === this) : instancedMesh.material === this)) { 412 | throw new Error(`The provided InstancedMeshhave't i samenclude thas e material where project() has been called from`); 413 | } 414 | 415 | if (!instancedMesh.geometry.attributes[`savedModelMatrix0`] || !instancedMesh.geometry.attributes[`savedModelMatrix1`] || !instancedMesh.geometry.attributes[`savedModelMatrix2`] || !instancedMesh.geometry.attributes[`savedModelMatrix3`]) { 416 | throw new Error(`No allocated data found on the geometry, please call 'allocateProjectionData(geometry, instancesCount)'`); 417 | } 418 | 419 | instancedMesh.geometry.attributes[`savedModelMatrix0`].setXYZW(index, matrixWorld.elements[0], matrixWorld.elements[1], matrixWorld.elements[2], matrixWorld.elements[3]); 420 | instancedMesh.geometry.attributes[`savedModelMatrix1`].setXYZW(index, matrixWorld.elements[4], matrixWorld.elements[5], matrixWorld.elements[6], matrixWorld.elements[7]); 421 | instancedMesh.geometry.attributes[`savedModelMatrix2`].setXYZW(index, matrixWorld.elements[8], matrixWorld.elements[9], matrixWorld.elements[10], matrixWorld.elements[11]); 422 | instancedMesh.geometry.attributes[`savedModelMatrix3`].setXYZW(index, matrixWorld.elements[12], matrixWorld.elements[13], matrixWorld.elements[14], matrixWorld.elements[15]); // if the material is not the first, output just the texture 423 | 424 | if (Array.isArray(instancedMesh.material)) { 425 | const materialIndex = instancedMesh.material.indexOf(this); 426 | 427 | if (!instancedMesh.material[materialIndex].transparent) { 428 | throw new Error(`You have to pass "transparent: true" to the ProjectedMaterial if you're working with multiple materials.`); 429 | } 430 | 431 | if 
(materialIndex > 0) { 432 | this.uniforms.backgroundOpacity.value = 0; 433 | } 434 | } // persist the current camera position and matrices 435 | // only if it's the first instance since most surely 436 | // in all other instances the camera won't change 437 | 438 | 439 | if (index === 0 || forceCameraSave) { 440 | _classPrivateFieldLooseBase(this, _saveCameraMatrices)[_saveCameraMatrices](); 441 | } 442 | } 443 | 444 | copy(source) { 445 | super.copy(source); 446 | this.camera = source.camera; 447 | this.texture = source.texture; 448 | this.textureScale = source.textureScale; 449 | this.textureOffset = source.textureOffset; 450 | this.cover = source.cover; 451 | return this; 452 | } 453 | 454 | dispose() { 455 | super.dispose(); 456 | window.removeEventListener('resize', _classPrivateFieldLooseBase(this, _saveCameraProjectionMatrix)[_saveCameraProjectionMatrix]); 457 | } 458 | 459 | } // get camera ratio from different types of cameras 460 | 461 | function _saveDimensions2() { 462 | const [widthScaled, heightScaled] = computeScaledDimensions(this.texture, this.camera, this.textureScale, this.cover); 463 | this.uniforms.widthScaled.value = widthScaled; 464 | this.uniforms.heightScaled.value = heightScaled; 465 | } 466 | 467 | function _saveCameraMatrices2() { 468 | // make sure the camera matrices are updated 469 | this.camera.updateProjectionMatrix(); 470 | this.camera.updateMatrixWorld(); 471 | this.camera.updateWorldMatrix(); // update the uniforms from the camera so they're 472 | // fixed in the camera's position at the projection time 473 | 474 | const viewMatrixCamera = this.camera.matrixWorldInverse; 475 | const projectionMatrixCamera = this.camera.projectionMatrix; 476 | const modelMatrixCamera = this.camera.matrixWorld; 477 | this.uniforms.viewMatrixCamera.value.copy(viewMatrixCamera); 478 | this.uniforms.projectionMatrixCamera.value.copy(projectionMatrixCamera); 479 | this.uniforms.projPosition.value.copy(this.camera.position); 480 | 
// Aspect ratio (width / height) of the projection camera's frustum.
// Throws for camera types other than perspective and orthographic.
function getCameraRatio(camera) {
  if (camera.type === 'PerspectiveCamera') {
    return camera.aspect;
  }

  if (camera.type === 'OrthographicCamera') {
    const frustumWidth = Math.abs(camera.right - camera.left);
    const frustumHeight = Math.abs(camera.top - camera.bottom);
    return frustumWidth / frustumHeight;
  }

  throw new Error(`${camera.type} is currently not supported in ProjectedMaterial`);
}
// Pre-allocates the four instanced vec4 attributes (savedModelMatrix0..3)
// that together hold each instance's saved model matrix, one matrix
// column per attribute.
function allocateProjectionData(geometry, instancesCount) {
  for (let column = 0; column < 4; column++) {
    geometry.setAttribute(
      `savedModelMatrix${column}`,
      new THREE.InstancedBufferAttribute(new Float32Array(instancesCount * 4), 4)
    );
  }
}
#startY 20 | #mp4 21 | #mp4Encoder 22 | #fileName 23 | #frames = [] 24 | 25 | get background() { 26 | return this.renderer.getClearColor(new THREE.Color()) 27 | } 28 | 29 | get backgroundAlpha() { 30 | return this.renderer.getClearAlpha() 31 | } 32 | 33 | set background(background) { 34 | this.renderer.setClearColor(background, this.backgroundAlpha) 35 | } 36 | 37 | set backgroundAlpha(backgroundAlpha) { 38 | this.renderer.setClearColor(this.background, backgroundAlpha) 39 | } 40 | 41 | get isRecording() { 42 | return Boolean(this.#mp4Encoder) 43 | } 44 | 45 | constructor({ 46 | background = '#111', 47 | backgroundAlpha = 1, 48 | fov = 45, 49 | frustumSize = 3, 50 | near = 0.01, 51 | far = 100, 52 | gamma = false, 53 | physicallyCorrectLights = false, 54 | ...options 55 | } = {}) { 56 | this.renderer = new THREE.WebGLRenderer({ 57 | antialias: !options.postprocessing, 58 | alpha: backgroundAlpha !== 1, 59 | // enabled for recording gifs or videos, 60 | // might disable it for performance reasons 61 | preserveDrawingBuffer: true, 62 | ...options, 63 | }) 64 | if (options.sortObjects !== undefined) { 65 | this.renderer.sortObjects = options.sortObjects 66 | } 67 | if (gamma) { 68 | // enable gamma correction, read more about it here: 69 | // https://www.donmccurdy.com/2020/06/17/color-management-in-threejs/ 70 | this.renderer.outputEncoding = THREE.sRGBEncoding 71 | } 72 | if (physicallyCorrectLights) { 73 | this.renderer.physicallyCorrectLights = true 74 | } 75 | if (options.xr) { 76 | this.renderer.xr.enabled = true 77 | } 78 | 79 | this.canvas = this.renderer.domElement 80 | 81 | this.renderer.setClearColor(background, backgroundAlpha) 82 | 83 | // save the fixed dimensions 84 | this.#width = options.width 85 | this.#height = options.height 86 | 87 | // clamp pixel ratio for performance 88 | this.maxPixelRatio = options.maxPixelRatio || 1.5 89 | // clamp delta to avoid stepping anything too far forward 90 | this.maxDeltaTime = options.maxDeltaTime || 1 / 30 91 | 
92 | // setup the camera 93 | const aspect = this.#width / this.#height 94 | if (!options.orthographic) { 95 | this.camera = new THREE.PerspectiveCamera(fov, aspect, near, far) 96 | } else { 97 | this.camera = new THREE.OrthographicCamera( 98 | -(frustumSize * aspect) / 2, 99 | (frustumSize * aspect) / 2, 100 | frustumSize / 2, 101 | -frustumSize / 2, 102 | near, 103 | far 104 | ) 105 | this.camera.frustumSize = frustumSize 106 | } 107 | this.camera.position.copy(options.cameraPosition || new THREE.Vector3(0, 0, 4)) 108 | this.camera.lookAt(0, 0, 0) 109 | 110 | this.scene = new THREE.Scene() 111 | 112 | this.gl = this.renderer.getContext() 113 | 114 | // handle resize events 115 | window.addEventListener('resize', this.resize) 116 | window.addEventListener('orientationchange', this.resize) 117 | 118 | // force an initial resize event 119 | this.resize() 120 | 121 | // __________________________ADDONS__________________________ 122 | 123 | // really basic pointer events handler, the second argument 124 | // contains the x and y relative to the top left corner 125 | // of the canvas. 126 | // In case of touches with multiple fingers, only the 127 | // first touch is registered. 
128 | this.isDragging = false 129 | this.canvas.addEventListener('pointerdown', (event) => { 130 | if (!event.isPrimary) return 131 | this.isDragging = true 132 | this.#startX = event.offsetX 133 | this.#startY = event.offsetY 134 | // call onPointerDown method 135 | this.scene.traverse((child) => { 136 | if (typeof child.onPointerDown === 'function') { 137 | child.onPointerDown(event, { x: event.offsetX, y: event.offsetY }) 138 | } 139 | }) 140 | // call the pointerdown listeners 141 | this.#pointerdownListeners.forEach((fn) => fn(event, { x: event.offsetX, y: event.offsetY })) 142 | }) 143 | this.canvas.addEventListener('pointermove', (event) => { 144 | if (!event.isPrimary) return 145 | // call onPointerMove method 146 | const position = { 147 | x: event.offsetX, 148 | y: event.offsetY, 149 | ...(this.#startX !== undefined && { dragX: event.offsetX - this.#startX }), 150 | ...(this.#startY !== undefined && { dragY: event.offsetY - this.#startY }), 151 | } 152 | this.scene.traverse((child) => { 153 | if (typeof child.onPointerMove === 'function') { 154 | child.onPointerMove(event, position) 155 | } 156 | }) 157 | // call the pointermove listeners 158 | this.#pointermoveListeners.forEach((fn) => fn(event, position)) 159 | }) 160 | this.canvas.addEventListener('pointerup', (event) => { 161 | if (!event.isPrimary) return 162 | this.isDragging = false 163 | // call onPointerUp method 164 | const position = { 165 | x: event.offsetX, 166 | y: event.offsetY, 167 | ...(this.#startX !== undefined && { dragX: event.offsetX - this.#startX }), 168 | ...(this.#startY !== undefined && { dragY: event.offsetY - this.#startY }), 169 | } 170 | this.scene.traverse((child) => { 171 | if (typeof child.onPointerUp === 'function') { 172 | child.onPointerUp(event, position) 173 | } 174 | }) 175 | // call the pointerup listeners 176 | this.#pointerupListeners.forEach((fn) => fn(event, position)) 177 | 178 | this.#startX = undefined 179 | this.#startY = undefined 180 | }) 181 | 182 | // 
expose a composer for postprocessing passes 183 | if (options.postprocessing) { 184 | const maxMultisampling = this.gl.getParameter(this.gl.MAX_SAMPLES) 185 | this.composer = new EffectComposer(this.renderer, { 186 | multisampling: Math.min(8, maxMultisampling), 187 | frameBufferType: gamma ? THREE.HalfFloatType : undefined, 188 | ...options, 189 | }) 190 | this.composer.addPass(new RenderPass(this.scene, this.camera)) 191 | } 192 | 193 | // set up OrbitControls 194 | if (options.orbitControls) { 195 | this.orbitControls = new OrbitControls(this.camera, this.canvas) 196 | 197 | this.orbitControls.enableDamping = true 198 | this.orbitControls.dampingFactor = 0.15 199 | this.orbitControls.enablePan = false 200 | 201 | if (options.orbitControls instanceof Object) { 202 | Object.keys(options.orbitControls).forEach((key) => { 203 | this.orbitControls[key] = options.orbitControls[key] 204 | }) 205 | } 206 | } 207 | 208 | // Attach the Cannon physics engine 209 | if (options.world) { 210 | this.world = options.world 211 | if (options.showWorldWireframes) { 212 | this.cannonDebugger = cannonDebugger(this.scene, this.world.bodies, { autoUpdate: false }) 213 | } 214 | } 215 | 216 | // show the fps meter 217 | if (options.showFps) { 218 | this.stats = new Stats({ showMinMax: false, context: this.gl }) 219 | this.stats.showPanel(0) 220 | document.body.appendChild(this.stats.dom) 221 | } 222 | 223 | // initialize the controls-state 224 | if (options.controls) { 225 | this.controls = initControls(options.controls, options) 226 | } 227 | 228 | // detect the gpu info 229 | // this.loadGPUTier = getGPUTier({ glContext: this.gl }).then((gpuTier) => { 230 | // this.gpu = { 231 | // name: gpuTier.gpu, 232 | // tier: gpuTier.tier, 233 | // isMobile: gpuTier.isMobile, 234 | // fps: gpuTier.fps, 235 | // } 236 | // }) 237 | 238 | // initialize the mp4 recorder 239 | // if (isWebCodecsSupported()) { 240 | // loadMP4Module().then((mp4) => { 241 | // this.#mp4 = mp4 242 | // }) 243 | // } 
244 | } 245 | 246 | get width() { 247 | return this.#width || window.innerWidth 248 | } 249 | 250 | get height() { 251 | return this.#height || window.innerHeight 252 | } 253 | 254 | get pixelRatio() { 255 | return Math.min(this.maxPixelRatio, window.devicePixelRatio) 256 | } 257 | 258 | resize = ({ width = this.width, height = this.height, pixelRatio = this.pixelRatio } = {}) => { 259 | // update pixel ratio if necessary 260 | if (this.renderer.getPixelRatio() !== pixelRatio) { 261 | this.renderer.setPixelRatio(pixelRatio) 262 | } 263 | 264 | // setup new size & update camera aspect if necessary 265 | this.renderer.setSize(width, height) 266 | if (this.camera.isPerspectiveCamera) { 267 | this.camera.aspect = width / height 268 | } else { 269 | const aspect = width / height 270 | this.camera.left = -(this.camera.frustumSize * aspect) / 2 271 | this.camera.right = (this.camera.frustumSize * aspect) / 2 272 | this.camera.top = this.camera.frustumSize / 2 273 | this.camera.bottom = -this.camera.frustumSize / 2 274 | } 275 | this.camera.updateProjectionMatrix() 276 | 277 | // resize also the composer, width and height 278 | // are automatically extracted from the renderer 279 | if (this.composer) { 280 | this.composer.setSize() 281 | } 282 | 283 | // recursively tell all child objects to resize 284 | this.scene.traverse((obj) => { 285 | if (typeof obj.resize === 'function') { 286 | obj.resize({ 287 | width, 288 | height, 289 | pixelRatio, 290 | }) 291 | } 292 | }) 293 | 294 | // draw a frame to ensure the new size has been registered visually 295 | this.draw() 296 | return this 297 | } 298 | 299 | // convenience function to trigger a PNG download of the canvas 300 | saveScreenshot = async ({ 301 | width = this.width, 302 | height = this.height, 303 | fileName = 'Screenshot', 304 | } = {}) => { 305 | // force a specific output size 306 | this.resize({ width, height, pixelRatio: 1 }) 307 | 308 | const blob = await new Promise((resolve) => this.canvas.toBlob(resolve, 
'image/png')) 309 | 310 | // reset to default size 311 | this.resize() 312 | 313 | // save 314 | downloadFile(`${fileName}.png`, blob) 315 | } 316 | 317 | // start recording of a gif or a video 318 | startRecording = ({ 319 | width = this.width, 320 | height = this.height, 321 | fileName = 'Recording', 322 | ...options 323 | } = {}) => { 324 | if (!isWebCodecsSupported()) { 325 | throw new Error('You need the WebCodecs API to use mp4-wasm') 326 | } 327 | 328 | if (this.isRecording) { 329 | return 330 | } 331 | 332 | this.#fileName = fileName 333 | 334 | // force a specific output size 335 | this.resize({ width, height, pixelRatio: 1 }) 336 | this.draw() 337 | 338 | this.#mp4Encoder = this.#mp4.createWebCodecsEncoder({ 339 | width, 340 | height, 341 | fps: 60, 342 | bitrate: 120 * 1000 * 1000, // 120 Mbit/s 343 | ...options, 344 | }) 345 | } 346 | 347 | stopRecording = async () => { 348 | if (!this.isRecording) { 349 | return 350 | } 351 | 352 | for (let frame of this.#frames) { 353 | await this.#mp4Encoder.addFrame(frame) 354 | } 355 | const buffer = await this.#mp4Encoder.end() 356 | const blob = new Blob([buffer]) 357 | 358 | this.#mp4Encoder = undefined 359 | // dispose the graphical resources associated with the ImageBitmap 360 | this.#frames.forEach((frame) => frame.close()) 361 | this.#frames.length = 0 362 | 363 | // reset to default size 364 | this.resize() 365 | this.draw() 366 | 367 | downloadFile(`${this.#fileName}.mp4`, blob) 368 | } 369 | 370 | update = (dt, time, xrframe) => { 371 | if (this.orbitControls) { 372 | this.orbitControls.update() 373 | } 374 | 375 | // recursively tell all child objects to update 376 | this.scene.traverse((obj) => { 377 | if (typeof obj.update === 'function' && !obj.isTransformControls) { 378 | obj.update(dt, time, xrframe) 379 | } 380 | }) 381 | 382 | if (this.world) { 383 | // update the cannon-es physics engine 384 | this.world.step(1 / 60, dt) 385 | 386 | // update the debug wireframe renderer 387 | if 
(this.cannonDebugger) { 388 | this.cannonDebugger.update() 389 | } 390 | 391 | // recursively tell all child bodies to update 392 | this.world.bodies.forEach((body) => { 393 | if (typeof body.update === 'function') { 394 | body.update(dt, time) 395 | } 396 | }) 397 | } 398 | 399 | // call the update listeners 400 | this.#updateListeners.forEach((fn) => fn(dt, time, xrframe)) 401 | 402 | return this 403 | } 404 | 405 | onUpdate(fn) { 406 | this.#updateListeners.push(fn) 407 | } 408 | 409 | onPointerDown(fn) { 410 | this.#pointerdownListeners.push(fn) 411 | } 412 | 413 | onPointerMove(fn) { 414 | this.#pointermoveListeners.push(fn) 415 | } 416 | 417 | onPointerUp(fn) { 418 | this.#pointerupListeners.push(fn) 419 | } 420 | 421 | offUpdate(fn) { 422 | const index = this.#updateListeners.indexOf(fn) 423 | 424 | // return silently if the function can't be found 425 | if (index === -1) { 426 | return 427 | } 428 | 429 | this.#updateListeners.splice(index, 1) 430 | } 431 | 432 | offPointerDown(fn) { 433 | const index = this.#pointerdownListeners.indexOf(fn) 434 | 435 | // return silently if the function can't be found 436 | if (index === -1) { 437 | return 438 | } 439 | 440 | this.#pointerdownListeners.splice(index, 1) 441 | } 442 | 443 | offPointerMove(fn) { 444 | const index = this.#pointermoveListeners.indexOf(fn) 445 | 446 | // return silently if the function can't be found 447 | if (index === -1) { 448 | return 449 | } 450 | 451 | this.#pointermoveListeners.splice(index, 1) 452 | } 453 | 454 | offPointerUp(fn) { 455 | const index = this.#pointerupListeners.indexOf(fn) 456 | 457 | // return silently if the function can't be found 458 | if (index === -1) { 459 | return 460 | } 461 | 462 | this.#pointerupListeners.splice(index, 1) 463 | } 464 | 465 | draw = () => { 466 | // postprocessing doesn't currently work in WebXR 467 | const isXR = this.renderer.xr.enabled && this.renderer.xr.isPresenting 468 | 469 | if (this.composer && !isXR) { 470 | 
this.composer.render(this.dt) 471 | } else { 472 | this.renderer.render(this.scene, this.camera) 473 | } 474 | return this 475 | } 476 | 477 | start = () => { 478 | if (this.isRunning) return 479 | this.isRunning = true 480 | 481 | // draw immediately 482 | this.draw() 483 | 484 | this.renderer.setAnimationLoop(this.animate) 485 | return this 486 | } 487 | 488 | stop = () => { 489 | if (!this.isRunning) return 490 | this.renderer.setAnimationLoop(null) 491 | this.isRunning = false 492 | return this 493 | } 494 | 495 | animate = (now, xrframe) => { 496 | if (!this.isRunning) return 497 | 498 | if (this.stats) this.stats.begin() 499 | 500 | this.dt = Math.min(this.maxDeltaTime, (now - this.#lastTime) / 1000) 501 | this.time += this.dt 502 | this.#lastTime = now 503 | this.update(this.dt, this.time, xrframe) 504 | this.draw() 505 | 506 | // save the bitmap of the canvas for the recorder 507 | if (this.isRecording) { 508 | const index = this.#frames.length 509 | createImageBitmap(this.canvas).then((bitmap) => { 510 | this.#frames[index] = bitmap 511 | }) 512 | } 513 | 514 | if (this.stats) this.stats.end() 515 | } 516 | 517 | get cursor() { 518 | return this.canvas.style.cursor 519 | } 520 | 521 | set cursor(cursor) { 522 | if (cursor) { 523 | this.canvas.style.cursor = cursor 524 | } else { 525 | this.canvas.style.cursor = null 526 | } 527 | } 528 | } 529 | 530 | function downloadFile(name, blob) { 531 | const link = document.createElement('a') 532 | link.download = name 533 | link.href = URL.createObjectURL(blob) 534 | link.click() 535 | 536 | setTimeout(() => { 537 | URL.revokeObjectURL(blob) 538 | link.removeAttribute('href') 539 | }, 0) 540 | } 541 | -------------------------------------------------------------------------------- /examples/lib/controls-state.module.js: -------------------------------------------------------------------------------- 1 | var commonjsGlobal = typeof window !== 'undefined' ? window : typeof global !== 'undefined' ? 
global : typeof self !== 'undefined' ? self : {}; 2 | 3 | function createCommonjsModule(fn, module) { 4 | return module = { exports: {} }, fn(module, module.exports), module.exports; 5 | } 6 | 7 | var isImplemented = function () { 8 | var assign = Object.assign, 9 | obj; 10 | if (typeof assign !== "function") return false; 11 | obj = { foo: "raz" }; 12 | assign(obj, { bar: "dwa" }, { trzy: "trzy" }); 13 | return obj.foo + obj.bar + obj.trzy === "razdwatrzy"; 14 | }; 15 | 16 | var isImplemented$1 = function () { 17 | try { 18 | return true; 19 | } catch (e) { 20 | return false; 21 | } 22 | }; 23 | 24 | // eslint-disable-next-line no-empty-function 25 | 26 | var noop = function () {}; 27 | 28 | var _undefined = noop(); // Support ES3 engines 29 | 30 | var isValue = function (val) { 31 | return val !== _undefined && val !== null; 32 | }; 33 | 34 | var keys = Object.keys; 35 | 36 | var shim = function (object) { 37 | return keys(isValue(object) ? Object(object) : object); 38 | }; 39 | 40 | var keys$1 = isImplemented$1() ? Object.keys : shim; 41 | 42 | var validValue = function (value) { 43 | if (!isValue(value)) throw new TypeError("Cannot use null or undefined"); 44 | return value; 45 | }; 46 | 47 | var max = Math.max; 48 | 49 | var shim$1 = function (dest, src /*, …srcn*/) { 50 | var error, 51 | i, 52 | length = max(arguments.length, 2), 53 | assign; 54 | dest = Object(validValue(dest)); 55 | assign = function (key) { 56 | try { 57 | dest[key] = src[key]; 58 | } catch (e) { 59 | if (!error) error = e; 60 | } 61 | }; 62 | for (i = 1; i < length; ++i) { 63 | src = arguments[i]; 64 | keys$1(src).forEach(assign); 65 | } 66 | if (error !== undefined) throw error; 67 | return dest; 68 | }; 69 | 70 | var assign = isImplemented() ? 
Object.assign : shim$1; 71 | 72 | var forEach = Array.prototype.forEach, 73 | create = Object.create; 74 | 75 | var process$1 = function (src, obj) { 76 | var key; 77 | for (key in src) { 78 | obj[key] = src[key]; 79 | } 80 | }; 81 | 82 | // eslint-disable-next-line no-unused-vars 83 | var normalizeOptions = function (opts1 /*, …options*/) { 84 | var result = create(null); 85 | forEach.call(arguments, function (options) { 86 | if (!isValue(options)) return; 87 | process$1(Object(options), result); 88 | }); 89 | return result; 90 | }; 91 | 92 | // Deprecated 93 | 94 | var isCallable = function (obj) { 95 | return typeof obj === "function"; 96 | }; 97 | 98 | var str = "razdwatrzy"; 99 | 100 | var isImplemented$2 = function () { 101 | if (typeof str.contains !== "function") return false; 102 | return str.contains("dwa") === true && str.contains("foo") === false; 103 | }; 104 | 105 | var indexOf = String.prototype.indexOf; 106 | 107 | var shim$2 = function (searchString /*, position*/) { 108 | return indexOf.call(this, searchString, arguments[1]) > -1; 109 | }; 110 | 111 | var contains = isImplemented$2() ? String.prototype.contains : shim$2; 112 | 113 | var d_1 = createCommonjsModule(function (module) { 114 | 115 | var d; 116 | 117 | d = module.exports = function (dscr, value /*, options*/) { 118 | var c, e, w, options, desc; 119 | if (arguments.length < 2 || typeof dscr !== 'string') { 120 | options = value; 121 | value = dscr; 122 | dscr = null; 123 | } else { 124 | options = arguments[2]; 125 | } 126 | if (dscr == null) { 127 | c = w = true; 128 | e = false; 129 | } else { 130 | c = contains.call(dscr, 'c'); 131 | e = contains.call(dscr, 'e'); 132 | w = contains.call(dscr, 'w'); 133 | } 134 | 135 | desc = { value: value, configurable: c, enumerable: e, writable: w }; 136 | return !options ? 
desc : assign(normalizeOptions(options), desc); 137 | }; 138 | 139 | d.gs = function (dscr, get, set /*, options*/) { 140 | var c, e, options, desc; 141 | if (typeof dscr !== 'string') { 142 | options = set; 143 | set = get; 144 | get = dscr; 145 | dscr = null; 146 | } else { 147 | options = arguments[3]; 148 | } 149 | if (get == null) { 150 | get = undefined; 151 | } else if (!isCallable(get)) { 152 | options = get; 153 | get = set = undefined; 154 | } else if (set == null) { 155 | set = undefined; 156 | } else if (!isCallable(set)) { 157 | options = set; 158 | set = undefined; 159 | } 160 | if (dscr == null) { 161 | c = true; 162 | e = false; 163 | } else { 164 | c = contains.call(dscr, 'c'); 165 | e = contains.call(dscr, 'e'); 166 | } 167 | 168 | desc = { get: get, set: set, configurable: c, enumerable: e }; 169 | return !options ? desc : assign(normalizeOptions(options), desc); 170 | }; 171 | }); 172 | 173 | var validCallable = function (fn) { 174 | if (typeof fn !== "function") throw new TypeError(fn + " is not a function"); 175 | return fn; 176 | }; 177 | 178 | var eventEmitter = createCommonjsModule(function (module, exports) { 179 | 180 | var apply = Function.prototype.apply, 181 | call = Function.prototype.call, 182 | create = Object.create, 183 | defineProperty = Object.defineProperty, 184 | defineProperties = Object.defineProperties, 185 | hasOwnProperty = Object.prototype.hasOwnProperty, 186 | descriptor = { configurable: true, enumerable: false, writable: true }, 187 | on, 188 | once, 189 | off, 190 | emit, 191 | methods, 192 | descriptors, 193 | base; 194 | 195 | on = function (type, listener) { 196 | var data; 197 | 198 | validCallable(listener); 199 | 200 | if (!hasOwnProperty.call(this, '__ee__')) { 201 | data = descriptor.value = create(null); 202 | defineProperty(this, '__ee__', descriptor); 203 | descriptor.value = null; 204 | } else { 205 | data = this.__ee__; 206 | } 207 | if (!data[type]) data[type] = listener;else if (typeof data[type] === 
'object') data[type].push(listener);else data[type] = [data[type], listener]; 208 | 209 | return this; 210 | }; 211 | 212 | once = function (type, listener) { 213 | var once, self; 214 | 215 | validCallable(listener); 216 | self = this; 217 | on.call(this, type, once = function () { 218 | off.call(self, type, once); 219 | apply.call(listener, this, arguments); 220 | }); 221 | 222 | once.__eeOnceListener__ = listener; 223 | return this; 224 | }; 225 | 226 | off = function (type, listener) { 227 | var data, listeners, candidate, i; 228 | 229 | validCallable(listener); 230 | 231 | if (!hasOwnProperty.call(this, '__ee__')) return this; 232 | data = this.__ee__; 233 | if (!data[type]) return this; 234 | listeners = data[type]; 235 | 236 | if (typeof listeners === 'object') { 237 | for (i = 0; candidate = listeners[i]; ++i) { 238 | if (candidate === listener || candidate.__eeOnceListener__ === listener) { 239 | if (listeners.length === 2) data[type] = listeners[i ? 0 : 1];else listeners.splice(i, 1); 240 | } 241 | } 242 | } else { 243 | if (listeners === listener || listeners.__eeOnceListener__ === listener) { 244 | delete data[type]; 245 | } 246 | } 247 | 248 | return this; 249 | }; 250 | 251 | emit = function (type) { 252 | var i, l, listener, listeners, args; 253 | 254 | if (!hasOwnProperty.call(this, '__ee__')) return; 255 | listeners = this.__ee__[type]; 256 | if (!listeners) return; 257 | 258 | if (typeof listeners === 'object') { 259 | l = arguments.length; 260 | args = new Array(l - 1); 261 | for (i = 1; i < l; ++i) { 262 | args[i - 1] = arguments[i]; 263 | }listeners = listeners.slice(); 264 | for (i = 0; listener = listeners[i]; ++i) { 265 | apply.call(listener, this, args); 266 | } 267 | } else { 268 | switch (arguments.length) { 269 | case 1: 270 | call.call(listeners, this); 271 | break; 272 | case 2: 273 | call.call(listeners, this, arguments[1]); 274 | break; 275 | case 3: 276 | call.call(listeners, this, arguments[1], arguments[2]); 277 | break; 278 | 
default: 279 | l = arguments.length; 280 | args = new Array(l - 1); 281 | for (i = 1; i < l; ++i) { 282 | args[i - 1] = arguments[i]; 283 | } 284 | apply.call(listeners, this, args); 285 | } 286 | } 287 | }; 288 | 289 | methods = { 290 | on: on, 291 | once: once, 292 | off: off, 293 | emit: emit 294 | }; 295 | 296 | descriptors = { 297 | on: d_1(on), 298 | once: d_1(once), 299 | off: d_1(off), 300 | emit: d_1(emit) 301 | }; 302 | 303 | base = defineProperties({}, descriptors); 304 | 305 | module.exports = exports = function (o) { 306 | return o == null ? create(base) : defineProperties(Object(o), descriptors); 307 | }; 308 | exports.methods = methods; 309 | }); 310 | var eventEmitter_1 = eventEmitter.methods; 311 | 312 | var performanceNow = createCommonjsModule(function (module) { 313 | // Generated by CoffeeScript 1.12.2 314 | (function () { 315 | var getNanoSeconds, hrtime, loadTime, moduleLoadTime, nodeLoadTime, upTime; 316 | 317 | if (typeof performance !== "undefined" && performance !== null && performance.now) { 318 | module.exports = function () { 319 | return performance.now(); 320 | }; 321 | } else if (typeof process !== "undefined" && process !== null && process.hrtime) { 322 | module.exports = function () { 323 | return (getNanoSeconds() - nodeLoadTime) / 1e6; 324 | }; 325 | hrtime = process.hrtime; 326 | getNanoSeconds = function () { 327 | var hr; 328 | hr = hrtime(); 329 | return hr[0] * 1e9 + hr[1]; 330 | }; 331 | moduleLoadTime = getNanoSeconds(); 332 | upTime = process.uptime() * 1e9; 333 | nodeLoadTime = moduleLoadTime - upTime; 334 | } else if (Date.now) { 335 | module.exports = function () { 336 | return Date.now() - loadTime; 337 | }; 338 | loadTime = Date.now(); 339 | } else { 340 | module.exports = function () { 341 | return new Date().getTime() - loadTime; 342 | }; 343 | loadTime = new Date().getTime(); 344 | } 345 | }).call(commonjsGlobal); 346 | 347 | }); 348 | 349 | var root = typeof window === 'undefined' ? 
commonjsGlobal : window, 350 | vendors = ['moz', 'webkit'], 351 | suffix = 'AnimationFrame', 352 | raf = root['request' + suffix], 353 | caf = root['cancel' + suffix] || root['cancelRequest' + suffix]; 354 | 355 | for (var i = 0; !raf && i < vendors.length; i++) { 356 | raf = root[vendors[i] + 'Request' + suffix]; 357 | caf = root[vendors[i] + 'Cancel' + suffix] || root[vendors[i] + 'CancelRequest' + suffix]; 358 | } 359 | 360 | // Some versions of FF have rAF but not cAF 361 | if (!raf || !caf) { 362 | var last = 0, 363 | id = 0, 364 | queue = [], 365 | frameDuration = 1000 / 60; 366 | 367 | raf = function (callback) { 368 | if (queue.length === 0) { 369 | var _now = performanceNow(), 370 | next = Math.max(0, frameDuration - (_now - last)); 371 | last = next + _now; 372 | setTimeout(function () { 373 | var cp = queue.slice(0); 374 | // Clear queue here to prevent 375 | // callbacks from appending listeners 376 | // to the current frame's queue 377 | queue.length = 0; 378 | for (var i = 0; i < cp.length; i++) { 379 | if (!cp[i].cancelled) { 380 | try { 381 | cp[i].callback(last); 382 | } catch (e) { 383 | setTimeout(function () { 384 | throw e; 385 | }, 0); 386 | } 387 | } 388 | } 389 | }, Math.round(next)); 390 | } 391 | queue.push({ 392 | handle: ++id, 393 | callback: callback, 394 | cancelled: false 395 | }); 396 | return id; 397 | }; 398 | 399 | caf = function (handle) { 400 | for (var i = 0; i < queue.length; i++) { 401 | if (queue[i].handle === handle) { 402 | queue[i].cancelled = true; 403 | } 404 | } 405 | }; 406 | } 407 | 408 | var raf_1 = function (fn) { 409 | // Wrap in a new function to prevent 410 | // `cancel` potentially being assigned 411 | // to the native rAF function 412 | return raf.call(root, fn); 413 | }; 414 | var cancel = function () { 415 | caf.apply(root, arguments); 416 | }; 417 | var polyfill = function (object) { 418 | if (!object) { 419 | object = root; 420 | } 421 | object.requestAnimationFrame = raf; 422 | object.cancelAnimationFrame 
= caf; 423 | }; 424 | raf_1.cancel = cancel; 425 | raf_1.polyfill = polyfill; 426 | 427 | var field = Field; 428 | 429 | function Field(name, initialValue, parentField, config) { 430 | if (/\./.test(name)) { 431 | throw new Error('Field names may not contain a period'); 432 | } 433 | 434 | config = config || {}; 435 | 436 | var value = initialValue; 437 | 438 | this.parent = parentField || null; 439 | this.events = new eventEmitter(); 440 | 441 | this.type = null; 442 | this.name = name; 443 | 444 | this.batchedUpdates = {}; 445 | this.batchUpdatePaths = []; 446 | this.batchUpdateRaf = null; 447 | 448 | Object.defineProperties(this, { 449 | '$field': { 450 | enumerable: false, 451 | value: this 452 | }, 453 | '$config': { 454 | enumerable: false, 455 | value: config 456 | }, 457 | 'value': { 458 | get: function () { 459 | return value; 460 | }, 461 | set: function (newValue) { 462 | var event = { 463 | field: this, 464 | name: this.name, 465 | path: this.path, 466 | fullPath: this.path, 467 | oldValue: value, 468 | value: newValue 469 | }; 470 | 471 | var path = []; 472 | var field = this; 473 | 474 | do { 475 | event.path = path.join('.'); 476 | 477 | var changes = {}; 478 | changes[event.path || this.name] = Object.assign({}, event); 479 | 480 | if (field.events.emit) { 481 | field.events.emit('beforeChange', Object.assign({}, event)); 482 | field.events.emit('beforeChanges', changes); 483 | } 484 | 485 | if (field._batchEmit) { 486 | field._batchEmit(event.path, Object.assign({}, event)); 487 | } 488 | 489 | path.unshift(field.name); 490 | } while (field = field.parent); 491 | 492 | value = newValue; 493 | } 494 | }, 495 | 'path': { 496 | enumerable: true, 497 | get: function () { 498 | var parentPath = (this.parent || {}).path; 499 | if (!this.name) return null; 500 | return (parentPath ? parentPath + '.' 
: '') + this.name; 501 | } 502 | } 503 | }); 504 | } 505 | 506 | Field.prototype = { 507 | onBeforeChange: function (callback) { 508 | this.events.on('beforeChange', callback); 509 | return this; 510 | }, 511 | offBeforeChange: function (callback) { 512 | this.events.off('beforeChange', callback); 513 | return this; 514 | }, 515 | 516 | onBeforeChanges: function (callback) { 517 | this.events.on('beforeChanges', callback); 518 | return this; 519 | }, 520 | offBeforeChanges: function (callback) { 521 | this.events.off('beforeChanges', callback); 522 | return this; 523 | }, 524 | 525 | onChange: function (callback) { 526 | this.events.on('change', callback); 527 | return this; 528 | }, 529 | offChange: function (callback) { 530 | this.events.off('change', callback); 531 | return this; 532 | }, 533 | 534 | onChanges: function (callback) { 535 | this.events.on('changes', callback); 536 | return this; 537 | }, 538 | offChanges: function (callback) { 539 | this.events.off('changes', callback); 540 | return this; 541 | }, 542 | 543 | _emitUpdate: function () { 544 | this.events.emit('changes', Object.assign({}, this.batchedUpdates)); 545 | 546 | while (this.batchUpdatePaths.length) { 547 | var updateKeys = Object.keys(this.batchedUpdates); 548 | for (var i = 0; i < updateKeys.length; i++) { 549 | var event = this.batchedUpdates[updateKeys[i]]; 550 | var path = this.batchUpdatePaths.pop(); 551 | this.events.emit('change', event); 552 | this.events.emit('change:' + path, event); 553 | } 554 | } 555 | this.batchedUpdates = {}; 556 | this.batchUpdateRaf = null; 557 | }, 558 | _batchEmit: function (path, event) { 559 | var existingUpdate = this.batchedUpdates[event.path]; 560 | if (existingUpdate) { 561 | event.oldValue = existingUpdate.oldValue; 562 | } 563 | this.batchUpdatePaths.push(path); 564 | this.batchedUpdates[path] = event; 565 | 566 | if (!this.batchUpdateRaf) { 567 | this.batchUpdateRaf = raf_1(this._emitUpdate.bind(this)); 568 | } 569 | } 570 | }; 571 | 572 | var 
raw = Raw; 573 | 574 | function Raw(name, htmlContent, config, parentField) { 575 | if (!(this instanceof Raw)) return new Raw(name, htmlContent, config, parentField); 576 | 577 | field.call(this, name, htmlContent, parentField, config); 578 | 579 | this.type = 'raw'; 580 | } 581 | 582 | Raw.prototype = Object.create(field.prototype); 583 | 584 | var slider = Slider; 585 | 586 | function identity(x) { 587 | return x; 588 | } 589 | 590 | function Slider(name, initialValue, config, parentField) { 591 | if (!(this instanceof Slider)) return new Slider(name, initialValue, config, parentField); 592 | 593 | initialValue = initialValue === undefined ? 0 : initialValue; 594 | config = config || {}; 595 | 596 | field.call(this, name, initialValue, parentField, config); 597 | 598 | var isValueBetween0and1 = 0 <= initialValue && initialValue <= 1; 599 | var defaultMin = isValueBetween0and1 ? 0 : Math.min(initialValue * 2, 0); 600 | var defaultMax = isValueBetween0and1 ? 1 : Math.max(initialValue * 2, 1); 601 | var defaultStep = isValueBetween0and1 ? 0.01 : 1; 602 | this.min = config.min === undefined ? defaultMin : config.min; 603 | this.max = config.max === undefined ? defaultMax : config.max; 604 | this.mapping = typeof config.mapping !== 'function' ? identity : config.mapping; 605 | this.inverseMapping = typeof config.inverseMapping !== 'function' ? 
identity : config.inverseMapping; 606 | 607 | this.steps = Math.round((this.max - this.min) / defaultStep); 608 | if (config.steps !== undefined) { 609 | this.steps = config.steps; 610 | } else if (config.step !== undefined) { 611 | this.steps = Math.round((this.max - this.min) / config.step); 612 | } 613 | 614 | this.type = 'slider'; 615 | } 616 | 617 | Slider.prototype = Object.create(field.prototype); 618 | 619 | var button = Button; 620 | 621 | function Button(name, htmlContent, config, parentField) { 622 | if (!(this instanceof Button)) return new Button(name, htmlContent, config, parentField); 623 | 624 | field.call(this, name, htmlContent, parentField, config); 625 | 626 | this.type = 'button'; 627 | } 628 | 629 | Button.prototype = Object.create(field.prototype); 630 | 631 | var textinput = TextInput; 632 | 633 | function TextInput(name, initialValue, config, parentField) { 634 | if (!(this instanceof TextInput)) return new TextInput(name, initialValue, config, parentField); 635 | 636 | initialValue = initialValue === undefined ? '' : initialValue; 637 | 638 | field.call(this, name, initialValue, parentField, config); 639 | 640 | this.type = 'textinput'; 641 | } 642 | 643 | TextInput.prototype = Object.create(field.prototype); 644 | 645 | var color = Color; 646 | 647 | function Color(name, initialValue, config, parentField) { 648 | if (!(this instanceof Color)) return new Color(name, initialValue, config); 649 | 650 | initialValue = initialValue === undefined ? '#ffffff' : initialValue; 651 | 652 | field.call(this, name, initialValue, parentField, config); 653 | 654 | this.type = 'color'; 655 | } 656 | 657 | Color.prototype = Object.create(field.prototype); 658 | 659 | var checkbox = Checkbox; 660 | 661 | function Checkbox(name, initialValue, config, parentField) { 662 | if (!(this instanceof Checkbox)) return new Checkbox(name, initialValue, config, parentField); 663 | 664 | initialValue = initialValue === undefined ? 
true : !!initialValue; 665 | 666 | field.call(this, name, initialValue, parentField, config); 667 | 668 | this.type = 'checkbox'; 669 | } 670 | 671 | Checkbox.prototype = Object.create(field.prototype); 672 | 673 | var win; 674 | 675 | if (typeof window !== "undefined") { 676 | win = window; 677 | } else if (typeof commonjsGlobal !== "undefined") { 678 | win = commonjsGlobal; 679 | } else if (typeof self !== "undefined") { 680 | win = self; 681 | } else { 682 | win = {}; 683 | } 684 | 685 | var window_1 = win; 686 | 687 | function isHTMLElement(element) { 688 | return window_1.Element && element instanceof window_1.Element || window_1.HTMLDocument && element instanceof window_1.HTMLDocument; 689 | } 690 | 691 | var COLOR_REGEX = /(#(?:[0-9a-fA-F]{2,4}){2,4}|(#[0-9a-fA-F]{3})|(rgb|hsl)a?((-?\d+%?[,\s]+){2,3}\s*[\d.]+%?))/; 692 | 693 | var inferType = function inferType(value) { 694 | if (value && value.type) { 695 | return value.type + 'field'; 696 | } 697 | 698 | if (isHTMLElement(value)) { 699 | return 'rawfield'; 700 | } 701 | 702 | if (typeof value === 'function') { 703 | return 'button'; 704 | } 705 | 706 | switch (typeof value) { 707 | case 'string': 708 | if (COLOR_REGEX.test(value)) { 709 | return 'color'; 710 | } 711 | return 'textinput'; 712 | case 'number': 713 | return 'number'; 714 | case 'boolean': 715 | return 'boolean'; 716 | case 'raw': 717 | return 'raw'; 718 | case 'button': 719 | return 'button'; 720 | case 'object': 721 | return 'object'; 722 | } 723 | }; 724 | 725 | var section = Section; 726 | 727 | function constructField(fieldName, fieldValue, parentField) { 728 | switch (inferType(fieldValue)) { 729 | case 'rawfield': 730 | case 'buttonfield': 731 | case 'colorfield': 732 | case 'textfield': 733 | case 'sliderfield': 734 | case 'selectfield': 735 | case 'rangesliderfield': 736 | case 'checkboxfield': 737 | case 'sectionfield': 738 | case 'tabsfield': 739 | if (fieldValue.path) { 740 | throw new Error('You may only add an field to a set of 
controls once.'); 741 | } 742 | 743 | fieldValue.$field.parent = parentField; 744 | fieldValue.name = fieldName; 745 | 746 | return fieldValue; 747 | case 'color': 748 | return new color(fieldName, fieldValue, {}, parentField); 749 | case 'raw': 750 | return new raw(fieldName, fieldValue, {}, parentField); 751 | case 'button': 752 | return new button(fieldName, fieldValue, {}, parentField); 753 | case 'textinput': 754 | return new textinput(fieldName, fieldValue, {}, parentField); 755 | case 'number': 756 | return new slider(fieldName, fieldValue, {}, parentField); 757 | case 'boolean': 758 | return new checkbox(fieldName, fieldValue, {}, parentField); 759 | case 'object': 760 | return new Section(fieldName, fieldValue, {}, parentField); 761 | default: 762 | return null; 763 | } 764 | } 765 | 766 | function Section(name, inputFields, config, parentField) { 767 | var _this = this; 768 | 769 | config = config || {}; 770 | var displayFields = {}; 771 | var fieldAccessor = {}; 772 | var value = {}; 773 | 774 | field.call(this, name, value, parentField, config); 775 | 776 | this.type = 'section'; 777 | 778 | Object.defineProperty(fieldAccessor, '$field', { 779 | enumerable: false, 780 | value: this 781 | }); 782 | 783 | Object.defineProperties(value, { 784 | '$field': { 785 | enumerable: false, 786 | value: this 787 | }, 788 | '$path': { 789 | enumerable: false, 790 | value: fieldAccessor 791 | }, 792 | '$displayFields': { 793 | enumerable: false, 794 | value: displayFields 795 | } 796 | }); 797 | 798 | Object.keys(inputFields).forEach(function (fieldName) { 799 | var field = displayFields[fieldName] = constructField(fieldName, inputFields[fieldName], _this); 800 | var config = field.$config; 801 | 802 | if (field.type === 'raw' || field.type === 'button') { 803 | 804 | var enumerable = config.enumerable === undefined ? 
false : !!config.enumerable; 805 | 806 | Object.defineProperty(value, fieldName, { 807 | enumerable: enumerable, 808 | get: function () { 809 | return field.value; 810 | } 811 | }); 812 | 813 | Object.defineProperty(fieldAccessor, fieldName, { 814 | enumerable: enumerable, 815 | get: function () { 816 | return field; 817 | } 818 | }); 819 | } else if (field.type === 'section' || field.type === 'tabs') { 820 | 821 | var enumerable = config.enumerable === undefined ? true : !!config.enumerable; 822 | 823 | // For folders, it needs to return the section object with fancy getters and setters 824 | Object.defineProperty(value, fieldName, { 825 | enumerable: enumerable, 826 | value: field.value 827 | }); 828 | 829 | Object.defineProperty(fieldAccessor, fieldName, { 830 | enumerable: enumerable, 831 | value: field.value.$path 832 | }); 833 | } else { 834 | 835 | var enumerable = config.enumerable === undefined ? true : !!config.enumerable; 836 | 837 | Object.defineProperty(value, fieldName, { 838 | enumerable: enumerable, 839 | get: function () { 840 | return field.value; 841 | }, 842 | set: function (value) { 843 | field.value = value; 844 | } 845 | }); 846 | 847 | Object.defineProperty(fieldAccessor, fieldName, { 848 | enumerable: enumerable, 849 | get: function () { 850 | return field; 851 | } 852 | }); 853 | } 854 | }); 855 | 856 | Object.defineProperties(value, { 857 | $onBeforeChanges: { 858 | enumerable: false, 859 | value: this.onBeforeChanges.bind(this) 860 | }, 861 | $onBeforeChange: { 862 | enumerable: false, 863 | value: this.onBeforeChange.bind(this) 864 | }, 865 | 866 | $offBeforeChanges: { 867 | enumerable: false, 868 | value: this.offBeforeChanges.bind(this) 869 | }, 870 | $offBeforeChange: { 871 | enumerable: false, 872 | value: this.offBeforeChange.bind(this) 873 | }, 874 | 875 | $onChanges: { 876 | enumerable: false, 877 | value: this.onChanges.bind(this) 878 | }, 879 | $onChange: { 880 | enumerable: false, 881 | value: this.onChange.bind(this) 882 | }, 
883 | 884 | $offChanges: { 885 | enumerable: false, 886 | value: this.offChanges.bind(this) 887 | }, 888 | $offChange: { 889 | enumerable: false, 890 | value: this.offChange.bind(this) 891 | } 892 | }); 893 | } 894 | 895 | Section.prototype = Object.create(field.prototype); 896 | 897 | var tabs = function Tabs(name, inputFields, config, parentField) { 898 | var section$1 = new section(name, inputFields, config, parentField); 899 | 900 | section$1.type = 'tabs'; 901 | 902 | return section$1; 903 | }; 904 | 905 | var select = Select; 906 | 907 | function Select(name, initialValue, config, parentField) { 908 | if (!(this instanceof Select)) return new Select(name, initialValue, config, parentField); 909 | 910 | initialValue = initialValue === undefined ? null : initialValue; 911 | 912 | field.call(this, name, initialValue, parentField, config); 913 | 914 | this.options = config.options; 915 | 916 | this.type = 'select'; 917 | } 918 | 919 | Select.prototype = Object.create(field.prototype); 920 | 921 | function Controls(fields, options) { 922 | return new section('', fields, options).value; 923 | } 924 | 925 | Controls.Slider = function (value, opts) { 926 | return new slider(null, value, opts); 927 | }; 928 | 929 | Controls.Textinput = function (value, opts) { 930 | return new textinput(null, value, opts); 931 | }; 932 | 933 | Controls.Select = function (value, opts) { 934 | return new select(null, value, opts); 935 | }; 936 | 937 | Controls.Checkbox = function (value, opts) { 938 | return new checkbox(null, value, opts); 939 | }; 940 | 941 | Controls.Color = function (value, opts) { 942 | return new color(null, value, opts); 943 | }; 944 | 945 | Controls.Section = function (value, opts) { 946 | return new section(null, value, opts); 947 | }; 948 | 949 | Controls.Tabs = function (value, opts) { 950 | return new tabs(null, value, opts); 951 | }; 952 | 953 | Controls.Raw = function (value, opts) { 954 | return new raw(null, value, opts); 955 | }; 956 | 957 | var src = 
Controls; 958 | 959 | export default src; 960 | -------------------------------------------------------------------------------- /examples/lib/math-utils.js: -------------------------------------------------------------------------------- 1 | // get a random number between min and max 2 | export function random(min, max) { 3 | return Math.random() * (max - min) + min 4 | } 5 | -------------------------------------------------------------------------------- /examples/lib/three-utils.js: -------------------------------------------------------------------------------- 1 | import { mergeBufferGeometries } from 'https://unpkg.com/three@0.139.2/examples/jsm/utils/BufferGeometryUtils.js' 2 | import { GLTFLoader } from 'https://unpkg.com/three@0.139.2/examples/jsm/loaders/GLTFLoader.js' 3 | 4 | // from https://discourse.threejs.org/t/functions-to-calculate-the-visible-width-height-at-a-given-z-depth-from-a-perspective-camera/269 5 | export function visibleHeightAtZDepth(depth, camera) { 6 | if (camera.isOrthographicCamera) { 7 | return Math.abs(camera.top - camera.bottom) 8 | } 9 | 10 | // compensate for cameras not positioned at z=0 11 | const cameraOffset = camera.position.z 12 | if (depth < cameraOffset) { 13 | depth -= cameraOffset 14 | } else { 15 | depth += cameraOffset 16 | } 17 | 18 | // vertical fov in radians 19 | const vFOV = (camera.fov * Math.PI) / 180 20 | 21 | // Math.abs to ensure the result is always positive 22 | return 2 * Math.tan(vFOV / 2) * Math.abs(depth) 23 | } 24 | 25 | export function visibleWidthAtZDepth(depth, camera) { 26 | if (camera.isOrthographicCamera) { 27 | return Math.abs(camera.right - camera.left) 28 | } 29 | 30 | const height = visibleHeightAtZDepth(depth, camera) 31 | return height * camera.aspect 32 | } 33 | 34 | // extract all geometry from a gltf scene 35 | export function extractGeometry(gltf) { 36 | const geometries = [] 37 | gltf.traverse((child) => { 38 | if (child.isMesh) { 39 | geometries.push(child.geometry) 40 | } 41 | }) 
42 | 43 | return mergeBufferGeometries(geometries) 44 | } 45 | 46 | // promise wrapper of the GLTFLoader 47 | export function loadGltf(url) { 48 | return new Promise((resolve, reject) => { 49 | new GLTFLoader().load(url, resolve, null, reject) 50 | }) 51 | } 52 | -------------------------------------------------------------------------------- /examples/models/cinema_screen.glb: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/wb-ts/three-js-examples/b59e795c353df9c24b635e2718add0106d415f3a/examples/models/cinema_screen.glb -------------------------------------------------------------------------------- /examples/multiple-projections-instancing.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | Multiple projections instancing example - three-projected-material 7 | 8 | 9 | 10 | 11 | 17 | 18 | 19 |

20 | Inspired by @dirkkoy 21 |

22 | 23 | 24 | 25 | 26 | 27 | 34 | 35 | 211 | 212 | 213 | -------------------------------------------------------------------------------- /examples/multiple-projections.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | Multiple projections example - three-projected-material 7 | 8 | 9 | 10 | 11 | 17 | 18 | 19 |

20 | Inspired by @yiwen_lin 21 |
22 |
23 | Click to paint, drag to explore. 24 |

25 | 26 | 27 | 28 | 29 | 30 | 37 | 38 | 149 | 150 | 151 | -------------------------------------------------------------------------------- /examples/orthographic-camera.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | Orthographic camera example - three-projected-material 7 | 8 | 9 | 10 | 11 | 17 | 18 | 19 |

20 | Inspired by @juanuys 21 |

22 | 23 | 24 | 25 | 26 | 27 | 34 | 35 | 110 | 111 | 112 | -------------------------------------------------------------------------------- /examples/same-camera.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | Same camera example - three-projected-material 7 | 8 | 9 | 10 | 11 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 31 | 32 | 114 | 115 | 116 | -------------------------------------------------------------------------------- /examples/screenshots/3d-model.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/wb-ts/three-js-examples/b59e795c353df9c24b635e2718add0106d415f3a/examples/screenshots/3d-model.png -------------------------------------------------------------------------------- /examples/screenshots/basic.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/wb-ts/three-js-examples/b59e795c353df9c24b635e2718add0106d415f3a/examples/screenshots/basic.png -------------------------------------------------------------------------------- /examples/screenshots/envmap.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/wb-ts/three-js-examples/b59e795c353df9c24b635e2718add0106d415f3a/examples/screenshots/envmap.png -------------------------------------------------------------------------------- /examples/screenshots/instancing.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/wb-ts/three-js-examples/b59e795c353df9c24b635e2718add0106d415f3a/examples/screenshots/instancing.png -------------------------------------------------------------------------------- /examples/screenshots/multiple-projections-instancing.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/wb-ts/three-js-examples/b59e795c353df9c24b635e2718add0106d415f3a/examples/screenshots/multiple-projections-instancing.png -------------------------------------------------------------------------------- /examples/screenshots/multiple-projections.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/wb-ts/three-js-examples/b59e795c353df9c24b635e2718add0106d415f3a/examples/screenshots/multiple-projections.png -------------------------------------------------------------------------------- /examples/screenshots/orthographic-camera.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/wb-ts/three-js-examples/b59e795c353df9c24b635e2718add0106d415f3a/examples/screenshots/orthographic-camera.png -------------------------------------------------------------------------------- /examples/screenshots/same-camera.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/wb-ts/three-js-examples/b59e795c353df9c24b635e2718add0106d415f3a/examples/screenshots/same-camera.png -------------------------------------------------------------------------------- /examples/screenshots/transparency.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/wb-ts/three-js-examples/b59e795c353df9c24b635e2718add0106d415f3a/examples/screenshots/transparency.png -------------------------------------------------------------------------------- /examples/screenshots/video.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/wb-ts/three-js-examples/b59e795c353df9c24b635e2718add0106d415f3a/examples/screenshots/video.png -------------------------------------------------------------------------------- /examples/transparency.html: 
-------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | Transparency example - three-projected-material 7 | 8 | 9 | 10 | 11 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 31 | 32 | 97 | 98 | 99 | -------------------------------------------------------------------------------- /examples/video.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | Video example - three-projected-material 7 | 8 | 9 | 10 | 11 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 31 | 32 | 132 | 133 | 134 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "three-projected-material", 3 | "description": "Material which projects a texture onto an object", 4 | "version": "2.1.2", 5 | "type": "module", 6 | "main": "./build/ProjectedMaterial.js", 7 | "module": "./build/ProjectedMaterial.module.js", 8 | "types": "./ProjectedMaterial.d.ts", 9 | "exports": { 10 | ".": { 11 | "import": "./build/ProjectedMaterial.module.js", 12 | "require": "./build/ProjectedMaterial.js" 13 | } 14 | }, 15 | "repository": "git@github.com:wb-ts/three-projected-material.git", 16 | "author": "Dennis Lee ", 17 | "license": "MIT", 18 | "files": [ 19 | "build/" 20 | ], 21 | "scripts": { 22 | "start": "serve examples/", 23 | "build": "rollup -c", 24 | "postbuild": "cp build/ProjectedMaterial.module.js examples/lib/" 25 | }, 26 | "peerDependencies": { 27 | "three": "*" 28 | }, 29 | "devDependencies": { 30 | "@babel/core": "^7.17.9", 31 | "@babel/plugin-proposal-class-properties": "^7.16.7", 32 | "@babel/preset-env": "^7.16.11", 33 | "@rollup/plugin-babel": "^5.3.1", 34 | "@types/three": "^0.139.0", 35 | "eslint-config-accurapp": "^5.1.0", 36 | "rollup": "^2.70.2", 37 | "serve": "^13.0.2", 38 | "stats.js": "^0.17.0", 39 | "three": "0.139.2" 40 | } 41 | } 42 | 
-------------------------------------------------------------------------------- /rollup.config.js: -------------------------------------------------------------------------------- 1 | import babel from '@rollup/plugin-babel' 2 | 3 | const babelOptions = { 4 | babelrc: false, 5 | exclude: '**/node_modules/**', 6 | babelHelpers: 'bundled', 7 | presets: [ 8 | [ 9 | '@babel/preset-env', 10 | { 11 | loose: true, 12 | modules: false, 13 | targets: '>1.5%, not dead, not ie 11, not op_mini all', 14 | }, 15 | ], 16 | ], 17 | plugins: [ 18 | [ 19 | '@babel/plugin-proposal-class-properties', 20 | { 21 | loose: true, 22 | }, 23 | ], 24 | ], 25 | } 26 | 27 | export default [ 28 | { 29 | input: 'src/ProjectedMaterial.js', 30 | external: ['three'], 31 | plugins: [babel(babelOptions)], 32 | output: [ 33 | { 34 | format: 'umd', 35 | globals: { 36 | three: 'THREE', 37 | }, 38 | name: 'projectedMaterial', 39 | exports: 'named', 40 | file: 'build/ProjectedMaterial.js', 41 | }, 42 | { 43 | format: 'esm', 44 | file: 'build/ProjectedMaterial.module.js', 45 | }, 46 | ], 47 | }, 48 | ] 49 | -------------------------------------------------------------------------------- /screenshot.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/wb-ts/three-js-examples/b59e795c353df9c24b635e2718add0106d415f3a/screenshot.png -------------------------------------------------------------------------------- /src/ProjectedMaterial.js: -------------------------------------------------------------------------------- 1 | import * as THREE from 'three' 2 | import { monkeyPatch, addLoadListener } from './three-utils' 3 | 4 | export default class ProjectedMaterial extends THREE.MeshPhysicalMaterial { 5 | // internal values... 
they are exposed via getters 6 | #camera 7 | #cover 8 | #textureScale 9 | 10 | get camera() { 11 | return this.#camera 12 | } 13 | set camera(camera) { 14 | if (!camera || !camera.isCamera) { 15 | throw new Error('Invalid camera set to the ProjectedMaterial') 16 | } 17 | 18 | this.#camera = camera 19 | 20 | this.#saveDimensions() 21 | } 22 | 23 | get texture() { 24 | return this.uniforms.projectedTexture.value 25 | } 26 | set texture(texture) { 27 | if (!texture?.isTexture) { 28 | throw new Error('Invalid texture set to the ProjectedMaterial') 29 | } 30 | 31 | this.uniforms.projectedTexture.value = texture 32 | this.uniforms.isTextureLoaded.value = Boolean(texture.image) 33 | 34 | if (!this.uniforms.isTextureLoaded.value) { 35 | addLoadListener(texture, () => { 36 | this.uniforms.isTextureLoaded.value = true 37 | 38 | this.#saveDimensions() 39 | }) 40 | } else { 41 | this.#saveDimensions() 42 | } 43 | } 44 | 45 | get textureScale() { 46 | return this.#textureScale 47 | } 48 | set textureScale(textureScale) { 49 | this.#textureScale = textureScale 50 | this.#saveDimensions() 51 | } 52 | 53 | get textureOffset() { 54 | return this.uniforms.textureOffset.value 55 | } 56 | set textureOffset(textureOffset) { 57 | this.uniforms.textureOffset.value = textureOffset 58 | } 59 | 60 | get cover() { 61 | return this.#cover 62 | } 63 | set cover(cover) { 64 | this.#cover = cover 65 | this.#saveDimensions() 66 | } 67 | 68 | constructor({ 69 | camera = new THREE.PerspectiveCamera(), 70 | texture = new THREE.Texture(), 71 | textureScale = 1, 72 | textureOffset = new THREE.Vector2(), 73 | cover = false, 74 | ...options 75 | } = {}) { 76 | if (!texture.isTexture) { 77 | throw new Error('Invalid texture passed to the ProjectedMaterial') 78 | } 79 | 80 | if (!camera.isCamera) { 81 | throw new Error('Invalid camera passed to the ProjectedMaterial') 82 | } 83 | 84 | super(options) 85 | 86 | Object.defineProperty(this, 'isProjectedMaterial', { value: true }) 87 | 88 | // save the private 
variables 89 | this.#camera = camera 90 | this.#cover = cover 91 | this.#textureScale = textureScale 92 | 93 | // scale to keep the image proportions and apply textureScale 94 | const [widthScaled, heightScaled] = computeScaledDimensions( 95 | texture, 96 | camera, 97 | textureScale, 98 | cover 99 | ) 100 | 101 | this.uniforms = { 102 | projectedTexture: { value: texture }, 103 | // this avoids rendering black if the texture 104 | // hasn't loaded yet 105 | isTextureLoaded: { value: Boolean(texture.image) }, 106 | // don't show the texture if we haven't called project() 107 | isTextureProjected: { value: false }, 108 | // if we have multiple materials we want to show the 109 | // background only of the first material 110 | backgroundOpacity: { value: 1 }, 111 | // these will be set on project() 112 | viewMatrixCamera: { value: new THREE.Matrix4() }, 113 | projectionMatrixCamera: { value: new THREE.Matrix4() }, 114 | projPosition: { value: new THREE.Vector3() }, 115 | projDirection: { value: new THREE.Vector3(0, 0, -1) }, 116 | // we will set this later when we will have positioned the object 117 | savedModelMatrix: { value: new THREE.Matrix4() }, 118 | widthScaled: { value: widthScaled }, 119 | heightScaled: { value: heightScaled }, 120 | textureOffset: { value: textureOffset }, 121 | } 122 | 123 | this.onBeforeCompile = (shader) => { 124 | // expose also the material's uniforms 125 | Object.assign(this.uniforms, shader.uniforms) 126 | shader.uniforms = this.uniforms 127 | 128 | if (this.camera.isOrthographicCamera) { 129 | shader.defines.ORTHOGRAPHIC = '' 130 | } 131 | 132 | shader.vertexShader = monkeyPatch(shader.vertexShader, { 133 | header: /* glsl */ ` 134 | uniform mat4 viewMatrixCamera; 135 | uniform mat4 projectionMatrixCamera; 136 | 137 | #ifdef USE_INSTANCING 138 | attribute vec4 savedModelMatrix0; 139 | attribute vec4 savedModelMatrix1; 140 | attribute vec4 savedModelMatrix2; 141 | attribute vec4 savedModelMatrix3; 142 | #else 143 | uniform mat4 
savedModelMatrix; 144 | #endif 145 | 146 | varying vec3 vSavedNormal; 147 | varying vec4 vTexCoords; 148 | #ifndef ORTHOGRAPHIC 149 | varying vec4 vWorldPosition; 150 | #endif 151 | `, 152 | main: /* glsl */ ` 153 | #ifdef USE_INSTANCING 154 | mat4 savedModelMatrix = mat4( 155 | savedModelMatrix0, 156 | savedModelMatrix1, 157 | savedModelMatrix2, 158 | savedModelMatrix3 159 | ); 160 | #endif 161 | 162 | vSavedNormal = mat3(savedModelMatrix) * normal; 163 | vTexCoords = projectionMatrixCamera * viewMatrixCamera * savedModelMatrix * vec4(position, 1.0); 164 | #ifndef ORTHOGRAPHIC 165 | vWorldPosition = savedModelMatrix * vec4(position, 1.0); 166 | #endif 167 | `, 168 | }) 169 | 170 | shader.fragmentShader = monkeyPatch(shader.fragmentShader, { 171 | header: /* glsl */ ` 172 | uniform sampler2D projectedTexture; 173 | uniform bool isTextureLoaded; 174 | uniform bool isTextureProjected; 175 | uniform float backgroundOpacity; 176 | uniform vec3 projPosition; 177 | uniform vec3 projDirection; 178 | uniform float widthScaled; 179 | uniform float heightScaled; 180 | uniform vec2 textureOffset; 181 | 182 | varying vec3 vSavedNormal; 183 | varying vec4 vTexCoords; 184 | #ifndef ORTHOGRAPHIC 185 | varying vec4 vWorldPosition; 186 | #endif 187 | 188 | float mapRange(float value, float min1, float max1, float min2, float max2) { 189 | return min2 + (value - min1) * (max2 - min2) / (max1 - min1); 190 | } 191 | `, 192 | 'vec4 diffuseColor = vec4( diffuse, opacity );': /* glsl */ ` 193 | // clamp the w to make sure we don't project behind 194 | float w = max(vTexCoords.w, 0.0); 195 | 196 | vec2 uv = (vTexCoords.xy / w) * 0.5 + 0.5; 197 | 198 | uv += textureOffset; 199 | 200 | // apply the corrected width and height 201 | uv.x = mapRange(uv.x, 0.0, 1.0, 0.5 - widthScaled / 2.0, 0.5 + widthScaled / 2.0); 202 | uv.y = mapRange(uv.y, 0.0, 1.0, 0.5 - heightScaled / 2.0, 0.5 + heightScaled / 2.0); 203 | 204 | // this makes sure we don't sample out of the texture 205 | bool isInTexture = 
(max(uv.x, uv.y) <= 1.0 && min(uv.x, uv.y) >= 0.0); 206 | 207 | // this makes sure we don't render also the back of the object 208 | #ifdef ORTHOGRAPHIC 209 | vec3 projectorDirection = projDirection; 210 | #else 211 | vec3 projectorDirection = normalize(projPosition - vWorldPosition.xyz); 212 | #endif 213 | float dotProduct = dot(vSavedNormal, projectorDirection); 214 | bool isFacingProjector = dotProduct > 0.0000001; 215 | 216 | 217 | vec4 diffuseColor = vec4(diffuse, opacity * backgroundOpacity); 218 | 219 | if (isFacingProjector && isInTexture && isTextureLoaded && isTextureProjected) { 220 | vec4 textureColor = texture2D(projectedTexture, uv); 221 | 222 | // apply the material opacity 223 | textureColor.a *= opacity; 224 | 225 | // https://learnopengl.com/Advanced-OpenGL/Blending 226 | diffuseColor = textureColor * textureColor.a + diffuseColor * (1.0 - textureColor.a); 227 | } 228 | `, 229 | }) 230 | } 231 | 232 | // Listen on resize if the camera used for the projection 233 | // is the same used to render. 234 | // We do this on window resize because there is no way to 235 | // listen for the resize of the renderer 236 | window.addEventListener('resize', this.#saveCameraProjectionMatrix) 237 | 238 | // If the image texture passed hasn't loaded yet, 239 | // wait for it to load and compute the correct proportions. 
240 | // This avoids rendering black while the texture is loading 241 | addLoadListener(texture, () => { 242 | this.uniforms.isTextureLoaded.value = true 243 | 244 | this.#saveDimensions() 245 | }) 246 | } 247 | 248 | #saveCameraProjectionMatrix = () => { 249 | this.uniforms.projectionMatrixCamera.value.copy(this.camera.projectionMatrix) 250 | 251 | this.#saveDimensions() 252 | } 253 | 254 | #saveDimensions() { 255 | const [widthScaled, heightScaled] = computeScaledDimensions( 256 | this.texture, 257 | this.camera, 258 | this.textureScale, 259 | this.cover 260 | ) 261 | 262 | this.uniforms.widthScaled.value = widthScaled 263 | this.uniforms.heightScaled.value = heightScaled 264 | } 265 | 266 | #saveCameraMatrices() { 267 | // make sure the camera matrices are updated 268 | this.camera.updateProjectionMatrix() 269 | this.camera.updateMatrixWorld() 270 | this.camera.updateWorldMatrix() 271 | 272 | // update the uniforms from the camera so they're 273 | // fixed in the camera's position at the projection time 274 | const viewMatrixCamera = this.camera.matrixWorldInverse 275 | const projectionMatrixCamera = this.camera.projectionMatrix 276 | const modelMatrixCamera = this.camera.matrixWorld 277 | 278 | this.uniforms.viewMatrixCamera.value.copy(viewMatrixCamera) 279 | this.uniforms.projectionMatrixCamera.value.copy(projectionMatrixCamera) 280 | this.uniforms.projPosition.value.copy(this.camera.position) 281 | this.uniforms.projDirection.value.set(0, 0, 1).applyMatrix4(modelMatrixCamera) 282 | 283 | // tell the shader we've projected 284 | this.uniforms.isTextureProjected.value = true 285 | } 286 | 287 | project(mesh) { 288 | if ( 289 | !(Array.isArray(mesh.material) 290 | ? mesh.material.every((m) => m.isProjectedMaterial) 291 | : mesh.material.isProjectedMaterial) 292 | ) { 293 | throw new Error(`The mesh material must be a ProjectedMaterial`) 294 | } 295 | 296 | if ( 297 | !(Array.isArray(mesh.material) 298 | ? 
mesh.material.some((m) => m === this) 299 | : mesh.material === this) 300 | ) { 301 | throw new Error( 302 | `The provided mesh doesn't have the same material as where project() has been called from` 303 | ) 304 | } 305 | 306 | // make sure the matrix is updated 307 | mesh.updateWorldMatrix(true, false) 308 | 309 | // we save the object model matrix so it's projected relative 310 | // to that position, like a snapshot 311 | this.uniforms.savedModelMatrix.value.copy(mesh.matrixWorld) 312 | 313 | // if the material is not the first, output just the texture 314 | if (Array.isArray(mesh.material)) { 315 | const materialIndex = mesh.material.indexOf(this) 316 | if (!mesh.material[materialIndex].transparent) { 317 | throw new Error( 318 | `You have to pass "transparent: true" to the ProjectedMaterial if you're working with multiple materials.` 319 | ) 320 | } 321 | if (materialIndex > 0) { 322 | this.uniforms.backgroundOpacity.value = 0 323 | } 324 | } 325 | 326 | // persist also the current camera position and matrices 327 | this.#saveCameraMatrices() 328 | } 329 | 330 | projectInstanceAt(index, instancedMesh, matrixWorld, { forceCameraSave = false } = {}) { 331 | if (!instancedMesh.isInstancedMesh) { 332 | throw new Error(`The provided mesh is not an InstancedMesh`) 333 | } 334 | 335 | if ( 336 | !(Array.isArray(instancedMesh.material) 337 | ? instancedMesh.material.every((m) => m.isProjectedMaterial) 338 | : instancedMesh.material.isProjectedMaterial) 339 | ) { 340 | throw new Error(`The InstancedMesh material must be a ProjectedMaterial`) 341 | } 342 | 343 | if ( 344 | !(Array.isArray(instancedMesh.material) 345 | ? 
instancedMesh.material.some((m) => m === this) 346 | : instancedMesh.material === this) 347 | ) { 348 | throw new Error( 349 | `The provided InstancedMeshhave't i samenclude thas e material where project() has been called from` 350 | ) 351 | } 352 | 353 | if ( 354 | !instancedMesh.geometry.attributes[`savedModelMatrix0`] || 355 | !instancedMesh.geometry.attributes[`savedModelMatrix1`] || 356 | !instancedMesh.geometry.attributes[`savedModelMatrix2`] || 357 | !instancedMesh.geometry.attributes[`savedModelMatrix3`] 358 | ) { 359 | throw new Error( 360 | `No allocated data found on the geometry, please call 'allocateProjectionData(geometry, instancesCount)'` 361 | ) 362 | } 363 | 364 | instancedMesh.geometry.attributes[`savedModelMatrix0`].setXYZW( 365 | index, 366 | matrixWorld.elements[0], 367 | matrixWorld.elements[1], 368 | matrixWorld.elements[2], 369 | matrixWorld.elements[3] 370 | ) 371 | instancedMesh.geometry.attributes[`savedModelMatrix1`].setXYZW( 372 | index, 373 | matrixWorld.elements[4], 374 | matrixWorld.elements[5], 375 | matrixWorld.elements[6], 376 | matrixWorld.elements[7] 377 | ) 378 | instancedMesh.geometry.attributes[`savedModelMatrix2`].setXYZW( 379 | index, 380 | matrixWorld.elements[8], 381 | matrixWorld.elements[9], 382 | matrixWorld.elements[10], 383 | matrixWorld.elements[11] 384 | ) 385 | instancedMesh.geometry.attributes[`savedModelMatrix3`].setXYZW( 386 | index, 387 | matrixWorld.elements[12], 388 | matrixWorld.elements[13], 389 | matrixWorld.elements[14], 390 | matrixWorld.elements[15] 391 | ) 392 | 393 | // if the material is not the first, output just the texture 394 | if (Array.isArray(instancedMesh.material)) { 395 | const materialIndex = instancedMesh.material.indexOf(this) 396 | if (!instancedMesh.material[materialIndex].transparent) { 397 | throw new Error( 398 | `You have to pass "transparent: true" to the ProjectedMaterial if you're working with multiple materials.` 399 | ) 400 | } 401 | if (materialIndex > 0) { 402 | 
this.uniforms.backgroundOpacity.value = 0 403 | } 404 | } 405 | 406 | // persist the current camera position and matrices 407 | // only if it's the first instance since most surely 408 | // in all other instances the camera won't change 409 | if (index === 0 || forceCameraSave) { 410 | this.#saveCameraMatrices() 411 | } 412 | } 413 | 414 | copy(source) { 415 | super.copy(source) 416 | 417 | this.camera = source.camera 418 | this.texture = source.texture 419 | this.textureScale = source.textureScale 420 | this.textureOffset = source.textureOffset 421 | this.cover = source.cover 422 | 423 | return this 424 | } 425 | 426 | dispose() { 427 | super.dispose() 428 | window.removeEventListener('resize', this.#saveCameraProjectionMatrix) 429 | } 430 | } 431 | 432 | // get camera ratio from different types of cameras 433 | function getCameraRatio(camera) { 434 | switch (camera.type) { 435 | case 'PerspectiveCamera': { 436 | return camera.aspect 437 | } 438 | case 'OrthographicCamera': { 439 | const width = Math.abs(camera.right - camera.left) 440 | const height = Math.abs(camera.top - camera.bottom) 441 | return width / height 442 | } 443 | default: { 444 | throw new Error(`${camera.type} is currently not supported in ProjectedMaterial`) 445 | } 446 | } 447 | } 448 | 449 | // scale to keep the image proportions and apply textureScale 450 | function computeScaledDimensions(texture, camera, textureScale, cover) { 451 | // return some default values if the image hasn't loaded yet 452 | if (!texture.image) { 453 | return [1, 1] 454 | } 455 | 456 | // return if it's a video and if the video hasn't loaded yet 457 | if (texture.image.videoWidth === 0 && texture.image.videoHeight === 0) { 458 | return [1, 1] 459 | } 460 | 461 | const sourceWidth = 462 | texture.image.naturalWidth || texture.image.videoWidth || texture.image.clientWidth 463 | const sourceHeight = 464 | texture.image.naturalHeight || texture.image.videoHeight || texture.image.clientHeight 465 | 466 | const ratio = 
sourceWidth / sourceHeight 467 | const ratioCamera = getCameraRatio(camera) 468 | const widthCamera = 1 469 | const heightCamera = widthCamera * (1 / ratioCamera) 470 | let widthScaled 471 | let heightScaled 472 | if (cover ? ratio > ratioCamera : ratio < ratioCamera) { 473 | const width = heightCamera * ratio 474 | widthScaled = 1 / ((width / widthCamera) * textureScale) 475 | heightScaled = 1 / textureScale 476 | } else { 477 | const height = widthCamera * (1 / ratio) 478 | heightScaled = 1 / ((height / heightCamera) * textureScale) 479 | widthScaled = 1 / textureScale 480 | } 481 | 482 | return [widthScaled, heightScaled] 483 | } 484 | 485 | export function allocateProjectionData(geometry, instancesCount) { 486 | geometry.setAttribute( 487 | `savedModelMatrix0`, 488 | new THREE.InstancedBufferAttribute(new Float32Array(instancesCount * 4), 4) 489 | ) 490 | geometry.setAttribute( 491 | `savedModelMatrix1`, 492 | new THREE.InstancedBufferAttribute(new Float32Array(instancesCount * 4), 4) 493 | ) 494 | geometry.setAttribute( 495 | `savedModelMatrix2`, 496 | new THREE.InstancedBufferAttribute(new Float32Array(instancesCount * 4), 4) 497 | ) 498 | geometry.setAttribute( 499 | `savedModelMatrix3`, 500 | new THREE.InstancedBufferAttribute(new Float32Array(instancesCount * 4), 4) 501 | ) 502 | } 503 | -------------------------------------------------------------------------------- /src/three-utils.js: -------------------------------------------------------------------------------- 1 | export function monkeyPatch(shader, { defines = '', header = '', main = '', ...replaces }) { 2 | let patchedShader = shader 3 | 4 | const replaceAll = (str, find, rep) => str.split(find).join(rep) 5 | Object.keys(replaces).forEach((key) => { 6 | patchedShader = replaceAll(patchedShader, key, replaces[key]) 7 | }) 8 | 9 | patchedShader = patchedShader.replace( 10 | 'void main() {', 11 | ` 12 | ${header} 13 | void main() { 14 | ${main} 15 | ` 16 | ) 17 | 18 | const stringDefines = 
Object.keys(defines) 19 | .map((d) => `#define ${d} ${defines[d]}`) 20 | .join('\n') 21 | 22 | return ` 23 | ${stringDefines} 24 | ${patchedShader} 25 | ` 26 | } 27 | 28 | // run the callback when the image will be loaded 29 | export function addLoadListener(texture, callback) { 30 | // return if it's already loaded 31 | if (texture.image && texture.image.videoWidth !== 0 && texture.image.videoHeight !== 0) { 32 | return 33 | } 34 | 35 | const interval = setInterval(() => { 36 | if (texture.image && texture.image.videoWidth !== 0 && texture.image.videoHeight !== 0) { 37 | clearInterval(interval) 38 | return callback(texture) 39 | } 40 | }, 16) 41 | } 42 | --------------------------------------------------------------------------------