├── .gitignore ├── CODE_OF_CONDUCT.md ├── CODING.md ├── CONTRIBUTING.md ├── LICENSE ├── README.md ├── dist-footer.js ├── dist-header.js ├── examples ├── ar_anchors │ ├── index.html │ └── screenshot.jpeg ├── ar_simplest │ ├── index.html │ └── screenshot.jpeg ├── boombox │ ├── index.html │ └── screenshot.jpeg ├── common.css ├── common.js ├── face_tracking │ ├── DuckyMesh.glb │ ├── glasses │ │ ├── glasses.bin │ │ ├── glasses.gltf │ │ └── textures │ │ │ ├── Glasses1_baseColor.jpg │ │ │ ├── Glasses1_metallicRoughness.png │ │ │ ├── Glasses1_normal.png │ │ │ ├── Glasses2_baseColor.jpg │ │ │ ├── Glasses2_metallicRoughness.png │ │ │ ├── Glasses2_normal.png │ │ │ ├── Lenses_baseColor.jpg │ │ │ ├── Lenses_metallicRoughness.png │ │ │ └── Lenses_normal.png │ ├── index.html │ └── screenshot.jpg ├── hit_test │ ├── index.html │ └── screenshot.jpeg ├── image_detection │ ├── DuckyMesh.glb │ ├── hubs.png │ ├── index.html │ └── screenshot.jpg ├── libs │ ├── dat.gui.min.js │ ├── loaders │ │ └── BinaryLoader.js │ ├── postprocessing │ │ ├── BloomPass.js │ │ ├── EffectComposer.js │ │ ├── FilmPass.js │ │ ├── MaskPass.js │ │ ├── RenderPass.js │ │ └── ShaderPass.js │ ├── shaders │ │ ├── ConvolutionShader.js │ │ ├── CopyShader.js │ │ ├── FilmShader.js │ │ └── FocusShader.js │ ├── stats.js │ ├── three-gltf-loader.js │ ├── three-mtl-loader.js │ ├── three-obj-loader.js │ ├── three.js │ └── three.min.js ├── light │ ├── index.html │ └── screenshot.jpeg ├── models │ ├── Axis.mtl │ ├── Axis.obj │ ├── BoomBox │ │ ├── README.md │ │ ├── glTF-Binary │ │ │ └── BoomBox.glb │ │ ├── glTF-pbrSpecularGlossiness │ │ │ ├── BoomBox.bin │ │ │ ├── BoomBox.gltf │ │ │ ├── BoomBox_baseColor ORIG.png │ │ │ ├── BoomBox_baseColor.png │ │ │ ├── BoomBox_diffuse ORIG.png │ │ │ ├── BoomBox_diffuse.png │ │ │ ├── BoomBox_emissive ORIG.png │ │ │ ├── BoomBox_emissive.png │ │ │ ├── BoomBox_normal ORIG.png │ │ │ ├── BoomBox_normal.png │ │ │ ├── BoomBox_occlusion ORIG.png │ │ │ ├── BoomBox_occlusion.png │ │ │ ├── 
BoomBox_roughnessMetallic ORIG.png │ │ │ ├── BoomBox_roughnessMetallic.png │ │ │ ├── BoomBox_specularGlossiness ORIG.png │ │ │ └── BoomBox_specularGlossiness.png │ │ ├── glTF │ │ │ ├── BoomBox.bin │ │ │ ├── BoomBox.gltf │ │ │ ├── BoomBox_baseColor.png │ │ │ ├── BoomBox_emissive.png │ │ │ ├── BoomBox_normal.png │ │ │ └── BoomBox_occlusionRoughnessMetallic.png │ │ └── screenshot │ │ │ └── screenshot.jpg │ ├── TeapotBufferGeometry.js │ ├── female02 │ │ ├── 01_-_Default1noCulling.JPG │ │ ├── 02_-_Default1noCulling.JPG │ │ ├── 03_-_Default1noCulling.JPG │ │ ├── Female02_bin.bin │ │ ├── Female02_bin.js │ │ ├── Female02_slim.js │ │ ├── female02.mtl │ │ ├── female02.obj │ │ ├── female02_vertex_colors.obj │ │ └── readme.txt │ └── male02 │ │ ├── 01_-_Default1noCulling.JPG │ │ ├── 01_-_Default1noCulling.dds │ │ ├── Male02_bin.bin │ │ ├── Male02_bin.js │ │ ├── Male02_dds.js │ │ ├── Male02_slim.js │ │ ├── male-02-1noCulling.JPG │ │ ├── male-02-1noCulling.dds │ │ ├── male02.mtl │ │ ├── male02.obj │ │ ├── male02_dds.mtl │ │ ├── orig_02_-_Defaul1noCulling.JPG │ │ ├── orig_02_-_Defaul1noCulling.dds │ │ └── readme.txt ├── opencv-aruco │ ├── createMarker.html │ ├── index.html │ ├── opencv.js │ ├── opencv_js.wasm │ ├── screenshot.png │ └── worker.js ├── opencv-face │ ├── haarcascade_eye.xml │ ├── haarcascade_frontalface_default.xml │ ├── haarcascade_profileface.xml │ ├── index.html │ ├── old-rotate-resize.js │ ├── opencv.js │ ├── opencv_js.wasm │ ├── screenshot.png │ └── worker.js ├── peoples │ ├── index.html │ └── screenshot.jpeg ├── persistence │ ├── index.html │ └── screenshot.jpeg ├── reticle │ ├── index.html │ └── screenshot.jpeg ├── sensing │ ├── index.html │ └── screenshot.jpg ├── simplecv │ ├── index.html │ ├── screenshot.png │ ├── target-28139_64.png │ ├── webxr-worker.js │ └── worker.js ├── textures │ └── Park2 │ │ ├── negx.jpg │ │ ├── negy.jpg │ │ ├── negz.jpg │ │ ├── posx.jpg │ │ ├── posy.jpg │ │ ├── posz.jpg │ │ └── readme.txt └── vr_simplest │ └── index.html ├── 
index.html ├── package-lock.json ├── package.json ├── polyfill ├── Reality.js ├── XRAnchor.js ├── XRAnchorOffset.js ├── XRCoordinateSystem.js ├── XRDisplay.js ├── XRFaceAnchor.js ├── XRFieldOfView.js ├── XRImageAnchor.js ├── XRLayer.js ├── XRLightEstimate.js ├── XRPlaneAnchor.js ├── XRPointCloud.js ├── XRPolyfill.js ├── XRPresentationFrame.js ├── XRSession.js ├── XRSessionCreateParameters.js ├── XRStageBounds.js ├── XRStageBoundsPoint.js ├── XRVideoFrame.js ├── XRView.js ├── XRViewPose.js ├── XRViewport.js ├── XRWebGLLayer.js ├── XRWorkerPolyfill.js ├── display │ ├── FlatDisplay.js │ └── HeadMountedDisplay.js ├── fill │ ├── DeviceOrientationTracker.js │ ├── Euler.js │ ├── EventHandlerBase.js │ ├── MatrixMath.js │ ├── Quaternion.js │ ├── Vector3.js │ ├── base64-binary.js │ ├── gl-matrix-min.js │ ├── gl-matrix.js │ └── gl-matrix │ │ ├── common.js │ │ ├── mat2.js │ │ ├── mat2d.js │ │ ├── mat3.js │ │ ├── mat4.js │ │ ├── quat.js │ │ ├── vec2.js │ │ ├── vec3.js │ │ └── vec4.js ├── platform │ ├── ARCoreCameraRenderer.js │ └── ARKitWrapper.js └── reality │ ├── CameraReality.js │ └── VirtualReality.js ├── screenshots ├── apainter.png └── xrstore.jpg ├── tests ├── CoordinatesTest.js ├── Test.js └── index.html ├── viewer.html └── webpack.config.js /.gitignore: -------------------------------------------------------------------------------- 1 | node_modules/ 2 | dist/ 3 | -------------------------------------------------------------------------------- /CODE_OF_CONDUCT.md: -------------------------------------------------------------------------------- 1 | # Community Participation Guidelines 2 | 3 | This repository is governed by Mozilla's code of conduct and etiquette guidelines. 4 | For more details, please read the 5 | [Mozilla Community Participation Guidelines](https://www.mozilla.org/about/governance/policies/participation/). 
6 | 7 | ## How to Report 8 | For more information on how to report violations of the Community Participation Guidelines, please read our '[How to Report](https://www.mozilla.org/about/governance/policies/participation/reporting/)' page. 9 | 10 | 16 | -------------------------------------------------------------------------------- /CODING.md: -------------------------------------------------------------------------------- 1 | # How to code against the WebXR APIs 2 | 3 | Working examples of this type of code can be found in the [examples directory](https://github.com/mozilla/webxr-polyfill/tree/master/examples). 4 | 5 | ## Session setup 6 | 7 | The basic pattern is to iterate through the `XRDisplay` instances to find the one that you want to use, based on whether it's external and whether it is a pass-through display. Once you have a display, you ask it for an `XRSession` that is either for rendering a `Reality` or rendering an augmentation on top of a `Reality`. 8 | 9 | let displays = null // A list of XRDisplay 10 | let display = null // The display we'll use 11 | let session = null // The XRSession we'll use 12 | let canvas = document.createElement('canvas') // The canvas into which we'll render 13 | let anchoredNodes = [] // An array of { anchorOffset: XRAnchorOffset, node: } 14 | 15 | // Get displays and then request a session 16 | navigator.XR.getDisplays().then(disps => { 17 | if(disps.length == 0) { 18 | // No displays are available 19 | return 20 | } 21 | displays = disps 22 | }).catch(err => { 23 | console.error('Error getting XR displays', err) 24 | }) 25 | 26 | Once you have the displays, you look for one that will support the type of session that you want to start: 27 | 28 | // Set up the options for the type of session we want 29 | let sessionInitOptions = { 30 | exclusive: false, // True if you want only this session to have access to the display 31 | type: 'augmentation' // do you want the session to create a 'reality' or offer an 'augmentation' on an 
existing `Reality` 32 | outputContext: new XRPresentationContext(canvas) // Then canvas into which we'll render 33 | } 34 | // Now test each display 35 | for(let disp of displays){ 36 | if(display.supportsSession(sessionInitOptions)){ 37 | display = disp 38 | break 39 | } 40 | } 41 | 42 | Once you have a display and the user has chosen to start using it, you ask the display for an `XRSession` and request the first frame: 43 | 44 | display.requestSession(sessionInitOptions).then(sess => { 45 | session = sess 46 | session.depthNear = 0.1 47 | session.depthFar = 1000.0 48 | 49 | session.requestFrame(handleFrame) 50 | )} 51 | 52 | ## Per-frame rendering 53 | 54 | The scene, camera, and renderer objects below are representative APIs that have equivalents in most WebGL libs like Three.js: 55 | 56 | function handleFrame(frame){ 57 | // Set up for the next frame 58 | session.requestFrame(frame => { handleFrame(frame) }) 59 | 60 | // Get the pose for the head 61 | let headCoordinateSystem = frame.getCoordinateSystem(XRCoordinateSystem.HEAD_MODEL) 62 | let headPose = frame.getDsiplayPose(frame.getCoordinateSystem(headCoordinateSystem) 63 | 64 | // XXX Below we will add code here to add and manage anchors 65 | 66 | // Displays can have one or more views. A magic window display has one, a headset has two (one for each eye), so iterate through each. 
67 | for(const view of frame.views){ 68 | 69 | // Each XRView has its own projection matrix, so set the camera to use that 70 | camera.projectionMatrix = view.projectionMatrix 71 | 72 | // Rotate the scene around the camera using the head pose 73 | scene.matrix = headPose.getViewMatrix(view) 74 | 75 | // Set up the renderer to the XRView's viewport and then render 76 | const viewport = view.getViewport(session.baseLayer) 77 | renderer.setViewport(viewport.x, viewport.y, viewport.width, viewport.height) 78 | renderer.render(scene, camera) 79 | } 80 | } 81 | 82 | ## Finding and updating anchors 83 | 84 | Anchors are places in space that the AR system is tracking for you. They could be a surface like a floor or table, a feature like a door knob, or just a point in space relative to the world coordinate system. When you place virtual objects in XR, you find an `XRAnchor` to attach it to, possibly with an `XRAnchorOffset` to indicate a position relative to the anchor. 85 | 86 | The reason that you use anchors instead of just placing objects in a global coordinate system is that AR systems may change their relative position over time as they sense the world. A table may shift. The system may refine its estimate of the location of the floor or a wall. 87 | 88 | First, let's add an anchor just floated in space a meter in front of the current head position. 
89 | 90 | This code uses the `XRPresentationFrame`, so it would live in the `handleFrame` method above, where the '// XXX' comment is: 91 | 92 | const sceneNode = createSceneNode() // if using Three.js, could be an Object3D or a Group 93 | let anchorUID = frame.addAnchor(headCoordinateSystem, [0, 0, -1]) 94 | scene.add(sceneNode) // Now the node is in the scene 95 | // Save this info for update during each frame 96 | anchoredNodes.push({ 97 | anchorOffset: new XRAnchorOffset(anchor.uid), 98 | node: sceneNode 99 | }) 100 | 101 | Now search for an anchor on a surface like a floor or table: 102 | 103 | frame.findAnchor(x, y).then(anchorOffset => { 104 | if(anchorOffset === null){ 105 | // no surface was found to place the anchor 106 | return 107 | } 108 | const node = createSceneNode() 109 | // Add the node to the scene 110 | scene.add(node) 111 | // Save this info for update during each frame 112 | anchoredNodes.push({ 113 | anchorOffset: anchorOffset, 114 | node: node 115 | }) 116 | }) 117 | 118 | You now have a couple of anchored nodes save in `anchoredNodes`, so during each frame use the most recent anchor info to update the node position: 119 | 120 | for(let anchoredNode of anchoredNodes){ 121 | // Get the updated anchor info 122 | const anchor = frame.getAnchor(anchoredNode.anchorOffset.anchorUID) 123 | // Get the offset coordinates relative to the anchor's coordinate system 124 | let offsetTransform = anchoredNode.anchorOffset.getOffsetTransform(anchor.coordinateSystem) 125 | // Now use the offset transform to position the anchored node in the scene 126 | anchoredNode.node.matrix = offsetTransform 127 | } 128 | 129 | 130 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | Contributing to webxr-polyfill 2 | ====================== 3 | 4 | This is an open source project and we love to receive contributions from the community. 
There are many ways to contribute, from writing documentation, submitting bug reports and feature requests or writing code. 5 | We would also love to hear how you are using this code and to receive contributions that make it easier to deploy and integrate. 6 | 7 | Bug reports 8 | ----------- 9 | 10 | If you think you have found a bug, first make sure that you are testing against the [master branch](https://github.com/mozilla/webxr-polyfill) - your issue may already have been fixed. If not, search our [issues list](https://github.com/mozilla/webxr-polyfill/issues) on GitHub in the event a similar issue has already been opened. 11 | 12 | It is very helpful if you can provide enough information to replicate the bug. In other words, provide a small test case which we can run to confirm your bug. It makes it easier to find the problem and resolve it. 13 | 14 | Provide as much information as you can. The easier it is for us to recreate your problem, the faster we can fix it. 15 | 16 | Feature requests 17 | ---------------- 18 | 19 | If you are looking for a feature that doesn't exist currently, you are probably not alone. 20 | Open an issue on our [issues list](https://github.com/mozilla/webxr-polyfill/issues) on GitHub which describes the feature you would like to see, the value it provides, and how it should work. 21 | If you attach diagrams or mockups, it would be super nice ;-). 22 | 23 | Contributing code and documentation changes 24 | ------------------------------------------- 25 | 26 | If you have a bugfix or new feature that you would like to contribute, please search through our issues and see if one exists, or open an issue about it first. Explain what you would like to do. It is possible someone has already begun to work on it, or that there are existing issues that you should know about before implementing the change. 27 | 28 | We enjoy working with contributors to get their code accepted. 
There are many approaches to fixing a problem and it is important to find the best approach before writing too much code. 29 | 30 | The process is described below. 31 | 32 | ### Fork and clone the repository 33 | 34 | You will need to fork the main [repository](https://github.com/mozilla/webxr-polyfill) and clone it to your local machine. See 35 | [github help page](https://help.github.com/articles/fork-a-repo) for help. 36 | 37 | Push your local changes to your forked copy of the repository and [submit a pull request](https://help.github.com/articles/using-pull-requests). In the pull request, describe what your changes do and mention the number of the issue where discussion has taken place, eg "Closes #123". 38 | 39 | Then sit back and wait. There will probably be discussion about the pull request, and if any changes are needed, we would love to work with you to get your pull request merged. 40 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # (deprecated, experimental) WebXR polyfill with examples 2 | 3 | The API for "WebXR" implemented in this repository is based on a [proposed draft proposal for WebXR](https://github.com/mozilla/webxr-api) we created as a starting point for discussing WebXR in the fall of 2017, to explore what it might mean to expand WebVR to include AR/MR capabilities. 4 | 5 | We initially created this polyfill when the community group was calling the specification "WebVR", so using "WebXR" was not confusing. Now that the community group is working towards changing the name of the spec, this repo may be confusing to newcomers. 6 | 7 | We're working to bring this repo's master branch in line with the community group's draft spec. But that work is not yet complete. 8 | 9 | The WebVR community has shifted WebVR in this direction. 
The group is now called the [Immersive Web Community Group](https://github.com/immersive-web/) and the WebVR specification has now become the [WebXR Device API](https://github.com/immersive-web/webxr). You should consider that spec as ground-truth for WebXR, and it is what you will likely see appearing in browsers through the rest of 2018 and into 2019. 10 | 11 | We will continue to experiment with extensions to, and new ideas for, WebXR in this library. Soon, we expect it to be integrated directly in our [WebXR Viewer iOS app](https://github.com/mozilla-mobile/webxr-ios) and no longer be included directly in any web pages. 12 | 13 | ## WebXR library with examples 14 | 15 | This repository holds an implementation of a non-compliant version of WebXR, along with sample code demonstrating how to use the API. 16 | 17 | ## WARNING 18 | 19 | THIS SOFTWARE IS NON-STANDARD AND PRERELEASE, IS *NOT* READY FOR PRODUCTION USE, AND *WILL* SOON HAVE BREAKING CHANGES. 20 | 21 | NOTHING IN THIS REPO COMES WITH ANY WARRENTY WHATSOEVER. DO NOT USE IT FOR ANYTHING EXCEPT EXPERIMENTS. 22 | 23 | There may be pieces of the library that are stubbed out and throw 'Not implemented' when called. 
24 | 25 | ## Running the examples 26 | 27 | The master branch of this repo is automatically built and hosted at https://examples.webxrexperiments.com 28 | 29 | The develop branch is hosted at https://develop.examples.webxrexperiments.com 30 | 31 | ## Building and Running the examples 32 | 33 | Clone this repo and then change directories into webxr-polyfill/ 34 | 35 | Install npm and then run the following: 36 | 37 | npm install # downloads webpack and an http server 38 | npm start # builds the polyfill in dist/webxr-polyfill.js and start the http server in the current directory 39 | 40 | Using one of the supported browsers listed below, go to http://YOUR_HOST_NAME:8080/ 41 | 42 | ## Portable builds 43 | 44 | To build the WebXR polyfill into a single file that you can use in a different codebase: 45 | 46 | npm run build 47 | 48 | The resulting file will be in dist/webxr-polyfill.js 49 | 50 | ## Writing your own XR apps 51 | 52 | The WebXR polyfill is not dependent on any external libraries, but examples/common.js has a handy base class, XRExampleBase, that wraps all of the boilerplate of starting a WebXR session and rendering into a WebGL layer using Three.js. 53 | 54 | Look in [examples/ar_simplest/index.html](https://github.com/mozilla/webxr-polyfill/blob/master/examples/ar_simplest/index.html) for an example of how to extend [XRExampleBase](https://github.com/mozilla/webxr-polyfill/blob/master/examples/common.js) and how to start up an app. 55 | 56 | If you run these apps on Mozilla's [ARKit based iOS app](https://github.com/mozilla-mobile/webxr-ios) then they will use the class in [polyfill/platform/ARKitWrapper.js](https://github.com/mozilla/webxr-polyfill/blob/master/polyfill/platform/ARKitWrapper.js) to get pose and anchor data out of ARKit. 
57 | 58 | If you run these apps on Google's old ARCore backed experimental browser then they will use the class in [polyfill/platform/ARCoreCameraRenderer.js](https://github.com/mozilla/webxr-polyfill/blob/master/polyfill/platform/ARCoreCameraRenderer.js) to use data out of ARCore. 59 | 60 | If you run these apps on desktop Firefox or Chrome with a WebVR 1.1 supported VR headset, the headset will be exposed as a WebXR XRDisplay. 61 | 62 | If you run these apps on a device with no VR or AR tracking, the apps will use the 3dof orientation provided by Javascript orientation events. 63 | 64 | ## Supported Displays 65 | 66 | - Flat Display (AR only, needs VR) 67 | - WebVR 1.1 HMD (VR only, needs AR) 68 | - Cardboard (NOT YET) 69 | - Hololens (NOT YET) 70 | 71 | ## Supported Realities 72 | 73 | - Camera Reality (ARKit on Mozilla iOS Test App, WebARonARCore on Android, WebARonARKit on iOS, WebRTC video stream (PARTIAL)) 74 | - Virtual Reality (Desktop Firefox with Vive and Rift, Daydream (NOT TESTED), GearVR (Not Tested), Edge with MS MR headsets (NOT TESTED)) 75 | - Passthrough Reality (NOT YET) 76 | 77 | ## Supported Browsers 78 | 79 | - Mozilla [WebXR Playground](https://github.com/mozilla/webxr-ios) iOS App using ARKit 80 | - Google [ARCore Test Chrome on Android](https://github.com/google-ar/WebARonARCore) 81 | - Google [ARKit Test Chrome on iOS](https://github.com/google-ar/WebARonARKit) 82 | - Desktop Firefox with WebVR 1.1 HMDs 83 | - Mobile Safari, Chrome, and Firefox (PARTIAL, Daydream NOT TESTED) 84 | - GearVR Internet (NOT TESTED) 85 | -------------------------------------------------------------------------------- /dist-footer.js: -------------------------------------------------------------------------------- 1 | 2 | XRDisplay = window.XRDisplay 3 | XRSession = window.XRSession 4 | XRSessionCreateParameters = window.XRSessionCreateParameters 5 | Reality = window.Reality 6 | XRPointCloud = window.XRPointCloud 7 | XRLightEstimate = window.XRLightEstimate 8 | 
XRAnchor = window.XRAnchor; 9 | XRPlaneAnchor = window.XRPlaneAnchor; 10 | XRFaceAnchor = window.XRFaceAnchor; 11 | XRImageAnchor = window.XRImageAnchor; 12 | XRAnchorOffset = window.XRAnchorOffset; 13 | XRStageBounds = window.XRStageBounds; 14 | XRStageBoundsPoint = window.XRStageBoundsPoint; 15 | XRPresentationFrame = window.XRPresentationFrame; 16 | XRView = window.XRView; 17 | XRViewport = window.XRViewport; 18 | XRCoordinateSystem = window.XRCoordinateSystem; 19 | XRViewPose = window.XRViewPose; 20 | XRLayer = window.XRLayer; 21 | XRWebGLLayer = window.XRWebGLLayer; 22 | XRVideoFrame = window.XRVideoFrame; 23 | -------------------------------------------------------------------------------- /dist-header.js: -------------------------------------------------------------------------------- 1 | /* if there is a navigator.xr, clear it out */ 2 | if(typeof navigator.xr != 'undefined') { 3 | if(typeof XRDisplay != 'undefined') { XRDisplay = null } 4 | if(typeof XRSession != 'undefined') { XRSession = null } 5 | if(typeof XRSessionCreateParameters != 'undefined') { XRSessionCreateParameters = null } 6 | if(typeof Reality != 'undefined') { Reality = null } 7 | if(typeof XRPointCloud != 'undefined') { XRPointCloud = null } 8 | if(typeof XRLightEstimate != 'undefined') { XRLightEstimate = null } 9 | if(typeof XRAnchor != 'undefined') { XRAnchor = null } 10 | if(typeof XRPlaneAnchor != 'undefined') { XRPlaneAnchor = null } 11 | if(typeof XRFaceAnchor != 'undefined') { XRFaceAnchor = null } 12 | if(typeof XRImageAnchor != 'undefined') { XRImageAnchor = null } 13 | if(typeof XRAnchorOffset != 'undefined') { XRAnchorOffset = null } 14 | if(typeof XRStageBounds != 'undefined') { XRStageBounds = null } 15 | if(typeof XRStageBoundsPoint != 'undefined') { XRStageBoundsPoint = null } 16 | if(typeof XRPresentationFrame != 'undefined') { XRPresentationFrame = null } 17 | if(typeof XRView != 'undefined') { XRView = null } 18 | if(typeof XRViewport != 'undefined') { XRViewport = null 
} 19 | if(typeof XRCoordinateSystem != 'undefined') { XRCoordinateSystem = null } 20 | if(typeof XRViewPose != 'undefined') { XRViewPose = null } 21 | if(typeof XRLayer != 'undefined') { XRLayer = null } 22 | if(typeof XRWebGLLayer != 'undefined') { XRWebGLLayer = null } 23 | if(typeof XRVideoFrame != 'undefined') { XRVideoFrame = null } 24 | //navigator.xr = null; 25 | } -------------------------------------------------------------------------------- /examples/ar_anchors/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | AR anchor example 4 | 5 | 6 | 36 | 37 | 38 | 42 | 43 | 44 | 45 | 46 |
47 |
48 |

Anchors

49 |
(click to dismiss)
50 |

Position boxes by tapping. The box positions are updated using ARKit anchors.

51 |
52 | 115 | 116 | 117 | -------------------------------------------------------------------------------- /examples/ar_anchors/screenshot.jpeg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mozilla/webxr-polyfill/4ba29e108b9b7a3317cf312dc7a5fe1550e3711b/examples/ar_anchors/screenshot.jpeg -------------------------------------------------------------------------------- /examples/ar_simplest/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | AR simplest example 4 | 5 | 6 | 28 | 29 | 30 | 31 | 35 | 36 | 37 | 38 | 39 | 40 |
41 | 42 |
43 |

Simplest AR

44 |
(click to dismiss)
45 |

This example displays the Utah Teapot positioned in front of the viewer at head height.

46 |
47 | 123 | 124 | 125 | -------------------------------------------------------------------------------- /examples/ar_simplest/screenshot.jpeg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mozilla/webxr-polyfill/4ba29e108b9b7a3317cf312dc7a5fe1550e3711b/examples/ar_simplest/screenshot.jpeg -------------------------------------------------------------------------------- /examples/boombox/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | Boombox 4 | 5 | 6 | 28 | 29 | 30 | 31 | 35 | 36 | 37 | 38 | 39 |
40 |
41 |

Boombox

42 |
(click to dismiss)
43 |

Shows a boombox in a VR environment.

44 |
45 | 114 | 115 | 116 | -------------------------------------------------------------------------------- /examples/boombox/screenshot.jpeg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mozilla/webxr-polyfill/4ba29e108b9b7a3317cf312dc7a5fe1550e3711b/examples/boombox/screenshot.jpeg -------------------------------------------------------------------------------- /examples/common.css: -------------------------------------------------------------------------------- 1 | #description { 2 | pointer-events: auto; 3 | font-family: sans-serif; 4 | padding: 1em; 5 | background-color:rgba(255,255,255,0.7); 6 | -webkit-backdrop-filter: blur(5px); 7 | backdrop-filter: blur(5px); 8 | position:absolute; 9 | bottom: 0px; 10 | left:0px; 11 | right: 0px; 12 | } 13 | -------------------------------------------------------------------------------- /examples/face_tracking/DuckyMesh.glb: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mozilla/webxr-polyfill/4ba29e108b9b7a3317cf312dc7a5fe1550e3711b/examples/face_tracking/DuckyMesh.glb -------------------------------------------------------------------------------- /examples/face_tracking/glasses/glasses.bin: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mozilla/webxr-polyfill/4ba29e108b9b7a3317cf312dc7a5fe1550e3711b/examples/face_tracking/glasses/glasses.bin -------------------------------------------------------------------------------- /examples/face_tracking/glasses/textures/Glasses1_baseColor.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mozilla/webxr-polyfill/4ba29e108b9b7a3317cf312dc7a5fe1550e3711b/examples/face_tracking/glasses/textures/Glasses1_baseColor.jpg -------------------------------------------------------------------------------- 
/examples/face_tracking/glasses/textures/Glasses1_metallicRoughness.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mozilla/webxr-polyfill/4ba29e108b9b7a3317cf312dc7a5fe1550e3711b/examples/face_tracking/glasses/textures/Glasses1_metallicRoughness.png -------------------------------------------------------------------------------- /examples/face_tracking/glasses/textures/Glasses1_normal.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mozilla/webxr-polyfill/4ba29e108b9b7a3317cf312dc7a5fe1550e3711b/examples/face_tracking/glasses/textures/Glasses1_normal.png -------------------------------------------------------------------------------- /examples/face_tracking/glasses/textures/Glasses2_baseColor.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mozilla/webxr-polyfill/4ba29e108b9b7a3317cf312dc7a5fe1550e3711b/examples/face_tracking/glasses/textures/Glasses2_baseColor.jpg -------------------------------------------------------------------------------- /examples/face_tracking/glasses/textures/Glasses2_metallicRoughness.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mozilla/webxr-polyfill/4ba29e108b9b7a3317cf312dc7a5fe1550e3711b/examples/face_tracking/glasses/textures/Glasses2_metallicRoughness.png -------------------------------------------------------------------------------- /examples/face_tracking/glasses/textures/Glasses2_normal.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mozilla/webxr-polyfill/4ba29e108b9b7a3317cf312dc7a5fe1550e3711b/examples/face_tracking/glasses/textures/Glasses2_normal.png -------------------------------------------------------------------------------- 
/examples/face_tracking/glasses/textures/Lenses_baseColor.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mozilla/webxr-polyfill/4ba29e108b9b7a3317cf312dc7a5fe1550e3711b/examples/face_tracking/glasses/textures/Lenses_baseColor.jpg -------------------------------------------------------------------------------- /examples/face_tracking/glasses/textures/Lenses_metallicRoughness.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mozilla/webxr-polyfill/4ba29e108b9b7a3317cf312dc7a5fe1550e3711b/examples/face_tracking/glasses/textures/Lenses_metallicRoughness.png -------------------------------------------------------------------------------- /examples/face_tracking/glasses/textures/Lenses_normal.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mozilla/webxr-polyfill/4ba29e108b9b7a3317cf312dc7a5fe1550e3711b/examples/face_tracking/glasses/textures/Lenses_normal.png -------------------------------------------------------------------------------- /examples/face_tracking/screenshot.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mozilla/webxr-polyfill/4ba29e108b9b7a3317cf312dc7a5fe1550e3711b/examples/face_tracking/screenshot.jpg -------------------------------------------------------------------------------- /examples/hit_test/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | Hit test example 5 | 6 | 7 | 31 | 32 | 33 | 37 | 38 | 39 | 40 | 41 |
42 |
43 |

Hit Test

44 |
(click to dismiss)
45 |

Find anchors by searching on tap events.

46 |
47 | 163 | 164 | 165 | -------------------------------------------------------------------------------- /examples/hit_test/screenshot.jpeg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mozilla/webxr-polyfill/4ba29e108b9b7a3317cf312dc7a5fe1550e3711b/examples/hit_test/screenshot.jpeg -------------------------------------------------------------------------------- /examples/image_detection/DuckyMesh.glb: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mozilla/webxr-polyfill/4ba29e108b9b7a3317cf312dc7a5fe1550e3711b/examples/image_detection/DuckyMesh.glb -------------------------------------------------------------------------------- /examples/image_detection/hubs.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mozilla/webxr-polyfill/4ba29e108b9b7a3317cf312dc7a5fe1550e3711b/examples/image_detection/hubs.png -------------------------------------------------------------------------------- /examples/image_detection/screenshot.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mozilla/webxr-polyfill/4ba29e108b9b7a3317cf312dc7a5fe1550e3711b/examples/image_detection/screenshot.jpg -------------------------------------------------------------------------------- /examples/libs/postprocessing/BloomPass.js: -------------------------------------------------------------------------------- 1 | /** 2 | * @author alteredq / http://alteredqualia.com/ 3 | */ 4 | 5 | THREE.BloomPass = function ( strength, kernelSize, sigma, resolution ) { 6 | 7 | THREE.Pass.call( this ); 8 | 9 | strength = ( strength !== undefined ) ? strength : 1; 10 | kernelSize = ( kernelSize !== undefined ) ? kernelSize : 25; 11 | sigma = ( sigma !== undefined ) ? sigma : 4.0; 12 | resolution = ( resolution !== undefined ) ? 
resolution : 256; 13 | 14 | // render targets 15 | 16 | var pars = { minFilter: THREE.LinearFilter, magFilter: THREE.LinearFilter, format: THREE.RGBAFormat }; 17 | 18 | this.renderTargetX = new THREE.WebGLRenderTarget( resolution, resolution, pars ); 19 | this.renderTargetX.texture.name = "BloomPass.x"; 20 | this.renderTargetY = new THREE.WebGLRenderTarget( resolution, resolution, pars ); 21 | this.renderTargetY.texture.name = "BloomPass.y"; 22 | 23 | // copy material 24 | 25 | if ( THREE.CopyShader === undefined ) 26 | console.error( "THREE.BloomPass relies on THREE.CopyShader" ); 27 | 28 | var copyShader = THREE.CopyShader; 29 | 30 | this.copyUniforms = THREE.UniformsUtils.clone( copyShader.uniforms ); 31 | 32 | this.copyUniforms[ "opacity" ].value = strength; 33 | 34 | this.materialCopy = new THREE.ShaderMaterial( { 35 | 36 | uniforms: this.copyUniforms, 37 | vertexShader: copyShader.vertexShader, 38 | fragmentShader: copyShader.fragmentShader, 39 | blending: THREE.AdditiveBlending, 40 | transparent: true 41 | 42 | } ); 43 | 44 | // convolution material 45 | 46 | if ( THREE.ConvolutionShader === undefined ) 47 | console.error( "THREE.BloomPass relies on THREE.ConvolutionShader" ); 48 | 49 | var convolutionShader = THREE.ConvolutionShader; 50 | 51 | this.convolutionUniforms = THREE.UniformsUtils.clone( convolutionShader.uniforms ); 52 | 53 | this.convolutionUniforms[ "uImageIncrement" ].value = THREE.BloomPass.blurX; 54 | this.convolutionUniforms[ "cKernel" ].value = THREE.ConvolutionShader.buildKernel( sigma ); 55 | 56 | this.materialConvolution = new THREE.ShaderMaterial( { 57 | 58 | uniforms: this.convolutionUniforms, 59 | vertexShader: convolutionShader.vertexShader, 60 | fragmentShader: convolutionShader.fragmentShader, 61 | defines: { 62 | "KERNEL_SIZE_FLOAT": kernelSize.toFixed( 1 ), 63 | "KERNEL_SIZE_INT": kernelSize.toFixed( 0 ) 64 | } 65 | 66 | } ); 67 | 68 | this.needsSwap = false; 69 | 70 | this.camera = new THREE.OrthographicCamera( - 1, 1, 1, - 1, 
0, 1 ); 71 | this.scene = new THREE.Scene(); 72 | 73 | this.quad = new THREE.Mesh( new THREE.PlaneBufferGeometry( 2, 2 ), null ); 74 | this.quad.frustumCulled = false; // Avoid getting clipped 75 | this.scene.add( this.quad ); 76 | 77 | }; 78 | 79 | THREE.BloomPass.prototype = Object.assign( Object.create( THREE.Pass.prototype ), { 80 | 81 | constructor: THREE.BloomPass, 82 | 83 | render: function ( renderer, writeBuffer, readBuffer, delta, maskActive ) { 84 | 85 | if ( maskActive ) renderer.context.disable( renderer.context.STENCIL_TEST ); 86 | 87 | // Render quad with blured scene into texture (convolution pass 1) 88 | 89 | this.quad.material = this.materialConvolution; 90 | 91 | this.convolutionUniforms[ "tDiffuse" ].value = readBuffer.texture; 92 | this.convolutionUniforms[ "uImageIncrement" ].value = THREE.BloomPass.blurX; 93 | 94 | renderer.render( this.scene, this.camera, this.renderTargetX, true ); 95 | 96 | 97 | // Render quad with blured scene into texture (convolution pass 2) 98 | 99 | this.convolutionUniforms[ "tDiffuse" ].value = this.renderTargetX.texture; 100 | this.convolutionUniforms[ "uImageIncrement" ].value = THREE.BloomPass.blurY; 101 | 102 | renderer.render( this.scene, this.camera, this.renderTargetY, true ); 103 | 104 | // Render original scene with superimposed blur to texture 105 | 106 | this.quad.material = this.materialCopy; 107 | 108 | this.copyUniforms[ "tDiffuse" ].value = this.renderTargetY.texture; 109 | 110 | if ( maskActive ) renderer.context.enable( renderer.context.STENCIL_TEST ); 111 | 112 | renderer.render( this.scene, this.camera, readBuffer, this.clear ); 113 | 114 | } 115 | 116 | } ); 117 | 118 | THREE.BloomPass.blurX = new THREE.Vector2( 0.001953125, 0.0 ); 119 | THREE.BloomPass.blurY = new THREE.Vector2( 0.0, 0.001953125 ); 120 | -------------------------------------------------------------------------------- /examples/libs/postprocessing/EffectComposer.js: 
-------------------------------------------------------------------------------- 1 | /** 2 | * @author alteredq / http://alteredqualia.com/ 3 | */ 4 | 5 | THREE.EffectComposer = function ( renderer, renderTarget ) { 6 | 7 | this.renderer = renderer; 8 | 9 | if ( renderTarget === undefined ) { 10 | 11 | var parameters = { 12 | minFilter: THREE.LinearFilter, 13 | magFilter: THREE.LinearFilter, 14 | format: THREE.RGBAFormat, 15 | stencilBuffer: false 16 | }; 17 | 18 | var size = renderer.getSize(); 19 | renderTarget = new THREE.WebGLRenderTarget( size.width, size.height, parameters ); 20 | renderTarget.texture.name = 'EffectComposer.rt1'; 21 | 22 | } 23 | 24 | this.renderTarget1 = renderTarget; 25 | this.renderTarget2 = renderTarget.clone(); 26 | this.renderTarget2.texture.name = 'EffectComposer.rt2'; 27 | 28 | this.writeBuffer = this.renderTarget1; 29 | this.readBuffer = this.renderTarget2; 30 | 31 | this.passes = []; 32 | 33 | // dependencies 34 | 35 | if ( THREE.CopyShader === undefined ) { 36 | 37 | console.error( 'THREE.EffectComposer relies on THREE.CopyShader' ); 38 | 39 | } 40 | 41 | if ( THREE.ShaderPass === undefined ) { 42 | 43 | console.error( 'THREE.EffectComposer relies on THREE.ShaderPass' ); 44 | 45 | } 46 | 47 | this.copyPass = new THREE.ShaderPass( THREE.CopyShader ); 48 | 49 | }; 50 | 51 | Object.assign( THREE.EffectComposer.prototype, { 52 | 53 | swapBuffers: function() { 54 | 55 | var tmp = this.readBuffer; 56 | this.readBuffer = this.writeBuffer; 57 | this.writeBuffer = tmp; 58 | 59 | }, 60 | 61 | addPass: function ( pass ) { 62 | 63 | this.passes.push( pass ); 64 | 65 | var size = this.renderer.getSize(); 66 | pass.setSize( size.width, size.height ); 67 | 68 | }, 69 | 70 | insertPass: function ( pass, index ) { 71 | 72 | this.passes.splice( index, 0, pass ); 73 | 74 | }, 75 | 76 | render: function ( delta ) { 77 | 78 | var maskActive = false; 79 | 80 | var pass, i, il = this.passes.length; 81 | 82 | for ( i = 0; i < il; i ++ ) { 83 | 84 | pass = 
this.passes[ i ]; 85 | 86 | if ( pass.enabled === false ) continue; 87 | 88 | pass.render( this.renderer, this.writeBuffer, this.readBuffer, delta, maskActive ); 89 | 90 | if ( pass.needsSwap ) { 91 | 92 | if ( maskActive ) { 93 | 94 | var context = this.renderer.context; 95 | 96 | context.stencilFunc( context.NOTEQUAL, 1, 0xffffffff ); 97 | 98 | this.copyPass.render( this.renderer, this.writeBuffer, this.readBuffer, delta ); 99 | 100 | context.stencilFunc( context.EQUAL, 1, 0xffffffff ); 101 | 102 | } 103 | 104 | this.swapBuffers(); 105 | 106 | } 107 | 108 | if ( THREE.MaskPass !== undefined ) { 109 | 110 | if ( pass instanceof THREE.MaskPass ) { 111 | 112 | maskActive = true; 113 | 114 | } else if ( pass instanceof THREE.ClearMaskPass ) { 115 | 116 | maskActive = false; 117 | 118 | } 119 | 120 | } 121 | 122 | } 123 | 124 | }, 125 | 126 | reset: function ( renderTarget ) { 127 | 128 | if ( renderTarget === undefined ) { 129 | 130 | var size = this.renderer.getSize(); 131 | 132 | renderTarget = this.renderTarget1.clone(); 133 | renderTarget.setSize( size.width, size.height ); 134 | 135 | } 136 | 137 | this.renderTarget1.dispose(); 138 | this.renderTarget2.dispose(); 139 | this.renderTarget1 = renderTarget; 140 | this.renderTarget2 = renderTarget.clone(); 141 | 142 | this.writeBuffer = this.renderTarget1; 143 | this.readBuffer = this.renderTarget2; 144 | 145 | }, 146 | 147 | setSize: function ( width, height ) { 148 | 149 | this.renderTarget1.setSize( width, height ); 150 | this.renderTarget2.setSize( width, height ); 151 | 152 | for ( var i = 0; i < this.passes.length; i ++ ) { 153 | 154 | this.passes[i].setSize( width, height ); 155 | 156 | } 157 | 158 | } 159 | 160 | } ); 161 | 162 | 163 | THREE.Pass = function () { 164 | 165 | // if set to true, the pass is processed by the composer 166 | this.enabled = true; 167 | 168 | // if set to true, the pass indicates to swap read and write buffer after rendering 169 | this.needsSwap = true; 170 | 171 | // if set to true, 
the pass clears its buffer before rendering 172 | this.clear = false; 173 | 174 | // if set to true, the result of the pass is rendered to screen 175 | this.renderToScreen = false; 176 | 177 | }; 178 | 179 | Object.assign( THREE.Pass.prototype, { 180 | 181 | setSize: function( width, height ) {}, 182 | 183 | render: function ( renderer, writeBuffer, readBuffer, delta, maskActive ) { 184 | 185 | console.error( 'THREE.Pass: .render() must be implemented in derived pass.' ); 186 | 187 | } 188 | 189 | } ); 190 | -------------------------------------------------------------------------------- /examples/libs/postprocessing/FilmPass.js: -------------------------------------------------------------------------------- 1 | /** 2 | * @author alteredq / http://alteredqualia.com/ 3 | */ 4 | 5 | THREE.FilmPass = function ( noiseIntensity, scanlinesIntensity, scanlinesCount, grayscale ) { 6 | 7 | THREE.Pass.call( this ); 8 | 9 | if ( THREE.FilmShader === undefined ) 10 | console.error( "THREE.FilmPass relies on THREE.FilmShader" ); 11 | 12 | var shader = THREE.FilmShader; 13 | 14 | this.uniforms = THREE.UniformsUtils.clone( shader.uniforms ); 15 | 16 | this.material = new THREE.ShaderMaterial( { 17 | 18 | uniforms: this.uniforms, 19 | vertexShader: shader.vertexShader, 20 | fragmentShader: shader.fragmentShader 21 | 22 | } ); 23 | 24 | if ( grayscale !== undefined ) this.uniforms.grayscale.value = grayscale; 25 | if ( noiseIntensity !== undefined ) this.uniforms.nIntensity.value = noiseIntensity; 26 | if ( scanlinesIntensity !== undefined ) this.uniforms.sIntensity.value = scanlinesIntensity; 27 | if ( scanlinesCount !== undefined ) this.uniforms.sCount.value = scanlinesCount; 28 | 29 | this.camera = new THREE.OrthographicCamera( - 1, 1, 1, - 1, 0, 1 ); 30 | this.scene = new THREE.Scene(); 31 | 32 | this.quad = new THREE.Mesh( new THREE.PlaneBufferGeometry( 2, 2 ), null ); 33 | this.quad.frustumCulled = false; // Avoid getting clipped 34 | this.scene.add( this.quad ); 35 | 36 | 
}; 37 | 38 | THREE.FilmPass.prototype = Object.assign( Object.create( THREE.Pass.prototype ), { 39 | 40 | constructor: THREE.FilmPass, 41 | 42 | render: function ( renderer, writeBuffer, readBuffer, delta, maskActive ) { 43 | 44 | this.uniforms[ "tDiffuse" ].value = readBuffer.texture; 45 | this.uniforms[ "time" ].value += delta; 46 | 47 | this.quad.material = this.material; 48 | 49 | if ( this.renderToScreen ) { 50 | 51 | renderer.render( this.scene, this.camera ); 52 | 53 | } else { 54 | 55 | renderer.render( this.scene, this.camera, writeBuffer, this.clear ); 56 | 57 | } 58 | 59 | } 60 | 61 | } ); 62 | -------------------------------------------------------------------------------- /examples/libs/postprocessing/MaskPass.js: -------------------------------------------------------------------------------- 1 | /** 2 | * @author alteredq / http://alteredqualia.com/ 3 | */ 4 | 5 | THREE.MaskPass = function ( scene, camera ) { 6 | 7 | THREE.Pass.call( this ); 8 | 9 | this.scene = scene; 10 | this.camera = camera; 11 | 12 | this.clear = true; 13 | this.needsSwap = false; 14 | 15 | this.inverse = false; 16 | 17 | }; 18 | 19 | THREE.MaskPass.prototype = Object.assign( Object.create( THREE.Pass.prototype ), { 20 | 21 | constructor: THREE.MaskPass, 22 | 23 | render: function ( renderer, writeBuffer, readBuffer, delta, maskActive ) { 24 | 25 | var context = renderer.context; 26 | var state = renderer.state; 27 | 28 | // don't update color or depth 29 | 30 | state.buffers.color.setMask( false ); 31 | state.buffers.depth.setMask( false ); 32 | 33 | // lock buffers 34 | 35 | state.buffers.color.setLocked( true ); 36 | state.buffers.depth.setLocked( true ); 37 | 38 | // set up stencil 39 | 40 | var writeValue, clearValue; 41 | 42 | if ( this.inverse ) { 43 | 44 | writeValue = 0; 45 | clearValue = 1; 46 | 47 | } else { 48 | 49 | writeValue = 1; 50 | clearValue = 0; 51 | 52 | } 53 | 54 | state.buffers.stencil.setTest( true ); 55 | state.buffers.stencil.setOp( context.REPLACE, 
context.REPLACE, context.REPLACE ); 56 | state.buffers.stencil.setFunc( context.ALWAYS, writeValue, 0xffffffff ); 57 | state.buffers.stencil.setClear( clearValue ); 58 | 59 | // draw into the stencil buffer 60 | 61 | renderer.render( this.scene, this.camera, readBuffer, this.clear ); 62 | renderer.render( this.scene, this.camera, writeBuffer, this.clear ); 63 | 64 | // unlock color and depth buffer for subsequent rendering 65 | 66 | state.buffers.color.setLocked( false ); 67 | state.buffers.depth.setLocked( false ); 68 | 69 | // only render where stencil is set to 1 70 | 71 | state.buffers.stencil.setFunc( context.EQUAL, 1, 0xffffffff ); // draw if == 1 72 | state.buffers.stencil.setOp( context.KEEP, context.KEEP, context.KEEP ); 73 | 74 | } 75 | 76 | } ); 77 | 78 | 79 | THREE.ClearMaskPass = function () { 80 | 81 | THREE.Pass.call( this ); 82 | 83 | this.needsSwap = false; 84 | 85 | }; 86 | 87 | THREE.ClearMaskPass.prototype = Object.create( THREE.Pass.prototype ); 88 | 89 | Object.assign( THREE.ClearMaskPass.prototype, { 90 | 91 | render: function ( renderer, writeBuffer, readBuffer, delta, maskActive ) { 92 | 93 | renderer.state.buffers.stencil.setTest( false ); 94 | 95 | } 96 | 97 | } ); 98 | -------------------------------------------------------------------------------- /examples/libs/postprocessing/RenderPass.js: -------------------------------------------------------------------------------- 1 | /** 2 | * @author alteredq / http://alteredqualia.com/ 3 | */ 4 | 5 | THREE.RenderPass = function ( scene, camera, overrideMaterial, clearColor, clearAlpha ) { 6 | 7 | THREE.Pass.call( this ); 8 | 9 | this.scene = scene; 10 | this.camera = camera; 11 | 12 | this.overrideMaterial = overrideMaterial; 13 | 14 | this.clearColor = clearColor; 15 | this.clearAlpha = ( clearAlpha !== undefined ) ? 
clearAlpha : 0; 16 | 17 | this.clear = true; 18 | this.clearDepth = false; 19 | this.needsSwap = false; 20 | 21 | }; 22 | 23 | THREE.RenderPass.prototype = Object.assign( Object.create( THREE.Pass.prototype ), { 24 | 25 | constructor: THREE.RenderPass, 26 | 27 | render: function ( renderer, writeBuffer, readBuffer, delta, maskActive ) { 28 | 29 | var oldAutoClear = renderer.autoClear; 30 | renderer.autoClear = false; 31 | 32 | this.scene.overrideMaterial = this.overrideMaterial; 33 | 34 | var oldClearColor, oldClearAlpha; 35 | 36 | if ( this.clearColor ) { 37 | 38 | oldClearColor = renderer.getClearColor().getHex(); 39 | oldClearAlpha = renderer.getClearAlpha(); 40 | 41 | renderer.setClearColor( this.clearColor, this.clearAlpha ); 42 | 43 | } 44 | 45 | if ( this.clearDepth ) { 46 | 47 | renderer.clearDepth(); 48 | 49 | } 50 | 51 | renderer.render( this.scene, this.camera, this.renderToScreen ? null : readBuffer, this.clear ); 52 | 53 | if ( this.clearColor ) { 54 | 55 | renderer.setClearColor( oldClearColor, oldClearAlpha ); 56 | 57 | } 58 | 59 | this.scene.overrideMaterial = null; 60 | renderer.autoClear = oldAutoClear; 61 | } 62 | 63 | } ); 64 | -------------------------------------------------------------------------------- /examples/libs/postprocessing/ShaderPass.js: -------------------------------------------------------------------------------- 1 | /** 2 | * @author alteredq / http://alteredqualia.com/ 3 | */ 4 | 5 | THREE.ShaderPass = function ( shader, textureID ) { 6 | 7 | THREE.Pass.call( this ); 8 | 9 | this.textureID = ( textureID !== undefined ) ? 
textureID : "tDiffuse"; 10 | 11 | if ( shader instanceof THREE.ShaderMaterial ) { 12 | 13 | this.uniforms = shader.uniforms; 14 | 15 | this.material = shader; 16 | 17 | } else if ( shader ) { 18 | 19 | this.uniforms = THREE.UniformsUtils.clone( shader.uniforms ); 20 | 21 | this.material = new THREE.ShaderMaterial( { 22 | 23 | defines: shader.defines || {}, 24 | uniforms: this.uniforms, 25 | vertexShader: shader.vertexShader, 26 | fragmentShader: shader.fragmentShader 27 | 28 | } ); 29 | 30 | } 31 | 32 | this.camera = new THREE.OrthographicCamera( - 1, 1, 1, - 1, 0, 1 ); 33 | this.scene = new THREE.Scene(); 34 | 35 | this.quad = new THREE.Mesh( new THREE.PlaneBufferGeometry( 2, 2 ), null ); 36 | this.quad.frustumCulled = false; // Avoid getting clipped 37 | this.scene.add( this.quad ); 38 | 39 | }; 40 | 41 | THREE.ShaderPass.prototype = Object.assign( Object.create( THREE.Pass.prototype ), { 42 | 43 | constructor: THREE.ShaderPass, 44 | 45 | render: function( renderer, writeBuffer, readBuffer, delta, maskActive ) { 46 | 47 | if ( this.uniforms[ this.textureID ] ) { 48 | 49 | this.uniforms[ this.textureID ].value = readBuffer.texture; 50 | 51 | } 52 | 53 | this.quad.material = this.material; 54 | 55 | if ( this.renderToScreen ) { 56 | 57 | renderer.render( this.scene, this.camera ); 58 | 59 | } else { 60 | 61 | renderer.render( this.scene, this.camera, writeBuffer, this.clear ); 62 | 63 | } 64 | 65 | } 66 | 67 | } ); 68 | -------------------------------------------------------------------------------- /examples/libs/shaders/ConvolutionShader.js: -------------------------------------------------------------------------------- 1 | /** 2 | * @author alteredq / http://alteredqualia.com/ 3 | * 4 | * Convolution shader 5 | * ported from o3d sample to WebGL / GLSL 6 | * http://o3d.googlecode.com/svn/trunk/samples/convolution.html 7 | */ 8 | 9 | THREE.ConvolutionShader = { 10 | 11 | defines: { 12 | 13 | "KERNEL_SIZE_FLOAT": "25.0", 14 | "KERNEL_SIZE_INT": "25" 15 | 16 | }, 
17 | 18 | uniforms: { 19 | 20 | "tDiffuse": { value: null }, 21 | "uImageIncrement": { value: new THREE.Vector2( 0.001953125, 0.0 ) }, 22 | "cKernel": { value: [] } 23 | 24 | }, 25 | 26 | vertexShader: [ 27 | 28 | "uniform vec2 uImageIncrement;", 29 | 30 | "varying vec2 vUv;", 31 | 32 | "void main() {", 33 | 34 | "vUv = uv - ( ( KERNEL_SIZE_FLOAT - 1.0 ) / 2.0 ) * uImageIncrement;", 35 | "gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );", 36 | 37 | "}" 38 | 39 | ].join( "\n" ), 40 | 41 | fragmentShader: [ 42 | 43 | "uniform float cKernel[ KERNEL_SIZE_INT ];", 44 | 45 | "uniform sampler2D tDiffuse;", 46 | "uniform vec2 uImageIncrement;", 47 | 48 | "varying vec2 vUv;", 49 | 50 | "void main() {", 51 | 52 | "vec2 imageCoord = vUv;", 53 | "vec4 sum = vec4( 0.0, 0.0, 0.0, 0.0 );", 54 | 55 | "for( int i = 0; i < KERNEL_SIZE_INT; i ++ ) {", 56 | 57 | "sum += texture2D( tDiffuse, imageCoord ) * cKernel[ i ];", 58 | "imageCoord += uImageIncrement;", 59 | 60 | "}", 61 | 62 | "gl_FragColor = sum;", 63 | 64 | "}" 65 | 66 | 67 | ].join( "\n" ), 68 | 69 | buildKernel: function ( sigma ) { 70 | 71 | // We lop off the sqrt(2 * pi) * sigma term, since we're going to normalize anyway. 
72 | 73 | function gauss( x, sigma ) { 74 | 75 | return Math.exp( - ( x * x ) / ( 2.0 * sigma * sigma ) ); 76 | 77 | } 78 | 79 | var i, values, sum, halfWidth, kMaxKernelSize = 25, kernelSize = 2 * Math.ceil( sigma * 3.0 ) + 1; 80 | 81 | if ( kernelSize > kMaxKernelSize ) kernelSize = kMaxKernelSize; 82 | halfWidth = ( kernelSize - 1 ) * 0.5; 83 | 84 | values = new Array( kernelSize ); 85 | sum = 0.0; 86 | for ( i = 0; i < kernelSize; ++ i ) { 87 | 88 | values[ i ] = gauss( i - halfWidth, sigma ); 89 | sum += values[ i ]; 90 | 91 | } 92 | 93 | // normalize the kernel 94 | 95 | for ( i = 0; i < kernelSize; ++ i ) values[ i ] /= sum; 96 | 97 | return values; 98 | 99 | } 100 | 101 | }; 102 | -------------------------------------------------------------------------------- /examples/libs/shaders/CopyShader.js: -------------------------------------------------------------------------------- 1 | /** 2 | * @author alteredq / http://alteredqualia.com/ 3 | * 4 | * Full-screen textured quad shader 5 | */ 6 | 7 | THREE.CopyShader = { 8 | 9 | uniforms: { 10 | 11 | "tDiffuse": { value: null }, 12 | "opacity": { value: 1.0 } 13 | 14 | }, 15 | 16 | vertexShader: [ 17 | 18 | "varying vec2 vUv;", 19 | 20 | "void main() {", 21 | 22 | "vUv = uv;", 23 | "gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );", 24 | 25 | "}" 26 | 27 | ].join( "\n" ), 28 | 29 | fragmentShader: [ 30 | 31 | "uniform float opacity;", 32 | 33 | "uniform sampler2D tDiffuse;", 34 | 35 | "varying vec2 vUv;", 36 | 37 | "void main() {", 38 | 39 | "vec4 texel = texture2D( tDiffuse, vUv );", 40 | "gl_FragColor = opacity * texel;", 41 | 42 | "}" 43 | 44 | ].join( "\n" ) 45 | 46 | }; 47 | -------------------------------------------------------------------------------- /examples/libs/shaders/FilmShader.js: -------------------------------------------------------------------------------- 1 | /** 2 | * @author alteredq / http://alteredqualia.com/ 3 | * 4 | * Film grain & scanlines shader 5 | * 6 | * - 
ported from HLSL to WebGL / GLSL 7 | * http://www.truevision3d.com/forums/showcase/staticnoise_colorblackwhite_scanline_shaders-t18698.0.html 8 | * 9 | * Screen Space Static Postprocessor 10 | * 11 | * Produces an analogue noise overlay similar to a film grain / TV static 12 | * 13 | * Original implementation and noise algorithm 14 | * Pat 'Hawthorne' Shearon 15 | * 16 | * Optimized scanlines + noise version with intensity scaling 17 | * Georg 'Leviathan' Steinrohder 18 | * 19 | * This version is provided under a Creative Commons Attribution 3.0 License 20 | * http://creativecommons.org/licenses/by/3.0/ 21 | */ 22 | 23 | THREE.FilmShader = { 24 | 25 | uniforms: { 26 | 27 | "tDiffuse": { value: null }, 28 | "time": { value: 0.0 }, 29 | "nIntensity": { value: 0.5 }, 30 | "sIntensity": { value: 0.05 }, 31 | "sCount": { value: 4096 }, 32 | "grayscale": { value: 1 } 33 | 34 | }, 35 | 36 | vertexShader: [ 37 | 38 | "varying vec2 vUv;", 39 | 40 | "void main() {", 41 | 42 | "vUv = uv;", 43 | "gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );", 44 | 45 | "}" 46 | 47 | ].join( "\n" ), 48 | 49 | fragmentShader: [ 50 | 51 | "#include ", 52 | 53 | // control parameter 54 | "uniform float time;", 55 | 56 | "uniform bool grayscale;", 57 | 58 | // noise effect intensity value (0 = no effect, 1 = full effect) 59 | "uniform float nIntensity;", 60 | 61 | // scanlines effect intensity value (0 = no effect, 1 = full effect) 62 | "uniform float sIntensity;", 63 | 64 | // scanlines effect count value (0 = no effect, 4096 = full effect) 65 | "uniform float sCount;", 66 | 67 | "uniform sampler2D tDiffuse;", 68 | 69 | "varying vec2 vUv;", 70 | 71 | "void main() {", 72 | 73 | // sample the source 74 | "vec4 cTextureScreen = texture2D( tDiffuse, vUv );", 75 | 76 | // make some noise 77 | "float dx = rand( vUv + time );", 78 | 79 | // add noise 80 | "vec3 cResult = cTextureScreen.rgb + cTextureScreen.rgb * clamp( 0.1 + dx, 0.0, 1.0 );", 81 | 82 | // get us a sine and 
cosine 83 | "vec2 sc = vec2( sin( vUv.y * sCount ), cos( vUv.y * sCount ) );", 84 | 85 | // add scanlines 86 | "cResult += cTextureScreen.rgb * vec3( sc.x, sc.y, sc.x ) * sIntensity;", 87 | 88 | // interpolate between source and result by intensity 89 | "cResult = cTextureScreen.rgb + clamp( nIntensity, 0.0,1.0 ) * ( cResult - cTextureScreen.rgb );", 90 | 91 | // convert to grayscale if desired 92 | "if( grayscale ) {", 93 | 94 | "cResult = vec3( cResult.r * 0.3 + cResult.g * 0.59 + cResult.b * 0.11 );", 95 | 96 | "}", 97 | 98 | "gl_FragColor = vec4( cResult, cTextureScreen.a );", 99 | 100 | "}" 101 | 102 | ].join( "\n" ) 103 | 104 | }; 105 | -------------------------------------------------------------------------------- /examples/libs/shaders/FocusShader.js: -------------------------------------------------------------------------------- 1 | /** 2 | * @author alteredq / http://alteredqualia.com/ 3 | * 4 | * Focus shader 5 | * based on PaintEffect postprocess from ro.me 6 | * http://code.google.com/p/3-dreams-of-black/source/browse/deploy/js/effects/PaintEffect.js 7 | */ 8 | 9 | THREE.FocusShader = { 10 | 11 | uniforms : { 12 | 13 | "tDiffuse": { value: null }, 14 | "screenWidth": { value: 1024 }, 15 | "screenHeight": { value: 1024 }, 16 | "sampleDistance": { value: 0.94 }, 17 | "waveFactor": { value: 0.00125 } 18 | 19 | }, 20 | 21 | vertexShader: [ 22 | 23 | "varying vec2 vUv;", 24 | 25 | "void main() {", 26 | 27 | "vUv = uv;", 28 | "gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );", 29 | 30 | "}" 31 | 32 | ].join( "\n" ), 33 | 34 | fragmentShader: [ 35 | 36 | "uniform float screenWidth;", 37 | "uniform float screenHeight;", 38 | "uniform float sampleDistance;", 39 | "uniform float waveFactor;", 40 | 41 | "uniform sampler2D tDiffuse;", 42 | 43 | "varying vec2 vUv;", 44 | 45 | "void main() {", 46 | 47 | "vec4 color, org, tmp, add;", 48 | "float sample_dist, f;", 49 | "vec2 vin;", 50 | "vec2 uv = vUv;", 51 | 52 | "add = color = org = 
texture2D( tDiffuse, uv );", 53 | 54 | "vin = ( uv - vec2( 0.5 ) ) * vec2( 1.4 );", 55 | "sample_dist = dot( vin, vin ) * 2.0;", 56 | 57 | "f = ( waveFactor * 100.0 + sample_dist ) * sampleDistance * 4.0;", 58 | 59 | "vec2 sampleSize = vec2( 1.0 / screenWidth, 1.0 / screenHeight ) * vec2( f );", 60 | 61 | "add += tmp = texture2D( tDiffuse, uv + vec2( 0.111964, 0.993712 ) * sampleSize );", 62 | "if( tmp.b < color.b ) color = tmp;", 63 | 64 | "add += tmp = texture2D( tDiffuse, uv + vec2( 0.846724, 0.532032 ) * sampleSize );", 65 | "if( tmp.b < color.b ) color = tmp;", 66 | 67 | "add += tmp = texture2D( tDiffuse, uv + vec2( 0.943883, -0.330279 ) * sampleSize );", 68 | "if( tmp.b < color.b ) color = tmp;", 69 | 70 | "add += tmp = texture2D( tDiffuse, uv + vec2( 0.330279, -0.943883 ) * sampleSize );", 71 | "if( tmp.b < color.b ) color = tmp;", 72 | 73 | "add += tmp = texture2D( tDiffuse, uv + vec2( -0.532032, -0.846724 ) * sampleSize );", 74 | "if( tmp.b < color.b ) color = tmp;", 75 | 76 | "add += tmp = texture2D( tDiffuse, uv + vec2( -0.993712, -0.111964 ) * sampleSize );", 77 | "if( tmp.b < color.b ) color = tmp;", 78 | 79 | "add += tmp = texture2D( tDiffuse, uv + vec2( -0.707107, 0.707107 ) * sampleSize );", 80 | "if( tmp.b < color.b ) color = tmp;", 81 | 82 | "color = color * vec4( 2.0 ) - ( add / vec4( 8.0 ) );", 83 | "color = color + ( add / vec4( 8.0 ) - color ) * ( vec4( 1.0 ) - vec4( sample_dist * 0.5 ) );", 84 | 85 | "gl_FragColor = vec4( color.rgb * color.rgb * vec3( 0.95 ) + color.rgb, 1.0 );", 86 | 87 | "}" 88 | 89 | 90 | ].join( "\n" ) 91 | }; 92 | -------------------------------------------------------------------------------- /examples/libs/stats.js: -------------------------------------------------------------------------------- 1 | (function (global, factory) { 2 | typeof exports === 'object' && typeof module !== 'undefined' ? module.exports = factory() : 3 | typeof define === 'function' && define.amd ? 
define(factory) : 4 | (global.Stats = factory()); 5 | }(this, (function () { 'use strict'; 6 | 7 | /** 8 | * @author mrdoob / http://mrdoob.com/ 9 | */ 10 | 11 | var Stats = function () { 12 | 13 | var mode = 0; 14 | 15 | var container = document.createElement( 'div' ); 16 | container.style.cssText = 'position:fixed;top:0;left:0;cursor:pointer;opacity:0.9;z-index:10000'; 17 | container.addEventListener( 'click', function ( event ) { 18 | 19 | event.preventDefault(); 20 | showPanel( ++ mode % container.children.length ); 21 | 22 | }, false ); 23 | 24 | // 25 | 26 | function addPanel( panel ) { 27 | 28 | container.appendChild( panel.dom ); 29 | return panel; 30 | 31 | } 32 | 33 | function showPanel( id ) { 34 | 35 | for ( var i = 0; i < container.children.length; i ++ ) { 36 | 37 | container.children[ i ].style.display = i === id ? 'block' : 'none'; 38 | 39 | } 40 | 41 | mode = id; 42 | 43 | } 44 | 45 | // 46 | 47 | var beginTime = ( performance || Date ).now(), prevTime = beginTime, frames = 0; 48 | 49 | var fpsPanel = addPanel( new Stats.Panel( 'FPS', '#0ff', '#002' ) ); 50 | var msPanel = addPanel( new Stats.Panel( 'MS', '#0f0', '#020' ) ); 51 | 52 | if ( self.performance && self.performance.memory ) { 53 | 54 | var memPanel = addPanel( new Stats.Panel( 'MB', '#f08', '#201' ) ); 55 | 56 | } 57 | 58 | showPanel( 0 ); 59 | 60 | return { 61 | 62 | REVISION: 16, 63 | 64 | dom: container, 65 | 66 | addPanel: addPanel, 67 | showPanel: showPanel, 68 | 69 | begin: function () { 70 | 71 | beginTime = ( performance || Date ).now(); 72 | 73 | }, 74 | 75 | end: function () { 76 | 77 | frames ++; 78 | 79 | var time = ( performance || Date ).now(); 80 | 81 | msPanel.update( time - beginTime, 200 ); 82 | 83 | if ( time > prevTime + 1000 ) { 84 | 85 | fpsPanel.update( ( frames * 1000 ) / ( time - prevTime ), 100 ); 86 | 87 | prevTime = time; 88 | frames = 0; 89 | 90 | if ( memPanel ) { 91 | 92 | var memory = performance.memory; 93 | memPanel.update( memory.usedJSHeapSize / 
1048576, memory.jsHeapSizeLimit / 1048576 ); 94 | 95 | } 96 | 97 | } 98 | 99 | return time; 100 | 101 | }, 102 | 103 | update: function () { 104 | 105 | beginTime = this.end(); 106 | 107 | }, 108 | 109 | // Backwards Compatibility 110 | 111 | domElement: container, 112 | setMode: showPanel 113 | 114 | }; 115 | 116 | }; 117 | 118 | Stats.Panel = function ( name, fg, bg ) { 119 | 120 | var min = Infinity, max = 0, round = Math.round; 121 | var PR = round( window.devicePixelRatio || 1 ); 122 | 123 | var WIDTH = 80 * PR, HEIGHT = 48 * PR, 124 | TEXT_X = 3 * PR, TEXT_Y = 2 * PR, 125 | GRAPH_X = 3 * PR, GRAPH_Y = 15 * PR, 126 | GRAPH_WIDTH = 74 * PR, GRAPH_HEIGHT = 30 * PR; 127 | 128 | var canvas = document.createElement( 'canvas' ); 129 | canvas.width = WIDTH; 130 | canvas.height = HEIGHT; 131 | canvas.style.cssText = 'width:80px;height:48px'; 132 | 133 | var context = canvas.getContext( '2d' ); 134 | context.font = 'bold ' + ( 9 * PR ) + 'px Helvetica,Arial,sans-serif'; 135 | context.textBaseline = 'top'; 136 | 137 | context.fillStyle = bg; 138 | context.fillRect( 0, 0, WIDTH, HEIGHT ); 139 | 140 | context.fillStyle = fg; 141 | context.fillText( name, TEXT_X, TEXT_Y ); 142 | context.fillRect( GRAPH_X, GRAPH_Y, GRAPH_WIDTH, GRAPH_HEIGHT ); 143 | 144 | context.fillStyle = bg; 145 | context.globalAlpha = 0.9; 146 | context.fillRect( GRAPH_X, GRAPH_Y, GRAPH_WIDTH, GRAPH_HEIGHT ); 147 | 148 | return { 149 | 150 | dom: canvas, 151 | 152 | update: function ( value, maxValue ) { 153 | 154 | min = Math.min( min, value ); 155 | max = Math.max( max, value ); 156 | 157 | context.fillStyle = bg; 158 | context.globalAlpha = 1; 159 | context.fillRect( 0, 0, WIDTH, GRAPH_Y ); 160 | context.fillStyle = fg; 161 | context.fillText( round( value ) + ' ' + name + ' (' + round( min ) + '-' + round( max ) + ')', TEXT_X, TEXT_Y ); 162 | 163 | context.drawImage( canvas, GRAPH_X + PR, GRAPH_Y, GRAPH_WIDTH - PR, GRAPH_HEIGHT, GRAPH_X, GRAPH_Y, GRAPH_WIDTH - PR, GRAPH_HEIGHT ); 164 | 165 | 
context.fillRect( GRAPH_X + GRAPH_WIDTH - PR, GRAPH_Y, PR, GRAPH_HEIGHT ); 166 | 167 | context.fillStyle = bg; 168 | context.globalAlpha = 0.9; 169 | context.fillRect( GRAPH_X + GRAPH_WIDTH - PR, GRAPH_Y, PR, round( ( 1 - ( value / maxValue ) ) * GRAPH_HEIGHT ) ); 170 | 171 | } 172 | 173 | }; 174 | 175 | }; 176 | 177 | return Stats; 178 | 179 | }))); 180 | -------------------------------------------------------------------------------- /examples/light/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | Light example 5 | 6 | 7 | 31 | 32 | 33 | 37 | 38 | 39 | 40 | 41 |
42 |
43 |

Light

44 |
(click to dismiss)
45 |

Place a reticle on surfaces with light (only works with iOS viewer).

46 |
47 | 163 | 164 | 165 | -------------------------------------------------------------------------------- /examples/light/screenshot.jpeg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mozilla/webxr-polyfill/4ba29e108b9b7a3317cf312dc7a5fe1550e3711b/examples/light/screenshot.jpeg -------------------------------------------------------------------------------- /examples/models/Axis.mtl: -------------------------------------------------------------------------------- 1 | # Blender MTL File: 'Axis.blend' 2 | # Material Count: 4 3 | 4 | newmtl Cube 5 | Ns 96.078431 6 | Ka 1.000000 1.000000 1.000000 7 | Kd 0.420028 0.420028 0.420028 8 | Ks 0.000000 0.000000 0.000000 9 | Ke 0.000000 0.000000 0.000000 10 | Ni 1.000000 11 | d 1.000000 12 | illum 1 13 | 14 | newmtl X 15 | Ns 96.078431 16 | Ka 1.000000 1.000000 1.000000 17 | Kd 1.000000 0.000000 0.000000 18 | Ks 0.000000 0.000000 0.000000 19 | Ke 0.000000 0.000000 0.000000 20 | Ni 1.000000 21 | d 1.000000 22 | illum 1 23 | 24 | newmtl Y 25 | Ns 96.078431 26 | Ka 1.000000 1.000000 1.000000 27 | Kd 0.000000 1.000000 0.000000 28 | Ks 0.000000 0.000000 0.000000 29 | Ke 0.000000 0.000000 0.000000 30 | Ni 1.000000 31 | d 1.000000 32 | illum 1 33 | 34 | newmtl Z 35 | Ns 96.078431 36 | Ka 1.000000 1.000000 1.000000 37 | Kd 0.000000 0.000000 1.000000 38 | Ks 0.000000 0.000000 0.000000 39 | Ke 0.000000 0.000000 0.000000 40 | Ni 1.000000 41 | d 1.000000 42 | illum 1 43 | -------------------------------------------------------------------------------- /examples/models/BoomBox/README.md: -------------------------------------------------------------------------------- 1 | # Boom Box 2 | ## Screenshot 3 | 4 | ![screenshot](screenshot/screenshot.jpg) 5 | 6 | ## License Information 7 | 8 | Donated by Microsoft for glTF testing 9 | 10 | Created by [Ryan Martin](https://www.linkedin.com/in/ryan-c-martin-techartist) 
-------------------------------------------------------------------------------- /examples/models/BoomBox/glTF-Binary/BoomBox.glb: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mozilla/webxr-polyfill/4ba29e108b9b7a3317cf312dc7a5fe1550e3711b/examples/models/BoomBox/glTF-Binary/BoomBox.glb -------------------------------------------------------------------------------- /examples/models/BoomBox/glTF-pbrSpecularGlossiness/BoomBox.bin: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mozilla/webxr-polyfill/4ba29e108b9b7a3317cf312dc7a5fe1550e3711b/examples/models/BoomBox/glTF-pbrSpecularGlossiness/BoomBox.bin -------------------------------------------------------------------------------- /examples/models/BoomBox/glTF-pbrSpecularGlossiness/BoomBox.gltf: -------------------------------------------------------------------------------- 1 | { 2 | "accessors": [ 3 | { 4 | "bufferView": 0, 5 | "componentType": 5126, 6 | "count": 3575, 7 | "type": "VEC2", 8 | "max": [ 9 | 0.9999003, 10 | -0.0221377648 11 | ], 12 | "min": [ 13 | 0.0006585993, 14 | -0.996773958 15 | ] 16 | }, 17 | { 18 | "bufferView": 1, 19 | "componentType": 5126, 20 | "count": 3575, 21 | "type": "VEC3", 22 | "max": [ 23 | 1.0, 24 | 1.0, 25 | 0.9999782 26 | ], 27 | "min": [ 28 | -1.0, 29 | -1.0, 30 | -0.9980823 31 | ] 32 | }, 33 | { 34 | "bufferView": 2, 35 | "componentType": 5126, 36 | "count": 3575, 37 | "type": "VEC4", 38 | "max": [ 39 | 1.0, 40 | 0.9999976, 41 | 1.0, 42 | 1.0 43 | ], 44 | "min": [ 45 | -0.9991289, 46 | -0.999907851, 47 | -1.0, 48 | 1.0 49 | ] 50 | }, 51 | { 52 | "bufferView": 3, 53 | "componentType": 5126, 54 | "count": 3575, 55 | "type": "VEC3", 56 | "max": [ 57 | 0.009921154, 58 | 0.00977163, 59 | 0.0100762453 60 | ], 61 | "min": [ 62 | -0.009921154, 63 | -0.00977163, 64 | -0.0100762453 65 | ] 66 | }, 67 | { 68 | "bufferView": 4, 69 | "componentType": 5123, 
70 | "count": 18108, 71 | "type": "SCALAR", 72 | "max": [ 73 | 3574 74 | ], 75 | "min": [ 76 | 0 77 | ] 78 | } 79 | ], 80 | "asset": { 81 | "generator": "glTF Tools for Unity", 82 | "version": "2.0" 83 | }, 84 | "bufferViews": [ 85 | { 86 | "buffer": 0, 87 | "byteLength": 28600 88 | }, 89 | { 90 | "buffer": 0, 91 | "byteOffset": 28600, 92 | "byteLength": 42900 93 | }, 94 | { 95 | "buffer": 0, 96 | "byteOffset": 71500, 97 | "byteLength": 57200 98 | }, 99 | { 100 | "buffer": 0, 101 | "byteOffset": 128700, 102 | "byteLength": 42900 103 | }, 104 | { 105 | "buffer": 0, 106 | "byteOffset": 171600, 107 | "byteLength": 36216 108 | } 109 | ], 110 | "buffers": [ 111 | { 112 | "uri": "BoomBox.bin", 113 | "byteLength": 207816 114 | } 115 | ], 116 | "extensionsUsed": [ 117 | "KHR_materials_pbrSpecularGlossiness" 118 | ], 119 | "images": [ 120 | { 121 | "uri": "BoomBox_baseColor.png" 122 | }, 123 | { 124 | "uri": "BoomBox_roughnessMetallic.png" 125 | }, 126 | { 127 | "uri": "BoomBox_normal.png" 128 | }, 129 | { 130 | "uri": "BoomBox_emissive.png" 131 | }, 132 | { 133 | "uri": "BoomBox_occlusion.png" 134 | }, 135 | { 136 | "uri": "BoomBox_diffuse.png" 137 | }, 138 | { 139 | "uri": "BoomBox_specularGlossiness.png" 140 | } 141 | ], 142 | "meshes": [ 143 | { 144 | "primitives": [ 145 | { 146 | "attributes": { 147 | "TEXCOORD_0": 0, 148 | "NORMAL": 1, 149 | "TANGENT": 2, 150 | "POSITION": 3 151 | }, 152 | "indices": 4, 153 | "material": 0 154 | } 155 | ], 156 | "name": "BoomBox" 157 | } 158 | ], 159 | "materials": [ 160 | { 161 | "pbrMetallicRoughness": { 162 | "baseColorTexture": { 163 | "index": 0 164 | }, 165 | "metallicRoughnessTexture": { 166 | "index": 1 167 | } 168 | }, 169 | "normalTexture": { 170 | "index": 2 171 | }, 172 | "occlusionTexture": { 173 | "index": 4 174 | }, 175 | "emissiveFactor": [ 176 | 1.0, 177 | 1.0, 178 | 1.0 179 | ], 180 | "emissiveTexture": { 181 | "index": 3 182 | }, 183 | "name": "BoomBox_Mat", 184 | "extensions": { 185 | 
"KHR_materials_pbrSpecularGlossiness": { 186 | "diffuseTexture": { 187 | "index": 5 188 | }, 189 | "specularGlossinessTexture": { 190 | "index": 6 191 | } 192 | } 193 | } 194 | } 195 | ], 196 | "nodes": [ 197 | { 198 | "mesh": 0, 199 | "name": "BoomBox" 200 | } 201 | ], 202 | "scene": 0, 203 | "scenes": [ 204 | { 205 | "nodes": [ 206 | 0 207 | ] 208 | } 209 | ], 210 | "textures": [ 211 | { 212 | "source": 0 213 | }, 214 | { 215 | "source": 1 216 | }, 217 | { 218 | "source": 2 219 | }, 220 | { 221 | "source": 3 222 | }, 223 | { 224 | "source": 4 225 | }, 226 | { 227 | "source": 5 228 | }, 229 | { 230 | "source": 6 231 | } 232 | ] 233 | } -------------------------------------------------------------------------------- /examples/models/BoomBox/glTF-pbrSpecularGlossiness/BoomBox_baseColor ORIG.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mozilla/webxr-polyfill/4ba29e108b9b7a3317cf312dc7a5fe1550e3711b/examples/models/BoomBox/glTF-pbrSpecularGlossiness/BoomBox_baseColor ORIG.png -------------------------------------------------------------------------------- /examples/models/BoomBox/glTF-pbrSpecularGlossiness/BoomBox_baseColor.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mozilla/webxr-polyfill/4ba29e108b9b7a3317cf312dc7a5fe1550e3711b/examples/models/BoomBox/glTF-pbrSpecularGlossiness/BoomBox_baseColor.png -------------------------------------------------------------------------------- /examples/models/BoomBox/glTF-pbrSpecularGlossiness/BoomBox_diffuse ORIG.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mozilla/webxr-polyfill/4ba29e108b9b7a3317cf312dc7a5fe1550e3711b/examples/models/BoomBox/glTF-pbrSpecularGlossiness/BoomBox_diffuse ORIG.png -------------------------------------------------------------------------------- 
/examples/models/BoomBox/glTF-pbrSpecularGlossiness/BoomBox_diffuse.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mozilla/webxr-polyfill/4ba29e108b9b7a3317cf312dc7a5fe1550e3711b/examples/models/BoomBox/glTF-pbrSpecularGlossiness/BoomBox_diffuse.png -------------------------------------------------------------------------------- /examples/models/BoomBox/glTF-pbrSpecularGlossiness/BoomBox_emissive ORIG.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mozilla/webxr-polyfill/4ba29e108b9b7a3317cf312dc7a5fe1550e3711b/examples/models/BoomBox/glTF-pbrSpecularGlossiness/BoomBox_emissive ORIG.png -------------------------------------------------------------------------------- /examples/models/BoomBox/glTF-pbrSpecularGlossiness/BoomBox_emissive.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mozilla/webxr-polyfill/4ba29e108b9b7a3317cf312dc7a5fe1550e3711b/examples/models/BoomBox/glTF-pbrSpecularGlossiness/BoomBox_emissive.png -------------------------------------------------------------------------------- /examples/models/BoomBox/glTF-pbrSpecularGlossiness/BoomBox_normal ORIG.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mozilla/webxr-polyfill/4ba29e108b9b7a3317cf312dc7a5fe1550e3711b/examples/models/BoomBox/glTF-pbrSpecularGlossiness/BoomBox_normal ORIG.png -------------------------------------------------------------------------------- /examples/models/BoomBox/glTF-pbrSpecularGlossiness/BoomBox_normal.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mozilla/webxr-polyfill/4ba29e108b9b7a3317cf312dc7a5fe1550e3711b/examples/models/BoomBox/glTF-pbrSpecularGlossiness/BoomBox_normal.png 
-------------------------------------------------------------------------------- /examples/models/BoomBox/glTF-pbrSpecularGlossiness/BoomBox_occlusion ORIG.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mozilla/webxr-polyfill/4ba29e108b9b7a3317cf312dc7a5fe1550e3711b/examples/models/BoomBox/glTF-pbrSpecularGlossiness/BoomBox_occlusion ORIG.png -------------------------------------------------------------------------------- /examples/models/BoomBox/glTF-pbrSpecularGlossiness/BoomBox_occlusion.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mozilla/webxr-polyfill/4ba29e108b9b7a3317cf312dc7a5fe1550e3711b/examples/models/BoomBox/glTF-pbrSpecularGlossiness/BoomBox_occlusion.png -------------------------------------------------------------------------------- /examples/models/BoomBox/glTF-pbrSpecularGlossiness/BoomBox_roughnessMetallic ORIG.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mozilla/webxr-polyfill/4ba29e108b9b7a3317cf312dc7a5fe1550e3711b/examples/models/BoomBox/glTF-pbrSpecularGlossiness/BoomBox_roughnessMetallic ORIG.png -------------------------------------------------------------------------------- /examples/models/BoomBox/glTF-pbrSpecularGlossiness/BoomBox_roughnessMetallic.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mozilla/webxr-polyfill/4ba29e108b9b7a3317cf312dc7a5fe1550e3711b/examples/models/BoomBox/glTF-pbrSpecularGlossiness/BoomBox_roughnessMetallic.png -------------------------------------------------------------------------------- /examples/models/BoomBox/glTF-pbrSpecularGlossiness/BoomBox_specularGlossiness ORIG.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/mozilla/webxr-polyfill/4ba29e108b9b7a3317cf312dc7a5fe1550e3711b/examples/models/BoomBox/glTF-pbrSpecularGlossiness/BoomBox_specularGlossiness ORIG.png -------------------------------------------------------------------------------- /examples/models/BoomBox/glTF-pbrSpecularGlossiness/BoomBox_specularGlossiness.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mozilla/webxr-polyfill/4ba29e108b9b7a3317cf312dc7a5fe1550e3711b/examples/models/BoomBox/glTF-pbrSpecularGlossiness/BoomBox_specularGlossiness.png -------------------------------------------------------------------------------- /examples/models/BoomBox/glTF/BoomBox.bin: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mozilla/webxr-polyfill/4ba29e108b9b7a3317cf312dc7a5fe1550e3711b/examples/models/BoomBox/glTF/BoomBox.bin -------------------------------------------------------------------------------- /examples/models/BoomBox/glTF/BoomBox.gltf: -------------------------------------------------------------------------------- 1 | { 2 | "accessors": [ 3 | { 4 | "bufferView": 0, 5 | "componentType": 5126, 6 | "count": 3575, 7 | "type": "VEC2", 8 | "max": [ 9 | 0.9999003, 10 | -0.0221377648 11 | ], 12 | "min": [ 13 | 0.0006585993, 14 | -0.996773958 15 | ] 16 | }, 17 | { 18 | "bufferView": 1, 19 | "componentType": 5126, 20 | "count": 3575, 21 | "type": "VEC3", 22 | "max": [ 23 | 1.0, 24 | 1.0, 25 | 0.9999782 26 | ], 27 | "min": [ 28 | -1.0, 29 | -1.0, 30 | -0.9980823 31 | ] 32 | }, 33 | { 34 | "bufferView": 2, 35 | "componentType": 5126, 36 | "count": 3575, 37 | "type": "VEC4", 38 | "max": [ 39 | 1.0, 40 | 0.9999976, 41 | 1.0, 42 | 1.0 43 | ], 44 | "min": [ 45 | -0.9991289, 46 | -0.999907851, 47 | -1.0, 48 | 1.0 49 | ] 50 | }, 51 | { 52 | "bufferView": 3, 53 | "componentType": 5126, 54 | "count": 3575, 55 | "type": "VEC3", 56 | "max": [ 57 | 
0.009921154, 58 | 0.00977163, 59 | 0.0100762453 60 | ], 61 | "min": [ 62 | -0.009921154, 63 | -0.00977163, 64 | -0.0100762453 65 | ] 66 | }, 67 | { 68 | "bufferView": 4, 69 | "componentType": 5123, 70 | "count": 18108, 71 | "type": "SCALAR", 72 | "max": [ 73 | 3574 74 | ], 75 | "min": [ 76 | 0 77 | ] 78 | } 79 | ], 80 | "asset": { 81 | "generator": "glTF Tools for Unity", 82 | "version": "2.0" 83 | }, 84 | "bufferViews": [ 85 | { 86 | "buffer": 0, 87 | "byteLength": 28600 88 | }, 89 | { 90 | "buffer": 0, 91 | "byteOffset": 28600, 92 | "byteLength": 42900 93 | }, 94 | { 95 | "buffer": 0, 96 | "byteOffset": 71500, 97 | "byteLength": 57200 98 | }, 99 | { 100 | "buffer": 0, 101 | "byteOffset": 128700, 102 | "byteLength": 42900 103 | }, 104 | { 105 | "buffer": 0, 106 | "byteOffset": 171600, 107 | "byteLength": 36216 108 | } 109 | ], 110 | "buffers": [ 111 | { 112 | "uri": "BoomBox.bin", 113 | "byteLength": 207816 114 | } 115 | ], 116 | "images": [ 117 | { 118 | "uri": "BoomBox_baseColor.png" 119 | }, 120 | { 121 | "uri": "BoomBox_occlusionRoughnessMetallic.png" 122 | }, 123 | { 124 | "uri": "BoomBox_normal.png" 125 | }, 126 | { 127 | "uri": "BoomBox_emissive.png" 128 | } 129 | ], 130 | "meshes": [ 131 | { 132 | "primitives": [ 133 | { 134 | "attributes": { 135 | "TEXCOORD_0": 0, 136 | "NORMAL": 1, 137 | "TANGENT": 2, 138 | "POSITION": 3 139 | }, 140 | "indices": 4, 141 | "material": 0 142 | } 143 | ], 144 | "name": "BoomBox" 145 | } 146 | ], 147 | "materials": [ 148 | { 149 | "pbrMetallicRoughness": { 150 | "baseColorTexture": { 151 | "index": 0 152 | }, 153 | "metallicRoughnessTexture": { 154 | "index": 1 155 | } 156 | }, 157 | "normalTexture": { 158 | "index": 2 159 | }, 160 | "occlusionTexture": { 161 | "index": 1 162 | }, 163 | "emissiveFactor": [ 164 | 1.0, 165 | 1.0, 166 | 1.0 167 | ], 168 | "emissiveTexture": { 169 | "index": 3 170 | }, 171 | "name": "BoomBox_Mat" 172 | } 173 | ], 174 | "nodes": [ 175 | { 176 | "mesh": 0, 177 | "name": "BoomBox" 178 | } 179 | ], 
180 | "scene": 0, 181 | "scenes": [ 182 | { 183 | "nodes": [ 184 | 0 185 | ] 186 | } 187 | ], 188 | "textures": [ 189 | { 190 | "source": 0 191 | }, 192 | { 193 | "source": 1 194 | }, 195 | { 196 | "source": 2 197 | }, 198 | { 199 | "source": 3 200 | } 201 | ] 202 | } -------------------------------------------------------------------------------- /examples/models/BoomBox/glTF/BoomBox_baseColor.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mozilla/webxr-polyfill/4ba29e108b9b7a3317cf312dc7a5fe1550e3711b/examples/models/BoomBox/glTF/BoomBox_baseColor.png -------------------------------------------------------------------------------- /examples/models/BoomBox/glTF/BoomBox_emissive.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mozilla/webxr-polyfill/4ba29e108b9b7a3317cf312dc7a5fe1550e3711b/examples/models/BoomBox/glTF/BoomBox_emissive.png -------------------------------------------------------------------------------- /examples/models/BoomBox/glTF/BoomBox_normal.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mozilla/webxr-polyfill/4ba29e108b9b7a3317cf312dc7a5fe1550e3711b/examples/models/BoomBox/glTF/BoomBox_normal.png -------------------------------------------------------------------------------- /examples/models/BoomBox/glTF/BoomBox_occlusionRoughnessMetallic.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mozilla/webxr-polyfill/4ba29e108b9b7a3317cf312dc7a5fe1550e3711b/examples/models/BoomBox/glTF/BoomBox_occlusionRoughnessMetallic.png -------------------------------------------------------------------------------- /examples/models/BoomBox/screenshot/screenshot.jpg: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/mozilla/webxr-polyfill/4ba29e108b9b7a3317cf312dc7a5fe1550e3711b/examples/models/BoomBox/screenshot/screenshot.jpg -------------------------------------------------------------------------------- /examples/models/female02/01_-_Default1noCulling.JPG: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mozilla/webxr-polyfill/4ba29e108b9b7a3317cf312dc7a5fe1550e3711b/examples/models/female02/01_-_Default1noCulling.JPG -------------------------------------------------------------------------------- /examples/models/female02/02_-_Default1noCulling.JPG: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mozilla/webxr-polyfill/4ba29e108b9b7a3317cf312dc7a5fe1550e3711b/examples/models/female02/02_-_Default1noCulling.JPG -------------------------------------------------------------------------------- /examples/models/female02/03_-_Default1noCulling.JPG: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mozilla/webxr-polyfill/4ba29e108b9b7a3317cf312dc7a5fe1550e3711b/examples/models/female02/03_-_Default1noCulling.JPG -------------------------------------------------------------------------------- /examples/models/female02/Female02_bin.bin: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mozilla/webxr-polyfill/4ba29e108b9b7a3317cf312dc7a5fe1550e3711b/examples/models/female02/Female02_bin.bin -------------------------------------------------------------------------------- /examples/models/female02/Female02_bin.js: -------------------------------------------------------------------------------- 1 | { 2 | 3 | "metadata" : 4 | { 5 | "formatVersion" : 3.1, 6 | "sourceFile" : "female02.obj", 7 | "generatedBy" : "OBJConverter", 8 | "vertices" : 3274, 9 | "faces" : 6233, 10 | "normals" : 3292, 11 | 
"uvs" : 4935, 12 | "materials" : 6 13 | }, 14 | 15 | "materials": [ { 16 | "DbgColor" : 15658734, 17 | "DbgIndex" : 0, 18 | "DbgName" : "_03_-_Default1noCulli__03_-_Default1noCulli", 19 | "colorDiffuse" : [0.64, 0.64, 0.64], 20 | "colorSpecular" : [0.165, 0.165, 0.165], 21 | "illumination" : 2, 22 | "mapDiffuse" : "03_-_Default1noCulling.JPG", 23 | "opticalDensity" : 1.0, 24 | "specularCoef" : 154.901961, 25 | "opacity" : 1.0 26 | }, 27 | 28 | { 29 | "DbgColor" : 15597568, 30 | "DbgIndex" : 1, 31 | "DbgName" : "_02_-_Default1noCulli__02_-_Default1noCulli", 32 | "colorDiffuse" : [0.64, 0.64, 0.64], 33 | "colorSpecular" : [0.165, 0.165, 0.165], 34 | "illumination" : 2, 35 | "mapDiffuse" : "02_-_Default1noCulling.JPG", 36 | "opticalDensity" : 1.0, 37 | "specularCoef" : 154.901961, 38 | "opacity" : 1.0 39 | }, 40 | 41 | { 42 | "DbgColor" : 60928, 43 | "DbgIndex" : 2, 44 | "DbgName" : "FrontColorNoCullingID__02_-_Default1noCulli", 45 | "colorDiffuse" : [0.8, 0.8, 0.8], 46 | "colorSpecular" : [0.165, 0.165, 0.165], 47 | "illumination" : 2, 48 | "mapDiffuse" : "02_-_Default1noCulling.JPG", 49 | "opticalDensity" : 1.0, 50 | "specularCoef" : 154.901961, 51 | "opacity" : 1.0 52 | }, 53 | 54 | { 55 | "DbgColor" : 238, 56 | "DbgIndex" : 3, 57 | "DbgName" : "FrontColorNoCullingID__03_-_Default1noCulli", 58 | "colorDiffuse" : [0.8, 0.8, 0.8], 59 | "colorSpecular" : [0.165, 0.165, 0.165], 60 | "illumination" : 2, 61 | "mapDiffuse" : "03_-_Default1noCulling.JPG", 62 | "opticalDensity" : 1.0, 63 | "specularCoef" : 154.901961, 64 | "opacity" : 1.0 65 | }, 66 | 67 | { 68 | "DbgColor" : 15658496, 69 | "DbgIndex" : 4, 70 | "DbgName" : "_01_-_Default1noCulli__01_-_Default1noCulli", 71 | "colorDiffuse" : [0.64, 0.64, 0.64], 72 | "colorSpecular" : [0.165, 0.165, 0.165], 73 | "illumination" : 2, 74 | "mapDiffuse" : "01_-_Default1noCulling.JPG", 75 | "opticalDensity" : 1.0, 76 | "specularCoef" : 154.901961, 77 | "opacity" : 1.0 78 | }, 79 | 80 | { 81 | "DbgColor" : 61166, 82 | "DbgIndex" : 
5, 83 | "DbgName" : "FrontColorNoCullingID__01_-_Default1noCulli", 84 | "colorDiffuse" : [0.8, 0.8, 0.8], 85 | "colorSpecular" : [0.165, 0.165, 0.165], 86 | "illumination" : 2, 87 | "mapDiffuse" : "01_-_Default1noCulling.JPG", 88 | "opticalDensity" : 1.0, 89 | "specularCoef" : 154.901961, 90 | "opacity" : 1.0 91 | }], 92 | 93 | "buffers": "Female02_bin.bin" 94 | 95 | } 96 | -------------------------------------------------------------------------------- /examples/models/female02/female02.mtl: -------------------------------------------------------------------------------- 1 | # Material Count: 6 2 | newmtl FrontColorNoCullingID__01_-_Default1noCulli 3 | Ns 154.901961 4 | Ka 0.000000 0.000000 0.000000 5 | Kd 0.800000 0.800000 0.800000 6 | Ks 0.165000 0.165000 0.165000 7 | Ni 1.000000 8 | d 1.000000 9 | illum 2 10 | map_Kd 01_-_Default1noCulling.JPG 11 | 12 | 13 | newmtl _02_-_Default1noCulli__02_-_Default1noCulli 14 | Ns 154.901961 15 | Ka 0.000000 0.000000 0.000000 16 | Kd 0.640000 0.640000 0.640000 17 | Ks 0.165000 0.165000 0.165000 18 | Ni 1.000000 19 | d 1.000000 20 | illum 2 21 | map_Kd 02_-_Default1noCulling.JPG 22 | 23 | 24 | newmtl _01_-_Default1noCulli__01_-_Default1noCulli 25 | Ns 154.901961 26 | Ka 0.000000 0.000000 0.000000 27 | Kd 0.640000 0.640000 0.640000 28 | Ks 0.165000 0.165000 0.165000 29 | Ni 1.000000 30 | d 1.000000 31 | illum 2 32 | map_Kd 01_-_Default1noCulling.JPG 33 | 34 | 35 | newmtl FrontColorNoCullingID__03_-_Default1noCulli 36 | Ns 154.901961 37 | Ka 0.000000 0.000000 0.000000 38 | Kd 0.800000 0.800000 0.800000 39 | Ks 0.165000 0.165000 0.165000 40 | Ni 1.000000 41 | d 1.000000 42 | illum 2 43 | map_Kd 03_-_Default1noCulling.JPG 44 | 45 | 46 | newmtl _03_-_Default1noCulli__03_-_Default1noCulli 47 | Ns 154.901961 48 | Ka 0.000000 0.000000 0.000000 49 | Kd 0.640000 0.640000 0.640000 50 | Ks 0.165000 0.165000 0.165000 51 | Ni 1.000000 52 | d 1.000000 53 | illum 2 54 | map_Kd 03_-_Default1noCulling.JPG 55 | 56 | 57 | newmtl 
FrontColorNoCullingID__02_-_Default1noCulli 58 | Ns 154.901961 59 | Ka 0.000000 0.000000 0.000000 60 | Kd 0.800000 0.800000 0.800000 61 | Ks 0.165000 0.165000 0.165000 62 | Ni 1.000000 63 | d 1.000000 64 | illum 2 65 | map_Kd 02_-_Default1noCulling.JPG 66 | 67 | 68 | -------------------------------------------------------------------------------- /examples/models/female02/readme.txt: -------------------------------------------------------------------------------- 1 | Model by Reallusion iClone from Google 3d Warehouse: 2 | 3 | http://sketchup.google.com/3dwarehouse/details?mid=2c6fd128fca34052adc5f5b98d513da1 -------------------------------------------------------------------------------- /examples/models/male02/01_-_Default1noCulling.JPG: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mozilla/webxr-polyfill/4ba29e108b9b7a3317cf312dc7a5fe1550e3711b/examples/models/male02/01_-_Default1noCulling.JPG -------------------------------------------------------------------------------- /examples/models/male02/01_-_Default1noCulling.dds: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mozilla/webxr-polyfill/4ba29e108b9b7a3317cf312dc7a5fe1550e3711b/examples/models/male02/01_-_Default1noCulling.dds -------------------------------------------------------------------------------- /examples/models/male02/Male02_bin.bin: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mozilla/webxr-polyfill/4ba29e108b9b7a3317cf312dc7a5fe1550e3711b/examples/models/male02/Male02_bin.bin -------------------------------------------------------------------------------- /examples/models/male02/Male02_bin.js: -------------------------------------------------------------------------------- 1 | { 2 | 3 | "metadata" : 4 | { 5 | "formatVersion" : 3.1, 6 | "sourceFile" : "male02.obj", 7 | "generatedBy" : 
"OBJConverter", 8 | "vertices" : 2746, 9 | "faces" : 5004, 10 | "normals" : 2769, 11 | "uvs" : 3275, 12 | "materials" : 5 13 | }, 14 | 15 | "materials": [ { 16 | "DbgColor" : 15658734, 17 | "DbgIndex" : 0, 18 | "DbgName" : "male-02-1noCullingID_male-02-1noCulling.JP", 19 | "colorDiffuse" : [0.64, 0.64, 0.64], 20 | "colorSpecular" : [0.165, 0.165, 0.165], 21 | "illumination" : 2, 22 | "mapDiffuse" : "male-02-1noCulling.JPG", 23 | "opticalDensity" : 1.0, 24 | "specularCoef" : 154.901961, 25 | "opacity" : 1.0 26 | }, 27 | 28 | { 29 | "DbgColor" : 15597568, 30 | "DbgIndex" : 1, 31 | "DbgName" : "orig_02_-_Defaul1noCu_orig_02_-_Defaul1noCu", 32 | "colorDiffuse" : [0.64, 0.64, 0.64], 33 | "colorSpecular" : [0.165, 0.165, 0.165], 34 | "illumination" : 2, 35 | "mapDiffuse" : "orig_02_-_Defaul1noCulling.JPG", 36 | "opticalDensity" : 1.0, 37 | "specularCoef" : 154.901961, 38 | "opacity" : 1.0 39 | }, 40 | 41 | { 42 | "DbgColor" : 60928, 43 | "DbgIndex" : 2, 44 | "DbgName" : "FrontColorNoCullingID_orig_02_-_Defaul1noCu", 45 | "colorDiffuse" : [0.8, 0.8, 0.8], 46 | "colorSpecular" : [0.165, 0.165, 0.165], 47 | "illumination" : 2, 48 | "mapDiffuse" : "orig_02_-_Defaul1noCulling.JPG", 49 | "opticalDensity" : 1.0, 50 | "specularCoef" : 154.901961, 51 | "opacity" : 1.0 52 | }, 53 | 54 | { 55 | "DbgColor" : 238, 56 | "DbgIndex" : 3, 57 | "DbgName" : "_01_-_Default1noCulli__01_-_Default1noCulli", 58 | "colorDiffuse" : [0.64, 0.64, 0.64], 59 | "colorSpecular" : [0.165, 0.165, 0.165], 60 | "illumination" : 2, 61 | "mapDiffuse" : "01_-_Default1noCulling.JPG", 62 | "opticalDensity" : 1.0, 63 | "specularCoef" : 154.901961, 64 | "opacity" : 1.0 65 | }, 66 | 67 | { 68 | "DbgColor" : 15658496, 69 | "DbgIndex" : 4, 70 | "DbgName" : "FrontColorNoCullingID_male-02-1noCulling.JP", 71 | "colorDiffuse" : [0.8, 0.8, 0.8], 72 | "colorSpecular" : [0.165, 0.165, 0.165], 73 | "illumination" : 2, 74 | "mapDiffuse" : "male-02-1noCulling.JPG", 75 | "opticalDensity" : 1.0, 76 | "specularCoef" : 
154.901961, 77 | "opacity" : 1.0 78 | }], 79 | 80 | "buffers": "Male02_bin.bin" 81 | 82 | } 83 | -------------------------------------------------------------------------------- /examples/models/male02/male-02-1noCulling.JPG: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mozilla/webxr-polyfill/4ba29e108b9b7a3317cf312dc7a5fe1550e3711b/examples/models/male02/male-02-1noCulling.JPG -------------------------------------------------------------------------------- /examples/models/male02/male-02-1noCulling.dds: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mozilla/webxr-polyfill/4ba29e108b9b7a3317cf312dc7a5fe1550e3711b/examples/models/male02/male-02-1noCulling.dds -------------------------------------------------------------------------------- /examples/models/male02/male02.mtl: -------------------------------------------------------------------------------- 1 | # Material Count: 5 2 | newmtl _01_-_Default1noCulli__01_-_Default1noCulli 3 | Ns 154.901961 4 | Ka 0.000000 0.000000 0.000000 5 | Kd 0.640000 0.640000 0.640000 6 | Ks 0.165000 0.165000 0.165000 7 | Ni 1.000000 8 | d 1.000000 9 | illum 2 10 | map_Kd 01_-_Default1noCulling.JPG 11 | 12 | 13 | newmtl FrontColorNoCullingID_male-02-1noCulling.JP 14 | Ns 154.901961 15 | Ka 0.000000 0.000000 0.000000 16 | Kd 0.800000 0.800000 0.800000 17 | Ks 0.165000 0.165000 0.165000 18 | Ni 1.000000 19 | d 1.000000 20 | illum 2 21 | map_Kd male-02-1noCulling.JPG 22 | 23 | 24 | newmtl male-02-1noCullingID_male-02-1noCulling.JP 25 | Ns 154.901961 26 | Ka 0.000000 0.000000 0.000000 27 | Kd 0.640000 0.640000 0.640000 28 | Ks 0.165000 0.165000 0.165000 29 | Ni 1.000000 30 | d 1.000000 31 | illum 2 32 | map_Kd male-02-1noCulling.JPG 33 | 34 | 35 | newmtl orig_02_-_Defaul1noCu_orig_02_-_Defaul1noCu 36 | Ns 154.901961 37 | Ka 0.000000 0.000000 0.000000 38 | Kd 0.640000 0.640000 0.640000 39 | Ks 0.165000 
0.165000 0.165000 40 | Ni 1.000000 41 | d 1.000000 42 | illum 2 43 | map_Kd orig_02_-_Defaul1noCulling.JPG 44 | 45 | 46 | newmtl FrontColorNoCullingID_orig_02_-_Defaul1noCu 47 | Ns 154.901961 48 | Ka 0.000000 0.000000 0.000000 49 | Kd 0.800000 0.800000 0.800000 50 | Ks 0.165000 0.165000 0.165000 51 | Ni 1.000000 52 | d 1.000000 53 | illum 2 54 | map_Kd orig_02_-_Defaul1noCulling.JPG 55 | 56 | 57 | -------------------------------------------------------------------------------- /examples/models/male02/male02_dds.mtl: -------------------------------------------------------------------------------- 1 | # Material Count: 5 2 | newmtl _01_-_Default1noCulli__01_-_Default1noCulli 3 | Ns 30.0000 4 | Ka 0.640000 0.640000 0.640000 5 | Kd 0.640000 0.640000 0.640000 6 | Ks 0.050000 0.050000 0.050000 7 | Ni 1.000000 8 | d 1.000000 9 | illum 2 10 | map_Kd 01_-_Default1noCulling.dds 11 | 12 | 13 | newmtl FrontColorNoCullingID_male-02-1noCulling.JP 14 | Ns 30.0000 15 | Ka 0.800000 0.800000 0.800000 16 | Kd 0.800000 0.800000 0.800000 17 | Ks 0.050000 0.050000 0.050000 18 | Ni 1.000000 19 | d 1.000000 20 | illum 2 21 | map_Kd male-02-1noCulling.dds 22 | 23 | 24 | newmtl male-02-1noCullingID_male-02-1noCulling.JP 25 | Ns 30.0000 26 | Ka 0.640000 0.640000 0.640000 27 | Kd 0.640000 0.640000 0.640000 28 | Ks 0.050000 0.050000 0.050000 29 | Ni 1.000000 30 | d 1.000000 31 | illum 2 32 | map_Kd male-02-1noCulling.dds 33 | 34 | 35 | newmtl orig_02_-_Defaul1noCu_orig_02_-_Defaul1noCu 36 | Ns 30.0000 37 | Ka 0.640000 0.640000 0.640000 38 | Kd 0.640000 0.640000 0.640000 39 | Ks 0.050000 0.050000 0.050000 40 | Ni 1.000000 41 | d 1.000000 42 | illum 2 43 | map_Kd orig_02_-_Defaul1noCulling.dds 44 | 45 | 46 | newmtl FrontColorNoCullingID_orig_02_-_Defaul1noCu 47 | Ns 30.0000 48 | Ka 0.800000 0.800000 0.800000 49 | Kd 0.800000 0.800000 0.800000 50 | Ks 0.050000 0.050000 0.050000 51 | Ni 1.000000 52 | d 1.000000 53 | illum 2 54 | map_Kd orig_02_-_Defaul1noCulling.dds 55 | 56 | 57 | 
-------------------------------------------------------------------------------- /examples/models/male02/orig_02_-_Defaul1noCulling.JPG: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mozilla/webxr-polyfill/4ba29e108b9b7a3317cf312dc7a5fe1550e3711b/examples/models/male02/orig_02_-_Defaul1noCulling.JPG -------------------------------------------------------------------------------- /examples/models/male02/orig_02_-_Defaul1noCulling.dds: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mozilla/webxr-polyfill/4ba29e108b9b7a3317cf312dc7a5fe1550e3711b/examples/models/male02/orig_02_-_Defaul1noCulling.dds -------------------------------------------------------------------------------- /examples/models/male02/readme.txt: -------------------------------------------------------------------------------- 1 | Model by Reallusion iClone from Google 3d Warehouse: 2 | 3 | https://3dwarehouse.sketchup.com/user.html?id=0122725873552223594220183 4 | -------------------------------------------------------------------------------- /examples/opencv-aruco/createMarker.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 11 | 12 | 13 |
14 |

Set the maker number in this code and run it:

15 |

(Marker generator courtesy of Ningxin Hu @ Intel)

16 |
17 | 25 |

26 |
27 |
28 |
29 | 30 |
31 |
// createCVMat
//
// Copies a raw camera buffer into a cached cv.Mat, doing two things along the
// way (when needed):
//   - rotates the image so it is upright (rotation in degrees: -90, 0, 90, 180)
//   - converts to single-channel greyscale (RGBA is averaged; YUV uses the Y plane)
//
// The destination Mat is cached across calls and only reallocated when the
// rotation changes, so the steady-state per-frame cost is just the pixel copy.

var rotatedImage = null;   // cached destination Mat, reused frame to frame
var lastRotation = -1;     // rotation the cached Mat was allocated for

// rotation:    degrees the source must be rotated to be upright (-90, 0, 90, 180)
// buffer:      { size: {width, height, bytesPerRow, bytesPerPixel}, buffer: ArrayBuffer }
// pixelFormat: XRVideoFrame.IMAGEFORMAT_YUV420P (1 byte/pixel, luminance plane) or
//              XRVideoFrame.IMAGEFORMAT_RGBA32 (4 bytes/pixel)
// returns the cached cv.Mat holding the upright greyscale image
function createCVMat(rotation, buffer, pixelFormat) {
    var width = buffer.size.width
    var height = buffer.size.height

    // (Re)allocate the destination only when the rotation changes; a +/-90
    // degree rotation swaps the Mat's row/column dimensions.
    if (!rotatedImage || (lastRotation != rotation)) {
        lastRotation = rotation;
        if (rotatedImage) rotatedImage.delete()

        if (rotation == 90 || rotation == -90) {
            rotatedImage = new cv.Mat(width, height, cv.CV_8U)
        } else {
            rotatedImage = new cv.Mat(height, width, cv.CV_8U)
        }
    }

    var src = 0;
    var dest = 0;
    var i, j;   // single declaration; the loops below reuse these indices
    var b = new Uint8Array(buffer.buffer);
    var r = rotatedImage.data;

    // padding bytes at the end of each source row
    var rowExtra = buffer.size.bytesPerRow - buffer.size.bytesPerPixel * width;

    switch (rotation) {
        case -90:
            // clockwise: source row j lands in destination column (height-1-j)
            dest = height - 1;
            for (j = 0; j < height; j++) {
                switch (pixelFormat) {
                    case XRVideoFrame.IMAGEFORMAT_YUV420P:
                        for (i = 0; i < width; i++) {
                            r[dest] = b[src++]
                            dest += height; // down one destination row, same column
                        }
                        break;
                    case XRVideoFrame.IMAGEFORMAT_RGBA32:
                        for (i = 0; i < width; i++) {
                            // grey = mean of R, G, B; the fourth byte (alpha) is skipped
                            r[dest] = (b[src++] + b[src++] + b[src++]) / 3
                            src++
                            dest += height; // down one destination row, same column
                        }
                        break;
                }
                dest -= width * height; // back to the top of the column...
                dest--;                 // ...and one column over
                src += rowExtra;
            }
            break;

        case 90:
            // anticlockwise: source row j lands in destination column j
            dest = width * (height - 1);
            for (j = 0; j < height; j++) {
                switch (pixelFormat) {
                    case XRVideoFrame.IMAGEFORMAT_YUV420P:
                        for (i = 0; i < width; i++) {
                            r[dest] = b[src++]
                            dest -= height; // up one destination row, same column
                        }
                        break;
                    case XRVideoFrame.IMAGEFORMAT_RGBA32:
                        for (i = 0; i < width; i++) {
                            r[dest] = (b[src++] + b[src++] + b[src++]) / 3
                            src++
                            dest -= height; // up one destination row, same column
                        }
                        break;
                }
                dest += width * height; // back to the bottom of the column...
                dest++;                 // ...and one column over
                src += rowExtra;
            }
            break;

        case 180:
            // upside down: copy back-to-front
            dest = width * height - 1;
            for (j = 0; j < height; j++) {
                switch (pixelFormat) {
                    case XRVideoFrame.IMAGEFORMAT_YUV420P:
                        for (i = 0; i < width; i++) {
                            r[dest--] = b[src++]
                        }
                        break;
                    case XRVideoFrame.IMAGEFORMAT_RGBA32:
                        for (i = 0; i < width; i++) {
                            r[dest--] = (b[src++] + b[src++] + b[src++]) / 3
                            src++
                        }
                        break;
                }
                src += rowExtra;
            }
            break;

        case 0:
        default:
            // already upright: straight row-by-row copy
            for (j = 0; j < height; j++) {
                switch (pixelFormat) {
                    case XRVideoFrame.IMAGEFORMAT_YUV420P:
                        for (i = 0; i < width; i++) {
                            r[dest++] = b[src++]
                        }
                        break;
                    case XRVideoFrame.IMAGEFORMAT_RGBA32:
                        for (i = 0; i < width; i++) {
                            r[dest++] = (b[src++] + b[src++] + b[src++]) / 3
                            src++
                        }
                        break;
                }
                src += rowExtra;
            }
    }
    return rotatedImage;
}
42 |
43 |

Reticle

44 |
(click to dismiss)
45 |

Place a reticle on surfaces.

46 |
47 | 167 | 168 | 169 | -------------------------------------------------------------------------------- /examples/reticle/screenshot.jpeg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mozilla/webxr-polyfill/4ba29e108b9b7a3317cf312dc7a5fe1550e3711b/examples/reticle/screenshot.jpeg -------------------------------------------------------------------------------- /examples/sensing/screenshot.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mozilla/webxr-polyfill/4ba29e108b9b7a3317cf312dc7a5fe1550e3711b/examples/sensing/screenshot.jpg -------------------------------------------------------------------------------- /examples/simplecv/screenshot.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mozilla/webxr-polyfill/4ba29e108b9b7a3317cf312dc7a5fe1550e3711b/examples/simplecv/screenshot.png -------------------------------------------------------------------------------- /examples/simplecv/target-28139_64.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mozilla/webxr-polyfill/4ba29e108b9b7a3317cf312dc7a5fe1550e3711b/examples/simplecv/target-28139_64.png -------------------------------------------------------------------------------- /examples/simplecv/worker.js: -------------------------------------------------------------------------------- 1 | //importScripts('webxr-worker.js') 2 | importScripts('../../dist/webxr-worker.js') 3 | /** 4 | * In the video callback, ev.detail contains: 5 | { 6 | "frame": { 7 | "buffers": [ // Array of base64 encoded string buffers 8 | { 9 | "size": { 10 | "width": 320, 11 | "height": 180, 12 | "bytesPerRow": 320, 13 | "bytesPerPixel": 1 14 | }, 15 | "buffer": "e3x...d7d" /// convert to Uint8 ArrayBuffer in code below 16 | }, 17 | { 18 | "size": { 19 | "width": 160, 
20 | "height": 90, 21 | "bytesPerRow": 320, 22 | "bytesPerPixel": 2 23 | }, 24 | "buffer": "ZZF.../fIJ7" /// convert to Uint8 ArrayBuffer in code below 25 | } 26 | ], 27 | "pixelFormatType": "kCVPixelFormatType_420YpCbCr8BiPlanarFullRange", 28 | "pixelFormat": "YUV420P", /// Added in the code below, clients should ignore pixelFormatType 29 | "timestamp": 337791 30 | }, 31 | "camera": { 32 | "cameraIntrinsics": [3x3 matrix], 33 | fx 0 px 34 | 0 fy py 35 | 0 0 1 36 | fx and fy are the focal length in pixels. 37 | px and py are the coordinates of the principal point in pixels. 38 | The origin is at the center of the upper-left pixel. 39 | 40 | "cameraImageResolution": { 41 | "width": 1280, 42 | "height": 720 43 | }, 44 | "viewMatrix": [4x4 camera view matrix], 45 | "interfaceOrientation": 3, 46 | // 0 UIDeviceOrientationUnknown 47 | // 1 UIDeviceOrientationPortrait 48 | // 2 UIDeviceOrientationPortraitUpsideDown 49 | // 3 UIDeviceOrientationLandscapeRight 50 | // 4 UIDeviceOrientationLandscapeLeft 51 | "projectionMatrix": [4x4 camera projection matrix] 52 | } 53 | } 54 | */ 55 | 56 | 57 | // some globals to hold the -- silly -- values we compute 58 | var intensity = 0.0; 59 | var cr = -1; 60 | var cg = -1; 61 | var cb = -1; 62 | 63 | // a silly simply function to compute something based on 'all the pixels' in an RGBA image 64 | averageIntensityRGBA = function (buffer) { 65 | var w = buffer.size.width; 66 | var h = buffer.size.height; 67 | var pad = buffer.size.bytesPerRow - w * buffer.size.bytesPerPixel; 68 | var pixels = buffer.buffer; 69 | 70 | intensity = 0.0; 71 | var p = 0; 72 | for (var r = 0; r < h; r++) { 73 | var v = 0; 74 | for (var i = 0; i < w; i++) { 75 | v += (pixels[p++] + pixels[p++] + pixels[p++]) / 3 76 | p++ 77 | } 78 | intensity += v / w; 79 | p += pad; 80 | } 81 | intensity = (intensity / h) / 255.0; 82 | } 83 | 84 | // a silly simply function to compute something based on 'all the pixels' in a grayscale image 85 | averageIntensityLum = function 
(buffer) { 86 | var w = buffer.size.width; 87 | var h = buffer.size.height; 88 | var pad = buffer.size.bytesPerRow - w * buffer.size.bytesPerPixel; 89 | var pixels = buffer.buffer; 90 | 91 | intensity = 0.0; 92 | var p = 0; 93 | for (var r = 0; r < h; r++) { 94 | var v = 0; 95 | for (var i = 0; i < w; i++) { 96 | v += pixels[p++] 97 | } 98 | intensity += v / w; 99 | p += pad; 100 | } 101 | intensity = (intensity / h) / 255.0; 102 | } 103 | 104 | // sample a single color, just for variety 105 | colorAtCenterRGB = function(buffer0) { 106 | var w = buffer0.size.width; 107 | var h = buffer0.size.height; 108 | var pixels = buffer0.buffer; 109 | 110 | var cx = Math.floor(w / 2) * buffer0.size.bytesPerPixel 111 | var cy = Math.floor(h / 2) 112 | var p = cy * buffer0.size.bytesPerRow + cx; 113 | cr = pixels[p++]; 114 | cg = pixels[p++]; 115 | cb = pixels[p]; 116 | } 117 | 118 | // Make an attempt to convert a UV color to RBG 119 | // 120 | // LUV == LuCbCr 121 | // 122 | // Y = 0.299R + 0.587G + 0.114B 123 | // U'= (B-Y)*0.565 124 | // V'= (R-Y)*0.713 125 | 126 | clamp = function (x, min, max) { 127 | if (x < min) { 128 | return min; 129 | } 130 | if (x > max) { 131 | return max; 132 | } 133 | return x; 134 | } 135 | 136 | colorAtCenterLUV = function(buffer0, buffer1) { 137 | var w = buffer0.size.width; 138 | var h = buffer0.size.height; 139 | var pixels = buffer0.buffer; 140 | 141 | var cx = Math.floor(w / 2) * buffer0.size.bytesPerPixel 142 | var cy = Math.floor(h / 2) 143 | var p = cy * buffer0.size.bytesPerRow + cx; 144 | var lum = pixels[p]; 145 | 146 | w = buffer1.size.width; 147 | h = buffer1.size.height; 148 | pixels = buffer1.buffer; 149 | 150 | cx = Math.floor(w / 2) * buffer1.size.bytesPerPixel 151 | cy = Math.floor(h / 2) 152 | p = cy * buffer1.size.bytesPerRow + cx; 153 | cb = pixels[p++]; 154 | cr = pixels[p]; 155 | 156 | // luv -> rgb. 
see https://www.fourcc.org/fccyvrgb.php 157 | var y=1.1643*(lum-16) 158 | var u=cb-128; 159 | var v=cr-128; 160 | cr=clamp(y+1.5958*v, 0, 255); 161 | cg=clamp(y-0.39173*u-0.81290*v, 0, 255); 162 | cb=clamp(y+2.017*u, 0, 255); 163 | 164 | // Alternatives: 165 | // 166 | // var y=lum 167 | // var u=cb-128; 168 | // var v=cr-128; 169 | // cr=y+1.402*v; 170 | // cg=y-0.34414*u-0.71414*v; 171 | // cb=y+1.772*u; 172 | } 173 | 174 | // The listener. 175 | // 176 | // We can ignore the message type field, since we are only receiving one message from 177 | // the main thread, a new video frame 178 | self.addEventListener('message', function(event){ 179 | try { 180 | // a utility function to receive the message. Takes care of managing the 181 | // internal ArrayBuffers that are being passed around 182 | var videoFrame = XRVideoFrame.createFromMessage(event); 183 | 184 | // The video frames will come in different formats on different platforms. 185 | // The call to videoFrame.buffer(i) retrieves the i-th plane for the frame; 186 | // (in the case of the WebXR Viewer, it also converts the base64 encoded message 187 | // into an ArrayBuffer, which we don't do until the plane is used) 188 | switch (videoFrame.pixelFormat) { 189 | // the WebXR Viewer uses iOS native YCbCr, which is two buffers, one for Y and one for CbCr 190 | case XRVideoFrame.IMAGEFORMAT_YUV420P: 191 | this.averageIntensityLum(videoFrame.buffer(0)) 192 | this.colorAtCenterLUV(videoFrame.buffer(0),videoFrame.buffer(1)) 193 | break; 194 | // WebRTC uses web-standard RGBA 195 | case XRVideoFrame.IMAGEFORMAT_RGBA32: 196 | this.averageIntensityRGBA(videoFrame.buffer(0)) 197 | this.colorAtCenterRGB(videoFrame.buffer(0)) 198 | break; 199 | } 200 | 201 | // utility function to send the video frame and additional parameters back. 202 | // Want to use this so we pass ArrayBuffers back and forth to avoid having to 203 | // reallocate them every frame. 
204 | videoFrame.postReplyMessage({intensity: intensity, cr: cr, cg: cg, cb: cb}) 205 | videoFrame.release(); 206 | } catch(e) { 207 | console.error('page error', e) 208 | } 209 | }); 210 | -------------------------------------------------------------------------------- /examples/textures/Park2/negx.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mozilla/webxr-polyfill/4ba29e108b9b7a3317cf312dc7a5fe1550e3711b/examples/textures/Park2/negx.jpg -------------------------------------------------------------------------------- /examples/textures/Park2/negy.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mozilla/webxr-polyfill/4ba29e108b9b7a3317cf312dc7a5fe1550e3711b/examples/textures/Park2/negy.jpg -------------------------------------------------------------------------------- /examples/textures/Park2/negz.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mozilla/webxr-polyfill/4ba29e108b9b7a3317cf312dc7a5fe1550e3711b/examples/textures/Park2/negz.jpg -------------------------------------------------------------------------------- /examples/textures/Park2/posx.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mozilla/webxr-polyfill/4ba29e108b9b7a3317cf312dc7a5fe1550e3711b/examples/textures/Park2/posx.jpg -------------------------------------------------------------------------------- /examples/textures/Park2/posy.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mozilla/webxr-polyfill/4ba29e108b9b7a3317cf312dc7a5fe1550e3711b/examples/textures/Park2/posy.jpg -------------------------------------------------------------------------------- /examples/textures/Park2/posz.jpg: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/mozilla/webxr-polyfill/4ba29e108b9b7a3317cf312dc7a5fe1550e3711b/examples/textures/Park2/posz.jpg -------------------------------------------------------------------------------- /examples/textures/Park2/readme.txt: -------------------------------------------------------------------------------- 1 | Author 2 | ====== 3 | 4 | This is the work of Emil Persson, aka Humus. 5 | http://www.humus.name 6 | humus@comhem.se 7 | 8 | 9 | 10 | Legal stuff 11 | =========== 12 | 13 | This work is free and may be used by anyone for any purpose 14 | and may be distributed freely to anyone using any distribution 15 | media or distribution method as long as this file is included. 16 | Distribution without this file is allowed if it's distributed 17 | with free non-commercial software; however, fair credit of the 18 | original author is expected. 19 | Any commercial distribution of this software requires the written 20 | approval of Emil Persson. 21 | -------------------------------------------------------------------------------- /examples/vr_simplest/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | VR simplest example 4 | 5 | 6 | 36 | 37 | 38 | 39 | 43 | 44 | 45 | 46 | 47 |
import EventHandlerBase from './fill/EventHandlerBase.js'

/*
A Reality is a source of views of the world: either the real world, observed
through sensors, or a virtual world rendered with WebGL or WebGPU.

Concrete realities subclass this and implement the underscore-prefixed hooks;
this base class stores shared identity/anchor state and documents the contract.
*/
export default class Reality extends EventHandlerBase {
	constructor(xr, name, isShared, isPassthrough){
		super()
		this._xr = xr
		this._name = name
		this._isShared = isShared
		this._isPassthrough = isPassthrough
		this._anchors = new Map() // anchor UID -> anchor
	}

	get name(){
		return this._name
	}

	get isShared(){
		return this._isShared
	}

	get isPassthrough(){
		return this._isPassthrough
	}

	getCoordinateSystem(...types){
		//XRCoordinateSystem? getCoordinateSystem(XRFrameOfReferenceType type, ...); // Tries the types in order, returning the first match or null if none is found
		throw new Error('Not implemented')
	}

	// Invoked once at least one active XRSession is using this Reality.
	_start(parameters){
		throw new Error('Extending classes should implement _start')
	}

	// Invoked once no active XRSession is using this Reality any longer.
	_stop(){
		throw new Error('Extending classes should implement _stop')
	}

	// Ask for another video frame to be generated, typically by video-mixed Realities.
	_requestVideoFrame() {
	}

	// Pause / resume the video frame stream.
	_stopVideoFrames() {
	}

	_startVideoFrames() {
	}

	// Invoked by a session just before it hands a new XRPresentationFrame to the app.
	_handleNewFrame(){}

	// Create an anchor hung in space; returns a DOMString anchor UID.
	_addAnchor(anchor, display){
		throw new Error('Extending classes should implement _addAnchor')
	}

	// Create an anchor attached to a surface, as found by a ray.
	// Resolves to an AnchorOffset, or null if the hit test failed.
	// Normalized screen x/y are in 0..1: (0,0) top left, (1,1) bottom right.
	_findAnchor(normalizedScreenX, normalizedScreenY, display){
		throw new Error('Extending classes should implement _findAnchor')
	}

	_createImageAnchor(uid, buffer, width, height, physicalWidthInMeters) {
		throw new Error('Extending classes should implement _createImageAnchor')
	}

	activateDetectionImage(uid, display) {
		throw new Error('Extending classes should implement _activateDetectionImage')
	}

	// Find an XRAnchorOffset at floor level below the current head pose.
	// Resolves to an AnchorOffset, or null if the floor level is unknown.
	_findFloorAnchor(display, uid=null){
		throw new Error('Extending classes should implement _findFloorAnchor')
	}

	_getAnchor(uid){
		return this._anchors.get(uid) || null
	}

	// Remove the anchor with the given UID; returns void.
	_removeAnchor(uid){
		throw new Error('Extending classes should implement _removeAnchor')
	}

	_hitTestNoAnchor(normalizedScreenX, normalizedScreenY, display){
		throw new Error('Extending classes should implement _hitTestNoAnchor')
	}

	_getLightAmbientIntensity(){
		throw new Error('Extending classes should implement _getLightAmbientIntensity')
	}

	_getWorldMap() {
		throw new Error('Extending classes should implement _getWorldMap')
	}

	_setWorldMap(worldMap) {
		throw new Error('Extending classes should implement _setWorldMap')
	}

	_getWorldMappingStatus() {
		throw new Error('Extending classes should implement _getWorldMappingStatus')
	}

	// attribute EventHandler onchange;
}

// Event name constants fired by Reality implementations.
Reality.COMPUTER_VISION_DATA = 'cv_data'
Reality.WINDOW_RESIZE_EVENT = 'window-resize'
Reality.NEW_WORLD_ANCHOR = 'world-anchor'
Reality.UPDATE_WORLD_ANCHOR = 'update-world-anchor'
Reality.REMOVE_WORLD_ANCHOR = 'remove-world-anchor'
5 | */ 6 | export default class XRAnchor { 7 | constructor(xrCoordinateSystem, uid=null){ 8 | this._uid = uid || XRAnchor._generateUID() 9 | this._coordinateSystem = xrCoordinateSystem 10 | } 11 | 12 | get uid(){ return this._uid } 13 | 14 | get coordinateSystem(){ return this._coordinateSystem } 15 | 16 | static _generateUID(){ 17 | return 'anchor-' + new Date().getTime() + '-' + Math.floor((Math.random() * Number.MAX_SAFE_INTEGER)) 18 | } 19 | } -------------------------------------------------------------------------------- /polyfill/XRAnchorOffset.js: -------------------------------------------------------------------------------- 1 | import MatrixMath from './fill/MatrixMath.js' 2 | import Quaternion from './fill/Quaternion.js' 3 | 4 | import XRAnchor from './XRAnchor.js' 5 | 6 | /* 7 | XRAnchorOffset represents a pose in relation to an XRAnchor 8 | */ 9 | export default class XRAnchorOffset { 10 | constructor(anchorUID, poseMatrix=null){ 11 | this._anchorUID = anchorUID 12 | this._tempArray = new Float32Array(16); 13 | this._poseMatrix = poseMatrix || MatrixMath.mat4_generateIdentity() 14 | } 15 | 16 | setIdentityOffset() { 17 | var p = this._poseMatrix 18 | p[0] = p[5] = p[10] = p[15] = 1 19 | p[1] = p[2] = p[3] = 0 20 | p[4] = p[6] = p[7] = 0 21 | p[8] = p[9] = p[11] = 0 22 | p[12] = p[13] = p[14] = 0 23 | } 24 | get anchorUID(){ return this._anchorUID } 25 | 26 | /* 27 | A Float32Array(16) representing a column major affine transform matrix 28 | */ 29 | get poseMatrix(){ return this._poseMatrix } 30 | 31 | set poseMatrix(array16){ 32 | for(let i=0; i < 16; i++){ 33 | this._poseMatrix[i] = array16[i] 34 | } 35 | } 36 | 37 | /* 38 | returns a Float32Array(4) representing an x, y, z position from this.poseMatrix 39 | */ 40 | get position(){ 41 | return new Float32Array([this._poseMatrix[12], this._poseMatrix[13], this._poseMatrix[14]]) 42 | } 43 | 44 | /* 45 | returns a Float32Array(4) representing x, y, z, w of a quaternion from this.poseMatrix 46 | */ 47 | 
get orientation(){ 48 | let quat = new Quaternion() 49 | quat.setFromRotationMatrix(this._poseMatrix) 50 | return quat.toArray() 51 | } 52 | 53 | /* 54 | Return a transform matrix that is offset by this XRAnchorOffset.poseMatrix relative to coordinateSystem 55 | */ 56 | getOffsetTransform(coordinateSystem){ 57 | return MatrixMath.mat4_multiply(this._tempArray, this._poseMatrix, coordinateSystem._poseModelMatrix) 58 | } 59 | } 60 | -------------------------------------------------------------------------------- /polyfill/XRCoordinateSystem.js: -------------------------------------------------------------------------------- 1 | import MatrixMath from './fill/MatrixMath.js' 2 | 3 | /* 4 | XRCoordinateSystem represents the origin of a 3D coordinate system positioned at a known frame of reference. 5 | The XRCoordinateSystem is a string from XRCoordinateSystem.TYPES: 6 | 7 | These types are used by the app code when requesting a coordinate system from the session: 8 | - XRCoordinateSystem.HEAD_MODEL: origin is aligned with the pose of the head, as sensed by HMD or handset trackers 9 | - XRCoordinateSystem.EYE_LEVEL: origin is at a fixed distance above the ground 10 | 11 | This is an internal type, specific to just this polyfill and not visible to the app code 12 | - XRCoordinateSystem.TRACKER: The origin of this coordinate system is at floor level at or below the origin of the HMD or handset provided tracking system 13 | 14 | */ 15 | export default class XRCoordinateSystem { 16 | constructor(display, type){ 17 | this._display = display 18 | this._type = type 19 | 20 | this.__relativeMatrix = MatrixMath.mat4_generateIdentity() 21 | this._workingMatrix = MatrixMath.mat4_generateIdentity() 22 | } 23 | 24 | getTransformTo(otherCoordinateSystem){ 25 | // apply inverse of the poseModelMatrix to the identity matrix 26 | let inverse = MatrixMath.mat4_invert(new Float32Array(16), otherCoordinateSystem._poseModelMatrix) 27 | let out = MatrixMath.mat4_generateIdentity() 28 | 
MatrixMath.mat4_multiply(out, inverse, out) 29 | 30 | // apply the other system's poseModelMatrix 31 | MatrixMath.mat4_multiply(out, this._poseModelMatrix, out) 32 | return out 33 | } 34 | 35 | get _relativeMatrix(){ return this.__relativeMatrix } 36 | 37 | set _relativeMatrix(value){ 38 | for(let i=0; i < 16; i++){ 39 | this.__relativeMatrix[i] = value[i] 40 | } 41 | } 42 | 43 | get _poseModelMatrix(){ 44 | switch(this._type){ 45 | case XRCoordinateSystem.HEAD_MODEL: 46 | return this._display._headPose.poseModelMatrix 47 | case XRCoordinateSystem.EYE_LEVEL: 48 | return this._display._eyeLevelPose.poseModelMatrix 49 | case XRCoordinateSystem.TRACKER: 50 | MatrixMath.mat4_multiply(this._workingMatrix, this.__relativeMatrix, this._display._trackerPoseModelMatrix) 51 | return this._workingMatrix 52 | default: 53 | throw new Error('Unknown coordinate system type: ' + this._type) 54 | } 55 | } 56 | } 57 | 58 | XRCoordinateSystem.HEAD_MODEL = 'headModel' 59 | XRCoordinateSystem.EYE_LEVEL = 'eyeLevel' 60 | XRCoordinateSystem.TRACKER = 'tracker' 61 | 62 | XRCoordinateSystem.TYPES = [ 63 | XRCoordinateSystem.HEAD_MODEL, 64 | XRCoordinateSystem.EYE_LEVEL, 65 | XRCoordinateSystem.TRACKER, 66 | ] -------------------------------------------------------------------------------- /polyfill/XRDisplay.js: -------------------------------------------------------------------------------- 1 | import MatrixMath from './fill/MatrixMath.js' 2 | import EventHandlerBase from './fill/EventHandlerBase.js' 3 | 4 | import VirtualReality from './reality/VirtualReality.js' 5 | 6 | import XRFieldOfView from './XRFieldOfView.js' 7 | import Reality from './Reality.js' 8 | 9 | 10 | /* 11 | Each XRDisplay represents a method of using a specific type of hardware to render AR or VR realities and layers. 
12 | 13 | This doesn't yet support a geospatial coordinate system 14 | */ 15 | export default class XRDisplay extends EventHandlerBase { 16 | constructor(xr, displayName, isExternal, reality){ 17 | super() 18 | this._xr = xr 19 | this._displayName = displayName 20 | this._isExternal = isExternal 21 | this._reality = reality // The Reality instance that is currently displayed 22 | 23 | this._headModelCoordinateSystem = new XRCoordinateSystem(this, XRCoordinateSystem.HEAD_MODEL) 24 | this._eyeLevelCoordinateSystem = new XRCoordinateSystem(this, XRCoordinateSystem.EYE_LEVEL) 25 | this._trackerCoordinateSystem = new XRCoordinateSystem(this, XRCoordinateSystem.TRACKER) 26 | 27 | this._headPose = new XRViewPose([0, XRViewPose.SITTING_EYE_HEIGHT, 0]) 28 | this._eyeLevelPose = new XRViewPose([0, XRViewPose.SITTING_EYE_HEIGHT, 0]) 29 | this._trackerPoseModelMatrix = MatrixMath.mat4_generateIdentity() 30 | 31 | this._fovy = 70; 32 | var fov = this._fovy/2; 33 | this._fov = new XRFieldOfView(fov, fov, fov, fov) 34 | this._depthNear = 0.1 35 | this._depthFar = 1000 36 | 37 | this._views = [] 38 | } 39 | 40 | get displayName(){ return this._displayName } 41 | 42 | get isExternal(){ return this._isExternal } 43 | 44 | supportsSession(parameters){ 45 | // parameters: XRSessionCreateParametersInit 46 | // returns boolean 47 | return this._supportedCreationParameters(parameters) 48 | } 49 | 50 | requestSession(parameters){ 51 | return new Promise((resolve, reject) => { 52 | if(this._supportedCreationParameters(parameters) === false){ 53 | reject() 54 | return 55 | } 56 | if(parameters.type === XRSession.REALITY){ 57 | this._reality = new VirtualReality() 58 | this._xr._privateRealities.push(this._reality) 59 | } 60 | resolve(this._createSession(parameters)) 61 | }) 62 | } 63 | 64 | // no-op unless display supports it 65 | _requestVideoFrame() {} 66 | 67 | _requestAnimationFrame(callback){ 68 | return window.requestAnimationFrame(callback) 69 | } 70 | 71 | 
_cancelAnimationFrame(handle){ 72 | return window.cancelAnimationFrame(handle) 73 | } 74 | 75 | _createSession(parameters){ 76 | return new XRSession(this._xr, this, parameters) 77 | } 78 | 79 | _supportedCreationParameters(parameters){ 80 | // returns true if the parameters are supported by this display 81 | throw 'Should be implemented by extending class' 82 | } 83 | 84 | /* 85 | Called by a session before it hands a new XRPresentationFrame to the app 86 | */ 87 | _handleNewFrame(frame){} 88 | 89 | /* 90 | Called by a session after it has handed the XRPresentationFrame to the app 91 | Use this for any display submission calls that need to happen after the render has occurred. 92 | */ 93 | _handleAfterFrame(frame){} 94 | 95 | 96 | /* 97 | Called by XRSession after the session.baseLayer is assigned a value 98 | */ 99 | _handleNewBaseLayer(baseLayer){} 100 | 101 | //attribute EventHandler ondeactivate; 102 | } 103 | 104 | XRDisplay.VIDEO_FRAME = 'videoFrame' 105 | XRDisplay.TRACKING_CHANGED = 'tracking-changed' 106 | XRDisplay.NEW_WORLD_ANCHOR = 'world-anchor' 107 | XRDisplay.UPDATE_WORLD_ANCHOR = 'update-world-anchor' 108 | XRDisplay.REMOVE_WORLD_ANCHOR = 'remove-world-anchor' 109 | -------------------------------------------------------------------------------- /polyfill/XRFaceAnchor.js: -------------------------------------------------------------------------------- 1 | import XRAnchor from './XRAnchor.js' 2 | 3 | /* 4 | XRFaceAnchor represents a face anchor 5 | */ 6 | export default class XRFaceAnchor extends XRAnchor { 7 | constructor(coordinateSystem, uid=null, geometry, blendShapeArray) { 8 | super(coordinateSystem, uid) 9 | this.geometry = geometry 10 | this.blendShapes = {} 11 | this.updateBlendShapes(blendShapeArray) 12 | } 13 | 14 | updateBlendShapes(blendShapeArray) { 15 | for (let i = 0; i < blendShapeNames.length; i++) { 16 | this.blendShapes[blendShapeNames[i]] = blendShapeArray[i] 17 | } 18 | } 19 | } 20 | 21 | const blendShapeNames = [ 22 | 
"browDownLeft", 23 | "browDownRight", 24 | "browInnerUp", 25 | "browOuterUpLeft", 26 | "browOuterUpRight", 27 | "cheekPuff", 28 | "cheekSquintLeft", 29 | "cheekSquintRight", 30 | "eyeBlinkLeft", 31 | "eyeBlinkRight", 32 | "eyeLookDownLeft", 33 | "eyeLookDownRight", 34 | "eyeLookInLeft", 35 | "eyeLookInRight", 36 | "eyeLookOutLeft", 37 | "eyeLookOutRight", 38 | "eyeLookUpLeft", 39 | "eyeLookUpRight", 40 | "eyeSquintLeft", 41 | "eyeSquintRight", 42 | "eyeWideLeft", 43 | "eyeWideRight", 44 | "jawForward", 45 | "jawLeft", 46 | "jawOpen", 47 | "jawRight", 48 | "mouthClose", 49 | "mouthDimpleLeft", 50 | "mouthDimpleRight", 51 | "mouthFrownLeft", 52 | "mouthFrownRight", 53 | "mouthFunnel", 54 | "mouthLeft", 55 | "mouthLowerDownLeft", 56 | "mouthLowerDownRight", 57 | "mouthPressLeft", 58 | "mouthPressRight", 59 | "mouthPucker", 60 | "mouthRight", 61 | "mouthRollLower", 62 | "mouthRollUpper", 63 | "mouthShrugLower", 64 | "mouthShrugUpper", 65 | "mouthSmileLeft", 66 | "mouthSmileRight", 67 | "mouthStretchLeft", 68 | "mouthStretchRight", 69 | "mouthUpperUpLeft", 70 | "mouthUpperUpRight", 71 | "noseSneerLeft", 72 | "noseSneerRight" 73 | ] -------------------------------------------------------------------------------- /polyfill/XRFieldOfView.js: -------------------------------------------------------------------------------- 1 | /* 2 | XRFieldOFView represents the four boundaries of a camera's field of view: up, down, left, and right. 
3 | */ 4 | export default class XRFieldOfView { 5 | constructor(upDegrees, downDegrees, leftDegrees, rightDegrees){ 6 | this._upDegrees = upDegrees 7 | this._downDegrees = downDegrees 8 | this._leftDegrees = leftDegrees 9 | this._rightDegrees = rightDegrees 10 | } 11 | 12 | get upDegrees(){ return this._upDegrees } 13 | get downDegrees(){ return this._downDegrees } 14 | get leftDegrees(){ return this._leftDegrees } 15 | get rightDegrees(){ return this._rightDegrees } 16 | } -------------------------------------------------------------------------------- /polyfill/XRImageAnchor.js: -------------------------------------------------------------------------------- 1 | import XRAnchor from './XRAnchor.js' 2 | 3 | /* 4 | XRFaceAnchor represents an anchor 5 | */ 6 | export default class XRImageAnchor extends XRAnchor { 7 | constructor(coordinateSystem, uid=null) { 8 | super(coordinateSystem, uid) 9 | } 10 | } -------------------------------------------------------------------------------- /polyfill/XRLayer.js: -------------------------------------------------------------------------------- 1 | import EventHandlerBase from './fill/EventHandlerBase.js' 2 | 3 | /* 4 | XRLayer defines a source of bitmap images and a description of how the image is to be rendered in the XRDisplay 5 | */ 6 | export default class XRLayer extends EventHandlerBase { 7 | // Everything is implemented on XRWebGLLayer 8 | } -------------------------------------------------------------------------------- /polyfill/XRLightEstimate.js: -------------------------------------------------------------------------------- 1 | /* 2 | XRLightEstimate represents the attributes of environmental light as supplied by the device's sensors. 3 | */ 4 | export default class XRLightEstimate { 5 | constructor(){ 6 | this._ambientLightIntensity = 1 7 | } 8 | 9 | set ambientIntensity(value){ 10 | // A value of 1000 represents "neutral" lighting. 
(https://developer.apple.com/documentation/arkit/arlightestimate/2878308-ambientintensity) 11 | this._ambientLightIntensity = value / 1000 12 | } 13 | 14 | get ambientIntensity(){ 15 | //readonly attribute double ambientIntensity; 16 | return this._ambientLightIntensity 17 | } 18 | 19 | getAmbientColorTemperature(){ 20 | //readonly attribute double ambientColorTemperature; 21 | throw new Error('Not implemented') 22 | } 23 | } -------------------------------------------------------------------------------- /polyfill/XRPlaneAnchor.js: -------------------------------------------------------------------------------- 1 | import XRAnchor from './XRAnchor.js' 2 | 3 | /* 4 | XRPlaneAnchor represents a flat surfaces like floors, table tops, or walls. 5 | */ 6 | export default class XRPlaneAnchor extends XRAnchor { 7 | constructor(coordinateSystem, uid=null, center, extent, alignment, geometry) { 8 | super(coordinateSystem, uid) 9 | this.center = center 10 | this.extent = extent 11 | this.alignment = alignment 12 | this.geometry = geometry 13 | } 14 | 15 | get width(){ 16 | //readonly attribute double width; 17 | throw 'Not implemented' 18 | } 19 | 20 | get length(){ 21 | //readonly attribute double length; 22 | throw 'Not implemented' 23 | } 24 | } -------------------------------------------------------------------------------- /polyfill/XRPointCloud.js: -------------------------------------------------------------------------------- 1 | 2 | /* 3 | XRPointCloud holds an array of float values where each four values represents [x, y, z, confidence in range 0-1] that describe a point in space detected by the device's sensors. 
4 | */ 5 | export default class XRPointCloud { 6 | get points(){ 7 | //readonly attribute Float32Array points 8 | throw new Error('Not implemented') 9 | } 10 | } -------------------------------------------------------------------------------- /polyfill/XRPolyfill.js: -------------------------------------------------------------------------------- 1 | import _XRDisplay from './XRDisplay.js' 2 | import _XRSession from './XRSession.js' 3 | import _XRSessionCreateParameters from './XRSessionCreateParameters.js' 4 | import _Reality from './Reality.js' 5 | import _XRPointCloud from './XRPointCloud.js' 6 | import _XRLightEstimate from './XRLightEstimate.js' 7 | import _XRAnchor from './XRAnchor.js' 8 | import _XRPlaneAnchor from './XRPlaneAnchor.js' 9 | import _XRFaceAnchor from './XRFaceAnchor.js' 10 | import _XRImageAnchor from './XRImageAnchor.js' 11 | import _XRAnchorOffset from './XRAnchorOffset.js' 12 | import _XRStageBounds from './XRStageBounds.js' 13 | import _XRStageBoundsPoint from './XRStageBoundsPoint.js' 14 | import _XRPresentationFrame from './XRPresentationFrame.js' 15 | import _XRView from './XRView.js' 16 | import _XRViewport from './XRViewport.js' 17 | import _XRCoordinateSystem from './XRCoordinateSystem.js' 18 | import _XRViewPose from './XRViewPose.js' 19 | import _XRLayer from './XRLayer.js' 20 | import _XRWebGLLayer from './XRWebGLLayer.js' 21 | import _XRVideoFrame from './XRVideoFrame.js' 22 | 23 | import EventHandlerBase from './fill/EventHandlerBase.js' 24 | import FlatDisplay from './display/FlatDisplay.js' 25 | import HeadMountedDisplay from './display/HeadMountedDisplay.js' 26 | 27 | import CameraReality from './reality/CameraReality.js' 28 | 29 | /* 30 | XRPolyfill implements the window.XR functionality as a polyfill 31 | 32 | Code below will check for window.XR and if it doesn't exist will install this polyfill, 33 | so you can safely include this script in any page. 
34 | */ 35 | class XRPolyfill extends EventHandlerBase { 36 | constructor(){ 37 | super() 38 | window.XRDisplay = _XRDisplay 39 | window.XRSession = _XRSession 40 | window.XRSessionCreateParameters = _XRSessionCreateParameters 41 | window.Reality = _Reality 42 | window.XRPointCloud = _XRPointCloud 43 | window.XRLightEstimate = _XRLightEstimate 44 | window.XRAnchor = _XRAnchor 45 | window.XRPlaneAnchor = _XRPlaneAnchor 46 | window.XRFaceAnchor = _XRFaceAnchor 47 | window.XRImageAnchor = _XRImageAnchor 48 | window.XRAnchorOffset = _XRAnchorOffset 49 | window.XRStageBounds = _XRStageBounds 50 | window.XRStageBoundsPoint = _XRStageBoundsPoint 51 | window.XRPresentationFrame = _XRPresentationFrame 52 | window.XRView = _XRView 53 | window.XRViewport = _XRViewport 54 | window.XRCoordinateSystem = _XRCoordinateSystem 55 | window.XRViewPose = _XRViewPose 56 | window.XRLayer = _XRLayer 57 | window.XRWebGLLayer = _XRWebGLLayer 58 | window.XRVideoFrame = _XRVideoFrame 59 | 60 | XRDisplay = window.XRDisplay 61 | XRSession = window.XRSession 62 | XRSessionCreateParameters = window.XRSessionCreateParameters 63 | Reality = window.Reality 64 | XRPointCloud = window.XRPointCloud 65 | XRLightEstimate = window.XRLightEstimate 66 | XRAnchor = window.XRAnchor; 67 | XRPlaneAnchor = window.XRPlaneAnchor; 68 | XRFaceAnchor = window.XRFaceAnchor; 69 | XRImageAnchor = window.XRImageAnchor; 70 | XRAnchorOffset = window.XRAnchorOffset; 71 | XRStageBounds = window.XRStageBounds; 72 | XRStageBoundsPoint = window.XRStageBoundsPoint; 73 | XRPresentationFrame = window.XRPresentationFrame; 74 | XRView = window.XRView; 75 | XRViewport = window.XRViewport; 76 | XRCoordinateSystem = window.XRCoordinateSystem; 77 | XRViewPose = window.XRViewPose; 78 | XRLayer = window.XRLayer; 79 | XRWebGLLayer = window.XRWebGLLayer; 80 | XRVideoFrame = window.XRVideoFrame; 81 | 82 | this._getVRDisplaysFinished = false; 83 | 84 | // Reality instances that may be shared by multiple XRSessions 85 | this._sharedRealities = 
[new CameraReality(this)] 86 | this._privateRealities = [] 87 | 88 | this._displays = [new FlatDisplay(this, this._sharedRealities[0])] 89 | 90 | if(typeof navigator.getVRDisplays === 'function'){ 91 | navigator.getVRDisplays().then(displays => { 92 | for(let display of displays){ 93 | if(display === null) continue 94 | if(display.capabilities.canPresent){ 95 | this._displays.push(new HeadMountedDisplay(this, this._sharedRealities[0], display)) 96 | } 97 | } 98 | this._getVRDisplaysFinished = true; 99 | }) 100 | } else { 101 | // if no WebVR, we don't need to wait 102 | this._getVRDisplaysFinished = true; 103 | } 104 | 105 | // These elements are at the beginning of the body and absolutely positioned to fill the entire window 106 | // Sessions and realities add their elements to these divs so that they are in the right render order 107 | this._sessionEls = document.createElement('div') 108 | this._sessionEls.setAttribute('class', 'webxr-sessions') 109 | this._realityEls = document.createElement('div') 110 | this._realityEls.setAttribute('class', 'webxr-realities') 111 | for(let el of [this._sessionEls, this._realityEls]){ 112 | el.style.position = 'absolute' 113 | el.style.width = '100%' 114 | el.style.height = '100%' 115 | } 116 | 117 | let prependElements = () => { 118 | document.body.style.width = '100%'; 119 | document.body.style.height = '100%'; 120 | document.body.prepend(this._sessionEls); 121 | document.body.prepend(this._realityEls); // realities must render behind the sessions 122 | } 123 | 124 | if(document.readyState !== 'loading') { 125 | prependElements(); 126 | } else { 127 | document.addEventListener('DOMContentLoaded', prependElements); 128 | } 129 | } 130 | 131 | getDisplays(){ 132 | var self=this 133 | var waitTillDisplaysChecked = function(resolve) { 134 | if (!self._getVRDisplaysFinished) { 135 | setTimeout(waitTillDisplaysChecked.bind(self, resolve), 30); 136 | } else { 137 | resolve(self._displays); 138 | } 139 | } 140 | return new 
Promise((resolve, reject) => { 141 | waitTillDisplaysChecked(resolve); 142 | }) 143 | } 144 | 145 | //attribute EventHandler ondisplayconnect; 146 | //attribute EventHandler ondisplaydisconnect; 147 | } 148 | 149 | /* Install XRPolyfill if window.XR does not exist */ 150 | if(typeof navigator.XR === 'undefined') { 151 | navigator.XR = new XRPolyfill() 152 | } 153 | -------------------------------------------------------------------------------- /polyfill/XRPresentationFrame.js: -------------------------------------------------------------------------------- 1 | import XRAnchor from './XRAnchor.js' 2 | import ARKitWrapper from './platform/ARKitWrapper.js' 3 | import MatrixMath from './fill/MatrixMath.js' 4 | 5 | /* 6 | XRPresentationFrame provides all of the values needed to render a single frame of an XR scene to the XRDisplay. 7 | */ 8 | export default class XRPresentationFrame { 9 | constructor(session, timestamp){ 10 | this._session = session 11 | this._timestamp = this._session.reality._getTimeStamp(timestamp); 12 | } 13 | 14 | get session(){ return this._session } 15 | 16 | get views(){ 17 | //readonly attribute FrozenArray views; 18 | return this._session._display._views 19 | } 20 | 21 | get hasPointCloud(){ 22 | //readonly attribute boolean hasPointCloud; 23 | return false 24 | } 25 | 26 | get pointCloud(){ 27 | //readonly attribute XRPointCloud? pointCloud; 28 | return null 29 | } 30 | 31 | get hasLightEstimate(){ 32 | //readonly attribute boolean hasLightEstimate; 33 | return this._session.reality._getHasLightEstimate(); 34 | } 35 | 36 | get lightEstimate(){ 37 | //readonly attribute XRLightEstimate? lightEstimate; 38 | return this._session.reality._getLightAmbientIntensity(); 39 | } 40 | 41 | get timestamp () { 42 | return this._timestamp; 43 | } 44 | /* 45 | Returns an array of known XRAnchor instances. May be empty. 
46 | */ 47 | get anchors(){ 48 | //readonly attribute sequence anchors; 49 | let results = [] 50 | for(let value of this._session.reality._anchors.values()){ 51 | results.push(value) 52 | } 53 | return results 54 | } 55 | 56 | /* 57 | Create an anchor at a specific position defined by XRAnchor.coordinates 58 | */ 59 | addAnchor(coordinateSystem, position=[0,0,0], orientation=[0,0,0,1],uid=null){ 60 | //DOMString? addAnchor(XRCoordinateSystem, position, orientation); 61 | let poseMatrix = MatrixMath.mat4_fromRotationTranslation(new Float32Array(16), orientation, position) 62 | MatrixMath.mat4_multiply(poseMatrix, coordinateSystem.getTransformTo(this._session._display._trackerCoordinateSystem), poseMatrix) 63 | let anchorCoordinateSystem = new XRCoordinateSystem(this._session._display, XRCoordinateSystem.TRACKER) 64 | anchorCoordinateSystem._relativeMatrix = poseMatrix 65 | return this._session.reality._addAnchor(new XRAnchor(anchorCoordinateSystem, uid), this._session.display) 66 | } 67 | 68 | // normalized screen x and y are in range 0..1, with 0,0 at top left and 1,1 at bottom right 69 | findAnchor(normalizedScreenX, normalizedScreenY, options=null){ 70 | // Promise findAnchor(float32, float32); // cast a ray to find or create an anchor at the first intersection in the Reality 71 | return this._session.reality._findAnchor(normalizedScreenX, normalizedScreenY, this._session.display, options) 72 | } 73 | 74 | hitTestNoAnchor(normalizedScreenX, normalizedScreenY){ 75 | // Array hitTestNoAnchor(float32, float32); // cast a ray to find all plane intersections in the Reality 76 | return this._session.reality._hitTestNoAnchor(normalizedScreenX, normalizedScreenY, this._session.display) 77 | } 78 | 79 | /* 80 | Find an XRAnchorOffset that is at floor level below the current head pose 81 | uid will be the resulting anchor uid (if any), or if null one will be assigned 82 | */ 83 | findFloorAnchor(uid=null){ 84 | // Promise findFloorAnchor(); 85 | return 
this._session.reality._findFloorAnchor(this._session.display, uid) 86 | } 87 | 88 | removeAnchor(uid){ 89 | // void removeAnchor(DOMString uid); 90 | return this._session.reality._removeAnchor(uid) 91 | } 92 | 93 | /* 94 | Returns an existing XRAnchor or null if uid is unknown 95 | */ 96 | getAnchor(uid){ 97 | // XRAnchor? getAnchor(DOMString uid); 98 | return this._session.reality._getAnchor(uid) 99 | } 100 | 101 | getCoordinateSystem(...types){ 102 | // XRCoordinateSystem? getCoordinateSystem(...XRFrameOfReferenceType types); // Tries the types in order, returning the first match or null if none is found 103 | return this._session._getCoordinateSystem(...types) 104 | } 105 | 106 | getDisplayPose(coordinateSystem){ 107 | // XRViewPose? getDisplayPose(XRCoordinateSystem coordinateSystem); 108 | switch(coordinateSystem._type){ 109 | case XRCoordinateSystem.HEAD_MODEL: 110 | return this._session._display._headPose 111 | case XRCoordinateSystem.EYE_LEVEL: 112 | return this._session._display._eyeLevelPose 113 | default: 114 | return null 115 | } 116 | } 117 | } 118 | 119 | // hit test types 120 | XRPresentationFrame.HIT_TEST_TYPE_FEATURE_POINT = ARKitWrapper.HIT_TEST_TYPE_FEATURE_POINT 121 | XRPresentationFrame.HIT_TEST_TYPE_ESTIMATED_HORIZONTAL_PLANE = ARKitWrapper.HIT_TEST_TYPE_ESTIMATED_HORIZONTAL_PLANE 122 | XRPresentationFrame.HIT_TEST_TYPE_ESTIMATED_VERTICAL_PLANE = ARKitWrapper.HIT_TEST_TYPE_ESTIMATED_VERTICAL_PLANE 123 | XRPresentationFrame.HIT_TEST_TYPE_EXISTING_PLANE = ARKitWrapper.HIT_TEST_TYPE_EXISTING_PLANE 124 | XRPresentationFrame.HIT_TEST_TYPE_EXISTING_PLANE_USING_EXTENT = ARKitWrapper.HIT_TEST_TYPE_EXISTING_PLANE_USING_EXTENT 125 | XRPresentationFrame.HIT_TEST_TYPE_EXISTING_PLANE_USING_GEOMETRY = ARKitWrapper.HIT_TEST_TYPE_EXISTING_PLANE_USING_GEOMETRY 126 | 127 | XRPresentationFrame.HIT_TEST_TYPE_ALL = ARKitWrapper.HIT_TEST_TYPE_FEATURE_POINT | 128 | ARKitWrapper.HIT_TEST_TYPE_EXISTING_PLANE | 129 | 
ARKitWrapper.HIT_TEST_TYPE_ESTIMATED_HORIZONTAL_PLANE | 130 | ARKitWrapper.HIT_TEST_TYPE_EXISTING_PLANE_USING_EXTENT 131 | 132 | XRPresentationFrame.HIT_TEST_TYPE_EXISTING_PLANES = ARKitWrapper.HIT_TEST_TYPE_EXISTING_PLANE | 133 | ARKitWrapper.HIT_TEST_TYPE_EXISTING_PLANE_USING_EXTENT -------------------------------------------------------------------------------- /polyfill/XRSessionCreateParameters.js: -------------------------------------------------------------------------------- 1 | /* 2 | The XRSessionCreateParametersInit dictionary provides a session description, indicating the desired capabilities of a session to be returned from requestSession() 3 | */ 4 | export default class XRSessionCreateParameters { 5 | get exclusive(){ 6 | //readonly attribute boolean exclusive; 7 | throw 'Not implemented' 8 | } 9 | 10 | get type(){ 11 | //readonly attribute XRSessionRealityType type; 12 | throw 'Not implemented' 13 | } 14 | } -------------------------------------------------------------------------------- /polyfill/XRStageBounds.js: -------------------------------------------------------------------------------- 1 | /* 2 | The XRStageBounds interface describes a space known as a "Stage". 3 | The stage is a bounded, floor-relative play space that the user can be expected to safely be able to move within. 4 | Other XR platforms sometimes refer to this concept as "room scale" or "standing XR". 5 | */ 6 | export default class XRStageBounds { 7 | get center(){ 8 | //readonly attribute XRCoordinateSystem center; 9 | throw new Error('Not implemented') 10 | } 11 | 12 | get geometry(){ 13 | //readonly attribute FrozenArray? 
geometry; 14 | throw new Error('Not implemented') 15 | } 16 | } -------------------------------------------------------------------------------- /polyfill/XRStageBoundsPoint.js: -------------------------------------------------------------------------------- 1 | /* 2 | XRStageBoundPoints represent the offset in meters from the stage origin along the X and Z axes. 3 | */ 4 | export default class XRStageBoundsPoint { 5 | get x(){ 6 | //readonly attribute double x; 7 | throw new Error('Not implemented') 8 | } 9 | 10 | get y(){ 11 | //readonly attribute double z; 12 | throw new Error('Not implemented') 13 | } 14 | } -------------------------------------------------------------------------------- /polyfill/XRView.js: -------------------------------------------------------------------------------- 1 | import XRViewport from './XRViewport.js' 2 | import MatrixMath from './fill/MatrixMath.js' 3 | 4 | /* 5 | An XRView describes a single view into an XR scene. 6 | It provides several values directly, and acts as a key to query view-specific values from other interfaces. 
7 | */ 8 | export default class XRView { 9 | constructor(fov, depthNear, depthFar, eye=null){ 10 | this._fov = fov 11 | this._depthNear = depthNear 12 | this._depthFar = depthFar 13 | this._eye = eye 14 | this._viewport = new XRViewport(0, 0, 1, 1) 15 | this._projectionMatrix = new Float32Array(16) 16 | this._viewMatrix = new Float32Array([1,0,0,0, 0,1,0,0, 0,0,1,0, 0,0,0,1]) 17 | MatrixMath.mat4_perspectiveFromFieldOfView(this._projectionMatrix, this._fov, this._depthNear, this._depthFar) 18 | } 19 | 20 | set fov ( value ) { 21 | this._fov = value 22 | MatrixMath.mat4_perspectiveFromFieldOfView(this._projectionMatrix, this._fov, this._depthNear, this._depthFar) 23 | } 24 | 25 | get eye(){ return this._eye } 26 | 27 | get projectionMatrix(){ return this._projectionMatrix } 28 | 29 | setProjectionMatrix(array16){ 30 | for(let i=0; i < 16; i++){ 31 | this._projectionMatrix[i] = array16[i] 32 | } 33 | } 34 | 35 | get viewMatrix(){ return this._viewMatrix } 36 | 37 | setViewMatrix(array16){ 38 | for(let i=0; i < 16; i++){ 39 | this._viewMatrix[i] = array16[i] 40 | } 41 | } 42 | 43 | getViewport(layer){ 44 | if(this._eye === XRView.LEFT){ 45 | this._viewport.x = 0 46 | this._viewport.y = 0 47 | this._viewport.width = layer.framebufferWidth / 2 48 | this._viewport.height = layer.framebufferHeight 49 | } else if(this._eye === XRView.RIGHT){ 50 | this._viewport.x = layer.framebufferWidth / 2 51 | this._viewport.y = 0 52 | this._viewport.width = layer.framebufferWidth / 2 53 | this._viewport.height = layer.framebufferHeight 54 | } else { 55 | this._viewport.x = 0 56 | this._viewport.y = 0 57 | this._viewport.width = layer.framebufferWidth 58 | this._viewport.height = layer.framebufferHeight 59 | } 60 | return this._viewport 61 | } 62 | } 63 | 64 | XRView.LEFT = 'left' 65 | XRView.RIGHT = 'right' 66 | XRView.EYES = [XRView.LEFT, XRView.RIGHT] -------------------------------------------------------------------------------- /polyfill/XRViewPose.js: 
-------------------------------------------------------------------------------- 1 | import MatrixMath from './fill/MatrixMath.js' 2 | import Quaternion from './fill/Quaternion.js' 3 | 4 | /* 5 | XRDevicePose describes the position and orientation of an XRDisplay relative to the query XRCoordinateSystem. 6 | It also describes the view and projection matrices that should be used by the application to render a frame of the XR scene. 7 | */ 8 | export default class XRViewPose { 9 | constructor(position=[0, 0, 0], orientation=[0, 0, 0, 1]){ 10 | this._poseModelMatrix = new Float32Array(16) 11 | MatrixMath.mat4_fromRotationTranslation(this._poseModelMatrix, orientation, position) 12 | } 13 | 14 | get poseModelMatrix(){ return this._poseModelMatrix } 15 | 16 | _setPoseModelMatrix(array16){ 17 | for(let i=0; i < 16; i++){ 18 | this._poseModelMatrix[i] = array16[i] 19 | } 20 | } 21 | 22 | get _position(){ 23 | return [this._poseModelMatrix[12], this._poseModelMatrix[13], this._poseModelMatrix[14]] 24 | } 25 | 26 | set _position(array3){ 27 | this._poseModelMatrix[12] = array3[0] 28 | this._poseModelMatrix[13] = array3[1] 29 | this._poseModelMatrix[14] = array3[2] 30 | } 31 | 32 | get _orientation(){ 33 | let quat = new Quaternion() 34 | quat.setFromRotationMatrix(this._poseModelMatrix) 35 | return quat.toArray() 36 | } 37 | 38 | set _orientation(array4){ 39 | MatrixMath.mat4_fromRotationTranslation(this._poseModelMatrix, array4, this._position) 40 | } 41 | 42 | _translate(array3){ 43 | this._poseModelMatrix[12] += array3[0] 44 | this._poseModelMatrix[13] += array3[1] 45 | this._poseModelMatrix[14] += array3[2] 46 | } 47 | 48 | getViewMatrix(view, out=null){ 49 | if(out === null){ 50 | out = new Float32Array(16) 51 | } 52 | MatrixMath.mat4_eyeView(out, this._poseModelMatrix) // TODO offsets 53 | return out 54 | } 55 | } 56 | 57 | // We are not going to use this any more. The way it was handled was broken, we'll just 58 | // use the raw values for the coordinate systems. 
59 | XRViewPose.SITTING_EYE_HEIGHT = 0 // meters 60 | -------------------------------------------------------------------------------- /polyfill/XRViewport.js: -------------------------------------------------------------------------------- 1 | /* 2 | XRViewport represents the dimensions in pixels of an XRView. 3 | */ 4 | export default class XRViewport { 5 | constructor(x, y, width, height){ 6 | this._x = x 7 | this._y = y 8 | this._width = width 9 | this._height = height 10 | } 11 | 12 | get x(){ return this._x } 13 | set x(value) { this._x = value } 14 | 15 | get y(){ return this._y } 16 | set y(value) { this._y = value } 17 | 18 | get width(){ return this._width } 19 | set width(value) { this._width = value } 20 | 21 | get height(){ return this._height } 22 | set height(value) { this._height = value } 23 | } -------------------------------------------------------------------------------- /polyfill/XRWebGLLayer.js: -------------------------------------------------------------------------------- 1 | import XRLayer from './XRLayer.js' 2 | 3 | /* 4 | XRWebGLLayer defines the WebGL or WebGL 2 context that is rendering the visuals for this layer. 
5 | */ 6 | export default class XRWebGLLayer extends XRLayer { 7 | constructor(session, context){ 8 | super() 9 | this._session = session 10 | this._context = context 11 | this._width = this._context.canvas.width; 12 | this._height = this._context.canvas.height; 13 | this._framebuffer = null // TODO 14 | } 15 | 16 | get context(){ return this._context } 17 | 18 | get antialias(){ 19 | // readonly attribute boolean antialias; 20 | throw 'Not implemented' 21 | } 22 | 23 | get depth(){ 24 | // readonly attribute boolean depth; 25 | throw 'Not implemented' 26 | } 27 | 28 | get stencil(){ 29 | // readonly attribute boolean stencil; 30 | throw 'Not implemented' 31 | } 32 | 33 | get alpha(){ 34 | // readonly attribute boolean alpha; 35 | throw 'Not implemented' 36 | } 37 | 38 | get multiview(){ 39 | // readonly attribute boolean multiview; 40 | throw 'Not implemented' 41 | } 42 | 43 | get framebuffer(){ 44 | return this._framebuffer 45 | } 46 | 47 | set framebufferWidth(w){ 48 | this._width = w; 49 | this._context.canvas.width = w; 50 | } 51 | 52 | get framebufferWidth(){ 53 | // not using this for now, on iOS it's not good. 54 | // var pr = window.devicePixelRatio || 1; 55 | //return this._context.canvas.clientWidth; 56 | return this._width; 57 | } 58 | 59 | set framebufferHeight(h){ 60 | this._height = h; 61 | this._context.canvas.height = h; 62 | } 63 | 64 | get framebufferHeight(){ 65 | // not using this for now, on iOS it's not good. 
66 | // var pr = window.devicePixelRatio || 1; 67 | //return this._context.canvas.clientHeight; 68 | return this._height; 69 | } 70 | 71 | requestViewportScaling(viewportScaleFactor){ 72 | // void requestViewportScaling(double viewportScaleFactor); 73 | throw 'Not implemented' 74 | } 75 | } -------------------------------------------------------------------------------- /polyfill/XRWorkerPolyfill.js: -------------------------------------------------------------------------------- 1 | import XRAnchor from './XRAnchor.js' 2 | import XRAnchorOffset from './XRAnchorOffset.js' 3 | import XRCoordinateSystem from './XRCoordinateSystem.js' 4 | import XRViewPose from './XRViewPose.js' 5 | import XRVideoFrame from './XRVideoFrame.js' 6 | import EventHandlerBase from './fill/EventHandlerBase.js' 7 | 8 | /* 9 | XRPolyfill implements the window.XR functionality as a polyfill 10 | 11 | Code below will check for window.XR and if it doesn't exist will install this polyfill, 12 | so you can safely include this script in any page. 
13 | */ 14 | export default class XRWorkerPolyfill extends EventHandlerBase { 15 | constructor(){ 16 | super() 17 | self.XRAnchor = XRAnchor 18 | self.XRAnchorOffset = XRAnchorOffset 19 | self.XRCoordinateSystem = XRCoordinateSystem 20 | self.XRViewPose = XRViewPose 21 | self.XRVideoFrame = XRVideoFrame 22 | } 23 | } 24 | 25 | /* Install XRWorkerPolyfill if self.XR does not exist */ 26 | self.XR = new XRWorkerPolyfill() -------------------------------------------------------------------------------- /polyfill/display/HeadMountedDisplay.js: -------------------------------------------------------------------------------- 1 | import XRDisplay from '../XRDisplay.js' 2 | import XRView from '../XRView.js' 3 | import XRSession from '../XRSession.js' 4 | import XRViewPose from '../XRViewPose.js' 5 | 6 | import MatrixMath from '../fill/MatrixMath.js' 7 | import Quaternion from '../fill/Quaternion.js' 8 | import Vector3 from '../fill/Vector3.js' 9 | 10 | import DeviceOrientationTracker from '../fill/DeviceOrientationTracker.js' 11 | import ARKitWrapper from '../platform/ARKitWrapper.js' 12 | 13 | /* 14 | HeadMountedDisplay wraps a WebVR 1.1 display, like a Vive, Rift, or Daydream. 
15 | */ 16 | export default class HeadMountedDisplay extends XRDisplay { 17 | constructor(xr, reality, vrDisplay){ 18 | super(xr, vrDisplay.displayName, vrDisplay.capabilities.hasExternalDisplay, reality) 19 | this._vrDisplay = vrDisplay 20 | this._vrFrameData = new VRFrameData() 21 | 22 | // The view projection matrices will be reset using VRFrameData during this._handleNewFrame 23 | this._leftView = new XRView(this._fov, this._depthNear, this._depthFar, XRView.LEFT) 24 | this._rightView = new XRView(this._fov, this._depthNear, this._depthFar, XRView.RIGHT) 25 | this._views = [this._leftView, this._rightView] 26 | 27 | // These will be used to set the head and eye level poses during this._handleNewFrame 28 | this._deviceOrientation = new Quaternion() 29 | this._devicePosition = new Vector3() 30 | this._deviceWorldMatrix = new Float32Array(16) 31 | } 32 | 33 | /* 34 | Called via the XRSession.requestAnimationFrame 35 | */ 36 | _requestAnimationFrame(callback){ 37 | if(this._vrDisplay.isPresenting){ 38 | this._vrDisplay.requestAnimationFrame(callback) 39 | } else { 40 | window.requestAnimationFrame(callback) 41 | } 42 | } 43 | 44 | /* 45 | Called by a session to indicate that its baseLayer attribute has been set. 
46 | This is where the VRDisplay is used to create a session 47 | */ 48 | _handleNewBaseLayer(baseLayer){ 49 | this._vrDisplay.requestPresent([{ 50 | source: baseLayer._context.canvas 51 | }]).then(() => { 52 | const leftEye = this._vrDisplay.getEyeParameters('left') 53 | const rightEye = this._vrDisplay.getEyeParameters('right') 54 | baseLayer.framebufferWidth = Math.max(leftEye.renderWidth, rightEye.renderWidth) * 2 55 | baseLayer.framebufferHeight = Math.max(leftEye.renderHeight, rightEye.renderHeight) 56 | baseLayer._context.canvas.style.position = 'absolute' 57 | baseLayer._context.canvas.style.bottom = '1px' 58 | baseLayer._context.canvas.style.right = '1px' 59 | baseLayer._context.canvas.style.width = "100%"; 60 | baseLayer._context.canvas.style.height = "100%"; 61 | document.body.appendChild(baseLayer._context.canvas) 62 | }).catch(e => { 63 | console.error('Unable to init WebVR 1.1 display', e) 64 | }) 65 | } 66 | 67 | _stop(){ 68 | // TODO figure out how to stop ARKit and ARCore so that CameraReality can still work 69 | if(this.running === false) return 70 | this.running = false 71 | this._reality._stop() 72 | } 73 | 74 | /* 75 | Called by a session before it hands a new XRPresentationFrame to the app 76 | */ 77 | _handleNewFrame(frame){ 78 | if(this._vrDisplay.isPresenting){ 79 | this._updateFromVRFrameData() 80 | } 81 | } 82 | 83 | _handleAfterFrame(frame){ 84 | if(this._vrDisplay.isPresenting){ 85 | this._vrDisplay.submitFrame() 86 | } 87 | } 88 | 89 | _supportedCreationParameters(parameters){ 90 | return parameters.type === XRSession.REALITY && parameters.exclusive === true 91 | } 92 | 93 | _updateFromVRFrameData(){ 94 | this._vrDisplay.getFrameData(this._vrFrameData) 95 | this._leftView.setViewMatrix(this._vrFrameData.leftViewMatrix) 96 | this._rightView.setViewMatrix(this._vrFrameData.rightViewMatrix) 97 | this._leftView.setProjectionMatrix(this._vrFrameData.leftProjectionMatrix) 98 | 
this._rightView.setProjectionMatrix(this._vrFrameData.rightProjectionMatrix) 99 | if(this._vrFrameData.pose){ 100 | if(this._vrFrameData.pose.orientation){ 101 | this._deviceOrientation.set(...this._vrFrameData.pose.orientation) 102 | } 103 | if(this._vrFrameData.pose.position){ 104 | this._devicePosition.set(...this._vrFrameData.pose.position) 105 | } 106 | MatrixMath.mat4_fromRotationTranslation(this._deviceWorldMatrix, this._deviceOrientation.toArray(), this._devicePosition.toArray()) 107 | if(this._vrDisplay.stageParameters && this._vrDisplay.stageParameters.sittingToStandingTransform){ 108 | MatrixMath.mat4_multiply(this._deviceWorldMatrix, this._vrDisplay.stageParameters.sittingToStandingTransform, this._deviceWorldMatrix) 109 | } 110 | this._headPose._setPoseModelMatrix(this._deviceWorldMatrix) 111 | this._eyeLevelPose.position = this._devicePosition.toArray() 112 | } 113 | } 114 | } -------------------------------------------------------------------------------- /polyfill/fill/DeviceOrientationTracker.js: -------------------------------------------------------------------------------- 1 | import EventHandlerBase from './EventHandlerBase.js' 2 | 3 | import Vector3 from './Vector3.js' 4 | import Quaternion from './Quaternion.js' 5 | import Euler from './Euler.js' 6 | 7 | /* 8 | DeviceOrientationTracker keeps track of device orientation, which can be queried usnig `getOrientation` 9 | */ 10 | export default class DeviceOrientationTracker extends EventHandlerBase { 11 | constructor(){ 12 | super() 13 | this._deviceOrientation = null 14 | this._windowOrientation = 0 15 | 16 | window.addEventListener('orientationchange', () => { 17 | this._windowOrientation = window.orientation || 0 18 | }, false) 19 | window.addEventListener('deviceorientation', ev => { 20 | this._deviceOrientation = ev 21 | try { 22 | this.dispatchEvent(new CustomEvent(DeviceOrientationTracker.ORIENTATION_UPDATE_EVENT, { 23 | deviceOrientation: this._deviceOrientation, 24 | windowOrientation: 
this._windowOrientation 25 | })) 26 | } catch(e) { 27 | console.error('deviceorientation event handler error', e) 28 | } 29 | }, false) 30 | } 31 | 32 | /* 33 | getOrientation sets the value of outQuaternion to the most recently tracked device orientation 34 | returns true if a device orientation has been received, otherwise false 35 | */ 36 | getOrientation(outQuaternion){ 37 | if(this._deviceOrientation === null){ 38 | outQuaternion.set(0, 0, 0, 1) 39 | return false 40 | } 41 | DeviceOrientationTracker.WORKING_EULER.set( 42 | this._deviceOrientation.beta * DeviceOrientationTracker.DEG_TO_RAD, 43 | this._deviceOrientation.alpha * DeviceOrientationTracker.DEG_TO_RAD, 44 | -1 * this._deviceOrientation.gamma * DeviceOrientationTracker.DEG_TO_RAD, 45 | 'YXZ' 46 | ) 47 | outQuaternion.setFromEuler( 48 | DeviceOrientationTracker.WORKING_EULER.x, 49 | DeviceOrientationTracker.WORKING_EULER.y, 50 | DeviceOrientationTracker.WORKING_EULER.z, 51 | DeviceOrientationTracker.WORKING_EULER.order 52 | ) 53 | outQuaternion.multiply(DeviceOrientationTracker.HALF_PI_AROUND_X) 54 | outQuaternion.multiply(DeviceOrientationTracker.WORKING_QUATERNION.setFromAxisAngle(DeviceOrientationTracker.Z_AXIS, -this._windowOrientation * DeviceOrientationTracker.DEG_TO_RAD)) 55 | return true 56 | } 57 | } 58 | 59 | DeviceOrientationTracker.ORIENTATION_UPDATE_EVENT = 'orientation-update' 60 | 61 | DeviceOrientationTracker.Z_AXIS = new Vector3(0, 0, 1) 62 | DeviceOrientationTracker.WORKING_EULER = new Euler() 63 | DeviceOrientationTracker.WORKING_QUATERNION = new Quaternion() 64 | DeviceOrientationTracker.HALF_PI_AROUND_X = new Quaternion(-Math.sqrt(0.5), 0, 0, Math.sqrt(0.5)) 65 | DeviceOrientationTracker.DEG_TO_RAD = Math.PI / 180 66 | -------------------------------------------------------------------------------- /polyfill/fill/Euler.js: -------------------------------------------------------------------------------- 1 | 2 | export default class Euler { 3 | constructor(x, y, z, 
/*
EventHandlerBase is the base class that implements the EventHandler interface
methods for registering listeners and dispatching events to them.
*/
export default class EventHandlerBase {
	constructor(){
		// Maps event type string -> array of listener functions
		this._listeners = new Map()
	}

	// Registers `listener` for events of `type`; the same listener may be added more than once.
	addEventListener(type, listener){
		if(Array.isArray(this._listeners.get(type)) === false){
			this._listeners.set(type, [])
		}
		this._listeners.get(type).push(listener)
	}

	// Removes the first registration of `listener` for `type`; a no-op if not registered.
	removeEventListener(type, listener){
		const listeners = this._listeners.get(type)
		if(Array.isArray(listeners) === false){
			return
		}
		const index = listeners.indexOf(listener)
		if(index !== -1){
			listeners.splice(index, 1)
		}
	}

	// Calls every listener registered for event.type, passing it the event.
	dispatchEvent(event){
		const listeners = this._listeners.get(event.type)
		if(Array.isArray(listeners) === false) return

		// Iterate over a copy: listeners often call removeEventListener on
		// themselves to create one-shots, and mutating the array while
		// iterating would cause later listeners to be skipped.
		for(const listener of listeners.slice(0)){
			listener(event)
		}
	}
}
/*
MatrixMath provides static helper functions for populating the various
4x4 column-major matrices (Float32Array of length 16) involved with 3D graphics.

Many of the math methods were taken from the Google webvr polyfill:
https://github.com/googlevr/webvr-polyfill/blob/master/src/util.js#L270
*/
export default class MatrixMath {

	// Returns a new Float32Array that is set to the transform identity
	static mat4_generateIdentity(){
		return new Float32Array([
			1, 0, 0, 0,
			0, 1, 0, 0,
			0, 0, 1, 0,
			0, 0, 0, 1,
		])
	}

	// Copies the translation column (elements 12-14) of matrix `m` into `out` (length >= 3).
	static mat4_get_position(out, m){
		out[0] = m[12]
		out[1] = m[13]
		out[2] = m[14]
		return out
	}

	// Writes the rotation of matrix `m` as a quaternion [x, y, z, w] into `out` (length >= 4).
	// Assumes the upper 3x3 of `m` is a pure (unscaled) rotation — see Quaternion.setFromRotationMatrix.
	static mat4_get_rotation(out, m){
		let quat = new Quaternion()
		quat.setFromRotationMatrix(m)
		out[0] = quat.x
		out[1] = quat.y
		out[2] = quat.z
		out[3] = quat.w
		return out
	}

	// Produces an eye view matrix in `out`: the pose model matrix translated by
	// `offset` (e.g. a per-eye offset), then inverted.
	static mat4_eyeView(out, poseModelMatrix, offset=new Float32Array([0, 0, 0])) {
		MatrixMath.mat4_translate(out, poseModelMatrix, offset)
		MatrixMath.mat4_invert(out, out)
	}

	// Builds a perspective projection matrix in `out` from a field of view given
	// as { upDegrees, downDegrees, leftDegrees, rightDegrees } and near/far clip distances.
	static mat4_perspectiveFromFieldOfView(out, fov, near, far) {
		var upTan = Math.tan(fov.upDegrees * MatrixMath.PI_OVER_180)
		var downTan = Math.tan(fov.downDegrees * MatrixMath.PI_OVER_180)
		var leftTan = Math.tan(fov.leftDegrees * MatrixMath.PI_OVER_180)
		var rightTan = Math.tan(fov.rightDegrees * MatrixMath.PI_OVER_180)

		var xScale = 2.0 / (leftTan + rightTan)
		var yScale = 2.0 / (upTan + downTan)

		out[0] = xScale
		out[1] = 0.0
		out[2] = 0.0
		out[3] = 0.0
		out[4] = 0.0
		out[5] = yScale
		out[6] = 0.0
		out[7] = 0.0
		// Off-center terms: shift the projection when the FOV is asymmetric
		out[8] = -((leftTan - rightTan) * xScale * 0.5)
		out[9] = ((upTan - downTan) * yScale * 0.5)
		out[10] = far / (near - far)
		out[11] = -1.0
		out[12] = 0.0
		out[13] = 0.0
		out[14] = (far * near) / (near - far)
		out[15] = 0.0
		return out
	}

	// Composes a model matrix in `out` from quaternion `q` ([x, y, z, w]) and
	// translation `v` ([x, y, z]); defaults to the identity transform.
	static mat4_fromRotationTranslation(out, q=[0,0,0,1], v=[0,0,0]) {
		// Quaternion math
		var x = q[0]
		var y = q[1]
		var z = q[2]
		var w = q[3]
		var x2 = x + x
		var y2 = y + y
		var z2 = z + z

		var xx = x * x2
		var xy = x * y2
		var xz = x * z2
		var yy = y * y2
		var yz = y * z2
		var zz = z * z2
		var wx = w * x2
		var wy = w * y2
		var wz = w * z2

		out[0] = 1 - (yy + zz)
		out[1] = xy + wz
		out[2] = xz - wy
		out[3] = 0
		out[4] = xy - wz
		out[5] = 1 - (xx + zz)
		out[6] = yz + wx
		out[7] = 0
		out[8] = xz + wy
		out[9] = yz - wx
		out[10] = 1 - (xx + yy)
		out[11] = 0
		out[12] = v[0]
		out[13] = v[1]
		out[14] = v[2]
		out[15] = 1

		return out
	}

	// Translates matrix `a` by vector `v`, writing the result into `out`.
	// `out` and `a` may be the same array (in-place translate).
	static mat4_translate(out, a, v) {
		var x = v[0]
		var y = v[1]
		var z = v[2]
		let a00
		let a01
		let a02
		let a03
		let a10, a11, a12, a13,
			a20, a21, a22, a23

		if (a === out) {
			// In place: only the translation column changes
			out[12] = a[0] * x + a[4] * y + a[8] * z + a[12]
			out[13] = a[1] * x + a[5] * y + a[9] * z + a[13]
			out[14] = a[2] * x + a[6] * y + a[10] * z + a[14]
			out[15] = a[3] * x + a[7] * y + a[11] * z + a[15]
		} else {
			// Copy the rotation part, then compute the new translation column
			a00 = a[0]; a01 = a[1]; a02 = a[2]; a03 = a[3]
			a10 = a[4]; a11 = a[5]; a12 = a[6]; a13 = a[7]
			a20 = a[8]; a21 = a[9]; a22 = a[10]; a23 = a[11]

			out[0] = a00; out[1] = a01; out[2] = a02; out[3] = a03
			out[4] = a10; out[5] = a11; out[6] = a12; out[7] = a13
			out[8] = a20; out[9] = a21; out[10] = a22; out[11] = a23

			out[12] = a00 * x + a10 * y + a20 * z + a[12]
			out[13] = a01 * x + a11 * y + a21 * z + a[13]
			out[14] = a02 * x + a12 * y + a22 * z + a[14]
			out[15] = a03 * x + a13 * y + a23 * z + a[15]
		}

		return out
	}

	// Inverts matrix `a` into `out` using cofactor expansion.
	// Returns null (leaving `out` untouched) if `a` is singular.
	static mat4_invert(out, a) {
		var a00 = a[0], a01 = a[1], a02 = a[2], a03 = a[3],
			a10 = a[4], a11 = a[5], a12 = a[6], a13 = a[7],
			a20 = a[8], a21 = a[9], a22 = a[10], a23 = a[11],
			a30 = a[12], a31 = a[13], a32 = a[14], a33 = a[15]

		var b00 = a00 * a11 - a01 * a10
		var b01 = a00 * a12 - a02 * a10
		var b02 = a00 * a13 - a03 * a10
		var b03 = a01 * a12 - a02 * a11
		var b04 = a01 * a13 - a03 * a11
		var b05 = a02 * a13 - a03 * a12
		var b06 = a20 * a31 - a21 * a30
		var b07 = a20 * a32 - a22 * a30
		var b08 = a20 * a33 - a23 * a30
		var b09 = a21 * a32 - a22 * a31
		var b10 = a21 * a33 - a23 * a31
		var b11 = a22 * a33 - a23 * a32

		// Calculate the determinant
		let det = b00 * b11 - b01 * b10 + b02 * b09 + b03 * b08 - b04 * b07 + b05 * b06

		if (!det) {
			return null
		}
		det = 1.0 / det

		out[0] = (a11 * b11 - a12 * b10 + a13 * b09) * det
		out[1] = (a02 * b10 - a01 * b11 - a03 * b09) * det
		out[2] = (a31 * b05 - a32 * b04 + a33 * b03) * det
		out[3] = (a22 * b04 - a21 * b05 - a23 * b03) * det
		out[4] = (a12 * b08 - a10 * b11 - a13 * b07) * det
		out[5] = (a00 * b11 - a02 * b08 + a03 * b07) * det
		out[6] = (a32 * b02 - a30 * b05 - a33 * b01) * det
		out[7] = (a20 * b05 - a22 * b02 + a23 * b01) * det
		out[8] = (a10 * b10 - a11 * b08 + a13 * b06) * det
		out[9] = (a01 * b08 - a00 * b10 - a03 * b06) * det
		out[10] = (a30 * b04 - a31 * b02 + a33 * b00) * det
		out[11] = (a21 * b02 - a20 * b04 - a23 * b00) * det
		out[12] = (a11 * b07 - a10 * b09 - a12 * b06) * det
		out[13] = (a00 * b09 - a01 * b07 + a02 * b06) * det
		out[14] = (a31 * b01 - a30 * b03 - a32 * b00) * det
		out[15] = (a20 * b03 - a21 * b01 + a22 * b00) * det

		return out
	}

	// Multiplies `ae` * `be` (both column-major 4x4) into `out`.
	static mat4_multiply(out, ae, be){
		var a11 = ae[0], a12 = ae[4], a13 = ae[8], a14 = ae[12]
		var a21 = ae[1], a22 = ae[5], a23 = ae[9], a24 = ae[13]
		var a31 = ae[2], a32 = ae[6], a33 = ae[10], a34 = ae[14]
		var a41 = ae[3], a42 = ae[7], a43 = ae[11], a44 = ae[15]

		var b11 = be[0], b12 = be[4], b13 = be[8], b14 = be[12]
		var b21 = be[1], b22 = be[5], b23 = be[9], b24 = be[13]
		var b31 = be[2], b32 = be[6], b33 = be[10], b34 = be[14]
		var b41 = be[3], b42 = be[7], b43 = be[11], b44 = be[15]

		out[0] = a11 * b11 + a12 * b21 + a13 * b31 + a14 * b41
		out[4] = a11 * b12 + a12 * b22 + a13 * b32 + a14 * b42
		out[8] = a11 * b13 + a12 * b23 + a13 * b33 + a14 * b43
		out[12] = a11 * b14 + a12 * b24 + a13 * b34 + a14 * b44

		out[1] = a21 * b11 + a22 * b21 + a23 * b31 + a24 * b41
		out[5] = a21 * b12 + a22 * b22 + a23 * b32 + a24 * b42
		out[9] = a21 * b13 + a22 * b23 + a23 * b33 + a24 * b43
		out[13] = a21 * b14 + a22 * b24 + a23 * b34 + a24 * b44

		out[2] = a31 * b11 + a32 * b21 + a33 * b31 + a34 * b41
		out[6] = a31 * b12 + a32 * b22 + a33 * b32 + a34 * b42
		out[10] = a31 * b13 + a32 * b23 + a33 * b33 + a34 * b43
		out[14] = a31 * b14 + a32 * b24 + a33 * b34 + a34 * b44

		out[3] = a41 * b11 + a42 * b21 + a43 * b31 + a44 * b41
		out[7] = a41 * b12 + a42 * b22 + a43 * b32 + a44 * b42
		out[11] = a41 * b13 + a42 * b23 + a43 * b33 + a44 * b43
		out[15] = a41 * b14 + a42 * b24 + a43 * b34 + a44 * b44

		return out
	}
}

// Degrees-to-radians conversion factor
MatrixMath.PI_OVER_180 = Math.PI / 180.0
/*
Quaternion wraps a vector of length 4 used as an orientation value.

Taken from https://github.com/googlevr/webvr-polyfill/blob/master/src/math-util.js which took it from Three.js
*/
export default class Quaternion{
	// Defaults to the identity orientation (0, 0, 0, 1)
	constructor(x=0, y=0, z=0, w=1){
		this.x = x
		this.y = y
		this.z = z
		this.w = w
	}

	// Sets all four components. Returns this for chaining.
	set(x, y, z, w){
		this.x = x
		this.y = y
		this.z = z
		this.w = w
		return this
	}

	// Returns the components as a plain array [x, y, z, w]
	toArray(){
		return [this.x, this.y, this.z, this.w]
	}

	// Copies the components of another quaternion. Returns this for chaining.
	copy(quaternion){
		this.x = quaternion.x
		this.y = quaternion.y
		this.z = quaternion.z
		this.w = quaternion.w
		return this
	}

	// Sets this quaternion from the rotation part of a column-major 4x4 matrix.
	setFromRotationMatrix(array16){
		// Taken from https://github.com/mrdoob/three.js/blob/dev/src/math/Quaternion.js
		// which took it from http://www.euclideanspace.com/maths/geometry/rotations/conversions/matrixToQuaternion/index.htm
		// assumes the upper 3x3 of array16 (column major) is a pure rotation matrix (i.e, unscaled)

		let m11 = array16[0], m12 = array16[4], m13 = array16[8],
			m21 = array16[1], m22 = array16[5], m23 = array16[9],
			m31 = array16[2], m32 = array16[6], m33 = array16[10]

		var trace = m11 + m22 + m33

		// Branch on the largest diagonal element for numerical stability
		if(trace > 0){
			var s = 0.5 / Math.sqrt(trace + 1.0)
			this.w = 0.25 / s
			this.x = (m32 - m23) * s
			this.y = (m13 - m31) * s
			this.z = (m21 - m12) * s
		} else if (m11 > m22 && m11 > m33){
			var s = 2.0 * Math.sqrt(1.0 + m11 - m22 - m33)
			this.w = (m32 - m23) / s
			this.x = 0.25 * s
			this.y = (m12 + m21) / s
			this.z = (m13 + m31) / s
		} else if (m22 > m33){
			var s = 2.0 * Math.sqrt(1.0 + m22 - m11 - m33)
			this.w = (m13 - m31) / s
			this.x = (m12 + m21) / s
			this.y = 0.25 * s
			this.z = (m23 + m32) / s
		} else{
			var s = 2.0 * Math.sqrt(1.0 + m33 - m11 - m22)
			this.w = (m21 - m12) / s
			this.x = (m13 + m31) / s
			this.y = (m23 + m32) / s
			this.z = 0.25 * s
		}
		return this
	}

	// Sets this quaternion from Euler angles (radians) in the given rotation order.
	setFromEuler(x, y, z, order='XYZ'){
		// http://www.mathworks.com/matlabcentral/fileexchange/
		// 20696-function-to-convert-between-dcm-euler-angles-quaternions-and-euler-vectors/
		// content/SpinCalc.m

		var cos = Math.cos
		var sin = Math.sin
		var c1 = cos(x / 2)
		var c2 = cos(y / 2)
		var c3 = cos(z / 2)
		var s1 = sin(x / 2)
		var s2 = sin(y / 2)
		var s3 = sin(z / 2)

		if (order === 'XYZ'){
			this.x = s1 * c2 * c3 + c1 * s2 * s3
			this.y = c1 * s2 * c3 - s1 * c2 * s3
			this.z = c1 * c2 * s3 + s1 * s2 * c3
			this.w = c1 * c2 * c3 - s1 * s2 * s3
		} else if (order === 'YXZ'){
			this.x = s1 * c2 * c3 + c1 * s2 * s3
			this.y = c1 * s2 * c3 - s1 * c2 * s3
			this.z = c1 * c2 * s3 - s1 * s2 * c3
			this.w = c1 * c2 * c3 + s1 * s2 * s3
		} else if (order === 'ZXY'){
			this.x = s1 * c2 * c3 - c1 * s2 * s3
			this.y = c1 * s2 * c3 + s1 * c2 * s3
			this.z = c1 * c2 * s3 + s1 * s2 * c3
			this.w = c1 * c2 * c3 - s1 * s2 * s3
		} else if (order === 'ZYX'){
			this.x = s1 * c2 * c3 - c1 * s2 * s3
			this.y = c1 * s2 * c3 + s1 * c2 * s3
			this.z = c1 * c2 * s3 - s1 * s2 * c3
			this.w = c1 * c2 * c3 + s1 * s2 * s3
		} else if (order === 'YZX'){
			this.x = s1 * c2 * c3 + c1 * s2 * s3
			this.y = c1 * s2 * c3 + s1 * c2 * s3
			this.z = c1 * c2 * s3 - s1 * s2 * c3
			this.w = c1 * c2 * c3 - s1 * s2 * s3
		} else if (order === 'XZY'){
			this.x = s1 * c2 * c3 - c1 * s2 * s3
			this.y = c1 * s2 * c3 - s1 * c2 * s3
			this.z = c1 * c2 * s3 + s1 * s2 * c3
			this.w = c1 * c2 * c3 + s1 * s2 * s3
		}
		// FIX: previously returned undefined; every other setter returns this, so
		// return this here too for chaining consistency (backward compatible).
		return this
	}

	// Sets this quaternion from a normalized axis and an angle in radians.
	setFromAxisAngle(axis, angle){
		// http://www.euclideanspace.com/maths/geometry/rotations/conversions/angleToQuaternion/index.htm
		// assumes axis is normalized
		var halfAngle = angle / 2
		var s = Math.sin(halfAngle)
		this.x = axis.x * s
		this.y = axis.y * s
		this.z = axis.z * s
		this.w = Math.cos(halfAngle)
		return this
	}

	// Multiplies this quaternion by q (this = this * q). Returns this.
	multiply(q){
		return this.multiplyQuaternions(this, q)
	}

	// Sets this quaternion to the product a * b. Returns this.
	multiplyQuaternions(a, b){
		// from http://www.euclideanspace.com/maths/algebra/realNormedAlgebra/quaternions/code/index.htm
		var qax = a.x, qay = a.y, qaz = a.z, qaw = a.w
		var qbx = b.x, qby = b.y, qbz = b.z, qbw = b.w
		this.x = qax * qbw + qaw * qbx + qay * qbz - qaz * qby
		this.y = qay * qbw + qaw * qby + qaz * qbx - qax * qbz
		this.z = qaz * qbw + qaw * qbz + qax * qby - qay * qbx
		this.w = qaw * qbw - qax * qbx - qay * qby - qaz * qbz
		return this
	}

	// Inverts this quaternion in place (conjugate, then normalize). Returns this.
	inverse(){
		this.x *= -1
		this.y *= -1
		this.z *= -1
		this.normalize()
		return this
	}

	// Normalizes to unit length; a zero quaternion becomes the identity. Returns this.
	normalize(){
		let l = Math.sqrt(this.x * this.x + this.y * this.y + this.z * this.z + this.w * this.w)
		if (l === 0){
			this.x = 0
			this.y = 0
			this.z = 0
			this.w = 1
		} else{
			l = 1 / l
			this.x = this.x * l
			this.y = this.y * l
			this.z = this.z * l
			this.w = this.w * l
		}
		return this
	}

	// Spherically interpolates this quaternion toward qb by factor t in [0, 1]. Returns this.
	slerp(qb, t){
		// http://www.euclideanspace.com/maths/algebra/realNormedAlgebra/quaternions/slerp/
		if(t === 0) return this
		if(t === 1) return this.copy(qb)

		var x = this.x, y = this.y, z = this.z, w = this.w
		let cosHalfTheta = w * qb.w + x * qb.x + y * qb.y + z * qb.z
		// Take the short way around: negate the target if the dot product is negative
		if (cosHalfTheta < 0){
			this.w = - qb.w
			this.x = - qb.x
			this.y = - qb.y
			this.z = - qb.z
			cosHalfTheta = - cosHalfTheta
		} else{
			this.copy(qb)
		}
		// Orientations are effectively identical; keep the original
		if (cosHalfTheta >= 1.0){
			this.w = w
			this.x = x
			this.y = y
			this.z = z
			return this
		}

		var halfTheta = Math.acos(cosHalfTheta)
		var sinHalfTheta = Math.sqrt(1.0 - cosHalfTheta * cosHalfTheta)
		// Angle too small for a stable slerp; fall back to averaging
		if (Math.abs(sinHalfTheta) < 0.001){
			this.w = 0.5 * (w + this.w)
			this.x = 0.5 * (x + this.x)
			this.y = 0.5 * (y + this.y)
			this.z = 0.5 * (z + this.z)

			return this
		}

		var ratioA = Math.sin((1 - t) * halfTheta) / sinHalfTheta
		var ratioB = Math.sin(t * halfTheta) / sinHalfTheta
		this.w = (w * ratioA + this.w * ratioB)
		this.x = (x * ratioA + this.x * ratioB)
		this.y = (y * ratioA + this.y * ratioB)
		this.z = (z * ratioA + this.z * ratioB)
		return this
	}
}
/*
Vector3 wraps a vector of length 3, often used as a position in 3D space.

Taken from https://github.com/googlevr/webvr-polyfill/blob/master/src/math-util.js which took it from Three.js
*/
export default class Vector3 {
	constructor(x=0, y=0, z=0){
		this.x = x
		this.y = y
		this.z = z
	}

	// Sets all three components. Returns this for chaining.
	set(x, y, z){
		this.x = x
		this.y = y
		this.z = z
		return this
	}

	// Copies another vector's components. Returns this for chaining.
	copy(v){
		this.x = v.x
		this.y = v.y
		this.z = v.z
		return this
	}

	// Returns the components as a plain array [x, y, z]
	toArray(){
		return [this.x, this.y, this.z]
	}

	// Euclidean length of this vector
	length(){
		return Math.sqrt(this.x * this.x + this.y * this.y + this.z * this.z)
	}

	// Adds the given components to this vector.
	// FIX: now returns this, consistent with the other mutators (previously returned undefined).
	add(x, y, z){
		this.x += x
		this.y += y
		this.z += z
		return this
	}

	// Scales this vector to unit length; a zero vector stays (0, 0, 0). Returns this.
	normalize(){
		var scalar = this.length()
		if (scalar !== 0){
			this.multiplyScalar(1 / scalar)
		} else {
			this.x = 0
			this.y = 0
			this.z = 0
		}
		return this
	}

	// Multiplies each component by scalar.
	// FIX: now returns this, consistent with the other mutators (previously returned undefined).
	multiplyScalar(scalar){
		this.x *= scalar
		this.y *= scalar
		this.z *= scalar
		return this
	}

	// Rotates this vector by quaternion q. Returns this.
	applyQuaternion(q){
		var x = this.x
		var y = this.y
		var z = this.z

		var qx = q.x
		var qy = q.y
		var qz = q.z
		var qw = q.w

		// calculate quat * vector
		var ix = qw * x + qy * z - qz * y
		var iy = qw * y + qz * x - qx * z
		var iz = qw * z + qx * y - qy * x
		var iw = - qx * x - qy * y - qz * z

		// calculate result * inverse quat
		this.x = ix * qw + iw * - qx + iy * - qz - iz * - qy
		this.y = iy * qw + iw * - qy + iz * - qx - ix * - qz
		this.z = iz * qw + iw * - qz + ix * - qy - iy * - qx

		return this
	}

	// Transforms this vector by a column-major 4x4 matrix, with perspective divide. Returns this.
	applyMatrix4(matrix){
		var x = this.x
		var y = this.y
		var z = this.z
		var w = 1 / (matrix[3] * x + matrix[7] * y + matrix[11] * z + matrix[15])
		this.x = (matrix[0] * x + matrix[4] * y + matrix[8] * z + matrix[12]) * w
		this.y = (matrix[1] * x + matrix[5] * y + matrix[9] * z + matrix[13]) * w
		this.z = (matrix[2] * x + matrix[6] * y + matrix[10] * z + matrix[14]) * w
		return this
	}

	// Dot product with another vector
	dot(v){
		return this.x * v.x + this.y * v.y + this.z * v.z
	}

	// Sets this vector to the cross product a x b. Returns this.
	crossVectors(a, b){
		var ax = a.x, ay = a.y, az = a.z
		var bx = b.x, by = b.y, bz = b.z
		this.x = ay * bz - az * by
		this.y = az * bx - ax * bz
		this.z = ax * by - ay * bx
		return this
	}
}
/*
Copyright (c) 2011, Daniel Guerrero
All rights reserved.

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
 * Redistributions of source code must retain the above copyright
   notice, this list of conditions and the following disclaimer.
 * Redistributions in binary form must reproduce the above copyright
   notice, this list of conditions and the following disclaimer in the
   documentation and/or other materials provided with the distribution.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL DANIEL GUERRERO BE LIABLE FOR ANY
DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/

/**
 * Uses the new array typed in javascript to binary base64 encode/decode
 * at the moment just decodes a binary base64 encoded
 * into either an ArrayBuffer (decodeArrayBuffer)
 * or into an Uint8Array (decode)
 *
 * References:
 * https://developer.mozilla.org/en/JavaScript_typed_arrays/ArrayBuffer
 * https://developer.mozilla.org/en/JavaScript_typed_arrays/Uint8Array
 */

export default class base64 {
	// Number of bytes that `input` would decode to (ignores padding)
	static decodeLength(input) {
		return (input.length/4) * 3;
	}

	/* Decodes into `buffer`, reallocating it if missing or wrongly sized; returns the ArrayBuffer */
	static decodeArrayBuffer(input, buffer) {
		var bytes = (input.length/4) * 3;
		if (!buffer || buffer.byteLength != bytes) {
			// replace the buffer with a new, appropriately sized one
			buffer = new ArrayBuffer(bytes);
		}
		this.decode(input, buffer);

		return buffer;
	}

	// Strips a single trailing '=' padding character, if present
	static removePaddingChars(input){
		var lkey = this._keyStr.indexOf(input.charAt(input.length - 1));
		if(lkey == 64){
			return input.substring(0,input.length - 1);
		}
		return input;
	}

	/* Decodes a base64 string into `arrayBuffer` if given, else into a new Uint8Array; returns the Uint8Array */
	static decode(input, arrayBuffer) {
		//get last chars to see if are valid
		input = this.removePaddingChars(input);
		input = this.removePaddingChars(input);

		var bytes = parseInt((input.length / 4) * 3, 10);

		var uarray;
		var chr1, chr2, chr3;
		var enc1, enc2, enc3, enc4;
		var i = 0;
		var j = 0;

		if (arrayBuffer)
			uarray = new Uint8Array(arrayBuffer);
		else
			uarray = new Uint8Array(bytes);

		// Drop any character that is not part of the base64 alphabet
		input = input.replace(/[^A-Za-z0-9\+\/\=]/g, "");

		for (i = 0; i < bytes; i += 3) {
			//get the 3 octects in 4 ascii chars
			enc1 = this._keyStr.indexOf(input.charAt(j++));
			enc2 = this._keyStr.indexOf(input.charAt(j++));
			enc3 = this._keyStr.indexOf(input.charAt(j++));
			enc4 = this._keyStr.indexOf(input.charAt(j++));

			chr1 = (enc1 << 2) | (enc2 >> 4);
			chr2 = ((enc2 & 15) << 4) | (enc3 >> 2);
			chr3 = ((enc3 & 3) << 6) | enc4;

			uarray[i] = chr1;
			if (enc3 != 64) uarray[i+1] = chr2;
			if (enc4 != 64) uarray[i+2] = chr3;
		}

		return uarray;
	}

	// pass in a typedArray, ArrayBuffer, or ImageData object; returns a base64 string
	static encode(buffer) {
		var base64 = ''
		var encodings = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/'

		var bytes = buffer; // assume it's a typedArrayBuffer

		if (buffer instanceof ArrayBuffer) {
			// FIX: was `new Uint8Array(arrayBuffer)` — `arrayBuffer` is undefined here,
			// so encoding an ArrayBuffer threw a ReferenceError.
			bytes = new Uint8Array(buffer)
		} else if (typeof ImageData !== 'undefined' && buffer instanceof ImageData) {
			// FIX: guard the ImageData check so this also runs where ImageData is not defined (e.g. workers/Node)
			bytes = buffer.data
		}

		// FIX: measure the byte view, not the original argument — an ArrayBuffer has no
		// `.length`, which made byteLength NaN and produced an empty result.
		var byteLength = bytes.length
		var byteRemainder = byteLength % 3
		var mainLength = byteLength - byteRemainder

		var a, b, c, d
		var chunk

		// Main loop deals with bytes in chunks of 3
		for (var i = 0; i < mainLength; i = i + 3) {
			// Combine the three bytes into a single integer
			chunk = (bytes[i] << 16) | (bytes[i + 1] << 8) | bytes[i + 2]

			// Use bitmasks to extract 6-bit segments from the triplet
			a = (chunk & 16515072) >> 18 // 16515072 = (2^6 - 1) << 18
			b = (chunk & 258048) >> 12   // 258048 = (2^6 - 1) << 12
			c = (chunk & 4032) >> 6      // 4032 = (2^6 - 1) << 6
			d = chunk & 63               // 63 = 2^6 - 1

			// Convert the raw binary segments to the appropriate ASCII encoding
			base64 += encodings[a] + encodings[b] + encodings[c] + encodings[d]
		}

		// Deal with the remaining bytes and padding
		if (byteRemainder == 1) {
			chunk = bytes[mainLength]

			a = (chunk & 252) >> 2 // 252 = (2^6 - 1) << 2

			// Set the 4 least significant bits to zero
			b = (chunk & 3) << 4 // 3 = 2^2 - 1

			base64 += encodings[a] + encodings[b] + '=='
		} else if (byteRemainder == 2) {
			chunk = (bytes[mainLength] << 8) | bytes[mainLength + 1]

			a = (chunk & 64512) >> 10 // 64512 = (2^6 - 1) << 10
			b = (chunk & 1008) >> 4   // 1008 = (2^6 - 1) << 4

			// Set the 2 least significant bits to zero
			c = (chunk & 15) << 2 // 15 = 2^4 - 1

			base64 += encodings[a] + encodings[b] + encodings[c] + '='
		}

		return base64
	}
}
base64._keyStr = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/="
/* Copyright (c) 2015, Brandon Jones, Colin MacKenzie IV.

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE. */

/**
 * Common utilities
 * @module glMatrix
 */

// Configuration Constants
export const EPSILON = 0.000001;
export let ARRAY_TYPE = (typeof Float32Array !== 'undefined') ? Float32Array : Array;
export const RANDOM = Math.random;

/**
 * Sets the type of array used when creating new vectors and matrices
 *
 * @param {Type} type Array type, such as Float32Array or Array
 */
export function setMatrixArrayType(type) {
	ARRAY_TYPE = type;
}

const degree = Math.PI / 180;

/**
 * Convert Degree To Radian
 *
 * @param {Number} a Angle in Degrees
 */
export function toRadian(a) {
	return a * degree;
}

/**
 * Tests whether or not the arguments have approximately the same value, within an absolute
 * or relative tolerance of glMatrix.EPSILON (an absolute tolerance is used for values less
 * than or equal to 1.0, and a relative tolerance is used for larger values)
 *
 * @param {Number} a The first number to test.
 * @param {Number} b The second number to test.
 * @returns {Boolean} True if the numbers are approximately equal, false otherwise.
 */
export function equals(a, b) {
	const scale = Math.max(1.0, Math.abs(a), Math.abs(b));
	return Math.abs(a - b) <= EPSILON * scale;
}
/*
VirtualReality is a Reality that is empty and waiting for fantastic CG scenes.
*/
export default class VirtualReality extends Reality {
	constructor(xr){
		// NOTE(review): the two boolean flags are passed positionally to Reality;
		// their meaning is defined by the Reality base class — confirm there.
		super(xr, 'Virtual', false, false)
	}

	/*
	Called when at least one active XRSession is using this Reality
	*/
	_start(parameters){
	}

	/*
	Called when no more active XRSessions are using this Reality
	*/
	_stop(){
	}

	/*
	Called by a session before it hands a new XRPresentationFrame to the app
	*/
	_handleNewFrame(){}

	/*
	Create an anchor hung in space; registers it by uid and returns the uid
	*/
	_addAnchor(anchor, display){
		this._anchors.set(anchor.uid, anchor)
		return anchor.uid
	}

	/*
	Create an anchor attached to a surface, as found by a ray
	normalized screen x and y are in range 0..1, with 0,0 at top left and 1,1 at bottom right
	Always resolves to null here: an empty virtual scene has no surfaces to hit.
	*/
	_findAnchor(normalizedScreenX, normalizedScreenY, display, options=null){
		return new Promise((resolve, reject) => {
			resolve(null)
		})
	}

	// Unregisters a previously added anchor by uid
	_removeAnchor(uid){
		this._anchors.delete(uid)
	}

	// No surfaces exist in the virtual scene, so hit tests always miss
	_hitTestNoAnchor(normalizedScreenX, normalizedScreenY, display){
		return null
	}

	// The virtual scene provides no light estimation
	_getHasLightEstimate(){
		return false;
	}

	/*
	Find an XRAnchorOffset that is at floor level below the current head pose
	returns a Promise that resolves either to an AnchorOffset or null if the floor level is unknown
	*/
	_findFloorAnchor(display, uid=null){
		// Copy the head model matrix for the current pose so we have it in the promise below
		const headModelMatrix = new Float32Array(display._headPose.poseModelMatrix)
		return new Promise((resolve, reject) => {
			// For now, just create an anchor at origin level. Probably want to use stage more intelligently
			headModelMatrix[13] = 0 // zero out the Y translation component
			// NOTE(review): XRCoordinateSystem, XRAnchor and XRAnchorOffset are used as
			// globals rather than imported — presumably installed by the polyfill; verify.
			const coordinateSystem = new XRCoordinateSystem(display, XRCoordinateSystem.TRACKER)
			coordinateSystem._relativeMatrix = headModelMatrix
			const anchor = new XRAnchor(coordinateSystem, uid)
			this._addAnchor(anchor, display)
			resolve(new XRAnchorOffset(anchor.uid))
		})
	}

	// Timestamps are passed through unchanged in the virtual reality
	_getTimeStamp(timestamp) {
		return timestamp
	}


}
/*
CoordinatesTest exercises XRCoordinateSystem.getTransformTo between a tracker-relative
coordinate system and a display's head-model coordinate system, under translation and
rotation of the head pose.
*/
export default class CoordinatesTest extends Test {
	testTransform(){
		let display1 = new MockXRDisplay()

		// Test that relative coordinate systems correctly provide transforms
		let relativeCoordinateSystem = new XRCoordinateSystem(display1, XRCoordinateSystem.TRACKER)
		let pose = MatrixMath.mat4_generateIdentity()
		pose[12] = 1 // translation (1, 2, 3)
		pose[13] = 2
		pose[14] = 3
		relativeCoordinateSystem._relativeMatrix = pose
		// NOTE(review): XRViewPose is referenced as a global and never imported in this
		// file — presumably installed by the polyfill; verify before running standalone.
		let r2hTransform = relativeCoordinateSystem.getTransformTo(display1._headModelCoordinateSystem)
		this.assertFloatArraysEqual(
			[1, 2 - XRViewPose.SITTING_EYE_HEIGHT, 3],
			[r2hTransform[12], r2hTransform[13], r2hTransform[14]]
		)

		// Test the transform is where we expect it
		let h2tTransform = display1._headModelCoordinateSystem.getTransformTo(display1._trackerCoordinateSystem)
		this.assertFloatArraysEqual(
			[0, XRViewPose.SITTING_EYE_HEIGHT, 0],
			[h2tTransform[12], h2tTransform[13], h2tTransform[14]]
		)

		// Offset the head and test the transform
		display1._headPose._position = [0, XRViewPose.SITTING_EYE_HEIGHT, 0.5]
		h2tTransform = display1._headModelCoordinateSystem.getTransformTo(display1._trackerCoordinateSystem)
		this.assertFloatArraysEqual(
			[0, XRViewPose.SITTING_EYE_HEIGHT, display1._headPose._position[2]],
			[h2tTransform[12], h2tTransform[13], h2tTransform[14]]
		)

		// Rotate the head and test the transform
		let quat1 = new Quaternion()
		quat1.setFromEuler(0, -Math.PI, 0) // half turn around Y
		display1._headPose._orientation = quat1.toArray()
		h2tTransform = display1._headModelCoordinateSystem.getTransformTo(display1._trackerCoordinateSystem)
		let trackerPosition = MatrixMath.mat4_get_position(new Float32Array(3), h2tTransform)
		this.assertEqual(trackerPosition[2], display1._headPose._position[2])
		quat1.inverse()
		let trackerOrientation = MatrixMath.mat4_get_rotation(new Float32Array(4), h2tTransform)
		this.assertFloatArraysEqual(quat1.toArray(), trackerOrientation)
	}
}

// Minimal stand-in for the XR system object expected by XRDisplay
class MockXR {

}

// Minimal stand-in for a Reality expected by XRDisplay
class MockReality {

}

// XRDisplay with mock collaborators so coordinate math can be tested in isolation
class MockXRDisplay extends XRDisplay {
	constructor(xr=null, displayName='Mock', isExternal=false, reality=null){
		super(xr ? xr : new MockXR(), displayName, isExternal, reality ? reality : new MockReality())
	}
}
/*
Test is a tiny base class for the polyfill's hand-rolled test harness:
subclasses define methods whose names start with 'test' and `run` invokes them.
Assertion helpers throw an Error on failure.
*/
export default class Test {
	// Runs every own prototype method whose name starts with 'test'
	run(){
		for(let name of Object.getOwnPropertyNames(Object.getPrototypeOf(this))){
			if(name.startsWith('test') && typeof this[name] === 'function'){
				this[name]()
			}
		}
	}

	// Passes if the items are loosely or strictly equal; throws otherwise
	assertEqual(item1, item2){
		if(item1 === item2) return true
		if(item1 == item2) return true
		throw new Error('Unequal? ' + item1 + " / " + item2)
	}

	/*
	Compares two numeric arrays (Array or Float32Array) element-wise, truncated
	to `decimalPrecision` decimal places. Throws on any mismatch in type, length,
	or truncated value.
	*/
	assertFloatArraysEqual(array1, array2, decimalPrecision=5){
		if(Array.isArray(array1) === false && array1 instanceof Float32Array === false) throw new Error('Not equal: ' + array1 + ' / ' + array2)
		if(Array.isArray(array2) === false && array2 instanceof Float32Array === false) throw new Error('Not equal: ' + array1 + ' / ' + array2)
		if(array1.length != array2.length) throw new Error('Not equal: ' + array1 + ' / ' + array2)
		// FIX: was `10^decimalPrecision`, which is bitwise XOR in JavaScript (10^5 === 15),
		// so the comparison precision was effectively arbitrary. Use exponentiation.
		const precisionMultiplier = Math.pow(10, decimalPrecision)
		for(let i=0; i < array1.length; i++){
			const a1 = Math.trunc((array1[i] * precisionMultiplier)) / precisionMultiplier
			const a2 = Math.trunc((array2[i] * precisionMultiplier)) / precisionMultiplier
			if(a1 !== a2) throw new Error('Not equal: ' + array1 + ' / ' + array2)
		}
	}
}
const path = require('path');
const fs = require('fs');

// FIX: was `WrapperPlugin = require(...)` — an implicit global assignment that
// throws in strict mode and leaks a global otherwise.
const WrapperPlugin = require('wrapper-webpack-plugin');

// Documents wrapped around the window-side bundle output
const headerDoc = fs.readFileSync('./dist-header.js', 'utf8');
const footerDoc = fs.readFileSync('./dist-footer.js', 'utf8');

// Window-side polyfill bundle: dist/webxr-polyfill.js, wrapped with the
// header/footer documents above.
var xrPolyfill = {
	entry: './polyfill/XRPolyfill.js',
	output: {
		filename: 'webxr-polyfill.js',
		path: path.resolve(__dirname, 'dist')
	},
	plugins: [
		new WrapperPlugin({
			header: headerDoc,
			footer: footerDoc
		})
	],
	module: {
		rules: [
			{
				test: /\.js$/,
				include: [
					path.resolve(__dirname, "polyfill"),
				],
				use: {
					loader: 'babel-loader',
					options: {
						presets: ['env']
					}
				}
			}
		]
	},
	resolve: {
		extensions: ['.js']
	}
};

// Worker-side bundle: dist/webxr-worker.js (no header/footer wrapper).
var xrVideoWorker = {
	entry: './polyfill/XRWorkerPolyfill.js',
	output: {
		filename: 'webxr-worker.js',
		path: path.resolve(__dirname, 'dist')
	},
	module: {
		rules: [
			{
				test: /\.js$/,
				include: [
					path.resolve(__dirname, "polyfill"),
				],
				use: {
					loader: 'babel-loader',
					options: {
						presets: ['env']
					}
				}
			}
		]
	},
	resolve: {
		extensions: ['.js']
	}
};

module.exports = [xrPolyfill, xrVideoWorker]