├── article ├── subthing.png ├── antialiasing.png ├── iterations.png ├── linear_fog.png ├── perspective.png ├── repetition.png ├── softshadows.png ├── twisted_box.png ├── window-to-uv.png ├── modulo_preview.png ├── ambientocclusion.png ├── raymarching-alg.png ├── subtract_modulo.png ├── subtract_modulo2.png ├── eq-window-to-world-ortho.png ├── eq-window-to-world-perspective.png └── raymarch-distance-fields.txt ├── src ├── fileio.h ├── fileio.cpp ├── opengl.h ├── opengl.cpp └── main.cpp ├── .gitignore ├── data ├── raymarch.vs ├── default.vs ├── default.fs └── raymarch.fs ├── README.md ├── raymarch-dev.md └── webgl ├── webgl-utils.js ├── raymarching.js ├── raymarching.html └── gl-matrix-min.js /article/subthing.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/lightbits/ray-march/HEAD/article/subthing.png -------------------------------------------------------------------------------- /src/fileio.h: -------------------------------------------------------------------------------- 1 | #include 2 | 3 | bool readFile(const char *filename, std::string &dest); -------------------------------------------------------------------------------- /article/antialiasing.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/lightbits/ray-march/HEAD/article/antialiasing.png -------------------------------------------------------------------------------- /article/iterations.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/lightbits/ray-march/HEAD/article/iterations.png -------------------------------------------------------------------------------- /article/linear_fog.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/lightbits/ray-march/HEAD/article/linear_fog.png 
-------------------------------------------------------------------------------- /article/perspective.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/lightbits/ray-march/HEAD/article/perspective.png -------------------------------------------------------------------------------- /article/repetition.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/lightbits/ray-march/HEAD/article/repetition.png -------------------------------------------------------------------------------- /article/softshadows.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/lightbits/ray-march/HEAD/article/softshadows.png -------------------------------------------------------------------------------- /article/twisted_box.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/lightbits/ray-march/HEAD/article/twisted_box.png -------------------------------------------------------------------------------- /article/window-to-uv.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/lightbits/ray-march/HEAD/article/window-to-uv.png -------------------------------------------------------------------------------- /article/modulo_preview.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/lightbits/ray-march/HEAD/article/modulo_preview.png -------------------------------------------------------------------------------- /article/ambientocclusion.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/lightbits/ray-march/HEAD/article/ambientocclusion.png -------------------------------------------------------------------------------- 
/article/raymarching-alg.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/lightbits/ray-march/HEAD/article/raymarching-alg.png -------------------------------------------------------------------------------- /article/subtract_modulo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/lightbits/ray-march/HEAD/article/subtract_modulo.png -------------------------------------------------------------------------------- /article/subtract_modulo2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/lightbits/ray-march/HEAD/article/subtract_modulo2.png -------------------------------------------------------------------------------- /article/eq-window-to-world-ortho.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/lightbits/ray-march/HEAD/article/eq-window-to-world-ortho.png -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | *.sdf 2 | *.sln 3 | *.vcxproj 4 | *.vcxproj.user 5 | *.vcxproj.filters 6 | *.opensdf 7 | *.v11.suo 8 | bin 9 | ipch 10 | obj -------------------------------------------------------------------------------- /article/eq-window-to-world-perspective.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/lightbits/ray-march/HEAD/article/eq-window-to-world-perspective.png -------------------------------------------------------------------------------- /data/raymarch.vs: -------------------------------------------------------------------------------- 1 | #version 140 2 | 3 | in vec2 position; 4 | smooth out vec2 uv; 5 | 6 | void main() 7 | { 8 | uv = position; 9 | gl_Position = vec4(position.xy, 
0.0f, 1.0f); 10 | } -------------------------------------------------------------------------------- /data/default.vs: -------------------------------------------------------------------------------- 1 | #version 140 2 | 3 | in vec3 position; 4 | in vec2 texel; 5 | in vec4 color; 6 | 7 | out vec2 vertTexel; 8 | out vec4 vertColor; 9 | 10 | void main() 11 | { 12 | vertTexel = texel; 13 | vertColor = color; 14 | gl_Position = vec4(position.xyz, 1.0f); 15 | } -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | Raymarching Distance Fields 2 | ========= 3 | Rendering procedural 3D geometry by raymarching through a distance field. 4 | See `raymarch-dev.md` for details. 5 | 6 | An interactive WebGL demo can be found [here](https://dl.dropboxusercontent.com/u/27844576/raymarch/raymarching.html) 7 | -------------------------------------------------------------------------------- /data/default.fs: -------------------------------------------------------------------------------- 1 | #version 140 2 | 3 | in vec2 vertTexel; 4 | in vec4 vertColor; 5 | 6 | out vec4 fragColor; 7 | 8 | uniform sampler2D tex; 9 | // uniform float texBlend; 10 | 11 | void main() 12 | { 13 | fragColor = texture(tex, vertTexel); 14 | // fragColor = mix(vertColor, texture(tex, vertTexel), texBlend); 15 | } -------------------------------------------------------------------------------- /src/fileio.cpp: -------------------------------------------------------------------------------- 1 | #include 2 | 3 | bool readFile(const char *filename, std::string &dest) 4 | { 5 | std::ifstream in(filename, std::ios::in | std::ios::binary); 6 | if(!in.is_open()) 7 | return false; 8 | 9 | if(in.good()) 10 | { 11 | in.seekg(0, std::ios::end); // Set get position to end 12 | dest.resize(in.tellg()); // Resize string to support enough bytes 13 | in.seekg(0, std::ios::beg); // Set get position to 
beginning 14 | in.read(&dest[0], dest.size()); // Read file to string 15 | in.close(); 16 | } 17 | 18 | return true; 19 | } -------------------------------------------------------------------------------- /src/opengl.h: -------------------------------------------------------------------------------- 1 | /* 2 | Provides functions for context creation and destruction, as well as OpenGL helper functions such 3 | as compiling shaders and GLSL programs. 4 | */ 5 | 6 | #include // OpenGL version 3.1, compatability profile 7 | #include // The C-style loading interface 8 | #include // Context 9 | #include // OpenGL mathematics 10 | #include // for value_ptr(matrix) 11 | #include 12 | 13 | const int OPENGL_VERSION_MAJOR = 3; 14 | const int OPENGL_VERSION_MINOR = 1; 15 | 16 | bool createContext(const char *title, int w, int h, int depthbits, int stencilbits, int fsaa, bool fullscreen); 17 | void destroyContext(); 18 | GLuint compileShader(GLenum shaderType, GLsizei count, const std::string &shaderSrc); 19 | GLuint createProgram(GLuint vertexShader, GLuint fragmentShader, GLuint geometryShader = 0); -------------------------------------------------------------------------------- /src/opengl.cpp: -------------------------------------------------------------------------------- 1 | #include 2 | #include 3 | #include 4 | 5 | bool createContext(const char *title, int width, int height, 6 | int depthbits, int stencilbits, int fsaa, bool fullscreen) 7 | { 8 | if(glfwInit() != GL_TRUE) 9 | return false; 10 | 11 | glfwOpenWindowHint(GLFW_OPENGL_PROFILE, 0); // 0 lets the system choose the profile 12 | glfwOpenWindowHint(GLFW_OPENGL_VERSION_MAJOR, OPENGL_VERSION_MAJOR); 13 | glfwOpenWindowHint(GLFW_OPENGL_VERSION_MINOR, OPENGL_VERSION_MINOR); 14 | glfwOpenWindowHint(GLFW_FSAA_SAMPLES, fsaa); 15 | glfwOpenWindowHint(GLFW_WINDOW_NO_RESIZE, GL_TRUE); 16 | 17 | if(glfwOpenWindow( 18 | width, height, 19 | 0, 0, 0, 0, // Default color bits 20 | depthbits, stencilbits, 21 | (fullscreen ? 
GLFW_FULLSCREEN : GLFW_WINDOW)) != GL_TRUE) 22 | return false; 23 | 24 | glfwSetWindowTitle(title); 25 | 26 | // Note that this function fails if no GL context has been made current 27 | if(glload::LoadFunctions() == glload::LS_LOAD_FAILED) 28 | return false; 29 | 30 | std::cout <<"Hello World!"<' + 71 | '' + 72 | '
' + 73 | '
' + msg + '
' + 74 | '
Message for getting a WebGL-capable browser
' + 84 | 'Click here to upgrade your browser.'; 85 | 86 | /** 87 | * Mesasge for need better hardware 88 | * @type {string} 89 | */ 90 | var OTHER_PROBLEM = '' + 91 | "It doesn't appear your computer can support WebGL.
" + 92 | 'Click here for more information.'; 93 | 94 | /** 95 | * Creates a webgl context. If creation fails it will 96 | * change the contents of the container of the 97 | * tag to an error message with the correct links for WebGL. 98 | * @param {Element} canvas. The canvas element to create a 99 | * context from. 100 | * @param {WebGLContextCreationAttirbutes} opt_attribs Any 101 | * creation attributes you want to pass in. 102 | * @param {function:(msg)} opt_onError An function to call 103 | * if there is an error during creation. 104 | * @return {WebGLRenderingContext} The created context. 105 | */ 106 | var setupWebGL = function(canvas, opt_attribs, opt_onError) { 107 | function handleCreationError(msg) { 108 | var container = canvas.parentNode; 109 | if (container) { 110 | var str = window.WebGLRenderingContext ? 111 | OTHER_PROBLEM : 112 | GET_A_WEBGL_BROWSER; 113 | if (msg) { 114 | str += "

Status: " + msg; 115 | } 116 | container.innerHTML = makeFailHTML(str); 117 | } 118 | }; 119 | 120 | opt_onError = opt_onError || handleCreationError; 121 | 122 | if (canvas.addEventListener) { 123 | canvas.addEventListener("webglcontextcreationerror", function(event) { 124 | opt_onError(event.statusMessage); 125 | }, false); 126 | } 127 | var context = create3DContext(canvas, opt_attribs); 128 | if (!context) { 129 | if (!window.WebGLRenderingContext) { 130 | opt_onError(""); 131 | } 132 | } 133 | return context; 134 | }; 135 | 136 | /** 137 | * Creates a webgl context. 138 | * @param {!Canvas} canvas The canvas tag to get context 139 | * from. If one is not passed in one will be created. 140 | * @return {!WebGLContext} The created context. 141 | */ 142 | var create3DContext = function(canvas, opt_attribs) { 143 | var names = ["webgl", "experimental-webgl", "webkit-3d", "moz-webgl"]; 144 | var context = null; 145 | for (var ii = 0; ii < names.length; ++ii) { 146 | try { 147 | context = canvas.getContext(names[ii], opt_attribs); 148 | } catch(e) {} 149 | if (context) { 150 | break; 151 | } 152 | } 153 | return context; 154 | } 155 | 156 | return { 157 | create3DContext: create3DContext, 158 | setupWebGL: setupWebGL 159 | }; 160 | }(); 161 | 162 | /** 163 | * Provides requestAnimationFrame in a cross browser way. 
164 | */ 165 | window.requestAnimFrame = (function() { 166 | return window.requestAnimationFrame || 167 | window.webkitRequestAnimationFrame || 168 | window.mozRequestAnimationFrame || 169 | window.oRequestAnimationFrame || 170 | window.msRequestAnimationFrame || 171 | function(/* function FrameRequestCallback */ callback, /* DOMElement Element */ element) { 172 | window.setTimeout(callback, 1000/60); 173 | }; 174 | })(); 175 | 176 | -------------------------------------------------------------------------------- /src/main.cpp: -------------------------------------------------------------------------------- 1 | #include 2 | #include 3 | #include 4 | using namespace glm; 5 | 6 | // Camera 7 | float g_theta = 0.0f; // Horizontal angle 8 | float g_phi = 0.0f; // Vertical angle 9 | vec3 g_camUp = normalize(vec3(0.0f, 1.0f, 0.0f)); // The upward-vector of the image plane 10 | vec3 g_camRight = normalize(vec3(1.0f, 0.0f, 0.0f)); // The right-vector of the image plane 11 | vec3 g_camForward = cross(g_camRight, g_camUp); // The forward-vector of the image plane 12 | vec3 g_eye = vec3(0.0f, 0.0f, -2.0f); // The eye position in the world 13 | float g_focalLength = 1.67f; // Distance between eye and image-plane 14 | float g_zNear = 0.0f; // Near plane distance from camera 15 | float g_zFar = 15.0f; // Far plane distance from camera 16 | float g_moveSpeed = 0.1f; 17 | 18 | // Raymarch parameters 19 | int g_rmSteps = 64; 20 | float g_rmEpsilon = 0.001f; 21 | 22 | // Scene 23 | vec4 g_skyColor = vec4(0.31f, 0.47f, 0.67f, 1.0f); 24 | vec4 g_ambient = vec4(0.15, 0.2f, 0.32f, 1.0f); 25 | vec3 g_light0Position = vec3(0.25f, 2.0f, 0.0f); 26 | vec4 g_light0Color = vec4(0.67f, 0.87f, 0.93f, 1.0f); 27 | 28 | const int g_windowWidth = 640; 29 | const int g_windowHeight = 480; 30 | float g_aspectRatio = g_windowWidth / (float)g_windowHeight; 31 | 32 | void updateCamera(float dt) 33 | { 34 | if(glfwGetKey('A')) 35 | g_eye -= g_camRight * g_moveSpeed; 36 | else if(glfwGetKey('D')) 37 | 
g_eye += g_camRight * g_moveSpeed; 38 | 39 | if(glfwGetKey('W')) 40 | g_eye += g_camForward * g_moveSpeed; 41 | else if(glfwGetKey('S')) 42 | g_eye -= g_camForward * g_moveSpeed; 43 | 44 | if(glfwGetKey(GLFW_KEY_SPACE)) 45 | g_eye += g_camUp * g_moveSpeed; 46 | else if(glfwGetKey(GLFW_KEY_LCTRL)) 47 | g_eye -= g_camUp * g_moveSpeed; 48 | 49 | if(glfwGetKey(GLFW_KEY_LEFT)) 50 | g_light0Position -= g_camRight * g_moveSpeed; 51 | else if(glfwGetKey(GLFW_KEY_RIGHT)) 52 | g_light0Position += g_camRight * g_moveSpeed; 53 | 54 | if(glfwGetKey(GLFW_KEY_UP)) 55 | g_light0Position += g_camUp * g_moveSpeed; 56 | else if(glfwGetKey(GLFW_KEY_DOWN)) 57 | g_light0Position -= g_camUp * g_moveSpeed; 58 | 59 | int mposX, mposY; 60 | glfwGetMousePos(&mposX, &mposY); 61 | glfwSetMousePos(g_windowWidth / 2, g_windowHeight / 2); 62 | int dx = mposX - g_windowWidth / 2; 63 | int dy = mposY - g_windowHeight / 2; 64 | g_theta += dx * 0.01f; 65 | static const float TWO_PI = 6.28318530718f; 66 | if(g_theta > TWO_PI) g_theta -= TWO_PI; 67 | else if(g_theta < 0.0f) g_theta += TWO_PI; 68 | 69 | g_phi += dy * 0.01f; 70 | if(g_phi > TWO_PI) g_phi -= TWO_PI; 71 | else if(g_phi < 0.0f) g_phi += TWO_PI; 72 | 73 | float sintheta = sinf(g_theta); 74 | float costheta = cosf(g_theta); 75 | float sinphi = sinf(g_phi); 76 | float cosphi = cosf(g_phi); 77 | g_camForward = vec3(cosphi * sintheta, -sinphi, cosphi * costheta); 78 | g_camRight = vec3(costheta, 0.0f, -sintheta); 79 | g_camUp = normalize(cross(g_camForward, g_camRight)); 80 | } 81 | 82 | std::ostream &operator<<(std::ostream &out, const vec3 &v) 83 | { 84 | out<<"("<WebGL. 
Your browser may not support it."; 7 | } 8 | 9 | try { gl = canvas.getContext("webgl"); } 10 | catch(e) { } 11 | 12 | if(gl == null) { 13 | try { gl = canvas.getContext("experimental-webgl"); } 14 | catch (e) { gl = null }; 15 | } 16 | } 17 | 18 | function getShader(gl, id) { 19 | var script = document.getElementById(id); 20 | if(!script) { 21 | return null; 22 | } 23 | 24 | var src = ""; 25 | var k = script.firstChild; 26 | while(k) { 27 | if(k.nodeType == 3) { 28 | src += k.textContent; 29 | } 30 | k = k.nextSibling; 31 | } 32 | 33 | var shader; 34 | if(script.type == "x-shader/x-fragment") { 35 | shader = gl.createShader(gl.FRAGMENT_SHADER); 36 | } else if(script.type == "x-shader/x-vertex") { 37 | shader = gl.createShader(gl.VERTEX_SHADER); 38 | } else { 39 | return null; 40 | } 41 | 42 | gl.shaderSource(shader, src); 43 | gl.compileShader(shader); 44 | 45 | if(!gl.getShaderParameter(shader, gl.COMPILE_STATUS)) { 46 | alert(gl.getShaderInfoLog(shader)); 47 | return null; 48 | } 49 | 50 | return shader; 51 | } 52 | 53 | var program; 54 | var vbo; 55 | 56 | function initProgram() { 57 | var fragmentShader = getShader(gl, "fshader"); 58 | var vertexShader = getShader(gl, "vshader"); 59 | 60 | program = gl.createProgram(); 61 | gl.attachShader(program, vertexShader); 62 | gl.attachShader(program, fragmentShader); 63 | gl.linkProgram(program); 64 | 65 | if(!gl.getProgramParameter(program, gl.LINK_STATUS)) { 66 | alert("Unable to initialize the shader program"); 67 | } 68 | 69 | gl.useProgram(program); 70 | 71 | var quad = [ 72 | -1.0, -1.0, 0.0, 73 | -1.0, 1.0, 0.0, 74 | 1.0, -1.0, 0.0, 75 | 1.0, 1.0, 0.0 76 | ]; 77 | 78 | vbo = gl.createBuffer(); 79 | gl.bindBuffer(gl.ARRAY_BUFFER, vbo); 80 | gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(quad), gl.STATIC_DRAW); 81 | 82 | program.positionAttrib = gl.getAttribLocation(program, "position"); 83 | gl.enableVertexAttribArray(program.positionAttrib); 84 | gl.vertexAttribPointer(program.positionAttrib, 3, gl.FLOAT, 
false, 0, 0); 85 | 86 | program.resolutionUniform = gl.getUniformLocation(program, "g_resolution"); 87 | program.camUpUniform = gl.getUniformLocation(program, "g_camUp"); 88 | program.camRightUniform = gl.getUniformLocation(program, "g_camRight"); 89 | program.camForwardUniform = gl.getUniformLocation(program, "g_camForward"); 90 | program.eyeUniform = gl.getUniformLocation(program, "g_eye"); 91 | program.light0PositionUniform = gl.getUniformLocation(program, "g_light0Position"); 92 | program.light0ColorUniform = gl.getUniformLocation(program, "g_light0Color"); 93 | } 94 | 95 | var g_eye; 96 | var g_camUp; 97 | var g_camRight; 98 | var g_camForward; 99 | var g_light0Position = [0, 4, 0]; 100 | var g_light0Color = [0.67, 0.87, 0.93, 1.0]; 101 | var horizontalAngle = 0.0; 102 | var verticalAngle = 0.0; 103 | 104 | function initCamera() { 105 | g_eye = [0, 1, -2]; 106 | g_camUp = [0, 1, 0]; 107 | g_camRight = [1, 0, 0]; 108 | g_camForward = vec3.create(); 109 | vec3.cross(g_camForward, g_camRight, g_camUp); 110 | vec3.normalize(g_camForward, g_camForward); 111 | } 112 | 113 | var mouseSpeedX = null; 114 | var mouseSpeedY = null; 115 | 116 | function handleMouseMove(event) { 117 | var mouseX = event.clientX; 118 | var mouseY = event.clientY; 119 | 120 | var rect = document.getElementById("glcanvas").getBoundingClientRect(); 121 | var dx = mouseX - (rect.left + rect.width / 2); 122 | var dy = mouseY - (rect.top + rect.height / 2); 123 | 124 | mouseSpeedX = dx * 0.00005; 125 | mouseSpeedY = dy * 0.00005; 126 | } 127 | 128 | var currentKeys = {}; 129 | 130 | function handleKeyDown(event) { 131 | currentKeys[event.keyCode] = true; 132 | } 133 | 134 | function handleKeyUp(event) { 135 | currentKeys[event.keyCode] = false; 136 | } 137 | 138 | function handleInput() { 139 | var moveSpeed = 0.05; 140 | if(currentKeys[87]) { // Forward 141 | g_eye[0] += g_camForward[0] * moveSpeed; 142 | g_eye[1] += g_camForward[1] * moveSpeed; 143 | g_eye[2] += g_camForward[2] * moveSpeed; 144 
| } else if(currentKeys[83]) { // Backward 145 | g_eye[0] -= g_camForward[0] * moveSpeed; 146 | g_eye[1] -= g_camForward[1] * moveSpeed; 147 | g_eye[2] -= g_camForward[2] * moveSpeed; 148 | } 149 | 150 | if(currentKeys[68]) { // Right 151 | g_eye[0] += g_camRight[0] * moveSpeed; 152 | g_eye[1] += g_camRight[1] * moveSpeed; 153 | g_eye[2] += g_camRight[2] * moveSpeed; 154 | } else if(currentKeys[65]) { // Left 155 | g_eye[0] -= g_camRight[0] * moveSpeed; 156 | g_eye[1] -= g_camRight[1] * moveSpeed; 157 | g_eye[2] -= g_camRight[2] * moveSpeed; 158 | } 159 | 160 | if(currentKeys[37]) { // Arrow left 161 | g_light0Position[0] -= g_camRight[0] * moveSpeed; 162 | g_light0Position[1] -= g_camRight[1] * moveSpeed; 163 | g_light0Position[2] -= g_camRight[2] * moveSpeed; 164 | } else if(currentKeys[39]) { // Arrow right 165 | g_light0Position[0] += g_camRight[0] * moveSpeed; 166 | g_light0Position[1] += g_camRight[1] * moveSpeed; 167 | g_light0Position[2] += g_camRight[2] * moveSpeed; 168 | } 169 | 170 | if(currentKeys[38]) { // Arrow up 171 | g_light0Position[0] += g_camUp[0] * moveSpeed; 172 | g_light0Position[1] += g_camUp[1] * moveSpeed; 173 | g_light0Position[2] += g_camUp[2] * moveSpeed; 174 | } else if(currentKeys[40]) { // Arrow down 175 | g_light0Position[0] -= g_camUp[0] * moveSpeed; 176 | g_light0Position[1] -= g_camUp[1] * moveSpeed; 177 | g_light0Position[2] -= g_camUp[2] * moveSpeed; 178 | } 179 | } 180 | 181 | function updateCamera() { 182 | horizontalAngle += mouseSpeedX; 183 | verticalAngle += mouseSpeedY; 184 | 185 | if(horizontalAngle > 2.0 * Math.PI) 186 | horizontalAngle -= 2.0 * Math.PI; 187 | else if(horizontalAngle < 0.0) 188 | horizontalAngle += 2.0 * Math.PI; 189 | 190 | if(verticalAngle > 2.0 * Math.PI) 191 | verticalAngle -= 2.0 * Math.PI; 192 | else if(verticalAngle < 0.0) 193 | verticalAngle += 2.0 * Math.PI; 194 | 195 | // Update camera vectors 196 | var sintheta = Math.sin(horizontalAngle); 197 | var costheta = Math.cos(horizontalAngle); 198 | 
var sinphi = Math.sin(verticalAngle); 199 | var cosphi = Math.cos(verticalAngle); 200 | g_camForward = [cosphi * sintheta, -sinphi, cosphi * costheta]; 201 | g_camRight = [costheta, 0.0, -sintheta]; 202 | vec3.cross(g_camUp, g_camForward, g_camRight); 203 | vec3.normalize(g_camUp, g_camUp); 204 | } 205 | 206 | function updateUniforms() { 207 | gl.uniform2f(program.resolutionUniform, gl.viewportWidth, gl.viewportHeight); 208 | gl.uniform3f(program.camUpUniform, g_camUp[0], g_camUp[1], g_camUp[2]); 209 | gl.uniform3f(program.camRightUniform, g_camRight[0], g_camRight[1], g_camRight[2]); 210 | gl.uniform3f(program.camForwardUniform, g_camForward[0], g_camForward[1], g_camForward[2]); 211 | gl.uniform3f(program.eyeUniform, g_eye[0], g_eye[1], g_eye[2]); 212 | gl.uniform3f(program.light0PositionUniform, g_light0Position[0], g_light0Position[1], g_light0Position[2]); 213 | gl.uniform4f(program.light0ColorUniform, g_light0Color[0], g_light0Color[1], g_light0Color[2], g_light0Color[3]); 214 | } 215 | 216 | function render(canvas) { 217 | gl.clear(gl.COLOR_BUFFER_BIT); 218 | 219 | handleInput(); 220 | updateCamera(); 221 | updateUniforms(); 222 | 223 | gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4); 224 | } 225 | 226 | function tick() { 227 | requestAnimFrame(tick); 228 | render(); 229 | } 230 | 231 | function start() { 232 | var canvas = document.getElementById("glcanvas"); 233 | 234 | initGL(canvas); 235 | 236 | if(gl) 237 | { 238 | gl.viewportWidth = canvas.width; 239 | gl.viewportHeight = canvas.height; 240 | 241 | initProgram(); 242 | initCamera(); 243 | 244 | gl.viewport(0, 0, gl.viewportWidth, gl.viewportHeight); 245 | gl.clearColor(0.3, 0.3, 0.3, 1); 246 | 247 | document.onmousemove = handleMouseMove; 248 | document.onkeydown = handleKeyDown; 249 | document.onkeyup = handleKeyUp; 250 | 251 | tick(); 252 | } 253 | } -------------------------------------------------------------------------------- /data/raymarch.fs: 
-------------------------------------------------------------------------------- 1 | #version 140 2 | 3 | // A quad is uploaded extending from -1 to 1 on both axes 4 | // The uv variable interpolates between the quad vertices 5 | smooth in vec2 uv; 6 | 7 | out vec4 outColor; 8 | 9 | // Camera 10 | uniform vec2 g_resolution; 11 | uniform vec3 g_camUp; 12 | uniform vec3 g_camRight; 13 | uniform vec3 g_camForward; 14 | uniform vec3 g_eye; 15 | uniform float g_focalLength; 16 | uniform float g_zNear; 17 | uniform float g_zFar; 18 | uniform float g_aspectRatio; 19 | 20 | // Raymarch parameters 21 | uniform int g_rmSteps; // Max steps 22 | uniform float g_rmEpsilon; // Distance threshold 23 | 24 | // Scene 25 | uniform vec4 g_skyColor; 26 | uniform vec4 g_ambient; 27 | uniform vec3 g_light0Position; 28 | uniform vec4 g_light0Color; 29 | 30 | // Rotates a point t radians around the y-axis 31 | vec3 rotateY(vec3 v, float t) 32 | { 33 | float cost = cos(t); float sint = sin(t); 34 | return vec3(v.x * cost + v.z * sint, v.y, -v.x * sint + v.z * cost); 35 | } 36 | 37 | // Rotates a point t radians around the x-axis 38 | vec3 rotateX(vec3 v, float t) 39 | { 40 | float cost = cos(t); float sint = sin(t); 41 | return vec3(v.x, v.y * cost - v.z * sint, v.y * sint + v.z * cost); 42 | } 43 | 44 | // Maps x from the range [minX, maxX] to the range [minY, maxY] 45 | // The function does not clamp the result, as it may be useful 46 | float mapTo(float x, float minX, float maxX, float minY, float maxY) 47 | { 48 | float a = (maxY - minY) / (maxX - minX); 49 | float b = minY - a * minX; 50 | return a * x + b; 51 | } 52 | 53 | // Returns the signed distance to a sphere at the origin 54 | float sdSphere(vec3 p, float radius) 55 | { 56 | return length(p) - radius; 57 | } 58 | 59 | // Returns the unsigned distance estimate to a box at the origin of the given size 60 | float udBox(vec3 p, vec3 size) 61 | { 62 | return length(max(abs(p) - size, vec3(0.0f))); 63 | } 64 | 65 | // Returns the 
signed distance estimate to a box at the origin of the given size 66 | float sdBox(vec3 p, vec3 size) 67 | { 68 | vec3 d = abs(p) - size; 69 | return min(max(d.x, max(d.y, d.z)), 0.0f) + udBox(p, size); 70 | } 71 | 72 | // Subtracts d1 from d0, assuming d1 is a signed distance 73 | float opSubtract(float d0, float d1) 74 | { 75 | return max(d0, -d1); 76 | } 77 | 78 | // Defines the distance field for the scene 79 | float distScene(vec3 p) 80 | { 81 | p.xz = mod(p.xz, 1.0f) - vec2(0.5f); 82 | return sdBox(p - vec3(0.0f, -0.25f, 0.0f), vec3(0.25f)); 83 | 84 | // p = rotateY(p, 0.5f * p.y); 85 | // float d1 = sdBox(p - vec3(0, 0.5, 0), vec3(0.5, 1.0, 0.5)); 86 | // float d2 = sdBox(p, vec3(2.0, 0.3, 0.25)); 87 | // return opSubtract(d1, d2); 88 | } 89 | 90 | // Finds the closest intersecting object along the ray at origin ro, and direction rd. 91 | // i: step count 92 | // t: distance traveled by the ray 93 | void raymarch(vec3 ro, vec3 rd, out int i, out float t) 94 | { 95 | t = 0.0f; 96 | for(i = 0; i < g_rmSteps; ++i) 97 | { 98 | float dist = distScene(ro + rd * t); 99 | 100 | // We make epsilon proportional to t so that we drop accuracy the further into the scene we get 101 | // We also drop the ray as soon as it leaves the clipping volume as defined by g_zFar 102 | if(dist < g_rmEpsilon * t * 2.0f || t > g_zFar) 103 | break; 104 | t += dist; 105 | } 106 | } 107 | 108 | // Returns a value between [0, 1] depending on how visible p0 is from p1 109 | // k: denotes the soft-shadow strength 110 | // See http://www.iquilezles.org/www/articles/rmshadows/rmshadows.htm 111 | float getVisibility(vec3 p0, vec3 p1, float k) 112 | { 113 | vec3 rd = normalize(p1 - p0); 114 | float t = 10.0f * g_rmEpsilon; 115 | float maxt = length(p1 - p0); 116 | float f = 1.0f; 117 | while(t < maxt) 118 | { 119 | float d = distScene(p0 + rd * t); 120 | 121 | // A surface was hit before we reached p1 122 | if(d < g_rmEpsilon) 123 | return 0.0f; 124 | 125 | // Penumbra factor 126 | f = min(f, k 
* d / t); 127 | 128 | t += d; 129 | } 130 | 131 | return f; 132 | } 133 | 134 | // Approximates the (normalized) gradient of the distance function at the given point. 135 | // If p is near a surface, the function will approximate the surface normal. 136 | vec3 getNormal(vec3 p) 137 | { 138 | float h = 0.0001f; 139 | 140 | return normalize(vec3( 141 | distScene(p + vec3(h, 0, 0)) - distScene(p - vec3(h, 0, 0)), 142 | distScene(p + vec3(0, h, 0)) - distScene(p - vec3(0, h, 0)), 143 | distScene(p + vec3(0, 0, h)) - distScene(p - vec3(0, 0, h)))); 144 | } 145 | 146 | // Calculate the light intensity with soft shadows 147 | // p: point on surface 148 | // lightPos: position of the light source 149 | // lightColor: the radiance of the light source 150 | // returns: the color of the point 151 | vec4 getShading(vec3 p, vec3 normal, vec3 lightPos, vec4 lightColor) 152 | { 153 | float intensity = 0.0f; 154 | float vis = getVisibility(p, lightPos, 16); 155 | if(vis > 0.0f) 156 | { 157 | vec3 lightDirection = normalize(lightPos - p); 158 | intensity = clamp(dot(normal, lightDirection), 0, 1) * vis; 159 | } 160 | 161 | return lightColor * intensity + g_ambient * (1.0f - intensity); 162 | } 163 | 164 | // Compute an ambient occlusion factor 165 | // p: point on surface 166 | // n: normal of the surface at p 167 | // returns: a value clamped to [0, 1], where 0 means there were no other surfaces around the point, 168 | // and 1 means that the point is occluded by other surfaces. 
169 | float ambientOcclusion(vec3 p, vec3 n) 170 | { 171 | float stepSize = 0.01f; 172 | float t = stepSize; 173 | float oc = 0.0f; 174 | for(int i = 0; i < 10; ++i) 175 | { 176 | float d = distScene(p + n * t); 177 | oc += t - d; // Actual distance to surface - distance field value 178 | t += stepSize; 179 | } 180 | 181 | return clamp(oc, 0, 1); 182 | } 183 | 184 | // Create a checkboard texture 185 | vec4 getFloorTexture(vec3 p) 186 | { 187 | vec2 m = mod(p.xz, 2.0f) - vec2(1.0f); 188 | return m.x * m.y > 0.0f ? vec4(0.1f) : vec4(1.0f); 189 | } 190 | 191 | // To improve performance we raytrace the floor 192 | // n: floor normal 193 | // o: floor position 194 | float raytraceFloor(vec3 ro, vec3 rd, vec3 n, vec3 o) 195 | { 196 | return dot(o - ro, n) / dot(rd, n); 197 | } 198 | 199 | vec4 computeColor(vec3 ro, vec3 rd) 200 | { 201 | float t0; 202 | int i; 203 | raymarch(ro, rd, i, t0); 204 | 205 | vec3 floorNormal = vec3(0, 1, 0); 206 | float t1 = raytraceFloor(ro, rd, floorNormal, vec3(0, -0.5, 0)); 207 | 208 | vec3 p; // Surface point 209 | vec3 normal; // Surface normal 210 | float t; // Distance traveled by ray from eye 211 | vec4 texture = vec4(1.0); // Surface texture 212 | 213 | if(t1 < t0 && t1 >= g_zNear && t1 <= g_zFar) // The floor was closest 214 | { 215 | t = t1; 216 | p = ro + rd * t1; 217 | normal = floorNormal; 218 | texture = getFloorTexture(p); 219 | } 220 | else if(i < g_rmSteps && t0 >= g_zNear && t0 <= g_zFar) // Raymarching hit a surface 221 | { 222 | t = t0; 223 | p = ro + rd * t0; 224 | normal = getNormal(p); 225 | } 226 | else 227 | { 228 | return g_skyColor; 229 | } 230 | 231 | vec4 color; 232 | float z = mapTo(t, g_zNear, g_zFar, 1, 0); // Map depth to [0, 1] 233 | 234 | // Color based on depth 235 | //color = vec4(1.0f) * z; 236 | 237 | // Diffuse lighting 238 | color = texture * ( 239 | getShading(p, normal, g_light0Position, g_light0Color) + 240 | getShading(p, normal, vec3(2.0f, 1.0f, 0.0f), vec4(1.0f, 0.5f, 0.5f, 1.0f)) 241 | ) / 
2.0f; 242 | 243 | // Color based on surface normal 244 | //color = vec4(abs(normal), 1.0); 245 | 246 | // Blend in ambient occlusion factor 247 | float ao = ambientOcclusion(p, normal); 248 | color = color * (1.0f - ao); 249 | 250 | // Blend the background color based on the distance from the camera 251 | float zSqrd = z * z; 252 | color = mix(g_skyColor, color, zSqrd * (3.0f - 2.0f * z)); // Fog 253 | 254 | return color; 255 | } 256 | 257 | void main() 258 | { 259 | vec2 hps = vec2(1.0) / (g_resolution * 2.0); 260 | vec3 ro = g_eye; 261 | vec3 rd = normalize(g_camForward * g_focalLength + g_camRight * uv.x * g_aspectRatio + g_camUp * uv.y); 262 | 263 | vec4 color = computeColor(ro, rd); 264 | 265 | // 4xAA 266 | //vec3 rd0 = normalize(g_camForward * g_focalLength + g_camRight * (uv.x - hps.x) * g_aspectRatio + g_camUp * uv.y); 267 | //vec3 rd1 = normalize(g_camForward * g_focalLength + g_camRight * (uv.x + hps.x) * g_aspectRatio + g_camUp * uv.y); 268 | //vec3 rd2 = normalize(g_camForward * g_focalLength + g_camRight * uv.x * g_aspectRatio + g_camUp * (uv.y - hps.y)); 269 | //vec3 rd3 = normalize(g_camForward * g_focalLength + g_camRight * uv.x * g_aspectRatio + g_camUp * (uv.y + hps.y)); 270 | 271 | //vec4 color = (computeColor(ro, rd0) + computeColor(ro, rd1) + computeColor(ro, rd2) + computeColor(ro, rd3)) / 4.0; 272 | 273 | outColor = vec4(color.xyz, 1.0f); 274 | } -------------------------------------------------------------------------------- /webgl/raymarching.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | Raymarching Distance Fields 5 | 6 | 7 | 8 | 9 | 10 | 11 | 24 | 25 | 318 | 333 | 334 | 335 |
336 | raymarching.js - Real-time raymarching with distance fields 337 |

Move around with WASD, move the blue light with the arrow keys

338 | 339 |
340 | Your browser doesn't appear to support the HTML5 canvas :( 341 |
342 | 343 | 344 | 345 | -------------------------------------------------------------------------------- /article/raymarch-distance-fields.txt: -------------------------------------------------------------------------------- 1 | Raymarching Distance Fields 2 | =========================== 3 | 4 | Hi. In this article I will tell you about a rendering technique known as **raymarching with distance fields**, capable of producing highly detailed images in real-time with very simple code. 5 | 6 | ![Inigo Quilez' slisesix](http://4.bp.blogspot.com/-Zjev0r-Eoms/UeQd_wcDInI/AAAAAAAAAGE/Py6rDNjTlCY/s1600/gfx00.jpg) 7 | 8 | **slisesix by Inigo Quilez** 9 | 10 | Before reading on, perhaps you would like to try the [**interactive WebGL demo?**](http://lightbits.github.io/toys/raymarching/raymarching.html) 11 | 12 | Content 13 | ---------- 14 | * Introduction 15 | * The raymarching algorithm 16 | * Rendering techniques 17 | * Iteration based coloring 18 | * Bounding volume 19 | * Fog 20 | * Anti-aliasing 21 | * Lighting 22 | * Shadows 23 | * Ambient occlusion 24 | * Reflections 25 | * Distortion and repetition 26 | * Other 27 | * Conclusion 28 | * Code 29 | * References 30 | 31 | Introduction 32 | ------------ 33 | Raymarching is a 3d-rendering technique, praised by programming-enthusiasts for both its simplicity and speed. It has been used extensively in the demoscene, producing low-size executables and amazing visuals. The most frontstanding figure behind its popularity, is [Inigo Quilez](http://iquilezles.org/), promoting it with his presentation at nvscene: [*Rendering Worlds With Two Triangles*](http://www.iquilezles.org/www/material/nvscene2008/nvscene2008.htm). 34 | 35 | The idea is this: Say you have some surface in space. You don't have an explicit formula for it, nor a set of triangles describing it. But you can find out how far away it is, from any point. How would you render this surface? 
36 | 37 | First of all, we need to find out which points lie on the surface, and what pixels they correspond to. To do this we use a technique known as [ray-casting](http://en.wikipedia.org/wiki/Ray_casting). 38 | 39 | Imagine you and your monitor being placed in this virtual world. Your *eye* will be looking at a rectangle (your monitor), which we shall call the image plane. Ray-casting works by shooting rays from the eye through each pixel on the image plane, and finding the closest object blocking the path of the ray. Once we hit an object, we can compute the color and shading of the corresponding pixel. If the ray does not hit anything, the pixel is colored with some *sky color*. 40 | 41 | There are several ways in which we can calculate the intersection; for example, we can solve for it analytically. A raymarcher, however, looks for an approximate solution, by marching along the ray in steps until it finds an intersection. By controlling the step size using a *distance field*, we can reach blazing speeds, even on a regular laptop GPU. 42 | 43 | The raymarching algorithm 44 | ------------------------- 45 | In traditional raytracing, a scene is often described by a set of triangles or spheres, making up a mesh. Using some spatial acceleration structure, we can quickly solve for the exact intersections between the rays and the objects. 46 | 47 | With raymarching however, we allow for some leeway in the intersection, and accept it when a ray is *close enough* to a surface. This is done by marching along the ray in steps, and checking whether or not the surface is within a given threshold. We can set a limit on the number of steps to prevent marching into oblivion.
In code the algorithm looks like this: 48 | 49 | bool raymarch(vec3 rayOrigin, vec3 rayDirection) { 50 | float t = 0.0f; 51 | for(int i = 0; i < maxSteps; ++i) { 52 | float d = sceneDistance(rayOrigin + rayDirection * t); 53 | if(d < epsilon) { 54 | // We hit a surface at rayOrigin + rayDirection * t 55 | return true; 56 | } 57 | t += d; 58 | } 59 | return false; 60 | } 61 | 62 | But this can be slow if the step size is small, and inaccurate if the step size is large. So we speed things up by implementing a variable step size, and that is where **distance fields** come in. 63 | 64 | The basic idea is to make sure every surface in our scene is given by a distance estimator (DE), which returns the distance to it from a point p. This way, we can find the distance to the closest surface in the scene, and know that we can step *this far* without overshooting. 65 | 66 | ![Raymarching variable step](http://3.bp.blogspot.com/-x8E4QGbdsTs/UeL7DODAoyI/AAAAAAAAAEo/GfHOsYjs4Bk/s1600/raymarching-alg.png) 67 | 68 | In the figure above the distance field is evaluated at various points along the ray. At the first point (the eye) there is quite a large distance to the closest surface, so we step that far to the next point. This continues until we get close enough to say we hit the surface. 69 | 70 | #### Distance estimators 71 | Consider a sphere centered at the origin with radius *r*. The distance from a point *p* to the sphere is given by: 72 | 73 | float distSphere(vec3 p, float radius) { 74 | return length(p) - radius; 75 | } 76 | 77 | This function gives us **signed distance**, because the distance is negative or positive depending on whether we are inside or outside the sphere. In the later sections we will see that this is important in order to compute the surface normal. 78 | 79 | Coming up with your own DE's can be difficult, so feel free to take a look at [this page](http://www.iquilezles.org/www/articles/distfunctions/distfunctions.htm), by Inigo Quilez, for a list of distance functions.
A useful trick though is to take advantage of symmetry. For example, a box at the origin can be split into the 8 octants, but we do not need to handle each region on its own. Instead we can take the absolute value of the point, and then compute the distance. 80 | 81 | #### The distance field 82 | Once we have the distance to each surface, the scene can be described by a function returning the minimum of them, i.e.: 83 | 84 | float distanceField(vec3 p) { 85 | float d1 = sphere_of_radius_one(p) 86 | float d2 = box_one_unit_to_left(p) 87 | return min(d1, d2); 88 | } 89 | 90 | We expect that this function provides us a *lower-bound* on the closest object. It is therefore important that the distance estimators do not under-estimate the distance, as we might risk overshooting. 91 | 92 | Various operations can be performed on the distance fields. For example, the union of two distance fields is the minimum, the intersection is maximum, and the complement is the negated distance (assuming it is signed). This is described in more detail [at the above mentioned page](http://www.iquilezles.org/www/articles/distfunctions/distfunctions.htm). 93 | 94 | A simple scene 95 | -------------- 96 | It's time to put the theory to the test. But how do we go from pixels in the image, to points in our virtual world? 97 | 98 | ![Window to image plane](http://3.bp.blogspot.com/-pooioOCAaf8/UeL7Ebf1ImI/AAAAAAAAAE8/YjgJAg5Rdc4/s1600/window-to-uv.png) 99 | 100 | Let's say that our window has a position and orientation in the world, the *image plane*. For the sake of simplicity, we scale it down to a rectangle of width and height 2 units. We define a pair of coordinates, u and v, both going from -1 to 1, representing the corners of the rectangle. This way, the top-left corner of the window (0, 0), becomes (-1, 1) in uv-coordinates. While the bottom-right corner becomes (1, -1). 101 | 102 | We decide the orientation of the rectangle by defining an **up-vector** and a **right-vector**. 
A point on the image plane in world-coordinate is then: 103 | 104 | ![Eq: Window to world](http://4.bp.blogspot.com/-Sa3_Pj4NQFA/UeL8tdyQpRI/AAAAAAAAAFc/dziCfBpgNPU/s1600/eq-window-to-world-ortho.png) 105 | 106 | For this scene, let's say that we fire rays perpendicular out of the image plane. If our up-vector is along the y-axis, the right-vector is along the x-axis, the **forward-vector** is either down the positive or negative z-axis. The former can be found by crossing the right vector with the up vector. 107 | 108 | Our scene is now everything from -1 to 1 on the x- and y-axis, looking down the positive z-axis. We position the eye at [0, 0, -1], and have a single distance function: a sphere of radius 0.5. If the raymarch function hits the sphere, we color the pixel white, else we color it black. The code for this fits nicely in a shader: 109 | 110 | void main() 111 | { 112 | vec3 eye = vec3(0, 0, -1); 113 | vec3 up = vec3(0, 1, 0); 114 | vec3 right = vec3(1, 0, 0); 115 | 116 | float u = gl_FragCoord.x * 2.0 / g_resolution.x - 1.0; 117 | float v = gl_FragCoord.y * 2.0 / g_resolution.y - 1.0; 118 | vec3 ro = right * u + up * v; 119 | vec3 rd = normalize(cross(right, up)); 120 | 121 | vec4 color = vec4(0.0); // Sky color 122 | 123 | float t = 0.0; 124 | const int maxSteps = 32; 125 | for(int i = 0; i < maxSteps; ++i) 126 | { 127 | vec3 p = ro + rd * t; 128 | float d = length(p) - 0.5; // Distance to sphere of radius 0.5 129 | if(d < g_rmEpsilon) 130 | { 131 | color = vec4(1.0); // Sphere color 132 | break; 133 | } 134 | 135 | t += d; 136 | } 137 | 138 | return color; 139 | } 140 | 141 | I believe it's much more exciting if you do this yourself, so I'll let you render this on your own ;) 142 | 143 | Perspective 144 | ---------- 145 | Notice that in the first example, the ray direction was equal to the forward vector, causing every ray to be cast perpendicular to the image plane. This is actually how orthographic projection works, and is not very three-dee-y. 
What we want is an illusion of depth. To do this, we can simulate a pinhole camera by positioning the eye *behind* the image plane, like so: 146 | 147 | ![Perspective model](http://2.bp.blogspot.com/-AuRSuWpC1WU/UeL7CcQLmYI/AAAAAAAAAEM/6SFX41PNaTQ/s1600/perspective.png) 148 | 149 | The distance between the eye and the image plane is analogous to the focal length of a camera lens and focal point. The best part is, this does not change the behaviour of the rays, but merely what direction we cast them! 150 | 151 | A point on the image plane is now given by: 152 | 153 | ![Eq: Window to image plane perspective](http://4.bp.blogspot.com/-bq-1NmvM1Uo/UeL8tSMWtaI/AAAAAAAAAFg/eiyyZFOApKs/s1600/eq-window-to-world-perspective.png) 154 | 155 | where *f* is the distance between the eye and the image plane, along the forward vector. 156 | 157 | The ray direction can now be found by taking the difference between the image plane position and the eye. 158 | 159 | Rendering techniques 160 | --------------------------- 161 | A simple black or white image is not too exciting. A lot of these techniques can be found both in the [presentation](http://www.iquilezles.org/www/material/nvscene2008/nvscene2008.htm) mentioned in the introduction, and various pages in [this thread](http://pouet.net/topic.php?which=7931&page=1&x=3&y=14). 162 | 163 | #### Iteration count 164 | A quick way to spice things up is to let the color depend on the ray step count. That is, how many steps the raymarch function performed before bailing out or hitting a surface. This will allow you to see where the most intensive portions of your image are. 165 | 166 | ![Iteration based color](http://4.bp.blogspot.com/-uoHgx5pgwv8/UeL7CD1z31I/AAAAAAAAAD8/cgHwGfx_GPg/s1600/iterations.png) 167 | 168 | **Iteration-based coloring. The maximum step count was set to 64.** 169 | 170 | #### Bounding volume 171 | We can speed up the raymarching if we limit the rays to a certain bounding volume.
172 | 173 | Remember that in the raymarching function we repeatedly calculate the distance to the closest surface, and travel along the ray direction by this amount. The sum of the distances will be how far the ray has traveled so far. Using this, we can drop a ray as soon as it is further away than some limit. 174 | 175 | #### Fog 176 | Fog is important to get a sense of scale and distance, and can be used to avoid artifacts in the distance. 177 | 178 | To get fog, we can simply blend the distance from the eye with the sky color. In the simplest case, we can define the near- and far-clipping planes to have blending factors of 0 and 1, respectively, and linearly interpolate between these based on the distance. 179 | 180 | More realistic blending methods can be found [here](http://www.iquilezles.org/www/articles/fog/fog.htm). 181 | 182 | #### Anti-aliasing 183 | Instead of casting a single ray per pixel, we can distribute multiple rays inside a single pixel, sum the results and calculate the average. This will effectively remove sharp edges, and wreck your framerate at the same time, so be careful. 184 | 185 | ![Anti aliasing diagram](http://2.bp.blogspot.com/-0xKCK5FFZX4/UeL7Bwa8xzI/AAAAAAAAAD4/yg-pUTAfr7I/s1600/antialiasing.png) 186 | 187 | An interesting optimization could be to only perform AA if the iteration count is above average, as it likely indicates the edge of an object. 188 | 189 | #### Lighting 190 | A simple lighting model is the *Lambertian reflectance* model. The idea is that the light intensity at a point depends on the angle between the surface normal and the direction to the light. By clamping the dot product of these vectors between 0 and 1, we get a measure for how strongly the point should be lit. For more on lighting models, Arcsynthesis' tutorial explains the topic quite well: [Arcsynthesis: Lights on](http://www.arcsynthesis.org/gltut/Illumination/Tutorial%2009.html). 191 | 192 | We need to compute the surface normal. 
Our distance function is a special type of function known as a scalar field, because it assigns each point (x, y, z) a scalar quantity (the distance to the closest surface). Knowing this, we can approximate the surface normal using what is known as the **gradient**. 193 | 194 | The gradient of a scalar field is a vector, pointing in the direction where the field increases or decreases the most. Its magnitude is how big this change is. Naturally, the distance to a surface will increase more if we move *normally* away from it, rather than parallel to it. Thus, the gradient points in the same direction as the normal. 195 | 196 | The gradient can be approximated by numerical differentiation, and normalized to give us the surface normal. The lighting of the point can now be computed. For example: 197 | 198 | vec4 shade(vec3 p) 199 | { 200 | vec3 normal = getNormal(p); 201 | vec3 lightDir = normalize(lightPosition - p); 202 | float lightIntensity = lightColor * dot(normal, lightDir); 203 | return getReflectance(p) * lightIntensity; 204 | } 205 | 206 | #### Shadows 207 | Light rarely looks good without shadows. Luckily for us, this is fairly easy to implement. Simply check if the path from the point to shade to each light source is obstructed or not, by raymarching. This will produce a hard shadow, but it is possible to get good looking soft shadows, [almost for free](http://www.iquilezles.org/www/articles/rmshadows/rmshadows.htm). 208 | 209 | ![Soft shadows](http://1.bp.blogspot.com/-VDgVKI8T_Gk/UeL7D_bPPKI/AAAAAAAAAEs/4LMg81mDiHI/s1600/softshadows.png) 210 | 211 | #### Ambient occlusion 212 | Once we have obtained the surface normal (see the section about Lighting), we can easily fake an ambient occlusion effect. A proposed method is to sample along the surface normal a couple of times, comparing the value of the distance field with the actual distance from the surface. For example, if the difference is 0, the surface is not occluded.
If there are other surfaces closer to the ray at the sample point, the difference will be non-zero. 213 | 214 | ![Ambient occlusion](http://1.bp.blogspot.com/-rTU9gDQcc8c/UeL7B3c5FvI/AAAAAAAAAEU/Eh7Noewp3UQ/s1600/ambientocclusion.png) 215 | 216 | #### Reflections 217 | A simple reflection can be calculated by raymarching off the surface normal, and blending the result together with the surface color. 218 | 219 | #### Domain distortion and repetition 220 | Objects can be distorted by manipulating the point checked against in the distance functions. For example: 221 | 222 | float twistedCube(vec3 p) { 223 | vec3 q = rotateY(p, 0.5 * p.y); 224 | return cube(q); 225 | } 226 | 227 | This will produce a cube rotated more or less along the y-axis: 228 | 229 | ![Twisted cube](http://3.bp.blogspot.com/-LHB2J0yx2X0/UeL7EOYuDsI/AAAAAAAAAFA/OGfxdJf4Y_4/s1600/twisted_box.png) 230 | 231 | A scene can be repeated by taking the modulo of the point by some size: 232 | 233 | float repeatedCubes(vec3 p) { 234 | p.x = mod(p.x, 1) - 0.5; 235 | p.z = mod(p.z, 1) - 0.5; 236 | return cube(p); 237 | } 238 | 239 | Which results in many cubes repeated in the xz-plane: 240 | 241 | ![Repeated cubes](http://4.bp.blogspot.com/-OPfiwoAnJ5k/UeL7Dd2_rOI/AAAAAAAAAEk/KbyFYOHc5cQ/s1600/repetition.png) 242 | 243 | By combining domain repetition with subtracting distance fields from eachother (as described in the beginning), we can make complex objects like this: 244 | 245 | ![Domain fractal](http://2.bp.blogspot.com/-UhTlNBbAsks/UeR49uPRSmI/AAAAAAAAAGU/NUq2l8NlINw/s1600/subtract_modulo2.png) 246 | **Domain repeated cubes are subtracted from a non-repeated, larger sphere** 247 | 248 | Be careful though, as distorting the distance field like this can cause some nasty artifacts. A sort-of solution is to step shorter in the raymarching function than the distance field returns. 
249 | 250 | Other 251 | ----- 252 | While distance fields have been a major focus of this article, raymarching itself is a general method. The main advantage is that the algorithm makes it very easy to render a variety of scenes with a high level of detail. For example terrain: 253 | 254 | Consider a heightmap describing your terrain. It could be a texture sampler uploaded to your shader, or it could be created procedurally. In order to render the terrain by raymarching, you would simply cast rays through each pixel of the image, step along the ray by some size, and stop whenever the point on the ray is lower than the terrain. The color can then be determined by perhaps making taller points white (i.e. snow) and lower points green (i.e. grass). We can shade the point by raymarching towards a light source. 255 | 256 | See [Terrain marching](http://www.iquilezles.org/www/articles/terrainmarching/terrainmarching.htm) for a nice introduction. 257 | 258 | ![Ixaleno/rgba by Inigo Quilez](http://1.bp.blogspot.com/-NqCg7Hqre9I/UeL7DO6KZSI/AAAAAAAAAFI/tJQgvY_hDPE/s1600/ray_05_g.jpg) 259 | 260 | **Ixaleno/rgba by Inigo Quilez** 261 | 262 | Conclusion 263 | ---------- 264 | In summary, raymarching is a general technique that can be used to render a variety of complex scenes. It fits perfectly on the GPU, as each pixel can be calculated independently. By using distance fields, we can accelerate the rendering to real-time speeds, and still maintain a high level of detail. A number of techniques have been developed for manipulating the distance fields, such as blending, distorting and repeating objects. Furthermore, it is easy to implement graphical improvements, such as ambient occlusion, lighting, soft shadows and bump mapping.
For a more thorough walkthrough of the raymarching algorithm, see [Hvidtfeldt's article series](http://blog.hvidtfeldts.net/index.php/2011/06/distance-estimated-3d-fractals-part-i/), where he also investigates 3D fractals! 267 | 268 | I recommend looking through the pouet threads, which are packed full of colorful inspirations and helpful code. 269 | 270 | Happy coding! 271 | 272 | References 273 | ---------- 274 | * [Inigo Quilez: *Raymarching Distance Fields*](http://www.iquilezles.org/www/articles/raymarchingdf/raymarchingdf.htm) 275 | * [hvidtfeldts: *Distance Estimated 3D Fractals*](http://blog.hvidtfeldts.net/index.php/2011/06/distance-estimated-3d-fractals-part-i/) 276 | * [Inigo Quilez: *Modeling with Distance Functions*](http://www.iquilezles.org/www/articles/distfunctions/distfunctions.htm) 277 | * [Inigo Quilez: *Free Penumbra Shadows for Raymarching Distance Fields*](http://www.iquilezles.org/www/articles/rmshadows/rmshadows.htm) 278 | * [John C. Hart: *Sphere Tracing: A Geometric Method for the Antialiased Ray Tracing of Implicit Surfaces* (1994)](http://citeseer.ist.psu.edu/viewdoc/summary?doi=10.1.1.48.3825) 279 | * [fractalforums: *Kaleidoscopic (escape time) IFS*](http://www.fractalforums.com/ifs-iterated-function-systems/kaleidoscopic-(escape-time-ifs)/) 280 | * [pouet: *Raymarching beginner's thread*](http://pouet.net/topic.php?which=7920&page=1&x=11&y=6) 281 | * [pouet: *Raymarching toolbox*](http://pouet.net/topic.php?which=7931&page=1&x=3&y=14) 282 | 283 | Code 284 | ---- 285 | An interactive WebGL demo can be found [here](http://lightbits.github.io/toys/raymarching/raymarching.html). 286 | 287 | A Github repo for the project can be found [here](https://github.com/lightbits/ray-march). The repo contains C++ code, which runs with GLFW and OpenGL ver. 3.2. It also contains the source for the WebGL implementation inside the "webgl" subfolder. 
288 | 289 | Thanks 290 | ------ 291 | Special thanks to Inigo Quilez (iq) for his efforts in the demoscene on this topic, as well as various forum members of pouet.net, for being super helpful. Hvidtfeldt's introductory articles were well-written and very useful for gaining an understanding of the topic. 292 | -------------------------------------------------------------------------------- /webgl/gl-matrix-min.js: -------------------------------------------------------------------------------- 1 | /** 2 | * @fileoverview gl-matrix - High performance matrix and vector operations 3 | * @author Brandon Jones 4 | * @author Colin MacKenzie IV 5 | * @version 2.2.0 6 | */ 7 | /* Copyright (c) 2013, Brandon Jones, Colin MacKenzie IV. All rights reserved. 8 | 9 | Redistribution and use in source and binary forms, with or without modification, 10 | are permitted provided that the following conditions are met: 11 | 12 | * Redistributions of source code must retain the above copyright notice, this 13 | list of conditions and the following disclaimer. 14 | * Redistributions in binary form must reproduce the above copyright notice, 15 | this list of conditions and the following disclaimer in the documentation 16 | and/or other materials provided with the distribution. 17 | 18 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND 19 | ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED 20 | WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 21 | DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR 22 | ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES 23 | (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; 24 | LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON 25 | ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 26 | (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS 27 | SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ 28 | (function(e){"use strict";var t={};typeof exports=="undefined"?typeof define=="function"&&typeof define.amd=="object"&&define.amd?(t.exports={},define(function(){return t.exports})):t.exports=typeof window!="undefined"?window:e:t.exports=exports,function(e){if(!t)var t=1e-6;if(!n)var n=typeof Float32Array!="undefined"?Float32Array:Array;if(!r)var r=Math.random;var i={};i.setMatrixArrayType=function(e){n=e},typeof e!="undefined"&&(e.glMatrix=i);var s={};s.create=function(){var e=new n(2);return e[0]=0,e[1]=0,e},s.clone=function(e){var t=new n(2);return t[0]=e[0],t[1]=e[1],t},s.fromValues=function(e,t){var r=new n(2);return r[0]=e,r[1]=t,r},s.copy=function(e,t){return e[0]=t[0],e[1]=t[1],e},s.set=function(e,t,n){return e[0]=t,e[1]=n,e},s.add=function(e,t,n){return e[0]=t[0]+n[0],e[1]=t[1]+n[1],e},s.subtract=function(e,t,n){return e[0]=t[0]-n[0],e[1]=t[1]-n[1],e},s.sub=s.subtract,s.multiply=function(e,t,n){return e[0]=t[0]*n[0],e[1]=t[1]*n[1],e},s.mul=s.multiply,s.divide=function(e,t,n){return e[0]=t[0]/n[0],e[1]=t[1]/n[1],e},s.div=s.divide,s.min=function(e,t,n){return e[0]=Math.min(t[0],n[0]),e[1]=Math.min(t[1],n[1]),e},s.max=function(e,t,n){return e[0]=Math.max(t[0],n[0]),e[1]=Math.max(t[1],n[1]),e},s.scale=function(e,t,n){return e[0]=t[0]*n,e[1]=t[1]*n,e},s.scaleAndAdd=function(e,t,n,r){return e[0]=t[0]+n[0]*r,e[1]=t[1]+n[1]*r,e},s.distance=function(e,t){var n=t[0]-e[0],r=t[1]-e[1];return 
Math.sqrt(n*n+r*r)},s.dist=s.distance,s.squaredDistance=function(e,t){var n=t[0]-e[0],r=t[1]-e[1];return n*n+r*r},s.sqrDist=s.squaredDistance,s.length=function(e){var t=e[0],n=e[1];return Math.sqrt(t*t+n*n)},s.len=s.length,s.squaredLength=function(e){var t=e[0],n=e[1];return t*t+n*n},s.sqrLen=s.squaredLength,s.negate=function(e,t){return e[0]=-t[0],e[1]=-t[1],e},s.normalize=function(e,t){var n=t[0],r=t[1],i=n*n+r*r;return i>0&&(i=1/Math.sqrt(i),e[0]=t[0]*i,e[1]=t[1]*i),e},s.dot=function(e,t){return e[0]*t[0]+e[1]*t[1]},s.cross=function(e,t,n){var r=t[0]*n[1]-t[1]*n[0];return e[0]=e[1]=0,e[2]=r,e},s.lerp=function(e,t,n,r){var i=t[0],s=t[1];return e[0]=i+r*(n[0]-i),e[1]=s+r*(n[1]-s),e},s.random=function(e,t){t=t||1;var n=r()*2*Math.PI;return e[0]=Math.cos(n)*t,e[1]=Math.sin(n)*t,e},s.transformMat2=function(e,t,n){var r=t[0],i=t[1];return e[0]=n[0]*r+n[2]*i,e[1]=n[1]*r+n[3]*i,e},s.transformMat2d=function(e,t,n){var r=t[0],i=t[1];return e[0]=n[0]*r+n[2]*i+n[4],e[1]=n[1]*r+n[3]*i+n[5],e},s.transformMat3=function(e,t,n){var r=t[0],i=t[1];return e[0]=n[0]*r+n[3]*i+n[6],e[1]=n[1]*r+n[4]*i+n[7],e},s.transformMat4=function(e,t,n){var r=t[0],i=t[1];return e[0]=n[0]*r+n[4]*i+n[12],e[1]=n[1]*r+n[5]*i+n[13],e},s.forEach=function(){var e=s.create();return function(t,n,r,i,s,o){var u,a;n||(n=2),r||(r=0),i?a=Math.min(i*n+r,t.length):a=t.length;for(u=r;u0&&(s=1/Math.sqrt(s),e[0]=t[0]*s,e[1]=t[1]*s,e[2]=t[2]*s),e},o.dot=function(e,t){return e[0]*t[0]+e[1]*t[1]+e[2]*t[2]},o.cross=function(e,t,n){var r=t[0],i=t[1],s=t[2],o=n[0],u=n[1],a=n[2];return e[0]=i*a-s*u,e[1]=s*o-r*a,e[2]=r*u-i*o,e},o.lerp=function(e,t,n,r){var i=t[0],s=t[1],o=t[2];return e[0]=i+r*(n[0]-i),e[1]=s+r*(n[1]-s),e[2]=o+r*(n[2]-o),e},o.random=function(e,t){t=t||1;var n=r()*2*Math.PI,i=r()*2-1,s=Math.sqrt(1-i*i)*t;return e[0]=Math.cos(n)*s,e[1]=Math.sin(n)*s,e[2]=i*t,e},o.transformMat4=function(e,t,n){var r=t[0],i=t[1],s=t[2];return 
e[0]=n[0]*r+n[4]*i+n[8]*s+n[12],e[1]=n[1]*r+n[5]*i+n[9]*s+n[13],e[2]=n[2]*r+n[6]*i+n[10]*s+n[14],e},o.transformMat3=function(e,t,n){var r=t[0],i=t[1],s=t[2];return e[0]=r*n[0]+i*n[3]+s*n[6],e[1]=r*n[1]+i*n[4]+s*n[7],e[2]=r*n[2]+i*n[5]+s*n[8],e},o.transformQuat=function(e,t,n){var r=t[0],i=t[1],s=t[2],o=n[0],u=n[1],a=n[2],f=n[3],l=f*r+u*s-a*i,c=f*i+a*r-o*s,h=f*s+o*i-u*r,p=-o*r-u*i-a*s;return e[0]=l*f+p*-o+c*-a-h*-u,e[1]=c*f+p*-u+h*-o-l*-a,e[2]=h*f+p*-a+l*-u-c*-o,e},o.forEach=function(){var e=o.create();return function(t,n,r,i,s,o){var u,a;n||(n=3),r||(r=0),i?a=Math.min(i*n+r,t.length):a=t.length;for(u=r;u0&&(o=1/Math.sqrt(o),e[0]=t[0]*o,e[1]=t[1]*o,e[2]=t[2]*o,e[3]=t[3]*o),e},u.dot=function(e,t){return e[0]*t[0]+e[1]*t[1]+e[2]*t[2]+e[3]*t[3]},u.lerp=function(e,t,n,r){var i=t[0],s=t[1],o=t[2],u=t[3];return e[0]=i+r*(n[0]-i),e[1]=s+r*(n[1]-s),e[2]=o+r*(n[2]-o),e[3]=u+r*(n[3]-u),e},u.random=function(e,t){return t=t||1,e[0]=r(),e[1]=r(),e[2]=r(),e[3]=r(),u.normalize(e,e),u.scale(e,e,t),e},u.transformMat4=function(e,t,n){var r=t[0],i=t[1],s=t[2],o=t[3];return e[0]=n[0]*r+n[4]*i+n[8]*s+n[12]*o,e[1]=n[1]*r+n[5]*i+n[9]*s+n[13]*o,e[2]=n[2]*r+n[6]*i+n[10]*s+n[14]*o,e[3]=n[3]*r+n[7]*i+n[11]*s+n[15]*o,e},u.transformQuat=function(e,t,n){var r=t[0],i=t[1],s=t[2],o=n[0],u=n[1],a=n[2],f=n[3],l=f*r+u*s-a*i,c=f*i+a*r-o*s,h=f*s+o*i-u*r,p=-o*r-u*i-a*s;return e[0]=l*f+p*-o+c*-a-h*-u,e[1]=c*f+p*-u+h*-o-l*-a,e[2]=h*f+p*-a+l*-u-c*-o,e},u.forEach=function(){var e=u.create();return function(t,n,r,i,s,o){var u,a;n||(n=4),r||(r=0),i?a=Math.min(i*n+r,t.length):a=t.length;for(u=r;u.999999?(r[0]=0,r[1]=0,r[2]=0,r[3]=1,r):(o.cross(e,i,s),r[0]=e[0],r[1]=e[1],r[2]=e[2],r[3]=1+u,h.normalize(r,r))}}(),h.setAxes=function(){var e=l.create();return function(t,n,r,i){return e[0]=r[0],e[3]=r[1],e[6]=r[2],e[1]=i[0],e[4]=i[1],e[7]=i[2],e[2]=n[0],e[5]=n[1],e[8]=n[2],h.normalize(t,h.fromMat3(t,e))}}(),h.clone=u.clone,h.fromValues=u.fromValues,h.copy=u.copy,h.set=u.set,h.identity=function(e){return 
e[0]=0,e[1]=0,e[2]=0,e[3]=1,e},h.setAxisAngle=function(e,t,n){n*=.5;var r=Math.sin(n);return e[0]=r*t[0],e[1]=r*t[1],e[2]=r*t[2],e[3]=Math.cos(n),e},h.add=u.add,h.multiply=function(e,t,n){var r=t[0],i=t[1],s=t[2],o=t[3],u=n[0],a=n[1],f=n[2],l=n[3];return e[0]=r*l+o*u+i*f-s*a,e[1]=i*l+o*a+s*u-r*f,e[2]=s*l+o*f+r*a-i*u,e[3]=o*l-r*u-i*a-s*f,e},h.mul=h.multiply,h.scale=u.scale,h.rotateX=function(e,t,n){n*=.5;var r=t[0],i=t[1],s=t[2],o=t[3],u=Math.sin(n),a=Math.cos(n);return e[0]=r*a+o*u,e[1]=i*a+s*u,e[2]=s*a-i*u,e[3]=o*a-r*u,e},h.rotateY=function(e,t,n){n*=.5;var r=t[0],i=t[1],s=t[2],o=t[3],u=Math.sin(n),a=Math.cos(n);return e[0]=r*a-s*u,e[1]=i*a+o*u,e[2]=s*a+r*u,e[3]=o*a-i*u,e},h.rotateZ=function(e,t,n){n*=.5;var r=t[0],i=t[1],s=t[2],o=t[3],u=Math.sin(n),a=Math.cos(n);return e[0]=r*a+i*u,e[1]=i*a-r*u,e[2]=s*a+o*u,e[3]=o*a-s*u,e},h.calculateW=function(e,t){var n=t[0],r=t[1],i=t[2];return e[0]=n,e[1]=r,e[2]=i,e[3]=-Math.sqrt(Math.abs(1-n*n-r*r-i*i)),e},h.dot=u.dot,h.lerp=u.lerp,h.slerp=function(e,t,n,r){var i=t[0],s=t[1],o=t[2],u=t[3],a=n[0],f=n[1],l=n[2],c=n[3],h,p,d,v,m;return p=i*a+s*f+o*l+u*c,p<0&&(p=-p,a=-a,f=-f,l=-l,c=-c),1-p>1e-6?(h=Math.acos(p),d=Math.sin(h),v=Math.sin((1-r)*h)/d,m=Math.sin(r*h)/d):(v=1-r,m=r),e[0]=v*i+m*a,e[1]=v*s+m*f,e[2]=v*o+m*l,e[3]=v*u+m*c,e},h.invert=function(e,t){var n=t[0],r=t[1],i=t[2],s=t[3],o=n*n+r*r+i*i+s*s,u=o?1/o:0;return e[0]=-n*u,e[1]=-r*u,e[2]=-i*u,e[3]=s*u,e},h.conjugate=function(e,t){return e[0]=-t[0],e[1]=-t[1],e[2]=-t[2],e[3]=t[3],e},h.length=u.length,h.len=h.length,h.squaredLength=u.squaredLength,h.sqrLen=h.squaredLength,h.normalize=u.normalize,h.fromMat3=function(){var e=typeof Int8Array!="undefined"?new Int8Array([1,2,0]):[1,2,0];return function(t,n){var r=n[0]+n[4]+n[8],i;if(r>0)i=Math.sqrt(r+1),t[3]=.5*i,i=.5/i,t[0]=(n[7]-n[5])*i,t[1]=(n[2]-n[6])*i,t[2]=(n[3]-n[1])*i;else{var s=0;n[4]>n[0]&&(s=1),n[8]>n[s*3+s]&&(s=2);var 
o=e[s],u=e[o];i=Math.sqrt(n[s*3+s]-n[o*3+o]-n[u*3+u]+1),t[s]=.5*i,i=.5/i,t[3]=(n[u*3+o]-n[o*3+u])*i,t[o]=(n[o*3+s]+n[s*3+o])*i,t[u]=(n[u*3+s]+n[s*3+u])*i}return t}}(),h.str=function(e){return"quat("+e[0]+", "+e[1]+", "+e[2]+", "+e[3]+")"},typeof e!="undefined"&&(e.quat=h)}(t.exports)})(this); 29 | --------------------------------------------------------------------------------