├── .gitignore ├── Data ├── BlueNoise.bmp ├── geo.h └── pathtrace.hlsl ├── FalcorPathTracer.sln ├── FalcorPathTracer.vcxproj ├── LICENSE ├── MakeGeoHelper ├── MakeGeoHelper.vcxproj └── Sandbox.cpp ├── main.cpp ├── out ├── __clear.bat ├── __makevideo.bat └── ffmpeg.exe └── readme.md /.gitignore: -------------------------------------------------------------------------------- 1 | bin/ 2 | .vs/ 3 | *.vcxproj.user 4 | *.vcxproj.filters 5 | x64/ 6 | out/*.png 7 | out/*.webm 8 | out/*.mp4 -------------------------------------------------------------------------------- /Data/BlueNoise.bmp: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Atrix256/FalcorPathTracer/c63896e8c30aad7b962e1999b4d0aefa4020328d/Data/BlueNoise.bmp -------------------------------------------------------------------------------- /Data/geo.h: -------------------------------------------------------------------------------- 1 | static const float c_rayHitMinimumT = 0.001f; 2 | 3 | struct Ray 4 | { 5 | float3 origin; 6 | float3 direction; 7 | }; 8 | 9 | struct CollisionInfo 10 | { 11 | float collisionTime; // init to -1 12 | float3 normal; 13 | float3 albedo; 14 | float3 emissive; 15 | uint geoID; 16 | }; 17 | 18 | struct Quad 19 | { 20 | float3 a,b,c,d; 21 | float3 normal; 22 | float3 albedo; 23 | float3 emissive; 24 | uint geoID; 25 | }; 26 | 27 | struct Sphere 28 | { 29 | float3 position; 30 | float radius; 31 | float3 albedo; 32 | float3 emissive; 33 | uint geoID; 34 | }; 35 | 36 | struct PLight 37 | { 38 | float3 position; 39 | float3 color; 40 | }; 41 | 42 | float ScalarTriple(in float3 a, in float3 b, in float3 c) 43 | { 44 | return dot(cross(a, b), c); 45 | } 46 | 47 | bool RayIntersects (in Ray ray, in Sphere sphere, inout CollisionInfo collisionInfo) 48 | { 49 | //get the vector from the center of this circle to where the ray begins. 50 | float3 m = ray.origin - sphere.position; 51 | 52 | //get the dot product of the above vector and the ray's vector 53 | float b = dot(m, ray.direction); 54 | 55 | float c = dot(m, m) - sphere.radius * sphere.radius; 56 | 57 | //exit if r's origin outside s (c > 0) and r pointing away from s (b > 0) 58 | if (c > 0.0 && b > 0.0) 59 | return false; 60 | 61 | //calculate discriminant 62 | float discr = b * b - c; 63 | 64 | //a negative discriminant corresponds to ray missing sphere 65 | if (discr <= 0.0) 66 | return false; 67 | 68 | //ray now found to intersect sphere, compute smallest t value of intersection 69 | float collisionTime = -b - sqrt(discr); 70 | 71 | //if t is negative, ray started inside sphere so clamp t to zero and remember that we hit from the inside 72 | if (collisionTime < 0.0) 73 | collisionTime = -b + sqrt(discr); 74 | 75 | //enforce min and max distance 76 | if (collisionTime < c_rayHitMinimumT || (collisionInfo.collisionTime >= 0.0 && collisionTime > collisionInfo.collisionTime)) 77 | return false; 78 | 79 | float3 normal = normalize((ray.origin + ray.direction * collisionTime) - sphere.position); 80 | 81 | // make sure normal is facing opposite of ray direction. 82 | // this is for if we are hitting the object from the inside / back side. 
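// (a note on the math above, assuming ray.direction is unit length: the implicit
//  quadratic is t^2 + 2*b*t + c = 0 with b = dot(m, direction) and c = dot(m, m) - radius^2,
//  so its roots simplify to t = -b +/- sqrt(b*b - c). an unnormalized direction would
//  make these collision times wrong.)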
83 | if (dot(normal, ray.direction) > 0.0f) 84 | normal *= -1.0f; 85 | 86 | collisionInfo.collisionTime = collisionTime; 87 | collisionInfo.normal = normal; 88 | collisionInfo.albedo = sphere.albedo; 89 | collisionInfo.emissive = sphere.emissive; 90 | collisionInfo.geoID = sphere.geoID; 91 | return true; 92 | } 93 | 94 | bool RayIntersects(in Ray ray, in Quad quad, inout CollisionInfo collisionInfo) 95 | { 96 | // This function adapted from "Real Time Collision Detection" 5.3.5 Intersecting Line Against Quadrilateral 97 | // IntersectLineQuad() 98 | float3 pa = quad.a - ray.origin; 99 | float3 pb = quad.b - ray.origin; 100 | float3 pc = quad.c - ray.origin; 101 | // Determine which triangle to test against by testing against diagonal first 102 | float3 m = cross(pc, ray.direction); 103 | float3 r; 104 | float v = dot(pa, m); // ScalarTriple(pq, pa, pc); 105 | if (v >= 0.0f) { 106 | // Test intersection against triangle abc 107 | float u = -dot(pb, m); // ScalarTriple(pq, pc, pb); 108 | if (u < 0.0f) return false; 109 | float w = ScalarTriple(ray.direction, pb, pa); 110 | if (w < 0.0f) return false; 111 | // Compute r, r = u*a + v*b + w*c, from barycentric coordinates (u, v, w) 112 | float denom = 1.0f / (u + v + w); 113 | u *= denom; 114 | v *= denom; 115 | w *= denom; // w = 1.0f - u - v; 116 | r = u*quad.a + v*quad.b + w*quad.c; 117 | } 118 | else { 119 | // Test intersection against triangle dac 120 | float3 pd = quad.d - ray.origin; 121 | float u = dot(pd, m); // ScalarTriple(pq, pd, pc); 122 | if (u < 0.0f) return false; 123 | float w = ScalarTriple(ray.direction, pa, pd); 124 | if (w < 0.0f) return false; 125 | v = -v; 126 | // Compute r, r = u*a + v*d + w*c, from barycentric coordinates (u, v, w) 127 | float denom = 1.0f / (u + v + w); 128 | u *= denom; 129 | v *= denom; 130 | w *= denom; // w = 1.0f - u - v; 131 | r = u*quad.a + v*quad.d + w*quad.c; 132 | } 133 | 134 | // make sure normal is facing opposite of ray direction. 135 | // this is for if we are hitting the object from the inside / back side. 136 | float3 normal = quad.normal; 137 | if (dot(quad.normal, ray.direction) > 0.0f) 138 | normal *= -1.0f; 139 | 140 | // figure out the time t that we hit the plane (quad) 141 | float t; 142 | if (abs(ray.direction[0]) > 0.0f) 143 | t = (r[0] - ray.origin[0]) / ray.direction[0]; 144 | else if (abs(ray.direction[1]) > 0.0f) 145 | t = (r[1] - ray.origin[1]) / ray.direction[1]; 146 | else if (abs(ray.direction[2]) > 0.0f) 147 | t = (r[2] - ray.origin[2]) / ray.direction[2]; 148 | 149 | // only positive time hits allowed! 
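// (the axis-by-axis division above recovers t from "origin + t * direction = r", where r
//  is the barycentric hit point computed earlier; any axis with a nonzero direction
//  component yields the same t. with a unit-length direction an equivalent single line
//  would be "t = dot(r - ray.origin, ray.direction)" -- offered only as a sketch, not as
//  what this code does.)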
150 | if (t < 0.0f) 151 | return false; 152 | 153 | //enforce min and max distance 154 | float collisionTime = t; 155 | if (collisionTime < c_rayHitMinimumT || (collisionInfo.collisionTime >= 0.0 && collisionTime > collisionInfo.collisionTime)) 156 | return false; 157 | 158 | collisionInfo.collisionTime = collisionTime; 159 | collisionInfo.normal = normal; 160 | collisionInfo.albedo = quad.albedo; 161 | collisionInfo.emissive = quad.emissive; 162 | collisionInfo.geoID = quad.geoID; 163 | return true; 164 | } -------------------------------------------------------------------------------- /Data/pathtrace.hlsl: -------------------------------------------------------------------------------- 1 | #include "geo.h" 2 | 3 | #ifndef MAX_RAY_BOUNCES 4 | #define MAX_RAY_BOUNCES 4 5 | #endif 6 | 7 | #ifndef SAMPLES_PER_FRAME 8 | #define SAMPLES_PER_FRAME 1 9 | #endif 10 | 11 | #ifndef SAMPLE_LIGHTS 12 | #define SAMPLE_LIGHTS 1 13 | #endif 14 | 15 | #ifndef WORK_GROUP_SIZE 16 | #define WORK_GROUP_SIZE 16 17 | #endif 18 | 19 | #ifndef BOKEH_SHAPE 20 | #define BOKEH_SHAPE 0 21 | #endif 22 | 23 | // keep in sync with main.cpp enum BokehShape 24 | #define BOKEH_SHAPE_CIRCLE 0 25 | #define BOKEH_SHAPE_CIRCLEG 1 26 | #define BOKEH_SHAPE_SQUARE 2 27 | #define BOKEH_SHAPE_RING 3 28 | #define BOKEH_SHAPE_TRIANGLE 4 29 | #define BOKEH_SHAPE_SOD 5 30 | 31 | static const float c_pi = 3.14159265359f; 32 | static const float c_goldenRatioConjugate = 0.61803398875f; 33 | 34 | // Note, this is how you'd read the blue noise texture: gBlueNoiseTexture[texturePixel].r; 35 | Texture2D gBlueNoiseTexture; 36 | 37 | RWTexture2D gOutputF32; 38 | RWTexture2D gOutputU8; 39 | 40 | cbuffer ShaderConstants 41 | { 42 | float4x4 invViewProjMtx; 43 | float4x4 viewMtx; 44 | float4x4 invViewMtx; 45 | float3 skyColor; 46 | float lerpAmount; 47 | uint frameRand; 48 | uint frameNumber; 49 | float DOFFocalLength; 50 | float DOFApertureRadius; 51 | float Exposure; 52 | float3 cameraPos; 53 | float3 cameraRight; 54 | float3 cameraUp; 55 | float4 sensorPlane; 56 | float4 focalPlane; 57 | }; 58 | 59 | StructuredBuffer g_spheres; 60 | StructuredBuffer g_lightSpheres; 61 | StructuredBuffer g_quads; 62 | StructuredBuffer g_plights; 63 | 64 | uint wang_hash(inout uint seed) 65 | { 66 | seed = (seed ^ 61) ^ (seed >> 16); 67 | seed *= 9; 68 | seed = seed ^ (seed >> 4); 69 | seed *= 0x27d4eb2d; 70 | seed = seed ^ (seed >> 15); 71 | return seed; 72 | } 73 | 74 | float RandomFloat01(inout uint state) 75 | { 76 | return float(wang_hash(state)) / 4294967296.0; 77 | } 78 | 79 | float3 RandomUnitVector(inout uint state) 80 | { 81 | float z = RandomFloat01(state) * 2.0f - 1.0f; 82 | float a = RandomFloat01(state) * 2.0f * 3.1415926f; 83 | float r = sqrt(1.0f - z * z); 84 | float x = r * cos(a); 85 | float y = r * sin(a); 86 | return float3(x, y, z); 87 | } 88 | 89 | float2 PointInTriangle(in float2 A, in float2 B, in float2 C, in float2 rand) 90 | { 91 | return (1.0f - sqrt(rand.x)) * A.yx + sqrt(rand.x)*(1.0f - rand.y) * B.yx + rand.y * sqrt(rand.x) * C.yx; 92 | } 93 | 94 | float SignedTriArea(float2 a, float2 b, float2 c) 95 | { 96 | return (b.x - a.x)*(c.y - a.y) - (b.y - a.y)*(c.x - a.x); 97 | } 98 | 99 | float TriArea(float2 a, float2 b, float2 c) 100 | { 101 | return abs(SignedTriArea(a, b, c)); 102 | } 103 | 104 | Ray GetRayForPixel(float2 uv, inout uint state, out float lightMultiplier) 105 | { 106 | lightMultiplier = 1.0f; 107 | 108 | // convert from [0,1] space to [-1,1] space 109 | float2 pixelClipSpace = uv * 2.0f - 1.0f; 110 | pixelClipSpace.x 
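// (uv (0,0) maps to clip space (-1,-1) and uv (1,1) maps to (1,1); the negation that
//  continues this statement below flips x, presumably to match the left-handed
//  lookAtLH view setup over in main.cpp.)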
*= -1.0f; 111 | 112 | // the ray starts at the camera 113 | Ray ret; 114 | ret.origin = cameraPos; 115 | 116 | // transform the clip space pixel at z 0 to be able to calculate the ray origin in world space 117 | float4 destination = mul(float4(pixelClipSpace, 0.0f, 1.0f), invViewProjMtx); 118 | destination /= destination.w; 119 | ret.direction = -normalize(destination.xyz - cameraPos); 120 | 121 | // if DOF is on, need to adjust the origin and direction based on aperture size and shape 122 | #ifdef ENABLE_DOF 123 | float3 fwdVector = ret.direction; 124 | float3 upVector = cameraUp; 125 | float3 rightVector = cameraRight; 126 | 127 | #if BOKEH_SHAPE == BOKEH_SHAPE_SQUARE 128 | float2 offset = (float2(RandomFloat01(state), RandomFloat01(state)) * 2.0f - 1.0f) * DOFApertureRadius; 129 | float shapeArea = 4.0f * DOFApertureRadius; 130 | #elif BOKEH_SHAPE == BOKEH_SHAPE_CIRCLE 131 | float angle = RandomFloat01(state) * 2.0f * c_pi; 132 | float radius = sqrt(RandomFloat01(state)); 133 | float2 offset = float2(cos(angle), sin(angle)) * radius * DOFApertureRadius; 134 | float shapeArea = c_pi * DOFApertureRadius * DOFApertureRadius; 135 | #elif BOKEH_SHAPE == BOKEH_SHAPE_CIRCLEG 136 | float r = sqrt(-2.0f * log(RandomFloat01(state))); 137 | float theta = 2 * c_pi*RandomFloat01(state); 138 | float2 offset; 139 | offset.x = r * cos(theta); 140 | offset.y = r * sin(theta); 141 | offset *= DOFApertureRadius; 142 | float shapeArea = c_pi * DOFApertureRadius * DOFApertureRadius; 143 | #elif BOKEH_SHAPE == BOKEH_SHAPE_RING 144 | float angle = RandomFloat01(state) * 2.0f * c_pi; 145 | float2 offset = float2(cos(angle), sin(angle)) * DOFApertureRadius; 146 | float shapeArea = 2.0f * c_pi * DOFApertureRadius; 147 | #elif BOKEH_SHAPE == BOKEH_SHAPE_TRIANGLE 148 | const float2 A = float2(1.0f, 0.0f); //0 degrees 149 | const float2 B = float2(-0.5f, 0.866f); //120 degrees 150 | const float2 C = float2(-0.5f, -0.866f); //120 degrees 151 | 152 | float2 rand = float2(RandomFloat01(state), RandomFloat01(state)); 153 | 154 | float2 offset = PointInTriangle(A, B, C, rand); 155 | 156 | offset *= DOFApertureRadius; 157 | 158 | float shapeArea = TriArea(A*DOFApertureRadius,B*DOFApertureRadius,C*DOFApertureRadius); 159 | #elif BOKEH_SHAPE == BOKEH_SHAPE_SOD 160 | 161 | const float2 A = float2(1.0f, 0.0f); 162 | const float2 B = float2(-0.5f, 0.866f); 163 | const float2 C = float2(-0.5f, -0.866f); 164 | 165 | const float2 D = float2(0.5f, 0.866f); 166 | const float2 E = float2(-1.0f, 0.0f); 167 | const float2 F = float2(0.5f, -0.866f); 168 | 169 | const float2 G = float2(0.5f, 0.288f); 170 | const float2 H = float2(0.0f, 0.577f); 171 | const float2 I = float2(-0.5f, 0.288f); 172 | 173 | const float2 J = float2(-0.5f, -0.288f); 174 | const float2 K = float2(0.0f, -0.577f); 175 | const float2 L = float2(0.5f, -0.288f); 176 | 177 | float triangleChoice = RandomFloat01(state); 178 | float2 offset; 179 | 180 | float2 rand = float2(RandomFloat01(state), RandomFloat01(state)); 181 | if (triangleChoice < 0.76) 182 | offset = PointInTriangle(A, B, C, rand); 183 | else if (triangleChoice < 0.84) 184 | offset = PointInTriangle(G, D, H, rand); 185 | else if (triangleChoice < 0.92) 186 | offset = PointInTriangle(I, E, J, rand); 187 | else 188 | offset = PointInTriangle(K, F, L, rand); 189 | 190 | offset *= DOFApertureRadius; 191 | 192 | float shapeArea = 193 | TriArea(A*DOFApertureRadius, B*DOFApertureRadius, C*DOFApertureRadius) + 194 | TriArea(G*DOFApertureRadius, D*DOFApertureRadius, H*DOFApertureRadius) + 195 | 
TriArea(I*DOFApertureRadius, E*DOFApertureRadius, J*DOFApertureRadius) + 196 | TriArea(K*DOFApertureRadius, F*DOFApertureRadius, L*DOFApertureRadius); 197 | #endif 198 | 199 | // we are only sampling one sample in the bokeh shape, but light is coming in from all samples so we need to account for that. 200 | // a more mathematical way of looking at this is that we need to divide by the pdf to do proper monte carlo integration. 201 | // the pdf is 1/shapeArea, so dividing by that pdf is the same as multiplying by shapeArea. 202 | lightMultiplier = shapeArea; 203 | 204 | // Find where the ray hits the sensor plane (negative time) and move the image plane to the focal length distance. 205 | // This is where the ray starts. 206 | // Note: could also adjust FOV as focal distance changed, but this feels closer to the real geometry. 207 | // FOV is the FOV at a focal distance of 1. 208 | float3 sensorPos; 209 | { 210 | float t = -(dot(ret.origin, sensorPlane.xyz) + sensorPlane.w) / dot(ret.direction, sensorPlane.xyz); 211 | sensorPos = ret.origin + ret.direction * t; 212 | 213 | // convert the sensorPos from world space to camera space 214 | float3 cameraSpacePos = mul(float4(sensorPos, 1.0f), viewMtx).xyz; 215 | 216 | // elongate z by the focal length 217 | cameraSpacePos.z *= DOFFocalLength; 218 | 219 | // convert back into world space 220 | sensorPos = mul(float4(cameraSpacePos, 1.0f), invViewMtx).xyz; 221 | } 222 | 223 | // calculate the world space point chosen on the aperture 224 | float3 aperturePos = cameraPos + rightVector * offset.x + upVector.xyz * offset.y; 225 | 226 | #ifdef PINHOLE_CAMERA 227 | // pinhole camera shoots the ray from the sensor position to the aperture position 228 | ret.origin = sensorPos; 229 | ret.direction = normalize(aperturePos - sensorPos); 230 | #else 231 | // lense camera shoots a ray from the aperture position to the focus position on the focus plane 232 | float3 rstart = cameraPos; 233 | float3 rdir = -normalize(destination.xyz - cameraPos); 234 | float t = -(dot(rstart, focalPlane.xyz) + focalPlane.w) / dot(rdir, focalPlane.xyz); 235 | float3 focusPos = rstart + rdir * t; 236 | ret.origin = aperturePos; 237 | ret.direction = normalize(focusPos - aperturePos); 238 | #endif 239 | #endif 240 | 241 | return ret; 242 | } 243 | 244 | CollisionInfo RayIntersectsScene(Ray ray, bool allowEmissive, float maxT = -1.0f) 245 | { 246 | CollisionInfo collisionInfo; 247 | collisionInfo.collisionTime = maxT; 248 | collisionInfo.normal = float3(0.0f, 0.0f, 0.0f); 249 | collisionInfo.albedo = float3(0.0f, 0.0f, 0.0f); 250 | collisionInfo.emissive = float3(0.0f, 0.0f, 0.0f); 251 | 252 | // test the spheres 253 | { 254 | uint count = 0; 255 | uint stride; 256 | g_spheres.GetDimensions(count, stride); 257 | 258 | for (uint i = 0; i < count; i++) 259 | RayIntersects(ray, g_spheres[i], collisionInfo); 260 | } 261 | 262 | // test the light spheres 263 | { 264 | uint count = 0; 265 | uint stride; 266 | g_lightSpheres.GetDimensions(count, stride); 267 | 268 | for (uint i = 0; i < count; i++) 269 | RayIntersects(ray, g_lightSpheres[i], collisionInfo); 270 | } 271 | 272 | // test the quads 273 | { 274 | uint count = 0; 275 | uint stride; 276 | g_quads.GetDimensions(count, stride); 277 | 278 | for (uint i = 0; i < count; i++) 279 | RayIntersects(ray, g_quads[i], collisionInfo); 280 | } 281 | 282 | if (!allowEmissive) 283 | collisionInfo.emissive = float3(0.0f, 0.0f, 0.0f); 284 | 285 | return collisionInfo; 286 | } 287 | 288 | float lengthSq(float3 v) 289 | { 290 | return dot(v, v); 291 
| } 292 | 293 | float3 SampleLight(in CollisionInfo collisionInfo, in float3 position, inout uint rngState, in Sphere sphere) 294 | { 295 | // for sampling the spherical light, see: 296 | // https://github.com/aras-p/ToyPathTracer/blob/01-initial/Cpp/Source/Test.cpp#L83 297 | 298 | // don't sample self 299 | if (collisionInfo.geoID == sphere.geoID) 300 | return float3(0.0f, 0.0f, 0.0f); 301 | 302 | // TODO: clean this up 303 | 304 | float distToLight = length(sphere.position - position); 305 | 306 | // create a random direction towards sphere 307 | // coord system for sampling: sw, su, sv 308 | float3 sw = normalize(sphere.position - position); 309 | float3 su = normalize(cross(abs(sw.x)>0.01f ? float3(0, 1, 0) : float3(1, 0, 0), sw)); 310 | float3 sv = cross(sw, su); 311 | 312 | // sample sphere by solid angle 313 | float cosAMax = sqrt(1.0f - sphere.radius*sphere.radius / lengthSq(position - sphere.position)); 314 | float eps1 = RandomFloat01(rngState); 315 | float eps2 = RandomFloat01(rngState); 316 | float cosA = 1.0f - eps1 + eps1 * cosAMax; 317 | float sinA = sqrt(1.0f - cosA * cosA); 318 | float phi = 2 * c_pi* eps2; 319 | float3 l = su * cos(phi) * sinA + sv * sin(phi) * sinA + sw * cosA; 320 | l = normalize(l); 321 | 322 | // raytrace against the scene 323 | Ray ray; 324 | ray.origin = position; 325 | ray.direction = l; 326 | CollisionInfo newCollisionInfo = RayIntersectsScene(ray, true); 327 | 328 | // TODO: maybe give a max distance. the below "missing" counting as valid doesn't make sense w/o that btw. 329 | 330 | // if we missed the world, or hit the light we wanted to, return the light 331 | if (newCollisionInfo.collisionTime < 0.0f || newCollisionInfo.geoID == sphere.geoID) 332 | { 333 | float omega = 2 * c_pi * (1 - cosAMax); 334 | 335 | float3 nDotL = max(dot(collisionInfo.normal, l), 0.0f); 336 | 337 | return collisionInfo.albedo * sphere.emissive * nDotL * omega / c_pi; 338 | } 339 | // otherwise, return no light 340 | else 341 | return float3(0.0f, 0.0f, 0.0f); 342 | } 343 | 344 | float3 SampleLight(in CollisionInfo collisionInfo, in float3 position, in PLight light) 345 | { 346 | float3 vecToLight = light.position - position; 347 | float distToLight = length(vecToLight); 348 | float3 dirToLight = normalize(vecToLight); 349 | 350 | // raytrace against the scene 351 | Ray ray; 352 | ray.origin = position; 353 | ray.direction = dirToLight; 354 | CollisionInfo newCollisionInfo = RayIntersectsScene(ray, true); 355 | 356 | // if we didn't hit anything, apply the lighting 357 | if (newCollisionInfo.collisionTime < 0.0f || newCollisionInfo.collisionTime > distToLight) 358 | { 359 | // TODO: is this correct? 
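// (regarding the TODO above: a common Lambertian convention for a point light is
//  albedo / pi * light.color * nDotL / (distToLight * distToLight). the "* 2.0f" used
//  below is this renderer's own scaling and does not match the sphere-light path above,
//  which divides by pi, so the two light types may sit on different energy scales.)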
360 | float3 nDotL = max(dot(collisionInfo.normal, dirToLight), 0.0f); 361 | 362 | // distance attenuation 363 | float atten = 1.0f / (distToLight*distToLight); 364 | 365 | return collisionInfo.albedo * light.color * atten * 2.0f * nDotL; 366 | } 367 | // otherwise, return no light 368 | else 369 | return float3(0.0f, 0.0f, 0.0f); 370 | } 371 | 372 | float3 SampleLights(in CollisionInfo collisionInfo, in float3 position, inout uint rngState) 373 | { 374 | float3 ret = float3(0.0f, 0.0f, 0.0f); 375 | 376 | // test the light spheres 377 | { 378 | uint count = 0; 379 | uint stride; 380 | g_lightSpheres.GetDimensions(count, stride); 381 | 382 | for (uint i = 0; i < count; i++) 383 | ret += SampleLight(collisionInfo, position, rngState, g_lightSpheres[i]); 384 | } 385 | 386 | // TODO: sample quad lights 387 | 388 | // test the point lights 389 | { 390 | uint count = 0; 391 | uint stride; 392 | g_plights.GetDimensions(count, stride); 393 | 394 | for (uint i = 0; i < count; i++) 395 | ret += SampleLight(collisionInfo, position, g_plights[i]); 396 | } 397 | 398 | return ret; 399 | } 400 | 401 | float3 LightOutgoing(in CollisionInfo collisionInfo, in float3 rayHitPos, inout uint rngState) 402 | { 403 | float3 lightSum = float3(0.0f, 0.0f, 0.0f); 404 | float3 lightMultiplier = float3(1.0f, 1.0f, 1.0f); 405 | float cosTheta = 1.0f; 406 | 407 | #if SAMPLE_LIGHTS == 1 408 | bool allowEmissive = false; 409 | #else 410 | bool allowEmissive = true; 411 | #endif 412 | 413 | for (int i = 0; i <= MAX_RAY_BOUNCES; ++i) 414 | { 415 | // do explicit light sampling if we should 416 | #if SAMPLE_LIGHTS == 1 417 | lightSum += SampleLights(collisionInfo, rayHitPos, rngState) * lightMultiplier; 418 | #endif 419 | 420 | // update our light sum and future light multiplier 421 | lightSum += collisionInfo.emissive * lightMultiplier; 422 | lightMultiplier *= collisionInfo.albedo * cosTheta; 423 | 424 | // add a random recursive sample for global illumination 425 | #ifdef COSINE_WEIGHTED_HEMISPHERE_SAMPLING 426 | float3 newRayDir = normalize(collisionInfo.normal + RandomUnitVector(rngState)); 427 | #else 428 | float3 newRayDir = RandomUnitVector(rngState); 429 | if (dot(collisionInfo.normal, newRayDir) < 0.0f) 430 | newRayDir *= -1.0f; 431 | cosTheta = 2.0f * dot(collisionInfo.normal, newRayDir); 432 | #endif 433 | 434 | Ray newRay; 435 | newRay.origin = rayHitPos; 436 | newRay.direction = newRayDir; 437 | CollisionInfo newCollisionInfo = RayIntersectsScene(newRay, allowEmissive); 438 | 439 | // if we hit something new, we continue 440 | if (newCollisionInfo.collisionTime >= 0.0f) 441 | { 442 | collisionInfo = newCollisionInfo; 443 | rayHitPos += newRayDir * newCollisionInfo.collisionTime; 444 | } 445 | // else we missed so we are done 446 | else 447 | { 448 | lightSum += skyColor * lightMultiplier; 449 | i = MAX_RAY_BOUNCES; 450 | } 451 | } 452 | 453 | return lightSum; 454 | } 455 | 456 | [numthreads(WORK_GROUP_SIZE, WORK_GROUP_SIZE, 1)] 457 | void main(uint3 gid : SV_DispatchThreadID) 458 | { 459 | // calculate the percentage across the screen that we are 460 | uint2 resolution; 461 | gOutputF32.GetDimensions(resolution.x, resolution.y); 462 | uint2 pixel = gid.xy; 463 | float2 uv = float2(pixel) / float2(resolution); 464 | 465 | // set up our initial random number generation state 466 | uint rngState = (pixel.x * 1973 + pixel.y * 9277 + frameNumber * 26699) | 1; 467 | rngState = rngState ^ frameRand; 468 | 469 | float3 ret = float3(0.0f, 0.0f, 0.0f); 470 | for (uint i = 0; i < SAMPLES_PER_FRAME; ++i) 471 | { 472 | // 
get a random offset to jitter the pixel by 473 | float2 uvOffset = float2(RandomFloat01(rngState), RandomFloat01(rngState)) * float2(1.0f / float(resolution.x), 1.0f / float(resolution.y)); 474 | 475 | // get the ray for this pixel 476 | float lightMultiplier = 1.0f; 477 | Ray ray = GetRayForPixel(uv + uvOffset, rngState, lightMultiplier); 478 | 479 | CollisionInfo collisionInfo = RayIntersectsScene(ray, true); 480 | 481 | if (collisionInfo.collisionTime > 0.0f) 482 | ret += LightOutgoing(collisionInfo, ray.origin + ray.direction * collisionInfo.collisionTime, rngState) * lightMultiplier; 483 | else 484 | ret += skyColor * lightMultiplier; 485 | } 486 | ret /= float(SAMPLES_PER_FRAME); 487 | 488 | // incremental average to integrate when you get one sample at a time 489 | // https://blog.demofox.org/2016/08/23/incremental-averaging/ 490 | ret = lerp(gOutputF32[pixel].rgb, ret.bgr, lerpAmount); 491 | 492 | gOutputF32[pixel] = float4(ret, 1.0f); 493 | 494 | // convert from linear to sRGB for output 495 | gOutputU8[pixel] = float4(pow(ret * Exposure, 1.0f / 2.2f), 1.0f); 496 | } -------------------------------------------------------------------------------- /FalcorPathTracer.sln: -------------------------------------------------------------------------------- 1 |  2 | Microsoft Visual Studio Solution File, Format Version 12.00 3 | # Visual Studio 15 4 | VisualStudioVersion = 15.0.27703.2018 5 | MinimumVisualStudioVersion = 10.0.40219.1 6 | Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "FalcorPathTracer", "FalcorPathTracer.vcxproj", "{9147C64B-6A7E-491E-ADC3-12FD0178CA34}" 7 | EndProject 8 | Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "Falcor", "..\..\Falcor\Framework\Source\Falcor.vcxproj", "{3B602F0E-3834-4F73-B97D-7DFC91597A98}" 9 | EndProject 10 | Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "MakeGeoHelper", "MakeGeoHelper\MakeGeoHelper.vcxproj", "{CD4603EF-8479-4199-87A6-3FD6FD6383CC}" 11 | EndProject 12 | Global 13 | GlobalSection(SolutionConfigurationPlatforms) = preSolution 14 | Debug|x64 = Debug|x64 15 | Debug|x86 = Debug|x86 16 | DebugD3D12|x64 = DebugD3D12|x64 17 | DebugD3D12|x86 = DebugD3D12|x86 18 | DebugDXR|x64 = DebugDXR|x64 19 | DebugDXR|x86 = DebugDXR|x86 20 | DebugVK|x64 = DebugVK|x64 21 | DebugVK|x86 = DebugVK|x86 22 | Release|x64 = Release|x64 23 | Release|x86 = Release|x86 24 | ReleaseD3D12|x64 = ReleaseD3D12|x64 25 | ReleaseD3D12|x86 = ReleaseD3D12|x86 26 | ReleaseDXR|x64 = ReleaseDXR|x64 27 | ReleaseDXR|x86 = ReleaseDXR|x86 28 | ReleaseVK|x64 = ReleaseVK|x64 29 | ReleaseVK|x86 = ReleaseVK|x86 30 | EndGlobalSection 31 | GlobalSection(ProjectConfigurationPlatforms) = postSolution 32 | {9147C64B-6A7E-491E-ADC3-12FD0178CA34}.Debug|x64.ActiveCfg = Debug|x64 33 | {9147C64B-6A7E-491E-ADC3-12FD0178CA34}.Debug|x64.Build.0 = Debug|x64 34 | {9147C64B-6A7E-491E-ADC3-12FD0178CA34}.Debug|x86.ActiveCfg = Debug|Win32 35 | {9147C64B-6A7E-491E-ADC3-12FD0178CA34}.Debug|x86.Build.0 = Debug|Win32 36 | {9147C64B-6A7E-491E-ADC3-12FD0178CA34}.DebugD3D12|x64.ActiveCfg = Debug|x64 37 | {9147C64B-6A7E-491E-ADC3-12FD0178CA34}.DebugD3D12|x64.Build.0 = Debug|x64 38 | {9147C64B-6A7E-491E-ADC3-12FD0178CA34}.DebugD3D12|x86.ActiveCfg = Debug|Win32 39 | {9147C64B-6A7E-491E-ADC3-12FD0178CA34}.DebugD3D12|x86.Build.0 = Debug|Win32 40 | {9147C64B-6A7E-491E-ADC3-12FD0178CA34}.DebugDXR|x64.ActiveCfg = Debug|x64 41 | {9147C64B-6A7E-491E-ADC3-12FD0178CA34}.DebugDXR|x64.Build.0 = Debug|x64 42 | {9147C64B-6A7E-491E-ADC3-12FD0178CA34}.DebugDXR|x86.ActiveCfg = Debug|Win32 43 | 
{9147C64B-6A7E-491E-ADC3-12FD0178CA34}.DebugDXR|x86.Build.0 = Debug|Win32 44 | {9147C64B-6A7E-491E-ADC3-12FD0178CA34}.DebugVK|x64.ActiveCfg = Debug|x64 45 | {9147C64B-6A7E-491E-ADC3-12FD0178CA34}.DebugVK|x64.Build.0 = Debug|x64 46 | {9147C64B-6A7E-491E-ADC3-12FD0178CA34}.DebugVK|x86.ActiveCfg = Debug|Win32 47 | {9147C64B-6A7E-491E-ADC3-12FD0178CA34}.DebugVK|x86.Build.0 = Debug|Win32 48 | {9147C64B-6A7E-491E-ADC3-12FD0178CA34}.Release|x64.ActiveCfg = Release|x64 49 | {9147C64B-6A7E-491E-ADC3-12FD0178CA34}.Release|x64.Build.0 = Release|x64 50 | {9147C64B-6A7E-491E-ADC3-12FD0178CA34}.Release|x86.ActiveCfg = Release|Win32 51 | {9147C64B-6A7E-491E-ADC3-12FD0178CA34}.Release|x86.Build.0 = Release|Win32 52 | {9147C64B-6A7E-491E-ADC3-12FD0178CA34}.ReleaseD3D12|x64.ActiveCfg = Release|x64 53 | {9147C64B-6A7E-491E-ADC3-12FD0178CA34}.ReleaseD3D12|x64.Build.0 = Release|x64 54 | {9147C64B-6A7E-491E-ADC3-12FD0178CA34}.ReleaseD3D12|x86.ActiveCfg = Release|Win32 55 | {9147C64B-6A7E-491E-ADC3-12FD0178CA34}.ReleaseD3D12|x86.Build.0 = Release|Win32 56 | {9147C64B-6A7E-491E-ADC3-12FD0178CA34}.ReleaseDXR|x64.ActiveCfg = Release|x64 57 | {9147C64B-6A7E-491E-ADC3-12FD0178CA34}.ReleaseDXR|x64.Build.0 = Release|x64 58 | {9147C64B-6A7E-491E-ADC3-12FD0178CA34}.ReleaseDXR|x86.ActiveCfg = Release|Win32 59 | {9147C64B-6A7E-491E-ADC3-12FD0178CA34}.ReleaseDXR|x86.Build.0 = Release|Win32 60 | {9147C64B-6A7E-491E-ADC3-12FD0178CA34}.ReleaseVK|x64.ActiveCfg = Release|x64 61 | {9147C64B-6A7E-491E-ADC3-12FD0178CA34}.ReleaseVK|x64.Build.0 = Release|x64 62 | {9147C64B-6A7E-491E-ADC3-12FD0178CA34}.ReleaseVK|x86.ActiveCfg = Release|Win32 63 | {9147C64B-6A7E-491E-ADC3-12FD0178CA34}.ReleaseVK|x86.Build.0 = Release|Win32 64 | {3B602F0E-3834-4F73-B97D-7DFC91597A98}.Debug|x64.ActiveCfg = DebugDXR|x64 65 | {3B602F0E-3834-4F73-B97D-7DFC91597A98}.Debug|x64.Build.0 = DebugDXR|x64 66 | {3B602F0E-3834-4F73-B97D-7DFC91597A98}.Debug|x86.ActiveCfg = DebugDXR|x64 67 | {3B602F0E-3834-4F73-B97D-7DFC91597A98}.Debug|x86.Build.0 = DebugDXR|x64 68 | {3B602F0E-3834-4F73-B97D-7DFC91597A98}.DebugD3D12|x64.ActiveCfg = DebugD3D12|x64 69 | {3B602F0E-3834-4F73-B97D-7DFC91597A98}.DebugD3D12|x64.Build.0 = DebugD3D12|x64 70 | {3B602F0E-3834-4F73-B97D-7DFC91597A98}.DebugD3D12|x86.ActiveCfg = DebugD3D12|x64 71 | {3B602F0E-3834-4F73-B97D-7DFC91597A98}.DebugDXR|x64.ActiveCfg = DebugDXR|x64 72 | {3B602F0E-3834-4F73-B97D-7DFC91597A98}.DebugDXR|x64.Build.0 = DebugDXR|x64 73 | {3B602F0E-3834-4F73-B97D-7DFC91597A98}.DebugDXR|x86.ActiveCfg = DebugDXR|x64 74 | {3B602F0E-3834-4F73-B97D-7DFC91597A98}.DebugVK|x64.ActiveCfg = DebugVK|x64 75 | {3B602F0E-3834-4F73-B97D-7DFC91597A98}.DebugVK|x64.Build.0 = DebugVK|x64 76 | {3B602F0E-3834-4F73-B97D-7DFC91597A98}.DebugVK|x86.ActiveCfg = DebugVK|x64 77 | {3B602F0E-3834-4F73-B97D-7DFC91597A98}.Release|x64.ActiveCfg = ReleaseVK|x64 78 | {3B602F0E-3834-4F73-B97D-7DFC91597A98}.Release|x64.Build.0 = ReleaseVK|x64 79 | {3B602F0E-3834-4F73-B97D-7DFC91597A98}.Release|x86.ActiveCfg = DebugDXR|x64 80 | {3B602F0E-3834-4F73-B97D-7DFC91597A98}.Release|x86.Build.0 = DebugDXR|x64 81 | {3B602F0E-3834-4F73-B97D-7DFC91597A98}.ReleaseD3D12|x64.ActiveCfg = ReleaseD3D12|x64 82 | {3B602F0E-3834-4F73-B97D-7DFC91597A98}.ReleaseD3D12|x64.Build.0 = ReleaseD3D12|x64 83 | {3B602F0E-3834-4F73-B97D-7DFC91597A98}.ReleaseD3D12|x86.ActiveCfg = ReleaseD3D12|x64 84 | {3B602F0E-3834-4F73-B97D-7DFC91597A98}.ReleaseDXR|x64.ActiveCfg = ReleaseDXR|x64 85 | {3B602F0E-3834-4F73-B97D-7DFC91597A98}.ReleaseDXR|x64.Build.0 = ReleaseDXR|x64 86 | 
{3B602F0E-3834-4F73-B97D-7DFC91597A98}.ReleaseDXR|x86.ActiveCfg = ReleaseDXR|x64 87 | {3B602F0E-3834-4F73-B97D-7DFC91597A98}.ReleaseVK|x64.ActiveCfg = ReleaseVK|x64 88 | {3B602F0E-3834-4F73-B97D-7DFC91597A98}.ReleaseVK|x64.Build.0 = ReleaseVK|x64 89 | {3B602F0E-3834-4F73-B97D-7DFC91597A98}.ReleaseVK|x86.ActiveCfg = ReleaseVK|x64 90 | {CD4603EF-8479-4199-87A6-3FD6FD6383CC}.Debug|x64.ActiveCfg = Debug|x64 91 | {CD4603EF-8479-4199-87A6-3FD6FD6383CC}.Debug|x64.Build.0 = Debug|x64 92 | {CD4603EF-8479-4199-87A6-3FD6FD6383CC}.Debug|x86.ActiveCfg = Debug|Win32 93 | {CD4603EF-8479-4199-87A6-3FD6FD6383CC}.Debug|x86.Build.0 = Debug|Win32 94 | {CD4603EF-8479-4199-87A6-3FD6FD6383CC}.DebugD3D12|x64.ActiveCfg = Debug|x64 95 | {CD4603EF-8479-4199-87A6-3FD6FD6383CC}.DebugD3D12|x64.Build.0 = Debug|x64 96 | {CD4603EF-8479-4199-87A6-3FD6FD6383CC}.DebugD3D12|x86.ActiveCfg = Debug|Win32 97 | {CD4603EF-8479-4199-87A6-3FD6FD6383CC}.DebugD3D12|x86.Build.0 = Debug|Win32 98 | {CD4603EF-8479-4199-87A6-3FD6FD6383CC}.DebugDXR|x64.ActiveCfg = Debug|x64 99 | {CD4603EF-8479-4199-87A6-3FD6FD6383CC}.DebugDXR|x64.Build.0 = Debug|x64 100 | {CD4603EF-8479-4199-87A6-3FD6FD6383CC}.DebugDXR|x86.ActiveCfg = Debug|Win32 101 | {CD4603EF-8479-4199-87A6-3FD6FD6383CC}.DebugDXR|x86.Build.0 = Debug|Win32 102 | {CD4603EF-8479-4199-87A6-3FD6FD6383CC}.DebugVK|x64.ActiveCfg = Debug|x64 103 | {CD4603EF-8479-4199-87A6-3FD6FD6383CC}.DebugVK|x64.Build.0 = Debug|x64 104 | {CD4603EF-8479-4199-87A6-3FD6FD6383CC}.DebugVK|x86.ActiveCfg = Debug|Win32 105 | {CD4603EF-8479-4199-87A6-3FD6FD6383CC}.DebugVK|x86.Build.0 = Debug|Win32 106 | {CD4603EF-8479-4199-87A6-3FD6FD6383CC}.Release|x64.ActiveCfg = Release|x64 107 | {CD4603EF-8479-4199-87A6-3FD6FD6383CC}.Release|x64.Build.0 = Release|x64 108 | {CD4603EF-8479-4199-87A6-3FD6FD6383CC}.Release|x86.ActiveCfg = Release|Win32 109 | {CD4603EF-8479-4199-87A6-3FD6FD6383CC}.Release|x86.Build.0 = Release|Win32 110 | {CD4603EF-8479-4199-87A6-3FD6FD6383CC}.ReleaseD3D12|x64.ActiveCfg = Release|x64 111 | {CD4603EF-8479-4199-87A6-3FD6FD6383CC}.ReleaseD3D12|x64.Build.0 = Release|x64 112 | {CD4603EF-8479-4199-87A6-3FD6FD6383CC}.ReleaseD3D12|x86.ActiveCfg = Release|Win32 113 | {CD4603EF-8479-4199-87A6-3FD6FD6383CC}.ReleaseD3D12|x86.Build.0 = Release|Win32 114 | {CD4603EF-8479-4199-87A6-3FD6FD6383CC}.ReleaseDXR|x64.ActiveCfg = Release|x64 115 | {CD4603EF-8479-4199-87A6-3FD6FD6383CC}.ReleaseDXR|x64.Build.0 = Release|x64 116 | {CD4603EF-8479-4199-87A6-3FD6FD6383CC}.ReleaseDXR|x86.ActiveCfg = Release|Win32 117 | {CD4603EF-8479-4199-87A6-3FD6FD6383CC}.ReleaseDXR|x86.Build.0 = Release|Win32 118 | {CD4603EF-8479-4199-87A6-3FD6FD6383CC}.ReleaseVK|x64.ActiveCfg = Release|x64 119 | {CD4603EF-8479-4199-87A6-3FD6FD6383CC}.ReleaseVK|x64.Build.0 = Release|x64 120 | {CD4603EF-8479-4199-87A6-3FD6FD6383CC}.ReleaseVK|x86.ActiveCfg = Release|Win32 121 | {CD4603EF-8479-4199-87A6-3FD6FD6383CC}.ReleaseVK|x86.Build.0 = Release|Win32 122 | EndGlobalSection 123 | GlobalSection(SolutionProperties) = preSolution 124 | HideSolutionNode = FALSE 125 | EndGlobalSection 126 | GlobalSection(ExtensibilityGlobals) = postSolution 127 | SolutionGuid = {F81367E7-3A76-4AD3-8128-59446E5F46AD} 128 | EndGlobalSection 129 | EndGlobal 130 | -------------------------------------------------------------------------------- /FalcorPathTracer.vcxproj: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | Debug 6 | Win32 7 | 8 | 9 | Release 10 | Win32 11 | 12 | 13 | Debug 14 | x64 15 | 16 | 17 | Release 18 | x64 19 | 20 | 21 | 
22 | 15.0 23 | {9147C64B-6A7E-491E-ADC3-12FD0178CA34} 24 | FalcorPathTracer 25 | 10.0.17134.0 26 | 27 | 28 | 29 | Application 30 | true 31 | v141 32 | MultiByte 33 | 34 | 35 | Application 36 | false 37 | v141 38 | true 39 | MultiByte 40 | 41 | 42 | Application 43 | true 44 | v141 45 | MultiByte 46 | 47 | 48 | Application 49 | false 50 | v141 51 | true 52 | MultiByte 53 | 54 | 55 | 56 | 57 | 58 | 59 | 60 | 61 | 62 | 63 | 64 | 65 | 66 | 67 | 68 | 69 | 70 | 71 | 72 | 73 | 74 | 75 | 76 | 77 | 78 | 79 | Level3 80 | Disabled 81 | true 82 | true 83 | 84 | 85 | 86 | 87 | Level3 88 | Disabled 89 | true 90 | true 91 | 92 | 93 | 94 | 95 | Level3 96 | MaxSpeed 97 | true 98 | true 99 | true 100 | true 101 | 102 | 103 | true 104 | true 105 | 106 | 107 | 108 | 109 | Level3 110 | MaxSpeed 111 | true 112 | true 113 | true 114 | true 115 | 116 | 117 | true 118 | true 119 | 120 | 121 | 122 | 123 | {3b602f0e-3834-4f73-b97d-7dfc91597a98} 124 | 125 | 126 | 127 | 128 | 129 | 130 | 131 | true 132 | true 133 | true 134 | true 135 | 136 | 137 | 138 | 139 | true 140 | true 141 | true 142 | true 143 | 144 | 145 | 146 | 147 | true 148 | true 149 | true 150 | true 151 | 152 | 153 | 154 | 155 | 156 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2018 Alan Wolfe 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
22 | -------------------------------------------------------------------------------- /MakeGeoHelper/MakeGeoHelper.vcxproj: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | Debug 6 | Win32 7 | 8 | 9 | Release 10 | Win32 11 | 12 | 13 | Debug 14 | x64 15 | 16 | 17 | Release 18 | x64 19 | 20 | 21 | 22 | 15.0 23 | {CD4603EF-8479-4199-87A6-3FD6FD6383CC} 24 | MakeGeoHelper 25 | 10.0.17134.0 26 | 27 | 28 | 29 | Application 30 | true 31 | v141 32 | MultiByte 33 | 34 | 35 | Application 36 | false 37 | v141 38 | true 39 | MultiByte 40 | 41 | 42 | Application 43 | true 44 | v141 45 | MultiByte 46 | 47 | 48 | Application 49 | false 50 | v141 51 | true 52 | MultiByte 53 | 54 | 55 | 56 | 57 | 58 | 59 | 60 | 61 | 62 | 63 | 64 | 65 | 66 | 67 | 68 | 69 | 70 | 71 | 72 | 73 | 74 | 75 | Level3 76 | MaxSpeed 77 | true 78 | true 79 | true 80 | true 81 | 82 | 83 | true 84 | true 85 | 86 | 87 | 88 | 89 | Level3 90 | Disabled 91 | true 92 | true 93 | 94 | 95 | 96 | 97 | Level3 98 | Disabled 99 | true 100 | true 101 | 102 | 103 | 104 | 105 | Level3 106 | MaxSpeed 107 | true 108 | true 109 | true 110 | true 111 | 112 | 113 | true 114 | true 115 | 116 | 117 | 118 | 119 | 120 | 121 | 122 | 123 | -------------------------------------------------------------------------------- /MakeGeoHelper/Sandbox.cpp: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Atrix256/FalcorPathTracer/c63896e8c30aad7b962e1999b4d0aefa4020328d/MakeGeoHelper/Sandbox.cpp -------------------------------------------------------------------------------- /main.cpp: -------------------------------------------------------------------------------- 1 | #include "Falcor.h" 2 | #include "SampleTest.h" 3 | #include 4 | #include 5 | 6 | #define ANIMATION_TRACK 0 7 | /* 8 | Animation Tracks: 9 | 0 = off 10 | 1 = Face and Bokeh Scene: Adjust Focal Length (pinhole) 11 | 2 = Face and Bokeh Scene: Adjust Aperature Size (pinhole) 12 | 3 = Face and Bokeh Scene: Adjust Focal Length (lens) 13 | 4 = Face and Bokeh Scene: Adjust Aperature Size (lens) 14 | */ 15 | 16 | static const size_t c_animationSamplesPerFrame = 1000; 17 | static const size_t c_animationNumFrames = 60; 18 | 19 | static const size_t c_width = 800; 20 | static const size_t c_height = 600; 21 | 22 | using namespace Falcor; 23 | 24 | static const float c_pi = 3.14159265359f; 25 | 26 | struct Sphere 27 | { 28 | float3 position; 29 | float radius; 30 | float3 albedo; 31 | float3 emissive; 32 | uint geoID; 33 | }; 34 | 35 | struct Quad 36 | { 37 | float3 a, b, c, d; 38 | float3 normal; 39 | float3 albedo; 40 | float3 emissive; 41 | uint geoID; 42 | }; 43 | 44 | struct PLight 45 | { 46 | float3 position; 47 | float3 color; 48 | }; 49 | 50 | // keep in sync with defines in pathtrace.hlsl 51 | enum class BokehShape : uint32 52 | { 53 | Circle, 54 | CircleG, 55 | Square, 56 | Ring, 57 | Triangle, 58 | SOD, 59 | 60 | Count 61 | }; 62 | 63 | const char* BokehShapeNames[] = 64 | { 65 | "Circle - Uniform", 66 | "Circle - Gaussian", 67 | "Square", 68 | "Ring", 69 | "Triangle", 70 | "Star of David" 71 | }; 72 | static_assert(countof(BokehShapeNames) == (uint)BokehShape::Count, "Wrong number of entries in BokehShapeNames"); 73 | 74 | struct PTScene 75 | { 76 | glm::vec3 cameraPos; 77 | float yaw; 78 | float pitch; 79 | 80 | BokehShape DOFBokehShape; 81 | float DOFFocalLength; 82 | float DOFApertureRadius; 83 | 84 | float3 skyColor; 85 | 86 | std::vector spheres; 87 | std::vector lightSpheres; 88 | 
std::vector quads; 89 | std::vector pLights; 90 | }; 91 | 92 | PTScene Scene_CornellBox = 93 | { 94 | // camera position 95 | { 2.780f, 2.730f, -8.0f }, 96 | 97 | // yaw and pitch 98 | 90.0f, 99 | 0.0f, 100 | 101 | // DOF bokeh shape, Focal length and aperture radius 102 | BokehShape::Circle, 103 | 8.0f, 104 | 0.1f, 105 | 106 | // sky color 107 | { 0.01f, 0.01f, 0.01f }, 108 | 109 | // spheres 110 | { 111 | {{ 1.5f, 1.5f, 2.5f }, 0.8f, { 1.0f, 1.0f, 1.0f }, { 0.0f, 0.0f, 0.0f }}, 112 | {{ 4.5f, 1.5f, 2.5f }, 0.8f, { 0.1f, 1.0f, 1.0f }, { 0.0f, 0.0f, 0.0f }}, 113 | {{ 2.0f, 3.5f, 3.5f }, 0.8f, { 1.0f, 0.1f, 1.0f }, { 0.0f, 0.0f, 0.0f }}, 114 | {{ 3.0f, 1.5f, 4.5f }, 0.8f, { 1.0f, 1.0f, 0.1f }, { 0.0f, 0.0f, 0.0f }}, 115 | }, 116 | 117 | // light spheres 118 | { 119 | {{ 5.0f, 0.5f, 1.0f }, 0.2f, { 1.0f, 1.0f, 0.0f }, { 1.0f, 25.0f, 25.0f }}, 120 | {{ 0.4f, 1.5f, 1.0f }, 0.2f, { 1.0f, 1.0f, 0.0f }, { 25.0f, 1.0f, 25.0f }}, 121 | {{ 4.0f, 3.5f, 4.0f }, 0.2f, { 1.0f, 1.0f, 0.0f }, { 25.0f, 25.0f, 1.0f }}, 122 | }, 123 | 124 | // quads 125 | { 126 | // floor 127 | {{ 5.528f, 0.0f, 0.0f }, { 0.0f, 0.0f, 0.0f }, { 0.0f, 0.0f, 5.592f },{ 5.496f, 0.0f, 5.592f }, {0.0f, 0.0f, 0.0f},{ 1.0f, 1.0f, 1.0f }, { 0.0f, 0.0f, 0.0f }}, 128 | 129 | // Light 130 | //{{ 3.430f, 5.486f, 2.270f },{ 3.43f, 5.486f, 3.32f },{ 2.13f, 5.486f, 3.32f },{ 2.13f, 5.486f, 2.27f },{ 0.0f, 0.0f, 0.0f },{ 0.78f, 0.78f, 0.78f },{25.0f, 25.0f, 25.0f}}, 131 | 132 | // Cieling 133 | {{ 5.560f, 5.488f, 0.0f },{ 5.56f, 5.488f, 5.592f },{ 0.0f, 5.488f, 5.592f },{ 0.0f, 5.488f, 0.0f },{ 0.0f, 0.0f, 0.0f },{ 1.0f, 1.0f, 1.0f },{0.0f, 0.0f, 0.0f}}, 134 | 135 | // back wall 136 | {{5.496f, 0.0f, 5.592f},{ 0.0f, 0.0f, 5.592f},{ 0.0f, 5.488f, 5.592f},{5.56f, 5.488f, 5.592f},{ 0.0f, 0.0f, 0.0f },{ 1.0f, 1.0f, 1.0f },{ 0.0f, 0.0f, 0.0f }}, 137 | 138 | // left wall 139 | {{0.0f, 0.0f, 5.592f},{0.0f, 0.0f, 0.0f},{0.0f, 5.488f, 0.0f},{0.0f, 5.488f, 5.592f},{ 0.0f, 0.0f, 0.0f },{ 0.0f, 1.0f, 0.0f },{ 0.0f, 0.0f, 0.0f }}, 140 | 141 | // right wall 142 | {{5.528f, 0.0f, 0.0f},{5.496f, 0.0f, 5.592f},{5.56f, 5.488f, 5.592f},{5.56f, 5.488f, 0.0f},{ 0.0f, 0.0f, 0.0f },{ 1.0f, 0.0f, 0.0f },{ 0.0f, 0.0f, 0.0f }}, 143 | }, 144 | 145 | // point lights 146 | { 147 | {{ 2.0f, 1.5f, 0.5f },{ 0.0f, 0.0f, 1.0f }} 148 | }, 149 | }; 150 | 151 | PTScene Scene_FaceAndBokeh = 152 | { 153 | // camera position 154 | { 0.0f, 5.0f, -16.0f }, 155 | 156 | // yaw and pitch 157 | -270.0f, 158 | -1.5f, 159 | 160 | // DOF bokeh shape, Focal length and aperture radius 161 | BokehShape::SOD, 162 | 16.0f, 163 | 1.0f, 164 | 165 | // sky color 166 | { 0.01f, 0.01f, 0.01f }, 167 | 168 | // spheres 169 | { 170 | // head 171 | { { -1.0f, 2.0f, 2.0f },3.0f,{ 1.0f, 0.25f, 0.25f },{ 0.0f,0.0f,0.0f } }, 172 | { { 1.0f, 1.5f, -1.0f },1.5f,{ 0.25f, 1.0f, 0.25f },{ 0.0f,0.0f,0.0f } }, 173 | 174 | { { -0.8f, 3.5f, -0.8f },1.0f,{ 1.0f, 1.0f, 1.0f },{ 0.0f,0.0f,0.0f } }, 175 | { { 1.6f, 3.5f, 0.8f },1.0f,{ 1.0f, 1.0f, 1.0f },{ 0.0f,0.0f,0.0f } }, 176 | 177 | { { -0.6f, 3.5f, -1.0f },0.75f,{ 0.1f, 0.1f, 0.1f },{ 0.0f,0.0f,0.0f } }, 178 | { { 1.8f, 3.5f, 0.6f },0.75f,{ 0.1f, 0.1f, 0.1f },{ 0.0f,0.0f,0.0f } }, 179 | 180 | 181 | // background spheres 182 | { { 4.0f, 2.0f, 0.0f },1.0f,{ 0.25f, 0.25f, 1.0f },{ 0.0f,0.0f,0.0f } }, 183 | 184 | { { 6.0f, 2.0f, 4.0f },1.0f,{ 0.25f, 0.25f, 1.0f },{ 0.0f,0.0f,0.0f } }, 185 | 186 | { { 8.0f, 2.0f, 8.0f },1.0f,{ 0.25f, 0.25f, 1.0f },{ 0.0f,0.0f,0.0f } }, 187 | 188 | { { 10.0f, 2.0f, 12.0f },1.0f,{ 0.25f, 0.25f, 1.0f },{ 0.0f,0.0f,0.0f } }, 189 | 190 | { { 
12.0f, 2.0f, 16.0f },1.0f,{ 0.25f, 0.25f, 1.0f },{ 0.0f,0.0f,0.0f } }, 191 | 192 | { { 14.0f, 2.0f, 20.0f },1.0f,{ 0.25f, 0.25f, 1.0f },{ 0.0f,0.0f,0.0f } }, 193 | 194 | { { 16.0f, 2.0f, 24.0f },1.0f,{ 0.25f, 0.25f, 1.0f },{ 0.0f,0.0f,0.0f } }, 195 | 196 | { { 18.0f, 2.0f, 28.0f },1.0f,{ 0.25f, 0.25f, 1.0f },{ 0.0f,0.0f,0.0f } }, 197 | 198 | { { 20.0f, 2.0f, 32.0f },1.0f,{ 0.25f, 0.25f, 1.0f },{ 0.0f,0.0f,0.0f } }, 199 | 200 | { { 22.0f, 2.0f, 36.0f },1.0f,{ 0.25f, 0.25f, 1.0f },{ 0.0f,0.0f,0.0f } }, 201 | 202 | { { 24.0f, 2.0f, 40.0f },1.0f,{ 0.25f, 0.25f, 1.0f },{ 0.0f,0.0f,0.0f } }, 203 | 204 | 205 | // foreground spheres 206 | 207 | { { -3.0f, 2.0f, -2.0f },1.0f,{ 0.25f, 0.25f, 1.0f },{ 0.0f,0.0f,0.0f } }, 208 | 209 | { { -4.0f, 2.0f, -4.0f },1.0f,{ 0.25f, 0.25f, 1.0f },{ 0.0f,0.0f,0.0f } }, 210 | 211 | { { -5.0f, 2.0f, -6.0f },1.0f,{ 0.25f, 0.25f, 1.0f },{ 0.0f,0.0f,0.0f } }, 212 | }, 213 | 214 | // light spheres 215 | { 216 | { { -4.0f, 8.0f, -1.0f},0.5f,{ 0.0f,0.0f,0.0f },{ 200.0f / 3.0f, 100.0f / 3.0f, 100.0f / 3.0f } }, 217 | { { 7.0f, 9.0f, -2.0f},0.5f,{ 0.0f,0.0f,0.0f },{ 100.0f / 3.0f, 200.0f / 3.0f, 100.0f / 3.0f } }, 218 | { { 2.0f, 12.0f, 6.0f},0.5f,{ 0.0f,0.0f,0.0f },{ 100.0f / 3.0f, 100.0f / 3.0f, 200.0f / 3.0f } }, 219 | 220 | 221 | {{22.49f,15.34f,48.69f},0.22f,{0.0f,0.0f,0.0f},{56.18f,60.93f,36.63f}}, 222 | {{-5.84f,15.38f,32.09f},0.27f,{0.0f,0.0f,0.0f},{20.18f,4.83f,29.09f}}, 223 | {{-4.21f,4.72f,58.04f},0.27f,{0.0f,0.0f,0.0f},{42.33f,42.99f,48.20f}}, 224 | {{-19.26f,12.98f,49.72f},0.19f,{0.0f,0.0f,0.0f},{47.91f,59.19f,54.09f}}, 225 | {{17.93f,12.47f,47.98f},0.15f,{0.0f,0.0f,0.0f},{9.02f,30.82f,32.28f}}, 226 | {{-24.54f,19.56f,40.60f},0.30f,{0.0f,0.0f,0.0f},{10.86f,32.18f,22.76f}}, 227 | {{-6.36f,13.52f,44.59f},0.29f,{0.0f,0.0f,0.0f},{44.18f,45.12f,65.58f}}, 228 | {{17.06f,5.17f,47.90f},0.18f,{0.0f,0.0f,0.0f},{11.27f,16.52f,16.08f}}, 229 | {{9.59f,16.46f,52.93f},0.26f,{0.0f,0.0f,0.0f},{23.62f,5.81f,39.09f}}, 230 | {{23.42f,15.64f,36.89f},0.14f,{0.0f,0.0f,0.0f},{12.54f,5.65f,10.91f}}, 231 | {{-24.46f,12.72f,55.70f},0.21f,{0.0f,0.0f,0.0f},{20.20f,16.17f,5.96f}}, 232 | {{16.94f,4.11f,57.63f},0.26f,{0.0f,0.0f,0.0f},{3.57f,10.80f,24.26f}}, 233 | {{26.09f,4.35f,30.40f},0.18f,{0.0f,0.0f,0.0f},{76.78f,10.68f,49.14f}}, 234 | {{29.38f,12.78f,41.91f},0.18f,{0.0f,0.0f,0.0f},{45.91f,48.66f,38.07f}}, 235 | {{17.15f,19.63f,56.44f},0.24f,{0.0f,0.0f,0.0f},{40.25f,39.04f,35.76f}}, 236 | {{-27.11f,11.20f,57.16f},0.23f,{0.0f,0.0f,0.0f},{48.02f,37.35f,48.05f}}, 237 | {{14.84f,7.79f,58.12f},0.23f,{0.0f,0.0f,0.0f},{20.68f,24.47f,11.51f}}, 238 | {{15.32f,4.82f,44.63f},0.20f,{0.0f,0.0f,0.0f},{26.22f,36.43f,39.77f}}, 239 | {{9.42f,11.09f,44.35f},0.23f,{0.0f,0.0f,0.0f},{4.11f,3.96f,23.77f}}, 240 | {{15.66f,5.08f,45.98f},0.21f,{0.0f,0.0f,0.0f},{16.72f,15.99f,18.70f}}, 241 | }, 242 | 243 | // quads 244 | { 245 | // floor 246 | {{ 100.0f, 0.0f, -100.0f }, { -100.0f, 0.0f, -100.0f }, { -100.0f, 0.0f, 100.0f },{ 100.0f, 0.0f, 100.0f }, {0.0f, 0.0f, 0.0f},{ 1.0f, 1.0f, 1.0f }, { 0.0f, 0.0f, 0.0f }}, 247 | }, 248 | 249 | // point lights 250 | { 251 | }, 252 | }; 253 | 254 | enum class PTScenes 255 | { 256 | CornellBox, 257 | FaceAndBokeh, 258 | 259 | Count 260 | }; 261 | 262 | const char* PTScenesNames[] = 263 | { 264 | "Cornell Box", 265 | "Face And Bokeh" 266 | }; 267 | static_assert(countof(PTScenesNames) == (uint)PTScenes::Count, "Wrong number of entries in PTScenesNames"); 268 | 269 | PTScene& GetScene(PTScenes scene) 270 | { 271 | switch (scene) 272 | { 273 | case PTScenes::CornellBox: return 
Scene_CornellBox; 274 | case PTScenes::FaceAndBokeh: return Scene_FaceAndBokeh; 275 | } 276 | static_assert((uint)PTScenes::Count == 2, "Unhandled enum value"); 277 | return Scene_CornellBox; 278 | } 279 | 280 | static float RandomFloat() 281 | { 282 | // from 0 to 1 283 | static std::random_device rd; 284 | static std::mt19937 mt(rd()); 285 | static std::uniform_real_distribution dist(0.0f, 1.0f); 286 | return dist(mt); 287 | } 288 | 289 | static uint32 RandomUint32() 290 | { 291 | // from 0 to 1 292 | static std::random_device rd; 293 | static std::mt19937 mt(rd()); 294 | static std::uniform_int_distribution dist(0,(uint32)-1); 295 | return dist(mt); 296 | } 297 | 298 | class Application : public Renderer 299 | { 300 | private: 301 | 302 | ComputeProgram::SharedPtr m_computeProgram; 303 | ComputeState::SharedPtr m_computeState; 304 | ComputeVars::SharedPtr m_computeVars; 305 | 306 | Texture::SharedPtr m_blueNoiseTexture; 307 | Texture::SharedPtr m_outputF32; 308 | Texture::SharedPtr m_outputU8; 309 | 310 | glm::mat4x4 m_projMtx; 311 | glm::mat4x4 m_viewMtx; 312 | glm::mat4x4 m_invViewProjMtx; 313 | 314 | bool m_keyState[256]; 315 | 316 | glm::vec3 m_cameraPos; 317 | 318 | glm::vec2 m_mouseDragPos; 319 | bool m_mouseDown = false; 320 | float m_yaw; 321 | float m_pitch; 322 | size_t m_frameCount = 0; 323 | size_t m_sampleCount = 0; 324 | float m_startTime = 0.0f; 325 | float m_stopTime = 0.0f; 326 | 327 | float3 m_skyColor; 328 | 329 | std::string m_animationMessage; 330 | 331 | // values controled by the UI 332 | float m_fov = 45.0f; 333 | int m_samplesPerFrame = 1; 334 | int m_maxRayBounces = 4; 335 | int m_StopAtSampleCount = 0; 336 | bool m_sampleLights = true; 337 | int m_workGroupSize = 8; 338 | 339 | bool m_pinholeCamera = false; 340 | bool m_DOFEnable = true; 341 | float m_DOFFocalLength = 8.0f; 342 | float m_DOFApertureRadius = 0.1f; 343 | float m_Exposure = 1.0f; 344 | BokehShape m_DOFBokehShape = BokehShape::SOD; 345 | 346 | PTScenes m_scene = PTScenes::FaceAndBokeh; 347 | 348 | // options to speed up rendering 349 | bool m_cosineWeightedhemisphereSampling = true; 350 | 351 | 352 | private: 353 | 354 | void ResetIntegration(SampleCallbacks* pSample) 355 | { 356 | m_frameCount = 0; 357 | m_sampleCount = 0; 358 | m_startTime = pSample->getCurrentTime(); 359 | } 360 | 361 | void UpdateProjectionMatrix(SampleCallbacks* pSample) 362 | { 363 | uint32_t width = pSample->getWindow()->getClientAreaWidth(); 364 | uint32_t height = pSample->getWindow()->getClientAreaHeight(); 365 | 366 | m_projMtx = glm::perspective(glm::radians(m_fov), float(width) / float(height), 0.1f, 100.0f); 367 | 368 | UpdateViewMatrix(pSample); 369 | } 370 | 371 | void UpdateViewMatrix(SampleCallbacks* pSample) 372 | { 373 | glm::vec3 forward; 374 | forward.x = cos(glm::radians(m_pitch)) * cos(glm::radians(m_yaw)); 375 | forward.y = sin(glm::radians(m_pitch)); 376 | forward.z = cos(glm::radians(m_pitch)) * sin(glm::radians(m_yaw)); 377 | 378 | m_viewMtx = glm::lookAtLH(m_cameraPos, m_cameraPos + forward, glm::vec3(0.0f, 1.0f, 0.0f)); 379 | 380 | glm::mat4x4 viewProjMtx = m_projMtx * m_viewMtx; 381 | m_invViewProjMtx = glm::inverse(viewProjMtx); 382 | 383 | ResetIntegration(pSample); 384 | } 385 | 386 | public: 387 | 388 | void onGuiRender(SampleCallbacks* pSample, Gui* pGui) 389 | { 390 | { 391 | Falcor::Gui::DropdownList scenes; 392 | for (uint i = 0; i < (uint)PTScenes::Count; ++i) 393 | { 394 | Falcor::Gui::DropdownValue v; 395 | v.label = PTScenesNames[i]; 396 | v.value = i; 397 | scenes.push_back(v); 398 | } 
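// (the addDropdown call below writes the selected value straight through the
//  *(uint32*)&m_scene cast; this assumes the enum class and uint32 share size and
//  representation, which is true here but worth remembering if the enums ever get a
//  narrower explicit underlying type.)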
399 | if (pGui->addDropdown("Scene", scenes, *(uint32*)&m_scene)) 400 | OnChangeScene(pSample); 401 | } 402 | 403 | if (pGui->addButton("Reset Render")) 404 | ResetIntegration(pSample); 405 | 406 | // the main features of this demo 407 | if (pGui->beginGroup("Camera", true)) 408 | { 409 | if (pGui->addCheckBox("Enable Depth Of Field", m_DOFEnable)) 410 | ResetIntegration(pSample); 411 | 412 | if (pGui->addCheckBox("Pinhole Camera", m_pinholeCamera)) 413 | ResetIntegration(pSample); 414 | 415 | if (pGui->addFloatVar("DOF Focal Length", m_DOFFocalLength)) 416 | ResetIntegration(pSample); 417 | 418 | if (pGui->addFloatVar("DOF Aperture Size", m_DOFApertureRadius)) 419 | ResetIntegration(pSample); 420 | 421 | pGui->addFloatVar("Exposure", m_Exposure); 422 | 423 | { 424 | Falcor::Gui::DropdownList bokehShapes; 425 | for (uint i = 0; i < (uint)BokehShape::Count; ++i) 426 | { 427 | Falcor::Gui::DropdownValue v; 428 | v.label = BokehShapeNames[i]; 429 | v.value = i; 430 | bokehShapes.push_back(v); 431 | } 432 | if (pGui->addDropdown("DOF Bokeh Shape", bokehShapes, *(uint32*)&m_DOFBokehShape)) 433 | ResetIntegration(pSample); 434 | } 435 | pGui->endGroup(); 436 | } 437 | 438 | if (pGui->beginGroup("Other Settings")) 439 | { 440 | if (pGui->addCheckBox("Use Cosine Weighted Hemisphere Samples", m_cosineWeightedhemisphereSampling)) 441 | ResetIntegration(pSample); 442 | 443 | if (pGui->addFloatVar("FOV", m_fov, 1.0f, 180.0f, 1.0f)) 444 | UpdateProjectionMatrix(pSample); 445 | 446 | if (pGui->addCheckBox("Explicit Light Sampling", m_sampleLights)) 447 | ResetIntegration(pSample); 448 | 449 | pGui->addIntVar("Stop At Sample Count", m_StopAtSampleCount, 0); 450 | 451 | pGui->addIntVar("Work Group Size", m_workGroupSize, 1); 452 | 453 | pGui->addIntVar("Samples Per Frame", m_samplesPerFrame, 1, 10); 454 | 455 | if (pGui->addIntVar("Max Ray Bounces", m_maxRayBounces, 1, 10)) 456 | ResetIntegration(pSample); 457 | pGui->endGroup(); 458 | } 459 | 460 | uint32_t width = pSample->getWindow()->getClientAreaWidth(); 461 | uint32_t height = pSample->getWindow()->getClientAreaHeight(); 462 | 463 | char buffer[256]; 464 | sprintf(buffer, "%zu samples", m_sampleCount); 465 | pGui->addText(buffer); 466 | size_t rayCount = m_sampleCount * size_t(width) * size_t(height); 467 | sprintf(buffer, "%f M primary rays", double(rayCount) / 1000000.0); 468 | pGui->addText(buffer); 469 | 470 | float duration = m_stopTime - m_startTime; 471 | if (duration == 0.0f) 472 | duration = pSample->getLastFrameTime(); 473 | 474 | sprintf(buffer, "%f seconds", duration); 475 | pGui->addText(buffer); 476 | 477 | double sps = double(m_sampleCount) / double(duration); 478 | sprintf(buffer, "%f samples per second", sps); 479 | pGui->addText(buffer); 480 | 481 | double rps = double(rayCount) / double(duration); 482 | rps /= 1000000.0; 483 | sprintf(buffer, "%f M primary rays per second", rps); 484 | pGui->addText(buffer); 485 | 486 | pGui->addSeparator(); 487 | sprintf(buffer, "Camera Pos: %f, %f, %f", m_cameraPos.x, m_cameraPos.y, m_cameraPos.z); 488 | pGui->addText(buffer); 489 | sprintf(buffer, "Yaw, pitch = %f, %f", m_yaw, m_pitch); 490 | pGui->addText(buffer); 491 | } 492 | 493 | void OnChangeScene(SampleCallbacks* pSample) 494 | { 495 | PTScene& scene = GetScene(m_scene); 496 | 497 | m_computeVars->setStructuredBuffer("g_spheres", scene.spheres.size() == 0 ? nullptr : StructuredBuffer::create(m_computeProgram, "g_spheres", scene.spheres.size())); 498 | m_computeVars->setStructuredBuffer("g_lightSpheres", scene.lightSpheres.size() == 0 ? 
nullptr : StructuredBuffer::create(m_computeProgram, "g_lightSpheres", scene.lightSpheres.size())); 499 | m_computeVars->setStructuredBuffer("g_quads", scene.quads.size() == 0 ? nullptr : StructuredBuffer::create(m_computeProgram, "g_quads", scene.quads.size())); 500 | m_computeVars->setStructuredBuffer("g_plights", scene.pLights.size() == 0 ? nullptr : StructuredBuffer::create(m_computeProgram, "g_plights", scene.pLights.size())); 501 | 502 | std::fill(&m_keyState[0], &m_keyState[255], false); 503 | 504 | // calculate normals for quads 505 | for (uint i = 0; i < scene.quads.size(); ++i) 506 | { 507 | float3 ab = scene.quads[i].b - scene.quads[i].a; 508 | float3 ac = scene.quads[i].c - scene.quads[i].a; 509 | scene.quads[i].normal = normalize(-cross(ab, ac)); 510 | } 511 | 512 | // give all geo a geo id. 513 | uint nextID = 0; 514 | for (Sphere& s : scene.spheres) 515 | s.geoID = nextID++; 516 | for (Sphere& s : scene.lightSpheres) 517 | s.geoID = nextID++; 518 | for (Quad& q : scene.quads) 519 | q.geoID = nextID++; 520 | 521 | m_cameraPos = scene.cameraPos; 522 | m_skyColor = scene.skyColor; 523 | m_yaw = scene.yaw; 524 | m_pitch = scene.pitch; 525 | 526 | m_DOFBokehShape = scene.DOFBokehShape; 527 | m_DOFFocalLength = scene.DOFFocalLength; 528 | m_DOFApertureRadius = scene.DOFApertureRadius; 529 | 530 | UpdateViewMatrix(pSample); 531 | } 532 | 533 | void onLoad(SampleCallbacks* pSample, RenderContext::SharedPtr pContext) 534 | { 535 | m_computeProgram = ComputeProgram::createFromFile("pathtrace.hlsl", "main"); 536 | m_computeState = ComputeState::create(); 537 | m_computeState->setProgram(m_computeProgram); 538 | m_computeVars = ComputeVars::create(m_computeProgram->getReflector()); 539 | 540 | m_blueNoiseTexture = createTextureFromFile("Data/BlueNoise.bmp", false, false); 541 | m_computeVars->setTexture("gBlueNoiseTexture", m_blueNoiseTexture); 542 | 543 | OnChangeScene(pSample); 544 | } 545 | 546 | void UpdateCamera(SampleCallbacks* pSample) 547 | { 548 | static const float c_moveSpeed = 10.0f; 549 | 550 | glm::vec3 offset(0.0f, 0.0f, 0.0f); 551 | 552 | glm::vec4 forward = glm::vec4(0.0f, 0.0f, 1.0f, 0.0f) * m_viewMtx; 553 | glm::vec4 left = glm::vec4(-1.0f, 0.0f, 0.0f, 0.0f) * m_viewMtx; 554 | 555 | if (m_keyState['W']) 556 | offset += glm::vec3(forward); 557 | 558 | if (m_keyState['S']) 559 | offset -= glm::vec3(forward); 560 | 561 | if (m_keyState['A']) 562 | offset += glm::vec3(left); 563 | 564 | if (m_keyState['D']) 565 | offset -= glm::vec3(left); 566 | 567 | if (offset.x != 0 || offset.y != 0) 568 | { 569 | offset *= pSample->getLastFrameTime() * c_moveSpeed; 570 | m_cameraPos += offset; 571 | UpdateViewMatrix(pSample); 572 | } 573 | } 574 | 575 | static float Lerp(float A, float B, float t) 576 | { 577 | return A * (1.0f - t) + B * t; 578 | } 579 | 580 | template 581 | void AnimationLogic(SampleCallbacks* pSample, float timeSeconds) 582 | { 583 | 584 | } 585 | 586 | template <> 587 | void AnimationLogic<1>(SampleCallbacks* pSample, float percent) 588 | { 589 | // do initial setup 590 | if (percent == 0.0f) 591 | { 592 | m_scene = PTScenes::FaceAndBokeh; 593 | OnChangeScene(pSample); 594 | m_pinholeCamera = true; 595 | m_DOFApertureRadius = 0.001f; 596 | m_Exposure = 300000.0f; 597 | m_DOFBokehShape = BokehShape::Circle; 598 | } 599 | 600 | // do per frame logic 601 | float animationTime = sin(percent * c_pi * 2.0f) * 0.5f + 0.5f; 602 | m_DOFFocalLength = Lerp(0.5f, 5.0f, animationTime); 603 | 604 | // set the text 605 | std::ostringstream stringStream; 606 | stringStream << 
"Pinhole Camera\nFocal Length: " << m_DOFFocalLength << "\nAperture Radius: " << m_DOFApertureRadius << "\nExposure:" << m_Exposure; 607 | m_animationMessage = stringStream.str(); 608 | } 609 | 610 | template <> 611 | void AnimationLogic<2>(SampleCallbacks* pSample, float percent) 612 | { 613 | // do initial setup 614 | if (percent == 0.0f) 615 | { 616 | m_scene = PTScenes::FaceAndBokeh; 617 | OnChangeScene(pSample); 618 | m_pinholeCamera = true; 619 | m_DOFFocalLength = 1.0f; 620 | m_DOFApertureRadius = 0.001f; 621 | m_Exposure = 300000.0f; 622 | m_DOFBokehShape = BokehShape::Circle; 623 | } 624 | 625 | // do per frame logic 626 | float animationTime = sin(percent * c_pi * 2.0f) * 0.5f + 0.5f; 627 | m_DOFApertureRadius = Lerp(0.001f, 0.2f, animationTime); 628 | 629 | float invRadius = 1.0f / m_DOFApertureRadius; 630 | m_Exposure = (invRadius*invRadius) * 1.0f / c_pi; 631 | 632 | // set the text 633 | std::ostringstream stringStream; 634 | stringStream << "Pinhole Camera\nFocal Length: " << m_DOFFocalLength << "\nAperture Radius: " << m_DOFApertureRadius << "\nExposure:" << m_Exposure; 635 | m_animationMessage = stringStream.str(); 636 | } 637 | 638 | template <> 639 | void AnimationLogic<3>(SampleCallbacks* pSample, float percent) 640 | { 641 | // do initial setup 642 | if (percent == 0.0f) 643 | { 644 | m_scene = PTScenes::FaceAndBokeh; 645 | OnChangeScene(pSample); 646 | m_pinholeCamera = false; 647 | m_DOFApertureRadius = 1.0f; 648 | m_Exposure = 0.3f; 649 | m_DOFBokehShape = BokehShape::Circle; 650 | } 651 | 652 | // do per frame logic 653 | float animationTime = sin(percent * c_pi * 2.0f) * 0.5f + 0.5f; 654 | m_DOFFocalLength = Lerp(5.0f, 50.0f, animationTime); 655 | 656 | // set the text 657 | std::ostringstream stringStream; 658 | stringStream << "Lens Camera\nFocal Length: " << m_DOFFocalLength << "\nAperture Radius: " << m_DOFApertureRadius << "\nExposure:" << m_Exposure; 659 | m_animationMessage = stringStream.str(); 660 | } 661 | 662 | template <> 663 | void AnimationLogic<4>(SampleCallbacks* pSample, float percent) 664 | { 665 | // do initial setup 666 | if (percent == 0.0f) 667 | { 668 | m_scene = PTScenes::FaceAndBokeh; 669 | OnChangeScene(pSample); 670 | m_pinholeCamera = false; 671 | m_DOFBokehShape = BokehShape::Circle; 672 | } 673 | 674 | // do per frame logic 675 | float animationTime = sin(percent * c_pi * 2.0f) * 0.5f + 0.5f; 676 | m_DOFApertureRadius = Lerp(0.01f, 10.0f, animationTime); 677 | 678 | float invRadius = 1.0f / m_DOFApertureRadius; 679 | m_Exposure = (invRadius*invRadius) * 1.0f / c_pi; 680 | 681 | // set the text 682 | std::ostringstream stringStream; 683 | stringStream << "Lens Camera\nFocal Length: " << m_DOFFocalLength << "\nAperture Radius: " << m_DOFApertureRadius << "\nExposure:" << m_Exposure; 684 | m_animationMessage = stringStream.str(); 685 | } 686 | 687 | template 688 | void AnimationTrack(SampleCallbacks* pSample) 689 | { 690 | // On sample 0 do initial setup 691 | static size_t rawSampleIndex = 0; 692 | if (rawSampleIndex == 0) 693 | { 694 | pSample->toggleText(false); 695 | pSample->toggleUI(false); 696 | } 697 | 698 | // first couple samples seem to be black, so skip them. 699 | size_t sampleIndex = rawSampleIndex; 700 | if (sampleIndex > 2) 701 | sampleIndex -= 2; 702 | else 703 | sampleIndex = 0; 704 | 705 | // calculate where we are and stop doing animation logic / screen captures when we are done. 706 | // Note: can't shut down app because we have to wait for screen caps to finish. 
707 | size_t frame = sampleIndex / c_animationSamplesPerFrame; 708 | size_t nextFrame = (sampleIndex + 1) / c_animationSamplesPerFrame; 709 | if (frame == c_animationNumFrames) 710 | { 711 | return; 712 | } 713 | 714 | // do animation logic 715 | if (rawSampleIndex == 0 || frame != nextFrame) 716 | { 717 | float percent = float(nextFrame) / float(c_animationNumFrames-1); 718 | AnimationLogic(pSample, percent); 719 | } 720 | 721 | // handle writing a frame when it's done 722 | if (frame != nextFrame) 723 | { 724 | pSample->captureScreen("frame","out"); 725 | ResetIntegration(pSample); 726 | } 727 | 728 | ++rawSampleIndex; 729 | } 730 | 731 | template <> 732 | void AnimationTrack<0>(SampleCallbacks* pSample) 733 | { 734 | 735 | } 736 | 737 | void onFrameRender(SampleCallbacks* pSample, RenderContext::SharedPtr pContext, Fbo::SharedPtr pTargetFbo) 738 | { 739 | UpdateCamera(pSample); 740 | AnimationTrack(pSample); 741 | 742 | PTScene& scene = GetScene(m_scene); 743 | 744 | if (m_StopAtSampleCount > 0 && m_sampleCount >= m_StopAtSampleCount) 745 | { 746 | pContext->copyResource(pTargetFbo->getColorTexture(0).get(), m_outputU8.get()); 747 | return; 748 | } 749 | 750 | uint32_t width = pSample->getWindow()->getClientAreaWidth(); 751 | uint32_t height = pSample->getWindow()->getClientAreaHeight(); 752 | 753 | char buffer[256]; 754 | sprintf(buffer, "%i", m_samplesPerFrame); 755 | m_computeProgram->addDefine("SAMPLES_PER_FRAME", buffer); 756 | 757 | sprintf(buffer, "%i", m_sampleLights ? 1 : 0); 758 | m_computeProgram->addDefine("SAMPLE_LIGHTS", buffer); 759 | 760 | sprintf(buffer, "%i", m_maxRayBounces); 761 | m_computeProgram->addDefine("MAX_RAY_BOUNCES", buffer); 762 | 763 | sprintf(buffer, "%i", m_workGroupSize); 764 | m_computeProgram->addDefine("WORK_GROUP_SIZE", buffer); 765 | 766 | sprintf(buffer, "%u", (uint32)m_DOFBokehShape); 767 | m_computeProgram->addDefine("BOKEH_SHAPE", buffer); 768 | 769 | if (m_cosineWeightedhemisphereSampling) 770 | m_computeProgram->addDefine("COSINE_WEIGHTED_HEMISPHERE_SAMPLING"); 771 | else 772 | m_computeProgram->removeDefine("COSINE_WEIGHTED_HEMISPHERE_SAMPLING"); 773 | 774 | if (m_DOFEnable) 775 | m_computeProgram->addDefine("ENABLE_DOF"); 776 | else 777 | m_computeProgram->removeDefine("ENABLE_DOF"); 778 | 779 | if (m_pinholeCamera) 780 | m_computeProgram->addDefine("PINHOLE_CAMERA"); 781 | else 782 | m_computeProgram->removeDefine("PINHOLE_CAMERA"); 783 | 784 | ConstantBuffer::SharedPtr pShaderConstants = m_computeVars["ShaderConstants"]; 785 | pShaderConstants["invViewProjMtx"] = m_invViewProjMtx; 786 | pShaderConstants["viewMtx"] = m_viewMtx; 787 | pShaderConstants["invViewMtx"] = glm::inverse(m_viewMtx); 788 | pShaderConstants["skyColor"] = m_skyColor; 789 | pShaderConstants["lerpAmount"] = 1.0f / float(m_frameCount + 1); 790 | pShaderConstants["frameRand"] = (uint)RandomUint32(); 791 | pShaderConstants["frameNumber"] = (uint)m_frameCount; 792 | 793 | pShaderConstants["DOFFocalLength"] = m_DOFFocalLength; 794 | pShaderConstants["DOFApertureRadius"] = m_DOFApertureRadius; 795 | pShaderConstants["Exposure"] = m_Exposure; 796 | 797 | glm::vec4 cameraForward = glm::vec4(0.0f, 0.0f, 1.0f, 0.0f) * m_viewMtx; 798 | 799 | pShaderConstants["cameraPos"] = m_cameraPos; 800 | pShaderConstants["cameraRight"] = glm::vec3(glm::vec4(1.0f, 0.0f, 0.0f, 0.0f) * m_viewMtx); 801 | pShaderConstants["cameraUp"] = glm::vec3(glm::vec4(0.0f, 1.0f, 0.0f, 0.0f) * m_viewMtx); 802 | 803 | { 804 | float3 cameraSensorPlanePoint = m_cameraPos - glm::vec3(cameraForward); 805 | 806 | 
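// Build the camera sensor plane in the form dot(normal, p) + w = 0: the normal is the camera
// forward vector and the plane passes through a point one unit behind the camera, so
// w = -dot(normal, pointOnPlane). The focal plane below is built the same way, using the negated
// forward vector and a point DOFFocalLength in front of the camera.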
glm::vec4 sensorPlane = cameraForward; 807 | 808 | sensorPlane.w = -(sensorPlane.x * cameraSensorPlanePoint.x + sensorPlane.y * cameraSensorPlanePoint.y + sensorPlane.z * cameraSensorPlanePoint.z); 809 | 810 | pShaderConstants["sensorPlane"] = sensorPlane; 811 | } 812 | 813 | { 814 | float3 cameraFocalPlanePoint = m_cameraPos + glm::vec3(cameraForward) * m_DOFFocalLength; 815 | 816 | glm::vec4 focalPlane = -cameraForward; 817 | 818 | focalPlane.w = -(focalPlane.x * cameraFocalPlanePoint.x + focalPlane.y * cameraFocalPlanePoint.y + focalPlane.z * cameraFocalPlanePoint.z); 819 | 820 | pShaderConstants["focalPlane"] = focalPlane; 821 | } 822 | 823 | for (uint i = 0; i < scene.spheres.size(); ++i) 824 | { 825 | m_computeVars->getStructuredBuffer("g_spheres")[i]["position"] = scene.spheres[i].position; 826 | m_computeVars->getStructuredBuffer("g_spheres")[i]["radius"] = scene.spheres[i].radius; 827 | m_computeVars->getStructuredBuffer("g_spheres")[i]["albedo"] = scene.spheres[i].albedo; 828 | m_computeVars->getStructuredBuffer("g_spheres")[i]["emissive"] = scene.spheres[i].emissive; 829 | m_computeVars->getStructuredBuffer("g_spheres")[i]["geoID"] = scene.spheres[i].geoID; 830 | } 831 | 832 | for (uint i = 0; i < scene.lightSpheres.size(); ++i) 833 | { 834 | m_computeVars->getStructuredBuffer("g_lightSpheres")[i]["position"] = scene.lightSpheres[i].position; 835 | m_computeVars->getStructuredBuffer("g_lightSpheres")[i]["radius"] = scene.lightSpheres[i].radius; 836 | m_computeVars->getStructuredBuffer("g_lightSpheres")[i]["albedo"] = scene.lightSpheres[i].albedo; 837 | m_computeVars->getStructuredBuffer("g_lightSpheres")[i]["emissive"] = scene.lightSpheres[i].emissive; 838 | m_computeVars->getStructuredBuffer("g_lightSpheres")[i]["geoID"] = scene.lightSpheres[i].geoID; 839 | } 840 | 841 | for (uint i = 0; i < scene.quads.size(); ++i) 842 | { 843 | m_computeVars->getStructuredBuffer("g_quads")[i]["a"] = scene.quads[i].a; 844 | m_computeVars->getStructuredBuffer("g_quads")[i]["b"] = scene.quads[i].b; 845 | m_computeVars->getStructuredBuffer("g_quads")[i]["c"] = scene.quads[i].c; 846 | m_computeVars->getStructuredBuffer("g_quads")[i]["d"] = scene.quads[i].d; 847 | m_computeVars->getStructuredBuffer("g_quads")[i]["normal"] = scene.quads[i].normal; 848 | m_computeVars->getStructuredBuffer("g_quads")[i]["albedo"] = scene.quads[i].albedo; 849 | m_computeVars->getStructuredBuffer("g_quads")[i]["emissive"] = scene.quads[i].emissive; 850 | m_computeVars->getStructuredBuffer("g_quads")[i]["geoID"] = scene.quads[i].geoID; 851 | } 852 | 853 | for (uint i = 0; i < scene.pLights.size(); ++i) 854 | { 855 | m_computeVars->getStructuredBuffer("g_plights")[i]["position"] = scene.pLights[i].position; 856 | m_computeVars->getStructuredBuffer("g_plights")[i]["color"] = scene.pLights[i].color; 857 | } 858 | 859 | m_computeVars->setTexture("gOutputF32", m_outputF32); 860 | m_computeVars->setTexture("gOutputU8", m_outputU8); 861 | 862 | pContext->setComputeState(m_computeState); 863 | pContext->setComputeVars(m_computeVars); 864 | 865 | pContext->dispatch(width/m_workGroupSize, height/m_workGroupSize, 1); 866 | pContext->copyResource(pTargetFbo->getColorTexture(0).get(), m_outputU8.get()); 867 | 868 | m_frameCount++; 869 | m_sampleCount += m_samplesPerFrame; 870 | 871 | m_stopTime = pSample->getCurrentTime(); 872 | 873 | #if ANIMATION_TRACK != 0 874 | pSample->toggleText(true); 875 | pSample->renderText(m_animationMessage, glm::vec2(10, 10)); 876 | pSample->toggleText(false); 877 | #endif 878 | } 879 | 880 | void 
onResizeSwapChain(SampleCallbacks* pSample, uint32_t width, uint32_t height) 881 | { 882 | m_outputU8 = Texture::create2D(width, height, ResourceFormat::RGBA8Unorm, 1, 1, nullptr, Resource::BindFlags::ShaderResource | Resource::BindFlags::UnorderedAccess); 883 | m_outputF32 = Texture::create2D(width, height, ResourceFormat::RGBA32Float, 1, 1, nullptr, Resource::BindFlags::ShaderResource | Resource::BindFlags::UnorderedAccess); 884 | 885 | UpdateProjectionMatrix(pSample); 886 | } 887 | 888 | bool onMouseEvent(SampleCallbacks* pSample, const MouseEvent& mouseEvent) 889 | { 890 | if (mouseEvent.type == MouseEvent::Type::LeftButtonDown) 891 | { 892 | m_mouseDragPos = mouseEvent.pos; 893 | m_mouseDown = true; 894 | return true; 895 | } 896 | else if (mouseEvent.type == MouseEvent::Type::LeftButtonUp) 897 | { 898 | m_mouseDown = false; 899 | return true; 900 | } 901 | else if (mouseEvent.type == MouseEvent::Type::Move) 902 | { 903 | if (m_mouseDown) 904 | { 905 | glm::vec2 mouseDelta = mouseEvent.pos - m_mouseDragPos; 906 | 907 | if (mouseDelta.x != 0.0f || mouseDelta.y != 0.0f) 908 | { 909 | m_mouseDragPos = mouseEvent.pos; 910 | 911 | mouseDelta *= pSample->getLastFrameTime() * 100000.0f; 912 | m_yaw -= mouseDelta.x; 913 | m_pitch -= mouseDelta.y; 914 | 915 | if (m_pitch > 89.0f) 916 | m_pitch = 89.0f; 917 | else if (m_pitch < -89.0f) 918 | m_pitch = -89.0f; 919 | 920 | UpdateViewMatrix(pSample); 921 | 922 | return true; 923 | } 924 | } 925 | } 926 | 927 | return false; 928 | } 929 | 930 | bool onKeyEvent(SampleCallbacks* pSample, const KeyboardEvent& keyEvent) 931 | { 932 | if ((uint32_t)keyEvent.key >= 256) 933 | return false; 934 | 935 | if (keyEvent.type != KeyboardEvent::Type::KeyPressed && keyEvent.type != KeyboardEvent::Type::KeyReleased) 936 | return false; 937 | 938 | m_keyState[(uint32_t)keyEvent.key] = (keyEvent.type == KeyboardEvent::Type::KeyPressed); 939 | return true; 940 | } 941 | }; 942 | 943 | #ifdef _WIN32 944 | int WINAPI WinMain(_In_ HINSTANCE hInstance, _In_opt_ HINSTANCE hPrevInstance, _In_ LPSTR lpCmdLine, _In_ int nShowCmd) 945 | #else 946 | int main(int argc, char** argv) 947 | #endif 948 | { 949 | Application::UniquePtr pRenderer = std::make_unique<Application>(); 950 | SampleConfig config; 951 | config.windowDesc.title = "Path Tracer"; 952 | config.windowDesc.resizableWindow = true; 953 | config.windowDesc.width = c_width; 954 | config.windowDesc.height = c_height; 955 | 956 | config.deviceDesc.depthFormat = ResourceFormat::Unknown; 957 | 958 | #ifdef _WIN32 959 | Sample::run(config, pRenderer); 960 | #else 961 | config.argc = (uint32_t)argc; 962 | config.argv = argv; 963 | Sample::run(config, pRenderer); 964 | #endif 965 | return 0; 966 | } 967 | -------------------------------------------------------------------------------- /out/__clear.bat: -------------------------------------------------------------------------------- 1 | del *.png 2 | del *.webm 3 | del *.mp4 -------------------------------------------------------------------------------- /out/__makevideo.bat: -------------------------------------------------------------------------------- 1 | 2 | rem this seems to actually work (chrome / firefox / windows) 3 | rem a bit low quality though. 
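rem untested alternative: constant quality x264 tends to look better than a fixed bitrate at a similar size,
rem and -pix_fmt yuv420p keeps the mp4 playable in most browsers / players:
rem .\ffmpeg.exe -framerate 30 -i frame.%%d.png -c:v libx264 -crf 18 -pix_fmt yuv420p __out_crf.mp4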
4 | rem .\ffmpeg.exe -framerate 30 -i frame.%d.png -c:v libvpx -crf 10 -b:v 1M -c:a libvorbis __out.webm 5 | 6 | rem .\ffmpeg.exe -framerate 30 -i frame.%d.png -c:v mpeg2video -q:v 5 -c:a mp2 -f vob __out.mpg 7 | 8 | 9 | rem this makes a 3.6mb file which is a bit large 10 | rem .\ffmpeg.exe -framerate 30 -i frame.%%d.png -c:v libvpx -crf 4 -b:v 0 -c:a libvorbis __out.webm 11 | 12 | .\ffmpeg.exe -framerate 30 -i frame.%%d.png -vcodec libx264 -vb 20M __out.mp4 -------------------------------------------------------------------------------- /out/ffmpeg.exe: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Atrix256/FalcorPathTracer/c63896e8c30aad7b962e1999b4d0aefa4020328d/out/ffmpeg.exe -------------------------------------------------------------------------------- /readme.md: -------------------------------------------------------------------------------- 1 | ## Getting Started 2 | 3 | I pulled down this falcor, built it and ran one of the samples to make sure it worked ok: "SimpleDeferred" 4 | https://github.com/NVIDIAGameWorks/Falcor/commit/afdd9125283c90374694ed35875144fde81e9312 5 | * git clone https://github.com/Atrix256/FalcorPathTracer.git 6 | * git checkout 0b561caae19e8325853166cc4c93d4763570774a 7 | * somehow it was able to find the location of falcor... weird. 8 | 9 | I followed the instructions for "creating a new project" in the readme.md file 10 | * If you haven't done so already, create a Visual Studio solution and project for your code. Falcor only supports 64-bit builds, so make sure you have a 64-bit build configuration 11 | * Add Falcor.props to your project (Property Manager -> Right click your project -> Add existing property sheet) 12 | * Add Falcor.vcxproj to your solution 13 | * Add a reference to Falcor in your project (Solution Explorer -> Your Project -> Right Click References -> Click Add Reference... -> Choose Falcor) 14 | 15 | I copied the "ComputeShader" project's files into my own project... 16 | * ComputeShader.cpp / .h 17 | * Data/compute.hlsl 18 | 19 | Compiled, ran, and it worked, woo. Now time to start writing code. 20 | 21 | ## Pathtracing 22 | 23 | Starting simple with just diffuse and emissive, I used the info from my previous post: 24 | https://blog.demofox.org/2016/09/21/path-tracing-getting-started-with-diffuse-and-emissive/ 25 | 26 | But also: 27 | * importance sampling via cosine weighted hemisphere sampling (explain) 28 | * jittering the camera 29 | 30 | 31 | ## Explicit Light Sampling Notes 32 | 33 | * show how sampling point lights is really the answer to the integral, because there is only one place that has a non zero value! 34 | * you need distance attenuation though... which complicates the explanation 35 | * i think point lights have a different unit of measurement. radiance vs irradiance. 36 | * then spherical lights being sampled from solid angle 37 | * then quad lights / generalize to other shapes and non solid angle. 38 | 39 | ## DOF blog post notes 40 | 41 | * talk about falcor 42 | * note that the code is a WIP for another path tracer, so there are todos and such that will get resolved as time goes on. 43 | * make a tag (?) in github for the code that goes with this blog post. understand how those work, but that's what you can put on your blog post i guess! 44 | * make sure it explains how to compile etc. 
Maybe have someone try it before you publish 45 | 46 | 47 | 48 | ## Notes 49 | 50 | * make this blog post about using falcor 51 | * also about sub pixel jitter for AA and cosine weighted hemisphere sampling 52 | * ... and about TAA / blue noise? 53 | * possibly also about whitted raytracing? 54 | * also about explicit light sampling? 55 | 56 | * quadrupling sample count halves error 57 | * https://en.wikipedia.org/wiki/Monte_Carlo_method#Integration 58 | 59 | ## Links 60 | 61 | * blue noise textures: http://momentsingraphics.de/?p=127 62 | * Aras' path tracing: http://aras-p.info/blog/2018/03/28/Daily-Pathtracer-Part-0-Intro/ 63 | * (READ HIS STUFF TOO) 64 | * mine: https://blog.demofox.org/2016/09/21/path-tracing-getting-started-with-diffuse-and-emissive/ 65 | * http://simonstechblog.blogspot.com/2018/06/simple-gpu-path-tracer.html?m=1 66 | * smallpt: https://drive.google.com/file/d/0B8g97JkuSSBwUENiWTJXeGtTOHFmSm51UC01YWtCZw/view 67 | 68 | * DOF: http://cg.skeelogy.com/depth-of-field-using-raytracing/ 69 | 70 | * mitsuba xml file format: https://mynameismjp.wordpress.com/2015/04/04/mitsuba-quick-start-guide/ 71 | 72 | ## TODOs new 9/12/18 - for PBR path tracer 73 | 74 | * clean out old stuff. 75 | * start making pbr path tracing stuff! 76 | 77 | ## TODOs 78 | 79 | 80 | * after post, maybe get rid of the multiply by lens area, and comment in the code that there's an implicit auto-exposure so that adjusting aperture size doesn't affect brightness. 81 | * also make auto fov on always. it's easier to work with and is simpler code. 82 | 83 | * rework the tracing loop, it's confusing to follow and I'm pretty sure if it hits max loop count it doesn't shade the final point, which is wasteful! 84 | 85 | * look at this for using blue noise for path tracing 86 | * http://www.iliyan.com/publications/DitheredSampling/DitheredSampling_Sig2016.pdf 87 | 88 | ? maybe try updating falcor? might get some fixes or something. 89 | 90 | * group UI into logical settings. DOF, camera, etc. 91 | 92 | * may need an option to sample a fixed number of lights maximum per frame. 93 | * should have a max defined i guess... 94 | 95 | * may want to try even a simple / lame tone mapping to see if it makes the lights not show up as white. 96 | 97 | * NaN's are being generated! need to make it so content can't cause NaNs to be generated! 98 | * hit this when randomly generating spheres 99 | 100 | * make a better scene at some point, or have a drop down of different scenes. 101 | * for bokeh blog post 102 | 103 | * maybe make this program able to generate a mitsuba path tracer xml scene file? 104 | * probably need it to make an obj, and then can import into mitsuba. 105 | * can obj handle spheres? or does mitsuba xml file support spheres? 106 | ! actually it looks like it can do spheres. mjp's shows this in the adding emitters section 107 | * aras' path tracing thing also has the mitsuba scene available. 108 | * https://github.com/aras-p/ToyPathTracer/blob/04-fixes/Mitsuba/scene.xml 109 | ? can it do triangles / quads? if so, can avoid obj files and make mitsuba files directly! 110 | 111 | * is the point light calculation correct? I don't think so... check out the non cosine weighted hemisphere equation to make sure 112 | 113 | * get mitsuba working after point lights. maybe a small blog post about what's involved? 114 | 115 | * support explicitly sampling punctual lights so you understand them 116 | 117 | * quad light support 118 | 119 | * mitsuba verificiation 120 | 121 | * read this and see what you want to take. 
it's already in the links section 122 | * http://simonstechblog.blogspot.com/2018/06/simple-gpu-path-tracer.html?m=1 123 | * it talks about cosine weighted explicit rectangular light sampling, and needing to use the jacobian. Should understand that better! 124 | 125 | * info here about pathtracing from the smallpt path tracer, including how basic DOF works. already in links section 126 | * https://drive.google.com/file/d/0B8g97JkuSSBwUENiWTJXeGtTOHFmSm51UC01YWtCZw/view 127 | 128 | * are you multiplying by cosine theta correctly when cosine weighted hemisphere sampling is off? 129 | * reason through it and maybe make it more explicit that it's correct 130 | 131 | * direct light sampling 132 | * russian roulette 133 | 134 | * refraction at some point 135 | 136 | * try profiling with renderdoc or vtune or who knows what else to see if you can find any obvious bottlenecks to fix 137 | 138 | ? can we importance sample the lens? like... shoot a ray at a random point on the lights from the camera each frame, like we do for surfaces 139 | * put this on the todo list, can be a follow up thing 140 | 141 | * try low discrepancy sequence on the lens? may make the noise in the bokeh be less white 142 | * if the importance sampling doesn't work. 143 | * Do you need to multiply the result by a scaling factor or anything? 144 | 145 | * TAA version 146 | * jitter camera 147 | * use IGN 5.5 noise 148 | * need world position per pixel to project to previous frame - either by actually storing the position, or getting it via depth. 149 | * probably not compatible with DOF. 150 | 151 | * clean up code. e.g. put rng stuff into a header? 152 | * or maybe it'd be better & simpler to have everything in a single file. if so, bring geo.h in! 153 | 154 | * comment code better, especially main? 155 | 156 | * blue noise isn't that compelling as you've implemented it. keep it? ditch it? modify it? 157 | * ditching it is probably fine, but i like the example of how to use a texture, so... ?? 158 | * may want it for TAA 159 | 160 | * presets if it makes sense... 161 | * path tracer slow 162 | * path tracer fast 163 | * TAA path tracer 164 | 165 | * should there be a better scene? or maybe a couple scenes you can choose from the drop down? 166 | 167 | * rename compute.hlsl to pathtrace.hlsl or something 168 | 169 | * if ray generation details get complex (russian roulette), have it somehow atomic count raycount like aras does? or leave it primary rays? 170 | 171 | * organize UI into logical groups 172 | 173 | * look for TODOs 174 | 175 | * motion blur -> longer exposure time means more motion blur! 176 | 177 | ? tone mapping? 178 | 179 | * should you have a denoise option? 180 | 181 | * ray marched fog 182 | 183 | * chromatic aberration 184 | * motion blur 185 | 186 | * textures. IQ has an article on ray differentials for this 187 | 188 | Next: specular! Make a short blog post about this and a tag / release to link to from the blog post? 189 | * show this source to patrick so he can see how falcor works 190 | 191 | Notes: 192 | * color is assumed to be divided by pi already, so diffuse 1/pi multiplier isn't there. Does the value not divided by pi have real world units of measurement then? 
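* rough sketch of the math (double check this): the lambertian BRDF is albedo/pi, so a bounce contributes (albedo/pi) * cosTheta * incomingLight. With cosine weighted hemisphere sampling the pdf is cosTheta/pi, and dividing by that pdf cancels both the cosTheta and the 1/pi, leaving just albedo * incomingLight. With uniform hemisphere sampling the pdf is 1/(2*pi), which leaves 2 * cosTheta * albedo * incomingLight instead - that factor of 2 is the one asked about in the questions below.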
193 | 194 | Links: 195 | * a pretty good read on lots of path tracing topics: https://computergraphics.stackexchange.com/questions/5152/progressive-path-tracing-with-explicit-light-sampling 196 | * deriving lambertian BRDF 1/pi: http://www.rorydriscoll.com/2009/01/25/energy-conservation-in-games/ 197 | * a good explanation of what importance sampling is all about: https://computergraphics.stackexchange.com/questions/4979/what-is-importance-sampling 198 | 199 | Questions: 200 | * how does direct light sampling work? 201 | * how do you importance sample arbitrary BRDFs / BSDFs? 202 | * how does multiple importance sampling work? 203 | * how and why does russian roulette work? 204 | * lambertian BRDF is 1/pi as explained in that link, but why is there the 2 in 2 * cosTheta * LOut * albedo? You mention it in your pathtracing post, maybe figure out why. 205 | * a lot of path tracers have a pdf value they multiply by. what's that about? 206 | * It's the probability of having chosen that ray direction. If uniform sampling it's 1 / total area. 207 | ? this is the 1/pi in non cosine weighted hemispherical samples, right? 208 | 209 | Answered questions: 210 | --------------------------------------------------------------------------------