├── README.md
├── mono_360
│   ├── example_01
│   │   ├── cubeMapUtils.pde
│   │   ├── data
│   │   │   └── equirectangular.glsl
│   │   └── example_01.pde
│   └── example_02
│       ├── cubeMapUtils.pde
│       ├── data
│       │   └── equirectangular.glsl
│       └── example_02.pde
├── mono_360_HYPE
│   ├── example_01
│   │   ├── cubeMapUtils.pde
│   │   ├── data
│   │   │   └── equirectangular.glsl
│   │   └── example_01.pde
│   └── example_02
│       ├── cubeMapUtils.pde
│       ├── data
│       │   └── equirectangular.glsl
│       └── example_02.pde
└── top_bottom_360
    └── build
        ├── Beams.pde
        ├── Drawable.pde
        ├── Geo.pde
        ├── Icos.pde
        ├── Pyramid.pde
        ├── TerrainGrid.pde
        ├── TerrainTile.pde
        ├── build.pde
        ├── data
        │   ├── data_30fps.csv
        │   ├── frag.glsl
        │   ├── poly.png
        │   ├── poly2.png
        │   ├── sky.jpg
        │   ├── sky2.jpg
        │   ├── sky3.jpg
        │   └── sun.svg
        └── utility.pde

/README.md:
--------------------------------------------------------------------------------
1 | # Processing 360 video output
2 | 
3 | A series of examples showing how to use a cubemap and a GLSL shader to output an equirectangular image that can be captured to create mono & 3D 360 videos.
4 | 
5 | ### UPDATE:
6 | 
7 | Added an example of top/bottom 3D stereoscopic rendering. This is largely a port of Kite & Lightning's Unreal Engine plugin, modified to work with Processing. Also, read [Paul Bourke's writings][pbLink] on the subject; they're invaluable, and I wouldn't have gotten this far without them.
8 | 
9 | ### Source
10 | 
11 | The starting point was the DomeProjection example code that ships with Processing, modified so the cubemap renders all six faces. The dome projection shaders were removed, and an [equirectangular shader][shader] by [user BeRo][berolink] was added to convert the cubemap into an equirectangular image suitable for use in 360 video.
12 | 
13 | ### Example videos
14 | - <https://www.youtube.com/watch?v=7HEyj7Mjoq4>
15 | - <https://www.youtube.com/watch?v=EbMCDnFhE_w>
16 | - <https://www.youtube.com/watch?v=AHRJxdK-obA>
17 | 
18 | ### Usage
19 | - Place all objects that are to be drawn to the screen in the drawScene() method (see the minimal sketch at the end of this readme)
20 | - Put any animation updates in the animationPreUpdate() or animationPostUpdate() methods
21 | - Render the sketch as a sequence of png / tif frames
22 | - Import the frames into Premiere (or a similar video editor) to create a video sequence
23 | - Export the video
24 | - Use the [YouTube 360][YT360] metadata injector to add 360 metadata to your video
25 | - Ensure the resulting video's filename ends with "_360" for it to work with Gear VR, e.g. 'filename_360.mp4'
26 | 
27 | ### To do
28 | - Add settings for NEAREST / LINEAR / LINEAR_MIPMAP_LINEAR texture filtering
29 | - Add camera control
30 | 
31 | ### Known issues
32 | 
33 | When saving frames, these sketches run slow: between 2 and 5 fps. The goal isn't really to run in realtime, but to have a way to export Processing sketches as frames for a 360 video. That said, when not saving frames, they run fine depending on your hardware and how much you're drawing to the screen (anywhere between 30 and 60 fps in my tests).
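### Minimal drawScene() example

For orientation, here is a minimal `drawScene()` in the style of `mono_360/example_01`. It is only a sketch: it assumes the `zClippingPlane` variable declared in the example sketches (default 2000) and simply places one red cube in front of the default forward view; the cubemap capture, shader pass, and frame saving are handled by the surrounding example code.

```processing
// A sketch of the drawScene() hook, following mono_360/example_01.
// The centre of the viewable space is at (zClippingPlane, zClippingPlane, 0),
// so objects are positioned relative to that point rather than width/height.
void drawScene() {
  background(0);
  lights();

  pushMatrix();
  translate(zClippingPlane, zClippingPlane, -250); // 250 units in front of the viewer
  fill(255, 0, 0);
  box(75); // a red cube that lands at the centre of the equirectangular output
  popMatrix();
}
```

With `record` set to true in the sketch, each rendered frame is saved to `frames/frame-####.png`, ready to be imported as an image sequence in the editing step above.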
34 | 35 | [YT360]: 36 | [shader]: 37 | [berolink]: 38 | [pbLink]: -------------------------------------------------------------------------------- /mono_360/example_01/cubeMapUtils.pde: -------------------------------------------------------------------------------- 1 | /* 2 | These functions are copied almost verbatim from the Processing domeProjection example, and shouldn't need changing 3 | 4 | The main changes made are: 5 | - replacing the dome shape with a rectangle 6 | - using an equirectangular shader 7 | - capturing 6 sides not 5 for the cubemap 8 | - using linear filtering for anti-aliasing the texture 9 | - putting the camera in the center of the scene with the translate call on line 108 10 | */ 11 | 12 | 13 | void initCubeMap() { 14 | //change the domeSphere shape to a rectangle as we're not projecting on a dome 15 | myRect = createShape(RECT, -width/2, -height/2, width, height); 16 | myRect.setStroke(false); 17 | 18 | int txtMapSize = envMapSize; 19 | if (g.pixelDensity == 2) { 20 | txtMapSize = envMapSize * 2; 21 | } 22 | 23 | PGL pgl = beginPGL(); 24 | 25 | envMapTextureID = IntBuffer.allocate(1); 26 | pgl.genTextures(1, envMapTextureID); 27 | pgl.bindTexture(PGL.TEXTURE_CUBE_MAP, envMapTextureID.get(0)); 28 | pgl.texParameteri(PGL.TEXTURE_CUBE_MAP, PGL.TEXTURE_WRAP_S, PGL.CLAMP_TO_EDGE); 29 | pgl.texParameteri(PGL.TEXTURE_CUBE_MAP, PGL.TEXTURE_WRAP_T, PGL.CLAMP_TO_EDGE); 30 | pgl.texParameteri(PGL.TEXTURE_CUBE_MAP, PGL.TEXTURE_WRAP_R, PGL.CLAMP_TO_EDGE); 31 | pgl.texParameteri(PGL.TEXTURE_CUBE_MAP, PGL.TEXTURE_MIN_FILTER, PGL.LINEAR); 32 | pgl.texParameteri(PGL.TEXTURE_CUBE_MAP, PGL.TEXTURE_MAG_FILTER, PGL.LINEAR); 33 | for (int i = PGL.TEXTURE_CUBE_MAP_POSITIVE_X; i < PGL.TEXTURE_CUBE_MAP_POSITIVE_X + 6; i++) { 34 | pgl.texImage2D(i, 0, PGL.RGBA8, txtMapSize, txtMapSize, 0, PGL.RGBA, PGL.UNSIGNED_BYTE, null); 35 | } 36 | 37 | // Init fbo, rbo 38 | fbo = IntBuffer.allocate(1); 39 | rbo = IntBuffer.allocate(1); 40 | pgl.genFramebuffers(1, fbo); 41 | pgl.bindFramebuffer(PGL.FRAMEBUFFER, fbo.get(0)); 42 | pgl.framebufferTexture2D(PGL.FRAMEBUFFER, PGL.COLOR_ATTACHMENT0, PGL.TEXTURE_CUBE_MAP_POSITIVE_X, envMapTextureID.get(0), 0); 43 | 44 | pgl.genRenderbuffers(1, rbo); 45 | pgl.bindRenderbuffer(PGL.RENDERBUFFER, rbo.get(0)); 46 | pgl.renderbufferStorage(PGL.RENDERBUFFER, PGL.DEPTH_COMPONENT24, txtMapSize, txtMapSize); 47 | 48 | // Attach depth buffer to FBO 49 | pgl.framebufferRenderbuffer(PGL.FRAMEBUFFER, PGL.DEPTH_ATTACHMENT, PGL.RENDERBUFFER, rbo.get(0)); 50 | 51 | endPGL(); 52 | 53 | // Load cubemap shader. 
54 | cubemapShader = loadShader("equirectangular.glsl"); 55 | cubemapShader.set("cubemap", 1); 56 | 57 | } 58 | 59 | void drawCubeMap() { 60 | PGL pgl = beginPGL(); 61 | pgl.activeTexture(PGL.TEXTURE1); 62 | pgl.enable(PGL.TEXTURE_CUBE_MAP); 63 | pgl.bindTexture(PGL.TEXTURE_CUBE_MAP, envMapTextureID.get(0)); 64 | regenerateEnvMap(pgl); 65 | endPGL(); 66 | 67 | drawDomeMaster(); 68 | 69 | pgl.bindTexture(PGL.TEXTURE_CUBE_MAP, 0); 70 | } 71 | 72 | void drawDomeMaster() { 73 | camera(); 74 | // ortho(); 75 | ortho(width/2, -width/2, -height/2, height/2); 76 | 77 | resetMatrix(); 78 | shader(cubemapShader); 79 | shape(myRect); 80 | resetShader(); 81 | } 82 | 83 | // Called to regenerate the envmap 84 | void regenerateEnvMap(PGL pgl) { 85 | // bind fbo 86 | pgl.bindFramebuffer(PGL.FRAMEBUFFER, fbo.get(0)); 87 | 88 | // generate 6 views from origin(0, 0, 0) 89 | pgl.viewport(0, 0, envMapSize, envMapSize); 90 | perspective(90.0f * DEG_TO_RAD, 1.0f, 1.0f, zClippingPlane); 91 | 92 | //note the <= to generate 6 faces, not 5 as per DomeProjection example 93 | for (int face = PGL.TEXTURE_CUBE_MAP_POSITIVE_X; face <= 94 | PGL.TEXTURE_CUBE_MAP_NEGATIVE_Z; face++) { 95 | 96 | resetMatrix(); 97 | 98 | if (face == PGL.TEXTURE_CUBE_MAP_POSITIVE_X) { 99 | camera(0.0f, 0.0f, 0.0f, -1.0f, 0.0f, 0.0f, 0.0f, 1.0f, 0.0f); 100 | } else if (face == PGL.TEXTURE_CUBE_MAP_NEGATIVE_X) { 101 | camera(0.0f, 0.0f, 0.0f, 1.0f, 0.0f, 0.0f, 0.0f, 1.0f, 0.0f); 102 | } else if (face == PGL.TEXTURE_CUBE_MAP_POSITIVE_Y) { 103 | camera(0.0f, 0.0f, 0.0f, 0.0f, 1.0f, 0.0f, 0.0f, 0.0f, -1.0f); 104 | } else if (face == PGL.TEXTURE_CUBE_MAP_NEGATIVE_Y) { 105 | camera(0.0f, 0.0f, 0.0f, 0.0f, -1.0f, 0.0f, 0.0f, 0.0f, 1.0f); 106 | } else if (face == PGL.TEXTURE_CUBE_MAP_POSITIVE_Z) { 107 | camera(0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 1.0f, 0.0f, 1.0f, 0.0f); 108 | } else if (face == PGL.TEXTURE_CUBE_MAP_NEGATIVE_Z) { 109 | camera(0.0f, 0.0f, 0.0f, 0.0f, 0.0f, -1.0f, 0.0f, 1.0f, 0.0f); 110 | } 111 | 112 | rotateY(HALF_PI); //sets forward facing to center of screen for video output 113 | translate(-zClippingPlane, -zClippingPlane, 0);//defaults coords to processing style with 0,0 in top left of visible space 114 | 115 | pgl.framebufferTexture2D(PGL.FRAMEBUFFER, PGL.COLOR_ATTACHMENT0, face, envMapTextureID.get(0), 0); 116 | 117 | drawScene(); // Draw objects in the scene 118 | flush(); // Make sure that the geometry in the scene is pushed to the GPU 119 | noLights(); // Disabling lights to avoid adding many times 120 | pgl.framebufferTexture2D(PGL.FRAMEBUFFER, PGL.COLOR_ATTACHMENT0, face, 0, 0); 121 | } 122 | } 123 | -------------------------------------------------------------------------------- /mono_360/example_01/data/equirectangular.glsl: -------------------------------------------------------------------------------- 1 | uniform samplerCube cubemap; varying vec4 vertTexCoord; void main() { vec2 thetaphi = ((vertTexCoord.xy * 2.0) - vec2(1.0)) * vec2(3.1415926535897932384626433832795, 1.5707963267948966192313216916398); vec3 rayDirection = vec3(cos(thetaphi.y) * cos(thetaphi.x), sin(thetaphi.y), cos(thetaphi.y) * sin(thetaphi.x)); vec3 color = vec3(textureCube(cubemap, rayDirection)); gl_FragColor = vec4(color, 1.0); } -------------------------------------------------------------------------------- /mono_360/example_01/example_01.pde: -------------------------------------------------------------------------------- 1 | /** 2 | * Processing to 360 video 3 | * 4 | * This sketch renders a cubemap to an equirectangular image that can be captured to 
create 360 photos or videos. 5 | * 6 | * example_001 - renders a single static image 7 | * 8 | * The code is a modification of the DomeProjection Processing example sketch, and an equirectangular GLSL shader by BeRo, on ShaderToy: 9 | * https://www.shadertoy.com/view/XsBSDR# 10 | * https://www.shadertoy.com/user/BeRo 11 | */ 12 | 13 | import java.nio.IntBuffer; 14 | 15 | PShader cubemapShader; 16 | PShape myRect; 17 | 18 | IntBuffer fbo; 19 | IntBuffer rbo; 20 | IntBuffer envMapTextureID; 21 | 22 | int envMapSize = 1024; //width & height used for the cubemap texture 23 | /* 24 | The following float, zClippingPlane, is the distance from center to a cubemap wall. 25 | The higher the number, the further objects can travel before they begin to clip out of frame. 26 | The pixel dimension of the drawable area is 0 to zClippingPlane * 2. 27 | e.g. if zClippingPlane is 2000, the drawable area is 0 to 4000 in each direction (x, y, z). 28 | 29 | Rather than rely on your screen width and height vars, you should use values based on the zClippingPlane. 30 | i.e. to translate to center screen, use: 31 | translate(zClippingPlane, zClippingPlane, 0); 32 | 33 | Remember instead of width/2 or height/2, use zClippingPlane, 34 | and instead of width or height, use zClippingPlane*2 35 | */ 36 | float zClippingPlane = 2000.0f; 37 | 38 | //set record to true if you want to save frames to make a video 39 | boolean record = false; 40 | 41 | void setup() { 42 | //we'll be using the saved frames to create a 2048 x 1024 video, 43 | size(2048, 1024, P3D); 44 | 45 | //for testing, you might want to work at a smaller resolution, but for export, the above is preferred 46 | //size(1024, 512, P3D); 47 | 48 | smooth(); 49 | background(0); 50 | 51 | // if you have a retina display, pixelDesnity(2) is supported. Bring on those 4k renders! 
52 | // pixelDensity(2); 53 | 54 | initCubeMap(); 55 | } 56 | 57 | /* 58 | It's best to leave draw() alone and do all your updates and drawing in the methods at the bottom of this file 59 | - animationPreUpdate() 60 | - animationPostUpdate() 61 | - drawScene() 62 | */ 63 | void draw() { 64 | background(0); 65 | strokeWeight(0); 66 | noStroke(); 67 | 68 | //upate pre render call 69 | animationPreUpdate(); 70 | 71 | drawCubeMap(); 72 | 73 | //upate post render call 74 | animationPostUpdate(); 75 | 76 | //record frame 77 | if (record == true) { 78 | saveFrame("frames/frame-####.png"); 79 | } 80 | 81 | surface.setTitle("FPS: " + (int) frameRate); 82 | } 83 | 84 | 85 | 86 | /* 87 | Put any input processing or pre render updates here 88 | */ 89 | void animationPreUpdate() { 90 | } 91 | 92 | 93 | /* 94 | Put any post render updates here 95 | */ 96 | void animationPostUpdate() { 97 | } 98 | 99 | 100 | /* 101 | Put your shapes/objects and lights here to be drawn to the screen 102 | */ 103 | void drawScene() { 104 | background(0); 105 | lights(); 106 | 107 | //front - red 108 | pushMatrix(); 109 | translate(zClippingPlane, zClippingPlane, -250); 110 | fill(255, 0, 0); 111 | box(75); 112 | popMatrix(); 113 | 114 | //right - green 115 | pushMatrix(); 116 | translate(zClippingPlane + 250, zClippingPlane, 0); 117 | fill(0, 255, 0); 118 | box(75); 119 | popMatrix(); 120 | 121 | //back - blue 122 | pushMatrix(); 123 | translate(zClippingPlane, zClippingPlane, 250); 124 | fill(0, 0, 255); 125 | box(75); 126 | popMatrix(); 127 | 128 | //left - yellow 129 | pushMatrix(); 130 | translate(zClippingPlane - 250, zClippingPlane, 0); 131 | fill(255, 255, 0); 132 | box(75); 133 | popMatrix(); 134 | 135 | //top - cyan 136 | pushMatrix(); 137 | translate(zClippingPlane, zClippingPlane - 250, 0); 138 | fill(0, 255, 255); 139 | box(75); 140 | popMatrix(); 141 | 142 | //bottom - magenta 143 | pushMatrix(); 144 | translate(zClippingPlane, zClippingPlane + 250, 0); 145 | fill(255, 0, 255); 146 | box(75); 147 | popMatrix(); 148 | } 149 | -------------------------------------------------------------------------------- /mono_360/example_02/cubeMapUtils.pde: -------------------------------------------------------------------------------- 1 | /* 2 | These functions are copied almost verbatim from the Processing domeProjection example, and shouldn't need changing 3 | 4 | The main changes made are: 5 | - replacing the dome shape with a rectangle 6 | - using an equirectangular shader 7 | - capturing 6 sides not 5 for the cubemap 8 | - using linear filtering for anti-aliasing the texture 9 | - putting the camera in the center of the scene with the translate call on line 108 10 | */ 11 | 12 | 13 | void initCubeMap() { 14 | //change the domeSphere shape to a rectangle as we're not projecting on a dome 15 | myRect = createShape(RECT, -width/2, -height/2, width, height); 16 | myRect.setStroke(false); 17 | 18 | int txtMapSize = envMapSize; 19 | if (g.pixelDensity == 2) { 20 | txtMapSize = envMapSize * 2; 21 | } 22 | 23 | PGL pgl = beginPGL(); 24 | 25 | envMapTextureID = IntBuffer.allocate(1); 26 | pgl.genTextures(1, envMapTextureID); 27 | pgl.bindTexture(PGL.TEXTURE_CUBE_MAP, envMapTextureID.get(0)); 28 | pgl.texParameteri(PGL.TEXTURE_CUBE_MAP, PGL.TEXTURE_WRAP_S, PGL.CLAMP_TO_EDGE); 29 | pgl.texParameteri(PGL.TEXTURE_CUBE_MAP, PGL.TEXTURE_WRAP_T, PGL.CLAMP_TO_EDGE); 30 | pgl.texParameteri(PGL.TEXTURE_CUBE_MAP, PGL.TEXTURE_WRAP_R, PGL.CLAMP_TO_EDGE); 31 | pgl.texParameteri(PGL.TEXTURE_CUBE_MAP, PGL.TEXTURE_MIN_FILTER, PGL.LINEAR); 32 | 
pgl.texParameteri(PGL.TEXTURE_CUBE_MAP, PGL.TEXTURE_MAG_FILTER, PGL.LINEAR); 33 | for (int i = PGL.TEXTURE_CUBE_MAP_POSITIVE_X; i < PGL.TEXTURE_CUBE_MAP_POSITIVE_X + 6; i++) { 34 | pgl.texImage2D(i, 0, PGL.RGBA8, txtMapSize, txtMapSize, 0, PGL.RGBA, PGL.UNSIGNED_BYTE, null); 35 | } 36 | 37 | // Init fbo, rbo 38 | fbo = IntBuffer.allocate(1); 39 | rbo = IntBuffer.allocate(1); 40 | pgl.genFramebuffers(1, fbo); 41 | pgl.bindFramebuffer(PGL.FRAMEBUFFER, fbo.get(0)); 42 | pgl.framebufferTexture2D(PGL.FRAMEBUFFER, PGL.COLOR_ATTACHMENT0, PGL.TEXTURE_CUBE_MAP_POSITIVE_X, envMapTextureID.get(0), 0); 43 | 44 | pgl.genRenderbuffers(1, rbo); 45 | pgl.bindRenderbuffer(PGL.RENDERBUFFER, rbo.get(0)); 46 | pgl.renderbufferStorage(PGL.RENDERBUFFER, PGL.DEPTH_COMPONENT24, txtMapSize, txtMapSize); 47 | 48 | // Attach depth buffer to FBO 49 | pgl.framebufferRenderbuffer(PGL.FRAMEBUFFER, PGL.DEPTH_ATTACHMENT, PGL.RENDERBUFFER, rbo.get(0)); 50 | 51 | endPGL(); 52 | 53 | // Load cubemap shader. 54 | cubemapShader = loadShader("equirectangular.glsl"); 55 | cubemapShader.set("cubemap", 1); 56 | 57 | } 58 | 59 | void drawCubeMap() { 60 | PGL pgl = beginPGL(); 61 | pgl.activeTexture(PGL.TEXTURE1); 62 | pgl.enable(PGL.TEXTURE_CUBE_MAP); 63 | pgl.bindTexture(PGL.TEXTURE_CUBE_MAP, envMapTextureID.get(0)); 64 | regenerateEnvMap(pgl); 65 | endPGL(); 66 | 67 | drawDomeMaster(); 68 | 69 | pgl.bindTexture(PGL.TEXTURE_CUBE_MAP, 0); 70 | } 71 | 72 | void drawDomeMaster() { 73 | camera(); 74 | // ortho(); 75 | ortho(width/2, -width/2, -height/2, height/2); 76 | 77 | resetMatrix(); 78 | shader(cubemapShader); 79 | shape(myRect); 80 | resetShader(); 81 | } 82 | 83 | // Called to regenerate the envmap 84 | void regenerateEnvMap(PGL pgl) { 85 | // bind fbo 86 | pgl.bindFramebuffer(PGL.FRAMEBUFFER, fbo.get(0)); 87 | 88 | // generate 6 views from origin(0, 0, 0) 89 | pgl.viewport(0, 0, envMapSize, envMapSize); 90 | perspective(90.0f * DEG_TO_RAD, 1.0f, 1.0f, zClippingPlane); 91 | 92 | //note the <= to generate 6 faces, not 5 as per DomeProjection example 93 | for (int face = PGL.TEXTURE_CUBE_MAP_POSITIVE_X; face <= 94 | PGL.TEXTURE_CUBE_MAP_NEGATIVE_Z; face++) { 95 | 96 | resetMatrix(); 97 | 98 | if (face == PGL.TEXTURE_CUBE_MAP_POSITIVE_X) { 99 | camera(0.0f, 0.0f, 0.0f, -1.0f, 0.0f, 0.0f, 0.0f, 1.0f, 0.0f); 100 | } else if (face == PGL.TEXTURE_CUBE_MAP_NEGATIVE_X) { 101 | camera(0.0f, 0.0f, 0.0f, 1.0f, 0.0f, 0.0f, 0.0f, 1.0f, 0.0f); 102 | } else if (face == PGL.TEXTURE_CUBE_MAP_POSITIVE_Y) { 103 | camera(0.0f, 0.0f, 0.0f, 0.0f, 1.0f, 0.0f, 0.0f, 0.0f, -1.0f); 104 | } else if (face == PGL.TEXTURE_CUBE_MAP_NEGATIVE_Y) { 105 | camera(0.0f, 0.0f, 0.0f, 0.0f, -1.0f, 0.0f, 0.0f, 0.0f, 1.0f); 106 | } else if (face == PGL.TEXTURE_CUBE_MAP_POSITIVE_Z) { 107 | camera(0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 1.0f, 0.0f, 1.0f, 0.0f); 108 | } else if (face == PGL.TEXTURE_CUBE_MAP_NEGATIVE_Z) { 109 | camera(0.0f, 0.0f, 0.0f, 0.0f, 0.0f, -1.0f, 0.0f, 1.0f, 0.0f); 110 | } 111 | 112 | rotateY(HALF_PI); //sets forward facing to center of screen for video output 113 | translate(-zClippingPlane, -zClippingPlane, 0);//defaults coords to processing style with 0,0 in top left of visible space 114 | 115 | pgl.framebufferTexture2D(PGL.FRAMEBUFFER, PGL.COLOR_ATTACHMENT0, face, envMapTextureID.get(0), 0); 116 | 117 | drawScene(); // Draw objects in the scene 118 | flush(); // Make sure that the geometry in the scene is pushed to the GPU 119 | noLights(); // Disabling lights to avoid adding many times 120 | pgl.framebufferTexture2D(PGL.FRAMEBUFFER, 
PGL.COLOR_ATTACHMENT0, face, 0, 0); 121 | } 122 | } 123 | -------------------------------------------------------------------------------- /mono_360/example_02/data/equirectangular.glsl: -------------------------------------------------------------------------------- 1 | uniform samplerCube cubemap; varying vec4 vertTexCoord; void main() { vec2 thetaphi = ((vertTexCoord.xy * 2.0) - vec2(1.0)) * vec2(3.1415926535897932384626433832795, 1.5707963267948966192313216916398); vec3 rayDirection = vec3(cos(thetaphi.y) * cos(thetaphi.x), sin(thetaphi.y), cos(thetaphi.y) * sin(thetaphi.x)); vec3 color = vec3(textureCube(cubemap, rayDirection)); gl_FragColor = vec4(color, 1.0); } -------------------------------------------------------------------------------- /mono_360/example_02/example_02.pde: -------------------------------------------------------------------------------- 1 | /** 2 | * Processing to 360 video 3 | * 4 | * This sketch renders a cubemap to an equirectangular image that can be captured to create 360 photos or videos. 5 | * 6 | * example_002 - renders an animated sequence of frames to use for 360 video 7 | * 8 | * The code is a modification of the DomeProjection Processing example sketch, and an equirectangular GLSL shader by BeRo, on ShaderToy: 9 | * https://www.shadertoy.com/view/XsBSDR# 10 | * https://www.shadertoy.com/user/BeRo 11 | */ 12 | 13 | import java.nio.IntBuffer; 14 | 15 | PShader cubemapShader; 16 | PShape myRect; 17 | 18 | IntBuffer fbo; 19 | IntBuffer rbo; 20 | IntBuffer envMapTextureID; 21 | 22 | int envMapSize = 1024; //width & height used for the cubemap texture 23 | /* 24 | The following float, zClippingPlane, is the distance from center to a cubemap wall. 25 | The higher the number, the further objects can travel before they begin to clip out of frame. 26 | The pixel dimension of the drawable area is 0 to zClippingPlane * 2. 27 | e.g. if zClippingPlane is 2000, the drawable area is 0 to 4000 in each direction (x, y, z). 28 | 29 | Rather than rely on your screen width and height vars, you should use values based on the zClippingPlane. 30 | i.e. to translate to center screen, use: 31 | translate(zClippingPlane, zClippingPlane, 0); 32 | 33 | Remember instead of width/2 or height/2, use zClippingPlane, 34 | and instead of width or height, use zClippingPlane*2 35 | */ 36 | float zClippingPlane = 2000.0f; 37 | 38 | //set record to true if you want to save frames to make a video 39 | boolean record = false; 40 | 41 | //some variables to handle the animations 42 | PVector[] rotations = new PVector[6]; 43 | PVector[] oscillations = new PVector[6]; 44 | 45 | void setup() { 46 | //we'll be using the saved frames to create a 2048 x 1024 video, 47 | size(2048, 1024, P3D); 48 | 49 | //for testing, you might want to work at a smaller resolution, but for export, the above is preferred 50 | //size(1024, 512, P3D); 51 | 52 | smooth(); 53 | background(0); 54 | 55 | // if you have a retina display, pixelDesnity(2) is supported. Bring on those 4k renders! 
56 | // pixelDensity(2); 57 | 58 | initCubeMap(); 59 | 60 | //set up animation vars 61 | for (int i = 0; i < 6; i++) { 62 | rotations[i] = new PVector(radians(random(0, 360)), radians(random(0, 360)), radians(random(0, 360))); 63 | oscillations[i] = new PVector(0,0,0); 64 | } 65 | } 66 | 67 | /* 68 | It's best to leave draw() alone and do all your updates and drawing in the methods at the bottom of this file 69 | - animationPreUpdate() 70 | - animationPostUpdate() 71 | - drawScene() 72 | */ 73 | void draw() { 74 | background(0); 75 | strokeWeight(0); 76 | noStroke(); 77 | 78 | //upate pre render call 79 | animationPreUpdate(); 80 | 81 | drawCubeMap(); 82 | 83 | //upate post render call 84 | animationPostUpdate(); 85 | 86 | //record 900 frames for a 30 second 30 fps video 87 | if (record == true && frameCount < 901) { 88 | saveFrame("frames/frame-####.png"); 89 | } 90 | 91 | surface.setTitle("FPS: " + (int) frameRate); 92 | } 93 | 94 | 95 | 96 | /* 97 | Put any input processing or pre render updates here 98 | */ 99 | void animationPreUpdate() { 100 | } 101 | 102 | 103 | /* 104 | Put any post render updates here 105 | */ 106 | void animationPostUpdate() { 107 | for (int i = 0; i < 6; i++) { 108 | rotations[i].x += radians(0.5); 109 | rotations[i].y += radians(1); 110 | rotations[i].z += radians(2); 111 | 112 | //because we're rendering each frame for video, we will use frameCount as 1 frame = 1 degree 113 | float r = radians(frameCount); 114 | 115 | oscillations[i].x = sin(r * 0.7) * 150; 116 | oscillations[i].y = sin(r * 0.5) * 150; 117 | oscillations[i].z = sin(r) * 300; 118 | } 119 | } 120 | 121 | 122 | /* 123 | Put your shapes/objects and lights here to be drawn to the screen 124 | */ 125 | void drawScene() { 126 | background(0); 127 | lights(); 128 | 129 | //front - red 130 | pushMatrix(); 131 | translate(zClippingPlane + oscillations[0].x, zClippingPlane, -250); 132 | rotateX(rotations[0].x); 133 | fill(255, 0, 0); 134 | box(75); 135 | popMatrix(); 136 | 137 | //right - green 138 | pushMatrix(); 139 | translate(zClippingPlane + 250, zClippingPlane, oscillations[1].z); 140 | rotateZ(rotations[1].z); 141 | fill(0, 255, 0); 142 | box(75); 143 | popMatrix(); 144 | 145 | //back - blue 146 | pushMatrix(); 147 | translate(zClippingPlane, zClippingPlane + oscillations[2].y, 250); 148 | rotateY(rotations[2].y); 149 | fill(0, 0, 255); 150 | box(75); 151 | popMatrix(); 152 | 153 | //left - yellow 154 | pushMatrix(); 155 | translate(zClippingPlane - 250, zClippingPlane, 0); 156 | rotateY(rotations[3].y); 157 | rotateZ(rotations[3].z); 158 | fill(255, 255, 0); 159 | box(75); 160 | popMatrix(); 161 | 162 | //top - cyan 163 | pushMatrix(); 164 | translate(zClippingPlane, zClippingPlane - 250, 0); 165 | rotateY(rotations[4].y); 166 | rotateZ(rotations[4].z); 167 | fill(0, 255, 255); 168 | box(75); 169 | popMatrix(); 170 | 171 | //bottom - magenta 172 | pushMatrix(); 173 | translate(zClippingPlane, zClippingPlane + 250, 0); 174 | rotateY(rotations[5].y); 175 | rotateX(rotations[5].x); 176 | fill(255, 0, 255); 177 | box(75); 178 | popMatrix(); 179 | } 180 | -------------------------------------------------------------------------------- /mono_360_HYPE/example_01/cubeMapUtils.pde: -------------------------------------------------------------------------------- 1 | /* 2 | These functions are copied almost verbatim from the Processing domeProjection example, and shouldn't need changing 3 | 4 | The main changes made are: 5 | - replacing the dome shape with a rectangle 6 | - using an equirectangular shader 7 
| - capturing 6 sides not 5 for the cubemap 8 | - using linear filtering for anti-aliasing the texture 9 | - putting the camera in the center of the scene with the translate call on line 108 10 | */ 11 | 12 | 13 | void initCubeMap() { 14 | //change the domeSphere shape to a rectangle as we're not projecting on a dome 15 | myRect = createShape(RECT, -width/2, -height/2, width, height); 16 | myRect.setStroke(false); 17 | 18 | int txtMapSize = envMapSize; 19 | if (g.pixelDensity == 2) { 20 | txtMapSize = envMapSize * 2; 21 | } 22 | 23 | PGL pgl = beginPGL(); 24 | 25 | envMapTextureID = IntBuffer.allocate(1); 26 | pgl.genTextures(1, envMapTextureID); 27 | pgl.bindTexture(PGL.TEXTURE_CUBE_MAP, envMapTextureID.get(0)); 28 | pgl.texParameteri(PGL.TEXTURE_CUBE_MAP, PGL.TEXTURE_WRAP_S, PGL.CLAMP_TO_EDGE); 29 | pgl.texParameteri(PGL.TEXTURE_CUBE_MAP, PGL.TEXTURE_WRAP_T, PGL.CLAMP_TO_EDGE); 30 | pgl.texParameteri(PGL.TEXTURE_CUBE_MAP, PGL.TEXTURE_WRAP_R, PGL.CLAMP_TO_EDGE); 31 | pgl.texParameteri(PGL.TEXTURE_CUBE_MAP, PGL.TEXTURE_MIN_FILTER, PGL.LINEAR); 32 | pgl.texParameteri(PGL.TEXTURE_CUBE_MAP, PGL.TEXTURE_MAG_FILTER, PGL.LINEAR); 33 | for (int i = PGL.TEXTURE_CUBE_MAP_POSITIVE_X; i < PGL.TEXTURE_CUBE_MAP_POSITIVE_X + 6; i++) { 34 | pgl.texImage2D(i, 0, PGL.RGBA8, txtMapSize, txtMapSize, 0, PGL.RGBA, PGL.UNSIGNED_BYTE, null); 35 | } 36 | 37 | // Init fbo, rbo 38 | fbo = IntBuffer.allocate(1); 39 | rbo = IntBuffer.allocate(1); 40 | pgl.genFramebuffers(1, fbo); 41 | pgl.bindFramebuffer(PGL.FRAMEBUFFER, fbo.get(0)); 42 | pgl.framebufferTexture2D(PGL.FRAMEBUFFER, PGL.COLOR_ATTACHMENT0, PGL.TEXTURE_CUBE_MAP_POSITIVE_X, envMapTextureID.get(0), 0); 43 | 44 | pgl.genRenderbuffers(1, rbo); 45 | pgl.bindRenderbuffer(PGL.RENDERBUFFER, rbo.get(0)); 46 | pgl.renderbufferStorage(PGL.RENDERBUFFER, PGL.DEPTH_COMPONENT24, txtMapSize, txtMapSize); 47 | 48 | // Attach depth buffer to FBO 49 | pgl.framebufferRenderbuffer(PGL.FRAMEBUFFER, PGL.DEPTH_ATTACHMENT, PGL.RENDERBUFFER, rbo.get(0)); 50 | 51 | endPGL(); 52 | 53 | // Load cubemap shader. 
54 | cubemapShader = loadShader("equirectangular.glsl"); 55 | cubemapShader.set("cubemap", 1); 56 | 57 | } 58 | 59 | void drawCubeMap() { 60 | PGL pgl = beginPGL(); 61 | pgl.activeTexture(PGL.TEXTURE1); 62 | pgl.enable(PGL.TEXTURE_CUBE_MAP); 63 | pgl.bindTexture(PGL.TEXTURE_CUBE_MAP, envMapTextureID.get(0)); 64 | regenerateEnvMap(pgl); 65 | endPGL(); 66 | 67 | drawDomeMaster(); 68 | 69 | pgl.bindTexture(PGL.TEXTURE_CUBE_MAP, 0); 70 | } 71 | 72 | void drawDomeMaster() { 73 | camera(); 74 | // ortho(); 75 | ortho(width/2, -width/2, -height/2, height/2); 76 | 77 | resetMatrix(); 78 | shader(cubemapShader); 79 | shape(myRect); 80 | resetShader(); 81 | } 82 | 83 | // Called to regenerate the envmap 84 | void regenerateEnvMap(PGL pgl) { 85 | // bind fbo 86 | pgl.bindFramebuffer(PGL.FRAMEBUFFER, fbo.get(0)); 87 | 88 | // generate 6 views from origin(0, 0, 0) 89 | pgl.viewport(0, 0, envMapSize, envMapSize); 90 | perspective(90.0f * DEG_TO_RAD, 1.0f, 1.0f, zClippingPlane); 91 | 92 | //note the <= to generate 6 faces, not 5 as per DomeProjection example 93 | for (int face = PGL.TEXTURE_CUBE_MAP_POSITIVE_X; face <= 94 | PGL.TEXTURE_CUBE_MAP_NEGATIVE_Z; face++) { 95 | 96 | resetMatrix(); 97 | 98 | if (face == PGL.TEXTURE_CUBE_MAP_POSITIVE_X) { 99 | camera(0.0f, 0.0f, 0.0f, -1.0f, 0.0f, 0.0f, 0.0f, 1.0f, 0.0f); 100 | } else if (face == PGL.TEXTURE_CUBE_MAP_NEGATIVE_X) { 101 | camera(0.0f, 0.0f, 0.0f, 1.0f, 0.0f, 0.0f, 0.0f, 1.0f, 0.0f); 102 | } else if (face == PGL.TEXTURE_CUBE_MAP_POSITIVE_Y) { 103 | camera(0.0f, 0.0f, 0.0f, 0.0f, 1.0f, 0.0f, 0.0f, 0.0f, -1.0f); 104 | } else if (face == PGL.TEXTURE_CUBE_MAP_NEGATIVE_Y) { 105 | camera(0.0f, 0.0f, 0.0f, 0.0f, -1.0f, 0.0f, 0.0f, 0.0f, 1.0f); 106 | } else if (face == PGL.TEXTURE_CUBE_MAP_POSITIVE_Z) { 107 | camera(0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 1.0f, 0.0f, 1.0f, 0.0f); 108 | } else if (face == PGL.TEXTURE_CUBE_MAP_NEGATIVE_Z) { 109 | camera(0.0f, 0.0f, 0.0f, 0.0f, 0.0f, -1.0f, 0.0f, 1.0f, 0.0f); 110 | } 111 | 112 | rotateY(HALF_PI); //sets forward facing to center of screen for video output 113 | translate(-zClippingPlane, -zClippingPlane, 0);//defaults coords to processing style with 0,0 in top left of visible space 114 | 115 | pgl.framebufferTexture2D(PGL.FRAMEBUFFER, PGL.COLOR_ATTACHMENT0, face, envMapTextureID.get(0), 0); 116 | 117 | drawScene(); // Draw objects in the scene 118 | flush(); // Make sure that the geometry in the scene is pushed to the GPU 119 | noLights(); // Disabling lights to avoid adding many times 120 | pgl.framebufferTexture2D(PGL.FRAMEBUFFER, PGL.COLOR_ATTACHMENT0, face, 0, 0); 121 | } 122 | } 123 | -------------------------------------------------------------------------------- /mono_360_HYPE/example_01/data/equirectangular.glsl: -------------------------------------------------------------------------------- 1 | uniform samplerCube cubemap; varying vec4 vertTexCoord; void main() { vec2 thetaphi = ((vertTexCoord.xy * 2.0) - vec2(1.0)) * vec2(3.1415926535897932384626433832795, 1.5707963267948966192313216916398); vec3 rayDirection = vec3(cos(thetaphi.y) * cos(thetaphi.x), sin(thetaphi.y), cos(thetaphi.y) * sin(thetaphi.x)); vec3 color = vec3(textureCube(cubemap, rayDirection)); gl_FragColor = vec4(color, 1.0); } -------------------------------------------------------------------------------- /mono_360_HYPE/example_01/example_01.pde: -------------------------------------------------------------------------------- 1 | /** 2 | * Processing to 360 video 3 | * 4 | * This sketch renders a cubemap to an equirectangular image that can be 
captured to create 360 photos or videos. 5 | * 6 | * example_01 - integration with HYPE Framework 7 | * 8 | * The code is a modification of the DomeProjection Processing example sketch, and an equirectangular GLSL shader by BeRo, on ShaderToy: 9 | * https://www.shadertoy.com/view/XsBSDR# 10 | * https://www.shadertoy.com/user/BeRo 11 | */ 12 | 13 | import hype.*; 14 | import hype.extended.behavior.HOrbiter3D; 15 | import hype.extended.colorist.HColorPool; 16 | 17 | import java.nio.IntBuffer; 18 | 19 | PShader cubemapShader; 20 | PShape myRect; 21 | 22 | IntBuffer fbo; 23 | IntBuffer rbo; 24 | IntBuffer envMapTextureID; 25 | 26 | int envMapSize = 1024; //width & height used for the cubemap texture 27 | /* 28 | The following float, zClippingPlane, is the distance from center to a cubemap wall. 29 | The higher the number, the further objects can travel before they begin to clip out of frame. 30 | The pixel dimension of the drawable area is 0 to zClippingPlane * 2. 31 | e.g. if zClippingPlane is 2000, the drawable area is 0 to 4000 in each direction (x, y, z). 32 | 33 | Rather than rely on your screen width and height vars, you should use values based on the zClippingPlane. 34 | i.e. to translate to center screen, use: 35 | translate(zClippingPlane, zClippingPlane, 0); 36 | 37 | Remember instead of width/2 or height/2, use zClippingPlane, 38 | and instead of width or height, use zClippingPlane*2 39 | */ 40 | float zClippingPlane = 2000.0f; 41 | 42 | //set record to true if you want to save frames to make a video 43 | boolean record = false; 44 | 45 | //HYPE specific 46 | HDrawablePool pool; 47 | 48 | void setup() { 49 | //we'll be using the saved frames to create a 2048 x 1024 video 50 | size(2048, 1024, P3D); 51 | 52 | //for testing, you might want to work at a smaller resolution, but for export, the above is preferred 53 | //size(1024, 512, P3D); 54 | 55 | smooth(); 56 | background(0); 57 | 58 | // if you have a retina display, pixelDesnity(2) is supported. Bring on those 4k renders! 
59 | // pixelDensity(2); 60 | 61 | initCubeMap(); 62 | 63 | //HYPE stuff 64 | H.init(this).background(#242424).use3D(true); 65 | 66 | pool = new HDrawablePool(100); 67 | pool.autoAddToStage() 68 | .add(new HSphere()) 69 | .colorist( new HColorPool(#333333,#494949,#5F5F5F,#707070,#7D7D7D,#888888,#949494,#A2A2A2,#B1B1B1,#C3C3C3,#D6D6D6,#EBEBEB,#FFFFFF).fillOnly() ) 70 | .onCreate( 71 | new HCallback() { 72 | public void run(Object obj) { 73 | HSphere d = (HSphere) obj; 74 | int ranSize = 10 + ( (int)random(3)*7 ); 75 | 76 | d.size(ranSize).strokeWeight(0).noStroke().anchorAt(H.CENTER); 77 | 78 | HOrbiter3D orb = new HOrbiter3D(zClippingPlane, zClippingPlane, 0) 79 | .target(d) 80 | .zSpeed(random(-1.5, 1.5)) 81 | .ySpeed(random(-0.5, 0.5)) 82 | .radius(random(200, 500)) 83 | .zAngle( (int)random(360) ) 84 | .yAngle( (int)random(360) ) 85 | ; 86 | } 87 | } 88 | ) 89 | .requestAll() 90 | ; 91 | 92 | //I like to advance a couple frames in to get all the Drawables initialised and on their way 93 | H.drawStage(); 94 | H.drawStage(); 95 | background(0); 96 | } 97 | 98 | /* 99 | It's best to leave draw() alone and do all your updates and drawing in the methods at the bottom of this file 100 | - animationPreUpdate() 101 | - animationPostUpdate() 102 | - drawScene() 103 | */ 104 | void draw() { 105 | background(0); 106 | strokeWeight(0); 107 | noStroke(); 108 | 109 | //upate pre render call 110 | animationPreUpdate(); 111 | 112 | drawCubeMap(); 113 | 114 | //upate post render call 115 | animationPostUpdate(); 116 | 117 | //record 900 frames for a 30 second 30 fps video 118 | if (record == true && frameCount < 901) { 119 | saveFrame("frames/frame-####.png"); 120 | } 121 | 122 | surface.setTitle("FPS: " + (int) frameRate); 123 | } 124 | 125 | 126 | 127 | /* 128 | Put any input processing or pre render updates here 129 | */ 130 | void animationPreUpdate() { 131 | } 132 | 133 | 134 | /* 135 | Put any post render updates here 136 | */ 137 | void animationPostUpdate() { 138 | H.updateBehaviors(); 139 | } 140 | 141 | 142 | /* 143 | Put your shapes/objects and lights here to be drawn to the screen 144 | */ 145 | void drawScene() { 146 | pointLight(100, 0, 0, zClippingPlane, zClippingPlane*2, 200); // under red light 147 | pointLight(51, 153, 153, zClippingPlane, -50, 150); // over teal light 148 | pointLight(204, 204, 204, zClippingPlane, zClippingPlane - 50, 500); // mid light gray light 149 | 150 | sphereDetail(20); 151 | H.drawStageOnly(); 152 | } 153 | -------------------------------------------------------------------------------- /mono_360_HYPE/example_02/cubeMapUtils.pde: -------------------------------------------------------------------------------- 1 | /* 2 | These functions are copied almost verbatim from the Processing domeProjection example, and shouldn't need changing 3 | 4 | The main changes made are: 5 | - replacing the dome shape with a rectangle 6 | - using an equirectangular shader 7 | - capturing 6 sides not 5 for the cubemap 8 | - using linear filtering for anti-aliasing the texture 9 | - putting the camera in the center of the scene with the translate call on line 108 10 | */ 11 | 12 | 13 | void initCubeMap() { 14 | //change the domeSphere shape to a rectangle as we're not projecting on a dome 15 | myRect = createShape(RECT, -width/2, -height/2, width, height); 16 | myRect.setStroke(false); 17 | 18 | int txtMapSize = envMapSize; 19 | if (g.pixelDensity == 2) { 20 | txtMapSize = envMapSize * 2; 21 | } 22 | 23 | PGL pgl = beginPGL(); 24 | 25 | envMapTextureID = IntBuffer.allocate(1); 26 | 
pgl.genTextures(1, envMapTextureID); 27 | pgl.bindTexture(PGL.TEXTURE_CUBE_MAP, envMapTextureID.get(0)); 28 | pgl.texParameteri(PGL.TEXTURE_CUBE_MAP, PGL.TEXTURE_WRAP_S, PGL.CLAMP_TO_EDGE); 29 | pgl.texParameteri(PGL.TEXTURE_CUBE_MAP, PGL.TEXTURE_WRAP_T, PGL.CLAMP_TO_EDGE); 30 | pgl.texParameteri(PGL.TEXTURE_CUBE_MAP, PGL.TEXTURE_WRAP_R, PGL.CLAMP_TO_EDGE); 31 | pgl.texParameteri(PGL.TEXTURE_CUBE_MAP, PGL.TEXTURE_MIN_FILTER, PGL.LINEAR); 32 | pgl.texParameteri(PGL.TEXTURE_CUBE_MAP, PGL.TEXTURE_MAG_FILTER, PGL.LINEAR); 33 | for (int i = PGL.TEXTURE_CUBE_MAP_POSITIVE_X; i < PGL.TEXTURE_CUBE_MAP_POSITIVE_X + 6; i++) { 34 | pgl.texImage2D(i, 0, PGL.RGBA8, txtMapSize, txtMapSize, 0, PGL.RGBA, PGL.UNSIGNED_BYTE, null); 35 | } 36 | 37 | // Init fbo, rbo 38 | fbo = IntBuffer.allocate(1); 39 | rbo = IntBuffer.allocate(1); 40 | pgl.genFramebuffers(1, fbo); 41 | pgl.bindFramebuffer(PGL.FRAMEBUFFER, fbo.get(0)); 42 | pgl.framebufferTexture2D(PGL.FRAMEBUFFER, PGL.COLOR_ATTACHMENT0, PGL.TEXTURE_CUBE_MAP_POSITIVE_X, envMapTextureID.get(0), 0); 43 | 44 | pgl.genRenderbuffers(1, rbo); 45 | pgl.bindRenderbuffer(PGL.RENDERBUFFER, rbo.get(0)); 46 | pgl.renderbufferStorage(PGL.RENDERBUFFER, PGL.DEPTH_COMPONENT24, txtMapSize, txtMapSize); 47 | 48 | // Attach depth buffer to FBO 49 | pgl.framebufferRenderbuffer(PGL.FRAMEBUFFER, PGL.DEPTH_ATTACHMENT, PGL.RENDERBUFFER, rbo.get(0)); 50 | 51 | endPGL(); 52 | 53 | // Load cubemap shader. 54 | cubemapShader = loadShader("equirectangular.glsl"); 55 | cubemapShader.set("cubemap", 1); 56 | 57 | } 58 | 59 | void drawCubeMap() { 60 | PGL pgl = beginPGL(); 61 | pgl.activeTexture(PGL.TEXTURE1); 62 | pgl.enable(PGL.TEXTURE_CUBE_MAP); 63 | pgl.bindTexture(PGL.TEXTURE_CUBE_MAP, envMapTextureID.get(0)); 64 | regenerateEnvMap(pgl); 65 | endPGL(); 66 | 67 | drawDomeMaster(); 68 | 69 | pgl.bindTexture(PGL.TEXTURE_CUBE_MAP, 0); 70 | } 71 | 72 | void drawDomeMaster() { 73 | camera(); 74 | // ortho(); 75 | ortho(width/2, -width/2, -height/2, height/2); 76 | 77 | resetMatrix(); 78 | shader(cubemapShader); 79 | shape(myRect); 80 | resetShader(); 81 | } 82 | 83 | // Called to regenerate the envmap 84 | void regenerateEnvMap(PGL pgl) { 85 | // bind fbo 86 | pgl.bindFramebuffer(PGL.FRAMEBUFFER, fbo.get(0)); 87 | 88 | // generate 6 views from origin(0, 0, 0) 89 | pgl.viewport(0, 0, envMapSize, envMapSize); 90 | perspective(90.0f * DEG_TO_RAD, 1.0f, 1.0f, zClippingPlane); 91 | 92 | //note the <= to generate 6 faces, not 5 as per DomeProjection example 93 | for (int face = PGL.TEXTURE_CUBE_MAP_POSITIVE_X; face <= 94 | PGL.TEXTURE_CUBE_MAP_NEGATIVE_Z; face++) { 95 | 96 | resetMatrix(); 97 | 98 | if (face == PGL.TEXTURE_CUBE_MAP_POSITIVE_X) { 99 | camera(0.0f, 0.0f, 0.0f, -1.0f, 0.0f, 0.0f, 0.0f, 1.0f, 0.0f); 100 | } else if (face == PGL.TEXTURE_CUBE_MAP_NEGATIVE_X) { 101 | camera(0.0f, 0.0f, 0.0f, 1.0f, 0.0f, 0.0f, 0.0f, 1.0f, 0.0f); 102 | } else if (face == PGL.TEXTURE_CUBE_MAP_POSITIVE_Y) { 103 | camera(0.0f, 0.0f, 0.0f, 0.0f, 1.0f, 0.0f, 0.0f, 0.0f, -1.0f); 104 | } else if (face == PGL.TEXTURE_CUBE_MAP_NEGATIVE_Y) { 105 | camera(0.0f, 0.0f, 0.0f, 0.0f, -1.0f, 0.0f, 0.0f, 0.0f, 1.0f); 106 | } else if (face == PGL.TEXTURE_CUBE_MAP_POSITIVE_Z) { 107 | camera(0.0f, 0.0f, 0.0f, 0.0f, 0.0f, 1.0f, 0.0f, 1.0f, 0.0f); 108 | } else if (face == PGL.TEXTURE_CUBE_MAP_NEGATIVE_Z) { 109 | camera(0.0f, 0.0f, 0.0f, 0.0f, 0.0f, -1.0f, 0.0f, 1.0f, 0.0f); 110 | } 111 | 112 | rotateY(HALF_PI); //sets forward facing to center of screen for video output 113 | translate(-zClippingPlane, -zClippingPlane, 
0);//defaults coords to processing style with 0,0 in top left of visible space 114 | 115 | pgl.framebufferTexture2D(PGL.FRAMEBUFFER, PGL.COLOR_ATTACHMENT0, face, envMapTextureID.get(0), 0); 116 | 117 | drawScene(); // Draw objects in the scene 118 | flush(); // Make sure that the geometry in the scene is pushed to the GPU 119 | noLights(); // Disabling lights to avoid adding many times 120 | pgl.framebufferTexture2D(PGL.FRAMEBUFFER, PGL.COLOR_ATTACHMENT0, face, 0, 0); 121 | } 122 | } 123 | -------------------------------------------------------------------------------- /mono_360_HYPE/example_02/data/equirectangular.glsl: -------------------------------------------------------------------------------- 1 | uniform samplerCube cubemap; varying vec4 vertTexCoord; void main() { vec2 thetaphi = ((vertTexCoord.xy * 2.0) - vec2(1.0)) * vec2(3.1415926535897932384626433832795, 1.5707963267948966192313216916398); vec3 rayDirection = vec3(cos(thetaphi.y) * cos(thetaphi.x), sin(thetaphi.y), cos(thetaphi.y) * sin(thetaphi.x)); vec3 color = vec3(textureCube(cubemap, rayDirection)); gl_FragColor = vec4(color, 1.0); } -------------------------------------------------------------------------------- /mono_360_HYPE/example_02/example_02.pde: -------------------------------------------------------------------------------- 1 | /** 2 | * Processing to 360 video 3 | * 4 | * This sketch renders a cubemap to an equirectangular image that can be captured to create 360 photos or videos. 5 | * 6 | * example_02 - integration with HYPE Framework 7 | * 8 | * The code is a modification of the DomeProjection Processing example sketch, and an equirectangular GLSL shader by BeRo, on ShaderToy: 9 | * https://www.shadertoy.com/view/XsBSDR# 10 | * https://www.shadertoy.com/user/BeRo 11 | */ 12 | 13 | import hype.*; 14 | import hype.extended.layout.HSphereLayout; 15 | import hype.extended.behavior.HOscillator; 16 | import hype.extended.behavior.HOrbiter3D; 17 | 18 | import java.nio.IntBuffer; 19 | 20 | PShader cubemapShader; 21 | PShape myRect; 22 | 23 | IntBuffer fbo; 24 | IntBuffer rbo; 25 | IntBuffer envMapTextureID; 26 | 27 | int envMapSize = 1024; //width & height used for the cubemap texture 28 | /* 29 | The following float, zClippingPlane, is the distance from center to a cubemap wall. 30 | The higher the number, the further objects can travel before they begin to clip out of frame. 31 | The pixel dimension of the drawable area is 0 to zClippingPlane * 2. 32 | e.g. if zClippingPlane is 2000, the drawable area is 0 to 4000 in each direction (x, y, z). 33 | 34 | Rather than rely on your screen width and height vars, you should use values based on the zClippingPlane. 35 | i.e. to translate to center screen, use: 36 | translate(zClippingPlane, zClippingPlane, 0); 37 | 38 | Remember instead of width/2 or height/2, use zClippingPlane, 39 | and instead of width or height, use zClippingPlane*2 40 | */ 41 | float zClippingPlane = 2000.0f; 42 | 43 | //set record to true if you want to save frames to make a video 44 | boolean record = false; 45 | 46 | //HYPE specific vars 47 | HDrawablePool pool; 48 | HSphereLayout layout; 49 | HOrbiter3D orb, orb2; 50 | 51 | void setup() { 52 | //we'll be using the saved frames to create a 2048 x 1024 video 53 | size(2048, 1024, P3D); 54 | 55 | //for testing, you might want to work at a smaller resolution, but for export, the above is preferred 56 | //size(1024, 512, P3D); 57 | 58 | smooth(); 59 | background(0); 60 | 61 | // if you have a retina display, pixelDesnity(2) is supported. 
Bring on those 4k renders! 62 | // pixelDensity(2); 63 | 64 | initCubeMap(); 65 | 66 | //HYPE stuff 67 | H.init(this).background(#242424).use3D(true); 68 | 69 | pool = new HDrawablePool(200); 70 | 71 | layout = new HSphereLayout() 72 | .loc(zClippingPlane, zClippingPlane, 0) 73 | .radius(200) 74 | .rotate(true) 75 | .useSpiral() 76 | .numPoints(200) 77 | .phiModifier(3.0001) 78 | ; 79 | 80 | pool.add(new HIcosahedron()) 81 | .layout(layout) 82 | .autoAddToStage() 83 | .onCreate ( 84 | new HCallback() { 85 | public void run(Object obj) { 86 | HDrawable3D d = (HDrawable3D) obj; 87 | d.depth(20); 88 | d.size(20, 20); 89 | d.noStroke(); 90 | 91 | int i = pool.currentIndex(); 92 | 93 | new HOscillator() 94 | .target(d) 95 | .property(H.SCALE) 96 | .range(0.5, 1.5) 97 | .speed(1.5) 98 | .freq(1.6) 99 | .currentStep(i*3) 100 | ; 101 | } 102 | } 103 | ) 104 | .requestAll() 105 | ; 106 | 107 | orb = new HOrbiter3D(zClippingPlane, zClippingPlane, 0) 108 | .zSpeed(random(0.0, 1.0) + 0.5) 109 | .ySpeed(random(0.0, 1.0) + 0.5) 110 | .radius(250) 111 | .zAngle( (int)random(360) ) 112 | .yAngle( (int)random(360) ) 113 | ; 114 | 115 | orb2 = new HOrbiter3D(zClippingPlane, zClippingPlane, 0) 116 | .zSpeed(random(0.0, 1.0) + 1.0) 117 | .ySpeed(random(0.0, 1.0) + 0.5) 118 | .radius(300) 119 | .zAngle( (int)random(360) ) 120 | .yAngle( (int)random(360) ) 121 | ; 122 | 123 | //I like to advance a couple frames in to get all the Drawables initialised and on their way 124 | H.drawStage(); 125 | H.drawStage(); 126 | background(0); 127 | } 128 | 129 | /* 130 | It's best to leave draw() alone and do all your updates and drawing in the methods at the bottom of this file 131 | - animationPreUpdate() 132 | - animationPostUpdate() 133 | - drawScene() 134 | */ 135 | void draw() { 136 | background(0); 137 | strokeWeight(0); 138 | noStroke(); 139 | 140 | //upate pre render call 141 | animationPreUpdate(); 142 | 143 | drawCubeMap(); 144 | 145 | //upate post render call 146 | animationPostUpdate(); 147 | 148 | //record 900 frames for a 30 second 30 fps video 149 | if (record == true && frameCount < 901) { 150 | saveFrame("frames/frame-####.png"); 151 | } 152 | 153 | surface.setTitle("FPS: " + (int) frameRate); 154 | } 155 | 156 | 157 | 158 | /* 159 | Put any input processing or pre render updates here 160 | */ 161 | void animationPreUpdate() { 162 | orb._run(); 163 | orb2._run(); 164 | } 165 | 166 | 167 | /* 168 | Put any post render updates here 169 | */ 170 | void animationPostUpdate() { 171 | H.updateBehaviors(); 172 | } 173 | 174 | 175 | /* 176 | Put your shapes/objects and lights here to be drawn to the screen 177 | */ 178 | void drawScene() { 179 | pointLight(35, 35, 35, zClippingPlane, zClippingPlane-400, 300); 180 | pointLight(255, 0, 156, orb.x(), orb.y(), orb.z()); // magenta 181 | pointLight(0, 100, 180, orb2.x(), orb2.y(), orb2.z()); // blue 182 | 183 | H.drawStageOnly(); 184 | } 185 | -------------------------------------------------------------------------------- /top_bottom_360/build/Beams.pde: -------------------------------------------------------------------------------- 1 | class Beams extends Drawable { 2 | 3 | float myHeight; 4 | float heightCap; 5 | float speed; 6 | 7 | Beams() { 8 | super(); 9 | myHeight = 0; 10 | 11 | heightCap = floor(random(2, 6)) * 500; 12 | 13 | speed = floor(random(1, 4)) * 10; 14 | } 15 | 16 | public void update() { 17 | super.update(); 18 | myHeight += speed; 19 | myHeight = min(myHeight, heightCap); 20 | } 21 | 22 | public boolean isDead() { 23 | if (loc.z > 7500) { 24 | 
return true; 25 | } else { 26 | return false; 27 | } 28 | } 29 | 30 | public void display() { 31 | 32 | float boxSize = max(cur[myIdx]/6, 5); 33 | float a = max(75, (2*cur[myIdx]) + 55); 34 | 35 | canvas.pushMatrix(); 36 | 37 | canvas.translate(loc.x, loc.y, loc.z); 38 | canvas.noLights(); 39 | canvas.fill(220, 240, 255, a); 40 | canvas.strokeWeight(0); 41 | canvas.noStroke(); 42 | 43 | canvas.translate(0, 250, 0); 44 | canvas.box(boxSize, myHeight, boxSize); 45 | 46 | canvas.translate(50, -125, 0); 47 | canvas.box(boxSize, myHeight, boxSize); 48 | 49 | canvas.translate(50, -125, 0); 50 | canvas.box(boxSize, myHeight, boxSize); 51 | 52 | canvas.translate(50, 125, 0); 53 | canvas.box(boxSize, myHeight, boxSize); 54 | 55 | canvas.translate(50, 125, 0); 56 | canvas.box(boxSize, myHeight, boxSize); 57 | 58 | 59 | canvas.popMatrix(); 60 | } 61 | } -------------------------------------------------------------------------------- /top_bottom_360/build/Drawable.pde: -------------------------------------------------------------------------------- 1 | class Drawable { 2 | 3 | PVector loc; 4 | PVector vel; 5 | 6 | int myFill; 7 | int myStroke; 8 | float myStrokeWeight; 9 | int myIdx; 10 | 11 | 12 | Drawable() { 13 | loc = new PVector(); 14 | vel = new PVector(); 15 | 16 | myFill = color(255, 255, 255); 17 | myStroke = color(0, 0, 0); 18 | myStrokeWeight = 1; 19 | } 20 | 21 | public boolean isDead() { 22 | return false; 23 | } 24 | 25 | public void setIdx(int i) { 26 | myIdx = i; 27 | } 28 | 29 | public void setPos(float x, float y, float z) { 30 | loc.x = x; 31 | loc.y = y; 32 | loc.z = z; 33 | } 34 | 35 | public void setVel(float x, float y, float z) { 36 | vel.x = x; 37 | vel.y = y; 38 | vel.z = z; 39 | } 40 | 41 | public float getZ() { 42 | return loc.z; 43 | } 44 | 45 | public void setFill(color c) { 46 | myFill = c; 47 | } 48 | public void setStroke(color c) { 49 | myStroke = c; 50 | } 51 | public void setStrokeWeight(float f) { 52 | myStrokeWeight = f; 53 | } 54 | 55 | public void display() { 56 | 57 | } 58 | 59 | public void update() { 60 | loc.add(vel); 61 | } 62 | } -------------------------------------------------------------------------------- /top_bottom_360/build/Geo.pde: -------------------------------------------------------------------------------- 1 | class Geo extends Drawable { 2 | 3 | float w, h; 4 | 5 | Geo() { 6 | super(); 7 | 8 | w = 50; 9 | h = 50; 10 | } 11 | 12 | public void update() { 13 | super.update(); 14 | } 15 | 16 | public boolean isDead() { 17 | if (loc.z > 7500) { 18 | return true; 19 | } else { 20 | return false; 21 | } 22 | } 23 | 24 | public void display() { 25 | 26 | canvas.pushMatrix(); 27 | 28 | canvas.translate(loc.x, loc.y, loc.z); 29 | 30 | canvas.hint(DISABLE_DEPTH_TEST); 31 | canvas.hint(ENABLE_DEPTH_SORT); //woohoo!!!! 
32 | 33 | for (int i = 7; i > 0; i--) { 34 | canvas.rotate(radians((i+1) * 15)); 35 | canvas.noStroke(); 36 | canvas.noFill(); 37 | canvas.fill(0, 255, 0); 38 | 39 | canvas.beginShape(QUADS); 40 | canvas.tint(255 - i*25, 65 + i*20, 50 + i*20, 200); 41 | canvas.texture(poly); 42 | canvas.textureMode(NORMAL); 43 | canvas.vertex(-w, -h, -i * 300, 0, 0); 44 | canvas.vertex(w, -h, -i * 300, 1, 0); 45 | canvas.vertex(w, h, -i * 300, 1, 1); 46 | canvas.vertex(-w, h, -i * 300, 0, 1); 47 | canvas.endShape(CLOSE); 48 | } 49 | canvas.hint(ENABLE_DEPTH_TEST); 50 | canvas.hint(DISABLE_DEPTH_SORT); 51 | 52 | canvas.popMatrix(); 53 | } 54 | } -------------------------------------------------------------------------------- /top_bottom_360/build/Icos.pde: -------------------------------------------------------------------------------- 1 | class Icos extends Drawable { 2 | 3 | ArrayList v; 4 | 5 | float fadeAmt; 6 | float curFade; 7 | 8 | float xSpin, zSpin, xSpeed, zSpeed; 9 | 10 | Icos() { 11 | super(); 12 | 13 | v = new ArrayList(); 14 | 15 | float t = 1.61803399f / 2.0f;//half golden ration to make icos unit radius .5 16 | 17 | v.add(new PVector(-0.5f, t, 0)); 18 | v.add(new PVector( 0.5f, t, 0)); 19 | v.add(new PVector(-0.5f, -t, 0)); 20 | v.add(new PVector( 0.5f, -t, 0)); 21 | v.add(new PVector( 0, -0.5f, t)); 22 | v.add(new PVector( 0, 0.5f, t)); 23 | v.add(new PVector( 0, -0.5f, -t)); 24 | v.add(new PVector( 0, 0.5f, -t)); 25 | v.add(new PVector( t, 0, -0.5f)); 26 | v.add(new PVector( t, 0, 0.5f)); 27 | v.add(new PVector(-t, 0, -0.5f)); 28 | v.add(new PVector(-t, 0, 0.5f)); 29 | 30 | fadeAmt = random(1, 5); 31 | curFade = 255; 32 | 33 | xSpeed = random(2, 8); 34 | zSpeed = random(2, 8); 35 | 36 | xSpin = 0; 37 | zSpin = 0; 38 | } 39 | 40 | public boolean isDead() { 41 | if (curFade <= 0) { 42 | return true; 43 | } else { 44 | return false; 45 | } 46 | } 47 | 48 | public void update() { 49 | super.update(); 50 | 51 | curFade -= max(fadeAmt, 0.0); 52 | 53 | xSpin += xSpeed; 54 | zSpin += zSpeed; 55 | } 56 | 57 | public void display() { 58 | 59 | canvas.pushMatrix(); 60 | 61 | canvas.translate(loc.x, loc.y, loc.z); 62 | canvas.rotateX(radians(xSpin)); 63 | canvas.rotateZ(radians(zSpin)); 64 | canvas.noLights(); 65 | canvas.fill(10, 10, 30, curFade); 66 | canvas.strokeWeight(2); 67 | canvas.stroke(#dd3300, curFade); 68 | 69 | canvas.scale(100,100,100); 70 | 71 | //scale the strokeWeight back down so it appears normal on screen 72 | float sw = (float) 1.0/100 * 2; 73 | canvas.strokeWeight(sw); 74 | 75 | canvas.beginShape(PConstants.TRIANGLES); 76 | 77 | // 5 faces around point 0 78 | canvas.vertex(v.get(0).x, v.get(0).y, v.get(0).z); 79 | canvas.vertex(v.get(11).x, v.get(11).y, v.get(11).z); 80 | canvas.vertex(v.get(5).x, v.get(5).y, v.get(5).z); 81 | 82 | canvas.vertex(v.get(0).x, v.get(0).y, v.get(0).z); 83 | canvas.vertex(v.get(5).x, v.get(5).y, v.get(5).z); 84 | canvas.vertex(v.get(1).x, v.get(1).y, v.get(1).z); 85 | 86 | canvas.vertex(v.get(0).x, v.get(0).y, v.get(0).z); 87 | canvas.vertex(v.get(1).x, v.get(1).y, v.get(1).z); 88 | canvas.vertex(v.get(7).x, v.get(7).y, v.get(7).z); 89 | 90 | canvas.vertex(v.get(0).x, v.get(0).y, v.get(0).z); 91 | canvas.vertex(v.get(7).x, v.get(7).y, v.get(7).z); 92 | canvas.vertex(v.get(10).x, v.get(10).y, v.get(10).z); 93 | 94 | canvas.vertex(v.get(0).x, v.get(0).y, v.get(0).z); 95 | canvas.vertex(v.get(10).x, v.get(10).y, v.get(10).z); 96 | canvas.vertex(v.get(11).x, v.get(11).y, v.get(11).z); 97 | 98 | // 5 adjacent faces 99 | canvas.vertex(v.get(1).x, 
v.get(1).y, v.get(1).z); 100 | canvas.vertex(v.get(5).x, v.get(5).y, v.get(5).z); 101 | canvas.vertex(v.get(9).x, v.get(9).y, v.get(9).z); 102 | 103 | canvas.vertex(v.get(5).x, v.get(5).y, v.get(5).z); 104 | canvas.vertex(v.get(11).x, v.get(11).y, v.get(11).z); 105 | canvas.vertex(v.get(4).x, v.get(4).y, v.get(4).z); 106 | 107 | canvas.vertex(v.get(11).x, v.get(11).y, v.get(11).z); 108 | canvas.vertex(v.get(10).x, v.get(10).y, v.get(10).z); 109 | canvas.vertex(v.get(2).x, v.get(2).y, v.get(2).z); 110 | 111 | canvas.vertex(v.get(10).x, v.get(10).y, v.get(10).z); 112 | canvas.vertex(v.get(7).x, v.get(7).y, v.get(7).z); 113 | canvas.vertex(v.get(6).x, v.get(6).y, v.get(6).z); 114 | 115 | canvas.vertex(v.get(7).x, v.get(7).y, v.get(7).z); 116 | canvas.vertex(v.get(1).x, v.get(1).y, v.get(1).z); 117 | canvas.vertex(v.get(8).x, v.get(8).y, v.get(8).z); 118 | 119 | // 5 faces around point 3 120 | canvas.vertex(v.get(3).x, v.get(3).y, v.get(3).z); 121 | canvas.vertex(v.get(9).x, v.get(9).y, v.get(9).z); 122 | canvas.vertex(v.get(4).x, v.get(4).y, v.get(4).z); 123 | 124 | canvas.vertex(v.get(3).x, v.get(3).y, v.get(3).z); 125 | canvas.vertex(v.get(4).x, v.get(4).y, v.get(4).z); 126 | canvas.vertex(v.get(2).x, v.get(2).y, v.get(2).z); 127 | 128 | canvas.vertex(v.get(3).x, v.get(3).y, v.get(3).z); 129 | canvas.vertex(v.get(2).x, v.get(2).y, v.get(2).z); 130 | canvas.vertex(v.get(6).x, v.get(6).y, v.get(6).z); 131 | 132 | canvas.vertex(v.get(3).x, v.get(3).y, v.get(3).z); 133 | canvas.vertex(v.get(6).x, v.get(6).y, v.get(6).z); 134 | canvas.vertex(v.get(8).x, v.get(8).y, v.get(8).z); 135 | 136 | canvas.vertex(v.get(3).x, v.get(3).y, v.get(3).z); 137 | canvas.vertex(v.get(8).x, v.get(8).y, v.get(8).z); 138 | canvas.vertex(v.get(9).x, v.get(9).y, v.get(9).z); 139 | 140 | // 5 adjacent faces 141 | canvas.vertex(v.get(4).x, v.get(4).y, v.get(4).z); 142 | canvas.vertex(v.get(9).x, v.get(9).y, v.get(9).z); 143 | canvas.vertex(v.get(5).x, v.get(5).y, v.get(5).z); 144 | 145 | canvas.vertex(v.get(2).x, v.get(2).y, v.get(2).z); 146 | canvas.vertex(v.get(4).x, v.get(4).y, v.get(4).z); 147 | canvas.vertex(v.get(11).x, v.get(11).y, v.get(11).z); 148 | 149 | canvas.vertex(v.get(6).x, v.get(6).y, v.get(6).z); 150 | canvas.vertex(v.get(2).x, v.get(2).y, v.get(2).z); 151 | canvas.vertex(v.get(10).x, v.get(10).y, v.get(10).z); 152 | 153 | canvas.vertex(v.get(8).x, v.get(8).y, v.get(8).z); 154 | canvas.vertex(v.get(6).x, v.get(6).y, v.get(6).z); 155 | canvas.vertex(v.get(7).x, v.get(7).y, v.get(7).z); 156 | 157 | canvas.vertex(v.get(9).x, v.get(9).y, v.get(9).z); 158 | canvas.vertex(v.get(8).x, v.get(8).y, v.get(8).z); 159 | canvas.vertex(v.get(1).x, v.get(1).y, v.get(1).z); 160 | 161 | canvas.endShape(PConstants.CLOSE); 162 | 163 | 164 | 165 | canvas.popMatrix(); 166 | } 167 | } -------------------------------------------------------------------------------- /top_bottom_360/build/Pyramid.pde: -------------------------------------------------------------------------------- 1 | class Pyramid extends Drawable { 2 | 3 | private PVector p1, p2, p3, p4; 4 | public float size; 5 | public int alpha; 6 | 7 | public float spinY; 8 | public float spinSpeed; 9 | 10 | public float dropSpeed; 11 | 12 | Pyramid() { 13 | super(); 14 | 15 | size = 100; 16 | loc = new PVector(0, 0, 0); 17 | alpha = 75; 18 | 19 | myIdx = 0; 20 | 21 | float spinY = 0; 22 | spinSpeed = random(2, 8); 23 | 24 | dropSpeed = random(0, 20); 25 | 26 | init(); 27 | } 28 | 29 | private void init() { 30 | //unit vector of pyramid 31 | p1 = new PVector(-0.5, -0.433, 
-0.289); 32 | p2 = new PVector(0.5, -0.433, -0.289); 33 | p3 = new PVector(0, -0.433, 0.866-0.289); 34 | p4 = new PVector(0, 0.433, 0); 35 | 36 | p1.mult(size); 37 | p2.mult(size); 38 | p3.mult(size); 39 | p4.mult(size); 40 | } 41 | 42 | public boolean isDead() { 43 | if (loc.z > 7500 || loc.y > 1000) { 44 | return true; 45 | } else { 46 | return false; 47 | } 48 | } 49 | 50 | public void setSize(float s) { 51 | size = s; 52 | init(); 53 | } 54 | 55 | public void update() { 56 | super.update(); 57 | 58 | loc.y += dropSpeed; 59 | 60 | //setSize((cur[0] + cur[1])/2 * 3); 61 | setSize(max(cur[myIdx] * 3, 40)); 62 | 63 | alpha = (int) map((cur[myIdx]), 0, 100, 25, 120); 64 | 65 | spinY += spinSpeed; 66 | } 67 | 68 | public void display() { 69 | canvas.pushMatrix(); 70 | canvas.translate(loc.x, loc.y, loc.z); 71 | canvas.noLights(); 72 | 73 | canvas.rotateY(radians(spinY)); 74 | 75 | canvas.fill(myFill, alpha); 76 | canvas.stroke(0, 150, 200, alpha); 77 | canvas.strokeWeight(2); 78 | 79 | canvas.beginShape(TRIANGLES); 80 | 81 | //top face 82 | canvas.vertex(p1.x, p1.y, p1.z); 83 | canvas.vertex(p2.x, p2.y, p2.z); 84 | canvas.vertex(p3.x, p3.y, p3.z); 85 | 86 | //back face 87 | canvas.vertex(p1.x, p1.y, p1.z); 88 | canvas.vertex(p2.x, p2.y, p2.z); 89 | canvas.vertex(p4.x, p4.y, p4.z); 90 | 91 | //left face 92 | canvas.vertex(p1.x, p1.y, p1.z); 93 | canvas.vertex(p3.x, p3.y, p3.z); 94 | canvas.vertex(p4.x, p4.y, p4.z); 95 | 96 | //left face 97 | canvas.vertex(p2.x, p2.y, p2.z); 98 | canvas.vertex(p3.x, p3.y, p3.z); 99 | canvas.vertex(p4.x, p4.y, p4.z); 100 | 101 | canvas.endShape(); 102 | 103 | canvas.popMatrix(); 104 | } 105 | } -------------------------------------------------------------------------------- /top_bottom_360/build/TerrainGrid.pde: -------------------------------------------------------------------------------- 1 | class TerrainGrid { 2 | 3 | float startX, startY, startZ; 4 | float spacing; 5 | int numTiles, numRows; 6 | 7 | ArrayList grid; 8 | 9 | float travelled; 10 | float speed; 11 | 12 | boolean useNoise = true; 13 | 14 | int tileIndex = 0; 15 | 16 | float killDist = 0; 17 | 18 | 19 | TerrainGrid () { 20 | 21 | numTiles = 100; 22 | numRows = 90; 23 | 24 | startX = 0.0f; 25 | startY = 0.0f; 26 | startZ = 0.0f; 27 | 28 | spacing = 150.0f; 29 | killDist = 10000.0f; 30 | 31 | travelled = 0.0f; 32 | speed = 20.0f; 33 | 34 | initGrid(); 35 | 36 | // println("INIT: " + grid.size()); 37 | 38 | } 39 | 40 | public void initGrid() { 41 | //fill arrayList with initial grid 42 | grid = new ArrayList(); 43 | 44 | color f = color(0); 45 | 46 | for (int i = 0; i < numRows; i++) { 47 | 48 | //f = color((int) random(0, 255), (int) random(0, 255), (int) random(0, 255)); 49 | f = color(#ff5500); 50 | 51 | for (int j = 0; j < numTiles; j++) { 52 | 53 | TerrainTile t = new TerrainTile(tileIndex); 54 | 55 | t.setFill(f); 56 | t.setYoff(startZ); 57 | 58 | float x = startX + j*spacing; 59 | float z = 0; 60 | float y = startY - i*spacing; 61 | 62 | t.setPoints(0, x, y, z); 63 | t.setPoints(1, x+spacing, y, z); 64 | t.setPoints(2, x+spacing, y-spacing, z); 65 | t.setPoints(3, x, y-spacing, z); 66 | 67 | if (useNoise) { 68 | t.noiseY(numTiles); 69 | } 70 | 71 | grid.add(t); 72 | 73 | tileIndex += 1; 74 | 75 | } 76 | } 77 | } 78 | 79 | public void startX(float x) { 80 | startX = x; 81 | } 82 | 83 | public void startY(float y) { 84 | startY = y; 85 | } 86 | 87 | public void startZ(float z) { 88 | startZ = z; 89 | } 90 | 91 | public void update() { 92 | 93 | // println("LOOP: " + grid.size()); 94 | 95 | 
for (TerrainTile tt : grid) { 96 | tt.zPlus(speed); 97 | } 98 | 99 | //kill any we need to 100 | for (int i = grid.size() - 1; i >= 0; i--) { 101 | TerrainTile t = grid.get(i); 102 | if (t.getZ() > killDist) { 103 | grid.remove(i); 104 | } 105 | } 106 | 107 | travelled += speed; 108 | 109 | 110 | //if last point is within threshold of horizon edge, add a row 111 | 112 | if (travelled >= spacing) { 113 | 114 | //println(travelled); 115 | 116 | float diff = travelled - spacing; 117 | 118 | color f = color((int) random(0, 255), (int) random(0, 255), (int) random(0, 255)); 119 | f = color(#ff5500); 120 | 121 | for (int j = 0; j < numTiles; j++) { 122 | 123 | //int index = i * numTiles + j; 124 | 125 | TerrainTile t = new TerrainTile(tileIndex); 126 | 127 | t.setFill(f); 128 | t.setYoff(startZ); 129 | 130 | float x = startX + j*spacing; 131 | float y = startY - (numRows*spacing) + travelled; 132 | float z = 0; 133 | 134 | t.setPoints(0, x, y, z); 135 | t.setPoints(1, x+spacing, y, z); 136 | t.setPoints(2, x+spacing, y - spacing, z); 137 | t.setPoints(3, x, y - spacing, z); 138 | 139 | if (useNoise) { 140 | t.noiseY(numTiles); 141 | } 142 | 143 | 144 | grid.add(t); 145 | 146 | tileIndex += 1; 147 | 148 | } 149 | travelled = diff; 150 | } 151 | 152 | } 153 | 154 | public void addRow() { 155 | for (int k = 0; k < numTiles; k++) { 156 | 157 | //int index = i * numTiles + j; 158 | 159 | TerrainTile mt = new TerrainTile(tileIndex); 160 | mt.setYoff(startZ); 161 | 162 | float x = startX + (k * spacing); 163 | float y = 0; 164 | float z = -1000; 165 | 166 | mt.setPoints(0, x, y, z); 167 | mt.setPoints(1, x+spacing, y, z); 168 | mt.setPoints(2, x+spacing, y, z+spacing); 169 | mt.setPoints(3, x, y, z+spacing); 170 | 171 | if (useNoise) { 172 | mt.noiseY(numTiles); 173 | } 174 | 175 | grid.add(mt); 176 | 177 | tileIndex += 1; 178 | 179 | } 180 | } 181 | 182 | public void display() { 183 | 184 | canvas.fill(255); 185 | canvas.strokeWeight(2); 186 | 187 | canvas.beginShape(QUADS); 188 | for (int i = 0; i < grid.size(); i++) { 189 | TerrainTile t = grid.get(i); 190 | t.drawVerts(); 191 | } 192 | canvas.endShape(); 193 | 194 | 195 | } 196 | 197 | } -------------------------------------------------------------------------------- /top_bottom_360/build/TerrainTile.pde: -------------------------------------------------------------------------------- 1 | class TerrainTile { 2 | 3 | PVector[] points = new PVector[4]; 4 | int index, myX, myZ; 5 | 6 | float nz = 0.12203; 7 | float nx = 0.183; 8 | float nMult = 650; 9 | 10 | float yOff = 0; 11 | 12 | color fill; 13 | 14 | TerrainTile(int i) { 15 | index = i; 16 | 17 | myX = floor(index%10); 18 | myZ = floor(index/10); 19 | 20 | fill = color(#ffffff); 21 | } 22 | 23 | public void setPoints(int idx, float x, float y, float z) { 24 | if (idx >=0 && idx <= 3) { 25 | points[idx] = new PVector(x, y, z); 26 | } 27 | } 28 | 29 | public void setFill(color f) { 30 | fill = f; 31 | } 32 | 33 | public void noiseY(int numTiles) { 34 | 35 | int x = floor(index%numTiles); 36 | int z = floor(index/numTiles); 37 | 38 | points[0].z = yOff + noise(x * nx, z*nz) * nMult; 39 | points[1].z = yOff + noise((x+1) * nx, z*nz) * nMult; 40 | points[2].z = yOff + noise((x+1) * nx, (z + 1) * nz) * nMult; 41 | points[3].z = yOff + noise(x * nx, (z + 1) * nz) * nMult; 42 | } 43 | 44 | public void setYoff(float y) { 45 | yOff = y; 46 | } 47 | 48 | public void zPlus(float z) { 49 | points[0].y += z; 50 | points[1].y += z; 51 | points[2].y += z; 52 | points[3].y += z; 53 | } 54 | 55 | float getZ() { 56 
| return points[0].y; 57 | } 58 | 59 | public void drawVerts() { 60 | 61 | canvas.stroke( 62 | 200, 63 | min(map(points[0].y, 6500, 4500, 100, 0), 100), 64 | min(map(points[0].y, -8500, 3000, 0, 160), 160) 65 | ); 66 | 67 | canvas.vertex(points[0].x, points[0].z, points[0].y); 68 | canvas.vertex(points[1].x, points[1].z, points[1].y); 69 | canvas.vertex(points[2].x, points[2].z, points[2].y); 70 | canvas.vertex(points[3].x, points[3].z, points[3].y); 71 | 72 | } 73 | } -------------------------------------------------------------------------------- /top_bottom_360/build/build.pde: -------------------------------------------------------------------------------- 1 | /* 2 | Processing to 3D 360 video (Top/Bottom view) rendering. 3 | 4 | This is purely an exporting tool. There's nothing to see on screen; all the work happens behind the scenes, generating frames for 3D 360 photos or video. 5 | 6 | The code is largely a port of the Unreal Engine 360 exporter by Kite & Lightning to the Processing / Java language, 7 | with the conversion of the sampled texture into an equirectangular image for export handled by a GLSL shader. 8 | 9 | Currently, this code does NOT support pixelDensity(2). 10 | 11 | It is also not real time, so please don't expect 60FPS rendering. 12 | 13 | Inside the shader there are three sampling levels. The best quality is the default, but you can go in and change it. 14 | There's probably some optimisation that could be done on the shader, but it works. 15 | 16 | Do all of your drawing to the canvas PGraphics object inside the drawStage() method. Anything drawn directly to the screen won't show up. 17 | */ 18 | 19 | boolean debug = false; 20 | 21 | int sphericalAtlasWidth = 2048; 22 | int sphericalAtlasHeight = 1024; 23 | 24 | int captureHeight = 720; 25 | int captureWidth = 720; 26 | 27 | float hAngIncrement = 2.0f; 28 | float vAngIncrement = 30.0f; 29 | float eyeSeparation = 6.4f; 30 | float fieldOfView = 60.0f; 31 | 32 | int numberOfHorizontalSteps; 33 | int numberOfVerticalSteps; 34 | int totalSteps; 35 | 36 | int unprojectedAtlasWidth; 37 | int unprojectedAtlasHeight; 38 | 39 | int stripWidth; 40 | int stripHeight; 41 | 42 | PVector slicePlaneDim; 43 | PVector capturePlaneDim; 44 | 45 | PImage unprojectedAtlas; 46 | PGraphics projection_tmp; 47 | PImage projection; 48 | PGraphics canvas; 49 | 50 | PShader myShader; 51 | 52 | /* 53 | Put your sketch variables under here 54 | */ 55 | //stuff for data analysis/mapping 56 | Table table; 57 | float[] avgs = {80.0, 80.0, 60.0, 50.0, 40.0, 38.0, 36.0, 33.0, 28.0, 22.0, 21.0, 19.0, 18.0}; 58 | float[] cur = new float[13]; 59 | 60 | //frame counting / table index 61 | int counter = 0; 62 | int max = 0; 63 | 64 | //my scene and anim vars 65 | TerrainGrid myGrid; 66 | ArrayList<Drawable> myObjects = new ArrayList<Drawable>(); 67 | PImage sky; 68 | PShape sun; 69 | PImage poly; 70 | float delay, myDelay; 71 | 72 | void setup() { 73 | 74 | size(720, 720, P3D); 75 | frameRate(30); 76 | smooth(); 77 | 78 | numberOfHorizontalSteps = int(360.0f / hAngIncrement); 79 | numberOfVerticalSteps = int(180.0f / vAngIncrement) + 1; //Need an extra b/c we only grab half of the top & bottom slices 80 | totalSteps = numberOfHorizontalSteps * numberOfVerticalSteps; 81 | 82 | slicePlaneDim = new PVector( 83 | 2.0f * tan(radians(hAngIncrement) / 2.0f), 84 | 2.0f * tan(radians(vAngIncrement) / 2.0f) 85 | ); 86 | capturePlaneDim = new PVector( 87 | 2.0f * tan(radians(fieldOfView) / 2.0f), 88 | 2.0f * tan(radians(fieldOfView) / 2.0f) 89 | ); 90 | 91 | stripWidth =
ceil(width * slicePlaneDim.x / capturePlaneDim.x); 92 | stripHeight = ceil(height * slicePlaneDim.y / capturePlaneDim.y); 93 | 94 | //Ensure strip width and height are even 95 | stripWidth += stripWidth & 1; 96 | stripHeight += stripHeight & 1; 97 | 98 | unprojectedAtlasWidth = numberOfHorizontalSteps * stripWidth; 99 | unprojectedAtlasHeight = numberOfVerticalSteps * stripHeight; 100 | 101 | if (debug) { 102 | println("slicePlaneDim: " + slicePlaneDim); 103 | println("capturePlaneDim: " + capturePlaneDim); 104 | println("stripWidth: " + stripWidth); 105 | println("stripHeight: " + stripHeight); 106 | println("unprojectedAtlasWidth: " + unprojectedAtlasWidth); 107 | println("unprojectedAtlasHeight: " + unprojectedAtlasHeight); 108 | } 109 | 110 | canvas = createGraphics(captureWidth, captureHeight, P3D); 111 | 112 | //set up PImage to draw sampling texture to 113 | unprojectedAtlas = createImage(unprojectedAtlasWidth, unprojectedAtlasHeight, RGB); 114 | 115 | //setup the output graphics object that serves as our 360 photo/video file(s) 116 | projection_tmp = createGraphics(sphericalAtlasWidth, sphericalAtlasHeight, P3D); 117 | projection = createImage(sphericalAtlasWidth, sphericalAtlasHeight*2, RGB); 118 | 119 | //set up the shader to handle the sampling 120 | myShader = loadShader("frag.glsl"); 121 | 122 | myShader.set("u_resolution", float(projection_tmp.width), float(projection_tmp.height)); 123 | myShader.set("u_sampleResolution", float(unprojectedAtlas.width), float(unprojectedAtlas.height)); 124 | myShader.set("hAngIncrement", hAngIncrement); 125 | myShader.set("vAngIncrement", vAngIncrement); 126 | myShader.set("stripWidth", float(stripWidth)); 127 | myShader.set("stripHeight", float(stripHeight)); 128 | myShader.set("sampling_level", int(2));//For best quality use 2. For medium, use 1, for low, use 0. Low quality runs a tiny bit faster, but looks worse than 1 or 2. 129 | 130 | /* 131 | Your sketch vars init 132 | */ 133 | 134 | //load table data 135 | table = loadTable("data_30fps.csv"); 136 | max = table.getRowCount(); 137 | for (int i = 0; i < 13; i++) { 138 | cur[i] = 0; 139 | } 140 | 141 | //other stuff for animations... 142 | myGrid = new TerrainGrid(); 143 | myGrid.startZ(400); 144 | myGrid.startY(5000); 145 | myGrid.startX(-5115); 146 | myGrid.initGrid(); 147 | 148 | sky = loadImage("sky3.jpg"); 149 | 150 | sun = loadShape("sun.svg"); 151 | poly = loadImage("poly2.png"); 152 | 153 | delay = 30.0 * 6.3; 154 | myDelay = 30.0 * 6.3; 155 | 156 | 157 | /* 158 | Not sure why this is necessary yet, but you have to set the perspective 159 | before setting the camera or the first slice will ignore all the camera rotations 160 | For now, leave this as is and will investigate later. 
161 | */ 162 | canvas.beginDraw(); 163 | canvas.perspective(radians(fieldOfView), 1, 1, 10000); 164 | canvas.endDraw(); 165 | } 166 | 167 | void draw() { 168 | projection_tmp.hint(DISABLE_TEXTURE_MIPMAPS); 169 | ((PGraphicsOpenGL)projection_tmp).textureSampling(2); 170 | 171 | //run any pre render animations 172 | animationPreUpdate(); 173 | 174 | //render the left eye 175 | renderTexture(-eyeSeparation/2); 176 | myShader.set("sampleSet", unprojectedAtlas); 177 | projection_tmp.filter(myShader); 178 | 179 | //copy left eye to main projection frame 180 | projection.copy(projection_tmp, 181 | 0, 0, projection_tmp.width, projection_tmp.height, 182 | 0, 0, projection_tmp.width, projection_tmp.height); 183 | 184 | //render the right eye 185 | renderTexture(eyeSeparation/2); 186 | myShader.set("sampleSet", unprojectedAtlas); 187 | projection_tmp.filter(myShader); 188 | 189 | //copy right eye to main projection frame 190 | projection.copy((PImage) projection_tmp, 191 | 0, 0, projection_tmp.width, projection_tmp.height, 192 | 0, projection_tmp.height, projection_tmp.width, projection_tmp.height); 193 | 194 | //save the frame 195 | if (counter <= max) { 196 | projection.save("frames/filename-"+counter+".png"); 197 | } else { 198 | noLoop(); 199 | exit(); 200 | } 201 | 202 | //run any post render animations 203 | animationPostUpdate(); 204 | } 205 | 206 | void drawStage() { 207 | canvas.beginDraw(); 208 | //leave perspective as is, it's necessary for accurate rendering 209 | canvas.perspective(radians(fieldOfView), 1, 1, 10000); 210 | 211 | /* 212 | Add your code below 213 | */ 214 | canvas.background(#050122); 215 | 216 | //can we listen to data and draw a frame? 217 | if (counter < max) { 218 | 219 | //skybox 220 | canvas.pushMatrix(); 221 | canvas.translate(width/2, height/2, 0); 222 | float rot = radians(360.0/36.0); 223 | float sRot = radians(-90.0); 224 | float rad = 8000; 225 | canvas.fill(255); 226 | canvas.noStroke(); 227 | canvas.strokeWeight(0); 228 | canvas.beginShape(QUADS); 229 | canvas.textureMode(NORMAL); 230 | canvas.tint(255); 231 | canvas.texture(sky); 232 | for (int i = 0; i < 36; i++) { 233 | float x = rad * cos(sRot + rot*i); 234 | float z = rad * sin(sRot + rot*i); 235 | 236 | float x1 = rad * cos(sRot + rot*(i+1)); 237 | float z1 = rad * sin(sRot + rot*(i+1)); 238 | 239 | float u = map(i, 0, 36, 1.0, 0.0); 240 | float u1 = map(i+1, 0, 36, 1.0, 0.0); 241 | 242 | canvas.vertex(x, -6000, z, u, 0.0); 243 | canvas.vertex(x1, -6000, z1, u1, 0.0); 244 | canvas.vertex(x1, 400, z1, u1, 1.0); 245 | canvas.vertex(x, 400, z, u, 1.0); 246 | 247 | 248 | } 249 | canvas.endShape(CLOSE); 250 | canvas.popMatrix(); 251 | 252 | //sun 253 | canvas.pushMatrix(); 254 | canvas.translate(width/2, height/2, -7800); 255 | canvas.noLights(); 256 | 257 | float sunSz = 1200; 258 | 259 | canvas.fill(#050122); 260 | canvas.ellipse(0, 0, sunSz, sunSz); 261 | 262 | canvas.translate(0, 0, 10); 263 | 264 | canvas.pointLight(255, 64, 0, 760, -250, 148); 265 | canvas.pointLight(255, 26, 156, -720, 350, 266); 266 | canvas.pointLight(255, 196, 13, 0, -760, 198); 267 | 268 | sun.disableStyle(); 269 | canvas.strokeWeight(0); 270 | canvas.noStroke(); 271 | canvas.fill(255); 272 | 273 | canvas.shape(sun, -sunSz/2, -sunSz/2, sunSz, sunSz); 274 | canvas.popMatrix(); 275 | 276 | //display my terrain 277 | canvas.pushMatrix(); 278 | canvas.noLights(); 279 | canvas.translate(width/2, 0, 0); 280 | canvas.directionalLight(128, 32, 0, 0, 0, 0.94868326); 281 | canvas.directionalLight(10, 0, 40, 0, 0.70710677, -0.70710677); 282 
| 283 | canvas.spotLight(0, 10, 70, 0, -500, 500, 0, 0.4472136, 0.8944272, PI, 2); 284 | canvas.translate(-width/2, 0, 0); 285 | 286 | myGrid.display(); 287 | canvas.popMatrix(); 288 | 289 | canvas.blendMode(ADD); 290 | for (Drawable d : myObjects) { 291 | d.display(); 292 | } 293 | canvas.blendMode(BLEND); 294 | } 295 | 296 | canvas.endDraw(); 297 | } 298 | 299 | void animationPreUpdate() { 300 | //any variables used for animation BEFORE drawing is done should be placed here 301 | TableRow row = table.getRow(counter); 302 | 303 | for (int idx = 0; idx < 13; idx++) { 304 | float h = row.getFloat(idx); 305 | 306 | if (h > avgs[idx]) { 307 | 308 | cur[idx] = h; 309 | 310 | //trigger an animation somewhere with this variable attached 311 | if (idx == 0) { 312 | //add a pyramid 313 | Pyramid p = new Pyramid(); 314 | p.setPos(random(-4000, 4000), random(-4000, height/4), random(-6000, -4000)); 315 | p.setVel(0, 5, 20);//random(10, 50)); 316 | p.setIdx(floor(random(1, 8))); 317 | myObjects.add(p); 318 | } 319 | 320 | if (idx == 9) { 321 | Beams b = new Beams(); 322 | b.setPos(random(-5000, 5000), 900, -5500); 323 | b.setVel(0, 0, 20); 324 | b.setIdx(floor(random(1, 8))); 325 | myObjects.add(b); 326 | } 327 | 328 | if (idx == 11 && myDelay >= delay) { 329 | myDelay = 0; 330 | 331 | Geo g = new Geo(); 332 | g.setPos(width/2, height/2, -5000); 333 | g.setVel(0, 0, random(50, 80)); 334 | myObjects.add(g); 335 | } 336 | 337 | if (idx == 12 && frameCount > 1570) { 338 | Icos i = new Icos(); 339 | i.setPos(random(-5000, 5000), 800, random(-5500, -1000)); 340 | i.setVel(0, -10, 20); 341 | i.setIdx(floor(random(1, 8))); 342 | myObjects.add(i); 343 | } 344 | 345 | } else { 346 | cur[idx] *= 0.95; 347 | } 348 | } 349 | } 350 | 351 | void animationPostUpdate() { 352 | 353 | myGrid.update(); 354 | for (int i = myObjects.size() - 1; i >= 0; i--) { 355 | Drawable d = myObjects.get(i); 356 | d.update(); 357 | if (d.isDead() == true) { 358 | myObjects.remove(i); 359 | } 360 | } 361 | 362 | counter += 1; 363 | myDelay += 1; 364 | } 365 | -------------------------------------------------------------------------------- /top_bottom_360/build/data/frag.glsl: -------------------------------------------------------------------------------- 1 | #ifdef GL_ES 2 | precision highp float; 3 | precision highp int; 4 | #endif 5 | 6 | uniform sampler2D sampleSet; 7 | 8 | uniform vec2 u_resolution; 9 | uniform vec2 u_sampleResolution; 10 | 11 | uniform float hAngIncrement; 12 | uniform float vAngIncrement; 13 | 14 | uniform float stripWidth; 15 | uniform float stripHeight; 16 | 17 | uniform int sampling_level; 18 | 19 | float myClamp(float value) { 20 | float f = mod(value, 360.0f); 21 | if (f < 0.0f) { 22 | f = f + 360.0f; 23 | } 24 | return f; 25 | } 26 | 27 | 28 | vec3 getColorAt(float xOff, float yOff) { 29 | 30 | vec2 slicePlaneDim = vec2( 31 | 2.0f * tan(radians(hAngIncrement) / 2.0f), 32 | 2.0f * tan(radians(vAngIncrement) / 2.0f) 33 | ); 34 | 35 | double dimX = 0.03491013; 36 | double dimY = 0.5358984; 37 | 38 | vec2 pos = vec2(float(gl_FragCoord.x + xOff), float(gl_FragCoord.y + yOff)); 39 | // vec2 pos = gl_FragCoord.xy; 40 | pos = pos/u_resolution; 41 | pos.y = 1.0 - pos.y; 42 | 43 | // vec2 modifier = vec2(1.0, 1.0)/u_resolution; 44 | 45 | // pos.x = pos.x + xOff * modifier.x; 46 | // pos.y = pos.y + yOff * modifier.y; 47 | 48 | // vec4 clr = texture2D(sampleSet, pos); 49 | 50 | // return vec3(clr.r, clr.g, clr.b); 51 | 52 | float sampleTheta = pos.x * 360.0f; 53 | float samplePhi = pos.y * 180.0f; 54 | float 
sampleThetaRad = radians(sampleTheta); 55 | float samplePhiRad = radians(samplePhi); 56 | 57 | vec3 sampleDirection = vec3( 58 | sin(samplePhiRad) * cos(sampleThetaRad), 59 | sin(samplePhiRad) * sin(sampleThetaRad), 60 | cos(samplePhiRad) 61 | ); 62 | 63 | int sliceXIndex = int(myClamp(floor(sampleTheta + hAngIncrement / 2.0f)) / hAngIncrement); 64 | 65 | int sliceYIndex = 0; 66 | 67 | //calculate slice Y index 68 | 69 | float largestCosAngle = 0.0f; 70 | for (int vstep = 0; vstep < 7; vstep++) { 71 | 72 | vec2 sliceCenterThetaPhi = vec2( 73 | hAngIncrement * sliceXIndex, 74 | vAngIncrement * vstep 75 | ); 76 | 77 | vec3 sliceDir = vec3( 78 | sin(radians(sliceCenterThetaPhi.y)) * cos(radians(sliceCenterThetaPhi.x)), 79 | sin(radians(sliceCenterThetaPhi.y)) * sin(radians(sliceCenterThetaPhi.x)), 80 | cos(radians(sliceCenterThetaPhi.y)) 81 | ); 82 | 83 | float cosAngle = dot(sampleDirection, sliceDir); 84 | 85 | if (cosAngle > largestCosAngle) { 86 | largestCosAngle = cosAngle; 87 | sliceYIndex = vstep; 88 | } 89 | } 90 | 91 | vec2 sliceCenterThetaPhi = vec2(hAngIncrement * sliceXIndex, vAngIncrement * sliceYIndex); 92 | 93 | vec3 sliceDir = normalize(vec3( 94 | sin(radians(sliceCenterThetaPhi.y)) * cos(radians(sliceCenterThetaPhi.x)), 95 | sin(radians(sliceCenterThetaPhi.y)) * sin(radians(sliceCenterThetaPhi.x)), 96 | cos(radians(sliceCenterThetaPhi.y)) 97 | )); 98 | 99 | float planeW = dot(sliceDir, vec3(-sliceDir.x, -sliceDir.y, -sliceDir.z)); 100 | 101 | vec3 slicePlanePhiTangent = normalize(vec3( 102 | cos(radians(sliceCenterThetaPhi.y)) * cos(radians(sliceCenterThetaPhi.x)), 103 | cos(radians(sliceCenterThetaPhi.y)) * sin(radians(sliceCenterThetaPhi.x)), 104 | -sin(radians(sliceCenterThetaPhi.y)) 105 | )); 106 | 107 | vec3 slicePlaneThetaTangent = normalize(cross(sliceDir, slicePlanePhiTangent)); 108 | 109 | float t = -planeW / dot(sampleDirection, sliceDir); 110 | 111 | vec3 sliceIntersection = vec3( 112 | t * sampleDirection.x, 113 | t * sampleDirection.y, 114 | t * sampleDirection.z 115 | ); 116 | 117 | float sliceU = float(dot(sliceIntersection, slicePlaneThetaTangent) / dimX); 118 | float sliceV = float(dot(sliceIntersection, slicePlanePhiTangent) / dimY); 119 | 120 | //TODO: ikrimae: Supersample/bilinear filter 121 | float slicePixelX = floor(sliceU * stripWidth); 122 | float slicePixelY = floor(sliceV * stripHeight); 123 | 124 | float sliceCenterPixelX = floor(sliceXIndex * int(stripWidth) + floor(stripWidth/2.0)); 125 | float sliceCenterPixelY = floor(sliceYIndex * int(stripHeight) + floor(stripHeight/2.0)); 126 | 127 | pos.x = int(sliceCenterPixelX + slicePixelX)/u_sampleResolution.x; 128 | pos.y = int(sliceCenterPixelY + slicePixelY)/u_sampleResolution.y; 129 | 130 | vec4 color = texture2D(sampleSet, pos); 131 | 132 | return vec3(color.r, color.g, color.b); 133 | 134 | } 135 | 136 | 137 | void main(void) { 138 | 139 | vec3 myColor = vec3(0.0f, 0.0f, 0.0f); 140 | 141 | if (sampling_level < 2) { 142 | if (sampling_level == 0) { 143 | //Lowest quality sampling 144 | myColor = getColorAt(0.0f, 0.0f); 145 | gl_FragColor = vec4(myColor, 1.0); 146 | } else { 147 | //medium quality sampling 148 | myColor = myColor + getColorAt(0.125f, 0.625f); 149 | myColor = myColor + getColorAt(0.375f, 0.125f); 150 | myColor = myColor + getColorAt(0.625f, 0.875f); 151 | myColor = myColor + getColorAt(0.875f, 0.375f); 152 | gl_FragColor = vec4(myColor/4.0, 1.0); 153 | } 154 | } else { 155 | //best quality sampling 156 | myColor = myColor + getColorAt(0.125f, 0.125f); 157 | myColor = myColor + 
getColorAt(0.125f, 0.375f); 158 | myColor = myColor + getColorAt(0.125f, 0.625f); 159 | myColor = myColor + getColorAt(0.125f, 0.875f); 160 | myColor = myColor + getColorAt(0.375f, 0.125f); 161 | myColor = myColor + getColorAt(0.375f, 0.375f); 162 | myColor = myColor + getColorAt(0.375f, 0.625f); 163 | myColor = myColor + getColorAt(0.375f, 0.875f); 164 | myColor = myColor + getColorAt(0.625f, 0.125f); 165 | myColor = myColor + getColorAt(0.625f, 0.375f); 166 | myColor = myColor + getColorAt(0.625f, 0.625f); 167 | myColor = myColor + getColorAt(0.625f, 0.875f); 168 | myColor = myColor + getColorAt(0.875f, 0.125f); 169 | myColor = myColor + getColorAt(0.875f, 0.375f); 170 | myColor = myColor + getColorAt(0.875f, 0.625f); 171 | myColor = myColor + getColorAt(0.875f, 0.875f); 172 | gl_FragColor = vec4(myColor/16.0, 1.0); 173 | } 174 | 175 | } 176 | -------------------------------------------------------------------------------- /top_bottom_360/build/data/poly.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tracerstar/processing-360-video/63ad522d13a300698b2348c166f0b2018bf2cd69/top_bottom_360/build/data/poly.png -------------------------------------------------------------------------------- /top_bottom_360/build/data/poly2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tracerstar/processing-360-video/63ad522d13a300698b2348c166f0b2018bf2cd69/top_bottom_360/build/data/poly2.png -------------------------------------------------------------------------------- /top_bottom_360/build/data/sky.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tracerstar/processing-360-video/63ad522d13a300698b2348c166f0b2018bf2cd69/top_bottom_360/build/data/sky.jpg -------------------------------------------------------------------------------- /top_bottom_360/build/data/sky2.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tracerstar/processing-360-video/63ad522d13a300698b2348c166f0b2018bf2cd69/top_bottom_360/build/data/sky2.jpg -------------------------------------------------------------------------------- /top_bottom_360/build/data/sky3.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/tracerstar/processing-360-video/63ad522d13a300698b2348c166f0b2018bf2cd69/top_bottom_360/build/data/sky3.jpg -------------------------------------------------------------------------------- /top_bottom_360/build/data/sun.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | -------------------------------------------------------------------------------- /top_bottom_360/build/utility.pde: -------------------------------------------------------------------------------- 1 | void renderTexture(float offset) { 2 | { 3 | 4 | int currentHorizontalStep = 0; 5 | int currentVerticalStep = 0; 6 | 7 | //set start positions 8 | float roll = 0.0f; 9 | float yaw = 0.0f; 10 | float pitch = 90.0f - vAngIncrement/2.0f; 11 | 12 | float focalLength = 50.0; 13 | 14 | float pitchCheck = pitch + 0; 15 | 16 | int totalSteps = numberOfHorizontalSteps * numberOfVerticalSteps; 17 | for (int i = 0; i < totalSteps; i++) { 18 | currentHorizontalStep = i % numberOfHorizontalSteps; 19 | currentVerticalStep = 
floor(i/numberOfHorizontalSteps); 20 | 21 | yaw = 180.0f + currentHorizontalStep * hAngIncrement; 22 | pitch = -90.0f + currentVerticalStep * vAngIncrement; 23 | 24 | pitchCheck = pitch + 0; 25 | 26 | float pitchUp = 90.0f - pitch; 27 | 28 | if (currentVerticalStep >= (numberOfVerticalSteps-2) || currentVerticalStep < 2) { 29 | pitchUp = -90.0f - pitch;//up and down 30 | } 31 | 32 | pitchUp = radians(clamp(pitchUp)); 33 | 34 | //clamp rotations between 0 and 360 35 | float fwdYaw = radians(yaw - 90.0); 36 | yaw = radians(clamp(yaw)); 37 | pitch = radians(clamp(pitch)); 38 | 39 | //set cam rotation and pos 40 | PVector pos = new PVector(width/2, height/2, 0.0f); 41 | 42 | float x = cos(yaw) * offset; 43 | float z = sin(yaw) * offset; 44 | pos.x += x; 45 | pos.z += z; 46 | 47 | //handle eye separation 48 | PVector fwd = new PVector(); 49 | fwd.x = pos.x + focalLength * (cos(fwdYaw)*cos(pitch)); 50 | fwd.y = pos.y + focalLength * sin(pitch); 51 | fwd.z = pos.z + focalLength * (sin(fwdYaw)*cos(pitch)); 52 | 53 | PVector up = new PVector(); 54 | 55 | up.x = (cos(fwdYaw)*cos(pitchUp)); 56 | up.y = sin(pitchUp); 57 | up.z = sin(fwdYaw)*cos(pitchUp); 58 | 59 | up.normalize(); 60 | 61 | canvas.camera(pos.x, pos.y, pos.z, fwd.x, fwd.y, fwd.z, up.x, up.y, up.z); 62 | 63 | drawStage(); 64 | 65 | int capX = (int) (width/2 - stripWidth/2); 66 | int capY = (int) (height/2 - stripHeight/2); 67 | 68 | int dX = currentHorizontalStep * stripWidth; 69 | int dY = currentVerticalStep * stripHeight; 70 | 71 | PImage stage = canvas.get(); 72 | 73 | unprojectedAtlas.copy(stage, capX, capY, stripWidth, stripHeight, dX, dY, stripWidth, stripHeight); 74 | 75 | } 76 | } 77 | } 78 | 79 | 80 | float clamp(float value) { 81 | float f = value % 360.0f; 82 | if (f < 0.0f) { 83 | f += 360.0f; 84 | } 85 | return f; 86 | } 87 | --------------------------------------------------------------------------------
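A quick sanity check of the capture geometry set up in build.pde: with the default constants (hAngIncrement = 2, vAngIncrement = 30, fieldOfView = 60, a 720 x 720 capture), the strip and atlas sizes work out as in the standalone sketch below. This is an editor's back-of-envelope sketch added for illustration, not code from the repo, and the short variable names are my own. Note that the two slice-plane values it computes (~0.0349 and ~0.5359) are the same numbers hard-coded as dimX / dimY in frag.glsl.

```processing
// Re-derives the strip/atlas maths from build.pde setup() with its default values.
float hAngIncrement = 2.0, vAngIncrement = 30.0, fieldOfView = 60.0;
int captureW = 720, captureH = 720;

// slice plane vs capture plane, both at unit distance from the camera
float sliceW = 2.0 * tan(radians(hAngIncrement) / 2.0);  // ~0.0349 (dimX in frag.glsl)
float sliceH = 2.0 * tan(radians(vAngIncrement) / 2.0);  // ~0.5359 (dimY in frag.glsl)
float capDim = 2.0 * tan(radians(fieldOfView) / 2.0);    // ~1.1547

// centre strip of each 720x720 render that actually gets kept
int stripW = ceil(captureW * sliceW / capDim);  // 22
int stripH = ceil(captureH * sliceH / capDim);  // 335, padded to 336 below
stripW += stripW & 1;                           // force even dimensions
stripH += stripH & 1;

int hSteps = int(360.0 / hAngIncrement);        // 180 camera headings
int vSteps = int(180.0 / vAngIncrement) + 1;    // 7 pitch bands

println(stripW, stripH);                        // 22 336
println(hSteps * stripW, vSteps * stripH);      // 3960 2352 (unprojected atlas)
println(hSteps * vSteps);                       // 1260 renders per eye, per frame
```

That last number is also why the exporter is slow: every output frame needs 1260 scene renders per eye, 2520 in total, before the shader ever runs.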
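The core of frag.glsl is the reverse mapping: take an output pixel of the equirectangular frame, turn it into a viewing direction, and work out which strip of the unprojected atlas was rendered looking that way. The sketch below restates just that first step in Processing syntax purely for readability; it is an illustrative re-derivation rather than code shared with the shader, and the function names are mine.

```processing
float hAngIncrement = 2.0;  // must match the uniform passed to the shader

// Unit-sphere viewing direction for an output pixel at (u, v), both in 0..1.
// v runs down the image, mirroring the gl_FragCoord.y flip in frag.glsl.
PVector sampleDirection(float u, float v) {
  float theta = u * 360.0;  // longitude in degrees
  float phi   = v * 180.0;  // colatitude in degrees
  return new PVector(
    sin(radians(phi)) * cos(radians(theta)),
    sin(radians(phi)) * sin(radians(theta)),
    cos(radians(phi))
  );
}

// Horizontal slice (camera heading) the pixel samples from: the nearest
// hAngIncrement-degree step, wrapped into 0..359 the same way myClamp() does.
int sliceXIndex(float u) {
  float theta = u * 360.0;
  float wrapped = floor(theta + hAngIncrement / 2.0) % 360.0;
  if (wrapped < 0) wrapped += 360.0;
  return int(wrapped / hAngIncrement);
}
```

The vertical index is then picked in the shader by brute force: it tests each of the seven pitch bands and keeps the one whose centre direction has the largest dot product with this vector, before intersecting the ray with that slice's plane to find the exact texel.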