├── mov
│   └── theta1.m4v
├── LICENSE
├── index.html
├── movie_360.html
├── theta_360.html
├── theta_anzu_up.html
├── theta_anzu_360.html
├── README.md
└── js
    ├── anzu.min.js
    └── theta_gl.js
/mov/theta1.m4v:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mganeko/THETA_GL/HEAD/mov/theta1.m4v
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | The MIT License (MIT)
2 |
3 | Copyright (c) 2015 mganeko
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
23 |
--------------------------------------------------------------------------------
/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 | THETA_GL sample page
7 |
8 |
9 | THETA_GL sample page
10 | A THETA_GL sample that displays RICOH THETA S video in the browser as a full spherical video, using WebGL (three.js) and UV mapping
11 |
12 |
13 | Play a recorded video file
14 | A sample that displays a video file recorded with the THETA S
15 | movie_360.html
16 |
17 | Connect and use as a USB camera
18 | A sample that displays camera video captured with navigator.getUserMedia()
19 | theta_360.html
20 |
21 | Use WebRTC SFU streamed video
22 | A sample that displays video streamed by WebRTC SFU as a Service Anzu
23 |
24 | Publish: theta_anzu_up.html
25 |
26 | View: theta_anzu_360.html
27 |
28 | * Start streaming from the Anzu dashboard, then specify the channel ID and connect
29 |
34 |
35 | Operating environment
36 | Tested in the following environments:
37 | - Chrome 47.0.2526.106 (64-bit) for Mac OS X
38 | - Firefox 43.0.3 for Mac OS X
39 | - Chrome 47.0.2526.106 m for Windows
40 | - Firefox 43.0.2 for Windows
41 | * On Firefox for Android, recorded video files do not play, possibly because of the m4v format
42 |
43 |
44 |
--------------------------------------------------------------------------------
/movie_360.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 | THETA Movie Viewer
8 |
15 |
16 |
17 |
18 |
19 |
20 |
21 | start
22 | stop
23 | Sense Orientation
24 |
25 |
26 |
27 |
28 |
62 |
63 |
64 |
65 |
--------------------------------------------------------------------------------
/theta_360.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 | THETA Viewer
8 |
15 |
16 |
17 |
18 |
19 |
20 |
21 | start
22 | stop
23 | Sense Orientation
24 |
25 |
26 |
27 |
28 |
96 |
97 |
98 |
99 |
--------------------------------------------------------------------------------
/theta_anzu_up.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 | Anzu THETA Viewer
8 |
9 |
10 |
11 |
12 |
13 | channel
14 | token
15 | start upstream
16 | stop upstream
17 |
18 |
19 |
20 |
21 |
121 |
122 |
123 |
124 |
--------------------------------------------------------------------------------
/theta_anzu_360.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 | Anzu THETA Viewer
8 |
15 |
16 |
17 |
18 |
19 |
20 |
21 | channel
22 | start
23 | stop
24 | Sense Orientation
25 |
26 |
27 |
28 |
29 |
30 |
140 |
141 |
142 |
143 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # THETA_GL
2 | THETA WebGL Viewer using three.js (JavaScript 3D library)
3 |
4 |
5 | [Many thanks to [mganeko](https://github.com/mganeko) for this useful repo! This English translation of the README.md file for mganeko's THETA_GL repo is provided by the RICOH THETA Unofficial Guide; the original Japanese-language README.md is appended below. It is offered "as-is" and unofficial, and comments and edits are welcome. More information on useful GitHub repos for the RICOH THETA, curated by the RICOH THETA Unofficial Guide, is available [here](http://lists.theta360.guide/t/theta-github-repository/199/1).]
6 |
7 |
8 | ## Acknowledgments
9 |
10 | * three.js is used for WebGL rendering
11 |   * The [panorama / equirectangular sample](http://threejs.org/examples/#webgl_panorama_equirectangular) was referenced for full-sphere rendering
12 |   * three.js is provided under the MIT license
13 |
14 | * The UV mapping is built from the following data:
15 |   * Hekomi's ["Using RICOH THETA Live View With Unity"](http://lists.theta360.guide/t/using-ricoh-theta-live-view-with-unity/70/1)
16 |   * A mapping JSON converted from the Unity UV mapping above and fine-tuned for three.js by baba
17 |
18 | * [anzu-sdk.js](https://github.com/shiguredo/anzu-js-sdk) is used for the samples of [WebRTC SFU as a Service Anzu](https://anzu.shiguredo.jp/), a WebRTC distribution service
19 |   * Anzu is provided by Shiguredo Inc. (Japan)
20 |   * anzu-sdk.js is provided under the Apache License, Version 2.0
21 |
22 | Thank you, everyone.
23 |
24 | ## Operating Environment
25 |
26 | I have tested and confirmed operation in the following environments:
27 |
28 | * Chrome 47.0.2526.106 (64-bit) for Mac OS X
29 | * Firefox 43.0.3 for Mac OS X
30 | * Chrome 47.0.2526.106 m for Windows
31 | * Firefox 43.0.2 for Windows
32 | * Chrome for Android
33 | * Firefox for Android
34 |
35 | _Recorded video files do not play on Firefox for Android, possibly because of the m4v format._
36 |
37 | ## Samples
38 |
39 | ### Playback of a prerecorded video file
40 | * [Sample displaying a video file](https://mganeko.github.io/THETA_GL/movie_360.html) recorded with the THETA S
41 |
42 | ### Connecting as a USB Camera
43 | * [Sample using `navigator.getUserMedia()`](https://mganeko.github.io/THETA_GL/theta_360.html) to capture and display the camera video
44 |
45 | ### Using WebRTC SFU Distribution Video
46 | [Translator's Note: This section appears to require an account with this service, and its links do not appear to work.]
47 | * Sample of [Anzu WebRTC SFU as a Service](https://anzu.shiguredo.jp/) video distribution service
48 | * [Example of distribution](https://mganeko.github.io/THETA_GL/theta_anzu_up.html)
49 | * [Example of viewing](https://mganeko.github.io/THETA_GL/theta_anzu_360.html)
50 | * ~~After starting distribution from the Anzu Dashboard, set the Channel ID and connect~~ [crossed out in original Japanese README]
51 | * Confirm the Channel ID and distribution token in the [Anzu Dashboard](https://anzu.shiguredo.jp/dashboard.html), then enter them on [theta_anzu_up.html](https://mganeko.github.io/THETA_GL/theta_anzu_up.html) and start broadcasting. For viewing, set the Channel ID on the [theta_anzu_360.html](https://mganeko.github.io/THETA_GL/theta_anzu_360.html) page and connect.
52 |
53 | ## Usage
54 |
55 | #### Prep
56 |
57 | * Load three.js or three.min.js in the HTML (see the sketch after this list)
58 |   * Download the newest version [here](http://github.com/mrdoob/three.js/zipball/master) or use the [CDN version](https://cdnjs.com/libraries/three.js/)
59 | * Load theta_gl.js in the HTML
60 |   * The JSON files under the uv folder are also required at runtime
61 | * When using Anzu, anzu.js or anzu.min.js is also required
62 |   * Get the [newest version from GitHub](https://github.com/shiguredo/anzu-js-sdk)
63 |
64 |
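As a minimal sketch of the prep above (the script paths and the cdnjs release are assumptions; adjust them to your layout):

```
<!-- three.js from CDN (or a local copy); r73 was current for this repo's era -->
<script src="https://cdnjs.cloudflare.com/ajax/libs/three.js/r73/three.min.js"></script>
<!-- theta_gl.js expects the uv/*.json mapping files next to the page -->
<script src="js/theta_gl.js"></script>
<!-- only needed for the Anzu samples -->
<script src="js/anzu.min.js"></script>
<!-- container div that THETA_GL.init() will render into -->
<div id="container"></div>
```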
65 | #### Initialization
66 |
67 | * `THETA_GL.init(divId, autoResize, debugFlag)`
68 |   * `string divId`: ID of the div element that will contain the WebGL canvas [REQUIRED]
69 |   * `bool autoResize`: Whether to follow window resizing (true/false) - default is true
70 |   * `bool debugFlag`: Whether to show the working video/canvas elements and log to the console for debugging - default is false
71 |
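For example, a debug-oriented setup (assuming a `<div id="container">` exists in the page):

```
// fixed-size canvas (no resize tracking), working video/canvas shown, console logging on
THETA_GL.init('container', false, true);
```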
72 | #### Starting WebGL Animation
73 |
74 | * `THETA_GL.startAnimate()`
75 |
76 | #### Setting the Video Source URL
77 | * `THETA_GL.setVideoSrc(url, loopFlag)`
78 |   * `string url`: URL of the video. Use a web URL or a URL created with `URL.createObjectURL()` [REQUIRED]
79 |   * `bool loopFlag`: Whether to loop the video (true/false) - default is false
80 |
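A sketch of the local-file case, using a hypothetical file input (the `file_input` id is illustrative, not from the samples):

```
var fileInput = document.getElementById('file_input'); // <input type="file">
fileInput.onchange = function () {
  var file = fileInput.files[0];
  if (!file) return;
  var url = URL.createObjectURL(file); // blob: URL the hidden video element can play
  THETA_GL.setVideoSrc(url, true);     // loop playback
};
```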
81 | #### Stopping Video
82 | * `THETA_GL.stopVideoSrc()`
83 |
84 | #### Setting Device Orientation
85 | * `THETA_GL.followOrientation(flag)`
86 |   * `bool flag`: Whether to follow the orientation of the smart device (see the sketch below) [REQUIRED]
87 |
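A sketch wiring these calls to buttons, in the spirit of the start / stop / Sense Orientation buttons on the sample pages (the element ids are assumptions):

```
var following = false;
document.getElementById('orientation_button').onclick = function () {
  following = !following;
  THETA_GL.followOrientation(following); // toggle following the device orientation
};
document.getElementById('stop_button').onclick = function () {
  THETA_GL.stopVideoSrc(); // stop the current video source
};
```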
88 | ### Code Samples
89 | ```
90 | var url = 'http://yourserver.com/video.mp4';
91 | THETA_GL.init('container', true, false); // div id, auto-resize on, debug off
92 | THETA_GL.setVideoSrc(url, true);         // loop playback
93 | THETA_GL.startAnimate();                 // start the WebGL animation loop
94 | ```
95 |
96 | ## License
97 |
98 | THETA_GL is under the MIT license
99 |
100 |
101 | [Original Japanese README starts here]
102 | ----
103 |
104 | # THETA_GL
105 | THETA WebGL Viewer with three.js
106 |
107 | RICOH THETA S の映像をWebGL(three.js)とUVマッピングを用いて、全天球動画としてブラウザで表示します。
108 |
109 | ## 謝辞
110 | * WebGLでの描画には、[three.js](http://threejs.org/)を利用しています
111 | * 全天球描画には[panorama / equirectangularサンプル](http://threejs.org/examples/#webgl_panorama_equirectangular)を参考にしています。
112 | * three.js はMITライセンスで提供されています
113 | * UVマッピングの作成には、次のデータを利用させていただいています
114 | * 凹みtips [発売前に RICOH THETA S のライブビューを Unity でリアルタイムに全天球で見るやつ作ってみた](http://tips.hecomi.com/entry/2015/10/11/211456)
115 | * 上記のUnity用UVマッピングを元に、baba氏がthree.js用に変換、微調整したマッピングjsonデータ
116 | * WebRTC配信サービス [WebRTC SFU as a Service Anzu](https://anzu.shiguredo.jp/) の利用サンプルには、[anzu-sdk.js](https://github.com/shiguredo/anzu-js-sdk)を利用しています
117 | * WebRTC SFU as a Service Anzuは株式会社時雨堂が提供しているサービスです
118 | * anzu-sdk.js は Apache License Version 2.0 で提供されています
119 |
120 | 皆様、どうもありがとうございます。
121 |
122 |
123 | ## 動作環境
124 | 次の環境で動作確認しています
125 | * Mac OS X用 Chrome 47.0.2526.106 (64-bit)
126 | * Mac OS X用 Firefox 43.0.3
127 | * Windows用 Chrome 47.0.2526.106 m
128 | * Windows用 Firefox 43.0.2
129 | * Android用 Chrome
130 | * Android用 Firefox
131 |
132 | ※Android用Firefoxでは、m4v形式のためか録画した映像ファイルは再生できていません
133 |
134 |
135 | ## サンプル
136 | https://mganeko.github.io/THETA_GL/
137 |
138 | ### 録画した映像ファイルを再生
139 | * THETA Sで録画したファイルを表示するサンプルです
140 | * [movie_360.html](https://mganeko.github.io/THETA_GL/movie_360.html)
141 |
142 | ### USBカメラとして接続して利用
143 | * navigator.getUserMedia()を利用してカメラ映像を取り込んだものを表示するサンプルです
144 | * [theta_360.html](https://mganeko.github.io/THETA_GL/theta_360.html)
145 |
146 | ### WebRTC SFU 配信映像を利用
147 | * [WebRTC SFU as a Service Anzu](https://anzu.shiguredo.jp/) の配信映像を表示するサンプルです
148 | * 配信側: [theta_anzu_up.html](https://mganeko.github.io/THETA_GL/theta_anzu_up.html)
149 | * 視聴側: [theta_anzu_360.html](https://mganeko.github.io/THETA_GL/theta_anzu_360.html)
150 | * ~~[Anzuダッシュボード](https://anzu.shiguredo.jp/dashboard.html)で配信を開始してから、サンプルでチャネルIDを指定して接続してください~~
151 | * [Anzuダッシュボード](https://anzu.shiguredo.jp/dashboard.html)でチャンネルIDと配信用トークンを確認し、[theta_anzu_up.html](https://mganeko.github.io/THETA_GL/theta_anzu_up.html)で指定してから配信してください。視聴側は[theta_anzu_360.html](https://mganeko.github.io/THETA_GL/theta_anzu_360.html)でチャネルIDを指定して接続してください
152 |
153 |
154 | ## 使い方
155 | #### 準備
156 | * HTMLでthree.js または three.min.js を読み込みます
157 | * [最新版をダウンロード](http://github.com/mrdoob/three.js/zipball/master)するか、[CDNのもの](http://cdnjs.com/libraries/three.js/)を利用します
158 | * HTMLでtheta_gl.jsを読み込みます
159 | * 利用にあたっては、uvフォルダの下のjsonファイルも必要になります
160 | * Anzuを利用する場合は、anzu.js または anzu.min.js も必要です
161 | * [最新版をgithub](https://github.com/shiguredo/anzu-js-sdk)から取得してください
162 |
163 | #### 初期化
164 | * THETA_GL.init(divId, autoResize, debugFlag)
165 | * string divId : WebGLを表示するCanvasのコンテナとなる、div要素のIDを指定 ※必須
166 | * bool autoResize : Windowのリサイズに追従するかどうか(true/false) ※省略時はtrue
167 | * bool debugFlag : デバッグ用に作業用のvideo要素、canvas要素を表示するか、ログ情報をconsoleに表示するか ※省略時はfalse
168 |
169 | #### WebGLアニメーションの開始
170 | * THETA_GL.startAnimate()
171 |
172 | #### 映像ソースURLの指定
173 | * THETA_GL.setVideoSrc(url, loopFlag)
174 | * string url : 映像のURL。Web上のURLか、URL.createObjectURL()で生成したURLを指定 ※必須
175 | * bool loopFlag : 映像をループ再生するかどうか(true/false) ※省略時はfalse
176 |
177 | #### 映像の停止
178 | * THETA_GL.stopVideoSrc()
179 |
180 | #### デバイスの方向に追従
181 | * THETA_GL.followOrientation(flag)
182 | * bool flag: スマートデバイスの方向に追従されるかどうかを指定 ※必須
183 |
184 | ### コード例
185 | ```
186 | var url = 'http://yourserver.com/video.mp4';
187 | THETA_GL.init('container', true, false);
188 | THETA_GL.setVideoSrc(url, true);
189 | THETA_GL.startAnimate();
190 | ```
191 |
192 |
193 | ## ライセンス
194 | THETA_GLはMITライセンスで提供されます
195 |
196 |
197 |
--------------------------------------------------------------------------------
/js/anzu.min.js:
--------------------------------------------------------------------------------
1 | /*!
2 | * anzu-js-sdk
3 | * WebRTC SFU as a Service Anzu Library
4 | * @version 0.5.2
5 | * @author Shiguredo Inc.
6 | * @license MIT
7 | */
8 | [minified UMD bundle omitted: the "<" characters were stripped during extraction, leaving the code unrecoverable; see https://github.com/shiguredo/anzu-js-sdk for the source]
--------------------------------------------------------------------------------
/js/theta_gl.js:
--------------------------------------------------------------------------------
[lines 1-178 missing from this dump]
179 | var light = new THREE.AmbientLight(0xffffff);
180 | light.position.set(0, 0, 0).normalize();
181 | scene = new THREE.Scene();
182 | scene.add(light);
183 |
184 | videoTexture = new THREE.Texture( videoImage );
185 | videoTexture.minFilter = THREE.LinearFilter;
186 | videoTexture.magFilter = THREE.LinearFilter;
187 |
188 | //var videoMaterial = new THREE.MeshBasicMaterial( { map: videoTexture, overdraw: true, side:THREE.DoubleSide } );
189 | loader.load('uv/half_sphere_1.json', function(geometry1, materials1) {
190 | // create a new material
191 | var material1 = new THREE.MeshPhongMaterial({
192 | map: videoTexture, // specify and load the texture
193 | color: 0xffffff,
194 | specular: 0xcccccc,
195 | shininess: 50,
196 | //ambient: 0xffffff,
197 | overdraw: true
198 | });
199 |
200 | // create a mesh with models geometry and material
201 | var mesh1 = new THREE.Mesh(
202 | geometry1,
203 | material1
204 | );
205 |
206 | mesh1.scale.set(0.1, 0.1, 0.1);
207 | mesh1.position.set(0, 0, 0).normalize();
208 | scene.add(mesh1);
209 |
210 | console.log('mesh1 ready');
211 | });
212 |
213 | loader.load('uv/half_sphere_2.json', function(geometry2, materials2) {
214 | // create a new material
215 | var material2 = new THREE.MeshPhongMaterial({
216 | map: videoTexture, // specify and load the texture
217 | color: 0xffffff,
218 | specular: 0xcccccc,
219 | shininess: 50,
220 | //ambient: 0xffffff,
221 | overdraw: true
222 | });
223 |
224 | // create a mesh with models geometry and material
225 | var mesh2 = new THREE.Mesh(
226 | geometry2,
227 | material2
228 | );
229 |
230 | mesh2.scale.set(0.1, 0.1, 0.1);
231 | mesh2.position.set(0, 0, 0).normalize();
232 | scene.add(mesh2);
233 |
234 | console.log('mesh2 ready');
235 | });
236 |
237 | var geometry = new THREE.SphereGeometry( 500, 60, 40 ); // leftover from the equirectangular sample; never added to the scene
238 | geometry.scale( - 1, 1, 1 );
239 |
240 | renderer = new THREE.WebGLRenderer({
241 | alpha: true
242 | });
243 |
244 | renderer.setPixelRatio( window.devicePixelRatio );
245 | renderer.setSize( winWidth, winHeight );
246 | container.appendChild( renderer.domElement );
247 |
248 | container.addEventListener( 'mousedown', onDocumentMouseDown, false );
249 | container.addEventListener( 'mousemove', onDocumentMouseMove, false );
250 | container.addEventListener( 'mouseup', onDocumentMouseUp, false );
251 | container.addEventListener( 'mouseout', onDocumentMouseOut, false );
252 | container.addEventListener( 'mousewheel', onDocumentMouseWheel, false );
253 | container.addEventListener( 'MozMousePixelScroll', onDocumentMouseWheel, false);
254 |
255 | container.addEventListener( 'touchstart', onDocumentTouchStart, false );
256 | container.addEventListener( 'touchend', onDocumentTouchEnd, false );
257 | container.addEventListener( 'touchcancel', onDocumentTouchEnd, false );
258 | container.addEventListener( 'touchmove', onDocumentTouchMove, false );
259 |
260 | if (autoResize !== false) {
261 | window.addEventListener( 'resize', onWindowResize, false );
262 | onWindowResize();
263 | }
264 | }
265 |
266 | // prepare Hidden elements to convert video to texture
267 | function prepareVideoElements(debugFlag) {
268 | // video element to render
269 | if (!videoRenderElement) {
270 | videoRenderElement = document.createElement('video');
271 | videoRenderElement.width = 1280;
272 | videoRenderElement.height = 720;
273 | if (debugFlag) {
274 | videoRenderElement.style.visibility = 'visible'; // was 'true', which is not a valid CSS visibility value
275 | videoRenderElement.style.position = 'absolute';
276 | videoRenderElement.style.top = '200px';
277 | videoRenderElement.width = 320;
278 | videoRenderElement.height = 180;
279 | }
280 | else {
281 | videoRenderElement.style.visibility = 'hidden';
282 | }
283 | videoRenderElement.volume = 0;
284 | document.body.appendChild(videoRenderElement);
285 | }
286 |
287 | // canvas to convert
288 | videoImage = document.createElement('canvas');
289 |
290 | if (debugFlag) {
291 | videoImage.style.visibility = 'visible'; // was 'true', an invalid CSS value
292 | videoImage.style.position = 'absolute';
293 | videoImage.style.top = '200px';
294 | videoImage.style.left = '400px';
295 | videoImage.width = 320;
296 | videoImage.height = 180;
297 | }
298 | else {
299 | videoImage.style.visibility = 'hidden';
300 | videoImage.width = 1280;
301 | videoImage.height = 720;
302 | }
303 | document.body.appendChild(videoImage);
304 |
305 | // context
306 | videoImageContext = videoImage.getContext('2d');
307 | videoImageContext.transform( -1, 0, 0, 1, videoImage.width, 0 ); // flip horizontally when drawing video frames
308 | }
309 |
310 | function onWindowResize() {
311 | var winWidth = window.innerWidth;
312 | var winHeight = window.innerHeight;
313 | camera.aspect = winWidth / winHeight;
314 | camera.updateProjectionMatrix();
315 | renderer.setSize( winWidth, winHeight );
316 | }
317 |
318 | function followOrientation(event) {
319 | var orientation = event.alpha; // 0 to 360
320 | var pitch = event.beta; // -180 to 180
321 | var roll = event.gamma; // -90 to 90 (unused here)
322 |
323 | lon = -orientation;
324 | lat = pitch - 90;
325 | }
326 |
327 | function addOrientationEvent() {
328 | window.addEventListener("deviceorientation", followOrientation, false);
329 | }
330 |
331 | function removeOrientationEvent() {
332 | window.removeEventListener("deviceorientation", followOrientation, false);
333 | }
334 |
335 | function onDocumentMouseDown(event) {
336 | event.preventDefault();
337 |
338 | isUserInteracting = true;
339 |
340 | onPointerDownPointerX = event.clientX;
341 | onPointerDownPointerY = event.clientY;
342 |
343 | onPointerDownLon = lon;
344 | onPointerDownLat = lat;
345 | }
346 |
347 | function onDocumentMouseMove(event) {
348 | if ( isUserInteracting === true ) {
349 | lon = ( onPointerDownPointerX - event.clientX ) * 0.1 + onPointerDownLon;
350 | lat = ( event.clientY - onPointerDownPointerY ) * 0.1 + onPointerDownLat;
351 | }
352 | }
353 |
354 | function onDocumentMouseUp(event) {
355 | isUserInteracting = false;
356 | }
357 |
358 | //
359 | // reset draggability on mouseout
360 | //
361 | function onDocumentMouseOut(event) {
362 | isUserInteracting = false;
363 | }
364 |
365 | var FOV_MIN = 20;
366 | var FOV_MAX = 140;
367 | function onDocumentMouseWheel(event) {
368 | // WebKit
369 | if ( event.wheelDeltaY ) {
370 | camera.fov -= event.wheelDeltaY * 0.05;
371 |
372 | // Opera / Explorer 9
373 | } else if ( event.wheelDelta ) {
374 | camera.fov -= event.wheelDelta * 0.05;
375 |
376 | // Firefox
377 | } else if ( event.detail ) {
378 | camera.fov += event.detail * 1.0;
379 | }
380 |
381 | if (camera.fov < FOV_MIN) {
382 | camera.fov = FOV_MIN;
383 | }
384 | else if (camera.fov > FOV_MAX) {
385 | camera.fov = FOV_MAX;
386 | }
387 |
388 | showMessage('mouseWheel camera.fov=' + camera.fov);
389 | camera.updateProjectionMatrix();
390 | }
391 |
392 | function onDocumentTouchStart(event) {
393 | event.preventDefault();
394 |
395 | isUserInteracting = true;
396 |
397 | var touches = event.touches;
398 | var l = touches.length;
399 | if (l == 1) {
400 | var touch = touches[0];
401 | onPointerDownPointerX = touch.clientX;
402 | onPointerDownPointerY = touch.clientY;
403 | onPointerDownLon = lon;
404 | onPointerDownLat = lat;
405 |
406 | showMessage('touch start:' + l);
407 | }
408 | else if (l == 2) {
409 | isPinching = true;
410 |
411 | // distance
412 | var touch1 = touches[0];
413 | var touch2 = touches[1];
414 | var dx = touch1.clientX - touch2.clientX;
415 | var dy = touch1.clientY - touch2.clientY;
416 | pinchStartDistance = Math.sqrt(dx*dx + dy*dy);
417 |
418 | showMessage('pinch start:' + l + ', dist=' + pinchStartDistance);
419 | }
420 | }
421 |
422 | function onDocumentTouchEnd(event) {
423 | isUserInteracting = false;
424 | showMessage('touch end');
425 |
426 | if (isPinching) {
427 | isPinching = false;
428 | showMessage('pinch end');
429 | }
430 | }
431 |
432 | function onDocumentTouchMove(event) {
433 | if ( isUserInteracting === true ) {
434 | var touches = event.touches;
435 | var l = touches.length;
436 | if (l == 1) {
437 | var touch = touches[0];
438 | lon = ( onPointerDownPointerX - touch.clientX ) * 0.1 + onPointerDownLon;
439 | lat = ( touch.clientY - onPointerDownPointerY ) * 0.1 + onPointerDownLat;
440 |
441 | showMessage('touch move:' + l);
442 | }
443 | else if (l == 2) {
444 | // distance
445 | var touch1 = touches[0];
446 | var touch2 = touches[1];
447 | var dx = touch1.clientX - touch2.clientX;
448 | var dy = touch1.clientY - touch2.clientY;
449 | var distance = Math.sqrt(dx*dx + dy*dy);
450 |
451 | showMessage('pinch move:' + l + ', dist=' + distance);
452 |
453 | camera.fov -= (distance - pinchStartDistance)*0.02;
454 | if (camera.fov > FOV_MAX) {
455 | camera.fov = FOV_MAX;
456 | }
457 | if (camera.fov < FOV_MIN) {
458 | camera.fov = FOV_MIN;
459 | }
460 | camera.updateProjectionMatrix();
461 | }
462 | }
463 | }
464 |
465 | function showMessage(msg) {
466 | if (debugMode) {
467 | console.log(msg);
468 | }
469 | }
470 |
471 | function animate() {
472 | animation = requestAnimationFrame( animate );
473 | update();
474 | }
475 |
476 | function update() { // aim the camera from (lat, lon), refresh the video texture, render
477 | lat = Math.max( - 85, Math.min( 85, lat ) );
478 | phi = THREE.Math.degToRad( 90 - lat );
479 | theta = THREE.Math.degToRad( lon );
480 |
481 | //camera.target.x = 500 * Math.sin( phi ) * Math.cos( theta );
482 | //camera.target.y = 500 * Math.cos( phi );
483 | //camera.target.z = 500 * Math.sin( phi ) * Math.sin( theta );
484 |
485 | camera.target.x = Math.sin(phi) * Math.cos(theta);
486 | camera.target.y = Math.cos(phi);
487 | camera.target.z = Math.sin(phi) * Math.sin(theta);
488 |
489 | camera.lookAt( camera.target );
490 |
491 | // video to image
492 | videoImageContext.drawImage( videoRenderElement, 0, 0, videoImage.width, videoImage.height );
493 | if ( videoTexture ) {
494 | videoTexture.needsUpdate = true;
495 | }
496 |
497 | if (controls) {
498 | controls.update();
499 | }
500 |
501 | if (effect) {
502 | effect.render( scene, camera );
503 | } else {
504 | renderer.render( scene, camera );
505 | }
506 | }
507 |
508 | };
509 |
510 | // Global Instance for THETA_GL.js
511 | var THETA_GL = new _theta_gl();
512 |
--------------------------------------------------------------------------------