├── .gitignore
├── CONTRIBUTING.md
├── LICENSE
├── README.md
├── README_Kr.md
└── WebARonARKit
    ├── WebARonARKit.js
    ├── WebARonARKit.xcodeproj
    │   ├── project.pbxproj
    │   └── project.xcworkspace
    │       └── contents.xcworkspacedata
    └── WebARonARKit
        ├── AppDelegate.h
        ├── AppDelegate.m
        ├── Assets.xcassets
        │   ├── AppIcon.appiconset
        │   │   └── Contents.json
        │   ├── BackIcon.imageset
        │   │   ├── Contents.json
        │   │   ├── LICENSE
        │   │   ├── ic_arrow_back_black_24px.png
        │   │   ├── ic_arrow_back_black_24px_2x.png
        │   │   └── ic_arrow_back_black_24px_3x.png
        │   ├── Contents.json
        │   └── RefreshIcon.imageset
        │       ├── Contents.json
        │       ├── LICENSE
        │       ├── ic_refresh_black_24px.png
        │       ├── ic_refresh_black_24px_2x.png
        │       └── ic_refresh_black_24px_3x.png
        ├── Base.lproj
        │   ├── LaunchScreen.storyboard
        │   └── Main.storyboard
        ├── Info.plist
        ├── NavigationView.h
        ├── NavigationView.m
        ├── ProgressView.h
        ├── ProgressView.m
        ├── Renderer.h
        ├── Renderer.m
        ├── ShaderTypes.h
        ├── Shaders.metal
        ├── ViewController.h
        ├── ViewController.m
        └── main.m

/.gitignore:
--------------------------------------------------------------------------------
.DS_Store
xcuserdata
*.iml
.idea

--------------------------------------------------------------------------------
/CONTRIBUTING.md:
--------------------------------------------------------------------------------
# How to contribute

We'd love to accept your patches and contributions to this project. There are
just a few small guidelines you need to follow.

## Contributor License Agreement

Contributions to any Google project must be accompanied by a Contributor License
Agreement. This is necessary because you own the copyright to your changes, even
after your contribution becomes part of this project. So this agreement simply
gives us permission to use and redistribute your contributions as part of the
project. Head over to https://cla.developers.google.com/ to see your current
agreements on file or to sign a new one.

You generally only need to submit a CLA once, so if you've already submitted one
(even if it was for a different project), you probably don't need to do it
again.

## Code reviews

All submissions, including submissions by project members, require review. We
use GitHub pull requests for this purpose. Consult [GitHub Help] for more
information on using pull requests.

[GitHub Help]: https://help.github.com/articles/about-pull-requests/

--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | 2 | Apache License 3 | Version 2.0, January 2004 4 | http://www.apache.org/licenses/ 5 | 6 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 7 | 8 | 1. Definitions. 9 | 10 | "License" shall mean the terms and conditions for use, reproduction, 11 | and distribution as defined by Sections 1 through 9 of this document. 12 | 13 | "Licensor" shall mean the copyright owner or entity authorized by 14 | the copyright owner that is granting the License. 15 | 16 | "Legal Entity" shall mean the union of the acting entity and all 17 | other entities that control, are controlled by, or are under common 18 | control with that entity. For the purposes of this definition, 19 | "control" means (i) the power, direct or indirect, to cause the 20 | direction or management of such entity, whether by contract or 21 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 22 | outstanding shares, or (iii) beneficial ownership of such entity. 
23 | 24 | "You" (or "Your") shall mean an individual or Legal Entity 25 | exercising permissions granted by this License. 26 | 27 | "Source" form shall mean the preferred form for making modifications, 28 | including but not limited to software source code, documentation 29 | source, and configuration files. 30 | 31 | "Object" form shall mean any form resulting from mechanical 32 | transformation or translation of a Source form, including but 33 | not limited to compiled object code, generated documentation, 34 | and conversions to other media types. 35 | 36 | "Work" shall mean the work of authorship, whether in Source or 37 | Object form, made available under the License, as indicated by a 38 | copyright notice that is included in or attached to the work 39 | (an example is provided in the Appendix below). 40 | 41 | "Derivative Works" shall mean any work, whether in Source or Object 42 | form, that is based on (or derived from) the Work and for which the 43 | editorial revisions, annotations, elaborations, or other modifications 44 | represent, as a whole, an original work of authorship. For the purposes 45 | of this License, Derivative Works shall not include works that remain 46 | separable from, or merely link (or bind by name) to the interfaces of, 47 | the Work and Derivative Works thereof. 48 | 49 | "Contribution" shall mean any work of authorship, including 50 | the original version of the Work and any modifications or additions 51 | to that Work or Derivative Works thereof, that is intentionally 52 | submitted to Licensor for inclusion in the Work by the copyright owner 53 | or by an individual or Legal Entity authorized to submit on behalf of 54 | the copyright owner. For the purposes of this definition, "submitted" 55 | means any form of electronic, verbal, or written communication sent 56 | to the Licensor or its representatives, including but not limited to 57 | communication on electronic mailing lists, source code control systems, 58 | and issue tracking systems that are managed by, or on behalf of, the 59 | Licensor for the purpose of discussing and improving the Work, but 60 | excluding communication that is conspicuously marked or otherwise 61 | designated in writing by the copyright owner as "Not a Contribution." 62 | 63 | "Contributor" shall mean Licensor and any individual or Legal Entity 64 | on behalf of whom a Contribution has been received by Licensor and 65 | subsequently incorporated within the Work. 66 | 67 | 2. Grant of Copyright License. Subject to the terms and conditions of 68 | this License, each Contributor hereby grants to You a perpetual, 69 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 70 | copyright license to reproduce, prepare Derivative Works of, 71 | publicly display, publicly perform, sublicense, and distribute the 72 | Work and such Derivative Works in Source or Object form. 73 | 74 | 3. Grant of Patent License. Subject to the terms and conditions of 75 | this License, each Contributor hereby grants to You a perpetual, 76 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 77 | (except as stated in this section) patent license to make, have made, 78 | use, offer to sell, sell, import, and otherwise transfer the Work, 79 | where such license applies only to those patent claims licensable 80 | by such Contributor that are necessarily infringed by their 81 | Contribution(s) alone or by combination of their Contribution(s) 82 | with the Work to which such Contribution(s) was submitted. 
If You 83 | institute patent litigation against any entity (including a 84 | cross-claim or counterclaim in a lawsuit) alleging that the Work 85 | or a Contribution incorporated within the Work constitutes direct 86 | or contributory patent infringement, then any patent licenses 87 | granted to You under this License for that Work shall terminate 88 | as of the date such litigation is filed. 89 | 90 | 4. Redistribution. You may reproduce and distribute copies of the 91 | Work or Derivative Works thereof in any medium, with or without 92 | modifications, and in Source or Object form, provided that You 93 | meet the following conditions: 94 | 95 | (a) You must give any other recipients of the Work or 96 | Derivative Works a copy of this License; and 97 | 98 | (b) You must cause any modified files to carry prominent notices 99 | stating that You changed the files; and 100 | 101 | (c) You must retain, in the Source form of any Derivative Works 102 | that You distribute, all copyright, patent, trademark, and 103 | attribution notices from the Source form of the Work, 104 | excluding those notices that do not pertain to any part of 105 | the Derivative Works; and 106 | 107 | (d) If the Work includes a "NOTICE" text file as part of its 108 | distribution, then any Derivative Works that You distribute must 109 | include a readable copy of the attribution notices contained 110 | within such NOTICE file, excluding those notices that do not 111 | pertain to any part of the Derivative Works, in at least one 112 | of the following places: within a NOTICE text file distributed 113 | as part of the Derivative Works; within the Source form or 114 | documentation, if provided along with the Derivative Works; or, 115 | within a display generated by the Derivative Works, if and 116 | wherever such third-party notices normally appear. The contents 117 | of the NOTICE file are for informational purposes only and 118 | do not modify the License. You may add Your own attribution 119 | notices within Derivative Works that You distribute, alongside 120 | or as an addendum to the NOTICE text from the Work, provided 121 | that such additional attribution notices cannot be construed 122 | as modifying the License. 123 | 124 | You may add Your own copyright statement to Your modifications and 125 | may provide additional or different license terms and conditions 126 | for use, reproduction, or distribution of Your modifications, or 127 | for any such Derivative Works as a whole, provided Your use, 128 | reproduction, and distribution of the Work otherwise complies with 129 | the conditions stated in this License. 130 | 131 | 5. Submission of Contributions. Unless You explicitly state otherwise, 132 | any Contribution intentionally submitted for inclusion in the Work 133 | by You to the Licensor shall be under the terms and conditions of 134 | this License, without any additional terms or conditions. 135 | Notwithstanding the above, nothing herein shall supersede or modify 136 | the terms of any separate license agreement you may have executed 137 | with Licensor regarding such Contributions. 138 | 139 | 6. Trademarks. This License does not grant permission to use the trade 140 | names, trademarks, service marks, or product names of the Licensor, 141 | except as required for reasonable and customary use in describing the 142 | origin of the Work and reproducing the content of the NOTICE file. 143 | 144 | 7. Disclaimer of Warranty. 
Unless required by applicable law or 145 | agreed to in writing, Licensor provides the Work (and each 146 | Contributor provides its Contributions) on an "AS IS" BASIS, 147 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 148 | implied, including, without limitation, any warranties or conditions 149 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 150 | PARTICULAR PURPOSE. You are solely responsible for determining the 151 | appropriateness of using or redistributing the Work and assume any 152 | risks associated with Your exercise of permissions under this License. 153 | 154 | 8. Limitation of Liability. In no event and under no legal theory, 155 | whether in tort (including negligence), contract, or otherwise, 156 | unless required by applicable law (such as deliberate and grossly 157 | negligent acts) or agreed to in writing, shall any Contributor be 158 | liable to You for damages, including any direct, indirect, special, 159 | incidental, or consequential damages of any character arising as a 160 | result of this License or out of the use or inability to use the 161 | Work (including but not limited to damages for loss of goodwill, 162 | work stoppage, computer failure or malfunction, or any and all 163 | other commercial damages or losses), even if such Contributor 164 | has been advised of the possibility of such damages. 165 | 166 | 9. Accepting Warranty or Additional Liability. While redistributing 167 | the Work or Derivative Works thereof, You may choose to offer, 168 | and charge a fee for, acceptance of support, warranty, indemnity, 169 | or other liability obligations and/or rights consistent with this 170 | License. However, in accepting such obligations, You may act only 171 | on Your own behalf and on Your sole responsibility, not on behalf 172 | of any other Contributor, and only if You agree to indemnify, 173 | defend, and hold each Contributor harmless for any liability 174 | incurred by, or claims asserted against, such Contributor by reason 175 | of your accepting any such warranty or additional liability. 176 | 177 | END OF TERMS AND CONDITIONS 178 | 179 | APPENDIX: How to apply the Apache License to your work. 180 | 181 | To apply the Apache License to your work, attach the following 182 | boilerplate notice, with the fields enclosed by brackets "[]" 183 | replaced with your own identifying information. (Don't include 184 | the brackets!) The text should be enclosed in the appropriate 185 | comment syntax for the file format. We also recommend that a 186 | file or class name and description of purpose be included on the 187 | same "printed page" as the copyright notice for easier 188 | identification within third-party archives. 189 | 190 | Copyright 2017 Google Inc. 191 | 192 | Licensed under the Apache License, Version 2.0 (the "License"); 193 | you may not use this file except in compliance with the License. 194 | You may obtain a copy of the License at 195 | 196 | http://www.apache.org/licenses/LICENSE-2.0 197 | 198 | Unless required by applicable law or agreed to in writing, software 199 | distributed under the License is distributed on an "AS IS" BASIS, 200 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 201 | See the License for the specific language governing permissions and 202 | limitations under the License. 
203 | 

--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
# WebARonARKit

**An experimental app for iOS that lets developers create Augmented Reality (AR) experiences using web technologies. An [Android version](https://github.com/google-ar/WebARonARCore) is also available.**

*Spawn-at-Camera example / Spawn-at-Surface example*

**Note:** This is not an official Google product. Nor is it a fully-featured web browser. Nor are the enabling JavaScript APIs standards, or on the standardization path. WebARonARKit is only meant to enable developer experimentation. For details on the WebARonARKit architecture, see [How WebARonARKit works](#HowWebARonARKitWorks).

## Getting started
WebARonARKit must be built from source using Xcode 9 and iOS 11, which requires an Apple Developer Account. If you do not have one already, sign up at [developer.apple.com](http://developer.apple.com).

### 0. Prerequisites
WebARonARKit is built on top of iOS [ARKit](https://developer.apple.com/arkit/), which requires an iOS device with an A9+ processor, running iOS 11. For best results, we recommend one of the following:
+ iPad (2017)
+ iPad Pro (9.7, 10.5 or 12.9 inches)
+ iPhone 7 and 7 Plus

WebARonARKit must be built from source and requires the following:

+ [Xcode 9](https://developer.apple.com/xcode/)
+ iOS 11
+ An Apple Developer Account. If you do not have one already, sign up at [developer.apple.com](http://developer.apple.com).

### 1. Run WebARonARKit
1. Clone the WebARonARKit GitHub repo.
2. Launch Xcode 9.
3. Open the Xcode project (.xcodeproj) from within the cloned WebARonARKit repo using Xcode 9.
4. Select the WebARonARKit project file from the Project Navigator (the top blue icon in the left column), then select the WebARonARKit target under `Targets`.
   - ![GIF showing how to set the project target.](https://media.giphy.com/media/xUOxfc84FVlNqqeJeU/giphy.gif)
5. With WebARonARKit as the selected target, the "General" tab is shown automatically in the main panel. Find the Signing section there and select the Team that corresponds to your iOS Developer Account / Team. If you get an error in the following steps, it is probably a code signing error; follow the instructions provided within Xcode. One default you will certainly have to change is the `Bundle Identifier`; a trivial addition such as "-personal" appended to the identifier shown in the GIF example below is enough.
   - ![GIF showing how to code sign.](https://media.giphy.com/media/3osBL6RqUu3prBVYOc/giphy.gif)
6. Set your device as the build destination by first ensuring it is connected to your computer, then selecting it from the Product >> Destination menu or from the drop-down menu next to the Run button in the top-left of the UI.
   - ![GIF showing how to set the build destination.](https://media.giphy.com/media/3osBL6aab1y581gPyE/giphy.gif)
7. Build and push to your device by selecting the Run button or typing `⌘-R`. Once the build is complete and has been pushed to your device, the app should open automatically. You may have to follow on-screen instructions to authorize your developer account to push to your device. Note that the first time you run the app, installing the correct user profiles may take some time.
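Once the app is running, a quick way to confirm that the injected WebARonARKit.js polyfill is active is to load a small test page in the app and check the console output remotely (see the Debugging section below). The sketch below only assumes the standard WebVR 1.1 entry points plus a `hasPassThroughCamera` capability flag as described in the WebVR extension document referenced under "How WebARonARKit works"; treat that flag name as an assumption to verify against the extension document.

```js
// Minimal check that an AR-capable VRDisplay is exposed by WebARonARKit.
// navigator.getVRDisplays(), displayName and capabilities are WebVR 1.1;
// `hasPassThroughCamera` is the AR extension flag (assumed name, see the
// webvr_ar_extension document referenced in this README).
function checkForARDisplay() {
  if (!navigator.getVRDisplays) {
    console.log('WebVR is not available; the WebARonARKit polyfill did not load.');
    return;
  }
  navigator.getVRDisplays().then(function (displays) {
    var arDisplay = (displays || []).find(function (display) {
      // The AR extension marks displays that are backed by a camera feed.
      return display.capabilities && display.capabilities.hasPassThroughCamera;
    });
    if (arDisplay) {
      console.log('AR display found: ' + arDisplay.displayName);
    } else {
      console.log('No AR-capable VRDisplay found.');
    }
  });
}

window.addEventListener('load', checkForARDisplay);
```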
### 2. Viewing examples
A [list of example scenes](https://developers.google.com/ar/develop/web/getting-started#examples) compatible with WebARonARKit and [WebARonARCore](https://github.com/google-ar/WebARonARCore) is available at [developers.google.com](https://developers.google.com/ar/develop/web/getting-started#examples).

### 3. Building your own scenes
To build AR web experiences that work with WebARonARKit and [WebARonARCore for Android](https://github.com/google-ar/WebARonARCore), we recommend **[three.ar.js](https://github.com/google-ar/three.ar.js)**, a helper library that works with the popular [three.js](http://threejs.org) WebGL framework. [Three.ar.js](https://github.com/google-ar/three.ar.js) provides common AR building blocks, such as a visible reticle that draws on top of real-world surfaces, and [example scenes](https://github.com/google-ar/three.ar.js#examples).

### 4. Debugging
Pages in WebARonARKit can be inspected and debugged remotely with macOS Safari; this requires macOS Safari 11.0 (available as Safari Technology Preview) or higher. You can download it from [https://developer.apple.com/safari/technology-preview/](https://developer.apple.com/safari/technology-preview/).

## How WebARonARKit works

WebARonARKit is built on the following:

* **A WKWebView instance.** [WKWebView](https://developer.apple.com/documentation/webkit/wkwebview) is an iOS class that enables developers to embed web views in their native apps, and to expose native device capabilities to web content via custom APIs. In our case, we use WKWebView to expose ARKit functionality to web content. Native/web app frameworks such as [Cordova](https://cordova.apache.org/) use a similar approach.
* **Extensions to the WebVR API.** The WebVR API (v1.1) gives us much of what we need for AR. We then extend it to add a few more essentials: motion tracking, rendering of the camera's video feed, and basic understanding of the real world. For details, see [WebVR API extension for smartphone AR](https://github.com/google-ar/three.ar.js/blob/master/webvr_ar_extension.md).

WebARonARKit injects a script (WebARonARKit.js) as soon as a page is loaded into the WKWebView. This script, among other things, polyfills the WebVR 1.1 API and handles all the communication between native and web content.

When running, WebARonARKit layers a fullscreen camera feed in the background with a transparent WKWebView on top. This arrangement creates a fairly seamless result between the "real world" and rendered web content, but comes with a few limitations:

* In pass-through camera-based AR, the timestamp-based synchronization of the camera frame and the 6DOF pose needs to be as accurate as possible. Because of the nature of this two-layer system, WebARonARKit is not able to ensure proper synchronization. This contributes to perceptible "drift" between virtual objects and the real world seen through the camera feed, especially on iPhones.
* The bidirectional communication bridge between the native side and the JavaScript side is always asynchronous. WebARonARKit works around this limitation as much as possible using various techniques (for example for hitTest, which has to behave synchronously).
* In the current version of WebARonARKit, the camera feed is always rendered on the native side, underneath the WKWebView that runs the web content. Not being able to expose the video frame to the web side prevents interesting use cases such as reflections, refractions, environment mapping, or simply rendering the video at a specific location or size (in current builds it is always fullscreen). Rendering the video feed on the web side would also resolve the synchronization problems mentioned earlier.
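To make the extended API above concrete, here is a minimal sketch of how a page can read the 6DOF pose every frame and hit-test a tap against real-world surfaces. `VRFrameData` and `getFrameData()` are standard WebVR 1.1; `hitTest(x, y)` with normalized screen coordinates and the `modelMatrix` on each returned hit are taken from the extension document linked above, so treat those names and signatures as assumptions rather than a settled standard. In practice, three.ar.js wraps these calls for you.

```js
// Sketch: per-frame pose from WebVR 1.1 plus a hit test from the AR extension.
// `vrDisplay` is assumed to be the AR-capable display found via navigator.getVRDisplays().
var frameData = new VRFrameData();

function onFrame() {
  vrDisplay.getFrameData(frameData);   // 6DOF pose, synced to the camera feed as well as possible
  var pose = frameData.pose;           // pose.position / pose.orientation drive the virtual camera
  // ... update the WebGL camera from `pose` and render the scene here ...
  vrDisplay.requestAnimationFrame(onFrame);
}
vrDisplay.requestAnimationFrame(onFrame);

window.addEventListener('touchstart', function (event) {
  var touch = event.touches[0];
  // Per the extension document, hitTest takes normalized (0..1) screen
  // coordinates and returns its results synchronously, which is why the
  // native bridge has to resolve this call without an asynchronous round trip.
  var x = touch.pageX / window.innerWidth;
  var y = touch.pageY / window.innerHeight;
  var hits = vrDisplay.hitTest(x, y);
  if (hits && hits.length > 0) {
    // Each hit carries a 4x4 modelMatrix locating the surface in world space.
    console.log('Hit a surface at', hits[0].modelMatrix);
  }
});
```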
## Known issues
+ There seems to be a [bug](https://bugs.webkit.org/show_bug.cgi?id=170595) in WebKit that affects the [WKWebView](https://developer.apple.com/documentation/webkit/wkwebview) in iOS 10+: the window.innerWidth and window.innerHeight values are not up to date when the 'resize' event is fired, so pages cannot resize properly when the device changes orientation. To work around this issue, WebARonARKit intercepts any event listener added for the window 'resize' event and fires it only once the device orientation change has completed and the values have been updated.

+ Because of the nature of how WebARonARKit is built (a webview executing the web content on top of a native process that renders the camera feed, handles the ARKit code, and manages the communication between the two), it is very hard to get a pose estimation that completely matches the underlying camera feed. This lack of tracking and rendering synchronization is particularly noticeable on iPhones. It is less perceptible on iPads, so we recommend iPads for optimal results.

## Future work
+ Improve performance, particularly on iPhones, by implementing alternative methods of communicating between the WKWebView and the native side. The goal is to synchronize the camera feed and the pose used in WebVR as closely as possible.
+ Add more AR-related features.

## License
Apache License Version 2.0 (see the `LICENSE` file inside this repo).

--------------------------------------------------------------------------------
/README_Kr.md:
--------------------------------------------------------------------------------
1 | # WebARonARKit 2 | 3 | **개발자들이 웹 기술을 이용해 증강현실(AR) 환경을 만들 수 있도록 하는 iOS의 실험적 앱입니다. [안드로이드 버전](https://github.com/google-ar/WebARonARCore)도 사용할 수 있습니다.** 4 | 5 | Spawn-at-Camera exampleSpawn-at-Surface example 6 | 7 | **Note**: 이 것은 구글의 정식 제품이 아닙니다. 완전한 기능을 갖춘 웹브라우저도 아닙니다. 또한 JavaScript API의 표준 혹은 표준경로를 사용하도록 설정하지 않았습니다. WebARonARKit는 개발자 실험을 가능하게 하는데 의미가 있습니다. WebARonARKit 의 시스템 구성에 대한 자세한 내용은, [WebARonARKit작동방식](#WebARonARKit작동방식)를 참조하세요. 8 | 9 | ## 10 | 11 | ## 시작하기 12 | 13 | WebARonARKit는 반드시 Xcode9 와 iOS 11을 이용하는 환경으로부터 빌드되어야 합니다. 이것은 Apple 개발자 계정을 필요로 합니다. 아직 계정이 없으시다면, [developer.apple.com](http://developer.apple.com)에 가입하세요. 14 | 15 | 16 | 17 | ### 0. 전제조건 18 | 19 | WebARonARKit은 iOS ARKit 위에서 구축되었으며, A9+ 프로세서가 탑재되고, iOS11이 구동되는 iOS 기기를 필요로 합니다: 20 | 21 | + iPad (2017) 22 | + iPad Pro (9.7, 10.5 or 12.9 inches) 23 | + iPhone 7 and 7 Plus 24 | 25 | 26 | 27 | WebARonARKit은 다음과 같은 환경과 조건에서 빌드되어야 합니다. 28 | 29 | + [Xcode 9](https://developer.apple.com/xcode/) 30 | + iOS 11 31 | + Apple 개발자 계정. 아직 계정이 없으시다면, [developer.apple.com](http://developer.apple.com)에 가입하세요. 32 | 33 | 34 | 35 | ### 1. WebARonARKit를 실행하세요 36 | 37 | 1. WebARonARKit 깃헙 저장소를 복사하세요. 38 | 2. Xcode9를 실행하세요. 39 | 3. 복사한 WebARonARKit 저장소안에 있는 Xcode 프로젝트를 (.xcodeproj) Xcode9로 실행하세요. 40 | 4. 프로젝트 네비게이션 (왼쪽 행에 있는 윗쪽의 파란 아이콘)에서 WebARonARKit 프로젝트를 선택하세요. 그리고 WebARonARKit의 target을 `Targets`아래 것으로 선택해주세요. 41 | 42 | - ![GIF showing how to set the project target.](https://media.giphy.com/media/xUOxfc84FVlNqqeJeU/giphy.gif) 43 | 5 |
WebARonARKit를 target으로 선택한 경우, 기본 패널에서 자동으로 "General" 탭이 표시됩니다. 여기서 signing 섹션을 찾아 iOS 개발자 계정/팀에 해당하는 팀을 선택합니다. 다음 단계에서 오류가 발생하는 경우, 아마도 code signing 오류 때문입니다. code signing 오류가 발생한다면, Xcode에서 아래와 같이 지시사항을 따르세요. (아래의 GIF 예제에서 식별자에 "-personal"이 추가 된 것처럼 번들 식별자가 변경되어야 합니다.) 44 | - ![GIF showing how to code sign.](https://media.giphy.com/media/3osBL6RqUu3prBVYOc/giphy.gif) 45 | 6. 먼저 컴퓨터에 연결되어 있는지 확인한 다음, Product >> Destination 메뉴에서 선택하거나 실행버튼 옆에 있는 드롭 다운 메뉴에서 기기를 빌드 대상으로 설정하세요. 46 | - ![GIF showing how to set the build destination!](https://media.giphy.com/media/3osBL6aab1y581gPyE/giphy.gif) 47 | 7. 빌드하세요. 그리고 Run 버튼을 누르거나 `⌘-R`을 타이핑해서 당신의 기기에 푸시하세요. 빌드가 완료되고 기기에 푸시되면 앱이 자동으로 열립니다. 기기로 푸시하기 위해 개발자 계정을 승인하려면 화면의 안내를 따라야 할 수 있습니다. 앱을 처음 실행하면 올바른 사용자 프로필을 설치하는 데 약간의 시간이 걸릴 수 있습니다. 48 | 49 | 50 | 51 | ### 2. 예제보기 52 | 53 | WebARonARKit 및 [WebARonARCore](https://github.com/google-ar/WebARonARCore)과 호환되는 [예제 장면 목록들](https://developers.google.com/ar/develop/web/getting-started#examples)은 [developers.google.com](https://developers.google.com/ar/develop/web/getting-started#examples)에서 사용할 수 있습니다. 54 | 55 | ### 3. 당신만의 장면을 만드는 것 56 | 57 | WebARonARKit 및 [Android 용 WebARonARCore](https://github.com/google-ar/WebARonARCore)과 함께 작동하는 AR 웹 환경을 구축하기 위해, 인기있는 WebGL 프레임워크인 [three.js](http://threejs.org)과 함께 작동하는 헬퍼 라이브러리인 [three.ar.js](https://github.com/google-ar/three.ar.js)를 추천합니다. [three.ar.js](https://github.com/google-ar/three.ar.js)는 현실 세계의 표면 위에 그리는 가시적 십자선과 같은 일반적인 AR구성 요소와 예제 장면들을 제공합니다. 58 | 59 | ### 4. 디버깅 60 | 61 | 웹 사이트의 페이지는 MacOS Safari와 함께 원격으로 검사하고 디버깅 할 수 있지만 이 작업을 수행하려면 MacOSSafari11.0(SafariTechnologyPreview)이상이 필요합니다. [https://developer.apple.com/safari/technology-preview/](https://developer.apple.com/safari/technology-preview/)에서 MacOSSafari11을 다운로드할 수 있습니다. 62 | 63 | 64 | 65 | ## WebARonARKit 작동방식 66 | 67 | WebARonARKit은 다음과 같이 구축되었습니다: 68 | 69 | * **WKWebView 인스턴스.** [WKWebView](https://developer.apple.com/documentation/webkit/wkwebview)는 개발자가 네이티브 앱에 웹 뷰를 삽입할 수 있게 하고, 사용자 정의 API를 통해 네이티브 장치 기능을 웹 콘텐츠에 노출시킬 수 있게 하는 iOS 클래스입니다. 우리의 경우에는 ARKit의 기능을 웹 콘텐츠에 노출시키는 것을 위해 WKWebView를 사용합니다. [Cordova](https://cordova.apache.org/)와 같은 네이티브/웹 앱 프레임워크도 유사한 접근 방식을 사용합니다. 70 | * **WebVR API의 확장.** WebVR API (v1.1)는 AR에 필요한 대부분을 제공합니다. 그런 다음 이 기능을 확장하여 모션 추적, 카메라의 비디오 피드 렌더링, 실제 환경에 대한 기본적인 이해 등의 몇가지 필수 요소를 추가합니다. 자세한 내용은 [WebVR API extension for smartphone AR](https://github.com/google-ar/three.ar.js/blob/master/webvr_ar_extension.md) 보세요. 71 | 72 | WebARonARKit는 페이지가 WKWebView에 로딩되는 즉시 스크립트(WebARonARKit.js)를 주입합니다. 특히 이 스크립트는 WebVR 1.1 API 를 폴리필해주고 네이티브와 웹 컨텐츠 사이의 모든 통신을 처리해줍니다. 73 | 74 | 실행 중일때, WebARonARKit은 전체 화면 카메라 피드를 가장 상단에 위치한 투명한 WKWebView의 백그라운드에 삽입합니다. 이런 배열은 "현실 세계"와 렌더링된 웹 컨텐츠 사이에서 비교적 매끄러운 결과를 만듭니다. 그러나 몇가지 한계가 있습니다. 75 | 76 | * 패스-스루 카메라 기반 AR에서는 카메라 프레임의 동기화를 기반으로 한 타임 스탬프와 6DOF(6Degrees Of Freedom , 6자유도) 포즈가 최대한 정확해야 합니다. 이러한 두 계층 시스템 특성 때문에, WebARonARKit은 적절한 동기화를 보장할 수 없습니다. 이것은 특히 아이폰에서 카메라 피드와 함께 보여지는 가상 객체들과 현실 세계 사이의 감지할 수 있는 "편차"에 기여합니다. 77 | * 항상 네이티브 측과 Javascript 측 사이의 양방향 통신 브릿지를 사용합니다. WebARonARKit는 이런 한계를 해결하기 위해 가능한 많은 다양한 기술들(동기적이어야하는 hitTest와 같은)을 사용하며 시도합니다. 78 | * 현재 WebARonARKit 버전에서는, 웹 컨텐츠를 실행하는 WKWebView의 아래에서 카메라 피드는 항상 네이티브 측에서 렌더링 됩니다. 비디오 프레임을 웹에 노출시키지 못하는 것은 반사,굴절, 환경 매핑 또는 특정 위치 혹은 사이즈 (현재 빌드되어 있는 것은 항상 전체화면입니다.)에서의 단순한 비디오 렌더링과 같은 흥미로운 유스케이스들을 방해합니다. 웹 측에서 비디오 피드를 렌더링을 하면 이전에 언급한 동기 문제도 해결됩니다. 
79 | 80 | ## 알려진 문제 81 | + iOS 10 [WKWebView](https://developer.apple.com/documentation/webkit/wkwebview)에 영향을 주는 WebKit의 [버그](https://bugs.webkit.org/show_bug.cgi?id=170595)가 있는 것 같습니다. 여기서 이벤트가 발생하면 window.innerWidth 및 window.innerHeight 값이 정확하게 업데이트되지 않아 디바이스의 오리엔테이션이 변경 될 때 제대로 크기를 조정할 수 없습니다. WebARonARKit에서 이 문제를 해결하려면, 윈도우에서 '크기조정' 이벤트를 수신하기위해 이벤트 리스너가 생성됬을 때, 이 것은 디바이스의 오리엔테이션이 변경될때만 인터셉트 되어 업데이트 되면 됩니다. 82 | + WebARonARKit의 구현된 방식때문에 (웹뷰는 웹 컨텐츠를 렌더링하여 카메라 피드를 렌더링하고 ARKit 코드와 그 사이의 통신을 처리하는 웹 프로세스를 실행하는 웹 뷰이므로) 기본 카메라 피드를 기반으로 완전히 일치하는 올바른 포즈 추정을 얻는 것은 매우 어렵습니다. 이러한 추적 및 렌더링 동기화의 부족은 iPhone에서 특히 두드러집니다. iPads에서는 이러한 오류가 덜 감지되므로 최적의 결과를 위해 iPads를 권장합니다. 83 | 84 | ## 앞으로 할 일 85 | + 특히 iPhone에서 대체가능한 WKWebView와 네이티브 측 간의 통신 방법을 구현하여 성능을 향상시켜야 합니다. 목표는 가능한 한 카메라 피드와 WebVR에서 사용되는 포즈를 동기화하는 것입니다. 86 | + 더 많은 AR 관련 기능을 추가하세요. 87 | 88 | ## License 89 | Apache License Version 2.0 (see the `LICENSE` file inside this repo). 90 | -------------------------------------------------------------------------------- /WebARonARKit/WebARonARKit.xcodeproj/project.pbxproj: -------------------------------------------------------------------------------- 1 | // !$*UTF8*$! 2 | { 3 | archiveVersion = 1; 4 | classes = { 5 | }; 6 | objectVersion = 48; 7 | objects = { 8 | 9 | /* Begin PBXBuildFile section */ 10 | BD61CF8C1F33FB1700733763 /* WebARonARKit.js in Resources */ = {isa = PBXBuildFile; fileRef = BD61CF8B1F33FA0A00733763 /* WebARonARKit.js */; }; 11 | E1C5EBAF1F59C45A00C667C2 /* ProgressView.m in Sources */ = {isa = PBXBuildFile; fileRef = E1C5EBAE1F59C45A00C667C2 /* ProgressView.m */; }; 12 | E1F3E62A200817CA00156904 /* NavigationView.m in Sources */ = {isa = PBXBuildFile; fileRef = E1F3E629200817CA00156904 /* NavigationView.m */; }; 13 | F66038681EF5FCC100385A4C /* AppDelegate.m in Sources */ = {isa = PBXBuildFile; fileRef = F66038671EF5FCC100385A4C /* AppDelegate.m */; }; 14 | F660386A1EF5FCC100385A4C /* Shaders.metal in Sources */ = {isa = PBXBuildFile; fileRef = F66038691EF5FCC100385A4C /* Shaders.metal */; }; 15 | F660386E1EF5FCC100385A4C /* Renderer.m in Sources */ = {isa = PBXBuildFile; fileRef = F660386D1EF5FCC100385A4C /* Renderer.m */; }; 16 | F66038711EF5FCC100385A4C /* ViewController.m in Sources */ = {isa = PBXBuildFile; fileRef = F66038701EF5FCC100385A4C /* ViewController.m */; }; 17 | F66038741EF5FCC100385A4C /* Main.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = F66038721EF5FCC100385A4C /* Main.storyboard */; }; 18 | F66038761EF5FCC100385A4C /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = F66038751EF5FCC100385A4C /* Assets.xcassets */; }; 19 | F66038791EF5FCC100385A4C /* LaunchScreen.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = F66038771EF5FCC100385A4C /* LaunchScreen.storyboard */; }; 20 | F660387C1EF5FCC100385A4C /* main.m in Sources */ = {isa = PBXBuildFile; fileRef = F660387B1EF5FCC100385A4C /* main.m */; }; 21 | /* End PBXBuildFile section */ 22 | 23 | /* Begin PBXCopyFilesBuildPhase section */ 24 | F6161D111EF8408100C915FA /* CopyFiles */ = { 25 | isa = PBXCopyFilesBuildPhase; 26 | buildActionMask = 8; 27 | dstPath = ""; 28 | dstSubfolderSpec = 7; 29 | files = ( 30 | ); 31 | runOnlyForDeploymentPostprocessing = 1; 32 | }; 33 | /* End PBXCopyFilesBuildPhase section */ 34 | 35 | /* Begin PBXFileReference section */ 36 | BD61CF8B1F33FA0A00733763 /* WebARonARKit.js */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.javascript; path = WebARonARKit.js; sourceTree = ""; }; 37 | E1C5EBAE1F59C45A00C667C2 /* ProgressView.m */ = {isa = 
PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = ProgressView.m; sourceTree = ""; }; 38 | E1C5EBB01F59C47000C667C2 /* ProgressView.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = ProgressView.h; sourceTree = ""; }; 39 | E1F3E6282008178300156904 /* NavigationView.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = NavigationView.h; sourceTree = ""; }; 40 | E1F3E629200817CA00156904 /* NavigationView.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = NavigationView.m; sourceTree = ""; }; 41 | F66038631EF5FCC100385A4C /* WebARonARKit.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = WebARonARKit.app; sourceTree = BUILT_PRODUCTS_DIR; }; 42 | F66038661EF5FCC100385A4C /* AppDelegate.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = AppDelegate.h; sourceTree = ""; }; 43 | F66038671EF5FCC100385A4C /* AppDelegate.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = AppDelegate.m; sourceTree = ""; }; 44 | F66038691EF5FCC100385A4C /* Shaders.metal */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.metal; path = Shaders.metal; sourceTree = ""; }; 45 | F660386B1EF5FCC100385A4C /* ShaderTypes.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = ShaderTypes.h; sourceTree = ""; }; 46 | F660386C1EF5FCC100385A4C /* Renderer.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = Renderer.h; sourceTree = ""; }; 47 | F660386D1EF5FCC100385A4C /* Renderer.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = Renderer.m; sourceTree = ""; }; 48 | F660386F1EF5FCC100385A4C /* ViewController.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = ViewController.h; sourceTree = ""; }; 49 | F66038701EF5FCC100385A4C /* ViewController.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = ViewController.m; sourceTree = ""; }; 50 | F66038731EF5FCC100385A4C /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/Main.storyboard; sourceTree = ""; }; 51 | F66038751EF5FCC100385A4C /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = ""; }; 52 | F66038781EF5FCC100385A4C /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/LaunchScreen.storyboard; sourceTree = ""; }; 53 | F660387A1EF5FCC100385A4C /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = ""; }; 54 | F660387B1EF5FCC100385A4C /* main.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = main.m; sourceTree = ""; }; 55 | /* End PBXFileReference section */ 56 | 57 | /* Begin PBXFrameworksBuildPhase section */ 58 | F66038601EF5FCC100385A4C /* Frameworks */ = { 59 | isa = PBXFrameworksBuildPhase; 60 | buildActionMask = 2147483647; 61 | files = ( 62 | ); 63 | runOnlyForDeploymentPostprocessing = 0; 64 | }; 65 | /* End PBXFrameworksBuildPhase section */ 66 | 67 | /* Begin PBXGroup section */ 68 | F660385A1EF5FCC100385A4C = { 69 | isa = PBXGroup; 70 | children = ( 71 | BD61CF8B1F33FA0A00733763 /* WebARonARKit.js */, 72 | F66038651EF5FCC100385A4C /* WebARonARKit */, 73 | F66038641EF5FCC100385A4C /* Products */, 74 | ); 75 | sourceTree = ""; 76 | }; 77 | F66038641EF5FCC100385A4C /* Products */ = { 78 | isa = PBXGroup; 
79 | children = ( 80 | F66038631EF5FCC100385A4C /* WebARonARKit.app */, 81 | ); 82 | name = Products; 83 | sourceTree = ""; 84 | }; 85 | F66038651EF5FCC100385A4C /* WebARonARKit */ = { 86 | isa = PBXGroup; 87 | children = ( 88 | F66038661EF5FCC100385A4C /* AppDelegate.h */, 89 | F66038671EF5FCC100385A4C /* AppDelegate.m */, 90 | F66038691EF5FCC100385A4C /* Shaders.metal */, 91 | F660386B1EF5FCC100385A4C /* ShaderTypes.h */, 92 | F660386C1EF5FCC100385A4C /* Renderer.h */, 93 | F660386D1EF5FCC100385A4C /* Renderer.m */, 94 | F660386F1EF5FCC100385A4C /* ViewController.h */, 95 | F66038701EF5FCC100385A4C /* ViewController.m */, 96 | F66038721EF5FCC100385A4C /* Main.storyboard */, 97 | F66038751EF5FCC100385A4C /* Assets.xcassets */, 98 | F66038771EF5FCC100385A4C /* LaunchScreen.storyboard */, 99 | F660387A1EF5FCC100385A4C /* Info.plist */, 100 | F660387B1EF5FCC100385A4C /* main.m */, 101 | E1C5EBB01F59C47000C667C2 /* ProgressView.h */, 102 | E1C5EBAE1F59C45A00C667C2 /* ProgressView.m */, 103 | E1F3E6282008178300156904 /* NavigationView.h */, 104 | E1F3E629200817CA00156904 /* NavigationView.m */, 105 | ); 106 | path = WebARonARKit; 107 | sourceTree = ""; 108 | }; 109 | /* End PBXGroup section */ 110 | 111 | /* Begin PBXNativeTarget section */ 112 | F66038621EF5FCC100385A4C /* WebARonARKit */ = { 113 | isa = PBXNativeTarget; 114 | buildConfigurationList = F660387F1EF5FCC100385A4C /* Build configuration list for PBXNativeTarget "WebARonARKit" */; 115 | buildPhases = ( 116 | F660385F1EF5FCC100385A4C /* Sources */, 117 | F66038601EF5FCC100385A4C /* Frameworks */, 118 | F66038611EF5FCC100385A4C /* Resources */, 119 | F6161D111EF8408100C915FA /* CopyFiles */, 120 | ); 121 | buildRules = ( 122 | ); 123 | dependencies = ( 124 | ); 125 | name = WebARonARKit; 126 | productName = WebARonARKit; 127 | productReference = F66038631EF5FCC100385A4C /* WebARonARKit.app */; 128 | productType = "com.apple.product-type.application"; 129 | }; 130 | /* End PBXNativeTarget section */ 131 | 132 | /* Begin PBXProject section */ 133 | F660385B1EF5FCC100385A4C /* Project object */ = { 134 | isa = PBXProject; 135 | attributes = { 136 | LastUpgradeCheck = 0900; 137 | ORGANIZATIONNAME = "Iker Jamardo Zugaza"; 138 | TargetAttributes = { 139 | F66038621EF5FCC100385A4C = { 140 | CreatedOnToolsVersion = 9.0; 141 | }; 142 | }; 143 | }; 144 | buildConfigurationList = F660385E1EF5FCC100385A4C /* Build configuration list for PBXProject "WebARonARKit" */; 145 | compatibilityVersion = "Xcode 8.0"; 146 | developmentRegion = en; 147 | hasScannedForEncodings = 0; 148 | knownRegions = ( 149 | en, 150 | Base, 151 | ); 152 | mainGroup = F660385A1EF5FCC100385A4C; 153 | productRefGroup = F66038641EF5FCC100385A4C /* Products */; 154 | projectDirPath = ""; 155 | projectRoot = ""; 156 | targets = ( 157 | F66038621EF5FCC100385A4C /* WebARonARKit */, 158 | ); 159 | }; 160 | /* End PBXProject section */ 161 | 162 | /* Begin PBXResourcesBuildPhase section */ 163 | F66038611EF5FCC100385A4C /* Resources */ = { 164 | isa = PBXResourcesBuildPhase; 165 | buildActionMask = 2147483647; 166 | files = ( 167 | F66038791EF5FCC100385A4C /* LaunchScreen.storyboard in Resources */, 168 | F66038761EF5FCC100385A4C /* Assets.xcassets in Resources */, 169 | F66038741EF5FCC100385A4C /* Main.storyboard in Resources */, 170 | BD61CF8C1F33FB1700733763 /* WebARonARKit.js in Resources */, 171 | ); 172 | runOnlyForDeploymentPostprocessing = 0; 173 | }; 174 | /* End PBXResourcesBuildPhase section */ 175 | 176 | /* Begin PBXSourcesBuildPhase section */ 177 | 
F660385F1EF5FCC100385A4C /* Sources */ = { 178 | isa = PBXSourcesBuildPhase; 179 | buildActionMask = 2147483647; 180 | files = ( 181 | F660387C1EF5FCC100385A4C /* main.m in Sources */, 182 | E1F3E62A200817CA00156904 /* NavigationView.m in Sources */, 183 | F660386A1EF5FCC100385A4C /* Shaders.metal in Sources */, 184 | F66038681EF5FCC100385A4C /* AppDelegate.m in Sources */, 185 | F66038711EF5FCC100385A4C /* ViewController.m in Sources */, 186 | F660386E1EF5FCC100385A4C /* Renderer.m in Sources */, 187 | E1C5EBAF1F59C45A00C667C2 /* ProgressView.m in Sources */, 188 | ); 189 | runOnlyForDeploymentPostprocessing = 0; 190 | }; 191 | /* End PBXSourcesBuildPhase section */ 192 | 193 | /* Begin PBXVariantGroup section */ 194 | F66038721EF5FCC100385A4C /* Main.storyboard */ = { 195 | isa = PBXVariantGroup; 196 | children = ( 197 | F66038731EF5FCC100385A4C /* Base */, 198 | ); 199 | name = Main.storyboard; 200 | sourceTree = ""; 201 | }; 202 | F66038771EF5FCC100385A4C /* LaunchScreen.storyboard */ = { 203 | isa = PBXVariantGroup; 204 | children = ( 205 | F66038781EF5FCC100385A4C /* Base */, 206 | ); 207 | name = LaunchScreen.storyboard; 208 | sourceTree = ""; 209 | }; 210 | /* End PBXVariantGroup section */ 211 | 212 | /* Begin XCBuildConfiguration section */ 213 | F660387D1EF5FCC100385A4C /* Debug */ = { 214 | isa = XCBuildConfiguration; 215 | buildSettings = { 216 | ALWAYS_SEARCH_USER_PATHS = NO; 217 | CLANG_ANALYZER_NONNULL = YES; 218 | CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE; 219 | CLANG_CXX_LANGUAGE_STANDARD = "gnu++14"; 220 | CLANG_CXX_LIBRARY = "libc++"; 221 | CLANG_ENABLE_MODULES = YES; 222 | CLANG_ENABLE_OBJC_ARC = YES; 223 | CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES; 224 | CLANG_WARN_BOOL_CONVERSION = YES; 225 | CLANG_WARN_COMMA = YES; 226 | CLANG_WARN_CONSTANT_CONVERSION = YES; 227 | CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; 228 | CLANG_WARN_DOCUMENTATION_COMMENTS = YES; 229 | CLANG_WARN_EMPTY_BODY = YES; 230 | CLANG_WARN_ENUM_CONVERSION = YES; 231 | CLANG_WARN_INFINITE_RECURSION = YES; 232 | CLANG_WARN_INT_CONVERSION = YES; 233 | CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; 234 | CLANG_WARN_RANGE_LOOP_ANALYSIS = YES; 235 | CLANG_WARN_STRICT_PROTOTYPES = YES; 236 | CLANG_WARN_SUSPICIOUS_MOVE = YES; 237 | CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE; 238 | CLANG_WARN_UNREACHABLE_CODE = YES; 239 | CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; 240 | CODE_SIGN_IDENTITY = "iPhone Developer"; 241 | COPY_PHASE_STRIP = NO; 242 | DEBUG_INFORMATION_FORMAT = dwarf; 243 | ENABLE_STRICT_OBJC_MSGSEND = YES; 244 | ENABLE_TESTABILITY = YES; 245 | GCC_C_LANGUAGE_STANDARD = gnu11; 246 | GCC_DYNAMIC_NO_PIC = NO; 247 | GCC_NO_COMMON_BLOCKS = YES; 248 | GCC_OPTIMIZATION_LEVEL = 0; 249 | GCC_PREPROCESSOR_DEFINITIONS = ( 250 | "DEBUG=1", 251 | "$(inherited)", 252 | ); 253 | GCC_WARN_64_TO_32_BIT_CONVERSION = YES; 254 | GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; 255 | GCC_WARN_UNDECLARED_SELECTOR = YES; 256 | GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; 257 | GCC_WARN_UNUSED_FUNCTION = YES; 258 | GCC_WARN_UNUSED_VARIABLE = YES; 259 | IPHONEOS_DEPLOYMENT_TARGET = 11.0; 260 | MTL_ENABLE_DEBUG_INFO = YES; 261 | ONLY_ACTIVE_ARCH = YES; 262 | SDKROOT = iphoneos; 263 | }; 264 | name = Debug; 265 | }; 266 | F660387E1EF5FCC100385A4C /* Release */ = { 267 | isa = XCBuildConfiguration; 268 | buildSettings = { 269 | ALWAYS_SEARCH_USER_PATHS = NO; 270 | CLANG_ANALYZER_NONNULL = YES; 271 | CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE; 272 | CLANG_CXX_LANGUAGE_STANDARD = "gnu++14"; 273 | 
CLANG_CXX_LIBRARY = "libc++"; 274 | CLANG_ENABLE_MODULES = YES; 275 | CLANG_ENABLE_OBJC_ARC = YES; 276 | CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES; 277 | CLANG_WARN_BOOL_CONVERSION = YES; 278 | CLANG_WARN_COMMA = YES; 279 | CLANG_WARN_CONSTANT_CONVERSION = YES; 280 | CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; 281 | CLANG_WARN_DOCUMENTATION_COMMENTS = YES; 282 | CLANG_WARN_EMPTY_BODY = YES; 283 | CLANG_WARN_ENUM_CONVERSION = YES; 284 | CLANG_WARN_INFINITE_RECURSION = YES; 285 | CLANG_WARN_INT_CONVERSION = YES; 286 | CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; 287 | CLANG_WARN_RANGE_LOOP_ANALYSIS = YES; 288 | CLANG_WARN_STRICT_PROTOTYPES = YES; 289 | CLANG_WARN_SUSPICIOUS_MOVE = YES; 290 | CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE; 291 | CLANG_WARN_UNREACHABLE_CODE = YES; 292 | CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; 293 | CODE_SIGN_IDENTITY = "iPhone Developer"; 294 | COPY_PHASE_STRIP = NO; 295 | DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym"; 296 | ENABLE_NS_ASSERTIONS = NO; 297 | ENABLE_STRICT_OBJC_MSGSEND = YES; 298 | GCC_C_LANGUAGE_STANDARD = gnu11; 299 | GCC_NO_COMMON_BLOCKS = YES; 300 | GCC_WARN_64_TO_32_BIT_CONVERSION = YES; 301 | GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; 302 | GCC_WARN_UNDECLARED_SELECTOR = YES; 303 | GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; 304 | GCC_WARN_UNUSED_FUNCTION = YES; 305 | GCC_WARN_UNUSED_VARIABLE = YES; 306 | IPHONEOS_DEPLOYMENT_TARGET = 11.0; 307 | MTL_ENABLE_DEBUG_INFO = NO; 308 | SDKROOT = iphoneos; 309 | VALIDATE_PRODUCT = YES; 310 | }; 311 | name = Release; 312 | }; 313 | F66038801EF5FCC100385A4C /* Debug */ = { 314 | isa = XCBuildConfiguration; 315 | buildSettings = { 316 | ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; 317 | DEVELOPMENT_TEAM = ETH8NG5FR9; 318 | INFOPLIST_FILE = "$(SRCROOT)/WebARonARKit/Info.plist"; 319 | LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks"; 320 | PRODUCT_BUNDLE_IDENTIFIER = Google.AR.WebARonARKit; 321 | PRODUCT_NAME = "$(TARGET_NAME)"; 322 | TARGETED_DEVICE_FAMILY = "1,2"; 323 | }; 324 | name = Debug; 325 | }; 326 | F66038811EF5FCC100385A4C /* Release */ = { 327 | isa = XCBuildConfiguration; 328 | buildSettings = { 329 | ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; 330 | DEVELOPMENT_TEAM = ETH8NG5FR9; 331 | INFOPLIST_FILE = "$(SRCROOT)/WebARonARKit/Info.plist"; 332 | LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks"; 333 | PRODUCT_BUNDLE_IDENTIFIER = Google.AR.WebARonARKit; 334 | PRODUCT_NAME = "$(TARGET_NAME)"; 335 | TARGETED_DEVICE_FAMILY = "1,2"; 336 | }; 337 | name = Release; 338 | }; 339 | /* End XCBuildConfiguration section */ 340 | 341 | /* Begin XCConfigurationList section */ 342 | F660385E1EF5FCC100385A4C /* Build configuration list for PBXProject "WebARonARKit" */ = { 343 | isa = XCConfigurationList; 344 | buildConfigurations = ( 345 | F660387D1EF5FCC100385A4C /* Debug */, 346 | F660387E1EF5FCC100385A4C /* Release */, 347 | ); 348 | defaultConfigurationIsVisible = 0; 349 | defaultConfigurationName = Release; 350 | }; 351 | F660387F1EF5FCC100385A4C /* Build configuration list for PBXNativeTarget "WebARonARKit" */ = { 352 | isa = XCConfigurationList; 353 | buildConfigurations = ( 354 | F66038801EF5FCC100385A4C /* Debug */, 355 | F66038811EF5FCC100385A4C /* Release */, 356 | ); 357 | defaultConfigurationIsVisible = 0; 358 | defaultConfigurationName = Release; 359 | }; 360 | /* End XCConfigurationList section */ 361 | }; 362 | rootObject = F660385B1EF5FCC100385A4C /* Project object */; 363 | } 364 | 
-------------------------------------------------------------------------------- /WebARonARKit/WebARonARKit.xcodeproj/project.xcworkspace/contents.xcworkspacedata: -------------------------------------------------------------------------------- 1 | 2 | 4 | 6 | 7 | 8 | -------------------------------------------------------------------------------- /WebARonARKit/WebARonARKit/AppDelegate.h: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017 Google Inc. All Rights Reserved. 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | #import 18 | #import 19 | 20 | @interface AppDelegate : UIResponder 21 | 22 | @property(strong, nonatomic) UIWindow *window; 23 | 24 | @end 25 | -------------------------------------------------------------------------------- /WebARonARKit/WebARonARKit/AppDelegate.m: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017 Google Inc. All Rights Reserved. 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | #import "AppDelegate.h" 18 | #import "ViewController.h" 19 | 20 | @interface AppDelegate () 21 | 22 | @end 23 | 24 | @implementation AppDelegate 25 | 26 | - (BOOL)application:(UIApplication *)application 27 | didFinishLaunchingWithOptions:(NSDictionary *)launchOptions { 28 | // Override point for customization after application launch. 29 | return YES; 30 | } 31 | 32 | - (void)applicationWillResignActive:(UIApplication *)application { 33 | // Sent when the application is about to move from active to inactive state. 34 | // This can occur for certain types of temporary interruptions (such as an 35 | // incoming phone call or SMS message) or when the user quits the application 36 | // and it begins the transition to the background state. Use this method to 37 | // pause ongoing tasks, disable timers, and invalidate graphics rendering 38 | // callbacks. Games should use this method to pause the game. 39 | } 40 | 41 | - (void)applicationDidEnterBackground:(UIApplication *)application { 42 | // Use this method to release shared resources, save user data, invalidate 43 | // timers, and store enough application state information to restore your 44 | // application to its current state in case it is terminated later. If your 45 | // application supports background execution, this method is called instead of 46 | // applicationWillTerminate: when the user quits. 
47 | } 48 | 49 | - (void)applicationWillEnterForeground:(UIApplication *)application { 50 | // Called as part of the transition from the background to the active state; 51 | // here you can undo many of the changes made on entering the background. 52 | } 53 | 54 | - (void)applicationDidBecomeActive:(UIApplication *)application { 55 | // Restart any tasks that were paused (or not yet started) while the 56 | // application was inactive. If the application was previously in the 57 | // background, optionally refresh the user interface. 58 | } 59 | 60 | - (void)applicationWillTerminate:(UIApplication *)application { 61 | // Called when the application is about to terminate. Save data if 62 | // appropriate. See also applicationDidEnterBackground:. 63 | } 64 | 65 | - (BOOL)application:(UIApplication *)application handleOpenURL:(NSURL *)url { 66 | // Remove the custom scheme from the URL and use the rest as a URL to 67 | // be loaded in the ViewController. 68 | NSString *urlString = url.absoluteString; 69 | NSString *urlStringNoScheme = [urlString 70 | stringByReplacingOccurrencesOfString:[[url scheme] stringByAppendingString:@"://"] 71 | withString:@""]; 72 | ViewController *viewController = (ViewController *)self.window.rootViewController; 73 | [viewController loadURL:urlStringNoScheme]; 74 | return YES; 75 | } 76 | 77 | @end 78 | -------------------------------------------------------------------------------- /WebARonARKit/WebARonARKit/Assets.xcassets/AppIcon.appiconset/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "images" : [ 3 | { 4 | "idiom" : "iphone", 5 | "size" : "20x20", 6 | "scale" : "2x" 7 | }, 8 | { 9 | "idiom" : "iphone", 10 | "size" : "20x20", 11 | "scale" : "3x" 12 | }, 13 | { 14 | "idiom" : "iphone", 15 | "size" : "29x29", 16 | "scale" : "2x" 17 | }, 18 | { 19 | "idiom" : "iphone", 20 | "size" : "29x29", 21 | "scale" : "3x" 22 | }, 23 | { 24 | "idiom" : "iphone", 25 | "size" : "40x40", 26 | "scale" : "2x" 27 | }, 28 | { 29 | "idiom" : "iphone", 30 | "size" : "40x40", 31 | "scale" : "3x" 32 | }, 33 | { 34 | "idiom" : "iphone", 35 | "size" : "60x60", 36 | "scale" : "2x" 37 | }, 38 | { 39 | "idiom" : "iphone", 40 | "size" : "60x60", 41 | "scale" : "3x" 42 | }, 43 | { 44 | "idiom" : "ipad", 45 | "size" : "20x20", 46 | "scale" : "1x" 47 | }, 48 | { 49 | "idiom" : "ipad", 50 | "size" : "20x20", 51 | "scale" : "2x" 52 | }, 53 | { 54 | "idiom" : "ipad", 55 | "size" : "29x29", 56 | "scale" : "1x" 57 | }, 58 | { 59 | "idiom" : "ipad", 60 | "size" : "29x29", 61 | "scale" : "2x" 62 | }, 63 | { 64 | "idiom" : "ipad", 65 | "size" : "40x40", 66 | "scale" : "1x" 67 | }, 68 | { 69 | "idiom" : "ipad", 70 | "size" : "40x40", 71 | "scale" : "2x" 72 | }, 73 | { 74 | "idiom" : "ipad", 75 | "size" : "76x76", 76 | "scale" : "1x" 77 | }, 78 | { 79 | "idiom" : "ipad", 80 | "size" : "76x76", 81 | "scale" : "2x" 82 | }, 83 | { 84 | "idiom" : "ipad", 85 | "size" : "83.5x83.5", 86 | "scale" : "2x" 87 | }, 88 | { 89 | "idiom" : "ios-marketing", 90 | "size" : "1024x1024", 91 | "scale" : "1x" 92 | } 93 | ], 94 | "info" : { 95 | "version" : 1, 96 | "author" : "xcode" 97 | } 98 | } -------------------------------------------------------------------------------- /WebARonARKit/WebARonARKit/Assets.xcassets/BackIcon.imageset/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "images" : [ 3 | { 4 | "idiom" : "universal", 5 | "filename" : "ic_arrow_back_black_24px.png", 6 | "scale" : "1x" 7 | }, 8 | { 9 | 
"idiom" : "universal", 10 | "filename" : "ic_arrow_back_black_24px_2x.png", 11 | "scale" : "2x" 12 | }, 13 | { 14 | "idiom" : "universal", 15 | "filename" : "ic_arrow_back_black_24px_3x.png", 16 | "scale" : "3x" 17 | } 18 | ], 19 | "info" : { 20 | "version" : 1, 21 | "author" : "xcode" 22 | } 23 | } -------------------------------------------------------------------------------- /WebARonARKit/WebARonARKit/Assets.xcassets/BackIcon.imageset/LICENSE: -------------------------------------------------------------------------------- 1 | 2 | Apache License 3 | Version 2.0, January 2004 4 | http://www.apache.org/licenses/ 5 | 6 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 7 | 8 | 1. Definitions. 9 | 10 | "License" shall mean the terms and conditions for use, reproduction, 11 | and distribution as defined by Sections 1 through 9 of this document. 12 | 13 | "Licensor" shall mean the copyright owner or entity authorized by 14 | the copyright owner that is granting the License. 15 | 16 | "Legal Entity" shall mean the union of the acting entity and all 17 | other entities that control, are controlled by, or are under common 18 | control with that entity. For the purposes of this definition, 19 | "control" means (i) the power, direct or indirect, to cause the 20 | direction or management of such entity, whether by contract or 21 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 22 | outstanding shares, or (iii) beneficial ownership of such entity. 23 | 24 | "You" (or "Your") shall mean an individual or Legal Entity 25 | exercising permissions granted by this License. 26 | 27 | "Source" form shall mean the preferred form for making modifications, 28 | including but not limited to software source code, documentation 29 | source, and configuration files. 30 | 31 | "Object" form shall mean any form resulting from mechanical 32 | transformation or translation of a Source form, including but 33 | not limited to compiled object code, generated documentation, 34 | and conversions to other media types. 35 | 36 | "Work" shall mean the work of authorship, whether in Source or 37 | Object form, made available under the License, as indicated by a 38 | copyright notice that is included in or attached to the work 39 | (an example is provided in the Appendix below). 40 | 41 | "Derivative Works" shall mean any work, whether in Source or Object 42 | form, that is based on (or derived from) the Work and for which the 43 | editorial revisions, annotations, elaborations, or other modifications 44 | represent, as a whole, an original work of authorship. For the purposes 45 | of this License, Derivative Works shall not include works that remain 46 | separable from, or merely link (or bind by name) to the interfaces of, 47 | the Work and Derivative Works thereof. 48 | 49 | "Contribution" shall mean any work of authorship, including 50 | the original version of the Work and any modifications or additions 51 | to that Work or Derivative Works thereof, that is intentionally 52 | submitted to Licensor for inclusion in the Work by the copyright owner 53 | or by an individual or Legal Entity authorized to submit on behalf of 54 | the copyright owner. 
For the purposes of this definition, "submitted" 55 | means any form of electronic, verbal, or written communication sent 56 | to the Licensor or its representatives, including but not limited to 57 | communication on electronic mailing lists, source code control systems, 58 | and issue tracking systems that are managed by, or on behalf of, the 59 | Licensor for the purpose of discussing and improving the Work, but 60 | excluding communication that is conspicuously marked or otherwise 61 | designated in writing by the copyright owner as "Not a Contribution." 62 | 63 | "Contributor" shall mean Licensor and any individual or Legal Entity 64 | on behalf of whom a Contribution has been received by Licensor and 65 | subsequently incorporated within the Work. 66 | 67 | 2. Grant of Copyright License. Subject to the terms and conditions of 68 | this License, each Contributor hereby grants to You a perpetual, 69 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 70 | copyright license to reproduce, prepare Derivative Works of, 71 | publicly display, publicly perform, sublicense, and distribute the 72 | Work and such Derivative Works in Source or Object form. 73 | 74 | 3. Grant of Patent License. Subject to the terms and conditions of 75 | this License, each Contributor hereby grants to You a perpetual, 76 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 77 | (except as stated in this section) patent license to make, have made, 78 | use, offer to sell, sell, import, and otherwise transfer the Work, 79 | where such license applies only to those patent claims licensable 80 | by such Contributor that are necessarily infringed by their 81 | Contribution(s) alone or by combination of their Contribution(s) 82 | with the Work to which such Contribution(s) was submitted. If You 83 | institute patent litigation against any entity (including a 84 | cross-claim or counterclaim in a lawsuit) alleging that the Work 85 | or a Contribution incorporated within the Work constitutes direct 86 | or contributory patent infringement, then any patent licenses 87 | granted to You under this License for that Work shall terminate 88 | as of the date such litigation is filed. 89 | 90 | 4. Redistribution. 
You may reproduce and distribute copies of the 91 | Work or Derivative Works thereof in any medium, with or without 92 | modifications, and in Source or Object form, provided that You 93 | meet the following conditions: 94 | 95 | (a) You must give any other recipients of the Work or 96 | Derivative Works a copy of this License; and 97 | 98 | (b) You must cause any modified files to carry prominent notices 99 | stating that You changed the files; and 100 | 101 | (c) You must retain, in the Source form of any Derivative Works 102 | that You distribute, all copyright, patent, trademark, and 103 | attribution notices from the Source form of the Work, 104 | excluding those notices that do not pertain to any part of 105 | the Derivative Works; and 106 | 107 | (d) If the Work includes a "NOTICE" text file as part of its 108 | distribution, then any Derivative Works that You distribute must 109 | include a readable copy of the attribution notices contained 110 | within such NOTICE file, excluding those notices that do not 111 | pertain to any part of the Derivative Works, in at least one 112 | of the following places: within a NOTICE text file distributed 113 | as part of the Derivative Works; within the Source form or 114 | documentation, if provided along with the Derivative Works; or, 115 | within a display generated by the Derivative Works, if and 116 | wherever such third-party notices normally appear. The contents 117 | of the NOTICE file are for informational purposes only and 118 | do not modify the License. You may add Your own attribution 119 | notices within Derivative Works that You distribute, alongside 120 | or as an addendum to the NOTICE text from the Work, provided 121 | that such additional attribution notices cannot be construed 122 | as modifying the License. 123 | 124 | You may add Your own copyright statement to Your modifications and 125 | may provide additional or different license terms and conditions 126 | for use, reproduction, or distribution of Your modifications, or 127 | for any such Derivative Works as a whole, provided Your use, 128 | reproduction, and distribution of the Work otherwise complies with 129 | the conditions stated in this License. 130 | 131 | 5. Submission of Contributions. Unless You explicitly state otherwise, 132 | any Contribution intentionally submitted for inclusion in the Work 133 | by You to the Licensor shall be under the terms and conditions of 134 | this License, without any additional terms or conditions. 135 | Notwithstanding the above, nothing herein shall supersede or modify 136 | the terms of any separate license agreement you may have executed 137 | with Licensor regarding such Contributions. 138 | 139 | 6. Trademarks. This License does not grant permission to use the trade 140 | names, trademarks, service marks, or product names of the Licensor, 141 | except as required for reasonable and customary use in describing the 142 | origin of the Work and reproducing the content of the NOTICE file. 143 | 144 | 7. Disclaimer of Warranty. Unless required by applicable law or 145 | agreed to in writing, Licensor provides the Work (and each 146 | Contributor provides its Contributions) on an "AS IS" BASIS, 147 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 148 | implied, including, without limitation, any warranties or conditions 149 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 150 | PARTICULAR PURPOSE. 
You are solely responsible for determining the 151 | appropriateness of using or redistributing the Work and assume any 152 | risks associated with Your exercise of permissions under this License. 153 | 154 | 8. Limitation of Liability. In no event and under no legal theory, 155 | whether in tort (including negligence), contract, or otherwise, 156 | unless required by applicable law (such as deliberate and grossly 157 | negligent acts) or agreed to in writing, shall any Contributor be 158 | liable to You for damages, including any direct, indirect, special, 159 | incidental, or consequential damages of any character arising as a 160 | result of this License or out of the use or inability to use the 161 | Work (including but not limited to damages for loss of goodwill, 162 | work stoppage, computer failure or malfunction, or any and all 163 | other commercial damages or losses), even if such Contributor 164 | has been advised of the possibility of such damages. 165 | 166 | 9. Accepting Warranty or Additional Liability. While redistributing 167 | the Work or Derivative Works thereof, You may choose to offer, 168 | and charge a fee for, acceptance of support, warranty, indemnity, 169 | or other liability obligations and/or rights consistent with this 170 | License. However, in accepting such obligations, You may act only 171 | on Your own behalf and on Your sole responsibility, not on behalf 172 | of any other Contributor, and only if You agree to indemnify, 173 | defend, and hold each Contributor harmless for any liability 174 | incurred by, or claims asserted against, such Contributor by reason 175 | of your accepting any such warranty or additional liability. 176 | 177 | END OF TERMS AND CONDITIONS 178 | 179 | APPENDIX: How to apply the Apache License to your work. 180 | 181 | To apply the Apache License to your work, attach the following 182 | boilerplate notice, with the fields enclosed by brackets "[]" 183 | replaced with your own identifying information. (Don't include 184 | the brackets!) The text should be enclosed in the appropriate 185 | comment syntax for the file format. We also recommend that a 186 | file or class name and description of purpose be included on the 187 | same "printed page" as the copyright notice for easier 188 | identification within third-party archives. 189 | 190 | Copyright [yyyy] [name of copyright owner] 191 | 192 | Licensed under the Apache License, Version 2.0 (the "License"); 193 | you may not use this file except in compliance with the License. 194 | You may obtain a copy of the License at 195 | 196 | http://www.apache.org/licenses/LICENSE-2.0 197 | 198 | Unless required by applicable law or agreed to in writing, software 199 | distributed under the License is distributed on an "AS IS" BASIS, 200 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 201 | See the License for the specific language governing permissions and 202 | limitations under the License. 
203 | -------------------------------------------------------------------------------- /WebARonARKit/WebARonARKit/Assets.xcassets/BackIcon.imageset/ic_arrow_back_black_24px.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/google-ar/WebARonARKit/995c54bfc2b8b6b751d1b8adfd75c9305f9c552e/WebARonARKit/WebARonARKit/Assets.xcassets/BackIcon.imageset/ic_arrow_back_black_24px.png -------------------------------------------------------------------------------- /WebARonARKit/WebARonARKit/Assets.xcassets/BackIcon.imageset/ic_arrow_back_black_24px_2x.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/google-ar/WebARonARKit/995c54bfc2b8b6b751d1b8adfd75c9305f9c552e/WebARonARKit/WebARonARKit/Assets.xcassets/BackIcon.imageset/ic_arrow_back_black_24px_2x.png -------------------------------------------------------------------------------- /WebARonARKit/WebARonARKit/Assets.xcassets/BackIcon.imageset/ic_arrow_back_black_24px_3x.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/google-ar/WebARonARKit/995c54bfc2b8b6b751d1b8adfd75c9305f9c552e/WebARonARKit/WebARonARKit/Assets.xcassets/BackIcon.imageset/ic_arrow_back_black_24px_3x.png -------------------------------------------------------------------------------- /WebARonARKit/WebARonARKit/Assets.xcassets/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "info" : { 3 | "version" : 1, 4 | "author" : "xcode" 5 | } 6 | } -------------------------------------------------------------------------------- /WebARonARKit/WebARonARKit/Assets.xcassets/RefreshIcon.imageset/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "images" : [ 3 | { 4 | "idiom" : "universal", 5 | "filename" : "ic_refresh_black_24px.png", 6 | "scale" : "1x" 7 | }, 8 | { 9 | "idiom" : "universal", 10 | "filename" : "ic_refresh_black_24px_2x.png", 11 | "scale" : "2x" 12 | }, 13 | { 14 | "idiom" : "universal", 15 | "filename" : "ic_refresh_black_24px_3x.png", 16 | "scale" : "3x" 17 | } 18 | ], 19 | "info" : { 20 | "version" : 1, 21 | "author" : "xcode" 22 | } 23 | } -------------------------------------------------------------------------------- /WebARonARKit/WebARonARKit/Assets.xcassets/RefreshIcon.imageset/LICENSE: -------------------------------------------------------------------------------- 1 | 2 | Apache License 3 | Version 2.0, January 2004 4 | http://www.apache.org/licenses/ 5 | 6 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 7 | 8 | 1. Definitions. 9 | 10 | "License" shall mean the terms and conditions for use, reproduction, 11 | and distribution as defined by Sections 1 through 9 of this document. 12 | 13 | "Licensor" shall mean the copyright owner or entity authorized by 14 | the copyright owner that is granting the License. 15 | 16 | "Legal Entity" shall mean the union of the acting entity and all 17 | other entities that control, are controlled by, or are under common 18 | control with that entity. For the purposes of this definition, 19 | "control" means (i) the power, direct or indirect, to cause the 20 | direction or management of such entity, whether by contract or 21 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 22 | outstanding shares, or (iii) beneficial ownership of such entity. 
23 | 24 | "You" (or "Your") shall mean an individual or Legal Entity 25 | exercising permissions granted by this License. 26 | 27 | "Source" form shall mean the preferred form for making modifications, 28 | including but not limited to software source code, documentation 29 | source, and configuration files. 30 | 31 | "Object" form shall mean any form resulting from mechanical 32 | transformation or translation of a Source form, including but 33 | not limited to compiled object code, generated documentation, 34 | and conversions to other media types. 35 | 36 | "Work" shall mean the work of authorship, whether in Source or 37 | Object form, made available under the License, as indicated by a 38 | copyright notice that is included in or attached to the work 39 | (an example is provided in the Appendix below). 40 | 41 | "Derivative Works" shall mean any work, whether in Source or Object 42 | form, that is based on (or derived from) the Work and for which the 43 | editorial revisions, annotations, elaborations, or other modifications 44 | represent, as a whole, an original work of authorship. For the purposes 45 | of this License, Derivative Works shall not include works that remain 46 | separable from, or merely link (or bind by name) to the interfaces of, 47 | the Work and Derivative Works thereof. 48 | 49 | "Contribution" shall mean any work of authorship, including 50 | the original version of the Work and any modifications or additions 51 | to that Work or Derivative Works thereof, that is intentionally 52 | submitted to Licensor for inclusion in the Work by the copyright owner 53 | or by an individual or Legal Entity authorized to submit on behalf of 54 | the copyright owner. For the purposes of this definition, "submitted" 55 | means any form of electronic, verbal, or written communication sent 56 | to the Licensor or its representatives, including but not limited to 57 | communication on electronic mailing lists, source code control systems, 58 | and issue tracking systems that are managed by, or on behalf of, the 59 | Licensor for the purpose of discussing and improving the Work, but 60 | excluding communication that is conspicuously marked or otherwise 61 | designated in writing by the copyright owner as "Not a Contribution." 62 | 63 | "Contributor" shall mean Licensor and any individual or Legal Entity 64 | on behalf of whom a Contribution has been received by Licensor and 65 | subsequently incorporated within the Work. 66 | 67 | 2. Grant of Copyright License. Subject to the terms and conditions of 68 | this License, each Contributor hereby grants to You a perpetual, 69 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 70 | copyright license to reproduce, prepare Derivative Works of, 71 | publicly display, publicly perform, sublicense, and distribute the 72 | Work and such Derivative Works in Source or Object form. 73 | 74 | 3. Grant of Patent License. Subject to the terms and conditions of 75 | this License, each Contributor hereby grants to You a perpetual, 76 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 77 | (except as stated in this section) patent license to make, have made, 78 | use, offer to sell, sell, import, and otherwise transfer the Work, 79 | where such license applies only to those patent claims licensable 80 | by such Contributor that are necessarily infringed by their 81 | Contribution(s) alone or by combination of their Contribution(s) 82 | with the Work to which such Contribution(s) was submitted. 
If You 83 | institute patent litigation against any entity (including a 84 | cross-claim or counterclaim in a lawsuit) alleging that the Work 85 | or a Contribution incorporated within the Work constitutes direct 86 | or contributory patent infringement, then any patent licenses 87 | granted to You under this License for that Work shall terminate 88 | as of the date such litigation is filed. 89 | 90 | 4. Redistribution. You may reproduce and distribute copies of the 91 | Work or Derivative Works thereof in any medium, with or without 92 | modifications, and in Source or Object form, provided that You 93 | meet the following conditions: 94 | 95 | (a) You must give any other recipients of the Work or 96 | Derivative Works a copy of this License; and 97 | 98 | (b) You must cause any modified files to carry prominent notices 99 | stating that You changed the files; and 100 | 101 | (c) You must retain, in the Source form of any Derivative Works 102 | that You distribute, all copyright, patent, trademark, and 103 | attribution notices from the Source form of the Work, 104 | excluding those notices that do not pertain to any part of 105 | the Derivative Works; and 106 | 107 | (d) If the Work includes a "NOTICE" text file as part of its 108 | distribution, then any Derivative Works that You distribute must 109 | include a readable copy of the attribution notices contained 110 | within such NOTICE file, excluding those notices that do not 111 | pertain to any part of the Derivative Works, in at least one 112 | of the following places: within a NOTICE text file distributed 113 | as part of the Derivative Works; within the Source form or 114 | documentation, if provided along with the Derivative Works; or, 115 | within a display generated by the Derivative Works, if and 116 | wherever such third-party notices normally appear. The contents 117 | of the NOTICE file are for informational purposes only and 118 | do not modify the License. You may add Your own attribution 119 | notices within Derivative Works that You distribute, alongside 120 | or as an addendum to the NOTICE text from the Work, provided 121 | that such additional attribution notices cannot be construed 122 | as modifying the License. 123 | 124 | You may add Your own copyright statement to Your modifications and 125 | may provide additional or different license terms and conditions 126 | for use, reproduction, or distribution of Your modifications, or 127 | for any such Derivative Works as a whole, provided Your use, 128 | reproduction, and distribution of the Work otherwise complies with 129 | the conditions stated in this License. 130 | 131 | 5. Submission of Contributions. Unless You explicitly state otherwise, 132 | any Contribution intentionally submitted for inclusion in the Work 133 | by You to the Licensor shall be under the terms and conditions of 134 | this License, without any additional terms or conditions. 135 | Notwithstanding the above, nothing herein shall supersede or modify 136 | the terms of any separate license agreement you may have executed 137 | with Licensor regarding such Contributions. 138 | 139 | 6. Trademarks. This License does not grant permission to use the trade 140 | names, trademarks, service marks, or product names of the Licensor, 141 | except as required for reasonable and customary use in describing the 142 | origin of the Work and reproducing the content of the NOTICE file. 143 | 144 | 7. Disclaimer of Warranty. 
Unless required by applicable law or 145 | agreed to in writing, Licensor provides the Work (and each 146 | Contributor provides its Contributions) on an "AS IS" BASIS, 147 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 148 | implied, including, without limitation, any warranties or conditions 149 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 150 | PARTICULAR PURPOSE. You are solely responsible for determining the 151 | appropriateness of using or redistributing the Work and assume any 152 | risks associated with Your exercise of permissions under this License. 153 | 154 | 8. Limitation of Liability. In no event and under no legal theory, 155 | whether in tort (including negligence), contract, or otherwise, 156 | unless required by applicable law (such as deliberate and grossly 157 | negligent acts) or agreed to in writing, shall any Contributor be 158 | liable to You for damages, including any direct, indirect, special, 159 | incidental, or consequential damages of any character arising as a 160 | result of this License or out of the use or inability to use the 161 | Work (including but not limited to damages for loss of goodwill, 162 | work stoppage, computer failure or malfunction, or any and all 163 | other commercial damages or losses), even if such Contributor 164 | has been advised of the possibility of such damages. 165 | 166 | 9. Accepting Warranty or Additional Liability. While redistributing 167 | the Work or Derivative Works thereof, You may choose to offer, 168 | and charge a fee for, acceptance of support, warranty, indemnity, 169 | or other liability obligations and/or rights consistent with this 170 | License. However, in accepting such obligations, You may act only 171 | on Your own behalf and on Your sole responsibility, not on behalf 172 | of any other Contributor, and only if You agree to indemnify, 173 | defend, and hold each Contributor harmless for any liability 174 | incurred by, or claims asserted against, such Contributor by reason 175 | of your accepting any such warranty or additional liability. 176 | 177 | END OF TERMS AND CONDITIONS 178 | 179 | APPENDIX: How to apply the Apache License to your work. 180 | 181 | To apply the Apache License to your work, attach the following 182 | boilerplate notice, with the fields enclosed by brackets "[]" 183 | replaced with your own identifying information. (Don't include 184 | the brackets!) The text should be enclosed in the appropriate 185 | comment syntax for the file format. We also recommend that a 186 | file or class name and description of purpose be included on the 187 | same "printed page" as the copyright notice for easier 188 | identification within third-party archives. 189 | 190 | Copyright [yyyy] [name of copyright owner] 191 | 192 | Licensed under the Apache License, Version 2.0 (the "License"); 193 | you may not use this file except in compliance with the License. 194 | You may obtain a copy of the License at 195 | 196 | http://www.apache.org/licenses/LICENSE-2.0 197 | 198 | Unless required by applicable law or agreed to in writing, software 199 | distributed under the License is distributed on an "AS IS" BASIS, 200 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 201 | See the License for the specific language governing permissions and 202 | limitations under the License. 
203 | -------------------------------------------------------------------------------- /WebARonARKit/WebARonARKit/Assets.xcassets/RefreshIcon.imageset/ic_refresh_black_24px.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/google-ar/WebARonARKit/995c54bfc2b8b6b751d1b8adfd75c9305f9c552e/WebARonARKit/WebARonARKit/Assets.xcassets/RefreshIcon.imageset/ic_refresh_black_24px.png -------------------------------------------------------------------------------- /WebARonARKit/WebARonARKit/Assets.xcassets/RefreshIcon.imageset/ic_refresh_black_24px_2x.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/google-ar/WebARonARKit/995c54bfc2b8b6b751d1b8adfd75c9305f9c552e/WebARonARKit/WebARonARKit/Assets.xcassets/RefreshIcon.imageset/ic_refresh_black_24px_2x.png -------------------------------------------------------------------------------- /WebARonARKit/WebARonARKit/Assets.xcassets/RefreshIcon.imageset/ic_refresh_black_24px_3x.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/google-ar/WebARonARKit/995c54bfc2b8b6b751d1b8adfd75c9305f9c552e/WebARonARKit/WebARonARKit/Assets.xcassets/RefreshIcon.imageset/ic_refresh_black_24px_3x.png -------------------------------------------------------------------------------- /WebARonARKit/WebARonARKit/Base.lproj/LaunchScreen.storyboard: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | -------------------------------------------------------------------------------- /WebARonARKit/WebARonARKit/Base.lproj/Main.storyboard: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | -------------------------------------------------------------------------------- /WebARonARKit/WebARonARKit/Info.plist: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | CFBundleDevelopmentRegion 6 | $(DEVELOPMENT_LANGUAGE) 7 | CFBundleExecutable 8 | $(EXECUTABLE_NAME) 9 | CFBundleIdentifier 10 | $(PRODUCT_BUNDLE_IDENTIFIER) 11 | CFBundleInfoDictionaryVersion 12 | 6.0 13 | CFBundleName 14 | $(PRODUCT_NAME) 15 | CFBundlePackageType 16 | APPL 17 | CFBundleShortVersionString 18 | 1.0 19 | CFBundleURLTypes 20 | 21 | 22 | CFBundleURLName 23 | com.google.webar 24 | CFBundleURLSchemes 25 | 26 | webar 27 | 28 | 29 | 30 | CFBundleVersion 31 | 1 32 | LSRequiresIPhoneOS 33 | 34 | NSAppTransportSecurity 35 | 36 | NSAllowsArbitraryLoads 37 | 38 | 39 | NSCameraUsageDescription 40 | This application will use the camera for Augmented Reality. 41 | NSLocationWhenInUseUsageDescription 42 | Allow location access from the WKWebView whenever is requested. 
43 | UILaunchStoryboardName 44 | LaunchScreen 45 | UIMainStoryboardFile 46 | Main 47 | UIRequiredDeviceCapabilities 48 | 49 | armv7 50 | metal 51 | 52 | UIRequiresFullScreen 53 | 54 | UIStatusBarHidden 55 | 56 | UISupportedInterfaceOrientations 57 | 58 | UIInterfaceOrientationLandscapeRight 59 | UIInterfaceOrientationPortrait 60 | UIInterfaceOrientationPortraitUpsideDown 61 | UIInterfaceOrientationLandscapeLeft 62 | 63 | UISupportedInterfaceOrientations~ipad 64 | 65 | UIInterfaceOrientationPortrait 66 | UIInterfaceOrientationPortraitUpsideDown 67 | UIInterfaceOrientationLandscapeLeft 68 | UIInterfaceOrientationLandscapeRight 69 | 70 | UIViewControllerBasedStatusBarAppearance 71 | 72 | 73 | 74 | -------------------------------------------------------------------------------- /WebARonARKit/WebARonARKit/NavigationView.h: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017 Google Inc. All Rights Reserved. 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | #import 18 | 19 | @interface NavigationView : UIView 20 | 21 | @end 22 | -------------------------------------------------------------------------------- /WebARonARKit/WebARonARKit/NavigationView.m: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017 Google Inc. All Rights Reserved. 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | #include 18 | 19 | #import "NavigationView.h" 20 | 21 | @interface NavigationView () 22 | 23 | @end 24 | 25 | @implementation NavigationView 26 | 27 | - (void)drawRect:(CGRect)rect { 28 | [super drawRect:rect]; 29 | CGContextRef context = UIGraphicsGetCurrentContext(); 30 | UIColor *lineColor = [UIColor colorWithRed:0.5 green:0.5 blue:0.5 alpha:0.25]; 31 | CGContextSetStrokeColorWithColor(context, lineColor.CGColor); 32 | CGContextSetLineWidth(context, 0.5f); 33 | CGContextMoveToPoint(context, 0, rect.size.height); 34 | CGContextAddLineToPoint(context, rect.size.width, rect.size.height); 35 | CGContextStrokePath(context); 36 | } 37 | 38 | @end 39 | -------------------------------------------------------------------------------- /WebARonARKit/WebARonARKit/ProgressView.h: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017 Google Inc. All Rights Reserved. 
3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | #import 18 | 19 | @interface ProgressView : UIView 20 | 21 | @property(nonatomic, strong, null_resettable) UIColor *progressFillColor; 22 | @property(nonatomic, strong, null_resettable) UIColor *progressBackgroundColor; 23 | @property(nonatomic, assign) float progressValue; 24 | @property(nonatomic, assign) NSTimeInterval animationDuration; 25 | 26 | - (void)setProgressValue:(float)value 27 | animated:(BOOL)animated 28 | completion:(void (^__nullable)(BOOL complete))completion; 29 | 30 | - (void)setHidden:(BOOL)hidden 31 | animated:(BOOL)animated 32 | completion:(void (^__nullable)(BOOL complete))completion; 33 | 34 | @end 35 | -------------------------------------------------------------------------------- /WebARonARKit/WebARonARKit/ProgressView.m: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017 Google Inc. All Rights Reserved. 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
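(Editor's note: a minimal, hypothetical usage sketch for the ProgressView interface declared above. The helper names and the idea of driving the bar from a web view's load progress are assumptions for illustration; only the ProgressView API itself comes from this repository.)

#import <UIKit/UIKit.h>
#import "ProgressView.h"

// Create a thin loading bar pinned to the bottom of a container view (hypothetical helper).
static ProgressView *WARCreateLoadingBar(UIView *container) {
    CGRect frame = CGRectMake(0, CGRectGetHeight(container.bounds) - 4,
                              CGRectGetWidth(container.bounds), 4);
    ProgressView *bar = [[ProgressView alloc] initWithFrame:frame];
    bar.progressFillColor = [UIColor blueColor];
    bar.progressBackgroundColor = [UIColor lightGrayColor];
    bar.animationDuration = 0.25;
    [container addSubview:bar];
    return bar;
}

// Push a new progress value (e.g. from KVO on a WKWebView's estimatedProgress)
// and collapse the bar once loading completes.
static void WARUpdateLoadingBar(ProgressView *bar, float progress) {
    [bar setProgressValue:progress animated:YES completion:^(BOOL complete) {
        if (progress >= 1.0f) {
            [bar setHidden:YES animated:YES completion:nil];
        }
    }];
}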
15 | */ 16 | 17 | #include 18 | 19 | #import "ProgressView.h" 20 | 21 | @interface ProgressView () 22 | @property(nonatomic, strong) UIView *progressFillView; 23 | @property(nonatomic, strong) UIView *progressBackgroundView; 24 | @property(nonatomic) BOOL animateHide; 25 | @end 26 | 27 | @implementation ProgressView 28 | 29 | - (instancetype)initWithFrame:(CGRect)frame { 30 | self = [super initWithFrame:frame]; 31 | if (self) { 32 | [self progressViewInit]; 33 | } 34 | return self; 35 | } 36 | 37 | - (instancetype)initWithCoder:(NSCoder *)aDecoder { 38 | self = [super initWithCoder:aDecoder]; 39 | if (self) { 40 | [self progressViewInit]; 41 | } 42 | return self; 43 | } 44 | 45 | - (void)progressViewInit { 46 | self.autoresizingMask = UIViewAutoresizingFlexibleWidth | UIViewAutoresizingFlexibleTopMargin; 47 | self.backgroundColor = [UIColor clearColor]; 48 | self.clipsToBounds = YES; 49 | self.isAccessibilityElement = YES; 50 | 51 | self.progressBackgroundView = [[UIView alloc] initWithFrame:self.frame]; 52 | [self.progressBackgroundView setAutoresizingMask:UIViewAutoresizingFlexibleWidth]; 53 | [self addSubview:self.progressBackgroundView]; 54 | 55 | self.progressFillView = [[UIView alloc] initWithFrame:CGRectZero]; 56 | [self addSubview:self.progressFillView]; 57 | 58 | [self.progressFillView 59 | setBackgroundColor:[UIColor colorWithRed:1.0 green:0.0 blue:1.0 alpha:1.0]]; 60 | [self.progressBackgroundView 61 | setBackgroundColor:[UIColor colorWithRed:0.0 green:1.0 blue:1.0 alpha:1.0]]; 62 | 63 | self.progressValue = 0.0; 64 | self.animationDuration = 0.25; 65 | } 66 | 67 | - (void)layoutSubviews { 68 | [super layoutSubviews]; 69 | if (!self.animateHide) { 70 | [self updateProgressBackgroundView]; 71 | [self updateProgressFillView]; 72 | } 73 | } 74 | 75 | - (UIColor *)progressFillColor { 76 | return self.progressFillView.backgroundColor; 77 | } 78 | 79 | - (void)setProgressFillColor:(UIColor *)fillColor { 80 | if (fillColor == nil) { 81 | fillColor = [UIColor colorWithRed:1.0 green:0.0 blue:1.0 alpha:1.0]; 82 | } 83 | [self.progressFillView setBackgroundColor:fillColor]; 84 | } 85 | 86 | - (UIColor *)progressBackgroundColor { 87 | return self.progressBackgroundView.backgroundColor; 88 | } 89 | 90 | - (void)setProgressBackgroundColor:(UIColor *)backgroundColor { 91 | if (backgroundColor == nil) { 92 | backgroundColor = [UIColor colorWithRed:0.0 green:1.0 blue:1.0 alpha:1.0]; 93 | } 94 | [self.progressBackgroundView setBackgroundColor:backgroundColor]; 95 | } 96 | 97 | - (void)setProgressValue:(float)value { 98 | _progressValue = MAX(MIN(value, 1.0), 0.0); 99 | [self setNeedsLayout]; 100 | } 101 | 102 | - (void)setProgressValue:(float)value 103 | animated:(BOOL)animated 104 | completion:(void (^__nullable)(BOOL complete))completion { 105 | _progressValue = value; 106 | [UIView animateWithDuration:animated ? self.animationDuration : 0 107 | delay:0 108 | options:UIViewAnimationOptionCurveLinear 109 | animations:^{ 110 | [self updateProgressFillView]; 111 | } 112 | completion:completion]; 113 | } 114 | 115 | - (void)setHidden:(BOOL)hidden { 116 | [super setHidden:hidden]; 117 | UIAccessibilityPostNotification(UIAccessibilityLayoutChangedNotification, hidden ? 
nil : self); 118 | } 119 | 120 | - (void)setHidden:(BOOL)hidden 121 | animated:(BOOL)animated 122 | completion:(void (^__nullable)(BOOL complete))completion { 123 | if (hidden == self.hidden) { 124 | if (completion) { 125 | completion(YES); 126 | } 127 | return; 128 | } 129 | 130 | void (^animations)(void); 131 | 132 | if (hidden) { 133 | self.animateHide = YES; 134 | animations = ^{ 135 | CGFloat y = CGRectGetHeight(self.bounds); 136 | 137 | CGRect backgroundViewFrame = self.progressBackgroundView.frame; 138 | backgroundViewFrame.origin.y = y; 139 | backgroundViewFrame.size.height = 0; 140 | self.progressBackgroundView.frame = backgroundViewFrame; 141 | 142 | CGRect fillViewFrame = self.progressFillView.frame; 143 | fillViewFrame.origin.y = y; 144 | fillViewFrame.size.height = 0; 145 | self.progressFillView.frame = fillViewFrame; 146 | }; 147 | } else { 148 | self.hidden = NO; 149 | animations = ^{ 150 | self.progressBackgroundView.frame = self.bounds; 151 | 152 | CGRect fillViewFrame = self.progressFillView.frame; 153 | fillViewFrame.origin.y = 0; 154 | fillViewFrame.size.height = CGRectGetHeight(self.bounds); 155 | self.progressFillView.frame = fillViewFrame; 156 | }; 157 | } 158 | 159 | [UIView animateWithDuration:animated ? self.animationDuration : 0 160 | delay:0 161 | options:UIViewAnimationOptionCurveLinear 162 | animations:animations 163 | completion:^(BOOL complete) { 164 | if (hidden) { 165 | self.animateHide = NO; 166 | self.hidden = YES; 167 | } 168 | if (completion) { 169 | completion(complete); 170 | } 171 | }]; 172 | } 173 | 174 | - (void)updateProgressFillView { 175 | CGFloat progressWidth = ceilf(self.progressValue * CGRectGetWidth(self.bounds)); 176 | CGRect progressFrame = CGRectMake(0, 0, progressWidth, CGRectGetHeight(self.bounds)); 177 | [self.progressFillView setFrame:progressFrame]; 178 | } 179 | 180 | - (void)updateProgressBackgroundView { 181 | const CGSize size = self.bounds.size; 182 | CGRect frame = self.hidden ? CGRectMake(0.0, size.height, size.width, 0.0) : self.bounds; 183 | [self.progressBackgroundView setFrame:frame]; 184 | } 185 | 186 | @end 187 | -------------------------------------------------------------------------------- /WebARonARKit/WebARonARKit/Renderer.h: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017 Google Inc. All Rights Reserved. 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | #import 18 | #import 19 | 20 | NS_ASSUME_NONNULL_BEGIN 21 | 22 | /* 23 | Protocol abstracting the platform specific view in order to keep the Renderer 24 | class independent from platform. 
25 | */ 26 | @protocol RenderDestinationProvider 27 | 28 | @property(nonatomic, readonly, nullable) MTLRenderPassDescriptor *currentRenderPassDescriptor; 29 | @property(nonatomic, readonly, nullable) id currentDrawable; 30 | 31 | @property(nonatomic) MTLPixelFormat colorPixelFormat; 32 | @property(nonatomic) MTLPixelFormat depthStencilPixelFormat; 33 | @property(nonatomic) NSUInteger sampleCount; 34 | 35 | @end 36 | 37 | /* 38 | The main class performing the rendering of a session. 39 | */ 40 | @interface Renderer : NSObject { 41 | @public 42 | CGSize viewportSize; 43 | UIInterfaceOrientation interfaceOrientation; 44 | } 45 | 46 | @property(atomic) bool cameraRenderEnabled; 47 | 48 | - (instancetype)initWithSession:(ARSession *)session 49 | metalDevice:(id)device 50 | renderDestinationProvider:(id)renderDestinationProvider; 51 | 52 | - (void)drawRectResized:(CGSize)size; 53 | 54 | - (void)update; 55 | 56 | - (void)setInterfaceOrientation:(UIInterfaceOrientation)interfaceOrientation; 57 | 58 | @end 59 | 60 | NS_ASSUME_NONNULL_END 61 | 62 | -------------------------------------------------------------------------------- /WebARonARKit/WebARonARKit/Renderer.m: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017 Google Inc. All Rights Reserved. 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
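(Editor's note: Renderer.h above leaves RenderDestinationProvider abstract; the repository's own provider presumably lives in ViewController.m, which is not part of this excerpt. The following is therefore only a plausible sketch: because MTKView already exposes properties with the same names and types as the protocol, adopting the protocol in a category is enough to let an MTKView act as the render destination.)

#import <MetalKit/MetalKit.h>
#import "Renderer.h"

// MTKView already implements currentRenderPassDescriptor, currentDrawable,
// colorPixelFormat, depthStencilPixelFormat and sampleCount, so the category
// (hypothetical name) only needs to declare the conformance.
@interface MTKView (WARRenderDestination) <RenderDestinationProvider>
@end

// A hypothetical owner could then create the renderer like this:
//   MTKView *view = ...;                       // the Metal-backed view on screen
//   view.device = MTLCreateSystemDefaultDevice();
//   Renderer *renderer = [[Renderer alloc] initWithSession:session
//                                              metalDevice:view.device
//                                renderDestinationProvider:view];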
15 | */ 16 | 17 | #import 18 | #import 19 | #import 20 | 21 | #import "Renderer.h" 22 | 23 | // Include the header shared between the C code here, which executes Metal API commands, 24 | // and the .metal files 25 | #import "ShaderTypes.h" 26 | 27 | // The max number of command buffers in flight 28 | static const NSUInteger kMaxBuffersInFlight = 3; 29 | 30 | // The max number of anchors our uniform buffer will hold 31 | static const NSUInteger kMaxAnchorInstanceCount = 64; 32 | 33 | // The 256-byte-aligned size of our uniform structures 34 | static const size_t kAlignedSharedUniformsSize = (sizeof(SharedUniforms) & ~0xFF) + 0x100; 35 | static const size_t kAlignedInstanceUniformsSize = 36 | ((sizeof(InstanceUniforms) * kMaxAnchorInstanceCount) & ~0xFF) + 0x100; 37 | 38 | // Vertex data for an image plane 39 | static const float kImagePlaneVertexData[16] = { 40 | -1.0, -1.0, 0.0, 1.0, 1.0, -1.0, 1.0, 1.0, -1.0, 1.0, 0.0, 0.0, 1.0, 1.0, 1.0, 0.0, 41 | }; 42 | 43 | @implementation Renderer { 44 | // The session the renderer will render 45 | ARSession *_session; 46 | 47 | // The object controlling the ultimate render destination 48 | __weak id _renderDestination; 49 | 50 | dispatch_semaphore_t _inFlightSemaphore; 51 | 52 | // Metal objects 53 | id _device; 54 | id _commandQueue; 55 | id _sharedUniformBuffer; 56 | id _anchorUniformBuffer; 57 | id _imagePlaneVertexBuffer; 58 | id _capturedImagePipelineState; 59 | id _capturedImageDepthState; 60 | id _anchorPipelineState; 61 | id _anchorDepthState; 62 | id _capturedImageTextureY; 63 | id _capturedImageTextureCbCr; 64 | 65 | // Captured image texture cache 66 | CVMetalTextureCacheRef _capturedImageTextureCache; 67 | 68 | // Metal vertex descriptor specifying how vertices will be laid out for input 69 | // into our 70 | // anchor geometry render pipeline and how we'll lay out our Model IO 71 | // vertices 72 | MTLVertexDescriptor *_geometryVertexDescriptor; 73 | 74 | // MetalKit mesh containing vertex data and index buffer for our anchor 75 | // geometry 76 | MTKMesh *_cubeMesh; 77 | 78 | // Used to determine _uniformBufferStride each frame.
79 | // This is the current frame number modulo kMaxBuffersInFlight 80 | uint8_t _uniformBufferIndex; 81 | 82 | // Offset within _sharedUniformBuffer to set for the current frame 83 | uint32_t _sharedUniformBufferOffset; 84 | 85 | // Offset within _anchorUniformBuffer to set for the current frame 86 | uint32_t _anchorUniformBufferOffset; 87 | 88 | // Addresses to write shared uniforms to each frame 89 | void *_sharedUniformBufferAddress; 90 | 91 | // Addresses to write anchor uniforms to each frame 92 | void *_anchorUniformBufferAddress; 93 | 94 | // The number of anchor instances to render 95 | NSUInteger _anchorInstanceCount; 96 | 97 | // Flag for viewport size changes 98 | BOOL _viewportSizeDidChange; 99 | } 100 | 101 | - (instancetype)initWithSession:(ARSession *)session 102 | metalDevice:(id)device 103 | renderDestinationProvider:(id)renderDestinationProvider { 104 | self = [super init]; 105 | if (self) { 106 | _session = session; 107 | _device = device; 108 | _renderDestination = renderDestinationProvider; 109 | _inFlightSemaphore = dispatch_semaphore_create(kMaxBuffersInFlight); 110 | self.cameraRenderEnabled = true; 111 | [self _loadMetal]; 112 | [self _loadAssets]; 113 | } 114 | 115 | return self; 116 | } 117 | 118 | - (void)drawRectResized:(CGSize)size { 119 | self->viewportSize = size; 120 | _viewportSizeDidChange = YES; 121 | } 122 | 123 | - (void)update { 124 | // Wait to ensure only kMaxBuffersInFlight are getting processed by any stage 125 | // in the Metal 126 | // pipeline (App, Metal, Drivers, GPU, etc.) 127 | dispatch_semaphore_wait(_inFlightSemaphore, DISPATCH_TIME_FOREVER); 128 | 129 | // Create a new command buffer for each render pass to the current drawable 130 | id commandBuffer = [_commandQueue commandBuffer]; 131 | commandBuffer.label = @"MyCommand"; 132 | 133 | // Add a completion handler which signals _inFlightSemaphore when Metal and the 134 | // GPU have fully 135 | // finished processing the commands we're encoding this frame. This 136 | // indicates when the dynamic buffers that we're writing to this frame 137 | // will no longer be needed by Metal and the GPU.
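// Note (editor, not in the original source): this is the standard Metal
// triple-buffering pattern. _inFlightSemaphore starts at kMaxBuffersInFlight (3),
// -update waits on it before touching the per-frame uniform slots, and the
// completed handler below signals it once the GPU has finished with that slot.
// The slot sizes come from the 256-byte-aligned constants near the top of this
// file: (sizeof(SharedUniforms) & ~0xFF) + 0x100 drops the low 8 bits and adds
// 0x100, so e.g. a 208-byte struct occupies a 256-byte slot, which satisfies the
// alignment required for buffer offsets used as constant-address-space arguments.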
138 | __block dispatch_semaphore_t block_sema = _inFlightSemaphore; 139 | [commandBuffer addCompletedHandler:^(id buffer) { 140 | dispatch_semaphore_signal(block_sema); 141 | }]; 142 | 143 | [self _updateBufferStates]; 144 | [self _updateGameState]; 145 | 146 | // Obtain a renderPassDescriptor generated from the view's drawable textures 147 | MTLRenderPassDescriptor *renderPassDescriptor = _renderDestination.currentRenderPassDescriptor; 148 | 149 | // If we've gotten a renderPassDescriptor we can render to the drawable, 150 | // otherwise we'll skip 151 | // any rendering this frame because we have no drawable to draw to 152 | if (renderPassDescriptor != nil) { 153 | // Create a render command encoder so we can render into something 154 | id renderEncoder = 155 | [commandBuffer renderCommandEncoderWithDescriptor:renderPassDescriptor]; 156 | renderEncoder.label = @"MyRenderEncoder"; 157 | 158 | [self _drawCapturedImageWithCommandEncoder:renderEncoder]; 159 | [self _drawAnchorGeometryWithCommandEncoder:renderEncoder]; 160 | 161 | // We're done encoding commands 162 | [renderEncoder endEncoding]; 163 | } 164 | 165 | // Schedule a present once the framebuffer is complete using the current 166 | // drawable 167 | [commandBuffer presentDrawable:_renderDestination.currentDrawable]; 168 | 169 | // Finalize rendering here & push the command buffer to the GPU 170 | [commandBuffer commit]; 171 | } 172 | 173 | - (void)setInterfaceOrientation:(UIInterfaceOrientation)interfaceOrientation { 174 | self->interfaceOrientation = interfaceOrientation; 175 | } 176 | 177 | #pragma mark - Private 178 | 179 | - (void)_loadMetal { 180 | // Create and load our basic Metal state objects 181 | 182 | // Set the default formats needed to render 183 | _renderDestination.depthStencilPixelFormat = MTLPixelFormatDepth32Float_Stencil8; 184 | _renderDestination.colorPixelFormat = MTLPixelFormatBGRA8Unorm; 185 | _renderDestination.sampleCount = 1; 186 | 187 | // Calculate our uniform buffer sizes. We allocate kMaxBuffersInFlight 188 | // instances for uniform 189 | // storage in a single buffer. This allows us to update uniforms in a ring 190 | // (i.e. triple buffer the uniforms) so that the GPU reads from one slot in 191 | // the ring wil the CPU writes to another. Anchor uniforms should be 192 | // specified with a max instance count for instancing. Also uniform storage 193 | // must be aligned (to 256 bytes) to meet the requirements to be an argument 194 | // in the constant address space of our shading functions. 195 | const NSUInteger sharedUniformBufferSize = kAlignedSharedUniformsSize * kMaxBuffersInFlight; 196 | const NSUInteger anchorUniformBufferSize = kAlignedInstanceUniformsSize * kMaxBuffersInFlight; 197 | 198 | // Create and allocate our uniform buffer objects. Indicate shared storage so 199 | // that both the 200 | // CPU can access the buffer 201 | _sharedUniformBuffer = 202 | [_device newBufferWithLength:sharedUniformBufferSize options:MTLResourceStorageModeShared]; 203 | 204 | _sharedUniformBuffer.label = @"SharedUniformBuffer"; 205 | 206 | _anchorUniformBuffer = 207 | [_device newBufferWithLength:anchorUniformBufferSize options:MTLResourceStorageModeShared]; 208 | 209 | _anchorUniformBuffer.label = @"AnchorUniformBuffer"; 210 | 211 | // Create a vertex buffer with our image plane vertex data. 
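// Note (editor, not in the original source): kImagePlaneVertexData, defined near
// the top of this file, packs four vertices of a full-screen triangle strip as
// interleaved {x, y, u, v} floats:
//   (-1, -1) with uv (0, 1),   ( 1, -1) with uv (1, 1),
//   (-1,  1) with uv (0, 0),   ( 1,  1) with uv (1, 0)
// That layout is why the vertex descriptor below uses a 16-byte stride with the
// texture coordinate attribute at offset 8, and why _updateImagePlaneWithFrame:
// later rewrites only the u/v components (indices 4*i + 2 and 4*i + 3) to
// aspect-fill the camera image in the viewport.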
212 | _imagePlaneVertexBuffer = [_device newBufferWithBytes:&kImagePlaneVertexData 213 | length:sizeof(kImagePlaneVertexData) 214 | options:MTLResourceCPUCacheModeDefaultCache]; 215 | 216 | _imagePlaneVertexBuffer.label = @"ImagePlaneVertexBuffer"; 217 | 218 | // Load all the shader files with a metal file extension in the project 219 | id defaultLibrary = [_device newDefaultLibrary]; 220 | 221 | id capturedImageVertexFunction = 222 | [defaultLibrary newFunctionWithName:@"capturedImageVertexTransform"]; 223 | id capturedImageFragmentFunction = 224 | [defaultLibrary newFunctionWithName:@"capturedImageFragmentShader"]; 225 | 226 | // Create a vertex descriptor for our image plane vertex buffer 227 | MTLVertexDescriptor *imagePlaneVertexDescriptor = [[MTLVertexDescriptor alloc] init]; 228 | 229 | // Positions. 230 | imagePlaneVertexDescriptor.attributes[kVertexAttributePosition].format = MTLVertexFormatFloat2; 231 | imagePlaneVertexDescriptor.attributes[kVertexAttributePosition].offset = 0; 232 | imagePlaneVertexDescriptor.attributes[kVertexAttributePosition].bufferIndex = 233 | kBufferIndexMeshPositions; 234 | 235 | // Texture coordinates. 236 | imagePlaneVertexDescriptor.attributes[kVertexAttributeTexcoord].format = MTLVertexFormatFloat2; 237 | imagePlaneVertexDescriptor.attributes[kVertexAttributeTexcoord].offset = 8; 238 | imagePlaneVertexDescriptor.attributes[kVertexAttributeTexcoord].bufferIndex = 239 | kBufferIndexMeshPositions; 240 | 241 | // Position Buffer Layout 242 | imagePlaneVertexDescriptor.layouts[kBufferIndexMeshPositions].stride = 16; 243 | imagePlaneVertexDescriptor.layouts[kBufferIndexMeshPositions].stepRate = 1; 244 | imagePlaneVertexDescriptor.layouts[kBufferIndexMeshPositions].stepFunction = 245 | MTLVertexStepFunctionPerVertex; 246 | 247 | // Create a pipeline state for rendering the captured image 248 | MTLRenderPipelineDescriptor *capturedImagePipelineStateDescriptor = 249 | [[MTLRenderPipelineDescriptor alloc] init]; 250 | capturedImagePipelineStateDescriptor.label = @"MyCapturedImagePipeline"; 251 | capturedImagePipelineStateDescriptor.sampleCount = _renderDestination.sampleCount; 252 | capturedImagePipelineStateDescriptor.vertexFunction = capturedImageVertexFunction; 253 | capturedImagePipelineStateDescriptor.fragmentFunction = capturedImageFragmentFunction; 254 | capturedImagePipelineStateDescriptor.vertexDescriptor = imagePlaneVertexDescriptor; 255 | capturedImagePipelineStateDescriptor.colorAttachments[0].pixelFormat = 256 | _renderDestination.colorPixelFormat; 257 | capturedImagePipelineStateDescriptor.depthAttachmentPixelFormat = 258 | _renderDestination.depthStencilPixelFormat; 259 | capturedImagePipelineStateDescriptor.stencilAttachmentPixelFormat = 260 | _renderDestination.depthStencilPixelFormat; 261 | 262 | NSError *error = nil; 263 | _capturedImagePipelineState = 264 | [_device newRenderPipelineStateWithDescriptor:capturedImagePipelineStateDescriptor 265 | error:&error]; 266 | if (!_capturedImagePipelineState) { 267 | NSLog(@"Failed to created captured image pipeline state, error %@", error); 268 | } 269 | 270 | MTLDepthStencilDescriptor *capturedImageDepthStateDescriptor = 271 | [[MTLDepthStencilDescriptor alloc] init]; 272 | capturedImageDepthStateDescriptor.depthCompareFunction = MTLCompareFunctionAlways; 273 | capturedImageDepthStateDescriptor.depthWriteEnabled = NO; 274 | _capturedImageDepthState = 275 | [_device newDepthStencilStateWithDescriptor:capturedImageDepthStateDescriptor]; 276 | 277 | // Create captured image texture cache 278 | 
CVMetalTextureCacheCreate(NULL, NULL, _device, NULL, &_capturedImageTextureCache); 279 | 280 | id anchorGeometryVertexFunction = 281 | [defaultLibrary newFunctionWithName:@"anchorGeometryVertexTransform"]; 282 | id anchorGeometryFragmentFunction = 283 | [defaultLibrary newFunctionWithName:@"anchorGeometryFragmentLighting"]; 284 | 285 | // Create a vertex descriptor for our Metal pipeline. Specifies the layout of 286 | // vertices the 287 | // pipeline should expect. The layout below keeps attributes used to 288 | // calculate vertex shader output position separate (world position, 289 | // skinning, tweening weights) separate from other attributes (texture 290 | // coordinates, normals). This generally maximizes pipeline efficiency 291 | _geometryVertexDescriptor = [[MTLVertexDescriptor alloc] init]; 292 | 293 | // Positions. 294 | _geometryVertexDescriptor.attributes[kVertexAttributePosition].format = MTLVertexFormatFloat3; 295 | _geometryVertexDescriptor.attributes[kVertexAttributePosition].offset = 0; 296 | _geometryVertexDescriptor.attributes[kVertexAttributePosition].bufferIndex = 297 | kBufferIndexMeshPositions; 298 | 299 | // Texture coordinates. 300 | _geometryVertexDescriptor.attributes[kVertexAttributeTexcoord].format = MTLVertexFormatFloat2; 301 | _geometryVertexDescriptor.attributes[kVertexAttributeTexcoord].offset = 0; 302 | _geometryVertexDescriptor.attributes[kVertexAttributeTexcoord].bufferIndex = 303 | kBufferIndexMeshGenerics; 304 | 305 | // Normals. 306 | _geometryVertexDescriptor.attributes[kVertexAttributeNormal].format = MTLVertexFormatHalf3; 307 | _geometryVertexDescriptor.attributes[kVertexAttributeNormal].offset = 8; 308 | _geometryVertexDescriptor.attributes[kVertexAttributeNormal].bufferIndex = 309 | kBufferIndexMeshGenerics; 310 | 311 | // Position Buffer Layout 312 | _geometryVertexDescriptor.layouts[kBufferIndexMeshPositions].stride = 12; 313 | _geometryVertexDescriptor.layouts[kBufferIndexMeshPositions].stepRate = 1; 314 | _geometryVertexDescriptor.layouts[kBufferIndexMeshPositions].stepFunction = 315 | MTLVertexStepFunctionPerVertex; 316 | 317 | // Generic Attribute Buffer Layout 318 | _geometryVertexDescriptor.layouts[kBufferIndexMeshGenerics].stride = 16; 319 | _geometryVertexDescriptor.layouts[kBufferIndexMeshGenerics].stepRate = 1; 320 | _geometryVertexDescriptor.layouts[kBufferIndexMeshGenerics].stepFunction = 321 | MTLVertexStepFunctionPerVertex; 322 | 323 | // Create a reusable pipeline state for rendering anchor geometry 324 | MTLRenderPipelineDescriptor *anchorPipelineStateDescriptor = 325 | [[MTLRenderPipelineDescriptor alloc] init]; 326 | anchorPipelineStateDescriptor.label = @"MyAnchorPipeline"; 327 | anchorPipelineStateDescriptor.sampleCount = _renderDestination.sampleCount; 328 | anchorPipelineStateDescriptor.vertexFunction = anchorGeometryVertexFunction; 329 | anchorPipelineStateDescriptor.fragmentFunction = anchorGeometryFragmentFunction; 330 | anchorPipelineStateDescriptor.vertexDescriptor = _geometryVertexDescriptor; 331 | anchorPipelineStateDescriptor.colorAttachments[0].pixelFormat = 332 | _renderDestination.colorPixelFormat; 333 | anchorPipelineStateDescriptor.depthAttachmentPixelFormat = 334 | _renderDestination.depthStencilPixelFormat; 335 | anchorPipelineStateDescriptor.stencilAttachmentPixelFormat = 336 | _renderDestination.depthStencilPixelFormat; 337 | 338 | _anchorPipelineState = 339 | [_device newRenderPipelineStateWithDescriptor:anchorPipelineStateDescriptor error:&error]; 340 | if (!_anchorPipelineState) { 341 | NSLog(@"Failed 
to created geometry pipeline state, error %@", error); 342 | } 343 | 344 | MTLDepthStencilDescriptor *anchorDepthStateDescriptor = 345 | [[MTLDepthStencilDescriptor alloc] init]; 346 | anchorDepthStateDescriptor.depthCompareFunction = MTLCompareFunctionLess; 347 | anchorDepthStateDescriptor.depthWriteEnabled = YES; 348 | _anchorDepthState = [_device newDepthStencilStateWithDescriptor:anchorDepthStateDescriptor]; 349 | 350 | // Create the command queue 351 | _commandQueue = [_device newCommandQueue]; 352 | } 353 | 354 | - (void)_loadAssets { 355 | // Create and load our assets into Metal objects including meshes and textures 356 | 357 | // Create a MetalKit mesh buffer allocator so that ModelIO will load mesh data 358 | // directly into 359 | // Metal buffers accessible by the GPU 360 | MTKMeshBufferAllocator *metalAllocator = 361 | [[MTKMeshBufferAllocator alloc] initWithDevice:_device]; 362 | 363 | // Creata a Model IO vertexDescriptor so that we format/layout our model IO 364 | // mesh vertices to 365 | // fit our Metal render pipeline's vertex descriptor layout 366 | MDLVertexDescriptor *vertexDescriptor = 367 | MTKModelIOVertexDescriptorFromMetal(_geometryVertexDescriptor); 368 | 369 | // Indicate how each Metal vertex descriptor attribute maps to each ModelIO 370 | // attribute 371 | vertexDescriptor.attributes[kVertexAttributePosition].name = MDLVertexAttributePosition; 372 | vertexDescriptor.attributes[kVertexAttributeTexcoord].name = 373 | MDLVertexAttributeTextureCoordinate; 374 | vertexDescriptor.attributes[kVertexAttributeNormal].name = MDLVertexAttributeNormal; 375 | 376 | // Use ModelIO to create a box mesh as our object 377 | MDLMesh *mesh = [MDLMesh newBoxWithDimensions:(vector_float3) { .075, .075, .075 } 378 | segments:(vector_uint3) { 1, 1, 1 } 379 | geometryType:MDLGeometryTypeTriangles 380 | inwardNormals:NO 381 | allocator:metalAllocator]; 382 | 383 | // Perform the format/relayout of mesh vertices by setting the new vertex 384 | // descriptor in our 385 | // Model IO mesh 386 | mesh.vertexDescriptor = vertexDescriptor; 387 | 388 | NSError *error = nil; 389 | 390 | // Create a MetalKit mesh (and submeshes) backed by Metal buffers 391 | _cubeMesh = [[MTKMesh alloc] initWithMesh:mesh device:_device error:&error]; 392 | 393 | if (!_cubeMesh || error) { 394 | NSLog(@"Error creating MetalKit mesh %@", error.localizedDescription); 395 | } 396 | } 397 | 398 | - (void)_updateBufferStates { 399 | // Update the location(s) to which we'll write to in our dynamically changing 400 | // Metal buffers for 401 | // the current frame (i.e. 
update our slot in the ring buffer used for the 402 | // current frame) 403 | 404 | _uniformBufferIndex = (_uniformBufferIndex + 1) % kMaxBuffersInFlight; 405 | 406 | _sharedUniformBufferOffset = kAlignedSharedUniformsSize * _uniformBufferIndex; 407 | _anchorUniformBufferOffset = kAlignedInstanceUniformsSize * _uniformBufferIndex; 408 | 409 | _sharedUniformBufferAddress = 410 | ((uint8_t *)_sharedUniformBuffer.contents) + _sharedUniformBufferOffset; 411 | _anchorUniformBufferAddress = 412 | ((uint8_t *)_anchorUniformBuffer.contents) + _anchorUniformBufferOffset; 413 | } 414 | 415 | - (void)_updateGameState { 416 | // Update any game state 417 | 418 | ARFrame *currentFrame = _session.currentFrame; 419 | 420 | if (!currentFrame) { 421 | return; 422 | } 423 | 424 | // These calls are not necessary for WbARKit 425 | // [self _updateSharedUniformsWithFrame:currentFrame]; 426 | // [self _updateAnchorsWithFrame:currentFrame]; 427 | if (self.cameraRenderEnabled) { 428 | [self _updateCapturedImageTexturesWithFrame:currentFrame]; 429 | } 430 | 431 | if (_viewportSizeDidChange) { 432 | _viewportSizeDidChange = NO; 433 | 434 | [self _updateImagePlaneWithFrame:currentFrame]; 435 | } 436 | } 437 | 438 | - (void)_updateSharedUniformsWithFrame:(ARFrame *)frame { 439 | // Update the shared uniforms of the frame 440 | SharedUniforms *uniforms = (SharedUniforms *)_sharedUniformBufferAddress; 441 | 442 | uniforms->viewMatrix = matrix_invert(frame.camera.transform); 443 | uniforms->projectionMatrix = [frame.camera projectionMatrixForOrientation:interfaceOrientation 444 | viewportSize:self->viewportSize 445 | zNear:0.001 446 | zFar:1000]; 447 | 448 | // Set up lighting for the scene using the ambient intensity if provided 449 | float ambientIntensity = 1.0; 450 | 451 | if (frame.lightEstimate) { 452 | ambientIntensity = frame.lightEstimate.ambientIntensity / 1000; 453 | } 454 | 455 | vector_float3 ambientLightColor = {0.5, 0.5, 0.5}; 456 | uniforms->ambientLightColor = ambientLightColor * ambientIntensity; 457 | 458 | vector_float3 directionalLightDirection = {0.0, 0.0, -1.0}; 459 | directionalLightDirection = vector_normalize(directionalLightDirection); 460 | uniforms->directionalLightDirection = directionalLightDirection; 461 | 462 | vector_float3 directionalLightColor = {0.6, 0.6, 0.6}; 463 | uniforms->directionalLightColor = directionalLightColor * ambientIntensity; 464 | 465 | uniforms->materialShininess = 30; 466 | } 467 | 468 | - (void)_updateAnchorsWithFrame:(ARFrame *)frame { 469 | // Update the anchor uniform buffer with transforms of the current frame's 470 | // anchors 471 | NSInteger anchorInstanceCount = MIN(frame.anchors.count, kMaxAnchorInstanceCount); 472 | 473 | NSInteger anchorOffset = 0; 474 | if (anchorInstanceCount == kMaxAnchorInstanceCount) { 475 | anchorOffset = MAX(frame.anchors.count - kMaxAnchorInstanceCount, 0); 476 | } 477 | 478 | for (NSInteger index = 0; index < anchorInstanceCount; index++) { 479 | InstanceUniforms *anchorUniforms = 480 | ((InstanceUniforms *)_anchorUniformBufferAddress) + index; 481 | ARAnchor *anchor = frame.anchors[index + anchorOffset]; 482 | 483 | // Flip Z axis to convert geometry from right handed to left handed 484 | matrix_float4x4 coordinateSpaceTransform = matrix_identity_float4x4; 485 | coordinateSpaceTransform.columns[2].z = -1.0; 486 | 487 | anchorUniforms->modelMatrix = matrix_multiply(anchor.transform, coordinateSpaceTransform); 488 | } 489 | 490 | _anchorInstanceCount = anchorInstanceCount; 491 | } 492 | 493 | - 
(void)_updateCapturedImageTexturesWithFrame:(ARFrame *)frame { 494 | // Create two textures (Y and CbCr) from the provided frame's captured image 495 | CVPixelBufferRef pixelBuffer = frame.capturedImage; 496 | 497 | if (CVPixelBufferGetPlaneCount(pixelBuffer) < 2) { 498 | return; 499 | } 500 | 501 | _capturedImageTextureY = [self _createTextureFromPixelBuffer:pixelBuffer 502 | pixelFormat:MTLPixelFormatR8Unorm 503 | planeIndex:0]; 504 | _capturedImageTextureCbCr = [self _createTextureFromPixelBuffer:pixelBuffer 505 | pixelFormat:MTLPixelFormatRG8Unorm 506 | planeIndex:1]; 507 | } 508 | 509 | - (id)_createTextureFromPixelBuffer:(CVPixelBufferRef)pixelBuffer 510 | pixelFormat:(MTLPixelFormat)pixelFormat 511 | planeIndex:(NSInteger)planeIndex { 512 | id mtlTexture = nil; 513 | 514 | const size_t width = CVPixelBufferGetWidthOfPlane(pixelBuffer, planeIndex); 515 | const size_t height = CVPixelBufferGetHeightOfPlane(pixelBuffer, planeIndex); 516 | 517 | CVMetalTextureRef texture = NULL; 518 | CVReturn status = CVMetalTextureCacheCreateTextureFromImage( 519 | NULL, _capturedImageTextureCache, pixelBuffer, NULL, pixelFormat, width, height, planeIndex, 520 | &texture); 521 | 522 | if (status == kCVReturnSuccess) { 523 | mtlTexture = CVMetalTextureGetTexture(texture); 524 | CFRelease(texture); 525 | } 526 | 527 | return mtlTexture; 528 | } 529 | 530 | - (void)_updateImagePlaneWithFrame:(ARFrame *)frame { 531 | // Update the texture coordinates of our image plane to aspect fill the 532 | // viewport 533 | CGAffineTransform displayToCameraTransform = 534 | CGAffineTransformInvert([frame displayTransformForOrientation:interfaceOrientation 535 | viewportSize:self->viewportSize]); 536 | 537 | float *vertexData = [_imagePlaneVertexBuffer contents]; 538 | for (NSInteger index = 0; index < 4; index++) { 539 | NSInteger textureCoordIndex = 4 * index + 2; 540 | CGPoint textureCoord = CGPointMake(kImagePlaneVertexData[textureCoordIndex], 541 | kImagePlaneVertexData[textureCoordIndex + 1]); 542 | CGPoint transformedCoord = 543 | CGPointApplyAffineTransform(textureCoord, displayToCameraTransform); 544 | vertexData[textureCoordIndex] = transformedCoord.x; 545 | vertexData[textureCoordIndex + 1] = transformedCoord.y; 546 | } 547 | } 548 | 549 | - (void)_drawCapturedImageWithCommandEncoder:(id)renderEncoder { 550 | if (_capturedImageTextureY == nil || _capturedImageTextureCbCr == nil) { 551 | return; 552 | } 553 | 554 | // Push a debug group allowing us to identify render commands in the GPU Frame 555 | // Capture tool 556 | [renderEncoder pushDebugGroup:@"DrawCapturedImage"]; 557 | 558 | // Set render command encoder state 559 | [renderEncoder setCullMode:MTLCullModeNone]; 560 | [renderEncoder setRenderPipelineState:_capturedImagePipelineState]; 561 | [renderEncoder setDepthStencilState:_capturedImageDepthState]; 562 | 563 | // Set mesh's vertex buffers 564 | [renderEncoder setVertexBuffer:_imagePlaneVertexBuffer 565 | offset:0 566 | atIndex:kBufferIndexMeshPositions]; 567 | 568 | // Set any textures read/sampled from our render pipeline 569 | [renderEncoder setFragmentTexture:_capturedImageTextureY atIndex:kTextureIndexY]; 570 | [renderEncoder setFragmentTexture:_capturedImageTextureCbCr atIndex:kTextureIndexCbCr]; 571 | 572 | // Draw each submesh of our mesh 573 | [renderEncoder drawPrimitives:MTLPrimitiveTypeTriangleStrip vertexStart:0 vertexCount:4]; 574 | 575 | [renderEncoder popDebugGroup]; 576 | } 577 | 578 | - (void)_drawAnchorGeometryWithCommandEncoder:(id)renderEncoder { 579 | if (_anchorInstanceCount 
== 0) { 580 | return; 581 | } 582 | 583 | // Push a debug group allowing us to identify render commands in the GPU Frame 584 | // Capture tool 585 | [renderEncoder pushDebugGroup:@"DrawAnchors"]; 586 | 587 | // Set render command encoder state 588 | [renderEncoder setCullMode:MTLCullModeBack]; 589 | [renderEncoder setRenderPipelineState:_anchorPipelineState]; 590 | [renderEncoder setDepthStencilState:_anchorDepthState]; 591 | 592 | // Set any buffers fed into our render pipeline 593 | [renderEncoder setVertexBuffer:_anchorUniformBuffer 594 | offset:_anchorUniformBufferOffset 595 | atIndex:kBufferIndexInstanceUniforms]; 596 | 597 | [renderEncoder setVertexBuffer:_sharedUniformBuffer 598 | offset:_sharedUniformBufferOffset 599 | atIndex:kBufferIndexSharedUniforms]; 600 | 601 | [renderEncoder setFragmentBuffer:_sharedUniformBuffer 602 | offset:_sharedUniformBufferOffset 603 | atIndex:kBufferIndexSharedUniforms]; 604 | 605 | // Set mesh's vertex buffers 606 | for (NSUInteger bufferIndex = 0; bufferIndex < _cubeMesh.vertexBuffers.count; bufferIndex++) { 607 | MTKMeshBuffer *vertexBuffer = _cubeMesh.vertexBuffers[bufferIndex]; 608 | [renderEncoder setVertexBuffer:vertexBuffer.buffer 609 | offset:vertexBuffer.offset 610 | atIndex:bufferIndex]; 611 | } 612 | 613 | // Draw each submesh of our mesh 614 | for (MTKSubmesh *submesh in _cubeMesh.submeshes) { 615 | [renderEncoder drawIndexedPrimitives:submesh.primitiveType 616 | indexCount:submesh.indexCount 617 | indexType:submesh.indexType 618 | indexBuffer:submesh.indexBuffer.buffer 619 | indexBufferOffset:submesh.indexBuffer.offset 620 | instanceCount:_anchorInstanceCount]; 621 | } 622 | 623 | [renderEncoder popDebugGroup]; 624 | } 625 | 626 | - (void)disableCameraRender { 627 | self.cameraRenderEnabled = false; 628 | } 629 | 630 | - (void)enableCameraRender { 631 | self.cameraRenderEnabled = true; 632 | } 633 | 634 | @end 635 | -------------------------------------------------------------------------------- /WebARonARKit/WebARonARKit/ShaderTypes.h: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017 Google Inc. All Rights Reserved. 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | */ 16 | 17 | // 18 | // Header containing types and enum constants shared between Metal shaders and 19 | // C/ObjC source 20 | // 21 | #ifndef ShaderTypes_h 22 | #define ShaderTypes_h 23 | 24 | #include <simd/simd.h> 25 | 26 | // Buffer index values shared between shader and C code to ensure Metal shader 27 | // buffer inputs match Metal API buffer set calls 28 | typedef enum BufferIndices { 29 | kBufferIndexMeshPositions = 0, 30 | kBufferIndexMeshGenerics = 1, 31 | kBufferIndexInstanceUniforms = 2, 32 | kBufferIndexSharedUniforms = 3 33 | } BufferIndices; 34 | 35 | // Attribute index values shared between shader and C code to ensure Metal 36 | // shader vertex attribute indices match the Metal API vertex descriptor attribute indices 37 | typedef enum VertexAttributes { 38 | kVertexAttributePosition = 0, 39 | kVertexAttributeTexcoord = 1, 40 | kVertexAttributeNormal = 2 41 | } VertexAttributes; 42 | 43 | // Texture index values shared between shader and C code to ensure Metal shader 44 | // texture indices match indices of Metal API texture set calls 45 | typedef enum TextureIndices { 46 | kTextureIndexColor = 0, 47 | kTextureIndexY = 1, 48 | kTextureIndexCbCr = 2 49 | } TextureIndices; 50 | 51 | // Structure shared between shader and C code to ensure the layout of shared 52 | // uniform data accessed in Metal shaders matches the layout of uniform data set in C code 53 | typedef struct { 54 | // Camera Uniforms 55 | matrix_float4x4 projectionMatrix; 56 | matrix_float4x4 viewMatrix; 57 | // Lighting Properties 58 | vector_float3 ambientLightColor; 59 | vector_float3 directionalLightDirection; 60 | vector_float3 directionalLightColor; 61 | float materialShininess; 62 | } SharedUniforms; 63 | 64 | // Structure shared between shader and C code to ensure the layout of instance 65 | // uniform data accessed in Metal shaders matches the layout of uniform data set in C code 66 | typedef struct { 67 | matrix_float4x4 modelMatrix; 68 | } InstanceUniforms; 69 | 70 | #endif /* ShaderTypes_h */ 71 | -------------------------------------------------------------------------------- /WebARonARKit/WebARonARKit/Shaders.metal: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017 Google Inc. All Rights Reserved. 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License.
15 | */ 16 | 17 | #include <metal_stdlib> 18 | #include <simd/simd.h> 19 | 20 | // Include header shared between this Metal shader code and C code executing Metal API commands 21 | #import "ShaderTypes.h" 22 | 23 | using namespace metal; 24 | 25 | typedef struct { 26 | float2 position[[attribute(kVertexAttributePosition)]]; 27 | float2 texCoord[[attribute(kVertexAttributeTexcoord)]]; 28 | } ImageVertex; 29 | 30 | typedef struct { 31 | float4 position[[position]]; 32 | float2 texCoord; 33 | } ImageColorInOut; 34 | 35 | // Captured image vertex function 36 | vertex ImageColorInOut capturedImageVertexTransform(ImageVertex in[[stage_in]]) { 37 | ImageColorInOut out; 38 | // Pass through the image vertex's position 39 | out.position = float4(in.position, 0.0, 1.0); 40 | // Pass through the texture coordinate 41 | out.texCoord = in.texCoord; 42 | return out; 43 | } 44 | 45 | // Captured image fragment function 46 | fragment float4 capturedImageFragmentShader( 47 | ImageColorInOut in[[stage_in]], 48 | texture2d<float, access::sample> capturedImageTextureY[[texture(kTextureIndexY)]], 49 | texture2d<float, access::sample> capturedImageTextureCbCr[[texture(kTextureIndexCbCr)]]) { 50 | constexpr sampler colorSampler(mip_filter::linear, mag_filter::linear, min_filter::linear); 51 | 52 | const float4x4 ycbcrToRGBTransform = float4x4(float4(+1.0000f, +1.0000f, +1.0000f, +0.0000f), 53 | float4(+0.0000f, -0.3441f, +1.7720f, +0.0000f), 54 | float4(+1.4020f, -0.7141f, +0.0000f, +0.0000f), 55 | float4(-0.7010f, +0.5291f, -0.8860f, +1.0000f)); 56 | 57 | // Sample Y and CbCr textures to get the YCbCr color at the given texture 58 | // coordinate 59 | float4 ycbcr = float4(capturedImageTextureY.sample(colorSampler, in.texCoord).r, 60 | capturedImageTextureCbCr.sample(colorSampler, in.texCoord).rg, 1.0); 61 | 62 | // Return converted RGB color 63 | return ycbcrToRGBTransform * ycbcr; 64 | } 65 | 66 | typedef struct { 67 | float3 position[[attribute(kVertexAttributePosition)]]; 68 | float2 texCoord[[attribute(kVertexAttributeTexcoord)]]; 69 | half3 normal[[attribute(kVertexAttributeNormal)]]; 70 | } Vertex; 71 | 72 | typedef struct { 73 | float4 position[[position]]; 74 | float4 color; 75 | half3 eyePosition; 76 | half3 normal; 77 | } ColorInOut; 78 | 79 | // Anchor geometry vertex function 80 | vertex ColorInOut anchorGeometryVertexTransform( 81 | Vertex in[[stage_in]], 82 | constant SharedUniforms &sharedUniforms[[buffer(kBufferIndexSharedUniforms)]], 83 | constant InstanceUniforms *instanceUniforms[[buffer(kBufferIndexInstanceUniforms)]], 84 | ushort vid[[vertex_id]], 85 | ushort iid[[instance_id]]) { 86 | ColorInOut out; 87 | // Make position a float4 to perform 4x4 matrix math on it 88 | float4 position = float4(in.position, 1.0); 89 | float4x4 modelMatrix = instanceUniforms[iid].modelMatrix; 90 | float4x4 modelViewMatrix = sharedUniforms.viewMatrix * modelMatrix; 91 | // Calculate the position of our vertex in clip space and output for clipping 92 | // and rasterization 93 | out.position = sharedUniforms.projectionMatrix * modelViewMatrix * position; 94 | // Color each face a different color 95 | ushort colorID = vid / 4 % 6; 96 | out.color = colorID == 0 97 | ? float4(0.0, 1.0, 0.0, 1.0) // Right face 98 | : colorID == 1 ? float4(1.0, 0.0, 0.0, 1.0) // Left face 99 | : colorID == 2 ? float4(0.0, 0.0, 1.0, 1.0) // Top face 100 | : colorID == 3 ? float4(1.0, 0.5, 0.0, 1.0) // Bottom face 101 | : colorID == 4 ?
float4(1.0, 1.0, 0.0, 1.0) // Back face 102 | : float4(1.0, 1.0, 1.0, 1.0); // Front face 103 | // Calculate the positon of our vertex in eye space 104 | out.eyePosition = half3((modelViewMatrix * position).xyz); 105 | // Rotate our normals to world coordinates 106 | float4 normal = modelMatrix * float4(in.normal.x, in.normal.y, in.normal.z, 0.0f); 107 | out.normal = normalize(half3(normal.xyz)); 108 | return out; 109 | } 110 | 111 | // Anchor geometry fragment function 112 | fragment float4 anchorGeometryFragmentLighting( 113 | ColorInOut in[[stage_in]], 114 | constant SharedUniforms &uniforms[[buffer(kBufferIndexSharedUniforms)]]) { 115 | float3 normal = float3(in.normal); 116 | // Calculate the contribution of the directional light as a sum of diffuse and specular terms 117 | float3 directionalContribution = float3(0); 118 | { 119 | // Light falls off based on how closely aligned the surface normal is to the light direction 120 | float nDotL = saturate(dot(normal, -uniforms.directionalLightDirection)); 121 | // The diffuse term is then the product of the light color, the surface material 122 | // reflectance, and the falloff 123 | float3 diffuseTerm = uniforms.directionalLightColor * nDotL; 124 | // Apply specular lighting... 125 | // 1) Calculate the halfway vector between the light direction and the direction they eye 126 | // is looking 127 | float3 halfwayVector = normalize(-uniforms.directionalLightDirection - float3(in.eyePosition)); 128 | // 2) Calculate the reflection angle between our reflection vector and the eye's direction 129 | float reflectionAngle = saturate(dot(normal, halfwayVector)); 130 | // 3) Calculate the specular intensity by multiplying our reflection angle with our 131 | // object's shininess 132 | float specularIntensity = saturate(powr(reflectionAngle, uniforms.materialShininess)); 133 | // 4) Obtain the specular term by multiplying the intensity by our light's color 134 | float3 specularTerm = uniforms.directionalLightColor * specularIntensity; 135 | // Calculate total contribution from this light is the sum of the diffuse 136 | // and specular values 137 | directionalContribution = diffuseTerm + specularTerm; 138 | } 139 | // The ambient contribution, which is an approximation for global, indirect 140 | // lighting, is the product of the ambient light intensity multiplied by the 141 | // material's reflectance 142 | float3 ambientContribution = uniforms.ambientLightColor; 143 | // Now that we have the contributions our light sources in the scene, we sum 144 | // them together to get the fragment's lighting value 145 | float3 lightContributions = ambientContribution + directionalContribution; 146 | // We compute the final color by multiplying the sample from our color maps by 147 | // the fragment's lighting value 148 | float3 color = in.color.rgb * lightContributions; 149 | // We use the color we just computed and the alpha channel of our 150 | // colorMap for this fragment's alpha value 151 | return float4(color, in.color.w); 152 | } 153 | -------------------------------------------------------------------------------- /WebARonARKit/WebARonARKit/ViewController.h: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017 Google Inc. All Rights Reserved. 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 
6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | #import <ARKit/ARKit.h> 18 | #import <Foundation/Foundation.h> 19 | #import <Metal/Metal.h> 20 | #import <MetalKit/MetalKit.h> 21 | #import <UIKit/UIKit.h> 22 | #import <WebKit/WebKit.h> 23 | 24 | @interface ViewController : UIViewController <ARSessionDelegate, 25 | MTKViewDelegate, 26 | WKScriptMessageHandler, 27 | WKUIDelegate, 28 | WKNavigationDelegate, 29 | UITextFieldDelegate> { 30 | MTKView *mtkView; 31 | WKWebView *wkWebView; 32 | 33 | UITextField *urlTextField; 34 | bool urlTextFieldActive; 35 | UIButton *backButton; 36 | UIButton *refreshButton; 37 | 38 | bool initialPageLoadedWhenTrackingBegins; 39 | 40 | UIDeviceOrientation deviceOrientation; 41 | UIInterfaceOrientation interfaceOrientation; 42 | bool updateWindowSize; 43 | 44 | float near; 45 | float far; 46 | 47 | bool showingCameraFeed; 48 | bool iPhoneXDevice; 49 | UIColor *wkWebViewOriginalBackgroundColor; 50 | 51 | NSMutableDictionary *jsAnchorIdsToObjCAnchorIds; 52 | NSMutableDictionary *objCAnchorIdsToJSAnchorIds; 53 | NSMutableDictionary *anchors; 54 | 55 | // These flags control the synchronization between the natively 56 | // rendered camera frame and the 3D scene rendered in the JS side. 57 | bool drawNextCameraFrame; 58 | bool sendARData; 59 | 60 | CFTimeInterval timeOfLastDrawnCameraFrame; 61 | } 62 | 63 | - (void)loadURL:(NSString *)urlString; 64 | 65 | @end 66 | -------------------------------------------------------------------------------- /WebARonARKit/WebARonARKit/ViewController.m: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2017 Google Inc. All Rights Reserved. 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | #import "NavigationView.h" 18 | #import "ProgressView.h" 19 | #import "Renderer.h" 20 | #import "ViewController.h" 21 | 22 | #import <sys/utsname.h> 23 | 24 | #define FBOX(x) [NSNumber numberWithFloat:x] 25 | 26 | NSString *deviceName() { 27 | struct utsname systemInfo; 28 | uname(&systemInfo); 29 | return [NSString stringWithCString:systemInfo.machine 30 | encoding:NSUTF8StringEncoding]; 31 | } 32 | 33 | // TODO: Should this be a percentage?
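// The constants below are hard-coded layout metrics in points: the NOTCH_*
// values approximate the iPhone X sensor-housing cutout (see -deviceCheck and
// the iPhoneXDevice branches of -updateInterface), while the URL_* and
// PROGRESSVIEW_HEIGHT values size the address bar, its buttons, and the
// progress bar throughout the view-layout code below.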
34 | #define NOTCH_TAB_WIDTH 83 35 | #define NOTCH_HEIGHT 30 36 | #define NOTCH_WIDTH 209 37 | 38 | #define URL_SAFE_AREA_VERTICAL 8 39 | #define URL_SAFE_AREA_HORIZONTAL 16 40 | 41 | #define URL_BUTTON_PADDING 16 42 | 43 | #define URL_BUTTON_WIDTH_PORTRAIT_X 64 44 | #define URL_BUTTON_HEIGHT_PORTRAIT_X 30 45 | 46 | #define URL_BUTTON_WIDTH_PORTRAIT 44 47 | #define URL_BUTTON_HEIGHT_PORTRAIT 44 48 | 49 | #define URL_BUTTON_WIDTH_LANDSCAPE 44 50 | #define URL_BUTTON_HEIGHT_LANDSCAPE 44 51 | 52 | #define URL_TEXTFIELD_HEIGHT_EXPANDED 44 53 | #define URL_TEXTFIELD_HEIGHT_MINIFIED 14 54 | 55 | #define URL_TEXTFIELD_HEIGHT 44 56 | #define PROGRESSVIEW_HEIGHT 2 57 | 58 | // Helper functions to determine the iOS version 59 | #define SYSTEM_VERSION_EQUAL_TO(v) ([[[UIDevice currentDevice] systemVersion] compare:v options:NSNumericSearch] == NSOrderedSame) 60 | #define SYSTEM_VERSION_GREATER_THAN(v) ([[[UIDevice currentDevice] systemVersion] compare:v options:NSNumericSearch] == NSOrderedDescending) 61 | #define SYSTEM_VERSION_GREATER_THAN_OR_EQUAL_TO(v) ([[[UIDevice currentDevice] systemVersion] compare:v options:NSNumericSearch] != NSOrderedAscending) 62 | #define SYSTEM_VERSION_LESS_THAN(v) ([[[UIDevice currentDevice] systemVersion] compare:v options:NSNumericSearch] == NSOrderedAscending) 63 | #define SYSTEM_VERSION_LESS_THAN_OR_EQUAL_TO(v) ([[[UIDevice currentDevice] systemVersion] compare:v options:NSNumericSearch] != NSOrderedDescending) 64 | 65 | // Set this value to true or false to enable the passing of the camera 66 | // frame from the native side to the JS side in each frame. 67 | const bool SEND_CAMERA_FRAME_TO_JS = false; 68 | // Use these values to control the camera frame quality 69 | const float CAMERA_FRAME_SCALE_FACTOR = 0.4; 70 | const float CAMERA_FRAME_JPEG_COMPRESSION_FACTOR = 0.5; 71 | 72 | @interface ViewController () 73 | 74 | @property(nonatomic, strong) ARSession *session; 75 | @property(nonatomic, strong) Renderer *renderer; 76 | @property(nonatomic, strong) ProgressView *progressView; 77 | @property(nonatomic, strong) NavigationView *navigationBacking; 78 | @property(nonatomic, assign) bool webviewNavigationSuccess; 79 | 80 | @end 81 | 82 | @interface MTKView () 83 | 84 | @end 85 | 86 | @implementation ViewController 87 | 88 | - (void)showAlertDialog:(NSString *)message 89 | completionHandler:(void (^)(void))completionHandler { 90 | UIAlertController *alertController = 91 | [UIAlertController alertControllerWithTitle:message 92 | message:nil 93 | preferredStyle:UIAlertControllerStyleAlert]; 94 | [alertController 95 | addAction:[UIAlertAction actionWithTitle:@"OK" 96 | style:UIAlertActionStyleCancel 97 | handler:^(UIAlertAction *action) { 98 | if (completionHandler) { 99 | completionHandler(); 100 | } 101 | }]]; 102 | [self presentViewController:alertController 103 | animated:YES 104 | completion:^{ 105 | }]; 106 | } 107 | 108 | - (void)setWKWebViewScrollEnabled:(BOOL)enabled { 109 | wkWebView.scrollView.scrollEnabled = enabled; 110 | wkWebView.scrollView.panGestureRecognizer.enabled = enabled; 111 | wkWebView.scrollView.bounces = enabled; 112 | } 113 | 114 | - (bool)loadURLInWKWebView:(NSString *)urlString { 115 | bool result = true; 116 | // Try to create a url with the provided string 117 | NSURL *nsurl = [NSURL URLWithString:urlString]; 118 | bool fileScheme = nsurl && nsurl.scheme && 119 | [[nsurl.scheme lowercaseString] isEqualToString:@"file"]; 120 | // Quick hack: If the url string is not a proper URL, try to add http to it to 121 | // see if it is an actual URL 
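// (For instance, with a hypothetical input such as "example.com/scene", the
// string has no scheme, so the code below retries it as
// "http://example.com/scene"; only if that still yields no host, or a file://
// URL was given, does it fall back to looking the path up inside the app
// bundle.)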
122 | if (!nsurl || !nsurl.scheme || !nsurl.host) { 123 | NSString *urlStringWithHTTP = 124 | [NSString stringWithFormat:@"http://%@", urlString]; 125 | nsurl = [NSURL URLWithString:urlStringWithHTTP]; 126 | } 127 | // If the string did not represent a url or is a filescheme url, the way the 128 | // page is loaded is different 129 | if (!nsurl || !nsurl.scheme || !nsurl.host || fileScheme) { 130 | NSString *nsurlPath = urlString; 131 | NSString *pathExtension = @"html"; 132 | // If the file:// scheme was provided, remove the scheme and trim the 133 | // extension if included. 134 | if (fileScheme) { 135 | nsurlPath = [NSString stringWithFormat:@"%@%@", nsurl.host, nsurl.path]; 136 | if ([[nsurl.pathExtension lowercaseString] 137 | isEqualToString:pathExtension]) { 138 | NSRange range = 139 | [[nsurlPath lowercaseString] rangeOfString:@".html" 140 | options:NSBackwardsSearch]; 141 | nsurlPath = 142 | [nsurlPath stringByReplacingCharactersInRange:range withString:@""]; 143 | } 144 | } else { 145 | // If the file:// was not provided, trim the extension if included. 146 | NSRange range = 147 | [[nsurlPath lowercaseString] rangeOfString:@".html" 148 | options:NSBackwardsSearch]; 149 | if (range.location != NSNotFound && 150 | range.location == nsurlPath.length - 5) { 151 | nsurlPath = 152 | [nsurlPath stringByReplacingCharactersInRange:range withString:@""]; 153 | } 154 | } 155 | // NSLog(@"nsurlPath = %@", nsurlPath); 156 | // Is the URL string a path to a file? 157 | NSString *path = 158 | [[NSBundle mainBundle] pathForResource:nsurlPath ofType:pathExtension]; 159 | // If the path is incorrect, it could be because is a path to a folder 160 | // instead of a file 161 | if (!path) { 162 | path = [[NSBundle mainBundle] pathForResource:nsurlPath ofType:nil]; 163 | } 164 | bool isDirectory = false; 165 | // NSLog(@"Loading a file from resources with path = %@", path); 166 | // Make sure that the path exists and get a flag to indicate if the path 167 | // represents a directory 168 | if (path && 169 | [[NSFileManager defaultManager] fileExistsAtPath:path 170 | isDirectory:&isDirectory]) { 171 | // If the path is to a directory, add the index at the end (try to load 172 | // index.html). 173 | if (isDirectory) { 174 | nsurlPath = [NSString stringWithFormat:@"%@/index", nsurlPath]; 175 | } 176 | NSURL *url = [[NSBundle mainBundle] URLForResource:nsurlPath 177 | withExtension:pathExtension]; 178 | // The final URL to the resource may fail so just in case... 179 | if (!url) { 180 | result = false; 181 | } else { 182 | // NSLog(@"Loading a file from resources with url = %@", 183 | // url.absoluteString); 184 | [self->wkWebView loadRequest:[NSURLRequest requestWithURL:url]]; 185 | } 186 | } else { 187 | result = false; 188 | } 189 | } else { 190 | NSURLRequest *nsrequest = [NSURLRequest requestWithURL:nsurl]; 191 | [self->wkWebView loadRequest:nsrequest]; 192 | } 193 | return result; 194 | } 195 | 196 | - (void)loadURL:(NSString *)urlString { 197 | if (![self loadURLInWKWebView:urlString]) { 198 | [self showAlertDialog:@"The URL is not valid." 
completionHandler:NULL]; 199 | } else { 200 | [self storeURLInUserDefaults:urlString]; 201 | } 202 | } 203 | 204 | - (NSString *)getURLFromUserDefaults { 205 | NSUserDefaults *defaults = [NSUserDefaults standardUserDefaults]; 206 | return [defaults stringForKey:@"url"]; 207 | } 208 | 209 | - (void)storeURLInUserDefaults:(NSString *)urlString { 210 | NSUserDefaults *defaults = [NSUserDefaults standardUserDefaults]; 211 | [defaults setObject:urlString forKey:@"url"]; 212 | [defaults synchronize]; 213 | } 214 | 215 | - (void)backButtonClicked:(UIButton *)button { 216 | if ([wkWebView canGoBack]) { 217 | WKBackForwardList *backForwardList = [wkWebView backForwardList]; 218 | WKBackForwardListItem *backItem = [backForwardList backItem]; 219 | if (backItem != nil) { 220 | NSURL *url = [backItem URL]; 221 | [urlTextField setText:[url absoluteString]]; 222 | } 223 | [wkWebView goBack]; 224 | } 225 | } 226 | 227 | - (void)forwardButtonClicked:(UIButton *)button { 228 | [wkWebView goForward]; 229 | } 230 | 231 | - (void)refreshButtonClicked:(UIButton *)button { 232 | [wkWebView reload]; 233 | } 234 | 235 | - (void)setShowCameraFeed:(bool)show { 236 | if (show && !SEND_CAMERA_FRAME_TO_JS) { 237 | wkWebView.opaque = false; 238 | wkWebView.backgroundColor = [UIColor clearColor]; 239 | wkWebView.scrollView.backgroundColor = [UIColor clearColor]; 240 | } else { 241 | wkWebView.opaque = true; 242 | wkWebView.backgroundColor = wkWebViewOriginalBackgroundColor; 243 | wkWebView.scrollView.backgroundColor = wkWebViewOriginalBackgroundColor; 244 | } 245 | showingCameraFeed = show; 246 | NSLog(@"show camera feed: %@", show ? @"YES" : @"NO"); 247 | } 248 | 249 | - (void)deviceCheck { 250 | NSString *deviceType = deviceName(); 251 | NSRange containiPhoneX = [deviceType rangeOfString:@"iPhone10" options:NSCaseInsensitiveSearch]; 252 | if (containiPhoneX.location == NSNotFound) { 253 | iPhoneXDevice = false; 254 | 255 | } else { 256 | iPhoneXDevice = true; 257 | } 258 | } 259 | 260 | - (void)viewDidLoad { 261 | [super viewDidLoad]; 262 | 263 | [self deviceCheck]; 264 | 265 | near = 0.01f; 266 | far = 10000.0f; 267 | showingCameraFeed = false; 268 | 269 | // By default, we draw camera frames but do not send AR data until a frame 270 | // is drawn. 
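// In other words, these two flags implement a small handshake with the page:
// drawNextCameraFrame gates the native camera render in -drawInMTKView:, and
// only after a frame has actually been drawn does sendARData let
// -session:didUpdateFrame: push pose and projection data to the JS side.
// -drawInMTKView: also forces a draw if no camera frame has been drawn for
// over a second, in case the page was not ready to consume the data.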
271 | drawNextCameraFrame = false; 272 | sendARData = false; 273 | 274 | timeOfLastDrawnCameraFrame = 0; 275 | 276 | jsAnchorIdsToObjCAnchorIds = [[NSMutableDictionary alloc] init]; 277 | objCAnchorIdsToJSAnchorIds = [[NSMutableDictionary alloc] init]; 278 | anchors = [[NSMutableDictionary alloc] init]; 279 | 280 | // Create an ARSession 281 | _session = [ARSession new]; 282 | _session.delegate = self; 283 | 284 | // Set the view to use the default device 285 | mtkView = [[MTKView alloc] initWithFrame:self.view.frame device:MTLCreateSystemDefaultDevice()]; 286 | mtkView.delegate = self; 287 | int mtkViewOffset = 288 | NOTCH_HEIGHT + URL_SAFE_AREA_VERTICAL * 2 + URL_TEXTFIELD_HEIGHT_MINIFIED; 289 | [mtkView setFrame:CGRectMake(0, mtkViewOffset, self.view.frame.size.width, 290 | self.view.frame.size.height - mtkViewOffset)]; 291 | 292 | if (!mtkView.device) { 293 | NSLog(@"Metal is not supported on this device"); 294 | return; 295 | } 296 | [self.view addSubview:mtkView]; 297 | 298 | // Configure the renderer to draw to the view 299 | _renderer = [[Renderer alloc] initWithSession:self.session 300 | metalDevice:mtkView.device 301 | renderDestinationProvider:mtkView]; 302 | [_renderer drawRectResized:mtkView.bounds.size]; 303 | 304 | UITapGestureRecognizer *tapGesture = 305 | [[UITapGestureRecognizer alloc] initWithTarget:self 306 | action:@selector(handleTap:)]; 307 | NSMutableArray *gestureRecognizers = [NSMutableArray array]; 308 | [gestureRecognizers addObject:tapGesture]; 309 | [gestureRecognizers addObjectsFromArray:self.view.gestureRecognizers]; 310 | self.view.gestureRecognizers = gestureRecognizers; 311 | 312 | // Clear the webview completely 313 | // NSSet *websiteDataTypes = [NSSet setWithArray:@[ 314 | // WKWebsiteDataTypeDiskCache, 315 | // //WKWebsiteDataTypeOfflineWebApplicationCache, 316 | // WKWebsiteDataTypeMemoryCache, 317 | // //WKWebsiteDataTypeLocalStorage, 318 | // //WKWebsiteDataTypeCookies, 319 | // //WKWebsiteDataTypeSessionStorage, 320 | // //WKWebsiteDataTypeIndexedDBDatabases, 321 | // //WKWebsiteDataTypeWebSQLDatabases 322 | // ]]; 323 | NSSet *websiteDataTypes = [WKWebsiteDataStore allWebsiteDataTypes]; 324 | NSDate *dateFrom = [NSDate dateWithTimeIntervalSince1970:0]; 325 | [[WKWebsiteDataStore defaultDataStore] removeDataOfTypes:websiteDataTypes 326 | modifiedSince:dateFrom 327 | completionHandler:^{ 328 | }]; 329 | // Make sure that WebARonARKit.js is injected at the beginning of any webpage 330 | // Load the WebARonARKit.js file 331 | NSString *WebARonARKitJSPath = 332 | [[NSBundle mainBundle] pathForResource:@"WebARonARKit" ofType:@"js"]; 333 | NSString *WebARonARKitJSContent = 334 | [NSString stringWithContentsOfFile:WebARonARKitJSPath 335 | encoding:NSUTF8StringEncoding 336 | error:NULL]; 337 | // Setup the script injection 338 | WKUserScript *sendCameraFrameUserScript = [[WKUserScript alloc] 339 | initWithSource: 340 | [NSString stringWithFormat: 341 | @"window.WebARonARKitSendsCameraFrames = %@;", 342 | SEND_CAMERA_FRAME_TO_JS ? 
@"true" : @"false"] 343 | injectionTime:WKUserScriptInjectionTimeAtDocumentStart 344 | forMainFrameOnly:true]; 345 | WKUserScript *WebARonARKitJSUserScript = [[WKUserScript alloc] 346 | initWithSource:WebARonARKitJSContent 347 | injectionTime:WKUserScriptInjectionTimeAtDocumentStart 348 | forMainFrameOnly:true]; 349 | WKUserContentController *userContentController = 350 | [[WKUserContentController alloc] init]; 351 | [userContentController addScriptMessageHandler:self name:@"WebARonARKit"]; 352 | [userContentController addUserScript:sendCameraFrameUserScript]; 353 | [userContentController addUserScript:WebARonARKitJSUserScript]; 354 | WKWebViewConfiguration *wkWebViewConfig = 355 | [[WKWebViewConfiguration alloc] init]; 356 | wkWebViewConfig.userContentController = userContentController; 357 | // Create the WKWebView using the configuration/script injection and add it to 358 | // the top of the view graph 359 | wkWebView = [[WKWebView alloc] initWithFrame:self.view.frame 360 | configuration:wkWebViewConfig]; 361 | wkWebViewOriginalBackgroundColor = [UIColor whiteColor]; 362 | // By default, the camera feed won't be shown until instructed otherwise 363 | [self setShowCameraFeed:NO]; 364 | 365 | // Fixes the webview scaling problem on iPhoneX. 366 | [wkWebView.scrollView 367 | setContentInsetAdjustmentBehavior: 368 | UIScrollViewContentInsetAdjustmentNever]; 369 | 370 | [wkWebView.configuration.preferences setValue:@TRUE 371 | forKey:@"allowFileAccessFromFileURLs"]; 372 | [self setWKWebViewScrollEnabled:true]; 373 | // Needed to show alerts. Check the WKUIDelegate protocol and the 374 | // runJavaScriptAlertPanelWithMessage method in this file :( 375 | wkWebView.UIDelegate = self; 376 | wkWebView.navigationDelegate = self; 377 | [self.view addSubview:wkWebView]; 378 | 379 | [self initNavigation]; 380 | 381 | // Observe the estimatedProgress to update the progress view 382 | [wkWebView addObserver:self 383 | forKeyPath:NSStringFromSelector(@selector(estimatedProgress)) 384 | options:NSKeyValueObservingOptionNew 385 | context:NULL]; 386 | 387 | // Load the default website.
388 | NSString *defaultSite = 389 | @"https://developers.google.com/ar/develop/web/getting-started#examples"; 390 | NSURL *url = [NSURL URLWithString:defaultSite]; 391 | [wkWebView loadRequest:[NSURLRequest requestWithURL:url]]; 392 | [urlTextField setText:url.absoluteString]; 393 | initialPageLoadedWhenTrackingBegins = false; 394 | 395 | [self initOrientationNotifications]; 396 | [self updateOrientation]; 397 | [self updateInterface]; 398 | } 399 | 400 | - (void)initOrientationNotifications { 401 | // Calculate the orientation of the device 402 | UIDevice *device = [UIDevice currentDevice]; 403 | [device beginGeneratingDeviceOrientationNotifications]; 404 | [[NSNotificationCenter defaultCenter] 405 | addObserver:self 406 | selector:@selector(deviceOrientationDidChange:) 407 | name:UIDeviceOrientationDidChangeNotification 408 | object:nil]; 409 | deviceOrientation = [device orientation]; 410 | } 411 | 412 | - (void)initNavigation { 413 | [self initNavigationBacking]; 414 | [self initUrlTextField]; 415 | [self initButtons]; 416 | [self initProgressView]; 417 | } 418 | 419 | - (void)initNavigationBacking { 420 | _navigationBacking = [[NavigationView alloc] init]; 421 | _navigationBacking.backgroundColor = [UIColor whiteColor]; 422 | [self.view addSubview:_navigationBacking]; 423 | } 424 | 425 | - (void)initUrlTextField { 426 | urlTextFieldActive = false; 427 | urlTextField = [[UITextField alloc] init]; 428 | [urlTextField setBackgroundColor:[UIColor clearColor]]; 429 | [urlTextField 430 | setTextColor:[UIColor colorWithRed:0.2f green:0.2f blue:0.2f alpha:1.0]]; 431 | [urlTextField setKeyboardType:UIKeyboardTypeURL]; 432 | [urlTextField setAutocorrectionType:UITextAutocorrectionTypeNo]; 433 | [urlTextField setAutocapitalizationType:UITextAutocapitalizationTypeNone]; 434 | [urlTextField setAdjustsFontSizeToFitWidth:YES]; 435 | 436 | urlTextField.contentVerticalAlignment = 437 | UIControlContentVerticalAlignmentCenter; 438 | urlTextField.textAlignment = NSTextAlignmentCenter; 439 | 440 | [urlTextField setDelegate:self]; 441 | [self.view addSubview:urlTextField]; 442 | } 443 | 444 | - (void)initButtons { 445 | [self initBackButton]; 446 | [self initRefreshButton]; 447 | } 448 | 449 | - (void)initBackButton { 450 | backButton = [[UIButton alloc] init]; 451 | UIImage *backIcon = [UIImage imageNamed:@"BackIcon"]; 452 | [backButton setBackgroundColor:[UIColor clearColor]]; 453 | [backButton setImage:backIcon forState:UIControlStateNormal]; 454 | [backButton addTarget:self 455 | action:@selector(backButtonClicked:) 456 | forControlEvents:UIControlEventTouchDown]; 457 | [self.view addSubview:backButton]; 458 | } 459 | 460 | - (void)initRefreshButton { 461 | refreshButton = [[UIButton alloc] init]; 462 | [refreshButton setBackgroundColor:[UIColor clearColor]]; 463 | UIImage *refreshIcon = [UIImage imageNamed:@"RefreshIcon"]; 464 | [refreshButton setImage:refreshIcon forState:UIControlStateNormal]; 465 | [refreshButton addTarget:self 466 | action:@selector(refreshButtonClicked:) 467 | forControlEvents:UIControlEventTouchDown]; 468 | [self.view addSubview:refreshButton]; 469 | } 470 | 471 | - (void)dealloc { 472 | if ([self isViewLoaded]) { 473 | [wkWebView 474 | removeObserver:self 475 | forKeyPath:NSStringFromSelector(@selector(estimatedProgress))]; 476 | } 477 | 478 | [wkWebView setNavigationDelegate:nil]; 479 | [wkWebView setUIDelegate:nil]; 480 | } 481 | 482 | #pragma mark - Progress View 483 | 484 | - (void)initProgressView { 485 | _progressView = [[ProgressView alloc] init]; 486 | [self.view 
addSubview:_progressView]; 487 | [self setProgressViewColorSuccessful]; 488 | [self startAndShowProgressView]; 489 | } 490 | 491 | - (void)setProgressViewColorSuccessful { 492 | // Material Design Blue 100 #BBDEFB 493 | [_progressView setProgressBackgroundColor:[UIColor colorWithRed:0.7333333333 494 | green:0.8705882353 495 | blue:0.9843137255 496 | alpha:1.0]]; 497 | // Material Design Blue 500 #2196F3 498 | [_progressView setProgressFillColor:[UIColor colorWithRed:0.1294117647 499 | green:0.5882352941 500 | blue:0.9529411765 501 | alpha:1.0]]; 502 | } 503 | 504 | - (void)setProgressViewColorErrored { 505 | // Material Design Red 100 #FFCDD2 506 | [_progressView setProgressFillColor:[UIColor colorWithRed:1.0 507 | green:0.8039215686 508 | blue:0.8235294118 509 | alpha:1.0]]; 510 | // Material Design Red 500 #F44336 511 | [_progressView setProgressFillColor:[UIColor colorWithRed:0.9568627451 512 | green:0.262745098 513 | blue:0.2117647059 514 | alpha:1.0]]; 515 | } 516 | 517 | - (void)startAndShowProgressView { 518 | _progressView.progressValue = 0; 519 | [_progressView setHidden:NO animated:YES completion:nil]; 520 | } 521 | 522 | - (void)completeAndHideProgressViewSuccessful { 523 | [self updateInterface]; 524 | __weak __typeof__(self) weakSelf = self; 525 | [_progressView 526 | setProgressValue:1 527 | animated:YES 528 | completion:^(BOOL finished) { 529 | [weakSelf.progressView setHidden:YES animated:YES completion:nil]; 530 | }]; 531 | } 532 | 533 | - (void)completeAndHideProgressViewErrored:(float)progress { 534 | __weak __typeof__(self) weakSelf = self; 535 | [self.progressView 536 | setProgressValue:progress 537 | animated:YES 538 | completion:^(BOOL finished) { 539 | [weakSelf.progressView setHidden:YES animated:YES completion:nil]; 540 | }]; 541 | } 542 | 543 | #pragma mark - Orientation Change 544 | 545 | - (void)deviceOrientationDidChange:(NSNotification *)notification { 546 | [self updateOrientation]; 547 | [self updateInterface]; 548 | updateWindowSize = true; 549 | } 550 | 551 | - (void)updateOrientation { 552 | deviceOrientation = [[UIDevice currentDevice] orientation]; 553 | switch (deviceOrientation) { 554 | case UIDeviceOrientationPortrait: { 555 | interfaceOrientation = UIInterfaceOrientationPortrait; 556 | } break; 557 | 558 | case UIDeviceOrientationPortraitUpsideDown: { 559 | interfaceOrientation = UIInterfaceOrientationPortraitUpsideDown; 560 | } break; 561 | 562 | case UIDeviceOrientationLandscapeLeft: { 563 | interfaceOrientation = UIInterfaceOrientationLandscapeRight; 564 | } break; 565 | 566 | case UIDeviceOrientationLandscapeRight: { 567 | interfaceOrientation = UIInterfaceOrientationLandscapeLeft; 568 | } break; 569 | 570 | default: 571 | break; 572 | } 573 | [_renderer setInterfaceOrientation:interfaceOrientation]; 574 | } 575 | 576 | - (void)updateInterface { 577 | if (iPhoneXDevice) { 578 | if (UIInterfaceOrientationIsPortrait(interfaceOrientation)) { 579 | [backButton 580 | setFrame:CGRectMake(URL_BUTTON_PADDING, 0, URL_BUTTON_WIDTH_PORTRAIT, 581 | URL_BUTTON_HEIGHT_PORTRAIT)]; 582 | [refreshButton setFrame:CGRectMake(self.view.frame.size.width - 583 | URL_BUTTON_WIDTH_PORTRAIT - 584 | URL_BUTTON_PADDING, 585 | 0, URL_BUTTON_WIDTH_PORTRAIT, 586 | URL_BUTTON_HEIGHT_PORTRAIT)]; 587 | int contentOffset = NOTCH_HEIGHT + URL_SAFE_AREA_VERTICAL * 2 + 588 | URL_TEXTFIELD_HEIGHT_MINIFIED; 589 | CGRect contentRect = CGRectMake(0, contentOffset, self.view.frame.size.width, 590 | self.view.frame.size.height - contentOffset); 591 | [mtkView setFrame:contentRect]; 592 
| [wkWebView setFrame:contentRect]; 593 | 594 | if (urlTextFieldActive) { 595 | [urlTextField setFont:[UIFont systemFontOfSize:17]]; 596 | [urlTextField setFrame:CGRectMake(URL_SAFE_AREA_HORIZONTAL, 597 | NOTCH_HEIGHT + URL_SAFE_AREA_VERTICAL, 598 | self.view.frame.size.width - 599 | URL_SAFE_AREA_HORIZONTAL * 2.0, 600 | URL_TEXTFIELD_HEIGHT_EXPANDED)]; 601 | [_navigationBacking 602 | setFrame:CGRectMake(0, 0, self.view.frame.size.width, 603 | NOTCH_HEIGHT + URL_SAFE_AREA_VERTICAL * 2 + 604 | URL_TEXTFIELD_HEIGHT_EXPANDED)]; 605 | [_progressView 606 | setFrame:CGRectMake(0, 607 | NOTCH_HEIGHT + URL_SAFE_AREA_VERTICAL * 2 + 608 | URL_TEXTFIELD_HEIGHT_EXPANDED - 609 | PROGRESSVIEW_HEIGHT, 610 | self.view.frame.size.width, 611 | PROGRESSVIEW_HEIGHT)]; 612 | } else { 613 | [urlTextField setFont:[UIFont systemFontOfSize:12]]; 614 | [urlTextField setFrame:CGRectMake(URL_SAFE_AREA_HORIZONTAL, 615 | NOTCH_HEIGHT + URL_SAFE_AREA_VERTICAL, 616 | self.view.frame.size.width - 617 | URL_SAFE_AREA_HORIZONTAL * 2.0, 618 | URL_TEXTFIELD_HEIGHT_MINIFIED)]; 619 | [_navigationBacking 620 | setFrame:CGRectMake(0, 0, self.view.frame.size.width, 621 | NOTCH_HEIGHT + URL_SAFE_AREA_VERTICAL * 2 + 622 | URL_TEXTFIELD_HEIGHT_MINIFIED)]; 623 | [_progressView 624 | setFrame:CGRectMake(0, 625 | NOTCH_HEIGHT + URL_SAFE_AREA_VERTICAL * 2 + 626 | URL_TEXTFIELD_HEIGHT_MINIFIED - 627 | PROGRESSVIEW_HEIGHT, 628 | self.view.frame.size.width, 629 | PROGRESSVIEW_HEIGHT)]; 630 | } 631 | } else { 632 | [urlTextField setFont:[UIFont systemFontOfSize:17]]; 633 | [urlTextField 634 | setFrame:CGRectMake( 635 | URL_BUTTON_PADDING + URL_SAFE_AREA_HORIZONTAL, 0, 636 | self.view.frame.size.width - 637 | (URL_BUTTON_PADDING + URL_SAFE_AREA_HORIZONTAL) * 2, 638 | URL_TEXTFIELD_HEIGHT_EXPANDED)]; 639 | 640 | [backButton 641 | setFrame:CGRectMake(URL_BUTTON_PADDING, 0, URL_BUTTON_WIDTH_LANDSCAPE, 642 | URL_BUTTON_HEIGHT_LANDSCAPE)]; 643 | 644 | [refreshButton 645 | setFrame:CGRectMake(self.view.frame.size.width - URL_BUTTON_PADDING - 646 | URL_BUTTON_WIDTH_LANDSCAPE, 647 | 0, URL_BUTTON_WIDTH_LANDSCAPE, 648 | URL_BUTTON_HEIGHT_LANDSCAPE)]; 649 | 650 | int contentOffset = URL_TEXTFIELD_HEIGHT_EXPANDED; 651 | CGRect contentRect = CGRectMake(0, contentOffset, self.view.frame.size.width, 652 | self.view.frame.size.height - contentOffset); 653 | [mtkView setFrame:contentRect]; 654 | [wkWebView setFrame:contentRect]; 655 | 656 | [_navigationBacking setFrame:CGRectMake(0, 0, self.view.frame.size.width, 657 | URL_TEXTFIELD_HEIGHT_EXPANDED)]; 658 | [_progressView 659 | setFrame:CGRectMake( 660 | 0, URL_TEXTFIELD_HEIGHT_EXPANDED - PROGRESSVIEW_HEIGHT, 661 | self.view.frame.size.width, PROGRESSVIEW_HEIGHT)]; 662 | } 663 | } else { 664 | [_navigationBacking setFrame:CGRectMake(0, 0, self.view.frame.size.width, 665 | URL_TEXTFIELD_HEIGHT_EXPANDED)]; 666 | [backButton setFrame:CGRectMake(0, 0, URL_BUTTON_WIDTH_LANDSCAPE, 667 | URL_BUTTON_HEIGHT_LANDSCAPE)]; 668 | [urlTextField setFont:[UIFont systemFontOfSize:17]]; 669 | [urlTextField setFrame:CGRectMake(URL_BUTTON_WIDTH_LANDSCAPE, 0, 670 | self.view.frame.size.width - 671 | URL_BUTTON_WIDTH_LANDSCAPE * 2, 672 | URL_TEXTFIELD_HEIGHT_EXPANDED)]; 673 | [refreshButton 674 | setFrame:CGRectMake( 675 | self.view.frame.size.width - URL_BUTTON_WIDTH_LANDSCAPE, 0, 676 | URL_BUTTON_WIDTH_LANDSCAPE, URL_BUTTON_HEIGHT_LANDSCAPE)]; 677 | 678 | 679 | int contentOffset = URL_TEXTFIELD_HEIGHT_EXPANDED; 680 | CGRect contentRect = CGRectMake(0, contentOffset, self.view.frame.size.width, 681 | self.view.frame.size.height 
- contentOffset); 682 | 683 | [mtkView setFrame:contentRect]; 684 | [wkWebView setFrame:contentRect]; 685 | 686 | [_progressView 687 | setFrame:CGRectMake(0, 688 | URL_TEXTFIELD_HEIGHT_EXPANDED - PROGRESSVIEW_HEIGHT, 689 | self.view.frame.size.width, PROGRESSVIEW_HEIGHT)]; 690 | } 691 | } 692 | 693 | - (void)restartSession { 694 | // Remove all the cached structures. 695 | [anchors removeAllObjects]; 696 | [jsAnchorIdsToObjCAnchorIds removeAllObjects]; 697 | [objCAnchorIdsToJSAnchorIds removeAllObjects]; 698 | ARWorldTrackingConfiguration *configuration = 699 | [ARWorldTrackingConfiguration new]; 700 | configuration.planeDetection = ARPlaneDetectionHorizontal; 701 | [_session runWithConfiguration:configuration 702 | options:ARSessionRunOptionResetTracking]; 703 | } 704 | 705 | - (void)viewWillAppear:(BOOL)animated { 706 | [super viewWillAppear:animated]; 707 | 708 | UIDevice *device = [UIDevice currentDevice]; 709 | if (![device isGeneratingDeviceOrientationNotifications]) { 710 | [device beginGeneratingDeviceOrientationNotifications]; 711 | } 712 | 713 | [self restartSession]; 714 | } 715 | 716 | - (void)viewWillDisappear:(BOOL)animated { 717 | [super viewWillDisappear:animated]; 718 | 719 | UIDevice *device = [UIDevice currentDevice]; 720 | if ([device isGeneratingDeviceOrientationNotifications]) { 721 | [device endGeneratingDeviceOrientationNotifications]; 722 | } 723 | 724 | [_session pause]; 725 | } 726 | 727 | - (void)didReceiveMemoryWarning { 728 | [super didReceiveMemoryWarning]; 729 | // Release any cached data, images, etc that aren't in use. 730 | } 731 | 732 | - (void)handleTap:(UIGestureRecognizer *)gestureRecognize { 733 | ARFrame *currentFrame = [_session currentFrame]; 734 | 735 | // Create anchor using the camera's current position 736 | if (currentFrame) { 737 | // Create a transform with a translation of 0.2 meters in front of the 738 | // camera 739 | matrix_float4x4 translation = matrix_identity_float4x4; 740 | translation.columns[3].z = -0.2; 741 | matrix_float4x4 transform = 742 | matrix_multiply(currentFrame.camera.transform, translation); 743 | 744 | // Add a new anchor to the session 745 | ARAnchor *anchor = [[ARAnchor alloc] initWithTransform:transform]; 746 | [self.session addAnchor:anchor]; 747 | } 748 | } 749 | 750 | #pragma mark - MTKViewDelegate 751 | 752 | // Called whenever view changes orientation or layout is changed 753 | - (void)mtkView:(nonnull MTKView *)view drawableSizeWillChange:(CGSize)size { 754 | [_renderer drawRectResized:view.bounds.size]; 755 | } 756 | 757 | // Called whenever the view needs to render 758 | - (void)drawInMTKView:(nonnull MTKView *)view { 759 | // Calculate the time passed since the last camera frame that was drawn. 760 | CFTimeInterval currentTime = CACurrentMediaTime(); 761 | // If the time of the last drawn camera frame is 0, use the current time. 762 | if (timeOfLastDrawnCameraFrame == 0) { 763 | timeOfLastDrawnCameraFrame = currentTime; 764 | } 765 | CFTimeInterval timeSinceLastDrawnCameraFrame = 766 | currentTime - timeOfLastDrawnCameraFrame; 767 | // If the time passed since the last camera frame was drawn is over a second 768 | // it means that the JS side was not ready to listen to the send AR data event 769 | // and therefore, we need to force a camera frame draw and an AR data send. 770 | if (timeSinceLastDrawnCameraFrame > 1) { 771 | drawNextCameraFrame = true; 772 | } 773 | // Only if the JS side stated that the AR data was used to render the 3D 774 | // scene, we can render a camera frame. 
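// (drawNextCameraFrame is presumably set back to true from the
// WKScriptMessageHandler bridge once the page reports that it has rendered
// with the last data set, or by the one-second watchdog above.)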
775 | if (drawNextCameraFrame) { 776 | if (!SEND_CAMERA_FRAME_TO_JS) { 777 | [_renderer update]; 778 | } 779 | drawNextCameraFrame = false; 780 | // Now that the camera frame has been rendered, the AR data can be sent. 781 | sendARData = true; 782 | // Store the time when the camera frame was drawn just in case... 783 | timeOfLastDrawnCameraFrame = currentTime; 784 | } 785 | } 786 | 787 | #pragma mark - ARSessionDelegate 788 | 789 | - (void)session:(ARSession *)session didFailWithError:(NSError *)error { 790 | // Present an error message to the user 791 | } 792 | 793 | - (void)sessionWasInterrupted:(ARSession *)session { 794 | // Inform the user that the session has been interrupted, for example, by 795 | // presenting an overlay 796 | } 797 | 798 | - (void)sessionInterruptionEnded:(ARSession *)session { 799 | // Reset tracking and/or remove existing anchors if consistent tracking is 800 | // required 801 | } 802 | 803 | - (NSString *)getPlanesString:(nonnull NSArray *)anchors { 804 | // Return nil if no planes are among the anchors 805 | NSString *result = nil; 806 | for (int i = 0; i < anchors.count; i++) { 807 | if (![anchors[i] isKindOfClass:[ARPlaneAnchor class]]) { 808 | // We only want anchors of type plane. 809 | continue; 810 | } 811 | // Now that we know that there is at least one plane among the anchors, 812 | // create the returning string. 813 | if (result == nil) { 814 | result = @"["; 815 | } 816 | ARPlaneAnchor *plane = (ARPlaneAnchor *)anchors[i]; 817 | matrix_float4x4 planeTransform = plane.transform; 818 | const float *planeMatrix = (const float *)(&planeTransform); 819 | NSString *planeStr = [NSString 820 | stringWithFormat: 821 | @"{\"modelMatrix\":[%f,%f,%f,%f,%f,%f,%f,%f," 822 | @"%f,%f,%f,%f,%f,%f,%f,%f]," 823 | @"\"identifier\":%i," 824 | @"\"extent\":[%f,%f]," 825 | @"\"vertices\":[%f,%f,%f,%f,%f,%f,%f,%f,%f,%f,%f,%f]}", 826 | planeMatrix[0], planeMatrix[1], planeMatrix[2], planeMatrix[3], 827 | planeMatrix[4], planeMatrix[5], planeMatrix[6], planeMatrix[7], 828 | planeMatrix[8], planeMatrix[9], planeMatrix[10], planeMatrix[11], 829 | planeMatrix[12] + plane.center.x, planeMatrix[13] + plane.center.y, 830 | planeMatrix[14] + plane.center.z, planeMatrix[15], 831 | (int)plane.identifier, plane.extent.x, plane.extent.z, 832 | plane.extent.x / 2, 0.0, plane.extent.z / 2, -plane.extent.x / 2, 833 | 0.0, plane.extent.z / 2, -plane.extent.x / 2, 0.0, 834 | -plane.extent.z / 2, plane.extent.x / 2, 0.0, -plane.extent.z / 2]; 835 | planeStr = [planeStr stringByAppendingString:@","]; 836 | result = [result stringByAppendingString:planeStr]; 837 | } 838 | // Remove the last coma if there is any string 839 | if (result != nil) { 840 | result = [result substringToIndex:result.length - 1]; 841 | result = [result stringByAppendingString:@"]"]; 842 | } 843 | return result; 844 | } 845 | 846 | - (NSString *)getAnchorsString:(nonnull NSArray *)anchors { 847 | NSString *result = nil; 848 | for (int i = 0; i < anchors.count; i++) { 849 | if ([anchors[i] isKindOfClass:[ARPlaneAnchor class]] || 850 | [anchors[i] isKindOfClass:[ARFaceAnchor class]]) { 851 | // We do not want Plane or Face anchors. 
852 | continue; 853 | } 854 | if (result == nil) { 855 | result = @"["; 856 | } 857 | ARAnchor *anchor = (ARAnchor *)anchors[i]; 858 | matrix_float4x4 anchorTransform = anchor.transform; 859 | const float *anchorMatrix = (const float *)(&anchorTransform); 860 | NSString *jsAnchorId = 861 | objCAnchorIdsToJSAnchorIds[anchor.identifier.UUIDString]; 862 | NSString *anchorStr = [NSString 863 | stringWithFormat: 864 | @"{\"modelMatrix\":[%f,%f,%f,%f,%f,%f,%f,%f," 865 | @"%f,%f,%f,%f,%f,%f,%f,%f]," 866 | @"\"identifier\":%@}", 867 | anchorMatrix[0], anchorMatrix[1], anchorMatrix[2], anchorMatrix[3], 868 | anchorMatrix[4], anchorMatrix[5], anchorMatrix[6], anchorMatrix[7], 869 | anchorMatrix[8], anchorMatrix[9], anchorMatrix[10], 870 | anchorMatrix[11], anchorMatrix[12], anchorMatrix[13], 871 | anchorMatrix[14], anchorMatrix[15], jsAnchorId]; 872 | anchorStr = [anchorStr stringByAppendingString:@","]; 873 | result = [result stringByAppendingString:anchorStr]; 874 | } 875 | // Remove the last comma if there is any string 876 | if (result != nil) { 877 | result = [result substringToIndex:result.length - 1]; 878 | result = [result stringByAppendingString:@"]"]; 879 | } 880 | return result; 881 | } 882 | 883 | - (void)dispatchVRDisplayEvent:(NSString *)type 884 | dataName:(NSString *)dataName 885 | dataString:(NSString *)dataString { 886 | NSString *jsCode = 887 | [NSString stringWithFormat: 888 | @"if (window.WebARonARKitDispatchARDisplayEvent) " 889 | @"window.WebARonARKitDispatchARDisplayEvent({" 890 | @"\"type\":\"%@\"," 891 | @"\"%@\":%@" 892 | @"});", 893 | type, dataName, dataString]; 894 | 895 | [wkWebView 896 | evaluateJavaScript:jsCode 897 | completionHandler:^(id data, NSError *error) { 898 | if (error) { 899 | [self showAlertDialog: 900 | [NSString stringWithFormat:@"ERROR: Evaluating jscode: %@", 901 | error] 902 | completionHandler:^{ 903 | }]; 904 | } 905 | }]; 906 | } 907 | 908 | - (void)session:(ARSession *)session 909 | didAddAnchors:(nonnull NSArray *)anchors { 910 | NSString *planesString = [self getPlanesString:anchors]; 911 | if (planesString) { 912 | [self dispatchVRDisplayEvent:@"planesadded" 913 | dataName:@"planes" 914 | dataString:planesString]; 915 | } 916 | } 917 | 918 | - (void)session:(ARSession *)session 919 | didUpdateAnchors:(nonnull NSArray *)anchors { 920 | // TODO(@ijamardo): Instead of iterating over the anchors collection several 921 | // times for different types, merge them into one function. 922 | NSString *planesString = [self getPlanesString:anchors]; 923 | if (planesString) { 924 | [self dispatchVRDisplayEvent:@"planesupdated" 925 | dataName:@"planes" 926 | dataString:planesString]; 927 | } 928 | NSString *anchorsString = [self getAnchorsString:anchors]; 929 | if (anchorsString) { 930 | [self dispatchVRDisplayEvent:@"anchorsupdated" 931 | dataName:@"anchors" 932 | dataString:anchorsString]; 933 | } 934 | 935 | // TODO: As we are not able to get an update on the anchors this code forces 936 | // a call to the anchorsUpdated event dispatching for testing purposes.
937 | // if (anchorsString == nil && self->anchors.count > 0) { 938 | // anchorsString = [self getAnchorsString:self->anchors.allValues]; 939 | // [self dispatchVRDisplayEvent:@"anchorsupdated" 940 | // dataName:@"anchors" dataString:anchorsString]; 941 | // } 942 | } 943 | 944 | - (void)session:(ARSession *)session 945 | didRemoveAnchors:(nonnull NSArray *)anchors { 946 | NSString *planesString = [self getPlanesString:anchors]; 947 | if (planesString) { 948 | [self dispatchVRDisplayEvent:@"planesremoved" 949 | dataName:@"planes" 950 | dataString:planesString]; 951 | } 952 | } 953 | 954 | - (void)session:(ARSession *)session didUpdateFrame:(ARFrame *)frame { 955 | // Do not send AR data until a camera frame has been rendered. 956 | if (!sendARData) { 957 | return; 958 | } 959 | 960 | // If the window size has changed, notify the JS side about it. 961 | // This is a hack due to the WKWebView not handling the 962 | // window.innerWidth/Height 963 | // correctly in the window.onresize events. 964 | // TODO: Remove this hack once the WKWebView has fixed the issue. 965 | 966 | // Send the per frame data needed in the JS side 967 | matrix_float4x4 viewMatrix = 968 | [frame.camera viewMatrixForOrientation:interfaceOrientation]; 969 | matrix_float4x4 modelMatrix = matrix_invert(viewMatrix); 970 | matrix_float4x4 projectionMatrix = [frame.camera 971 | projectionMatrixForOrientation:interfaceOrientation 972 | viewportSize:CGSizeMake(wkWebView.frame.size.width, 973 | wkWebView.frame.size.height) 974 | zNear:near 975 | zFar:far]; 976 | 977 | const float *pModelMatrix = (const float *)(&modelMatrix); 978 | const float *pViewMatrix = (const float *)(&viewMatrix); 979 | const float *pProjectionMatrix = (const float *)(&projectionMatrix); 980 | 981 | simd_quatf orientationQuat = simd_quaternion(modelMatrix); 982 | const float *pOrientationQuat = (const float *)(&orientationQuat); 983 | float position[3]; 984 | position[0] = pModelMatrix[12]; 985 | position[1] = pModelMatrix[13]; 986 | position[2] = pModelMatrix[14]; 987 | 988 | // Get the camera frame in base 64. 
989 | NSString* base64ImageString = @""; 990 | if (SEND_CAMERA_FRAME_TO_JS) { 991 | base64ImageString = [self getBase64ImageFromPixelBuffer: 992 | frame.capturedImage]; 993 | if (!base64ImageString) { 994 | base64ImageString = @""; 995 | } else { 996 | base64ImageString = [NSString stringWithFormat: 997 | @"data:image/jpg;base64, %@", base64ImageString]; 998 | } 999 | } 1000 | 1001 | // Create a NSDictionary that will be parsed as a json and then passed to the JS side 1002 | NSDictionary* jsonDictionary = @{ 1003 | @"position": 1004 | @[FBOX(position[0]), FBOX(position[1]), FBOX(position[2])], 1005 | @"orientation": 1006 | @[FBOX(pOrientationQuat[0]), FBOX(pOrientationQuat[1]), 1007 | FBOX(pOrientationQuat[2]), FBOX(pOrientationQuat[3])], 1008 | @"viewMatrix": 1009 | @[FBOX(pViewMatrix[0]), FBOX(pViewMatrix[1]), 1010 | FBOX(pViewMatrix[2]), FBOX(pViewMatrix[3]), 1011 | FBOX(pViewMatrix[4]), FBOX(pViewMatrix[5]), 1012 | FBOX(pViewMatrix[6]), FBOX(pViewMatrix[7]), 1013 | FBOX(pViewMatrix[8]), FBOX(pViewMatrix[9]), 1014 | FBOX(pViewMatrix[10]), FBOX(pViewMatrix[11]), 1015 | FBOX(pViewMatrix[12]), FBOX(pViewMatrix[13]), 1016 | FBOX(pViewMatrix[14]), FBOX(pViewMatrix[15])], 1017 | @"projectionMatrix": 1018 | @[FBOX(pProjectionMatrix[0]), 1019 | FBOX(pProjectionMatrix[1]), 1020 | FBOX(pProjectionMatrix[2]), 1021 | FBOX(pProjectionMatrix[3]), 1022 | FBOX(pProjectionMatrix[4]), 1023 | FBOX(pProjectionMatrix[5]), 1024 | FBOX(pProjectionMatrix[6]), 1025 | FBOX(pProjectionMatrix[7]), 1026 | FBOX(pProjectionMatrix[8]), 1027 | FBOX(pProjectionMatrix[9]), 1028 | FBOX(pProjectionMatrix[10]), 1029 | FBOX(pProjectionMatrix[11]), 1030 | FBOX(pProjectionMatrix[12]), 1031 | FBOX(pProjectionMatrix[13]), 1032 | FBOX(pProjectionMatrix[14]), 1033 | FBOX(pProjectionMatrix[15])] 1034 | ,@"cameraFrame":base64ImageString 1035 | }; 1036 | // Pass the dictionary to JSON and back to a string. 
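// For reference, the JSON handed to window.WebARonARKitSetData below ends up
// with roughly this shape (the values here are illustrative only):
// {
//   "position": [x, y, z],
//   "orientation": [qx, qy, qz, qw],
//   "viewMatrix": [ /* 16 floats */ ],
//   "projectionMatrix": [ /* 16 floats */ ],
//   "cameraFrame": ""  // or "data:image/jpg;base64, ..." when SEND_CAMERA_FRAME_TO_JS is true
// }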
1037 | NSError* error; 1038 | NSData* jsonData = 1039 | [NSJSONSerialization dataWithJSONObject:jsonDictionary 1040 | options:NSJSONWritingPrettyPrinted error:&error]; 1041 | NSString* jsonString = [[NSString alloc] initWithData:jsonData 1042 | encoding:NSUTF8StringEncoding]; 1043 | 1044 | // NSLog(@"jsonString = %@", jsonString); 1045 | 1046 | // This will be the final JS code to evaluate 1047 | NSString* jsCode = 1048 | [NSString stringWithFormat: 1049 | @"if (window.WebARonARKitSetData) " 1050 | @"window.WebARonARKitSetData(%@)", 1051 | jsonString]; 1052 | 1053 | // Execute the JS code 1054 | [wkWebView 1055 | evaluateJavaScript:jsCode 1056 | completionHandler:^(id data, NSError *error) { 1057 | if (error) { 1058 | [self showAlertDialog: 1059 | [NSString stringWithFormat:@"ERROR: Evaluating jscode: %@", error] 1060 | completionHandler:^{ }]; 1061 | } 1062 | }]; 1063 | 1064 | // This needs to be called after because the window size will affect the 1065 | // projection matrix calculation upon resize 1066 | if (updateWindowSize) { 1067 | int width = wkWebView.frame.size.width; 1068 | int height = wkWebView.frame.size.height; 1069 | NSString *updateWindowSizeJsCode = 1070 | [NSString stringWithFormat: @"if(window.WebARonARKitSetWindowSize)" 1071 | @"WebARonARKitSetWindowSize({\"width\":%i,\"height\":%i});", 1072 | width, height]; 1073 | [wkWebView 1074 | evaluateJavaScript:updateWindowSizeJsCode 1075 | completionHandler:^(id data, NSError *error) { 1076 | if (error) { 1077 | [self showAlertDialog:[NSString 1078 | stringWithFormat:@"ERROR: Evaluating " 1079 | @"jscode to provide " 1080 | @"window size: %@", 1081 | error] 1082 | completionHandler:^{ 1083 | }]; 1084 | } 1085 | }]; 1086 | updateWindowSize = false; 1087 | } 1088 | 1089 | sendARData = false; 1090 | } 1091 | 1092 | #pragma mark - WK Estimated Progress 1093 | 1094 | - (void)observeValueForKeyPath:(NSString *)keyPath 1095 | ofObject:(id)object 1096 | change:(NSDictionary *)change 1097 | context:(void *)context { 1098 | if ([keyPath 1099 | isEqualToString:NSStringFromSelector(@selector(estimatedProgress))] && 1100 | object == self->wkWebView) { 1101 | if (_webviewNavigationSuccess) { 1102 | [_progressView setProgressValue:wkWebView.estimatedProgress]; 1103 | } 1104 | } else { 1105 | [super observeValueForKeyPath:keyPath 1106 | ofObject:object 1107 | change:change 1108 | context:context]; 1109 | } 1110 | } 1111 | 1112 | #pragma mark - WKUIDelegate 1113 | 1114 | - (void)webView:(WKWebView *)webView 1115 | runJavaScriptAlertPanelWithMessage:(NSString *)message 1116 | initiatedByFrame:(WKFrameInfo *)frame 1117 | completionHandler:(void (^)(void))completionHandler { 1118 | [self showAlertDialog:message completionHandler:completionHandler]; 1119 | } 1120 | 1121 | #pragma mark - WKNavigationDelegate 1122 | 1123 | - (void)webView:(WKWebView *)webView 1124 | didStartProvisionalNavigation:(null_unspecified WKNavigation *)navigation { 1125 | [self restartSession]; 1126 | [self setShowCameraFeed:NO]; 1127 | [self startAndShowProgressView]; 1128 | [self setProgressViewColorSuccessful]; 1129 | _webviewNavigationSuccess = true; 1130 | } 1131 | 1132 | - (void)webView:(WKWebView *)webView 1133 | didFinishNavigation:(WKNavigation *)navigation { 1134 | [self restartSession]; 1135 | // By default, when a page is loaded, the camera feed should not be shown. 
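// Remember the loaded URL (both the typed text and the URL resolved by the
// web view) so that cameraDidChangeTrackingState can restore it on the next
// launch via getURLFromUserDefaults.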
1136 | if (initialPageLoadedWhenTrackingBegins) { 1137 | [self storeURLInUserDefaults:urlTextField.text]; 1138 | } 1139 | [urlTextField setText:[[wkWebView URL] absoluteString]]; 1140 | if (initialPageLoadedWhenTrackingBegins) { 1141 | [self storeURLInUserDefaults:[[wkWebView URL] absoluteString]]; 1142 | } 1143 | [self setProgressViewColorSuccessful]; 1144 | // By default, when a page is loaded, the camera feed should not be shown. 1145 | [self completeAndHideProgressViewSuccessful]; 1146 | } 1147 | 1148 | - (void)webView:(WKWebView *)webView 1149 | didFailNavigation:(WKNavigation *)navigation 1150 | withError:(NSError *)error { 1151 | _webviewNavigationSuccess = false; 1152 | if (error.code != -999) { 1153 | [self showAlertDialog:error.localizedDescription completionHandler:nil]; 1154 | NSLog(@"ERROR: webview didFailNavigation with error '%@'", error); 1155 | } 1156 | [self setProgressViewColorErrored]; 1157 | [self completeAndHideProgressViewErrored:wkWebView.estimatedProgress]; 1158 | } 1159 | 1160 | - (void)webViewDidFinishLoad:(UIWebView *)webView { 1161 | [self restartSession]; 1162 | [self completeAndHideProgressViewSuccessful]; 1163 | } 1164 | 1165 | - (void)webView:(WKWebView *)webView 1166 | didFailProvisionalNavigation:(WKNavigation *)navigation 1167 | withError:(NSError *)error { 1168 | _webviewNavigationSuccess = false; 1169 | if (error.code != -999) { 1170 | [self showAlertDialog:error.localizedDescription completionHandler:nil]; 1171 | NSLog(@"ERROR: webview didFailProvisionalNavigation with error '%@'", 1172 | error); 1173 | } 1174 | [self setProgressViewColorErrored]; 1175 | [self completeAndHideProgressViewErrored:wkWebView.estimatedProgress]; 1176 | } 1177 | 1178 | #pragma mark - UITextFieldDelegate 1179 | 1180 | - (void)textFieldDidBeginEditing:(UITextField *)textField { 1181 | urlTextFieldActive = YES; 1182 | [urlTextField 1183 | setSelectedTextRange:[urlTextField 1184 | textRangeFromPosition:urlTextField 1185 | .beginningOfDocument 1186 | toPosition:urlTextField 1187 | .endOfDocument]]; 1188 | if (iPhoneXDevice) { 1189 | if (UIInterfaceOrientationIsPortrait(interfaceOrientation)) { 1190 | [urlTextField setFont:[UIFont systemFontOfSize:17]]; 1191 | [urlTextField setFrame:CGRectMake(URL_SAFE_AREA_HORIZONTAL, 1192 | NOTCH_HEIGHT + URL_SAFE_AREA_VERTICAL, 1193 | self.view.frame.size.width - 1194 | URL_SAFE_AREA_HORIZONTAL * 2, 1195 | URL_TEXTFIELD_HEIGHT_EXPANDED)]; 1196 | 1197 | [_navigationBacking 1198 | setFrame:CGRectMake(0, 0, self.view.frame.size.width, 1199 | NOTCH_HEIGHT + URL_SAFE_AREA_VERTICAL * 2 + 1200 | URL_TEXTFIELD_HEIGHT_EXPANDED)]; 1201 | 1202 | [_progressView 1203 | setFrame:CGRectMake(0, 1204 | NOTCH_HEIGHT + URL_SAFE_AREA_VERTICAL * 2 + 1205 | URL_TEXTFIELD_HEIGHT_EXPANDED - 1206 | PROGRESSVIEW_HEIGHT, 1207 | self.view.frame.size.width, PROGRESSVIEW_HEIGHT)]; 1208 | } 1209 | } 1210 | } 1211 | 1212 | - (void)textFieldDidEndEditing:(UITextField *)textField { 1213 | urlTextFieldActive = NO; 1214 | if (iPhoneXDevice) { 1215 | if (UIInterfaceOrientationIsPortrait(interfaceOrientation)) { 1216 | [urlTextField setFont:[UIFont systemFontOfSize:12]]; 1217 | [urlTextField setFrame:CGRectMake(URL_SAFE_AREA_HORIZONTAL, 1218 | NOTCH_HEIGHT + URL_SAFE_AREA_VERTICAL, 1219 | self.view.frame.size.width - 1220 | URL_SAFE_AREA_HORIZONTAL * 2.0, 1221 | URL_TEXTFIELD_HEIGHT_MINIFIED)]; 1222 | 1223 | [_navigationBacking 1224 | setFrame:CGRectMake(0, 0, self.view.frame.size.width, 1225 | NOTCH_HEIGHT + URL_SAFE_AREA_VERTICAL * 2 + 1226 | URL_TEXTFIELD_HEIGHT_MINIFIED)]; 
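// Keep the progress bar aligned with the bottom edge of the (now shorter)
// navigation backing.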
1227 | 
1228 | [_progressView
1229 | setFrame:CGRectMake(0,
1230 | NOTCH_HEIGHT + URL_SAFE_AREA_VERTICAL * 2 +
1231 | URL_TEXTFIELD_HEIGHT_MINIFIED -
1232 | PROGRESSVIEW_HEIGHT,
1233 | self.view.frame.size.width, PROGRESSVIEW_HEIGHT)];
1234 | } else {
1235 | [urlTextField setFont:[UIFont systemFontOfSize:17]];
1236 | }
1237 | }
1238 | }
1239 | 
1240 | - (BOOL)textFieldShouldReturn:(UITextField *)textField {
1241 | BOOL result = NO;
1242 | NSString *urlString = urlTextField.text;
1243 | if (![self loadURLInWKWebView:urlString]) {
1244 | [self showAlertDialog:@"The URL is not valid." completionHandler:NULL];
1245 | } else {
1246 | [self storeURLInUserDefaults:urlString];
1247 | [urlTextField resignFirstResponder];
1248 | result = YES;
1249 | }
1250 | return result;
1251 | }
1252 | 
1253 | #pragma mark - ARSessionObserver
1254 | 
1255 | - (void)session:(ARSession *)session
1256 | cameraDidChangeTrackingState:(ARCamera *)camera {
1257 | NSString *trackingStateString = nil;
1258 | if (camera.trackingState == ARTrackingStateNotAvailable) {
1259 | trackingStateString = @"Not Available";
1260 | } else if (camera.trackingState == ARTrackingStateLimited) {
1261 | trackingStateString = @"Limited";
1262 | } else if (camera.trackingState == ARTrackingStateNormal) {
1263 | trackingStateString = @"Normal";
1264 | }
1265 | NSString *trackingStateReasonString = nil;
1266 | if (camera.trackingStateReason == ARTrackingStateReasonExcessiveMotion) {
1267 | trackingStateReasonString = @"Excessive Motion";
1268 | } else if (camera.trackingStateReason ==
1269 | ARTrackingStateReasonInsufficientFeatures) {
1270 | trackingStateReasonString = @"Insufficient Features";
1271 | }
1272 | NSLog(@"AR camera tracking state = %@%@", trackingStateString,
1273 | (trackingStateReasonString != nil ? trackingStateReasonString : @""));
1274 | 
1275 | // Load the initial page only the first time the tracking state is something
1276 | // other than unavailable.
1277 | if (camera.trackingState != ARTrackingStateNotAvailable &&
1278 | !initialPageLoadedWhenTrackingBegins) {
1279 | // Restore a URL from a previous execution and load it.
1280 | NSString *urlString = [self getURLFromUserDefaults];
1281 | if (urlString) {
1282 | // As the code below does not allow invalid URLs to be stored, we will assume
1283 | // that the URL is
1284 | // correct.
1285 | if (![self loadURLInWKWebView:urlString]) {
1286 | [self showAlertDialog:@"The URL is not valid." completionHandler:NULL];
1287 | }
1288 | urlTextField.text = urlString;
1289 | }
1290 | initialPageLoadedWhenTrackingBegins = true;
1291 | }
1292 | }
1293 | 
1294 | #pragma mark - WKScriptMessageHandler
1295 | 
1296 | - (void)userContentController:(WKUserContentController *)userContentController
1297 | didReceiveScriptMessage:(WKScriptMessage *)message {
1298 | NSString *messageString = message.body;
1299 | NSArray *values = [messageString componentsSeparatedByString:@":"];
1300 | if ([values count] > 1) {
1301 | NSString *method = values[0];
1302 | NSArray *params = [values[1] componentsSeparatedByString:@","];
1303 | if ([method isEqualToString:@"setDepthNear"]) {
1304 | near = [params[0] floatValue];
1305 | } else if ([method isEqualToString:@"setDepthFar"]) {
1306 | far = [params[0] floatValue];
1307 | } else if ([method isEqualToString:@"log"]) {
1308 | // As a log command can have colons in its content, just get rid of the
1309 | // 'log:' string and show the rest.
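// messageString has the form "log:<content>", so skip the 4-character "log:"
// prefix and log the rest verbatim.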
1310 | NSRange range = NSMakeRange(4, messageString.length - 4);
1311 | NSLog(@"%@", [message.body substringWithRange:range]);
1312 | } else if ([method isEqualToString:@"resetPose"]) {
1313 | [self restartSession];
1314 | } else if ([method isEqualToString:@"showCameraFeed"]) {
1315 | [self setShowCameraFeed:true];
1316 | } else if ([method isEqualToString:@"hideCameraFeed"]) {
1317 | [self setShowCameraFeed:false];
1318 | } else if ([method isEqualToString:@"addAnchor"]) {
1319 | // Construct the ARAnchor with the matrix provided from the JS side.
1320 | NSString *jsAnchorId = params[0];
1321 | matrix_float4x4 modelMatrix;
1322 | float *pModelMatrix = (float *)(&modelMatrix);
1323 | for (int i = 0; i < 16; i++) {
1324 | pModelMatrix[i] = [params[i + 1] floatValue];
1325 | }
1326 | ARAnchor *anchor = [[ARAnchor alloc] initWithTransform:modelMatrix];
1327 | [_session addAnchor:anchor];
1328 | // Create an entry to convert from the JS id to the Objective-C id (and
1329 | // vice versa).
1330 | [jsAnchorIdsToObjCAnchorIds setValue:anchor.identifier.UUIDString
1331 | forKey:jsAnchorId];
1332 | [objCAnchorIdsToJSAnchorIds setValue:jsAnchorId
1333 | forKey:anchor.identifier.UUIDString];
1334 | // Store the anchor.
1335 | [anchors setValue:anchor forKey:jsAnchorId];
1336 | } else if ([method isEqualToString:@"removeAnchor"]) {
1337 | // Retrieve the ARAnchor from the jsAnchorId and remove it from the
1338 | // session. Also remove the id mappings and the anchor
1339 | // from the anchors container.
1340 | NSString *jsAnchorId = params[0];
1341 | ARAnchor *anchor = anchors[jsAnchorId];
1342 | NSString *objCAnchorId = anchor.identifier.UUIDString;
1343 | [jsAnchorIdsToObjCAnchorIds removeObjectForKey:jsAnchorId];
1344 | [objCAnchorIdsToJSAnchorIds removeObjectForKey:objCAnchorId];
1345 | [anchors removeObjectForKey:jsAnchorId];
1346 | 
1347 | [_session removeAnchor:anchor];
1348 | } else if ([method isEqualToString:@"advanceFrame"]) {
1349 | // The JS side has consumed the AR data, so a new camera frame can be
1350 | // rendered now.
1351 | drawNextCameraFrame = true;
1352 | } else {
1353 | NSLog(@"WARNING: Unknown message received: '%@'", method);
1354 | }
1355 | }
1356 | }
1357 | 
1358 | /*
1359 | This code is inspired by the open source project:
1360 | https://github.com/Stinkstudios/arkit-web
1361 | Kudos to: Amelie (@ixviii_io)
1362 | */
1363 | -(NSString*)getBase64ImageFromPixelBuffer:(CVPixelBufferRef)pixelBuffer {
1364 | // The CIContext used to create the CGImage; create it lazily and reuse it.
1365 | static CIContext* ciContext = nil;
1366 | if (!ciContext) { ciContext = [CIContext contextWithOptions:nil]; }
1367 | // Convert the pixel buffer to a CIImage.
1368 | CIImage* ciImage = [CIImage imageWithCVPixelBuffer:pixelBuffer];
1369 | // Apply a scaling transformation to the CIImage and get a new one.
1370 | CGAffineTransform scaleTransform =
1371 | CGAffineTransformScale(CGAffineTransformIdentity,
1372 | CAMERA_FRAME_SCALE_FACTOR, CAMERA_FRAME_SCALE_FACTOR);
1373 | CIImage* resizedCIImage = [ciImage imageByApplyingTransform:scaleTransform];
1374 | // Create a CGImage from the CIImage.
1375 | CGImageRef cgImage = [ciContext createCGImage:resizedCIImage
1376 | fromRect:resizedCIImage.extent];
1377 | if (cgImage) {
1378 | // Create a UIImage from the CGImage.
1379 | UIImage* uiImage = [UIImage imageWithCGImage:cgImage];
1380 | // IMPORTANT: CG structures are not handled by ARC.
1381 | // Release the CG image now that we have a corresponding UIImage.
1382 | CGImageRelease(cgImage);
1383 | // Compress the image as JPEG.
1384 | NSData* jpegImageData =
1385 | UIImageJPEGRepresentation(uiImage, CAMERA_FRAME_JPEG_COMPRESSION_FACTOR);
1386 | if (jpegImageData) {
1387 | // Transform the JPEG data into base64 format so it can be
1388 | // passed to the JS side as a string.
1389 | return [jpegImageData base64EncodedStringWithOptions:
1390 | NSDataBase64Encoding64CharacterLineLength];
1391 | }
1392 | }
1393 | return nil;
1394 | }
1395 | 
1396 | @end
1397 | 
--------------------------------------------------------------------------------
/WebARonARKit/WebARonARKit/main.m:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2017 Google Inc. All Rights Reserved.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 | 
17 | #import <UIKit/UIKit.h>
18 | #import "AppDelegate.h"
19 | 
20 | int main(int argc, char *argv[]) {
21 | @autoreleasepool {
22 | return UIApplicationMain(argc, argv, nil, NSStringFromClass([AppDelegate class]));
23 | }
24 | }
25 | 
26 | 
--------------------------------------------------------------------------------