├── .gitignore
├── LICENSE
├── Models
│   ├── mscoco_label_map.txt
│   ├── ssd_mobilenet_v1_ppn_shared_box_predictor_300x300_coco14_sync_2018_07_03
│   │   └── frozen_inference_graph.pb
│   └── ssdlite_mobilenet_v2_coco_2018_05_09
│       └── frozen_inference_graph.pb
├── README.md
├── images
│   └── cat.png
├── objC
│   ├── App
│   │   ├── AppDelegate.h
│   │   ├── AppDelegate.m
│   │   ├── BoundingBoxView.h
│   │   ├── BoundingBoxView.m
│   │   ├── CameraPreviewView.h
│   │   ├── CameraPreviewView.m
│   │   ├── ViewController.h
│   │   └── ViewController.mm
│   ├── Assets
│   │   ├── Assets.xcassets
│   │   │   ├── AppIcon.appiconset
│   │   │   │   ├── Contents.json
│   │   │   │   ├── tensorflowiOS_Icon_1024x1024-1.png
│   │   │   │   ├── tensorflowiOS_Icon_29x29-1.png
│   │   │   │   ├── tensorflowiOS_Icon_29x29.png
│   │   │   │   ├── tensorflowiOS_Icon_29x29@2x-1.png
│   │   │   │   ├── tensorflowiOS_Icon_29x29@2x.png
│   │   │   │   ├── tensorflowiOS_Icon_29x29@3x.png
│   │   │   │   ├── tensorflowiOS_Icon_40x40.png
│   │   │   │   ├── tensorflowiOS_Icon_40x40@2x-1.png
│   │   │   │   ├── tensorflowiOS_Icon_40x40@2x.png
│   │   │   │   ├── tensorflowiOS_Icon_40x40@3x.png
│   │   │   │   ├── tensorflowiOS_Icon_50x50.png
│   │   │   │   ├── tensorflowiOS_Icon_50x50@2x.png
│   │   │   │   ├── tensorflowiOS_Icon_57x57.png
│   │   │   │   ├── tensorflowiOS_Icon_57x57@2x.png
│   │   │   │   ├── tensorflowiOS_Icon_60x60@2x.png
│   │   │   │   ├── tensorflowiOS_Icon_60x60@3x.png
│   │   │   │   ├── tensorflowiOS_Icon_72x72.png
│   │   │   │   ├── tensorflowiOS_Icon_72x72@2x.png
│   │   │   │   ├── tensorflowiOS_Icon_76x76.png
│   │   │   │   ├── tensorflowiOS_Icon_76x76@2x.png
│   │   │   │   └── tensorflowiOS_Icon_83.5x83.5.png
│   │   │   └── Contents.json
│   │   ├── Base.lproj
│   │   │   └── LaunchScreen.storyboard
│   │   ├── Default-568h@2x.png
│   │   └── Main.storyboard
│   ├── SupportingFiles
│   │   ├── BuildPhase.sh
│   │   ├── Info.plist
│   │   ├── main.m
│   │   └── tensorflow.xcconfig
│   ├── Tensorflow
│   │   ├── TensorflowGraph.h
│   │   ├── TensorflowGraph.mm
│   │   ├── TensorflowPrediction.h
│   │   ├── TensorflowPrediction.m
│   │   ├── TensorflowUtils.mm
│   │   ├── constants.h
│   │   ├── constants.m
│   │   ├── ops_to_register.h
│   │   └── tensorflowUtils.h
│   └── tensorflowiOS.xcodeproj
│       ├── project.pbxproj
│       └── project.xcworkspace
│           └── contents.xcworkspacedata
└── swift
    ├── App
    │   ├── AppDelegate.swift
    │   ├── BoundingBoxView.swift
    │   ├── CameraPreviewView.swift
    │   └── ViewController.swift
    ├── Assets
    │   ├── Assets.xcassets
    │   │   ├── AppIcon.appiconset
    │   │   │   ├── Contents.json
    │   │   │   ├── tensorflowiOS_Icon_1024x1024-1.png
    │   │   │   ├── tensorflowiOS_Icon_29x29-1.png
    │   │   │   ├── tensorflowiOS_Icon_29x29.png
    │   │   │   ├── tensorflowiOS_Icon_29x29@2x-1.png
    │   │   │   ├── tensorflowiOS_Icon_29x29@2x.png
    │   │   │   ├── tensorflowiOS_Icon_29x29@3x.png
    │   │   │   ├── tensorflowiOS_Icon_40x40.png
    │   │   │   ├── tensorflowiOS_Icon_40x40@2x-1.png
    │   │   │   ├── tensorflowiOS_Icon_40x40@2x.png
    │   │   │   ├── tensorflowiOS_Icon_40x40@3x.png
    │   │   │   ├── tensorflowiOS_Icon_50x50.png
    │   │   │   ├── tensorflowiOS_Icon_50x50@2x.png
    │   │   │   ├── tensorflowiOS_Icon_57x57.png
    │   │   │   ├── tensorflowiOS_Icon_57x57@2x.png
    │   │   │   ├── tensorflowiOS_Icon_60x60@2x.png
    │   │   │   ├── tensorflowiOS_Icon_60x60@3x.png
    │   │   │   ├── tensorflowiOS_Icon_72x72.png
    │   │   │   ├── tensorflowiOS_Icon_72x72@2x.png
    │   │   │   ├── tensorflowiOS_Icon_76x76.png
    │   │   │   ├── tensorflowiOS_Icon_76x76@2x.png
    │   │   │   └── tensorflowiOS_Icon_83.5x83.5.png
    │   │   └── Contents.json
    │   ├── Base.lproj
    │   │   ├── LaunchScreen.storyboard
    │   │   └── Main.storyboard
    │   └── Default-568h@2x.png
    ├── SupportingFiles
    │   ├── BuildPhase.sh
    │   ├── Constants.swift
    │   ├── Info.plist
    │   └── tensorflow.xcconfig
    ├── Tensorflow
    │   ├── TensorflowGraph.h
    │   ├── TensorflowGraph.mm
    │   ├── TensorflowPrediction.h
    │   ├── TensorflowPrediction.m
    │   ├── TensorflowUtils.h
    │   ├── TensorflowUtils.mm
    │   └── tensorflowiOS-Bridging-Header.h
    └── tensorflowiOS.xcodeproj
        ├── project.pbxproj
        └── project.xcworkspace
            └── contents.xcworkspacedata
/.gitignore:
-------------------------------------------------------------------------------- 1 | .DS_Store 2 | xcuserdata 3 | xcshareddata 4 | 5 | string_int_label_map.pb.cc 6 | string_int_label_map.pb.h 7 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2017 Chris Sharp 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. -------------------------------------------------------------------------------- /Models/mscoco_label_map.txt: -------------------------------------------------------------------------------- 1 | item { 2 | name: "/m/01g317" 3 | id: 1 4 | display_name: "person" 5 | } 6 | item { 7 | name: "/m/0199g" 8 | id: 2 9 | display_name: "bicycle" 10 | } 11 | item { 12 | name: "/m/0k4j" 13 | id: 3 14 | display_name: "car" 15 | } 16 | item { 17 | name: "/m/04_sv" 18 | id: 4 19 | display_name: "motorcycle" 20 | } 21 | item { 22 | name: "/m/05czz6l" 23 | id: 5 24 | display_name: "airplane" 25 | } 26 | item { 27 | name: "/m/01bjv" 28 | id: 6 29 | display_name: "bus" 30 | } 31 | item { 32 | name: "/m/07jdr" 33 | id: 7 34 | display_name: "train" 35 | } 36 | item { 37 | name: "/m/07r04" 38 | id: 8 39 | display_name: "truck" 40 | } 41 | item { 42 | name: "/m/019jd" 43 | id: 9 44 | display_name: "boat" 45 | } 46 | item { 47 | name: "/m/015qff" 48 | id: 10 49 | display_name: "traffic light" 50 | } 51 | item { 52 | name: "/m/01pns0" 53 | id: 11 54 | display_name: "fire hydrant" 55 | } 56 | item { 57 | name: "/m/02pv19" 58 | id: 13 59 | display_name: "stop sign" 60 | } 61 | item { 62 | name: "/m/015qbp" 63 | id: 14 64 | display_name: "parking meter" 65 | } 66 | item { 67 | name: "/m/0cvnqh" 68 | id: 15 69 | display_name: "bench" 70 | } 71 | item { 72 | name: "/m/015p6" 73 | id: 16 74 | display_name: "bird" 75 | } 76 | item { 77 | name: "/m/01yrx" 78 | id: 17 79 | display_name: "cat" 80 | } 81 | item { 82 | name: "/m/0bt9lr" 83 | id: 18 84 | display_name: "dog" 85 | } 86 | item { 87 | name: "/m/03k3r" 88 | id: 19 89 | display_name: "horse" 90 | } 91 | item { 92 | name: "/m/07bgp" 93 | id: 20 94 | display_name: "sheep" 95 | } 96 | item { 97 | name: "/m/01xq0k1" 98 | id: 21 99 | display_name: "cow" 100 | } 101 | item { 102 | name: "/m/0bwd_0j" 103 | id: 22 104 | display_name: "elephant" 105 | } 106 | item { 107 | name: "/m/01dws" 108 | id: 23 109 | display_name: "bear" 110 | } 111 | item { 112 | name: "/m/0898b" 113 | 
id: 24 114 | display_name: "zebra" 115 | } 116 | item { 117 | name: "/m/03bk1" 118 | id: 25 119 | display_name: "giraffe" 120 | } 121 | item { 122 | name: "/m/01940j" 123 | id: 27 124 | display_name: "backpack" 125 | } 126 | item { 127 | name: "/m/0hnnb" 128 | id: 28 129 | display_name: "umbrella" 130 | } 131 | item { 132 | name: "/m/080hkjn" 133 | id: 31 134 | display_name: "handbag" 135 | } 136 | item { 137 | name: "/m/01rkbr" 138 | id: 32 139 | display_name: "tie" 140 | } 141 | item { 142 | name: "/m/01s55n" 143 | id: 33 144 | display_name: "suitcase" 145 | } 146 | item { 147 | name: "/m/02wmf" 148 | id: 34 149 | display_name: "frisbee" 150 | } 151 | item { 152 | name: "/m/071p9" 153 | id: 35 154 | display_name: "skis" 155 | } 156 | item { 157 | name: "/m/06__v" 158 | id: 36 159 | display_name: "snowboard" 160 | } 161 | item { 162 | name: "/m/018xm" 163 | id: 37 164 | display_name: "sports ball" 165 | } 166 | item { 167 | name: "/m/02zt3" 168 | id: 38 169 | display_name: "kite" 170 | } 171 | item { 172 | name: "/m/03g8mr" 173 | id: 39 174 | display_name: "baseball bat" 175 | } 176 | item { 177 | name: "/m/03grzl" 178 | id: 40 179 | display_name: "baseball glove" 180 | } 181 | item { 182 | name: "/m/06_fw" 183 | id: 41 184 | display_name: "skateboard" 185 | } 186 | item { 187 | name: "/m/019w40" 188 | id: 42 189 | display_name: "surfboard" 190 | } 191 | item { 192 | name: "/m/0dv9c" 193 | id: 43 194 | display_name: "tennis racket" 195 | } 196 | item { 197 | name: "/m/04dr76w" 198 | id: 44 199 | display_name: "bottle" 200 | } 201 | item { 202 | name: "/m/09tvcd" 203 | id: 46 204 | display_name: "wine glass" 205 | } 206 | item { 207 | name: "/m/08gqpm" 208 | id: 47 209 | display_name: "cup" 210 | } 211 | item { 212 | name: "/m/0dt3t" 213 | id: 48 214 | display_name: "fork" 215 | } 216 | item { 217 | name: "/m/04ctx" 218 | id: 49 219 | display_name: "knife" 220 | } 221 | item { 222 | name: "/m/0cmx8" 223 | id: 50 224 | display_name: "spoon" 225 | } 226 | item { 227 | name: "/m/04kkgm" 228 | id: 51 229 | display_name: "bowl" 230 | } 231 | item { 232 | name: "/m/09qck" 233 | id: 52 234 | display_name: "banana" 235 | } 236 | item { 237 | name: "/m/014j1m" 238 | id: 53 239 | display_name: "apple" 240 | } 241 | item { 242 | name: "/m/0l515" 243 | id: 54 244 | display_name: "sandwich" 245 | } 246 | item { 247 | name: "/m/0cyhj_" 248 | id: 55 249 | display_name: "orange" 250 | } 251 | item { 252 | name: "/m/0hkxq" 253 | id: 56 254 | display_name: "broccoli" 255 | } 256 | item { 257 | name: "/m/0fj52s" 258 | id: 57 259 | display_name: "carrot" 260 | } 261 | item { 262 | name: "/m/01b9xk" 263 | id: 58 264 | display_name: "hot dog" 265 | } 266 | item { 267 | name: "/m/0663v" 268 | id: 59 269 | display_name: "pizza" 270 | } 271 | item { 272 | name: "/m/0jy4k" 273 | id: 60 274 | display_name: "donut" 275 | } 276 | item { 277 | name: "/m/0fszt" 278 | id: 61 279 | display_name: "cake" 280 | } 281 | item { 282 | name: "/m/01mzpv" 283 | id: 62 284 | display_name: "chair" 285 | } 286 | item { 287 | name: "/m/02crq1" 288 | id: 63 289 | display_name: "couch" 290 | } 291 | item { 292 | name: "/m/03fp41" 293 | id: 64 294 | display_name: "potted plant" 295 | } 296 | item { 297 | name: "/m/03ssj5" 298 | id: 65 299 | display_name: "bed" 300 | } 301 | item { 302 | name: "/m/04bcr3" 303 | id: 67 304 | display_name: "dining table" 305 | } 306 | item { 307 | name: "/m/09g1w" 308 | id: 70 309 | display_name: "toilet" 310 | } 311 | item { 312 | name: "/m/07c52" 313 | id: 72 314 | display_name: "tv" 315 | } 316 | item { 
317 | name: "/m/01c648" 318 | id: 73 319 | display_name: "laptop" 320 | } 321 | item { 322 | name: "/m/020lf" 323 | id: 74 324 | display_name: "mouse" 325 | } 326 | item { 327 | name: "/m/0qjjc" 328 | id: 75 329 | display_name: "remote" 330 | } 331 | item { 332 | name: "/m/01m2v" 333 | id: 76 334 | display_name: "keyboard" 335 | } 336 | item { 337 | name: "/m/050k8" 338 | id: 77 339 | display_name: "cell phone" 340 | } 341 | item { 342 | name: "/m/0fx9l" 343 | id: 78 344 | display_name: "microwave" 345 | } 346 | item { 347 | name: "/m/029bxz" 348 | id: 79 349 | display_name: "oven" 350 | } 351 | item { 352 | name: "/m/01k6s3" 353 | id: 80 354 | display_name: "toaster" 355 | } 356 | item { 357 | name: "/m/0130jx" 358 | id: 81 359 | display_name: "sink" 360 | } 361 | item { 362 | name: "/m/040b_t" 363 | id: 82 364 | display_name: "refrigerator" 365 | } 366 | item { 367 | name: "/m/0bt_c3" 368 | id: 84 369 | display_name: "book" 370 | } 371 | item { 372 | name: "/m/01x3z" 373 | id: 85 374 | display_name: "clock" 375 | } 376 | item { 377 | name: "/m/02s195" 378 | id: 86 379 | display_name: "vase" 380 | } 381 | item { 382 | name: "/m/01lsmm" 383 | id: 87 384 | display_name: "scissors" 385 | } 386 | item { 387 | name: "/m/0kmg4" 388 | id: 88 389 | display_name: "teddy bear" 390 | } 391 | item { 392 | name: "/m/03wvsk" 393 | id: 89 394 | display_name: "hair drier" 395 | } 396 | item { 397 | name: "/m/012xff" 398 | id: 90 399 | display_name: "toothbrush" 400 | } -------------------------------------------------------------------------------- /Models/ssd_mobilenet_v1_ppn_shared_box_predictor_300x300_coco14_sync_2018_07_03/frozen_inference_graph.pb: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/csharpseattle/tensorflowiOS/8da0ccdf4cae38c872438bc1f6c9222cd8ceafaa/Models/ssd_mobilenet_v1_ppn_shared_box_predictor_300x300_coco14_sync_2018_07_03/frozen_inference_graph.pb -------------------------------------------------------------------------------- /Models/ssdlite_mobilenet_v2_coco_2018_05_09/frozen_inference_graph.pb: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/csharpseattle/tensorflowiOS/8da0ccdf4cae38c872438bc1f6c9222cd8ceafaa/Models/ssdlite_mobilenet_v2_coco_2018_05_09/frozen_inference_graph.pb -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Tensorflow iOS Object Detection 2 | 3 | An Object Detection application on iOS using Tensorflow and pre-trained COCO dataset models. Video frames are captured and inference is done locally using one of the 3 provided models: ssd_mobilenet_v1_coco, ssd_mobilenet_v2_coco, or ssd_inception_v2_coco. Both Swift and Objective-C projects are included. 4 | 5 | ![cat image](images/cat.png) 6 | 7 | ## Building 8 | 9 | * Make sure you have automake and libtool. Using homebrew: 10 | 11 | `brew install automake libtool` 12 | 13 | 14 | * Clone the tensorflow source repo from GitHub: 15 | 16 | `git clone https://github.com/tensorflow/tensorflow` 17 | 18 | 19 | * `cd` into the tensorflow repo and check out at least the `v1.5.0` tag. The v1.5.0 release added Makefile support for the `ANDROID_TYPES` environment variable used below.
I have tested up to `v1.12.0` 20 | 21 | `git checkout origin/r1.11` 22 | 23 | If you run into this issue: [thread-local storage is not supported for the current target](https://github.com/tensorflow/tensorflow/issues/18356), you may need to edit the two files affected. The following one-liners worked for me: 24 | 25 | `gsed '/ifeq[^,]*,I386)/!b;n;n;n;n;n;s/thread_local//' < ./tensorflow/contrib/makefile/Makefile > foo; mv foo ./tensorflow/contrib/makefile/Makefile` 26 | 27 | `gsed 's/thread_local int per_thread_max_parallism/__thread int per_thread_max_parallism/' < tensorflow/core/util/work_sharder.cc > foo; mv foo ./tensorflow/core/util/work_sharder.cc` 28 | 29 | You can get `gsed` using homebrew: `brew install gnu-sed` 30 | 31 | 32 | * We need to build the tensorflow components with ANDROID_TYPES_FULL. In the terminal type: 33 | 34 | `export ANDROID_TYPES="-D__ANDROID_TYPES_FULL__"` 35 | 36 | 37 | * Build the tensorflow libraries for iOS. Go to the root of your newly downloaded tensorflow repo and run: 38 | 39 | `tensorflow/contrib/makefile/build_all_ios.sh` 40 | 41 | Go get a coffee. This can take a while. On my MacBook it took almost 2 hours. 42 | 43 | 44 | * Open either the Swift or Objective-C project in this repo and edit the **tensorflow.xcconfig** file to point to the folder where you cloned the tensorflow repo: 45 | 46 | `TENSORFLOW_ROOT=/Users/username/Development/tensorflow` 47 | 48 | 49 | * Build the Xcode project and run. Since the app needs a camera, it will only run on a device. 50 | -------------------------------------------------------------------------------- /images/cat.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/csharpseattle/tensorflowiOS/8da0ccdf4cae38c872438bc1f6c9222cd8ceafaa/images/cat.png -------------------------------------------------------------------------------- /objC/App/AppDelegate.h: -------------------------------------------------------------------------------- 1 | 2 | @import UIKit; 3 | 4 | @interface AppDelegate : UIResponder <UIApplicationDelegate> 5 | 6 | @property (nonatomic) UIWindow *window; 7 | 8 | @end 9 | -------------------------------------------------------------------------------- /objC/App/AppDelegate.m: -------------------------------------------------------------------------------- 1 | 2 | #import "AppDelegate.h" 3 | 4 | @implementation AppDelegate 5 | @end 6 | -------------------------------------------------------------------------------- /objC/App/BoundingBoxView.h: -------------------------------------------------------------------------------- 1 | // 2 | // BoundingBoxView.h 3 | // tensorflowiOS 4 | // 5 | // Created by Sharp, Chris T on 10/9/17. 6 | // Copyright © 2017 Apple. All rights reserved. 7 | // 8 | 9 | #import <UIKit/UIKit.h> 10 | 11 | @interface BoundingBoxView : UIView 12 | 13 | @property (nonatomic) NSMutableArray* labels; 14 | 15 | - (void) updateBoundingBoxes: (NSArray*) boxes; 16 | 17 | @end 18 | -------------------------------------------------------------------------------- /objC/App/BoundingBoxView.m: -------------------------------------------------------------------------------- 1 | // 2 | // BoundingBoxView.m 3 | // tensorflowiOS 4 | // 5 | // Created by Sharp, Chris T on 10/9/17. 6 | // Copyright © 2017 Apple. All rights reserved.
7 | // 8 | 9 | #import "BoundingBoxView.h" 10 | #import "TensorflowPrediction.h" 11 | 12 | const CGFloat BoundingBoxLineWidth = 3.5f; 13 | 14 | @interface BoundingBoxView() 15 | @property (nonatomic) NSArray *boxesToBeErased; 16 | @property (nonatomic) NSArray *boxesToBeDrawn; 17 | @end 18 | 19 | @implementation BoundingBoxView 20 | 21 | - (instancetype)initWithCoder:(NSCoder *)coder 22 | { 23 | self = [super initWithCoder:coder]; 24 | if (self) 25 | { 26 | // 27 | // Maintain a list of UILabels for easy removal from superView. 28 | // 29 | self.labels = [[NSMutableArray alloc] init]; 30 | } 31 | return self; 32 | } 33 | 34 | 35 | // 36 | // in drawRect we have a clear UIView that we draw green bounding boxes on. 37 | // As a new list of boundingboxes comes in we erase the old boxes and draw the new ones. 38 | // Since this view is just a layer over the videoPreview the bounding boxes could be a few 39 | // frames behind and the box will not align with the object underneath it. This will likely 40 | // be an issue until Tensorflow processing is as fast as the video preview's frame capture. 41 | // 42 | - (void)drawRect:(CGRect)rect 43 | { 44 | // 45 | // Our drawing context 46 | // 47 | CGContextRef context = UIGraphicsGetCurrentContext(); 48 | 49 | // 50 | // The width of the bounding box lines. 51 | // 52 | CGContextSetLineWidth(context, BoundingBoxLineWidth); 53 | 54 | // 55 | // The fill color of the bounding box is always clear 56 | // 57 | CGContextSetRGBFillColor(context, 1.0, 1.0, 1.0, 0.0); 58 | 59 | // 60 | // Erase boxes from the previous frame 61 | // 62 | if (self.boxesToBeErased) 63 | { 64 | for (TensorflowPrediction* pred in self.boxesToBeErased) 65 | { 66 | // Erase the previous bounding box by using a clear stroke color 67 | CGContextSetRGBStrokeColor(context, 1.0, 1.0, 1.0, 0.0); 68 | 69 | // Calculate box dimensions of box to be erased. 70 | CGFloat x = pred.left * self.frame.size.width; 71 | CGFloat y = pred.top * self.frame.size.height; 72 | CGFloat w = (pred.right * self.frame.size.width) - x; 73 | CGFloat h = (pred.bottom * self.frame.size.height) - y; 74 | CGRect rectangle = CGRectMake(x, y, w, h); 75 | 76 | //Erase it. (draw clear pixels over the green) 77 | CGContextFillRect(context, rectangle); 78 | CGContextStrokeRect(context, rectangle); 79 | } 80 | 81 | // 82 | // Remove existing labels too. 83 | // 84 | for (UILabel * label in self.labels) 85 | { 86 | [label removeFromSuperview]; 87 | } 88 | [self.labels removeAllObjects]; 89 | self.boxesToBeErased = nil; 90 | } 91 | 92 | // 93 | // Draw newly predicted boxes 94 | // 95 | for (TensorflowPrediction* pred in self.boxesToBeDrawn) 96 | { 97 | // 98 | // Calculate the box dimensions. The box dimensions are given 99 | // as normalized values. Because this view has the same dimensions 100 | // as the original image multiplying by width and height gives the 101 | // correct location for the bounding box. 102 | // 103 | CGFloat x = pred.left * self.frame.size.width; 104 | CGFloat y = pred.top * self.frame.size.height; 105 | CGFloat w = (pred.right * self.frame.size.width) - x; 106 | CGFloat h = (pred.bottom * self.frame.size.height) - y; 107 | CGRect rectangle = CGRectMake(x, y, w, h); 108 | 109 | // Draw with a green stroke. 
110 | CGContextSetRGBStrokeColor(context, 0.0, 1.0, 0.0, 0.75); 111 | CGContextFillRect(context, rectangle); 112 | CGContextStrokeRect(context, rectangle); 113 | 114 | // Add the label to the upper left of the bounding box 115 | UILabel * label = [[UILabel alloc] initWithFrame:CGRectMake(x, y, 75, 35)]; 116 | [label setBackgroundColor:[UIColor whiteColor]]; 117 | [label setTextColor:[UIColor orangeColor]]; 118 | [label setText:[NSString stringWithFormat:@"%@ %.1f%%", pred.label, pred.score * 100]]; 119 | [label sizeToFit]; 120 | [self addSubview:label]; 121 | 122 | // 123 | // Keep a list of labels so we can easily remove from superview. 124 | // 125 | [self.labels addObject:label]; 126 | } 127 | } 128 | 129 | - (void) updateBoundingBoxes: (NSArray*) boxes 130 | { 131 | // 132 | // flag the old boxes to be erased and flag the new to be drawn. 133 | // 134 | self.boxesToBeErased = self.boxesToBeDrawn; 135 | self.boxesToBeDrawn = boxes; 136 | 137 | // 138 | // trigger a drawRect call next frame 139 | // 140 | [self setNeedsDisplay]; 141 | } 142 | 143 | @end 144 | -------------------------------------------------------------------------------- /objC/App/CameraPreviewView.h: -------------------------------------------------------------------------------- 1 | 2 | #import 3 | 4 | @class AVCaptureSession; 5 | 6 | @interface CameraPreviewView : UIView 7 | @property (nonatomic, readonly) AVCaptureVideoPreviewLayer *videoPreviewLayer; 8 | 9 | - (void) configureSession; 10 | - (void) startSessionWithDelegate: (id) delegate; 11 | - (void) stopSession; 12 | @end 13 | -------------------------------------------------------------------------------- /objC/App/CameraPreviewView.m: -------------------------------------------------------------------------------- 1 | 2 | #import 3 | 4 | #import "CameraPreviewView.h" 5 | #import "constants.h" 6 | 7 | static void * SessionRunningContext = &SessionRunningContext; 8 | 9 | typedef NS_ENUM( NSInteger, CameraSetupResult ) 10 | { 11 | SetupResultSuccess, 12 | SetupResultCameraNotAuthorized, 13 | SetupResultSessionConfigurationFailed 14 | }; 15 | 16 | @interface AVCaptureDeviceDiscoverySession (Utilities) 17 | 18 | - (NSInteger)uniqueDevicePositionsCount; 19 | 20 | @end 21 | 22 | @implementation AVCaptureDeviceDiscoverySession (Utilities) 23 | 24 | - (NSInteger)uniqueDevicePositionsCount 25 | { 26 | NSMutableArray *uniqueDevicePositions = [NSMutableArray array]; 27 | 28 | for ( AVCaptureDevice *device in self.devices ) 29 | { 30 | if ( ! 
[uniqueDevicePositions containsObject:@(device.position)] ) 31 | { 32 | [uniqueDevicePositions addObject:@(device.position)]; 33 | } 34 | } 35 | 36 | return uniqueDevicePositions.count; 37 | } 38 | 39 | @end 40 | 41 | @interface CameraPreviewView() 42 | @property (nonatomic) CameraSetupResult cameraSetupResult; 43 | @property (nonatomic) AVCaptureSession *avSession; 44 | @property (nonatomic) dispatch_queue_t sessionQueue; 45 | @property (nonatomic) dispatch_queue_t videoFrameSerialQueue; 46 | @property (nonatomic, getter=isSessionRunning) BOOL sessionRunning; 47 | @property (nonatomic) AVCaptureDeviceInput *videoDeviceInput; 48 | @property (nonatomic) AVCaptureVideoDataOutput *videoDataOutput; 49 | @end 50 | 51 | @implementation CameraPreviewView 52 | 53 | + (Class)layerClass 54 | { 55 | return [AVCaptureVideoPreviewLayer class]; 56 | } 57 | 58 | - (instancetype)initWithCoder:(NSCoder *)aDecoder 59 | { 60 | self = [super initWithCoder:aDecoder]; 61 | if (self) 62 | { 63 | [self setupSession]; 64 | } 65 | return self; 66 | 67 | } 68 | 69 | - (instancetype)init 70 | { 71 | self = [super init]; 72 | if (self) 73 | { 74 | [self setupSession]; 75 | } 76 | return self; 77 | } 78 | 79 | #pragma mark Session Management 80 | 81 | 82 | - (AVCaptureVideoPreviewLayer *)videoPreviewLayer 83 | { 84 | return (AVCaptureVideoPreviewLayer *)self.layer; 85 | } 86 | 87 | - (AVCaptureSession *)session 88 | { 89 | return self.videoPreviewLayer.session; 90 | } 91 | 92 | - (void)setSession:(AVCaptureSession *)session 93 | { 94 | self.videoPreviewLayer.session = session; 95 | } 96 | 97 | - (void) setupSession 98 | { 99 | self.avSession = [[AVCaptureSession alloc] init]; 100 | 101 | self.videoPreviewLayer.session = self.avSession; 102 | 103 | // 104 | // Communicate with the session and other session objects on this queue. 105 | // 106 | self.sessionQueue = dispatch_queue_create( "PreviewSessionQueue", DISPATCH_QUEUE_SERIAL ); 107 | 108 | // We use a serial queue for the video frames so that 109 | // they are dispatched in the order that they are captured 110 | self.videoFrameSerialQueue = dispatch_queue_create("VideoFrameQueue", DISPATCH_QUEUE_SERIAL); 111 | 112 | self.cameraSetupResult = SetupResultSuccess; 113 | 114 | //Check video authorization status. Video access is required. 115 | switch ( [AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeVideo] ) 116 | { 117 | case AVAuthorizationStatusAuthorized: 118 | { 119 | // The user has previously granted access to the camera. 120 | break; 121 | } 122 | case AVAuthorizationStatusNotDetermined: 123 | { 124 | /* 125 | The user has not yet been presented with the option to grant 126 | video access. We suspend the session queue to delay session 127 | setup until the access request has completed. 128 | */ 129 | dispatch_suspend( self.sessionQueue ); 130 | [AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo completionHandler:^( BOOL granted ) { 131 | if ( ! granted ) 132 | { 133 | self.cameraSetupResult = SetupResultCameraNotAuthorized; 134 | } 135 | dispatch_resume( self.sessionQueue ); 136 | }]; 137 | break; 138 | } 139 | default: 140 | { 141 | // The user has previously denied access. 142 | self.cameraSetupResult = SetupResultCameraNotAuthorized; 143 | break; 144 | } 145 | } 146 | } 147 | 148 | // Call this on the session queue. 
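// configureSession dispatches onto that queue and does the actual setup: it prefers the
// back dual camera, falling back to the back wide-angle camera and then the front camera,
// creates the AVCaptureDeviceInput, sets the initial preview orientation on the main
// queue, and finally adds the video data output. Any failure is recorded in
// cameraSetupResult so that startSessionWithDelegate: can report it later.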
149 | - (void)configureSession 150 | { 151 | dispatch_async( self.sessionQueue, ^{ 152 | if ( self.cameraSetupResult != SetupResultSuccess ) 153 | { 154 | return; 155 | } 156 | 157 | [self.avSession beginConfiguration]; 158 | self.avSession.sessionPreset = AVCaptureSessionPresetLow; 159 | 160 | // Add video input. 161 | 162 | // Choose the back dual camera if available, otherwise default to a wide angle camera. 163 | AVCaptureDevice *videoDevice = [AVCaptureDevice defaultDeviceWithDeviceType:AVCaptureDeviceTypeBuiltInDualCamera mediaType:AVMediaTypeVideo position:AVCaptureDevicePositionBack]; 164 | if ( ! videoDevice ) 165 | { 166 | // If the back dual camera is not available, default to the back wide angle camera. 167 | videoDevice = [AVCaptureDevice defaultDeviceWithDeviceType:AVCaptureDeviceTypeBuiltInWideAngleCamera mediaType:AVMediaTypeVideo position:AVCaptureDevicePositionBack]; 168 | 169 | // In some cases where users break their phones, the back wide angle camera is not available. In this case, we should default to the front wide angle camera. 170 | if ( ! videoDevice ) 171 | { 172 | videoDevice = [AVCaptureDevice defaultDeviceWithDeviceType:AVCaptureDeviceTypeBuiltInWideAngleCamera mediaType:AVMediaTypeVideo position:AVCaptureDevicePositionFront]; 173 | } 174 | } 175 | 176 | // Set the frame rate to 15fps max on the video preview. 177 | [videoDevice lockForConfiguration:nil]; 178 | [videoDevice setActiveVideoMaxFrameDuration:CMTimeMake(1,15)]; 179 | [videoDevice unlockForConfiguration]; 180 | 181 | NSError *error = nil; 182 | AVCaptureDeviceInput *videoDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:videoDevice error:&error]; 183 | if ( ! videoDeviceInput ) 184 | { 185 | NSLog( @"Could not create video device input: %@", error ); 186 | self.cameraSetupResult = SetupResultSessionConfigurationFailed; 187 | [self.avSession commitConfiguration]; 188 | return; 189 | } 190 | if ( [self.avSession canAddInput:videoDeviceInput] ) 191 | { 192 | [self.avSession addInput:videoDeviceInput]; 193 | self.videoDeviceInput = videoDeviceInput; 194 | 195 | dispatch_async( dispatch_get_main_queue(), ^{ 196 | // Dispatching this to the main queue because a UIView (CameraPreviewView) can only be 197 | // changed on the main thread. 198 | UIInterfaceOrientation statusBarOrientation = [UIApplication sharedApplication].statusBarOrientation; 199 | AVCaptureVideoOrientation initialVideoOrientation = AVCaptureVideoOrientationPortrait; 200 | if ( statusBarOrientation != UIInterfaceOrientationUnknown ) 201 | { 202 | initialVideoOrientation = (AVCaptureVideoOrientation)statusBarOrientation; 203 | } 204 | 205 | self.videoPreviewLayer.connection.videoOrientation = initialVideoOrientation; 206 | } ); 207 | } 208 | else 209 | { 210 | NSLog( @"Could not add video device input to the session" ); 211 | self.cameraSetupResult = SetupResultSessionConfigurationFailed; 212 | [self.avSession commitConfiguration]; 213 | return; 214 | } 215 | 216 | [self addVideoOutput]; 217 | 218 | [self.avSession commitConfiguration]; 219 | } ); 220 | } 221 | 222 | - (void) addVideoOutput 223 | { 224 | AVCaptureVideoDataOutput *videoOutput = [[AVCaptureVideoDataOutput alloc] init]; 225 | 226 | // 227 | // We use the 32 bit BGRA pixel format type. That way we can just pass the data to 228 | // Tensorflow without pre-processing. 
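// kCVPixelFormatType_32BGRA delivers interleaved B, G, R, A bytes, which lines up with
// the BGR channel order the graph expects (see kGraphChannels in TensorflowGraph.mm).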
229 | // 230 | NSDictionary *newSettings = @{ (NSString *)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA) }; 231 | videoOutput.videoSettings = newSettings; 232 | videoOutput.alwaysDiscardsLateVideoFrames = YES; 233 | 234 | // 235 | // Add the videoOutput to our AVSession 236 | // 237 | if ( [self.avSession canAddOutput:videoOutput] ) 238 | { 239 | [self.avSession beginConfiguration]; 240 | [self.avSession addOutput:videoOutput]; 241 | self.avSession.sessionPreset = AVCaptureSessionPresetHigh; 242 | AVCaptureConnection *connection = [videoOutput connectionWithMediaType:AVMediaTypeVideo]; 243 | if ( connection.isVideoStabilizationSupported ) 244 | { 245 | connection.preferredVideoStabilizationMode = AVCaptureVideoStabilizationModeAuto; 246 | } 247 | 248 | [self.avSession commitConfiguration]; 249 | 250 | self.videoDataOutput = videoOutput; 251 | } 252 | } 253 | 254 | - (void) startSessionWithDelegate:(id) delegate 255 | { 256 | dispatch_async( self.sessionQueue, ^{ 257 | switch ( self.cameraSetupResult ) 258 | { 259 | case SetupResultSuccess: 260 | { 261 | // if setup succeeded we can add Observers and frame delegate 262 | // and run the session. 263 | [self addObservers]; 264 | [self.videoDataOutput setSampleBufferDelegate:delegate queue:self.videoFrameSerialQueue]; 265 | 266 | [self.avSession startRunning]; 267 | self.sessionRunning = self.avSession.isRunning; 268 | 269 | // Let everyone know we have a session. 270 | [[NSNotificationCenter defaultCenter] postNotificationName:kAVSessionStarted object:nil]; 271 | break; 272 | } 273 | case SetupResultCameraNotAuthorized: 274 | { 275 | [[NSNotificationCenter defaultCenter] postNotificationName:kSetupResultCameraNotAuthorized object:nil]; 276 | break; 277 | } 278 | case SetupResultSessionConfigurationFailed: 279 | { 280 | [[NSNotificationCenter defaultCenter] postNotificationName:kSetupResultSessionConfigurationFailed object:nil]; 281 | break; 282 | } 283 | } 284 | } ); 285 | } 286 | 287 | - (void) stopSession 288 | { 289 | dispatch_async( self.sessionQueue, ^{ 290 | if ( self.cameraSetupResult == SetupResultSuccess ) 291 | { 292 | [self.avSession stopRunning]; 293 | [self removeObservers]; 294 | } 295 | } ); 296 | } 297 | 298 | #pragma mark KVO and Notifications 299 | 300 | - (void)addObservers 301 | { 302 | [self.avSession addObserver:self forKeyPath:@"running" options:NSKeyValueObservingOptionNew context:SessionRunningContext]; 303 | 304 | [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(sessionRuntimeError:) name:AVCaptureSessionRuntimeErrorNotification object:self.avSession]; 305 | } 306 | 307 | - (void)removeObservers 308 | { 309 | [[NSNotificationCenter defaultCenter] removeObserver:self]; 310 | 311 | [self.avSession removeObserver:self forKeyPath:@"running" context:SessionRunningContext]; 312 | } 313 | 314 | - (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context 315 | { 316 | if ( context == SessionRunningContext ) 317 | { 318 | self.sessionRunning = [change[NSKeyValueChangeNewKey] boolValue]; 319 | } 320 | else 321 | { 322 | [super observeValueForKeyPath:keyPath ofObject:object change:change context:context]; 323 | } 324 | } 325 | 326 | 327 | - (void)sessionRuntimeError:(NSNotification *)notification 328 | { 329 | NSError *error = notification.userInfo[AVCaptureSessionErrorKey]; 330 | NSLog( @"Capture session runtime error: %@", error ); 331 | 332 | /* 333 | Automatically try to restart the session running if media services 
were 334 | reset and the last start running succeeded. Otherwise, enable the user 335 | to try to resume the session running. 336 | */ 337 | if ( error.code == AVErrorMediaServicesWereReset ) 338 | { 339 | dispatch_async( self.sessionQueue, ^{ 340 | if ( self.isSessionRunning ) 341 | { 342 | [self.avSession startRunning]; 343 | self.sessionRunning = self.avSession.isRunning; 344 | } 345 | } ); 346 | } 347 | } 348 | 349 | @end 350 | -------------------------------------------------------------------------------- /objC/App/ViewController.h: -------------------------------------------------------------------------------- 1 | 2 | #import 3 | #import 4 | 5 | @interface ViewController : UIViewController 6 | 7 | @end 8 | -------------------------------------------------------------------------------- /objC/App/ViewController.mm: -------------------------------------------------------------------------------- 1 | #import 2 | #import "ViewController.h" 3 | #import "CameraPreviewView.h" 4 | #import "TensorflowGraph.h" 5 | #import "constants.h" 6 | #import "TensorflowPrediction.h" 7 | #import "BoundingBoxView.h" 8 | #import "tensorflowUtils.h" 9 | 10 | @interface ViewController () 11 | 12 | // The view of what the camera is currently capturing 13 | @property (nonatomic, weak) IBOutlet CameraPreviewView *cameraPreviewView; 14 | 15 | // the transparent UIView where we draw the bounding boxes. This view 16 | // sits on top of the CameraPreview 17 | @property (nonatomic, weak) IBOutlet BoundingBoxView *boundingBoxView; 18 | 19 | // the tensorflow graph that will do the recognizing. 20 | @property (nonatomic) TensorflowGraph *tensorflowGraph; 21 | 22 | // Label to alert the user if the camera is unavailable. 23 | @property (nonatomic, weak) IBOutlet UILabel *cameraUnavailableLabel; 24 | 25 | // processingTime and framesProcessed are used for keeping an average time to make predictions. 26 | @property (nonatomic) double processingTime; 27 | @property (nonatomic) int framesProcessed; 28 | 29 | @end 30 | 31 | 32 | @implementation ViewController 33 | 34 | 35 | #pragma mark View Controller Life Cycle 36 | 37 | - (void)viewDidLoad 38 | { 39 | [super viewDidLoad]; 40 | 41 | // 42 | // Configure the video preview. We will grab frames 43 | // from the video preview and feed them into the tensorflow graph. 44 | // Then bounding boxes can be rendered onto the boundingBoxView. 45 | // 46 | [self.cameraPreviewView configureSession]; 47 | } 48 | 49 | - (void)viewWillAppear:(BOOL)animated 50 | { 51 | [super viewWillAppear:animated]; 52 | 53 | // 54 | // Listen for the start of the AVSession. This will signal the start 55 | // of the delivery of video frames and will trigger the 56 | // initialization of the tensorflow graph 57 | // 58 | [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(OnAVSessionStarted:) name:kAVSessionStarted object:nil]; 59 | 60 | // 61 | // Also Listen for Session initialization failure or for when 62 | // the user doesn't authorize the use of the camera 63 | // 64 | [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(OnSetupResultCameraNotAuthorized:) name:kSetupResultCameraNotAuthorized object:nil]; 65 | [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(OnSetupResultSessionConfigurationFailed:) name:kSetupResultSessionConfigurationFailed object:nil]; 66 | 67 | // 68 | // Respond to the tensorflow graph's update of predictions. This will 69 | // trigger the redrawing of the bounding boxes. 
70 | // 71 | [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(OnPredictionsUpdated:) name:kPredictionsUpdated object:nil]; 72 | 73 | // 74 | // Start the AV Session. This will prompt the user for 75 | // permission to use the camera to present a video preview. 76 | // 77 | [self.cameraPreviewView startSessionWithDelegate:self]; 78 | } 79 | 80 | // 81 | // when the view disappears we shut down the session. It will be restarted in ViewWillAppear 82 | // 83 | - (void)viewDidDisappear:(BOOL)animated 84 | { 85 | [self.cameraPreviewView stopSession]; 86 | [super viewDidDisappear:animated]; 87 | } 88 | 89 | // 90 | // We allow autorotation to all orientations, but may have to rotate the 91 | // pixel buffer when we run the graph. 92 | // 93 | - (BOOL)shouldAutorotate 94 | { 95 | return YES; 96 | } 97 | 98 | - (UIInterfaceOrientationMask)supportedInterfaceOrientations 99 | { 100 | return UIInterfaceOrientationMaskAll; 101 | } 102 | 103 | 104 | // 105 | // Override viewWillTransitionToSize so that we can update the videoPreviewLayer with the new orientation. 106 | // 107 | - (void)viewWillTransitionToSize:(CGSize)size withTransitionCoordinator:(id)coordinator 108 | { 109 | // 110 | // call super so the coordinator can be passed on. 111 | // 112 | [super viewWillTransitionToSize:size withTransitionCoordinator:coordinator]; 113 | 114 | // 115 | // Change the orientation of the video session 116 | // 117 | UIDeviceOrientation deviceOrientation = [UIDevice currentDevice].orientation; 118 | self.cameraPreviewView.videoPreviewLayer.connection.videoOrientation = (AVCaptureVideoOrientation)deviceOrientation; 119 | } 120 | 121 | #pragma mark - Video Preview delegate 122 | 123 | 124 | // 125 | // Delegate function from the AVSession. Here we capture frames from 126 | // the video preview and feed them to tensorflow. 127 | // 128 | - (void)captureOutput:(AVCaptureOutput *)output didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection 129 | { 130 | // If the graph is ready, run the frame through tensorflow 131 | if (self.tensorflowGraph) 132 | { 133 | // 134 | // if it is not busy pass the pixel buffer off to the tensorflow graph 135 | // 136 | if ([self.tensorflowGraph canProcessFrame]) 137 | { 138 | // 139 | // Grab the pixel buffer. We pass it to the tf graph which will retain, copy and release 140 | // 141 | CVImageBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer); 142 | [self.tensorflowGraph runModelOnPixelBuffer:pixelBuffer orientation:[UIDevice currentDevice].orientation]; 143 | } 144 | } 145 | } 146 | 147 | // 148 | // Will be called when frames are dropped by the Video Output delegate. 149 | // 150 | - (void)captureOutput:(AVCaptureOutput *)output didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection 151 | { 152 | //CFTypeRef droppedFrameReason = CMGetAttachment(sampleBuffer, kCMSampleBufferAttachmentKey_DroppedFrameReason, NULL); 153 | //NSLog(@"dropped frame, reason: %@", droppedFrameReason); 154 | } 155 | 156 | 157 | #pragma mark - NS_NOTIFICATIONS 158 | 159 | // 160 | // Notification that the AV Session has started. Since we now have a camera session 161 | // it is safe to alloc a tensorflowGraph object. 162 | // 163 | - (void) OnAVSessionStarted: (NSNotification*) notification 164 | { 165 | // Now that the user has granted permission to the camera 166 | // and we have a video session we can initialize our graph. 
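// The graph is created lazily and only once; the nil check below keeps repeated
// kAVSessionStarted notifications from creating a new TensorflowGraph.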
167 | if (!self.tensorflowGraph) 168 | { 169 | self.tensorflowGraph = [[TensorflowGraph alloc] init]; 170 | } 171 | } 172 | 173 | // 174 | // The tensorflow graph has analyzed the pixel buffer coming out of the CameraPreview 175 | // and resulted in new predictions and bounding boxes. We notify the boundingBoxView to 176 | // draw the boxes over the CameraPreview. 177 | // 178 | - (void) OnPredictionsUpdated: (NSNotification*) notification 179 | { 180 | NSDictionary * dict = [notification userInfo]; 181 | if (dict) 182 | { 183 | // Update the Bounding boxes and labels from the 184 | // new predictions coming out of the graph. 185 | NSArray * predictions = dict[@"predictions"]; 186 | if (predictions) 187 | { 188 | [self.boundingBoxView updateBoundingBoxes:predictions]; 189 | } 190 | } 191 | } 192 | 193 | 194 | // 195 | // Notification that the camera has not been authorized. Without camera permissions 196 | // we will not have a preview and won't alloc a Tensorflow graph. Post an alertBox 197 | // and give the user a short cut to the settings app. 198 | // 199 | - (void) OnSetupResultCameraNotAuthorized: (NSNotification *) notification 200 | { 201 | dispatch_async( dispatch_get_main_queue(), ^{ 202 | NSString *message = NSLocalizedString( @"In order to display a video preview we need to use the camera, please change privacy settings", @"Alert message when the user has denied access to the camera" ); 203 | UIAlertController *alertController = [UIAlertController alertControllerWithTitle:@"tensorflowiOS" message:message preferredStyle:UIAlertControllerStyleAlert]; 204 | UIAlertAction *cancelAction = [UIAlertAction actionWithTitle:NSLocalizedString( @"OK", @"Alert OK button" ) style:UIAlertActionStyleCancel handler:nil]; 205 | [alertController addAction:cancelAction]; 206 | // Provide quick access to Settings. 207 | UIAlertAction *settingsAction = [UIAlertAction actionWithTitle:NSLocalizedString( @"Settings", @"Alert button to open Settings" ) style:UIAlertActionStyleDefault handler:^( UIAlertAction *action ) { 208 | [[UIApplication sharedApplication] openURL:[NSURL URLWithString:UIApplicationOpenSettingsURLString] options:@{} completionHandler:nil]; 209 | }]; 210 | [alertController addAction:settingsAction]; 211 | [self presentViewController:alertController animated:YES completion:nil]; 212 | } ); 213 | } 214 | 215 | // 216 | // Configuration of the AV session failed. For some reason the AVSession has failed to 217 | // initialize. Post an alert. 
218 | // 219 | - (void) OnSetupResultSessionConfigurationFailed: (NSNotification *) notification 220 | { 221 | dispatch_async( dispatch_get_main_queue(), ^{ 222 | NSString *message = NSLocalizedString( @"Unable to capture media", @"Alert message when something goes wrong during capture session configuration" ); 223 | UIAlertController *alertController = [UIAlertController alertControllerWithTitle:@"tensorflowiOS" message:message preferredStyle:UIAlertControllerStyleAlert]; 224 | UIAlertAction *cancelAction = [UIAlertAction actionWithTitle:NSLocalizedString( @"OK", @"Alert OK button" ) style:UIAlertActionStyleCancel handler:nil]; 225 | [alertController addAction:cancelAction]; 226 | [self presentViewController:alertController animated:YES completion:nil]; 227 | } ); 228 | } 229 | @end 230 | -------------------------------------------------------------------------------- /objC/Assets/Assets.xcassets/AppIcon.appiconset/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "images" : [ 3 | { 4 | "idiom" : "iphone", 5 | "size" : "20x20", 6 | "scale" : "2x" 7 | }, 8 | { 9 | "idiom" : "iphone", 10 | "size" : "20x20", 11 | "scale" : "3x" 12 | }, 13 | { 14 | "size" : "29x29", 15 | "idiom" : "iphone", 16 | "filename" : "tensorflowiOS_Icon_29x29-1.png", 17 | "scale" : "1x" 18 | }, 19 | { 20 | "size" : "29x29", 21 | "idiom" : "iphone", 22 | "filename" : "tensorflowiOS_Icon_29x29@2x-1.png", 23 | "scale" : "2x" 24 | }, 25 | { 26 | "size" : "29x29", 27 | "idiom" : "iphone", 28 | "filename" : "tensorflowiOS_Icon_29x29@3x.png", 29 | "scale" : "3x" 30 | }, 31 | { 32 | "size" : "40x40", 33 | "idiom" : "iphone", 34 | "filename" : "tensorflowiOS_Icon_40x40@2x-1.png", 35 | "scale" : "2x" 36 | }, 37 | { 38 | "size" : "40x40", 39 | "idiom" : "iphone", 40 | "filename" : "tensorflowiOS_Icon_40x40@3x.png", 41 | "scale" : "3x" 42 | }, 43 | { 44 | "size" : "57x57", 45 | "idiom" : "iphone", 46 | "filename" : "tensorflowiOS_Icon_57x57.png", 47 | "scale" : "1x" 48 | }, 49 | { 50 | "size" : "57x57", 51 | "idiom" : "iphone", 52 | "filename" : "tensorflowiOS_Icon_57x57@2x.png", 53 | "scale" : "2x" 54 | }, 55 | { 56 | "size" : "60x60", 57 | "idiom" : "iphone", 58 | "filename" : "tensorflowiOS_Icon_60x60@2x.png", 59 | "scale" : "2x" 60 | }, 61 | { 62 | "size" : "60x60", 63 | "idiom" : "iphone", 64 | "filename" : "tensorflowiOS_Icon_60x60@3x.png", 65 | "scale" : "3x" 66 | }, 67 | { 68 | "idiom" : "ipad", 69 | "size" : "20x20", 70 | "scale" : "1x" 71 | }, 72 | { 73 | "idiom" : "ipad", 74 | "size" : "20x20", 75 | "scale" : "2x" 76 | }, 77 | { 78 | "size" : "29x29", 79 | "idiom" : "ipad", 80 | "filename" : "tensorflowiOS_Icon_29x29.png", 81 | "scale" : "1x" 82 | }, 83 | { 84 | "size" : "29x29", 85 | "idiom" : "ipad", 86 | "filename" : "tensorflowiOS_Icon_29x29@2x.png", 87 | "scale" : "2x" 88 | }, 89 | { 90 | "size" : "40x40", 91 | "idiom" : "ipad", 92 | "filename" : "tensorflowiOS_Icon_40x40.png", 93 | "scale" : "1x" 94 | }, 95 | { 96 | "size" : "40x40", 97 | "idiom" : "ipad", 98 | "filename" : "tensorflowiOS_Icon_40x40@2x.png", 99 | "scale" : "2x" 100 | }, 101 | { 102 | "size" : "50x50", 103 | "idiom" : "ipad", 104 | "filename" : "tensorflowiOS_Icon_50x50.png", 105 | "scale" : "1x" 106 | }, 107 | { 108 | "size" : "50x50", 109 | "idiom" : "ipad", 110 | "filename" : "tensorflowiOS_Icon_50x50@2x.png", 111 | "scale" : "2x" 112 | }, 113 | { 114 | "size" : "72x72", 115 | "idiom" : "ipad", 116 | "filename" : "tensorflowiOS_Icon_72x72.png", 117 | "scale" : "1x" 118 | }, 119 | { 120 | 
"size" : "72x72", 121 | "idiom" : "ipad", 122 | "filename" : "tensorflowiOS_Icon_72x72@2x.png", 123 | "scale" : "2x" 124 | }, 125 | { 126 | "size" : "76x76", 127 | "idiom" : "ipad", 128 | "filename" : "tensorflowiOS_Icon_76x76.png", 129 | "scale" : "1x" 130 | }, 131 | { 132 | "size" : "76x76", 133 | "idiom" : "ipad", 134 | "filename" : "tensorflowiOS_Icon_76x76@2x.png", 135 | "scale" : "2x" 136 | }, 137 | { 138 | "size" : "83.5x83.5", 139 | "idiom" : "ipad", 140 | "filename" : "tensorflowiOS_Icon_83.5x83.5.png", 141 | "scale" : "2x" 142 | }, 143 | { 144 | "size" : "1024x1024", 145 | "idiom" : "ios-marketing", 146 | "filename" : "tensorflowiOS_Icon_1024x1024-1.png", 147 | "scale" : "1x" 148 | } 149 | ], 150 | "info" : { 151 | "version" : 1, 152 | "author" : "xcode" 153 | } 154 | } -------------------------------------------------------------------------------- /objC/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_1024x1024-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/csharpseattle/tensorflowiOS/8da0ccdf4cae38c872438bc1f6c9222cd8ceafaa/objC/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_1024x1024-1.png -------------------------------------------------------------------------------- /objC/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_29x29-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/csharpseattle/tensorflowiOS/8da0ccdf4cae38c872438bc1f6c9222cd8ceafaa/objC/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_29x29-1.png -------------------------------------------------------------------------------- /objC/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_29x29.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/csharpseattle/tensorflowiOS/8da0ccdf4cae38c872438bc1f6c9222cd8ceafaa/objC/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_29x29.png -------------------------------------------------------------------------------- /objC/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_29x29@2x-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/csharpseattle/tensorflowiOS/8da0ccdf4cae38c872438bc1f6c9222cd8ceafaa/objC/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_29x29@2x-1.png -------------------------------------------------------------------------------- /objC/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_29x29@2x.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/csharpseattle/tensorflowiOS/8da0ccdf4cae38c872438bc1f6c9222cd8ceafaa/objC/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_29x29@2x.png -------------------------------------------------------------------------------- /objC/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_29x29@3x.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/csharpseattle/tensorflowiOS/8da0ccdf4cae38c872438bc1f6c9222cd8ceafaa/objC/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_29x29@3x.png -------------------------------------------------------------------------------- /objC/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_40x40.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/csharpseattle/tensorflowiOS/8da0ccdf4cae38c872438bc1f6c9222cd8ceafaa/objC/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_40x40.png -------------------------------------------------------------------------------- /objC/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_40x40@2x-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/csharpseattle/tensorflowiOS/8da0ccdf4cae38c872438bc1f6c9222cd8ceafaa/objC/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_40x40@2x-1.png -------------------------------------------------------------------------------- /objC/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_40x40@2x.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/csharpseattle/tensorflowiOS/8da0ccdf4cae38c872438bc1f6c9222cd8ceafaa/objC/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_40x40@2x.png -------------------------------------------------------------------------------- /objC/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_40x40@3x.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/csharpseattle/tensorflowiOS/8da0ccdf4cae38c872438bc1f6c9222cd8ceafaa/objC/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_40x40@3x.png -------------------------------------------------------------------------------- /objC/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_50x50.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/csharpseattle/tensorflowiOS/8da0ccdf4cae38c872438bc1f6c9222cd8ceafaa/objC/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_50x50.png -------------------------------------------------------------------------------- /objC/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_50x50@2x.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/csharpseattle/tensorflowiOS/8da0ccdf4cae38c872438bc1f6c9222cd8ceafaa/objC/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_50x50@2x.png -------------------------------------------------------------------------------- /objC/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_57x57.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/csharpseattle/tensorflowiOS/8da0ccdf4cae38c872438bc1f6c9222cd8ceafaa/objC/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_57x57.png -------------------------------------------------------------------------------- /objC/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_57x57@2x.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/csharpseattle/tensorflowiOS/8da0ccdf4cae38c872438bc1f6c9222cd8ceafaa/objC/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_57x57@2x.png -------------------------------------------------------------------------------- /objC/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_60x60@2x.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/csharpseattle/tensorflowiOS/8da0ccdf4cae38c872438bc1f6c9222cd8ceafaa/objC/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_60x60@2x.png -------------------------------------------------------------------------------- /objC/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_60x60@3x.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/csharpseattle/tensorflowiOS/8da0ccdf4cae38c872438bc1f6c9222cd8ceafaa/objC/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_60x60@3x.png -------------------------------------------------------------------------------- /objC/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_72x72.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/csharpseattle/tensorflowiOS/8da0ccdf4cae38c872438bc1f6c9222cd8ceafaa/objC/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_72x72.png -------------------------------------------------------------------------------- /objC/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_72x72@2x.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/csharpseattle/tensorflowiOS/8da0ccdf4cae38c872438bc1f6c9222cd8ceafaa/objC/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_72x72@2x.png -------------------------------------------------------------------------------- /objC/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_76x76.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/csharpseattle/tensorflowiOS/8da0ccdf4cae38c872438bc1f6c9222cd8ceafaa/objC/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_76x76.png -------------------------------------------------------------------------------- /objC/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_76x76@2x.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/csharpseattle/tensorflowiOS/8da0ccdf4cae38c872438bc1f6c9222cd8ceafaa/objC/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_76x76@2x.png -------------------------------------------------------------------------------- /objC/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_83.5x83.5.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/csharpseattle/tensorflowiOS/8da0ccdf4cae38c872438bc1f6c9222cd8ceafaa/objC/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_83.5x83.5.png -------------------------------------------------------------------------------- /objC/Assets/Assets.xcassets/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "info" : { 3 | "version" : 1, 4 | "author" : "xcode" 5 | } 6 | } -------------------------------------------------------------------------------- /objC/Assets/Base.lproj/LaunchScreen.storyboard: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | -------------------------------------------------------------------------------- /objC/Assets/Default-568h@2x.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/csharpseattle/tensorflowiOS/8da0ccdf4cae38c872438bc1f6c9222cd8ceafaa/objC/Assets/Default-568h@2x.png -------------------------------------------------------------------------------- /objC/Assets/Main.storyboard: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | 42 | 43 | 44 | 45 | 46 | 47 | 48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57 | 58 | 59 | 60 | 61 | 62 | 63 | 64 | 65 | 66 | 67 | 68 | 69 | -------------------------------------------------------------------------------- /objC/SupportingFiles/BuildPhase.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # 4 | # Grab the proto definition file from the tensorflow models repository 5 | # 6 | echo "Downloading proto definition from models repo" 7 | curl -s -o /${TMPDIR}/string_int_label_map.proto https://raw.githubusercontent.com/tensorflow/models/master/research/object_detection/protos/string_int_label_map.proto 8 | 9 | # 10 | # Test for existence of the protobuf compiler in the tensorflow repo. 11 | # 12 | if [ ! -f $TENSORFLOW_ROOT/tensorflow/contrib/makefile/gen/protobuf-host/bin/protoc ]; then 13 | echo "protoc not found in Tensorflow repo at tensorflow/contrib/makefile/gen/protobuf-host/bin. Did you set TENSORFLOW_ROOT in tensorflow.xcconfig?" 14 | return 1 15 | fi 16 | 17 | # 18 | # Generate the string_int_label_map.cc and .h file 19 | # 20 | 21 | 22 | if [ $? == 0 ]; then 23 | echo "Generating string_int_label_map. Output to '$SRCROOT'" 24 | $TENSORFLOW_ROOT/tensorflow/contrib/makefile/gen/protobuf-host/bin/protoc --proto_path=${TMPDIR} --cpp_out=${SRCROOT}/Tensorflow/ string_int_label_map.proto 25 | else 26 | exit 1 27 | fi 28 | 29 | return $? 30 | -------------------------------------------------------------------------------- /objC/SupportingFiles/Info.plist: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | CFBundleDevelopmentRegion 6 | en 7 | CFBundleExecutable 8 | $(EXECUTABLE_NAME) 9 | CFBundleIdentifier 10 | $(PRODUCT_BUNDLE_IDENTIFIER) 11 | CFBundleInfoDictionaryVersion 12 | 6.0 13 | CFBundleName 14 | $(PRODUCT_NAME) 15 | CFBundlePackageType 16 | APPL 17 | CFBundleShortVersionString 18 | 5.0 19 | CFBundleSignature 20 | ???? 
21 | CFBundleVersion 22 | 1 23 | LSRequiresIPhoneOS 24 | 25 | NSCameraUsageDescription 26 | For the Video Preview 27 | UILaunchStoryboardName 28 | LaunchScreen 29 | UIMainStoryboardFile 30 | Main 31 | UIRequiredDeviceCapabilities 32 | 33 | armv7 34 | 35 | UIRequiresFullScreen 36 | 37 | UIStatusBarHidden 38 | 39 | UISupportedInterfaceOrientations 40 | 41 | UIInterfaceOrientationLandscapeLeft 42 | UIInterfaceOrientationLandscapeRight 43 | UIInterfaceOrientationPortrait 44 | UIInterfaceOrientationPortraitUpsideDown 45 | 46 | UISupportedInterfaceOrientations~ipad 47 | 48 | UIInterfaceOrientationLandscapeLeft 49 | UIInterfaceOrientationLandscapeRight 50 | 51 | UIViewControllerBasedStatusBarAppearance 52 | 53 | 54 | 55 | -------------------------------------------------------------------------------- /objC/SupportingFiles/main.m: -------------------------------------------------------------------------------- 1 | @import UIKit; 2 | 3 | #import "AppDelegate.h" 4 | 5 | int main(int argc, char * argv[]) 6 | { 7 | @autoreleasepool 8 | { 9 | return UIApplicationMain( argc, argv, nil, NSStringFromClass( [AppDelegate class] ) ); 10 | } 11 | } 12 | -------------------------------------------------------------------------------- /objC/SupportingFiles/tensorflow.xcconfig: -------------------------------------------------------------------------------- 1 | // 2 | // tensorflow.xcconfig 3 | // tensorflowiOS 4 | // 5 | // Created by Sharp, Chris T on 10/9/17. 6 | // Copyright © 2017 Apple. All rights reserved. 7 | // 8 | 9 | TENSORFLOW_ROOT = /Users/username/Development/tensorflow 10 | -------------------------------------------------------------------------------- /objC/Tensorflow/TensorflowGraph.h: -------------------------------------------------------------------------------- 1 | #import 2 | #import 3 | #include 4 | 5 | @interface TensorflowGraph : NSObject 6 | 7 | - (id) init; 8 | - (void) runModelOnPixelBuffer:(CVPixelBufferRef) pixelBuf orientation: (UIDeviceOrientation) orientation; 9 | - (BOOL) canProcessFrame; 10 | @end 11 | -------------------------------------------------------------------------------- /objC/Tensorflow/TensorflowGraph.mm: -------------------------------------------------------------------------------- 1 | 2 | #import "TensorflowGraph.h" 3 | #import 4 | #import "constants.h" 5 | #import "TensorflowUtils.h" 6 | #import "TensorflowPrediction.h" 7 | #include 8 | #import "tensorflow/core/public/session.h" 9 | #import "tensorflow/core/util/memmapped_file_system.h" 10 | #include "string_int_label_map.pb.h" 11 | 12 | 13 | const int kGraphChannels = 3; // BGR. 14 | const int kGraphImageWidth = 299; // The width of the pixels going into the graph. 15 | const int kGraphImageHeight = 299; // the height of the pixels going into the graph. 16 | const float kPredictionThreshold = 0.65; // Prediction percentages lower than this will be discarded. 17 | const int kGraphMaxPredictions = 10; // After this many predictions we move on. 18 | const int kAverageEveryXFrames = 50; // Output average processing time every X frames 19 | 20 | @interface TensorflowGraph() 21 | { 22 | std::unique_ptr tfSession; 23 | object_detection::protos::StringIntLabelMap labelMap; 24 | } 25 | 26 | // 27 | // processingTime and framesProcessed are used for keeping an average time to make predictions. 
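// (processingTime accumulates elapsed seconds across runs; dividing by framesProcessed gives the average that is printed every kAverageEveryXFrames frames.)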
28 | // 29 | @property (nonatomic) double processingTime; 30 | @property (nonatomic) int framesProcessed; 31 | 32 | // Keep a load status - if loading fails we don't want to attempt to run 33 | // anything through a non-existent graph. 34 | @property (nonatomic) tensorflow::Status loadStatus; 35 | @property (nonatomic) tensorflow::Status labelStatus; 36 | @property (nonatomic) BOOL isProcessingFrame; 37 | 38 | @end 39 | 40 | 41 | @implementation TensorflowGraph 42 | 43 | - (id) init 44 | { 45 | self = [super init]; 46 | if (self) 47 | { 48 | // change model name here to use one of the other models. 49 | NSString *model = @"frozen_inference_graph"; 50 | NSString *label = @"mscoco_label_map"; 51 | 52 | if (![self loadModelWithFileName:model modelFileType:@"pb"]) 53 | { 54 | NSLog(@"Failed to load model"); 55 | } 56 | 57 | if (![self loadLabelsWithFileName:label labelsFileType:@"txt"]) 58 | { 59 | NSLog(@"Failed to load labels"); 60 | } 61 | } 62 | return self; 63 | } 64 | 65 | - (BOOL)loadModelWithFileName:(NSString *)modelFileName modelFileType:(NSString *)modelFileType 66 | { 67 | self.loadStatus = loadModel(modelFileName, modelFileType, &tfSession); 68 | return self.loadStatus.ok(); 69 | } 70 | 71 | - (BOOL)loadLabelsWithFileName:(NSString *)labelsFileName labelsFileType:(NSString *)labelsFileType 72 | { 73 | // 74 | // load the labels from the file. labelMap is populated by calling loadLabels. 75 | self.labelStatus = loadLabels(labelsFileName, labelsFileType, &labelMap); 76 | return self.labelStatus.ok(); 77 | } 78 | 79 | - (BOOL) canProcessFrame 80 | { 81 | return (!self.isProcessingFrame); 82 | } 83 | 84 | // 85 | // PixelBufferToCGImage 86 | // pixelBuffer --- the pixel buffer obtained from the device camera 87 | // orientation --- the orientation of the device. 88 | // 89 | // This method retains the CVPixelBuffer, copies it, and applies rotations and scaling 90 | // necessary before feeding the image data into the Tensorflow Graph. 91 | // 92 | - (CGImageRef) pixelBufferToCGImage: (CVImageBufferRef) pixelBuffer orientation: (UIDeviceOrientation) orientation 93 | { 94 | CFRetain(pixelBuffer); 95 | CVPixelBufferLockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly); 96 | 97 | // 98 | // alloc a CIImage with the pixel buffer. 99 | // 100 | CIImage* ciImage = [[CIImage alloc] initWithCVPixelBuffer:pixelBuffer]; 101 | 102 | // 103 | // figure the angle of rotation and the scaling of the pixel buffer 104 | // based on the current orientation of the device. 
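// In the portrait cases below the width/height scale factors are swapped (the buffer is assumed to arrive landscape-oriented) and the image is rotated by ∓π/2; the landscape cases scale directly and rotate by 0 or π.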
105 | // 106 | const int pixelBufHeight = (int) CVPixelBufferGetHeight(pixelBuffer); 107 | const int pixelBufWidth = (int) CVPixelBufferGetWidth(pixelBuffer); 108 | CGAffineTransform transform = CGAffineTransformIdentity; 109 | CGFloat angle = 0.0; 110 | switch (orientation) 111 | { 112 | case UIDeviceOrientationPortrait: 113 | { 114 | angle = -M_PI_2; 115 | transform = CGAffineTransformScale(transform, float(kGraphImageHeight)/pixelBufHeight, float(kGraphImageWidth)/pixelBufWidth); 116 | } 117 | break; 118 | case UIDeviceOrientationPortraitUpsideDown: 119 | { 120 | angle = M_PI_2; 121 | transform = CGAffineTransformScale(transform, float(kGraphImageHeight)/pixelBufHeight, float(kGraphImageWidth)/pixelBufWidth); 122 | } 123 | break; 124 | case UIDeviceOrientationLandscapeLeft: 125 | { 126 | angle = 0.0; 127 | transform = CGAffineTransformScale(transform, float(kGraphImageWidth)/pixelBufWidth, float(kGraphImageHeight)/pixelBufHeight); 128 | } 129 | break; 130 | case UIDeviceOrientationLandscapeRight: 131 | { 132 | angle = M_PI; 133 | transform = CGAffineTransformScale(transform, float(kGraphImageWidth)/pixelBufWidth, float(kGraphImageHeight)/pixelBufHeight); 134 | } 135 | break; 136 | case UIDeviceOrientationUnknown: 137 | case UIDeviceOrientationFaceUp: 138 | case UIDeviceOrientationFaceDown: 139 | default: 140 | angle = 0.0; 141 | transform = CGAffineTransformScale(transform, float(kGraphImageWidth)/pixelBufWidth, float(kGraphImageHeight)/pixelBufHeight); 142 | break; 143 | } 144 | 145 | // 146 | // Apply the transforms 147 | // 148 | transform = CGAffineTransformRotate(transform, angle); 149 | CIImage* resized = [ciImage imageByApplyingTransform:transform]; 150 | 151 | // 152 | // Create a cgImage from the frame pixels 153 | // 154 | CIContext *context = [CIContext contextWithOptions:nil]; 155 | CGImageRef cgImage = [context createCGImage:resized fromRect:resized.extent]; 156 | 157 | // 158 | // We are done with the pixel buffer, release it. 159 | // 160 | CFRelease(pixelBuffer); 161 | CVPixelBufferUnlockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly); 162 | 163 | return cgImage; 164 | } 165 | 166 | 167 | // 168 | // createDebugImage 169 | // srcData -- pointer to image pixel data. 170 | // width -- pixel width of the image. 171 | // height -- pixel height of the image. 172 | // 173 | // This method is useful for debuging the image data immediately before going into 174 | // the TF graph. Given a pointer to the pixel data this method will add an alpha 175 | // channel and convert the raw image data into a UIImage. The UIImage will be 176 | // broadcast to any listeners for easy display in a UIView. 177 | // 178 | - (void) createDebugImage: (unsigned char*) srcData width: (size_t) width height: (size_t) height 179 | { 180 | // 181 | // Create a destination array for the cgImage pixel data 182 | // 183 | const size_t srcChannels = kGraphChannels; 184 | const size_t dstChannels = 4; 185 | const size_t numBytes = width * height * dstChannels; 186 | unsigned char pixelData[numBytes]; 187 | unsigned char * destPixels = pixelData; 188 | 189 | // 190 | // Copy into the destination array, adding the alpha channel. 191 | // Since the raw image data comes as BGR and we want RGB we 192 | // flip the blue and red channels. Alpha is added as opaque. 
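// For example, a source BGR triplet {b, g, r} is written out as the RGBA quad {r, g, b, 0xFF}.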
193 | // 194 | size_t i = 0; 195 | while (i < (width * height * srcChannels)) 196 | { 197 | *destPixels++ = srcData[i+2]; 198 | *destPixels++ = srcData[i+1]; 199 | *destPixels++ = srcData[i]; 200 | *destPixels++ = UINT8_MAX; 201 | i += srcChannels; 202 | } 203 | 204 | // 205 | // Create the bitmap context 206 | // 207 | const size_t BitsPerComponent = 8; 208 | const size_t BytesPerRow = width * dstChannels; 209 | CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB(); 210 | CGContextRef cxt = CGBitmapContextCreate(&pixelData[0], width, height, BitsPerComponent, BytesPerRow, colorSpace, kCGImageAlphaNoneSkipLast); 211 | 212 | // 213 | // create the CGImage and UIImage from the context 214 | // 215 | CGImageRef cgImage = CGBitmapContextCreateImage(cxt); 216 | UIImage * uiImage = [[UIImage alloc] initWithCGImage:cgImage]; 217 | 218 | // 219 | // Clean up 220 | // 221 | CFRelease(cxt); 222 | CFRelease(colorSpace); 223 | CGImageRelease(cgImage); 224 | 225 | // 226 | // Notify that a new image is going to be fed to the graph. 227 | // 228 | dispatch_async(dispatch_get_main_queue(), ^(void) { 229 | [[NSNotificationCenter defaultCenter] postNotificationName:@"kDebugImageUpdated" object:nil userInfo:@{@"debugImage" : uiImage}]; 230 | }); 231 | } 232 | 233 | 234 | // 235 | // Takes a pixel buffer coming from the Camera preview session and obtains predictions w/bounding boxes from 236 | // a tensorflow graph. 237 | // 238 | - (void)runModelOnPixelBuffer:(CVPixelBufferRef) pixelBuffer orientation: (UIDeviceOrientation) orientation 239 | { 240 | // 241 | // if the graph hasn't loaded we can't do anything yet. 242 | // 243 | if (!self.loadStatus.ok() || self.isProcessingFrame) 244 | { 245 | return; 246 | } 247 | 248 | // 249 | // mark the graph as busy 250 | // 251 | self.isProcessingFrame = YES; 252 | 253 | // 254 | // Retain the pixel buffer, copy and make a CGImage out of it. pixelBufferToCGImage will 255 | // rotate the pixel buffer if necessary and scale the image down to the width and height 256 | // desired for inference. pixelBufferToCGImage will also release the CVPixelBuffer. 257 | // 258 | CGImageRef cgImage = [self pixelBufferToCGImage:pixelBuffer orientation:orientation]; 259 | 260 | // 261 | // Gather needed dimensions of the CGImage 262 | // 263 | const int srcHeight = (int) CGImageGetHeight(cgImage); 264 | const int srcWidth = (int) CGImageGetWidth(cgImage); 265 | const int bytesPerRow = (int) CGImageGetBytesPerRow(cgImage); 266 | const int srcChannels = (int) bytesPerRow / srcWidth; 267 | 268 | // 269 | // Create a tensor for running through the graph. 270 | // 271 | tensorflow::Tensor imageTensor(tensorflow::DT_UINT8, tensorflow::TensorShape({1, kGraphImageHeight, kGraphImageWidth, kGraphChannels})); 272 | auto imageTensorDimensioned = imageTensor.tensor(); 273 | 274 | // 275 | // Get a pointer to the pixel data in the cgImage. This is our starting 276 | // address of the source pixel buffer data. 277 | // 278 | CFDataRef pixelData = CGDataProviderCopyData(CGImageGetDataProvider(cgImage)); 279 | unsigned char *srcStartAddress = (unsigned char*) CFDataGetBytePtr(pixelData); 280 | 281 | // 282 | // Scale the pixel data down to the expected width and height, drop the alpha channel, 283 | // and populate the image_tensor. 284 | // The source pointer iterates through the pixel data and copies the data 285 | // into the reshaped Tensorflow image tensor. Changing the GraphInputWidth and Height 286 | // may increase (or decrease) speed and/or accuracy. 
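// The destination tensor is laid out as {1, height, width, kGraphChannels}; from each source pixel only the first kGraphChannels (3) bytes are copied, dropping the alpha channel.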
287 | // 288 | unsigned char *destStartAddress = imageTensorDimensioned.data(); 289 | for (int row = 0; row < srcHeight; ++row) 290 | { 291 | unsigned char *destRow = destStartAddress + (row * srcWidth * kGraphChannels); 292 | for (int col = 0; col < srcWidth; ++col) 293 | { 294 | unsigned char* srcPixel = srcStartAddress + (row * bytesPerRow) + (col * srcChannels); 295 | unsigned char* destPixel = destRow + (col * kGraphChannels); 296 | for (int c = 0; c < kGraphChannels; ++c) 297 | { 298 | destPixel[c] = srcPixel[c]; 299 | } 300 | } 301 | } 302 | 303 | // we are done with the CFDataRef 304 | CFRelease(pixelData); 305 | 306 | // 307 | // Move the tensorflow processing to another thread. Not only are there limited pixelBuffers 308 | // but if the thread running the videoPreview gets blocked we will get Late Frame warninigs. 309 | // Running the graph on a background thread keeps things moving. 310 | // 311 | dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{ 312 | 313 | // 314 | // Get a start time. We will clock the tensorflow processing time. 315 | // 316 | struct timespec ts_start; 317 | clock_gettime(CLOCK_MONOTONIC, &ts_start); 318 | 319 | if (self->tfSession.get()) 320 | { 321 | // Run through the graph. 322 | std::vector outputs; 323 | tensorflow::Status runStatus = self->tfSession->Run({{"image_tensor", imageTensor}}, {"detection_boxes", "detection_scores", "detection_classes", "num_detections"}, {}, &outputs); 324 | 325 | if (!runStatus.ok()) 326 | { 327 | LOG(FATAL) << "Error: " << runStatus; 328 | } 329 | else 330 | { 331 | // 332 | // Calculate the amount of time it took to run the image through 333 | // the model. 334 | // 335 | struct timespec ts_end; 336 | clock_gettime(CLOCK_MONOTONIC, &ts_end); 337 | struct timespec elapsed = diff(ts_start, ts_end); 338 | 339 | // 340 | // Calculate an average time and output every X frames. 341 | // 342 | self.processingTime += elapsed.tv_sec; 343 | self.processingTime += (elapsed.tv_nsec / 1000000000.0f); 344 | self.framesProcessed += 1; 345 | if (self.framesProcessed % kAverageEveryXFrames == 0) 346 | { 347 | printf("Avg. prediction time: %f\n", self.processingTime / self.framesProcessed); 348 | } 349 | 350 | // 351 | // Generate our list of predictions and bounding boxes 352 | // 353 | auto boundingBoxesFlat = outputs[0].flat(); 354 | tensorflow::TTypes::Flat scores_flat = outputs[1].flat(); 355 | tensorflow::TTypes::Flat indices_flat = outputs[2].flat(); 356 | 357 | NSMutableArray * predictions = [[NSMutableArray alloc] init]; 358 | for (int i = 0; i < kGraphMaxPredictions; ++i) 359 | { 360 | // 361 | // once the prediction score falls below our threshold don't bother 362 | // processing any more predictions. 
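// (detection_scores is assumed to come back sorted highest-to-lowest, so the first score under kPredictionThreshold ends the loop.)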
363 | // 364 | const float score = scores_flat(i); 365 | if (score < kPredictionThreshold) 366 | { 367 | break; 368 | } 369 | 370 | // 371 | // Keep an array of predictions 372 | // 373 | TensorflowPrediction * prediction = [[TensorflowPrediction alloc] init]; 374 | prediction.score = score; 375 | const int label_index = (tensorflow::int32)indices_flat(i); 376 | prediction.label = [NSString stringWithUTF8String:GetDisplayName(&self->labelMap, label_index).c_str()]; 377 | prediction.top = boundingBoxesFlat(i * 4 + 0); 378 | prediction.left = boundingBoxesFlat(i * 4 + 1); 379 | prediction.bottom = boundingBoxesFlat(i * 4 + 2); 380 | prediction.right = boundingBoxesFlat(i * 4 + 3); 381 | 382 | printf("Prediction: %s --- Score: %f\n", [prediction.label cStringUsingEncoding:NSASCIIStringEncoding], prediction.score); 383 | 384 | // 385 | // Crop the pixels out of the bounding box and put the cropped 386 | // image into the prediction object. Prediction values are 387 | // normalized so we multiply by the image dimensions to get 388 | // back to pixel values. 389 | // 390 | const int x = srcWidth * prediction.left; 391 | const int y = srcHeight * prediction.top; 392 | const int w = srcWidth * (prediction.right - prediction.left); 393 | const int h = srcHeight * (prediction.bottom - prediction.top); 394 | 395 | CGRect croppedArea = CGRectMake(x, y, w, h); 396 | CGImageRef cropped = CGImageCreateWithImageInRect(cgImage, croppedArea); 397 | prediction.image = [UIImage imageWithCGImage:cropped]; 398 | CGImageRelease(cropped); 399 | 400 | [predictions addObject:prediction]; 401 | } 402 | 403 | // 404 | // Notify the UI that we have new predictions. Another class will receive this 405 | // and use the data to draw bounding boxes. 406 | // 407 | dispatch_async(dispatch_get_main_queue(), ^(void) { 408 | [[NSNotificationCenter defaultCenter] postNotificationName:kPredictionsUpdated object:nil userInfo:@{@"predictions" : predictions}]; 409 | }); 410 | 411 | } 412 | 413 | CGImageRelease(cgImage); 414 | 415 | self.isProcessingFrame = NO; 416 | } // end --- if (tfSession.get) 417 | }); // end --- dispatch_async 418 | } // end --- runModelOnPixelBuffer() 419 | 420 | @end 421 | -------------------------------------------------------------------------------- /objC/Tensorflow/TensorflowPrediction.h: -------------------------------------------------------------------------------- 1 | // 2 | // TensorflowPrediction.h 3 | // tensorflowiOS 4 | // 5 | // Created by Sharp, Chris T on 10/9/17. 6 | // Copyright © 2017 Apple. All rights reserved. 7 | // 8 | 9 | #import 10 | #import 11 | 12 | @interface TensorflowPrediction : NSObject 13 | @property (nonatomic) NSString *label; 14 | @property (nonatomic) UIImage *image; 15 | @property (nonatomic) float score; 16 | @property (nonatomic) float top; 17 | @property (nonatomic) float left; 18 | @property (nonatomic) float right; 19 | @property (nonatomic) float bottom; 20 | @end 21 | -------------------------------------------------------------------------------- /objC/Tensorflow/TensorflowPrediction.m: -------------------------------------------------------------------------------- 1 | // 2 | // TensorflowPrediction.m 3 | // tensorflowiOS 4 | // 5 | // Created by Sharp, Chris T on 10/9/17. 6 | // Copyright © 2017 Apple. All rights reserved. 
7 | // 8 | 9 | #import "TensorflowPrediction.h" 10 | 11 | @implementation TensorflowPrediction 12 | 13 | @end 14 | -------------------------------------------------------------------------------- /objC/Tensorflow/TensorflowUtils.mm: -------------------------------------------------------------------------------- 1 | 2 | #import 3 | 4 | #include "TensorflowUtils.h" 5 | #include 6 | #include 7 | #include 8 | #include "string_int_label_map.pb.h" 9 | 10 | 11 | // Helper class borrowed from some utils that loads protobufs efficiently. 12 | namespace 13 | { 14 | class IfstreamInputStream : public ::google::protobuf::io::CopyingInputStream 15 | { 16 | public: 17 | explicit IfstreamInputStream(const std::string& file_name) : ifs_(file_name.c_str(), std::ios::in | std::ios::binary) {} 18 | ~IfstreamInputStream() { ifs_.close(); } 19 | 20 | int Read(void *buffer, int size) 21 | { 22 | if (!ifs_) 23 | { 24 | return -1; 25 | } 26 | ifs_.read(static_cast(buffer), size); 27 | return (int)ifs_.gcount(); 28 | } 29 | 30 | private: 31 | std::ifstream ifs_; 32 | }; 33 | } 34 | 35 | #pragma mark - Private 36 | 37 | NSString *filePathForResourceName(NSString *name, NSString *extension) 38 | { 39 | NSString *filePath = [[NSBundle mainBundle] pathForResource:name ofType:extension]; 40 | 41 | if (filePath == NULL) 42 | { 43 | LOG(FATAL) << "Couldn't find '" << [name UTF8String] << "." << [extension UTF8String] << "' in bundle."; 44 | return nullptr; 45 | } 46 | return filePath; 47 | } 48 | 49 | bool PortableReadFileToProto(const std::string& fileName, ::google::protobuf::MessageLite *proto) 50 | { 51 | ::google::protobuf::io::CopyingInputStreamAdaptor stream(new IfstreamInputStream(fileName)); 52 | stream.SetOwnsCopyingStream(true); 53 | ::google::protobuf::io::CodedInputStream codedStream(&stream); 54 | 55 | // Total bytes hard limit / warning limit are set to 1GB and 512MB 56 | // respectively. 
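// (1024LL << 20 bytes = 1 GiB and 512LL << 20 bytes = 512 MiB.)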
57 | codedStream.SetTotalBytesLimit(1024LL << 20, 512LL << 20); 58 | return proto->ParseFromCodedStream(&codedStream); 59 | } 60 | 61 | #pragma mark - Public 62 | 63 | tensorflow::Status loadModel(NSString *fileName, NSString *fileType, std::unique_ptr *session) 64 | { 65 | tensorflow::SessionOptions options; 66 | 67 | tensorflow::Session *sessionPointer = nullptr; 68 | tensorflow::Status sessionStatus = tensorflow::NewSession(options, &sessionPointer); 69 | 70 | if (!sessionStatus.ok()) 71 | { 72 | LOG(ERROR) << "Could not create TensorFlow Session: " << sessionStatus; 73 | return sessionStatus; 74 | } 75 | session->reset(sessionPointer); 76 | 77 | tensorflow::GraphDef tensorflowGraph; 78 | 79 | NSString *modelPath = filePathForResourceName(fileName, fileType); 80 | 81 | if (!modelPath) 82 | { 83 | LOG(ERROR) << "Failed to find model proto at" << [fileName UTF8String] << [fileType UTF8String]; 84 | return tensorflow::errors::NotFound([fileName UTF8String], [fileType UTF8String]); 85 | } 86 | 87 | const bool readProtoSucceeded = PortableReadFileToProto([modelPath UTF8String], &tensorflowGraph); 88 | 89 | if (!readProtoSucceeded) 90 | { 91 | LOG(ERROR) << "Failed to load model proto from" << [modelPath UTF8String]; 92 | return tensorflow::errors::NotFound([modelPath UTF8String]); 93 | } 94 | 95 | tensorflow::Status create_status = (*session)->Create(tensorflowGraph); 96 | 97 | if (!create_status.ok()) 98 | { 99 | LOG(ERROR) << "Could not create TensorFlow Graph: " << create_status; 100 | return create_status; 101 | } 102 | 103 | return tensorflow::Status::OK(); 104 | } 105 | 106 | tensorflow::Status loadLabels(NSString *fileName, NSString *fileType, object_detection::protos::StringIntLabelMap *labelMap) 107 | { 108 | // Read the label list 109 | NSString *labelsPath = filePathForResourceName(fileName, fileType); 110 | 111 | if (!labelsPath) 112 | { 113 | LOG(ERROR) << "Failed to find model proto at" << [fileName UTF8String] << [fileType UTF8String]; 114 | return tensorflow::errors::NotFound([fileName UTF8String], [fileType UTF8String]); 115 | } 116 | 117 | int fileDescriptor = open([labelsPath UTF8String], O_RDONLY); 118 | if (fileDescriptor >= 0) 119 | { 120 | google::protobuf::io::FileInputStream fileInput(fileDescriptor); 121 | fileInput.SetCloseOnDelete( true ); 122 | 123 | if (!google::protobuf::TextFormat::Parse(&fileInput, labelMap)) 124 | { 125 | LOG(ERROR) << "Failed to parse label file.\n"; 126 | return tensorflow::errors::Aborted([fileName UTF8String], [fileType UTF8String]); 127 | } 128 | } 129 | 130 | return tensorflow::Status::OK(); 131 | } 132 | 133 | std::string GetDisplayName(const object_detection::protos::StringIntLabelMap* labels, int index) 134 | { 135 | for (int i = 0; i < labels->item_size(); ++i) 136 | { 137 | const object_detection::protos::StringIntLabelMapItem& item = labels->item(i); 138 | if (index == item.id()) 139 | { 140 | return item.display_name(); 141 | } 142 | } 143 | 144 | return ""; 145 | } 146 | 147 | // 148 | // Calculate and return elapsed time between to struct timespecs 149 | // 150 | timespec diff(timespec start, timespec end) 151 | { 152 | timespec temp; 153 | if ((end.tv_nsec-start.tv_nsec)<0) 154 | { 155 | temp.tv_sec = end.tv_sec-start.tv_sec-1; 156 | temp.tv_nsec = 1000000000+end.tv_nsec-start.tv_nsec; 157 | } 158 | else 159 | { 160 | temp.tv_sec = end.tv_sec-start.tv_sec; 161 | temp.tv_nsec = end.tv_nsec-start.tv_nsec; 162 | } 163 | return temp; 164 | } 165 | 166 | 167 | 
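// Illustrative usage of the helpers above (a sketch, not part of the project sources):
//
//   std::unique_ptr<tensorflow::Session> session;
//   object_detection::protos::StringIntLabelMap labels;
//   if (loadModel(@"frozen_inference_graph", @"pb", &session).ok() &&
//       loadLabels(@"mscoco_label_map", @"txt", &labels).ok())
//   {
//       // run the graph with session->Run(...) and map a returned class id to a
//       // human-readable string with GetDisplayName(&labels, classId), as TensorflowGraph.mm does.
//   }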
-------------------------------------------------------------------------------- /objC/Tensorflow/constants.h: -------------------------------------------------------------------------------- 1 | 2 | #ifndef constants_h 3 | #define constants_h 4 | 5 | extern NSString * const kAVSessionStarted; 6 | extern NSString * const kSetupResultCameraNotAuthorized; 7 | extern NSString * const kSetupResultSessionConfigurationFailed; 8 | extern NSString * const kPredictionsUpdated; 9 | 10 | #endif /* constants_h */ 11 | -------------------------------------------------------------------------------- /objC/Tensorflow/constants.m: -------------------------------------------------------------------------------- 1 | 2 | #import 3 | 4 | NSString * const kAVSessionStarted = @"kAVSessionStarted"; 5 | NSString * const kSetupResultCameraNotAuthorized = @"kSetupResultCameraNotAuthorized"; 6 | NSString * const kSetupResultSessionConfigurationFailed = @"SetupResultSessionConfigurationFailed"; 7 | NSString * const kPredictionsUpdated = @"kPredictionsUpdated"; 8 | 9 | -------------------------------------------------------------------------------- /objC/Tensorflow/ops_to_register.h: -------------------------------------------------------------------------------- 1 | // This file was autogenerated by print_selective_registration_header.py 2 | #ifndef OPS_TO_REGISTER 3 | #define OPS_TO_REGISTER 4 | 5 | namespace { 6 | constexpr const char* skip(const char* x) { 7 | return (*x) ? (*x == ' ' ? skip(x + 1) : x) : x; 8 | } 9 | 10 | constexpr bool isequal(const char* x, const char* y) { 11 | return (*skip(x) && *skip(y)) 12 | ? (*skip(x) == *skip(y) && isequal(skip(x) + 1, skip(y) + 1)) 13 | : (!*skip(x) && !*skip(y)); 14 | } 15 | 16 | template 17 | struct find_in { 18 | static constexpr bool f(const char* x, const char* const y[N]) { 19 | return isequal(x, y[0]) || find_in::f(x, y + 1); 20 | } 21 | }; 22 | 23 | template<> 24 | struct find_in<0> { 25 | static constexpr bool f(const char* x, const char* const y[]) { 26 | return false; 27 | } 28 | }; 29 | } // end namespace 30 | constexpr const char* kNecessaryOpKernelClasses[] = { 31 | "BinaryOp< CPUDevice, functor::add>", 32 | "BinaryOp< CPUDevice, functor::add>", 33 | "AddNOp< CPUDevice, float>", 34 | "ReductionOp", 35 | "AssertOp", 36 | "AvgPoolingOp", 37 | "BiasOp", 38 | "CpuCastOp", 39 | "ConcatV2Op", 40 | "ConcatV2Op", 41 | "ConstantOp", 42 | //"Conv2DOp", 43 | "Conv2DUsingGemmOp< float, Im2ColConvFunctor>>", 44 | "DepthwiseConv2dNativeOp", 45 | "CropAndResizeOp", 46 | "DequantizeOp", 47 | "EnterOp", 48 | "BinaryOp< CPUDevice, functor::equal_to>", 49 | "ExitOp", 50 | "UnaryOp< CPUDevice, functor::exp>", 51 | "ExpandDimsOp", 52 | "FillOp", 53 | "FillOp", 54 | "GatherOp", 55 | "BinaryOp< CPUDevice, functor::greater>", 56 | "BinaryOp< CPUDevice, functor::greater>", 57 | "BinaryOp< CPUDevice, functor::greater_equal>", 58 | "IdentityOp", 59 | "BinaryOp< CPUDevice, functor::less>", 60 | "BinaryOp", 61 | "LoopCondOp", 62 | "MatMulOp", 63 | "ReductionOp>", 64 | "MaxPoolingOp", 65 | "BinaryOp< CPUDevice, functor::maximum>", 66 | "ReductionOp>", 67 | "MergeOp", 68 | "BinaryOp< CPUDevice, functor::minimum>", 69 | "BinaryOp< CPUDevice, functor::minimum>", 70 | "BinaryOp< CPUDevice, functor::mul>", 71 | "BinaryOp< CPUDevice, functor::mul>", 72 | "NextIterationOp", 73 | "NoOp", 74 | "NonMaxSuppressionOp", 75 | "PackOp", 76 | "PackOp", 77 | "PadOp", 78 | "PlaceholderOp", 79 | "RangeOp<::tensorflow::int32>", 80 | "RankOp", 81 | "BinaryOp< CPUDevice, functor::div>", 82 | "ReluOp", 83 | 
"Relu6Op", 84 | "ReshapeOp", 85 | "ResizeBilinearOp", 86 | "UnaryOp< CPUDevice, functor::round>", 87 | "UnaryOp< CPUDevice, functor::rsqrt>", 88 | "ShapeOp", 89 | "UnaryOp< CPUDevice, functor::sigmoid>", 90 | "SizeOp", 91 | "SliceOp", 92 | "SliceOp", 93 | "SoftmaxOp", 94 | "SplitOpCPU", 95 | "UnaryOp< CPUDevice, functor::sqrt>", 96 | "SqueezeOp", 97 | "StridedSliceOp", 98 | "StridedSliceOp", 99 | "BinaryOp< CPUDevice, functor::sub>", 100 | "BinaryOp< CPUDevice, functor::sub>", 101 | "SwitchOp", 102 | "TensorArrayPackOrGatherOp", 103 | "TensorArrayReadOp", 104 | "TensorArrayUnpackOrScatterOp", 105 | "TensorArraySizeOp", 106 | "TensorArrayOp", 107 | "TensorArrayWriteOp", 108 | "TileOp", 109 | "TopK", 110 | "TransposeCpuOp", 111 | "UnpackOp", 112 | "UnpackOp", 113 | "WhereCPUOp", 114 | "ZerosLikeOp< CPUDevice, float>", 115 | "RecvOp", 116 | "SendOp", 117 | }; 118 | #define SHOULD_REGISTER_OP_KERNEL(clz) (find_in::f(clz, kNecessaryOpKernelClasses)) 119 | 120 | constexpr inline bool ShouldRegisterOp(const char op[]) { 121 | return false 122 | || isequal(op, "Add") 123 | || isequal(op, "AddN") 124 | || isequal(op, "All") 125 | || isequal(op, "Assert") 126 | || isequal(op, "AvgPool") 127 | || isequal(op, "BiasAdd") 128 | || isequal(op, "Cast") 129 | || isequal(op, "ConcatV2") 130 | || isequal(op, "Const") 131 | || isequal(op, "Conv2D") 132 | || isequal(op, "DepthwiseConv2dNative") 133 | || isequal(op, "CropAndResize") 134 | || isequal(op, "Dequantize") 135 | || isequal(op, "Enter") 136 | || isequal(op, "Equal") 137 | || isequal(op, "Exit") 138 | || isequal(op, "Exp") 139 | || isequal(op, "ExpandDims") 140 | || isequal(op, "Fill") 141 | || isequal(op, "Gather") 142 | || isequal(op, "Greater") 143 | || isequal(op, "GreaterEqual") 144 | || isequal(op, "Identity") 145 | || isequal(op, "Less") 146 | || isequal(op, "LogicalAnd") 147 | || isequal(op, "LoopCond") 148 | || isequal(op, "MatMul") 149 | || isequal(op, "Max") 150 | || isequal(op, "MaxPool") 151 | || isequal(op, "Maximum") 152 | || isequal(op, "Mean") 153 | || isequal(op, "Merge") 154 | || isequal(op, "Minimum") 155 | || isequal(op, "Mul") 156 | || isequal(op, "NextIteration") 157 | || isequal(op, "NoOp") 158 | || isequal(op, "NonMaxSuppression") 159 | || isequal(op, "Pack") 160 | || isequal(op, "Pad") 161 | || isequal(op, "Placeholder") 162 | || isequal(op, "Range") 163 | || isequal(op, "Rank") 164 | || isequal(op, "RealDiv") 165 | || isequal(op, "Relu") 166 | || isequal(op, "Relu6") 167 | || isequal(op, "Reshape") 168 | || isequal(op, "ResizeBilinear") 169 | || isequal(op, "Rsqrt") 170 | || isequal(op, "Round") 171 | || isequal(op, "Shape") 172 | || isequal(op, "Sigmoid") 173 | || isequal(op, "Size") 174 | || isequal(op, "Slice") 175 | || isequal(op, "Softmax") 176 | || isequal(op, "Split") 177 | || isequal(op, "Sqrt") 178 | || isequal(op, "Squeeze") 179 | || isequal(op, "StridedSlice") 180 | || isequal(op, "Sub") 181 | || isequal(op, "Switch") 182 | || isequal(op, "TensorArrayGatherV3") 183 | || isequal(op, "TensorArrayReadV3") 184 | || isequal(op, "TensorArrayScatterV3") 185 | || isequal(op, "TensorArraySizeV3") 186 | || isequal(op, "TensorArrayV3") 187 | || isequal(op, "TensorArrayWriteV3") 188 | || isequal(op, "Tile") 189 | || isequal(op, "TopKV2") 190 | || isequal(op, "Transpose") 191 | || isequal(op, "Unpack") 192 | || isequal(op, "Where") 193 | || isequal(op, "ZerosLike") 194 | || isequal(op, "_Recv") 195 | || isequal(op, "_Send") 196 | ; 197 | } 198 | #define SHOULD_REGISTER_OP(op) ShouldRegisterOp(op) 199 | 200 | #define 
SHOULD_REGISTER_OP_GRADIENT false 201 | #endif 202 | -------------------------------------------------------------------------------- /objC/Tensorflow/tensorflowUtils.h: -------------------------------------------------------------------------------- 1 | 2 | #ifndef tensorflowUtils_h 3 | #define tensorflowUtils_h 4 | 5 | #pragma clang diagnostic push 6 | #pragma clang diagnostic ignored "-Wconversion" 7 | #pragma clang diagnostic ignored "-Wdocumentation" 8 | #pragma clang diagnostic ignored "-Wconditional-uninitialized" 9 | #pragma clang diagnostic ignored "-Wcomma" 10 | #include "tensorflow/core/public/session.h" 11 | #include "tensorflow/core/util/memmapped_file_system.h" 12 | #include "third_party/eigen3/unsupported/Eigen/CXX11/Tensor" 13 | #pragma clang diagnostic pop 14 | 15 | namespace object_detection 16 | { 17 | namespace protos 18 | { 19 | class StringIntLabelMap; 20 | } 21 | } 22 | 23 | // Reads a serialized GraphDef protobuf file from the bundle, typically 24 | // created with the freeze_graph script. Populates the session argument with a 25 | // Session object that has the model loaded. 26 | tensorflow::Status loadModel(NSString* file_name, 27 | NSString* file_type, 28 | std::unique_ptr* session); 29 | 30 | // Loads a model from a file that has been created using the 31 | // convert_graphdef_memmapped_format tool. This bundles together a GraphDef 32 | // proto together with a file that can be memory-mapped, containing the weight 33 | // parameters for the model. This is useful because it reduces the overall 34 | // memory pressure, since the read-only parameter regions can be easily paged 35 | // out and don't count toward memory limits on iOS. 36 | tensorflow::Status loadMemoryMappedModel(NSString* file_name, 37 | NSString* file_type, 38 | std::unique_ptr* session, 39 | std::unique_ptr* memmapped_env); 40 | 41 | // Loads a text file of a label map in mscoco style. 42 | tensorflow::Status loadLabels(NSString *fileName, NSString *fileType, object_detection::protos::StringIntLabelMap *labelMap); 43 | 44 | // Takes a label Map and an index into it. Returns the 'DisplayName' field in the protobuf 45 | std::string GetDisplayName(const object_detection::protos::StringIntLabelMap* labels, int index); 46 | timespec diff(timespec start, timespec end); 47 | #endif /* tensorflowUtils_h */ 48 | -------------------------------------------------------------------------------- /objC/tensorflowiOS.xcodeproj/project.xcworkspace/contents.xcworkspacedata: -------------------------------------------------------------------------------- 1 | 2 | 4 | 6 | 7 | 8 | -------------------------------------------------------------------------------- /swift/App/AppDelegate.swift: -------------------------------------------------------------------------------- 1 | // 2 | // AppDelegate.swift 3 | // tensorflowiOS 4 | // 5 | // Created by Chris Sharp on 11/10/17. 6 | // Copyright © 2017 Chris Sharp. All rights reserved. 7 | // 8 | 9 | import UIKit 10 | 11 | @UIApplicationMain 12 | class AppDelegate: UIResponder, UIApplicationDelegate { 13 | 14 | var window: UIWindow? 15 | 16 | 17 | func application(_ application: UIApplication, didFinishLaunchingWithOptions launchOptions: [UIApplicationLaunchOptionsKey: Any]?) -> Bool { 18 | // Override point for customization after application launch. 19 | return true 20 | } 21 | 22 | func applicationWillResignActive(_ application: UIApplication) { 23 | // Sent when the application is about to move from active to inactive state. 
This can occur for certain types of temporary interruptions (such as an incoming phone call or SMS message) or when the user quits the application and it begins the transition to the background state. 24 | // Use this method to pause ongoing tasks, disable timers, and invalidate graphics rendering callbacks. Games should use this method to pause the game. 25 | } 26 | 27 | func applicationDidEnterBackground(_ application: UIApplication) { 28 | // Use this method to release shared resources, save user data, invalidate timers, and store enough application state information to restore your application to its current state in case it is terminated later. 29 | // If your application supports background execution, this method is called instead of applicationWillTerminate: when the user quits. 30 | } 31 | 32 | func applicationWillEnterForeground(_ application: UIApplication) { 33 | // Called as part of the transition from the background to the active state; here you can undo many of the changes made on entering the background. 34 | } 35 | 36 | func applicationDidBecomeActive(_ application: UIApplication) { 37 | // Restart any tasks that were paused (or not yet started) while the application was inactive. If the application was previously in the background, optionally refresh the user interface. 38 | } 39 | 40 | func applicationWillTerminate(_ application: UIApplication) { 41 | // Called when the application is about to terminate. Save data if appropriate. See also applicationDidEnterBackground:. 42 | } 43 | 44 | 45 | } 46 | 47 | -------------------------------------------------------------------------------- /swift/App/BoundingBoxView.swift: -------------------------------------------------------------------------------- 1 | // 2 | // BoundingBoxView.swift 3 | // tensorflowiOS 4 | // 5 | // Created by Chris Sharp on 11/11/17. 6 | // Copyright © 2017 Chris Sharp. All rights reserved. 7 | // 8 | 9 | import UIKit 10 | 11 | class BoundingBoxView: UIView 12 | { 13 | let BoundingBoxLineWidth = 3.5 14 | var boxesToBeErased : [TensorflowPrediction] = [] 15 | var boxesToBeDrawn : [TensorflowPrediction] = [] 16 | var labels : [UILabel] = [] 17 | 18 | // 19 | // in drawRect we have a clear UIView that we draw green bounding boxes in. 20 | // As a new list of boundingboxes comes in we erase the old boxes and draw the new ones. 21 | // Since this view is just a layer over the videoPreview the bounding boxes could be a few 22 | // frames behind and the box will not align with the object underneath it. This will likely 23 | // be an issue until Tensorflow processing is as fast as the video preview's frame capture. 24 | // 25 | override func draw(_ rect: CGRect) 26 | { 27 | // 28 | // Our drawing context 29 | // 30 | if let context:CGContext = UIGraphicsGetCurrentContext() { 31 | 32 | // 33 | // The width of the bounding box lines. 34 | // 35 | context.setLineWidth(CGFloat(BoundingBoxLineWidth)); 36 | 37 | // 38 | // The fill color of the bounding box is always clear 39 | // 40 | context.setFillColor(red: 1.0, green: 1.0, blue: 1.0, alpha: 0.0) 41 | 42 | // 43 | // Erase boxes from the previous frame 44 | // 45 | if (!self.boxesToBeErased.isEmpty) 46 | { 47 | for pred:TensorflowPrediction in self.boxesToBeErased 48 | { 49 | // Erase the previous bounding box by using a clear stroke color 50 | context.setStrokeColor(red: 1.0, green: 1.0, blue: 1.0, alpha: 0.0) 51 | 52 | // Calculate box dimensions of box to be erased. 
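// (Prediction coordinates are normalized 0...1, so multiplying by the view's width/height converts them to points in this view.)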
53 | let x = CGFloat(pred.left) * self.frame.size.width 54 | let y = CGFloat(pred.top) * self.frame.size.height 55 | let w = (CGFloat(pred.right) * self.frame.size.width) - x 56 | let h = (CGFloat(pred.bottom) * self.frame.size.height) - y 57 | let rectangle:CGRect = CGRect(x: x, y: y, width: w, height: h) 58 | 59 | //Erase it. (draw clear pixels over the green) 60 | context.fill(rectangle) 61 | context.stroke(rectangle) 62 | } 63 | 64 | // 65 | // Remove existing labels too. 66 | // 67 | for label in labels 68 | { 69 | label.removeFromSuperview() 70 | } 71 | 72 | self.labels.removeAll() 73 | self.boxesToBeErased.removeAll() 74 | } 75 | 76 | // 77 | // Draw newly predicted boxes 78 | // 79 | for pred:TensorflowPrediction in boxesToBeDrawn { 80 | // 81 | // Calculate the box dimensions. The box dimensions are given 82 | // as normalized values. Because this view has the same dimensions 83 | // as the original image multiplying by width and height gives the 84 | // correct location for the bounding box. 85 | // 86 | let x = CGFloat(pred.left) * self.frame.size.width; 87 | let y = CGFloat(pred.top) * self.frame.size.height; 88 | let w = (CGFloat(pred.right) * self.frame.size.width) - x; 89 | let h = (CGFloat(pred.bottom) * self.frame.size.height) - y; 90 | let rectangle = CGRect(x: x, y: y, width: w, height: h) 91 | 92 | // Draw with a green stroke. 93 | context.setStrokeColor(red: 0.0, green: 1.0, blue: 0.0, alpha: 0.75) 94 | context.fill(rectangle) 95 | context.stroke(rectangle) 96 | 97 | // Add the label to the upper left of the bounding box 98 | let label:UILabel = UILabel(frame: CGRect(x: x, y: y, width: 75, height: 35)) 99 | label.backgroundColor = UIColor.white 100 | label.textColor = UIColor.orange 101 | label.text = String(format: "%@ %.1f%%", pred.label, pred.score * 100) 102 | label.sizeToFit() 103 | self.addSubview(label) 104 | 105 | // 106 | // Keep a list of labels so we can easily remove from superview. 107 | // 108 | labels.append(label) 109 | } 110 | } 111 | } 112 | 113 | func updateBoundingBoxes(_ boxes:[TensorflowPrediction]) 114 | { 115 | // 116 | // flag the old boxes to be erased and flag the new to be drawn. 117 | // 118 | self.boxesToBeErased = self.boxesToBeDrawn; 119 | self.boxesToBeDrawn = boxes; 120 | 121 | // 122 | // trigger a drawRect call next frame 123 | // 124 | self.setNeedsDisplay() 125 | } 126 | } 127 | -------------------------------------------------------------------------------- /swift/App/CameraPreviewView.swift: -------------------------------------------------------------------------------- 1 | // 2 | // CameraPreviewView.swift 3 | // tensorflowiOS 4 | // 5 | // Created by Chris Sharp on 11/11/17. 6 | // Copyright © 2017 Chris Sharp. All rights reserved. 7 | // 8 | 9 | import UIKit 10 | import AVFoundation 11 | 12 | class CameraPreviewView: UIView 13 | { 14 | private enum SessionSetupResult 15 | { 16 | case success 17 | case notAuthorized 18 | case configurationFailed 19 | } 20 | 21 | private var cameraSetupResult: SessionSetupResult = .success 22 | private let avSession = AVCaptureSession() 23 | private var isSessionRunning = false 24 | 25 | // 26 | // Communicate with the session and other session objects on this queue. 
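// Using a dedicated serial queue keeps AVCaptureSession configuration work off the main thread.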
27 | // 28 | private let previewSessionQueue = DispatchQueue(label: "PreviewSessionQueue") 29 | 30 | // 31 | // We use a serial queue for the video frames so that they are 32 | // dispatched in the order that they are captured 33 | // 34 | private let videoSessionQueue = DispatchQueue(label: "VideoFrameQueue") 35 | 36 | private let videoOutput:AVCaptureVideoDataOutput = AVCaptureVideoDataOutput() 37 | 38 | private var keyValueObservations = [NSKeyValueObservation]() 39 | 40 | required init?(coder aDecoder: NSCoder) 41 | { 42 | super.init(coder: aDecoder) 43 | setupSession() 44 | } 45 | 46 | //////////////////////////////////////// 47 | // MARK: - Video Session Setup and Configuration 48 | 49 | func setupSession() 50 | { 51 | self.videoPreviewLayer.session = avSession 52 | 53 | switch AVCaptureDevice.authorizationStatus(for: .video) 54 | { 55 | case .authorized: 56 | // 57 | // The user has previously granted access to the camera. 58 | // 59 | break 60 | 61 | case .notDetermined: 62 | // 63 | // The user has not yet been presented with the option to grant 64 | // video access. We suspend the session queue to delay session 65 | // setup until the access request has completed. 66 | // 67 | previewSessionQueue.suspend() 68 | AVCaptureDevice.requestAccess(for: .video, completionHandler: { granted in 69 | if !granted 70 | { 71 | self.cameraSetupResult = .notAuthorized 72 | } 73 | self.previewSessionQueue.resume() 74 | }) 75 | 76 | default: 77 | // 78 | // The user has previously denied access. 79 | // 80 | cameraSetupResult = .notAuthorized 81 | } 82 | } 83 | 84 | 85 | func configureSession(delegate:AVCaptureVideoDataOutputSampleBufferDelegate ) 86 | { 87 | previewSessionQueue.async { 88 | 89 | if (self.cameraSetupResult != .success) 90 | { 91 | return 92 | } 93 | 94 | self.avSession.beginConfiguration() 95 | 96 | // 97 | // Add video input. 98 | // We try to add the Back Dual Camera and if that is unavailable we try the back wide angle 99 | // camera. If that fails, we try to add the front wide angle camera 100 | // 101 | do 102 | { 103 | var defaultVideoDevice: AVCaptureDevice? 104 | 105 | // 106 | // Choose the back dual camera if available, otherwise default to a wide angle camera. 107 | // 108 | if let dualCameraDevice = AVCaptureDevice.default(.builtInDualCamera, for: .video, position: .back) 109 | { 110 | defaultVideoDevice = dualCameraDevice 111 | } 112 | else if let backCameraDevice = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .back) 113 | { 114 | // 115 | // If the back dual camera is not available, default to the back wide angle camera. 116 | // 117 | defaultVideoDevice = backCameraDevice 118 | } 119 | else if let frontCameraDevice = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .front) 120 | { 121 | // 122 | // In some cases where users break their phones, the back wide angle camera is not available. 123 | // In this case, we should default to the front wide angle camera. 124 | // 125 | defaultVideoDevice = frontCameraDevice 126 | } 127 | 128 | let videoDeviceInput = try AVCaptureDeviceInput(device: defaultVideoDevice!) 129 | 130 | if self.avSession.canAddInput(videoDeviceInput) 131 | { 132 | self.avSession.addInput(videoDeviceInput) 133 | 134 | DispatchQueue.main.async { 135 | // 136 | // Dispatching this to the main queue because a UIView (CameraPreviewView) can only be 137 | // changed on the main thread. 
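// The initial video orientation is derived from the current status bar orientation so the preview is not mis-rotated at launch.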
138 | // 139 | let statusBarOrientation = UIApplication.shared.statusBarOrientation 140 | var initialVideoOrientation: AVCaptureVideoOrientation = .portrait 141 | if (statusBarOrientation != .unknown) 142 | { 143 | if let videoOrientation = AVCaptureVideoOrientation(interfaceOrientation: statusBarOrientation) { 144 | initialVideoOrientation = videoOrientation 145 | } 146 | } 147 | 148 | self.videoPreviewLayer.connection?.videoOrientation = initialVideoOrientation 149 | } 150 | } 151 | else 152 | { 153 | print("Could not add video device input to the session") 154 | self.cameraSetupResult = .configurationFailed 155 | self.avSession.commitConfiguration() 156 | return 157 | } 158 | } 159 | catch 160 | { 161 | print("Could not create video device input: \(error)") 162 | self.cameraSetupResult = .configurationFailed 163 | self.avSession.commitConfiguration() 164 | return 165 | } 166 | 167 | // 168 | // Add video output too. 169 | // 170 | self.addVideoOutput(delegate) 171 | 172 | self.avSession.commitConfiguration() 173 | } // previewSessionQueue.async() 174 | } 175 | 176 | private func addVideoOutput(_ delegate:AVCaptureVideoDataOutputSampleBufferDelegate) 177 | { 178 | // 179 | // We use the 32 bit BGRA pixel format type. That way we can just pass the data to 180 | // Tensorflow without pre-processing. 181 | // 182 | let newSettings = [String(kCVPixelBufferPixelFormatTypeKey) : kCVPixelFormatType_32BGRA] 183 | videoOutput.videoSettings = newSettings; 184 | videoOutput.alwaysDiscardsLateVideoFrames = true 185 | videoOutput.setSampleBufferDelegate(delegate, queue: videoSessionQueue) 186 | 187 | // 188 | // Add the videoOutput to our AVSession 189 | // 190 | if avSession.canAddOutput(videoOutput) 191 | { 192 | avSession.beginConfiguration() 193 | avSession.addOutput(videoOutput) 194 | avSession.sessionPreset = AVCaptureSession.Preset.high; 195 | let connection:AVCaptureConnection = videoOutput.connection(with: AVMediaType.video)! 196 | if ( connection.isVideoStabilizationSupported ) 197 | { 198 | connection.preferredVideoStabilizationMode = AVCaptureVideoStabilizationMode.auto 199 | } 200 | 201 | avSession.commitConfiguration() 202 | } 203 | } 204 | 205 | /////////////////////////////////////////////////////////////////////// 206 | // MARK: - UIView and Session life cycle 207 | var videoPreviewLayer: AVCaptureVideoPreviewLayer 208 | { 209 | guard let layer = layer as? AVCaptureVideoPreviewLayer else { 210 | fatalError("Expected `AVCaptureVideoPreviewLayer` type for layer. Check PreviewView.layerClass implementation.") 211 | } 212 | 213 | return layer 214 | } 215 | 216 | var session: AVCaptureSession? 
{ 217 | get { 218 | return videoPreviewLayer.session 219 | } 220 | set { 221 | videoPreviewLayer.session = newValue 222 | } 223 | } 224 | 225 | override class var layerClass: AnyClass 226 | { 227 | return AVCaptureVideoPreviewLayer.self 228 | } 229 | 230 | private func addObservers() 231 | { 232 | NotificationCenter.default.addObserver(self, selector: #selector(sessionRuntimeError(notification:)), 233 | name: .AVCaptureSessionRuntimeError, 234 | object: avSession) 235 | } 236 | 237 | private func removeObservers() 238 | { 239 | NotificationCenter.default.removeObserver(self) 240 | 241 | for keyValueObservation in keyValueObservations 242 | { 243 | keyValueObservation.invalidate() 244 | } 245 | keyValueObservations.removeAll() 246 | } 247 | 248 | func startSession() 249 | { 250 | previewSessionQueue.async { 251 | switch ( self.cameraSetupResult ) 252 | { 253 | case SessionSetupResult.success: 254 | // if setup succeeded we can add Observers and the frame delegate and run the session. 255 | self.addObservers() 256 | 257 | self.avSession.startRunning() 258 | self.isSessionRunning = self.avSession.isRunning; 259 | 260 | // 261 | // Let everyone know we have a session. This tells all listeners that 262 | // the AV Session is ready and capturing video frames. It is now okay to 263 | // load the tensorflow graph 264 | // 265 | NotificationCenter.default.post(name: NSNotification.Name(rawValue: kAVSessionStarted), object:nil) 266 | 267 | case .notAuthorized: 268 | NotificationCenter.default.post(name: NSNotification.Name(rawValue: kSetupResultCameraNotAuthorized), object: nil) 269 | 270 | case .configurationFailed: 271 | NotificationCenter.default.post(name: NSNotification.Name(rawValue: kSetupResultSessionConfigurationFailed), object: nil) 272 | } 273 | } 274 | } 275 | 276 | func stopSession() 277 | { 278 | previewSessionQueue.async { 279 | if ( self.cameraSetupResult == .success ) 280 | { 281 | self.avSession.stopRunning() 282 | self.removeObservers() 283 | } 284 | } 285 | } 286 | 287 | 288 | @objc func sessionRuntimeError(notification: NSNotification) 289 | { 290 | guard let error = notification.userInfo?[AVCaptureSessionErrorKey] as? AVError else { return } 291 | 292 | print("Capture session runtime error: \(error)") 293 | 294 | /* 295 | Automatically try to restart the session running if media services were 296 | reset and the last start running succeeded. Otherwise, enable the user 297 | to try to resume the session running. 298 | */ 299 | if error.code == .mediaServicesWereReset 300 | { 301 | previewSessionQueue.async { 302 | if self.isSessionRunning 303 | { 304 | self.avSession.startRunning() 305 | self.isSessionRunning = self.avSession.isRunning 306 | } 307 | } 308 | } 309 | } 310 | } 311 | 312 | -------------------------------------------------------------------------------- /swift/App/ViewController.swift: -------------------------------------------------------------------------------- 1 | 2 | // 3 | // ViewController.swift 4 | // tensorflowiOS 5 | // 6 | // Created by Chris Sharp on 11/10/17. 7 | // Copyright © 2017 Chris Sharp. All rights reserved. 8 | // 9 | 10 | import UIKit 11 | import AVFoundation 12 | 13 | 14 | class ViewController:UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate 15 | { 16 | @IBOutlet weak var cameraUnavailableLabel : UILabel! 17 | @IBOutlet weak var boundingBoxView : BoundingBoxView! 18 | @IBOutlet weak var cameraPreviewView : CameraPreviewView! 19 | var tensorflowGraph:TensorflowGraph? 
= nil 20 | 21 | override func viewDidLoad() 22 | { 23 | super.viewDidLoad() 24 | 25 | // 26 | // Configure the video preview. We will grab frames 27 | // from the video preview and feed them into the tensorflow graph. 28 | // Then bounding boxes can be rendered onto the boundingBoxView. 29 | // 30 | cameraPreviewView.configureSession(delegate: self) 31 | } 32 | 33 | 34 | override func viewWillAppear(_ animated: Bool) 35 | { 36 | super.viewWillAppear(animated) 37 | 38 | // 39 | // Listen for the start of the AVSession. This will signal the start 40 | // of the delivery of video frames and will trigger the 41 | // initialization of the tensorflow graph 42 | // 43 | NotificationCenter.default.addObserver(self, selector: #selector(OnAvSessionStarted(notification:)), 44 | name: NSNotification.Name(rawValue: kAVSessionStarted), 45 | object: nil) 46 | 47 | // 48 | // Also Listen for Session initialization failure or for when 49 | // the user doesn't authorize the use of the camera 50 | // 51 | NotificationCenter.default.addObserver(self, selector: #selector(OnSetupResultCameraNotAuthorized(notification:)), 52 | name: Notification.Name(kSetupResultCameraNotAuthorized), 53 | object:nil) 54 | 55 | NotificationCenter.default.addObserver(self, selector: #selector(OnSetupResultSessionConfigurationFailed(notification:)), 56 | name: Notification.Name(kSetupResultSessionConfigurationFailed), 57 | object:nil) 58 | 59 | // 60 | // Respond to the tensorflow graph's update of predictions. This will 61 | // trigger the redrawing of the bounding boxes. 62 | // 63 | NotificationCenter.default.addObserver(self, selector: #selector(OnPredictionsUpdated(notification:)), 64 | name: Notification.Name(kPredictionsUpdated), 65 | object:nil) 66 | // 67 | // Start the AV Session. This will prompt the user for 68 | // permission to use the camera to present a video preview. 69 | // 70 | cameraPreviewView.startSession() 71 | } 72 | 73 | // 74 | // when the view disappears we shut down the session. It will be restarted in ViewWillAppear 75 | // 76 | override func viewWillDisappear(_ animated: Bool) 77 | { 78 | super.viewWillAppear(animated) 79 | cameraPreviewView.stopSession() 80 | } 81 | 82 | // 83 | // We allow autorotation to all orientations, but may have to rotate the 84 | // pixel buffer when we run the graph. 85 | // 86 | override var shouldAutorotate: Bool 87 | { 88 | return true 89 | } 90 | 91 | override var supportedInterfaceOrientations:UIInterfaceOrientationMask 92 | { 93 | return UIInterfaceOrientationMask.all 94 | } 95 | 96 | // 97 | // Override viewWillTransitionToSize so that we can update the videoPreviewLayer with the new orientation. 98 | // 99 | override func viewWillTransition(to size: CGSize, with coordinator: UIViewControllerTransitionCoordinator) 100 | { 101 | // 102 | // call super so the coordinator can be passed on 103 | // to views and child view controllers. 104 | // 105 | super.viewWillTransition(to: size, with: coordinator) 106 | 107 | if let videoPreviewLayerConnection = cameraPreviewView.videoPreviewLayer.connection 108 | { 109 | // 110 | // Change the orientation of the video session 111 | // 112 | let deviceOrientation = UIDevice.current.orientation 113 | if let newVideoOrientation = AVCaptureVideoOrientation(deviceOrientation: deviceOrientation) { 114 | videoPreviewLayerConnection.videoOrientation = newVideoOrientation 115 | } 116 | } 117 | } 118 | 119 | // 120 | // This delegate method is where we are notified of a new video frame. 
We obtain 121 | // CVPixelBuffer from the provided sample buffer and pass it on to the tensorflow graph 122 | // 123 | func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) 124 | { 125 | // 126 | // TensorflowGraph needs a CVPixelBuffer. Get it from the sampleBuffer 127 | // 128 | let pixelBuffer: CVPixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer)! 129 | 130 | // 131 | // if the graph is ready pass it on. 132 | // 133 | if tensorflowGraph != nil 134 | { 135 | tensorflowGraph?.runModel(on: pixelBuffer, orientation: UIDevice.current.orientation) 136 | } 137 | } 138 | 139 | 140 | func captureOutput(_ output: AVCaptureOutput, didDrop sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) 141 | { 142 | //do something with dropped frames here 143 | } 144 | 145 | //////////////////////////////////////////////////////////////////////////////////////////////////////////// 146 | // MARK: - Notification Handlers 147 | 148 | @objc func OnAvSessionStarted(notification: NSNotification) 149 | { 150 | // Now that the user has granted permission to the camera 151 | // and we have a video session we can initialize our graph. 152 | tensorflowGraph = TensorflowGraph() 153 | } 154 | 155 | @objc func OnSetupResultCameraNotAuthorized(notification: NSNotification) 156 | { 157 | DispatchQueue.main.async { 158 | let changePrivacySetting = "Please grant permission to use the camera in Settings" 159 | let message = NSLocalizedString(changePrivacySetting, comment: "Alert message when we have no access to the camera") 160 | let alertController = UIAlertController(title: "TensorflowiOS", message: message, preferredStyle: .alert) 161 | 162 | alertController.addAction(UIAlertAction(title: NSLocalizedString("OK", comment: "Alert OK button"), 163 | style: .cancel, 164 | handler: nil)) 165 | 166 | alertController.addAction(UIAlertAction(title: NSLocalizedString("Settings", comment: "Button to open Settings"), 167 | style: .`default`, 168 | handler: { _ in 169 | UIApplication.shared.open(URL(string: UIApplicationOpenSettingsURLString)!, options: [:], completionHandler: nil) 170 | })) 171 | 172 | self.present(alertController, animated: true, completion: nil) 173 | } 174 | } 175 | 176 | @objc func OnSetupResultSessionConfigurationFailed(notification: NSNotification) 177 | { 178 | DispatchQueue.main.async { 179 | let alertMsg = "Something went wrong during capture session configuration" 180 | let message = NSLocalizedString("Unable to capture media", comment: alertMsg) 181 | let alertController = UIAlertController(title: "TensorflowiOS", message: message, preferredStyle: .alert) 182 | 183 | alertController.addAction(UIAlertAction(title: NSLocalizedString("OK", comment: "OK button"), 184 | style: .cancel, 185 | handler: nil)) 186 | 187 | self.present(alertController, animated: true, completion: nil) 188 | } 189 | } 190 | 191 | @objc func OnPredictionsUpdated(notification: NSNotification) 192 | { 193 | DispatchQueue.main.async { 194 | if let userinfo = notification.userInfo { 195 | if let predictions:[TensorflowPrediction] = userinfo["predictions"] as? [TensorflowPrediction] { 196 | // 197 | // Update the Bounding boxes and labels from the 198 | // new predictions coming out of the graph. 
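// Each TensorflowPrediction carries a label, a score, a cropped UIImage, and normalized (0...1)
// top/left/bottom/right coordinates (see TensorflowPrediction.h), so the bounding box view is
// expected to scale them into its own coordinate space before drawing.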
199 | // 200 | self.boundingBoxView.updateBoundingBoxes(predictions) 201 | } 202 | } 203 | } 204 | } 205 | } 206 | 207 | //////////////////////////////////////////////////////////////////// 208 | // MARK: - AVCaptureVideoOrientation extension 209 | 210 | extension AVCaptureVideoOrientation { 211 | init?(deviceOrientation: UIDeviceOrientation) { 212 | switch deviceOrientation { 213 | case .portrait: self = .portrait 214 | case .portraitUpsideDown: self = .portraitUpsideDown 215 | case .landscapeLeft: self = .landscapeRight 216 | case .landscapeRight: self = .landscapeLeft 217 | default: return nil 218 | } 219 | } 220 | 221 | init?(interfaceOrientation: UIInterfaceOrientation) { 222 | switch interfaceOrientation { 223 | case .portrait: self = .portrait 224 | case .portraitUpsideDown: self = .portraitUpsideDown 225 | case .landscapeLeft: self = .landscapeLeft 226 | case .landscapeRight: self = .landscapeRight 227 | default: return nil 228 | } 229 | } 230 | } 231 | 232 | -------------------------------------------------------------------------------- /swift/Assets/Assets.xcassets/AppIcon.appiconset/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "images" : [ 3 | { 4 | "idiom" : "iphone", 5 | "size" : "20x20", 6 | "scale" : "2x" 7 | }, 8 | { 9 | "idiom" : "iphone", 10 | "size" : "20x20", 11 | "scale" : "3x" 12 | }, 13 | { 14 | "size" : "29x29", 15 | "idiom" : "iphone", 16 | "filename" : "tensorflowiOS_Icon_29x29-1.png", 17 | "scale" : "1x" 18 | }, 19 | { 20 | "size" : "29x29", 21 | "idiom" : "iphone", 22 | "filename" : "tensorflowiOS_Icon_29x29@2x-1.png", 23 | "scale" : "2x" 24 | }, 25 | { 26 | "size" : "29x29", 27 | "idiom" : "iphone", 28 | "filename" : "tensorflowiOS_Icon_29x29@3x.png", 29 | "scale" : "3x" 30 | }, 31 | { 32 | "size" : "40x40", 33 | "idiom" : "iphone", 34 | "filename" : "tensorflowiOS_Icon_40x40@2x-1.png", 35 | "scale" : "2x" 36 | }, 37 | { 38 | "size" : "40x40", 39 | "idiom" : "iphone", 40 | "filename" : "tensorflowiOS_Icon_40x40@3x.png", 41 | "scale" : "3x" 42 | }, 43 | { 44 | "size" : "57x57", 45 | "idiom" : "iphone", 46 | "filename" : "tensorflowiOS_Icon_57x57.png", 47 | "scale" : "1x" 48 | }, 49 | { 50 | "size" : "57x57", 51 | "idiom" : "iphone", 52 | "filename" : "tensorflowiOS_Icon_57x57@2x.png", 53 | "scale" : "2x" 54 | }, 55 | { 56 | "size" : "60x60", 57 | "idiom" : "iphone", 58 | "filename" : "tensorflowiOS_Icon_60x60@2x.png", 59 | "scale" : "2x" 60 | }, 61 | { 62 | "size" : "60x60", 63 | "idiom" : "iphone", 64 | "filename" : "tensorflowiOS_Icon_60x60@3x.png", 65 | "scale" : "3x" 66 | }, 67 | { 68 | "idiom" : "ipad", 69 | "size" : "20x20", 70 | "scale" : "1x" 71 | }, 72 | { 73 | "idiom" : "ipad", 74 | "size" : "20x20", 75 | "scale" : "2x" 76 | }, 77 | { 78 | "size" : "29x29", 79 | "idiom" : "ipad", 80 | "filename" : "tensorflowiOS_Icon_29x29.png", 81 | "scale" : "1x" 82 | }, 83 | { 84 | "size" : "29x29", 85 | "idiom" : "ipad", 86 | "filename" : "tensorflowiOS_Icon_29x29@2x.png", 87 | "scale" : "2x" 88 | }, 89 | { 90 | "size" : "40x40", 91 | "idiom" : "ipad", 92 | "filename" : "tensorflowiOS_Icon_40x40.png", 93 | "scale" : "1x" 94 | }, 95 | { 96 | "size" : "40x40", 97 | "idiom" : "ipad", 98 | "filename" : "tensorflowiOS_Icon_40x40@2x.png", 99 | "scale" : "2x" 100 | }, 101 | { 102 | "size" : "50x50", 103 | "idiom" : "ipad", 104 | "filename" : "tensorflowiOS_Icon_50x50.png", 105 | "scale" : "1x" 106 | }, 107 | { 108 | "size" : "50x50", 109 | "idiom" : "ipad", 110 | "filename" : "tensorflowiOS_Icon_50x50@2x.png", 
111 | "scale" : "2x" 112 | }, 113 | { 114 | "size" : "72x72", 115 | "idiom" : "ipad", 116 | "filename" : "tensorflowiOS_Icon_72x72.png", 117 | "scale" : "1x" 118 | }, 119 | { 120 | "size" : "72x72", 121 | "idiom" : "ipad", 122 | "filename" : "tensorflowiOS_Icon_72x72@2x.png", 123 | "scale" : "2x" 124 | }, 125 | { 126 | "size" : "76x76", 127 | "idiom" : "ipad", 128 | "filename" : "tensorflowiOS_Icon_76x76.png", 129 | "scale" : "1x" 130 | }, 131 | { 132 | "size" : "76x76", 133 | "idiom" : "ipad", 134 | "filename" : "tensorflowiOS_Icon_76x76@2x.png", 135 | "scale" : "2x" 136 | }, 137 | { 138 | "size" : "83.5x83.5", 139 | "idiom" : "ipad", 140 | "filename" : "tensorflowiOS_Icon_83.5x83.5.png", 141 | "scale" : "2x" 142 | }, 143 | { 144 | "size" : "1024x1024", 145 | "idiom" : "ios-marketing", 146 | "filename" : "tensorflowiOS_Icon_1024x1024-1.png", 147 | "scale" : "1x" 148 | } 149 | ], 150 | "info" : { 151 | "version" : 1, 152 | "author" : "xcode" 153 | } 154 | } -------------------------------------------------------------------------------- /swift/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_1024x1024-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/csharpseattle/tensorflowiOS/8da0ccdf4cae38c872438bc1f6c9222cd8ceafaa/swift/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_1024x1024-1.png -------------------------------------------------------------------------------- /swift/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_29x29-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/csharpseattle/tensorflowiOS/8da0ccdf4cae38c872438bc1f6c9222cd8ceafaa/swift/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_29x29-1.png -------------------------------------------------------------------------------- /swift/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_29x29.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/csharpseattle/tensorflowiOS/8da0ccdf4cae38c872438bc1f6c9222cd8ceafaa/swift/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_29x29.png -------------------------------------------------------------------------------- /swift/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_29x29@2x-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/csharpseattle/tensorflowiOS/8da0ccdf4cae38c872438bc1f6c9222cd8ceafaa/swift/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_29x29@2x-1.png -------------------------------------------------------------------------------- /swift/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_29x29@2x.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/csharpseattle/tensorflowiOS/8da0ccdf4cae38c872438bc1f6c9222cd8ceafaa/swift/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_29x29@2x.png -------------------------------------------------------------------------------- /swift/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_29x29@3x.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/csharpseattle/tensorflowiOS/8da0ccdf4cae38c872438bc1f6c9222cd8ceafaa/swift/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_29x29@3x.png 
-------------------------------------------------------------------------------- /swift/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_40x40.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/csharpseattle/tensorflowiOS/8da0ccdf4cae38c872438bc1f6c9222cd8ceafaa/swift/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_40x40.png -------------------------------------------------------------------------------- /swift/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_40x40@2x-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/csharpseattle/tensorflowiOS/8da0ccdf4cae38c872438bc1f6c9222cd8ceafaa/swift/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_40x40@2x-1.png -------------------------------------------------------------------------------- /swift/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_40x40@2x.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/csharpseattle/tensorflowiOS/8da0ccdf4cae38c872438bc1f6c9222cd8ceafaa/swift/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_40x40@2x.png -------------------------------------------------------------------------------- /swift/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_40x40@3x.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/csharpseattle/tensorflowiOS/8da0ccdf4cae38c872438bc1f6c9222cd8ceafaa/swift/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_40x40@3x.png -------------------------------------------------------------------------------- /swift/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_50x50.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/csharpseattle/tensorflowiOS/8da0ccdf4cae38c872438bc1f6c9222cd8ceafaa/swift/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_50x50.png -------------------------------------------------------------------------------- /swift/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_50x50@2x.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/csharpseattle/tensorflowiOS/8da0ccdf4cae38c872438bc1f6c9222cd8ceafaa/swift/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_50x50@2x.png -------------------------------------------------------------------------------- /swift/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_57x57.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/csharpseattle/tensorflowiOS/8da0ccdf4cae38c872438bc1f6c9222cd8ceafaa/swift/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_57x57.png -------------------------------------------------------------------------------- /swift/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_57x57@2x.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/csharpseattle/tensorflowiOS/8da0ccdf4cae38c872438bc1f6c9222cd8ceafaa/swift/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_57x57@2x.png -------------------------------------------------------------------------------- 
/swift/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_60x60@2x.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/csharpseattle/tensorflowiOS/8da0ccdf4cae38c872438bc1f6c9222cd8ceafaa/swift/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_60x60@2x.png -------------------------------------------------------------------------------- /swift/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_60x60@3x.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/csharpseattle/tensorflowiOS/8da0ccdf4cae38c872438bc1f6c9222cd8ceafaa/swift/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_60x60@3x.png -------------------------------------------------------------------------------- /swift/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_72x72.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/csharpseattle/tensorflowiOS/8da0ccdf4cae38c872438bc1f6c9222cd8ceafaa/swift/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_72x72.png -------------------------------------------------------------------------------- /swift/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_72x72@2x.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/csharpseattle/tensorflowiOS/8da0ccdf4cae38c872438bc1f6c9222cd8ceafaa/swift/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_72x72@2x.png -------------------------------------------------------------------------------- /swift/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_76x76.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/csharpseattle/tensorflowiOS/8da0ccdf4cae38c872438bc1f6c9222cd8ceafaa/swift/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_76x76.png -------------------------------------------------------------------------------- /swift/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_76x76@2x.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/csharpseattle/tensorflowiOS/8da0ccdf4cae38c872438bc1f6c9222cd8ceafaa/swift/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_76x76@2x.png -------------------------------------------------------------------------------- /swift/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_83.5x83.5.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/csharpseattle/tensorflowiOS/8da0ccdf4cae38c872438bc1f6c9222cd8ceafaa/swift/Assets/Assets.xcassets/AppIcon.appiconset/tensorflowiOS_Icon_83.5x83.5.png -------------------------------------------------------------------------------- /swift/Assets/Assets.xcassets/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "info" : { 3 | "version" : 1, 4 | "author" : "xcode" 5 | } 6 | } -------------------------------------------------------------------------------- /swift/Assets/Base.lproj/LaunchScreen.storyboard: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 
-------------------------------------------------------------------------------- /swift/Assets/Base.lproj/Main.storyboard: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 36 | 37 | 38 | 39 | 40 | 41 | 42 | 43 | 44 | 45 | 46 | 47 | 48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57 | 58 | 59 | 60 | 61 | 62 | 63 | 64 | 65 | 66 | 67 | 68 | 69 | 70 | -------------------------------------------------------------------------------- /swift/Assets/Default-568h@2x.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/csharpseattle/tensorflowiOS/8da0ccdf4cae38c872438bc1f6c9222cd8ceafaa/swift/Assets/Default-568h@2x.png -------------------------------------------------------------------------------- /swift/SupportingFiles/BuildPhase.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # 4 | # Grab the proto definition file from the tensorflow models repository 5 | # 6 | echo "Downloading proto definition from models repo" 7 | curl -s -o /${TMPDIR}/string_int_label_map.proto https://raw.githubusercontent.com/tensorflow/models/master/research/object_detection/protos/string_int_label_map.proto 8 | 9 | # 10 | # Test for existence of the protobuf compiler in the tensorflow repo. 11 | # 12 | if [ ! -f $TENSORFLOW_ROOT/tensorflow/contrib/makefile/gen/protobuf-host/bin/protoc ]; then 13 | echo "protoc not found in Tensorflow repo at tensorflow/contrib/makefile/gen/protobuf-host/bin. Did you set TENSORFLOW_ROOT in tensorflow.xcconfig?" 14 | return 1 15 | fi 16 | 17 | # 18 | # Generate the string_int_label_map.cc and .h file 19 | # 20 | 21 | 22 | if [ $? == 0 ]; then 23 | echo "Generating string_int_label_map. Output to '$SRCROOT'" 24 | $TENSORFLOW_ROOT/tensorflow/contrib/makefile/gen/protobuf-host/bin/protoc --proto_path=${TMPDIR} --cpp_out=${SRCROOT}/Tensorflow/ string_int_label_map.proto 25 | else 26 | exit 1 27 | fi 28 | 29 | return $? 30 | -------------------------------------------------------------------------------- /swift/SupportingFiles/Constants.swift: -------------------------------------------------------------------------------- 1 | // 2 | // Constants.swift 3 | // tensorflowiOS 4 | // 5 | // Created by Chris Sharp on 11/11/17. 6 | // Copyright © 2017 Chris Sharp. All rights reserved. 
7 | // 8 | 9 | import Foundation 10 | 11 | let kAVSessionStarted:String = "kAVSessionStarted"; 12 | let kSetupResultCameraNotAuthorized:String = "kSetupResultCameraNotAuthorized"; 13 | let kSetupResultSessionConfigurationFailed:String = "SetupResultSessionConfigurationFailed"; 14 | let kPredictionsUpdated:String = "kPredictionsUpdated"; 15 | 16 | -------------------------------------------------------------------------------- /swift/SupportingFiles/Info.plist: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | CFBundleDevelopmentRegion 6 | $(DEVELOPMENT_LANGUAGE) 7 | CFBundleExecutable 8 | $(EXECUTABLE_NAME) 9 | CFBundleIdentifier 10 | $(PRODUCT_BUNDLE_IDENTIFIER) 11 | CFBundleInfoDictionaryVersion 12 | 6.0 13 | CFBundleName 14 | $(PRODUCT_NAME) 15 | CFBundlePackageType 16 | APPL 17 | CFBundleShortVersionString 18 | 1.0 19 | CFBundleVersion 20 | 1 21 | LSRequiresIPhoneOS 22 | 23 | NSCameraUsageDescription 24 | For the Video Preview 25 | UILaunchStoryboardName 26 | LaunchScreen 27 | UIMainStoryboardFile 28 | Main 29 | UIRequiredDeviceCapabilities 30 | 31 | armv7 32 | 33 | UIRequiresFullScreen 34 | 35 | UISupportedInterfaceOrientations 36 | 37 | UIInterfaceOrientationLandscapeLeft 38 | UIInterfaceOrientationLandscapeRight 39 | UIInterfaceOrientationPortrait 40 | UIInterfaceOrientationPortraitUpsideDown 41 | 42 | UISupportedInterfaceOrientations~ipad 43 | 44 | UIInterfaceOrientationPortrait 45 | UIInterfaceOrientationPortraitUpsideDown 46 | UIInterfaceOrientationLandscapeLeft 47 | UIInterfaceOrientationLandscapeRight 48 | 49 | 50 | 51 | -------------------------------------------------------------------------------- /swift/SupportingFiles/tensorflow.xcconfig: -------------------------------------------------------------------------------- 1 | // 2 | // tensorflow.xcconfig 3 | // tensorflowiOS 4 | // 5 | // Created by Sharp, Chris T on 10/9/17. 6 | // Copyright © 2017 Apple. All rights reserved. 7 | // 8 | 9 | TENSORFLOW_ROOT = /Users/username/Development/tensorflow 10 | -------------------------------------------------------------------------------- /swift/Tensorflow/TensorflowGraph.h: -------------------------------------------------------------------------------- 1 | 2 | #import 3 | #import 4 | #include 5 | 6 | 7 | @interface TensorflowGraph : NSObject 8 | 9 | - (id) init; 10 | - (void)runModelOnPixelBuffer:(CVPixelBufferRef) pixelBuf orientation: (UIDeviceOrientation) orientation; 11 | 12 | @end 13 | -------------------------------------------------------------------------------- /swift/Tensorflow/TensorflowGraph.mm: -------------------------------------------------------------------------------- 1 | 2 | #import "TensorflowGraph.h" 3 | #import 4 | #import "TensorflowUtils.h" 5 | #import "TensorflowPrediction.h" 6 | #include 7 | #import "tensorflow/core/public/session.h" 8 | #import "tensorflow/core/util/memmapped_file_system.h" 9 | #include "string_int_label_map.pb.h" 10 | 11 | 12 | const int kGraphChannels = 3; // BGR. 13 | const int kGraphImageWidth = 299; // The width of the pixels going into the graph. 14 | const int kGraphImageHeight = 299; // the height of the pixels going into the graph. 15 | const float kPredictionThreshold = 0.65; // Prediction percentages lower than this will be discarded. 16 | const int kGraphMaxPredictions = 10; // After this many predictions we move on. 
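// Note: the bundled SSD MobileNet models appear to have been exported for roughly 300x300 input;
// the exported object-detection graphs resize whatever is fed to image_tensor, so 299x299 still
// works and these dimensions mainly trade inference speed against detail.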
17 | const int kAverageEveryXFrames = 50; // Output average processing time every X frames 18 | 19 | @interface TensorflowGraph() 20 | { 21 | std::unique_ptr tfSession; 22 | object_detection::protos::StringIntLabelMap labelMap; 23 | } 24 | 25 | // 26 | // processingTime and framesProcessed are used for keeping an average time to make predictions. 27 | // 28 | @property (nonatomic) double processingTime; 29 | @property (nonatomic) int framesProcessed; 30 | 31 | // Keep a load status - if loading fails we don't want to attempt to run 32 | // anything through a non-existent graph. 33 | @property (nonatomic) tensorflow::Status loadStatus; 34 | @property (nonatomic) tensorflow::Status labelStatus; 35 | @property (nonatomic) BOOL isProcessingFrame; 36 | 37 | @end 38 | 39 | 40 | @implementation TensorflowGraph 41 | 42 | - (id) init 43 | { 44 | self = [super init]; 45 | if (self) 46 | { 47 | // change model name here to use one of the other models. 48 | NSString *model = @"frozen_inference_graph"; 49 | NSString *label = @"mscoco_label_map"; 50 | 51 | if (![self loadModelWithFileName:model modelFileType:@"pb"]) 52 | { 53 | NSLog(@"Failed to load model"); 54 | } 55 | 56 | if (![self loadLabelsWithFileName:label labelsFileType:@"txt"]) 57 | { 58 | NSLog(@"Failed to load labels"); 59 | } 60 | } 61 | return self; 62 | } 63 | 64 | - (BOOL)loadModelWithFileName:(NSString *)modelFileName modelFileType:(NSString *)modelFileType 65 | { 66 | self.loadStatus = loadModel(modelFileName, modelFileType, &tfSession); 67 | return self.loadStatus.ok(); 68 | } 69 | 70 | - (BOOL)loadLabelsWithFileName:(NSString *)labelsFileName labelsFileType:(NSString *)labelsFileType 71 | { 72 | // 73 | // load the labels from the file. labelMap is populated by calling loadLabels. 74 | self.labelStatus = loadLabels(labelsFileName, labelsFileType, &labelMap); 75 | return self.labelStatus.ok(); 76 | } 77 | 78 | - (BOOL) canProcessFrame 79 | { 80 | return (!self.isProcessingFrame); 81 | } 82 | 83 | // 84 | // PixelBufferToCGImage 85 | // pixelBuffer --- the pixel buffer obtained from the device camera 86 | // orientation --- the orientation of the device. 87 | // 88 | // This method retains the CVPixelBuffer, copies it, and applies rotations and scaling 89 | // necessary before feeding the image data into the Tensorflow Graph. 90 | // 91 | - (CGImageRef) pixelBufferToCGImage: (CVImageBufferRef) pixelBuffer orientation: (UIDeviceOrientation) orientation 92 | { 93 | CFRetain(pixelBuffer); 94 | CVPixelBufferLockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly); 95 | 96 | // 97 | // alloc a CIImage with the pixel buffer. 98 | // 99 | CIImage* ciImage = [[CIImage alloc] initWithCVPixelBuffer:pixelBuffer]; 100 | 101 | // 102 | // figure the angle of rotation and the scaling of the pixel buffer 103 | // based on the current orientation of the device. 
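// For the portrait orientations the buffer is rotated +/-90 degrees before use; landscape-left needs
// no rotation and landscape-right a 180 degree flip. In every case the scale factors are chosen so
// the resulting image comes out at kGraphImageWidth x kGraphImageHeight for the graph.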
104 | // 105 | const int pixelBufHeight = (int) CVPixelBufferGetHeight(pixelBuffer); 106 | const int pixelBufWidth = (int) CVPixelBufferGetWidth(pixelBuffer); 107 | CGAffineTransform transform = CGAffineTransformIdentity; 108 | CGFloat angle = 0.0; 109 | switch (orientation) 110 | { 111 | case UIDeviceOrientationPortrait: 112 | { 113 | angle = -M_PI_2; 114 | transform = CGAffineTransformScale(transform, float(kGraphImageHeight)/pixelBufHeight, float(kGraphImageWidth)/pixelBufWidth); 115 | } 116 | break; 117 | case UIDeviceOrientationPortraitUpsideDown: 118 | { 119 | angle = M_PI_2; 120 | transform = CGAffineTransformScale(transform, float(kGraphImageHeight)/pixelBufHeight, float(kGraphImageWidth)/pixelBufWidth); 121 | } 122 | break; 123 | case UIDeviceOrientationLandscapeLeft: 124 | { 125 | angle = 0.0; 126 | transform = CGAffineTransformScale(transform, float(kGraphImageWidth)/pixelBufWidth, float(kGraphImageHeight)/pixelBufHeight); 127 | } 128 | break; 129 | case UIDeviceOrientationLandscapeRight: 130 | { 131 | angle = M_PI; 132 | transform = CGAffineTransformScale(transform, float(kGraphImageWidth)/pixelBufWidth, float(kGraphImageHeight)/pixelBufHeight); 133 | } 134 | break; 135 | case UIDeviceOrientationUnknown: 136 | case UIDeviceOrientationFaceUp: 137 | case UIDeviceOrientationFaceDown: 138 | default: 139 | angle = 0.0; 140 | transform = CGAffineTransformScale(transform, float(kGraphImageWidth)/pixelBufWidth, float(kGraphImageHeight)/pixelBufHeight); 141 | break; 142 | } 143 | 144 | // 145 | // Apply the transforms 146 | // 147 | transform = CGAffineTransformRotate(transform, angle); 148 | CIImage* resized = [ciImage imageByApplyingTransform:transform]; 149 | 150 | // 151 | // Create a cgImage from the frame pixels 152 | // 153 | CIContext *context = [CIContext contextWithOptions:nil]; 154 | CGImageRef cgImage = [context createCGImage:resized fromRect:resized.extent]; 155 | 156 | // 157 | // We are done with the pixel buffer, release it. 158 | // 159 | CFRelease(pixelBuffer); 160 | CVPixelBufferUnlockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly); 161 | 162 | // 163 | // This cgImage is released after using it to populate the Tensor 164 | // 165 | return cgImage; 166 | } 167 | 168 | 169 | // 170 | // createDebugImage 171 | // srcData -- pointer to image pixel data. 172 | // width -- pixel width of the image. 173 | // height -- pixel height of the image. 174 | // 175 | // This method is useful for debuging the image data immediately before going into 176 | // the TF graph. Given a pointer to the pixel data this method will add an alpha 177 | // channel and convert the raw image data into a UIImage. The UIImage will be 178 | // broadcast to any listeners for easy display in a UIView. 179 | // 180 | - (void) createDebugImage: (unsigned char*) srcData width: (size_t) width height: (size_t) height 181 | { 182 | // 183 | // Create a destination array for the cgImage pixel data 184 | // 185 | const size_t srcChannels = kGraphChannels; 186 | const size_t dstChannels = 4; 187 | const size_t numBytes = width * height * dstChannels; 188 | unsigned char pixelData[numBytes]; 189 | unsigned char * destPixels = pixelData; 190 | 191 | // 192 | // Copy into the destination array, adding the alpha channel. 193 | // Since the raw image data comes as BGR and we want RGB we 194 | // flip the blue and red channels. Alpha is added as opaque. 
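// For example, the source pixel at byte offset i = 3*p arrives as (B, G, R); the destination bytes
// at offset 4*p are written out as (R, G, B, 0xFF).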
195 | // 196 | size_t i = 0; 197 | while (i < (width * height * srcChannels)) 198 | { 199 | *destPixels++ = srcData[i+2]; 200 | *destPixels++ = srcData[i+1]; 201 | *destPixels++ = srcData[i]; 202 | *destPixels++ = UINT8_MAX; 203 | i += srcChannels; 204 | } 205 | 206 | // 207 | // Create the bitmap context 208 | // 209 | const size_t BitsPerComponent = 8; 210 | const size_t BytesPerRow = width * dstChannels; 211 | CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB(); 212 | CGContextRef cxt = CGBitmapContextCreate(&pixelData[0], width, height, BitsPerComponent, BytesPerRow, colorSpace, kCGImageAlphaNoneSkipLast); 213 | 214 | // 215 | // create the CGImage and UIImage from the context 216 | // 217 | CGImageRef cgImage = CGBitmapContextCreateImage(cxt); 218 | UIImage * uiImage = [[UIImage alloc] initWithCGImage:cgImage]; 219 | 220 | // 221 | // Clean up 222 | // 223 | CFRelease(cxt); 224 | CFRelease(colorSpace); 225 | CGImageRelease(cgImage); 226 | 227 | // 228 | // Notify that a new image is going to be fed to the graph. 229 | // 230 | dispatch_async(dispatch_get_main_queue(), ^(void) { 231 | [[NSNotificationCenter defaultCenter] postNotificationName:@"kDebugImageUpdated" object:nil userInfo:@{@"debugImage" : uiImage}]; 232 | }); 233 | } 234 | 235 | 236 | // 237 | // Takes a pixel buffer coming from the Camera preview session and obtains predictions w/bounding boxes from 238 | // a tensorflow graph. 239 | // 240 | - (void)runModelOnPixelBuffer:(CVPixelBufferRef) pixelBuffer orientation: (UIDeviceOrientation) orientation 241 | { 242 | // 243 | // if the graph hasn't loaded we can't do anything yet. 244 | // 245 | if (!self.loadStatus.ok() || self.isProcessingFrame) 246 | { 247 | return; 248 | } 249 | 250 | // 251 | // mark the graph as busy 252 | // 253 | self.isProcessingFrame = YES; 254 | 255 | // 256 | // Retain the pixel buffer, copy and make a CGImage out of it. pixelBufferToCGImage will 257 | // rotate the pixel buffer if necessary and scale the image down to the width and height 258 | // desired for inference. pixelBufferToCGImage will also release the CVPixelBuffer. 259 | // 260 | CGImageRef cgImage = [self pixelBufferToCGImage:pixelBuffer orientation:orientation]; 261 | 262 | // 263 | // Gather needed dimensions of the CGImage 264 | // 265 | const int srcHeight = (int) CGImageGetHeight(cgImage); 266 | const int srcWidth = (int) CGImageGetWidth(cgImage); 267 | const int bytesPerRow = (int) CGImageGetBytesPerRow(cgImage); 268 | const int srcChannels = (int) bytesPerRow / srcWidth; 269 | 270 | // 271 | // Create a tensor for running through the graph. 272 | // 273 | tensorflow::Tensor imageTensor(tensorflow::DT_UINT8, tensorflow::TensorShape({1, kGraphImageHeight, kGraphImageWidth, kGraphChannels})); 274 | auto imageTensorDimensioned = imageTensor.tensor(); 275 | 276 | // 277 | // Get a pointer to the pixel data in the cgImage. This is our starting 278 | // address of the source pixel buffer data. 279 | // 280 | CFDataRef pixelData = CGDataProviderCopyData(CGImageGetDataProvider(cgImage)); 281 | unsigned char *srcStartAddress = (unsigned char*) CFDataGetBytePtr(pixelData); 282 | 283 | // 284 | // Scale the pixel data down to the expected width and height, drop the alpha channel, 285 | // and populate the image_tensor. 286 | // The source pointer iterates through the pixel data and copies the data 287 | // into the reshaped Tensorflow image tensor. Changing the GraphInputWidth and Height 288 | // may increase (or decrease) speed and/or accuracy. 
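// Layout: the destination byte for channel c of pixel (row, col) is (row * srcWidth + col) * kGraphChannels + c,
// while the matching source byte is row * bytesPerRow + col * srcChannels + c; copying only
// kGraphChannels bytes per pixel is what drops the alpha channel.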
289 | // 290 | unsigned char *destStartAddress = imageTensorDimensioned.data(); 291 | for (int row = 0; row < srcHeight; ++row) 292 | { 293 | unsigned char *destRow = destStartAddress + (row * srcWidth * kGraphChannels); 294 | for (int col = 0; col < srcWidth; ++col) 295 | { 296 | unsigned char* srcPixel = srcStartAddress + (row * bytesPerRow) + (col * srcChannels); 297 | unsigned char* destPixel = destRow + (col * kGraphChannels); 298 | for (int c = 0; c < kGraphChannels; ++c) 299 | { 300 | destPixel[c] = srcPixel[c]; 301 | } 302 | } 303 | } 304 | 305 | // we are done with the CFDataRef 306 | CFRelease(pixelData); 307 | 308 | // 309 | // Move the tensorflow processing to another thread. Not only are there limited pixelBuffers 310 | // but if the thread running the videoPreview gets blocked we will get Late Frame warninigs. 311 | // Running the graph on a background thread keeps things moving. 312 | // 313 | dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{ 314 | 315 | // 316 | // Get a start time. We will clock the tensorflow processing time. 317 | // 318 | struct timespec ts_start; 319 | clock_gettime(CLOCK_MONOTONIC, &ts_start); 320 | 321 | if (self->tfSession.get()) 322 | { 323 | // Run through the graph. 324 | std::vector outputs; 325 | tensorflow::Status runStatus = self->tfSession->Run({{"image_tensor", imageTensor}}, {"detection_boxes", "detection_scores", "detection_classes", "num_detections"}, {}, &outputs); 326 | 327 | if (!runStatus.ok()) 328 | { 329 | LOG(FATAL) << "Error: " << runStatus; 330 | } 331 | else 332 | { 333 | // 334 | // Calculate the amount of time it took to run the image through 335 | // the model. 336 | // 337 | struct timespec ts_end; 338 | clock_gettime(CLOCK_MONOTONIC, &ts_end); 339 | struct timespec elapsed = diff(ts_start, ts_end); 340 | 341 | // 342 | // Calculate an average time and output every X frames. 343 | // 344 | self.processingTime += elapsed.tv_sec; 345 | self.processingTime += (elapsed.tv_nsec / 1000000000.0f); 346 | self.framesProcessed += 1; 347 | if (self.framesProcessed % kAverageEveryXFrames == 0) 348 | { 349 | printf("Avg. prediction time: %f\n", self.processingTime / self.framesProcessed); 350 | } 351 | 352 | // 353 | // Generate our list of predictions and bounding boxes 354 | // 355 | auto boundingBoxesFlat = outputs[0].flat(); 356 | tensorflow::TTypes::Flat scores_flat = outputs[1].flat(); 357 | tensorflow::TTypes::Flat indices_flat = outputs[2].flat(); 358 | 359 | NSMutableArray * predictions = [[NSMutableArray alloc] init]; 360 | for (int i = 0; i < kGraphMaxPredictions; ++i) 361 | { 362 | // 363 | // once the prediction score falls below our threshold don't bother 364 | // processing any more predictions. 
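// The detection_scores output comes back from the graph sorted highest-first, which is what makes
// it safe to stop at the first score below kPredictionThreshold.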
365 | // 366 | const float score = scores_flat(i); 367 | if (score < kPredictionThreshold) 368 | { 369 | break; 370 | } 371 | 372 | // 373 | // Keep an array of predictions 374 | // 375 | TensorflowPrediction * prediction = [[TensorflowPrediction alloc] init]; 376 | prediction.score = score; 377 | const int label_index = (tensorflow::int32)indices_flat(i); 378 | prediction.label = [NSString stringWithUTF8String:GetDisplayName(&self->labelMap, label_index).c_str()]; 379 | prediction.top = boundingBoxesFlat(i * 4 + 0); 380 | prediction.left = boundingBoxesFlat(i * 4 + 1); 381 | prediction.bottom = boundingBoxesFlat(i * 4 + 2); 382 | prediction.right = boundingBoxesFlat(i * 4 + 3); 383 | 384 | printf("Prediction: %s --- Score: %f\n", [prediction.label cStringUsingEncoding:NSASCIIStringEncoding], prediction.score); 385 | 386 | // 387 | // Crop the pixels out of the bounding box and put the cropped 388 | // image into the prediction object. Prediction values are 389 | // normalized so we multiply by the image dimensions to get 390 | // back to pixel values. 391 | // 392 | const int x = srcWidth * prediction.left; 393 | const int y = srcHeight * prediction.top; 394 | const int w = srcWidth * (prediction.right - prediction.left); 395 | const int h = srcHeight * (prediction.bottom - prediction.top); 396 | 397 | CGRect croppedArea = CGRectMake(x, y, w, h); 398 | CGImageRef cropped = CGImageCreateWithImageInRect(cgImage, croppedArea); 399 | prediction.image = [UIImage imageWithCGImage:cropped]; 400 | CGImageRelease(cropped); 401 | 402 | [predictions addObject:prediction]; 403 | } 404 | 405 | // 406 | // Notify the UI that we have new predictions. Another class will receive this 407 | // and use the data to draw bounding boxes. 408 | // 409 | dispatch_async(dispatch_get_main_queue(), ^(void) { 410 | [[NSNotificationCenter defaultCenter] postNotificationName:@"kPredictionsUpdated" object:nil userInfo:@{@"predictions" : predictions}]; 411 | }); 412 | 413 | } 414 | 415 | CGImageRelease(cgImage); 416 | 417 | self.isProcessingFrame = NO; 418 | } // end --- if (tfSession.get) 419 | }); // end --- dispatch_async 420 | } // end --- runModelOnPixelBuffer() 421 | 422 | @end 423 | -------------------------------------------------------------------------------- /swift/Tensorflow/TensorflowPrediction.h: -------------------------------------------------------------------------------- 1 | // 2 | // TensorflowPrediction.h 3 | // tensorflowiOS 4 | // 5 | // Created by Sharp, Chris T on 10/9/17. 6 | // Copyright © 2017 Apple. All rights reserved. 7 | // 8 | 9 | #import 10 | #import 11 | 12 | @interface TensorflowPrediction : NSObject 13 | @property (nonatomic) NSString *label; 14 | @property (nonatomic) UIImage *image; 15 | @property (nonatomic) float score; 16 | @property (nonatomic) float top; 17 | @property (nonatomic) float left; 18 | @property (nonatomic) float right; 19 | @property (nonatomic) float bottom; 20 | @end 21 | -------------------------------------------------------------------------------- /swift/Tensorflow/TensorflowPrediction.m: -------------------------------------------------------------------------------- 1 | // 2 | // TensorflowPrediction.m 3 | // tensorflowiOS 4 | // 5 | // Created by Sharp, Chris T on 10/9/17. 6 | // Copyright © 2017 Apple. All rights reserved. 
7 | // 8 | 9 | #import "TensorflowPrediction.h" 10 | 11 | @implementation TensorflowPrediction 12 | 13 | @end 14 | -------------------------------------------------------------------------------- /swift/Tensorflow/TensorflowUtils.h: -------------------------------------------------------------------------------- 1 | 2 | #ifndef tensorflowUtils_h 3 | #define tensorflowUtils_h 4 | 5 | #pragma clang diagnostic push 6 | #pragma clang diagnostic ignored "-Wconversion" 7 | #pragma clang diagnostic ignored "-Wdocumentation" 8 | #pragma clang diagnostic ignored "-Wconditional-uninitialized" 9 | #pragma clang diagnostic ignored "-Wcomma" 10 | #include "tensorflow/core/public/session.h" 11 | #include "tensorflow/core/util/memmapped_file_system.h" 12 | #include "third_party/eigen3/unsupported/Eigen/CXX11/Tensor" 13 | #pragma clang diagnostic pop 14 | 15 | namespace object_detection 16 | { 17 | namespace protos 18 | { 19 | class StringIntLabelMap; 20 | } 21 | } 22 | 23 | // Reads a serialized GraphDef protobuf file from the bundle, typically 24 | // created with the freeze_graph script. Populates the session argument with a 25 | // Session object that has the model loaded. 26 | tensorflow::Status loadModel(NSString* file_name, 27 | NSString* file_type, 28 | std::unique_ptr* session); 29 | 30 | // Loads a model from a file that has been created using the 31 | // convert_graphdef_memmapped_format tool. This bundles together a GraphDef 32 | // proto together with a file that can be memory-mapped, containing the weight 33 | // parameters for the model. This is useful because it reduces the overall 34 | // memory pressure, since the read-only parameter regions can be easily paged 35 | // out and don't count toward memory limits on iOS. 36 | tensorflow::Status loadMemoryMappedModel(NSString* file_name, 37 | NSString* file_type, 38 | std::unique_ptr* session, 39 | std::unique_ptr* memmapped_env); 40 | 41 | // Loads a text file of a label map in mscoco style. 42 | tensorflow::Status loadLabels(NSString *fileName, NSString *fileType, object_detection::protos::StringIntLabelMap *labelMap); 43 | 44 | // Takes a label Map and an index into it. Returns the 'DisplayName' field in the protobuf 45 | std::string GetDisplayName(const object_detection::protos::StringIntLabelMap* labels, int index); 46 | timespec diff(timespec start, timespec end); 47 | #endif /* tensorflowUtils_h */ 48 | -------------------------------------------------------------------------------- /swift/Tensorflow/TensorflowUtils.mm: -------------------------------------------------------------------------------- 1 | 2 | #import 3 | 4 | #include "TensorflowUtils.h" 5 | #include 6 | #include 7 | #include 8 | #include "string_int_label_map.pb.h" 9 | 10 | 11 | // Helper class borrowed from some utils that loads protobufs efficiently. 
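// IfstreamInputStream adapts a std::ifstream to protobuf's CopyingInputStream interface so a frozen
// GraphDef can be parsed straight from disk; PortableReadFileToProto below wraps it in a
// CodedInputStream and raises the default protobuf size limits to cope with large model files.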
12 | namespace 13 | { 14 | class IfstreamInputStream : public ::google::protobuf::io::CopyingInputStream 15 | { 16 | public: 17 | explicit IfstreamInputStream(const std::string& file_name) : ifs_(file_name.c_str(), std::ios::in | std::ios::binary) {} 18 | ~IfstreamInputStream() { ifs_.close(); } 19 | 20 | int Read(void *buffer, int size) 21 | { 22 | if (!ifs_) 23 | { 24 | return -1; 25 | } 26 | ifs_.read(static_cast(buffer), size); 27 | return (int)ifs_.gcount(); 28 | } 29 | 30 | private: 31 | std::ifstream ifs_; 32 | }; 33 | } 34 | 35 | #pragma mark - Private 36 | 37 | NSString *filePathForResourceName(NSString *name, NSString *extension) 38 | { 39 | NSString *filePath = [[NSBundle mainBundle] pathForResource:name ofType:extension]; 40 | 41 | if (filePath == NULL) 42 | { 43 | LOG(FATAL) << "Couldn't find '" << [name UTF8String] << "." << [extension UTF8String] << "' in bundle."; 44 | return nullptr; 45 | } 46 | return filePath; 47 | } 48 | 49 | bool PortableReadFileToProto(const std::string& fileName, ::google::protobuf::MessageLite *proto) 50 | { 51 | ::google::protobuf::io::CopyingInputStreamAdaptor stream(new IfstreamInputStream(fileName)); 52 | stream.SetOwnsCopyingStream(true); 53 | ::google::protobuf::io::CodedInputStream codedStream(&stream); 54 | 55 | // Total bytes hard limit / warning limit are set to 1GB and 512MB 56 | // respectively. 57 | codedStream.SetTotalBytesLimit(1024LL << 20, 512LL << 20); 58 | return proto->ParseFromCodedStream(&codedStream); 59 | } 60 | 61 | #pragma mark - Public 62 | 63 | tensorflow::Status loadModel(NSString *fileName, NSString *fileType, std::unique_ptr *session) 64 | { 65 | tensorflow::SessionOptions options; 66 | 67 | tensorflow::Session *sessionPointer = nullptr; 68 | tensorflow::Status sessionStatus = tensorflow::NewSession(options, &sessionPointer); 69 | 70 | if (!sessionStatus.ok()) 71 | { 72 | LOG(ERROR) << "Could not create TensorFlow Session: " << sessionStatus; 73 | return sessionStatus; 74 | } 75 | session->reset(sessionPointer); 76 | 77 | tensorflow::GraphDef tensorflowGraph; 78 | 79 | NSString *modelPath = filePathForResourceName(fileName, fileType); 80 | 81 | if (!modelPath) 82 | { 83 | LOG(ERROR) << "Failed to find model proto at" << [fileName UTF8String] << [fileType UTF8String]; 84 | return tensorflow::errors::NotFound([fileName UTF8String], [fileType UTF8String]); 85 | } 86 | 87 | const bool readProtoSucceeded = PortableReadFileToProto([modelPath UTF8String], &tensorflowGraph); 88 | 89 | if (!readProtoSucceeded) 90 | { 91 | LOG(ERROR) << "Failed to load model proto from" << [modelPath UTF8String]; 92 | return tensorflow::errors::NotFound([modelPath UTF8String]); 93 | } 94 | 95 | tensorflow::Status create_status = (*session)->Create(tensorflowGraph); 96 | 97 | if (!create_status.ok()) 98 | { 99 | LOG(ERROR) << "Could not create TensorFlow Graph: " << create_status; 100 | return create_status; 101 | } 102 | 103 | return tensorflow::Status::OK(); 104 | } 105 | 106 | tensorflow::Status loadLabels(NSString *fileName, NSString *fileType, object_detection::protos::StringIntLabelMap *labelMap) 107 | { 108 | // Read the label list 109 | NSString *labelsPath = filePathForResourceName(fileName, fileType); 110 | 111 | if (!labelsPath) 112 | { 113 | LOG(ERROR) << "Failed to find model proto at" << [fileName UTF8String] << [fileType UTF8String]; 114 | return tensorflow::errors::NotFound([fileName UTF8String], [fileType UTF8String]); 115 | } 116 | 117 | int fileDescriptor = open([labelsPath UTF8String], O_RDONLY); 118 | if (fileDescriptor >= 0) 
119 | { 120 | google::protobuf::io::FileInputStream fileInput(fileDescriptor); 121 | fileInput.SetCloseOnDelete( true ); 122 | 123 | if (!google::protobuf::TextFormat::Parse(&fileInput, labelMap)) 124 | { 125 | LOG(ERROR) << "Failed to parse label file.\n"; 126 | return tensorflow::errors::Aborted([fileName UTF8String], [fileType UTF8String]); 127 | } 128 | } 129 | 130 | return tensorflow::Status::OK(); 131 | } 132 | 133 | std::string GetDisplayName(const object_detection::protos::StringIntLabelMap* labels, int index) 134 | { 135 | for (int i = 0; i < labels->item_size(); ++i) 136 | { 137 | const object_detection::protos::StringIntLabelMapItem& item = labels->item(i); 138 | if (index == item.id()) 139 | { 140 | return item.display_name(); 141 | } 142 | } 143 | 144 | return ""; 145 | } 146 | 147 | // 148 | // Calculate and return elapsed time between to struct timespecs 149 | // 150 | timespec diff(timespec start, timespec end) 151 | { 152 | timespec temp; 153 | if ((end.tv_nsec-start.tv_nsec)<0) 154 | { 155 | temp.tv_sec = end.tv_sec-start.tv_sec-1; 156 | temp.tv_nsec = 1000000000+end.tv_nsec-start.tv_nsec; 157 | } 158 | else 159 | { 160 | temp.tv_sec = end.tv_sec-start.tv_sec; 161 | temp.tv_nsec = end.tv_nsec-start.tv_nsec; 162 | } 163 | return temp; 164 | } 165 | 166 | 167 | -------------------------------------------------------------------------------- /swift/Tensorflow/tensorflowiOS-Bridging-Header.h: -------------------------------------------------------------------------------- 1 | // 2 | // Use this file to import your target's public headers that you would like to expose to Swift. 3 | // 4 | 5 | #ifdef __cplusplus 6 | extern "C" { 7 | #endif 8 | 9 | #ifdef __cplusplus 10 | } 11 | #endif 12 | 13 | 14 | 15 | #import "TensorflowGraph.h" 16 | #import "TensorflowPrediction.h" 17 | 18 | 19 | -------------------------------------------------------------------------------- /swift/tensorflowiOS.xcodeproj/project.pbxproj: -------------------------------------------------------------------------------- 1 | // !$*UTF8*$! 
2 | { 3 | archiveVersion = 1; 4 | classes = { 5 | }; 6 | objectVersion = 48; 7 | objects = { 8 | 9 | /* Begin PBXBuildFile section */ 10 | 5325635320CB3EE6009D099F /* TensorflowGraph.mm in Sources */ = {isa = PBXBuildFile; fileRef = 53D01D8D1FB95AA400AEAFC6 /* TensorflowGraph.mm */; }; 11 | 534E46DF20FCF9CC0093F1C2 /* frozen_inference_graph.pb in Resources */ = {isa = PBXBuildFile; fileRef = 534E46DD20FCF9CC0093F1C2 /* frozen_inference_graph.pb */; }; 12 | 5365E49E1FB762BE0004EFFD /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 5365E48B1FB762BE0004EFFD /* Assets.xcassets */; }; 13 | 5365E49F1FB762BE0004EFFD /* LaunchScreen.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 5365E48C1FB762BE0004EFFD /* LaunchScreen.storyboard */; }; 14 | 5365E4A01FB762BE0004EFFD /* Main.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 5365E48E1FB762BE0004EFFD /* Main.storyboard */; }; 15 | 5365E4A51FB762BE0004EFFD /* AppDelegate.swift in Sources */ = {isa = PBXBuildFile; fileRef = 5365E4991FB762BE0004EFFD /* AppDelegate.swift */; }; 16 | 5365E4A61FB762BE0004EFFD /* ViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = 5365E49A1FB762BE0004EFFD /* ViewController.swift */; }; 17 | 5365E4A91FB7660F0004EFFD /* CameraPreviewView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 5365E4A81FB7660F0004EFFD /* CameraPreviewView.swift */; }; 18 | 5365E4AB1FB7807E0004EFFD /* Constants.swift in Sources */ = {isa = PBXBuildFile; fileRef = 5365E4AA1FB7807E0004EFFD /* Constants.swift */; }; 19 | 5365E4AD1FB796B80004EFFD /* BoundingBoxView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 5365E4AC1FB796B80004EFFD /* BoundingBoxView.swift */; }; 20 | 53A04B6620CB144300C7DD4B /* mscoco_label_map.txt in Resources */ = {isa = PBXBuildFile; fileRef = 53A04B6020CB144300C7DD4B /* mscoco_label_map.txt */; }; 21 | 53D01D911FB95BE700AEAFC6 /* string_int_label_map.pb.cc in Sources */ = {isa = PBXBuildFile; fileRef = 53D01D8F1FB95BE700AEAFC6 /* string_int_label_map.pb.cc */; }; 22 | 53D01D941FB95C1800AEAFC6 /* TensorflowPrediction.m in Sources */ = {isa = PBXBuildFile; fileRef = 53D01D931FB95C1800AEAFC6 /* TensorflowPrediction.m */; }; 23 | 53D01D971FB95D1D00AEAFC6 /* TensorflowUtils.mm in Sources */ = {isa = PBXBuildFile; fileRef = 53D01D961FB95D1D00AEAFC6 /* TensorflowUtils.mm */; }; 24 | 53D01D9A1FB95DCF00AEAFC6 /* Accelerate.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 53D01D991FB95DB600AEAFC6 /* Accelerate.framework */; }; 25 | 53D01DA11FB961C200AEAFC6 /* libstdc++.tbd in Frameworks */ = {isa = PBXBuildFile; fileRef = 53D01DA01FB961C200AEAFC6 /* libstdc++.tbd */; }; 26 | /* End PBXBuildFile section */ 27 | 28 | /* Begin PBXFileReference section */ 29 | 534E46DB20FCF9CC0093F1C2 /* frozen_inference_graph.pb */ = {isa = PBXFileReference; lastKnownFileType = file; path = frozen_inference_graph.pb; sourceTree = ""; }; 30 | 534E46DD20FCF9CC0093F1C2 /* frozen_inference_graph.pb */ = {isa = PBXFileReference; lastKnownFileType = file; path = frozen_inference_graph.pb; sourceTree = ""; }; 31 | 5365E4701FB6D9E00004EFFD /* tensorflowiOS.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = tensorflowiOS.app; sourceTree = BUILT_PRODUCTS_DIR; }; 32 | 5365E48B1FB762BE0004EFFD /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = ""; }; 33 | 5365E48D1FB762BE0004EFFD /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path 
= Base.lproj/LaunchScreen.storyboard; sourceTree = ""; }; 34 | 5365E48F1FB762BE0004EFFD /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/Main.storyboard; sourceTree = ""; }; 35 | 5365E4991FB762BE0004EFFD /* AppDelegate.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = AppDelegate.swift; sourceTree = ""; }; 36 | 5365E49A1FB762BE0004EFFD /* ViewController.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = ViewController.swift; sourceTree = ""; }; 37 | 5365E49C1FB762BE0004EFFD /* Info.plist */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = ""; }; 38 | 5365E4A81FB7660F0004EFFD /* CameraPreviewView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CameraPreviewView.swift; sourceTree = ""; }; 39 | 5365E4AA1FB7807E0004EFFD /* Constants.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Constants.swift; sourceTree = ""; }; 40 | 5365E4AC1FB796B80004EFFD /* BoundingBoxView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = BoundingBoxView.swift; sourceTree = ""; }; 41 | 5365E4B01FB7A4530004EFFD /* tensorflowiOS-Bridging-Header.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = "tensorflowiOS-Bridging-Header.h"; sourceTree = ""; }; 42 | 5365E4B71FB7A8B90004EFFD /* tensorflow.xcconfig */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.xcconfig; path = tensorflow.xcconfig; sourceTree = ""; }; 43 | 53A04B6020CB144300C7DD4B /* mscoco_label_map.txt */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text; path = mscoco_label_map.txt; sourceTree = ""; }; 44 | 53AA6BDA1FC5FA7000074E49 /* README.md */ = {isa = PBXFileReference; lastKnownFileType = net.daringfireball.markdown; name = README.md; path = ../README.md; sourceTree = ""; }; 45 | 53D01D8C1FB95AA400AEAFC6 /* TensorflowGraph.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = TensorflowGraph.h; sourceTree = ""; }; 46 | 53D01D8D1FB95AA400AEAFC6 /* TensorflowGraph.mm */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.objcpp; path = TensorflowGraph.mm; sourceTree = ""; }; 47 | 53D01D8F1FB95BE700AEAFC6 /* string_int_label_map.pb.cc */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = string_int_label_map.pb.cc; sourceTree = ""; }; 48 | 53D01D901FB95BE700AEAFC6 /* string_int_label_map.pb.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = string_int_label_map.pb.h; sourceTree = ""; }; 49 | 53D01D921FB95C1800AEAFC6 /* TensorflowPrediction.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = TensorflowPrediction.h; sourceTree = ""; }; 50 | 53D01D931FB95C1800AEAFC6 /* TensorflowPrediction.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = TensorflowPrediction.m; sourceTree = ""; }; 51 | 53D01D951FB95D1D00AEAFC6 /* TensorflowUtils.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = TensorflowUtils.h; sourceTree = ""; }; 52 | 53D01D961FB95D1D00AEAFC6 /* TensorflowUtils.mm */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.objcpp; path = TensorflowUtils.mm; sourceTree = ""; }; 53 | 
53D01D991FB95DB600AEAFC6 /* Accelerate.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = Accelerate.framework; path = System/Library/Frameworks/Accelerate.framework; sourceTree = SDKROOT; }; 54 | 53D01D9E1FB961A800AEAFC6 /* libc++.tbd */ = {isa = PBXFileReference; lastKnownFileType = "sourcecode.text-based-dylib-definition"; name = "libc++.tbd"; path = "usr/lib/libc++.tbd"; sourceTree = SDKROOT; }; 55 | 53D01DA01FB961C200AEAFC6 /* libstdc++.tbd */ = {isa = PBXFileReference; lastKnownFileType = "sourcecode.text-based-dylib-definition"; name = "libstdc++.tbd"; path = "usr/lib/libstdc++.tbd"; sourceTree = SDKROOT; }; 56 | /* End PBXFileReference section */ 57 | 58 | /* Begin PBXFrameworksBuildPhase section */ 59 | 5365E46D1FB6D9E00004EFFD /* Frameworks */ = { 60 | isa = PBXFrameworksBuildPhase; 61 | buildActionMask = 2147483647; 62 | files = ( 63 | 53D01DA11FB961C200AEAFC6 /* libstdc++.tbd in Frameworks */, 64 | 53D01D9A1FB95DCF00AEAFC6 /* Accelerate.framework in Frameworks */, 65 | ); 66 | runOnlyForDeploymentPostprocessing = 0; 67 | }; 68 | /* End PBXFrameworksBuildPhase section */ 69 | 70 | /* Begin PBXGroup section */ 71 | 534E46DA20FCF9CC0093F1C2 /* ssd_mobilenet_v1_ppn_shared_box_predictor_300x300_coco14_sync_2018_07_03 */ = { 72 | isa = PBXGroup; 73 | children = ( 74 | 534E46DB20FCF9CC0093F1C2 /* frozen_inference_graph.pb */, 75 | ); 76 | path = ssd_mobilenet_v1_ppn_shared_box_predictor_300x300_coco14_sync_2018_07_03; 77 | sourceTree = ""; 78 | }; 79 | 534E46DC20FCF9CC0093F1C2 /* ssdlite_mobilenet_v2_coco_2018_05_09 */ = { 80 | isa = PBXGroup; 81 | children = ( 82 | 534E46DD20FCF9CC0093F1C2 /* frozen_inference_graph.pb */, 83 | ); 84 | path = ssdlite_mobilenet_v2_coco_2018_05_09; 85 | sourceTree = ""; 86 | }; 87 | 5365E4671FB6D9E00004EFFD = { 88 | isa = PBXGroup; 89 | children = ( 90 | 53AA6BDA1FC5FA7000074E49 /* README.md */, 91 | 5365E4721FB6D9E00004EFFD /* tensorflowiOS */, 92 | 5365E4711FB6D9E00004EFFD /* Products */, 93 | 53D01D981FB95DB600AEAFC6 /* Frameworks */, 94 | ); 95 | sourceTree = ""; 96 | }; 97 | 5365E4711FB6D9E00004EFFD /* Products */ = { 98 | isa = PBXGroup; 99 | children = ( 100 | 5365E4701FB6D9E00004EFFD /* tensorflowiOS.app */, 101 | ); 102 | name = Products; 103 | sourceTree = ""; 104 | }; 105 | 5365E4721FB6D9E00004EFFD /* tensorflowiOS */ = { 106 | isa = PBXGroup; 107 | children = ( 108 | 53A04B5D20CB144300C7DD4B /* Models */, 109 | 5365E4981FB762BE0004EFFD /* App */, 110 | 5365E49D1FB762BE0004EFFD /* Tensorflow */, 111 | 5365E48A1FB762BE0004EFFD /* Assets */, 112 | 5365E49B1FB762BE0004EFFD /* SupportingFiles */, 113 | ); 114 | name = tensorflowiOS; 115 | sourceTree = ""; 116 | }; 117 | 5365E48A1FB762BE0004EFFD /* Assets */ = { 118 | isa = PBXGroup; 119 | children = ( 120 | 5365E48B1FB762BE0004EFFD /* Assets.xcassets */, 121 | 5365E48C1FB762BE0004EFFD /* LaunchScreen.storyboard */, 122 | 5365E48E1FB762BE0004EFFD /* Main.storyboard */, 123 | ); 124 | path = Assets; 125 | sourceTree = SOURCE_ROOT; 126 | }; 127 | 5365E4981FB762BE0004EFFD /* App */ = { 128 | isa = PBXGroup; 129 | children = ( 130 | 5365E4991FB762BE0004EFFD /* AppDelegate.swift */, 131 | 5365E49A1FB762BE0004EFFD /* ViewController.swift */, 132 | 5365E4A81FB7660F0004EFFD /* CameraPreviewView.swift */, 133 | 5365E4AC1FB796B80004EFFD /* BoundingBoxView.swift */, 134 | ); 135 | path = App; 136 | sourceTree = SOURCE_ROOT; 137 | }; 138 | 5365E49B1FB762BE0004EFFD /* SupportingFiles */ = { 139 | isa = PBXGroup; 140 | children = ( 141 | 5365E4B71FB7A8B90004EFFD /* 
tensorflow.xcconfig */, 142 | 5365E49C1FB762BE0004EFFD /* Info.plist */, 143 | 5365E4AA1FB7807E0004EFFD /* Constants.swift */, 144 | ); 145 | path = SupportingFiles; 146 | sourceTree = SOURCE_ROOT; 147 | }; 148 | 5365E49D1FB762BE0004EFFD /* Tensorflow */ = { 149 | isa = PBXGroup; 150 | children = ( 151 | 53D01D951FB95D1D00AEAFC6 /* TensorflowUtils.h */, 152 | 53D01D961FB95D1D00AEAFC6 /* TensorflowUtils.mm */, 153 | 53D01D8C1FB95AA400AEAFC6 /* TensorflowGraph.h */, 154 | 53D01D8D1FB95AA400AEAFC6 /* TensorflowGraph.mm */, 155 | 5365E4B01FB7A4530004EFFD /* tensorflowiOS-Bridging-Header.h */, 156 | 53D01D921FB95C1800AEAFC6 /* TensorflowPrediction.h */, 157 | 53D01D931FB95C1800AEAFC6 /* TensorflowPrediction.m */, 158 | 53D01D8F1FB95BE700AEAFC6 /* string_int_label_map.pb.cc */, 159 | 53D01D901FB95BE700AEAFC6 /* string_int_label_map.pb.h */, 160 | ); 161 | path = Tensorflow; 162 | sourceTree = SOURCE_ROOT; 163 | }; 164 | 53A04B5D20CB144300C7DD4B /* Models */ = { 165 | isa = PBXGroup; 166 | children = ( 167 | 534E46DA20FCF9CC0093F1C2 /* ssd_mobilenet_v1_ppn_shared_box_predictor_300x300_coco14_sync_2018_07_03 */, 168 | 534E46DC20FCF9CC0093F1C2 /* ssdlite_mobilenet_v2_coco_2018_05_09 */, 169 | 53A04B6020CB144300C7DD4B /* mscoco_label_map.txt */, 170 | ); 171 | name = Models; 172 | path = ../Models; 173 | sourceTree = ""; 174 | }; 175 | 53D01D981FB95DB600AEAFC6 /* Frameworks */ = { 176 | isa = PBXGroup; 177 | children = ( 178 | 53D01DA01FB961C200AEAFC6 /* libstdc++.tbd */, 179 | 53D01D9E1FB961A800AEAFC6 /* libc++.tbd */, 180 | 53D01D991FB95DB600AEAFC6 /* Accelerate.framework */, 181 | ); 182 | name = Frameworks; 183 | sourceTree = ""; 184 | }; 185 | /* End PBXGroup section */ 186 | 187 | /* Begin PBXNativeTarget section */ 188 | 5365E46F1FB6D9E00004EFFD /* tensorflowiOS */ = { 189 | isa = PBXNativeTarget; 190 | buildConfigurationList = 5365E4821FB6D9E00004EFFD /* Build configuration list for PBXNativeTarget "tensorflowiOS" */; 191 | buildPhases = ( 192 | 5354F61A20244232005A7905 /* ShellScript */, 193 | 5365E46C1FB6D9E00004EFFD /* Sources */, 194 | 5365E46D1FB6D9E00004EFFD /* Frameworks */, 195 | 5365E46E1FB6D9E00004EFFD /* Resources */, 196 | ); 197 | buildRules = ( 198 | ); 199 | dependencies = ( 200 | ); 201 | name = tensorflowiOS; 202 | productName = tensorflowiOS; 203 | productReference = 5365E4701FB6D9E00004EFFD /* tensorflowiOS.app */; 204 | productType = "com.apple.product-type.application"; 205 | }; 206 | /* End PBXNativeTarget section */ 207 | 208 | /* Begin PBXProject section */ 209 | 5365E4681FB6D9E00004EFFD /* Project object */ = { 210 | isa = PBXProject; 211 | attributes = { 212 | LastSwiftUpdateCheck = 0910; 213 | LastUpgradeCheck = 0930; 214 | ORGANIZATIONNAME = "Chris Sharp"; 215 | TargetAttributes = { 216 | 5365E46F1FB6D9E00004EFFD = { 217 | CreatedOnToolsVersion = 9.1; 218 | LastSwiftMigration = 0910; 219 | ProvisioningStyle = Automatic; 220 | }; 221 | }; 222 | }; 223 | buildConfigurationList = 5365E46B1FB6D9E00004EFFD /* Build configuration list for PBXProject "tensorflowiOS" */; 224 | compatibilityVersion = "Xcode 8.0"; 225 | developmentRegion = en; 226 | hasScannedForEncodings = 0; 227 | knownRegions = ( 228 | en, 229 | Base, 230 | ); 231 | mainGroup = 5365E4671FB6D9E00004EFFD; 232 | productRefGroup = 5365E4711FB6D9E00004EFFD /* Products */; 233 | projectDirPath = ""; 234 | projectRoot = ""; 235 | targets = ( 236 | 5365E46F1FB6D9E00004EFFD /* tensorflowiOS */, 237 | ); 238 | }; 239 | /* End PBXProject section */ 240 | 241 | /* Begin PBXResourcesBuildPhase section */ 242 | 
5365E46E1FB6D9E00004EFFD /* Resources */ = { 243 | isa = PBXResourcesBuildPhase; 244 | buildActionMask = 2147483647; 245 | files = ( 246 | 53A04B6620CB144300C7DD4B /* mscoco_label_map.txt in Resources */, 247 | 5365E49E1FB762BE0004EFFD /* Assets.xcassets in Resources */, 248 | 5365E4A01FB762BE0004EFFD /* Main.storyboard in Resources */, 249 | 534E46DF20FCF9CC0093F1C2 /* frozen_inference_graph.pb in Resources */, 250 | 5365E49F1FB762BE0004EFFD /* LaunchScreen.storyboard in Resources */, 251 | ); 252 | runOnlyForDeploymentPostprocessing = 0; 253 | }; 254 | /* End PBXResourcesBuildPhase section */ 255 | 256 | /* Begin PBXShellScriptBuildPhase section */ 257 | 5354F61A20244232005A7905 /* ShellScript */ = { 258 | isa = PBXShellScriptBuildPhase; 259 | buildActionMask = 2147483647; 260 | files = ( 261 | ); 262 | inputPaths = ( 263 | ); 264 | outputPaths = ( 265 | ); 266 | runOnlyForDeploymentPostprocessing = 0; 267 | shellPath = /bin/bash; 268 | shellScript = "source SupportingFiles/BuildPhase.sh"; 269 | }; 270 | /* End PBXShellScriptBuildPhase section */ 271 | 272 | /* Begin PBXSourcesBuildPhase section */ 273 | 5365E46C1FB6D9E00004EFFD /* Sources */ = { 274 | isa = PBXSourcesBuildPhase; 275 | buildActionMask = 2147483647; 276 | files = ( 277 | 5365E4A51FB762BE0004EFFD /* AppDelegate.swift in Sources */, 278 | 53D01D941FB95C1800AEAFC6 /* TensorflowPrediction.m in Sources */, 279 | 53D01D971FB95D1D00AEAFC6 /* TensorflowUtils.mm in Sources */, 280 | 5325635320CB3EE6009D099F /* TensorflowGraph.mm in Sources */, 281 | 5365E4A61FB762BE0004EFFD /* ViewController.swift in Sources */, 282 | 53D01D911FB95BE700AEAFC6 /* string_int_label_map.pb.cc in Sources */, 283 | 5365E4AD1FB796B80004EFFD /* BoundingBoxView.swift in Sources */, 284 | 5365E4A91FB7660F0004EFFD /* CameraPreviewView.swift in Sources */, 285 | 5365E4AB1FB7807E0004EFFD /* Constants.swift in Sources */, 286 | ); 287 | runOnlyForDeploymentPostprocessing = 0; 288 | }; 289 | /* End PBXSourcesBuildPhase section */ 290 | 291 | /* Begin PBXVariantGroup section */ 292 | 5365E48C1FB762BE0004EFFD /* LaunchScreen.storyboard */ = { 293 | isa = PBXVariantGroup; 294 | children = ( 295 | 5365E48D1FB762BE0004EFFD /* Base */, 296 | ); 297 | name = LaunchScreen.storyboard; 298 | sourceTree = ""; 299 | }; 300 | 5365E48E1FB762BE0004EFFD /* Main.storyboard */ = { 301 | isa = PBXVariantGroup; 302 | children = ( 303 | 5365E48F1FB762BE0004EFFD /* Base */, 304 | ); 305 | name = Main.storyboard; 306 | sourceTree = ""; 307 | }; 308 | /* End PBXVariantGroup section */ 309 | 310 | /* Begin XCBuildConfiguration section */ 311 | 5365E4801FB6D9E00004EFFD /* Debug */ = { 312 | isa = XCBuildConfiguration; 313 | buildSettings = { 314 | ALWAYS_SEARCH_USER_PATHS = NO; 315 | CLANG_ANALYZER_NONNULL = YES; 316 | CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE; 317 | CLANG_CXX_LANGUAGE_STANDARD = "gnu++14"; 318 | CLANG_CXX_LIBRARY = "libc++"; 319 | CLANG_ENABLE_MODULES = YES; 320 | CLANG_ENABLE_OBJC_ARC = YES; 321 | CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES; 322 | CLANG_WARN_BOOL_CONVERSION = YES; 323 | CLANG_WARN_COMMA = YES; 324 | CLANG_WARN_CONSTANT_CONVERSION = YES; 325 | CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES; 326 | CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; 327 | CLANG_WARN_DOCUMENTATION_COMMENTS = YES; 328 | CLANG_WARN_EMPTY_BODY = YES; 329 | CLANG_WARN_ENUM_CONVERSION = YES; 330 | CLANG_WARN_INFINITE_RECURSION = YES; 331 | CLANG_WARN_INT_CONVERSION = YES; 332 | CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES; 333 | CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF 
= YES; 334 | CLANG_WARN_OBJC_LITERAL_CONVERSION = YES; 335 | CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; 336 | CLANG_WARN_RANGE_LOOP_ANALYSIS = YES; 337 | CLANG_WARN_STRICT_PROTOTYPES = YES; 338 | CLANG_WARN_SUSPICIOUS_MOVE = YES; 339 | CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE; 340 | CLANG_WARN_UNREACHABLE_CODE = YES; 341 | CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; 342 | CODE_SIGN_IDENTITY = "iPhone Developer"; 343 | COPY_PHASE_STRIP = NO; 344 | DEBUG_INFORMATION_FORMAT = dwarf; 345 | ENABLE_STRICT_OBJC_MSGSEND = YES; 346 | ENABLE_TESTABILITY = YES; 347 | GCC_C_LANGUAGE_STANDARD = gnu11; 348 | GCC_DYNAMIC_NO_PIC = NO; 349 | GCC_NO_COMMON_BLOCKS = YES; 350 | GCC_OPTIMIZATION_LEVEL = 0; 351 | GCC_PREPROCESSOR_DEFINITIONS = ( 352 | "DEBUG=1", 353 | "$(inherited)", 354 | ); 355 | GCC_WARN_64_TO_32_BIT_CONVERSION = YES; 356 | GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; 357 | GCC_WARN_UNDECLARED_SELECTOR = YES; 358 | GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; 359 | GCC_WARN_UNUSED_FUNCTION = YES; 360 | GCC_WARN_UNUSED_VARIABLE = YES; 361 | IPHONEOS_DEPLOYMENT_TARGET = 11.1; 362 | MTL_ENABLE_DEBUG_INFO = YES; 363 | ONLY_ACTIVE_ARCH = YES; 364 | SDKROOT = iphoneos; 365 | SWIFT_ACTIVE_COMPILATION_CONDITIONS = DEBUG; 366 | SWIFT_OPTIMIZATION_LEVEL = "-Onone"; 367 | }; 368 | name = Debug; 369 | }; 370 | 5365E4811FB6D9E00004EFFD /* Release */ = { 371 | isa = XCBuildConfiguration; 372 | buildSettings = { 373 | ALWAYS_SEARCH_USER_PATHS = NO; 374 | CLANG_ANALYZER_NONNULL = YES; 375 | CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE; 376 | CLANG_CXX_LANGUAGE_STANDARD = "gnu++14"; 377 | CLANG_CXX_LIBRARY = "libc++"; 378 | CLANG_ENABLE_MODULES = YES; 379 | CLANG_ENABLE_OBJC_ARC = YES; 380 | CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES; 381 | CLANG_WARN_BOOL_CONVERSION = YES; 382 | CLANG_WARN_COMMA = YES; 383 | CLANG_WARN_CONSTANT_CONVERSION = YES; 384 | CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES; 385 | CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; 386 | CLANG_WARN_DOCUMENTATION_COMMENTS = YES; 387 | CLANG_WARN_EMPTY_BODY = YES; 388 | CLANG_WARN_ENUM_CONVERSION = YES; 389 | CLANG_WARN_INFINITE_RECURSION = YES; 390 | CLANG_WARN_INT_CONVERSION = YES; 391 | CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES; 392 | CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES; 393 | CLANG_WARN_OBJC_LITERAL_CONVERSION = YES; 394 | CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; 395 | CLANG_WARN_RANGE_LOOP_ANALYSIS = YES; 396 | CLANG_WARN_STRICT_PROTOTYPES = YES; 397 | CLANG_WARN_SUSPICIOUS_MOVE = YES; 398 | CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE; 399 | CLANG_WARN_UNREACHABLE_CODE = YES; 400 | CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; 401 | CODE_SIGN_IDENTITY = "iPhone Developer"; 402 | COPY_PHASE_STRIP = NO; 403 | DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym"; 404 | ENABLE_NS_ASSERTIONS = NO; 405 | ENABLE_STRICT_OBJC_MSGSEND = YES; 406 | GCC_C_LANGUAGE_STANDARD = gnu11; 407 | GCC_NO_COMMON_BLOCKS = YES; 408 | GCC_WARN_64_TO_32_BIT_CONVERSION = YES; 409 | GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; 410 | GCC_WARN_UNDECLARED_SELECTOR = YES; 411 | GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; 412 | GCC_WARN_UNUSED_FUNCTION = YES; 413 | GCC_WARN_UNUSED_VARIABLE = YES; 414 | IPHONEOS_DEPLOYMENT_TARGET = 11.1; 415 | MTL_ENABLE_DEBUG_INFO = NO; 416 | SDKROOT = iphoneos; 417 | SWIFT_OPTIMIZATION_LEVEL = "-Owholemodule"; 418 | VALIDATE_PRODUCT = YES; 419 | }; 420 | name = Release; 421 | }; 422 | 5365E4831FB6D9E00004EFFD /* Debug */ = { 423 | isa = XCBuildConfiguration; 424 | baseConfigurationReference = 
5365E4B71FB7A8B90004EFFD /* tensorflow.xcconfig */; 425 | buildSettings = { 426 | ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; 427 | CLANG_ENABLE_MODULES = YES; 428 | CODE_SIGN_STYLE = Automatic; 429 | DEVELOPMENT_TEAM = 56TGMSA9GE; 430 | HEADER_SEARCH_PATHS = ( 431 | "$(TENSORFLOW_ROOT)/tensorflow/contrib/makefile/downloads/nsync/public/", 432 | "$(TENSORFLOW_ROOT)/tensorflow/contrib/makefile/gen/proto", 433 | "$(TENSORFLOW_ROOT)/tensorflow/contrib/makefile/downloads/protobuf/src", 434 | "$(TENSORFLOW_ROOT)/tensorflow/contrib/makefile/downloads/eigen", 435 | "$(TENSORFLOW_ROOT)/tensorflow/contrib/makefile/downloads", 436 | "$(TENSORFLOW_ROOT)/tensorflow/contrib/makefile/downloads/absl", 437 | "$(TENSORFLOW_ROOT)", 438 | ); 439 | INFOPLIST_FILE = "$(SRCROOT)/SupportingFiles/Info.plist"; 440 | LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks"; 441 | LIBRARY_SEARCH_PATHS = ( 442 | "$(TENSORFLOW_ROOT)/tensorflow/contrib/makefile/gen/protobuf_ios/lib/", 443 | "$(TENSORFLOW_ROOT)/tensorflow/contrib/makefile/gen/lib", 444 | ); 445 | OTHER_LDFLAGS = ( 446 | "-force_load", 447 | "$(TENSORFLOW_ROOT)/tensorflow/contrib/makefile/gen/lib/libtensorflow-core.a", 448 | "-lprotobuf", 449 | "-lprotobuf-lite", 450 | "$(TENSORFLOW_ROOT)/tensorflow/contrib/makefile/downloads/nsync/builds/${CURRENT_ARCH}.ios.c++11/nsync.a", 451 | "-lc++", 452 | ); 453 | PRODUCT_BUNDLE_IDENTIFIER = com.username.tensorflowiOS; 454 | PRODUCT_NAME = "$(TARGET_NAME)"; 455 | SWIFT_OBJC_BRIDGING_HEADER = "Tensorflow/tensorflowiOS-Bridging-Header.h"; 456 | SWIFT_OPTIMIZATION_LEVEL = "-Onone"; 457 | SWIFT_VERSION = 4.0; 458 | TARGETED_DEVICE_FAMILY = "1,2"; 459 | }; 460 | name = Debug; 461 | }; 462 | 5365E4841FB6D9E00004EFFD /* Release */ = { 463 | isa = XCBuildConfiguration; 464 | baseConfigurationReference = 5365E4B71FB7A8B90004EFFD /* tensorflow.xcconfig */; 465 | buildSettings = { 466 | ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; 467 | CLANG_ENABLE_MODULES = YES; 468 | CODE_SIGN_STYLE = Automatic; 469 | DEVELOPMENT_TEAM = 56TGMSA9GE; 470 | HEADER_SEARCH_PATHS = ( 471 | "$(TENSORFLOW_ROOT)/tensorflow/contrib/makefile/downloads/nsync/public/", 472 | "$(TENSORFLOW_ROOT)/tensorflow/contrib/makefile/gen/proto", 473 | "$(TENSORFLOW_ROOT)/tensorflow/contrib/makefile/downloads/protobuf/src", 474 | "$(TENSORFLOW_ROOT)/tensorflow/contrib/makefile/downloads/eigen", 475 | "$(TENSORFLOW_ROOT)/tensorflow/contrib/makefile/downloads", 476 | "$(TENSORFLOW_ROOT)/tensorflow/contrib/makefile/downloads/absl", 477 | "$(TENSORFLOW_ROOT)", 478 | ); 479 | INFOPLIST_FILE = "$(SRCROOT)/SupportingFiles/Info.plist"; 480 | LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks"; 481 | LIBRARY_SEARCH_PATHS = ( 482 | "$(TENSORFLOW_ROOT)/tensorflow/contrib/makefile/gen/protobuf_ios/lib/", 483 | "$(TENSORFLOW_ROOT)/tensorflow/contrib/makefile/gen/lib", 484 | ); 485 | OTHER_LDFLAGS = ( 486 | "-force_load", 487 | "$(TENSORFLOW_ROOT)/tensorflow/contrib/makefile/gen/lib/libtensorflow-core.a", 488 | "-lprotobuf", 489 | "-lprotobuf-lite", 490 | "$(TENSORFLOW_ROOT)/tensorflow/contrib/makefile/downloads/nsync/builds/${CURRENT_ARCH}.ios.c++11/nsync.a", 491 | "-lc++", 492 | ); 493 | PRODUCT_BUNDLE_IDENTIFIER = com.username.tensorflowiOS; 494 | PRODUCT_NAME = "$(TARGET_NAME)"; 495 | SWIFT_OBJC_BRIDGING_HEADER = "Tensorflow/tensorflowiOS-Bridging-Header.h"; 496 | SWIFT_VERSION = 4.0; 497 | TARGETED_DEVICE_FAMILY = "1,2"; 498 | }; 499 | name = Release; 500 | }; 501 | /* End XCBuildConfiguration section */ 502 | 503 | /* Begin 
XCConfigurationList section */ 504 | 5365E46B1FB6D9E00004EFFD /* Build configuration list for PBXProject "tensorflowiOS" */ = { 505 | isa = XCConfigurationList; 506 | buildConfigurations = ( 507 | 5365E4801FB6D9E00004EFFD /* Debug */, 508 | 5365E4811FB6D9E00004EFFD /* Release */, 509 | ); 510 | defaultConfigurationIsVisible = 0; 511 | defaultConfigurationName = Release; 512 | }; 513 | 5365E4821FB6D9E00004EFFD /* Build configuration list for PBXNativeTarget "tensorflowiOS" */ = { 514 | isa = XCConfigurationList; 515 | buildConfigurations = ( 516 | 5365E4831FB6D9E00004EFFD /* Debug */, 517 | 5365E4841FB6D9E00004EFFD /* Release */, 518 | ); 519 | defaultConfigurationIsVisible = 0; 520 | defaultConfigurationName = Release; 521 | }; 522 | /* End XCConfigurationList section */ 523 | }; 524 | rootObject = 5365E4681FB6D9E00004EFFD /* Project object */; 525 | } 526 | -------------------------------------------------------------------------------- /swift/tensorflowiOS.xcodeproj/project.xcworkspace/contents.xcworkspacedata: -------------------------------------------------------------------------------- 1 | <?xml version="1.0" encoding="UTF-8"?> 2 | <Workspace 3 |    version = "1.0"> 4 |    <FileRef 5 |       location = "self:tensorflowiOS.xcodeproj"> 6 |    </FileRef> 7 | </Workspace> 8 | --------------------------------------------------------------------------------