├── .gitmodules
├── README.md
├── setup.sh
└── tensorswift
    ├── AVCaptureDevice+Extension.swift
    ├── Config.swift
    ├── TensorBridge.mm
    ├── Tensorswift-Bridging-Header.h
    ├── Tensorswift.xcodeproj
    │   ├── project.pbxproj
    │   └── project.xcworkspace
    │       └── contents.xcworkspacedata
    ├── Tensorswift
    │   ├── AppDelegate.swift
    │   ├── Assets.xcassets
    │   │   └── AppIcon.appiconset
    │   │       └── Contents.json
    │   ├── Base.lproj
    │   │   ├── LaunchScreen.storyboard
    │   │   └── Main.storyboard
    │   ├── Info.plist
    │   ├── SeenObjectViewController.swift
    │   ├── TensorBridge.h
    │   └── ViewController.swift
    ├── VideoCapture.swift
    ├── ios_image_load.h
    ├── ios_image_load.mm
    ├── libprotobuf-lite.a
    ├── libprotobuf.a
    ├── retrained_graph_stripped.pb
    ├── retrained_labels.txt
    ├── tensorflow_utils.h
    └── tensorflow_utils.mm
/.gitmodules:
--------------------------------------------------------------------------------
1 | [submodule "tensorflow"]
2 | path = tensorflow
3 | url = https://github.com/tensorflow/tensorflow.git
4 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # TensorFlow image recognition in Swift
2 |
3 |
4 |
5 | ## How to use
6 |
7 | ### The automatic and codeless way - recommended
8 | * Install Trainer for Mac
9 | * Click three times
10 |
11 |
12 | ### The manual way
13 |
14 | To clone this repository together with the linked TensorFlow submodule, use
15 | ```
16 | git clone --recursive -j8 https://github.com/mortenjust/tensorswift-ios.git
17 | ```
18 |
19 | Run the setup script. This will take about 30 minutes on a MacBook Pro.
20 | ```bash
21 | cd tensorswift-ios
22 | ./setup.sh
23 | ```
24 | The app is set up to do a Google search on the recognized label. You can change that: open the Xcode project and edit `Config.swift` to change what the app does when it recognizes something.
25 |
26 | Train a model with these instructions, or use Trainer for Mac to do the hard work.
27 |
--------------------------------------------------------------------------------
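A note on the `Config.swift` edit the README mentions: it is a one-entry change to the `seeThisOpenThat` dictionary inside the `Config` class (the full file appears later in this listing). A minimal sketch, using the catch-all entry from the file and the `peanut` example its own comments suggest:

```swift
// Config.swift -- map each trained label to the URL the app opens on a match.
// "%s" in the catch-all template is replaced with the recognized label.
static var seeThisOpenThat: [String: String] = [
    "catch-all": "https://google.com/search?q=%s%20cable",
    "peanut": "https://google.com/search?q=peanut", // a label-specific override
]
```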
/setup.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | tensorflow/tensorflow/contrib/makefile/download_dependencies.sh
3 | tensorflow/tensorflow/contrib/makefile/build_all_ios.sh
--------------------------------------------------------------------------------
/tensorswift/AVCaptureDevice+Extension.swift:
--------------------------------------------------------------------------------
1 | //
2 | // AVCaptureDevice+Extension.swift
3 | //
4 | // Created by Shuichi Tsutsumi on 4/3/16.
5 | // Copyright © 2016 Shuichi Tsutsumi. All rights reserved.
6 | //
7 | import AVFoundation
8 |
9 | extension AVCaptureDevice {
10 | private func availableFormatsFor(preferredFps: Float64) -> [AVCaptureDeviceFormat] {
11 | guard let allFormats = formats as? [AVCaptureDeviceFormat] else {
12 | return []
13 | }
14 |
15 | var availableFormats: [AVCaptureDeviceFormat] = []
16 | for format in allFormats
17 | {
18 | guard let ranges = format.videoSupportedFrameRateRanges as? [AVFrameRateRange] else {
19 | continue
20 | }
21 |
22 | for range in ranges where range.minFrameRate <= preferredFps && preferredFps <= range.maxFrameRate
23 | {
24 | availableFormats.append(format)
25 | }
26 | }
27 | return availableFormats
28 | }
29 |
30 | private func formatWithHighestResolution(_ availableFormats: [AVCaptureDeviceFormat]) -> AVCaptureDeviceFormat?
31 | {
32 | var maxWidth: Int32 = 0
33 | var selectedFormat: AVCaptureDeviceFormat?
34 | for format in availableFormats {
35 | guard let desc = format.formatDescription else {continue}
36 | let dimensions = CMVideoFormatDescriptionGetDimensions(desc)
37 | let width = dimensions.width
38 | if width >= maxWidth {
39 | maxWidth = width
40 | selectedFormat = format
41 | }
42 | }
43 | return selectedFormat
44 | }
45 |
46 | private func formatFor(preferredSize: CGSize, availableFormats: [AVCaptureDeviceFormat]) -> AVCaptureDeviceFormat?
47 | {
48 | for format in availableFormats {
49 | guard let desc = format.formatDescription else {continue}
50 | let dimensions = CMVideoFormatDescriptionGetDimensions(desc)
51 |
52 | if dimensions.width >= Int32(preferredSize.width) && dimensions.height >= Int32(preferredSize.height)
53 | {
54 | return format
55 | }
56 | }
57 | return nil
58 | }
59 |
60 |
61 | func updateFormatWithPreferredVideoSpec(preferredSpec: VideoSpec)
62 | {
63 | let availableFormats: [AVCaptureDeviceFormat]
64 | if let preferredFps = preferredSpec.fps {
65 | availableFormats = availableFormatsFor(preferredFps: Float64(preferredFps))
66 | }
67 | else {
68 | guard let allFormats = formats as? [AVCaptureDeviceFormat] else { return }
69 | availableFormats = allFormats
70 | }
71 |
72 | var selectedFormat: AVCaptureDeviceFormat?
73 | if let preferredSize = preferredSpec.size {
74 | selectedFormat = formatFor(preferredSize: preferredSize, availableFormats: availableFormats)
75 | } else {
76 | selectedFormat = formatWithHighestResolution(availableFormats)
77 | }
78 | print("selected format: \(selectedFormat)")
79 |
80 | if let selectedFormat = selectedFormat {
81 | do {
82 | try lockForConfiguration()
83 | }
84 | catch {
85 | fatalError("Could not lock device for configuration: \(error)")
86 | }
87 | activeFormat = selectedFormat
88 |
89 | if let preferredFps = preferredSpec.fps {
90 | activeVideoMinFrameDuration = CMTimeMake(1, preferredFps)
91 | activeVideoMaxFrameDuration = CMTimeMake(1, preferredFps)
92 | }
93 | unlockForConfiguration() // release the lock even when no fps was requested
94 | }
95 | }
96 | }
97 |
--------------------------------------------------------------------------------
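Pinning both `activeVideoMinFrameDuration` and `activeVideoMaxFrameDuration` to the same value locks the capture rate: the device then delivers frames exactly 1/fps seconds apart. A small sketch with the 3 fps spec that `ViewController` passes in later:

```swift
import CoreMedia

// With min and max frame duration both set to 1/fps, capture is locked at fps frames/s.
let fps: Int32 = 3
let frameDuration = CMTimeMake(1, fps)  // one frame every 1/3 s
print(CMTimeGetSeconds(frameDuration))  // 0.333...
```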
/tensorswift/Config.swift:
--------------------------------------------------------------------------------
1 | //
2 | // Config.swift
3 | // Tensorswift
4 | //
5 | // Created by Morten Just Petersen on 3/11/17.
6 | // Copyright © 2017 Morten Just Petersen. All rights reserved.
7 | //
8 |
9 | import UIKit
10 |
11 | class Config {
12 |
13 |
14 | // Lower the confidence if the app has a hard time recognizing your objects
15 | // Increase it if the app triggers on objects it shouldn't
16 |
17 | static var confidence = 0.8
18 |
19 |
20 | // For every object you train, add a URL that should be opened when the app sees that object
21 |
22 | static var seeThisOpenThat:[String:String] = [
23 | "catch-all" : "https://google.com/search?q=%s%20cable"
24 | // the label will be added to the end of the catch-all string
25 |
26 | // Add your specific labels here if you need to:
27 | // "peanut" : "https://google.com/search?q=peanut",
28 | ]
29 |
30 |
31 |
32 | // Labels in cable model. TODO: Add all these labels
33 |
34 | // 110-female
35 | // 110v
36 | // 110v-charger
37 | // 110v-grounded
38 | // 110v-grounded-female
39 | // apple-laptop-converted
40 | // apple-laptop-converter
41 | // apple-magnet
42 | // apple-usb-charger
43 | // apple-usb-mini
44 | // eightshape-power
45 | // jack
46 | // jack-female
47 | // jack-to-ligthning
48 | // lightning
49 | // micro-usb
50 | // mini-usb
51 | // network-female
52 | // thunderbolt
53 | // usb
54 | // usb-c
55 | // usb-c-female
56 | // usb-female
57 | //
58 |
59 |
60 | }
61 |
--------------------------------------------------------------------------------
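How a recognized label turns into a URL: an exact entry in `seeThisOpenThat` wins; otherwise hyphens become spaces, the result is percent-encoded, and it is substituted into the catch-all's `%s` slot. A sketch of that lookup (`url(for:)` is a hypothetical helper; the real logic lives in `ViewController.presentSeenObject(label:)` further down):

```swift
import Foundation

// Resolve a label against Config.seeThisOpenThat.
func url(for label: String) -> String? {
    // 1. A label-specific entry wins.
    if let exact = Config.seeThisOpenThat[label] { return exact }
    // 2. Otherwise fall back to the catch-all template.
    guard let catchAll = Config.seeThisOpenThat["catch-all"] else { return nil }
    let spaced = label.replacingOccurrences(of: "-", with: " ")
    let encoded = spaced.addingPercentEncoding(withAllowedCharacters: .urlPathAllowed) ?? spaced
    return catchAll.replacingOccurrences(of: "%s", with: encoded)
}

// url(for: "micro-usb") -> "https://google.com/search?q=micro%20usb%20cable"
```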
/tensorswift/TensorBridge.mm:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | #import <AVFoundation/AVFoundation.h>
5 | #import <AssetsLibrary/AssetsLibrary.h>
6 | #import <CoreImage/CoreImage.h>
7 | #import <ImageIO/ImageIO.h>
8 | #import "TensorBridge.h"
9 | //#import "CameraExample-Swift.h"
10 | #include "tensorflow/core/public/session.h"
11 | #include "tensorflow/core/util/memmapped_file_system.h"
12 |
13 | #include <memory>
14 |
15 | #import "tensorflow_utils.h"
16 |
17 | // If you have your own model, modify this to the file name, and make sure
18 | // you've added the file to your app resources too.
19 | static NSString* model_file_name = @"retrained_graph_stripped";
20 | static NSString* model_file_type = @"pb";
21 | // This controls whether we'll be loading a plain GraphDef proto, or a
22 | // file created by the convert_graphdef_memmapped_format utility that wraps a
23 | // GraphDef and parameter file that can be mapped into memory from file to
24 | // reduce overall memory usage.
25 | const bool model_uses_memory_mapping = false;
26 | // If you have your own model, point this to the labels file.
27 | static NSString* labels_file_name = @"retrained_labels";
28 | static NSString* labels_file_type = @"txt";
29 | // These dimensions need to match those the model was trained with.
30 | //const int wanted_input_width = 224;
31 | //const int wanted_input_height = 224;
32 | //const int wanted_input_channels = 3;
33 | //const float input_mean = 117.0f;
34 | //const float input_std = 1.0f;
35 |
36 | const int wanted_input_width = 299;
37 | const int wanted_input_height = 299;
38 | const int wanted_input_channels = 3;
39 | const float input_mean = 128.0f;
40 | const float input_std = 128.0f;
41 |
42 | id <TensorDelegate> delegate;
43 |
44 | //const std::string input_layer_name = "input";
45 | const std::string input_layer_name = "Mul";
46 | //const std::string output_layer_name = "softmax1";
47 | const std::string output_layer_name = "final_result:0";
48 |
49 | std::unique_ptr<tensorflow::Session> tf_session;
50 | std::unique_ptr<tensorflow::MemmappedEnv> tf_memmapped_env;
51 | std::vector<std::string> labels;
52 | NSMutableDictionary *oldPredictionValues;
53 |
54 |
55 | static const NSString *AVCaptureStillImageIsCapturingStillImageContext =
56 | @"AVCaptureStillImageIsCapturingStillImageContext";
57 |
58 | @implementation TensorBridge : NSObject
59 | @synthesize delegate;
60 |
61 |
62 | - (void)loadModel {
63 | oldPredictionValues = [[NSMutableDictionary alloc] init];
64 |
65 | tensorflow::Status load_status;
66 | if (model_uses_memory_mapping) {
67 | load_status = LoadMemoryMappedModel(
68 | model_file_name, model_file_type, &tf_session, &tf_memmapped_env);
69 | } else {
70 | load_status = LoadModel(model_file_name, model_file_type, &tf_session);
71 | }
72 | if (!load_status.ok()) {
73 | LOG(FATAL) << "Couldn't load model: " << load_status;
74 | }
75 |
76 | tensorflow::Status labels_status =
77 | LoadLabels(labels_file_name, labels_file_type, &labels);
78 | if (!labels_status.ok()) {
79 | LOG(FATAL) << "Couldn't load labels: " << labels_status;
80 | }
81 | }
82 |
83 | - (void)dealWithPredictionValues:(NSDictionary *)newValues {
84 | // NSLog(@"#Dealing with prediction values");
85 | [self.delegate tensorLabelListUpdated:newValues];
86 | }
87 |
88 |
89 | - (void)runCNNOnFrame:(CVPixelBufferRef)pixelBuffer {
90 | assert(pixelBuffer != NULL);
91 |
92 | OSType sourcePixelFormat = CVPixelBufferGetPixelFormatType(pixelBuffer);
93 | int doReverseChannels;
94 | if (kCVPixelFormatType_32ARGB == sourcePixelFormat) {
95 | doReverseChannels = 1;
96 | } else if (kCVPixelFormatType_32BGRA == sourcePixelFormat) {
97 | doReverseChannels = 0;
98 | } else {
99 | assert(false); // Unknown source format
100 | }
101 |
102 | const int sourceRowBytes = (int)CVPixelBufferGetBytesPerRow(pixelBuffer);
103 | const int image_width = (int)CVPixelBufferGetWidth(pixelBuffer);
104 | const int fullHeight = (int)CVPixelBufferGetHeight(pixelBuffer);
105 | CVPixelBufferLockBaseAddress(pixelBuffer, 0);
106 | unsigned char *sourceBaseAddr =
107 | (unsigned char *)(CVPixelBufferGetBaseAddress(pixelBuffer));
108 | int image_height;
109 | unsigned char *sourceStartAddr;
110 | if (fullHeight <= image_width) {
111 | image_height = fullHeight;
112 | sourceStartAddr = sourceBaseAddr;
113 | } else {
114 | image_height = image_width;
115 | const int marginY = ((fullHeight - image_width) / 2);
116 | sourceStartAddr = (sourceBaseAddr + (marginY * sourceRowBytes));
117 | }
118 | const int image_channels = 4;
119 |
120 | assert(image_channels >= wanted_input_channels);
121 | tensorflow::Tensor image_tensor(
122 | tensorflow::DT_FLOAT,
123 | tensorflow::TensorShape(
124 | {1, wanted_input_height, wanted_input_width, wanted_input_channels}));
125 | auto image_tensor_mapped = image_tensor.tensor<float, 4>();
126 | tensorflow::uint8 *in = sourceStartAddr;
127 | float *out = image_tensor_mapped.data();
128 | for (int y = 0; y < wanted_input_height; ++y) {
129 | float *out_row = out + (y * wanted_input_width * wanted_input_channels);
130 | for (int x = 0; x < wanted_input_width; ++x) {
131 | const int in_x = (y * image_width) / wanted_input_width;
132 | const int in_y = (x * image_height) / wanted_input_height;
133 | tensorflow::uint8 *in_pixel =
134 | in + (in_y * image_width * image_channels) + (in_x * image_channels);
135 | float *out_pixel = out_row + (x * wanted_input_channels);
136 | for (int c = 0; c < wanted_input_channels; ++c) {
137 | out_pixel[c] = (in_pixel[c] - input_mean) / input_std;
138 | }
139 | }
140 | }
141 |
142 | if (tf_session.get()) {
143 | std::vector<tensorflow::Tensor> outputs;
144 | tensorflow::Status run_status = tf_session->Run(
145 | {{input_layer_name, image_tensor}}, {output_layer_name}, {}, &outputs);
146 | if (!run_status.ok()) {
147 | LOG(ERROR) << "Running model failed:" << run_status;
148 | } else {
149 | tensorflow::Tensor *output = &outputs[0];
150 | auto predictions = output->flat<float>();
151 |
152 | NSMutableDictionary *newValues = [NSMutableDictionary dictionary];
153 | for (int index = 0; index < predictions.size(); index += 1) {
154 | const float predictionValue = predictions(index);
155 | if (predictionValue > 0.05f) {
156 | std::string label = labels[index % predictions.size()];
157 | NSString *labelObject = [NSString stringWithUTF8String:label.c_str()];
158 | NSNumber *valueObject = [NSNumber numberWithFloat:predictionValue];
159 | [newValues setObject:valueObject forKey:labelObject];
160 | }
161 | }
162 | dispatch_async(dispatch_get_main_queue(), ^(void) {
163 |
164 | // [self setPredictionValues:newValues];
165 | [self dealWithPredictionValues:newValues];
166 | });
167 | }
168 | }
169 | }
170 |
171 | //- (void)dealloc {
172 | // [super dealloc];
173 | //}
174 |
175 | @end
176 |
177 |
--------------------------------------------------------------------------------
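On the constants: the commented-out values (224, mean 117, std 1) appear to match the stock TensorFlow camera example's Inception graph, while the active 299/128/128 values fit graphs produced by the `retrain.py` image-retraining script on Inception v3, whose `Mul` input expects 299x299 RGB scaled to roughly -1...1. The per-channel transform is `(pixel - mean) / std`; a quick check of the resulting range:

```swift
// (pixel - 128) / 128 maps byte values 0...255 into roughly -1.0...0.99.
let inputMean: Float = 128.0
let inputStd: Float = 128.0
for pixel: Float in [0, 128, 255] {
    print((pixel - inputMean) / inputStd)  // -1.0, 0.0, 0.9921875
}
```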
/tensorswift/Tensorswift-Bridging-Header.h:
--------------------------------------------------------------------------------
1 | //
2 | // Use this file to import your target's public headers that you would like to expose to Swift.
3 | //
4 |
5 |
6 | #import "TensorBridge.h"
7 |
--------------------------------------------------------------------------------
/tensorswift/Tensorswift.xcodeproj/project.pbxproj:
--------------------------------------------------------------------------------
1 | // !$*UTF8*$!
2 | {
3 | archiveVersion = 1;
4 | classes = {
5 | };
6 | objectVersion = 46;
7 | objects = {
8 |
9 | /* Begin PBXBuildFile section */
10 | C87CC5C41E24B863000F83A5 /* AppDelegate.swift in Sources */ = {isa = PBXBuildFile; fileRef = C87CC5C31E24B863000F83A5 /* AppDelegate.swift */; };
11 | C87CC5C61E24B863000F83A5 /* ViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = C87CC5C51E24B863000F83A5 /* ViewController.swift */; };
12 | C87CC5C91E24B863000F83A5 /* Main.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = C87CC5C71E24B863000F83A5 /* Main.storyboard */; };
13 | C87CC5CB1E24B863000F83A5 /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = C87CC5CA1E24B863000F83A5 /* Assets.xcassets */; };
14 | C87CC5CE1E24B863000F83A5 /* LaunchScreen.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = C87CC5CC1E24B863000F83A5 /* LaunchScreen.storyboard */; };
15 | C87CC5DB1E24BAE7000F83A5 /* libprotobuf-lite.a in Frameworks */ = {isa = PBXBuildFile; fileRef = C87CC5D91E24BAE7000F83A5 /* libprotobuf-lite.a */; };
16 | C87CC5DC1E24BAE7000F83A5 /* libprotobuf.a in Frameworks */ = {isa = PBXBuildFile; fileRef = C87CC5DA1E24BAE7000F83A5 /* libprotobuf.a */; };
17 | C87CC5DF1E24BCEB000F83A5 /* Accelerate.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = C87CC5DE1E24BCEB000F83A5 /* Accelerate.framework */; };
18 | C87CC5EC1E260EBE000F83A5 /* ios_image_load.mm in Sources */ = {isa = PBXBuildFile; fileRef = C87CC5E91E260EBE000F83A5 /* ios_image_load.mm */; };
19 | C87CC5ED1E260EBE000F83A5 /* tensorflow_utils.mm in Sources */ = {isa = PBXBuildFile; fileRef = C87CC5EB1E260EBE000F83A5 /* tensorflow_utils.mm */; };
20 | C8B98FD91E7491A000E5D1D5 /* SeenObjectViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = C8B98FD81E7491A000E5D1D5 /* SeenObjectViewController.swift */; };
21 | C8B98FDB1E74E04800E5D1D5 /* Config.swift in Sources */ = {isa = PBXBuildFile; fileRef = C8B98FDA1E74E04800E5D1D5 /* Config.swift */; };
22 | C8D4A38F1E288255005B0640 /* VideoCapture.swift in Sources */ = {isa = PBXBuildFile; fileRef = C8D4A38E1E288255005B0640 /* VideoCapture.swift */; };
23 | C8D4A3911E288321005B0640 /* AVCaptureDevice+Extension.swift in Sources */ = {isa = PBXBuildFile; fileRef = C8D4A3901E288321005B0640 /* AVCaptureDevice+Extension.swift */; };
24 | C8D4A3B81E28B86E005B0640 /* UIKit.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = C8D4A3B71E28B86E005B0640 /* UIKit.framework */; };
25 | C8D4A3BA1E28BB5C005B0640 /* TensorBridge.mm in Sources */ = {isa = PBXBuildFile; fileRef = C8D4A3B91E28BB5C005B0640 /* TensorBridge.mm */; };
26 | C8EE27BB1E85837D0083060E /* retrained_graph_stripped.pb in Resources */ = {isa = PBXBuildFile; fileRef = C8EE27B91E85837D0083060E /* retrained_graph_stripped.pb */; };
27 | C8EE27BC1E85837D0083060E /* retrained_labels.txt in Resources */ = {isa = PBXBuildFile; fileRef = C8EE27BA1E85837D0083060E /* retrained_labels.txt */; };
28 | /* End PBXBuildFile section */
29 |
30 | /* Begin PBXFileReference section */
31 | C87CC5C01E24B863000F83A5 /* Tensorswift.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = Tensorswift.app; sourceTree = BUILT_PRODUCTS_DIR; };
32 | C87CC5C31E24B863000F83A5 /* AppDelegate.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AppDelegate.swift; sourceTree = "<group>"; };
33 | C87CC5C51E24B863000F83A5 /* ViewController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ViewController.swift; sourceTree = "<group>"; };
34 | C87CC5C81E24B863000F83A5 /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/Main.storyboard; sourceTree = "<group>"; };
35 | C87CC5CA1E24B863000F83A5 /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = "<group>"; };
36 | C87CC5CD1E24B863000F83A5 /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/LaunchScreen.storyboard; sourceTree = "<group>"; };
37 | C87CC5CF1E24B863000F83A5 /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = "<group>"; };
38 | C87CC5D51E24B88C000F83A5 /* TensorBridge.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = TensorBridge.h; path = Tensorswift/TensorBridge.h; sourceTree = "<group>"; };
39 | C87CC5D61E24B8B4000F83A5 /* Tensorswift-Bridging-Header.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = "Tensorswift-Bridging-Header.h"; sourceTree = "<group>"; };
40 | C87CC5D91E24BAE7000F83A5 /* libprotobuf-lite.a */ = {isa = PBXFileReference; lastKnownFileType = archive.ar; path = "libprotobuf-lite.a"; sourceTree = "<group>"; };
41 | C87CC5DA1E24BAE7000F83A5 /* libprotobuf.a */ = {isa = PBXFileReference; lastKnownFileType = archive.ar; path = libprotobuf.a; sourceTree = "<group>"; };
42 | C87CC5DE1E24BCEB000F83A5 /* Accelerate.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = Accelerate.framework; path = System/Library/Frameworks/Accelerate.framework; sourceTree = SDKROOT; };
43 | C87CC5E81E260EBE000F83A5 /* ios_image_load.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = ios_image_load.h; sourceTree = "<group>"; };
44 | C87CC5E91E260EBE000F83A5 /* ios_image_load.mm */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.objcpp; path = ios_image_load.mm; sourceTree = "<group>"; };
45 | C87CC5EA1E260EBE000F83A5 /* tensorflow_utils.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = tensorflow_utils.h; sourceTree = "<group>"; };
46 | C87CC5EB1E260EBE000F83A5 /* tensorflow_utils.mm */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.objcpp; path = tensorflow_utils.mm; sourceTree = "<group>"; };
47 | C8B98FD81E7491A000E5D1D5 /* SeenObjectViewController.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = SeenObjectViewController.swift; sourceTree = "<group>"; };
48 | C8B98FDA1E74E04800E5D1D5 /* Config.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = Config.swift; sourceTree = "<group>"; };
49 | C8D4A38E1E288255005B0640 /* VideoCapture.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = VideoCapture.swift; sourceTree = "<group>"; };
50 | C8D4A3901E288321005B0640 /* AVCaptureDevice+Extension.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = "AVCaptureDevice+Extension.swift"; sourceTree = "<group>"; };
51 | C8D4A3B71E28B86E005B0640 /* UIKit.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = UIKit.framework; path = System/Library/Frameworks/UIKit.framework; sourceTree = SDKROOT; };
52 | C8D4A3B91E28BB5C005B0640 /* TensorBridge.mm */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.objcpp; path = TensorBridge.mm; sourceTree = "<group>"; };
53 | C8EE27B91E85837D0083060E /* retrained_graph_stripped.pb */ = {isa = PBXFileReference; lastKnownFileType = file; path = retrained_graph_stripped.pb; sourceTree = "<group>"; };
54 | C8EE27BA1E85837D0083060E /* retrained_labels.txt */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text; path = retrained_labels.txt; sourceTree = "<group>"; };
55 | /* End PBXFileReference section */
56 |
57 | /* Begin PBXFrameworksBuildPhase section */
58 | C87CC5BD1E24B863000F83A5 /* Frameworks */ = {
59 | isa = PBXFrameworksBuildPhase;
60 | buildActionMask = 2147483647;
61 | files = (
62 | C8D4A3B81E28B86E005B0640 /* UIKit.framework in Frameworks */,
63 | C87CC5DF1E24BCEB000F83A5 /* Accelerate.framework in Frameworks */,
64 | C87CC5DC1E24BAE7000F83A5 /* libprotobuf.a in Frameworks */,
65 | C87CC5DB1E24BAE7000F83A5 /* libprotobuf-lite.a in Frameworks */,
66 | );
67 | runOnlyForDeploymentPostprocessing = 0;
68 | };
69 | /* End PBXFrameworksBuildPhase section */
70 |
71 | /* Begin PBXGroup section */
72 | C87CC5B71E24B863000F83A5 = {
73 | isa = PBXGroup;
74 | children = (
75 | C8B98FDA1E74E04800E5D1D5 /* Config.swift */,
76 | C8D4A3BB1E294D1B005B0640 /* Model */,
77 | C8D4A3901E288321005B0640 /* AVCaptureDevice+Extension.swift */,
78 | C8D4A38E1E288255005B0640 /* VideoCapture.swift */,
79 | C87CC5D61E24B8B4000F83A5 /* Tensorswift-Bridging-Header.h */,
80 | C87CC5D51E24B88C000F83A5 /* TensorBridge.h */,
81 | C8D4A3B91E28BB5C005B0640 /* TensorBridge.mm */,
82 | C87CC5E81E260EBE000F83A5 /* ios_image_load.h */,
83 | C87CC5E91E260EBE000F83A5 /* ios_image_load.mm */,
84 | C87CC5EA1E260EBE000F83A5 /* tensorflow_utils.h */,
85 | C87CC5EB1E260EBE000F83A5 /* tensorflow_utils.mm */,
86 | C87CC5C21E24B863000F83A5 /* Tensorswift */,
87 | C87CC5C11E24B863000F83A5 /* Products */,
88 | C87CC5DD1E24BCEB000F83A5 /* Frameworks */,
89 | );
90 | sourceTree = "<group>";
91 | };
92 | C87CC5C11E24B863000F83A5 /* Products */ = {
93 | isa = PBXGroup;
94 | children = (
95 | C87CC5C01E24B863000F83A5 /* Tensorswift.app */,
96 | );
97 | name = Products;
98 | sourceTree = "<group>";
99 | };
100 | C87CC5C21E24B863000F83A5 /* Tensorswift */ = {
101 | isa = PBXGroup;
102 | children = (
103 | C87CC5C31E24B863000F83A5 /* AppDelegate.swift */,
104 | C87CC5C51E24B863000F83A5 /* ViewController.swift */,
105 | C87CC5C71E24B863000F83A5 /* Main.storyboard */,
106 | C8B98FD81E7491A000E5D1D5 /* SeenObjectViewController.swift */,
107 | C87CC5CA1E24B863000F83A5 /* Assets.xcassets */,
108 | C87CC5CC1E24B863000F83A5 /* LaunchScreen.storyboard */,
109 | C87CC5CF1E24B863000F83A5 /* Info.plist */,
110 | );
111 | path = Tensorswift;
112 | sourceTree = "<group>";
113 | };
114 | C87CC5DD1E24BCEB000F83A5 /* Frameworks */ = {
115 | isa = PBXGroup;
116 | children = (
117 | C8D4A3B71E28B86E005B0640 /* UIKit.framework */,
118 | C87CC5D91E24BAE7000F83A5 /* libprotobuf-lite.a */,
119 | C87CC5DA1E24BAE7000F83A5 /* libprotobuf.a */,
120 | C87CC5DE1E24BCEB000F83A5 /* Accelerate.framework */,
121 | );
122 | name = Frameworks;
123 | sourceTree = "<group>";
124 | };
125 | C8D4A3BB1E294D1B005B0640 /* Model */ = {
126 | isa = PBXGroup;
127 | children = (
128 | C8EE27B91E85837D0083060E /* retrained_graph_stripped.pb */,
129 | C8EE27BA1E85837D0083060E /* retrained_labels.txt */,
130 | );
131 | name = Model;
132 | sourceTree = "<group>";
133 | };
134 | /* End PBXGroup section */
135 |
136 | /* Begin PBXNativeTarget section */
137 | C87CC5BF1E24B863000F83A5 /* Tensorswift */ = {
138 | isa = PBXNativeTarget;
139 | buildConfigurationList = C87CC5D21E24B863000F83A5 /* Build configuration list for PBXNativeTarget "Tensorswift" */;
140 | buildPhases = (
141 | C87CC5BC1E24B863000F83A5 /* Sources */,
142 | C87CC5BD1E24B863000F83A5 /* Frameworks */,
143 | C87CC5BE1E24B863000F83A5 /* Resources */,
144 | );
145 | buildRules = (
146 | );
147 | dependencies = (
148 | );
149 | name = Tensorswift;
150 | productName = Tensorswift;
151 | productReference = C87CC5C01E24B863000F83A5 /* Tensorswift.app */;
152 | productType = "com.apple.product-type.application";
153 | };
154 | /* End PBXNativeTarget section */
155 |
156 | /* Begin PBXProject section */
157 | C87CC5B81E24B863000F83A5 /* Project object */ = {
158 | isa = PBXProject;
159 | attributes = {
160 | LastSwiftUpdateCheck = 0820;
161 | LastUpgradeCheck = 0820;
162 | ORGANIZATIONNAME = "Morten Just Petersen";
163 | TargetAttributes = {
164 | C87CC5BF1E24B863000F83A5 = {
165 | CreatedOnToolsVersion = 8.2;
166 | DevelopmentTeam = UDGLB23X37;
167 | LastSwiftMigration = 0820;
168 | ProvisioningStyle = Automatic;
169 | };
170 | };
171 | };
172 | buildConfigurationList = C87CC5BB1E24B863000F83A5 /* Build configuration list for PBXProject "Tensorswift" */;
173 | compatibilityVersion = "Xcode 3.2";
174 | developmentRegion = English;
175 | hasScannedForEncodings = 0;
176 | knownRegions = (
177 | en,
178 | Base,
179 | );
180 | mainGroup = C87CC5B71E24B863000F83A5;
181 | productRefGroup = C87CC5C11E24B863000F83A5 /* Products */;
182 | projectDirPath = "";
183 | projectRoot = "";
184 | targets = (
185 | C87CC5BF1E24B863000F83A5 /* Tensorswift */,
186 | );
187 | };
188 | /* End PBXProject section */
189 |
190 | /* Begin PBXResourcesBuildPhase section */
191 | C87CC5BE1E24B863000F83A5 /* Resources */ = {
192 | isa = PBXResourcesBuildPhase;
193 | buildActionMask = 2147483647;
194 | files = (
195 | C8EE27BB1E85837D0083060E /* retrained_graph_stripped.pb in Resources */,
196 | C8EE27BC1E85837D0083060E /* retrained_labels.txt in Resources */,
197 | C87CC5CE1E24B863000F83A5 /* LaunchScreen.storyboard in Resources */,
198 | C87CC5CB1E24B863000F83A5 /* Assets.xcassets in Resources */,
199 | C87CC5C91E24B863000F83A5 /* Main.storyboard in Resources */,
200 | );
201 | runOnlyForDeploymentPostprocessing = 0;
202 | };
203 | /* End PBXResourcesBuildPhase section */
204 |
205 | /* Begin PBXSourcesBuildPhase section */
206 | C87CC5BC1E24B863000F83A5 /* Sources */ = {
207 | isa = PBXSourcesBuildPhase;
208 | buildActionMask = 2147483647;
209 | files = (
210 | C87CC5C61E24B863000F83A5 /* ViewController.swift in Sources */,
211 | C87CC5EC1E260EBE000F83A5 /* ios_image_load.mm in Sources */,
212 | C8B98FDB1E74E04800E5D1D5 /* Config.swift in Sources */,
213 | C8B98FD91E7491A000E5D1D5 /* SeenObjectViewController.swift in Sources */,
214 | C87CC5ED1E260EBE000F83A5 /* tensorflow_utils.mm in Sources */,
215 | C8D4A38F1E288255005B0640 /* VideoCapture.swift in Sources */,
216 | C8D4A3BA1E28BB5C005B0640 /* TensorBridge.mm in Sources */,
217 | C8D4A3911E288321005B0640 /* AVCaptureDevice+Extension.swift in Sources */,
218 | C87CC5C41E24B863000F83A5 /* AppDelegate.swift in Sources */,
219 | );
220 | runOnlyForDeploymentPostprocessing = 0;
221 | };
222 | /* End PBXSourcesBuildPhase section */
223 |
224 | /* Begin PBXVariantGroup section */
225 | C87CC5C71E24B863000F83A5 /* Main.storyboard */ = {
226 | isa = PBXVariantGroup;
227 | children = (
228 | C87CC5C81E24B863000F83A5 /* Base */,
229 | );
230 | name = Main.storyboard;
231 | sourceTree = "<group>";
232 | };
233 | C87CC5CC1E24B863000F83A5 /* LaunchScreen.storyboard */ = {
234 | isa = PBXVariantGroup;
235 | children = (
236 | C87CC5CD1E24B863000F83A5 /* Base */,
237 | );
238 | name = LaunchScreen.storyboard;
239 | sourceTree = "<group>";
240 | };
241 | /* End PBXVariantGroup section */
242 |
243 | /* Begin XCBuildConfiguration section */
244 | C87CC5D01E24B863000F83A5 /* Debug */ = {
245 | isa = XCBuildConfiguration;
246 | buildSettings = {
247 | ALWAYS_SEARCH_USER_PATHS = YES;
248 | CLANG_ANALYZER_NONNULL = YES;
249 | CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x";
250 | CLANG_CXX_LIBRARY = "libc++";
251 | CLANG_ENABLE_MODULES = YES;
252 | CLANG_ENABLE_OBJC_ARC = YES;
253 | CLANG_WARN_BOOL_CONVERSION = YES;
254 | CLANG_WARN_CONSTANT_CONVERSION = YES;
255 | CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
256 | CLANG_WARN_DOCUMENTATION_COMMENTS = YES;
257 | CLANG_WARN_EMPTY_BODY = YES;
258 | CLANG_WARN_ENUM_CONVERSION = YES;
259 | CLANG_WARN_INFINITE_RECURSION = YES;
260 | CLANG_WARN_INT_CONVERSION = YES;
261 | CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
262 | CLANG_WARN_SUSPICIOUS_MOVE = YES;
263 | CLANG_WARN_UNREACHABLE_CODE = YES;
264 | CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
265 | CODE_SIGN_IDENTITY = "iPhone Developer";
266 | "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer";
267 | COPY_PHASE_STRIP = NO;
268 | DEBUG_INFORMATION_FORMAT = dwarf;
269 | ENABLE_BITCODE = NO;
270 | ENABLE_STRICT_OBJC_MSGSEND = YES;
271 | ENABLE_TESTABILITY = YES;
272 | GCC_C_LANGUAGE_STANDARD = gnu99;
273 | GCC_DYNAMIC_NO_PIC = NO;
274 | GCC_INPUT_FILETYPE = sourcecode.cpp.objcpp;
275 | GCC_NO_COMMON_BLOCKS = YES;
276 | GCC_OPTIMIZATION_LEVEL = 0;
277 | GCC_PREPROCESSOR_DEFINITIONS = (
278 | "DEBUG=1",
279 | "$(inherited)",
280 | );
281 | GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
282 | GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
283 | GCC_WARN_UNDECLARED_SELECTOR = YES;
284 | GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
285 | GCC_WARN_UNUSED_FUNCTION = YES;
286 | GCC_WARN_UNUSED_VARIABLE = YES;
287 | IPHONEOS_DEPLOYMENT_TARGET = 10.2;
288 | MTL_ENABLE_DEBUG_INFO = YES;
289 | ONLY_ACTIVE_ARCH = YES;
290 | SDKROOT = iphoneos;
291 | SWIFT_ACTIVE_COMPILATION_CONDITIONS = DEBUG;
292 | SWIFT_OPTIMIZATION_LEVEL = "-Onone";
293 | TARGETED_DEVICE_FAMILY = "1,2";
294 | };
295 | name = Debug;
296 | };
297 | C87CC5D11E24B863000F83A5 /* Release */ = {
298 | isa = XCBuildConfiguration;
299 | buildSettings = {
300 | ALWAYS_SEARCH_USER_PATHS = YES;
301 | CLANG_ANALYZER_NONNULL = YES;
302 | CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x";
303 | CLANG_CXX_LIBRARY = "libc++";
304 | CLANG_ENABLE_MODULES = YES;
305 | CLANG_ENABLE_OBJC_ARC = YES;
306 | CLANG_WARN_BOOL_CONVERSION = YES;
307 | CLANG_WARN_CONSTANT_CONVERSION = YES;
308 | CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
309 | CLANG_WARN_DOCUMENTATION_COMMENTS = YES;
310 | CLANG_WARN_EMPTY_BODY = YES;
311 | CLANG_WARN_ENUM_CONVERSION = YES;
312 | CLANG_WARN_INFINITE_RECURSION = YES;
313 | CLANG_WARN_INT_CONVERSION = YES;
314 | CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
315 | CLANG_WARN_SUSPICIOUS_MOVE = YES;
316 | CLANG_WARN_UNREACHABLE_CODE = YES;
317 | CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
318 | CODE_SIGN_IDENTITY = "iPhone Developer";
319 | "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer";
320 | COPY_PHASE_STRIP = NO;
321 | DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym";
322 | ENABLE_BITCODE = NO;
323 | ENABLE_NS_ASSERTIONS = NO;
324 | ENABLE_STRICT_OBJC_MSGSEND = YES;
325 | GCC_C_LANGUAGE_STANDARD = gnu99;
326 | GCC_INPUT_FILETYPE = sourcecode.cpp.objcpp;
327 | GCC_NO_COMMON_BLOCKS = YES;
328 | GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
329 | GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
330 | GCC_WARN_UNDECLARED_SELECTOR = YES;
331 | GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
332 | GCC_WARN_UNUSED_FUNCTION = YES;
333 | GCC_WARN_UNUSED_VARIABLE = YES;
334 | IPHONEOS_DEPLOYMENT_TARGET = 10.2;
335 | MTL_ENABLE_DEBUG_INFO = NO;
336 | SDKROOT = iphoneos;
337 | SWIFT_OPTIMIZATION_LEVEL = "-Owholemodule";
338 | TARGETED_DEVICE_FAMILY = "1,2";
339 | VALIDATE_PRODUCT = YES;
340 | };
341 | name = Release;
342 | };
343 | C87CC5D31E24B863000F83A5 /* Debug */ = {
344 | isa = XCBuildConfiguration;
345 | buildSettings = {
346 | ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
347 | CLANG_ENABLE_MODULES = YES;
348 | "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer";
349 | DEVELOPMENT_TEAM = UDGLB23X37;
350 | FRAMEWORK_SEARCH_PATHS = "$(inherited)";
351 | HEADER_SEARCH_PATHS = (
352 | "$(SRCROOT)/../tensorflow/",
353 | "$(SRCROOT)/../tensorflow/tensorflow/contrib/makefile/gen/proto",
354 | "$(SRCROOT)/../tensorflow/tensorflow/contrib/makefile/downloads/protobuf/src",
355 | "$(SRCROOT)/../tensorflow/tensorflow/contrib/makefile/downloads/eigen",
356 | "$(SRCROOT)/../tensorflow/tensorflow/contrib/makefile/downloads",
357 | );
358 | INFOPLIST_FILE = Tensorswift/Info.plist;
359 | LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks";
360 | LIBRARY_SEARCH_PATHS = (
361 | "$(inherited)",
362 | "$(PROJECT_DIR)",
363 | "$(SRCROOT)/../tensorflow/",
364 | "$(SRCROOT)/../tensorflow/tensorflow/contrib/makefile/gen/lib",
365 | "$(SRCROOT)/../tensorflow/tensorflow/contrib/makefile/gen/protobuf_ios/lib",
366 | );
367 | "OTHER_LDFLAGS[arch=*]" = (
368 | "-force_load",
369 | "$(SRCROOT)/../tensorflow/tensorflow/contrib/makefile/gen/lib/libtensorflow-core.a",
370 | );
371 | PRODUCT_BUNDLE_IDENTIFIER = com.mortenjust.Tensorswift;
372 | PRODUCT_NAME = "$(TARGET_NAME)";
373 | PROVISIONING_PROFILE = "";
374 | PROVISIONING_PROFILE_SPECIFIER = "";
375 | SWIFT_OBJC_BRIDGING_HEADER = "Tensorswift-Bridging-Header.h";
376 | SWIFT_OPTIMIZATION_LEVEL = "-Onone";
377 | SWIFT_VERSION = 3.0;
378 | };
379 | name = Debug;
380 | };
381 | C87CC5D41E24B863000F83A5 /* Release */ = {
382 | isa = XCBuildConfiguration;
383 | buildSettings = {
384 | ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
385 | CLANG_ENABLE_MODULES = YES;
386 | "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer";
387 | DEVELOPMENT_TEAM = UDGLB23X37;
388 | FRAMEWORK_SEARCH_PATHS = "$(inherited)";
389 | HEADER_SEARCH_PATHS = (
390 | "$(SRCROOT)/../tensorflow/",
391 | "$(SRCROOT)/../tensorflow/tensorflow/contrib/makefile/gen/proto",
392 | "$(SRCROOT)/../tensorflow/tensorflow/contrib/makefile/downloads/protobuf/src",
393 | "$(SRCROOT)/../tensorflow/tensorflow/contrib/makefile/downloads/eigen",
394 | "$(SRCROOT)/../tensorflow/tensorflow/contrib/makefile/downloads",
395 | );
396 | INFOPLIST_FILE = Tensorswift/Info.plist;
397 | LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks";
398 | LIBRARY_SEARCH_PATHS = (
399 | "$(inherited)",
400 | "$(PROJECT_DIR)",
401 | "$(SRCROOT)/../tensorflow/",
402 | "$(SRCROOT)/../tensorflow/tensorflow/contrib/makefile/gen/lib",
403 | "$(SRCROOT)/../tensorflow/tensorflow/contrib/makefile/gen/protobuf_ios/lib",
404 | );
405 | "OTHER_LDFLAGS[arch=*]" = (
406 | "-force_load",
407 | "$(SRCROOT)/../tensorflow/tensorflow/makefile/gen/lib/libtensorflow-core.a",
408 | );
409 | PRODUCT_BUNDLE_IDENTIFIER = com.mortenjust.Tensorswift;
410 | PRODUCT_NAME = "$(TARGET_NAME)";
411 | PROVISIONING_PROFILE = "";
412 | PROVISIONING_PROFILE_SPECIFIER = "";
413 | SWIFT_OBJC_BRIDGING_HEADER = "Tensorswift-Bridging-Header.h";
414 | SWIFT_VERSION = 3.0;
415 | };
416 | name = Release;
417 | };
418 | /* End XCBuildConfiguration section */
419 |
420 | /* Begin XCConfigurationList section */
421 | C87CC5BB1E24B863000F83A5 /* Build configuration list for PBXProject "Tensorswift" */ = {
422 | isa = XCConfigurationList;
423 | buildConfigurations = (
424 | C87CC5D01E24B863000F83A5 /* Debug */,
425 | C87CC5D11E24B863000F83A5 /* Release */,
426 | );
427 | defaultConfigurationIsVisible = 0;
428 | defaultConfigurationName = Release;
429 | };
430 | C87CC5D21E24B863000F83A5 /* Build configuration list for PBXNativeTarget "Tensorswift" */ = {
431 | isa = XCConfigurationList;
432 | buildConfigurations = (
433 | C87CC5D31E24B863000F83A5 /* Debug */,
434 | C87CC5D41E24B863000F83A5 /* Release */,
435 | );
436 | defaultConfigurationIsVisible = 0;
437 | defaultConfigurationName = Release;
438 | };
439 | /* End XCConfigurationList section */
440 | };
441 | rootObject = C87CC5B81E24B863000F83A5 /* Project object */;
442 | }
443 |
--------------------------------------------------------------------------------
/tensorswift/Tensorswift.xcodeproj/project.xcworkspace/contents.xcworkspacedata:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <Workspace
3 |    version = "1.0">
4 |    <FileRef
5 |       location = "self:Tensorswift.xcodeproj">
6 |    </FileRef>
7 | </Workspace>
--------------------------------------------------------------------------------
/tensorswift/Tensorswift/AppDelegate.swift:
--------------------------------------------------------------------------------
1 | //
2 | // AppDelegate.swift
3 | // Tensorswift
4 | //
5 | // Created by Morten Just Petersen on 1/9/17.
6 | // Copyright © 2017 Morten Just Petersen. All rights reserved.
7 | //
8 |
9 | import UIKit
10 |
11 | @UIApplicationMain
12 | class AppDelegate: UIResponder, UIApplicationDelegate {
13 |
14 | var window: UIWindow?
15 |
16 |
17 | func application(_ application: UIApplication, didFinishLaunchingWithOptions launchOptions: [UIApplicationLaunchOptionsKey: Any]?) -> Bool {
18 | // Override point for customization after application launch.
19 | return true
20 | }
21 |
22 | func applicationWillResignActive(_ application: UIApplication) {
23 | // Sent when the application is about to move from active to inactive state. This can occur for certain types of temporary interruptions (such as an incoming phone call or SMS message) or when the user quits the application and it begins the transition to the background state.
24 | // Use this method to pause ongoing tasks, disable timers, and invalidate graphics rendering callbacks. Games should use this method to pause the game.
25 | }
26 |
27 | func applicationDidEnterBackground(_ application: UIApplication) {
28 | // Use this method to release shared resources, save user data, invalidate timers, and store enough application state information to restore your application to its current state in case it is terminated later.
29 | // If your application supports background execution, this method is called instead of applicationWillTerminate: when the user quits.
30 | }
31 |
32 | func applicationWillEnterForeground(_ application: UIApplication) {
33 | // Called as part of the transition from the background to the active state; here you can undo many of the changes made on entering the background.
34 | }
35 |
36 | func applicationDidBecomeActive(_ application: UIApplication) {
37 | // Restart any tasks that were paused (or not yet started) while the application was inactive. If the application was previously in the background, optionally refresh the user interface.
38 | }
39 |
40 | func applicationWillTerminate(_ application: UIApplication) {
41 | // Called when the application is about to terminate. Save data if appropriate. See also applicationDidEnterBackground:.
42 | }
43 |
44 |
45 | }
46 |
47 |
--------------------------------------------------------------------------------
/tensorswift/Tensorswift/Assets.xcassets/AppIcon.appiconset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "idiom" : "iphone",
5 | "size" : "29x29",
6 | "scale" : "2x"
7 | },
8 | {
9 | "idiom" : "iphone",
10 | "size" : "29x29",
11 | "scale" : "3x"
12 | },
13 | {
14 | "idiom" : "iphone",
15 | "size" : "40x40",
16 | "scale" : "2x"
17 | },
18 | {
19 | "idiom" : "iphone",
20 | "size" : "40x40",
21 | "scale" : "3x"
22 | },
23 | {
24 | "idiom" : "iphone",
25 | "size" : "60x60",
26 | "scale" : "2x"
27 | },
28 | {
29 | "idiom" : "iphone",
30 | "size" : "60x60",
31 | "scale" : "3x"
32 | },
33 | {
34 | "idiom" : "ipad",
35 | "size" : "29x29",
36 | "scale" : "1x"
37 | },
38 | {
39 | "idiom" : "ipad",
40 | "size" : "29x29",
41 | "scale" : "2x"
42 | },
43 | {
44 | "idiom" : "ipad",
45 | "size" : "40x40",
46 | "scale" : "1x"
47 | },
48 | {
49 | "idiom" : "ipad",
50 | "size" : "40x40",
51 | "scale" : "2x"
52 | },
53 | {
54 | "idiom" : "ipad",
55 | "size" : "76x76",
56 | "scale" : "1x"
57 | },
58 | {
59 | "idiom" : "ipad",
60 | "size" : "76x76",
61 | "scale" : "2x"
62 | }
63 | ],
64 | "info" : {
65 | "version" : 1,
66 | "author" : "xcode"
67 | }
68 | }
--------------------------------------------------------------------------------
/tensorswift/Tensorswift/Base.lproj/LaunchScreen.storyboard:
--------------------------------------------------------------------------------
(storyboard XML lost in extraction; presumably the stock empty launch-screen scene generated by Xcode)
--------------------------------------------------------------------------------
/tensorswift/Tensorswift/Base.lproj/Main.storyboard:
--------------------------------------------------------------------------------
(storyboard XML lost in extraction; judging from the Swift sources it held two scenes: the main camera scene, a ViewController with the previewView outlet, and a SeenObjectViewController scene with storyboard identifier "webView")
--------------------------------------------------------------------------------
/tensorswift/Tensorswift/Info.plist:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
3 | <plist version="1.0">
4 | <dict>
5 | <key>CFBundleDevelopmentRegion</key>
6 | <string>en</string>
7 | <key>CFBundleExecutable</key>
8 | <string>$(EXECUTABLE_NAME)</string>
9 | <key>CFBundleIdentifier</key>
10 | <string>$(PRODUCT_BUNDLE_IDENTIFIER)</string>
11 | <key>CFBundleInfoDictionaryVersion</key>
12 | <string>6.0</string>
13 | <key>CFBundleName</key>
14 | <string>$(PRODUCT_NAME)</string>
15 | <key>CFBundlePackageType</key>
16 | <string>APPL</string>
17 | <key>CFBundleShortVersionString</key>
18 | <string>1.0</string>
19 | <key>CFBundleVersion</key>
20 | <string>1</string>
21 | <key>LSRequiresIPhoneOS</key>
22 | <true/>
23 | <key>UILaunchStoryboardName</key>
24 | <string>LaunchScreen</string>
25 | <key>UIMainStoryboardFile</key>
26 | <string>Main</string>
27 | <key>UIRequiredDeviceCapabilities</key>
28 | <array>
29 | <string>armv7</string>
30 | </array>
31 | <key>UISupportedInterfaceOrientations</key>
32 | <array>
33 | <string>UIInterfaceOrientationPortrait</string>
34 | <string>UIInterfaceOrientationLandscapeLeft</string>
35 | <string>UIInterfaceOrientationLandscapeRight</string>
36 | </array>
37 | <key>UISupportedInterfaceOrientations~ipad</key>
38 | <array>
39 | <string>UIInterfaceOrientationPortrait</string>
40 | <string>UIInterfaceOrientationPortraitUpsideDown</string>
41 | <string>UIInterfaceOrientationLandscapeLeft</string>
42 | <string>UIInterfaceOrientationLandscapeRight</string>
43 | </array>
44 | <key>NSMicrophoneUsageDescription</key>
45 | <string>Recognize things</string>
46 | <key>NSCameraUsageDescription</key>
47 | <string>Recognize things</string>
48 | </dict>
49 | </plist>
50 |
--------------------------------------------------------------------------------
/tensorswift/Tensorswift/SeenObjectViewController.swift:
--------------------------------------------------------------------------------
1 | //
2 | // SeenObjectViewController.swift
3 | // Tensorswift
4 | //
5 | // Created by Morten Just Petersen on 3/11/17.
6 | // Copyright © 2017 Morten Just Petersen. All rights reserved.
7 | //
8 |
9 | import UIKit
10 | import WebKit
11 |
12 | class SeenObjectViewController: UIViewController {
13 |
14 | var urlToLoad:String!
15 | var webView:WKWebView!
16 |
17 |
18 | override func viewDidLoad() {
19 | super.viewDidLoad()
20 |
21 | if urlToLoad == nil { dismiss(animated: true, completion: {
22 | print("! Dismissed, no URL")
23 | }) }
24 |
25 | print("Seen Object. Loading url: \(urlToLoad)")
26 |
27 | webView = WKWebView(frame: view.frame)
28 | view = webView
29 | // view.addSubview(webView)
30 |
31 |
32 | webView.load(
33 | URLRequest(url: URL(string: urlToLoad!)!
34 | ))
35 |
36 | // Do any additional setup after loading the view.
37 | }
38 |
39 | override func didReceiveMemoryWarning() {
40 | super.didReceiveMemoryWarning()
41 | // Dispose of any resources that can be recreated.
42 | }
43 |
44 |
45 | /*
46 | // MARK: - Navigation
47 |
48 | // In a storyboard-based application, you will often want to do a little preparation before navigation
49 | override func prepare(for segue: UIStoryboardSegue, sender: Any?) {
50 | // Get the new view controller using segue.destinationViewController.
51 | // Pass the selected object to the new view controller.
52 | }
53 | */
54 |
55 | }
56 |
--------------------------------------------------------------------------------
/tensorswift/Tensorswift/TensorBridge.h:
--------------------------------------------------------------------------------
1 | #import <Foundation/Foundation.h>
2 | #import <AVFoundation/AVFoundation.h>
3 |
4 | #include <memory>
5 |
6 |
7 | @class TensorBridge;
8 |
9 | @protocol TensorDelegate
10 | -(void)tensorLabelListUpdated:(NSDictionary*)devices;
11 | @end
12 |
13 | @interface TensorBridge : NSObject
14 | {
15 |
16 | // std::unique_ptr<tensorflow::Session> tf_session;
17 | // std::unique_ptr<tensorflow::MemmappedEnv> tf_memmapped_env;
18 | // std::vector<std::string> labels;
19 | // NSMutableDictionary *oldPredictionValues;
20 |
21 | }
22 | @property (assign) id <TensorDelegate> delegate;
23 | - (void)runCNNOnFrame:(CVPixelBufferRef)pixelBuffer;
24 | - (void)loadModel;
25 |
26 | @end
27 |
--------------------------------------------------------------------------------
/tensorswift/Tensorswift/ViewController.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ViewController.swift
3 | // Tensorswift
4 | //
5 | // Created by Morten Just Petersen on 1/9/17.
6 | // Copyright © 2017 Morten Just Petersen. All rights reserved.
7 | //
8 |
9 | import UIKit
10 | import AVFoundation
11 |
12 | class ViewController: UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate, TensorDelegate {
13 |
14 | @IBOutlet weak var previewView: UIView!
15 | var bridge:TensorBridge = TensorBridge()
16 |
17 |
18 | private var videoCapture: VideoCapture!
19 | private var ciContext : CIContext!
20 |
21 | override func viewWillAppear(_ animated: Bool) {
22 | super.viewWillAppear(animated)
23 | guard let videoCapture = videoCapture else {return}
24 | videoCapture.startCapture()
25 | }
26 |
27 |
28 |
29 | override func viewDidLoad() {
30 | super.viewDidLoad()
31 |
32 | bridge.loadModel()
33 | bridge.delegate = self
34 |
35 | let spec = VideoSpec(fps: 3, size: CGSize(width: 640, height: 480))
36 | videoCapture = VideoCapture(cameraType: .back,
37 | preferredSpec: spec,
38 | previewContainer: previewView.layer)
39 |
40 | videoCapture.imageBufferHandler = {[unowned self] (imageBuffer, timestamp, outputBuffer) in
41 | self.bridge.runCNN(onFrame: imageBuffer)
42 | }
43 | }
44 |
45 | override func viewDidAppear(_ animated: Bool) {
46 | // Test individual labels here
47 |
48 | // presentSeenObject(label: "peanut")
49 | }
50 |
51 |
52 | // seen objects enter here
53 |
54 | func tensorLabelListUpdated(_ recognizedObjects:[AnyHashable : Any]){
55 |
56 | for seenObject in recognizedObjects {
57 | let label = String(describing: seenObject.key)
58 | let confidence = seenObject.value as! Double
59 |
60 | let conPct = (confidence * 100).rounded()
61 |
62 | // change the debug confidence here
63 | if confidence > 0.45 {
64 | print("\(conPct)% sure that's a \(label)")
65 | }
66 |
67 | // change the trigger confidence in the Config file
68 | if confidence > Config.confidence {
69 | presentSeenObject(label: label)
70 | }
71 | }
72 | }
73 |
74 |
75 | func presentSeenObject(label:String){
76 |
77 |
78 | // Create a ViewController that shows a web page
79 | // You can do your own thing here, like your own view controller, or
80 | // just show something in this viewcontroller
81 |
82 | let vc = UIStoryboard(name: "Main", bundle: nil).instantiateViewController(withIdentifier: "webView") as! SeenObjectViewController
83 |
84 | // is this label defined?
85 | if let url = Config.seeThisOpenThat[label] {
86 | vc.urlToLoad = url
87 |
88 | } else {
89 | // not defined explicitly, see if there is a catch-all
90 |
91 | if let catchAll = Config.seeThisOpenThat["catch-all"] {
92 |
93 | // change - with spaces in label. You can remove this
94 | var l = label.replacingOccurrences(of: "-", with: " ")
95 |
96 | // make the label URL friendly
97 | l = l.addingPercentEncoding(withAllowedCharacters: .urlPathAllowed)!
98 |
99 | // Replace %s with the label
100 | let u = catchAll.replacingOccurrences(of: "%s", with: l)
101 |
102 | vc.urlToLoad = u
103 | } else {
104 | // not even the catch-all is in config.
105 | // Let's just improvise. Maybe a custom thing.
106 |
107 | vc.urlToLoad = "https://www.amazon.com/s/ref=nb_sb_noss?url=search-alias%3Daps&field-keywords=\(label)"
108 | }
109 | }
110 |
111 |
112 |
113 | self.present(vc, animated: false, completion: nil)
114 | }
115 |
116 |
117 | override func didReceiveMemoryWarning() {
118 | super.didReceiveMemoryWarning()
119 | // Dispose of any resources that can be recreated.
120 | }
121 | }
122 |
123 |
--------------------------------------------------------------------------------
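Two thresholds act on every recognized label: the hard-coded 0.45 above only gates the debug print, while `Config.confidence` (0.8 by default) decides whether the web view is actually presented. So a label seen at 0.6 confidence is printed but not acted on; a sketch of the arithmetic:

```swift
// Sketch of the two-threshold behavior in tensorLabelListUpdated.
let confidence = 0.6
let conPct = (confidence * 100).rounded()     // 60.0
if confidence > 0.45 {                        // debug threshold: fires
    print("\(conPct)% sure that's a usb-c")
}
if confidence > Config.confidence {           // trigger threshold (0.8): does not fire
    // presentSeenObject(label: "usb-c") would open the web view here
}
```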
/tensorswift/VideoCapture.swift:
--------------------------------------------------------------------------------
1 | import AVFoundation
2 | import Foundation
3 |
4 |
5 | struct VideoSpec {
6 | var fps: Int32?
7 | var size: CGSize?
8 | }
9 |
10 | typealias ImageBufferHandler = ((_ imageBuffer: CVPixelBuffer, _ timestamp: CMTime, _ outputBuffer: CVPixelBuffer?) -> ())
11 |
12 | class VideoCapture: NSObject, AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAudioDataOutputSampleBufferDelegate {
13 |
14 | private let captureSession = AVCaptureSession()
15 | private var videoDevice: AVCaptureDevice!
16 | private var videoConnection: AVCaptureConnection!
17 | private var audioConnection: AVCaptureConnection!
18 | private var previewLayer: AVCaptureVideoPreviewLayer?
19 |
20 | var imageBufferHandler: ImageBufferHandler?
21 |
22 | init(cameraType: CameraType, preferredSpec: VideoSpec?, previewContainer: CALayer?)
23 | {
24 | super.init()
25 |
26 | videoDevice = cameraType.captureDevice()
27 |
28 | // setup video format
29 | do {
30 | captureSession.sessionPreset = AVCaptureSessionPresetInputPriority
31 | if let preferredSpec = preferredSpec {
32 | // update the format with a preferred fps
33 |
34 | videoDevice.updateFormatWithPreferredVideoSpec(preferredSpec: preferredSpec)
35 | }
36 | }
37 |
38 | // setup video device input
39 | do {
40 | let videoDeviceInput: AVCaptureDeviceInput
41 | do {
42 | videoDeviceInput = try AVCaptureDeviceInput(device: videoDevice)
43 | }
44 | catch {
45 | fatalError("Could not create AVCaptureDeviceInput instance with error: \(error).")
46 | }
47 | guard captureSession.canAddInput(videoDeviceInput) else {
48 | fatalError()
49 | }
50 | captureSession.addInput(videoDeviceInput)
51 | }
52 |
53 | // setup preview
54 | if let previewContainer = previewContainer {
55 | guard let previewLayer = AVCaptureVideoPreviewLayer(session: captureSession) else {fatalError()}
56 | previewLayer.frame = previewContainer.bounds
57 | previewLayer.contentsGravity = kCAGravityResizeAspectFill
58 | previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill
59 | previewContainer.insertSublayer(previewLayer, at: 0)
60 | self.previewLayer = previewLayer
61 | }
62 |
63 | // setup video output
64 | do {
65 | let videoDataOutput = AVCaptureVideoDataOutput()
66 | videoDataOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey as AnyHashable: NSNumber(value: kCVPixelFormatType_32BGRA)]
67 | videoDataOutput.alwaysDiscardsLateVideoFrames = true
68 | let queue = DispatchQueue(label: "com.shu223.videosamplequeue")
69 | videoDataOutput.setSampleBufferDelegate(self, queue: queue)
70 | guard captureSession.canAddOutput(videoDataOutput) else {
71 | fatalError()
72 | }
73 | captureSession.addOutput(videoDataOutput)
74 |
75 | videoConnection = videoDataOutput.connection(withMediaType: AVMediaTypeVideo)
76 | }
77 |
78 | // setup audio output
79 | do {
80 | let audioDataOutput = AVCaptureAudioDataOutput()
81 | let queue = DispatchQueue(label: "com.shu223.audiosamplequeue")
82 | audioDataOutput.setSampleBufferDelegate(self, queue: queue)
83 | guard captureSession.canAddOutput(audioDataOutput) else {
84 | fatalError()
85 | }
86 | captureSession.addOutput(audioDataOutput)
87 |
88 | audioConnection = audioDataOutput.connection(withMediaType: AVMediaTypeAudio)
89 | }
90 |
91 | // setup asset writer
92 | do {
93 | }
94 | /*
95 | // Asset Writer
96 | self.assetWriterManager = [[TTMAssetWriterManager alloc] initWithVideoDataOutput:videoDataOutput
97 | audioDataOutput:audioDataOutput
98 | preferredSize:preferredSize
99 | mirrored:(cameraType == CameraTypeFront)];
100 | */
101 | }
102 |
103 | func startCapture() {
104 | print("\(self.classForCoder)/" + #function)
105 | if captureSession.isRunning {
106 | print("already running")
107 | return
108 | }
109 | captureSession.startRunning()
110 | }
111 |
112 | func stopCapture() {
113 | print("\(self.classForCoder)/" + #function)
114 | if !captureSession.isRunning {
115 | print("already stopped")
116 | return
117 | }
118 | captureSession.stopRunning()
119 | }
120 |
121 | func resizePreview() {
122 | if let previewLayer = previewLayer {
123 | guard let superlayer = previewLayer.superlayer else {return}
124 | previewLayer.frame = superlayer.bounds
125 | }
126 | }
127 |
128 | // =========================================================================
129 | // MARK: - AVCaptureVideoDataOutputSampleBufferDelegate
130 |
131 | func captureOutput(_ captureOutput: AVCaptureOutput!, didDrop sampleBuffer: CMSampleBuffer!, from connection: AVCaptureConnection!) {
132 | // print("\(self.classForCoder)/" + #function)
133 | }
134 |
135 | func captureOutput(_ captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, from connection: AVCaptureConnection!)
136 | {
137 | // FIXME: temp
138 | if connection.videoOrientation != .portrait {
139 | connection.videoOrientation = .portrait
140 | return
141 | }
142 |
143 | if let imageBufferHandler = imageBufferHandler, let imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) , connection == videoConnection
144 | {
145 | let timestamp = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)
146 | imageBufferHandler(imageBuffer, timestamp, nil)
147 | }
148 | }
149 | }
150 |
151 |
152 | enum CameraType : Int {
153 | case back
154 | case front
155 |
156 | func captureDevice() -> AVCaptureDevice {
157 | switch self {
158 | case .front:
159 | guard let devices = AVCaptureDeviceDiscoverySession(deviceTypes: [], mediaType: AVMediaTypeVideo, position: .front).devices else {break}
160 | print("devices:\(devices)")
161 | for device in devices where device.position == .front {
162 | return device
163 | }
164 | default:
165 | break
166 | }
167 | return AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeVideo)
168 | }
169 | }
170 |
--------------------------------------------------------------------------------
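For reference, the smallest standalone use of `VideoCapture` mirrors what `ViewController.viewDidLoad` does: build it with a spec and an optional preview layer, assign `imageBufferHandler`, and start. A sketch (passing `nil` for the preview container, which the initializer allows):

```swift
import AVFoundation

let spec = VideoSpec(fps: 3, size: CGSize(width: 640, height: 480))
let capture = VideoCapture(cameraType: .back,
                           preferredSpec: spec,
                           previewContainer: nil)  // nil skips the preview layer
capture.imageBufferHandler = { imageBuffer, timestamp, _ in
    // imageBuffer is a 32BGRA CVPixelBuffer, delivered on the sample-buffer queue
    print("frame at \(CMTimeGetSeconds(timestamp)) s")
}
capture.startCapture()
```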
/tensorswift/ios_image_load.h:
--------------------------------------------------------------------------------
1 | // Copyright 2015 Google Inc. All rights reserved.
2 | //
3 | // Licensed under the Apache License, Version 2.0 (the "License");
4 | // you may not use this file except in compliance with the License.
5 | // You may obtain a copy of the License at
6 | //
7 | // http://www.apache.org/licenses/LICENSE-2.0
8 | //
9 | // Unless required by applicable law or agreed to in writing, software
10 | // distributed under the License is distributed on an "AS IS" BASIS,
11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | // See the License for the specific language governing permissions and
13 | // limitations under the License.
14 |
15 | #ifndef TENSORFLOW_CONTRIB_IOS_EXAMPLES_CAMERA_IMAGE_LOAD_H_
16 | #define TENSORFLOW_CONTRIB_IOS_EXAMPLES_CAMERA_IMAGE_LOAD_H_
17 |
18 | #include <vector>
19 |
20 | #include "tensorflow/core/framework/types.h"
21 |
22 | std::vector<tensorflow::uint8> LoadImageFromFile(const char* file_name,
23 | int* out_width,
24 | int* out_height,
25 | int* out_channels);
26 |
27 | #endif // TENSORFLOW_CONTRIB_IOS_EXAMPLES_CAMERA_IMAGE_LOAD_H_
28 |
--------------------------------------------------------------------------------
/tensorswift/ios_image_load.mm:
--------------------------------------------------------------------------------
1 | // Copyright 2015 Google Inc. All rights reserved.
2 | //
3 | // Licensed under the Apache License, Version 2.0 (the "License");
4 | // you may not use this file except in compliance with the License.
5 | // You may obtain a copy of the License at
6 | //
7 | // http://www.apache.org/licenses/LICENSE-2.0
8 | //
9 | // Unless required by applicable law or agreed to in writing, software
10 | // distributed under the License is distributed on an "AS IS" BASIS,
11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | // See the License for the specific language governing permissions and
13 | // limitations under the License.
14 |
15 | #include "ios_image_load.h"
16 |
17 | #include <stdlib.h>
18 | #include <string.h>
19 | #include <assert.h>
20 | #include <stdio.h>
21 |
22 | #import <CoreImage/CoreImage.h>
23 | #import <ImageIO/ImageIO.h>
24 |
25 | using tensorflow::uint8;
26 |
27 | std::vector<tensorflow::uint8> LoadImageFromFile(const char* file_name,
28 | int* out_width, int* out_height,
29 | int* out_channels) {
30 | FILE* file_handle = fopen(file_name, "rb");
31 | fseek(file_handle, 0, SEEK_END);
32 | const size_t bytes_in_file = ftell(file_handle);
33 | fseek(file_handle, 0, SEEK_SET);
34 |   std::vector<uint8> file_data(bytes_in_file);
35 | fread(file_data.data(), 1, bytes_in_file, file_handle);
36 | fclose(file_handle);
37 | CFDataRef file_data_ref = CFDataCreateWithBytesNoCopy(NULL, file_data.data(),
38 | bytes_in_file,
39 | kCFAllocatorNull);
40 | CGDataProviderRef image_provider =
41 | CGDataProviderCreateWithCFData(file_data_ref);
42 |
43 | const char* suffix = strrchr(file_name, '.');
44 | if (!suffix || suffix == file_name) {
45 | suffix = "";
46 | }
47 | CGImageRef image;
48 | if (strcasecmp(suffix, ".png") == 0) {
49 | image = CGImageCreateWithPNGDataProvider(image_provider, NULL, true,
50 | kCGRenderingIntentDefault);
51 | } else if ((strcasecmp(suffix, ".jpg") == 0) ||
52 | (strcasecmp(suffix, ".jpeg") == 0)) {
53 | image = CGImageCreateWithJPEGDataProvider(image_provider, NULL, true,
54 | kCGRenderingIntentDefault);
55 | } else {
56 | CFRelease(image_provider);
57 | CFRelease(file_data_ref);
58 | fprintf(stderr, "Unknown suffix for file '%s'\n", file_name);
59 | *out_width = 0;
60 | *out_height = 0;
61 | *out_channels = 0;
62 |     return std::vector<uint8>();
63 | }
64 |
65 | const int width = (int)CGImageGetWidth(image);
66 | const int height = (int)CGImageGetHeight(image);
67 | const int channels = 4;
68 | CGColorSpaceRef color_space = CGColorSpaceCreateDeviceRGB();
69 | const int bytes_per_row = (width * channels);
70 | const int bytes_in_image = (bytes_per_row * height);
71 |   std::vector<uint8> result(bytes_in_image);
72 | const int bits_per_component = 8;
73 | CGContextRef context = CGBitmapContextCreate(result.data(), width, height,
74 | bits_per_component, bytes_per_row, color_space,
75 | kCGImageAlphaPremultipliedLast | kCGBitmapByteOrder32Big);
76 | CGColorSpaceRelease(color_space);
77 | CGContextDrawImage(context, CGRectMake(0, 0, width, height), image);
78 | CGContextRelease(context);
79 | CFRelease(image);
80 | CFRelease(image_provider);
81 | CFRelease(file_data_ref);
82 |
83 | *out_width = width;
84 | *out_height = height;
85 | *out_channels = channels;
86 | return result;
87 | }
88 |
--------------------------------------------------------------------------------
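
A minimal sketch of how the loader above might be called (not part of the repo; the wrapper function and its path argument are hypothetical):

```cpp
#include <cstdio>
#include <vector>

#include "ios_image_load.h"

// Hypothetical caller: decode an image file and report its dimensions.
void PrintImageInfo(const char* path) {
  int width = 0, height = 0, channels = 0;
  std::vector<tensorflow::uint8> pixels =
      LoadImageFromFile(path, &width, &height, &channels);
  // On success the vector holds width * height * 4 RGBA bytes, because the
  // loader always draws into a 4-channel premultiplied bitmap context.
  std::printf("%dx%d, %d channels, %zu bytes\n",
              width, height, channels, pixels.size());
}
```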
/tensorswift/libprotobuf-lite.a:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mortenjust/tensorswift-ios/ff1d25448deffdb083041f4f9888b036112230a0/tensorswift/libprotobuf-lite.a
--------------------------------------------------------------------------------
/tensorswift/libprotobuf.a:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mortenjust/tensorswift-ios/ff1d25448deffdb083041f4f9888b036112230a0/tensorswift/libprotobuf.a
--------------------------------------------------------------------------------
/tensorswift/retrained_graph_stripped.pb:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/mortenjust/tensorswift-ios/ff1d25448deffdb083041f4f9888b036112230a0/tensorswift/retrained_graph_stripped.pb
--------------------------------------------------------------------------------
/tensorswift/retrained_labels.txt:
--------------------------------------------------------------------------------
1 | usb c
2 | usb female
3 | apple usb mini
4 | apple magnet
5 | 110v grounded female
6 | 110v grounded
7 | 110 female
8 | usb
9 | usb c female
10 | apple laptop converted
11 | 110v
12 | network female
13 | apple usb charger
14 | apple laptop converter
15 | thunderbolt
16 | 110v charger
17 | jack
18 | jack female
19 | mini usb
20 | lightning
21 | jack to lightning
22 | micro usb
23 | eightshape power
24 |
--------------------------------------------------------------------------------
/tensorswift/tensorflow_utils.h:
--------------------------------------------------------------------------------
1 | // Copyright 2015 Google Inc. All rights reserved.
2 | //
3 | // Licensed under the Apache License, Version 2.0 (the "License");
4 | // you may not use this file except in compliance with the License.
5 | // You may obtain a copy of the License at
6 | //
7 | // http://www.apache.org/licenses/LICENSE-2.0
8 | //
9 | // Unless required by applicable law or agreed to in writing, software
10 | // distributed under the License is distributed on an "AS IS" BASIS,
11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | // See the License for the specific language governing permissions and
13 | // limitations under the License.
14 |
15 | #ifndef TENSORFLOW_CONTRIB_IOS_EXAMPLES_CAMERA_TENSORFLOW_UTILS_H_
16 | #define TENSORFLOW_CONTRIB_IOS_EXAMPLES_CAMERA_TENSORFLOW_UTILS_H_
17 |
18 | #include <memory>
19 | #include <vector>
20 |
21 | #include "tensorflow/core/public/session.h"
22 | #include "tensorflow/core/util/memmapped_file_system.h"
23 | #include "third_party/eigen3/unsupported/Eigen/CXX11/Tensor"
24 |
25 | // Reads a serialized GraphDef protobuf file from the bundle, typically
26 | // created with the freeze_graph script. Populates the session argument with a
27 | // Session object that has the model loaded.
28 | tensorflow::Status LoadModel(NSString* file_name, NSString* file_type,
29 |                              std::unique_ptr<tensorflow::Session>* session);
30 |
31 | // Loads a model from a file that has been created using the
32 | // convert_graphdef_memmapped_format tool. This bundles together a GraphDef
33 | // proto together with a file that can be memory-mapped, containing the weight
34 | // parameters for the model. This is useful because it reduces the overall
35 | // memory pressure, since the read-only parameter regions can be easily paged
36 | // out and don't count toward memory limits on iOS.
37 | tensorflow::Status LoadMemoryMappedModel(
38 |     NSString* file_name, NSString* file_type,
39 |     std::unique_ptr<tensorflow::Session>* session,
40 |     std::unique_ptr<tensorflow::MemmappedEnv>* memmapped_env);
41 |
42 | // Takes a text file with a single label on each line, and returns a list.
43 | tensorflow::Status LoadLabels(NSString* file_name, NSString* file_type,
44 |                               std::vector<std::string>* label_strings);
45 |
46 | // Sorts the results from a model execution, and returns the highest scoring.
47 | void GetTopN(const Eigen::TensorMap<Eigen::Tensor<float, 1, Eigen::RowMajor>,
48 |                                     Eigen::Aligned>& prediction,
49 |              const int num_results, const float threshold,
50 |              std::vector<std::pair<float, int> >* top_results);
51 |
52 | #endif // TENSORFLOW_CONTRIB_IOS_EXAMPLES_CAMERA_TENSORFLOW_UTILS_H_
53 |
--------------------------------------------------------------------------------
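
To make the two loading paths above concrete, here is a hedged Objective-C++ sketch. The memory-mapped resource name is an assumption for illustration; only retrained_graph_stripped.pb ships with this repo:

```cpp
#import <Foundation/Foundation.h>

#include <memory>

#include "tensorflow/core/platform/logging.h"
#include "tensorflow_utils.h"

static std::unique_ptr<tensorflow::Session> session;
static std::unique_ptr<tensorflow::MemmappedEnv> memmapped_env;

void SetUpSession(bool use_memory_mapping) {
  tensorflow::Status status;
  if (use_memory_mapping) {
    // Lower peak memory: weights stay in a read-only, memory-mapped file.
    // "retrained_graph_memmapped" is a hypothetical resource name.
    status = LoadMemoryMappedModel(@"retrained_graph_memmapped", @"pb",
                                   &session, &memmapped_env);
  } else {
    // The simpler path: parse the whole GraphDef into memory (the repo
    // bundles retrained_graph_stripped.pb for this).
    status = LoadModel(@"retrained_graph_stripped", @"pb", &session);
  }
  if (!status.ok()) {
    LOG(ERROR) << "Model load failed: " << status;
  }
}
```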
/tensorswift/tensorflow_utils.mm:
--------------------------------------------------------------------------------
1 | // Copyright 2015 Google Inc. All rights reserved.
2 | //
3 | // Licensed under the Apache License, Version 2.0 (the "License");
4 | // you may not use this file except in compliance with the License.
5 | // You may obtain a copy of the License at
6 | //
7 | // http://www.apache.org/licenses/LICENSE-2.0
8 | //
9 | // Unless required by applicable law or agreed to in writing, software
10 | // distributed under the License is distributed on an "AS IS" BASIS,
11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | // See the License for the specific language governing permissions and
13 | // limitations under the License.
14 |
15 | #import <Foundation/Foundation.h>
16 |
17 | #include "tensorflow_utils.h"
18 |
19 | #include <pthread.h>
20 | #include <unistd.h>
21 | #include <fstream>
22 | #include <queue>
23 | #include <sstream>
24 | #include <string>
25 |
26 | #include "google/protobuf/io/coded_stream.h"
27 | #include "google/protobuf/io/zero_copy_stream_impl.h"
28 | #include "google/protobuf/io/zero_copy_stream_impl_lite.h"
29 | #include "google/protobuf/message_lite.h"
30 | #include "tensorflow/core/framework/tensor.h"
31 | #include "tensorflow/core/framework/types.pb.h"
32 | #include "tensorflow/core/platform/env.h"
33 | #include "tensorflow/core/platform/logging.h"
34 | #include "tensorflow/core/platform/mutex.h"
35 | #include "tensorflow/core/platform/types.h"
36 | #include "tensorflow/core/public/session.h"
37 |
38 | namespace {
39 |
40 | // Helper class used to load protobufs efficiently.
41 | class IfstreamInputStream : public ::google::protobuf::io::CopyingInputStream {
42 | public:
43 | explicit IfstreamInputStream(const std::string& file_name)
44 | : ifs_(file_name.c_str(), std::ios::in | std::ios::binary) {}
45 | ~IfstreamInputStream() { ifs_.close(); }
46 |
47 | int Read(void* buffer, int size) {
48 | if (!ifs_) {
49 | return -1;
50 | }
51 |     ifs_.read(static_cast<char*>(buffer), size);
52 | return ifs_.gcount();
53 | }
54 |
55 | private:
56 | std::ifstream ifs_;
57 | };
58 | } // namespace
59 |
60 | // Returns the top N confidence values over threshold in the provided vector,
61 | // sorted by confidence in descending order.
62 | void GetTopN(const Eigen::TensorMap<Eigen::Tensor<float, 1, Eigen::RowMajor>,
63 |                                     Eigen::Aligned>& prediction,
64 |              const int num_results, const float threshold,
65 |              std::vector<std::pair<float, int> >* top_results) {
66 |   // Will contain top N results in ascending order.
67 |   std::priority_queue<std::pair<float, int>,
68 |                       std::vector<std::pair<float, int> >,
69 |                       std::greater<std::pair<float, int> > >
70 |       top_result_pq;
71 |
72 | const int count = prediction.size();
73 | for (int i = 0; i < count; ++i) {
74 | const float value = prediction(i);
75 |
76 | // Only add it if it beats the threshold and has a chance at being in
77 | // the top N.
78 | if (value < threshold) {
79 | continue;
80 | }
81 |
82 |     top_result_pq.push(std::pair<float, int>(value, i));
83 |
84 | // If at capacity, kick the smallest value out.
85 | if (top_result_pq.size() > num_results) {
86 | top_result_pq.pop();
87 | }
88 | }
89 |
90 | // Copy to output vector and reverse into descending order.
91 | while (!top_result_pq.empty()) {
92 | top_results->push_back(top_result_pq.top());
93 | top_result_pq.pop();
94 | }
95 | std::reverse(top_results->begin(), top_results->end());
96 | }
97 |
98 | bool PortableReadFileToProto(const std::string& file_name,
99 | ::google::protobuf::MessageLite* proto) {
100 | ::google::protobuf::io::CopyingInputStreamAdaptor stream(
101 | new IfstreamInputStream(file_name));
102 | stream.SetOwnsCopyingStream(true);
103 | ::google::protobuf::io::CodedInputStream coded_stream(&stream);
104 | // Total bytes hard limit / warning limit are set to 1GB and 512MB
105 | // respectively.
106 | coded_stream.SetTotalBytesLimit(1024LL << 20, 512LL << 20);
107 | return proto->ParseFromCodedStream(&coded_stream);
108 | }
109 |
110 | NSString* FilePathForResourceName(NSString* name, NSString* extension) {
111 | NSString* file_path =
112 | [[NSBundle mainBundle] pathForResource:name ofType:extension];
113 | if (file_path == NULL) {
114 | LOG(FATAL) << "Couldn't find '" << [name UTF8String] << "."
115 | << [extension UTF8String] << "' in bundle.";
116 | return nullptr;
117 | }
118 | return file_path;
119 | }
120 |
121 | tensorflow::Status LoadModel(NSString* file_name, NSString* file_type,
122 |                              std::unique_ptr<tensorflow::Session>* session) {
123 | tensorflow::SessionOptions options;
124 |
125 | tensorflow::Session* session_pointer = nullptr;
126 | tensorflow::Status session_status =
127 | tensorflow::NewSession(options, &session_pointer);
128 | if (!session_status.ok()) {
129 | LOG(ERROR) << "Could not create TensorFlow Session: " << session_status;
130 | return session_status;
131 | }
132 | session->reset(session_pointer);
133 |
134 | tensorflow::GraphDef tensorflow_graph;
135 |
136 | NSString* model_path = FilePathForResourceName(file_name, file_type);
137 | if (!model_path) {
138 |     LOG(ERROR) << "Failed to find model proto at " << [file_name UTF8String]
139 |                << "." << [file_type UTF8String];
140 | return tensorflow::errors::NotFound([file_name UTF8String],
141 | [file_type UTF8String]);
142 | }
143 | const bool read_proto_succeeded =
144 | PortableReadFileToProto([model_path UTF8String], &tensorflow_graph);
145 | if (!read_proto_succeeded) {
146 |     LOG(ERROR) << "Failed to load model proto from " << [model_path UTF8String];
147 | return tensorflow::errors::NotFound([model_path UTF8String]);
148 | }
149 |
150 | tensorflow::Status create_status = (*session)->Create(tensorflow_graph);
151 | if (!create_status.ok()) {
152 | LOG(ERROR) << "Could not create TensorFlow Graph: " << create_status;
153 | return create_status;
154 | }
155 |
156 | return tensorflow::Status::OK();
157 | }
158 |
159 | tensorflow::Status LoadMemoryMappedModel(
160 | NSString* file_name, NSString* file_type,
161 |     std::unique_ptr<tensorflow::Session>* session,
162 |     std::unique_ptr<tensorflow::MemmappedEnv>* memmapped_env) {
163 | NSString* network_path = FilePathForResourceName(file_name, file_type);
164 | memmapped_env->reset(
165 | new tensorflow::MemmappedEnv(tensorflow::Env::Default()));
166 | tensorflow::Status mmap_status =
167 | (memmapped_env->get())->InitializeFromFile([network_path UTF8String]);
168 | if (!mmap_status.ok()) {
169 | LOG(ERROR) << "MMap failed with " << mmap_status.error_message();
170 | return mmap_status;
171 | }
172 |
173 | tensorflow::GraphDef tensorflow_graph;
174 | tensorflow::Status load_graph_status = ReadBinaryProto(
175 | memmapped_env->get(),
176 | tensorflow::MemmappedFileSystem::kMemmappedPackageDefaultGraphDef,
177 | &tensorflow_graph);
178 | if (!load_graph_status.ok()) {
179 | LOG(ERROR) << "MMap load graph failed with "
180 | << load_graph_status.error_message();
181 | return load_graph_status;
182 | }
183 |
184 | tensorflow::SessionOptions options;
185 | // Disable optimizations on this graph so that constant folding doesn't
186 | // increase the memory footprint by creating new constant copies of the weight
187 | // parameters.
188 | options.config.mutable_graph_options()
189 | ->mutable_optimizer_options()
190 | ->set_opt_level(::tensorflow::OptimizerOptions::L0);
191 | options.env = memmapped_env->get();
192 |
193 | tensorflow::Session* session_pointer = nullptr;
194 | tensorflow::Status session_status =
195 | tensorflow::NewSession(options, &session_pointer);
196 | if (!session_status.ok()) {
197 | LOG(ERROR) << "Could not create TensorFlow Session: " << session_status;
198 | return session_status;
199 | }
200 |
201 | tensorflow::Status create_status = session_pointer->Create(tensorflow_graph);
202 | if (!create_status.ok()) {
203 | LOG(ERROR) << "Could not create TensorFlow Graph: " << create_status;
204 | return create_status;
205 | }
206 |
207 | session->reset(session_pointer);
208 |
209 | return tensorflow::Status::OK();
210 | }
211 |
212 | tensorflow::Status LoadLabels(NSString* file_name, NSString* file_type,
213 |                               std::vector<std::string>* label_strings) {
214 | // Read the label list
215 | NSString* labels_path = FilePathForResourceName(file_name, file_type);
216 | if (!labels_path) {
217 |     LOG(ERROR) << "Failed to find labels file at " << [file_name UTF8String]
218 |                << "." << [file_type UTF8String];
219 | return tensorflow::errors::NotFound([file_name UTF8String],
220 | [file_type UTF8String]);
221 | }
222 | std::ifstream t;
223 | t.open([labels_path UTF8String]);
224 | std::string line;
225 |   // Guard on getline so the trailing blank line isn't added as a label.
226 |   while (std::getline(t, line)) {
227 |     label_strings->push_back(line);
228 |   }
229 | t.close();
230 | return tensorflow::Status::OK();
231 | }
232 |
--------------------------------------------------------------------------------
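
Putting the utilities together, a hedged end-to-end sketch of classification. The tensor names "input" and "final_result" are assumptions typical of a retrained graph, not verified against the bundled model, and the glue function itself is hypothetical:

```cpp
#include <string>
#include <utility>
#include <vector>

#include "tensorflow/core/platform/logging.h"
#include "tensorflow_utils.h"

// Hypothetical glue: run one image tensor through the session and log the
// five best labels scoring above a 10% confidence floor.
void ClassifyTensor(tensorflow::Session* session,
                    const tensorflow::Tensor& image_tensor,
                    const std::vector<std::string>& labels) {
  std::vector<tensorflow::Tensor> outputs;
  tensorflow::Status run_status = session->Run(
      {{"input", image_tensor}}, {"final_result"}, {}, &outputs);
  if (!run_status.ok() || outputs.empty()) {
    LOG(ERROR) << "Running model failed: " << run_status;
    return;
  }
  // flat<float>() yields the Eigen view GetTopN expects.
  auto predictions = outputs[0].flat<float>();
  std::vector<std::pair<float, int> > top_results;
  GetTopN(predictions, 5, 0.1f, &top_results);
  for (const auto& result : top_results) {
    // Line i of retrained_labels.txt names output index i.
    const size_t index = static_cast<size_t>(result.second);
    const std::string label =
        index < labels.size() ? labels[index] : "unknown";
    LOG(INFO) << label << ": " << result.first;
  }
}
```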