├── .gitignore
├── CODE_OF_CONDUCT.md
├── CONTRIBUTING.md
├── LICENSE
├── Podfile
├── Podfile.lock
├── README.md
├── docs
│   ├── gifs
│   │   └── senseiOS_gesture.gif
│   └── imgs
│       └── sense_iOS_logo.svg
├── sense-iOS.xcodeproj
│   ├── project.pbxproj
│   ├── project.xcworkspace
│   │   ├── contents.xcworkspacedata
│   │   ├── xcshareddata
│   │   │   └── IDEWorkspaceChecks.plist
│   │   └── xcuserdata
│   │       ├── amercier.xcuserdatad
│   │       │   └── UserInterfaceState.xcuserstate
│   │       └── sunny.xcuserdatad
│   │           └── UserInterfaceState.xcuserstate
│   └── xcuserdata
│       ├── amercier.xcuserdatad
│       │   └── xcschemes
│       │       └── xcschememanagement.plist
│       └── sunny.xcuserdatad
│           └── xcschemes
│               └── xcschememanagement.plist
├── sense-iOS.xcworkspace
│   ├── contents.xcworkspacedata
│   └── xcshareddata
│       └── IDEWorkspaceChecks.plist
└── sense-iOS
    ├── AppDelegate.swift
    ├── Array+argmin.swift
    ├── Assets.xcassets
    │   ├── AccentColor.colorset
    │   │   └── Contents.json
    │   ├── AppIcon.appiconset
    │   │   └── Contents.json
    │   └── Contents.json
    ├── Base.lproj
    │   ├── LaunchScreen.storyboard
    │   └── Main.storyboard
    ├── CameraTurnedOffViewController.swift
    ├── InferenceLocal.swift
    ├── InferenceModel.swift
    ├── Info.plist
    ├── MotionManager.swift
    ├── VideoRecorder
    │   ├── CVPixelBuffer+Helpers.swift
    │   └── FrameExtraction.swift
    ├── ViewController.swift
    └── sensenet_labels.json
/.gitignore:
--------------------------------------------------------------------------------
1 | .DS_Store
2 | */model.tflite
3 |
4 | # Xcode
5 | build/
6 | *.pbxuser
7 | !default.pbxuser
8 | *.mode1v3
9 | !default.mode1v3
10 | *.mode2v3
11 | !default.mode2v3
12 | *.perspectivev3
13 | !default.perspectivev3
14 | xcuserdata
15 | *.xccheckout
16 | *.moved-aside
17 | DerivedData
18 | *.hmap
19 | *.ipa
20 | *.xcuserstate
21 | project.xcworkspace
22 |
23 | # CocoaPods
24 | /Pods/
25 |
--------------------------------------------------------------------------------
/CODE_OF_CONDUCT.md:
--------------------------------------------------------------------------------
1 |
2 | # Contributor Covenant Code of Conduct
3 |
4 | ## Our Pledge
5 |
6 | We as members, contributors, and leaders pledge to make participation in our
7 | community a harassment-free experience for everyone, regardless of age, body
8 | size, visible or invisible disability, ethnicity, sex characteristics, gender
9 | identity and expression, level of experience, education, socio-economic status,
10 | nationality, personal appearance, race, religion, or sexual identity
11 | and orientation.
12 |
13 | We pledge to act and interact in ways that contribute to an open, welcoming,
14 | diverse, inclusive, and healthy community.
15 |
16 | ## Our Standards
17 |
18 | Examples of behavior that contributes to a positive environment for our
19 | community include:
20 |
21 | * Demonstrating empathy and kindness toward other people
22 | * Being respectful of differing opinions, viewpoints, and experiences
23 | * Giving and gracefully accepting constructive feedback
24 | * Accepting responsibility and apologizing to those affected by our mistakes,
25 | and learning from the experience
26 | * Focusing on what is best not just for us as individuals, but for the
27 | overall community
28 |
29 | Examples of unacceptable behavior include:
30 |
31 | * The use of sexualized language or imagery, and sexual attention or
32 | advances of any kind
33 | * Trolling, insulting or derogatory comments, and personal or political attacks
34 | * Public or private harassment
35 | * Publishing others' private information, such as a physical or email
36 | address, without their explicit permission
37 | * Other conduct which could reasonably be considered inappropriate in a
38 | professional setting
39 |
40 | ## Enforcement Responsibilities
41 |
42 | Community leaders are responsible for clarifying and enforcing our standards of
43 | acceptable behavior and will take appropriate and fair corrective action in
44 | response to any behavior that they deem inappropriate, threatening, offensive,
45 | or harmful.
46 |
47 | Community leaders have the right and responsibility to remove, edit, or reject
48 | comments, commits, code, wiki edits, issues, and other contributions that are
49 | not aligned to this Code of Conduct, and will communicate reasons for moderation
50 | decisions when appropriate.
51 |
52 | ## Scope
53 |
54 | This Code of Conduct applies within all community spaces, and also applies when
55 | an individual is officially representing the community in public spaces.
56 | Examples of representing our community include using an official e-mail address,
57 | posting via an official social media account, or acting as an appointed
58 | representative at an online or offline event.
59 |
60 | ## Enforcement
61 |
62 | Instances of abusive, harassing, or otherwise unacceptable behavior may be
63 | reported to the community leaders responsible for enforcement at
64 | opensource@20bn.com.
65 | All complaints will be reviewed and investigated promptly and fairly.
66 |
67 | All community leaders are obligated to respect the privacy and security of the
68 | reporter of any incident.
69 |
70 | ## Enforcement Guidelines
71 |
72 | Community leaders will follow these Community Impact Guidelines in determining
73 | the consequences for any action they deem in violation of this Code of Conduct:
74 |
75 | ### 1. Correction
76 |
77 | **Community Impact**: Use of inappropriate language or other behavior deemed
78 | unprofessional or unwelcome in the community.
79 |
80 | **Consequence**: A private, written warning from community leaders, providing
81 | clarity around the nature of the violation and an explanation of why the
82 | behavior was inappropriate. A public apology may be requested.
83 |
84 | ### 2. Warning
85 |
86 | **Community Impact**: A violation through a single incident or series
87 | of actions.
88 |
89 | **Consequence**: A warning with consequences for continued behavior. No
90 | interaction with the people involved, including unsolicited interaction with
91 | those enforcing the Code of Conduct, for a specified period of time. This
92 | includes avoiding interactions in community spaces as well as external channels
93 | like social media. Violating these terms may lead to a temporary or
94 | permanent ban.
95 |
96 | ### 3. Temporary Ban
97 |
98 | **Community Impact**: A serious violation of community standards, including
99 | sustained inappropriate behavior.
100 |
101 | **Consequence**: A temporary ban from any sort of interaction or public
102 | communication with the community for a specified period of time. No public or
103 | private interaction with the people involved, including unsolicited interaction
104 | with those enforcing the Code of Conduct, is allowed during this period.
105 | Violating these terms may lead to a permanent ban.
106 |
107 | ### 4. Permanent Ban
108 |
109 | **Community Impact**: Demonstrating a pattern of violation of community
110 | standards, including sustained inappropriate behavior, harassment of an
111 | individual, or aggression toward or disparagement of classes of individuals.
112 |
113 | **Consequence**: A permanent ban from any sort of public interaction within
114 | the community.
115 |
116 | ## Attribution
117 |
118 | This Code of Conduct is adapted from the [Contributor Covenant][homepage],
119 | version 2.0, available at
120 | [https://www.contributor-covenant.org/version/2/0/code_of_conduct.html][v2.0].
121 |
122 | Community Impact Guidelines were inspired by
123 | [Mozilla's code of conduct enforcement ladder][Mozilla CoC].
124 |
125 | For answers to common questions about this code of conduct, see the FAQ at
126 | [https://www.contributor-covenant.org/faq][FAQ]. Translations are available
127 | at [https://www.contributor-covenant.org/translations][translations].
128 |
129 | [homepage]: https://www.contributor-covenant.org
130 | [v2.0]: https://www.contributor-covenant.org/version/2/0/code_of_conduct.html
131 | [Mozilla CoC]: https://github.com/mozilla/diversity
132 | [FAQ]: https://www.contributor-covenant.org/faq
133 | [translations]: https://www.contributor-covenant.org/translations
134 |
--------------------------------------------------------------------------------
/CONTRIBUTING.md:
--------------------------------------------------------------------------------
1 | # Contributing to sense-iOS
2 |
3 | We welcome contributions from developers to improve our pipeline of tools.
4 |
5 | ## License
6 | By contributing to sense-iOS, you agree that the license found in the file `LICENSE` in the root directory will extend to your contribution.
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2020 Twenty Billion Neurons GmbH
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/Podfile:
--------------------------------------------------------------------------------
1 | # Uncomment the next line to define a global platform for your project
2 | platform :ios, '9.0'
3 |
4 | target 'sense-iOS' do
5 | # Comment the next line if you don't want to use dynamic frameworks
6 | use_frameworks!
7 | pod 'TensorFlowLiteSwift', '~> 0.0.1-nightly.20210115', :subspecs => ['CoreML', 'Metal']
8 | end
9 |
10 |
--------------------------------------------------------------------------------
/Podfile.lock:
--------------------------------------------------------------------------------
1 | PODS:
2 | - TensorFlowLiteC (0.0.1-nightly.20210115):
3 | - TensorFlowLiteC/Core (= 0.0.1-nightly.20210115)
4 | - TensorFlowLiteC/Core (0.0.1-nightly.20210115)
5 | - TensorFlowLiteC/CoreML (0.0.1-nightly.20210115):
6 | - TensorFlowLiteC/Core
7 | - TensorFlowLiteC/Metal (0.0.1-nightly.20210115):
8 | - TensorFlowLiteC/Core
9 | - TensorFlowLiteSwift/Core (0.0.1-nightly.20210115):
10 | - TensorFlowLiteC (= 0.0.1-nightly.20210115)
11 | - TensorFlowLiteSwift/CoreML (0.0.1-nightly.20210115):
12 | - TensorFlowLiteC/CoreML (= 0.0.1-nightly.20210115)
13 | - TensorFlowLiteSwift/Core (= 0.0.1-nightly.20210115)
14 | - TensorFlowLiteSwift/Metal (0.0.1-nightly.20210115):
15 | - TensorFlowLiteC/Metal (= 0.0.1-nightly.20210115)
16 | - TensorFlowLiteSwift/Core (= 0.0.1-nightly.20210115)
17 |
18 | DEPENDENCIES:
19 | - TensorFlowLiteSwift/CoreML (~> 0.0.1-nightly.20210115)
20 | - TensorFlowLiteSwift/Metal (~> 0.0.1-nightly.20210115)
21 |
22 | SPEC REPOS:
23 | trunk:
24 | - TensorFlowLiteC
25 | - TensorFlowLiteSwift
26 |
27 | SPEC CHECKSUMS:
28 | TensorFlowLiteC: eee5feb9f9c57adbd195cb5388f56e6912f44ef5
29 | TensorFlowLiteSwift: 4ba42dd715b6895c6b1f6ab156d296effe98ab0f
30 |
31 | PODFILE CHECKSUM: 490348302de9ae1ee507ce57a81f0dca92e7db90
32 |
33 | COCOAPODS: 1.8.3
34 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 |
36 |
37 | ---
38 |
39 | This repository contains the iOS version of [sense](https://github.com/TwentyBN/sense), which allows you to build an iOS demo app that runs the PyTorch models after converting them to TensorFlow Lite using the provided script.
40 |
41 | You can convert and deploy the existing gesture detection model as is, or use the transfer learning script in [sense](https://github.com/TwentyBN/sense) to train your own custom classification outputs on top of it. More models will be supported soon.
42 |
43 | The model uses an EfficientNet backbone and was confirmed to run smoothly on iOS devices with an A11 chip (e.g. iPhone 8 or higher); it may also work on devices with an A10 chip (e.g. iPad 6/7, iPhone 7).
44 |
45 |
46 |
47 |
48 |
49 | ---
50 |
51 | ## Requirements and Installation
52 |
53 | The following steps will install everything you need to get the project up and running.
54 |
55 | #### Step 1: Clone this repository
56 |
57 | To begin, clone this repository to a local directory of your choice:
58 |
59 | ```shell
60 | git clone https://github.com/TwentyBN/sense-iOS.git
61 | ```
62 |
63 | #### Step 2: Clone and install the sense repository
64 |
65 | You will also need to clone [sense](https://github.com/TwentyBN/sense) (we will use it to convert PyTorch models to TensorFlow Lite):
66 |
67 | ```shell
68 | git clone https://github.com/TwentyBN/sense.git
69 | cd sense
70 | ```
71 |
72 | Next, follow the instructions for [sense](https://github.com/TwentyBN/sense) to install
73 | its dependencies.
74 |
75 | #### Step 3: Download our pre-trained models
76 |
77 | You will need to download our pre-trained models to build the demo application. Once again, please follow the
78 | instructions in [sense](https://github.com/TwentyBN/sense) to access them (you will have to create an account and agree to our terms and conditions).
79 |
80 | #### Step 4: Install the pods
81 |
82 | This project relies on CocoaPods to install TensorFlow Lite.
83 | If you don't have `cocoapods` installed on your Mac, you can install it using Homebrew:
84 | ```shell
85 | brew install cocoapods
86 | ```
87 | You then need to install the pods by running the following command:
88 | ```shell
89 | # If you are in sense-iOS root directory:
90 | pod install
91 | ```
92 |
93 | ---
94 |
95 | ## Getting Started
96 |
97 | This section will explain how you can deploy our pre-trained models, or your own custom model, to an iOS application.
98 |
99 | #### Step 1: Convert a PyTorch model to TensorFlow Lite
100 |
101 | The iOS demo requires a TensorFlow Lite version of the model checkpoint, which you can produce using the conversion script provided in
102 | `sense`. For our pre-trained gesture control model, run:
103 |
104 | ```shell
105 | python tools/conversion/convert_to_tflite.py --backbone=efficientnet --classifier=efficient_net_gesture_control --output_name=model
106 | ```
107 |
108 | You should now have the following TensorFlow Lite file: `sense/resources/model_conversion/model.tflite`.
109 |
110 | #### Step 2: Move the converted model to the correct location
111 |
112 | The TensorFlow Lite file created in the last step needs to be moved from `sense` into this repository at `sense-iOS/sense-iOS/model.tflite`:
113 |
114 | ```shell
115 | # If you are in sense
116 | mv ./resources/model_conversion/model.tflite ../sense-iOS/sense-iOS/model.tflite
117 | ```
118 |
119 | #### Step 3: Build the project
120 | You can now open the project in Xcode and build it to your device. Since the project uses CocoaPods, open `sense-iOS.xcworkspace` rather than the `.xcodeproj`. Have fun!
121 |
122 | ---
123 |
124 | ## Deploy your own classifier
125 |
126 | Using our transfer learning script, you can fine-tune our model on your own classification tasks. If
127 | you do so, you'll have to reflect the new outputs in the iOS project:
128 |
129 | #### `sense-iOS/sensenet_labels.json`
130 |
131 | By default, the dictionary in `sensenet_labels.json` contains the labels our model was trained on for the gesture control task. Replace these with the contents of the `label2int.json` file produced during training.
132 |
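For reference, here is a minimal sketch of how the app could decode that file, assuming it keeps the same `label -> index` dictionary layout as `label2int.json` (the `loadLabels` helper below is hypothetical, not part of this repo):

```swift
import Foundation

// Hypothetical helper: decode sensenet_labels.json (assumed to map class
// labels to integer indices) into an array ordered by class index.
// Assumes the indices are contiguous in 0..<count.
func loadLabels() -> [String] {
    guard let url = Bundle.main.url(forResource: "sensenet_labels", withExtension: "json"),
          let data = try? Data(contentsOf: url),
          let label2int = try? JSONDecoder().decode([String: Int].self, from: data) else {
        return []
    }
    var labels = [String](repeating: "", count: label2int.count)
    for (label, index) in label2int {
        labels[index] = label
    }
    return labels
}
```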
133 | ---
134 |
135 | ## Citation
136 |
137 | We now have a [blog post](https://medium.com/twentybn/towards-situated-visual-ai-via-end-to-end-learning-on-video-clips-2832bd9d519f) you can cite:
138 |
139 | ```bibtex
140 | @misc{sense2020blogpost,
141 | author = {Guillaume Berger and Antoine Mercier and Florian Letsch and Cornelius Boehm and
142 | Sunny Panchal and Nahua Kang and Mark Todorovich and Ingo Bax and Roland Memisevic},
143 | title = {Towards situated visual AI via end-to-end learning on video clips},
144 | howpublished = {\url{https://medium.com/twentybn/towards-situated-visual-ai-via-end-to-end-learning-on-video-clips-2832bd9d519f}},
145 | note = {online; accessed 23 October 2020},
146 | year=2020,
147 | }
148 | ```
149 |
150 | ---
151 |
152 | ## License
153 |
154 | The code is copyright (c) 2020 Twenty Billion Neurons GmbH under an MIT License. See the file LICENSE for details. Note that this license
155 | only covers the source code of this repo. Pre-trained weights come with a separate license available [here](https://20bn.com/licensing/sdk/evaluation).
156 |
--------------------------------------------------------------------------------
/docs/gifs/senseiOS_gesture.gif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/quic/sense-iOS/50fc9a7ba373807fa9a8bbf7b58a58327b78c660/docs/gifs/senseiOS_gesture.gif
--------------------------------------------------------------------------------
/docs/imgs/sense_iOS_logo.svg:
--------------------------------------------------------------------------------
[sense_iOS_logo.svg — SVG markup not captured in this dump]
--------------------------------------------------------------------------------
/sense-iOS.xcodeproj/project.pbxproj:
--------------------------------------------------------------------------------
1 | // !$*UTF8*$!
2 | {
3 | archiveVersion = 1;
4 | classes = {
5 | };
6 | objectVersion = 50;
7 | objects = {
8 |
9 | /* Begin PBXBuildFile section */
10 | 626A8CF17AD5F9E6BD34BAF6 /* Pods_sense_iOS.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 68A00D7C183DCE90505FB221 /* Pods_sense_iOS.framework */; };
11 | A07C812A25BB6C8E0060018E /* model.tflite in Resources */ = {isa = PBXBuildFile; fileRef = A07C812925BB6C8D0060018E /* model.tflite */; };
12 | A0C7EF452541F49A008A066F /* AppDelegate.swift in Sources */ = {isa = PBXBuildFile; fileRef = A0C7EF442541F49A008A066F /* AppDelegate.swift */; };
13 | A0C7EF492541F49A008A066F /* ViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = A0C7EF482541F49A008A066F /* ViewController.swift */; };
14 | A0C7EF4C2541F49A008A066F /* Main.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = A0C7EF4A2541F49A008A066F /* Main.storyboard */; };
15 | A0C7EF4E2541F49E008A066F /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = A0C7EF4D2541F49E008A066F /* Assets.xcassets */; };
16 | A0C7EF512541F49E008A066F /* LaunchScreen.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = A0C7EF4F2541F49E008A066F /* LaunchScreen.storyboard */; };
17 | A0C7EF7E2541F5DD008A066F /* CameraTurnedOffViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = A0C7EF7D2541F5DD008A066F /* CameraTurnedOffViewController.swift */; };
18 | A0C7EF832541F606008A066F /* InferenceModel.swift in Sources */ = {isa = PBXBuildFile; fileRef = A0C7EF822541F606008A066F /* InferenceModel.swift */; };
19 | A0C7EF882541F615008A066F /* MotionManager.swift in Sources */ = {isa = PBXBuildFile; fileRef = A0C7EF872541F615008A066F /* MotionManager.swift */; };
20 | A0C7EF902541F70C008A066F /* InferenceLocal.swift in Sources */ = {isa = PBXBuildFile; fileRef = A0C7EF8F2541F70C008A066F /* InferenceLocal.swift */; };
21 | A0C7EF972541F71D008A066F /* CVPixelBuffer+Helpers.swift in Sources */ = {isa = PBXBuildFile; fileRef = A0C7EF952541F71D008A066F /* CVPixelBuffer+Helpers.swift */; };
22 | A0C7EF982541F71D008A066F /* FrameExtraction.swift in Sources */ = {isa = PBXBuildFile; fileRef = A0C7EF962541F71D008A066F /* FrameExtraction.swift */; };
23 | A0C7EFA22541FB8A008A066F /* sensenet_labels.json in Resources */ = {isa = PBXBuildFile; fileRef = A0C7EFA12541FB89008A066F /* sensenet_labels.json */; };
24 | A0C7EFA7254223FB008A066F /* Array+argmin.swift in Sources */ = {isa = PBXBuildFile; fileRef = A0C7EFA6254223FB008A066F /* Array+argmin.swift */; };
25 | /* End PBXBuildFile section */
26 |
27 | /* Begin PBXContainerItemProxy section */
28 | A0C7EF582541F49F008A066F /* PBXContainerItemProxy */ = {
29 | isa = PBXContainerItemProxy;
30 | containerPortal = A0C7EF392541F49A008A066F /* Project object */;
31 | proxyType = 1;
32 | remoteGlobalIDString = A0C7EF402541F49A008A066F;
33 | remoteInfo = "sense-iOS";
34 | };
35 | A0C7EF632541F49F008A066F /* PBXContainerItemProxy */ = {
36 | isa = PBXContainerItemProxy;
37 | containerPortal = A0C7EF392541F49A008A066F /* Project object */;
38 | proxyType = 1;
39 | remoteGlobalIDString = A0C7EF402541F49A008A066F;
40 | remoteInfo = "sense-iOS";
41 | };
42 | /* End PBXContainerItemProxy section */
43 |
44 | /* Begin PBXFileReference section */
45 | 43815A5C88CA00E95BDF4325 /* Pods-sense-iOS.debug.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-sense-iOS.debug.xcconfig"; path = "Target Support Files/Pods-sense-iOS/Pods-sense-iOS.debug.xcconfig"; sourceTree = "<group>"; };
46 | 68A00D7C183DCE90505FB221 /* Pods_sense_iOS.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; includeInIndex = 0; path = Pods_sense_iOS.framework; sourceTree = BUILT_PRODUCTS_DIR; };
47 | A07C812925BB6C8D0060018E /* model.tflite */ = {isa = PBXFileReference; lastKnownFileType = file; path = model.tflite; sourceTree = "<group>"; };
48 | A0C7EF412541F49A008A066F /* sense-iOS.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = "sense-iOS.app"; sourceTree = BUILT_PRODUCTS_DIR; };
49 | A0C7EF442541F49A008A066F /* AppDelegate.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AppDelegate.swift; sourceTree = "<group>"; };
50 | A0C7EF482541F49A008A066F /* ViewController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ViewController.swift; sourceTree = "<group>"; };
51 | A0C7EF4B2541F49A008A066F /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/Main.storyboard; sourceTree = "<group>"; };
52 | A0C7EF4D2541F49E008A066F /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = "<group>"; };
53 | A0C7EF502541F49E008A066F /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/LaunchScreen.storyboard; sourceTree = "<group>"; };
54 | A0C7EF522541F49E008A066F /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = "<group>"; };
55 | A0C7EF572541F49F008A066F /* sense-iOSTests.xctest */ = {isa = PBXFileReference; explicitFileType = wrapper.cfbundle; includeInIndex = 0; path = "sense-iOSTests.xctest"; sourceTree = BUILT_PRODUCTS_DIR; };
56 | A0C7EF622541F49F008A066F /* sense-iOSUITests.xctest */ = {isa = PBXFileReference; explicitFileType = wrapper.cfbundle; includeInIndex = 0; path = "sense-iOSUITests.xctest"; sourceTree = BUILT_PRODUCTS_DIR; };
57 | A0C7EF7D2541F5DD008A066F /* CameraTurnedOffViewController.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = CameraTurnedOffViewController.swift; sourceTree = "<group>"; };
58 | A0C7EF822541F606008A066F /* InferenceModel.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = InferenceModel.swift; sourceTree = "<group>"; };
59 | A0C7EF872541F615008A066F /* MotionManager.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = MotionManager.swift; sourceTree = "<group>"; };
60 | A0C7EF8F2541F70C008A066F /* InferenceLocal.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = InferenceLocal.swift; sourceTree = "<group>"; };
61 | A0C7EF952541F71D008A066F /* CVPixelBuffer+Helpers.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = "CVPixelBuffer+Helpers.swift"; sourceTree = "<group>"; };
62 | A0C7EF962541F71D008A066F /* FrameExtraction.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = FrameExtraction.swift; sourceTree = "<group>"; };
63 | A0C7EFA12541FB89008A066F /* sensenet_labels.json */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text.json; path = sensenet_labels.json; sourceTree = "<group>"; };
64 | A0C7EFA6254223FB008A066F /* Array+argmin.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "Array+argmin.swift"; sourceTree = "<group>"; };
65 | E60F0C305F40709F64B3FEEC /* Pods-sense-iOS.release.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-sense-iOS.release.xcconfig"; path = "Target Support Files/Pods-sense-iOS/Pods-sense-iOS.release.xcconfig"; sourceTree = "<group>"; };
66 | /* End PBXFileReference section */
67 |
68 | /* Begin PBXFrameworksBuildPhase section */
69 | A0C7EF3E2541F49A008A066F /* Frameworks */ = {
70 | isa = PBXFrameworksBuildPhase;
71 | buildActionMask = 2147483647;
72 | files = (
73 | 626A8CF17AD5F9E6BD34BAF6 /* Pods_sense_iOS.framework in Frameworks */,
74 | );
75 | runOnlyForDeploymentPostprocessing = 0;
76 | };
77 | A0C7EF542541F49E008A066F /* Frameworks */ = {
78 | isa = PBXFrameworksBuildPhase;
79 | buildActionMask = 2147483647;
80 | files = (
81 | );
82 | runOnlyForDeploymentPostprocessing = 0;
83 | };
84 | A0C7EF5F2541F49F008A066F /* Frameworks */ = {
85 | isa = PBXFrameworksBuildPhase;
86 | buildActionMask = 2147483647;
87 | files = (
88 | );
89 | runOnlyForDeploymentPostprocessing = 0;
90 | };
91 | /* End PBXFrameworksBuildPhase section */
92 |
93 | /* Begin PBXGroup section */
94 | 49FEE295AB375959F3011769 /* Pods */ = {
95 | isa = PBXGroup;
96 | children = (
97 | 43815A5C88CA00E95BDF4325 /* Pods-sense-iOS.debug.xcconfig */,
98 | E60F0C305F40709F64B3FEEC /* Pods-sense-iOS.release.xcconfig */,
99 | );
100 | path = Pods;
101 | sourceTree = "<group>";
102 | };
103 | A0C7EF382541F49A008A066F = {
104 | isa = PBXGroup;
105 | children = (
106 | A0C7EF432541F49A008A066F /* sense-iOS */,
107 | A0C7EF422541F49A008A066F /* Products */,
108 | 49FEE295AB375959F3011769 /* Pods */,
109 | F858B1964CFF95D0017FD781 /* Frameworks */,
110 | );
111 | sourceTree = "<group>";
112 | };
113 | A0C7EF422541F49A008A066F /* Products */ = {
114 | isa = PBXGroup;
115 | children = (
116 | A0C7EF412541F49A008A066F /* sense-iOS.app */,
117 | A0C7EF572541F49F008A066F /* sense-iOSTests.xctest */,
118 | A0C7EF622541F49F008A066F /* sense-iOSUITests.xctest */,
119 | );
120 | name = Products;
121 | sourceTree = "<group>";
122 | };
123 | A0C7EF432541F49A008A066F /* sense-iOS */ = {
124 | isa = PBXGroup;
125 | children = (
126 | A0C7EF442541F49A008A066F /* AppDelegate.swift */,
127 | A0C7EF482541F49A008A066F /* ViewController.swift */,
128 | A0C7EF7D2541F5DD008A066F /* CameraTurnedOffViewController.swift */,
129 | A0C7EF822541F606008A066F /* InferenceModel.swift */,
130 | A07C812925BB6C8D0060018E /* model.tflite */,
131 | A0C7EF8F2541F70C008A066F /* InferenceLocal.swift */,
132 | A0C7EF872541F615008A066F /* MotionManager.swift */,
133 | A0C7EFA6254223FB008A066F /* Array+argmin.swift */,
134 | A0C7EF522541F49E008A066F /* Info.plist */,
135 | A0C7EFA12541FB89008A066F /* sensenet_labels.json */,
136 | A0C7EF942541F71D008A066F /* VideoRecorder */,
137 | A0C7EF4A2541F49A008A066F /* Main.storyboard */,
138 | A0C7EF4D2541F49E008A066F /* Assets.xcassets */,
139 | A0C7EF4F2541F49E008A066F /* LaunchScreen.storyboard */,
140 | );
141 | path = "sense-iOS";
142 | sourceTree = "<group>";
143 | };
144 | A0C7EF942541F71D008A066F /* VideoRecorder */ = {
145 | isa = PBXGroup;
146 | children = (
147 | A0C7EF952541F71D008A066F /* CVPixelBuffer+Helpers.swift */,
148 | A0C7EF962541F71D008A066F /* FrameExtraction.swift */,
149 | );
150 | path = VideoRecorder;
151 | sourceTree = "<group>";
152 | };
153 | F858B1964CFF95D0017FD781 /* Frameworks */ = {
154 | isa = PBXGroup;
155 | children = (
156 | 68A00D7C183DCE90505FB221 /* Pods_sense_iOS.framework */,
157 | );
158 | name = Frameworks;
159 | sourceTree = "<group>";
160 | };
161 | /* End PBXGroup section */
162 |
163 | /* Begin PBXNativeTarget section */
164 | A0C7EF402541F49A008A066F /* sense-iOS */ = {
165 | isa = PBXNativeTarget;
166 | buildConfigurationList = A0C7EF6B2541F49F008A066F /* Build configuration list for PBXNativeTarget "sense-iOS" */;
167 | buildPhases = (
168 | 9B91162D938F05FCE615CDF3 /* [CP] Check Pods Manifest.lock */,
169 | A0C7EF3D2541F49A008A066F /* Sources */,
170 | A0C7EF3E2541F49A008A066F /* Frameworks */,
171 | A0C7EF3F2541F49A008A066F /* Resources */,
172 | );
173 | buildRules = (
174 | );
175 | dependencies = (
176 | );
177 | name = "sense-iOS";
178 | productName = "sense-iOS";
179 | productReference = A0C7EF412541F49A008A066F /* sense-iOS.app */;
180 | productType = "com.apple.product-type.application";
181 | };
182 | A0C7EF562541F49E008A066F /* sense-iOSTests */ = {
183 | isa = PBXNativeTarget;
184 | buildConfigurationList = A0C7EF6E2541F49F008A066F /* Build configuration list for PBXNativeTarget "sense-iOSTests" */;
185 | buildPhases = (
186 | A0C7EF532541F49E008A066F /* Sources */,
187 | A0C7EF542541F49E008A066F /* Frameworks */,
188 | A0C7EF552541F49E008A066F /* Resources */,
189 | );
190 | buildRules = (
191 | );
192 | dependencies = (
193 | A0C7EF592541F49F008A066F /* PBXTargetDependency */,
194 | );
195 | name = "sense-iOSTests";
196 | productName = "sense-iOSTests";
197 | productReference = A0C7EF572541F49F008A066F /* sense-iOSTests.xctest */;
198 | productType = "com.apple.product-type.bundle.unit-test";
199 | };
200 | A0C7EF612541F49F008A066F /* sense-iOSUITests */ = {
201 | isa = PBXNativeTarget;
202 | buildConfigurationList = A0C7EF712541F49F008A066F /* Build configuration list for PBXNativeTarget "sense-iOSUITests" */;
203 | buildPhases = (
204 | A0C7EF5E2541F49F008A066F /* Sources */,
205 | A0C7EF5F2541F49F008A066F /* Frameworks */,
206 | A0C7EF602541F49F008A066F /* Resources */,
207 | );
208 | buildRules = (
209 | );
210 | dependencies = (
211 | A0C7EF642541F49F008A066F /* PBXTargetDependency */,
212 | );
213 | name = "sense-iOSUITests";
214 | productName = "sense-iOSUITests";
215 | productReference = A0C7EF622541F49F008A066F /* sense-iOSUITests.xctest */;
216 | productType = "com.apple.product-type.bundle.ui-testing";
217 | };
218 | /* End PBXNativeTarget section */
219 |
220 | /* Begin PBXProject section */
221 | A0C7EF392541F49A008A066F /* Project object */ = {
222 | isa = PBXProject;
223 | attributes = {
224 | LastSwiftUpdateCheck = 1200;
225 | LastUpgradeCheck = 1200;
226 | TargetAttributes = {
227 | A0C7EF402541F49A008A066F = {
228 | CreatedOnToolsVersion = 12.0;
229 | };
230 | A0C7EF562541F49E008A066F = {
231 | CreatedOnToolsVersion = 12.0;
232 | TestTargetID = A0C7EF402541F49A008A066F;
233 | };
234 | A0C7EF612541F49F008A066F = {
235 | CreatedOnToolsVersion = 12.0;
236 | TestTargetID = A0C7EF402541F49A008A066F;
237 | };
238 | };
239 | };
240 | buildConfigurationList = A0C7EF3C2541F49A008A066F /* Build configuration list for PBXProject "sense-iOS" */;
241 | compatibilityVersion = "Xcode 9.3";
242 | developmentRegion = en;
243 | hasScannedForEncodings = 0;
244 | knownRegions = (
245 | en,
246 | Base,
247 | );
248 | mainGroup = A0C7EF382541F49A008A066F;
249 | productRefGroup = A0C7EF422541F49A008A066F /* Products */;
250 | projectDirPath = "";
251 | projectRoot = "";
252 | targets = (
253 | A0C7EF402541F49A008A066F /* sense-iOS */,
254 | A0C7EF562541F49E008A066F /* sense-iOSTests */,
255 | A0C7EF612541F49F008A066F /* sense-iOSUITests */,
256 | );
257 | };
258 | /* End PBXProject section */
259 |
260 | /* Begin PBXResourcesBuildPhase section */
261 | A0C7EF3F2541F49A008A066F /* Resources */ = {
262 | isa = PBXResourcesBuildPhase;
263 | buildActionMask = 2147483647;
264 | files = (
265 | A07C812A25BB6C8E0060018E /* model.tflite in Resources */,
266 | A0C7EF512541F49E008A066F /* LaunchScreen.storyboard in Resources */,
267 | A0C7EF4E2541F49E008A066F /* Assets.xcassets in Resources */,
268 | A0C7EF4C2541F49A008A066F /* Main.storyboard in Resources */,
269 | A0C7EFA22541FB8A008A066F /* sensenet_labels.json in Resources */,
270 | );
271 | runOnlyForDeploymentPostprocessing = 0;
272 | };
273 | A0C7EF552541F49E008A066F /* Resources */ = {
274 | isa = PBXResourcesBuildPhase;
275 | buildActionMask = 2147483647;
276 | files = (
277 | );
278 | runOnlyForDeploymentPostprocessing = 0;
279 | };
280 | A0C7EF602541F49F008A066F /* Resources */ = {
281 | isa = PBXResourcesBuildPhase;
282 | buildActionMask = 2147483647;
283 | files = (
284 | );
285 | runOnlyForDeploymentPostprocessing = 0;
286 | };
287 | /* End PBXResourcesBuildPhase section */
288 |
289 | /* Begin PBXShellScriptBuildPhase section */
290 | 9B91162D938F05FCE615CDF3 /* [CP] Check Pods Manifest.lock */ = {
291 | isa = PBXShellScriptBuildPhase;
292 | buildActionMask = 2147483647;
293 | files = (
294 | );
295 | inputFileListPaths = (
296 | );
297 | inputPaths = (
298 | "${PODS_PODFILE_DIR_PATH}/Podfile.lock",
299 | "${PODS_ROOT}/Manifest.lock",
300 | );
301 | name = "[CP] Check Pods Manifest.lock";
302 | outputFileListPaths = (
303 | );
304 | outputPaths = (
305 | "$(DERIVED_FILE_DIR)/Pods-sense-iOS-checkManifestLockResult.txt",
306 | );
307 | runOnlyForDeploymentPostprocessing = 0;
308 | shellPath = /bin/sh;
309 | shellScript = "diff \"${PODS_PODFILE_DIR_PATH}/Podfile.lock\" \"${PODS_ROOT}/Manifest.lock\" > /dev/null\nif [ $? != 0 ] ; then\n # print error to STDERR\n echo \"error: The sandbox is not in sync with the Podfile.lock. Run 'pod install' or update your CocoaPods installation.\" >&2\n exit 1\nfi\n# This output is used by Xcode 'outputs' to avoid re-running this script phase.\necho \"SUCCESS\" > \"${SCRIPT_OUTPUT_FILE_0}\"\n";
310 | showEnvVarsInLog = 0;
311 | };
312 | /* End PBXShellScriptBuildPhase section */
313 |
314 | /* Begin PBXSourcesBuildPhase section */
315 | A0C7EF3D2541F49A008A066F /* Sources */ = {
316 | isa = PBXSourcesBuildPhase;
317 | buildActionMask = 2147483647;
318 | files = (
319 | A0C7EF7E2541F5DD008A066F /* CameraTurnedOffViewController.swift in Sources */,
320 | A0C7EF972541F71D008A066F /* CVPixelBuffer+Helpers.swift in Sources */,
321 | A0C7EF882541F615008A066F /* MotionManager.swift in Sources */,
322 | A0C7EF832541F606008A066F /* InferenceModel.swift in Sources */,
323 | A0C7EFA7254223FB008A066F /* Array+argmin.swift in Sources */,
324 | A0C7EF982541F71D008A066F /* FrameExtraction.swift in Sources */,
325 | A0C7EF492541F49A008A066F /* ViewController.swift in Sources */,
326 | A0C7EF452541F49A008A066F /* AppDelegate.swift in Sources */,
327 | A0C7EF902541F70C008A066F /* InferenceLocal.swift in Sources */,
328 | );
329 | runOnlyForDeploymentPostprocessing = 0;
330 | };
331 | A0C7EF532541F49E008A066F /* Sources */ = {
332 | isa = PBXSourcesBuildPhase;
333 | buildActionMask = 2147483647;
334 | files = (
335 | );
336 | runOnlyForDeploymentPostprocessing = 0;
337 | };
338 | A0C7EF5E2541F49F008A066F /* Sources */ = {
339 | isa = PBXSourcesBuildPhase;
340 | buildActionMask = 2147483647;
341 | files = (
342 | );
343 | runOnlyForDeploymentPostprocessing = 0;
344 | };
345 | /* End PBXSourcesBuildPhase section */
346 |
347 | /* Begin PBXTargetDependency section */
348 | A0C7EF592541F49F008A066F /* PBXTargetDependency */ = {
349 | isa = PBXTargetDependency;
350 | target = A0C7EF402541F49A008A066F /* sense-iOS */;
351 | targetProxy = A0C7EF582541F49F008A066F /* PBXContainerItemProxy */;
352 | };
353 | A0C7EF642541F49F008A066F /* PBXTargetDependency */ = {
354 | isa = PBXTargetDependency;
355 | target = A0C7EF402541F49A008A066F /* sense-iOS */;
356 | targetProxy = A0C7EF632541F49F008A066F /* PBXContainerItemProxy */;
357 | };
358 | /* End PBXTargetDependency section */
359 |
360 | /* Begin PBXVariantGroup section */
361 | A0C7EF4A2541F49A008A066F /* Main.storyboard */ = {
362 | isa = PBXVariantGroup;
363 | children = (
364 | A0C7EF4B2541F49A008A066F /* Base */,
365 | );
366 | name = Main.storyboard;
367 | sourceTree = "<group>";
368 | };
369 | A0C7EF4F2541F49E008A066F /* LaunchScreen.storyboard */ = {
370 | isa = PBXVariantGroup;
371 | children = (
372 | A0C7EF502541F49E008A066F /* Base */,
373 | );
374 | name = LaunchScreen.storyboard;
375 | sourceTree = "<group>";
376 | };
377 | /* End PBXVariantGroup section */
378 |
379 | /* Begin XCBuildConfiguration section */
380 | A0C7EF692541F49F008A066F /* Debug */ = {
381 | isa = XCBuildConfiguration;
382 | buildSettings = {
383 | ALWAYS_SEARCH_USER_PATHS = NO;
384 | CLANG_ANALYZER_NONNULL = YES;
385 | CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE;
386 | CLANG_CXX_LANGUAGE_STANDARD = "gnu++14";
387 | CLANG_CXX_LIBRARY = "libc++";
388 | CLANG_ENABLE_MODULES = YES;
389 | CLANG_ENABLE_OBJC_ARC = YES;
390 | CLANG_ENABLE_OBJC_WEAK = YES;
391 | CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES;
392 | CLANG_WARN_BOOL_CONVERSION = YES;
393 | CLANG_WARN_COMMA = YES;
394 | CLANG_WARN_CONSTANT_CONVERSION = YES;
395 | CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES;
396 | CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
397 | CLANG_WARN_DOCUMENTATION_COMMENTS = YES;
398 | CLANG_WARN_EMPTY_BODY = YES;
399 | CLANG_WARN_ENUM_CONVERSION = YES;
400 | CLANG_WARN_INFINITE_RECURSION = YES;
401 | CLANG_WARN_INT_CONVERSION = YES;
402 | CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES;
403 | CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES;
404 | CLANG_WARN_OBJC_LITERAL_CONVERSION = YES;
405 | CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
406 | CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = YES;
407 | CLANG_WARN_RANGE_LOOP_ANALYSIS = YES;
408 | CLANG_WARN_STRICT_PROTOTYPES = YES;
409 | CLANG_WARN_SUSPICIOUS_MOVE = YES;
410 | CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE;
411 | CLANG_WARN_UNREACHABLE_CODE = YES;
412 | CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
413 | COPY_PHASE_STRIP = NO;
414 | DEBUG_INFORMATION_FORMAT = dwarf;
415 | ENABLE_STRICT_OBJC_MSGSEND = YES;
416 | ENABLE_TESTABILITY = YES;
417 | GCC_C_LANGUAGE_STANDARD = gnu11;
418 | GCC_DYNAMIC_NO_PIC = NO;
419 | GCC_NO_COMMON_BLOCKS = YES;
420 | GCC_OPTIMIZATION_LEVEL = 0;
421 | GCC_PREPROCESSOR_DEFINITIONS = (
422 | "DEBUG=1",
423 | "$(inherited)",
424 | );
425 | GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
426 | GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
427 | GCC_WARN_UNDECLARED_SELECTOR = YES;
428 | GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
429 | GCC_WARN_UNUSED_FUNCTION = YES;
430 | GCC_WARN_UNUSED_VARIABLE = YES;
431 | IPHONEOS_DEPLOYMENT_TARGET = 14.0;
432 | MTL_ENABLE_DEBUG_INFO = INCLUDE_SOURCE;
433 | MTL_FAST_MATH = YES;
434 | ONLY_ACTIVE_ARCH = YES;
435 | SDKROOT = iphoneos;
436 | SWIFT_ACTIVE_COMPILATION_CONDITIONS = DEBUG;
437 | SWIFT_OPTIMIZATION_LEVEL = "-Onone";
438 | };
439 | name = Debug;
440 | };
441 | A0C7EF6A2541F49F008A066F /* Release */ = {
442 | isa = XCBuildConfiguration;
443 | buildSettings = {
444 | ALWAYS_SEARCH_USER_PATHS = NO;
445 | CLANG_ANALYZER_NONNULL = YES;
446 | CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE;
447 | CLANG_CXX_LANGUAGE_STANDARD = "gnu++14";
448 | CLANG_CXX_LIBRARY = "libc++";
449 | CLANG_ENABLE_MODULES = YES;
450 | CLANG_ENABLE_OBJC_ARC = YES;
451 | CLANG_ENABLE_OBJC_WEAK = YES;
452 | CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES;
453 | CLANG_WARN_BOOL_CONVERSION = YES;
454 | CLANG_WARN_COMMA = YES;
455 | CLANG_WARN_CONSTANT_CONVERSION = YES;
456 | CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES;
457 | CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
458 | CLANG_WARN_DOCUMENTATION_COMMENTS = YES;
459 | CLANG_WARN_EMPTY_BODY = YES;
460 | CLANG_WARN_ENUM_CONVERSION = YES;
461 | CLANG_WARN_INFINITE_RECURSION = YES;
462 | CLANG_WARN_INT_CONVERSION = YES;
463 | CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES;
464 | CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES;
465 | CLANG_WARN_OBJC_LITERAL_CONVERSION = YES;
466 | CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
467 | CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = YES;
468 | CLANG_WARN_RANGE_LOOP_ANALYSIS = YES;
469 | CLANG_WARN_STRICT_PROTOTYPES = YES;
470 | CLANG_WARN_SUSPICIOUS_MOVE = YES;
471 | CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE;
472 | CLANG_WARN_UNREACHABLE_CODE = YES;
473 | CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
474 | COPY_PHASE_STRIP = NO;
475 | DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym";
476 | ENABLE_NS_ASSERTIONS = NO;
477 | ENABLE_STRICT_OBJC_MSGSEND = YES;
478 | GCC_C_LANGUAGE_STANDARD = gnu11;
479 | GCC_NO_COMMON_BLOCKS = YES;
480 | GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
481 | GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
482 | GCC_WARN_UNDECLARED_SELECTOR = YES;
483 | GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
484 | GCC_WARN_UNUSED_FUNCTION = YES;
485 | GCC_WARN_UNUSED_VARIABLE = YES;
486 | IPHONEOS_DEPLOYMENT_TARGET = 14.0;
487 | MTL_ENABLE_DEBUG_INFO = NO;
488 | MTL_FAST_MATH = YES;
489 | SDKROOT = iphoneos;
490 | SWIFT_COMPILATION_MODE = wholemodule;
491 | SWIFT_OPTIMIZATION_LEVEL = "-O";
492 | VALIDATE_PRODUCT = YES;
493 | };
494 | name = Release;
495 | };
496 | A0C7EF6C2541F49F008A066F /* Debug */ = {
497 | isa = XCBuildConfiguration;
498 | baseConfigurationReference = 43815A5C88CA00E95BDF4325 /* Pods-sense-iOS.debug.xcconfig */;
499 | buildSettings = {
500 | ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
501 | ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor;
502 | CODE_SIGN_STYLE = Automatic;
503 | DEVELOPMENT_TEAM = 46U46J8QDH;
504 | INFOPLIST_FILE = "sense-iOS/Info.plist";
505 | LD_RUNPATH_SEARCH_PATHS = (
506 | "$(inherited)",
507 | "@executable_path/Frameworks",
508 | );
509 | PRODUCT_BUNDLE_IDENTIFIER = "twentybn.sense-iOS";
510 | PRODUCT_NAME = "$(TARGET_NAME)";
511 | SWIFT_VERSION = 5.0;
512 | TARGETED_DEVICE_FAMILY = "1,2";
513 | };
514 | name = Debug;
515 | };
516 | A0C7EF6D2541F49F008A066F /* Release */ = {
517 | isa = XCBuildConfiguration;
518 | baseConfigurationReference = E60F0C305F40709F64B3FEEC /* Pods-sense-iOS.release.xcconfig */;
519 | buildSettings = {
520 | ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
521 | ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor;
522 | CODE_SIGN_STYLE = Automatic;
523 | DEVELOPMENT_TEAM = 46U46J8QDH;
524 | INFOPLIST_FILE = "sense-iOS/Info.plist";
525 | LD_RUNPATH_SEARCH_PATHS = (
526 | "$(inherited)",
527 | "@executable_path/Frameworks",
528 | );
529 | PRODUCT_BUNDLE_IDENTIFIER = "twentybn.sense-iOS";
530 | PRODUCT_NAME = "$(TARGET_NAME)";
531 | SWIFT_VERSION = 5.0;
532 | TARGETED_DEVICE_FAMILY = "1,2";
533 | };
534 | name = Release;
535 | };
536 | A0C7EF6F2541F49F008A066F /* Debug */ = {
537 | isa = XCBuildConfiguration;
538 | buildSettings = {
539 | ALWAYS_EMBED_SWIFT_STANDARD_LIBRARIES = YES;
540 | BUNDLE_LOADER = "$(TEST_HOST)";
541 | CODE_SIGN_STYLE = Automatic;
542 | DEVELOPMENT_TEAM = 46U46J8QDH;
543 | INFOPLIST_FILE = "sense-iOSTests/Info.plist";
544 | IPHONEOS_DEPLOYMENT_TARGET = 14.0;
545 | LD_RUNPATH_SEARCH_PATHS = (
546 | "$(inherited)",
547 | "@executable_path/Frameworks",
548 | "@loader_path/Frameworks",
549 | );
550 | PRODUCT_BUNDLE_IDENTIFIER = "twentybn.sense-iOSTests";
551 | PRODUCT_NAME = "$(TARGET_NAME)";
552 | SWIFT_VERSION = 5.0;
553 | TARGETED_DEVICE_FAMILY = "1,2";
554 | TEST_HOST = "$(BUILT_PRODUCTS_DIR)/sense-iOS.app/sense-iOS";
555 | };
556 | name = Debug;
557 | };
558 | A0C7EF702541F49F008A066F /* Release */ = {
559 | isa = XCBuildConfiguration;
560 | buildSettings = {
561 | ALWAYS_EMBED_SWIFT_STANDARD_LIBRARIES = YES;
562 | BUNDLE_LOADER = "$(TEST_HOST)";
563 | CODE_SIGN_STYLE = Automatic;
564 | DEVELOPMENT_TEAM = 46U46J8QDH;
565 | INFOPLIST_FILE = "sense-iOSTests/Info.plist";
566 | IPHONEOS_DEPLOYMENT_TARGET = 14.0;
567 | LD_RUNPATH_SEARCH_PATHS = (
568 | "$(inherited)",
569 | "@executable_path/Frameworks",
570 | "@loader_path/Frameworks",
571 | );
572 | PRODUCT_BUNDLE_IDENTIFIER = "twentybn.sense-iOSTests";
573 | PRODUCT_NAME = "$(TARGET_NAME)";
574 | SWIFT_VERSION = 5.0;
575 | TARGETED_DEVICE_FAMILY = "1,2";
576 | TEST_HOST = "$(BUILT_PRODUCTS_DIR)/sense-iOS.app/sense-iOS";
577 | };
578 | name = Release;
579 | };
580 | A0C7EF722541F49F008A066F /* Debug */ = {
581 | isa = XCBuildConfiguration;
582 | buildSettings = {
583 | ALWAYS_EMBED_SWIFT_STANDARD_LIBRARIES = YES;
584 | CODE_SIGN_STYLE = Automatic;
585 | DEVELOPMENT_TEAM = 46U46J8QDH;
586 | INFOPLIST_FILE = "20bn-sense-iOSUITests/Info.plist";
587 | LD_RUNPATH_SEARCH_PATHS = (
588 | "$(inherited)",
589 | "@executable_path/Frameworks",
590 | "@loader_path/Frameworks",
591 | );
592 | PRODUCT_BUNDLE_IDENTIFIER = "twentybn.sense-iOSUITests";
593 | PRODUCT_NAME = "$(TARGET_NAME)";
594 | SWIFT_VERSION = 5.0;
595 | TARGETED_DEVICE_FAMILY = "1,2";
596 | TEST_TARGET_NAME = "sense-iOS";
597 | };
598 | name = Debug;
599 | };
600 | A0C7EF732541F49F008A066F /* Release */ = {
601 | isa = XCBuildConfiguration;
602 | buildSettings = {
603 | ALWAYS_EMBED_SWIFT_STANDARD_LIBRARIES = YES;
604 | CODE_SIGN_STYLE = Automatic;
605 | DEVELOPMENT_TEAM = 46U46J8QDH;
606 | INFOPLIST_FILE = "20bn-sense-iOSUITests/Info.plist";
607 | LD_RUNPATH_SEARCH_PATHS = (
608 | "$(inherited)",
609 | "@executable_path/Frameworks",
610 | "@loader_path/Frameworks",
611 | );
612 | PRODUCT_BUNDLE_IDENTIFIER = "twentybn.sense-iOSUITests";
613 | PRODUCT_NAME = "$(TARGET_NAME)";
614 | SWIFT_VERSION = 5.0;
615 | TARGETED_DEVICE_FAMILY = "1,2";
616 | TEST_TARGET_NAME = "sense-iOS";
617 | };
618 | name = Release;
619 | };
620 | /* End XCBuildConfiguration section */
621 |
622 | /* Begin XCConfigurationList section */
623 | A0C7EF3C2541F49A008A066F /* Build configuration list for PBXProject "sense-iOS" */ = {
624 | isa = XCConfigurationList;
625 | buildConfigurations = (
626 | A0C7EF692541F49F008A066F /* Debug */,
627 | A0C7EF6A2541F49F008A066F /* Release */,
628 | );
629 | defaultConfigurationIsVisible = 0;
630 | defaultConfigurationName = Release;
631 | };
632 | A0C7EF6B2541F49F008A066F /* Build configuration list for PBXNativeTarget "sense-iOS" */ = {
633 | isa = XCConfigurationList;
634 | buildConfigurations = (
635 | A0C7EF6C2541F49F008A066F /* Debug */,
636 | A0C7EF6D2541F49F008A066F /* Release */,
637 | );
638 | defaultConfigurationIsVisible = 0;
639 | defaultConfigurationName = Release;
640 | };
641 | A0C7EF6E2541F49F008A066F /* Build configuration list for PBXNativeTarget "sense-iOSTests" */ = {
642 | isa = XCConfigurationList;
643 | buildConfigurations = (
644 | A0C7EF6F2541F49F008A066F /* Debug */,
645 | A0C7EF702541F49F008A066F /* Release */,
646 | );
647 | defaultConfigurationIsVisible = 0;
648 | defaultConfigurationName = Release;
649 | };
650 | A0C7EF712541F49F008A066F /* Build configuration list for PBXNativeTarget "sense-iOSUITests" */ = {
651 | isa = XCConfigurationList;
652 | buildConfigurations = (
653 | A0C7EF722541F49F008A066F /* Debug */,
654 | A0C7EF732541F49F008A066F /* Release */,
655 | );
656 | defaultConfigurationIsVisible = 0;
657 | defaultConfigurationName = Release;
658 | };
659 | /* End XCConfigurationList section */
660 | };
661 | rootObject = A0C7EF392541F49A008A066F /* Project object */;
662 | }
663 |
--------------------------------------------------------------------------------
/sense-iOS.xcodeproj/project.xcworkspace/contents.xcworkspacedata:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <Workspace
3 |    version = "1.0">
4 |    <FileRef
5 |       location = "self:">
6 |    </FileRef>
7 | </Workspace>
8 |
--------------------------------------------------------------------------------
/sense-iOS.xcodeproj/project.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
3 | <plist version="1.0">
4 | <dict>
5 | 	<key>IDEDidComputeMac32BitWarning</key>
6 | 	<true/>
7 | </dict>
8 | </plist>
9 |
--------------------------------------------------------------------------------
/sense-iOS.xcodeproj/project.xcworkspace/xcuserdata/amercier.xcuserdatad/UserInterfaceState.xcuserstate:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/quic/sense-iOS/50fc9a7ba373807fa9a8bbf7b58a58327b78c660/sense-iOS.xcodeproj/project.xcworkspace/xcuserdata/amercier.xcuserdatad/UserInterfaceState.xcuserstate
--------------------------------------------------------------------------------
/sense-iOS.xcodeproj/project.xcworkspace/xcuserdata/sunny.xcuserdatad/UserInterfaceState.xcuserstate:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/quic/sense-iOS/50fc9a7ba373807fa9a8bbf7b58a58327b78c660/sense-iOS.xcodeproj/project.xcworkspace/xcuserdata/sunny.xcuserdatad/UserInterfaceState.xcuserstate
--------------------------------------------------------------------------------
/sense-iOS.xcodeproj/xcuserdata/amercier.xcuserdatad/xcschemes/xcschememanagement.plist:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
3 | <plist version="1.0">
4 | <dict>
5 | 	<key>SchemeUserState</key>
6 | 	<dict>
7 | 		<key>sense-iOS.xcscheme_^#shared#^_</key>
8 | 		<dict>
9 | 			<key>orderHint</key>
10 | 			<integer>3</integer>
11 | 		</dict>
12 | 	</dict>
13 | </dict>
14 | </plist>
15 |
--------------------------------------------------------------------------------
/sense-iOS.xcodeproj/xcuserdata/sunny.xcuserdatad/xcschemes/xcschememanagement.plist:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
3 | <plist version="1.0">
4 | <dict>
5 | 	<key>SchemeUserState</key>
6 | 	<dict>
7 | 		<key>sense-iOS.xcscheme_^#shared#^_</key>
8 | 		<dict>
9 | 			<key>orderHint</key>
10 | 			<integer>0</integer>
11 | 		</dict>
12 | 	</dict>
13 | </dict>
14 | </plist>
15 |
--------------------------------------------------------------------------------
/sense-iOS.xcworkspace/contents.xcworkspacedata:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <Workspace
3 |    version = "1.0">
4 |    <FileRef
5 |       location = "group:sense-iOS.xcodeproj">
6 |    </FileRef>
7 |    <FileRef
8 |       location = "group:Pods/Pods.xcodeproj">
9 |    </FileRef>
10 | </Workspace>
11 |
--------------------------------------------------------------------------------
/sense-iOS.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
3 | <plist version="1.0">
4 | <dict>
5 | 	<key>IDEDidComputeMac32BitWarning</key>
6 | 	<true/>
7 | </dict>
8 | </plist>
9 |
--------------------------------------------------------------------------------
/sense-iOS/AppDelegate.swift:
--------------------------------------------------------------------------------
1 | import UIKit
2 |
3 | @UIApplicationMain
4 | class AppDelegate: UIResponder, UIApplicationDelegate {
5 | var window: UIWindow?
6 |
7 |
8 | func application(_ application: UIApplication, didFinishLaunchingWithOptions launchOptions: [UIApplication.LaunchOptionsKey: Any]?) -> Bool {
9 | // Override point for customization after application launch.
10 | return true
11 | }
12 | }
13 |
14 |
15 |
--------------------------------------------------------------------------------
/sense-iOS/Array+argmin.swift:
--------------------------------------------------------------------------------
1 | import Foundation
2 |
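// Index-returning counterparts to max()/min(): argmax()/argmin() give the
// position of the largest/smallest element, or nil when the array is empty.
// Example: [3.0, 1.0, 2.0].argmax() == 0 and [3.0, 1.0, 2.0].argmin() == 1.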
3 | extension Array where Element: Comparable {
4 | func argmax() -> Index? {
5 | return indices.max(by: { self[$0] < self[$1] })
6 | }
7 |
8 | func argmin() -> Index? {
9 | return indices.min(by: { self[$0] < self[$1] })
10 | }
11 | }
12 |
--------------------------------------------------------------------------------
/sense-iOS/Assets.xcassets/AccentColor.colorset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "colors" : [
3 | {
4 | "idiom" : "universal"
5 | }
6 | ],
7 | "info" : {
8 | "author" : "xcode",
9 | "version" : 1
10 | }
11 | }
12 |
--------------------------------------------------------------------------------
/sense-iOS/Assets.xcassets/AppIcon.appiconset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "idiom" : "iphone",
5 | "scale" : "2x",
6 | "size" : "20x20"
7 | },
8 | {
9 | "idiom" : "iphone",
10 | "scale" : "3x",
11 | "size" : "20x20"
12 | },
13 | {
14 | "idiom" : "iphone",
15 | "scale" : "2x",
16 | "size" : "29x29"
17 | },
18 | {
19 | "idiom" : "iphone",
20 | "scale" : "3x",
21 | "size" : "29x29"
22 | },
23 | {
24 | "idiom" : "iphone",
25 | "scale" : "2x",
26 | "size" : "40x40"
27 | },
28 | {
29 | "idiom" : "iphone",
30 | "scale" : "3x",
31 | "size" : "40x40"
32 | },
33 | {
34 | "idiom" : "iphone",
35 | "scale" : "2x",
36 | "size" : "60x60"
37 | },
38 | {
39 | "idiom" : "iphone",
40 | "scale" : "3x",
41 | "size" : "60x60"
42 | },
43 | {
44 | "idiom" : "ipad",
45 | "scale" : "1x",
46 | "size" : "20x20"
47 | },
48 | {
49 | "idiom" : "ipad",
50 | "scale" : "2x",
51 | "size" : "20x20"
52 | },
53 | {
54 | "idiom" : "ipad",
55 | "scale" : "1x",
56 | "size" : "29x29"
57 | },
58 | {
59 | "idiom" : "ipad",
60 | "scale" : "2x",
61 | "size" : "29x29"
62 | },
63 | {
64 | "idiom" : "ipad",
65 | "scale" : "1x",
66 | "size" : "40x40"
67 | },
68 | {
69 | "idiom" : "ipad",
70 | "scale" : "2x",
71 | "size" : "40x40"
72 | },
73 | {
74 | "idiom" : "ipad",
75 | "scale" : "1x",
76 | "size" : "76x76"
77 | },
78 | {
79 | "idiom" : "ipad",
80 | "scale" : "2x",
81 | "size" : "76x76"
82 | },
83 | {
84 | "idiom" : "ipad",
85 | "scale" : "2x",
86 | "size" : "83.5x83.5"
87 | },
88 | {
89 | "idiom" : "ios-marketing",
90 | "scale" : "1x",
91 | "size" : "1024x1024"
92 | }
93 | ],
94 | "info" : {
95 | "author" : "xcode",
96 | "version" : 1
97 | }
98 | }
99 |
--------------------------------------------------------------------------------
/sense-iOS/Assets.xcassets/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "info" : {
3 | "author" : "xcode",
4 | "version" : 1
5 | }
6 | }
7 |
--------------------------------------------------------------------------------
/sense-iOS/Base.lproj/LaunchScreen.storyboard:
--------------------------------------------------------------------------------
[LaunchScreen.storyboard — Interface Builder XML not captured in this dump]
--------------------------------------------------------------------------------
/sense-iOS/Base.lproj/Main.storyboard:
--------------------------------------------------------------------------------
[Main.storyboard — Interface Builder XML not captured in this dump]
--------------------------------------------------------------------------------
/sense-iOS/CameraTurnedOffViewController.swift:
--------------------------------------------------------------------------------
1 | import UIKit
2 |
3 | class CameraTurnedOffViewController: UIViewController {
4 |
5 | @IBOutlet weak var cameraDescriptionLabel: UILabel!
6 |
7 | override func viewDidLoad() {
8 | super.viewDidLoad()
9 | cameraDescriptionLabel.text = "Please allow the app to access your camera. Turn on Camera in your device settings."
10 | }
11 |
12 |
13 | @IBAction func goToSettingsAction(_ sender: UIButton) {
14 | guard let settingsURL = URL(string: UIApplication.openSettingsURLString) else {
15 | return
16 | }
17 |
18 | if UIApplication.shared.canOpenURL(settingsURL) {
19 | UIApplication.shared.open(settingsURL)
20 | }
21 | }
22 | }
23 |
--------------------------------------------------------------------------------
/sense-iOS/InferenceLocal.swift:
--------------------------------------------------------------------------------
1 | import Accelerate
2 | import AVFoundation
3 | import Foundation
4 | import TensorFlowLite
5 |
6 | class InferenceLocal {
7 |
8 | var delegate: GUIControllerDelegate?
9 | var modelRunning = false
10 | public static let inputWidth: Int = 160
11 | public static let inputHeight: Int = 224
12 | let batchSize = 1
13 | let inputChannels = 3
14 |
15 |
16 | struct Prediction {
17 | let classIndex: Int
18 | let score: Float
19 | let rect: CGRect
20 | }
21 |
22 | var startTime = CACurrentMediaTime()
23 |
24 | var numFrames = -2
25 | var frames = [Data?]()
26 | var framesForPrediction = [Data?]()
27 | var cachedOutputs0: [Data] = [Data]()
28 | var cachedOutputs1: [Data] = [Data]()
29 | var numFramePrediction = 4
30 | let modelPath = Bundle.main.path(forResource: "model", ofType: "tflite")
31 | var threadCount = 2
32 | var interpreter: Interpreter? = nil
33 | let queue1 = DispatchQueue(label: "com.sense-iOS.inference", qos: .userInteractive)
34 |
35 | init() {
36 | // Specify the options for the `Interpreter`.
37 | var options = Interpreter.Options()
38 | options.threadCount = threadCount
39 | do {
40 | // Create the `Interpreter`.
41 | let coremlDelegate = CoreMLDelegate()
42 | if let coremlDelegate = coremlDelegate {
43 | interpreter = try Interpreter(modelPath: modelPath!,
44 | options: options, delegates: [coremlDelegate])
45 | } else {
46 | let delegate = MetalDelegate()
47 | interpreter = try Interpreter(modelPath: modelPath!,
48 | options: options, delegates: [delegate])
49 | }
50 |
51 | // Allocate memory for the model's input `Tensor`s.
52 | try interpreter!.allocateTensors()
53 | } catch let error {
54 | print("Failed to create the interpreter with error: \(error.localizedDescription)")
55 | }
56 |
57 |         do {
58 |             let startDate = Date()  // warm-up invocation; also primes the cached outputs below
59 |             try interpreter!.invoke()
60 |             let interval = Date().timeIntervalSince(startDate) * 1000
61 |             print("Warm-up invoke took \(interval) ms")
62 |         } catch let error {
63 |             print("Failed to invoke interpreter: \(error.localizedDescription)")
64 |         }
65 | cachedOutputs0 = copyOutput()
66 | cachedOutputs1 = copyOutput()
67 | }
68 |
69 |     private func copyOutput() -> Array<Data> {
70 |         let numOutput = interpreter!.outputTensorCount
71 |         var outputArray = [Data]()
72 |         for i in 0 ..< numOutput / 2 {
73 | let output = try! interpreter!.output(at: i)
74 | outputArray.append(copyData(tensor: output))
75 | }
76 | return outputArray
77 | }
78 |
79 | func copyData(tensor: Tensor) -> Data {
80 | let res = Data(copyingBufferOf: copyArrayOutput(tensor: tensor))
81 | return res
82 | }
83 |
84 |     func copyArrayOutput(tensor: Tensor) -> Array<Float32> {
85 |         let outputSize = tensor.shape.dimensions.reduce(1, { x, y in x * y })
86 |         let outputData =
87 |             UnsafeMutableBufferPointer<Float32>.allocate(capacity: outputSize)
88 | tensor.data.copyBytes(to: outputData)
89 | let array = Array(outputData)
90 | outputData.deallocate()
91 | return array
92 | }
93 |
94 |
95 | public func collectFrames(imageBuffer: CVPixelBuffer) {
96 | if (self.frames.count > 7) {
97 | if !self.modelRunning {
98 |                 // Block the model from running while frames are being removed, to avoid concurrent-access issues.
99 | modelRunning = true
100 |
101 | while self.frames.count > 4 {
102 | frames.removeFirst(1)
103 | debugPrint("TIMING: remove frame")
104 | }
105 | modelRunning = false
106 | }
107 | }
108 | self.frames.append( rgbDataFromBuffer(
109 | imageBuffer,
110 | byteCount: batchSize * InferenceLocal.inputWidth * InferenceLocal.inputHeight * inputChannels,
111 | isModelQuantized: false))
112 | if (self.frames.count >= numFramePrediction && modelRunning == false) {
113 | self.modelRunning = true
114 |             // Copy the frames here so they cannot be replaced before the prediction runs.
115 |
116 | for i in 0 ... (numFramePrediction - 1){
117 | framesForPrediction.append(self.frames[i])
118 | }
119 | frames.removeFirst(4)
120 | queue1.async {
121 | self.startTime = CACurrentMediaTime()
122 | self.loadPrediction(frames:self.framesForPrediction) {
123 | res in
124 | self.delegate?.emitPredictions(global_output: res)
125 | }
126 | self.framesForPrediction.removeAll()
127 | self.modelRunning = false
128 | }
129 | }
130 | }
131 |     public func loadPrediction(frames: [Data?], completionHandler: @escaping (Array<Float32>) -> Void) {
132 | let time = CACurrentMediaTime()
133 | let model_output = evaluateModelEfficientnet(frames: frames)
134 | self.cachedOutputs1 = self.cachedOutputs0
135 | self.cachedOutputs0 = model_output
136 | debugPrint("TIMING: " + String(CACurrentMediaTime()) + " " + String(CACurrentMediaTime() - time))
137 |         let array = copyArrayOutput(tensor: try! interpreter!.output(at: 10))  // output 10 carries the classification logits
138 | completionHandler(array)
139 | }
140 |     // Feeds the four newest frames, plus the cached outputs of the previous two runs, back into the model as recurrent state.
141 |     public func evaluateModelEfficientnet(frames: [Data?]) -> [Data] {
142 | try! interpreter!.copy(frames[3]!, toInputAt: 0)
143 | try! interpreter!.copy(frames[2]!, toInputAt: 1)
144 | try! interpreter!.copy(frames[1]!, toInputAt: 2)
145 | try! interpreter!.copy(frames[0]!, toInputAt: 3)
146 | try! interpreter!.copy(cachedOutputs0[0], toInputAt: 4)
147 | try! interpreter!.copy(cachedOutputs0[1], toInputAt: 5)
148 | try! interpreter!.copy(cachedOutputs0[2], toInputAt: 6)
149 | try! interpreter!.copy(cachedOutputs0[3], toInputAt: 7)
150 | try! interpreter!.copy(cachedOutputs0[4], toInputAt: 8)
151 | try! interpreter!.copy(cachedOutputs0[5], toInputAt: 9)
152 | try! interpreter!.copy(cachedOutputs0[6], toInputAt: 10)
153 | try! interpreter!.copy(cachedOutputs1[6], toInputAt: 11)
154 | try! interpreter!.copy(cachedOutputs0[7], toInputAt: 12)
155 | try! interpreter!.copy(cachedOutputs1[7], toInputAt: 13)
156 | try! interpreter!.copy(cachedOutputs0[8], toInputAt: 14)
157 | try! interpreter!.copy(cachedOutputs1[8], toInputAt: 15)
158 | try! interpreter!.copy(cachedOutputs0[9], toInputAt: 16)
159 | try! interpreter!.invoke()
160 | return copyOutput()
161 | }
162 |
163 |
164 |
165 | /// Returns the RGB data representation of the given image buffer with the specified `byteCount`.
166 | ///
167 |     /// - Parameters:
168 | /// - buffer: The pixel buffer to convert to RGB data.
169 | /// - byteCount: The expected byte count for the RGB data calculated using the values that the
170 | /// model was trained on: `batchSize * imageWidth * imageHeight * componentsCount`.
171 | /// - isModelQuantized: Whether the model is quantized (i.e. fixed point values rather than
172 | /// floating point values).
173 | /// - Returns: The RGB data representation of the image buffer or `nil` if the buffer could not be
174 | /// converted.
175 | private func rgbDataFromBuffer(
176 | _ buffer: CVPixelBuffer,
177 | byteCount: Int,
178 | isModelQuantized: Bool
179 | ) -> Data? {
180 | CVPixelBufferLockBaseAddress(buffer, .readOnly)
181 | defer {
182 | CVPixelBufferUnlockBaseAddress(buffer, .readOnly)
183 | }
184 | guard let sourceData = CVPixelBufferGetBaseAddress(buffer) else {
185 | return nil
186 | }
187 |
188 | let width = CVPixelBufferGetWidth(buffer)
189 | let height = CVPixelBufferGetHeight(buffer)
190 | let sourceBytesPerRow = CVPixelBufferGetBytesPerRow(buffer)
191 | let destinationChannelCount = 3
192 | let destinationBytesPerRow = destinationChannelCount * width
193 |
194 | var sourceBuffer = vImage_Buffer(data: sourceData,
195 | height: vImagePixelCount(height),
196 | width: vImagePixelCount(width),
197 | rowBytes: sourceBytesPerRow)
198 |
199 | guard let destinationData = malloc(height * destinationBytesPerRow) else {
200 | print("Error: out of memory")
201 | return nil
202 | }
203 |
204 | defer {
205 | free(destinationData)
206 | }
207 |
208 | var destinationBuffer = vImage_Buffer(data: destinationData,
209 | height: vImagePixelCount(height),
210 | width: vImagePixelCount(width),
211 | rowBytes: destinationBytesPerRow)
212 |
213 | let pixelBufferFormat = CVPixelBufferGetPixelFormatType(buffer)
214 |
215 | switch (pixelBufferFormat) {
216 | case kCVPixelFormatType_32BGRA:
217 | vImageConvert_BGRA8888toRGB888(&sourceBuffer, &destinationBuffer, UInt32(kvImageNoFlags))
218 | case kCVPixelFormatType_32ARGB:
219 | vImageConvert_ARGB8888toRGB888(&sourceBuffer, &destinationBuffer, UInt32(kvImageNoFlags))
220 | case kCVPixelFormatType_32RGBA:
221 | vImageConvert_RGBA8888toRGB888(&sourceBuffer, &destinationBuffer, UInt32(kvImageNoFlags))
222 | default:
223 | // Unknown pixel format.
224 | return nil
225 | }
226 |
227 | let byteData = Data(bytes: destinationBuffer.data, count: destinationBuffer.rowBytes * height)
228 | if isModelQuantized {
229 | return byteData
230 | }
231 |
232 | // Not quantized, convert to floats
233 |         let bytes = Array<UInt8>(unsafeData: byteData)!
234 |         var floats = [Float]()
235 |         for i in 0 ..< bytes.count {
236 |             floats.append(Float(bytes[i]) / 255.0)  // assumed [0, 1] scaling, as in the TFLite image-classification examples this file follows
237 |         }
238 |         return Data(copyingBufferOf: floats)
239 |     }
240 | }
241 | 
242 | // MARK: - Extensions
243 | 
244 | extension Data {
245 |     /// Creates a new buffer by copying the buffer pointer of the given array.
246 |     ///
247 |     /// - Warning: The array's element type `T` must be trivial, i.e. copyable bit for bit with
248 |     ///     no indirection or reference-counting operations; otherwise reinterpreting data from
249 |     ///     the resulting buffer has undefined behavior.
250 |     init<T>(copyingBufferOf array: [T]) {
251 | self = array.withUnsafeBufferPointer(Data.init)
252 | }
253 | }
254 |
255 | extension Array {
256 | /// Creates a new array from the bytes of the given unsafe data.
257 | ///
258 | /// - Warning: The array's `Element` type must be trivial in that it can be copied bit for bit
259 | /// with no indirection or reference-counting operations; otherwise, copying the raw bytes in
260 | /// the `unsafeData`'s buffer to a new array returns an unsafe copy.
261 |     /// - Note: Returns `nil` if `unsafeData.count` is not a multiple of
262 |     ///     `MemoryLayout<Element>.stride`.
263 |     /// - Parameter unsafeData: The data containing the bytes to turn into an array.
264 |     init?(unsafeData: Data) {
265 |         guard unsafeData.count % MemoryLayout<Element>.stride == 0 else { return nil }
266 |         #if swift(>=5.0)
267 |         self = unsafeData.withUnsafeBytes { .init($0.bindMemory(to: Element.self)) }
268 |         #else
269 |         self = unsafeData.withUnsafeBytes {
270 |             .init(UnsafeBufferPointer<Element>(
271 |                 start: $0,
272 |                 count: unsafeData.count / MemoryLayout<Element>.stride
273 |             ))
274 |         }
275 |         #endif // swift(>=5.0)
276 |     }
277 | }
278 |
--------------------------------------------------------------------------------
/sense-iOS/InferenceModel.swift:
--------------------------------------------------------------------------------
1 | import UIKit
2 | import Vision
3 | import AVFoundation
4 | import CoreMedia
5 | import VideoToolbox
6 | import MediaPlayer
7 |
8 |
9 | protocol GUIControllerDelegate: AnyObject {
10 | func getOrientation() -> UIDeviceOrientation
11 | func emitPredictions(global_output: [Float32])
12 | }
13 |
14 | // Interface for ViewController
15 | protocol InferenceType {
16 | var cameraPermission: AVAuthorizationStatus { get }
17 | func requestCameraAccess(completion: @escaping (Bool) -> Void)
18 | }
19 |
20 |
21 | final class InferenceModel: InferenceType {
22 |
23 | // Class declarations
24 | let appDelegate: AppDelegate = UIApplication.shared.delegate as! AppDelegate
25 | var frameExtractor: FrameExtractor!
26 | var inference = InferenceLocal()
27 | var motionManager = MotionManager()
28 | var cameraPermission: AVAuthorizationStatus = AVCaptureDevice.authorizationStatus(for: .video)
29 |
30 | // Variable declarations for video processing
31 | var resizedPixelBuffer: CVPixelBuffer?
32 | var frameCapturingStartTime = CACurrentMediaTime()
33 | var lastPredictionTime = CACurrentMediaTime()
34 |
35 | // Variable declarations for motion control
36 | var deviceOrientation = UIDevice.current.orientation
37 | var inferenceStarted = false
38 | var cameraReady = false
39 | var viewAppeared = false
40 |
41 |     weak var delegate: WorkoutModelDelegate?
42 |     // Label lookup tables loaded from sensenet_labels.json
43 | var lab2int: [String:Int] = [:]
44 | var int2lab: [Int:String] = [:]
45 |
46 |
47 | // MARK: Internal
48 |
49 | init() {
50 | inference.delegate = self
51 | frameCapturingStartTime = CACurrentMediaTime()
52 | motionManager.delegate = self
53 | if let path = Bundle.main.path(forResource: "sensenet_labels", ofType: "json") {
54 | do {
55 | let data = try Data(contentsOf: URL(fileURLWithPath: path), options: .mappedIfSafe)
56 | let jsonResult = try JSONSerialization.jsonObject(with: data, options: .mutableLeaves)
57 |                 if let jsonResult = jsonResult as? Dictionary<String, Int> {
58 | lab2int = jsonResult
59 | int2lab = Dictionary(uniqueKeysWithValues: lab2int.map({ ($1, $0) }))
60 | }
61 |             } catch { print("Failed to load sensenet_labels.json: \(error)") }
62 | 
63 | }
64 | }
65 |
66 | func startInference() {
67 | UIApplication.shared.isIdleTimerDisabled = true
68 | // wait for camera to be ready
69 | Timer.scheduledTimer(withTimeInterval: 0.1, repeats: true) { timer in
70 | if self.cameraReady {
71 | self.inferenceStarted = true
72 | timer.invalidate()
73 | self.frameExtractor.delegate = self
74 | self.frameExtractor.start()
75 | self.lastPredictionTime = CACurrentMediaTime()
76 | }
77 | }
78 | }
79 |
80 | func goBackground() {
81 | if inferenceStarted {
82 | frameExtractor.stop()
83 | }
84 | }
85 |
86 | func leaveBackground() {
87 | if inferenceStarted {
88 | frameExtractor.delegate = self
89 | frameExtractor.start()
90 | }
91 | }
92 |
93 | func setUpCamera() {
94 | frameExtractor = FrameExtractor()
95 | frameExtractor.delegate = self
96 | frameExtractor.setUp { success in
97 | if success {
98 | self.frameExtractor.start()
99 | self.frameExtractor.stop()
100 | self.cameraReady = true
101 |             } else {
102 | print("error: did not succeed to start frame extractor")
103 | }
104 | }
105 | }
106 |
107 |
108 | // MARK: GUIControllerDelegate Methods
109 |
110 | func emitPredictions(global_output: [Float32]) {
111 | let time = CACurrentMediaTime()
112 |         debugPrint("TIMING: \(time - lastPredictionTime) s since last prediction")
113 | lastPredictionTime = time
114 |
115 | let softmax = self.softmax(logits: global_output)
116 | let maxIndice = softmax.argmax()
117 | if let maxPosition = maxIndice {
118 | let maxScore = softmax[maxPosition]
119 | var label = ""
120 | var score = ""
121 | if maxScore > 0.6 {
122 | label = "\(int2lab[maxPosition]!)"
123 |                 score = "\(Int(round(100 * maxScore)))%"
124 | }
125 | delegate?.showPrediction(label: label, score: score)
126 | }
127 | }
128 |
129 | func softmax(logits: [Float32]) -> [Float32] {
130 | var sumExps: Float32 = 0.0
131 | var exps = [Float32]()
132 | var softmax = [Float32]()
133 | for output in logits {
134 | let expValue = exp(output)
135 | exps.append(expValue)
136 | sumExps += expValue
137 | }
138 | for exp in exps {
139 | softmax.append(exp / sumExps)
140 | }
141 | return softmax
142 | }
143 |
144 |
145 | // MARK: WorkoutType Methods
146 |
147 | func requestCameraAccess(completion: @escaping (Bool) -> Void) {
148 | AVCaptureDevice.requestAccess(for: .video) { granted in
149 | completion(granted)
150 | }
151 | }
152 |
153 | private func processPrediction(pixelBuffer: CVPixelBuffer) {
154 | // Resize the input with Core Image to the desired output.
155 | guard let resizedPixelBuffer = resizePixelBuffer(pixelBuffer,
156 | width: InferenceLocal.inputWidth,
157 | height: InferenceLocal.inputHeight) else { return }
158 |
159 |
160 | // Rotate input accordingly and give it to our model.
161 | var rotatedBuffer: CVPixelBuffer?
162 | var noRotation = false
163 | var transform = CGAffineTransform(scaleX: -1, y: 1)
164 | switch self.deviceOrientation {
165 | case .portraitUpsideDown:
166 | rotatedBuffer = rotate90PixelBuffer(resizedPixelBuffer, factor: 2)
167 | transform = CGAffineTransform(scaleX: 1, y: 1)
168 | default:
169 | noRotation = true
170 | }
171 |
172 | if noRotation {
173 | let padded = resizedPixelBuffer
174 | delegate?.showDebugImage(padded, transform: transform)
175 | inference.collectFrames(imageBuffer: padded)
176 | } else {
177 | let padded = rotatedBuffer!
178 | delegate?.showDebugImage(padded, transform: transform)
179 | inference.collectFrames(imageBuffer: padded)
180 | }
181 | }
182 | }
183 |
184 |
185 | extension InferenceModel: GUIControllerDelegate{
186 |
187 | func getOrientation() -> UIDeviceOrientation {
188 | return self.deviceOrientation
189 | }
190 | }
191 |
192 | extension InferenceModel: MotionManagerDelegate {
193 | public func rotated(_ orientation: UIDeviceOrientation) {
194 | self.deviceOrientation = orientation
195 | // Handle rotation
196 | print(self.deviceOrientation)
197 | }
198 | }
199 |
200 |
201 | extension InferenceModel: FrameExtractorDelegate {
202 | func captured(_ capture: FrameExtractor, didCaptureVideoFrame pixelBuffer: CVPixelBuffer?) {
203 | if let pixelBuffer = pixelBuffer {
204 | DispatchQueue.global().async {
205 | self.processPrediction(pixelBuffer: pixelBuffer)
206 | }
207 | }
208 | }
209 | }
210 |
--------------------------------------------------------------------------------
/sense-iOS/Info.plist:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
3 | <plist version="1.0">
4 | <dict>
5 | 	<key>NSCameraUsageDescription</key>
6 | 	<string>This app uses the camera to perform on-device video recognition.</string>
7 | 	<key>CFBundleDevelopmentRegion</key>
8 | 	<string>$(DEVELOPMENT_LANGUAGE)</string>
9 | 	<key>CFBundleExecutable</key>
10 | 	<string>$(EXECUTABLE_NAME)</string>
11 | 	<key>CFBundleIdentifier</key>
12 | 	<string>$(PRODUCT_BUNDLE_IDENTIFIER)</string>
13 | 	<key>CFBundleInfoDictionaryVersion</key>
14 | 	<string>6.0</string>
15 | 	<key>CFBundleName</key>
16 | 	<string>$(PRODUCT_NAME)</string>
17 | 	<key>CFBundlePackageType</key>
18 | 	<string>$(PRODUCT_BUNDLE_PACKAGE_TYPE)</string>
19 | 	<key>CFBundleShortVersionString</key>
20 | 	<string>1.0</string>
21 | 	<key>CFBundleVersion</key>
22 | 	<string>1</string>
23 | 	<key>LSRequiresIPhoneOS</key>
24 | 	<true/>
25 | 	<key>UIApplicationSupportsIndirectInputEvents</key>
26 | 	<true/>
27 | 	<key>UILaunchStoryboardName</key>
28 | 	<string>LaunchScreen</string>
29 | 	<key>UIMainStoryboardFile</key>
30 | 	<string>Main</string>
31 | 	<key>UIRequiredDeviceCapabilities</key>
32 | 	<array>
33 | 		<string>armv7</string>
34 | 	</array>
35 | 	<key>UISupportedInterfaceOrientations</key>
36 | 	<array>
37 | 		<string>UIInterfaceOrientationPortrait</string>
38 | 		<string>UIInterfaceOrientationLandscapeLeft</string>
39 | 		<string>UIInterfaceOrientationLandscapeRight</string>
40 | 	</array>
41 | 	<key>UISupportedInterfaceOrientations~ipad</key>
42 | 	<array>
43 | 		<string>UIInterfaceOrientationPortrait</string>
44 | 		<string>UIInterfaceOrientationPortraitUpsideDown</string>
45 | 		<string>UIInterfaceOrientationLandscapeLeft</string>
46 | 		<string>UIInterfaceOrientationLandscapeRight</string>
47 | 	</array>
48 | </dict>
49 | </plist>
50 | 
--------------------------------------------------------------------------------
/sense-iOS/MotionManager.swift:
--------------------------------------------------------------------------------
1 | import UIKit
2 |
3 | public protocol MotionManagerDelegate: AnyObject {
4 | func rotated(_ orientation: UIDeviceOrientation)
5 | }
6 |
7 | class MotionManager: NSObject {
8 |
9 | var didRotate: ((Notification) -> Void)!
10 | weak var delegate: MotionManagerDelegate?
11 |
12 | override init() {
13 | super.init()
14 | setUpNotification()
15 | }
16 |
17 | func setUpNotification() {
18 | UIDevice.current.beginGeneratingDeviceOrientationNotifications()
19 | self.didRotate = { notification in
20 | let orientation = self.getOrientation()
21 | self.delegate?.rotated(orientation)
22 | }
23 | NotificationCenter.default.addObserver(forName: UIDevice.orientationDidChangeNotification,
24 | object: nil,
25 | queue: .main,
26 | using: self.didRotate)
27 | }
28 |
29 |     func getOrientation() -> UIDeviceOrientation {
30 |         // Forward the system-reported orientation; the delegate is notified on change.
31 |         return UIDevice.current.orientation
32 |     }
33 | 
34 | }
35 |
--------------------------------------------------------------------------------
/sense-iOS/VideoRecorder/CVPixelBuffer+Helpers.swift:
--------------------------------------------------------------------------------
1 | // https://github.com/hollance/CoreMLHelpers/blob/master/CoreMLHelpers/CVPixelBuffer%2BHelpers.swift
2 |
3 | import Foundation
4 | import Accelerate
5 |
6 | func resizePixelBuffer(_ srcPixelBuffer: CVPixelBuffer,
7 | cropX: Int,
8 | cropY: Int,
9 | cropWidth: Int,
10 | cropHeight: Int,
11 | scaleWidth: Int,
12 | scaleHeight: Int) -> CVPixelBuffer? {
13 |
14 | CVPixelBufferLockBaseAddress(srcPixelBuffer, CVPixelBufferLockFlags(rawValue: 0))
15 | defer { CVPixelBufferUnlockBaseAddress(srcPixelBuffer, CVPixelBufferLockFlags(rawValue: 0)) }
16 |
17 | guard let srcData = CVPixelBufferGetBaseAddress(srcPixelBuffer) else {
18 | print("Error: could not get pixel buffer base address")
19 | return nil
20 | }
21 |
22 | let srcBytesPerRow = CVPixelBufferGetBytesPerRow(srcPixelBuffer)
23 | let offset = cropY*srcBytesPerRow + cropX*4
24 | var srcBuffer = vImage_Buffer(data: srcData.advanced(by: offset),
25 | height: vImagePixelCount(cropHeight),
26 | width: vImagePixelCount(cropWidth),
27 | rowBytes: srcBytesPerRow)
28 |
29 | let destBytesPerRow = scaleWidth*4
30 | guard let destData = malloc(scaleHeight*destBytesPerRow) else {
31 | print("Error: out of memory")
32 | return nil
33 | }
34 | var destBuffer = vImage_Buffer(data: destData,
35 | height: vImagePixelCount(scaleHeight),
36 | width: vImagePixelCount(scaleWidth),
37 | rowBytes: destBytesPerRow)
38 |
39 | let error = vImageScale_ARGB8888(&srcBuffer, &destBuffer, nil, vImage_Flags(0))
40 | 
41 |     // The base address is already unlocked by the `defer` above; a second explicit unlock would be unbalanced.
42 | if error != kvImageNoError {
43 | print("Error: ", error)
44 | free(destData)
45 | return nil
46 | }
47 |
48 |     // Transfer ownership of destData to the new pixel buffer via a release callback.
49 | let releaseCallback: CVPixelBufferReleaseBytesCallback = { _, ptr in
50 | if let ptr = ptr {
51 | free(UnsafeMutableRawPointer(mutating: ptr))
52 | }
53 | }
54 |
55 | let pixelFormat = CVPixelBufferGetPixelFormatType(srcPixelBuffer)
56 | var dstPixelBuffer: CVPixelBuffer?
57 | let status = CVPixelBufferCreateWithBytes(nil, scaleWidth, scaleHeight,
58 | pixelFormat, destData,
59 | destBytesPerRow, releaseCallback,
60 | nil, nil, &dstPixelBuffer)
61 | if status != kCVReturnSuccess {
62 | print("Error: could not create new pixel buffer")
63 | free(destData)
64 | return nil
65 | }
66 | return dstPixelBuffer
67 | }
68 |
69 | func resizePixelBuffer(_ pixelBuffer: CVPixelBuffer,
70 | width: Int, height: Int) -> CVPixelBuffer? {
71 | return resizePixelBuffer(pixelBuffer, cropX: 0, cropY: 0,
72 | cropWidth: CVPixelBufferGetWidth(pixelBuffer),
73 | cropHeight: CVPixelBufferGetHeight(pixelBuffer),
74 | scaleWidth: width, scaleHeight: height)
75 | }
76 |
77 | func rotate90PixelBuffer(_ srcPixelBuffer: CVPixelBuffer, factor: UInt8) -> CVPixelBuffer? {
78 | let flags = CVPixelBufferLockFlags(rawValue: 0)
79 | guard kCVReturnSuccess == CVPixelBufferLockBaseAddress(srcPixelBuffer, flags) else {
80 | return nil
81 | }
82 | defer { CVPixelBufferUnlockBaseAddress(srcPixelBuffer, flags) }
83 |
84 | guard let srcData = CVPixelBufferGetBaseAddress(srcPixelBuffer) else {
85 | print("Error: could not get pixel buffer base address")
86 | return nil
87 | }
88 | let sourceWidth = CVPixelBufferGetWidth(srcPixelBuffer)
89 | let sourceHeight = CVPixelBufferGetHeight(srcPixelBuffer)
90 | var destWidth = sourceHeight
91 | var destHeight = sourceWidth
92 | var color = UInt8(0)
93 |
94 | if factor % 2 == 0 {
95 | destWidth = sourceWidth
96 | destHeight = sourceHeight
97 | }
98 |
99 | let srcBytesPerRow = CVPixelBufferGetBytesPerRow(srcPixelBuffer)
100 | var srcBuffer = vImage_Buffer(data: srcData,
101 | height: vImagePixelCount(sourceHeight),
102 | width: vImagePixelCount(sourceWidth),
103 | rowBytes: srcBytesPerRow)
104 |
105 | let destBytesPerRow = destWidth*4
106 | guard let destData = malloc(destHeight*destBytesPerRow) else {
107 | print("Error: out of memory")
108 | return nil
109 | }
110 | var destBuffer = vImage_Buffer(data: destData,
111 | height: vImagePixelCount(destHeight),
112 | width: vImagePixelCount(destWidth),
113 | rowBytes: destBytesPerRow)
114 |
115 | let error = vImageRotate90_ARGB8888(&srcBuffer, &destBuffer, factor, &color, vImage_Flags(0))
116 | if error != kvImageNoError {
117 | print("Error:", error)
118 | free(destData)
119 | return nil
120 | }
121 |
122 | let releaseCallback: CVPixelBufferReleaseBytesCallback = { _, ptr in
123 | if let ptr = ptr {
124 | free(UnsafeMutableRawPointer(mutating: ptr))
125 | }
126 | }
127 |
128 | let pixelFormat = CVPixelBufferGetPixelFormatType(srcPixelBuffer)
129 | var dstPixelBuffer: CVPixelBuffer?
130 | let status = CVPixelBufferCreateWithBytes(nil, destWidth, destHeight,
131 | pixelFormat, destData,
132 | destBytesPerRow, releaseCallback,
133 | nil, nil, &dstPixelBuffer)
134 | if status != kCVReturnSuccess {
135 | print("Error: could not create new pixel buffer")
136 | free(destData)
137 | return nil
138 | }
139 | return dstPixelBuffer
140 | }
141 |
142 | func padPixelBuffer(_ srcPixelBuffer: CVPixelBuffer,
143 | destWidth: Int,
144 | destHeight: Int) -> CVPixelBuffer? {
145 |
146 | CVPixelBufferLockBaseAddress(srcPixelBuffer, CVPixelBufferLockFlags(rawValue: 0))
147 |
148 | let srcBytesPerRow = CVPixelBufferGetBytesPerRow(srcPixelBuffer)
149 | let srcWidth = CVPixelBufferGetWidth(srcPixelBuffer)
150 | let srcHeight = CVPixelBufferGetHeight(srcPixelBuffer)
151 | let pixelFormat = CVPixelBufferGetPixelFormatType(srcPixelBuffer)
152 | let srcBaseAddress = CVPixelBufferGetBaseAddress(srcPixelBuffer)
153 |
154 | var paddedPixelBuffer: CVPixelBuffer?
155 |
156 | let srcBytes = srcBaseAddress!.assumingMemoryBound(to: UInt8.self)
157 |     let destBuffer = calloc(4*destHeight*destWidth, MemoryLayout<UInt8>.size)
158 | 
159 |     // Copy each source row into the horizontal center of the wider destination row.
160 |     for row in 0..<srcHeight {
161 |         let offset_dest = 4*MemoryLayout<UInt8>.size*((destWidth - srcWidth)/2 + row*destWidth)
162 |         let offset_src = 4*MemoryLayout<UInt8>.size*(row*srcWidth)
163 | memcpy(destBuffer?.advanced(by: offset_dest), srcBytes.advanced(by: offset_src), srcBytesPerRow)
164 | }
165 |
166 | CVPixelBufferUnlockBaseAddress(srcPixelBuffer, CVPixelBufferLockFlags(rawValue: 0))
167 |
168 | let destBytesPerRow = 4*destWidth
169 |
170 | let releaseCallback: CVPixelBufferReleaseBytesCallback = { _, ptr in
171 | if let ptr = ptr {
172 | free(UnsafeMutableRawPointer(mutating: ptr))
173 | }
174 | }
175 |
176 |     let status = CVPixelBufferCreateWithBytes(nil, destWidth, destHeight,
177 | pixelFormat, destBuffer!,
178 | destBytesPerRow, releaseCallback,
179 | nil, nil, &paddedPixelBuffer)
180 |
181 | if status != kCVReturnSuccess {
182 | print("Error: could not create new pixel buffer")
183 | free(destBuffer)
184 | return nil
185 | }
186 |
187 | return paddedPixelBuffer
188 | }
189 |
--------------------------------------------------------------------------------
/sense-iOS/VideoRecorder/FrameExtraction.swift:
--------------------------------------------------------------------------------
1 | import UIKit
2 | import AVFoundation
3 | import CoreVideo
4 | import Accelerate
5 |
6 | public protocol FrameExtractorDelegate: AnyObject {
7 | func captured(_ capture: FrameExtractor, didCaptureVideoFrame: CVPixelBuffer?)
8 | }
9 |
10 | public class FrameExtractor: NSObject {
11 | weak var delegate: FrameExtractorDelegate?
12 | public var fps = 16
13 | public var deviceOrientation = "portrait"
14 |
15 | private var position = AVCaptureDevice.Position.front
16 | private let quality = AVCaptureSession.Preset.vga640x480
17 |
18 | private var permissionGranted = false
19 | private let sessionQueue = DispatchQueue(label: "session queue")
20 | private let captureSession = AVCaptureSession()
21 | private let videoOutput = AVCaptureVideoDataOutput()
22 | private let context = CIContext()
23 |
24 |     private var lastTimestamp = CACurrentMediaTime()
25 | 
26 | override init() { }
27 |
28 | public func setUp(completion: @escaping (Bool) -> Void) {
29 | checkPermission()
30 |
31 | sessionQueue.async {
32 | let success = self.configureSession()
33 | DispatchQueue.main.async {
34 | completion(success)
35 | }
36 | }
37 | }
38 |
39 | // MARK: AVSession configuration
40 | private func checkPermission() {
41 | switch AVCaptureDevice.authorizationStatus(for: AVMediaType.video) {
42 | case .authorized:
43 | permissionGranted = true
44 | default:
45 | permissionGranted = false
46 | }
47 | }
48 |
49 | func configureSession() -> Bool {
50 | guard permissionGranted else { return false }
51 | captureSession.beginConfiguration()
52 | captureSession.sessionPreset = quality
53 | guard let captureDevice = selectCaptureDevice() else { return false }
54 | guard let captureDeviceInput = try? AVCaptureDeviceInput(device: captureDevice) else { return false }
55 | guard captureSession.canAddInput(captureDeviceInput) else { return false }
56 | captureSession.addInput(captureDeviceInput)
57 |
58 | let settings: [String : Any] = [
59 | kCVPixelBufferPixelFormatTypeKey as String: NSNumber(value: kCVPixelFormatType_32BGRA)
60 | ]
61 |
62 | videoOutput.setSampleBufferDelegate(self, queue: sessionQueue)
63 | videoOutput.alwaysDiscardsLateVideoFrames = true
64 | videoOutput.videoSettings = settings
65 |
66 | guard captureSession.canAddOutput(videoOutput) else { return false }
67 | captureSession.addOutput(videoOutput)
68 |
69 | videoOutput.connection(with: AVMediaType.video)?.videoOrientation = .portrait
70 | videoOutput.connection(with: AVMediaType.video)?.isVideoMirrored = false
71 | captureSession.commitConfiguration()
72 | return true
73 | }
74 |
75 | private func selectCaptureDevice() -> AVCaptureDevice? {
76 | if position == .front {
77 | return AVCaptureDevice.default(.builtInWideAngleCamera,
78 | for: AVMediaType.video,
79 | position: .front)
80 | }
81 | else {
82 | return AVCaptureDevice.default(.builtInWideAngleCamera,
83 | for: AVMediaType.video,
84 | position: .back)
85 | }
86 |
87 | }
88 |
89 | public func start() {
90 | if (!captureSession.isRunning) {
91 | captureSession.startRunning()
92 | }
93 | }
94 |
95 | public func stop() {
96 | if (captureSession.isRunning) {
97 | captureSession.stopRunning()
98 | }
99 | }
100 | }
101 |
102 | extension FrameExtractor: AVCaptureVideoDataOutputSampleBufferDelegate {
103 | public func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
104 | // Because lowering the capture device's FPS looks ugly in the preview,
105 | // we capture at full speed but only call the delegate at its desired
106 | // framerate.
107 | let currentTime = CACurrentMediaTime()
108 |
109 | // send image to neural network
110 | let deltaTime = currentTime - lastTimestamp
111 | if deltaTime >= (1 / Double(fps)) {
112 |             // Advance in fixed 1/fps steps instead of snapping to the current time, so the delivery rate does not drift.
113 |             while lastTimestamp + (1 / Double(fps)) <= currentTime {
114 | lastTimestamp = lastTimestamp + (1 / Double(fps))
115 | }
116 | let imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer)
117 | delegate?.captured(self, didCaptureVideoFrame: imageBuffer)
118 | }
119 | }
120 |
121 | public func captureOutput(_ output: AVCaptureOutput, didDrop sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
122 | }
123 | }
124 |
--------------------------------------------------------------------------------
/sense-iOS/ViewController.swift:
--------------------------------------------------------------------------------
1 | import UIKit
2 | import VideoToolbox
3 |
4 | protocol WorkoutModelDelegate: AnyObject {
5 | func showDebugImage(_ resizedPixelBuffer: CVPixelBuffer, transform:CGAffineTransform)
6 | func showPrediction(label: String, score: String)
7 | }
8 |
9 | protocol WorkoutPreviewDelegate: AnyObject {
10 | func cameraPermissionManager()
11 | }
12 |
13 | class ViewController: UIViewController, WorkoutModelDelegate {
14 | @IBOutlet weak var imageView: UIImageView!
15 | @IBOutlet weak var label: UILabel!
16 | @IBOutlet weak var score: UILabel!
17 | let model = InferenceModel()
18 |
19 | override func viewDidLoad() {
20 | super.viewDidLoad()
21 | model.delegate = self
22 | cameraPermissionManager()
23 | model.startInference()
24 | }
25 |
26 |     override func viewWillDisappear(_ animated: Bool) {
27 |         super.viewWillDisappear(animated)
28 |         model.frameExtractor?.stop()
29 |     }
30 | 
31 |     override func viewWillAppear(_ animated: Bool) {
32 |         super.viewWillAppear(animated)
33 |         if model.inferenceStarted {
34 |             model.frameExtractor.start()
35 |         }
36 |     }
35 |
36 |
37 | private func navigateToCameraPermission() {
38 | guard let cameraPermissionVC = UIStoryboard(name: "Main", bundle: nil).instantiateViewController(withIdentifier: "cameraTurnedOffViewController") as? CameraTurnedOffViewController else {
39 | return
40 | }
41 | navigationController?.pushViewController(cameraPermissionVC, animated: true)
42 |         // Pushed onto the navigation stack so the user is not stuck if they dismiss this camera-permission screen.
43 | }
44 |
45 |     func showDebugImage(_ resizedPixelBuffer: CVPixelBuffer, transform: CGAffineTransform) {
46 |         DispatchQueue.main.async {
47 |             // Wrap the pixel buffer in a CIImage-backed UIImage for display.
48 |             let img = UIImage(ciImage: CIImage(cvPixelBuffer: resizedPixelBuffer),
49 |                               scale: 1, orientation: .upMirrored)
50 |             self.imageView.image = img
51 |             // Flip the preview to match the mirrored front-camera feed.
52 |             self.imageView.transform = transform
53 |         }
54 |     }
55 |
56 | func showPrediction(label: String, score: String) {
57 | DispatchQueue.main.async {
58 | self.label.text = label
59 | self.score.text = score
60 | }
61 | }
62 | }
63 |
64 | extension ViewController: WorkoutPreviewDelegate {
65 | func cameraPermissionManager() {
66 | // case to show tutorial or camera permission
67 | switch self.model.cameraPermission {
68 | case .authorized:
69 | self.model.setUpCamera()
70 | self.model.startInference()
71 | case .notDetermined:
72 | self.model.requestCameraAccess { granted in
73 | DispatchQueue.main.sync {
74 | if granted {
75 | self.model.setUpCamera()
76 | self.model.startInference()
77 | } else {
78 | self.navigationController?.setNavigationBarHidden(false, animated: false)
79 | self.navigateToCameraPermission()
80 | }
81 | }
82 | }
83 | default:
84 | self.navigationController?.setNavigationBarHidden(false, animated: false)
85 | navigateToCameraPermission()
86 | }
87 | }
88 | }
89 |
90 |
91 |
92 |
--------------------------------------------------------------------------------
/sense-iOS/sensenet_labels.json:
--------------------------------------------------------------------------------
1 | {
2 | "\"Drinking\" gesture": 0,
3 | "\"Sleeping\" gesture": 1,
4 | "Calling someone closer": 2,
5 | "Covering ears": 3,
6 | "Covering eyes": 4,
7 | "Dabbing": 5,
8 | "Doing nothing": 6,
9 | "Doing other things": 7,
10 | "Facepalming": 8,
11 | "No person visible": 9,
12 | "Nodding": 10,
13 | "Pointing left": 11,
14 | "Pointing right": 12,
15 | "Pointing to the camera": 13,
16 | "Putting finger to mouth": 14,
17 | "Rolling hand": 15,
18 | "Scratching": 16,
19 | "Shaking head": 17,
20 | "Showing the middle finger": 18,
21 | "Swiping down": 19,
22 | "Swiping down (with two hands)": 20,
23 | "Swiping left": 21,
24 | "Swiping right": 22,
25 | "Swiping up": 23,
26 | "Swiping up (with two hands)": 24,
27 | "Thumb down": 25,
28 | "Thumb up": 26,
29 | "Waving": 27,
30 | "Zooming in": 28,
31 | "Zooming out": 29
32 | }
33 |
--------------------------------------------------------------------------------