├── .gitignore
├── LICENSE.md
├── Media
│   └── Face_Morph_Preview.gif
├── Package.swift
├── README.md
└── Sources
    ├── BTShared
    │   ├── ARView+Occlusion.swift
    │   ├── BodyTrackingError.swift
    │   └── Utilities.swift
    ├── BodyTracking
    │   ├── Body2D
    │   │   ├── BodyTracker2D.swift
    │   │   ├── BodyTracking2DComponent.swift
    │   │   ├── BodyTracking2DSystem.swift
    │   │   └── TwoDBodyJoint.swift
    │   └── Body3D
    │       ├── Body3D
    │       │   ├── Body3DComponent.swift
    │       │   └── BodyEntity3D.swift
    │       ├── BodyAnchor
    │       │   ├── BodyAnchor.swift
    │       │   └── BodyAnchorComponent.swift
    │       ├── BodyTracking3DSystem.swift
    │       ├── Configuration.swift
    │       └── ThreeDBodyJoint.swift
    ├── FaceTracking
    │   ├── FaceAnchor.swift
    │   ├── FaceAnchorComponent.swift
    │   ├── FaceMorphedEntity.swift
    │   ├── FaceSystem.swift
    │   └── FaceTrackingConfig.swift
    └── HandTracking
        ├── Hand2D
        │   ├── FrameRateRegulator.swift
        │   ├── Hand2DComponent.swift
        │   ├── HandDetector.swift
        │   ├── HandJoint.swift
        │   ├── HandTracker2D.swift
        │   └── HandTracking2DSystem.swift
        └── Hand3D
            ├── CVPixelBuffer+Helpers.swift
            ├── Hand3D
            │   ├── Hand3DComponent.swift
            │   └── HandTracker3D.swift
            ├── HandAnchor
            │   ├── HandAnchor.swift
            │   └── HandAnchorComponent.swift
            └── HandTracking3DSystem.swift
/.gitignore:
--------------------------------------------------------------------------------
1 | .DS_Store
2 | /.build
3 | /Packages
4 | /*.xcodeproj
5 | .swiftpm/
6 | xcuserdata/
7 | xcshareddata/
8 |
--------------------------------------------------------------------------------
/LICENSE.md:
--------------------------------------------------------------------------------
1 | Copyright (c) 2024 Grant M Jarvis
2 |
3 | Permission is hereby granted, free of charge, to any person obtaining a copy
4 | of this software and associated documentation files (the "Software"), to deal
5 | in the Software without restriction, including without limitation the rights
6 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
7 | copies of the Software, and to permit persons to whom the Software is
8 | furnished to do so, subject to the following conditions:
9 |
10 | The above copyright notice and this permission notice shall be included in
11 | all copies or substantial portions of the Software.
12 |
13 | Notwithstanding the foregoing, you may not use, copy, modify, merge, publish,
14 | distribute, sublicense, create a derivative work, and/or sell copies of the
15 | Software in any work that is designed, intended, or marketed for pedagogical or
16 | instructional purposes related to programming, coding, application development,
17 | or information technology. Permission for such use, copying, modification,
18 | merger, publication, distribution, sublicensing, creation of derivative works,
19 | or sale is expressly withheld.
20 |
21 | This project and source code may use libraries or frameworks that are
22 | released under various Open-Source licenses. Use of those libraries and
23 | frameworks are governed by their own individual licenses.
24 |
25 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
26 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
27 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
28 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
29 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
30 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
31 | THE SOFTWARE.
32 |
--------------------------------------------------------------------------------
/Media/Face_Morph_Preview.gif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Reality-Dev/BodyTracking/783ccefca4c31659e795c7d9d79ea4edd735f5fa/Media/Face_Morph_Preview.gif
--------------------------------------------------------------------------------
/Package.swift:
--------------------------------------------------------------------------------
1 | // swift-tools-version:5.9
2 | // The swift-tools-version declares the minimum version of Swift required to build this package.
3 |
4 | import PackageDescription
5 |
6 | let package = Package(
7 | name: "BodyTracking",
8 | platforms: [.iOS(.v15)],
9 | products: [
10 | .library(name: "BodyTracking", targets: ["BodyTracking"]),
11 | .library(name: "FaceTracking", targets: ["FaceTracking"]),
12 | .library(name: "HandTracking", targets: ["HandTracking"]),
13 | ],
14 | dependencies: [
15 | .package(url: "https://github.com/Reality-Dev/RealityKit-Utilities", from: "1.0.0"),
16 | .package(url: "https://github.com/Reality-Dev/RealityMorpher", exact: "2.0.11"),
17 | ],
18 | targets: [
19 | .target(name: "BodyTracking",
20 | dependencies: [.target(name: "BTShared"),
21 | .product(name: "RKUtilities", package: "RealityKit-Utilities")]),
22 | .target(name: "FaceTracking",
23 | dependencies: [.target(name: "BTShared"),
24 | .product(name: "RealityMorpher", package: "RealityMorpher"),
25 | .product(name: "RKUtilities", package: "RealityKit-Utilities")]),
26 | .target(name: "HandTracking",
27 | dependencies: [.target(name: "BTShared"),
28 | .product(name: "RKUtilities", package: "RealityKit-Utilities")]),
29 | .target(name: "BTShared",
30 | dependencies: [.product(name: "RKUtilities", package: "RealityKit-Utilities")]),
31 | ],
32 | swiftLanguageVersions: [.v5]
33 | )
34 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # BodyTracking
2 |
3 | This package enables easy, convenient body tracking in RealityKit.
4 |
 5 | ![Face Morph Preview](Media/Face_Morph_Preview.gif)
6 |
7 |
8 |
9 |
10 |
11 | ## Usage
12 |
13 | See [this downloadable course](https://www.realityacademy.pro/course/body-tracking), which includes expert guidance and examples.
14 |
15 | ## What's Included
16 |
17 | This package includes code for:
18 | - 3D Body Tracking
19 | - 2D Body Tracking
20 | - 2D Hand Tracking
21 | - 3D Hand Tracking
22 | - 3D Face Tracking
23 | - Face Geometry Morphing
24 | - 3D Eye Tracking
25 | - People Occlusion
26 |
27 | For character animation, see [RKLoader](https://github.com/Reality-Dev/RealityKit-Asset-Loading)
28 | ``` swift
29 | import RKLoader
30 |
31 | var character: BodyTrackedEntity?
32 |
33 | ...
34 |
35 | func loadCharacter() {
36 | Task(priority: .userInitiated) { [weak self] in
37 | let character = try await RKLoader.loadBodyTrackedEntityAsync(named: "character")
38 |
39 | self?.character = character
40 |
41 | let bodyAnchor = AnchorEntity(.body)
42 |
43 | self?.scene.addAnchor(bodyAnchor)
44 |
45 | bodyAnchor.addChild(character)
46 | }
47 | }
48 | ```
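
For 3D body tracking with this package's own API, a minimal sketch looks like this (assuming an existing `arView: ARView`; the function name is illustrative):
``` swift
import BodyTracking
import RealityKit

func setUpBodyTracking(arView: ARView) throws {
    // Requires a device with an A12 chip or later.
    try arView.runBodyTrackingConfig3D()

    // The BodyAnchor follows the tracked body.
    let bodyAnchor = BodyAnchor(session: arView.session)
    arView.scene.addAnchor(bodyAnchor)

    // Joint transforms on this entity update every frame.
    let bodyEntity = BodyEntity3D(smoothingAmount: 0.7)
    bodyAnchor.attach(bodyEntity: bodyEntity)

    // Attach a sphere that follows the right hand.
    let sphere = ModelEntity(mesh: .generateSphere(radius: 0.05))
    bodyEntity.attach(entity: sphere, to: .right_hand_joint)
}
```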
49 |
50 | ## Requirements
51 |
52 | - iOS 15
53 | - A12 processor or later
54 | - Swift 5.9
55 | - Xcode 15
56 |
57 | ## Installation
58 |
59 | ### Swift Package Manager
60 |
61 | Add the URL of this repository to your Xcode project under:
62 | File > Add Packages
63 | `https://github.com/Reality-Dev/BodyTracking`
64 |
65 | ## Support
66 |
67 | If you have questions, feel free to message me on [GitHub](https://github.com/Reality-Dev) or on [Twitter](https://twitter.com/GMJ4K).
68 |
69 |
70 | ## More
71 |
72 | Pull Requests are welcome and encouraged.
73 |
--------------------------------------------------------------------------------
/Sources/BTShared/ARView+Occlusion.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ARView+Occlusion.swift
3 | // BodyEntity-Example
4 | //
5 | // Created by Grant Jarvis on 5/2/21.
6 | //
7 |
8 | import ARKit
9 | import RealityKit
10 |
11 | // This enables or disables person segmentation occlusion.
12 | // Person segmentation is different from the 3D occlusion shapes inside of ARSUIView3D.
13 | public extension ARView {
14 | /// Use this function to enable person segmentation occlusion
15 | /// - Parameter withDepth: If withDepth is false, then a person always shows up in front of virtual content, no matter how far away the person or the content is. If withDepth is true, then the person shows up in front only where it is judged to be *closer* to the camera than the virtual content.
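    /// Example (a minimal sketch, assuming an existing `arView: ARView`):
    /// ```swift
    /// do { try arView.enableOcclusion(withDepth: true) }
    /// catch { print(error.localizedDescription) }
    /// ```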
16 | func enableOcclusion(withDepth: Bool = true) throws {
17 | var config: ARConfiguration
18 | guard ARWorldTrackingConfiguration.supportsFrameSemantics(.personSegmentation) else {
19 | let errorMessage = "This device does Not support person segmentation."
20 | print(errorMessage)
21 | throw BodyTrackingError.unsupportedFrameSemantics("personSegmentation frame semantic is unavailable.")
22 | }
23 | if let configuration = session.configuration {
24 | config = configuration
25 | } else {
26 | config = ARWorldTrackingConfiguration()
27 | }
28 | if withDepth {
29 | config.frameSemantics.insert(.personSegmentationWithDepth)
30 | } else {
31 | config.frameSemantics.insert(.personSegmentation)
32 | }
33 | session.run(config)
34 | }
35 |
36 | /// Use this function to disable person segmentation occlusion
37 | func disableOcclusion() {
38 | var config: ARConfiguration
39 | if let configuration = session.configuration {
40 | config = configuration
41 | } else {
42 | config = ARWorldTrackingConfiguration()
43 | }
44 | config.frameSemantics.remove([.personSegmentationWithDepth, .personSegmentation])
45 | session.run(config)
46 | }
47 | }
48 |
--------------------------------------------------------------------------------
/Sources/BTShared/BodyTrackingError.swift:
--------------------------------------------------------------------------------
1 | //
2 | // File.swift
3 | //
4 | //
5 | // Created by Grant Jarvis on 1/1/24.
6 | //
7 |
8 | import Foundation
9 |
10 | public enum BodyTrackingError: Error, LocalizedError {
11 | case unsupportedFrameSemantics(String)
12 |
13 | case unsupportedConfiguration(String)
14 |
15 | public var errorDescription: String? {
16 | switch self {
17 | case .unsupportedFrameSemantics(let comment):
18 | return NSLocalizedString(
19 | "The provided frame semantics are not available",
20 | comment: comment
21 | )
22 | case .unsupportedConfiguration(let comment):
23 | return NSLocalizedString(
24 | "The provided configuration is not available",
25 | comment: comment
26 | )
27 | }
28 | }
29 | }
30 |
--------------------------------------------------------------------------------
/Sources/BTShared/Utilities.swift:
--------------------------------------------------------------------------------
1 | //
2 | // Utilities.swift
3 | // BodyTracking-Example
4 | //
5 | // Created by Grant Jarvis on 11/13/21.
6 | //
7 |
8 | import RealityKit
9 | import RKUtilities
10 | import simd
11 | import UIKit
12 |
13 | // MARK: - Alerts
14 |
15 | public extension UIView {
16 | func showAlert(title: String, message: String) {
17 | guard
18 | let windowScene = UIApplication.shared.connectedScenes.first as? UIWindowScene,
19 | let mainWindow = windowScene.windows.first(where: { $0.isKeyWindow }) else { return }
20 |
21 | let alert = UIAlertController(title: title, message: message, preferredStyle: .alert)
22 | alert.addAction(UIAlertAction(title: "Dismiss", style: .default, handler: nil))
23 | // arView.window is nil the way we have set up this example project.
24 | mainWindow.rootViewController?.present(alert, animated: true, completion: nil)
25 | }
26 | }
27 |
28 | // MARK: - Coordinate Space Conversion
29 |
30 | public extension ARView {
31 | func convertAVFoundationToScreenSpace(_ point: CGPoint) -> CGPoint {
32 | // Convert from normalized AVFoundation coordinates (0,0 top-left, 1,1 bottom-right)
33 | // to screen-space coordinates.
34 | if
35 | let arFrame = session.currentFrame,
36 | let interfaceOrientation = window?.windowScene?.interfaceOrientation
37 | {
38 | let transform = arFrame.displayTransform(for: interfaceOrientation, viewportSize: frame.size)
39 | let normalizedCenter = point.applying(transform)
40 | let center = normalizedCenter.applying(CGAffineTransform.identity.scaledBy(x: frame.width, y: frame.height))
41 | return center
42 | } else {
43 | return CGPoint()
44 | }
45 | }
46 |
47 | func convertScreenSpaceToAVFoundation(_ point: CGPoint) -> CGPoint? {
48 | // Convert to normalized pixel coordinates (0,0 top-left, 1,1 bottom-right)
49 | // from screen-space coordinates.
50 | guard
51 | let arFrame = session.currentFrame,
52 | let interfaceOrientation = window?.windowScene?.interfaceOrientation
53 | else { return nil }
54 |
55 | let inverseScaleTransform = CGAffineTransform.identity.scaledBy(x: frame.width, y: frame.height).inverted()
56 | let invertedDisplayTransform = arFrame.displayTransform(for: interfaceOrientation, viewportSize: frame.size).inverted()
57 | let unScaledPoint = point.applying(inverseScaleTransform)
58 | let normalizedCenter = unScaledPoint.applying(invertedDisplayTransform)
59 | return normalizedCenter
60 | }
61 | }
62 |
63 | // MARK: - SafeGuarding
64 |
65 | internal extension Collection {
66 | subscript(safe index: Index) -> Element? {
67 | return indices.contains(index) ? self[index] : nil
68 | }
69 | }
70 |
71 | // MARK: - WeakCollection
72 |
73 | // Created by Vladislav Grigoryev on 27.05.2020.
74 | // Copyright © 2020 GORA Studio. https://gora.studio
75 | //
76 | // Permission is hereby granted, free of charge, to any person obtaining a copy
77 | // of this software and associated documentation files (the "Software"), to deal
78 | // in the Software without restriction, including without limitation the rights
79 | // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
80 | // copies of the Software, and to permit persons to whom the Software is
81 | // furnished to do so, subject to the following conditions:
82 | //
83 | // The above copyright notice and this permission notice shall be included in
84 | // all copies or substantial portions of the Software.
85 | //
86 | // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
87 | // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
88 | // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
89 | // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
90 | // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
91 | // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
92 | // THE SOFTWARE.
93 |
94 | import Foundation
95 |
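/// A property wrapper that stores only weak references, so deallocated elements drop out of the collection automatically. Used elsewhere in this package, e.g. `@WeakCollection var participatingTrackers = [BodyTracker2D]()`.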
96 | @propertyWrapper
 97 | public struct WeakCollection<Value: AnyObject> {
 98 |     private var _wrappedValue: [Weak<Value>]
99 |
100 | public var wrappedValue: [Value] {
101 | get { _wrappedValue.lazy.compactMap { $0.get() } }
102 | set { _wrappedValue = newValue.map(Weak.init) }
103 | }
104 |
105 | public init(wrappedValue: [Value]) { _wrappedValue = wrappedValue.map(Weak.init) }
106 |
107 | public mutating func compact() { _wrappedValue = { _wrappedValue }() }
108 | }
109 |
110 | @propertyWrapper
111 | public final class Weak<Object: AnyObject> {
112 | private weak var _wrappedValue: AnyObject?
113 |
114 | public var wrappedValue: Object? {
115 | get { _wrappedValue as? Object }
116 | set { _wrappedValue = newValue }
117 | }
118 |
119 | public init(_ object: Object) { _wrappedValue = object }
120 |
121 | public init(wrappedValue: Object?) { _wrappedValue = wrappedValue }
122 |
123 | public func get() -> Object? { wrappedValue }
124 | }
125 |
--------------------------------------------------------------------------------
/Sources/BodyTracking/Body2D/BodyTracker2D.swift:
--------------------------------------------------------------------------------
1 |
2 | import BTShared
3 | import CoreGraphics
4 | import RealityKit
5 | import RKUtilities
6 | import UIKit
7 |
8 | public class BodyTracker2D: NSObject, HasBody2D {
9 | public internal(set) var body2D = BodyTracking2DComponent()
10 |
11 | public required init(arView: ARView) {
12 | super.init()
13 |
14 | BodyTracking2DSystem.shared.registerSystem(with: arView)
15 |
16 | BodyTracking2DSystem.shared.participatingTrackers.append(self)
17 |
18 | populateJointPositions()
19 | }
20 |
21 | override required init() {
22 | fatalError("init() has not been implemented")
23 | }
24 |
25 | /// Destroy this Entity and its references to any ARViews
26 | /// This helps prevent memory leaks.
27 | public func destroy() {
28 | body2D.jointScreenPositions = [:]
29 |
30 | body2D.trackedViews.forEach { view in
31 | view.value.removeFromSuperview()
32 | }
33 |
34 | body2D.trackedViews.removeAll()
35 | }
36 |
37 | private func populateJointPositions() {
38 | TwoDBodyJoint.allCases.forEach {
39 | body2D.jointScreenPositions[$0] = CGPoint()
40 | }
41 | }
42 |
43 | /// Allows only one view per joint.
44 | /// - This will add `thisView` to ARView automatically.
45 | /// - If you would like to attach more than one view per joint, then try attaching additional views to the view that is already attached to this joint.
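    /// Example (sketch; assumes a session already running a body-detection configuration, and `marker` is an illustrative view):
    /// ```swift
    /// let tracker = BodyTracker2D(arView: arView)
    /// let marker = UIView(frame: CGRect(x: 0, y: 0, width: 20, height: 20))
    /// tracker.attach(view: marker, to: .right_hand_joint)
    /// ```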
46 | public func attach(view: UIView, to joint: TwoDBodyJoint) {
47 | body2D.trackedViews[joint] = view
48 | if view.superview == nil {
49 | BodyTracking2DSystem.shared.arView?.addSubview(view)
50 | }
51 | }
52 |
53 | public func removeJoint(_ joint: TwoDBodyJoint) {
54 | body2D.trackedViews[joint]?.removeFromSuperview()
55 | body2D.trackedViews.removeValue(forKey: joint)
56 | }
57 | }
58 |
59 | // MARK: - Angle Calculations
60 |
61 | public extension BodyTracker2D {
62 | /// Returns the angle (in degrees) between 3 given joints, treating joint2 as the center point.
63 | /// - The maximum angle is 180.0°
64 | /// - See "ARView2D.swift" for an example usage.
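    /// Example (sketch): the right elbow angle, with the forearm joint as the vertex:
    /// ```swift
    /// let elbowAngle = tracker.angleBetween3Joints(.right_shoulder_1_joint,
    ///                                              .right_forearm_joint,
    ///                                              .right_hand_joint)
    /// ```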
65 | func angleBetween3Joints(_ joint1: TwoDBodyJoint,
66 | _ joint2: TwoDBodyJoint,
67 | _ joint3: TwoDBodyJoint) -> CGFloat?
68 | {
69 | let jointScreenPositions = body2D.jointScreenPositions
70 |
71 | // Make sure the joints we are looking for are included in jointScreenPositions.
72 | guard
73 | let joint1ScreenPosition = jointScreenPositions[joint1],
74 | let joint2ScreenPosition = jointScreenPositions[joint2],
75 | let joint3ScreenPosition = jointScreenPositions[joint3]
76 | else { return nil }
77 |
78 | let vect1 = (joint1ScreenPosition - joint2ScreenPosition).simdVect()
79 | let vect2 = (joint3ScreenPosition - joint2ScreenPosition).simdVect()
80 |
81 | let top = dot(vect1, vect2)
82 | let bottom = length(vect1) * length(vect2)
83 | let angleInRadians = CGFloat(acos(top / bottom))
84 | let angleInDegrees = (angleInRadians * 180) / .pi
85 | return angleInDegrees
86 | }
87 |
88 | /// Returns the angle (in degrees) between down and the vector formed by the two given points.
89 | /// - In the UIKit coordinate system, (0,0) is in the top-left corner.
90 | /// - See "ARView2D.swift" for an example usage.
91 | /// - Returns: A vector pointing straight down returns 0.0.
92 | /// A vector pointing to the right returns 270.0.
93 | /// A vector pointing up returns 180.0.
94 | /// A vector pointing to the left returns 90.0.
95 | func angleFrom2Joints(_ joint1: TwoDBodyJoint,
96 | _ joint2: TwoDBodyJoint) -> CGFloat?
97 | {
98 | let jointScreenPositions = body2D.jointScreenPositions
99 |
100 | // Make sure the joints we are looking for are included in jointScreenPositions.
101 | guard
102 | let joint1ScreenPosition = jointScreenPositions[joint1],
103 | let joint2ScreenPosition = jointScreenPositions[joint2]
104 | else { return nil }
105 |
106 | return angleBetween2Points(point1: joint1ScreenPosition,
107 | point2: joint2ScreenPosition)
108 | }
109 |
110 | private func angleBetween2Points(point1: CGPoint, point2: CGPoint) -> CGFloat {
111 | let difference = point1 - point2
112 | let angleInRadians = atan2(difference.y, difference.x)
113 |
114 | var angleInDegrees = Float.radiansToDegrees(Float(angleInRadians))
115 | angleInDegrees -= 90
116 | if angleInDegrees < 0 { angleInDegrees += 360.0 }
117 | return CGFloat(angleInDegrees)
118 | }
119 | }
120 |
--------------------------------------------------------------------------------
/Sources/BodyTracking/Body2D/BodyTracking2DComponent.swift:
--------------------------------------------------------------------------------
1 | //
2 | // File.swift
3 | //
4 | //
5 | // Created by Grant Jarvis on 12/30/23.
6 | //
7 |
8 | import RealityKit
9 | import UIKit
10 |
11 | public protocol HasBody2D {
12 | var body2D: BodyTracking2DComponent { get }
13 | }
14 |
15 | // Does not get registered since it is not added to an Entity.
16 | public struct BodyTracking2DComponent {
17 | /// The positions of the joints on screen.
18 | ///
19 | /// - (0,0) is in the top-left.
20 | public internal(set) var jointScreenPositions = [TwoDBodyJoint: CGPoint]()
21 |
22 | public internal(set) var trackedViews = [TwoDBodyJoint: UIView]()
23 |
24 | /// True if a body is detected in the current frame.
25 | public internal(set) var bodyIsDetected = false
26 | }
27 |
--------------------------------------------------------------------------------
/Sources/BodyTracking/Body2D/BodyTracking2DSystem.swift:
--------------------------------------------------------------------------------
1 | //
2 | // File.swift
3 | //
4 | //
5 | // Created by Grant Jarvis on 12/30/23.
6 | //
7 |
8 | import ARKit
9 | import BTShared
10 | import Combine
11 | import RealityKit
12 | import UIKit
13 |
14 | internal class BodyTracking2DSystem {
15 | static var shared = BodyTracking2DSystem()
16 |
17 | private var cancellableForUpdate: Cancellable?
18 |
19 | internal private(set) weak var arView: ARView?
20 |
21 | // Since BodyTracker2D is not an Entity and does not get added to the 3D scene, instead of querying the scene for the entities we keep weak references to them here.
22 | @WeakCollection var participatingTrackers = [BodyTracker2D]()
23 |
24 | internal func registerSystem(with arView: ARView) {
25 | self.arView = arView
26 |
27 | cancellableForUpdate?.cancel()
28 |
29 | cancellableForUpdate = arView.scene.subscribe(to: SceneEvents.Update.self, update)
30 | }
31 |
32 | internal func deregisterSystem() {
33 | cancellableForUpdate = nil
34 |
35 | participatingTrackers.removeAll()
36 | }
37 |
38 | private func update(_: SceneEvents.Update) {
39 | guard let currentFrame = arView?.session.currentFrame else { return }
40 |
41 | participatingTrackers.forEach { updateTracker($0, frame: currentFrame) }
42 | }
43 |
44 | // Run this code every frame to get the joints.
45 | private func updateTracker(_ tracker: BodyTracker2D,
46 | frame: ARFrame)
47 | {
48 | updateJointScreenPositions(on: tracker, frame: frame)
49 |
50 | updateTrackedViews(on: tracker, frame: frame)
51 | }
52 |
53 | private func updateJointScreenPositions(on tracker: BodyTracker2D,
54 | frame: ARFrame)
55 | {
56 | /*
57 | BETA ISSUES: As of 07/23/2022:
58 | These have NOT yet been updated with the two new ear joints:
59 | ARSkeletonDefinition.defaultBody2D.jointCount
60 | ARSkeletonDefinition.defaultBody2D.jointNames
61 | ARSkeletonDefinition.defaultBody2D.jointNames.count
62 | But this HAS been updated with the two new ear joints:
63 | ARFrame.detectedBody.skeleton.jointLandmarks
64 | */
65 |
66 | let detectedBody = frame.detectedBody
67 |
68 | let frameDetectsBody = detectedBody != nil
69 |
70 | if tracker.body2D.bodyIsDetected != frameDetectsBody {
71 | tracker.body2D.bodyIsDetected = frameDetectsBody
72 | }
73 |
74 | guard
75 | let detectedBody,
76 | let arView,
77 | let interfaceOrientation = arView.window?.windowScene?.interfaceOrientation
78 | else { return }
79 |
80 | // TODO: better handle individual joints becoming undetected.
81 | let jointLandmarks = detectedBody.skeleton.jointLandmarks
82 |
83 | // Convert the normalized joint points into screen-space CGPoints.
84 | let displayTransform = frame.displayTransform(for: interfaceOrientation, viewportSize: arView.frame.size)
85 |
86 | for i in 0 ..< jointLandmarks.count {
87 | guard let screenPosition = screenPosition(for: jointLandmarks[i],
88 | displayTransform: displayTransform)
89 | else { continue }
90 |
91 | if let joint = TwoDBodyJoint(rawValue: i) {
92 | tracker.body2D.jointScreenPositions[joint] = screenPosition
93 | }
94 | }
95 | }
96 |
97 | private func screenPosition(for jointLandmark: simd_float2,
98 | displayTransform: CGAffineTransform) -> CGPoint?
99 | {
100 | if jointLandmark.x.isNaN || jointLandmark.y.isNaN {
101 | return nil
102 | }
103 |
104 | let point = CGPoint(x: CGFloat(jointLandmark.x),
105 | y: CGFloat(jointLandmark.y))
106 |
107 | let normalizedCenter = point.applying(displayTransform)
108 |
109 | guard let frameSize = arView?.frame.size else { return nil }
110 | // Convert from normalized pixel coordinates (0,0 top-left, 1,1 bottom-right) to screen-space coordinates.
111 | let screenPoint = normalizedCenter.applying(CGAffineTransform.identity.scaledBy(x: frameSize.width, y: frameSize.height))
112 |
113 | return screenPoint
114 | }
115 |
116 | func updateTrackedViews(on tracker: BodyTracker2D,
117 | frame: ARFrame)
118 | {
119 | guard frame.detectedBody != nil,
120 | tracker.body2D.jointScreenPositions.isEmpty == false
121 | else { return }
122 |
123 | for view in tracker.body2D.trackedViews {
124 | if let screenPosition = tracker.body2D.jointScreenPositions[view.key] {
125 | view.value.center = screenPosition
126 | }
127 | }
128 | }
129 | }
130 |
--------------------------------------------------------------------------------
/Sources/BodyTracking/Body2D/TwoDBodyJoint.swift:
--------------------------------------------------------------------------------
1 | //
2 | // File.swift
3 | //
4 | //
5 | // Created by Grant Jarvis on 12/30/23.
6 | //
7 |
8 | import Foundation
9 |
10 | /*
11 | BETA ISSUES: As of 07/23/2022:
12 | These have NOT yet been updated with the two new ear joints:
13 | ARSkeletonDefinition.defaultBody2D.jointCount
14 | ARSkeletonDefinition.defaultBody2D.jointNames
15 | ARSkeletonDefinition.defaultBody2D.jointNames.count
16 | But this HAS been updated with the two new ear joints:
17 | ARFrame.detectedBody.skeleton.jointLandmarks
18 | */
19 |
20 | /// ARSkeleton.JointName only contains 8 of these but this includes all of them :)
21 | ///
22 | /// - Use TwoDBodyJoint.allCases to access an array of all joints
23 | public enum TwoDBodyJoint: Int, CaseIterable {
24 | case head_joint = 0
25 | case neck_1_joint = 1
26 | case right_shoulder_1_joint = 2
27 | case right_forearm_joint = 3
28 | case right_hand_joint = 4
29 | case left_shoulder_1_joint = 5
30 | case left_forearm_joint = 6
31 | case left_hand_joint = 7
32 | case right_upLeg_joint = 8
33 | case right_leg_joint = 9
34 | case right_foot_joint = 10
35 | case left_upLeg_joint = 11
36 | case left_leg_joint = 12
37 | case left_foot_joint = 13
38 | case right_eye_joint = 14
39 | case left_eye_joint = 15
40 | case root = 16 // hips
41 | case right_ear_joint = 17
42 | case left_ear_joint = 18
43 |
44 | // Two new joints for the ears were added in iOS 16.0
45 | // CaseIterable does not work with `@available` applied to cases.
46 |
47 | public static var allCases: [TwoDBodyJoint] {
48 | if #available(iOS 16, *) {
49 | return [
50 | .head_joint,
51 | .neck_1_joint,
52 | .right_shoulder_1_joint,
53 | .right_forearm_joint,
54 | .right_hand_joint,
55 | .left_shoulder_1_joint,
56 | .left_forearm_joint,
57 | .left_hand_joint,
58 | .right_upLeg_joint,
59 | .right_leg_joint,
60 | .right_foot_joint,
61 | .left_upLeg_joint,
62 | .left_leg_joint,
63 | .left_foot_joint,
64 | .right_eye_joint,
65 | .left_eye_joint,
66 | .root, // hips
67 | .right_ear_joint,
68 | .left_ear_joint,
69 | ]
70 | } else {
71 | return [
72 | .head_joint,
73 | .neck_1_joint,
74 | .right_shoulder_1_joint,
75 | .right_forearm_joint,
76 | .right_hand_joint,
77 | .left_shoulder_1_joint,
78 | .left_forearm_joint,
79 | .left_hand_joint,
80 | .right_upLeg_joint,
81 | .right_leg_joint,
82 | .right_foot_joint,
83 | .left_upLeg_joint,
84 | .left_leg_joint,
85 | .left_foot_joint,
86 | .right_eye_joint,
87 | .left_eye_joint,
88 | .root, // hips
89 | ]
90 | }
91 | }
92 | }
93 |
--------------------------------------------------------------------------------
/Sources/BodyTracking/Body3D/Body3D/Body3DComponent.swift:
--------------------------------------------------------------------------------
1 | //
2 | // File.swift
3 | //
4 | //
5 | // Created by Grant Jarvis on 12/30/23.
6 | //
7 |
8 | import BTShared
9 | import RealityKit
10 | import struct RKUtilities.Registerer
11 |
12 | public protocol HasBody3D {
13 | var body3D: Body3DComponent { get }
14 | }
15 |
16 | // MARK: - Body3DComponent
17 |
18 | public struct Body3DComponent: Component {
19 |     internal var trackedJoints = Set<JointEntity>()
20 |
21 | /// An amount, from 0 to 1, that the joint movements are smoothed by.
22 | public var smoothingAmount: Float = 0
23 |
24 | internal var needsSmoothing: Bool
25 |
26 | public init(smoothingAmount: Float,
27 |                 trackedJoints: Set<JointEntity> = [])
28 | {
29 | self.smoothingAmount = smoothingAmount
30 | self.needsSmoothing = smoothingAmount > 0
31 | self.trackedJoints = trackedJoints
32 | Registerer.register(Self.self)
33 | // If you call registerSystem() multiple times, RealityKit ignores additional calls after the first.
34 | BodyTracking3DSystem.registerSystem()
35 | }
36 | }
37 |
--------------------------------------------------------------------------------
/Sources/BodyTracking/Body3D/Body3D/BodyEntity3D.swift:
--------------------------------------------------------------------------------
1 |
2 | import ARKit
3 | import RealityKit
4 |
5 | // MARK: - BodyEntity3D
6 |
7 | public class BodyEntity3D: Entity, HasBody3D {
8 | public internal(set) var body3D: Body3DComponent {
9 | get {
10 | component(forType: Body3DComponent.self) ?? .init(smoothingAmount: 0)
11 | }
12 | set {
13 | components.set(newValue)
14 | }
15 | }
16 |
17 | /// Initializes a BodyEntity3D
18 | /// - Parameters:
19 | /// - smoothingAmount: The amount, from 0 to 1, that the body is smoothed. Values may need to approach 1.0 to appear to have much effect.
20 | public required init(smoothingAmount: Float = 0)
21 | {
22 | super.init()
23 |
24 | body3D = Body3DComponent(smoothingAmount: smoothingAmount.clamped(0, 0.9999))
25 | }
26 |
27 | required init() {
28 | fatalError("init() has not been implemented")
29 | }
30 |
31 | /// Destroy this Entity and its references to any ARViews
32 | /// This helps prevent memory leaks.
33 | public func destroy() {
34 | for child in children {
35 | child.removeFromParent()
36 | }
37 | body3D.trackedJoints = []
38 | removeFromParent()
39 | }
40 |
41 | public func setSmoothingAmount(_ newValue: Float) {
42 | body3D.smoothingAmount = newValue.clamped(0, 0.9999)
43 | }
44 |
45 | /// Use this function to attach an entity to a particular joint.
46 | ///
47 | /// After calling this function, an entity will follow the transform of a particular joint every frame.
48 | /// - Parameters:
49 | /// - entity: The entity to attach.
50 | /// - jointName: The joint to attach the entity to.
51 | /// - preservingWorldTransform: A Boolean that you set to true to preserve the entity’s world transform, or false to preserve its relative transform. Use true when you want a model to keep its effective location and size within a scene. If you want to offset an entity from a joint transform, then set this to false.
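    /// Example (sketch; the sphere follows the joint once this `BodyEntity3D` is attached to a `BodyAnchor`):
    /// ```swift
    /// let sphere = ModelEntity(mesh: .generateSphere(radius: 0.05))
    /// bodyEntity.attach(entity: sphere, to: .right_hand_joint)
    /// ```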
52 | public func attach(entity: Entity,
53 | to jointName: ThreeDBodyJoint,
54 | preservingWorldTransform: Bool = false)
55 | {
56 | var joint: JointEntity
57 |
58 | if let jointLocal = body3D.trackedJoints.first(where: { $0.jointName == jointName }) {
59 | joint = jointLocal
60 |
61 | } else { // body3DComponent.trackedJoints does Not contain this joint yet.
62 | let jointLocal = JointEntity(jointName: jointName)
63 |
64 | /*
65 | For efficiency: Entities are parented to the root, not parented to local parent joint. Not using local transform.
66 | i.e. If only a subset of joints have entities added to them, then we do not need to add internal entities to every joint.
67 | */
68 |
69 | addChild(jointLocal)
70 |
71 | if let jointModelTransforms = ARSkeletonDefinition.defaultBody3D.neutralBodySkeleton3D?.jointModelTransforms {
72 | jointLocal.setTransformMatrix(jointModelTransforms[jointName.rawValue], relativeTo: self)
73 | }
74 |
75 | body3D.trackedJoints.insert(jointLocal)
76 |
77 | joint = jointLocal
78 | }
79 |
80 | joint.addChild(entity, preservingWorldTransform: preservingWorldTransform)
81 |
82 | if !preservingWorldTransform { entity.transform = .init() }
83 | }
84 |
85 | /// Removes this joint and all attached entities.
86 | public func removeJoint(_ joint: ThreeDBodyJoint) {
87 | if let jointLocal = body3D.trackedJoints.first(where: { $0.jointName == joint }) {
88 | jointLocal.removeFromParent()
89 |
90 | body3D.trackedJoints.remove(jointLocal)
91 | }
92 | }
93 | }
94 |
--------------------------------------------------------------------------------
/Sources/BodyTracking/Body3D/BodyAnchor/BodyAnchor.swift:
--------------------------------------------------------------------------------
1 | //
2 | // File.swift
3 | //
4 | //
5 | // Created by Grant Jarvis on 12/29/23.
6 | //
7 |
8 | import ARKit
9 | import BTShared
10 | import RealityKit
11 |
12 | public class BodyAnchor: Entity, HasBodyAnchoring {
13 | @WeakCollection internal var body3DEntities = [BodyEntity3D]()
14 |
15 | public internal(set) var bodyAnchorComponent: BodyAnchorComponent {
16 | get {
17 | component(forType: BodyAnchorComponent.self) ?? .init()
18 | }
19 | set {
20 | components.set(newValue)
21 | }
22 | }
23 |
24 | /// Initializes a BodyAnchor
25 | /// - Parameter session: The ARSession that the `BodyTracking3DSystem` will use to extract tracking data.
26 | public init(session: ARSession) {
27 | BodyTracking3DSystem.arSession = session
28 |
29 | super.init()
30 |
31 | bodyAnchorComponent = .init()
32 |
33 | // This will automatically attach this entity to the body.
34 | anchoring = AnchoringComponent(.body)
35 | }
36 |
37 | @MainActor required init() {
38 | fatalError("init() has not been implemented")
39 | }
40 |
41 | /// Attaches a `BodyEntity3D` to this `BodyAnchor` so that the `BodyEntity3D`'s joint transforms will be updated based on the tracking data associated with this `BodyAnchor`.
42 | /// - Parameters:
43 | /// - bodyEntity: The entity that will be added for tracking.
44 | /// - automaticallyAddChild: Set to true to add this entity as a child to the `BodyAnchor`. If set to false, you can still add the `BodyEntity3D` to the scene in some other way (such as to another anchor or anchor's descendant), and its joint transforms will be updated based on the tracking data associated with this `BodyAnchor`.
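    /// Example (sketch; assumes the session is running `ARBodyTrackingConfiguration`):
    /// ```swift
    /// let bodyAnchor = BodyAnchor(session: arView.session)
    /// arView.scene.addAnchor(bodyAnchor)
    /// bodyAnchor.attach(bodyEntity: BodyEntity3D(smoothingAmount: 0.7))
    /// ```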
45 | public func attach(bodyEntity: BodyEntity3D,
46 | automaticallyAddChild: Bool = true)
47 | {
48 | guard body3DEntities.contains(where: { $0 == bodyEntity }) == false else {
49 | print("Already added BodyEntity3D \(bodyEntity.name) to this BodyAnchor")
50 | return
51 | }
52 |
53 | body3DEntities.append(bodyEntity)
54 |
55 | if automaticallyAddChild { addChild(bodyEntity) }
56 | }
57 |
58 | /// Destroy this Entity and its references to any ARViews
59 | /// This helps prevent memory leaks.
60 | public func destroy() {
61 | for child in children {
62 | child.removeFromParent()
63 | }
64 |
65 | body3DEntities.removeAll()
66 |
67 | removeFromParent()
68 | }
69 | }
70 |
--------------------------------------------------------------------------------
/Sources/BodyTracking/Body3D/BodyAnchor/BodyAnchorComponent.swift:
--------------------------------------------------------------------------------
1 | //
2 | // File.swift
3 | //
4 | //
5 | // Created by Grant Jarvis on 12/30/23.
6 | //
7 |
8 | import ARKit
9 | import BTShared
10 | import Combine
11 | import RealityKit
12 | import struct RKUtilities.Registerer
13 |
14 | public protocol HasBodyAnchoring: HasAnchoring {
15 | var bodyAnchorComponent: BodyAnchorComponent { get }
16 | }
17 |
18 | public struct BodyAnchorComponent: Component {
19 | public internal(set) var didInitiallyDetectBody = false
20 |
21 | public internal(set) weak var arBodyAnchor: ARBodyAnchor?
22 |
23 |     /// A Boolean value that indicates whether this object's transform accurately represents the transform of the real-world body for the current frame.
24 | ///
25 | /// If this value is true, the object’s transform currently matches the position and orientation of the real-world object it represents.
26 | ///
27 | /// If this value is false, the object is not guaranteed to match the movement of its corresponding real-world feature, even if it remains in the visible scene.
28 |     public internal(set) var bodyIsTracked = CurrentValueSubject<Bool, Never>(false)
29 |
30 | init() {
31 | Registerer.register(Self.self)
32 | BodyTracking3DSystem.registerSystem()
33 | }
34 |
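    /// Example (sketch): `jointModelTransform(for: .head_joint)` returns the head's transform relative to the skeleton's root joint (the hips), or nil if no body anchor is available.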
35 | public func jointModelTransform(for joint: ThreeDBodyJoint) -> simd_float4x4? {
36 | arBodyAnchor?.skeleton.jointModelTransforms[joint.rawValue]
37 | }
38 |
39 | public func jointLocalTransform(for joint: ThreeDBodyJoint) -> simd_float4x4? {
40 | arBodyAnchor?.skeleton.jointLocalTransforms[joint.rawValue]
41 | }
42 | }
43 |
--------------------------------------------------------------------------------
/Sources/BodyTracking/Body3D/BodyTracking3DSystem.swift:
--------------------------------------------------------------------------------
1 | //
2 | // File.swift
3 | //
4 | //
5 | // Created by Grant Jarvis on 12/29/23.
6 | //
7 |
8 | import ARKit
9 | import BTShared
10 | import RealityKit
11 | import RKUtilities
12 |
13 | // MARK: - BodyTracking3DSystem
14 |
15 | final class BodyTracking3DSystem: System {
16 | weak static var arSession: ARSession?
17 |
18 | init(scene _: Scene) {}
19 |
20 | private static var bodyAnchorQuery = EntityQuery(where: .has(BodyAnchorComponent.self))
21 |
22 | func update(context: SceneUpdateContext) {
23 | // Must access the frame's anchors every frame. Storing the ARBodyAnchor does not give updates.
24 | guard
25 | let arSession = Self.arSession,
26 | let arBodyAnchor = arSession.currentFrame?.anchors.compactMap({ $0 as? ARBodyAnchor }).first
27 | else { return }
28 |
29 | context.scene.performQuery(Self.bodyAnchorQuery).compactMap { $0 as? BodyAnchor }.forEach { bodyAnchor in
30 |
31 | bodyAnchor.bodyAnchorComponent.arBodyAnchor = arBodyAnchor
32 |
33 | if bodyAnchor.bodyAnchorComponent.bodyIsTracked.value != arBodyAnchor.isTracked {
34 | bodyAnchor.bodyAnchorComponent.bodyIsTracked.value = arBodyAnchor.isTracked
35 | }
36 |
37 | let didInitiallyDetectBody = bodyAnchor.bodyAnchorComponent.didInitiallyDetectBody
38 |
39 | if !didInitiallyDetectBody, arBodyAnchor.isTracked {
40 | bodyAnchor.bodyAnchorComponent.didInitiallyDetectBody = true
41 | }
42 |
43 | bodyAnchor.body3DEntities.forEach {
44 | updateJoints(of: $0, with: arBodyAnchor)
45 | }
46 | }
47 | }
48 |
49 | private func updateJoints(of bodyEntity: BodyEntity3D,
50 | with arBodyAnchor: ARBodyAnchor)
51 | {
52 | /*
53 | For efficiency: Entities are parented to the root, not parented to local parent joint. Not using local transform.
54 | i.e. If only a subset of joints have entities added to them, then we do not need to add internal entities to every joint.
55 | */
56 | for trackedJoint in bodyEntity.body3D.trackedJoints {
57 | let jointIndex = trackedJoint.jointName.rawValue
58 | let newTransform = arBodyAnchor.skeleton.jointModelTransforms[jointIndex]
59 | if bodyEntity.body3D.needsSmoothing {
60 | smoothJointMotion(trackedJoint,
61 | bodyEntity: bodyEntity,
62 | newTransform: newTransform)
63 |
64 | } else {
65 | trackedJoint.setTransformMatrix(newTransform, relativeTo: bodyEntity)
66 | }
67 | }
68 | }
69 |
70 | // MARK: - Smoothing
71 |
72 | // TODO: Use SmoothDamp instead of Lerp.
73 |
74 | private func smoothJointMotion(_ joint: JointEntity,
75 | bodyEntity: BodyEntity3D,
76 | newTransform: simd_float4x4)
77 | {
78 | // Scale isn't changing for body joints, so don't smooth that.
79 |
80 | let smoothedAmount = (1 - bodyEntity.body3D.smoothingAmount)
81 |
82 | // Don't smooth hips joint - to prevent BodyEntity3D from flying through space whenever the body is initially detected.
83 | let t = joint.jointName == .hips_joint ? 1 : smoothedAmount
84 |
85 | let newTransform = simd_float4x4.mixOrientationTranslation(joint.transform.matrix, newTransform, t: t)
86 |
87 | joint.setTransformMatrix(newTransform, relativeTo: bodyEntity)
88 | }
89 | }
90 |
91 | // MARK: - simd_float4x4 extension
92 |
93 | extension simd_float4x4 {
94 | static func mixOrientationTranslation(_ x: simd_float4x4, _ y: simd_float4x4, t: Float) -> simd_float4x4 {
95 | let newTranslation = simd.mix(x.translation,
96 | y.translation,
97 | t: t)
98 |
99 | var mixedMatrix = simd_float4x4(translation: newTranslation)
100 |
101 | let newOrientation = simd_slerp(x.orientation,
102 | y.orientation,
103 | t)
104 |
105 | mixedMatrix.orientation = newOrientation
106 |
107 | return mixedMatrix
108 | }
109 | }
110 |
--------------------------------------------------------------------------------
/Sources/BodyTracking/Body3D/Configuration.swift:
--------------------------------------------------------------------------------
1 | //
2 | // File.swift
3 | //
4 | //
5 | // Created by Grant Jarvis on 12/29/23.
6 | //
7 |
8 | import ARKit
9 | import RealityKit
10 | import BTShared
11 |
12 | // MARK: - Configuration
13 |
14 | public extension ARView {
15 | func runBodyTrackingConfig2D() throws {
16 | // This is more efficient if you are just using 2D and Not 3D tracking.
17 | guard ARWorldTrackingConfiguration.supportsFrameSemantics(.bodyDetection) else {
18 | let errorMessage = "This device does Not support body detection."
19 | print(errorMessage)
20 | throw BodyTrackingError.unsupportedFrameSemantics("bodyDetection frame semantic is unavailable.")
21 | }
22 | let config2D = ARWorldTrackingConfiguration()
23 | config2D.frameSemantics = .bodyDetection
24 | session.run(config2D)
25 | }
26 |
27 | /// If ARBodyTrackingConfiguration is supported on this device, run this type of configuration on this ARView's session.
28 | ///
29 |     /// If ARBodyTrackingConfiguration is not supported on this device, this function prints an error message, throws an error, and, if autoAlert is true, presents an alert to the user.
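    /// Example (sketch):
    /// ```swift
    /// do { try arView.runBodyTrackingConfig3D() }
    /// catch { print(error.localizedDescription) }
    /// ```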
30 | func runBodyTrackingConfig3D(autoAlert: Bool = false) throws {
31 | // If the iOS device doesn't support body tracking, raise an error.
32 | guard ARBodyTrackingConfiguration.isSupported else {
33 | if autoAlert {
34 | showAlert(title: "Uh oh...", message: "This device does Not support body tracking.")
35 | }
36 |
37 | let errorMessage = """
38 | This device does Not support body tracking. This feature is only supported on devices with an A12 chip.
39 | """
40 |
41 | print(errorMessage)
42 |
43 | throw BodyTrackingError.unsupportedConfiguration("ARBodyTrackingConfiguration is unavailable.")
44 | }
45 |
46 | // This automatically adds the .bodyDetection frame semantic to the session configuration for 2D tracking as well.
47 | let config3D = ARBodyTrackingConfiguration()
48 |
49 | session.run(config3D)
50 | }
51 | }
52 |
--------------------------------------------------------------------------------
/Sources/BodyTracking/Body3D/ThreeDBodyJoint.swift:
--------------------------------------------------------------------------------
1 | //
2 | // File.swift
3 | //
4 | //
5 | // Created by Grant Jarvis on 12/29/23.
6 | //
7 |
8 | import ARKit
9 | import RealityKit
10 |
11 | // MARK: - JointEntity
12 |
13 | public class JointEntity: Entity {
14 | public private(set) var jointName: ThreeDBodyJoint!
15 |
16 | required init(jointName: ThreeDBodyJoint) {
17 | self.jointName = jointName
18 | super.init()
19 | name = String(describing: jointName)
20 | }
21 |
22 | required init() {
23 | fatalError("init() has not been implemented")
24 | }
25 | }
26 |
27 | // MARK: - ThreeDBodyJoint
28 |
29 | /// ARSkeleton.JointName only contains 8 of these but this includes all of them :)
30 | ///
31 | /// Includes 91 joints total, 28 tracked.
32 | /// - Use ThreeDBodyJoint.allCases to access an array of all joints
33 | public enum ThreeDBodyJoint: Int, CaseIterable {
34 | case root = 0
35 | case hips_joint = 1 // Could be redundant with root since root is at the hip.
36 | case left_upLeg_joint = 2
37 | case left_leg_joint = 3
38 | case left_foot_joint = 4
39 | case left_toes_joint = 5
40 | case left_toesEnd_joint = 6
41 | case right_upLeg_joint = 7
42 | case right_leg_joint = 8
43 | case right_foot_joint = 9
44 | case right_toes_joint = 10
45 | case right_toesEnd_joint = 11
46 | case spine_1_joint = 12
47 | case spine_2_joint = 13
48 | case spine_3_joint = 14
49 | case spine_4_joint = 15
50 | case spine_5_joint = 16
51 | case spine_6_joint = 17
52 | case spine_7_joint = 18
53 | case left_shoulder_1_joint = 19
54 | case left_arm_joint = 20
55 | case left_forearm_joint = 21
56 | case left_hand_joint = 22
57 | case left_handIndexStart_joint = 23
58 | case left_handIndex_1_joint = 24
59 | case left_handIndex_2_joint = 25
60 | case left_handIndex_3_joint = 26
61 | case left_handIndexEnd_joint = 27
62 | case left_handMidStart_joint = 28
63 | case left_handMid_1_joint = 29
64 | case left_handMid_2_joint = 30
65 | case left_handMid_3_joint = 31
66 | case left_handMidEnd_joint = 32
67 | case left_handPinkyStart_joint = 33
68 | case left_handPinky_1_joint = 34
69 | case left_handPinky_2_joint = 35
70 | case left_handPinky_3_joint = 36
71 | case left_handPinkyEnd_joint = 37
72 | case left_handRingStart_joint = 38
73 | case left_handRing_1_joint = 39
74 | case left_handRing_2_joint = 40
75 | case left_handRing_3_joint = 41
76 | case left_handRingEnd_joint = 42
77 | case left_handThumbStart_joint = 43
78 | case left_handThumb_1_joint = 44
79 | case left_handThumb_2_joint = 45
80 | case left_handThumbEnd_joint = 46
81 | case neck_1_joint = 47
82 | case neck_2_joint = 48
83 | case neck_3_joint = 49
84 | case neck_4_joint = 50
85 | case head_joint = 51
86 | case jaw_joint = 52
87 | case chin_joint = 53
88 | case left_eye_joint = 54
89 | case left_eyeLowerLid_joint = 55
90 | case left_eyeUpperLid_joint = 56
91 | case left_eyeball_joint = 57
92 | case nose_joint = 58
93 | case right_eye_joint = 59
94 | case right_eyeLowerLid_joint = 60
95 | case right_eyeUpperLid_joint = 61
96 | case right_eyeball_joint = 62
97 | case right_shoulder_1_joint = 63
98 | case right_arm_joint = 64
99 | case right_forearm_joint = 65
100 | case right_hand_joint = 66
101 | case right_handIndexStart_joint = 67
102 | case right_handIndex_1_joint = 68
103 | case right_handIndex_2_joint = 69
104 | case right_handIndex_3_joint = 70
105 | case right_handIndexEnd_joint = 71
106 | case right_handMidStart_joint = 72
107 | case right_handMid_1_joint = 73
108 | case right_handMid_2_joint = 74
109 | case right_handMid_3_joint = 75
110 | case right_handMidEnd_joint = 76
111 | case right_handPinkyStart_joint = 77
112 | case right_handPinky_1_joint = 78
113 | case right_handPinky_2_joint = 79
114 | case right_handPinky_3_joint = 80
115 | case right_handPinkyEnd_joint = 81
116 | case right_handRingStart_joint = 82
117 | case right_handRing_1_joint = 83
118 | case right_handRing_2_joint = 84
119 | case right_handRing_3_joint = 85
120 | case right_handRingEnd_joint = 86
121 | case right_handThumbStart_joint = 87
122 | case right_handThumb_1_joint = 88
123 | case right_handThumb_2_joint = 89
124 | case right_handThumbEnd_joint = 90
125 |
126 | public func getParentJoint() -> ThreeDBodyJoint {
127 | let parentIndex = ARSkeletonDefinition.defaultBody3D.parentIndices[rawValue]
128 | return ThreeDBodyJoint(rawValue: parentIndex) ?? .root
129 | }
130 |
131 | public func getChildJoints() -> [ThreeDBodyJoint] {
132 | var childJoints = [ThreeDBodyJoint]()
133 |
134 | let default3DBody = ARSkeletonDefinition.defaultBody3D
135 |
136 | let parentIndices = default3DBody.parentIndices
137 |
138 | for (jointIndex, parentIndex) in parentIndices.enumerated() where parentIndex == rawValue {
139 | if let childJoint = ThreeDBodyJoint(rawValue: jointIndex)
140 | {
141 | childJoints.append(childJoint)
142 | }
143 | }
144 | return childJoints
145 | }
146 |
147 | /// Use this function to determine if a particular joint is tracked or untracked.
148 | public func isTracked() -> Bool {
149 | return ThreeDBodyJoint.trackedJoints.contains(self)
150 | }
151 |
152 | /// Not all joints are tracked, but these are.
153 | ///
154 | /// Tracked joints' transforms (position, rotation, scale) follow the person's body.
155 | /// Untracked joints always maintain the same transform relative to their parent joint.
156 | /// There are 91 joints total in the skeleton, and 28 are tracked.
157 |     public static var trackedJoints: Set<ThreeDBodyJoint> = [
158 | .root,
159 | .hips_joint,
160 | .left_upLeg_joint,
161 | .left_leg_joint,
162 | .left_foot_joint,
163 | .right_upLeg_joint,
164 | .right_leg_joint,
165 | .right_foot_joint,
166 | .spine_1_joint,
167 | .spine_2_joint,
168 | .spine_3_joint,
169 | .spine_4_joint,
170 | .spine_5_joint,
171 | .spine_6_joint,
172 | .spine_7_joint,
173 | .left_shoulder_1_joint,
174 | .left_arm_joint,
175 | .left_forearm_joint,
176 | .left_hand_joint,
177 | .neck_1_joint,
178 | .neck_2_joint,
179 | .neck_3_joint,
180 | .neck_4_joint,
181 | .head_joint,
182 | .right_shoulder_1_joint,
183 | .right_arm_joint,
184 | .right_forearm_joint,
185 | .right_hand_joint,
186 | ]
187 | }
188 |
--------------------------------------------------------------------------------
/Sources/FaceTracking/FaceAnchor.swift:
--------------------------------------------------------------------------------
1 |
2 | import ARKit
3 | import BTShared
4 | import Combine
5 | import RealityKit
6 | import RKUtilities
7 | import UIKit
8 |
9 | public enum Eye {
10 | case left, right
11 | }
12 |
13 | public class FaceAnchor: Entity, HasFaceAnchoring {
14 | public internal(set) var face: FaceAnchorComponent {
15 | get {
16 | component(forType: FaceAnchorComponent.self) ?? .init()
17 | } set {
18 | components.set(newValue)
19 | }
20 | }
21 |
22 | public private(set) var leftEye = Entity()
23 |
24 | public private(set) var rightEye = Entity()
25 |
26 | @WeakCollection internal var morphedEntities = [FaceMorphedEntity]()
27 |
28 | internal var eyeAttachments = [EyeAttachment]()
29 |
30 | public required init(session: ARSession) {
31 | super.init()
32 |
33 | FaceSystem.arSession = session
34 |
35 | face = .init()
36 |
37 | // This will automatically attach this entity to the face.
38 | anchoring = AnchoringComponent(.face)
39 |
40 | addChild(leftEye)
41 |
42 | addChild(rightEye)
43 | }
44 |
45 | required init() {
46 | fatalError("init() has not been implemented")
47 | }
48 |
49 | /// Attaches a FaceMorphedEntity to this FaceAnchor so that the FaceMorphedEntity's mesh will be deformed based on the BlendShapes associated with this particular FaceAnchor
50 | /// - Parameters:
51 | /// - morphedEntity: The entity that will be added for morphing.
52 | /// - automaticallyAddChild: Set to true to add this entity as a child to the face anchor. If set to false, you can still add the FaceMorphedEntity to the scene in some other way (such as to another anchor or anchor's descendant), and its geometry will still morph based on the BlendShapes associated with this particular FaceAnchor.
53 | public func attach(morphedEntity: FaceMorphedEntity,
54 | automaticallyAddChild: Bool = true)
55 | {
56 | guard morphedEntities.contains(where: { $0 == morphedEntity }) == false else {
57 | print("Already added FaceMorphedEntity \(morphedEntity.name) to this FaceAnchor")
58 | return
59 | }
60 |
61 | morphedEntities.append(morphedEntity)
62 |
63 | if automaticallyAddChild { addChild(morphedEntity) }
64 | }
65 |
66 | /// Attaches an `Entity`'s transform to one of the eyes on this `FaceAnchor`.
67 | ///
68 | /// - Parameters:
69 | /// - entity: The entity to attach.
70 | /// - chirality: The eye to select. i.e. left or right.
71 | /// - trackedTransforms: The set of transforms to track. Options include `position` and `rotation`.
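    /// Example (sketch; assumes the session is running an `ARFaceTrackingConfiguration`, and `gaze` is an illustrative entity):
    /// ```swift
    /// let gaze = Entity()
    /// faceAnchor.attach(entity: gaze, toEye: .left, tracking: [.position, .rotation])
    /// ```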
72 | public func attach(entity: Entity,
73 | toEye chirality: Chirality,
74 | tracking trackedTransforms: TransformationOptions = .rotation)
75 | {
76 | guard eyeAttachments.contains(where: { $0.entity == entity }) == false else {
77 | print("Already added Entity \(entity.name) to this FaceAnchor")
78 | return
79 | }
80 |
81 | eyeAttachments.append(.init(entity: entity,
82 | chirality: chirality,
83 | trackedTransforms: trackedTransforms))
84 |
85 |
86 | }
87 |
88 | /// Destroy this Entity and its references to any ARViews
89 | /// This helps prevent memory leaks.
90 | public func destroy() {
91 | for child in children {
92 | child.removeFromParent()
93 | }
94 |
95 | morphedEntities.removeAll()
96 |
97 | eyeAttachments.removeAll()
98 |
99 | removeFromParent()
100 | }
101 | }
102 |
103 | // MARK: - Eye Tracking Data
104 | public extension FaceAnchor {
105 | struct TransformationOptions: OptionSet {
106 | public let rawValue: Int
107 |
108 | public static let position = TransformationOptions(rawValue: 1 << 0)
109 |
110 | public static let rotation = TransformationOptions(rawValue: 1 << 1)
111 |
112 | public init(rawValue: Int) {
113 | self.rawValue = rawValue
114 | }
115 | }
116 |
117 | enum Chirality {
118 | case left, right
119 | }
120 |
121 | internal struct EyeAttachment {
122 | weak var entity: Entity?
123 |
124 | var chirality: Chirality
125 |
126 | var trackedTransforms: TransformationOptions
127 | }
128 | }
129 |
--------------------------------------------------------------------------------
/Sources/FaceTracking/FaceAnchorComponent.swift:
--------------------------------------------------------------------------------
1 | //
2 | // File.swift
3 | //
4 | //
5 | // Created by Grant Jarvis on 12/16/23.
6 | //
7 |
8 | import ARKit
9 | import BTShared
10 | import Combine
11 | import RealityKit
12 | import struct RKUtilities.Registerer
13 |
14 | /// Used for efficiency so that not all blendshape values must be copied every frame or every time any individual value is accessed.
15 | /// Will transform blendshape values from `NSNumber` to `Float` for use in RealityKit.
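/// Example (sketch): `faceAnchor.face.blendShapes[.jawOpen]` returns the current jaw-open coefficient as a `Float`, or nil when no face anchor is available.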
16 | public struct BlendShapeContainer {
17 | fileprivate weak var sourceAnchor: ARFaceAnchor?
18 |
19 | public subscript(key: ARFaceAnchor.BlendShapeLocation) -> Float? {
20 | return sourceAnchor?.blendShapes[key] as? Float
21 | }
22 | }
23 |
24 | public protocol HasFaceAnchoring: HasAnchoring {
25 | var face: FaceAnchorComponent { get }
26 |
27 | var leftEye: Entity { get }
28 |
29 | var rightEye: Entity { get }
30 | }
31 |
32 | public struct FaceAnchorComponent: Component {
33 |     /// A Boolean value that indicates whether this object's transform accurately represents the transform of the real-world face for the current frame.
34 | ///
35 | /// If this value is true, the object’s transform currently matches the position and orientation of the real-world object it represents.
36 | ///
37 | /// If this value is false, the object is not guaranteed to match the movement of its corresponding real-world feature, even if it remains in the visible scene.
38 |     public internal(set) var faceIsTracked = CurrentValueSubject<Bool, Never>(false)
39 |
40 | /// Identifiers for specific facial features with coefficients describing the relative movements of those features.
41 | ///
42 | /// See: `ARFaceAnchor.BlendShapeLocation` for more explanation.
43 | /// - Note: A geometry morpher can be used with blendshapes for Memoji type effects, but these values can be used for other purposes as well.
44 | public var blendShapes: BlendShapeContainer {
45 | return BlendShapeContainer(sourceAnchor: arFaceAnchor)
46 | }
47 |
48 | public var rEyeTransform: simd_float4x4? {
49 | return arFaceAnchor?.rightEyeTransform
50 | }
51 |
52 | public var lEyeTransform: simd_float4x4? {
53 | return arFaceAnchor?.leftEyeTransform
54 | }
55 |
56 | public internal(set) weak var arFaceAnchor: ARFaceAnchor?
57 |
58 | public init() {
59 | Registerer.register(Self.self)
60 | FaceSystem.registerSystem()
61 | }
62 | }
63 |
--------------------------------------------------------------------------------
/Sources/FaceTracking/FaceMorphedEntity.swift:
--------------------------------------------------------------------------------
1 | //
2 | // File.swift
3 | //
4 | //
5 | // Created by Grant Jarvis on 12/16/23.
6 | //
7 |
8 | import ARKit
9 | import RealityKit
10 | import RealityMorpher
11 | import RKUtilities
12 |
13 | public final class FaceMorphedEntity: Entity, HasModel, HasMorph {
14 | // These must be in corresponding order to the targets passed to the morph component.
15 | internal private(set) var targetLocations: [ARFaceAnchor.BlendShapeLocation]
16 |
17 | public var morphComponent: MorphComponent {
18 | get {
19 | component(forType: MorphComponent.self)!
20 | }
21 | set {
22 | components.set(newValue)
23 | }
24 | }
25 |
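    /// Example (sketch; `baseModel` and `jawOpenModel` are hypothetical `ModelComponent`s with identical topology):
    /// ```swift
    /// let morphed = FaceMorphedEntity(baseModel: baseModel,
    ///                                 targetMapping: [.jawOpen: jawOpenModel])
    /// faceAnchor.attach(morphedEntity: morphed)
    /// ```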
26 | public init(baseModel: ModelComponent,
27 | targetMapping: [ARFaceAnchor.BlendShapeLocation: ModelComponent])
28 | {
29 | targetLocations = Array(targetMapping.keys)
30 |
31 | let targets = targetLocations.compactMap { targetMapping[$0] }
32 |
33 | super.init()
34 |
35 | model = baseModel
36 |
37 | do {
38 | // This will handle throwing an error if an unsupported number of targets was passed.
39 | morphComponent = try MorphComponent(entity: self,
40 | targets: targets)
41 | } catch {
42 |             assertionFailure("Failed to create MorphComponent for FaceMorphedEntity: \(error)")
43 | }
44 | }
45 |
46 | @MainActor required init() {
47 | fatalError("init() has not been implemented")
48 | }
49 |
50 | /// Use this to perform your own morphing. If you attach this FaceMorphedEntity to a FaceAnchor, there is no need to call this method yourself.
51 | public func update(with blendShapeContainer: BlendShapeContainer) {
52 | var weights = [ARFaceAnchor.BlendShapeLocation: Float]()
53 |
54 | targetLocations.forEach {
55 | weights[$0] = blendShapeContainer[$0]
56 | }
57 |
58 | setTargetWeights(weights: weights)
59 | }
60 |
61 | /// Use this to perform your own morphing. If you attach this FaceMorphedEntity to a FaceAnchor, there is no need to call this method yourself.
62 | public func setTargetWeights(weights: [ARFaceAnchor.BlendShapeLocation: Float]) {
63 |
64 | let values = targetLocations.compactMap { weights[$0] }
65 |
66 | guard values.count == targetLocations.count else {
67 | assertionFailure("Weights must include a value for every target location.")
68 | return
69 | }
70 |
71 | morphComponent.setTargetWeights(.init(values))
72 | }
73 | }
74 |
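75 | // Usage sketch (illustrative; names are hypothetical): building a FaceMorphedEntity
76 | // from a neutral base mesh plus per-blendshape morph targets. The ModelComponents
77 | // are assumed to be loaded elsewhere and to share the base mesh's topology.
78 | func makeMorphedFace(base: ModelComponent,
79 |                      jawOpen: ModelComponent,
80 |                      smileLeft: ModelComponent) -> FaceMorphedEntity {
81 |     // Attaching the returned entity to a FaceAnchor lets FaceSystem drive the
82 |     // target weights automatically from the tracked blendshapes.
83 |     return FaceMorphedEntity(baseModel: base,
84 |                              targetMapping: [.jawOpen: jawOpen,
85 |                                              .mouthSmileLeft: smileLeft])
86 | }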
--------------------------------------------------------------------------------
/Sources/FaceTracking/FaceSystem.swift:
--------------------------------------------------------------------------------
1 | //
2 | // File.swift
3 | //
4 | //
5 | // Created by Grant Jarvis on 12/16/23.
6 | //
7 |
8 | import ARKit
9 | import RealityKit
10 | import RealityMorpher
11 |
12 | final class FaceSystem: System {
13 | weak static var arSession: ARSession?
14 |
15 | // MorphSystem is private, so it cannot be referenced.
16 | // static var dependencies = [.before(MorphSystem.self)]
17 |
18 | init(scene _: Scene) {}
19 |
20 | private static var faceQuery = EntityQuery(where: .has(FaceAnchorComponent.self))
21 |
22 | // TODO: Add support for multiple faces.
23 | // Must access the frame's anchors every frame. Storing the ARFaceAnchor does not give updates.
24 | func update(context: SceneUpdateContext) {
25 | guard
26 | let arSession = Self.arSession,
27 | let arFaceAnchor = arSession.currentFrame?.anchors.compactMap({ $0 as? ARFaceAnchor }).first
28 | else { return }
29 |
30 | context.scene.performQuery(Self.faceQuery).compactMap { $0 as? FaceAnchor }.forEach { faceAnchor in
31 |
32 | faceAnchor.face.arFaceAnchor = arFaceAnchor
33 |
34 | if faceAnchor.face.faceIsTracked.value != arFaceAnchor.isTracked {
35 | faceAnchor.face.faceIsTracked.value = arFaceAnchor.isTracked
36 | }
37 |
38 | updateEyes(arFaceAnchor: arFaceAnchor,
39 | faceAnchor: faceAnchor)
40 |
41 | updateEyeTrackedEntities(faceAnchor: faceAnchor)
42 |
43 | updateMorphedEntities(faceAnchor: faceAnchor)
44 | }
45 | }
46 |
47 | private func updateEyes(arFaceAnchor: ARFaceAnchor,
48 | faceAnchor: FaceAnchor) {
49 | faceAnchor.leftEye.transform.matrix = arFaceAnchor.leftEyeTransform
50 |
51 | faceAnchor.rightEye.transform.matrix = arFaceAnchor.rightEyeTransform
52 | }
53 |
54 | private func updateMorphedEntities(faceAnchor: FaceAnchor) {
55 | for morphedEntity in faceAnchor.morphedEntities {
56 |
57 | morphedEntity.update(with: faceAnchor.face.blendShapes)
58 | }
59 | }
60 |
61 | private func updateEyeTrackedEntities(faceAnchor: FaceAnchor) {
62 | for eyeAttachment in faceAnchor.eyeAttachments {
63 | guard let eyeTrackedEntity = eyeAttachment.entity else {continue}
64 | let trackedTransforms = eyeAttachment.trackedTransforms
65 |
66 | var eyeTarget: Entity?
67 |
68 | switch eyeAttachment.chirality {
69 | case .left:
70 | eyeTarget = faceAnchor.leftEye
71 | case .right:
72 | eyeTarget = faceAnchor.rightEye
73 | }
74 | guard let eyeTarget else {continue}
75 |
76 | if trackedTransforms.contains(.rotation) {
77 | eyeTrackedEntity.orientation = eyeTarget.orientation
78 | }
79 | if trackedTransforms.contains(.position) {
80 | eyeTrackedEntity.position = eyeTarget.position
81 | }
82 | }
83 | }
84 | }
85 |
--------------------------------------------------------------------------------
/Sources/FaceTracking/FaceTrackingConfig.swift:
--------------------------------------------------------------------------------
1 | //
2 | // File.swift
3 | //
4 | //
5 | // Created by Grant Jarvis on 12/30/23.
6 | //
7 |
8 | import ARKit
9 | import BTShared
10 | import RealityKit
11 |
12 | public extension ARView {
13 | // To learn more about face tracking:
14 | // https://developer.apple.com/documentation/arkit/arfacetrackingconfiguration
15 | /*
16 | "Because face tracking provides your app with personal facial information, your app must include a privacy policy describing to users how you intend to use face tracking and face data. For details, see the Apple Developer Program License Agreement."
17 | */
18 |
19 | func runFaceTrackingConfig(autoAlert: Bool = false) throws {
20 | // If the iOS device doesn't support face tracking, raise an error.
21 | guard ARFaceTrackingConfiguration.isSupported
22 | else {
23 | if autoAlert {
24 | showAlert(title: "Uh oh...", message: "This device does not support face tracking.")
25 | }
26 | let errorMessage = "This device does not support face tracking. This feature is only supported on devices with an A12 chip or later."
27 | print(errorMessage)
28 | throw BodyTrackingError.unsupportedConfiguration("ARFaceTrackingConfiguration is unavailable.")
29 | }
30 |
31 | let config3D = ARFaceTrackingConfiguration()
32 | session.run(config3D)
33 | }
34 | }
35 |
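36 | // Usage sketch (illustrative; `arView` is a hypothetical, already-configured ARView):
37 | // starting face tracking and surfacing the unsupported-device error to the caller.
38 | func startFaceTracking(on arView: ARView) {
39 |     do {
40 |         try arView.runFaceTrackingConfig()
41 |     } catch {
42 |         print("Face tracking unavailable:", error)
43 |     }
44 | }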
--------------------------------------------------------------------------------
/Sources/HandTracking/Hand2D/FrameRateRegulator.swift:
--------------------------------------------------------------------------------
1 | //
2 | // File.swift
3 | //
4 | //
5 | // Created by Grant Jarvis on 12/30/23.
6 | //
7 |
8 | import Foundation
9 |
10 | public class FrameRateRegulator {
11 | public enum RequestRate: Int {
12 | case everyFrame = 1
13 | case half = 2
14 | case quarter = 4
15 | }
16 |
17 | /// The frequency that the Vision request for detecting hands will be performed.
18 | ///
19 | /// Running the request every frame may decrease performance.
20 | /// Can be reduced to increase performance at the cost of choppy tracking.
21 | /// Set to half to run every other frame, or to quarter to run once every four frames.
22 | public var requestRate: RequestRate = .everyFrame
23 |
24 | private var frameInt = 1
25 |
26 | internal func canContinue() -> Bool {
27 | if frameInt == requestRate.rawValue {
28 | frameInt = 1
29 | return true
30 |
31 | } else {
32 | frameInt += 1
33 | return false
34 | }
35 | }
36 | }
37 |
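38 | // Usage sketch (illustrative): the regulator is not used directly; it is driven
39 | // through the trackers' static `requestRate` property.
40 | func reduceHandTrackingCost() {
41 |     HandTracker2D.requestRate = .quarter // run the Vision request once every four frames
42 | }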
--------------------------------------------------------------------------------
/Sources/HandTracking/Hand2D/Hand2DComponent.swift:
--------------------------------------------------------------------------------
1 | //
2 | // File.swift
3 | //
4 | //
5 | // Created by Grant Jarvis on 12/30/23.
6 | //
7 |
8 | import ARKit
9 | import Combine
10 | import RealityKit
11 |
12 | public protocol HasHand2D {
13 | var hand2D: Hand2DComponent { get set }
14 | }
15 |
16 | // Does not get registered since it is not added to an Entity.
17 | public struct Hand2DComponent {
18 | public typealias HandJointName = VNHumanHandPoseObservation.JointName
19 |
20 | public var confidenceThreshold: Float!
21 |
22 | /// Value is true if the hand has ever been recognized.
23 | public internal(set) var handWasInitiallyIdentified = CurrentValueSubject<Bool, Never>(false)
24 |
25 | /// Value is true if the hand is currently recognized.
26 | public internal(set) var handIsRecognized = CurrentValueSubject<Bool, Never>(false)
27 |
28 | /// Screen-space coordinates. These can be used with a UIKit view or ARView covering the entire screen.
29 | public internal(set) var jointScreenPositions: [HandJointName: CGPoint]!
30 |
31 | /// Normalized pixel coordinates (0,0 top-left, 1,1 bottom-right)
32 | public internal(set) var jointAVFoundationPositions: [HandJointName: CGPoint]!
33 |
34 | public internal(set) var trackedViews = [HandJointName: UIView]()
35 | }
36 |
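37 | // Usage sketch (illustrative; `tracker` is a hypothetical HandTracker2D): observing
38 | // recognition state through the component's Combine subjects. Retain the returned
39 | // cancellable for as long as the subscription should live.
40 | func observeRecognition(of tracker: HandTracker2D) -> AnyCancellable {
41 |     tracker.hand2D.handIsRecognized
42 |         .removeDuplicates()
43 |         .sink { isRecognized in
44 |             print("Hand recognized:", isRecognized)
45 |         }
46 | }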
--------------------------------------------------------------------------------
/Sources/HandTracking/Hand2D/HandDetector.swift:
--------------------------------------------------------------------------------
1 | //
2 | // File.swift
3 | //
4 | //
5 | // Created by Grant Jarvis on 12/30/23.
6 | //
7 |
8 | import ARKit
9 | import BTShared
10 | import RealityKit
11 | import UIKit
12 | import Vision
13 |
14 | enum HandTrackingError: Error {
15 | case requestInFlight
16 |
17 | case frameRateRegulated
18 |
19 | case noHandsDetected
20 | }
21 |
22 | class HandDetector {
23 | internal static var shared = HandDetector()
24 |
25 | private var inFlight = false
26 |
27 | internal var frameRateRegulator = FrameRateRegulator()
28 |
29 | internal static let requestQueue = DispatchQueue(label: "pro.RealityAcademy.handTracking", qos: .userInteractive)
30 |
31 | /// The Vision request used to detect hands. Its maximumHandCount is raised to match the number of hands being tracked.
32 | private var handPoseRequest = VNDetectHumanHandPoseRequest()
33 |
34 | init() {
35 | handPoseRequest.maximumHandCount = 1
36 | }
37 |
38 | internal func runFingerDetection(frame: ARFrame,
39 | handCount: Int) throws -> [VNHumanHandPoseObservation]
40 | {
41 | if handPoseRequest.maximumHandCount != handCount {
42 | handPoseRequest.maximumHandCount = handCount
43 | }
44 |
45 | guard frameRateRegulator.canContinue() else {
46 | throw HandTrackingError.frameRateRegulated
47 | }
48 |
49 | guard !inFlight else { throw HandTrackingError.requestInFlight }
50 |
51 | let handler = VNImageRequestHandler(cvPixelBuffer: frame.capturedImage, orientation: .up, options: [:])
52 |
53 | inFlight = true
54 |
55 | do {
56 | // Perform VNDetectHumanHandPoseRequest
57 | try handler.perform([handPoseRequest])
58 | // Continue only when a hand was detected in the frame.
59 | // Since we set the maximumHandCount property of the request to 1, there will be at most one observation.
60 | guard let observations = handPoseRequest.results, observations.isEmpty == false else {
61 | throw HandTrackingError.noHandsDetected
62 | }
63 |
64 | inFlight = false
65 |
66 | return observations
67 |
68 | } catch {
69 | inFlight = false
70 |
71 | throw error
72 | }
73 | }
74 | }
75 |
--------------------------------------------------------------------------------
/Sources/HandTracking/Hand2D/HandJoint.swift:
--------------------------------------------------------------------------------
1 | //
2 | // File.swift
3 | //
4 | //
5 | // Created by Grant Jarvis on 12/31/23.
6 | //
7 |
8 | import ARKit
9 |
10 | public enum HandJoint: String, CaseIterable {
11 | public typealias JointName = VNHumanHandPoseObservation.JointName
12 |
13 | case thumbTip, thumbIP, thumbMP, thumbCMC
14 | case indexTip, indexDIP, indexPIP, indexMCP
15 | case middleTip, middleDIP, middlePIP, middleMCP
16 | case ringTip, ringDIP, ringPIP, ringMCP
17 | case littleTip, littleDIP, littlePIP, littleMCP
18 | case wrist
19 |
20 | var name: JointName {
21 | return JointName.init(rawValue: .init(rawValue: rawValue))
22 | }
23 |
24 | // 21 total.
25 | public static let allHandJoints: [JointName] = [
26 | .thumbTip, .thumbIP, .thumbMP, .thumbCMC,
27 | .indexTip, .indexDIP, .indexPIP, .indexMCP,
28 | .middleTip, .middleDIP, .middlePIP, .middleMCP,
29 | .ringTip, .ringDIP, .ringPIP, .ringMCP,
30 | .littleTip, .littleDIP, .littlePIP, .littleMCP,
31 | .wrist
32 | ]
33 |
34 | public static let tipJoints: Set<JointName> = [
35 | .thumbTip, .indexTip, .middleTip, .ringTip, .littleTip
36 | ]
37 |
38 | public static let orientationTarget: [JointName : JointName] = [
39 | .thumbTip: .thumbIP,
40 | .thumbIP: .thumbTip,
41 | .thumbMP: .thumbIP,
42 | .thumbCMC: .thumbMP,
43 | .indexTip: .indexDIP,
44 | .indexDIP: .indexTip,
45 | .indexPIP: .indexDIP,
46 | .indexMCP: .indexPIP,
47 | .middleTip: .middleDIP,
48 | .middleDIP: .middleTip,
49 | .middlePIP: .middleDIP,
50 | .middleMCP: .middlePIP,
51 | .ringTip: .ringDIP,
52 | .ringDIP: .ringTip,
53 | .ringPIP: .ringDIP,
54 | .ringMCP: .ringPIP,
55 | .littleTip: .littleDIP,
56 | .littleDIP: .littleTip,
57 | .littlePIP: .littleDIP,
58 | .littleMCP: .littlePIP,
59 | .wrist: .middleMCP
60 | ]
61 | }
62 |
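63 | // Usage sketch (illustrative; the function name is hypothetical): looking up which
64 | // joint is used to orient a fingertip, and checking membership in the tip-joint set.
65 | func describeIndexTip() {
66 |     let tipAimJoint = HandJoint.orientationTarget[.indexTip] // .indexDIP
67 |     let isTip = HandJoint.tipJoints.contains(.indexTip) // true
68 |     print(tipAimJoint as Any, isTip)
69 | }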
--------------------------------------------------------------------------------
/Sources/HandTracking/Hand2D/HandTracker2D.swift:
--------------------------------------------------------------------------------
1 |
2 | import ARKit
3 | import BTShared
4 | import RealityKit
5 | import RKUtilities
6 | import UIKit
7 |
8 | // You can track as many hands as you want; HandDetector's handPoseRequest has its maximumHandCount adjusted to match the number of trackers.
9 | public class HandTracker2D: HasHand2D, Identifiable {
10 | public typealias HandJointName = HandJoint.JointName
11 |
12 | public var hand2D: Hand2DComponent
13 |
14 | public internal(set) var id = UUID()
15 |
16 | /// The frequency that the Vision request for detecting hands will be performed.
17 | ///
18 | /// Running the request every frame may decrease performance.
19 | /// Can be reduced to increase performance at the cost of choppy tracking.
20 | /// Set to half to run every other frame, or to quarter to run once every four frames.
21 | /// Note: If multiple objects using hand tracking are used simultaneously, then the highest requestRate of any of them will be used for all of them.
22 | public static var requestRate: FrameRateRegulator.RequestRate {
23 | get {
24 | return HandDetector.shared.frameRateRegulator.requestRate
25 | }
26 | set {
27 | HandDetector.shared.frameRateRegulator.requestRate = newValue
28 | }
29 | }
30 |
31 | public required init(arView: ARView,
32 | confidenceThreshold: Float = 0.4)
33 | {
34 | hand2D = .init(confidenceThreshold: confidenceThreshold)
35 |
36 | Hand2DSystem.registerSystem(with: arView)
37 |
38 | Hand2DSystem.participatingTrackers.append(self)
39 |
40 | populateJointPositions()
41 | }
42 |
43 | required init() {
44 | fatalError("init() has not been implemented")
45 | }
46 |
47 | /// Destroys this tracker and its references to any ARViews.
48 | /// This helps prevent memory leaks.
49 | public func destroy() {
50 | hand2D.jointScreenPositions = [:]
51 |
52 | hand2D.trackedViews.forEach { view in
53 | view.value.removeFromSuperview()
54 | }
55 |
56 | hand2D.trackedViews.removeAll()
57 |
58 | Hand2DSystem.participatingTrackers.removeAll(where: { $0 == self })
59 | }
60 |
61 | private func populateJointPositions() {
62 | hand2D.jointScreenPositions = [:]
63 |
64 | hand2D.jointAVFoundationPositions = [:]
65 |
66 | for joint in HandJoint.allHandJoints {
67 | hand2D.jointScreenPositions[joint] = CGPoint()
68 | hand2D.jointAVFoundationPositions[joint] = CGPoint()
69 | }
70 | }
71 |
72 | public func setConfidenceThreshold(_ newValue: Float) {
73 | hand2D.confidenceThreshold = newValue
74 | }
75 |
76 | /// Allows only one view per joint.
77 | /// - This will add `thisView` to ARView automatically.
78 | /// - If you would like to attach more than one view per joint, then try attaching additional views to the view that is already attached to this joint.
79 | public func attach(thisView: UIView, toThisJoint thisJoint: HandJointName) {
80 | guard let arView = Hand2DSystem.arView else { return }
81 |
82 | hand2D.trackedViews[thisJoint] = thisView
83 |
84 | if thisView.superview == nil {
85 | arView.addSubview(thisView)
86 | }
87 | }
88 |
89 | public func removeJoint(_ joint: HandJointName) {
90 | hand2D.trackedViews[joint]?.removeFromSuperview()
91 |
92 | hand2D.trackedViews.removeValue(forKey: joint)
93 | }
94 | }
95 |
96 | extension HandTracker2D: Equatable {
97 | public static func == (lhs: HandTracker2D, rhs: HandTracker2D) -> Bool {
98 | return lhs.id == rhs.id
99 | }
100 | }
101 |
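102 | // Usage sketch (illustrative; `arView` is a hypothetical, full-screen ARView with a
103 | // running session): overlaying a dot on the index fingertip.
104 | func addFingertipDot(to arView: ARView) -> HandTracker2D {
105 |     let tracker = HandTracker2D(arView: arView)
106 |     let dot = UIView(frame: CGRect(x: 0, y: 0, width: 20, height: 20))
107 |     dot.backgroundColor = .systemGreen
108 |     dot.layer.cornerRadius = 10
109 |     tracker.attach(thisView: dot, toThisJoint: .indexTip)
110 |     return tracker
111 | }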
--------------------------------------------------------------------------------
/Sources/HandTracking/Hand2D/HandTracking2DSystem.swift:
--------------------------------------------------------------------------------
1 | //
2 | // HandTrackingSystem.swift
3 | // BodyTracking-Example
4 | //
5 | // Created by Grant Jarvis on 4/29/22.
6 | //
7 |
8 | import ARKit
9 | import BTShared
10 | import Foundation
11 | import RealityKit
12 | import RKUtilities
13 |
14 | internal class Hand2DSystem: System {
15 | required init(scene _: Scene) {}
16 |
17 | static var dependencies: [SystemDependency] {
18 | [.before(HandTracking3DSystem.self)]
19 | }
20 |
21 | internal private(set) weak static var arView: ARView?
22 |
23 | // RealityKit creates the instance of this System itself, so we use static properties.
24 | // Since HandTracker2D is not an Entity and does not get added to the 3D scene, instead of querying the scene for the entities we keep weak references to them here.
25 | @WeakCollection static var participatingTrackers = [HandTracker2D]()
26 |
27 | internal static func registerSystem(with arView: ARView) {
28 | self.arView = arView
29 | registerSystem()
30 | }
31 |
32 | // TODO: Throw an error if more hands are added than are supported.
33 | func update(context _: SceneUpdateContext) {
34 | guard let currentFrame = Self.arView?.session.currentFrame else { return }
35 |
36 | // Perform the request in a separate dispatch queue to prevent blocking the main thread (including the camera feed).
37 | // Using `Task(priority:)` and `await` stalled the camera feed when multiple hands were present in the frame. Different priority levels were tested and none were acceptable.
38 | HandDetector.requestQueue.async {
39 | defer {
40 | DispatchQueue.main.async {
41 | // Position values can interpolate even on frames that throw an error.
42 | Self.participatingTrackers.forEach {
43 | self.updateTrackedViews(on: $0, frame: currentFrame)
44 | }
45 | }
46 | }
47 |
48 | do {
49 | let observations = try HandDetector.shared.runFingerDetection(frame: currentFrame,
50 | handCount: Self.participatingTrackers.count)
51 | DispatchQueue.main.async {
52 | self.handleObservations(observations, frame: currentFrame)
53 | }
54 |
55 | } catch (HandTrackingError.noHandsDetected) {
56 | Self.participatingTrackers.forEach {
57 | if $0.hand2D.handIsRecognized.value { $0.hand2D.handIsRecognized.value = false }
58 | }
59 |
60 | } catch {}
61 | }
62 | }
63 |
64 | private func handleObservations(_ observations: [VNHumanHandPoseObservation],
65 | frame _: ARFrame)
66 | {
67 | // Using chirality does not work when the hand flips around with the palm towards the camera.
68 | for (tracker, observation) in zip(Self.participatingTrackers, observations) {
69 | handleObservation(on: tracker, observation: observation)
70 | }
71 | }
72 |
73 | private func handleObservation(on tracker: HandTracker2D,
74 | observation: VNHumanHandPoseObservation)
75 | {
76 | guard let fingerPoints = try? observation.recognizedPoints(.all) else { return }
77 |
78 | var aboveConfidenceThreshold = false
79 |
80 | for point in fingerPoints {
81 |
82 | // TODO: expose confidence values publicly.
83 | // TODO: better handle individual joints becoming undetected.
84 | guard point.value.confidence > tracker.hand2D.confidenceThreshold else { continue }
85 |
86 | aboveConfidenceThreshold = true
87 |
88 | let cgPoint = CGPoint(x: point.value.x, y: point.value.y)
89 |
90 | let avPoint = cgPoint.convertVisionToAVFoundation()
91 |
92 | tracker.hand2D.jointAVFoundationPositions[point.key] = avPoint
93 |
94 | if let screenSpacePoint = Self.arView?.convertAVFoundationToScreenSpace(avPoint) {
95 | tracker.hand2D.jointScreenPositions[point.key] = screenSpacePoint
96 | }
97 | }
98 |
99 | if !aboveConfidenceThreshold {
100 | if tracker.hand2D.handIsRecognized.value {
101 | tracker.hand2D.handIsRecognized.value = false
102 | }
103 | } else {
104 | if tracker.hand2D.handIsRecognized.value == false {
105 | tracker.hand2D.handIsRecognized.value = true
106 | }
107 | if tracker.hand2D.handWasInitiallyIdentified.value == false {
108 | tracker.hand2D.handWasInitiallyIdentified.value = true
109 | }
110 | }
111 | }
112 |
113 | internal func updateTrackedViews(on tracker: HandTracker2D,
114 | frame _: ARFrame)
115 | {
116 | let hand2D = tracker.hand2D
117 |
118 | guard
119 | hand2D.jointScreenPositions.count > 0
120 | else { return }
121 |
122 | for view in hand2D.trackedViews {
123 | let jointIndex = view.key
124 |
125 | if let screenPosition = hand2D.jointScreenPositions[jointIndex] {
126 | switch HandDetector.shared.frameRateRegulator.requestRate {
127 | case .everyFrame:
128 |
129 | view.value.center = screenPosition
130 |
131 | // Interpolate between where the view is and the target location.
132 | // We do not run the Vision request every frame, so we need to animate the view in between those frames.
133 | case .half, .quarter:
134 |
135 | let viewCenter = view.value.center
136 |
137 | let difference = screenPosition - viewCenter
138 |
139 | view.value.center = viewCenter + (difference * 0.5)
140 | }
141 | }
142 | }
143 | }
144 | }
145 |
--------------------------------------------------------------------------------
/Sources/HandTracking/Hand3D/CVPixelBuffer+Helpers.swift:
--------------------------------------------------------------------------------
1 | //
2 | // File.swift
3 | //
4 | //
5 | // Created by Grant Jarvis on 12/30/23.
6 | //
7 |
8 | import CoreVideo
9 | import Foundation
10 |
11 | public extension CVPixelBuffer {
12 | struct BufferPosition {
13 | var column: Int
14 | var row: Int
15 | }
16 |
17 | /// The input point must be in normalized AVFoundation coordinates, i.e. (0,0) is the top-left and (1,1) the bottom-right.
18 | func value(from point: CGPoint) -> Float? {
19 | let width = CVPixelBufferGetWidth(self)
20 | let height = CVPixelBufferGetHeight(self)
21 |
22 | let colPosition = Int(point.x * CGFloat(width))
23 |
24 | let rowPosition = Int(point.y * CGFloat(height))
25 |
26 | return value(column: colPosition, row: rowPosition)
27 | }
28 |
29 | func value(column: Int, row: Int) -> Float? {
30 | guard CVPixelBufferGetPixelFormatType(self) == kCVPixelFormatType_DepthFloat32 else { return nil }
31 | CVPixelBufferLockBaseAddress(self, .readOnly)
32 | if let baseAddress = CVPixelBufferGetBaseAddress(self) {
33 | let width = CVPixelBufferGetWidth(self)
34 | let index = column + (row * width)
35 | let offset = index * MemoryLayout<Float>.stride
36 | let value = baseAddress.load(fromByteOffset: offset, as: Float.self)
37 | CVPixelBufferUnlockBaseAddress(self, .readOnly)
38 | return value
39 | }
40 | CVPixelBufferUnlockBaseAddress(self, .readOnly)
41 | return nil
42 | }
43 |
44 | /// The input points must be in normalized AVFoundation coordinates, i.e. (0,0) is the top-left and (1,1) the bottom-right.
45 | func values(from points: [CGPoint]) -> [Float]? {
46 | let width = CVPixelBufferGetWidth(self)
47 | let height = CVPixelBufferGetHeight(self)
48 |
49 | let bufferPositions = points.map {
50 | let colPosition = Int($0.x * CGFloat(width))
51 | let rowPosition = Int($0.y * CGFloat(height))
52 | return BufferPosition(column: colPosition, row: rowPosition)
53 | }
54 |
55 | return values(from: bufferPositions)
56 | }
57 |
58 | func values(from positions: [BufferPosition]) -> [Float]? {
59 | guard CVPixelBufferGetPixelFormatType(self) == kCVPixelFormatType_DepthFloat32 else { return nil }
60 |
61 | CVPixelBufferLockBaseAddress(self, .readOnly)
62 |
63 | if let baseAddress = CVPixelBufferGetBaseAddress(self) {
64 | let width = CVPixelBufferGetWidth(self)
65 |
66 | let values = positions.map {
67 | let index = $0.column + ($0.row * width)
68 | let offset = index * MemoryLayout<Float>.stride
69 | return baseAddress.load(fromByteOffset: offset, as: Float.self)
70 | }
71 |
72 | CVPixelBufferUnlockBaseAddress(self, .readOnly)
73 |
74 | return values
75 | }
76 | CVPixelBufferUnlockBaseAddress(self, .readOnly)
77 | return nil
78 | }
79 | }
80 |
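81 | // Usage sketch (illustrative; the function name is hypothetical): sampling depth,
82 | // in meters, at the center of a DepthFloat32 buffer such as
83 | // ARFrame.smoothedSceneDepth?.depthMap.
84 | func depthAtImageCenter(of depthMap: CVPixelBuffer) -> Float? {
85 |     // (0.5, 0.5) is the image center in normalized AVFoundation coordinates.
86 |     return depthMap.value(from: CGPoint(x: 0.5, y: 0.5))
87 | }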
--------------------------------------------------------------------------------
/Sources/HandTracking/Hand3D/Hand3D/Hand3DComponent.swift:
--------------------------------------------------------------------------------
1 | //
2 | // File.swift
3 | //
4 | //
5 | // Created by Grant Jarvis on 12/30/23.
6 | //
7 |
8 | import BTShared
9 | import RealityKit
10 | import struct RKUtilities.Registerer
11 |
12 | public protocol HasHand3D {
13 | var hand3D: Hand3DComponent { get }
14 | }
15 |
16 | public struct Hand3DComponent: Component {
17 | public internal(set) var trackedEntities = [HandJoint.JointName: Entity]()
18 |
19 | init() {
20 | Registerer.register(Self.self)
21 | HandTracking3DSystem.registerSystem()
22 | }
23 | }
24 |
--------------------------------------------------------------------------------
/Sources/HandTracking/Hand3D/Hand3D/HandTracker3D.swift:
--------------------------------------------------------------------------------
1 | //
2 | // HandTracker3D.swift
3 | // BodyTracking-Example
4 | //
5 | // Created by Grant Jarvis on 4/29/22.
6 | //
7 |
8 | import BTShared
9 | import Combine
10 | import CoreVideo
11 | import Foundation
12 | import RealityKit
13 |
14 | public enum DepthBufferSelection {
15 | case sceneDepth
16 | case smoothedSceneDepth
17 | case personSegmentationWithDepth
18 | }
19 |
20 | public class HandTracker3D: Entity, HasHand3D {
21 | public internal(set) var hand3D: Hand3DComponent {
22 | get {
23 | component(forType: Hand3DComponent.self) ?? .init()
24 | }
25 | set {
26 | components.set(newValue)
27 | }
28 | }
29 |
30 | public required init() {
31 | super.init()
32 |
33 | hand3D = .init()
34 | }
35 |
36 | // TODO: Use ML model for 3D hand tracking.
37 |
38 | /// Attaches an entity to the given joint so that it follows that joint's transform.
39 | /// - A joint entity is created (and reused) for each joint, so multiple entities may be attached to the same joint.
40 | /// - Unless `preservingWorldTransform` is true, the attached entity's transform is reset to align with the joint.
41 | public func attach(entity: Entity, to joint: HandJoint.JointName, preservingWorldTransform: Bool = false) {
42 | let jointEnt: Entity
43 |
44 | if let existingEnt = hand3D.trackedEntities[joint] {
45 | jointEnt = existingEnt
46 | } else {
47 | jointEnt = Entity()
48 | hand3D.trackedEntities[joint] = jointEnt
49 | }
50 |
51 | jointEnt.addChild(entity, preservingWorldTransform: preservingWorldTransform)
52 |
53 | addChild(jointEnt)
54 |
55 | if !preservingWorldTransform { entity.transform = .init() }
56 | }
57 |
58 | public func removeEnt(_ joint: HandJoint.JointName) {
59 | hand3D.trackedEntities[joint]?.removeFromParent()
60 | hand3D.trackedEntities.removeValue(forKey: joint)
61 | }
62 |
63 | public func destroy() {
64 | hand3D.trackedEntities.forEach { pair in
65 | pair.value.removeFromParent()
66 | }
67 |
68 | removeFromParent()
69 | }
70 | }
71 |
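72 | // Usage sketch (illustrative; `handAnchor` is a hypothetical HandAnchor already added
73 | // to the scene): following the index fingertip with a small sphere.
74 | func addFingertipSphere(to handAnchor: HandAnchor) {
75 |     let hand3D = HandTracker3D()
76 |     let sphere = ModelEntity(mesh: .generateSphere(radius: 0.01))
77 |     hand3D.attach(entity: sphere, to: .indexTip)
78 |     handAnchor.attach(handTracker: hand3D)
79 | }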
--------------------------------------------------------------------------------
/Sources/HandTracking/Hand3D/HandAnchor/HandAnchor.swift:
--------------------------------------------------------------------------------
1 | //
2 | // File.swift
3 | //
4 | //
5 | // Created by Grant Jarvis on 12/30/23.
6 | //
7 |
8 | import ARKit
9 | import BTShared
10 | import RealityKit
11 | import Vision
12 |
13 | public class HandAnchor: Entity, HasHandAnchoring {
14 | /// The underlying 2D hand tracker used to help determine the 3D joint transforms.
15 | public fileprivate(set) var handTracker2D: HandTracker2D
16 |
17 | @WeakCollection internal var handTrackers3D = [HandTracker3D]()
18 |
19 | /// The frequency that the Vision request for detecting hands will be performed.
20 | ///
21 | /// Running the request every frame may decrease performance.
22 | /// Can be reduced to increase performance at the cost of choppy tracking.
23 | /// Set to half to run every other frame, or to quarter to run once every four frames.
24 | public static var requestRate: FrameRateRegulator.RequestRate {
25 | get {
26 | return HandDetector.shared.frameRateRegulator.requestRate
27 | }
28 | set {
29 | HandDetector.shared.frameRateRegulator.requestRate = newValue
30 | }
31 | }
32 |
33 | public internal(set) var handAnchorComponent: HandAnchorComponent {
34 | get {
35 | component(forType: HandAnchorComponent.self)!
36 | }
37 | set {
38 | components.set(newValue)
39 | }
40 | }
41 |
42 | internal let autoEnableInitially: Bool
43 |
44 | internal let autoToggleContinually: Bool
45 |
46 | /// Initializes a `HandAnchor`
47 | /// - Parameter arView: The ARView that the hand is viewed from.
48 | /// - Parameter depthBufferSelection: Which depth buffer option to use.
49 | /// - Parameter autoEnableInitially: If set to true, the HandAnchor starts out disabled and is enabled automatically when the hand is first recognized.
50 | /// - Parameter autoToggleContinually: If set to true, the HandAnchor will automatically disable itself when the hand is no longer recognized, and re-enable when the hand is recognized again.
51 | public init(arView: ARView,
52 | depthBufferSelection: DepthBufferSelection? = nil,
53 | autoEnableInitially: Bool = true,
54 | autoToggleContinually: Bool = true)
55 | {
56 | handTracker2D = .init(arView: arView)
57 |
58 | HandTracking3DSystem.arView = arView
59 |
60 | self.autoEnableInitially = autoEnableInitially
61 |
62 | self.autoToggleContinually = autoToggleContinually
63 |
64 | super.init()
65 |
66 | if autoEnableInitially {
67 | isEnabled = false
68 | }
69 |
70 | anchoring = .init(.world(transform: float4x4.init(diagonal: .one)))
71 |
72 | // TODO: fix depth for non-LiDAR enabled devices.
73 | let depthBufferSelection = depthBufferSelection ?? (ARWorldTrackingConfiguration.supportsFrameSemantics(.smoothedSceneDepth) ? .smoothedSceneDepth : .personSegmentationWithDepth)
74 |
75 | handAnchorComponent = .init(depthBufferSelection: depthBufferSelection)
76 |
77 | HandDetector.shared.frameRateRegulator.requestRate = .everyFrame
78 | }
79 |
80 | @MainActor required init() {
81 | fatalError("init() has not been implemented")
82 | }
83 |
84 | /// Attaches a `HandTracker3D` to this `HandAnchor` so that the `HandTracker3D`'s joint transforms will be updated based on the tracking data associated with this `HandAnchor`.
85 | /// - Parameters:
86 | /// - handTracker: The entity that will be added for tracking.
87 | /// - automaticallyAddChild: Set to true to add this entity as a child of the `HandAnchor`. If set to false, you can still add the `HandTracker3D` to the scene in some other way (such as to another anchor or an anchor's descendant), and its joint transforms will be updated based on the tracking data associated with this `HandAnchor`.
88 | public func attach(handTracker: HandTracker3D,
89 | automaticallyAddChild: Bool = true)
90 | {
91 | guard handTrackers3D.contains(where: { $0 == handTracker }) == false else {
92 | print("Already added HandTracker3D \(handTracker.name) to this HandAnchor")
93 | return
94 | }
95 | handTrackers3D.append(handTracker)
96 |
97 | if automaticallyAddChild { addChild(handTracker) }
98 | }
99 |
100 | public func destroy() {
101 | handTracker2D.destroy()
102 |
103 | handTrackers3D.forEach {
104 | $0.destroy()
105 | }
106 |
107 | removeFromParent()
108 | }
109 | }
110 |
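111 | // Usage sketch (illustrative; `arView` is a hypothetical ARView whose session supplies
112 | // depth data, e.g. LiDAR scene depth or person segmentation with depth): creating the
113 | // anchor and adding it to the scene.
114 | func addHandAnchor(to arView: ARView) -> HandAnchor {
115 |     let handAnchor = HandAnchor(arView: arView)
116 |     arView.scene.addAnchor(handAnchor)
117 |     return handAnchor
118 | }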
--------------------------------------------------------------------------------
/Sources/HandTracking/Hand3D/HandAnchor/HandAnchorComponent.swift:
--------------------------------------------------------------------------------
1 | //
2 | // File.swift
3 | //
4 | //
5 | // Created by Grant Jarvis on 12/30/23.
6 | //
7 |
8 | import BTShared
9 | import Combine
10 | import RealityKit
11 | import struct RKUtilities.Registerer
12 |
13 | public protocol HasHandAnchoring: HasAnchoring {
14 | var handAnchorComponent: HandAnchorComponent { get }
15 | }
16 |
17 | public struct HandAnchorComponent: Component {
18 |
19 | // TODO: Make orientation of descendant joints optional - for efficiency.
20 | // When orientation is false, track only the used joints.
21 | // TODO: Add optional smoothing amount - position and orientation affected.
22 | public internal(set) var handWasInitiallyIdentified = CurrentValueSubject<Bool, Never>(false)
23 |
24 | public internal(set) var handIsRecognized = CurrentValueSubject<Bool, Never>(false)
25 |
26 | public var depthBufferSelection: DepthBufferSelection = .smoothedSceneDepth
27 |
28 | public internal(set) var depthValues = [HandJoint.JointName: Float]()
29 |
30 | public internal(set) var jointModelTransforms = [HandJoint.JointName: simd_float4x4]()
31 |
32 | init(depthBufferSelection: DepthBufferSelection) {
33 | self.depthBufferSelection = depthBufferSelection
34 |
35 | populateJointTransforms()
36 |
37 | Registerer.register(Self.self)
38 | HandTracking3DSystem.registerSystem()
39 | }
40 |
41 | private mutating func populateJointTransforms() {
42 | let identity = simd_float4x4.init(diagonal: .one)
43 | for joint in HandJoint.allHandJoints {
44 | jointModelTransforms[joint] = identity
45 | }
46 | }
47 | }
48 |
--------------------------------------------------------------------------------
/Sources/HandTracking/Hand3D/HandTracking3DSystem.swift:
--------------------------------------------------------------------------------
1 | //
2 | // File.swift
3 | //
4 | //
5 | // Created by Grant Jarvis on 12/30/23.
6 | //
7 |
8 | import ARKit
9 | import RealityKit
10 | import RKUtilities
11 | import BTShared
12 |
13 | internal class HandTracking3DSystem: System {
14 | static var dependencies: [SystemDependency] {
15 | [.after(Hand2DSystem.self)]
16 | }
17 |
18 | internal weak static var arView: ARView?
19 |
20 | required init(scene _: Scene) {}
21 |
22 | private static var handAnchorQuery = EntityQuery(where: .has(HandAnchorComponent.self))
23 |
24 | func update(context: SceneUpdateContext) {
25 | // TODO: Support multiple hands.
26 | context.scene.performQuery(Self.handAnchorQuery).compactMap { $0 as? HandAnchor }.forEach { handAnchor in
27 |
28 | let hand2D = handAnchor.handTracker2D.hand2D
29 | let anchorComponent = handAnchor.handAnchorComponent
30 |
31 | guard
32 | hand2D.handWasInitiallyIdentified.value,
33 | let currentFrame = Self.arView?.session.currentFrame,
34 | let sceneDepth = getSceneDepth(currentFrame: currentFrame,
35 | anchorComponent: anchorComponent)
36 | else { return }
37 |
38 | updateEnabled(handAnchor: handAnchor,
39 | anchorComponent: anchorComponent,
40 | hand2D: hand2D)
41 |
42 | updateTransforms(on: handAnchor, sceneDepth: sceneDepth)
43 |
44 | updateTrackedEntities(on: handAnchor)
45 | }
46 | }
47 |
48 | private func getSceneDepth(currentFrame: ARFrame,
49 | anchorComponent: HandAnchorComponent) -> CVPixelBuffer? {
50 | switch anchorComponent.depthBufferSelection {
51 | case .sceneDepth:
52 | currentFrame.sceneDepth?.depthMap
53 |
54 | case .smoothedSceneDepth:
55 | // smoothedSceneDepth works much better than estimatedDepthData.
56 | currentFrame.smoothedSceneDepth?.depthMap
57 |
58 | case .personSegmentationWithDepth:
59 | currentFrame.estimatedDepthData
60 | }
61 | }
62 |
63 | private func updateEnabled(handAnchor: HandAnchor,
64 | anchorComponent: HandAnchorComponent,
65 | hand2D: Hand2DComponent) {
66 | // Safer than using sink in case the components get regenerated.
67 | if anchorComponent.handWasInitiallyIdentified.value != hand2D.handWasInitiallyIdentified.value {
68 |
69 | handAnchor.handAnchorComponent.handWasInitiallyIdentified.value = hand2D.handWasInitiallyIdentified.value
70 |
71 | if handAnchor.autoEnableInitially &&
72 | hand2D.handWasInitiallyIdentified.value {
73 | handAnchor.isEnabled = true
74 | }
75 | }
76 |
77 | if anchorComponent.handIsRecognized.value != hand2D.handIsRecognized.value {
78 | handAnchor.handAnchorComponent.handIsRecognized.value = hand2D.handIsRecognized.value
79 |
80 | if handAnchor.autoToggleContinually {
81 | handAnchor.isEnabled = hand2D.handIsRecognized.value
82 | }
83 | }
84 | }
85 |
86 | private let jointMapping: [HandJoint.JointName: Int] = {
87 | let jointNames = HandJoint.allHandJoints
88 |
89 | var jointMapping = [HandJoint.JointName: Int]()
90 |
91 | jointNames.enumerated().forEach {
92 | jointMapping[$0.1] = $0.0
93 | }
94 | return jointMapping
95 | }()
96 |
97 | /*
98 | If we have the 2D screen position of the joint and we have the depth at that point, we can project from that 2D position into world space
99 | (using ARView.ray(through screenPoint: CGPoint))
100 | and get a 3D world-space coordinate for that joint.
101 | */
102 | private func updateTransforms(on handAnchor: HandAnchor,
103 | sceneDepth: CVPixelBuffer)
104 | {
105 |
106 | guard
107 | let positions2D = get2DPositions(on: handAnchor),
108 |
109 | // Gather all values at once instead of locking the buffer multiple times.
110 | // Tip depths are not used.
111 | let depthsAtPoints = sceneDepth.values(from: positions2D.avPositions)
112 | else { return }
113 |
114 | updateAnchorTransform(of: handAnchor,
115 | screenPositions: positions2D.screenPositions,
116 | depthsAtPoints: depthsAtPoints)
117 |
118 | guard
119 | let modelPositions = getModelPositions(on: handAnchor,
120 | depthsAtPoints: depthsAtPoints,
121 | screenPositions: positions2D.screenPositions)
122 | else {return}
123 |
124 | setJointTransforms(on: handAnchor,
125 | modelPositions: modelPositions)
126 | }
127 |
128 | private func get2DPositions(on handAnchor: HandAnchor) -> (screenPositions: [CGPoint],
129 | avPositions: [CGPoint])? {
130 | let jointNames = HandJoint.allHandJoints
131 |
132 | let hand2D = handAnchor.handTracker2D.hand2D
133 |
134 | let jointCount = jointNames.count
135 |
136 | let screenPositions = jointNames.compactMap {
137 | hand2D.jointScreenPositions[$0]
138 | }
139 |
140 | let avPositions = jointNames.compactMap {
141 | hand2D.jointAVFoundationPositions[$0]
142 | }
143 |
144 | guard
145 | screenPositions.count == jointCount,
146 | avPositions.count == jointCount
147 | else { return nil }
148 |
149 | return (screenPositions, avPositions)
150 | }
151 |
152 | private func getModelPositions(on handAnchor: HandAnchor,
153 | depthsAtPoints: [Float],
154 | screenPositions: [CGPoint]
155 | ) -> [simd_float3]? {
156 | let jointNames = HandJoint.allHandJoints
157 |
158 | let projectionData = zip(screenPositions, depthsAtPoints)
159 | let modelPositions = zip(jointNames, projectionData).compactMap { jointName, projectionDataPoint in
160 |
161 | // Wrist and tip depths are not used.
162 | return modelPosition(on: handAnchor,
163 | jointName: jointName,
164 | screenPosition: projectionDataPoint.0,
165 | depth: projectionDataPoint.1)
166 | }
167 |
168 | guard modelPositions.count == HandJoint.allHandJoints.count else { return nil }
169 |
170 | return modelPositions
171 | }
172 |
173 | private func updateAnchorTransform(of handAnchor: HandAnchor,
174 | screenPositions: [CGPoint],
175 | depthsAtPoints: [Float]) {
176 |
177 | guard let worldWristPosition = worldPosition(of: .wrist,
178 | on: handAnchor,
179 | screenPositions: screenPositions,
180 | depthsAtPoints: depthsAtPoints) else {return}
181 |
182 | handAnchor.worldPosition = worldWristPosition
183 |
184 | if let worldMiddlePosition = worldPosition(of: .middleMCP,
185 | on: handAnchor,
186 | screenPositions: screenPositions,
187 | depthsAtPoints: depthsAtPoints),
188 | let worldIndexPosition = worldPosition(of: .indexMCP,
189 | on: handAnchor,
190 | screenPositions: screenPositions,
191 | depthsAtPoints: depthsAtPoints) {
192 | let upDirection = triangleNormal(vertex1: worldMiddlePosition,
193 | vertex2: worldWristPosition,
194 | vertex3: worldIndexPosition)
195 |
196 | let newOrientation = orientationFromVects(rootPoint: worldWristPosition,
197 | forwardPoint: worldMiddlePosition,
198 | upDirection: upDirection)
199 |
200 | handAnchor.worldRotation = simd_slerp(handAnchor.worldRotation, newOrientation, 0.6)
201 | }
202 | }
203 |
204 | private func updateTrackedEntities(on handAnchor: HandAnchor) {
205 | let jointModelTransforms = handAnchor.handAnchorComponent.jointModelTransforms
206 |
207 | for handTracker3D in handAnchor.handTrackers3D {
208 | handTracker3D.hand3D.trackedEntities.forEach {
209 | if let transform = jointModelTransforms[$0.key] {
210 | $0.value.setTransformMatrix(transform, relativeTo: handTracker3D)
211 | }
212 | }
213 | }
214 | }
215 |
216 | private func setJointTransforms(on handAnchor: HandAnchor,
217 | modelPositions: [simd_float3]) {
218 |
219 |
220 | var jointModelTransforms = handAnchor.handAnchorComponent.jointModelTransforms
221 |
222 | for (jointName, index) in jointMapping {
223 |
224 | if jointName == .wrist { continue }
225 |
226 | // -- POSITION --
227 | let modelPosition = modelPositions[index]
228 |
229 | jointModelTransforms[jointName]?.translation = modelPosition
230 |
231 | // -- ORIENTATION --
232 | let currentOrientation = jointModelTransforms[jointName]?.orientation ?? .init()
233 |
234 | let orientationTarget = HandJoint.orientationTarget[jointName]!
235 |
236 | let targetPosition = modelPositions[jointMapping[orientationTarget]!]
237 |
238 | let newOrientation = getOrientation(for: jointName,
239 | currentPosition: modelPosition,
240 | currentOrientation: currentOrientation,
241 | targetPosition: targetPosition)
242 |
243 |
244 | jointModelTransforms[jointName]?.orientation = newOrientation
245 |
246 | }
247 |
248 | handAnchor.handAnchorComponent.jointModelTransforms = jointModelTransforms
249 | }
250 |
251 | private func getOrientation(for jointName: HandJoint.JointName,
252 | currentPosition: simd_float3,
253 | currentOrientation: simd_quatf,
254 | targetPosition: simd_float3,
255 | t: Float = 0.5,
256 | offset: simd_quatf? = nil) -> simd_quatf {
257 | var targetPosition = targetPosition
258 | if HandJoint.tipJoints.contains(jointName) {
259 | targetPosition = currentPosition + (currentPosition - targetPosition)
260 | }
261 |
262 | var targetOrientation = simd_quatf(from: .forward, to: safeNormalize(targetPosition - currentPosition))
263 |
264 | if let offset {
265 | targetOrientation *= offset
266 | }
267 |
268 | return simd_slerp(currentOrientation, targetOrientation, t)
269 | }
270 |
271 | private func triangleNormal(vertex1: SIMD3<Float>,
272 | vertex2: SIMD3<Float>,
273 | vertex3: SIMD3<Float>) -> SIMD3<Float> {
274 | let vector1 = vertex1 - vertex2
275 | let vector2 = vertex3 - vertex2
276 |
277 | // Calculate the cross product to get the normal vector
278 | let normalVector = cross(vector1, vector2)
279 |
280 | // Normalize the result to get a unit normal vector
281 | return safeNormalize(normalVector)
282 | }
283 |
284 | private func orientationFromVects(rootPoint: SIMD3<Float>,
285 | forwardPoint: SIMD3<Float>,
286 | upDirection: SIMD3<Float>) -> simd_quatf {
287 |
288 | let forwardDirection = safeNormalize(forwardPoint - rootPoint)
289 |
290 | let quaternionForward = simd_quatf(from: .forward, to: forwardDirection)
291 |
292 | let rotatedUp = quaternionForward.act(simd_float3(0, 1, 0))
293 |
294 | let adjustedQuaternion = simd_quatf(from: rotatedUp, to: upDirection) * quaternionForward
295 |
296 | return adjustedQuaternion
297 | }
298 |
299 | /// Get the model-space position from a UIKit screen point and a depth value
300 | /// - Parameters:
301 | /// - screenPosition: A `CGPoint` representing a point on screen in UIKit coordinates.
302 | /// - depth: The depth at this coordinate, in meters.
303 | /// - Returns: The position in model space (relative to the `HandAnchor`) of this coordinate at this depth.
304 | public func modelPosition(on handAnchor: HandAnchor,
305 | jointName: HandJoint.JointName,
306 | screenPosition: CGPoint,
307 | depth: Float) -> simd_float3?
308 | {
309 | if let worldSpacePosition = worldPosition(on: handAnchor,
310 | jointName: jointName,
311 | screenPosition: screenPosition,
312 | depth: depth) {
313 | return handAnchor.convert(position: worldSpacePosition, from: nil)
314 | }
315 | return nil
316 | }
317 |
318 | private func worldPosition(of joint: HandJoint.JointName,
319 | on handAnchor: HandAnchor,
320 | screenPositions: [CGPoint],
321 | depthsAtPoints: [Float]) -> simd_float3? {
322 |
323 | let jointIndex = jointMapping[joint]!
324 |
325 | let jointScreenPosition = screenPositions[jointIndex]
326 |
327 | let jointDepth = depthsAtPoints[jointIndex]
328 |
329 | return worldPosition(on: handAnchor,
330 | jointName: joint,
331 | screenPosition: jointScreenPosition,
332 | depth: jointDepth)
333 | }
334 |
335 | /// Get the world-space position from a UIKit screen point and a depth value
336 | /// - Parameters:
337 | /// - screenPosition: A `CGPoint` representing a point on screen in UIKit coordinates.
338 | /// - depth: The depth at this coordinate, in meters.
339 | /// - Returns: The position in world space of this coordinate at this depth.
340 | public func worldPosition(on handAnchor: HandAnchor,
341 | jointName: HandJoint.JointName,
342 | screenPosition: CGPoint,
343 | depth: Float) -> simd_float3?
344 | {
345 | guard
346 | let arView = Self.arView,
347 | let rayResult = arView.ray(through: screenPosition)
348 | else { return nil }
349 |
350 | var depth = depth
351 |
352 | smoothDepthValue(on: jointName,
353 | handAnchor: handAnchor,
354 | depth: &depth)
355 |
356 | // rayResult.direction is a normalized (1 meter long) vector pointing in the correct direction, and we want to go the length of depth along this vector.
357 | let worldOffset = rayResult.direction * depth
358 | let worldPosition = rayResult.origin + worldOffset
359 |
360 | return worldPosition
361 | }
362 |
363 | private func smoothDepthValue(on jointName: HandJoint.JointName,
364 | handAnchor: HandAnchor,
365 | depth: inout Float){
366 |
367 | let depthValues = handAnchor.handAnchorComponent.depthValues
368 |
369 | // Tip joints have unreliable depth.
370 | if HandJoint.tipJoints.contains(jointName),
371 | let depthTarget = HandJoint.orientationTarget[jointName],
372 | let targetDepth = depthValues[depthTarget] {
373 | depth = targetDepth
374 | }
375 |
376 | let previousDepth = depthValues[jointName]
377 |
378 | // Middle depth is more stable.
379 | if let middleDepth = depthValues[.middleMCP],
380 | abs(depth - middleDepth) > 0.1
381 | {
382 | if let previousDepth,
383 | // As the hand moves rapidly closer to or away from the camera, more distal values become less reliable.
384 | abs(previousDepth - middleDepth) < 0.11
385 | {
386 | depth = previousDepth
387 |
388 | } else {
389 |
390 | depth = middleDepth
391 | }
392 |
393 | } else {
394 | // 2D screen positions are pretty good, but depth values are jittery, so they need smoothing.
395 | if let previousDepth {
396 | depth = Float.lerp(previousDepth, depth, t: 0.2)
397 | }
398 |
399 | handAnchor.handAnchorComponent.depthValues[jointName] = depth
400 | }
401 | }
402 | }
403 |
404 | public func safeNormalize(_ x: SIMD3<Float>) -> SIMD3<Float> {
405 | var normalized = normalize(x)
406 | if normalized.x.isNaN { normalized.x = 0 }
407 | if normalized.y.isNaN { normalized.y = 0 }
408 | if normalized.z.isNaN { normalized.z = 0 }
409 | return normalized
410 | }
411 |
--------------------------------------------------------------------------------