├── .swiftpm
│   └── xcode
│       └── package.xcworkspace
│           └── contents.xcworkspacedata
├── Package.swift
├── README.md
└── Sources
    └── DicyaninHandTracking
        ├── Components
        │   ├── CollisionSubscriptionComponent.swift
        │   ├── ToolCollisionTriggerComponent.swift
        │   └── ToolInteractionTargetComponent.swift
        ├── FingerVisualizationEntity.swift
        ├── HandTracking+Interaction.swift
        ├── HandTracking+ModelLoading.swift
        ├── HandTracking.swift
        ├── HandTrackingView.swift
        ├── ToolManager.swift
        ├── ToolView.swift
        └── ToolViewButton.swift
/.swiftpm/xcode/package.xcworkspace/contents.xcworkspacedata:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <Workspace
3 |    version = "1.0">
4 |    <FileRef
5 |       location = "self:">
6 |    </FileRef>
7 | </Workspace>
8 |
--------------------------------------------------------------------------------
/Package.swift:
--------------------------------------------------------------------------------
1 | // swift-tools-version: 5.9
2 | import PackageDescription
3 |
4 | let package = Package(
5 | name: "DicyaninHandTracking",
6 | platforms: [
7 | .iOS(.v17),
8 | .visionOS(.v1)
9 | ],
10 | products: [
11 | .library(
12 | name: "DicyaninHandTracking",
13 | targets: ["DicyaninHandTracking"]),
14 | ],
15 | dependencies: [
16 | .package(path: "DicyaninARKitSession")
17 | ],
18 | targets: [
19 | .target(
20 | name: "DicyaninHandTracking",
21 | dependencies: ["DicyaninARKitSession"]),
22 | .testTarget(
23 | name: "HandTrackingTests",
24 | dependencies: ["DicyaninHandTracking"]),
25 | ]
26 | )
27 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # DicyaninHandTracking
2 |
3 | A Swift package for hand tracking and interactions with objects in visionOS applications.
4 |
5 | ## Overview
6 |
7 | DicyaninHandTracking provides a simple and efficient way to implement hand tracking and gesture recognition in your visionOS applications. It offers a high-level API for tracking hand movements, detecting gestures, and managing hand interactions with 3D objects.
8 |
9 | ## Features
10 |
11 | - Hand tracking and gesture recognition
12 | - Support for both iOS and visionOS
13 | - Easy integration with RealityKit
14 | - Customizable hand visualization
15 | - Tool interaction system
16 | - Collision detection and handling
17 |
18 | ## Requirements
19 |
20 | - visionOS 1.0+
21 | - Xcode 15.0+
22 | - Swift 5.9+
23 |
24 | ## Dependencies
25 |
26 | ### DicyaninARKitSession
27 |
28 | DicyaninHandTracking depends on [DicyaninHandSessionManager](https://github.com/hunterh37/DicyaninHandSessionManager),
29 | a package that provides centralized ARKit session management. This dependency is necessary because:
30 |
31 |
32 | - In visionOS, only one HandTrackingProvider can be active at a time
33 | - Multiple packages or components might need hand tracking data
34 | - DicyaninARKitSession manages a single ARKit session and distributes hand tracking updates to all subscribers
35 | - It ensures efficient resource usage and prevents conflicts between different parts of your app
36 |
37 | To add this dependency to your project, include it in your Package.swift:
38 |
39 | ```swift
40 | dependencies: [
41 | .package(url: "https://github.com/dicyanin/DicyaninARKitSession.git", from: "0.0.1")
42 | ]
43 | ```
44 |
45 | ## Installation
46 |
47 | ### Swift Package Manager
48 |
49 | Add the following to your `Package.swift` file:
50 |
51 | ```swift
52 | dependencies: [
53 | .package(url: "https://github.com/hunterh37/DicyaninHandTracking.git", from: "0.0.1")
54 | ]
55 | ```
56 |
57 | ## Usage
58 |
59 | ### Basic Setup
60 |
61 | ```swift
62 | import DicyaninHandTracking
63 | import RealityKit
64 |
65 | // Create a hand tracking view
66 | let handTrackingView = HandTrackingView()
67 |
68 | // Or create with custom tools
69 | let tools = [
70 | Tool(id: "camera", name: "Camera", modelName: "Camera"),
71 | Tool(id: "flower", name: "Flower", modelName: "Flower")
72 | ]
73 | let customHandTrackingView = HandTrackingView(tools: tools)
74 | ```
75 |
76 | ### Using with RealityView
77 |
78 | Here's a complete example of how to use HandTracking in a visionOS app with RealityView:
79 |
80 | ```swift
81 | import SwiftUI
82 | import RealityKit
83 | import DicyaninHandTracking
84 |
85 | struct ContentView: View {
86 | @StateObject private var handTracking = DicyaninHandTracking.shared
87 |
88 | var body: some View {
89 | RealityView { content in
90 | // Start hand tracking (start is async)
91 | Task { await handTracking.start(showHandVisualizations: true) }
92 |
93 | // Add hand tracking visualization to the scene.
94 | // controlRootEntity is non-optional, so it can be
95 | // added directly:
96 | content.add(handTracking.controlRootEntity)
97 |
98 | // Add your 3D content here
99 | let box = ModelEntity(mesh: .generateBox(size: 0.1))
100 | box.position = SIMD3(0, 0, -0.5)
101 | content.add(box)
102 |
103 | // Configure trigger entities for interaction
104 | let leftTrigger = handTracking.configureTriggerEntity(
105 | at: SIMD3(-0.3, 0, -0.5),
106 | stage: 0,
107 | interactionData: ["type": "leftButton"]
108 | ) {
109 | print("Left button pressed!")
110 | }
111 |
112 | let rightTrigger = handTracking.configureTriggerEntity(
113 | at: SIMD3(0.3, 0, -0.5),
114 | stage: 0,
115 | interactionData: ["type": "rightButton"]
116 | ) {
117 | print("Right button pressed!")
118 | }
119 |
120 | // Add trigger entities to the scene
121 | content.add(leftTrigger)
122 | content.add(rightTrigger)
123 |
124 | } update: { content in
125 | // The hand entities update their own transforms from
126 | // ARKit updates; controlRootEntity was already added in
127 | // the make closure, so nothing needs to be re-added here.
128 |
129 | }
130 | }
131 | }
132 | ```
133 |
134 | ### Tool Interaction
135 |
136 | ```swift
137 | // Configure a trigger entity
138 | let trigger = handTracking.configureTriggerEntity(
139 | at: SIMD3(0, 0, 0),
140 | stage: 0,
141 | interactionData: ["key": "value"]
142 | ) {
143 | print("Trigger activated!")
144 | }
145 | ```
146 |
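Interactions can also be chained across stages. The sketch below is illustrative rather than taken from the package: it assumes the hand-held tool's `ToolCollisionTriggerComponent` has been configured with two stages (for example via `Tool(stages:stageDescriptions:)`), so the second trigger only matches once the first has fired. Note that `configureTriggerEntity` already parents each entity under `controlRootEntity`, so no extra `content.add` call is required.

```swift
// Hypothetical two-stage sequence. Stage indices correspond to the
// tool's ToolCollisionTriggerComponent.currentStage.
let pickUpTrigger = handTracking.configureTriggerEntity(
    at: SIMD3(-0.2, 0, -0.5),
    stage: 0,
    interactionData: ["step": "pickUp"]
) {
    print("Stage 0 complete, tool advanced to stage 1")
}

let placeTrigger = handTracking.configureTriggerEntity(
    at: SIMD3(0.2, 0, -0.5),
    stage: 1,
    interactionData: ["step": "place"]
) {
    print("Stage 1 complete, interaction finished")
}
```
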
147 | ## API Reference
148 |
149 | ### HandTracking
150 |
151 | The main class for hand tracking functionality.
152 |
153 | #### Properties
154 |
155 | - `latestHandTracking`: Current state of hand tracking
156 | - `isRightHanded`: Whether the user is right-handed
157 | - `controlRootEntity`: Root entity for hand visualization
158 |
159 | #### Methods
160 |
161 | - `start(showHandVisualizations:)`: Start hand tracking with optional hand visualization
162 | - `stop()`: Stop hand tracking
163 | - `highlightFinger(_:hand:duration:isActive:)`: Highlight a specific finger
164 | - `setFingerActive(_:onHand:isActive:)`: Set a finger's active state
165 | - `setAllFingersActive(_:duration:addCollision:)`: Set all fingers' active state
166 |
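For example, a minimal sketch of driving these methods directly (using the shared instance that `HandTrackingView` also uses; the joint name and duration are arbitrary):

```swift
import ARKit
import DicyaninHandTracking

let handTracking = DicyaninHandTracking.shared

// start(showHandVisualizations:) is async.
Task { await handTracking.start(showHandVisualizations: true) }

// Highlight the right index fingertip for two seconds.
handTracking.highlightFinger(.indexFingerTip, hand: .rightHand, duration: 2, isActive: true)

// Make every finger visualization visible and collidable.
handTracking.setAllFingersActive(true, addCollision: true)

// Stop tracking when done.
handTracking.stop()
```
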
167 | ### HandTrackingView
168 |
169 | A SwiftUI view that implements hand tracking functionality.
170 |
171 | #### Initializers
172 |
173 | - `init(showHandVisualizations:)`: Create a view with default tools
174 | - `init(tools:showHandVisualizations:)`: Create a view with custom tools
175 |
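As a sketch of how the view might be hosted in an app (the immersive space id below is arbitrary; the `"tool-view"` window id is the one `ToolViewButton` opens):

```swift
import SwiftUI
import DicyaninHandTracking

@main
struct HandTrackingApp: App {
    var body: some Scene {
        // Immersive space that hosts the hand-tracked content.
        ImmersiveSpace(id: "HandTrackingSpace") {
            HandTrackingView(showHandVisualizations: true)
        }

        // Window used by ToolViewButton for switching tools.
        WindowGroup(id: "tool-view") {
            ToolView()
        }
    }
}
```
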
176 | ### ToolManager
177 |
178 | Manages available tools and the currently active tool.
179 |
180 | #### Properties
181 |
182 | - `availableTools`: Array of available tools
183 | - `activeTool`: Currently active tool
184 | - `onToolChanged`: Callback when the active tool changes
185 |
186 | #### Methods
187 |
188 | - `configureTools(_:)`: Configure the available tools
189 | - `setActiveTool(_:)`: Set the active tool
190 | - `setActiveTool(id:)`: Set the active tool by ID
191 | - `addTool(_:)`: Add a new tool
192 | - `removeTool(id:)`: Remove a tool
193 |
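A short sketch of working with the shared manager (tool ids and model names here are placeholders):

```swift
import DicyaninHandTracking

let toolManager = ToolManager.shared

// Register two tools; the first one becomes active automatically.
toolManager.configureTools([
    Tool(id: "camera", name: "Camera", modelName: "Camera"),
    Tool(id: "flower", name: "Flower", modelName: "Flower")
])

// React when the active tool changes
// (HandTrackingView uses this callback to swap the hand model).
toolManager.onToolChanged = { tool in
    print("Active tool is now \(tool.name)")
}

// Switch tools by id.
toolManager.setActiveTool(id: "flower")

// Add or remove tools at runtime.
toolManager.addTool(Tool(id: "brush", name: "Brush", modelName: "Brush"))
toolManager.removeTool(id: "camera")
```
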
194 | ### Required Setup
195 |
196 | 1. Add the following key to your Info.plist file to request hand tracking permissions:
197 | ```xml
198 | <key>NSHandsTrackingUsageDescription</key>
199 | <string>This app needs access to hand tracking to enable hand interaction features.</string>
200 | ```
201 |
202 |
203 | ## Acknowledgments
204 |
205 | - Apple's RealityKit and ARKit frameworks
206 | - The visionOS development community
207 | - All contributors to this project
208 |
--------------------------------------------------------------------------------
/Sources/DicyaninHandTracking/Components/CollisionSubscriptionComponent.swift:
--------------------------------------------------------------------------------
1 | //
2 | // CollisionSubscriptionComponent.swift
3 | // HandTracking
4 | //
5 | // Created by Hunter Harris on 5/12/25.
6 | //
7 |
8 | import Foundation
9 | import RealityKit
10 | import Combine
11 |
12 | /// A component that stores the collision subscription
13 | public struct CollisionSubscriptionComponent: Component {
14 | var subscription: Cancellable?
15 | var sceneSubscription: Cancellable?
16 | }
17 |
--------------------------------------------------------------------------------
/Sources/DicyaninHandTracking/Components/ToolCollisionTriggerComponent.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ToolCollisionTriggerComponent.swift
3 | // HandTracking
4 | //
5 | // Created by Hunter Harris on 5/12/25.
6 | //
7 |
8 | import Foundation
9 | import RealityKit
10 |
11 | /// A component that defines collision triggers for tool interactions
12 | public struct ToolCollisionTriggerComponent: Component {
13 | /// The current stage of the interaction
14 | var currentStage: Int = 0
15 |
16 | /// The total number of stages for this interaction
17 | let totalStages: Int
18 |
19 | /// Description of what needs to be done in this stage
20 | let stageDescriptions: [String]
21 |
22 | /// Whether this trigger has been completed
23 | var isCompleted: Bool = false
24 |
25 | init(totalStages: Int, stageDescriptions: [String]) {
26 | self.totalStages = totalStages
27 | self.stageDescriptions = stageDescriptions
28 | }
29 |
30 | /// Progress to the next stage
31 | mutating func progressToNextStage() -> Bool {
32 | if currentStage >= totalStages - 1 {
33 | isCompleted = true
34 | return true // Return true for the final stage
35 | }
36 | currentStage += 1
37 | return true
38 | }
39 |
40 | /// Get the current stage description
41 | var currentStageDescription: String {
42 | guard currentStage < stageDescriptions.count else { return "Unknown stage" }
43 | return stageDescriptions[currentStage]
44 | }
45 | }
46 |
--------------------------------------------------------------------------------
/Sources/DicyaninHandTracking/Components/ToolInteractionTargetComponent.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ToolInteractionTargetComponent.swift
3 | // HandTracking
4 | //
5 | // Created by Hunter Harris on 5/12/25.
6 | //
7 |
8 | import Foundation
9 | import RealityKit
10 |
11 | /// A component that defines an object that can trigger interactions with hand-held tools
12 | public struct ToolInteractionTargetComponent: Component {
13 | /// The stage this target triggers
14 | let targetStage: Int
15 |
16 | /// Whether this interaction has been completed
17 | var isCompleted: Bool = false
18 |
19 | /// Additional data needed for the interaction
20 | var interactionData: [String: Any]?
21 |
22 | /// The collision group this target belongs to
23 | let collisionGroup: CollisionGroup
24 |
25 | /// The collision mask for detecting collisions
26 | let collisionMask: CollisionGroup
27 |
28 | /// Completion handler called when interaction occurs
29 | var onInteraction: (() -> Void)?
30 |
31 | init(targetStage: Int,
32 | interactionData: [String: Any]? = nil,
33 | collisionGroup: CollisionGroup = .default,
34 | collisionMask: CollisionGroup = .default,
35 | onInteraction: (() -> Void)? = nil) {
36 | self.targetStage = targetStage
37 | self.interactionData = interactionData
38 | self.collisionGroup = collisionGroup
39 | self.collisionMask = collisionMask
40 | self.onInteraction = onInteraction
41 | }
42 |
43 | /// Check if this target matches the current stage of a tool
44 | func matchesCurrentStage(of trigger: ToolCollisionTriggerComponent) -> Bool {
45 | guard trigger.currentStage == targetStage,
46 | !isCompleted else {
47 | return false
48 | }
49 | return true
50 | }
51 |
52 | /// Mark this interaction as completed
53 | mutating func complete() {
54 | isCompleted = true
55 | }
56 | }
57 |
--------------------------------------------------------------------------------
/Sources/DicyaninHandTracking/FingerVisualizationEntity.swift:
--------------------------------------------------------------------------------
1 | //
2 | // FingerVisualizationEntity.swift
3 | // HandTracking
4 | //
5 | // Created by Hunter Harris on 5/11/25.
6 | //
7 |
8 | import Foundation
9 | import RealityKit
10 | import Combine
11 | import UIKit
12 | import SwiftUI
13 |
14 | /// Entity used for visualizing finger positions and interactions
15 | public class FingerVisualizationEntity: Entity, HasModel, HasCollision {
16 | public var mode: HandVisualMode = .fingertip
17 | public var isActiveVisual: Bool = false {
18 | didSet {
19 | updateMaterial()
20 | }
21 | }
22 |
23 | public required init(mode: HandVisualMode) {
24 | super.init()
25 | self.mode = mode
26 | self.components.set(dynamicModelComponent)
27 | self.generateCollisionShapes(recursive: true)
28 | }
29 |
30 | public init(mesh: MeshResource, materials: [SimpleMaterial], isActiveVisual: Bool = false) {
31 | super.init()
32 | self.components.set(ModelComponent(mesh: mesh, materials: materials))
33 | self.isActiveVisual = isActiveVisual
34 | self.generateCollisionShapes(recursive: true)
35 | updateMaterial()
36 | }
37 |
38 | @MainActor @preconcurrency required public init() {
39 | super.init()
40 | self.components.set(dynamicModelComponent)
41 | self.generateCollisionShapes(recursive: true)
42 | }
43 |
44 | private func simpleMaterial(isActive: Bool) -> SimpleMaterial {
45 | let color: SimpleMaterial.Color = isActive ? .init(.init(hex: "C0C0C0").opacity(0.8)) : .init(.clear)
46 | return SimpleMaterial(color: color, isMetallic: false)
47 | }
48 |
49 | private func updateMaterial() {
50 | self.components.set(dynamicModelComponent)
51 | if isActiveVisual {
52 | self.setOpacity(1, animated: true, duration: 0.5)
53 | } else {
54 | self.setOpacity(0, animated: true, duration: 0.5)
55 | }
56 | }
57 |
58 | private var dynamicModelComponent: ModelComponent {
59 | switch mode {
60 | case .wrist:
61 | return ModelComponent(mesh: .generateBox(width: 0.12, height: 0.01, depth: 0.06), materials: [simpleMaterial(isActive: isActiveVisual)])
62 | case .fingertip:
63 | return ModelComponent(mesh: .generateSphere(radius: 0.01), materials: [simpleMaterial(isActive: isActiveVisual)])
64 | }
65 | }
66 |
67 | public func setIsActiveVisual(_ isActive: Bool, removeAfter seconds: TimeInterval? = nil, addCollision: Bool = false) {
68 | self.isActiveVisual = isActive
69 | self.components.set(dynamicModelComponent)
70 |
71 | if addCollision {
72 | let shape: ShapeResource = (mode == .wrist) ? .generateBox(size: [0.12, 0.01, 0.06]) : .generateSphere(radius: 0.01)
73 | self.components.set(CollisionComponent(
74 | shapes: [shape],
75 | mode: .default))
76 | self.components.set(PhysicsBodyComponent(shapes: [.generateSphere(radius: 0.01)], mass: 1, mode: .kinematic))
77 | } else {
78 | self.components.remove(CollisionComponent.self)
79 | self.components.remove(PhysicsBodyComponent.self)
80 | }
81 |
82 | if let delay = seconds {
83 | DispatchQueue.main.asyncAfter(deadline: .now() + delay) { [weak self] in
84 | self?.isActiveVisual = false
85 | self?.setOpacity(0, animated: true, duration: 0.5)
86 | self?.components.set(self?.dynamicModelComponent ?? ModelComponent(mesh: .generateBox(size: 0.1), materials: []))
87 | }
88 | }
89 | }
90 | }
91 |
92 | private var playbackCompletedSubscriptions: Set<AnyCancellable> = .init()
93 |
94 | extension Entity {
95 |
96 | /// The opacity value applied to the entity and its descendants.
97 | ///
98 | /// `OpacityComponent` is assigned to the entity if it doesn't already exist.
99 | var opacity: Float {
100 | get {
101 | return components[OpacityComponent.self]?.opacity ?? 1
102 | }
103 | set {
104 | if !components.has(OpacityComponent.self) {
105 | components[OpacityComponent.self] = OpacityComponent(opacity: newValue)
106 | } else {
107 | components[OpacityComponent.self]?.opacity = newValue
108 | }
109 | }
110 | }
111 |
112 | /// Sets the opacity value applied to the entity and its descendants with optional animation.
113 | ///
114 | /// `OpacityComponent` is assigned to the entity if it doesn't already exist.
115 | func setOpacity(_ opacity: Float, animated: Bool, duration: TimeInterval = 0.01, delay: TimeInterval = 0, completion: (() -> Void)? = nil) {
116 | guard animated else {
117 | self.opacity = opacity
118 | return
119 | }
120 |
121 | if !components.has(OpacityComponent.self) {
122 | components[OpacityComponent.self] = OpacityComponent(opacity: 1)
123 | }
124 |
125 | let animation = FromToByAnimation(name: "Entity/setOpacity", to: opacity, duration: duration, timing: .linear, isAdditive: false, bindTarget: .opacity, delay: delay)
126 |
127 | do {
128 | let animationResource: AnimationResource = try .generate(with: animation)
129 | let animationPlaybackController = playAnimation(animationResource)
130 |
131 | if completion != nil {
132 | scene?.publisher(for: AnimationEvents.PlaybackCompleted.self)
133 | .filter { $0.playbackController == animationPlaybackController }
134 | .sink(receiveValue: { event in
135 | completion?()
136 | }).store(in: &playbackCompletedSubscriptions)
137 | }
138 | } catch {
139 | assertionFailure("Could not generate animation: \(error.localizedDescription)")
140 | }
141 | }
142 | }
143 |
144 | // Utility extension to create a UIColor from a hex string
145 | extension UIColor {
146 | convenience init?(hex: String) {
147 | var hexSanitized = hex.trimmingCharacters(in: .whitespacesAndNewlines)
148 | hexSanitized = hexSanitized.hasPrefix("#") ? String(hexSanitized.dropFirst()) : hexSanitized
149 | let length = hexSanitized.count
150 |
151 | var rgb: UInt64 = 0
152 | Scanner(string: hexSanitized).scanHexInt64(&rgb)
153 |
154 | if length == 6 {
155 | let r = CGFloat((rgb & 0xFF0000) >> 16) / 255.0
156 | let g = CGFloat((rgb & 0x00FF00) >> 8) / 255.0
157 | let b = CGFloat(rgb & 0x0000FF) / 255.0
158 | self.init(red: r, green: g, blue: b, alpha: 1.0)
159 | } else {
160 | return nil
161 | }
162 | }
163 |
164 | // Convert UIColor to a hex string
165 | func toHex() -> String? {
166 | var red: CGFloat = 0
167 | var green: CGFloat = 0
168 | var blue: CGFloat = 0
169 | var alpha: CGFloat = 0
170 |
171 | self.getRed(&red, green: &green, blue: &blue, alpha: &alpha)
172 |
173 | let r = Int(red * 255.0)
174 | let g = Int(green * 255.0)
175 | let b = Int(blue * 255.0)
176 |
177 | return String(format: "#%02X%02X%02X", r, g, b)
178 | }
179 | }
180 |
181 | extension Color {
182 | init(hex: String) {
183 | if let uiColor = UIColor(hex: hex) {
184 | self.init(uiColor)
185 | } else {
186 | self.init(.red)
187 | }
188 | }
189 |
190 | // Add a method to retrieve the hex value from a Color object
191 | func toHex() -> String? {
192 | return UIColor(self).toHex()
193 | }
194 | }
195 |
--------------------------------------------------------------------------------
/Sources/DicyaninHandTracking/HandTracking+Interaction.swift:
--------------------------------------------------------------------------------
1 | import RealityKit
2 | import Combine
3 |
4 | // MARK: - Entity Extensions
5 | extension Entity {
6 | var toolInteractionTarget: ToolInteractionTargetComponent? {
7 | get { components[ToolInteractionTargetComponent.self] }
8 | set { components[ToolInteractionTargetComponent.self] = newValue }
9 | }
10 |
11 | var collisionSubscription: CollisionSubscriptionComponent? {
12 | get { components[CollisionSubscriptionComponent.self] }
13 | set { components[CollisionSubscriptionComponent.self] = newValue }
14 | }
15 |
16 | var toolCollisionTrigger: ToolCollisionTriggerComponent? {
17 | get { components[ToolCollisionTriggerComponent.self] }
18 | set { components[ToolCollisionTriggerComponent.self] = newValue }
19 | }
20 |
21 | private func handleCollision(_ event: CollisionEvents.Began) {
22 | print("💥 Collision detected between: \(event.entityA.name) and \(event.entityB.name)")
23 |
24 | // Check if this entity is involved in the collision
25 | guard event.entityA == self || event.entityB == self else {
26 | return
27 | }
28 |
29 | guard let targetComponent = self.toolInteractionTarget,
30 | let toolTrigger = event.entityA.toolCollisionTrigger ?? event.entityB.toolCollisionTrigger,
31 | targetComponent.matchesCurrentStage(of: toolTrigger) else {
32 | return
33 | }
34 |
35 | print("✅ Valid collision detected")
36 |
37 | var trigger = toolTrigger
38 | // Trigger the interaction
39 | if trigger.progressToNextStage() {
40 | // Update the tool's trigger
41 | if let toolEntity = event.entityA as? ModelEntity ?? event.entityB as? ModelEntity {
42 | toolEntity.toolCollisionTrigger = trigger
43 | }
44 |
45 | // Mark this target as completed
46 | var updatedComponent = targetComponent
47 | updatedComponent.complete()
48 | self.toolInteractionTarget = updatedComponent
49 |
50 | // Call the completion handler
51 | targetComponent.onInteraction?()
52 | print("🎯 Interaction completed")
53 | }
54 | }
55 | }
56 |
57 | // MARK: - Collision Groups
58 | public extension CollisionGroup {
59 | static let tool = CollisionGroup(rawValue: 1 << 0)
60 | static let interactionTarget = CollisionGroup(rawValue: 1 << 1)
61 | }
62 |
63 | // MARK: - Public Entity Extensions
64 | public extension Entity {
65 | /// Sets up a tool interaction target with proper collision handling
66 | func setupToolInteractionTarget(stage: Int,
67 | interactionData: [String: Any]? = nil,
68 | collisionGroup: CollisionGroup = .interactionTarget,
69 | collisionMask: CollisionGroup = .tool,
70 | onInteraction: (() -> Void)? = nil) {
71 |
72 | print("🔧 Setting up tool interaction target")
73 |
74 | components.remove(PhysicsBodyComponent.self)
75 | components.remove(CollisionSubscriptionComponent.self)
76 |
77 | // Create and set the interaction target component
78 | let targetComponent = ToolInteractionTargetComponent(
79 | targetStage: stage,
80 | interactionData: interactionData,
81 | collisionGroup: collisionGroup,
82 | collisionMask: collisionMask,
83 | onInteraction: onInteraction
84 | )
85 | self.toolInteractionTarget = targetComponent
86 |
87 | // Get the actual bounds of the model
88 | let bounds = self.visualBounds(relativeTo: nil)
89 |
90 | // Add collision component using the model's actual bounds
91 | let collisionComponent = CollisionComponent(
92 | shapes: [.generateBox(size: bounds.extents)],
93 | mode: .trigger,
94 | filter: CollisionFilter(
95 | group: targetComponent.collisionGroup,
96 | mask: targetComponent.collisionMask
97 | )
98 | )
99 | components.set(collisionComponent)
100 |
101 | // Add physics body using the same bounds
102 | let physicsBody = PhysicsBodyComponent(
103 | shapes: [.generateBox(size: bounds.extents)],
104 | mass: 0,
105 | mode: .static
106 | )
107 | components.set(physicsBody)
108 |
109 | // Set up collision subscription when added to scene
110 | if let scene = self.scene {
111 | print("📡 Setting up collision subscription")
112 | let subscription = scene.subscribe(to: CollisionEvents.Began.self) { [weak self] event in
113 | self?.handleCollision(event)
114 | }
115 | self.collisionSubscription = CollisionSubscriptionComponent(subscription: subscription)
116 | } else {
117 | print("⚠️ No scene available for collision subscription")
118 | }
119 | }
120 | }
121 |
--------------------------------------------------------------------------------
/Sources/DicyaninHandTracking/HandTracking+ModelLoading.swift:
--------------------------------------------------------------------------------
1 | import RealityKit
2 | import ARKit
3 |
4 | public extension DicyaninHandTracking {
5 | /// Loads a 3D model and attaches it to the right hand entity
6 | /// - Parameters:
7 | /// - modelName: The name of the model to load (without file extension)
8 | /// - completion: Optional completion handler that provides the loaded entity
9 | func loadModelForRightHand(modelName: String, completion: ((Entity?) -> Void)? = nil) {
10 | Task { @MainActor in
11 | do {
12 | print("📦 Attempting to load model: \(modelName)")
13 |
14 | // Try to load the model from the main bundle
15 | if let entity = try? Entity.load(named: modelName) {
16 | print("✅ Successfully loaded model: \(modelName)")
17 | attachModelToRightHand(entity)
18 | completion?(entity)
19 | return
20 | }
21 |
22 | print("❌ Failed to load model: \(modelName)")
23 | completion?(nil)
24 | }
25 | }
26 | }
27 |
28 | /// Loads a 3D model from a URL and attaches it to the right hand entity
29 | /// - Parameters:
30 | /// - url: The URL of the model to load
31 | /// - completion: Optional completion handler that provides the loaded entity
32 | func loadModelForRightHand(from url: URL, completion: ((Entity?) -> Void)? = nil) {
33 | Task { @MainActor in
34 | do {
35 | let entity = try ModelEntity.load(contentsOf: url)
36 | if let modelEntity = entity as? ModelEntity {
37 | attachModelToRightHand(modelEntity)
38 | completion?(modelEntity)
39 | } else {
40 | print("Failed to load model as ModelEntity from URL")
41 | completion?(nil)
42 | }
43 | } catch {
44 | print("Failed to load model from URL: \(error)")
45 | completion?(nil)
46 | }
47 | }
48 | }
49 |
50 | // MARK: - Private Methods
51 |
52 | private func attachModelToRightHand(_ entity: Entity) {
53 | Task { @MainActor in
54 | // Remove any existing model
55 | // removeModelFromRightHand()
56 |
57 | // Store reference to new entity
58 | currentToolEntity = entity
59 |
60 | // Add the new model
61 | rightHandEntity.addChild(entity)
62 |
63 | // Center the model on the hand
64 | entity.position = .zero
65 |
66 | // Add collision component
67 | entity.components.set(CollisionComponent(shapes: [.generateBox(size: entity.visualBounds(relativeTo: nil).extents)], mode: .trigger))
68 |
69 | entity.components.set(InputTargetComponent())
70 |
71 | // Add tool collision trigger component
72 | let trigger = ToolCollisionTriggerComponent(
73 | totalStages: 1,
74 | stageDescriptions: ["Ready for interaction"]
75 | )
76 | entity.toolCollisionTrigger = trigger
77 | }
78 | }
79 | }
80 |
--------------------------------------------------------------------------------
/Sources/DicyaninHandTracking/HandTracking.swift:
--------------------------------------------------------------------------------
1 | //
2 | // HandTracking.swift
3 | // HandTracking
4 | //
5 | // Created by Hunter Harris on 5/11/25.
6 | //
7 |
8 | import RealityKit
9 | import ARKit
10 | import SwiftUI
11 | import Combine
12 | import DicyaninARKitSession
13 |
14 | /// Protocol defining the interface for hand tracking functionality
15 | public protocol HandTrackingProtocol: ObservableObject {
16 | var latestHandTracking: HandAnchorUpdate { get }
17 | var isRightHanded: Bool { get set }
18 | var controlRootEntity: Entity { get }
19 |
20 | func start(showHandVisualizations: Bool) async
21 | func stop()
22 | func highlightFinger(_ finger: HandSkeleton.JointName, hand: HandType, duration: TimeInterval?, isActive: Bool)
23 | func setFingerActive(_ finger: HandSkeleton.JointName, onHand isLeftHand: Bool, isActive: Bool)
24 | func setAllFingersActive(_ isActive: Bool, duration: TimeInterval?, addCollision: Bool)
25 | }
26 |
27 | var rootEntity = Entity()
28 | var rightHandEntity = Entity()
29 | var leftHandEntity = Entity()
30 |
31 | /// A class that manages hand tracking and gesture recognition
32 | public class DicyaninHandTracking: HandTrackingProtocol {
33 | /// Shared instance of the hand tracking manager
34 | public static let shared = DicyaninHandTracking()
35 |
36 | private var cancellables = Set<AnyCancellable>()
37 | private var handTrackingCancellable: AnyCancellable?
38 |
39 | public init() {
40 | setupHandTrackingSubscription()
41 | }
42 |
43 | // MARK: - Properties
44 | var currentToolEntity: Entity?
45 |
46 | @Published public var latestHandTracking: HandAnchorUpdate = .init(left: nil, right: nil)
47 | @Published public var isRightHanded = true
48 |
49 | // MARK: - Entity Management
50 | private var leftFingerVisualizationEntities: [HandSkeleton.JointName: FingerVisualizationEntity] = [:]
51 | private var rightFingerVisualizationEntities: [HandSkeleton.JointName: FingerVisualizationEntity] = [:]
52 |
53 | public let controlRootEntity = Entity()
54 |
55 | /// Registers all required components and systems for hand tracking
56 | public static func registerComponents() {
57 | // Register interaction components
58 | ToolInteractionTargetComponent.registerComponent()
59 | ToolCollisionTriggerComponent.registerComponent()
60 | CollisionSubscriptionComponent.registerComponent()
61 | }
62 |
63 | // MARK: - Hand Joints
64 | private let handJoints: [HandSkeleton.JointName] = [
65 | .indexFingerTip, .middleFingerTip, .littleFingerTip, .ringFingerTip, .wrist,
66 | .littleFingerKnuckle, .littleFingerMetacarpal, .littleFingerIntermediateBase,
67 | .ringFingerKnuckle, .ringFingerMetacarpal, .ringFingerIntermediateBase,
68 | .middleFingerKnuckle, .middleFingerMetacarpal, .middleFingerIntermediateBase,
69 | .indexFingerKnuckle, .indexFingerMetacarpal, .indexFingerIntermediateBase,
70 | .thumbKnuckle, .thumbIntermediateBase, .thumbTip, .thumbIntermediateTip
71 | ]
72 |
73 | // MARK: - Public Methods
74 | /// Starts hand tracking
75 | /// - Parameter showHandVisualizations: Whether to show hand visualization entities
76 | public func start(showHandVisualizations: Bool = true) async {
77 | Task { @MainActor in
78 | rightHandEntity.removeFromParent()
79 | leftHandEntity.removeFromParent()
80 | rightHandEntity.components.set(CollisionComponent(shapes: [.generateSphere(radius: 0.2)], mode: .trigger))
81 | rightHandEntity.components.set(PhysicsBodyComponent(shapes: [.generateSphere(radius: 0.2)], mass: 10, mode: .kinematic))
82 |
83 | controlRootEntity.addChild(rightHandEntity)
84 | controlRootEntity.addChild(leftHandEntity)
85 |
86 | if showHandVisualizations {
87 | initializeVisualizationFingerTips()
88 | }
89 |
90 | do {
91 | try await ARKitSessionManager.shared.start()
92 | } catch {
93 | print("Failed to start hand tracking: \(error)")
94 | }
95 | }
96 | }
97 |
98 | /// Stops hand tracking
99 | public func stop() {
100 | ARKitSessionManager.shared.stop()
101 | }
102 |
103 | public func highlightFinger(_ finger: HandSkeleton.JointName, hand: HandType, duration: TimeInterval? = nil, isActive: Bool = true) {
104 | let isRightHand = hand == .rightHand
105 | let visualizationEntities = isRightHand ? rightFingerVisualizationEntities : leftFingerVisualizationEntities
106 |
107 | guard let entity = visualizationEntities[finger] else { return }
108 | entity.setIsActiveVisual(isActive, removeAfter: duration)
109 | }
110 |
111 | public func setFingerActive(_ finger: HandSkeleton.JointName, onHand isLeftHand: Bool, isActive: Bool) {
112 | let visualizationEntities = isLeftHand ? leftFingerVisualizationEntities : rightFingerVisualizationEntities
113 | guard let entity = visualizationEntities[finger] else { return }
114 | entity.setIsActiveVisual(isActive)
115 | }
116 |
117 | /// Sets all finger entities to active or inactive
118 | /// - Parameters:
119 | /// - isActive: Whether to make the entities visible
120 | /// - duration: Optional duration after which to revert the state
121 | /// - addCollision: Whether to add collision components
122 | public func setAllFingersActive(_ isActive: Bool, duration: TimeInterval? = nil, addCollision: Bool = false) {
123 | // Set left hand fingers
124 | for (_, entity) in leftFingerVisualizationEntities {
125 | entity.setIsActiveVisual(isActive, removeAfter: duration, addCollision: addCollision)
126 | }
127 |
128 | // Set right hand fingers
129 | for (_, entity) in rightFingerVisualizationEntities {
130 | entity.setIsActiveVisual(isActive, removeAfter: duration, addCollision: addCollision)
131 | }
132 | }
133 |
134 | /// Removes any currently attached model from the right hand
135 | public func removeModelFromRightHand() {
136 | Task { @MainActor in
137 | // Remove the current tool entity if it exists
138 | if let currentTool = currentToolEntity {
139 | currentTool.removeFromParent()
140 | currentToolEntity = nil
141 | }
142 | }
143 | }
144 |
145 | // MARK: - Private Methods
146 | private func setupHandTrackingSubscription() {
147 | handTrackingCancellable = ARKitSessionManager.shared.handTrackingUpdates
148 | .sink { [weak self] update in
149 | // Convert DicyaninARKitSession.HandAnchorUpdate to HandTracking.HandAnchorUpdate
150 | let convertedUpdate = HandAnchorUpdate(
151 | left: update.left,
152 | right: update.right
153 | )
154 | self?.handleHandUpdate(convertedUpdate)
155 | }
156 | }
157 |
158 | private func handleHandUpdate(_ update: HandAnchorUpdate) {
159 | // Process hand updates as before
160 | if let leftHand = update.left {
161 | processHandAnchor(leftHand)
162 | }
163 | if let rightHand = update.right {
164 | processHandAnchor(rightHand)
165 | }
166 | }
167 |
168 | private func processHandAnchor(_ anchor: HandAnchor) {
169 | DispatchQueue.main.async { [weak self] in
170 | guard let self = self else { return }
171 | if anchor.chirality == .left { self.latestHandTracking.left = anchor } else { self.latestHandTracking.right = anchor }
172 | self.updateFingertipVisualizerEntities(anchor)
173 |
174 | let newTransform = Transform(matrix: anchor.originFromAnchorTransform)
175 | if anchor.chirality == .left {
176 | leftHandEntity.transform = newTransform
177 | } else if anchor.chirality == .right {
178 | rightHandEntity.transform = newTransform
179 | }
180 | }
181 | }
182 |
183 | private func initializeVisualizationFingerTips() {
184 | for fingerTip in handJoints {
185 | let entity = createVisualizerFingertipEntity(for: fingerTip)
186 | leftFingerVisualizationEntities[fingerTip] = entity
187 | controlRootEntity.addChild(entity)
188 | }
189 | for fingerTip in handJoints {
190 | let entity = createVisualizerFingertipEntity(for: fingerTip)
191 | rightFingerVisualizationEntities[fingerTip] = entity
192 | controlRootEntity.addChild(entity)
193 | }
194 | }
195 |
196 | private func createVisualizerFingertipEntity(for jointName: HandSkeleton.JointName) -> FingerVisualizationEntity {
197 | var mode: HandVisualMode = .fingertip
198 | if jointName == .wrist {
199 | mode = .wrist
200 | }
201 | return FingerVisualizationEntity(mode: mode)
202 | }
203 |
204 | private func updateFingertipVisualizerEntities(_ anchor: HandAnchor) {
205 | guard let handSkeleton = anchor.handSkeleton else { return }
206 |
207 | for (jointName, entity) in leftFingerVisualizationEntities {
208 | let joint = handSkeleton.joint(jointName)
209 | let worldTransform = matrix_multiply(anchor.originFromAnchorTransform, joint.anchorFromJointTransform)
210 | entity.setTransformMatrix(worldTransform, relativeTo: nil)
211 | }
212 |
213 | for (jointName, entity) in rightFingerVisualizationEntities {
214 | let joint = handSkeleton.joint(jointName)
215 | let worldTransform = matrix_multiply(anchor.originFromAnchorTransform, joint.anchorFromJointTransform)
216 | entity.setTransformMatrix(worldTransform, relativeTo: nil)
217 | }
218 | }
219 |
220 | private func removeAllHandEntities() {
221 | for entity in leftFingerVisualizationEntities.values {
222 | entity.removeFromParent()
223 | }
224 | for entity in rightFingerVisualizationEntities.values {
225 | entity.removeFromParent()
226 | }
227 | leftFingerVisualizationEntities.removeAll()
228 | rightFingerVisualizationEntities.removeAll()
229 | }
230 |
231 | /// Configures a trigger entity with the specified properties
232 | /// - Parameters:
233 | /// - position: The position of the entity in 3D space
234 | /// - stage: The interaction stage this trigger belongs to (default: 0)
235 | /// - interactionData: Additional data for the interaction (default: nil)
236 | /// - onInteraction: Closure called when the entity is interacted with
237 | /// - Returns: The configured ModelEntity
238 | public func configureTriggerEntity(
239 | at position: SIMD3<Float>,
240 | stage: Int = 0,
241 | interactionData: [String: Any]? = nil,
242 | onInteraction: (() -> Void)? = nil
243 | ) -> ModelEntity {
244 | // Create the entity with a proper 3D model
245 | let entity = ModelEntity(mesh: .generateSphere(radius: 0.05))
246 | entity.position = position
247 |
248 | // Setup interaction target
249 | entity.setupToolInteractionTarget(
250 | stage: stage,
251 | interactionData: interactionData,
252 | collisionGroup: .interactionTarget,
253 | collisionMask: .tool,
254 | onInteraction: onInteraction
255 | )
256 |
257 | // Add to controlRootEntity
258 | controlRootEntity.addChild(entity)
259 |
260 | return entity
261 | }
262 | }
263 |
264 | /// Represents the type of hand being tracked
265 | public enum HandType {
266 | case leftHand
267 | case rightHand
268 | }
269 |
270 | /// Represents the visual mode for hand tracking entities
271 | public enum HandVisualMode {
272 | case wrist
273 | case fingertip
274 | }
275 |
276 | /// Represents the current state of hand tracking
277 | public struct HandAnchorUpdate {
278 | public var left: HandAnchor?
279 | public var right: HandAnchor?
280 |
281 | public init(left: HandAnchor? = nil, right: HandAnchor? = nil) {
282 | self.left = left
283 | self.right = right
284 | }
285 | }
286 |
287 | /// Extension to provide additional functionality for HandSkeleton.JointName
288 | public extension HandSkeleton.JointName {
289 | /// Returns true if this joint is a fingertip
290 | var isFingertip: Bool {
291 | switch self {
292 | case .indexFingerTip, .middleFingerTip, .ringFingerTip, .littleFingerTip, .thumbTip:
293 | return true
294 | default:
295 | return false
296 | }
297 | }
298 |
299 | /// Returns true if this joint is a knuckle
300 | var isKnuckle: Bool {
301 | switch self {
302 | case .indexFingerKnuckle, .middleFingerKnuckle, .ringFingerKnuckle, .littleFingerKnuckle, .thumbKnuckle:
303 | return true
304 | default:
305 | return false
306 | }
307 | }
308 | }
309 |
--------------------------------------------------------------------------------
/Sources/DicyaninHandTracking/HandTrackingView.swift:
--------------------------------------------------------------------------------
1 | import SwiftUI
2 | import RealityKit
3 | import DicyaninHandTracking
4 | import DicyaninARKitSession
5 |
6 | /// A SwiftUI view that implements hand tracking functionality
7 | public struct HandTrackingView: View {
8 | @StateObject private var handTracking = DicyaninHandTracking.shared
9 | @StateObject private var toolManager = ToolManager.shared
10 | private let showHandVisualizations: Bool
11 | private let tools: [Tool]
12 |
13 | /// Creates a new HandTrackingView with default tools
14 | /// - Parameter showHandVisualizations: Whether to show hand visualization entities (default: true)
15 | public init(showHandVisualizations: Bool = true) {
16 | self.showHandVisualizations = showHandVisualizations
17 |
18 | self.tools = [
19 | Tool(id: "camera", name: "Camera", modelName: "Camera"),
20 | Tool(id: "flower", name: "Flower", modelName: "Flower")
21 | ]
22 | }
23 |
24 | /// Creates a new HandTrackingView with custom tools
25 | /// - Parameters:
26 | /// - tools: Array of tools to use
27 | /// - showHandVisualizations: Whether to show hand visualization entities (default: true)
28 | public init(tools: [Tool], showHandVisualizations: Bool = true) {
29 | self.showHandVisualizations = showHandVisualizations
30 | self.tools = tools
31 | }
32 |
33 | public var body: some View {
34 | RealityView { content in
35 | // Register required components
36 | DicyaninHandTracking.registerComponents()
37 |
38 | // Configure tools
39 | toolManager.configureTools(tools)
40 |
41 | // Add hand tracking entities to the scene
42 | content.add(handTracking.controlRootEntity)
43 |
44 | // Set up tool change handler
45 | toolManager.onToolChanged = { tool in
46 | // Remove any existing model
47 | handTracking.removeModelFromRightHand()
48 |
49 | // Load the new tool model
50 | handTracking.loadModelForRightHand(modelName: tool.modelName) { entity in
51 | if let entity = entity {
52 | print("📸 \(tool.name) model loaded successfully")
53 | }
54 | }
55 | }
56 |
57 | // Load initial tool model
58 | if let activeTool = toolManager.activeTool {
59 | handTracking.loadModelForRightHand(modelName: activeTool.modelName) { entity in
60 | if let entity = entity {
61 | print("📸 \(activeTool.name) model loaded successfully")
62 | }
63 | }
64 | }
65 |
66 | // Add example interactive entities
67 | addExampleEntities(to: content)
68 |
69 | // Start hand tracking
70 | Task {
71 | await handTracking.start(showHandVisualizations: showHandVisualizations)
72 | }
73 | }
74 | .onDisappear {
75 | // Clean up hand tracking when view disappears
76 | handTracking.stop()
77 | }
78 | #if targetEnvironment(simulator)
79 | // Allow drag gesture in simulator on tool objects for ease of debugging
80 | .gesture(dragGesture)
81 | #endif
82 | }
83 | var dragGesture: some Gesture {
84 | DragGesture()
85 | .targetedToAnyEntity()
86 | .onChanged { value in // When drag begins/changes, set Rigidbody to kinematic
87 | guard let parent = value.entity.parent else { return }
88 | value.entity.position = value.convert(value.location3D, from: .local, to: parent)
89 | value.entity.components[PhysicsBodyComponent.self]?.mode = .kinematic
90 | }
91 | .onEnded({ value in // When drag ends, set Rigidbody back to dynamic
92 | value.entity.components[PhysicsBodyComponent.self]?.mode = .dynamic
93 |
94 | })
95 | }
96 |
97 | private func addExampleEntities(to content: RealityViewContent) {
98 | // Create a few example entities with different positions
99 | let positions: [SIMD3<Float>] = [
100 | SIMD3(0.5, 0.5, 0.5), // Right, Up, Forward
101 | SIMD3(-0.5, 0.5, 0.5), // Left, Up, Forward
102 | SIMD3(0, 0.7, 0.5), // Center, Higher Up, Forward
103 | SIMD3(0.5, 0.5, -0.5), // Right, Up, Back
104 | SIMD3(-0.5, 0.5, -0.5) // Left, Up, Back
105 | ]
106 |
107 | let boxSize = SIMD3<Float>(0.1, 0.1, 0.1)
108 |
109 | // Create entities at each position
110 | for (index, position) in positions.enumerated() {
111 | let entity = ModelEntity(mesh: .generateBox(size: boxSize))
112 | entity.position = position
113 |
114 | // Add to scene first
115 | content.add(entity)
116 |
117 | // Now set up collision and interaction after entity is in scene
118 | entity.components.set(CollisionComponent(
119 | shapes: [.generateBox(size: boxSize)],
120 | mode: .trigger,
121 | filter: CollisionFilter(group: .interactionTarget, mask: .tool)
122 | ))
123 |
124 | entity.components.set(PhysicsBodyComponent(
125 | shapes: [.generateBox(size: boxSize)],
126 | mass: 0,
127 | mode: .static
128 | ))
129 |
130 | Task {
131 | try? await Task.sleep(for: .seconds(1))
132 | // Setup interaction target with completion handler
133 | entity.setupToolInteractionTarget(
134 | stage: 0,
135 | interactionData: ["index": index],
136 | collisionGroup: .interactionTarget,
137 | collisionMask: .tool
138 | ) {
139 | print("🎯 Interacted with entity at position: \(position)")
140 |
141 | // Example: Change the entity's color when interacted with
142 | if var modelComponent = entity.components[ModelComponent.self] {
143 | modelComponent.materials = [SimpleMaterial(color: .green, isMetallic: false)]
144 | entity.components[ModelComponent.self] = modelComponent
145 | }
146 | }
147 | }
148 |
149 | print("📦 Added entity at position: \(position)")
150 | }
151 | }
152 | }
153 |
154 | #Preview {
155 | HandTrackingView()
156 | }
157 |
--------------------------------------------------------------------------------
/Sources/DicyaninHandTracking/ToolManager.swift:
--------------------------------------------------------------------------------
1 | import RealityKit
2 | import SwiftUI
3 | import Combine
4 |
5 | /// Represents a tool that can be used in the hand tracking system
6 | public struct Tool: Identifiable {
7 | public let id: String
8 | public let name: String
9 | public let modelName: String
10 | public let stages: Int
11 | public let stageDescriptions: [String]
12 |
13 | public init(id: String, name: String, modelName: String, stages: Int = 1, stageDescriptions: [String] = ["Ready"]) {
14 | self.id = id
15 | self.name = name
16 | self.modelName = modelName
17 | self.stages = stages
18 | self.stageDescriptions = stageDescriptions
19 | }
20 | }
21 |
22 | /// Manages the available tools and the currently active tool
23 | public class ToolManager: ObservableObject {
24 | public static let shared = ToolManager()
25 |
26 | @Published public private(set) var availableTools: [Tool] = []
27 | @Published public private(set) var activeTool: Tool?
28 |
29 | // Callback for when tool changes
30 | public var onToolChanged: ((Tool) -> Void)?
31 |
32 | private init() {}
33 |
34 | /// Configures the available tools
35 | /// - Parameter tools: Array of tools to make available
36 | public func configureTools(_ tools: [Tool]) {
37 | availableTools = tools
38 |
39 | // Set the first tool as active by default if no tool is currently active
40 | if activeTool == nil, let firstTool = availableTools.first {
41 | setActiveTool(firstTool)
42 | }
43 | }
44 |
45 | /// Sets the active tool
46 | /// - Parameter tool: The tool to set as active
47 | public func setActiveTool(_ tool: Tool) {
48 | // Only update if the tool is different
49 | guard activeTool?.id != tool.id else { return }
50 |
51 | activeTool = tool
52 |
53 | // Notify listeners of the tool change
54 | onToolChanged?(tool)
55 | }
56 |
57 | /// Sets the active tool by ID
58 | /// - Parameter toolId: The ID of the tool to set as active
59 | public func setActiveTool(id toolId: String) {
60 | if let tool = availableTools.first(where: { $0.id == toolId }) {
61 | setActiveTool(tool)
62 | }
63 | }
64 |
65 | /// Adds a new tool to the available tools
66 | /// - Parameter tool: The tool to add
67 | public func addTool(_ tool: Tool) {
68 | availableTools.append(tool)
69 | }
70 |
71 | /// Removes a tool from the available tools
72 | /// - Parameter toolId: The ID of the tool to remove
73 | public func removeTool(id toolId: String) {
74 | availableTools.removeAll { $0.id == toolId }
75 |
76 | // If we removed the active tool, set a new active tool
77 | if activeTool?.id == toolId {
78 | activeTool = availableTools.first
79 | if let newTool = activeTool {
80 | onToolChanged?(newTool)
81 | }
82 | }
83 | }
84 | }
--------------------------------------------------------------------------------
/Sources/DicyaninHandTracking/ToolView.swift:
--------------------------------------------------------------------------------
1 | import SwiftUI
2 | import RealityKit
3 |
4 | /// A view that displays available tools and allows switching between them
5 | public struct ToolView: View {
6 | @StateObject private var toolManager = ToolManager.shared
7 | @Environment(\.dismissWindow) private var dismissWindow
8 |
9 | public init() {}
10 |
11 | public var body: some View {
12 | NavigationStack {
13 | List(toolManager.availableTools) { tool in
14 | Button {
15 | toolManager.setActiveTool(tool)
16 | dismissWindow()
17 | } label: {
18 | HStack {
19 | Text(tool.name)
20 | .foregroundStyle(.primary)
21 |
22 | Spacer()
23 |
24 | if tool.id == toolManager.activeTool?.id {
25 | Image(systemName: "checkmark")
26 | .foregroundStyle(.blue)
27 | }
28 | }
29 | }
30 | }
31 | .navigationTitle("Select Tool")
32 | .toolbar {
33 | ToolbarItem(placement: .topBarTrailing) {
34 | Button("Done") {
35 | dismissWindow()
36 | }
37 | }
38 | }
39 | }
40 | }
41 | }
42 |
43 | #Preview {
44 | ToolView()
45 | }
--------------------------------------------------------------------------------
/Sources/DicyaninHandTracking/ToolViewButton.swift:
--------------------------------------------------------------------------------
1 | import SwiftUI
2 |
3 | /// A button that opens the tool selection view in a window
4 | public struct ToolViewButton: View {
5 | @Environment(\.openWindow) private var openWindow
6 |
7 | public init() {}
8 |
9 | public var body: some View {
10 | Button {
11 | openWindow(id: "tool-view")
12 | } label: {
13 | Label("Change Tool", systemImage: "wrench.and.screwdriver")
14 | }
15 | .fontWeight(.semibold)
16 | }
17 | }
18 |
19 | #Preview {
20 | ToolViewButton()
21 | }
22 |
--------------------------------------------------------------------------------