├── .DS_Store
├── .gitignore
├── CITATION.cff
├── LICENSE
├── README.md
├── Tracking Streamer.xcodeproj
    ├── project.pbxproj
    ├── project.xcworkspace
    │   ├── contents.xcworkspacedata
    │   ├── xcshareddata
    │   │   ├── IDEWorkspaceChecks.plist
    │   │   └── swiftpm
    │   │   │   └── Package.resolved
    │   └── xcuserdata
    │   │   └── yhpark.xcuserdatad
    │   │   └── UserInterfaceState.xcuserstate
    ├── xcshareddata
    │   └── xcschemes
    │   │   └── VisionProTeleop.xcscheme
    └── xcuserdata
    │   ├── account2309.xcuserdatad
    │   └── xcschemes
    │   │   └── xcschememanagement.plist
    │   └── yhpark.xcuserdatad
    │   └── xcschemes
    │   └── xcschememanagement.plist
├── Tracking Streamer
    ├── App.swift
    ├── ContentView.swift
    ├── Supporting files
    │   ├── .DS_Store
    │   ├── Archive
    │   │   ├── DebugView.md
    │   │   └── HandToGround.md
    │   ├── Assets.xcassets
    │   │   ├── .DS_Store
    │   │   ├── AccentColor.colorset
    │   │   │   └── Contents.json
    │   │   ├── AppIcon.solidimagestack
    │   │   │   ├── Back.solidimagestacklayer
    │   │   │   │   ├── Content.imageset
    │   │   │   │   │   ├── Background.png
    │   │   │   │   │   └── Contents.json
    │   │   │   │   └── Contents.json
    │   │   │   ├── Contents.json
    │   │   │   ├── Front.solidimagestacklayer
    │   │   │   │   ├── Content.imageset
    │   │   │   │   │   ├── Contents.json
    │   │   │   │   │   └── lab_logo_inverted.png
    │   │   │   │   └── Contents.json
    │   │   │   └── Middle.solidimagestacklayer
    │   │   │   │   ├── Content.imageset
    │   │   │   │   └── Contents.json
    │   │   │   │   └── Contents.json
    │   │   ├── Contents.json
    │   │   ├── Image.imageset
    │   │   │   └── Contents.json
    │   │   ├── graph2.imageset
    │   │   │   ├── Contents.json
    │   │   │   └── diagram_visionpro.png
    │   │   └── lab_logo_inverted.imageset
    │   │   │   ├── Contents.json
    │   │   │   └── lab_logo_inverted.png
    │   ├── Info.plist
    │   ├── InfoPlist.xcstrings
    │   ├── Localizable.xcstrings
    │   ├── README assets
    │   │   ├── appstore_badge.svg
    │   │   ├── icon.png
    │   │   ├── screenshot1280w.jpg
    │   │   ├── screenshot1280w2.jpg
    │   │   └── screenshot1280w3.jpg
    │   ├── sound1.m4a
    │   └── sound2.m4a
    ├── VisionProTeleop.entitlements
    ├── 🌐RealityView.swift
    ├── 📏Unit.swift
    ├── 🛠️Menu
    │   ├── 🛠️MenuTop.swift
    │   ├── 🛠️Panel.swift
    │   └── 🛠️SettingPanel.swift
    ├── 🥽AppModel.swift
    ├── 🧑HeadTrackingComponent&System.swift
    ├── 🧩Model.swift
    └── 🧩Name.swift
├── __pycache__
    ├── handtracking_pb2.cpython-38.pyc
    └── handtracking_pb2_grpc.cpython-38.pyc
├── assets
    ├── axis_convention.png
    ├── coord_system.png
    ├── hand_skeleton_convention.png
    ├── main.png
    ├── short_paper.pdf
    ├── short_paper_new.pdf
    └── visionpro_main.png
├── avp_stream
    ├── .DS_Store
    ├── __init__.py
    ├── __pycache__
    │   ├── __init__.cpython-310.pyc
    │   ├── __init__.cpython-311.pyc
    │   ├── __init__.cpython-38.pyc
    │   ├── isaac_env.cpython-38.pyc
    │   ├── streamer.cpython-311.pyc
    │   ├── streamer.cpython-38.pyc
    │   └── utils.cpython-38.pyc
    ├── assets
    │   ├── huge_axis.urdf
    │   ├── normal_axis.urdf
    │   └── small_axis.urdf
    ├── grpc_msg
    │   ├── __init__.py
    │   ├── __pycache__
    │   │   ├── __init__.cpython-310.pyc
    │   │   ├── __init__.cpython-311.pyc
    │   │   ├── __init__.cpython-38.pyc
    │   │   ├── handtracking_pb2.cpython-310.pyc
    │   │   ├── handtracking_pb2.cpython-311.pyc
    │   │   ├── handtracking_pb2.cpython-38.pyc
    │   │   ├── handtracking_pb2_grpc.cpython-310.pyc
    │   │   ├── handtracking_pb2_grpc.cpython-311.pyc
    │   │   └── handtracking_pb2_grpc.cpython-38.pyc
    │   ├── handtracking.grpc.swift
    │   ├── handtracking.pb.swift
    │   ├── handtracking.proto
    │   ├── handtracking_pb2.py
    │   └── handtracking_pb2_grpc.py
    ├── isaac_env.py
    ├── streamer.py
    └── utils
    │   ├── __init__.py
    │   ├── __pycache__
    │   │   ├── __init__.cpython-311.pyc
    │   │   ├── __init__.cpython-38.pyc
    │   │   ├── constants.cpython-38.pyc
    │   │   ├── grpc_utils.cpython-311.pyc
    │   │   ├── grpc_utils.cpython-38.pyc
    │   │   ├── isaac_utils.cpython-38.pyc
    │   │   ├── se3_utils.cpython-38.pyc
    │   │   └── trn_constants.cpython-38.pyc
    │   ├── grpc_utils.py
    │   ├── isaac_utils.py
    │   ├── se3_utils.py
    │   └── trn_constants.py
├── example.py
├── how_to_install.md
├── setup.py
├── viz_isaac.py
└── viz_localization.py

/.DS_Store:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Improbable-AI/VisionProTeleop/19c2eebdea50ed6851375a0e48b11eec84b9816d/.DS_Store
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | /*.egg-info
2 | /dist
3 | *.pyc
--------------------------------------------------------------------------------
/CITATION.cff:
--------------------------------------------------------------------------------
1 | cff-version: 1.2.0
2 | message: "If you use this software, please cite it as below."
3 | authors:
4 | - family-names: "Park"
5 |   given-names: "Younghyo"
6 |   orcid: "https://orcid.org/0000-0000-0000-0000"
7 | title: "Teleoperation System using Apple Vision Pro"
8 | version: 0.1.0
9 | url: "https://github.com/Improbable-AI/VisionProTeleop"
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 | 
3 | Copyright (c) 2024 Younghyo Park
4 | 
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 | 
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 | 
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 | 
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | VisionProTeleop
2 | ===========
3 | 
4 | ![CleanShot 2024-03-03 at 13 55 11@2x](https://github.com/Improbable-AI/VisionProTeleop/assets/68195716/d87a906c-ccf3-4e2d-bd25-a66dc0df803b)
5 | 
6 | 
7 | 
8 | Wanna use your new Apple Vision Pro to control your robot? Wanna record how you navigate and manipulate the world to train your robot?
9 | This VisionOS app and Python library stream your Head + Wrist + Hand Tracking results via gRPC over a WiFi network, so any robot connected to the same network can subscribe to and use them.
10 | 
11 | > **For a more detailed explanation, check out this short [paper](./assets/short_paper_new.pdf).**
12 | 
13 | 
14 | ## How to Use
15 | 
16 | If you use this repository in your work, consider citing:
17 | 
18 |     @software{park2024avp,
19 |       title={Using Apple Vision Pro to Train and Control Robots},
20 |       author={Park, Younghyo and Agrawal, Pulkit},
21 |       year={2024},
22 |       url = {https://github.com/Improbable-AI/VisionProTeleop},
23 |     }
24 | 
25 | ### Step 1. Install the app on Vision Pro
26 | 
27 | ![](assets/visionpro_main.png)
28 | 
29 | This app is now officially on the VisionOS App Store! You can search for **[Tracking Streamer](https://apps.apple.com/us/app/tracking-streamer/id6478969032)** on the App Store and install it.
30 | 
31 | If you want to play around with the app, you can also build and install it yourself. To learn how, take a look at this [documentation](/how_to_install.md). This requires (a) an Apple Developer account, (b) a Vision Pro Developer Strap, and (c) a Mac with Xcode installed.
32 | 
33 | 
34 | ### Step 2. Run the app on Vision Pro
35 | 
36 | After installation, open the app on Vision Pro and click `Start`. That's it! Vision Pro is now streaming the tracking data over your WiFi network.
37 | 
38 | **Tip** Note the IP address shown before you click `Start`; you'll need to specify it to subscribe to the data. Once you click `Start`, the app immediately enters pass-through mode. Press the Digital Crown to stop streaming.
39 | 
40 | 
41 | ### Step 3. Receive the stream from anywhere
42 | 
43 | The following Python package lets you receive the data stream on any device connected to the same WiFi network. First, install the package:
44 | 
45 | ```
46 | pip install avp_stream
47 | ```
48 | 
49 | Then, add this code snippet to your project:
50 | 
51 | ```python
52 | from avp_stream import VisionProStreamer
53 | avp_ip = "10.31.181.201"   # example IP
54 | s = VisionProStreamer(ip = avp_ip, record = True)
55 | 
56 | while True:
57 |     r = s.latest
58 |     print(r['head'], r['right_wrist'], r['right_fingers'])
59 | ```
60 | 
61 | 
62 | 
63 | ## Available Data
64 | 
65 | ```python
66 | r = s.latest
67 | ```
68 | 
69 | `r` is a dictionary containing the following data streamed from AVP:
70 | 
71 | ```python
72 | r['head']: np.ndarray
73 |   # shape (1,4,4) / measured from ground frame
74 | r['right_wrist']: np.ndarray
75 |   # shape (1,4,4) / measured from ground frame
76 | r['left_wrist']: np.ndarray
77 |   # shape (1,4,4) / measured from ground frame
78 | r['right_fingers']: np.ndarray
79 |   # shape (25,4,4) / measured from right wrist frame
80 | r['left_fingers']: np.ndarray
81 |   # shape (25,4,4) / measured from left wrist frame
82 | r['right_pinch_distance']: float
83 |   # distance between right index tip and thumb tip
84 | r['left_pinch_distance']: float
85 |   # distance between left index tip and thumb tip
86 | r['right_wrist_roll']: float
87 |   # rotation angle of your right wrist around your arm axis
88 | r['left_wrist_roll']: float
89 |   # rotation angle of your left wrist around your arm axis
90 | ```
91 | 
92 | 
93 | ### Axis Convention
94 | 
95 | Refer to the image below to see how the axes are defined for your head, wrist, and fingers.
96 | 
97 | ![](assets/axis_convention.png)
98 | 
99 | 
100 | ### Hand Skeleton used in VisionOS
101 | 
102 | ![](assets/hand_skeleton_convention.png)
103 | 
104 | Refer to the image above to see the order in which the joints are represented in each hand's skeleton.
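
### Example: composing the transforms

Since `right_fingers` / `left_fingers` are expressed in their wrist frame while the wrists are expressed in the ground frame, getting finger poses in the ground frame is a single matrix composition. Below is a minimal sketch of that pattern. It assumes `numpy` (the streamer already returns `np.ndarray`); the fingertip index `9` and the `0.1 m` pinch normalization are illustrative assumptions, so check the joint index against the skeleton image above before relying on it.

```python
import numpy as np
from avp_stream import VisionProStreamer

s = VisionProStreamer(ip="10.31.181.201")  # example IP, as in Step 3

while True:
    r = s.latest
    # (1,4,4) @ (25,4,4) broadcasts over the joint axis: every right-hand
    # joint becomes a 4x4 homogeneous transform in the ground frame.
    right_fingers_ground = r['right_wrist'] @ r['right_fingers']

    # Translation is the last column of a homogeneous transform.
    # NOTE: joint index 9 is assumed to be the index fingertip here --
    # verify against the hand-skeleton image above.
    index_tip = right_fingers_ground[9, :3, 3]

    # Hypothetical gripper command: map pinch distance (~0 when closed)
    # onto [0, 1], assuming ~0.1 m means fully open.
    grip = float(np.clip(r['right_pinch_distance'] / 0.1, 0.0, 1.0))
    print(index_tip, grip)
```

Because every pose is a plain 4x4 homogeneous matrix, the same composition works for the head and the left hand.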
105 | 106 | 107 | ## Acknowledgements 108 | 109 | We acknowledge support from Hyundai Motor Company and ARO MURI grant number W911NF-23-1-0277. 110 | 111 | 127 | -------------------------------------------------------------------------------- /Tracking Streamer.xcodeproj/project.pbxproj: -------------------------------------------------------------------------------- 1 | // !$*UTF8*$! 2 | { 3 | archiveVersion = 1; 4 | classes = { 5 | }; 6 | objectVersion = 56; 7 | objects = { 8 | 9 | /* Begin PBXBuildFile section */ 10 | 160ED7982B4B74C5002AD987 /* 🧑HeadTrackingComponent&System.swift in Sources */ = {isa = PBXBuildFile; fileRef = 160ED7972B4B74C5002AD987 /* 🧑HeadTrackingComponent&System.swift */; }; 11 | 16253CFB2B4E2FCB0028F0E2 /* 📏Unit.swift in Sources */ = {isa = PBXBuildFile; fileRef = 16253CFA2B4E2FCB0028F0E2 /* 📏Unit.swift */; }; 12 | 16267B502B57D172000CA8AD /* Localizable.xcstrings in Resources */ = {isa = PBXBuildFile; fileRef = 16267B4F2B57D172000CA8AD /* Localizable.xcstrings */; }; 13 | 16267B522B57D362000CA8AD /* InfoPlist.xcstrings in Resources */ = {isa = PBXBuildFile; fileRef = 16267B512B57D362000CA8AD /* InfoPlist.xcstrings */; }; 14 | 1642FAB52B4D54A60084F9ED /* 🛠️SettingPanel.swift in Sources */ = {isa = PBXBuildFile; fileRef = 1642FAB42B4D54A60084F9ED /* 🛠️SettingPanel.swift */; }; 15 | 1642FAB92B4D6CAA0084F9ED /* 🌐RealityView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 1642FAB82B4D6CAA0084F9ED /* 🌐RealityView.swift */; }; 16 | 165ADB4F2B4B71B0008A756F /* App.swift in Sources */ = {isa = PBXBuildFile; fileRef = 165ADB4E2B4B71B0008A756F /* App.swift */; }; 17 | 165ADB512B4B71B0008A756F /* ContentView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 165ADB502B4B71B0008A756F /* ContentView.swift */; }; 18 | 165ADB532B4B71B2008A756F /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 165ADB522B4B71B2008A756F /* Assets.xcassets */; }; 19 | 16688DE62B59F35F004CE12B /* 🥽AppModel.swift in Sources */ = {isa = PBXBuildFile; fileRef = 16688DE52B59F35F004CE12B /* 🥽AppModel.swift */; }; 20 | 169D84AF2B5A319500BB5606 /* sound2.m4a in Resources */ = {isa = PBXBuildFile; fileRef = 169D84AD2B5A319000BB5606 /* sound2.m4a */; }; 21 | 169D84B02B5A319500BB5606 /* sound1.m4a in Resources */ = {isa = PBXBuildFile; fileRef = 169D84AE2B5A319400BB5606 /* sound1.m4a */; }; 22 | 16C95AE32B5E75D800CF0FED /* 🛠️Panel.swift in Sources */ = {isa = PBXBuildFile; fileRef = 16C95AE22B5E75D800CF0FED /* 🛠️Panel.swift */; }; 23 | 16C95AE52B5E762400CF0FED /* 🛠️MenuTop.swift in Sources */ = {isa = PBXBuildFile; fileRef = 16C95AE42B5E762400CF0FED /* 🛠️MenuTop.swift */; }; 24 | 16EA6A612B68629100A3A740 /* screenshot1280w3.jpg in Resources */ = {isa = PBXBuildFile; fileRef = 16EA6A5E2B68629100A3A740 /* screenshot1280w3.jpg */; }; 25 | 16EA6A632B68629200A3A740 /* screenshot1280w2.jpg in Resources */ = {isa = PBXBuildFile; fileRef = 16EA6A602B68629100A3A740 /* screenshot1280w2.jpg */; }; 26 | 16EE5E992B576DA800D354ED /* 🧩Name.swift in Sources */ = {isa = PBXBuildFile; fileRef = 16EE5E982B576DA800D354ED /* 🧩Name.swift */; }; 27 | 16EE5E9B2B576DB000D354ED /* 🧩Model.swift in Sources */ = {isa = PBXBuildFile; fileRef = 16EE5E9A2B576DB000D354ED /* 🧩Model.swift */; }; 28 | C14DAF532B7C375D00A7333E /* GRPC in Frameworks */ = {isa = PBXBuildFile; productRef = C14DAF522B7C375D00A7333E /* GRPC */; }; 29 | C14DAF552B7C375D00A7333E /* protoc-gen-grpc-swift in Frameworks */ = {isa = PBXBuildFile; productRef = C14DAF542B7C375D00A7333E /* protoc-gen-grpc-swift */; }; 30 | C1893C762B93C1AC00F2269D /* 
handtracking.grpc.swift in Sources */ = {isa = PBXBuildFile; fileRef = C1893C742B93C1AC00F2269D /* handtracking.grpc.swift */; }; 31 | C1893C772B93C1AC00F2269D /* handtracking.pb.swift in Sources */ = {isa = PBXBuildFile; fileRef = C1893C752B93C1AC00F2269D /* handtracking.pb.swift */; }; 32 | /* End PBXBuildFile section */ 33 | 34 | /* Begin PBXFileReference section */ 35 | 160ED7972B4B74C5002AD987 /* 🧑HeadTrackingComponent&System.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "🧑HeadTrackingComponent&System.swift"; sourceTree = ""; }; 36 | 160F46BB2B5A57AF001FE696 /* icon.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = icon.png; sourceTree = ""; }; 37 | 160F46BC2B5A57D4001FE696 /* appstore_badge.svg */ = {isa = PBXFileReference; lastKnownFileType = text; path = appstore_badge.svg; sourceTree = ""; }; 38 | 16253CFA2B4E2FCB0028F0E2 /* 📏Unit.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "📏Unit.swift"; sourceTree = ""; }; 39 | 16267B4F2B57D172000CA8AD /* Localizable.xcstrings */ = {isa = PBXFileReference; lastKnownFileType = text.json.xcstrings; path = Localizable.xcstrings; sourceTree = ""; }; 40 | 16267B512B57D362000CA8AD /* InfoPlist.xcstrings */ = {isa = PBXFileReference; lastKnownFileType = text.json.xcstrings; path = InfoPlist.xcstrings; sourceTree = ""; }; 41 | 16267B532B57D387000CA8AD /* README.md */ = {isa = PBXFileReference; lastKnownFileType = net.daringfireball.markdown; path = README.md; sourceTree = ""; }; 42 | 1638F80C2B50BC9700E0CAD2 /* DebugView.md */ = {isa = PBXFileReference; lastKnownFileType = net.daringfireball.markdown; path = DebugView.md; sourceTree = ""; }; 43 | 1642FAB42B4D54A60084F9ED /* 🛠️SettingPanel.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "🛠️SettingPanel.swift"; sourceTree = ""; }; 44 | 1642FAB82B4D6CAA0084F9ED /* 🌐RealityView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "🌐RealityView.swift"; sourceTree = ""; }; 45 | 165ADB472B4B71B0008A756F /* Tracking Streamer.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = "Tracking Streamer.app"; sourceTree = BUILT_PRODUCTS_DIR; }; 46 | 165ADB4E2B4B71B0008A756F /* App.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = App.swift; sourceTree = ""; }; 47 | 165ADB502B4B71B0008A756F /* ContentView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ContentView.swift; sourceTree = ""; }; 48 | 165ADB522B4B71B2008A756F /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = ""; }; 49 | 165ADB572B4B71B2008A756F /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = ""; }; 50 | 16688DE52B59F35F004CE12B /* 🥽AppModel.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "🥽AppModel.swift"; sourceTree = ""; }; 51 | 167817E02B61FABE00BE0067 /* screenshot1280w.jpg */ = {isa = PBXFileReference; lastKnownFileType = image.jpeg; path = screenshot1280w.jpg; sourceTree = ""; }; 52 | 169D84AD2B5A319000BB5606 /* sound2.m4a */ = {isa = PBXFileReference; lastKnownFileType = file; path = sound2.m4a; sourceTree = ""; }; 53 | 169D84AE2B5A319400BB5606 /* sound1.m4a */ = {isa = PBXFileReference; lastKnownFileType = file; path = sound1.m4a; sourceTree = ""; }; 54 | 16C95AE22B5E75D800CF0FED /* 🛠️Panel.swift */ = {isa = 
PBXFileReference; lastKnownFileType = sourcecode.swift; path = "🛠️Panel.swift"; sourceTree = ""; }; 55 | 16C95AE42B5E762400CF0FED /* 🛠️MenuTop.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "🛠️MenuTop.swift"; sourceTree = ""; }; 56 | 16EA6A5E2B68629100A3A740 /* screenshot1280w3.jpg */ = {isa = PBXFileReference; lastKnownFileType = image.jpeg; path = screenshot1280w3.jpg; sourceTree = ""; }; 57 | 16EA6A602B68629100A3A740 /* screenshot1280w2.jpg */ = {isa = PBXFileReference; lastKnownFileType = image.jpeg; path = screenshot1280w2.jpg; sourceTree = ""; }; 58 | 16EE5E962B576B9F00D354ED /* HandToGround.md */ = {isa = PBXFileReference; lastKnownFileType = net.daringfireball.markdown; path = HandToGround.md; sourceTree = ""; }; 59 | 16EE5E982B576DA800D354ED /* 🧩Name.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "🧩Name.swift"; sourceTree = ""; }; 60 | 16EE5E9A2B576DB000D354ED /* 🧩Model.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = "🧩Model.swift"; sourceTree = ""; }; 61 | C1893C742B93C1AC00F2269D /* handtracking.grpc.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; name = handtracking.grpc.swift; path = avp_stream/grpc_msg/handtracking.grpc.swift; sourceTree = SOURCE_ROOT; }; 62 | C1893C752B93C1AC00F2269D /* handtracking.pb.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; name = handtracking.pb.swift; path = avp_stream/grpc_msg/handtracking.pb.swift; sourceTree = SOURCE_ROOT; }; 63 | C19896F42B7D5099003BAF99 /* VisionProTeleop.entitlements */ = {isa = PBXFileReference; lastKnownFileType = text.plist.entitlements; path = VisionProTeleop.entitlements; sourceTree = ""; }; 64 | /* End PBXFileReference section */ 65 | 66 | /* Begin PBXFrameworksBuildPhase section */ 67 | 165ADB442B4B71B0008A756F /* Frameworks */ = { 68 | isa = PBXFrameworksBuildPhase; 69 | buildActionMask = 2147483647; 70 | files = ( 71 | C14DAF552B7C375D00A7333E /* protoc-gen-grpc-swift in Frameworks */, 72 | C14DAF532B7C375D00A7333E /* GRPC in Frameworks */, 73 | ); 74 | runOnlyForDeploymentPostprocessing = 0; 75 | }; 76 | /* End PBXFrameworksBuildPhase section */ 77 | 78 | /* Begin PBXGroup section */ 79 | 160F46BA2B5A5613001FE696 /* README assets */ = { 80 | isa = PBXGroup; 81 | children = ( 82 | 160F46BB2B5A57AF001FE696 /* icon.png */, 83 | 160F46BC2B5A57D4001FE696 /* appstore_badge.svg */, 84 | 167817E02B61FABE00BE0067 /* screenshot1280w.jpg */, 85 | 16EA6A602B68629100A3A740 /* screenshot1280w2.jpg */, 86 | 16EA6A5E2B68629100A3A740 /* screenshot1280w3.jpg */, 87 | ); 88 | path = "README assets"; 89 | sourceTree = ""; 90 | }; 91 | 16267B4E2B57D148000CA8AD /* Supporting files */ = { 92 | isa = PBXGroup; 93 | children = ( 94 | 165ADB522B4B71B2008A756F /* Assets.xcassets */, 95 | 169D84AE2B5A319400BB5606 /* sound1.m4a */, 96 | 169D84AD2B5A319000BB5606 /* sound2.m4a */, 97 | 165ADB572B4B71B2008A756F /* Info.plist */, 98 | 16267B4F2B57D172000CA8AD /* Localizable.xcstrings */, 99 | 16267B512B57D362000CA8AD /* InfoPlist.xcstrings */, 100 | 160F46BA2B5A5613001FE696 /* README assets */, 101 | 1638F80B2B50BC8800E0CAD2 /* Archive */, 102 | ); 103 | path = "Supporting files"; 104 | sourceTree = ""; 105 | }; 106 | 1638F80B2B50BC8800E0CAD2 /* Archive */ = { 107 | isa = PBXGroup; 108 | children = ( 109 | 1638F80C2B50BC9700E0CAD2 /* DebugView.md */, 110 | 16EE5E962B576B9F00D354ED /* HandToGround.md */, 111 | ); 112 | path = Archive; 113 | sourceTree = ""; 114 | 
}; 115 | 165ADB3E2B4B71B0008A756F = { 116 | isa = PBXGroup; 117 | children = ( 118 | 16267B532B57D387000CA8AD /* README.md */, 119 | 165ADB492B4B71B0008A756F /* Tracking Streamer */, 120 | 165ADB482B4B71B0008A756F /* Products */, 121 | ); 122 | sourceTree = ""; 123 | }; 124 | 165ADB482B4B71B0008A756F /* Products */ = { 125 | isa = PBXGroup; 126 | children = ( 127 | 165ADB472B4B71B0008A756F /* Tracking Streamer.app */, 128 | ); 129 | name = Products; 130 | sourceTree = ""; 131 | }; 132 | 165ADB492B4B71B0008A756F /* Tracking Streamer */ = { 133 | isa = PBXGroup; 134 | children = ( 135 | C1893C742B93C1AC00F2269D /* handtracking.grpc.swift */, 136 | C1893C752B93C1AC00F2269D /* handtracking.pb.swift */, 137 | C19896F42B7D5099003BAF99 /* VisionProTeleop.entitlements */, 138 | 165ADB4E2B4B71B0008A756F /* App.swift */, 139 | 165ADB502B4B71B0008A756F /* ContentView.swift */, 140 | 16688DE52B59F35F004CE12B /* 🥽AppModel.swift */, 141 | 1642FAB82B4D6CAA0084F9ED /* 🌐RealityView.swift */, 142 | 16EE5E982B576DA800D354ED /* 🧩Name.swift */, 143 | 16EE5E9A2B576DB000D354ED /* 🧩Model.swift */, 144 | 160ED7972B4B74C5002AD987 /* 🧑HeadTrackingComponent&System.swift */, 145 | 16253CFA2B4E2FCB0028F0E2 /* 📏Unit.swift */, 146 | 16C95ADF2B5E74FD00CF0FED /* 🛠️Menu */, 147 | 16267B4E2B57D148000CA8AD /* Supporting files */, 148 | ); 149 | path = "Tracking Streamer"; 150 | sourceTree = ""; 151 | }; 152 | 16C95ADF2B5E74FD00CF0FED /* 🛠️Menu */ = { 153 | isa = PBXGroup; 154 | children = ( 155 | 16C95AE42B5E762400CF0FED /* 🛠️MenuTop.swift */, 156 | 16C95AE22B5E75D800CF0FED /* 🛠️Panel.swift */, 157 | 1642FAB42B4D54A60084F9ED /* 🛠️SettingPanel.swift */, 158 | ); 159 | path = "🛠️Menu"; 160 | sourceTree = ""; 161 | }; 162 | /* End PBXGroup section */ 163 | 164 | /* Begin PBXNativeTarget section */ 165 | 165ADB462B4B71B0008A756F /* Tracking Streamer */ = { 166 | isa = PBXNativeTarget; 167 | buildConfigurationList = 165ADB5A2B4B71B2008A756F /* Build configuration list for PBXNativeTarget "Tracking Streamer" */; 168 | buildPhases = ( 169 | 165ADB432B4B71B0008A756F /* Sources */, 170 | 165ADB442B4B71B0008A756F /* Frameworks */, 171 | 165ADB452B4B71B0008A756F /* Resources */, 172 | ); 173 | buildRules = ( 174 | ); 175 | dependencies = ( 176 | ); 177 | name = "Tracking Streamer"; 178 | packageProductDependencies = ( 179 | C14DAF522B7C375D00A7333E /* GRPC */, 180 | C14DAF542B7C375D00A7333E /* protoc-gen-grpc-swift */, 181 | ); 182 | productName = HandsWidth; 183 | productReference = 165ADB472B4B71B0008A756F /* Tracking Streamer.app */; 184 | productType = "com.apple.product-type.application"; 185 | }; 186 | /* End PBXNativeTarget section */ 187 | 188 | /* Begin PBXProject section */ 189 | 165ADB3F2B4B71B0008A756F /* Project object */ = { 190 | isa = PBXProject; 191 | attributes = { 192 | BuildIndependentTargetsInParallel = 1; 193 | LastSwiftUpdateCheck = 1520; 194 | LastUpgradeCheck = 1530; 195 | TargetAttributes = { 196 | 165ADB462B4B71B0008A756F = { 197 | CreatedOnToolsVersion = 15.2; 198 | }; 199 | }; 200 | }; 201 | buildConfigurationList = 165ADB422B4B71B0008A756F /* Build configuration list for PBXProject "Tracking Streamer" */; 202 | compatibilityVersion = "Xcode 14.0"; 203 | developmentRegion = en; 204 | hasScannedForEncodings = 0; 205 | knownRegions = ( 206 | en, 207 | Base, 208 | ja, 209 | ); 210 | mainGroup = 165ADB3E2B4B71B0008A756F; 211 | packageReferences = ( 212 | C14DAF512B7C375D00A7333E /* XCRemoteSwiftPackageReference "grpc-swift" */, 213 | ); 214 | productRefGroup = 165ADB482B4B71B0008A756F /* Products */; 215 | 
projectDirPath = ""; 216 | projectRoot = ""; 217 | targets = ( 218 | 165ADB462B4B71B0008A756F /* Tracking Streamer */, 219 | ); 220 | }; 221 | /* End PBXProject section */ 222 | 223 | /* Begin PBXResourcesBuildPhase section */ 224 | 165ADB452B4B71B0008A756F /* Resources */ = { 225 | isa = PBXResourcesBuildPhase; 226 | buildActionMask = 2147483647; 227 | files = ( 228 | 169D84B02B5A319500BB5606 /* sound1.m4a in Resources */, 229 | 169D84AF2B5A319500BB5606 /* sound2.m4a in Resources */, 230 | 16EA6A612B68629100A3A740 /* screenshot1280w3.jpg in Resources */, 231 | 16267B522B57D362000CA8AD /* InfoPlist.xcstrings in Resources */, 232 | 16EA6A632B68629200A3A740 /* screenshot1280w2.jpg in Resources */, 233 | 16267B502B57D172000CA8AD /* Localizable.xcstrings in Resources */, 234 | 165ADB532B4B71B2008A756F /* Assets.xcassets in Resources */, 235 | ); 236 | runOnlyForDeploymentPostprocessing = 0; 237 | }; 238 | /* End PBXResourcesBuildPhase section */ 239 | 240 | /* Begin PBXSourcesBuildPhase section */ 241 | 165ADB432B4B71B0008A756F /* Sources */ = { 242 | isa = PBXSourcesBuildPhase; 243 | buildActionMask = 2147483647; 244 | files = ( 245 | 16EE5E992B576DA800D354ED /* 🧩Name.swift in Sources */, 246 | 160ED7982B4B74C5002AD987 /* 🧑HeadTrackingComponent&System.swift in Sources */, 247 | 1642FAB92B4D6CAA0084F9ED /* 🌐RealityView.swift in Sources */, 248 | 1642FAB52B4D54A60084F9ED /* 🛠️SettingPanel.swift in Sources */, 249 | 16253CFB2B4E2FCB0028F0E2 /* 📏Unit.swift in Sources */, 250 | 165ADB512B4B71B0008A756F /* ContentView.swift in Sources */, 251 | 16EE5E9B2B576DB000D354ED /* 🧩Model.swift in Sources */, 252 | 16688DE62B59F35F004CE12B /* 🥽AppModel.swift in Sources */, 253 | 165ADB4F2B4B71B0008A756F /* App.swift in Sources */, 254 | 16C95AE52B5E762400CF0FED /* 🛠️MenuTop.swift in Sources */, 255 | C1893C762B93C1AC00F2269D /* handtracking.grpc.swift in Sources */, 256 | C1893C772B93C1AC00F2269D /* handtracking.pb.swift in Sources */, 257 | 16C95AE32B5E75D800CF0FED /* 🛠️Panel.swift in Sources */, 258 | ); 259 | runOnlyForDeploymentPostprocessing = 0; 260 | }; 261 | /* End PBXSourcesBuildPhase section */ 262 | 263 | /* Begin XCBuildConfiguration section */ 264 | 165ADB582B4B71B2008A756F /* Debug */ = { 265 | isa = XCBuildConfiguration; 266 | buildSettings = { 267 | ALWAYS_SEARCH_USER_PATHS = NO; 268 | ASSETCATALOG_COMPILER_GENERATE_SWIFT_ASSET_SYMBOL_EXTENSIONS = YES; 269 | CLANG_ANALYZER_LOCALIZABILITY_NONLOCALIZED = YES; 270 | CLANG_ANALYZER_NONNULL = YES; 271 | CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE; 272 | CLANG_CXX_LANGUAGE_STANDARD = "gnu++20"; 273 | CLANG_ENABLE_MODULES = YES; 274 | CLANG_ENABLE_OBJC_ARC = YES; 275 | CLANG_ENABLE_OBJC_WEAK = YES; 276 | CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES; 277 | CLANG_WARN_BOOL_CONVERSION = YES; 278 | CLANG_WARN_COMMA = YES; 279 | CLANG_WARN_CONSTANT_CONVERSION = YES; 280 | CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES; 281 | CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; 282 | CLANG_WARN_DOCUMENTATION_COMMENTS = YES; 283 | CLANG_WARN_EMPTY_BODY = YES; 284 | CLANG_WARN_ENUM_CONVERSION = YES; 285 | CLANG_WARN_INFINITE_RECURSION = YES; 286 | CLANG_WARN_INT_CONVERSION = YES; 287 | CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES; 288 | CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES; 289 | CLANG_WARN_OBJC_LITERAL_CONVERSION = YES; 290 | CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; 291 | CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = YES; 292 | CLANG_WARN_RANGE_LOOP_ANALYSIS = YES; 293 | CLANG_WARN_STRICT_PROTOTYPES = YES; 294 | CLANG_WARN_SUSPICIOUS_MOVE = 
YES; 295 | CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE; 296 | CLANG_WARN_UNREACHABLE_CODE = YES; 297 | CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; 298 | COPY_PHASE_STRIP = NO; 299 | DEBUG_INFORMATION_FORMAT = dwarf; 300 | ENABLE_STRICT_OBJC_MSGSEND = YES; 301 | ENABLE_TESTABILITY = YES; 302 | ENABLE_USER_SCRIPT_SANDBOXING = YES; 303 | GCC_C_LANGUAGE_STANDARD = gnu17; 304 | GCC_DYNAMIC_NO_PIC = NO; 305 | GCC_NO_COMMON_BLOCKS = YES; 306 | GCC_OPTIMIZATION_LEVEL = 0; 307 | GCC_PREPROCESSOR_DEFINITIONS = ( 308 | "DEBUG=1", 309 | "$(inherited)", 310 | ); 311 | GCC_WARN_64_TO_32_BIT_CONVERSION = YES; 312 | GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; 313 | GCC_WARN_UNDECLARED_SELECTOR = YES; 314 | GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; 315 | GCC_WARN_UNUSED_FUNCTION = YES; 316 | GCC_WARN_UNUSED_VARIABLE = YES; 317 | LOCALIZATION_PREFERS_STRING_CATALOGS = YES; 318 | MTL_ENABLE_DEBUG_INFO = INCLUDE_SOURCE; 319 | MTL_FAST_MATH = YES; 320 | ONLY_ACTIVE_ARCH = YES; 321 | SDKROOT = xros; 322 | SWIFT_ACTIVE_COMPILATION_CONDITIONS = "DEBUG $(inherited)"; 323 | SWIFT_OPTIMIZATION_LEVEL = "-Onone"; 324 | XROS_DEPLOYMENT_TARGET = 1.0; 325 | }; 326 | name = Debug; 327 | }; 328 | 165ADB592B4B71B2008A756F /* Release */ = { 329 | isa = XCBuildConfiguration; 330 | buildSettings = { 331 | ALWAYS_SEARCH_USER_PATHS = NO; 332 | ASSETCATALOG_COMPILER_GENERATE_SWIFT_ASSET_SYMBOL_EXTENSIONS = YES; 333 | CLANG_ANALYZER_LOCALIZABILITY_NONLOCALIZED = YES; 334 | CLANG_ANALYZER_NONNULL = YES; 335 | CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE; 336 | CLANG_CXX_LANGUAGE_STANDARD = "gnu++20"; 337 | CLANG_ENABLE_MODULES = YES; 338 | CLANG_ENABLE_OBJC_ARC = YES; 339 | CLANG_ENABLE_OBJC_WEAK = YES; 340 | CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES; 341 | CLANG_WARN_BOOL_CONVERSION = YES; 342 | CLANG_WARN_COMMA = YES; 343 | CLANG_WARN_CONSTANT_CONVERSION = YES; 344 | CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES; 345 | CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; 346 | CLANG_WARN_DOCUMENTATION_COMMENTS = YES; 347 | CLANG_WARN_EMPTY_BODY = YES; 348 | CLANG_WARN_ENUM_CONVERSION = YES; 349 | CLANG_WARN_INFINITE_RECURSION = YES; 350 | CLANG_WARN_INT_CONVERSION = YES; 351 | CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES; 352 | CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES; 353 | CLANG_WARN_OBJC_LITERAL_CONVERSION = YES; 354 | CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; 355 | CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = YES; 356 | CLANG_WARN_RANGE_LOOP_ANALYSIS = YES; 357 | CLANG_WARN_STRICT_PROTOTYPES = YES; 358 | CLANG_WARN_SUSPICIOUS_MOVE = YES; 359 | CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE; 360 | CLANG_WARN_UNREACHABLE_CODE = YES; 361 | CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; 362 | COPY_PHASE_STRIP = NO; 363 | DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym"; 364 | ENABLE_NS_ASSERTIONS = NO; 365 | ENABLE_STRICT_OBJC_MSGSEND = YES; 366 | ENABLE_USER_SCRIPT_SANDBOXING = YES; 367 | GCC_C_LANGUAGE_STANDARD = gnu17; 368 | GCC_NO_COMMON_BLOCKS = YES; 369 | GCC_WARN_64_TO_32_BIT_CONVERSION = YES; 370 | GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; 371 | GCC_WARN_UNDECLARED_SELECTOR = YES; 372 | GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; 373 | GCC_WARN_UNUSED_FUNCTION = YES; 374 | GCC_WARN_UNUSED_VARIABLE = YES; 375 | LOCALIZATION_PREFERS_STRING_CATALOGS = YES; 376 | MTL_ENABLE_DEBUG_INFO = NO; 377 | MTL_FAST_MATH = YES; 378 | SDKROOT = xros; 379 | SWIFT_COMPILATION_MODE = wholemodule; 380 | VALIDATE_PRODUCT = YES; 381 | XROS_DEPLOYMENT_TARGET = 1.0; 382 | }; 383 | name = Release; 384 | }; 385 | 
165ADB5B2B4B71B2008A756F /* Debug */ = { 386 | isa = XCBuildConfiguration; 387 | buildSettings = { 388 | ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; 389 | ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor; 390 | ASSETCATALOG_COMPILER_INCLUDE_ALL_APPICON_ASSETS = YES; 391 | CODE_SIGN_ENTITLEMENTS = "Tracking Streamer/VisionProTeleop.entitlements"; 392 | CODE_SIGN_IDENTITY = "Apple Development"; 393 | CODE_SIGN_STYLE = Automatic; 394 | CURRENT_PROJECT_VERSION = 1; 395 | DEVELOPMENT_ASSET_PATHS = ""; 396 | DEVELOPMENT_TEAM = ATTMC2WVK2; 397 | ENABLE_PREVIEWS = YES; 398 | GENERATE_INFOPLIST_FILE = YES; 399 | INFOPLIST_FILE = "$(TARGET_NAME)/Supporting files/Info.plist"; 400 | INFOPLIST_KEY_CFBundleDisplayName = "Tracking Streamer"; 401 | INFOPLIST_KEY_LSApplicationCategoryType = "public.app-category.utilities"; 402 | LD_RUNPATH_SEARCH_PATHS = ( 403 | "$(inherited)", 404 | "@executable_path/Frameworks", 405 | ); 406 | MARKETING_VERSION = 1.0; 407 | PRODUCT_BUNDLE_IDENTIFIER = improbable.younghyo.DexTeleop2; 408 | PRODUCT_NAME = "$(TARGET_NAME)"; 409 | PROVISIONING_PROFILE_SPECIFIER = ""; 410 | SUPPORTED_PLATFORMS = "xros xrsimulator"; 411 | SWIFT_EMIT_LOC_STRINGS = YES; 412 | SWIFT_VERSION = 5.0; 413 | TARGETED_DEVICE_FAMILY = "1,2,7"; 414 | }; 415 | name = Debug; 416 | }; 417 | 165ADB5C2B4B71B2008A756F /* Release */ = { 418 | isa = XCBuildConfiguration; 419 | buildSettings = { 420 | ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; 421 | ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor; 422 | ASSETCATALOG_COMPILER_INCLUDE_ALL_APPICON_ASSETS = YES; 423 | CODE_SIGN_ENTITLEMENTS = "Tracking Streamer/VisionProTeleop.entitlements"; 424 | CODE_SIGN_IDENTITY = "Apple Development"; 425 | CODE_SIGN_STYLE = Automatic; 426 | CURRENT_PROJECT_VERSION = 1; 427 | DEVELOPMENT_ASSET_PATHS = ""; 428 | DEVELOPMENT_TEAM = ATTMC2WVK2; 429 | ENABLE_PREVIEWS = YES; 430 | GENERATE_INFOPLIST_FILE = YES; 431 | INFOPLIST_FILE = "$(TARGET_NAME)/Supporting files/Info.plist"; 432 | INFOPLIST_KEY_CFBundleDisplayName = "Tracking Streamer"; 433 | INFOPLIST_KEY_LSApplicationCategoryType = "public.app-category.utilities"; 434 | LD_RUNPATH_SEARCH_PATHS = ( 435 | "$(inherited)", 436 | "@executable_path/Frameworks", 437 | ); 438 | MARKETING_VERSION = 1.0; 439 | PRODUCT_BUNDLE_IDENTIFIER = improbable.younghyo.DexTeleop2; 440 | PRODUCT_NAME = "$(TARGET_NAME)"; 441 | PROVISIONING_PROFILE_SPECIFIER = ""; 442 | SUPPORTED_PLATFORMS = "xros xrsimulator"; 443 | SWIFT_EMIT_LOC_STRINGS = YES; 444 | SWIFT_VERSION = 5.0; 445 | TARGETED_DEVICE_FAMILY = "1,2,7"; 446 | }; 447 | name = Release; 448 | }; 449 | /* End XCBuildConfiguration section */ 450 | 451 | /* Begin XCConfigurationList section */ 452 | 165ADB422B4B71B0008A756F /* Build configuration list for PBXProject "Tracking Streamer" */ = { 453 | isa = XCConfigurationList; 454 | buildConfigurations = ( 455 | 165ADB582B4B71B2008A756F /* Debug */, 456 | 165ADB592B4B71B2008A756F /* Release */, 457 | ); 458 | defaultConfigurationIsVisible = 0; 459 | defaultConfigurationName = Release; 460 | }; 461 | 165ADB5A2B4B71B2008A756F /* Build configuration list for PBXNativeTarget "Tracking Streamer" */ = { 462 | isa = XCConfigurationList; 463 | buildConfigurations = ( 464 | 165ADB5B2B4B71B2008A756F /* Debug */, 465 | 165ADB5C2B4B71B2008A756F /* Release */, 466 | ); 467 | defaultConfigurationIsVisible = 0; 468 | defaultConfigurationName = Release; 469 | }; 470 | /* End XCConfigurationList section */ 471 | 472 | /* Begin XCRemoteSwiftPackageReference section */ 473 | 
C14DAF512B7C375D00A7333E /* XCRemoteSwiftPackageReference "grpc-swift" */ = { 474 | isa = XCRemoteSwiftPackageReference; 475 | repositoryURL = "https://github.com/grpc/grpc-swift.git"; 476 | requirement = { 477 | kind = upToNextMajorVersion; 478 | minimumVersion = 1.21.1; 479 | }; 480 | }; 481 | /* End XCRemoteSwiftPackageReference section */ 482 | 483 | /* Begin XCSwiftPackageProductDependency section */ 484 | C14DAF522B7C375D00A7333E /* GRPC */ = { 485 | isa = XCSwiftPackageProductDependency; 486 | package = C14DAF512B7C375D00A7333E /* XCRemoteSwiftPackageReference "grpc-swift" */; 487 | productName = GRPC; 488 | }; 489 | C14DAF542B7C375D00A7333E /* protoc-gen-grpc-swift */ = { 490 | isa = XCSwiftPackageProductDependency; 491 | package = C14DAF512B7C375D00A7333E /* XCRemoteSwiftPackageReference "grpc-swift" */; 492 | productName = "protoc-gen-grpc-swift"; 493 | }; 494 | /* End XCSwiftPackageProductDependency section */ 495 | }; 496 | rootObject = 165ADB3F2B4B71B0008A756F /* Project object */; 497 | } 498 | -------------------------------------------------------------------------------- /Tracking Streamer.xcodeproj/project.xcworkspace/contents.xcworkspacedata: -------------------------------------------------------------------------------- 1 | 2 | 4 | 6 | 7 | 8 | -------------------------------------------------------------------------------- /Tracking Streamer.xcodeproj/project.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | IDEDidComputeMac32BitWarning 6 | 7 | 8 | 9 | -------------------------------------------------------------------------------- /Tracking Streamer.xcodeproj/project.xcworkspace/xcshareddata/swiftpm/Package.resolved: -------------------------------------------------------------------------------- 1 | { 2 | "originHash" : "b0d8975aa19f1bb7106ae78f24de3911e84189892afc3b6fe6db19bcec9e631c", 3 | "pins" : [ 4 | { 5 | "identity" : "grpc-swift", 6 | "kind" : "remoteSourceControl", 7 | "location" : "https://github.com/grpc/grpc-swift.git", 8 | "state" : { 9 | "revision" : "5d0cf1c7b2e641e3a8961089e7e4672f4fe8abac", 10 | "version" : "1.21.1" 11 | } 12 | }, 13 | { 14 | "identity" : "swift-atomics", 15 | "kind" : "remoteSourceControl", 16 | "location" : "https://github.com/apple/swift-atomics.git", 17 | "state" : { 18 | "revision" : "cd142fd2f64be2100422d658e7411e39489da985", 19 | "version" : "1.2.0" 20 | } 21 | }, 22 | { 23 | "identity" : "swift-collections", 24 | "kind" : "remoteSourceControl", 25 | "location" : "https://github.com/apple/swift-collections.git", 26 | "state" : { 27 | "revision" : "94cf62b3ba8d4bed62680a282d4c25f9c63c2efb", 28 | "version" : "1.1.0" 29 | } 30 | }, 31 | { 32 | "identity" : "swift-http-types", 33 | "kind" : "remoteSourceControl", 34 | "location" : "https://github.com/apple/swift-http-types", 35 | "state" : { 36 | "revision" : "12358d55a3824bd5fed310b999ea8cf83a9a1a65", 37 | "version" : "1.0.3" 38 | } 39 | }, 40 | { 41 | "identity" : "swift-log", 42 | "kind" : "remoteSourceControl", 43 | "location" : "https://github.com/apple/swift-log.git", 44 | "state" : { 45 | "revision" : "e97a6fcb1ab07462881ac165fdbb37f067e205d5", 46 | "version" : "1.5.4" 47 | } 48 | }, 49 | { 50 | "identity" : "swift-nio", 51 | "kind" : "remoteSourceControl", 52 | "location" : "https://github.com/apple/swift-nio.git", 53 | "state" : { 54 | "revision" : "635b2589494c97e48c62514bc8b37ced762e0a62", 55 | "version" : "2.63.0" 56 | } 57 | }, 58 | { 59 | "identity" : 
"swift-nio-extras", 60 | "kind" : "remoteSourceControl", 61 | "location" : "https://github.com/apple/swift-nio-extras.git", 62 | "state" : { 63 | "revision" : "363da63c1966405764f380c627409b2f9d9e710b", 64 | "version" : "1.21.0" 65 | } 66 | }, 67 | { 68 | "identity" : "swift-nio-http2", 69 | "kind" : "remoteSourceControl", 70 | "location" : "https://github.com/apple/swift-nio-http2.git", 71 | "state" : { 72 | "revision" : "0904bf0feb5122b7e5c3f15db7df0eabe623dd87", 73 | "version" : "1.30.0" 74 | } 75 | }, 76 | { 77 | "identity" : "swift-nio-ssl", 78 | "kind" : "remoteSourceControl", 79 | "location" : "https://github.com/apple/swift-nio-ssl.git", 80 | "state" : { 81 | "revision" : "7c381eb6083542b124a6c18fae742f55001dc2b5", 82 | "version" : "2.26.0" 83 | } 84 | }, 85 | { 86 | "identity" : "swift-nio-transport-services", 87 | "kind" : "remoteSourceControl", 88 | "location" : "https://github.com/apple/swift-nio-transport-services.git", 89 | "state" : { 90 | "revision" : "6cbe0ed2b394f21ab0d46b9f0c50c6be964968ce", 91 | "version" : "1.20.1" 92 | } 93 | }, 94 | { 95 | "identity" : "swift-protobuf", 96 | "kind" : "remoteSourceControl", 97 | "location" : "https://github.com/apple/swift-protobuf.git", 98 | "state" : { 99 | "revision" : "65e8f29b2d63c4e38e736b25c27b83e012159be8", 100 | "version" : "1.25.2" 101 | } 102 | }, 103 | { 104 | "identity" : "swift-system", 105 | "kind" : "remoteSourceControl", 106 | "location" : "https://github.com/apple/swift-system.git", 107 | "state" : { 108 | "revision" : "025bcb1165deab2e20d4eaba79967ce73013f496", 109 | "version" : "1.2.1" 110 | } 111 | } 112 | ], 113 | "version" : 3 114 | } 115 | -------------------------------------------------------------------------------- /Tracking Streamer.xcodeproj/project.xcworkspace/xcuserdata/yhpark.xcuserdatad/UserInterfaceState.xcuserstate: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Improbable-AI/VisionProTeleop/19c2eebdea50ed6851375a0e48b11eec84b9816d/Tracking Streamer.xcodeproj/project.xcworkspace/xcuserdata/yhpark.xcuserdatad/UserInterfaceState.xcuserstate -------------------------------------------------------------------------------- /Tracking Streamer.xcodeproj/xcshareddata/xcschemes/VisionProTeleop.xcscheme: -------------------------------------------------------------------------------- 1 | 2 | 5 | 9 | 10 | 16 | 22 | 23 | 24 | 25 | 26 | 32 | 33 | 43 | 45 | 51 | 52 | 53 | 54 | 60 | 62 | 68 | 69 | 70 | 71 | 73 | 74 | 77 | 78 | 79 | -------------------------------------------------------------------------------- /Tracking Streamer.xcodeproj/xcuserdata/account2309.xcuserdatad/xcschemes/xcschememanagement.plist: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | SchemeUserState 6 | 7 | HandsWidth.xcscheme_^#shared#^_ 8 | 9 | orderHint 10 | 0 11 | 12 | 13 | SuppressBuildableAutocreation 14 | 15 | 165ADB462B4B71B0008A756F 16 | 17 | primary 18 | 19 | 20 | 21 | 22 | 23 | -------------------------------------------------------------------------------- /Tracking Streamer.xcodeproj/xcuserdata/yhpark.xcuserdatad/xcschemes/xcschememanagement.plist: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | SchemeUserState 6 | 7 | VisionProTeleop.xcscheme_^#shared#^_ 8 | 9 | orderHint 10 | 0 11 | 12 | 13 | SuppressBuildableAutocreation 14 | 15 | 165ADB462B4B71B0008A756F 16 | 17 | primary 18 | 19 | 20 | 21 | 22 | 23 | 
--------------------------------------------------------------------------------
/Tracking Streamer/App.swift:
--------------------------------------------------------------------------------
1 | import SwiftUI
2 | 
3 | @main
4 | struct VisionProTeleopApp: App {
5 |     var body: some Scene {
6 |         WindowGroup {
7 |             ContentView()
8 |         }
9 |         .windowResizability(.contentSize)
10 |         ImmersiveSpace(id: "immersiveSpace") {
11 |             🌐RealityView(model: 🥽AppModel())
12 |         }
13 | 
14 |     }
15 |     init() {
16 |         🧑HeadTrackingComponent.registerComponent()
17 |         🧑HeadTrackingSystem.registerSystem()
18 |     }
19 | }
20 | 
21 | 
--------------------------------------------------------------------------------
/Tracking Streamer/ContentView.swift:
--------------------------------------------------------------------------------
1 | import SwiftUI
2 | import CoreLocation
3 | import UIKit
4 | import SystemConfiguration.CaptiveNetwork
5 | 
6 | 
7 | struct ContentView: View {
8 |     @Environment(\.openImmersiveSpace) var openImmersiveSpace
9 |     @Environment(\.dismissWindow) var dismissWindow
10 |     var body: some View {
11 |         VStack(spacing: 32) {
12 |             HStack(spacing: 28) {
13 |                 Image(.graph2)
14 |                     .resizable()
15 |                     .aspectRatio(contentMode: .fit)
16 |                     .frame(width: 1200)
17 |                     .clipShape(.rect(cornerRadius: 24))
18 |             }
19 |             Text("You're on IP address [\(getIPAddress())]")
20 |                 .font(.largeTitle.weight(.medium))
21 | 
22 |             Button {
23 |                 Task {
24 |                     await self.openImmersiveSpace(id: "immersiveSpace")
25 |                     self.dismissWindow()
26 |                 }
27 |             } label: {
28 |                 Text("Start")
29 |                     .font(.largeTitle)
30 |                     .padding(.vertical, 12)
31 |                     .padding(.horizontal, 4)
32 |             }
33 | 
34 |         }
35 |         .padding(32)
36 |     }
37 | }
38 | 
39 | func getIPAddress() -> String {
40 |     var address: String?
41 |     var ifaddr: UnsafeMutablePointer<ifaddrs>? = nil
42 |     if getifaddrs(&ifaddr) == 0 {
43 |         var ptr = ifaddr
44 |         while ptr != nil {
45 |             defer { ptr = ptr?.pointee.ifa_next }
46 | 
47 |             guard let interface = ptr?.pointee else { return "" }
48 |             let addrFamily = interface.ifa_addr.pointee.sa_family
49 |             if addrFamily == UInt8(AF_INET) || addrFamily == UInt8(AF_INET6) {
50 | 
51 |                 // wifi = ["en0"]
52 |                 // wired = ["en2", "en3", "en4"]
53 |                 // cellular = ["pdp_ip0","pdp_ip1","pdp_ip2","pdp_ip3"]
54 | 
55 |                 let name: String = String(cString: (interface.ifa_name))
56 |                 if name == "en0" || name == "en2" || name == "en3" || name == "en4" || name == "pdp_ip0" || name == "pdp_ip1" || name == "pdp_ip2" || name == "pdp_ip3" {
57 |                     var hostname = [CChar](repeating: 0, count: Int(NI_MAXHOST))
58 |                     getnameinfo(interface.ifa_addr, socklen_t((interface.ifa_addr.pointee.sa_len)), &hostname, socklen_t(hostname.count), nil, socklen_t(0), NI_NUMERICHOST)
59 |                     address = String(cString: hostname)
60 |                 }
61 |             }
62 |         }
63 |         freeifaddrs(ifaddr)
64 |     }
65 |     return address ?? ""
66 | }
67 | 
68 | 
69 | func getWiFiName() -> String? {
70 |     var ssid: String?
71 | 
72 |     if let interfaces = CNCopySupportedInterfaces() as NSArray? {
73 |         for interface in interfaces {
74 |             if let interfaceInfo = CNCopyCurrentNetworkInfo(interface as! CFString) as NSDictionary? {
75 |                 ssid = interfaceInfo[kCNNetworkInfoKeySSID as String] as?
String 76 | break 77 | } 78 | } 79 | } 80 | 81 | return ssid 82 | } 83 | -------------------------------------------------------------------------------- /Tracking Streamer/Supporting files/.DS_Store: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Improbable-AI/VisionProTeleop/19c2eebdea50ed6851375a0e48b11eec84b9816d/Tracking Streamer/Supporting files/.DS_Store -------------------------------------------------------------------------------- /Tracking Streamer/Supporting files/Archive/DebugView.md: -------------------------------------------------------------------------------- 1 | # 👆DebugView 2 | 3 | import SwiftUI 4 | import RealityKit 5 | import ARKit 6 | 7 | struct 👆DebugView: View { 8 | @EnvironmentObject var model: 📱AppModel 9 | @State private var rootEntity: Entity? 10 | @State private var text: String = "placeholder" 11 | var body: some View { 12 | RealityView { content, _ in 13 | let rootEntity = Entity() 14 | rootEntity.name = "ROOT" 15 | content.add(rootEntity) 16 | self.rootEntity = rootEntity 17 | let entity = Entity() 18 | entity.name = "POINTER" 19 | entity.components.set(📍HeadAnchorComponent()) 20 | entity.components.set(InputTargetComponent()) 21 | entity.components.set(CollisionComponent(shapes: [.generateConvex(from: .generateSphere(radius: 0.1))])) 22 | entity.components.set(ModelComponent(mesh: .generateSphere(radius: 0.02), 23 | materials: [SimpleMaterial(color: .white, isMetallic: false)])) 24 | rootEntity.addChild(entity) 25 | do { 26 | let entity = Entity() 27 | entity.name = "LINE" 28 | entity.components.set(OpacityComponent(opacity: 0.9)) 29 | rootEntity.addChild(entity) 30 | } 31 | } update: { content, attachments in 32 | let entity = attachments.entity(for: "resultLabel")! 
33 |             entity.components.set(📍HeadAnchorComponent())
34 |             entity.name = "resultLabel"
35 |             rootEntity?.addChild(entity)
36 |             if let p1 = rootEntity?.findEntity(named: "1")?.position,
37 |                let p2 = rootEntity?.findEntity(named: "2")?.position {
38 |                 entity.position = (p1 + p2) / 2
39 |             }
40 |         } attachments: {
41 |             Attachment(id: "resultLabel") {
42 |                 Text(self.text)
43 |                     .font(.system(size: 54).bold())
44 |                     .padding(24)
45 |                     .glassBackgroundEffect()
46 |             }
47 |         }
48 |         .onTapGesture {
49 |             self.setPoints()
50 |             self.setText()
51 |             self.setLine()
52 |         }
53 |     }
54 | }
55 | 
56 | fileprivate extension 👆DebugView {
57 |     func setPoints() {
58 |         guard let pointer = rootEntity?.findEntity(named: "POINTER") else { return }
59 |         if rootEntity?.findEntity(named: "1") == nil {
60 |             let entity = Entity()
61 |             entity.name = "1"
62 |             entity.position = pointer.position
63 |             entity.components.set(ModelComponent(mesh: .generateSphere(radius: 0.025),
64 |                                                  materials: [SimpleMaterial(color: .red, isMetallic: false)]))
65 |             rootEntity?.addChild(entity)
66 |         } else {
67 |             if let entity2 = rootEntity?.findEntity(named: "2") {
68 |                 rootEntity?.removeChild(entity2)
69 |             }
70 |             let entity = Entity()
71 |             entity.name = "2"
72 |             entity.position = pointer.position
73 |             entity.components.set(ModelComponent(mesh: .generateSphere(radius: 0.025),
74 |                                                  materials: [SimpleMaterial(color: .green, isMetallic: false)]))
75 |             rootEntity?.addChild(entity)
76 |         }
77 |     }
78 |     func setText() {
79 |         guard let p1 = rootEntity?.findEntity(named: "1")?.position,
80 |               let p2 = rootEntity?.findEntity(named: "2")?.position else {
81 |             return
82 |         }
83 |         let lengthFormatter = LengthFormatter()
84 |         lengthFormatter.numberFormatter.maximumFractionDigits = 2
85 |         self.text = lengthFormatter.string(fromValue: .init(distance(p1, p2)), unit: .meter)
86 |     }
87 |     func setLine() {
88 |         guard let p1 = rootEntity?.findEntity(named: "1")?.position,
89 |               let p2 = rootEntity?.findEntity(named: "2")?.position else {
90 |             return
91 |         }
92 |         if let entity = rootEntity?.findEntity(named: "LINE") {
93 |             entity.position = (p1 + p2) / 2
94 |             entity.components.set(ModelComponent(mesh: .generateBox(width: 0.01,
95 |                                                                     height: 0.01,
96 |                                                                     depth: distance(p1, p2),
97 |                                                                     cornerRadius: 0.005),
98 |                                                  materials: [SimpleMaterial(color: .white, isMetallic: false)]))
99 |             entity.look(at: p1,
100 |                         from: entity.position,
101 |                         relativeTo: nil)
102 |             let occlusionEntity = Entity()
103 |             occlusionEntity.components.set(ModelComponent(mesh: .generateSphere(radius: 0.08),
104 |                                                           materials: [OcclusionMaterial()]))
105 |             entity.addChild(occlusionEntity)
106 |         }
107 |     }
108 | }
109 | 
110 | ## system
111 | 
112 | func update(context: SceneUpdateContext) {
113 |     guard let deviceAnchor = self.provider.queryDeviceAnchor(atTimestamp: CACurrentMediaTime()) else {
114 |         return
115 |     }
116 |     for entity in context.entities(matching: .init(where: .has(📍HeadAnchorComponent.self)),
117 |                                    updatingSystemWhen: .rendering) {
118 |         if entity.name == "resultLabel" {
119 |             entity.look(at: Transform(matrix: deviceAnchor.originFromAnchorTransform).translation,
120 |                         from: entity.position(relativeTo: nil),
121 |                         relativeTo: nil,
122 |                         forward: .positiveZ)
123 |         }
124 |         #if DEBUG
125 |         if entity.name == "POINTER" {
126 |             entity.transform = Transform(matrix: deviceAnchor.originFromAnchorTransform)
127 |             entity.setPosition([0, 0, -1], relativeTo: entity)
128 |         }
129 |         #endif
130 |     }
131 | }
132 | 
--------------------------------------------------------------------------------
/Tracking Streamer/Supporting
files/Archive/HandToGround.md: -------------------------------------------------------------------------------- 1 | # Hand to ground Mode 2 | 3 | ``` 4 | import SwiftUI 5 | 6 | enum 🪄Mode: String { 7 | case handToHand, handToGround 8 | } 9 | 10 | extension 🪄Mode: CaseIterable, Identifiable { 11 | var id: Self { self } 12 | var localizedTitle: LocalizedStringResource { 13 | switch self { 14 | case .handToHand: "Hand to hand" 15 | case .handToGround: "Hand to ground" 16 | } 17 | } 18 | } 19 | ``` 20 | 21 | ``` 22 | @AppStorage("mode") var mode: 🪄Mode = .handToHand 23 | ``` 24 | 25 | ``` 26 | Section { 27 | Picker("Mode", selection: self.$model.mode) { 28 | ForEach(🪄Mode.allCases) { 29 | Text($0.localizedTitle) 30 | } 31 | } 32 | } 33 | ``` 34 | 35 | ``` 36 | let heightLineEntity = Entity() 37 | let groundPointEntity: Entity = { 38 | let radius: Float = 0.03 39 | let value = ModelEntity(mesh: .generateSphere(radius: radius), 40 | materials: [SimpleMaterial(color: .yellow, isMetallic: false)]) 41 | let occlusion = ModelEntity(mesh: .generateCylinder(height: radius, radius: radius), 42 | materials: [OcclusionMaterial()]) 43 | occlusion.position.y -= radius / 2 44 | value.addChild(occlusion) 45 | return value 46 | }() 47 | ``` 48 | 49 | ``` 50 | guard let rightPosition = self.indexTipEntities[.right]?.position else { 51 | assertionFailure(); return 52 | } 53 | self.heightLineEntity.position = (self.groundPointEntity.position + rightPosition) / 2 54 | self.heightLineEntity.components.set( 55 | ModelComponent(mesh: .generateBox(width: 0.01, 56 | height: 0.01, 57 | depth: distance(self.groundPointEntity.position, rightPosition), 58 | cornerRadius: 0.005), 59 | materials: [SimpleMaterial(color: .white, isMetallic: false)]) 60 | ) 61 | self.heightLineEntity.look(at: self.groundPointEntity.position, 62 | from: self.heightLineEntity.position, 63 | relativeTo: nil) 64 | self.heightLineEntity.addChild(ModelEntity(mesh: .generateSphere(radius: 0.08), 65 | materials: [OcclusionMaterial()])) 66 | ``` 67 | -------------------------------------------------------------------------------- /Tracking Streamer/Supporting files/Assets.xcassets/.DS_Store: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Improbable-AI/VisionProTeleop/19c2eebdea50ed6851375a0e48b11eec84b9816d/Tracking Streamer/Supporting files/Assets.xcassets/.DS_Store -------------------------------------------------------------------------------- /Tracking Streamer/Supporting files/Assets.xcassets/AccentColor.colorset/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "colors" : [ 3 | { 4 | "idiom" : "universal" 5 | }, 6 | { 7 | "appearances" : [ 8 | { 9 | "appearance" : "luminosity", 10 | "value" : "dark" 11 | } 12 | ], 13 | "idiom" : "universal" 14 | } 15 | ], 16 | "info" : { 17 | "author" : "xcode", 18 | "version" : 1 19 | } 20 | } 21 | -------------------------------------------------------------------------------- /Tracking Streamer/Supporting files/Assets.xcassets/AppIcon.solidimagestack/Back.solidimagestacklayer/Content.imageset/Background.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Improbable-AI/VisionProTeleop/19c2eebdea50ed6851375a0e48b11eec84b9816d/Tracking Streamer/Supporting files/Assets.xcassets/AppIcon.solidimagestack/Back.solidimagestacklayer/Content.imageset/Background.png 
-------------------------------------------------------------------------------- /Tracking Streamer/Supporting files/Assets.xcassets/AppIcon.solidimagestack/Back.solidimagestacklayer/Content.imageset/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "images" : [ 3 | { 4 | "filename" : "Background.png", 5 | "idiom" : "vision", 6 | "scale" : "2x" 7 | } 8 | ], 9 | "info" : { 10 | "author" : "xcode", 11 | "version" : 1 12 | } 13 | } 14 | -------------------------------------------------------------------------------- /Tracking Streamer/Supporting files/Assets.xcassets/AppIcon.solidimagestack/Back.solidimagestacklayer/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "info" : { 3 | "author" : "xcode", 4 | "version" : 1 5 | } 6 | } 7 | -------------------------------------------------------------------------------- /Tracking Streamer/Supporting files/Assets.xcassets/AppIcon.solidimagestack/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "info" : { 3 | "author" : "xcode", 4 | "version" : 1 5 | }, 6 | "layers" : [ 7 | { 8 | "filename" : "Front.solidimagestacklayer" 9 | }, 10 | { 11 | "filename" : "Middle.solidimagestacklayer" 12 | }, 13 | { 14 | "filename" : "Back.solidimagestacklayer" 15 | } 16 | ] 17 | } 18 | -------------------------------------------------------------------------------- /Tracking Streamer/Supporting files/Assets.xcassets/AppIcon.solidimagestack/Front.solidimagestacklayer/Content.imageset/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "images" : [ 3 | { 4 | "filename" : "lab_logo_inverted.png", 5 | "idiom" : "vision", 6 | "scale" : "2x" 7 | } 8 | ], 9 | "info" : { 10 | "author" : "xcode", 11 | "version" : 1 12 | } 13 | } 14 | -------------------------------------------------------------------------------- /Tracking Streamer/Supporting files/Assets.xcassets/AppIcon.solidimagestack/Front.solidimagestacklayer/Content.imageset/lab_logo_inverted.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Improbable-AI/VisionProTeleop/19c2eebdea50ed6851375a0e48b11eec84b9816d/Tracking Streamer/Supporting files/Assets.xcassets/AppIcon.solidimagestack/Front.solidimagestacklayer/Content.imageset/lab_logo_inverted.png -------------------------------------------------------------------------------- /Tracking Streamer/Supporting files/Assets.xcassets/AppIcon.solidimagestack/Front.solidimagestacklayer/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "info" : { 3 | "author" : "xcode", 4 | "version" : 1 5 | } 6 | } 7 | -------------------------------------------------------------------------------- /Tracking Streamer/Supporting files/Assets.xcassets/AppIcon.solidimagestack/Middle.solidimagestacklayer/Content.imageset/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "images" : [ 3 | { 4 | "idiom" : "vision", 5 | "scale" : "2x" 6 | } 7 | ], 8 | "info" : { 9 | "author" : "xcode", 10 | "version" : 1 11 | } 12 | } 13 | -------------------------------------------------------------------------------- /Tracking Streamer/Supporting files/Assets.xcassets/AppIcon.solidimagestack/Middle.solidimagestacklayer/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 
| "info" : { 3 | "author" : "xcode", 4 | "version" : 1 5 | } 6 | } 7 | -------------------------------------------------------------------------------- /Tracking Streamer/Supporting files/Assets.xcassets/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "info" : { 3 | "author" : "xcode", 4 | "version" : 1 5 | } 6 | } 7 | -------------------------------------------------------------------------------- /Tracking Streamer/Supporting files/Assets.xcassets/Image.imageset/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "images" : [ 3 | { 4 | "idiom" : "universal", 5 | "scale" : "1x" 6 | }, 7 | { 8 | "idiom" : "universal", 9 | "scale" : "2x" 10 | }, 11 | { 12 | "idiom" : "universal", 13 | "scale" : "3x" 14 | }, 15 | { 16 | "idiom" : "vision", 17 | "scale" : "2x" 18 | } 19 | ], 20 | "info" : { 21 | "author" : "xcode", 22 | "version" : 1 23 | } 24 | } 25 | -------------------------------------------------------------------------------- /Tracking Streamer/Supporting files/Assets.xcassets/graph2.imageset/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "images" : [ 3 | { 4 | "filename" : "diagram_visionpro.png", 5 | "idiom" : "universal" 6 | } 7 | ], 8 | "info" : { 9 | "author" : "xcode", 10 | "version" : 1 11 | } 12 | } 13 | -------------------------------------------------------------------------------- /Tracking Streamer/Supporting files/Assets.xcassets/graph2.imageset/diagram_visionpro.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Improbable-AI/VisionProTeleop/19c2eebdea50ed6851375a0e48b11eec84b9816d/Tracking Streamer/Supporting files/Assets.xcassets/graph2.imageset/diagram_visionpro.png -------------------------------------------------------------------------------- /Tracking Streamer/Supporting files/Assets.xcassets/lab_logo_inverted.imageset/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "images" : [ 3 | { 4 | "filename" : "lab_logo_inverted.png", 5 | "idiom" : "universal", 6 | "scale" : "1x" 7 | }, 8 | { 9 | "idiom" : "universal", 10 | "scale" : "2x" 11 | }, 12 | { 13 | "idiom" : "universal", 14 | "scale" : "3x" 15 | } 16 | ], 17 | "info" : { 18 | "author" : "xcode", 19 | "version" : 1 20 | } 21 | } 22 | -------------------------------------------------------------------------------- /Tracking Streamer/Supporting files/Assets.xcassets/lab_logo_inverted.imageset/lab_logo_inverted.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Improbable-AI/VisionProTeleop/19c2eebdea50ed6851375a0e48b11eec84b9816d/Tracking Streamer/Supporting files/Assets.xcassets/lab_logo_inverted.imageset/lab_logo_inverted.png -------------------------------------------------------------------------------- /Tracking Streamer/Supporting files/Info.plist: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | ITSAppUsesNonExemptEncryption 6 | 7 | NSHandsTrackingUsageDescription 8 | This app tracks your hand to teleoperate a robot. 9 | NSWorldSensingUsageDescription 10 | This app senses your surronuding world to reconstruct a simulation. 
11 | UIApplicationSceneManifest 12 | 13 | UIApplicationPreferredDefaultSceneSessionRole 14 | UIWindowSceneSessionRoleApplication 15 | UIApplicationSupportsMultipleScenes 16 | 17 | UISceneConfigurations 18 | 19 | 20 | 21 | 22 | -------------------------------------------------------------------------------- /Tracking Streamer/Supporting files/InfoPlist.xcstrings: -------------------------------------------------------------------------------- 1 | { 2 | "sourceLanguage" : "en", 3 | "strings" : { 4 | "CFBundleDisplayName" : { 5 | "comment" : "Bundle display name", 6 | "extractionState" : "extracted_with_value", 7 | "localizations" : { 8 | "en" : { 9 | "stringUnit" : { 10 | "state" : "new", 11 | "value" : "Tracking Streamer" 12 | } 13 | } 14 | } 15 | }, 16 | "CFBundleName" : { 17 | "comment" : "Bundle name", 18 | "extractionState" : "extracted_with_value", 19 | "localizations" : { 20 | "en" : { 21 | "stringUnit" : { 22 | "state" : "new", 23 | "value" : "Tracking Streamer" 24 | } 25 | }, 26 | "ja" : { 27 | "stringUnit" : { 28 | "state" : "needs_review", 29 | "value" : "手がメジャー" 30 | } 31 | } 32 | } 33 | } 34 | }, 35 | "version" : "1.0" 36 | } -------------------------------------------------------------------------------- /Tracking Streamer/Supporting files/Localizable.xcstrings: -------------------------------------------------------------------------------- 1 | { 2 | "sourceLanguage" : "en", 3 | "strings" : { 4 | "About" : { 5 | "extractionState" : "stale", 6 | "localizations" : { 7 | "ja" : { 8 | "stringUnit" : { 9 | "state" : "translated", 10 | "value" : "アプリについて" 11 | } 12 | } 13 | } 14 | }, 15 | "Exit" : { 16 | "localizations" : { 17 | "ja" : { 18 | "stringUnit" : { 19 | "state" : "translated", 20 | "value" : "終了" 21 | } 22 | } 23 | } 24 | }, 25 | "Fix / Unfix a pointer by indirect tap." : { 26 | "extractionState" : "stale", 27 | "localizations" : { 28 | "ja" : { 29 | "stringUnit" : { 30 | "state" : "translated", 31 | "value" : "間接タップでポインターを固定できます。" 32 | } 33 | } 34 | } 35 | }, 36 | "Hand tracking authorization:" : { 37 | "extractionState" : "stale", 38 | "localizations" : { 39 | "ja" : { 40 | "stringUnit" : { 41 | "state" : "translated", 42 | "value" : "ハンドトラッキング許可状況:" 43 | } 44 | } 45 | } 46 | }, 47 | "HandsWidth" : { 48 | "extractionState" : "stale", 49 | "localizations" : { 50 | "ja" : { 51 | "stringUnit" : { 52 | "state" : "needs_review", 53 | "value" : "手がメジャー" 54 | } 55 | } 56 | } 57 | }, 58 | "Measurement of the distance between the fingers." 
: { 59 | "extractionState" : "stale", 60 | "localizations" : { 61 | "ja" : { 62 | "stringUnit" : { 63 | "state" : "translated", 64 | "value" : "指と指の間の距離を測ります。" 65 | } 66 | } 67 | } 68 | }, 69 | "Setting" : { 70 | "extractionState" : "stale", 71 | "localizations" : { 72 | "ja" : { 73 | "stringUnit" : { 74 | "state" : "translated", 75 | "value" : "設定" 76 | } 77 | } 78 | } 79 | }, 80 | "Start" : { 81 | "localizations" : { 82 | "ja" : { 83 | "stringUnit" : { 84 | "state" : "translated", 85 | "value" : "開始" 86 | } 87 | } 88 | } 89 | }, 90 | "Unit" : { 91 | "localizations" : { 92 | "ja" : { 93 | "stringUnit" : { 94 | "state" : "translated", 95 | "value" : "単位" 96 | } 97 | } 98 | } 99 | }, 100 | "You're on IP address [%@]" : { 101 | 102 | } 103 | }, 104 | "version" : "1.0" 105 | } -------------------------------------------------------------------------------- /Tracking Streamer/Supporting files/README assets/appstore_badge.svg: -------------------------------------------------------------------------------- 1 | 2 | Download_on_the_App_Store_Badge_US-UK_RGB_blk_4SVG_092917 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | 42 | 43 | 44 | 45 | 46 | 47 | -------------------------------------------------------------------------------- /Tracking Streamer/Supporting files/README assets/icon.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Improbable-AI/VisionProTeleop/19c2eebdea50ed6851375a0e48b11eec84b9816d/Tracking Streamer/Supporting files/README assets/icon.png -------------------------------------------------------------------------------- /Tracking Streamer/Supporting files/README assets/screenshot1280w.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Improbable-AI/VisionProTeleop/19c2eebdea50ed6851375a0e48b11eec84b9816d/Tracking Streamer/Supporting files/README assets/screenshot1280w.jpg -------------------------------------------------------------------------------- /Tracking Streamer/Supporting files/README assets/screenshot1280w2.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Improbable-AI/VisionProTeleop/19c2eebdea50ed6851375a0e48b11eec84b9816d/Tracking Streamer/Supporting files/README assets/screenshot1280w2.jpg -------------------------------------------------------------------------------- /Tracking Streamer/Supporting files/README assets/screenshot1280w3.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Improbable-AI/VisionProTeleop/19c2eebdea50ed6851375a0e48b11eec84b9816d/Tracking Streamer/Supporting files/README assets/screenshot1280w3.jpg -------------------------------------------------------------------------------- /Tracking Streamer/Supporting files/sound1.m4a: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Improbable-AI/VisionProTeleop/19c2eebdea50ed6851375a0e48b11eec84b9816d/Tracking Streamer/Supporting files/sound1.m4a -------------------------------------------------------------------------------- /Tracking Streamer/Supporting files/sound2.m4a: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/Improbable-AI/VisionProTeleop/19c2eebdea50ed6851375a0e48b11eec84b9816d/Tracking Streamer/Supporting files/sound2.m4a -------------------------------------------------------------------------------- /Tracking Streamer/VisionProTeleop.entitlements: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | com.apple.developer.networking.networkextension 6 | 7 | 8 | 9 | -------------------------------------------------------------------------------- /Tracking Streamer/🌐RealityView.swift: -------------------------------------------------------------------------------- 1 | import SwiftUI 2 | import RealityKit 3 | import ARKit 4 | 5 | struct 🌐RealityView: View { 6 | var model: 🥽AppModel 7 | var body: some View { 8 | RealityView { content, attachments in 9 | let resultLabelEntity = attachments.entity(for: Self.attachmentID)! 10 | resultLabelEntity.components.set(🧑HeadTrackingComponent()) 11 | resultLabelEntity.name = 🧩Name.resultLabel 12 | } attachments: { 13 | Attachment(id: Self.attachmentID) { 14 | } 15 | } 16 | .gesture( 17 | TapGesture() 18 | .targetedToAnyEntity() 19 | ) 20 | .task { self.model.run() } 21 | .task { await self.model.processDeviceAnchorUpdates() } 22 | .task { self.model.startserver() } 23 | .task(priority: .low) { await self.model.processReconstructionUpdates() } 24 | } 25 | static let attachmentID: String = "resultLabel" 26 | } 27 | 28 | 29 | 30 | -------------------------------------------------------------------------------- /Tracking Streamer/📏Unit.swift: -------------------------------------------------------------------------------- 1 | import Foundation 2 | 3 | enum 📏Unit: String { 4 | case centiMeters, meters, inches, feet, yards 5 | } 6 | 7 | extension 📏Unit: CaseIterable, Identifiable { 8 | var id: Self { self } 9 | var value: UnitLength { 10 | switch self { 11 | case .centiMeters: .centimeters 12 | case .meters: .meters 13 | case .inches: .inches 14 | case .feet: .feet 15 | case .yards: .yards 16 | } 17 | } 18 | } 19 | -------------------------------------------------------------------------------- /Tracking Streamer/🛠️Menu/🛠️MenuTop.swift: -------------------------------------------------------------------------------- 1 | import SwiftUI 2 | 3 | struct 🛠️MenuTop: View { 4 | // @EnvironmentObject var model: 🥽AppModel 5 | @Environment(\.dismissImmersiveSpace) var dismissImmersiveSpace 6 | var body: some View { 7 | VStack(spacing: 24) { 8 | HStack(spacing: 28) { 9 | Button { 10 | Task { await self.dismissImmersiveSpace() } 11 | } label: { 12 | HStack(spacing: 8) { 13 | Image(systemName: "escape") 14 | .imageScale(.small) 15 | Text("Exit") 16 | } 17 | .font(.title.weight(.regular)) 18 | .padding(.vertical, 12) 19 | .padding(.horizontal, 20) 20 | } 21 | .buttonStyle(.plain) 22 | .glassBackgroundEffect()} 23 | } 24 | } 25 | } 26 | 27 | -------------------------------------------------------------------------------- /Tracking Streamer/🛠️Menu/🛠️Panel.swift: -------------------------------------------------------------------------------- 1 | enum 🛠️Panel { 2 | case setting, about 3 | } 4 | -------------------------------------------------------------------------------- /Tracking Streamer/🛠️Menu/🛠️SettingPanel.swift: -------------------------------------------------------------------------------- 1 | import SwiftUI 2 | 3 | struct 🛠️SettingPanel: View { 4 | // @EnvironmentObject var model: 🥽AppModel 5 | var body: some View { 6 | VStack(spacing: 24) { 7 | HStack { 8 | Spacer() 9 | Text("Unit") 10 | 
.font(.largeTitle.weight(.semibold)) 11 | Spacer() 12 | } 13 | .frame(height: 60) 14 | } 15 | } 16 | } 17 | -------------------------------------------------------------------------------- /Tracking Streamer/🥽AppModel.swift: -------------------------------------------------------------------------------- 1 | import SwiftUI 2 | import RealityKit 3 | import ARKit 4 | import GRPC 5 | import NIO 6 | 7 | struct Skeleton { 8 | var joints: [simd_float4x4] 9 | 10 | init() { 11 | // Initialize the joints array with 25 identity matrices (wrist + 24 finger joints) 12 | self.joints = Array(repeating: simd_float4x4(1), count: 25) 13 | } 14 | } 15 | 16 | struct HandTrackingData { 17 | var leftWrist: simd_float4x4 = simd_float4x4(1) 18 | var rightWrist: simd_float4x4 = simd_float4x4(1) 19 | var leftSkeleton: Skeleton = Skeleton() 20 | var rightSkeleton: Skeleton = Skeleton() 21 | var Head: simd_float4x4 = simd_float4x4(1) 22 | } 23 | 24 | class DataManager { 25 | static let shared = DataManager() 26 | 27 | var latestHandTrackingData: HandTrackingData = HandTrackingData() 28 | 29 | private init() {} 30 | } 31 | 32 | 33 | @MainActor 34 | class 🥽AppModel: ObservableObject { 35 | @AppStorage("unit") var unit: 📏Unit = .meters 36 | @Published private(set) var authorizationStatus: ARKitSession.AuthorizationStatus? 37 | 38 | private let session = ARKitSession() 39 | private let handTracking = HandTrackingProvider() 40 | private let worldTracking = WorldTrackingProvider() 41 | private let sceneReconstruction = SceneReconstructionProvider() 42 | 43 | } 44 | 45 | extension 🥽AppModel { 46 | 47 | func run() { 48 | #if targetEnvironment(simulator) 49 | print("Hand tracking is not supported in the simulator.") 50 | #else 51 | 52 | Task { 53 | @MainActor in 54 | do { 55 | try await self.session.run([self.handTracking, self.worldTracking, self.sceneReconstruction]) 56 | await self.processHandUpdates() 57 | } catch { 58 | print(error) 59 | } 60 | } 61 | #endif 62 | } 63 | 64 | func startserver() { 65 | Task { startServer() } 66 | } 67 | 68 | 69 | } 70 | 71 | extension 🥽AppModel { 72 | 73 | @MainActor 74 | func run_device_tracking(function: () async -> Void, withFrequency hz: UInt64) async { 75 | while true { 76 | if Task.isCancelled { 77 | return 78 | } 79 | 80 | // Sleep for 1 s / hz before calling the function. 81 | let nanoSecondsToSleep: UInt64 = NSEC_PER_SEC / hz 82 | do { 83 | try await Task.sleep(nanoseconds: nanoSecondsToSleep) 84 | } catch { 85 | // Sleep fails when the Task is cancelled. Exit the loop.
86 | return 87 | } 88 | 89 | await function() 90 | } 91 | } 92 | 93 | @MainActor 94 | func processDeviceAnchorUpdates() async { 95 | await run_device_tracking(function: self.queryAndProcessLatestDeviceAnchor, withFrequency: 90) 96 | } 97 | 98 | func processReconstructionUpdates() async { 99 | for await update in sceneReconstruction.anchorUpdates { 100 | print("reconstruction update") 101 | let meshAnchor = update.anchor 102 | let mesh_description = meshAnchor.geometry.description 103 | print(mesh_description) 104 | } 105 | } 106 | 107 | 108 | @MainActor 109 | private func queryAndProcessLatestDeviceAnchor() async { 110 | // Device anchors are only available when the provider is running. 111 | guard worldTracking.state == .running else { return } 112 | 113 | let deviceAnchor = worldTracking.queryDeviceAnchor(atTimestamp: CACurrentMediaTime()) 114 | print(" *** device tracking running ") 115 | // print(deviceAnchor?.originFromAnchorTransform) 116 | guard let deviceAnchor, deviceAnchor.isTracked else { return } 117 | DataManager.shared.latestHandTrackingData.Head = deviceAnchor.originFromAnchorTransform 118 | } 119 | 120 | private func processHandUpdates() async { 121 | for await update in self.handTracking.anchorUpdates { 122 | let handAnchor = update.anchor 123 | print("processHandUpdates is running.") 124 | switch handAnchor.chirality { 125 | case .left: 126 | DispatchQueue.main.async { 127 | DataManager.shared.latestHandTrackingData.leftWrist = handAnchor.originFromAnchorTransform 128 | print(handAnchor.originFromAnchorTransform) 129 | 130 | 131 | let jointTypes: [HandSkeleton.JointName] = [ 132 | .wrist, 133 | .thumbKnuckle, .thumbIntermediateBase, .thumbIntermediateTip, .thumbTip, 134 | .indexFingerMetacarpal, .indexFingerKnuckle, .indexFingerIntermediateBase, .indexFingerIntermediateTip, .indexFingerTip, 135 | .middleFingerMetacarpal, .middleFingerKnuckle, .middleFingerIntermediateBase, .middleFingerIntermediateTip, .middleFingerTip, 136 | .ringFingerMetacarpal, .ringFingerKnuckle, .ringFingerIntermediateBase, .ringFingerIntermediateTip, .ringFingerTip, 137 | .littleFingerMetacarpal, .littleFingerKnuckle, .littleFingerIntermediateBase, .littleFingerIntermediateTip, .littleFingerTip, 138 | ] 139 | 140 | for (index, jointType) in jointTypes.enumerated() { 141 | guard let joint = handAnchor.handSkeleton?.joint(jointType), joint.isTracked else { 142 | continue 143 | } 144 | DataManager.shared.latestHandTrackingData.leftSkeleton.joints[index] = joint.anchorFromJointTransform 145 | } 146 | 147 | print("Updated left hand skeleton") 148 | // Repeat for right hand and other fingers as needed 149 | } 150 | 151 | case .right: 152 | DispatchQueue.main.async { 153 | DataManager.shared.latestHandTrackingData.rightWrist = handAnchor.originFromAnchorTransform 154 | print(handAnchor.originFromAnchorTransform) 155 | 156 | let jointTypes: [HandSkeleton.JointName] = [ 157 | .wrist, 158 | .thumbKnuckle, .thumbIntermediateBase, .thumbIntermediateTip, .thumbTip, 159 | .indexFingerMetacarpal, .indexFingerKnuckle, .indexFingerIntermediateBase, .indexFingerIntermediateTip, .indexFingerTip, 160 | .middleFingerMetacarpal, .middleFingerKnuckle, .middleFingerIntermediateBase, .middleFingerIntermediateTip, .middleFingerTip, 161 | .ringFingerMetacarpal, .ringFingerKnuckle, .ringFingerIntermediateBase, .ringFingerIntermediateTip, .ringFingerTip, 162 | .littleFingerMetacarpal, .littleFingerKnuckle, .littleFingerIntermediateBase, .littleFingerIntermediateTip, .littleFingerTip, 163 | ] 164 | 165 | for (index, jointType)
in jointTypes.enumerated() { 166 | guard let joint = handAnchor.handSkeleton?.joint(jointType), joint.isTracked else { 167 | continue 168 | } 169 | print(index) 170 | DataManager.shared.latestHandTrackingData.rightSkeleton.joints[index] = joint.anchorFromJointTransform 171 | } 172 | 173 | print("Updated right hand skeleton") 174 | } 175 | } 176 | 177 | } 178 | 179 | 180 | } 181 | } 182 | 183 | 184 | 185 | class HandTrackingServiceProvider: Handtracking_HandTrackingServiceProvider { 186 | 187 | var interceptors: Handtracking_HandTrackingServiceServerInterceptorFactoryProtocol? 188 | 189 | nonisolated func streamHandUpdates( 190 | request: Handtracking_HandUpdate, 191 | context: StreamingResponseCallContext<Handtracking_HandUpdate> 192 | ) -> EventLoopFuture<GRPCStatus> { 193 | let eventLoop = context.eventLoop 194 | print("hey...") 195 | // Example task to simulate sending hand tracking data. 196 | // In a real application, you would replace this with actual data collection and streaming. 197 | let task = eventLoop.scheduleRepeatedAsyncTask(initialDelay: .milliseconds(10), delay: .milliseconds(10)) { task -> EventLoopFuture<Void> in 198 | // var handUpdate = Handtracking_HandUpdate() 199 | 200 | let recent_hand = fill_handUpdate() 201 | print("sending...") 202 | 203 | // Send the update to the client. 204 | return context.sendResponse(recent_hand).map { _ in } 205 | } 206 | 207 | // Ensure the task is cancelled when the client disconnects or the stream is otherwise closed. 208 | context.statusPromise.futureResult.whenComplete { _ in task.cancel() } 209 | 210 | // Return a future that will complete when the streaming operation is done. 211 | // Here, we're indicating that the stream will remain open indefinitely until the client disconnects. 212 | return eventLoop.makePromise(of: GRPCStatus.self).futureResult 213 | } 214 | } 215 | 216 | func startServer() { 217 | DispatchQueue.global().async { 218 | 219 | let port = 12345 220 | let host = "0.0.0.0" 221 | 222 | let group = MultiThreadedEventLoopGroup(numberOfThreads: 2) 223 | defer { 224 | try! group.syncShutdownGracefully() 225 | } 226 | 227 | let provider = HandTrackingServiceProvider() 228 | 229 | let server = GRPC.Server.insecure(group: group) 230 | .withServiceProviders([provider]) 231 | .bind(host: host, port: port) 232 | 233 | server.map { 234 | $0.channel.localAddress 235 | }.whenSuccess { address in 236 | print("server started on \(address!) \(address!.port!) ") 237 | } 238 | 239 | // Wait on the server's `onClose` future to stop the program from exiting. 240 | _ = try!
server.flatMap { 241 | $0.onClose 242 | }.wait() 243 | } 244 | } 245 | 246 | func fill_handUpdate() -> Handtracking_HandUpdate { 247 | var handUpdate = Handtracking_HandUpdate() 248 | 249 | // Assuming DataManager provides an ordered list/array of joints for leftSkeleton and rightSkeleton 250 | let leftJoints = DataManager.shared.latestHandTrackingData.leftSkeleton.joints // Your actual data structure access method might differ 251 | let rightJoints = DataManager.shared.latestHandTrackingData.rightSkeleton.joints 252 | let leftWrist = DataManager.shared.latestHandTrackingData.leftWrist 253 | let rightWrist = DataManager.shared.latestHandTrackingData.rightWrist 254 | let Head = DataManager.shared.latestHandTrackingData.Head 255 | 256 | 257 | handUpdate.leftHand.wristMatrix = createMatrix4x4(from: leftWrist) 258 | handUpdate.rightHand.wristMatrix = createMatrix4x4(from: rightWrist) 259 | handUpdate.head = createMatrix4x4(from: Head) 260 | 261 | // Fill left hand joints 262 | for (index, jointMatrix) in leftJoints.enumerated() { 263 | let matrix = createMatrix4x4(from: jointMatrix) 264 | if index < handUpdate.leftHand.skeleton.jointMatrices.count { 265 | handUpdate.leftHand.skeleton.jointMatrices[index] = matrix 266 | } else { 267 | handUpdate.leftHand.skeleton.jointMatrices.append(matrix) 268 | } 269 | } 270 | 271 | // Fill right hand joints 272 | for (index, jointMatrix) in rightJoints.enumerated() { 273 | let matrix = createMatrix4x4(from: jointMatrix) 274 | if index < handUpdate.rightHand.skeleton.jointMatrices.count { 275 | handUpdate.rightHand.skeleton.jointMatrices[index] = matrix 276 | } else { 277 | handUpdate.rightHand.skeleton.jointMatrices.append(matrix) 278 | } 279 | } 280 | 281 | return handUpdate 282 | } 283 | 284 | 285 | 286 | func createMatrix4x4(from jointMatrix: simd_float4x4) -> Handtracking_Matrix4x4 { 287 | var matrix = Handtracking_Matrix4x4() 288 | matrix.m00 = Float(jointMatrix.columns.0.x) 289 | matrix.m01 = Float(jointMatrix.columns.1.x) 290 | matrix.m02 = Float(jointMatrix.columns.2.x) 291 | matrix.m03 = Float(jointMatrix.columns.3.x) 292 | matrix.m10 = Float(jointMatrix.columns.0.y) 293 | matrix.m11 = Float(jointMatrix.columns.1.y) 294 | matrix.m12 = Float(jointMatrix.columns.2.y) 295 | matrix.m13 = Float(jointMatrix.columns.3.y) 296 | matrix.m20 = Float(jointMatrix.columns.0.z) 297 | matrix.m21 = Float(jointMatrix.columns.1.z) 298 | matrix.m22 = Float(jointMatrix.columns.2.z) 299 | matrix.m23 = Float(jointMatrix.columns.3.z) 300 | matrix.m30 = Float(jointMatrix.columns.0.w) 301 | matrix.m31 = Float(jointMatrix.columns.1.w) 302 | matrix.m32 = Float(jointMatrix.columns.2.w) 303 | matrix.m33 = Float(jointMatrix.columns.3.w) 304 | return matrix 305 | } 306 | -------------------------------------------------------------------------------- /Tracking Streamer/🧑HeadTrackingComponent&System.swift: -------------------------------------------------------------------------------- 1 | import RealityKit 2 | import ARKit 3 | import SwiftUI 4 | 5 | struct 🧑HeadTrackingComponent: Component, Codable { 6 | init() {} 7 | } 8 | 9 | struct 🧑HeadTrackingSystem: System { 10 | private static let query = EntityQuery(where: .has(🧑HeadTrackingComponent.self)) 11 | 12 | private let session = ARKitSession() 13 | private let provider = WorldTrackingProvider() 14 | 15 | init(scene: RealityKit.Scene) { 16 | self.setUpSession() 17 | } 18 | 19 | private func setUpSession() { 20 | Task { 21 | do { 22 | try await self.session.run([self.provider]) 23 | } catch { 24 | assertionFailure() 25 | } 26 | } 
27 | } 28 | 29 | func update(context: SceneUpdateContext) { 30 | let entities = context.scene.performQuery(Self.query).map { $0 } 31 | 32 | guard !entities.isEmpty, 33 | let deviceAnchor = self.provider.queryDeviceAnchor(atTimestamp: CACurrentMediaTime()) else { return } 34 | 35 | let cameraTransform = Transform(matrix: deviceAnchor.originFromAnchorTransform) 36 | 37 | for entity in entities { 38 | entity.look(at: cameraTransform.translation, 39 | from: entity.position(relativeTo: nil), 40 | relativeTo: nil, 41 | forward: .positiveZ) 42 | } 43 | } 44 | } 45 | 46 | -------------------------------------------------------------------------------- /Tracking Streamer/🧩Model.swift: -------------------------------------------------------------------------------- 1 | import RealityKit 2 | 3 | enum 🧩Model { 4 | static func fingerTip(_ selected: Bool = false) -> ModelComponent { 5 | .init(mesh: .generateSphere(radius: 0.005), 6 | materials: [SimpleMaterial(color: selected ? .red : .blue, 7 | isMetallic: true)]) 8 | } 9 | static func line(_ length: Float) -> ModelComponent { 10 | ModelComponent(mesh: .generateBox(width: 0.01, 11 | height: 0.01, 12 | depth: length, 13 | cornerRadius: 0.005), 14 | materials: [SimpleMaterial(color: .white, isMetallic: true)]) 15 | } 16 | } 17 | -------------------------------------------------------------------------------- /Tracking Streamer/🧩Name.swift: -------------------------------------------------------------------------------- 1 | enum 🧩Name { 2 | static let fingerLeft = "fingerLeft" 3 | static let fingerRight = "fingerRight" 4 | static let line = "line" 5 | static let resultLabel = "resultLabel" 6 | } 7 | -------------------------------------------------------------------------------- /__pycache__/handtracking_pb2.cpython-38.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Improbable-AI/VisionProTeleop/19c2eebdea50ed6851375a0e48b11eec84b9816d/__pycache__/handtracking_pb2.cpython-38.pyc -------------------------------------------------------------------------------- /__pycache__/handtracking_pb2_grpc.cpython-38.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Improbable-AI/VisionProTeleop/19c2eebdea50ed6851375a0e48b11eec84b9816d/__pycache__/handtracking_pb2_grpc.cpython-38.pyc -------------------------------------------------------------------------------- /assets/axis_convention.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Improbable-AI/VisionProTeleop/19c2eebdea50ed6851375a0e48b11eec84b9816d/assets/axis_convention.png -------------------------------------------------------------------------------- /assets/coord_system.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Improbable-AI/VisionProTeleop/19c2eebdea50ed6851375a0e48b11eec84b9816d/assets/coord_system.png -------------------------------------------------------------------------------- /assets/hand_skeleton_convention.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Improbable-AI/VisionProTeleop/19c2eebdea50ed6851375a0e48b11eec84b9816d/assets/hand_skeleton_convention.png -------------------------------------------------------------------------------- /assets/main.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/Improbable-AI/VisionProTeleop/19c2eebdea50ed6851375a0e48b11eec84b9816d/assets/main.png -------------------------------------------------------------------------------- /assets/short_paper.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Improbable-AI/VisionProTeleop/19c2eebdea50ed6851375a0e48b11eec84b9816d/assets/short_paper.pdf -------------------------------------------------------------------------------- /assets/short_paper_new.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Improbable-AI/VisionProTeleop/19c2eebdea50ed6851375a0e48b11eec84b9816d/assets/short_paper_new.pdf -------------------------------------------------------------------------------- /assets/visionpro_main.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Improbable-AI/VisionProTeleop/19c2eebdea50ed6851375a0e48b11eec84b9816d/assets/visionpro_main.png -------------------------------------------------------------------------------- /avp_stream/.DS_Store: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Improbable-AI/VisionProTeleop/19c2eebdea50ed6851375a0e48b11eec84b9816d/avp_stream/.DS_Store -------------------------------------------------------------------------------- /avp_stream/__init__.py: -------------------------------------------------------------------------------- 1 | 2 | from avp_stream.streamer import VisionProStreamer 3 | 4 | -------------------------------------------------------------------------------- /avp_stream/__pycache__/__init__.cpython-310.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Improbable-AI/VisionProTeleop/19c2eebdea50ed6851375a0e48b11eec84b9816d/avp_stream/__pycache__/__init__.cpython-310.pyc -------------------------------------------------------------------------------- /avp_stream/__pycache__/__init__.cpython-311.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Improbable-AI/VisionProTeleop/19c2eebdea50ed6851375a0e48b11eec84b9816d/avp_stream/__pycache__/__init__.cpython-311.pyc -------------------------------------------------------------------------------- /avp_stream/__pycache__/__init__.cpython-38.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Improbable-AI/VisionProTeleop/19c2eebdea50ed6851375a0e48b11eec84b9816d/avp_stream/__pycache__/__init__.cpython-38.pyc -------------------------------------------------------------------------------- /avp_stream/__pycache__/isaac_env.cpython-38.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Improbable-AI/VisionProTeleop/19c2eebdea50ed6851375a0e48b11eec84b9816d/avp_stream/__pycache__/isaac_env.cpython-38.pyc -------------------------------------------------------------------------------- /avp_stream/__pycache__/streamer.cpython-311.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Improbable-AI/VisionProTeleop/19c2eebdea50ed6851375a0e48b11eec84b9816d/avp_stream/__pycache__/streamer.cpython-311.pyc -------------------------------------------------------------------------------- 
/avp_stream/__pycache__/streamer.cpython-38.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Improbable-AI/VisionProTeleop/19c2eebdea50ed6851375a0e48b11eec84b9816d/avp_stream/__pycache__/streamer.cpython-38.pyc -------------------------------------------------------------------------------- /avp_stream/__pycache__/utils.cpython-38.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Improbable-AI/VisionProTeleop/19c2eebdea50ed6851375a0e48b11eec84b9816d/avp_stream/__pycache__/utils.cpython-38.pyc -------------------------------------------------------------------------------- /avp_stream/assets/huge_axis.urdf: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | -------------------------------------------------------------------------------- /avp_stream/assets/normal_axis.urdf: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | -------------------------------------------------------------------------------- /avp_stream/assets/small_axis.urdf: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | -------------------------------------------------------------------------------- /avp_stream/grpc_msg/__init__.py: -------------------------------------------------------------------------------- 1 | import avp_stream.grpc_msg.handtracking_pb2 as handtracking_pb2 2 | import avp_stream.grpc_msg.handtracking_pb2_grpc as handtracking_pb2_grpc -------------------------------------------------------------------------------- /avp_stream/grpc_msg/__pycache__/__init__.cpython-310.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Improbable-AI/VisionProTeleop/19c2eebdea50ed6851375a0e48b11eec84b9816d/avp_stream/grpc_msg/__pycache__/__init__.cpython-310.pyc -------------------------------------------------------------------------------- /avp_stream/grpc_msg/__pycache__/__init__.cpython-311.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Improbable-AI/VisionProTeleop/19c2eebdea50ed6851375a0e48b11eec84b9816d/avp_stream/grpc_msg/__pycache__/__init__.cpython-311.pyc -------------------------------------------------------------------------------- /avp_stream/grpc_msg/__pycache__/__init__.cpython-38.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Improbable-AI/VisionProTeleop/19c2eebdea50ed6851375a0e48b11eec84b9816d/avp_stream/grpc_msg/__pycache__/__init__.cpython-38.pyc -------------------------------------------------------------------------------- /avp_stream/grpc_msg/__pycache__/handtracking_pb2.cpython-310.pyc: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/Improbable-AI/VisionProTeleop/19c2eebdea50ed6851375a0e48b11eec84b9816d/avp_stream/grpc_msg/__pycache__/handtracking_pb2.cpython-310.pyc -------------------------------------------------------------------------------- /avp_stream/grpc_msg/__pycache__/handtracking_pb2.cpython-311.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Improbable-AI/VisionProTeleop/19c2eebdea50ed6851375a0e48b11eec84b9816d/avp_stream/grpc_msg/__pycache__/handtracking_pb2.cpython-311.pyc -------------------------------------------------------------------------------- /avp_stream/grpc_msg/__pycache__/handtracking_pb2.cpython-38.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Improbable-AI/VisionProTeleop/19c2eebdea50ed6851375a0e48b11eec84b9816d/avp_stream/grpc_msg/__pycache__/handtracking_pb2.cpython-38.pyc -------------------------------------------------------------------------------- /avp_stream/grpc_msg/__pycache__/handtracking_pb2_grpc.cpython-310.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Improbable-AI/VisionProTeleop/19c2eebdea50ed6851375a0e48b11eec84b9816d/avp_stream/grpc_msg/__pycache__/handtracking_pb2_grpc.cpython-310.pyc -------------------------------------------------------------------------------- /avp_stream/grpc_msg/__pycache__/handtracking_pb2_grpc.cpython-311.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Improbable-AI/VisionProTeleop/19c2eebdea50ed6851375a0e48b11eec84b9816d/avp_stream/grpc_msg/__pycache__/handtracking_pb2_grpc.cpython-311.pyc -------------------------------------------------------------------------------- /avp_stream/grpc_msg/__pycache__/handtracking_pb2_grpc.cpython-38.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Improbable-AI/VisionProTeleop/19c2eebdea50ed6851375a0e48b11eec84b9816d/avp_stream/grpc_msg/__pycache__/handtracking_pb2_grpc.cpython-38.pyc -------------------------------------------------------------------------------- /avp_stream/grpc_msg/handtracking.grpc.swift: -------------------------------------------------------------------------------- 1 | // 2 | // DO NOT EDIT. 3 | // swift-format-ignore-file 4 | // 5 | // Generated by the protocol buffer compiler. 6 | // Source: handtracking.proto 7 | // 8 | import GRPC 9 | import NIO 10 | import NIOConcurrencyHelpers 11 | import SwiftProtobuf 12 | 13 | 14 | /// The hand tracking service definition. 15 | /// 16 | /// Usage: instantiate `Handtracking_HandTrackingServiceClient`, then call methods of this protocol to make API calls. 17 | internal protocol Handtracking_HandTrackingServiceClientProtocol: GRPCClient { 18 | var serviceName: String { get } 19 | var interceptors: Handtracking_HandTrackingServiceClientInterceptorFactoryProtocol? 
{ get } 20 | 21 | func streamHandUpdates( 22 | _ request: Handtracking_HandUpdate, 23 | callOptions: CallOptions?, 24 | handler: @escaping (Handtracking_HandUpdate) -> Void 25 | ) -> ServerStreamingCall<Handtracking_HandUpdate, Handtracking_HandUpdate> 26 | } 27 | 28 | extension Handtracking_HandTrackingServiceClientProtocol { 29 | internal var serviceName: String { 30 | return "handtracking.HandTrackingService" 31 | } 32 | 33 | /// Server streaming call to StreamHandUpdates 34 | /// 35 | /// - Parameters: 36 | /// - request: Request to send to StreamHandUpdates. 37 | /// - callOptions: Call options. 38 | /// - handler: A closure called when each response is received from the server. 39 | /// - Returns: A `ServerStreamingCall` with futures for the metadata and status. 40 | internal func streamHandUpdates( 41 | _ request: Handtracking_HandUpdate, 42 | callOptions: CallOptions? = nil, 43 | handler: @escaping (Handtracking_HandUpdate) -> Void 44 | ) -> ServerStreamingCall<Handtracking_HandUpdate, Handtracking_HandUpdate> { 45 | return self.makeServerStreamingCall( 46 | path: Handtracking_HandTrackingServiceClientMetadata.Methods.streamHandUpdates.path, 47 | request: request, 48 | callOptions: callOptions ?? self.defaultCallOptions, 49 | interceptors: self.interceptors?.makeStreamHandUpdatesInterceptors() ?? [], 50 | handler: handler 51 | ) 52 | } 53 | } 54 | 55 | @available(*, deprecated) 56 | extension Handtracking_HandTrackingServiceClient: @unchecked Sendable {} 57 | 58 | @available(*, deprecated, renamed: "Handtracking_HandTrackingServiceNIOClient") 59 | internal final class Handtracking_HandTrackingServiceClient: Handtracking_HandTrackingServiceClientProtocol { 60 | private let lock = Lock() 61 | private var _defaultCallOptions: CallOptions 62 | private var _interceptors: Handtracking_HandTrackingServiceClientInterceptorFactoryProtocol? 63 | internal let channel: GRPCChannel 64 | internal var defaultCallOptions: CallOptions { 65 | get { self.lock.withLock { return self._defaultCallOptions } } 66 | set { self.lock.withLockVoid { self._defaultCallOptions = newValue } } 67 | } 68 | internal var interceptors: Handtracking_HandTrackingServiceClientInterceptorFactoryProtocol? { 69 | get { self.lock.withLock { return self._interceptors } } 70 | set { self.lock.withLockVoid { self._interceptors = newValue } } 71 | } 72 | 73 | /// Creates a client for the handtracking.HandTrackingService service. 74 | /// 75 | /// - Parameters: 76 | /// - channel: `GRPCChannel` to the service host. 77 | /// - defaultCallOptions: Options to use for each service call if the user doesn't provide them. 78 | /// - interceptors: A factory providing interceptors for each RPC. 79 | internal init( 80 | channel: GRPCChannel, 81 | defaultCallOptions: CallOptions = CallOptions(), 82 | interceptors: Handtracking_HandTrackingServiceClientInterceptorFactoryProtocol? = nil 83 | ) { 84 | self.channel = channel 85 | self._defaultCallOptions = defaultCallOptions 86 | self._interceptors = interceptors 87 | } 88 | } 89 | 90 | internal struct Handtracking_HandTrackingServiceNIOClient: Handtracking_HandTrackingServiceClientProtocol { 91 | internal var channel: GRPCChannel 92 | internal var defaultCallOptions: CallOptions 93 | internal var interceptors: Handtracking_HandTrackingServiceClientInterceptorFactoryProtocol? 94 | 95 | /// Creates a client for the handtracking.HandTrackingService service. 96 | /// 97 | /// - Parameters: 98 | /// - channel: `GRPCChannel` to the service host. 99 | /// - defaultCallOptions: Options to use for each service call if the user doesn't provide them.
100 | /// - interceptors: A factory providing interceptors for each RPC. 101 | internal init( 102 | channel: GRPCChannel, 103 | defaultCallOptions: CallOptions = CallOptions(), 104 | interceptors: Handtracking_HandTrackingServiceClientInterceptorFactoryProtocol? = nil 105 | ) { 106 | self.channel = channel 107 | self.defaultCallOptions = defaultCallOptions 108 | self.interceptors = interceptors 109 | } 110 | } 111 | 112 | /// The hand tracking service definition. 113 | @available(macOS 10.15, iOS 13, tvOS 13, watchOS 6, *) 114 | internal protocol Handtracking_HandTrackingServiceAsyncClientProtocol: GRPCClient { 115 | static var serviceDescriptor: GRPCServiceDescriptor { get } 116 | var interceptors: Handtracking_HandTrackingServiceClientInterceptorFactoryProtocol? { get } 117 | 118 | func makeStreamHandUpdatesCall( 119 | _ request: Handtracking_HandUpdate, 120 | callOptions: CallOptions? 121 | ) -> GRPCAsyncServerStreamingCall<Handtracking_HandUpdate, Handtracking_HandUpdate> 122 | } 123 | 124 | @available(macOS 10.15, iOS 13, tvOS 13, watchOS 6, *) 125 | extension Handtracking_HandTrackingServiceAsyncClientProtocol { 126 | internal static var serviceDescriptor: GRPCServiceDescriptor { 127 | return Handtracking_HandTrackingServiceClientMetadata.serviceDescriptor 128 | } 129 | 130 | internal var interceptors: Handtracking_HandTrackingServiceClientInterceptorFactoryProtocol? { 131 | return nil 132 | } 133 | 134 | internal func makeStreamHandUpdatesCall( 135 | _ request: Handtracking_HandUpdate, 136 | callOptions: CallOptions? = nil 137 | ) -> GRPCAsyncServerStreamingCall<Handtracking_HandUpdate, Handtracking_HandUpdate> { 138 | return self.makeAsyncServerStreamingCall( 139 | path: Handtracking_HandTrackingServiceClientMetadata.Methods.streamHandUpdates.path, 140 | request: request, 141 | callOptions: callOptions ?? self.defaultCallOptions, 142 | interceptors: self.interceptors?.makeStreamHandUpdatesInterceptors() ?? [] 143 | ) 144 | } 145 | } 146 | 147 | @available(macOS 10.15, iOS 13, tvOS 13, watchOS 6, *) 148 | extension Handtracking_HandTrackingServiceAsyncClientProtocol { 149 | internal func streamHandUpdates( 150 | _ request: Handtracking_HandUpdate, 151 | callOptions: CallOptions? = nil 152 | ) -> GRPCAsyncResponseStream<Handtracking_HandUpdate> { 153 | return self.performAsyncServerStreamingCall( 154 | path: Handtracking_HandTrackingServiceClientMetadata.Methods.streamHandUpdates.path, 155 | request: request, 156 | callOptions: callOptions ?? self.defaultCallOptions, 157 | interceptors: self.interceptors?.makeStreamHandUpdatesInterceptors() ?? [] 158 | ) 159 | } 160 | } 161 | 162 | @available(macOS 10.15, iOS 13, tvOS 13, watchOS 6, *) 163 | internal struct Handtracking_HandTrackingServiceAsyncClient: Handtracking_HandTrackingServiceAsyncClientProtocol { 164 | internal var channel: GRPCChannel 165 | internal var defaultCallOptions: CallOptions 166 | internal var interceptors: Handtracking_HandTrackingServiceClientInterceptorFactoryProtocol? 167 | 168 | internal init( 169 | channel: GRPCChannel, 170 | defaultCallOptions: CallOptions = CallOptions(), 171 | interceptors: Handtracking_HandTrackingServiceClientInterceptorFactoryProtocol? = nil 172 | ) { 173 | self.channel = channel 174 | self.defaultCallOptions = defaultCallOptions 175 | self.interceptors = interceptors 176 | } 177 | } 178 | 179 | internal protocol Handtracking_HandTrackingServiceClientInterceptorFactoryProtocol: Sendable { 180 | 181 | /// - Returns: Interceptors to use when invoking 'streamHandUpdates'.
182 | func makeStreamHandUpdatesInterceptors() -> [ClientInterceptor<Handtracking_HandUpdate, Handtracking_HandUpdate>] 183 | } 184 | 185 | internal enum Handtracking_HandTrackingServiceClientMetadata { 186 | internal static let serviceDescriptor = GRPCServiceDescriptor( 187 | name: "HandTrackingService", 188 | fullName: "handtracking.HandTrackingService", 189 | methods: [ 190 | Handtracking_HandTrackingServiceClientMetadata.Methods.streamHandUpdates, 191 | ] 192 | ) 193 | 194 | internal enum Methods { 195 | internal static let streamHandUpdates = GRPCMethodDescriptor( 196 | name: "StreamHandUpdates", 197 | path: "/handtracking.HandTrackingService/StreamHandUpdates", 198 | type: GRPCCallType.serverStreaming 199 | ) 200 | } 201 | } 202 | 203 | /// The hand tracking service definition. 204 | /// 205 | /// To build a server, implement a class that conforms to this protocol. 206 | internal protocol Handtracking_HandTrackingServiceProvider: CallHandlerProvider { 207 | var interceptors: Handtracking_HandTrackingServiceServerInterceptorFactoryProtocol? { get } 208 | 209 | func streamHandUpdates(request: Handtracking_HandUpdate, context: StreamingResponseCallContext<Handtracking_HandUpdate>) -> EventLoopFuture<GRPCStatus> 210 | } 211 | 212 | extension Handtracking_HandTrackingServiceProvider { 213 | internal var serviceName: Substring { 214 | return Handtracking_HandTrackingServiceServerMetadata.serviceDescriptor.fullName[...] 215 | } 216 | 217 | /// Determines, calls and returns the appropriate request handler, depending on the request's method. 218 | /// Returns nil for methods not handled by this service. 219 | internal func handle( 220 | method name: Substring, 221 | context: CallHandlerContext 222 | ) -> GRPCServerHandlerProtocol? { 223 | switch name { 224 | case "StreamHandUpdates": 225 | return ServerStreamingServerHandler( 226 | context: context, 227 | requestDeserializer: ProtobufDeserializer<Handtracking_HandUpdate>(), 228 | responseSerializer: ProtobufSerializer<Handtracking_HandUpdate>(), 229 | interceptors: self.interceptors?.makeStreamHandUpdatesInterceptors() ?? [], 230 | userFunction: self.streamHandUpdates(request:context:) 231 | ) 232 | 233 | default: 234 | return nil 235 | } 236 | } 237 | } 238 | 239 | /// The hand tracking service definition. 240 | /// 241 | /// To implement a server, implement an object which conforms to this protocol. 242 | @available(macOS 10.15, iOS 13, tvOS 13, watchOS 6, *) 243 | internal protocol Handtracking_HandTrackingServiceAsyncProvider: CallHandlerProvider, Sendable { 244 | static var serviceDescriptor: GRPCServiceDescriptor { get } 245 | var interceptors: Handtracking_HandTrackingServiceServerInterceptorFactoryProtocol? { get } 246 | 247 | func streamHandUpdates( 248 | request: Handtracking_HandUpdate, 249 | responseStream: GRPCAsyncResponseStreamWriter<Handtracking_HandUpdate>, 250 | context: GRPCAsyncServerCallContext 251 | ) async throws 252 | } 253 | 254 | @available(macOS 10.15, iOS 13, tvOS 13, watchOS 6, *) 255 | extension Handtracking_HandTrackingServiceAsyncProvider { 256 | internal static var serviceDescriptor: GRPCServiceDescriptor { 257 | return Handtracking_HandTrackingServiceServerMetadata.serviceDescriptor 258 | } 259 | 260 | internal var serviceName: Substring { 261 | return Handtracking_HandTrackingServiceServerMetadata.serviceDescriptor.fullName[...] 262 | } 263 | 264 | internal var interceptors: Handtracking_HandTrackingServiceServerInterceptorFactoryProtocol? { 265 | return nil 266 | } 267 | 268 | internal func handle( 269 | method name: Substring, 270 | context: CallHandlerContext 271 | ) -> GRPCServerHandlerProtocol?
{ 272 | switch name { 273 | case "StreamHandUpdates": 274 | return GRPCAsyncServerHandler( 275 | context: context, 276 | requestDeserializer: ProtobufDeserializer<Handtracking_HandUpdate>(), 277 | responseSerializer: ProtobufSerializer<Handtracking_HandUpdate>(), 278 | interceptors: self.interceptors?.makeStreamHandUpdatesInterceptors() ?? [], 279 | wrapping: { try await self.streamHandUpdates(request: $0, responseStream: $1, context: $2) } 280 | ) 281 | 282 | default: 283 | return nil 284 | } 285 | } 286 | } 287 | 288 | internal protocol Handtracking_HandTrackingServiceServerInterceptorFactoryProtocol: Sendable { 289 | 290 | /// - Returns: Interceptors to use when handling 'streamHandUpdates'. 291 | /// Defaults to calling `self.makeInterceptors()`. 292 | func makeStreamHandUpdatesInterceptors() -> [ServerInterceptor<Handtracking_HandUpdate, Handtracking_HandUpdate>] 293 | } 294 | 295 | internal enum Handtracking_HandTrackingServiceServerMetadata { 296 | internal static let serviceDescriptor = GRPCServiceDescriptor( 297 | name: "HandTrackingService", 298 | fullName: "handtracking.HandTrackingService", 299 | methods: [ 300 | Handtracking_HandTrackingServiceServerMetadata.Methods.streamHandUpdates, 301 | ] 302 | ) 303 | 304 | internal enum Methods { 305 | internal static let streamHandUpdates = GRPCMethodDescriptor( 306 | name: "StreamHandUpdates", 307 | path: "/handtracking.HandTrackingService/StreamHandUpdates", 308 | type: GRPCCallType.serverStreaming 309 | ) 310 | } 311 | } 312 | -------------------------------------------------------------------------------- /avp_stream/grpc_msg/handtracking.pb.swift: -------------------------------------------------------------------------------- 1 | // DO NOT EDIT. 2 | // swift-format-ignore-file 3 | // 4 | // Generated by the Swift generator plugin for the protocol buffer compiler. 5 | // Source: handtracking.proto 6 | // 7 | // For information on using the generated types, please see the documentation: 8 | // https://github.com/apple/swift-protobuf/ 9 | 10 | import Foundation 11 | import SwiftProtobuf 12 | 13 | // If the compiler emits an error on this type, it is because this file 14 | // was generated by a version of the `protoc` Swift plug-in that is 15 | // incompatible with the version of SwiftProtobuf to which you are linking. 16 | // Please ensure that you are building against the same version of the API 17 | // that was used to generate this file. 18 | fileprivate struct _GeneratedWithProtocGenSwiftVersion: SwiftProtobuf.ProtobufAPIVersionCheck { 19 | struct _2: SwiftProtobuf.ProtobufAPIVersion_2 {} 20 | typealias Version = _2 21 | } 22 | 23 | /// Represents a 4x4 transformation matrix for a joint 24 | struct Handtracking_Matrix4x4 { 25 | // SwiftProtobuf.Message conformance is added in an extension below. See the 26 | // `Message` and `Message+*Additions` files in the SwiftProtobuf library for 27 | // methods supported on all messages.
28 | 29 | var m00: Float = 0 30 | 31 | var m01: Float = 0 32 | 33 | var m02: Float = 0 34 | 35 | var m03: Float = 0 36 | 37 | var m10: Float = 0 38 | 39 | var m11: Float = 0 40 | 41 | var m12: Float = 0 42 | 43 | var m13: Float = 0 44 | 45 | var m20: Float = 0 46 | 47 | var m21: Float = 0 48 | 49 | var m22: Float = 0 50 | 51 | var m23: Float = 0 52 | 53 | var m30: Float = 0 54 | 55 | var m31: Float = 0 56 | 57 | var m32: Float = 0 58 | 59 | var m33: Float = 0 60 | 61 | var unknownFields = SwiftProtobuf.UnknownStorage() 62 | 63 | init() {} 64 | } 65 | 66 | /// The skeleton of a hand, comprising multiple 4x4 matrices (one per joint) 67 | struct Handtracking_Skeleton { 68 | // SwiftProtobuf.Message conformance is added in an extension below. See the 69 | // `Message` and `Message+*Additions` files in the SwiftProtobuf library for 70 | // methods supported on all messages. 71 | 72 | /// Array of 4x4 matrices, expecting 24 per hand based on your structure 73 | var jointMatrices: [Handtracking_Matrix4x4] = [] 74 | 75 | var unknownFields = SwiftProtobuf.UnknownStorage() 76 | 77 | init() {} 78 | } 79 | 80 | /// The hand tracking information, including the full 4x4 matrix for the wrist and the skeleton 81 | struct Handtracking_Hand { 82 | // SwiftProtobuf.Message conformance is added in an extension below. See the 83 | // `Message` and `Message+*Additions` files in the SwiftProtobuf library for 84 | // methods supported on all messages. 85 | 86 | /// 4x4 matrix for the wrist position and orientation 87 | var wristMatrix: Handtracking_Matrix4x4 { 88 | get {return _storage._wristMatrix ?? Handtracking_Matrix4x4()} 89 | set {_uniqueStorage()._wristMatrix = newValue} 90 | } 91 | /// Returns true if `wristMatrix` has been explicitly set. 92 | var hasWristMatrix: Bool {return _storage._wristMatrix != nil} 93 | /// Clears the value of `wristMatrix`. Subsequent reads from it will return its default value. 94 | mutating func clearWristMatrix() {_uniqueStorage()._wristMatrix = nil} 95 | 96 | /// The hand's skeleton 97 | var skeleton: Handtracking_Skeleton { 98 | get {return _storage._skeleton ?? Handtracking_Skeleton()} 99 | set {_uniqueStorage()._skeleton = newValue} 100 | } 101 | /// Returns true if `skeleton` has been explicitly set. 102 | var hasSkeleton: Bool {return _storage._skeleton != nil} 103 | /// Clears the value of `skeleton`. Subsequent reads from it will return its default value. 104 | mutating func clearSkeleton() {_uniqueStorage()._skeleton = nil} 105 | 106 | var unknownFields = SwiftProtobuf.UnknownStorage() 107 | 108 | init() {} 109 | 110 | fileprivate var _storage = _StorageClass.defaultInstance 111 | } 112 | 113 | /// The overall hand update message, including data for both hands 114 | struct Handtracking_HandUpdate { 115 | // SwiftProtobuf.Message conformance is added in an extension below. See the 116 | // `Message` and `Message+*Additions` files in the SwiftProtobuf library for 117 | // methods supported on all messages. 118 | 119 | var leftHand: Handtracking_Hand { 120 | get {return _storage._leftHand ?? Handtracking_Hand()} 121 | set {_uniqueStorage()._leftHand = newValue} 122 | } 123 | /// Returns true if `leftHand` has been explicitly set. 124 | var hasLeftHand: Bool {return _storage._leftHand != nil} 125 | /// Clears the value of `leftHand`. Subsequent reads from it will return its default value. 126 | mutating func clearLeftHand() {_uniqueStorage()._leftHand = nil} 127 | 128 | var rightHand: Handtracking_Hand { 129 | get {return _storage._rightHand ?? 
Handtracking_Hand()} 130 | set {_uniqueStorage()._rightHand = newValue} 131 | } 132 | /// Returns true if `rightHand` has been explicitly set. 133 | var hasRightHand: Bool {return _storage._rightHand != nil} 134 | /// Clears the value of `rightHand`. Subsequent reads from it will return its default value. 135 | mutating func clearRightHand() {_uniqueStorage()._rightHand = nil} 136 | 137 | var head: Handtracking_Matrix4x4 { 138 | get {return _storage._head ?? Handtracking_Matrix4x4()} 139 | set {_uniqueStorage()._head = newValue} 140 | } 141 | /// Returns true if `head` has been explicitly set. 142 | var hasHead: Bool {return _storage._head != nil} 143 | /// Clears the value of `head`. Subsequent reads from it will return its default value. 144 | mutating func clearHead() {_uniqueStorage()._head = nil} 145 | 146 | var unknownFields = SwiftProtobuf.UnknownStorage() 147 | 148 | init() {} 149 | 150 | fileprivate var _storage = _StorageClass.defaultInstance 151 | } 152 | 153 | /// Acknowledgement message for hand updates 154 | struct Handtracking_HandUpdateAck { 155 | // SwiftProtobuf.Message conformance is added in an extension below. See the 156 | // `Message` and `Message+*Additions` files in the SwiftProtobuf library for 157 | // methods supported on all messages. 158 | 159 | var message: String = String() 160 | 161 | var unknownFields = SwiftProtobuf.UnknownStorage() 162 | 163 | init() {} 164 | } 165 | 166 | #if swift(>=5.5) && canImport(_Concurrency) 167 | extension Handtracking_Matrix4x4: @unchecked Sendable {} 168 | extension Handtracking_Skeleton: @unchecked Sendable {} 169 | extension Handtracking_Hand: @unchecked Sendable {} 170 | extension Handtracking_HandUpdate: @unchecked Sendable {} 171 | extension Handtracking_HandUpdateAck: @unchecked Sendable {} 172 | #endif // swift(>=5.5) && canImport(_Concurrency) 173 | 174 | // MARK: - Code below here is support for the SwiftProtobuf runtime. 175 | 176 | fileprivate let _protobuf_package = "handtracking" 177 | 178 | extension Handtracking_Matrix4x4: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { 179 | static let protoMessageName: String = _protobuf_package + ".Matrix4x4" 180 | static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ 181 | 1: .same(proto: "m00"), 182 | 2: .same(proto: "m01"), 183 | 3: .same(proto: "m02"), 184 | 4: .same(proto: "m03"), 185 | 5: .same(proto: "m10"), 186 | 6: .same(proto: "m11"), 187 | 7: .same(proto: "m12"), 188 | 8: .same(proto: "m13"), 189 | 9: .same(proto: "m20"), 190 | 10: .same(proto: "m21"), 191 | 11: .same(proto: "m22"), 192 | 12: .same(proto: "m23"), 193 | 13: .same(proto: "m30"), 194 | 14: .same(proto: "m31"), 195 | 15: .same(proto: "m32"), 196 | 16: .same(proto: "m33"), 197 | ] 198 | 199 | mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws { 200 | while let fieldNumber = try decoder.nextFieldNumber() { 201 | // The use of inline closures is to circumvent an issue where the compiler 202 | // allocates stack space for every case branch when no optimizations are 203 | // enabled.
https://github.com/apple/swift-protobuf/issues/1034 204 | switch fieldNumber { 205 | case 1: try { try decoder.decodeSingularFloatField(value: &self.m00) }() 206 | case 2: try { try decoder.decodeSingularFloatField(value: &self.m01) }() 207 | case 3: try { try decoder.decodeSingularFloatField(value: &self.m02) }() 208 | case 4: try { try decoder.decodeSingularFloatField(value: &self.m03) }() 209 | case 5: try { try decoder.decodeSingularFloatField(value: &self.m10) }() 210 | case 6: try { try decoder.decodeSingularFloatField(value: &self.m11) }() 211 | case 7: try { try decoder.decodeSingularFloatField(value: &self.m12) }() 212 | case 8: try { try decoder.decodeSingularFloatField(value: &self.m13) }() 213 | case 9: try { try decoder.decodeSingularFloatField(value: &self.m20) }() 214 | case 10: try { try decoder.decodeSingularFloatField(value: &self.m21) }() 215 | case 11: try { try decoder.decodeSingularFloatField(value: &self.m22) }() 216 | case 12: try { try decoder.decodeSingularFloatField(value: &self.m23) }() 217 | case 13: try { try decoder.decodeSingularFloatField(value: &self.m30) }() 218 | case 14: try { try decoder.decodeSingularFloatField(value: &self.m31) }() 219 | case 15: try { try decoder.decodeSingularFloatField(value: &self.m32) }() 220 | case 16: try { try decoder.decodeSingularFloatField(value: &self.m33) }() 221 | default: break 222 | } 223 | } 224 | } 225 | 226 | func traverse(visitor: inout V) throws { 227 | if self.m00 != 0 { 228 | try visitor.visitSingularFloatField(value: self.m00, fieldNumber: 1) 229 | } 230 | if self.m01 != 0 { 231 | try visitor.visitSingularFloatField(value: self.m01, fieldNumber: 2) 232 | } 233 | if self.m02 != 0 { 234 | try visitor.visitSingularFloatField(value: self.m02, fieldNumber: 3) 235 | } 236 | if self.m03 != 0 { 237 | try visitor.visitSingularFloatField(value: self.m03, fieldNumber: 4) 238 | } 239 | if self.m10 != 0 { 240 | try visitor.visitSingularFloatField(value: self.m10, fieldNumber: 5) 241 | } 242 | if self.m11 != 0 { 243 | try visitor.visitSingularFloatField(value: self.m11, fieldNumber: 6) 244 | } 245 | if self.m12 != 0 { 246 | try visitor.visitSingularFloatField(value: self.m12, fieldNumber: 7) 247 | } 248 | if self.m13 != 0 { 249 | try visitor.visitSingularFloatField(value: self.m13, fieldNumber: 8) 250 | } 251 | if self.m20 != 0 { 252 | try visitor.visitSingularFloatField(value: self.m20, fieldNumber: 9) 253 | } 254 | if self.m21 != 0 { 255 | try visitor.visitSingularFloatField(value: self.m21, fieldNumber: 10) 256 | } 257 | if self.m22 != 0 { 258 | try visitor.visitSingularFloatField(value: self.m22, fieldNumber: 11) 259 | } 260 | if self.m23 != 0 { 261 | try visitor.visitSingularFloatField(value: self.m23, fieldNumber: 12) 262 | } 263 | if self.m30 != 0 { 264 | try visitor.visitSingularFloatField(value: self.m30, fieldNumber: 13) 265 | } 266 | if self.m31 != 0 { 267 | try visitor.visitSingularFloatField(value: self.m31, fieldNumber: 14) 268 | } 269 | if self.m32 != 0 { 270 | try visitor.visitSingularFloatField(value: self.m32, fieldNumber: 15) 271 | } 272 | if self.m33 != 0 { 273 | try visitor.visitSingularFloatField(value: self.m33, fieldNumber: 16) 274 | } 275 | try unknownFields.traverse(visitor: &visitor) 276 | } 277 | 278 | static func ==(lhs: Handtracking_Matrix4x4, rhs: Handtracking_Matrix4x4) -> Bool { 279 | if lhs.m00 != rhs.m00 {return false} 280 | if lhs.m01 != rhs.m01 {return false} 281 | if lhs.m02 != rhs.m02 {return false} 282 | if lhs.m03 != rhs.m03 {return false} 283 | if lhs.m10 != rhs.m10 {return false} 
284 | if lhs.m11 != rhs.m11 {return false} 285 | if lhs.m12 != rhs.m12 {return false} 286 | if lhs.m13 != rhs.m13 {return false} 287 | if lhs.m20 != rhs.m20 {return false} 288 | if lhs.m21 != rhs.m21 {return false} 289 | if lhs.m22 != rhs.m22 {return false} 290 | if lhs.m23 != rhs.m23 {return false} 291 | if lhs.m30 != rhs.m30 {return false} 292 | if lhs.m31 != rhs.m31 {return false} 293 | if lhs.m32 != rhs.m32 {return false} 294 | if lhs.m33 != rhs.m33 {return false} 295 | if lhs.unknownFields != rhs.unknownFields {return false} 296 | return true 297 | } 298 | } 299 | 300 | extension Handtracking_Skeleton: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { 301 | static let protoMessageName: String = _protobuf_package + ".Skeleton" 302 | static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ 303 | 1: .same(proto: "jointMatrices"), 304 | ] 305 | 306 | mutating func decodeMessage(decoder: inout D) throws { 307 | while let fieldNumber = try decoder.nextFieldNumber() { 308 | // The use of inline closures is to circumvent an issue where the compiler 309 | // allocates stack space for every case branch when no optimizations are 310 | // enabled. https://github.com/apple/swift-protobuf/issues/1034 311 | switch fieldNumber { 312 | case 1: try { try decoder.decodeRepeatedMessageField(value: &self.jointMatrices) }() 313 | default: break 314 | } 315 | } 316 | } 317 | 318 | func traverse(visitor: inout V) throws { 319 | if !self.jointMatrices.isEmpty { 320 | try visitor.visitRepeatedMessageField(value: self.jointMatrices, fieldNumber: 1) 321 | } 322 | try unknownFields.traverse(visitor: &visitor) 323 | } 324 | 325 | static func ==(lhs: Handtracking_Skeleton, rhs: Handtracking_Skeleton) -> Bool { 326 | if lhs.jointMatrices != rhs.jointMatrices {return false} 327 | if lhs.unknownFields != rhs.unknownFields {return false} 328 | return true 329 | } 330 | } 331 | 332 | extension Handtracking_Hand: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { 333 | static let protoMessageName: String = _protobuf_package + ".Hand" 334 | static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ 335 | 1: .same(proto: "wristMatrix"), 336 | 2: .same(proto: "skeleton"), 337 | ] 338 | 339 | fileprivate class _StorageClass { 340 | var _wristMatrix: Handtracking_Matrix4x4? = nil 341 | var _skeleton: Handtracking_Skeleton? = nil 342 | 343 | static let defaultInstance = _StorageClass() 344 | 345 | private init() {} 346 | 347 | init(copying source: _StorageClass) { 348 | _wristMatrix = source._wristMatrix 349 | _skeleton = source._skeleton 350 | } 351 | } 352 | 353 | fileprivate mutating func _uniqueStorage() -> _StorageClass { 354 | if !isKnownUniquelyReferenced(&_storage) { 355 | _storage = _StorageClass(copying: _storage) 356 | } 357 | return _storage 358 | } 359 | 360 | mutating func decodeMessage(decoder: inout D) throws { 361 | _ = _uniqueStorage() 362 | try withExtendedLifetime(_storage) { (_storage: _StorageClass) in 363 | while let fieldNumber = try decoder.nextFieldNumber() { 364 | // The use of inline closures is to circumvent an issue where the compiler 365 | // allocates stack space for every case branch when no optimizations are 366 | // enabled. 
https://github.com/apple/swift-protobuf/issues/1034 367 | switch fieldNumber { 368 | case 1: try { try decoder.decodeSingularMessageField(value: &_storage._wristMatrix) }() 369 | case 2: try { try decoder.decodeSingularMessageField(value: &_storage._skeleton) }() 370 | default: break 371 | } 372 | } 373 | } 374 | } 375 | 376 | func traverse(visitor: inout V) throws { 377 | try withExtendedLifetime(_storage) { (_storage: _StorageClass) in 378 | // The use of inline closures is to circumvent an issue where the compiler 379 | // allocates stack space for every if/case branch local when no optimizations 380 | // are enabled. https://github.com/apple/swift-protobuf/issues/1034 and 381 | // https://github.com/apple/swift-protobuf/issues/1182 382 | try { if let v = _storage._wristMatrix { 383 | try visitor.visitSingularMessageField(value: v, fieldNumber: 1) 384 | } }() 385 | try { if let v = _storage._skeleton { 386 | try visitor.visitSingularMessageField(value: v, fieldNumber: 2) 387 | } }() 388 | } 389 | try unknownFields.traverse(visitor: &visitor) 390 | } 391 | 392 | static func ==(lhs: Handtracking_Hand, rhs: Handtracking_Hand) -> Bool { 393 | if lhs._storage !== rhs._storage { 394 | let storagesAreEqual: Bool = withExtendedLifetime((lhs._storage, rhs._storage)) { (_args: (_StorageClass, _StorageClass)) in 395 | let _storage = _args.0 396 | let rhs_storage = _args.1 397 | if _storage._wristMatrix != rhs_storage._wristMatrix {return false} 398 | if _storage._skeleton != rhs_storage._skeleton {return false} 399 | return true 400 | } 401 | if !storagesAreEqual {return false} 402 | } 403 | if lhs.unknownFields != rhs.unknownFields {return false} 404 | return true 405 | } 406 | } 407 | 408 | extension Handtracking_HandUpdate: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { 409 | static let protoMessageName: String = _protobuf_package + ".HandUpdate" 410 | static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ 411 | 1: .standard(proto: "left_hand"), 412 | 2: .standard(proto: "right_hand"), 413 | 3: .same(proto: "Head"), 414 | ] 415 | 416 | fileprivate class _StorageClass { 417 | var _leftHand: Handtracking_Hand? = nil 418 | var _rightHand: Handtracking_Hand? = nil 419 | var _head: Handtracking_Matrix4x4? = nil 420 | 421 | static let defaultInstance = _StorageClass() 422 | 423 | private init() {} 424 | 425 | init(copying source: _StorageClass) { 426 | _leftHand = source._leftHand 427 | _rightHand = source._rightHand 428 | _head = source._head 429 | } 430 | } 431 | 432 | fileprivate mutating func _uniqueStorage() -> _StorageClass { 433 | if !isKnownUniquelyReferenced(&_storage) { 434 | _storage = _StorageClass(copying: _storage) 435 | } 436 | return _storage 437 | } 438 | 439 | mutating func decodeMessage(decoder: inout D) throws { 440 | _ = _uniqueStorage() 441 | try withExtendedLifetime(_storage) { (_storage: _StorageClass) in 442 | while let fieldNumber = try decoder.nextFieldNumber() { 443 | // The use of inline closures is to circumvent an issue where the compiler 444 | // allocates stack space for every case branch when no optimizations are 445 | // enabled. 
https://github.com/apple/swift-protobuf/issues/1034 446 | switch fieldNumber { 447 | case 1: try { try decoder.decodeSingularMessageField(value: &_storage._leftHand) }() 448 | case 2: try { try decoder.decodeSingularMessageField(value: &_storage._rightHand) }() 449 | case 3: try { try decoder.decodeSingularMessageField(value: &_storage._head) }() 450 | default: break 451 | } 452 | } 453 | } 454 | } 455 | 456 | func traverse(visitor: inout V) throws { 457 | try withExtendedLifetime(_storage) { (_storage: _StorageClass) in 458 | // The use of inline closures is to circumvent an issue where the compiler 459 | // allocates stack space for every if/case branch local when no optimizations 460 | // are enabled. https://github.com/apple/swift-protobuf/issues/1034 and 461 | // https://github.com/apple/swift-protobuf/issues/1182 462 | try { if let v = _storage._leftHand { 463 | try visitor.visitSingularMessageField(value: v, fieldNumber: 1) 464 | } }() 465 | try { if let v = _storage._rightHand { 466 | try visitor.visitSingularMessageField(value: v, fieldNumber: 2) 467 | } }() 468 | try { if let v = _storage._head { 469 | try visitor.visitSingularMessageField(value: v, fieldNumber: 3) 470 | } }() 471 | } 472 | try unknownFields.traverse(visitor: &visitor) 473 | } 474 | 475 | static func ==(lhs: Handtracking_HandUpdate, rhs: Handtracking_HandUpdate) -> Bool { 476 | if lhs._storage !== rhs._storage { 477 | let storagesAreEqual: Bool = withExtendedLifetime((lhs._storage, rhs._storage)) { (_args: (_StorageClass, _StorageClass)) in 478 | let _storage = _args.0 479 | let rhs_storage = _args.1 480 | if _storage._leftHand != rhs_storage._leftHand {return false} 481 | if _storage._rightHand != rhs_storage._rightHand {return false} 482 | if _storage._head != rhs_storage._head {return false} 483 | return true 484 | } 485 | if !storagesAreEqual {return false} 486 | } 487 | if lhs.unknownFields != rhs.unknownFields {return false} 488 | return true 489 | } 490 | } 491 | 492 | extension Handtracking_HandUpdateAck: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { 493 | static let protoMessageName: String = _protobuf_package + ".HandUpdateAck" 494 | static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ 495 | 1: .same(proto: "message"), 496 | ] 497 | 498 | mutating func decodeMessage(decoder: inout D) throws { 499 | while let fieldNumber = try decoder.nextFieldNumber() { 500 | // The use of inline closures is to circumvent an issue where the compiler 501 | // allocates stack space for every case branch when no optimizations are 502 | // enabled. 
https://github.com/apple/swift-protobuf/issues/1034 503 | switch fieldNumber { 504 | case 1: try { try decoder.decodeSingularStringField(value: &self.message) }() 505 | default: break 506 | } 507 | } 508 | } 509 | 510 | func traverse(visitor: inout V) throws { 511 | if !self.message.isEmpty { 512 | try visitor.visitSingularStringField(value: self.message, fieldNumber: 1) 513 | } 514 | try unknownFields.traverse(visitor: &visitor) 515 | } 516 | 517 | static func ==(lhs: Handtracking_HandUpdateAck, rhs: Handtracking_HandUpdateAck) -> Bool { 518 | if lhs.message != rhs.message {return false} 519 | if lhs.unknownFields != rhs.unknownFields {return false} 520 | return true 521 | } 522 | } 523 | -------------------------------------------------------------------------------- /avp_stream/grpc_msg/handtracking.proto: -------------------------------------------------------------------------------- 1 | syntax = "proto3"; 2 | 3 | package handtracking; 4 | 5 | // Represents a 4x4 transformation matrix for a joint 6 | message Matrix4x4 { 7 | float m00 = 1; 8 | float m01 = 2; 9 | float m02 = 3; 10 | float m03 = 4; 11 | float m10 = 5; 12 | float m11 = 6; 13 | float m12 = 7; 14 | float m13 = 8; 15 | float m20 = 9; 16 | float m21 = 10; 17 | float m22 = 11; 18 | float m23 = 12; 19 | float m30 = 13; 20 | float m31 = 14; 21 | float m32 = 15; 22 | float m33 = 16; 23 | } 24 | 25 | // The skeleton of a hand, comprising multiple 4x4 matrices (one per joint) 26 | message Skeleton { 27 | repeated Matrix4x4 jointMatrices = 1; // Array of 4x4 matrices, expecting 24 per hand based on your structure 28 | } 29 | 30 | // The hand tracking information, including the full 4x4 matrix for the wrist and the skeleton 31 | message Hand { 32 | Matrix4x4 wristMatrix = 1; // 4x4 matrix for the wrist position and orientation 33 | Skeleton skeleton = 2; // The hand's skeleton 34 | } 35 | 36 | // The overall hand update message, including data for both hands 37 | message HandUpdate { 38 | Hand left_hand = 1; 39 | Hand right_hand = 2; 40 | Matrix4x4 Head = 3; 41 | } 42 | 43 | // The hand tracking service definition. 44 | service HandTrackingService { 45 | rpc StreamHandUpdates(HandUpdate) returns (stream HandUpdate) {} 46 | } 47 | 48 | // Acknowledgement message for hand updates 49 | message HandUpdateAck { 50 | string message = 1; 51 | } 52 | -------------------------------------------------------------------------------- /avp_stream/grpc_msg/handtracking_pb2.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # Generated by the protocol buffer compiler. DO NOT EDIT! 
3 | # source: handtracking.proto 4 | # Protobuf Python Version: 4.25.0 5 | """Generated protocol buffer code.""" 6 | from google.protobuf import descriptor as _descriptor 7 | from google.protobuf import descriptor_pool as _descriptor_pool 8 | from google.protobuf import symbol_database as _symbol_database 9 | from google.protobuf.internal import builder as _builder 10 | # @@protoc_insertion_point(imports) 11 | 12 | _sym_db = _symbol_database.Default() 13 | 14 | 15 | 16 | 17 | DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x12handtracking.proto\x12\x0chandtracking\"\xdb\x01\n\tMatrix4x4\x12\x0b\n\x03m00\x18\x01 \x01(\x02\x12\x0b\n\x03m01\x18\x02 \x01(\x02\x12\x0b\n\x03m02\x18\x03 \x01(\x02\x12\x0b\n\x03m03\x18\x04 \x01(\x02\x12\x0b\n\x03m10\x18\x05 \x01(\x02\x12\x0b\n\x03m11\x18\x06 \x01(\x02\x12\x0b\n\x03m12\x18\x07 \x01(\x02\x12\x0b\n\x03m13\x18\x08 \x01(\x02\x12\x0b\n\x03m20\x18\t \x01(\x02\x12\x0b\n\x03m21\x18\n \x01(\x02\x12\x0b\n\x03m22\x18\x0b \x01(\x02\x12\x0b\n\x03m23\x18\x0c \x01(\x02\x12\x0b\n\x03m30\x18\r \x01(\x02\x12\x0b\n\x03m31\x18\x0e \x01(\x02\x12\x0b\n\x03m32\x18\x0f \x01(\x02\x12\x0b\n\x03m33\x18\x10 \x01(\x02\":\n\x08Skeleton\x12.\n\rjointMatrices\x18\x01 \x03(\x0b\x32\x17.handtracking.Matrix4x4\"^\n\x04Hand\x12,\n\x0bwristMatrix\x18\x01 \x01(\x0b\x32\x17.handtracking.Matrix4x4\x12(\n\x08skeleton\x18\x02 \x01(\x0b\x32\x16.handtracking.Skeleton\"\x82\x01\n\nHandUpdate\x12%\n\tleft_hand\x18\x01 \x01(\x0b\x32\x12.handtracking.Hand\x12&\n\nright_hand\x18\x02 \x01(\x0b\x32\x12.handtracking.Hand\x12%\n\x04Head\x18\x03 \x01(\x0b\x32\x17.handtracking.Matrix4x4\" \n\rHandUpdateAck\x12\x0f\n\x07message\x18\x01 \x01(\t2b\n\x13HandTrackingService\x12K\n\x11StreamHandUpdates\x12\x18.handtracking.HandUpdate\x1a\x18.handtracking.HandUpdate\"\x00\x30\x01\x62\x06proto3') 18 | 19 | _globals = globals() 20 | _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) 21 | _builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'handtracking_pb2', _globals) 22 | if _descriptor._USE_C_DESCRIPTORS == False: 23 | DESCRIPTOR._options = None 24 | _globals['_MATRIX4X4']._serialized_start=37 25 | _globals['_MATRIX4X4']._serialized_end=256 26 | _globals['_SKELETON']._serialized_start=258 27 | _globals['_SKELETON']._serialized_end=316 28 | _globals['_HAND']._serialized_start=318 29 | _globals['_HAND']._serialized_end=412 30 | _globals['_HANDUPDATE']._serialized_start=415 31 | _globals['_HANDUPDATE']._serialized_end=545 32 | _globals['_HANDUPDATEACK']._serialized_start=547 33 | _globals['_HANDUPDATEACK']._serialized_end=579 34 | _globals['_HANDTRACKINGSERVICE']._serialized_start=581 35 | _globals['_HANDTRACKINGSERVICE']._serialized_end=679 36 | # @@protoc_insertion_point(module_scope) 37 | -------------------------------------------------------------------------------- /avp_stream/grpc_msg/handtracking_pb2_grpc.py: -------------------------------------------------------------------------------- 1 | # Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! 2 | """Client and server classes corresponding to protobuf-defined services.""" 3 | import grpc 4 | 5 | import avp_stream.grpc_msg.handtracking_pb2 as handtracking__pb2 6 | 7 | 8 | class HandTrackingServiceStub(object): 9 | """The hand tracking service definition. 10 | """ 11 | 12 | def __init__(self, channel): 13 | """Constructor. 14 | 15 | Args: 16 | channel: A grpc.Channel. 
17 | """ 18 | self.StreamHandUpdates = channel.unary_stream( 19 | '/handtracking.HandTrackingService/StreamHandUpdates', 20 | request_serializer=handtracking__pb2.HandUpdate.SerializeToString, 21 | response_deserializer=handtracking__pb2.HandUpdate.FromString, 22 | ) 23 | 24 | 25 | class HandTrackingServiceServicer(object): 26 | """The hand tracking service definition. 27 | """ 28 | 29 | def StreamHandUpdates(self, request, context): 30 | """Missing associated documentation comment in .proto file.""" 31 | context.set_code(grpc.StatusCode.UNIMPLEMENTED) 32 | context.set_details('Method not implemented!') 33 | raise NotImplementedError('Method not implemented!') 34 | 35 | 36 | def add_HandTrackingServiceServicer_to_server(servicer, server): 37 | rpc_method_handlers = { 38 | 'StreamHandUpdates': grpc.unary_stream_rpc_method_handler( 39 | servicer.StreamHandUpdates, 40 | request_deserializer=handtracking__pb2.HandUpdate.FromString, 41 | response_serializer=handtracking__pb2.HandUpdate.SerializeToString, 42 | ), 43 | } 44 | generic_handler = grpc.method_handlers_generic_handler( 45 | 'handtracking.HandTrackingService', rpc_method_handlers) 46 | server.add_generic_rpc_handlers((generic_handler,)) 47 | 48 | 49 | # This class is part of an EXPERIMENTAL API. 50 | class HandTrackingService(object): 51 | """The hand tracking service definition. 52 | """ 53 | 54 | @staticmethod 55 | def StreamHandUpdates(request, 56 | target, 57 | options=(), 58 | channel_credentials=None, 59 | call_credentials=None, 60 | insecure=False, 61 | compression=None, 62 | wait_for_ready=None, 63 | timeout=None, 64 | metadata=None): 65 | return grpc.experimental.unary_stream(request, target, '/handtracking.HandTrackingService/StreamHandUpdates', 66 | handtracking__pb2.HandUpdate.SerializeToString, 67 | handtracking__pb2.HandUpdate.FromString, 68 | options, channel_credentials, 69 | insecure, call_credentials, compression, wait_for_ready, timeout, metadata) 70 | -------------------------------------------------------------------------------- /avp_stream/isaac_env.py: -------------------------------------------------------------------------------- 1 | import isaacgym 2 | import torch 3 | from isaacgym import gymapi 4 | from isaacgym import gymutil 5 | from isaacgym import gymtorch 6 | 7 | import numpy as np 8 | import torch 9 | import time 10 | from pathlib import Path 11 | 12 | from avp_stream import VisionProStreamer 13 | from avp_stream.utils.isaac_utils import * 14 | from avp_stream.utils.se3_utils import * 15 | from avp_stream.utils.trn_constants import * 16 | from copy import deepcopy 17 | from typing import * 18 | 19 | CUR_PATH = Path(__file__).parent.resolve() 20 | 21 | class IsaacVisualizerEnv: 22 | 23 | def __init__(self, args): 24 | 25 | self.args = args 26 | 27 | # acquire gym interface 28 | self.gym = gymapi.acquire_gym() 29 | 30 | # set torch device 31 | self.device = 'cpu' # i'll just fix this to CUDA 32 | 33 | # configure sim 34 | self.sim_params = default_sim_params(use_gpu = True if self.device == 'cuda:0' else False) 35 | 36 | # create sim 37 | self.sim = self.gym.create_sim(0, 0, gymapi.SIM_PHYSX, self.sim_params) 38 | if self.sim is None: 39 | raise Exception("Failed to create sim") 40 | 41 | # load assets 42 | self.num_envs = 1 43 | 44 | # create viewer 45 | self.viewer = self.gym.create_viewer(self.sim, gymapi.CameraProperties()) 46 | if self.viewer is None: 47 | raise Exception("Failed to create viewer") 48 | 49 | # create env 50 | self._load_asset() 51 | self.create_env() 52 | 53 | # setup viewer camera 
54 | middle_env = self.num_envs // 2 55 | setup_viewer_camera(self.gym, self.envs[middle_env], self.viewer) 56 | 57 | # ==== prepare tensors ===== 58 | # from now on, we will use the tensor API that can run on CPU or GPU 59 | self.gym.prepare_sim(self.sim) 60 | self.initialize_tensors() 61 | 62 | 63 | 64 | def _load_asset(self): 65 | 66 | self.axis = load_axis(self.gym, self.sim, self.device, 'normal', f'{CUR_PATH}/assets') 67 | self.small_axis = load_axis(self.gym, self.sim, self.device, 'small', f'{CUR_PATH}/assets') 68 | self.huge_axis = load_axis(self.gym, self.sim, self.device, 'huge', f'{CUR_PATH}/assets') 69 | 70 | asset_options = gymapi.AssetOptions() 71 | asset_options.disable_gravity = True 72 | asset_options.fix_base_link = True 73 | self.sphere = self.gym.create_sphere(self.sim, 0.008, asset_options) 74 | 75 | 76 | def create_env(self): 77 | spacing = 1.0 78 | env_lower = gymapi.Vec3(-spacing, -spacing, 0.0) 79 | env_upper = gymapi.Vec3(spacing, spacing, spacing) 80 | 81 | plane_params = gymapi.PlaneParams() 82 | plane_params.normal = gymapi.Vec3(0, 0, 1) 83 | self.gym.add_ground(self.sim, plane_params) 84 | 85 | # create env 86 | self.envs = [] 87 | self.robot_actor_idxs_over_sim = [] 88 | self.env_side_actor_idxs_over_sim = [] 89 | 90 | for env_idx in range(self.num_envs): 91 | env = self.gym.create_env(self.sim, env_lower, env_upper, 1) 92 | self.envs.append(env) 93 | 94 | self.head_axis = self.gym.create_actor(env, self.axis, gymapi.Transform(), 'head', 0) 95 | 96 | self.right_wrist_axis = self.gym.create_actor(env, self.axis, gymapi.Transform(), 'right_wrist', 1) 97 | self.left_wrist_axis = self.gym.create_actor(env, self.axis, gymapi.Transform(), 'left_wrist', 2) 98 | 99 | 100 | # SPHERE 101 | for i in range(25): 102 | 103 | finger_1 = self.gym.create_actor(env, self.sphere, gymapi.Transform(), f'right_finger_{i}', 3 + i ) 104 | if i in [0, 4, 9, 14, 19, 24]: 105 | self.gym.set_rigid_body_color(env, finger_1, 0, gymapi.MESH_VISUAL_AND_COLLISION, gymapi.Vec3(1, 1, 0)) 106 | else: 107 | self.gym.set_rigid_body_color(env, finger_1, 0, gymapi.MESH_VISUAL_AND_COLLISION, gymapi.Vec3(1, 1, 1)) 108 | 109 | for i in range(25): 110 | finger_2 = self.gym.create_actor(env, self.sphere, gymapi.Transform(), f'left_finger_{i}', 28 + i ) 111 | 112 | if i in [0, 4, 9, 14, 19, 24]: 113 | self.gym.set_rigid_body_color(env, finger_2, 0, gymapi.MESH_VISUAL_AND_COLLISION, gymapi.Vec3(1, 1, 0)) 114 | else: 115 | self.gym.set_rigid_body_color(env, finger_2, 0, gymapi.MESH_VISUAL_AND_COLLISION, gymapi.Vec3(1, 1, 1)) 116 | 117 | # SMALL AXIS 118 | for i in range(25): 119 | finger_1 = self.gym.create_actor(env, self.small_axis, gymapi.Transform(), f'right_finger_{i}', 53 + i ) 120 | 121 | for i in range(25): 122 | finger_2 = self.gym.create_actor(env, self.small_axis, gymapi.Transform(), f'left_finger_{i}', 78 + i ) 123 | 124 | self.env_axis = self.gym.create_actor(env, self.huge_axis, gymapi.Transform(), 'env_axis', 103 ) 125 | 126 | 127 | def initialize_tensors(self): 128 | 129 | refresh_tensors(self.gym, self.sim) 130 | # get jacobian tensor 131 | # get rigid body state tensor 132 | _rb_states = self.gym.acquire_rigid_body_state_tensor(self.sim) 133 | self.rb_states = gymtorch.wrap_tensor(_rb_states).view(self.num_envs, -1, 13) 134 | 135 | # get actor root state tensor 136 | _root_states = self.gym.acquire_actor_root_state_tensor(self.sim) 137 | root_states = gymtorch.wrap_tensor(_root_states).view(self.num_envs, -1, 13) 138 | self.root_state = root_states 139 | 140 | self.gym.simulate(self.sim) 
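# (warm-up: this first simulate/fetch/draw cycle below initializes the viewer with the scene before streaming begins)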
141 | self.gym.fetch_results(self.sim, True) 142 | self.gym.step_graphics(self.sim) 143 | self.gym.draw_viewer(self.viewer, self.sim, False) 144 | self.gym.sync_frame_time(self.sim) 145 | 146 | # will be overloaded 147 | def step(self, transformation: Dict[str, torch.Tensor], sync_frame_time = False): 148 | 149 | self.simulate() 150 | 151 | new_root_state = self.modify_root_state(transformation) 152 | env_side_actor_idxs = torch.arange(0, 103, dtype = torch.int32) 153 | self.gym.set_actor_root_state_tensor_indexed(self.sim, gymtorch.unwrap_tensor(new_root_state), gymtorch.unwrap_tensor(env_side_actor_idxs), len(env_side_actor_idxs)) 154 | 155 | # update viewer 156 | self.render(sync_frame_time) 157 | 158 | def move_camera(self): 159 | 160 | head_xyz = self.visionos_head[:, :3, 3] 161 | head_ydir = self.visionos_head[:, :3, 1] 162 | 163 | cam_pos = head_xyz - head_ydir * 0.5 164 | cam_target = head_xyz + head_ydir * 0.5 165 | cam_target[..., -1] -= 0.2 166 | 167 | cam_pos = gymapi.Vec3(*cam_pos[0]) 168 | cam_target = gymapi.Vec3(*cam_target[0]) 169 | 170 | self.gym.viewer_camera_look_at(self.viewer, self.envs[0], cam_pos, cam_target) 171 | 172 | def simulate(self): 173 | # step the physics 174 | self.gym.simulate(self.sim) 175 | 176 | # refresh tensors 177 | refresh_tensors(self.gym, self.sim) 178 | 179 | 180 | def render(self, sync_frame_time = True): 181 | 182 | # update viewer 183 | if self.args.follow: 184 | self.move_camera() 185 | self.gym.step_graphics(self.sim) 186 | self.gym.draw_viewer(self.viewer, self.sim, False) 187 | if sync_frame_time: 188 | self.gym.sync_frame_time(self.sim) 189 | 190 | def modify_root_state(self, transformations): 191 | 192 | new_root_state = self.root_state 193 | 194 | self.visionos_head = transformations['head'] 195 | 196 | self.sim_right_wrist = transformations['right_wrist'] #@ VISIONOS_RIGHT_HAND_TO_LEAP 197 | self.sim_left_wrist = transformations['left_wrist'] # @ VISIONOS_LEFT_HAND_TO_LEAP 198 | 199 | sim_right_fingers = torch.cat([self.sim_right_wrist @ finger for finger in transformations['right_fingers']], dim = 0) 200 | sim_left_fingers = torch.cat([self.sim_left_wrist @ finger for finger in transformations['left_fingers']], dim = 0) 201 | 202 | self.sim_right_fingers = sim_right_fingers 203 | self.sim_left_fingers = sim_left_fingers 204 | 205 | new_root_state = deepcopy(self.root_state) 206 | new_root_state[:, 0, :7] = mat2posquat(self.visionos_head ) 207 | new_root_state[:, 1, :7] = mat2posquat(self.sim_right_wrist ) 208 | new_root_state[:, 2, :7] = mat2posquat(self.sim_left_wrist ) 209 | new_root_state[:, 3:28, :7] = mat2posquat(self.sim_right_fingers )# 210 | new_root_state[:, 28:53, :7] = mat2posquat(self.sim_left_fingers )# 211 | new_root_state[:, 53:78, :7] = mat2posquat(self.sim_right_fingers)# 212 | new_root_state[:, 78:103, :7] = mat2posquat(self.sim_left_fingers ) 213 | # new_root_state[:, 103, :7] = mat2posquat(transformed_wrist_right) 214 | new_root_state = new_root_state.view(-1, 13) 215 | 216 | return new_root_state 217 | 218 | 219 | def np2tensor(data: Dict[str, np.ndarray], device) -> Dict[str, torch.Tensor]: 220 | for key in data.keys(): 221 | data[key] = torch.tensor(data[key], dtype = torch.float32, device = device) 222 | return data 223 | 224 | 225 | if __name__=="__main__": 226 | 227 | import argparse 228 | import os 229 | 230 | parser = argparse.ArgumentParser() 231 | parser.add_argument('--ip', type = str, required = True) 232 | parser.add_argument('--record', action = 'store_true') 233 | parser.add_argument('--follow', 
action = 'store_true', help = "The viewpoint follows the user's head")
234 |     args = parser.parse_args()
235 | 
236 |     s = VisionProStreamer(args.ip, args.record)
237 | 
238 |     env = IsaacVisualizerEnv(args)
239 |     while True:
240 |         t0 = time.time()
241 |         latest = s.latest
242 |         env.step(np2tensor(latest, env.device))
243 |         print(time.time() - t0)
244 | 
245 | 
246 | 
--------------------------------------------------------------------------------
/avp_stream/streamer.py:
--------------------------------------------------------------------------------
1 | import grpc
2 | from avp_stream.grpc_msg import *
3 | from threading import Thread
4 | from avp_stream.utils.grpc_utils import *
5 | import time
6 | import numpy as np
7 | 
8 | 
9 | YUP2ZUP = np.array([[[1, 0, 0, 0],
10 |                      [0, 0, -1, 0],
11 |                      [0, 1, 0, 0],
12 |                      [0, 0, 0, 1]]], dtype = np.float64)
13 | 
14 | 
15 | class VisionProStreamer:
16 | 
17 |     def __init__(self, ip, record = True):
18 | 
19 |         # Vision Pro IP
20 |         self.ip = ip
21 |         self.record = record
22 |         self.recording = []
23 |         self.latest = None
24 |         self.axis_transform = YUP2ZUP
25 |         self.start_streaming()
26 | 
27 |     def start_streaming(self):
28 | 
29 |         stream_thread = Thread(target = self.stream)
30 |         stream_thread.start()
31 |         while self.latest is None:
32 |             pass  # spin until the background thread receives the first packet
33 |         print(' == DATA IS FLOWING IN! ==')
34 |         print('Ready to start streaming.')
35 | 
36 | 
37 |     def stream(self):
38 | 
39 |         request = handtracking_pb2.HandUpdate()
40 |         try:
41 |             with grpc.insecure_channel(f"{self.ip}:12345") as channel:
42 |                 stub = handtracking_pb2_grpc.HandTrackingServiceStub(channel)
43 |                 responses = stub.StreamHandUpdates(request)
44 |                 for response in responses:
45 |                     transformations = {
46 |                         "left_wrist": self.axis_transform @ process_matrix(response.left_hand.wristMatrix),
47 |                         "right_wrist": self.axis_transform @ process_matrix(response.right_hand.wristMatrix),
48 |                         "left_fingers": process_matrices(response.left_hand.skeleton.jointMatrices),
49 |                         "right_fingers": process_matrices(response.right_hand.skeleton.jointMatrices),
50 |                         "head": rotate_head(self.axis_transform @ process_matrix(response.Head)),
51 |                         "left_pinch_distance": get_pinch_distance(response.left_hand.skeleton.jointMatrices),
52 |                         "right_pinch_distance": get_pinch_distance(response.right_hand.skeleton.jointMatrices),
53 |                         # "rgb": response.rgb, # TODO: should figure out how to get the rgb image from vision pro
54 |                     }
55 |                     transformations["right_wrist_roll"] = get_wrist_roll(transformations["right_wrist"])
56 |                     transformations["left_wrist_roll"] = get_wrist_roll(transformations["left_wrist"])
57 |                     if self.record:
58 |                         self.recording.append(transformations)
59 |                     self.latest = transformations
60 | 
61 |         except Exception as e:
62 |             print(f"An error occurred: {e}")
63 |             pass
64 | 
65 |     def get_latest(self):
66 |         return self.latest
67 | 
68 |     def get_recording(self):
69 |         return self.recording
70 | 
71 | 
72 | if __name__ == "__main__":
73 | 
74 |     streamer = VisionProStreamer(ip = '10.29.230.57')
75 |     while True:
76 | 
77 |         latest = streamer.get_latest()
78 |         print(latest)
--------------------------------------------------------------------------------
/avp_stream/utils/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Improbable-AI/VisionProTeleop/19c2eebdea50ed6851375a0e48b11eec84b9816d/avp_stream/utils/__init__.py
--------------------------------------------------------------------------------
/avp_stream/utils/__pycache__/__init__.cpython-311.pyc:
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/Improbable-AI/VisionProTeleop/19c2eebdea50ed6851375a0e48b11eec84b9816d/avp_stream/utils/__pycache__/__init__.cpython-311.pyc -------------------------------------------------------------------------------- /avp_stream/utils/__pycache__/__init__.cpython-38.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Improbable-AI/VisionProTeleop/19c2eebdea50ed6851375a0e48b11eec84b9816d/avp_stream/utils/__pycache__/__init__.cpython-38.pyc -------------------------------------------------------------------------------- /avp_stream/utils/__pycache__/constants.cpython-38.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Improbable-AI/VisionProTeleop/19c2eebdea50ed6851375a0e48b11eec84b9816d/avp_stream/utils/__pycache__/constants.cpython-38.pyc -------------------------------------------------------------------------------- /avp_stream/utils/__pycache__/grpc_utils.cpython-311.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Improbable-AI/VisionProTeleop/19c2eebdea50ed6851375a0e48b11eec84b9816d/avp_stream/utils/__pycache__/grpc_utils.cpython-311.pyc -------------------------------------------------------------------------------- /avp_stream/utils/__pycache__/grpc_utils.cpython-38.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Improbable-AI/VisionProTeleop/19c2eebdea50ed6851375a0e48b11eec84b9816d/avp_stream/utils/__pycache__/grpc_utils.cpython-38.pyc -------------------------------------------------------------------------------- /avp_stream/utils/__pycache__/isaac_utils.cpython-38.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Improbable-AI/VisionProTeleop/19c2eebdea50ed6851375a0e48b11eec84b9816d/avp_stream/utils/__pycache__/isaac_utils.cpython-38.pyc -------------------------------------------------------------------------------- /avp_stream/utils/__pycache__/se3_utils.cpython-38.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Improbable-AI/VisionProTeleop/19c2eebdea50ed6851375a0e48b11eec84b9816d/avp_stream/utils/__pycache__/se3_utils.cpython-38.pyc -------------------------------------------------------------------------------- /avp_stream/utils/__pycache__/trn_constants.cpython-38.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Improbable-AI/VisionProTeleop/19c2eebdea50ed6851375a0e48b11eec84b9816d/avp_stream/utils/__pycache__/trn_constants.cpython-38.pyc -------------------------------------------------------------------------------- /avp_stream/utils/grpc_utils.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | from typing import * 3 | 4 | def process_matrix(message): 5 | m = np.array([[[message.m00, message.m01, message.m02, message.m03], 6 | [message.m10, message.m11, message.m12, message.m13], 7 | [message.m20, message.m21, message.m22, message.m23], 8 | [0, 0, 0, 1]]]) 9 | return m 10 | 11 | def process_matrices(skeleton, matrix = np.eye(4)): 12 | return np.concatenate([matrix @ process_matrix(joint) for joint in skeleton], axis = 0) 13 
| 14 | 15 | def rotate_head(R, degrees=-90): 16 | # Convert degrees to radians 17 | theta = np.radians(degrees) 18 | # Create the rotation matrix for rotating around the x-axis 19 | R_x = np.array([[ 20 | [1, 0, 0, 0], 21 | [0, np.cos(theta), -np.sin(theta), 0], 22 | [0, np.sin(theta), np.cos(theta), 0], 23 | [0, 0, 0, 1] 24 | ]]) 25 | R_rotated = R @ R_x 26 | return R_rotated 27 | 28 | 29 | def get_pinch_distance(finger_messages): 30 | fingers = process_matrices(finger_messages) 31 | thumb = fingers[4, :3, 3] 32 | index = fingers[9, :3, 3] 33 | 34 | return np.linalg.norm(thumb - index) 35 | 36 | def get_wrist_roll(mat): 37 | """ 38 | returns roll, pitch, yaw in radians 39 | """ 40 | R = mat[0, :3, :3] 41 | 42 | # Calculate angles for rotation around z and y axis to align the first column with [1, 0, 0] 43 | # Angle to rotate around z-axis to align the projection on the XY plane 44 | theta_z = np.arctan2(R[1, 0], R[0, 0]) # arctan2(y, x) 45 | 46 | # Rotate R around the z-axis by -theta_z to align its x-axis on the XY plane 47 | Rz = np.array([ 48 | [np.cos(-theta_z), -np.sin(-theta_z), 0], 49 | [np.sin(-theta_z), np.cos(-theta_z), 0], 50 | [0, 0, 1] 51 | ]) 52 | R_after_z = Rz @ R 53 | 54 | # Angle to rotate around y-axis to align the x-axis with the global x-axis 55 | theta_y = np.arctan2(R_after_z[0, 2], R_after_z[0, 0]) # arctan2(z, x) 56 | 57 | # Since the goal is to align the x-axis, the rotation around the x-axis might not be necessary 58 | # unless there are specific orientations required for the y and z axes after the alignment. 59 | 60 | # Calculated angles (converted to degrees for easier interpretation) 61 | theta_z_deg = np.degrees(theta_z) 62 | theta_y_deg = np.degrees(theta_y) 63 | 64 | Ry = np.array([ 65 | [np.cos(-theta_y), 0, np.sin(-theta_y)], 66 | [0, 1, 0], 67 | [-np.sin(-theta_y), 0, np.cos(-theta_y)] 68 | ]) 69 | R_after_y = Ry @ R_after_z 70 | 71 | # Angle to rotate around x-axis to align the y-axis and z-axis properly with the global y-axis and z-axis 72 | theta_x = np.arctan2(R_after_y[1, 2], R_after_y[1, 1]) # arctan2(z, y) of the second row 73 | 74 | # Calculated angle (converted to degrees for easier interpretation) 75 | # theta_x_deg = np.degrees(theta_x) 76 | 77 | return theta_x 78 | -------------------------------------------------------------------------------- /avp_stream/utils/isaac_utils.py: -------------------------------------------------------------------------------- 1 | import isaacgym 2 | import torch 3 | from isaacgym import gymapi 4 | import numpy as np 5 | import argparse 6 | import torch.nn.functional as F 7 | from typing import * 8 | import yaml 9 | import os 10 | 11 | asset_loaded = False 12 | 13 | def load_cfg(cfg_path: str) -> Dict: 14 | cfg = yaml.load(open(cfg_path, 'r'), Loader=yaml.FullLoader) 15 | return cfg 16 | 17 | def default_sim_params(use_gpu, up_axis = 'Z', hz = 60.0, substeps = 4, num_position_iterations = 8, num_velocity_iterations = 2): 18 | sim_params = gymapi.SimParams() 19 | sim_params.up_axis = gymapi.UP_AXIS_Y if up_axis == 'Y' else gymapi.UP_AXIS_Z 20 | sim_params.gravity = gymapi.Vec3(0.0, -9.8, 0.0) if up_axis == 'Y' else gymapi.Vec3(0.0, 0.0, -9.8) 21 | sim_params.dt = 1.0 / hz 22 | sim_params.substeps = substeps 23 | sim_params.use_gpu_pipeline = use_gpu 24 | sim_params.physx.solver_type = 1 25 | sim_params.physx.num_position_iterations = num_position_iterations 26 | sim_params.physx.num_velocity_iterations = num_velocity_iterations 27 | sim_params.physx.rest_offset = 0.0 28 | sim_params.physx.contact_offset = 0.001 
29 | sim_params.physx.friction_offset_threshold = 0.001 30 | sim_params.physx.friction_correlation_distance = 0.0005 31 | sim_params.physx.num_threads = 4 32 | sim_params.physx.use_gpu = use_gpu 33 | return sim_params 34 | 35 | 36 | def load_axis(gym, sim, device, size, asset_root = './assets'): 37 | 38 | robot_asset_file = "{}_axis.urdf".format(size) 39 | asset_options = gymapi.AssetOptions() 40 | asset_options.armature = 0.01 41 | asset_options.fix_base_link = True 42 | asset_options.disable_gravity = True 43 | robot_asset = gym.load_asset(sim, asset_root, robot_asset_file, asset_options) 44 | 45 | return robot_asset 46 | 47 | 48 | 49 | def load_left_leap_hand_asset(gym, sim, device, asset_root = '../bidex_sim/assets'): 50 | 51 | robot_asset_file = "robots/hands/allegro_hand/allegro_hand_left.urdf" 52 | asset_options = gymapi.AssetOptions() 53 | asset_options.armature = 0.01 54 | asset_options.fix_base_link = False 55 | asset_options.disable_gravity = True 56 | asset_options.flip_visual_attachments = False 57 | asset_options.use_mesh_materials = True 58 | asset_options.vhacd_enabled = True 59 | asset_options.vhacd_params = gymapi.VhacdParams() 60 | asset_options.vhacd_params.resolution = 1000000 61 | robot_asset = gym.load_asset(sim, asset_root, robot_asset_file, asset_options) 62 | 63 | return robot_asset 64 | 65 | def load_ur3e_asset(gym, sim, device, asset_root = '../bidex_sim/assets', hand = None, chirality = None, control = {'arm': 'POS', 'hand': 'POS'}): 66 | 67 | if hand is None: 68 | robot_asset_file = "robots/ur_description/urdf/ur3e.urdf" 69 | else: 70 | robot_asset_file = "robots/ur_description/urdf/ur3e_{}_{}.urdf".format(hand, chirality) 71 | 72 | asset_options = gymapi.AssetOptions() 73 | asset_options.armature = 0.01 74 | asset_options.fix_base_link = True 75 | asset_options.disable_gravity = True 76 | asset_options.flip_visual_attachments = False 77 | asset_options.use_mesh_materials = True 78 | asset_options.vhacd_enabled = True 79 | asset_options.vhacd_params = gymapi.VhacdParams() 80 | if control['arm'] == 'POS': 81 | asset_options.default_dof_drive_mode = gymapi.DOF_MODE_POS 82 | elif control['arm'] == 'EFFORT': 83 | asset_options.default_dof_drive_mode = gymapi.DOF_MODE_EFFORT 84 | asset_options.vhacd_params.resolution = 1000000 85 | robot_asset = gym.load_asset(sim, asset_root, robot_asset_file, asset_options) 86 | robot_dof_props = gym.get_asset_dof_properties(robot_asset) 87 | for i in range(6, 22): 88 | robot_dof_props['driveMode'][i] = gymapi.DOF_MODE_POS 89 | robot_dof_props['stiffness'][i] = 10000 90 | robot_dof_props['damping'][i] = 500 91 | return robot_asset, robot_dof_props 92 | 93 | 94 | def refresh_tensors(gym, sim): 95 | # refresh tensors 96 | gym.refresh_rigid_body_state_tensor(sim) 97 | gym.refresh_actor_root_state_tensor(sim) 98 | gym.refresh_dof_state_tensor(sim) 99 | gym.refresh_jacobian_tensors(sim) 100 | gym.refresh_mass_matrix_tensors(sim) 101 | 102 | def setup_viewer_camera(gym, env, viewer): 103 | cam_pos = gymapi.Vec3(0.0, -0.05, 1.55) 104 | cam_target = gymapi.Vec3(0.0, 0.5, 0.4) 105 | middle_env = env 106 | gym.viewer_camera_look_at(viewer, middle_env, cam_pos, cam_target) 107 | 108 | def adjust_viewer_camera(gym, env, viewer, cam_pos, cam_target): 109 | gym.viewer_camera_look_at(viewer, env, cam_pos, cam_target) -------------------------------------------------------------------------------- /avp_stream/utils/se3_utils.py: -------------------------------------------------------------------------------- 1 | 2 | import torch 3 | import 
torch.nn.functional as F 4 | import numpy as np 5 | from typing import * 6 | 7 | def standardize_quaternion(quaternions: torch.Tensor) -> torch.Tensor: 8 | """ 9 | Convert a unit quaternion to a standard form: one in which the real 10 | part is non negative. 11 | 12 | Args: 13 | quaternions: Quaternions with real part first, 14 | as tensor of shape (..., 4). 15 | 16 | Returns: 17 | Standardized quaternions as tensor of shape (..., 4). 18 | """ 19 | return torch.where(quaternions[..., 0:1] < 0, -quaternions, quaternions) 20 | 21 | 22 | def _sqrt_positive_part(x: torch.Tensor) -> torch.Tensor: 23 | """ 24 | Returns torch.sqrt(torch.max(0, x)) 25 | but with a zero subgradient where x is 0. 26 | """ 27 | ret = torch.zeros_like(x) 28 | positive_mask = x > 0 29 | ret[positive_mask] = torch.sqrt(x[positive_mask]) 30 | return ret 31 | 32 | 33 | def matrix_to_quaternion(matrix: torch.Tensor) -> torch.Tensor: 34 | """ 35 | Convert rotations given as rotation matrices to quaternions. 36 | 37 | Args: 38 | matrix: Rotation matrices as tensor of shape (..., 3, 3). 39 | 40 | Returns: 41 | quaternions with real part first, as tensor of shape (..., 4). 42 | """ 43 | if matrix.size(-1) != 3 or matrix.size(-2) != 3: 44 | raise ValueError(f"Invalid rotation matrix shape {matrix.shape}.") 45 | 46 | batch_dim = matrix.shape[:-2] 47 | m00, m01, m02, m10, m11, m12, m20, m21, m22 = torch.unbind( 48 | matrix.reshape(batch_dim + (9,)), dim=-1 49 | ) 50 | 51 | q_abs = _sqrt_positive_part( 52 | torch.stack( 53 | [ 54 | 1.0 + m00 + m11 + m22, 55 | 1.0 + m00 - m11 - m22, 56 | 1.0 - m00 + m11 - m22, 57 | 1.0 - m00 - m11 + m22, 58 | ], 59 | dim=-1, 60 | ) 61 | ) 62 | 63 | # we produce the desired quaternion multiplied by each of r, i, j, k 64 | quat_by_rijk = torch.stack( 65 | [ 66 | # pyre-fixme[58]: `**` is not supported for operand types `Tensor` and 67 | # `int`. 68 | torch.stack([q_abs[..., 0] ** 2, m21 - m12, m02 - m20, m10 - m01], dim=-1), 69 | # pyre-fixme[58]: `**` is not supported for operand types `Tensor` and 70 | # `int`. 71 | torch.stack([m21 - m12, q_abs[..., 1] ** 2, m10 + m01, m02 + m20], dim=-1), 72 | # pyre-fixme[58]: `**` is not supported for operand types `Tensor` and 73 | # `int`. 74 | torch.stack([m02 - m20, m10 + m01, q_abs[..., 2] ** 2, m12 + m21], dim=-1), 75 | # pyre-fixme[58]: `**` is not supported for operand types `Tensor` and 76 | # `int`. 77 | torch.stack([m10 - m01, m20 + m02, m21 + m12, q_abs[..., 3] ** 2], dim=-1), 78 | ], 79 | dim=-2, 80 | ) 81 | 82 | # We floor here at 0.1 but the exact level is not important; if q_abs is small, 83 | # the candidate won't be picked. 84 | flr = torch.tensor(0.1).to(dtype=q_abs.dtype, device=q_abs.device) 85 | quat_candidates = quat_by_rijk / (2.0 * q_abs[..., None].max(flr)) 86 | 87 | # if not for numerical problems, quat_candidates[i] should be same (up to a sign), 88 | # forall i; we pick the best-conditioned one (with the largest denominator) 89 | out = quat_candidates[ 90 | F.one_hot(q_abs.argmax(dim=-1), num_classes=4) > 0.5, : 91 | ].reshape(batch_dim + (4,)) 92 | return standardize_quaternion(out) 93 | 94 | def quaternion_to_matrix(quaternions: torch.Tensor) -> torch.Tensor: 95 | """ 96 | Convert rotations given as quaternions to rotation matrices. 97 | 98 | Args: 99 | quaternions: quaternions with real part first, 100 | as tensor of shape (..., 4). 101 | 102 | Returns: 103 | Rotation matrices as tensor of shape (..., 3, 3). 
104 |     """
105 |     r, i, j, k = torch.unbind(quaternions, -1)
106 |     # pyre-fixme[58]: `/` is not supported for operand types `float` and `Tensor`.
107 |     two_s = 2.0 / (quaternions * quaternions).sum(-1)
108 | 
109 |     o = torch.stack(
110 |         (
111 |             1 - two_s * (j * j + k * k),
112 |             two_s * (i * j - k * r),
113 |             two_s * (i * k + j * r),
114 |             two_s * (i * j + k * r),
115 |             1 - two_s * (i * i + k * k),
116 |             two_s * (j * k - i * r),
117 |             two_s * (i * k - j * r),
118 |             two_s * (j * k + i * r),
119 |             1 - two_s * (i * i + j * j),
120 |         ),
121 |         -1,
122 |     )
123 |     return o.reshape(quaternions.shape[:-1] + (3, 3))
124 | 
125 | 
126 | 
127 | def isaac_mat2quat(mat: torch.Tensor) -> torch.Tensor:
128 |     """
129 |     returns the quaternion in xyzw format (following the isaacgym convention)
130 |     """
131 |     wxyz = matrix_to_quaternion(mat)
132 |     xyzw = torch.cat([wxyz[:, 1:], wxyz[:, :1]], dim=-1)
133 |     return xyzw
134 | 
135 | def mat2posquat(mat: torch.Tensor) -> torch.Tensor:
136 |     pos = mat[..., :3, 3]
137 |     quat = isaac_mat2quat(mat[..., :3, :3])
138 |     return torch.cat([pos, quat], dim=-1)
139 | 
140 | def posquat2mat(posquat: torch.Tensor) -> torch.Tensor:
141 |     """
142 |     convert pos + quaternion in xyzw format to matrix
143 |     """
144 |     batch = posquat.shape[0]
145 |     pos = posquat[..., :3]
146 |     quat_xyzw = posquat[..., 3:]
147 |     quat_wxyz = torch.cat([quat_xyzw[:, 3:], quat_xyzw[:, :3]], dim=-1)
148 |     rot_mat = quaternion_to_matrix(quat_wxyz)
149 | 
150 |     result = torch.eye(4, device=pos.device, dtype=pos.dtype).unsqueeze(0).repeat(batch, 1, 1)  # repeat (not expand) so each batch entry owns its memory and the in-place writes below are legal
151 |     result[..., :3, :3] = rot_mat
152 |     result[..., :3, 3] = pos
153 | 
154 |     return result
155 | 
156 | 
--------------------------------------------------------------------------------
/avp_stream/utils/trn_constants.py:
--------------------------------------------------------------------------------
1 | import torch
2 | from avp_stream.utils.se3_utils import *
3 | 
4 | 
5 | VISIONOS_TO_ISAAC = torch.tensor([[1, 0, 0, 0],
6 |                                   [0, 0, -1, 0],
7 |                                   [0, 1, 0, 0],
8 |                                   [0, 0, 0, 1]], dtype = torch.float32).unsqueeze(0)
9 | 
10 | FRAME_TRANSFORMATION = torch.tensor([[-1, 0, 0],
11 |                                      [0, 0, 1],
12 |                                      [0, 1, 0]], dtype = torch.float32).unsqueeze(0)
13 | FRAME_QUAT = isaac_mat2quat(FRAME_TRANSFORMATION)
14 | 
15 | FRAME_44 = torch.tensor([[-1, 0, 0, 0.7],
16 |                          [0, 0, 1, 0.9],
17 |                          [0, 1, 0, 1.6],
18 |                          [0, 0, 0, 1]], dtype = torch.float32).unsqueeze(0)
19 | 
20 | 
21 | ROTATE_90DEG_AROUND_X = torch.tensor([[1, 0, 0, 0],
22 |                                       [0, 0, -1, 0],
23 |                                       [0, 1, 0, 0],
24 |                                       [0, 0, 0, 1]], dtype = torch.float32).unsqueeze(0)
25 | 
26 | ROTATE_90DEG_AROUND_Y = torch.tensor([[0, 0, 1, 0],
27 |                                       [0, 1, 0, 0],
28 |                                       [-1, 0, 0, 0],
29 |                                       [0, 0, 0, 1]], dtype = torch.float32).unsqueeze(0)
30 | 
31 | ROTATE_NEG_90DEG_AROUND_Y = torch.tensor([[0, 0, -1, 0],
32 |                                           [0, 1, 0, 0],
33 |                                           [1, 0, 0, 0],
34 |                                           [0, 0, 0, 1]], dtype = torch.float32).unsqueeze(0)
35 | 
36 | ROTATE_180DEG_AROUND_Z = torch.tensor([[-1, 0, 0, 0],
37 |                                        [0, -1, 0, 0],
38 |                                        [0, 0, 1, 0],
39 |                                        [0, 0, 0, 1]], dtype=torch.float32).unsqueeze(0)
40 | 
41 | 
42 | VISIONOS_RIGHT_HAND_TO_LEAP = ROTATE_90DEG_AROUND_X @ ROTATE_NEG_90DEG_AROUND_Y
43 | 
44 | VISIONOS_LEFT_HAND_TO_LEAP = ROTATE_90DEG_AROUND_X @ ROTATE_90DEG_AROUND_Y
45 | 
46 | VISIONOS_RIGHT_FINGERS_TO_LEAP = ROTATE_90DEG_AROUND_Y
47 | 
--------------------------------------------------------------------------------
/example.py:
--------------------------------------------------------------------------------
1 | # subscriber.py
2 | 
3 | from avp_stream import VisionProStreamer
4 | import argparse
5 | from typing import *
6 | 
7 | if __name__ == "__main__":
8 | 
9 |     parser = argparse.ArgumentParser()
10 |     parser.add_argument('--ip', type = str, required = True)
11 |     parser.add_argument('--record', action = 'store_true')
12 |     args = parser.parse_args()
13 | 
14 |     s = VisionProStreamer(args.ip, args.record)
15 | 
16 |     while True:
17 |         latest = s.latest
18 |         print(latest['head'][:, :3, -1], latest['right_wrist'][:, :3, -1], latest['right_fingers'].shape)
19 | 
--------------------------------------------------------------------------------
/how_to_install.md:
--------------------------------------------------------------------------------
1 | # How to install on your Vision Pro
2 | 
3 | ## Prerequisites
4 | 
5 | - Mac with Xcode 15 [[Install here]](https://apps.apple.com/us/app/xcode/id497799835?mt=12/)
6 | - Apple Vision Pro [[Buy Here]](https://www.apple.com/apple-vision-pro/)
7 | - Apple Developer Program Account [[Register Here]](https://developer.apple.com/)
8 | - Vision Pro Developer Strap [[Buy Here]](https://developer.apple.com/visionos/developer-strap/)
9 | 
10 | 
11 | ## Installing the App
12 | 
13 | 
14 | ### Step 1. Open Xcode Project
15 | 
16 | After you've `git clone`d the project, double-click `VisionProTeleop.xcodeproj`. This will automatically open Xcode, where you can build and install the app.
17 | 
18 | 
19 | ### Step 2. Register your developer account
20 | 
21 | ![](https://share.cleanshot.com/r2fj3GXg+)
22 | 
23 | On the left sidebar, click on `VisionProTeleop`. Under Targets - Signing & Capabilities - Team, click `Add an Account` and follow the steps to add your Apple Developer Account. Then, change the `Bundle Identifier` to a unique name of your own.
24 | 
25 | ### Step 3. Connect Apple Vision Pro to your Mac
26 | 
27 | Using the Vision Pro [Developer Strap](https://developer.apple.com/visionos/developer-strap/), connect your Apple Vision Pro to your Mac. The first time you connect, your Mac will take some time to configure things. Once everything is ready, Apple Vision Pro will appear as one of the available run destinations.
28 | 
29 | ![](https://share.cleanshot.com/knZJsbZB+)
30 | 
31 | Then, click the `play` button to start installing. If everything went well, you will see "Build Succeeded".
32 | 
33 | ### Step 4. Enjoy the App!
34 | 
35 | Once you've successfully installed the App, you can disconnect the Vision Pro from your Mac and use it as a standalone device/app, without the Mac. You will see an App Icon on your home screen. Click on the icon to run the app and start streaming.
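Once the app is streaming, any machine on the same WiFi network can subscribe with the repo's `avp_stream` Python package. A minimal subscriber sketch (the IP is a placeholder; use the address the app displays on your Vision Pro):

```python
from avp_stream import VisionProStreamer

# Placeholder IP; substitute the one shown inside the app.
s = VisionProStreamer(ip='10.29.230.57')

while True:
    latest = s.latest             # dict of numpy arrays, refreshed by a background thread
    print(latest['head'])         # (1, 4, 4) head pose in the Z-up world frame
    print(latest['right_wrist'])  # (1, 4, 4) right wrist pose
```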
36 | 
37 | 
38 | 
39 | 
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
1 | from setuptools import setup, find_packages
2 | 
3 | setup(
4 |     name='avp_stream',
5 |     version='1.0',
6 |     description='This python package streams diverse tracking data available from AVP to any device that can communicate with gRPC.',
7 |     author='Younghyo Park',
8 |     author_email='younghyo@mit.edu',
9 |     packages=find_packages(),
10 |     install_requires=[
11 |         'numpy', 'grpcio', 'grpcio-tools', 'matplotlib'
12 |     ],
13 |     extras_require={
14 |     },
15 | )
--------------------------------------------------------------------------------
/viz_isaac.py:
--------------------------------------------------------------------------------
1 | from avp_stream.isaac_env import IsaacVisualizerEnv
2 | from avp_stream import VisionProStreamer
3 | import time
4 | from typing import *
5 | import numpy as np
6 | import torch
7 | 
8 | class IsaacVisualizer:
9 | 
10 |     def __init__(self, args):
11 |         self.s = VisionProStreamer(args.ip, args.record)
12 |         self.env = IsaacVisualizerEnv(args)
13 | 
14 |     def run(self):
15 | 
16 |         while True:
17 |             latest = self.s.latest
18 |             self.env.step(np2tensor(latest, self.env.device))
19 | 
20 | 
21 | def np2tensor(data: Dict[str, np.ndarray], device) -> Dict[str, torch.Tensor]:
22 |     for key in data.keys():
23 |         data[key] = torch.tensor(data[key], dtype = torch.float32, device = device)
24 |     return data
25 | 
26 | 
27 | if __name__ == "__main__":
28 |     import argparse
29 |     import os
30 | 
31 |     parser = argparse.ArgumentParser()
32 |     parser.add_argument('--ip', type = str, required = True)
33 |     parser.add_argument('--record', action = 'store_true')
34 |     parser.add_argument('--follow', action = 'store_true', help = "The viewpoint follows the user's head")
35 |     args = parser.parse_args()
36 | 
37 |     vis = IsaacVisualizer(args)
38 |     vis.run()
--------------------------------------------------------------------------------
/viz_localization.py:
--------------------------------------------------------------------------------
1 | 
2 | import matplotlib.pyplot as plt
3 | import numpy as np
4 | import matplotlib.animation as animation
5 | 
6 | from avp_stream import VisionProStreamer
7 | from threading import Thread
8 | 
9 | class LocalizationVisualizer:
10 | 
11 |     def __init__(self, args):
12 | 
13 |         self.args = args
14 | 
15 |         self.s = VisionProStreamer(args.ip, args.record)
16 | 
17 |     def background_localization_visualization(self):
18 | 
19 | 
20 |         fig, ax = plt.subplots()
21 | 
22 |         # set fig size
23 |         fig.set_size_inches(10, 10)
24 | 
25 |         ax.set_xlim(-0.05, 0.1)
26 |         ax.set_ylim(-0.05, 0.1)
27 |         ax.set_aspect('equal')
28 |         ax.set_xlabel('X')
29 |         ax.set_ylabel('Y')
30 | 
31 |         position_history = np.array([[0.0, 0.0]])
32 |         scat = ax.scatter(position_history[:, 0], position_history[:, 1], s = 100)
33 | 
34 |         def update(interval):
35 |             nonlocal position_history
36 | 
37 |             print('updating visualization')
38 |             transformations = self.s.latest
39 |             head_pos = transformations["head"][:, :3, 3]
40 |             print(head_pos[:, 0])
41 |             new_pos = np.array([[head_pos[0, 0], -head_pos[0, 1]]])  # Adjust axis if necessary
42 |             # print(new_pos.shape)
43 |             position_history = np.append(position_history, new_pos, axis=0)
44 |             # scat.set_offsets(np.c_[head_pos[:, 0], - head_pos[:, 2]])
45 |             scat.set_offsets(position_history)
46 | 
47 |             xmin = min(position_history[:, 0])
48 |             xmax = max(position_history[:, 0])
49 |             ymin = min(position_history[:, 1])
50 |             ymax = max(position_history[:, 1])
51 | 
52 |             ax.set_xlim(xmin - 0.1, xmax + 0.1)
53 |             ax.set_ylim(ymin - 0.1, ymax + 0.1)
54 | 
55 |             # make x, y axis text size larger
56 |             for item in ([ax.title, ax.xaxis.label, ax.yaxis.label] +
57 |                          ax.get_xticklabels() + ax.get_yticklabels()):
58 |                 item.set_fontsize(20)
59 | 
60 |         ani = animation.FuncAnimation(fig, update, frames=10000, interval=1)
61 |         plt.show()
62 | 
63 |     def run(self):
64 | 
65 |         thread = Thread(target=self.background_localization_visualization)
66 |         thread.start()
67 | 
68 | 
69 | if __name__ == "__main__":
70 | 
71 |     import argparse
72 |     import os
73 | 
74 |     parser = argparse.ArgumentParser()
75 |     parser.add_argument('--ip', type = str, required = True)
76 |     parser.add_argument('--record', action = 'store_true')
77 |     args = parser.parse_args()
78 | 
79 |     env = LocalizationVisualizer(args)
80 | 
81 |     env.run()
82 | 
--------------------------------------------------------------------------------
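The Python client can also be exercised without a headset by standing up a stub gRPC server that speaks the same `handtracking` service. The sketch below is a hypothetical test double, not part of this repo: the file name `fake_avp_server.py`, the 60 Hz rate, and the 25-joint count are assumptions, while port 12345 and the message/field names come from `handtracking.proto` and `streamer.py` above.

# fake_avp_server.py -- hypothetical test double; not part of this repo.
# Stands in for the Vision Pro app by serving handtracking.HandTrackingService
# on port 12345 (the port streamer.py dials) and streaming identity poses.
import time
from concurrent import futures

import grpc
from avp_stream.grpc_msg import handtracking_pb2, handtracking_pb2_grpc


def identity() -> handtracking_pb2.Matrix4x4:
    # Only the diagonal is set; proto3 scalar fields default to 0.
    m = handtracking_pb2.Matrix4x4()
    m.m00, m.m11, m.m22, m.m33 = 1.0, 1.0, 1.0, 1.0
    return m


class FakeHandTracking(handtracking_pb2_grpc.HandTrackingServiceServicer):
    def StreamHandUpdates(self, request, context):
        while context.is_active():
            update = handtracking_pb2.HandUpdate()
            update.Head.CopyFrom(identity())
            for hand in (update.left_hand, update.right_hand):
                hand.wristMatrix.CopyFrom(identity())
                # 25 joints per hand, matching what the visualizers index
                # (the .proto comment says 24; adjust to your consumer).
                for _ in range(25):
                    hand.skeleton.jointMatrices.append(identity())
            yield update
            time.sleep(1 / 60)  # assumed rate; the app's true rate may differ


if __name__ == '__main__':
    server = grpc.server(futures.ThreadPoolExecutor(max_workers=2))
    handtracking_pb2_grpc.add_HandTrackingServiceServicer_to_server(FakeHandTracking(), server)
    server.add_insecure_port('[::]:12345')
    server.start()
    server.wait_for_termination()

Run it on the same machine and `VisionProStreamer(ip='127.0.0.1')` should connect and start reporting identity transforms (pinch distances and wrist rolls will read 0).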