├── Enterprise.license
├── LICENSE
├── Packages
└── RealityKitContent
│ ├── .swiftpm
│ └── xcode
│ │ └── xcuserdata
│ │ └── devoncopley.xcuserdatad
│ │ └── xcschemes
│ │ └── xcschememanagement.plist
│ ├── Package.realitycomposerpro
│ ├── ProjectData
│ │ └── main.json
│ └── WorkspaceData
│ │ ├── SceneMetadataList.json
│ │ └── Settings.rcprojectdata
│ ├── Package.swift
│ ├── README.md
│ └── Sources
│ └── RealityKitContent
│ ├── RealityKitContent.rkassets
│ ├── Immersive.usda
│ ├── Materials
│ │ └── GridMaterial.usda
│ └── Scene.usda
│ └── RealityKitContent.swift
├── README.md
├── Resources
└── Enterprise.license
├── avpenterprisetest.xcodeproj
├── project.pbxproj
├── project.xcworkspace
│ ├── contents.xcworkspacedata
│ └── xcshareddata
│ │ └── swiftpm
│ │ └── Package.resolved
└── xcuserdata
│ └── devoncopley.xcuserdatad
│ ├── xcdebugger
│ └── Breakpoints_v2.xcbkptlist
│ └── xcschemes
│ └── xcschememanagement.plist
├── avpenterprisetest
├── AppModel.swift
├── Assets.xcassets
│ ├── AccentColor.colorset
│ │ └── Contents.json
│ ├── AppIcon.solidimagestack
│ │ ├── Back.solidimagestacklayer
│ │ │ ├── Content.imageset
│ │ │ │ └── Contents.json
│ │ │ └── Contents.json
│ │ ├── Contents.json
│ │ ├── Front.solidimagestacklayer
│ │ │ ├── Content.imageset
│ │ │ │ └── Contents.json
│ │ │ └── Contents.json
│ │ └── Middle.solidimagestacklayer
│ │ │ ├── Content.imageset
│ │ │ └── Contents.json
│ │ │ └── Contents.json
│ └── Contents.json
├── AudioCapture.swift
├── AudioHandler.swift
├── ContentView.swift
├── FrontCameraCapture.swift
├── Info.plist
├── Preview Content
│ └── Preview Assets.xcassets
│ │ └── Contents.json
├── ToggleImmersiveSpaceButton.swift
├── VideoEncoder.swift
├── YouTubeStreamManager.swift
├── avpenterprisetest.entitlements
└── avpenterprisetestApp.swift
└── avpenterprisetestTests
└── avpenterprisetestTests.swift
/Enterprise.license:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/devon3000/AVPEnterpriseAPI-CameraStreaming/f69f78b7a5b2142229032e8a7e4f165469bffb13/Enterprise.license
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2024 Devon Copley
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/Packages/RealityKitContent/.swiftpm/xcode/xcuserdata/devoncopley.xcuserdatad/xcschemes/xcschememanagement.plist:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
3 | <plist version="1.0">
4 | <dict>
5 | 	<key>SchemeUserState</key>
6 | 	<dict>
7 | 		<key>RealityKitContent.xcscheme_^#shared#^_</key>
8 | 		<dict>
9 | 			<key>orderHint</key>
10 | 			<integer>1</integer>
11 | 		</dict>
12 | 	</dict>
13 | </dict>
14 | </plist>
15 |
--------------------------------------------------------------------------------
/Packages/RealityKitContent/Package.realitycomposerpro/ProjectData/main.json:
--------------------------------------------------------------------------------
1 | {
2 | "pathsToIds" : {
3 | "\/RealityKitContent\/Sources\/RealityKitContent\/RealityKitContent.rkassets\/GridMaterial.usda" : "CB766F92-EE55-4A63-9401-E7B8C009764D",
4 | "\/RealityKitContent\/Sources\/RealityKitContent\/RealityKitContent.rkassets\/Immersive.usda" : "65F6F990-A780-4474-B78B-572E0E4E273D",
5 | "\/RealityKitContent\/Sources\/RealityKitContent\/RealityKitContent.rkassets\/Scene.usda" : "0A9B4653-B11E-4D6A-850E-C6FCB621626C",
6 | "\/RealityKitContent\/Sources\/RealityKitContent\/RealityKitContent.rkassets\/Untitled Scene.usda" : "D560BB77-AAF3-4BDE-B7C4-989332A4688B",
7 | "RealityKitContent\/Sources\/RealityKitContent\/RealityKitContent.rkassets\/GridMaterial.usda" : "66168B71-AB05-424E-8B6C-D33D6E61B08F",
8 | "RealityKitContent\/Sources\/RealityKitContent\/RealityKitContent.rkassets\/Immersive.usda" : "AF09ED6F-1707-48FD-8720-65B998362C09",
9 | "RealityKitContent\/Sources\/RealityKitContent\/RealityKitContent.rkassets\/Scene.usda" : "D66134B1-3681-4A8E-AFE5-29F257229F3B"
10 | }
11 | }
--------------------------------------------------------------------------------
/Packages/RealityKitContent/Package.realitycomposerpro/WorkspaceData/SceneMetadataList.json:
--------------------------------------------------------------------------------
1 | {
2 | "0A9B4653-B11E-4D6A-850E-C6FCB621626C" : {
3 | "cameraTransform" : [
4 | 1,
5 | 0,
6 | 0,
7 | 0,
8 | 0,
9 | 0.86602545,
10 | -0.49999994,
11 | 0,
12 | 0,
13 | 0.49999994,
14 | 0.86602545,
15 | 0,
16 | 0.0035969093,
17 | 0.35542378,
18 | 0.62919164,
19 | 1
20 | ],
21 | "objectMetadataList" : [
22 | [
23 | "0A9B4653-B11E-4D6A-850E-C6FCB621626C",
24 | "Root"
25 | ],
26 | {
27 | "isExpanded" : true,
28 | "isLocked" : false
29 | }
30 | ]
31 | },
32 | "65F6F990-A780-4474-B78B-572E0E4E273D" : {
33 | "cameraTransform" : [
34 | 1,
35 | 0,
36 | -0,
37 | 0,
38 | -0,
39 | 0.86602545,
40 | -0.49999988,
41 | 0,
42 | 0,
43 | 0.49999988,
44 | 0.86602545,
45 | 0,
46 | 1.1972517e-08,
47 | 2.6179132,
48 | 0.43191218,
49 | 1
50 | ],
51 | "objectMetadataList" : [
52 | [
53 | "65F6F990-A780-4474-B78B-572E0E4E273D",
54 | "Root"
55 | ],
56 | {
57 | "isExpanded" : true,
58 | "isLocked" : false
59 | }
60 | ]
61 | },
62 | "66168B71-AB05-424E-8B6C-D33D6E61B08F" : {
63 | "cameraTransform" : [
64 | 1,
65 | 0,
66 | -0,
67 | 0,
68 | -0,
69 | 0.8660254,
70 | -0.5,
71 | 0,
72 | 0,
73 | 0.5,
74 | 0.8660254,
75 | 0,
76 | 0,
77 | 0.23875366,
78 | 0.4135335,
79 | 1
80 | ],
81 | "objectMetadataList" : [
82 | [
83 | "66168B71-AB05-424E-8B6C-D33D6E61B08F",
84 | "Root"
85 | ],
86 | {
87 | "isExpanded" : true,
88 | "isLocked" : false
89 | }
90 | ]
91 | },
92 | "AF09ED6F-1707-48FD-8720-65B998362C09" : {
93 | "cameraTransform" : [
94 | 1,
95 | 0,
96 | -0,
97 | 0,
98 | -0,
99 | 0.7071069,
100 | -0.7071067,
101 | 0,
102 | 0,
103 | 0.7071067,
104 | 0.7071069,
105 | 0,
106 | 0,
107 | 2.8836339,
108 | -0.107588194,
109 | 1
110 | ],
111 | "objectMetadataList" : [
112 | [
113 | "AF09ED6F-1707-48FD-8720-65B998362C09",
114 | "Root"
115 | ],
116 | {
117 | "isExpanded" : true,
118 | "isLocked" : false
119 | },
120 | [
121 | "AF09ED6F-1707-48FD-8720-65B998362C09",
122 | "Root",
123 | "Sphere_Left"
124 | ],
125 | {
126 | "isExpanded" : true,
127 | "isLocked" : false
128 | },
129 | [
130 | "AF09ED6F-1707-48FD-8720-65B998362C09",
131 | "Root",
132 | "Sphere_Right"
133 | ],
134 | {
135 | "isExpanded" : true,
136 | "isLocked" : false
137 | }
138 | ]
139 | },
140 | "CB766F92-EE55-4A63-9401-E7B8C009764D" : {
141 | "cameraTransform" : [
142 | 1,
143 | 0,
144 | -0,
145 | 0,
146 | -0,
147 | 0.8660253,
148 | -0.5000001,
149 | 0,
150 | 0,
151 | 0.5000001,
152 | 0.8660253,
153 | 0,
154 | 0,
155 | 0.27093494,
156 | 0.4692731,
157 | 1
158 | ],
159 | "objectMetadataList" : [
160 | [
161 | "CB766F92-EE55-4A63-9401-E7B8C009764D",
162 | "Root",
163 | "GridMaterial"
164 | ],
165 | {
166 | "isExpanded" : true,
167 | "isLocked" : false
168 | },
169 | [
170 | "CB766F92-EE55-4A63-9401-E7B8C009764D",
171 | "Root"
172 | ],
173 | {
174 | "isExpanded" : true,
175 | "isLocked" : false
176 | }
177 | ]
178 | },
179 | "D560BB77-AAF3-4BDE-B7C4-989332A4688B" : {
180 | "cameraTransform" : [
181 | 1,
182 | 0,
183 | -0,
184 | 0,
185 | -0,
186 | 0.8660253,
187 | -0.5000001,
188 | 0,
189 | 0,
190 | 0.5000001,
191 | 0.8660253,
192 | 0,
193 | 0,
194 | 0.27093494,
195 | 0.4692731,
196 | 1
197 | ],
198 | "objectMetadataList" : [
199 |
200 | ]
201 | },
202 | "D66134B1-3681-4A8E-AFE5-29F257229F3B" : {
203 | "cameraTransform" : [
204 | 1,
205 | 0,
206 | -0,
207 | 0,
208 | -0,
209 | 0.7071069,
210 | -0.7071067,
211 | 0,
212 | 0,
213 | 0.7071067,
214 | 0.7071069,
215 | 0,
216 | 0,
217 | 0.26894823,
218 | 0.26934713,
219 | 1
220 | ],
221 | "objectMetadataList" : [
222 | [
223 | "D66134B1-3681-4A8E-AFE5-29F257229F3B",
224 | "Root",
225 | "GridMaterial",
226 | "GridMaterial"
227 | ],
228 | {
229 | "isExpanded" : true,
230 | "isLocked" : false
231 | },
232 | [
233 | "D66134B1-3681-4A8E-AFE5-29F257229F3B",
234 | "Root"
235 | ],
236 | {
237 | "isExpanded" : true,
238 | "isLocked" : false
239 | }
240 | ]
241 | }
242 | }
--------------------------------------------------------------------------------
/Packages/RealityKitContent/Package.realitycomposerpro/WorkspaceData/Settings.rcprojectdata:
--------------------------------------------------------------------------------
1 | {
2 | "cameraPresets" : {
3 |
4 | },
5 | "secondaryToolbarData" : {
6 | "isGridVisible" : true,
7 | "sceneReverbPreset" : -1
8 | },
9 | "unitDefaults" : {
10 | "°" : "°",
11 | "kg" : "g",
12 | "m" : "cm",
13 | "m\/s" : "m\/s",
14 | "m\/s²" : "m\/s²",
15 | "s" : "s"
16 | }
17 | }
--------------------------------------------------------------------------------
/Packages/RealityKitContent/Package.swift:
--------------------------------------------------------------------------------
1 | // swift-tools-version:6.0
2 | // The swift-tools-version declares the minimum version of Swift required to build this package.
3 |
4 | import PackageDescription
5 |
6 | let package = Package(
7 | name: "RealityKitContent",
8 | platforms: [
9 | .visionOS(.v2),
10 | .macOS(.v15),
11 | .iOS(.v18)
12 | ],
13 | products: [
14 | // Products define the executables and libraries a package produces, and make them visible to other packages.
15 | .library(
16 | name: "RealityKitContent",
17 | targets: ["RealityKitContent"]),
18 | ],
19 | dependencies: [
20 | // Dependencies declare other packages that this package depends on. This content-only package has none.
21 | // .package(url: /* package url */, from: "1.0.0"),
22 | ],
23 | targets: [
24 | // Targets are the basic building blocks of a package. A target can define a module or a test suite.
25 | // Targets can depend on other targets in this package, and on products in packages this package depends on.
26 | .target(
27 | name: "RealityKitContent",
28 | dependencies: []),
29 | ]
30 | )
--------------------------------------------------------------------------------
/Packages/RealityKitContent/README.md:
--------------------------------------------------------------------------------
1 | # RealityKitContent
2 |
3 | A description of this package.
--------------------------------------------------------------------------------
/Packages/RealityKitContent/Sources/RealityKitContent/RealityKitContent.rkassets/Immersive.usda:
--------------------------------------------------------------------------------
1 | #usda 1.0
2 | (
3 | defaultPrim = "Root"
4 | metersPerUnit = 1
5 | upAxis = "Y"
6 | )
7 |
8 | def Xform "Root"
9 | {
10 | reorder nameChildren = ["Sphere_Left", "Sphere_Right", "GridMaterial"]
11 | def Sphere "Sphere_Right" (
12 | active = true
13 | prepend apiSchemas = ["MaterialBindingAPI"]
14 | )
15 | {
16 | rel material:binding = (
17 | bindMaterialAs = "weakerThanDescendants"
18 | )
19 | double radius = 0.1
20 | quatf xformOp:orient = (1, 0, 0, 0)
21 | float3 xformOp:scale = (1, 1, 1)
22 | float3 xformOp:translate = (0.5, 1.5, -1.5)
23 | uniform token[] xformOpOrder = ["xformOp:translate", "xformOp:orient", "xformOp:scale"]
24 | }
25 |
26 | def Sphere "Sphere_Left" (
27 | active = true
28 | prepend apiSchemas = ["MaterialBindingAPI"]
29 | )
30 | {
31 | rel material:binding = (
32 | bindMaterialAs = "weakerThanDescendants"
33 | )
34 | double radius = 0.1
35 | quatf xformOp:orient = (1, 0, 0, 0)
36 | float3 xformOp:scale = (1, 1, 1)
37 | float3 xformOp:translate = (-0.5, 1.5, -1.5)
38 | uniform token[] xformOpOrder = ["xformOp:translate", "xformOp:orient", "xformOp:scale"]
39 | }
40 |
41 | def "GridMaterial" (
42 | active = true
43 | prepend references = @Materials/GridMaterial.usda@
44 | )
45 | {
46 | float3 xformOp:scale = (1, 1, 1)
47 | uniform token[] xformOpOrder = ["xformOp:translate", "xformOp:orient", "xformOp:scale"]
48 | }
49 | }
50 |
51 |
--------------------------------------------------------------------------------
/Packages/RealityKitContent/Sources/RealityKitContent/RealityKitContent.rkassets/Materials/GridMaterial.usda:
--------------------------------------------------------------------------------
1 | #usda 1.0
2 | (
3 | defaultPrim = "Root"
4 | metersPerUnit = 1
5 | upAxis = "Y"
6 | )
7 |
8 | def Xform "Root"
9 | {
10 | def Material "GridMaterial"
11 | {
12 | reorder nameChildren = ["", "", "", "", "", "", "", "", "", "", "", "", "", "", "DefaultSurfaceShader", "MaterialXPreviewSurface", "Texcoord", "Add", "Multiply", "Fractional", "LineCounts", "Multiply_1", "Separate2", "Separate2_1", "Ifgreater", "Ifgreater_1", "Max", "Background_Color"]
13 | token outputs:mtlx:surface.connect = </Root/GridMaterial/MaterialXPreviewSurface.outputs:out>
14 | token outputs:realitykit:vertex
15 | token outputs:surface
16 | float2 ui:nodegraph:realitykit:subgraphOutputs:pos = (2222, 300.5)
17 | float2 ui:nodegraph:realitykit:subgraphOutputs:size = (182, 89)
18 | int ui:nodegraph:realitykit:subgraphOutputs:stackingOrder = 749
19 |
20 | def Shader "DefaultSurfaceShader"
21 | {
22 | uniform token info:id = "UsdPreviewSurface"
23 | color3f inputs:diffuseColor = (1, 1, 1)
24 | float inputs:roughness = 0.75
25 | token outputs:surface
26 | }
27 |
28 | def Shader "MaterialXPreviewSurface"
29 | {
30 | uniform token info:id = "ND_UsdPreviewSurface_surfaceshader"
31 | float inputs:clearcoat
32 | float inputs:clearcoatRoughness
33 | color3f inputs:diffuseColor.connect =
34 | color3f inputs:emissiveColor
35 | float inputs:ior
36 | float inputs:metallic = 0.15
37 | float3 inputs:normal
38 | float inputs:occlusion
39 | float inputs:opacity
40 | float inputs:opacityThreshold
41 | float inputs:roughness = 0.5
42 | token outputs:out
43 | float2 ui:nodegraph:node:pos = (1967, 300.5)
44 | float2 ui:nodegraph:node:size = (208, 297)
45 | int ui:nodegraph:node:stackingOrder = 870
46 | string[] ui:nodegraph:realitykit:node:attributesShowingChildren = ["Advanced"]
47 | }
48 |
49 | def Shader "Texcoord"
50 | {
51 | uniform token info:id = "ND_texcoord_vector2"
52 | float2 outputs:out
53 | float2 ui:nodegraph:node:pos = (94.14453, 35.29297)
54 | float2 ui:nodegraph:node:size = (182, 43)
55 | int ui:nodegraph:node:stackingOrder = 1358
56 | }
57 |
58 | def Shader "Multiply"
59 | {
60 | uniform token info:id = "ND_multiply_vector2"
61 | float2 inputs:in1.connect =
62 | float2 inputs:in2 = (32, 15)
63 | float2 inputs:in2.connect =
64 | float2 outputs:out
65 | float2 ui:nodegraph:node:pos = (275.64453, 47.29297)
66 | float2 ui:nodegraph:node:size = (61, 36)
67 | int ui:nodegraph:node:stackingOrder = 1348
68 | string[] ui:nodegraph:realitykit:node:attributesShowingChildren = ["inputs:in2"]
69 | }
70 |
71 | def Shader "Fractional"
72 | {
73 | uniform token info:id = "ND_realitykit_fractional_vector2"
74 | float2 inputs:in.connect =
75 | float2 outputs:out
76 | float2 ui:nodegraph:node:pos = (440.5, 49.5)
77 | float2 ui:nodegraph:node:size = (155, 99)
78 | int ui:nodegraph:node:stackingOrder = 1345
79 | }
80 |
81 | def Shader "BaseColor"
82 | {
83 | uniform token info:id = "ND_constant_color3"
84 | color3f inputs:value = (0.89737034, 0.89737034, 0.89737034) (
85 | colorSpace = "Input - Texture - sRGB - sRGB"
86 | )
87 | color3f inputs:value.connect = None
88 | color3f outputs:out
89 | float2 ui:nodegraph:node:pos = (1537.5977, 363.07812)
90 | float2 ui:nodegraph:node:size = (150, 43)
91 | int ui:nodegraph:node:stackingOrder = 1353
92 | }
93 |
94 | def Shader "LineColor"
95 | {
96 | uniform token info:id = "ND_constant_color3"
97 | color3f inputs:value = (0.55945957, 0.55945957, 0.55945957) (
98 | colorSpace = "Input - Texture - sRGB - sRGB"
99 | )
100 | color3f inputs:value.connect = None
101 | color3f outputs:out
102 | float2 ui:nodegraph:node:pos = (1536.9844, 287.86328)
103 | float2 ui:nodegraph:node:size = (146, 43)
104 | int ui:nodegraph:node:stackingOrder = 1355
105 | }
106 |
107 | def Shader "LineWidths"
108 | {
109 | uniform token info:id = "ND_combine2_vector2"
110 | float inputs:in1 = 0.1
111 | float inputs:in2 = 0.1
112 | float2 outputs:out
113 | float2 ui:nodegraph:node:pos = (443.64453, 233.79297)
114 | float2 ui:nodegraph:node:size = (151, 43)
115 | int ui:nodegraph:node:stackingOrder = 1361
116 | }
117 |
118 | def Shader "LineCounts"
119 | {
120 | uniform token info:id = "ND_combine2_vector2"
121 | float inputs:in1 = 24
122 | float inputs:in2 = 12
123 | float2 outputs:out
124 | float2 ui:nodegraph:node:pos = (94.14453, 138.29297)
125 | float2 ui:nodegraph:node:size = (153, 43)
126 | int ui:nodegraph:node:stackingOrder = 1359
127 | }
128 |
129 | def Shader "Remap"
130 | {
131 | uniform token info:id = "ND_remap_color3"
132 | color3f inputs:in.connect =
133 | color3f inputs:inhigh.connect = None
134 | color3f inputs:inlow.connect = None
135 | color3f inputs:outhigh.connect =
136 | color3f inputs:outlow.connect =
137 | color3f outputs:out
138 | float2 ui:nodegraph:node:pos = (1755.5, 300.5)
139 | float2 ui:nodegraph:node:size = (95, 171)
140 | int ui:nodegraph:node:stackingOrder = 1282
141 | string[] ui:nodegraph:realitykit:node:attributesShowingChildren = ["inputs:outlow"]
142 | }
143 |
144 | def Shader "Separate2"
145 | {
146 | uniform token info:id = "ND_separate2_vector2"
147 | float2 inputs:in.connect =
148 | float outputs:outx
149 | float outputs:outy
150 | float2 ui:nodegraph:node:pos = (1212.6445, 128.91797)
151 | float2 ui:nodegraph:node:size = (116, 117)
152 | int ui:nodegraph:node:stackingOrder = 1363
153 | }
154 |
155 | def Shader "Combine3"
156 | {
157 | uniform token info:id = "ND_combine3_color3"
158 | float inputs:in1.connect =
159 | float inputs:in2.connect =
160 | float inputs:in3.connect =
161 | color3f outputs:out
162 | float2 ui:nodegraph:node:pos = (1578.1445, 128.91797)
163 | float2 ui:nodegraph:node:size = (146, 54)
164 | int ui:nodegraph:node:stackingOrder = 1348
165 | }
166 |
167 | def Shader "Range"
168 | {
169 | uniform token info:id = "ND_range_vector2"
170 | bool inputs:doclamp = 1
171 | float2 inputs:gamma = (2, 2)
172 | float2 inputs:in.connect =
173 | float2 inputs:inhigh.connect =
174 | float2 inputs:inlow = (0.02, 0.02)
175 | float2 inputs:outhigh
176 | float2 inputs:outlow
177 | float2 outputs:out
178 | float2 ui:nodegraph:node:pos = (990.64453, 128.91797)
179 | float2 ui:nodegraph:node:size = (98, 207)
180 | int ui:nodegraph:node:stackingOrder = 1364
181 | }
182 |
183 | def Shader "Subtract"
184 | {
185 | uniform token info:id = "ND_subtract_vector2"
186 | float2 inputs:in1.connect =
187 | float2 inputs:in2.connect =
188 | float2 outputs:out
189 | float2 ui:nodegraph:node:pos = (612.64453, 87.04297)
190 | float2 ui:nodegraph:node:size = (63, 36)
191 | int ui:nodegraph:node:stackingOrder = 1348
192 | }
193 |
194 | def Shader "Absval"
195 | {
196 | uniform token info:id = "ND_absval_vector2"
197 | float2 inputs:in.connect =
198 | float2 outputs:out
199 | float2 ui:nodegraph:node:pos = (765.64453, 87.04297)
200 | float2 ui:nodegraph:node:size = (123, 43)
201 | int ui:nodegraph:node:stackingOrder = 1348
202 | }
203 |
204 | def Shader "Min"
205 | {
206 | uniform token info:id = "ND_min_float"
207 | float inputs:in1.connect =
208 | float inputs:in2.connect =
209 | float outputs:out
210 | float2 ui:nodegraph:node:pos = (1388.1445, 128.91797)
211 | float2 ui:nodegraph:node:size = (114, 36)
212 | int ui:nodegraph:node:stackingOrder = 1363
213 | }
214 | }
215 | }
216 |
217 |
--------------------------------------------------------------------------------
/Packages/RealityKitContent/Sources/RealityKitContent/RealityKitContent.rkassets/Scene.usda:
--------------------------------------------------------------------------------
1 | #usda 1.0
2 | (
3 | defaultPrim = "Root"
4 | metersPerUnit = 1
5 | upAxis = "Y"
6 | )
7 |
8 | def Xform "Root"
9 | {
10 | reorder nameChildren = ["GridMaterial", "Sphere"]
11 | rel material:binding = None (
12 | bindMaterialAs = "weakerThanDescendants"
13 | )
14 |
15 | def Sphere "Sphere" (
16 | active = true
17 | prepend apiSchemas = ["MaterialBindingAPI"]
18 | )
19 | {
20 | rel material:binding = (
21 | bindMaterialAs = "weakerThanDescendants"
22 | )
23 | double radius = 0.05
24 | quatf xformOp:orient = (1, 0, 0, 0)
25 | float3 xformOp:scale = (1, 1, 1)
26 | float3 xformOp:translate = (0, 0, 0.0004)
27 | uniform token[] xformOpOrder = ["xformOp:translate", "xformOp:orient", "xformOp:scale"]
28 |
29 | def RealityKitComponent "Collider"
30 | {
31 | uint group = 1
32 | uniform token info:id = "RealityKit.Collider"
33 | uint mask = 4294967295
34 | token type = "Default"
35 |
36 | def RealityKitStruct "Shape"
37 | {
38 | float3 extent = (0.2, 0.2, 0.2)
39 | float radius = 0.05
40 | token shapeType = "Sphere"
41 | }
42 | }
43 |
44 | def RealityKitComponent "InputTarget"
45 | {
46 | uniform token info:id = "RealityKit.InputTarget"
47 | }
48 | }
49 |
50 | def "GridMaterial" (
51 | active = true
52 | prepend references = @Materials/GridMaterial.usda@
53 | )
54 | {
55 | float3 xformOp:scale = (1, 1, 1)
56 | uniform token[] xformOpOrder = ["xformOp:translate", "xformOp:orient", "xformOp:scale"]
57 | }
58 | }
59 |
60 |
--------------------------------------------------------------------------------
/Packages/RealityKitContent/Sources/RealityKitContent/RealityKitContent.swift:
--------------------------------------------------------------------------------
1 | import Foundation
2 |
3 | /// The resource bundle for this package (`Bundle.module`), used to load the package's Reality Composer Pro assets at runtime.
4 | public let realityKitContentBundle = Bundle.module
5 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Vision Pro Enterprise API Main Camera Streaming #
2 |
3 |
4 | This example project demonstrates how to access the Vision Pro main front-mounted camera using the new [Enterprise API entitlements](https://developer.apple.com/documentation/visionOS/building-spatial-experiences-for-business-apps-with-enterprise-apis) available for private apps.
5 |
6 | In order to demo something useful with the camera, it also livestreams the microphone and main camera to YouTube using RTMP. This functionality makes use of the excellent [HaishinKit package](https://github.com/shogo4405/HaishinKit.swift) for video streaming along with the VideoToolbox package for h264 encoding of the raw camera frames.
7 |
8 | This project is a testbed, used to explore different applications of the Enterprise API, and I expect to continue to develop it to support WebRTC and perhaps other forms of connectivity, as well as the ARKit functionality and perhaps other API functions.
9 |
10 | ## Setup ##
11 | This project was built on Xcode 16.1 for visionOS 2.x, using HaishinKit version 2.0.1. *It requires a license entitlement from Apple that is locked to the bundle ID.* I've checked my license into the repo, but I can't guarantee that it will continue to be valid, as I don't entirely know how it works. Most likely you need to [obtain your own license from Apple](https://developer.apple.com/go/?id=69613ca716fe11ef8ec848df370857f4) and update the project file to reference your own bundle ID. Also you need a physical Vision Pro to run this app - it won't run on the simulator since there's no main camera available there.
12 |
13 | ## Building ##
14 | - Check out the code
15 | - Open avpenterprisetest.xcodeproj in Xcode
16 | - Update streamKey in YouTubeStreamManager to your own YouTube stream key
17 | - Connect your Vision Pro to your Mac
18 | - Build and Run
19 |
20 | ## Operation ##
21 | As of this writing:
22 | - Click "Show Immersive Space" to activate ARKit
23 | - Click "Test Camera" to start the main camera feed (you should see it appear in the UX window)
24 | - Click "Start Streaming" to begin streaming live to YouTube. It'll take about 10 seconds for the stream to appear on the YouTube console.
25 |
26 | ## License ##
27 | This code is licensed under the MIT open source license and is free and unencumbered for all uses personal and commercial. Please note the terms of the dependencies (especially HaishinKit).
28 |
29 | ## Video Demo ##
30 | https://www.youtube.com/watch?v=Y2DNCtZxP-k
31 |
32 | ## Implementation Notes ##
33 | Since the front camera doesn't present as an `AVCaptureDevice` but rather as a `CameraFrameProvider`, we can't use HaishinKit's simple `attachVideo` method to get a video stream for the RTMP streaming. Instead we have to access individual frames and encode them using the Apple VideoToolbox framework. Frame output from the encoder is sent to HaishinKit via the `append` method.
34 |
35 |
--------------------------------------------------------------------------------
/Resources/Enterprise.license:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/devon3000/AVPEnterpriseAPI-CameraStreaming/f69f78b7a5b2142229032e8a7e4f165469bffb13/Resources/Enterprise.license
--------------------------------------------------------------------------------
/avpenterprisetest.xcodeproj/project.pbxproj:
--------------------------------------------------------------------------------
1 | // !$*UTF8*$!
2 | {
3 | archiveVersion = 1;
4 | classes = {
5 | };
6 | objectVersion = 77;
7 | objects = {
8 |
9 | /* Begin PBXBuildFile section */
10 | 1411F50A2D06E32700EBC7D0 /* HaishinKit in Frameworks */ = {isa = PBXBuildFile; productRef = 1411F5092D06E32700EBC7D0 /* HaishinKit */; };
11 | 145462EB2D0170DD00567C3F /* Enterprise.license in Resources */ = {isa = PBXBuildFile; fileRef = 145462EA2D0170DD00567C3F /* Enterprise.license */; };
12 | 149235532D077D0D00844682 /* HaishinKit in Frameworks */ = {isa = PBXBuildFile; productRef = 149235522D077D0D00844682 /* HaishinKit */; };
13 | 14CA9F6B2D001B4800BB8327 /* RealityKitContent in Frameworks */ = {isa = PBXBuildFile; productRef = 14CA9F6A2D001B4800BB8327 /* RealityKitContent */; };
14 | /* End PBXBuildFile section */
15 |
16 | /* Begin PBXContainerItemProxy section */
17 | 14CA9F812D001B4C00BB8327 /* PBXContainerItemProxy */ = {
18 | isa = PBXContainerItemProxy;
19 | containerPortal = 14CA9F5D2D001B4800BB8327 /* Project object */;
20 | proxyType = 1;
21 | remoteGlobalIDString = 14CA9F642D001B4800BB8327;
22 | remoteInfo = avpenterprisetest;
23 | };
24 | /* End PBXContainerItemProxy section */
25 |
26 | /* Begin PBXFileReference section */
27 | 145462EA2D0170DD00567C3F /* Enterprise.license */ = {isa = PBXFileReference; lastKnownFileType = file; path = Enterprise.license; sourceTree = ""; };
28 | 14CA9F652D001B4800BB8327 /* avpenterprisetest.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = avpenterprisetest.app; sourceTree = BUILT_PRODUCTS_DIR; };
29 | 14CA9F692D001B4800BB8327 /* RealityKitContent */ = {isa = PBXFileReference; lastKnownFileType = wrapper; path = RealityKitContent; sourceTree = ""; };
30 | 14CA9F802D001B4C00BB8327 /* avpenterprisetestTests.xctest */ = {isa = PBXFileReference; explicitFileType = wrapper.cfbundle; includeInIndex = 0; path = avpenterprisetestTests.xctest; sourceTree = BUILT_PRODUCTS_DIR; };
31 | /* End PBXFileReference section */
32 |
33 | /* Begin PBXFileSystemSynchronizedBuildFileExceptionSet section */
34 | 14CA9F862D001B4C00BB8327 /* Exceptions for "avpenterprisetest" folder in "avpenterprisetest" target */ = {
35 | isa = PBXFileSystemSynchronizedBuildFileExceptionSet;
36 | membershipExceptions = (
37 | Info.plist,
38 | );
39 | target = 14CA9F642D001B4800BB8327 /* avpenterprisetest */;
40 | };
41 | /* End PBXFileSystemSynchronizedBuildFileExceptionSet section */
42 |
43 | /* Begin PBXFileSystemSynchronizedRootGroup section */
44 | 14CA9F672D001B4800BB8327 /* avpenterprisetest */ = {
45 | isa = PBXFileSystemSynchronizedRootGroup;
46 | exceptions = (
47 | 14CA9F862D001B4C00BB8327 /* Exceptions for "avpenterprisetest" folder in "avpenterprisetest" target */,
48 | );
49 | path = avpenterprisetest;
50 | sourceTree = "";
51 | };
52 | 14CA9F832D001B4C00BB8327 /* avpenterprisetestTests */ = {
53 | isa = PBXFileSystemSynchronizedRootGroup;
54 | path = avpenterprisetestTests;
55 | sourceTree = "";
56 | };
57 | /* End PBXFileSystemSynchronizedRootGroup section */
58 |
59 | /* Begin PBXFrameworksBuildPhase section */
60 | 14CA9F622D001B4800BB8327 /* Frameworks */ = {
61 | isa = PBXFrameworksBuildPhase;
62 | buildActionMask = 2147483647;
63 | files = (
64 | 1411F50A2D06E32700EBC7D0 /* HaishinKit in Frameworks */,
65 | 149235532D077D0D00844682 /* HaishinKit in Frameworks */,
66 | 14CA9F6B2D001B4800BB8327 /* RealityKitContent in Frameworks */,
67 | );
68 | runOnlyForDeploymentPostprocessing = 0;
69 | };
70 | 14CA9F7D2D001B4C00BB8327 /* Frameworks */ = {
71 | isa = PBXFrameworksBuildPhase;
72 | buildActionMask = 2147483647;
73 | files = (
74 | );
75 | runOnlyForDeploymentPostprocessing = 0;
76 | };
77 | /* End PBXFrameworksBuildPhase section */
78 |
79 | /* Begin PBXGroup section */
80 | 14CA9F5C2D001B4800BB8327 = {
81 | isa = PBXGroup;
82 | children = (
83 | 145462EA2D0170DD00567C3F /* Enterprise.license */,
84 | 14CA9F672D001B4800BB8327 /* avpenterprisetest */,
85 | 14CA9F682D001B4800BB8327 /* Packages */,
86 | 14CA9F832D001B4C00BB8327 /* avpenterprisetestTests */,
87 | 14CA9F662D001B4800BB8327 /* Products */,
88 | );
89 | sourceTree = "";
90 | };
91 | 14CA9F662D001B4800BB8327 /* Products */ = {
92 | isa = PBXGroup;
93 | children = (
94 | 14CA9F652D001B4800BB8327 /* avpenterprisetest.app */,
95 | 14CA9F802D001B4C00BB8327 /* avpenterprisetestTests.xctest */,
96 | );
97 | name = Products;
98 | sourceTree = "";
99 | };
100 | 14CA9F682D001B4800BB8327 /* Packages */ = {
101 | isa = PBXGroup;
102 | children = (
103 | 14CA9F692D001B4800BB8327 /* RealityKitContent */,
104 | );
105 | path = Packages;
106 | sourceTree = "";
107 | };
108 | /* End PBXGroup section */
109 |
110 | /* Begin PBXNativeTarget section */
111 | 14CA9F642D001B4800BB8327 /* avpenterprisetest */ = {
112 | isa = PBXNativeTarget;
113 | buildConfigurationList = 14CA9F872D001B4C00BB8327 /* Build configuration list for PBXNativeTarget "avpenterprisetest" */;
114 | buildPhases = (
115 | 14CA9F612D001B4800BB8327 /* Sources */,
116 | 14CA9F622D001B4800BB8327 /* Frameworks */,
117 | 14CA9F632D001B4800BB8327 /* Resources */,
118 | );
119 | buildRules = (
120 | );
121 | dependencies = (
122 | );
123 | fileSystemSynchronizedGroups = (
124 | 14CA9F672D001B4800BB8327 /* avpenterprisetest */,
125 | );
126 | name = avpenterprisetest;
127 | packageProductDependencies = (
128 | 14CA9F6A2D001B4800BB8327 /* RealityKitContent */,
129 | 1411F5092D06E32700EBC7D0 /* HaishinKit */,
130 | 149235522D077D0D00844682 /* HaishinKit */,
131 | );
132 | productName = avpenterprisetest;
133 | productReference = 14CA9F652D001B4800BB8327 /* avpenterprisetest.app */;
134 | productType = "com.apple.product-type.application";
135 | };
136 | 14CA9F7F2D001B4C00BB8327 /* avpenterprisetestTests */ = {
137 | isa = PBXNativeTarget;
138 | buildConfigurationList = 14CA9F8C2D001B4C00BB8327 /* Build configuration list for PBXNativeTarget "avpenterprisetestTests" */;
139 | buildPhases = (
140 | 14CA9F7C2D001B4C00BB8327 /* Sources */,
141 | 14CA9F7D2D001B4C00BB8327 /* Frameworks */,
142 | 14CA9F7E2D001B4C00BB8327 /* Resources */,
143 | );
144 | buildRules = (
145 | );
146 | dependencies = (
147 | 14CA9F822D001B4C00BB8327 /* PBXTargetDependency */,
148 | );
149 | fileSystemSynchronizedGroups = (
150 | 14CA9F832D001B4C00BB8327 /* avpenterprisetestTests */,
151 | );
152 | name = avpenterprisetestTests;
153 | packageProductDependencies = (
154 | );
155 | productName = avpenterprisetestTests;
156 | productReference = 14CA9F802D001B4C00BB8327 /* avpenterprisetestTests.xctest */;
157 | productType = "com.apple.product-type.bundle.unit-test";
158 | };
159 | /* End PBXNativeTarget section */
160 |
161 | /* Begin PBXProject section */
162 | 14CA9F5D2D001B4800BB8327 /* Project object */ = {
163 | isa = PBXProject;
164 | attributes = {
165 | BuildIndependentTargetsInParallel = 1;
166 | LastSwiftUpdateCheck = 1610;
167 | LastUpgradeCheck = 1610;
168 | TargetAttributes = {
169 | 14CA9F642D001B4800BB8327 = {
170 | CreatedOnToolsVersion = 16.1;
171 | };
172 | 14CA9F7F2D001B4C00BB8327 = {
173 | CreatedOnToolsVersion = 16.1;
174 | TestTargetID = 14CA9F642D001B4800BB8327;
175 | };
176 | };
177 | };
178 | buildConfigurationList = 14CA9F602D001B4800BB8327 /* Build configuration list for PBXProject "avpenterprisetest" */;
179 | developmentRegion = en;
180 | hasScannedForEncodings = 0;
181 | knownRegions = (
182 | en,
183 | Base,
184 | );
185 | mainGroup = 14CA9F5C2D001B4800BB8327;
186 | minimizedProjectReferenceProxies = 1;
187 | packageReferences = (
188 | 149235512D077D0D00844682 /* XCRemoteSwiftPackageReference "HaishinKit" */,
189 | );
190 | preferredProjectObjectVersion = 77;
191 | productRefGroup = 14CA9F662D001B4800BB8327 /* Products */;
192 | projectDirPath = "";
193 | projectRoot = "";
194 | targets = (
195 | 14CA9F642D001B4800BB8327 /* avpenterprisetest */,
196 | 14CA9F7F2D001B4C00BB8327 /* avpenterprisetestTests */,
197 | );
198 | };
199 | /* End PBXProject section */
200 |
201 | /* Begin PBXResourcesBuildPhase section */
202 | 14CA9F632D001B4800BB8327 /* Resources */ = {
203 | isa = PBXResourcesBuildPhase;
204 | buildActionMask = 2147483647;
205 | files = (
206 | 145462EB2D0170DD00567C3F /* Enterprise.license in Resources */,
207 | );
208 | runOnlyForDeploymentPostprocessing = 0;
209 | };
210 | 14CA9F7E2D001B4C00BB8327 /* Resources */ = {
211 | isa = PBXResourcesBuildPhase;
212 | buildActionMask = 2147483647;
213 | files = (
214 | );
215 | runOnlyForDeploymentPostprocessing = 0;
216 | };
217 | /* End PBXResourcesBuildPhase section */
218 |
219 | /* Begin PBXSourcesBuildPhase section */
220 | 14CA9F612D001B4800BB8327 /* Sources */ = {
221 | isa = PBXSourcesBuildPhase;
222 | buildActionMask = 2147483647;
223 | files = (
224 | );
225 | runOnlyForDeploymentPostprocessing = 0;
226 | };
227 | 14CA9F7C2D001B4C00BB8327 /* Sources */ = {
228 | isa = PBXSourcesBuildPhase;
229 | buildActionMask = 2147483647;
230 | files = (
231 | );
232 | runOnlyForDeploymentPostprocessing = 0;
233 | };
234 | /* End PBXSourcesBuildPhase section */
235 |
236 | /* Begin PBXTargetDependency section */
237 | 14CA9F822D001B4C00BB8327 /* PBXTargetDependency */ = {
238 | isa = PBXTargetDependency;
239 | target = 14CA9F642D001B4800BB8327 /* avpenterprisetest */;
240 | targetProxy = 14CA9F812D001B4C00BB8327 /* PBXContainerItemProxy */;
241 | };
242 | /* End PBXTargetDependency section */
243 |
244 | /* Begin XCBuildConfiguration section */
245 | 14CA9F882D001B4C00BB8327 /* Debug */ = {
246 | isa = XCBuildConfiguration;
247 | buildSettings = {
248 | ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
249 | ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor;
250 | CODE_SIGN_ENTITLEMENTS = avpenterprisetest/avpenterprisetest.entitlements;
251 | CODE_SIGN_IDENTITY = "Apple Development";
252 | "CODE_SIGN_IDENTITY[sdk=xros*]" = "iPhone Developer";
253 | CODE_SIGN_STYLE = Manual;
254 | CURRENT_PROJECT_VERSION = 1;
255 | DEVELOPMENT_ASSET_PATHS = "\"avpenterprisetest/Preview Content\"";
256 | DEVELOPMENT_TEAM = "";
257 | "DEVELOPMENT_TEAM[sdk=xros*]" = Z9FYRK8P89;
258 | ENABLE_PREVIEWS = YES;
259 | GENERATE_INFOPLIST_FILE = YES;
260 | INFOPLIST_FILE = "$(TARGET_NAME)/Info.plist";
261 | INFOPLIST_KEY_CFBundleDisplayName = "AVP Enterprise Test";
262 | LD_RUNPATH_SEARCH_PATHS = (
263 | "$(inherited)",
264 | "@executable_path/Frameworks",
265 | );
266 | MARKETING_VERSION = 1.0;
267 | PRODUCT_BUNDLE_IDENTIFIER = com.imeve.avpenterprisetest;
268 | PRODUCT_NAME = "$(TARGET_NAME)";
269 | PROVISIONING_PROFILE_SPECIFIER = "";
270 | "PROVISIONING_PROFILE_SPECIFIER[sdk=xros*]" = "Enterprise Test Profile for Avatour AVP Device";
271 | SUPPORTED_PLATFORMS = "xros xrsimulator";
272 | SWIFT_EMIT_LOC_STRINGS = YES;
273 | SWIFT_VERSION = 5.0;
274 | TARGETED_DEVICE_FAMILY = 7;
275 | };
276 | name = Debug;
277 | };
278 | 14CA9F892D001B4C00BB8327 /* Release */ = {
279 | isa = XCBuildConfiguration;
280 | buildSettings = {
281 | ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
282 | ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor;
283 | CODE_SIGN_ENTITLEMENTS = avpenterprisetest/avpenterprisetest.entitlements;
284 | CODE_SIGN_IDENTITY = "Apple Development";
285 | "CODE_SIGN_IDENTITY[sdk=xros*]" = "iPhone Developer";
286 | CODE_SIGN_STYLE = Manual;
287 | CURRENT_PROJECT_VERSION = 1;
288 | DEVELOPMENT_ASSET_PATHS = "\"avpenterprisetest/Preview Content\"";
289 | DEVELOPMENT_TEAM = "";
290 | "DEVELOPMENT_TEAM[sdk=xros*]" = Z9FYRK8P89;
291 | ENABLE_PREVIEWS = YES;
292 | GENERATE_INFOPLIST_FILE = YES;
293 | INFOPLIST_FILE = "$(TARGET_NAME)/Info.plist";
294 | INFOPLIST_KEY_CFBundleDisplayName = "AVP Enterprise Test";
295 | LD_RUNPATH_SEARCH_PATHS = (
296 | "$(inherited)",
297 | "@executable_path/Frameworks",
298 | );
299 | MARKETING_VERSION = 1.0;
300 | PRODUCT_BUNDLE_IDENTIFIER = com.imeve.avpenterprisetest;
301 | PRODUCT_NAME = "$(TARGET_NAME)";
302 | PROVISIONING_PROFILE_SPECIFIER = "";
303 | "PROVISIONING_PROFILE_SPECIFIER[sdk=xros*]" = "Enterprise Test Profile for Avatour AVP Device";
304 | SUPPORTED_PLATFORMS = "xros xrsimulator";
305 | SWIFT_EMIT_LOC_STRINGS = YES;
306 | SWIFT_VERSION = 5.0;
307 | TARGETED_DEVICE_FAMILY = 7;
308 | };
309 | name = Release;
310 | };
311 | 14CA9F8A2D001B4C00BB8327 /* Debug */ = {
312 | isa = XCBuildConfiguration;
313 | buildSettings = {
314 | ALWAYS_SEARCH_USER_PATHS = NO;
315 | ASSETCATALOG_COMPILER_GENERATE_SWIFT_ASSET_SYMBOL_EXTENSIONS = YES;
316 | CLANG_ANALYZER_NONNULL = YES;
317 | CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE;
318 | CLANG_CXX_LANGUAGE_STANDARD = "gnu++20";
319 | CLANG_ENABLE_MODULES = YES;
320 | CLANG_ENABLE_OBJC_ARC = YES;
321 | CLANG_ENABLE_OBJC_WEAK = YES;
322 | CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES;
323 | CLANG_WARN_BOOL_CONVERSION = YES;
324 | CLANG_WARN_COMMA = YES;
325 | CLANG_WARN_CONSTANT_CONVERSION = YES;
326 | CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES;
327 | CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
328 | CLANG_WARN_DOCUMENTATION_COMMENTS = YES;
329 | CLANG_WARN_EMPTY_BODY = YES;
330 | CLANG_WARN_ENUM_CONVERSION = YES;
331 | CLANG_WARN_INFINITE_RECURSION = YES;
332 | CLANG_WARN_INT_CONVERSION = YES;
333 | CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES;
334 | CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES;
335 | CLANG_WARN_OBJC_LITERAL_CONVERSION = YES;
336 | CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
337 | CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = YES;
338 | CLANG_WARN_RANGE_LOOP_ANALYSIS = YES;
339 | CLANG_WARN_STRICT_PROTOTYPES = YES;
340 | CLANG_WARN_SUSPICIOUS_MOVE = YES;
341 | CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE;
342 | CLANG_WARN_UNREACHABLE_CODE = YES;
343 | CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
344 | COPY_PHASE_STRIP = NO;
345 | DEBUG_INFORMATION_FORMAT = dwarf;
346 | ENABLE_STRICT_OBJC_MSGSEND = YES;
347 | ENABLE_TESTABILITY = YES;
348 | ENABLE_USER_SCRIPT_SANDBOXING = YES;
349 | GCC_C_LANGUAGE_STANDARD = gnu17;
350 | GCC_DYNAMIC_NO_PIC = NO;
351 | GCC_NO_COMMON_BLOCKS = YES;
352 | GCC_OPTIMIZATION_LEVEL = 0;
353 | GCC_PREPROCESSOR_DEFINITIONS = (
354 | "DEBUG=1",
355 | "$(inherited)",
356 | );
357 | GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
358 | GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
359 | GCC_WARN_UNDECLARED_SELECTOR = YES;
360 | GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
361 | GCC_WARN_UNUSED_FUNCTION = YES;
362 | GCC_WARN_UNUSED_VARIABLE = YES;
363 | LOCALIZATION_PREFERS_STRING_CATALOGS = YES;
364 | MTL_ENABLE_DEBUG_INFO = INCLUDE_SOURCE;
365 | MTL_FAST_MATH = YES;
366 | ONLY_ACTIVE_ARCH = YES;
367 | SDKROOT = xros;
368 | SWIFT_ACTIVE_COMPILATION_CONDITIONS = "DEBUG $(inherited)";
369 | SWIFT_OPTIMIZATION_LEVEL = "-Onone";
370 | XROS_DEPLOYMENT_TARGET = 2.1;
371 | };
372 | name = Debug;
373 | };
374 | 14CA9F8B2D001B4C00BB8327 /* Release */ = {
375 | isa = XCBuildConfiguration;
376 | buildSettings = {
377 | ALWAYS_SEARCH_USER_PATHS = NO;
378 | ASSETCATALOG_COMPILER_GENERATE_SWIFT_ASSET_SYMBOL_EXTENSIONS = YES;
379 | CLANG_ANALYZER_NONNULL = YES;
380 | CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE;
381 | CLANG_CXX_LANGUAGE_STANDARD = "gnu++20";
382 | CLANG_ENABLE_MODULES = YES;
383 | CLANG_ENABLE_OBJC_ARC = YES;
384 | CLANG_ENABLE_OBJC_WEAK = YES;
385 | CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES;
386 | CLANG_WARN_BOOL_CONVERSION = YES;
387 | CLANG_WARN_COMMA = YES;
388 | CLANG_WARN_CONSTANT_CONVERSION = YES;
389 | CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES;
390 | CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
391 | CLANG_WARN_DOCUMENTATION_COMMENTS = YES;
392 | CLANG_WARN_EMPTY_BODY = YES;
393 | CLANG_WARN_ENUM_CONVERSION = YES;
394 | CLANG_WARN_INFINITE_RECURSION = YES;
395 | CLANG_WARN_INT_CONVERSION = YES;
396 | CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES;
397 | CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES;
398 | CLANG_WARN_OBJC_LITERAL_CONVERSION = YES;
399 | CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
400 | CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = YES;
401 | CLANG_WARN_RANGE_LOOP_ANALYSIS = YES;
402 | CLANG_WARN_STRICT_PROTOTYPES = YES;
403 | CLANG_WARN_SUSPICIOUS_MOVE = YES;
404 | CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE;
405 | CLANG_WARN_UNREACHABLE_CODE = YES;
406 | CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
407 | COPY_PHASE_STRIP = NO;
408 | DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym";
409 | ENABLE_NS_ASSERTIONS = NO;
410 | ENABLE_STRICT_OBJC_MSGSEND = YES;
411 | ENABLE_USER_SCRIPT_SANDBOXING = YES;
412 | GCC_C_LANGUAGE_STANDARD = gnu17;
413 | GCC_NO_COMMON_BLOCKS = YES;
414 | GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
415 | GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
416 | GCC_WARN_UNDECLARED_SELECTOR = YES;
417 | GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
418 | GCC_WARN_UNUSED_FUNCTION = YES;
419 | GCC_WARN_UNUSED_VARIABLE = YES;
420 | LOCALIZATION_PREFERS_STRING_CATALOGS = YES;
421 | MTL_ENABLE_DEBUG_INFO = NO;
422 | MTL_FAST_MATH = YES;
423 | SDKROOT = xros;
424 | SWIFT_COMPILATION_MODE = wholemodule;
425 | VALIDATE_PRODUCT = YES;
426 | XROS_DEPLOYMENT_TARGET = 2.1;
427 | };
428 | name = Release;
429 | };
430 | 14CA9F8D2D001B4C00BB8327 /* Debug */ = {
431 | isa = XCBuildConfiguration;
432 | buildSettings = {
433 | BUNDLE_LOADER = "$(TEST_HOST)";
434 | CODE_SIGN_STYLE = Automatic;
435 | CURRENT_PROJECT_VERSION = 1;
436 | DEVELOPMENT_TEAM = Z9FYRK8P89;
437 | GENERATE_INFOPLIST_FILE = YES;
438 | MARKETING_VERSION = 1.0;
439 | PRODUCT_BUNDLE_IDENTIFIER = com.imeve.avpenterprisetestTests;
440 | PRODUCT_NAME = "$(TARGET_NAME)";
441 | SUPPORTED_PLATFORMS = "xros xrsimulator";
442 | SWIFT_EMIT_LOC_STRINGS = NO;
443 | SWIFT_VERSION = 5.0;
444 | TARGETED_DEVICE_FAMILY = 7;
445 | TEST_HOST = "$(BUILT_PRODUCTS_DIR)/avpenterprisetest.app/$(BUNDLE_EXECUTABLE_FOLDER_PATH)/avpenterprisetest";
446 | XROS_DEPLOYMENT_TARGET = 2.1;
447 | };
448 | name = Debug;
449 | };
450 | 14CA9F8E2D001B4C00BB8327 /* Release */ = {
451 | isa = XCBuildConfiguration;
452 | buildSettings = {
453 | BUNDLE_LOADER = "$(TEST_HOST)";
454 | CODE_SIGN_STYLE = Automatic;
455 | CURRENT_PROJECT_VERSION = 1;
456 | DEVELOPMENT_TEAM = Z9FYRK8P89;
457 | GENERATE_INFOPLIST_FILE = YES;
458 | MARKETING_VERSION = 1.0;
459 | PRODUCT_BUNDLE_IDENTIFIER = com.imeve.avpenterprisetestTests;
460 | PRODUCT_NAME = "$(TARGET_NAME)";
461 | SUPPORTED_PLATFORMS = "xros xrsimulator";
462 | SWIFT_EMIT_LOC_STRINGS = NO;
463 | SWIFT_VERSION = 5.0;
464 | TARGETED_DEVICE_FAMILY = 7;
465 | TEST_HOST = "$(BUILT_PRODUCTS_DIR)/avpenterprisetest.app/$(BUNDLE_EXECUTABLE_FOLDER_PATH)/avpenterprisetest";
466 | XROS_DEPLOYMENT_TARGET = 2.1;
467 | };
468 | name = Release;
469 | };
470 | /* End XCBuildConfiguration section */
471 |
472 | /* Begin XCConfigurationList section */
473 | 14CA9F602D001B4800BB8327 /* Build configuration list for PBXProject "avpenterprisetest" */ = {
474 | isa = XCConfigurationList;
475 | buildConfigurations = (
476 | 14CA9F8A2D001B4C00BB8327 /* Debug */,
477 | 14CA9F8B2D001B4C00BB8327 /* Release */,
478 | );
479 | defaultConfigurationIsVisible = 0;
480 | defaultConfigurationName = Release;
481 | };
482 | 14CA9F872D001B4C00BB8327 /* Build configuration list for PBXNativeTarget "avpenterprisetest" */ = {
483 | isa = XCConfigurationList;
484 | buildConfigurations = (
485 | 14CA9F882D001B4C00BB8327 /* Debug */,
486 | 14CA9F892D001B4C00BB8327 /* Release */,
487 | );
488 | defaultConfigurationIsVisible = 0;
489 | defaultConfigurationName = Release;
490 | };
491 | 14CA9F8C2D001B4C00BB8327 /* Build configuration list for PBXNativeTarget "avpenterprisetestTests" */ = {
492 | isa = XCConfigurationList;
493 | buildConfigurations = (
494 | 14CA9F8D2D001B4C00BB8327 /* Debug */,
495 | 14CA9F8E2D001B4C00BB8327 /* Release */,
496 | );
497 | defaultConfigurationIsVisible = 0;
498 | defaultConfigurationName = Release;
499 | };
500 | /* End XCConfigurationList section */
501 |
502 | /* Begin XCRemoteSwiftPackageReference section */
503 | 149235512D077D0D00844682 /* XCRemoteSwiftPackageReference "HaishinKit" */ = {
504 | isa = XCRemoteSwiftPackageReference;
505 | repositoryURL = "https://github.com/shogo4405/HaishinKit.swift";
506 | requirement = {
507 | kind = upToNextMajorVersion;
508 | minimumVersion = 2.0.1;
509 | };
510 | };
511 | /* End XCRemoteSwiftPackageReference section */
512 |
513 | /* Begin XCSwiftPackageProductDependency section */
514 | 1411F5092D06E32700EBC7D0 /* HaishinKit */ = {
515 | isa = XCSwiftPackageProductDependency;
516 | productName = HaishinKit;
517 | };
518 | 149235522D077D0D00844682 /* HaishinKit */ = {
519 | isa = XCSwiftPackageProductDependency;
520 | package = 149235512D077D0D00844682 /* XCRemoteSwiftPackageReference "HaishinKit" */;
521 | productName = HaishinKit;
522 | };
523 | 14CA9F6A2D001B4800BB8327 /* RealityKitContent */ = {
524 | isa = XCSwiftPackageProductDependency;
525 | productName = RealityKitContent;
526 | };
527 | /* End XCSwiftPackageProductDependency section */
528 | };
529 | rootObject = 14CA9F5D2D001B4800BB8327 /* Project object */;
530 | }
531 |
--------------------------------------------------------------------------------
/avpenterprisetest.xcodeproj/project.xcworkspace/contents.xcworkspacedata:
--------------------------------------------------------------------------------
1 |
2 |
4 |
6 |
7 |
8 |
--------------------------------------------------------------------------------
/avpenterprisetest.xcodeproj/project.xcworkspace/xcshareddata/swiftpm/Package.resolved:
--------------------------------------------------------------------------------
1 | {
2 | "originHash" : "dcbe2c60984a8af858d3f1b61d9d6499db83bce0b916a1df2fcf416d32318dbc",
3 | "pins" : [
4 | {
5 | "identity" : "haishinkit.swift",
6 | "kind" : "remoteSourceControl",
7 | "location" : "https://github.com/shogo4405/HaishinKit.swift",
8 | "state" : {
9 | "revision" : "e83507c77af50ffe4e907f842473dc0e27e344b1",
10 | "version" : "2.0.1"
11 | }
12 | },
13 | {
14 | "identity" : "logboard",
15 | "kind" : "remoteSourceControl",
16 | "location" : "https://github.com/shogo4405/Logboard.git",
17 | "state" : {
18 | "revision" : "272976e1f3e8873e60ffe4b08fe50df48a93751b",
19 | "version" : "2.5.0"
20 | }
21 | }
22 | ],
23 | "version" : 3
24 | }
25 |
--------------------------------------------------------------------------------
/avpenterprisetest.xcodeproj/xcuserdata/devoncopley.xcuserdatad/xcdebugger/Breakpoints_v2.xcbkptlist:
--------------------------------------------------------------------------------
1 |
2 |
6 |
7 |
--------------------------------------------------------------------------------
/avpenterprisetest.xcodeproj/xcuserdata/devoncopley.xcuserdatad/xcschemes/xcschememanagement.plist:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | SchemeUserState
6 |
7 | avpenterprisetest.xcscheme_^#shared#^_
8 |
9 | orderHint
10 | 0
11 |
12 |
13 |
14 |
15 |
--------------------------------------------------------------------------------
/avpenterprisetest/AppModel.swift:
--------------------------------------------------------------------------------
1 | //
2 | // AppModel.swift
3 | // avpenterprisetest
4 | //
5 | // Created by Devon Copley on 12/3/24.
6 | //
7 |
8 | import SwiftUI
9 |
/// Shared application state injected into the view hierarchy as an environment object.
class AppModel: ObservableObject {
    /// Lifecycle phase of the immersive space; published so dependent views refresh.
    @Published var immersiveSpaceState: ImmersiveSpaceState = .closed
    /// Scene identifier used when opening or dismissing the immersive space.
    let immersiveSpaceID = "IDImmersiveSpace"
}

/// The three lifecycle phases an immersive space can be in.
enum ImmersiveSpaceState {
    case closed
    case inTransition
    case open
}
20 |
--------------------------------------------------------------------------------
/avpenterprisetest/Assets.xcassets/AccentColor.colorset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "colors" : [
3 | {
4 | "idiom" : "universal"
5 | }
6 | ],
7 | "info" : {
8 | "author" : "xcode",
9 | "version" : 1
10 | }
11 | }
12 |
--------------------------------------------------------------------------------
/avpenterprisetest/Assets.xcassets/AppIcon.solidimagestack/Back.solidimagestacklayer/Content.imageset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "idiom" : "vision",
5 | "scale" : "2x"
6 | }
7 | ],
8 | "info" : {
9 | "author" : "xcode",
10 | "version" : 1
11 | }
12 | }
13 |
--------------------------------------------------------------------------------
/avpenterprisetest/Assets.xcassets/AppIcon.solidimagestack/Back.solidimagestacklayer/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "info" : {
3 | "author" : "xcode",
4 | "version" : 1
5 | }
6 | }
7 |
--------------------------------------------------------------------------------
/avpenterprisetest/Assets.xcassets/AppIcon.solidimagestack/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "info" : {
3 | "author" : "xcode",
4 | "version" : 1
5 | },
6 | "layers" : [
7 | {
8 | "filename" : "Front.solidimagestacklayer"
9 | },
10 | {
11 | "filename" : "Middle.solidimagestacklayer"
12 | },
13 | {
14 | "filename" : "Back.solidimagestacklayer"
15 | }
16 | ]
17 | }
18 |
--------------------------------------------------------------------------------
/avpenterprisetest/Assets.xcassets/AppIcon.solidimagestack/Front.solidimagestacklayer/Content.imageset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "idiom" : "vision",
5 | "scale" : "2x"
6 | }
7 | ],
8 | "info" : {
9 | "author" : "xcode",
10 | "version" : 1
11 | }
12 | }
13 |
--------------------------------------------------------------------------------
/avpenterprisetest/Assets.xcassets/AppIcon.solidimagestack/Front.solidimagestacklayer/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "info" : {
3 | "author" : "xcode",
4 | "version" : 1
5 | }
6 | }
7 |
--------------------------------------------------------------------------------
/avpenterprisetest/Assets.xcassets/AppIcon.solidimagestack/Middle.solidimagestacklayer/Content.imageset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "idiom" : "vision",
5 | "scale" : "2x"
6 | }
7 | ],
8 | "info" : {
9 | "author" : "xcode",
10 | "version" : 1
11 | }
12 | }
13 |
--------------------------------------------------------------------------------
/avpenterprisetest/Assets.xcassets/AppIcon.solidimagestack/Middle.solidimagestacklayer/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "info" : {
3 | "author" : "xcode",
4 | "version" : 1
5 | }
6 | }
7 |
--------------------------------------------------------------------------------
/avpenterprisetest/Assets.xcassets/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "info" : {
3 | "author" : "xcode",
4 | "version" : 1
5 | }
6 | }
7 |
--------------------------------------------------------------------------------
/avpenterprisetest/AudioCapture.swift:
--------------------------------------------------------------------------------
1 | //
2 | // AudioCapture.swift
3 | // avpenterprisetest
4 | //
5 | // Created by Devon Copley on 12/8/24.
6 | //
7 |
8 | import AVFoundation
9 |
/// Receives raw microphone buffers captured by `AudioCapture`.
protocol AudioCaptureDelegate: AnyObject {
    func audioCapture(_ audioCapture: AudioCapture, buffer: AVAudioBuffer, time: AVAudioTime)
}

/// Captures microphone audio through AVAudioEngine and forwards each PCM buffer
/// to its delegate. Safe to start and stop repeatedly.
final class AudioCapture {
    /// True while the engine is running and the input tap is installed.
    var isRunning = false
    weak var delegate: AudioCaptureDelegate?
    private let audioEngine = AVAudioEngine()

    /// Connects the input node to the mixer, installs a tap, and starts the engine.
    /// No-op if capture is already running.
    func startRunning() {
        guard !isRunning else {
            return
        }
        let input = audioEngine.inputNode
        let mixer = audioEngine.mainMixerNode
        let format = input.inputFormat(forBus: 0)
        audioEngine.connect(input, to: mixer, format: format)
        // [weak self] breaks the retain cycle the original had: the tap block is
        // retained by the input node, which is owned by audioEngine, which is
        // owned by self — a strong `self` capture would leak this object.
        input.installTap(onBus: 0, bufferSize: 1024, format: format) { [weak self] buffer, when in
            guard let self = self else { return }
            self.delegate?.audioCapture(self, buffer: buffer, time: when)
        }
        do {
            try audioEngine.start()
            isRunning = true
        } catch {
            // Engine failed to start: remove the tap so a later startRunning()
            // can install a fresh one (a second tap on the same bus raises).
            input.removeTap(onBus: 0)
            print("Error starting audio engine: \(error.localizedDescription)")
        }
    }

    /// Stops capture and removes the input tap. No-op if not running.
    func stopRunning() {
        guard isRunning else {
            return
        }
        // Remove the tap before stopping; the original left it installed, so a
        // subsequent startRunning() would install a second tap and crash.
        audioEngine.inputNode.removeTap(onBus: 0)
        audioEngine.stop()
        isRunning = false
    }
}
45 |
--------------------------------------------------------------------------------
/avpenterprisetest/AudioHandler.swift:
--------------------------------------------------------------------------------
1 | //
2 | // AudioHandler.swift
3 | // avpenterprisetest
4 | //
5 | // Created by Devon Copley on 12/9/24.
6 | //
7 |
8 | import HaishinKit
9 | import AVFoundation
10 |
/// Bridges microphone input from AVAudioEngine into a HaishinKit `MediaMixer`.
class AudioHandler {
    private let mixer: MediaMixer
    private let audioEngine = AVAudioEngine()
    // Hardware input format, captured once at construction; reused for the tap
    // and for building CMSampleBuffers.
    private let format: AVAudioFormat

    init(mixer: MediaMixer) {
        self.mixer = mixer
        self.format = audioEngine.inputNode.inputFormat(forBus: 0)
    }

    /// Installs an input tap and starts the engine so captured audio flows
    /// into the mixer. Errors are logged, not thrown.
    func configureAudio() {
        let input = audioEngine.inputNode

        // Capture audio data; weak self keeps the stored tap block from
        // retaining this handler.
        input.installTap(onBus: 0, bufferSize: 1024, format: format) { [weak self] buffer, when in
            self?.processAudioBuffer(buffer, at: when)
        }

        do {
            try audioEngine.start()
            print("Audio engine started successfully.")
        } catch {
            print("Failed to start audio engine: \(error)")
        }
    }

    /// Wraps one PCM buffer in a CMSampleBuffer and appends it to the mixer
    /// asynchronously.
    private func processAudioBuffer(_ buffer: AVAudioPCMBuffer, at time: AVAudioTime) {
        guard let sample = CMSampleBuffer.create(from: buffer, format: format, at: time) else {
            print("Failed to create CMSampleBuffer")
            return
        }
        print("Appending audio buffer at time: \(time)")
        Task {
            await mixer.append(sample)
        }
    }
}
51 |
extension CMSampleBuffer {
    /// Builds a CMSampleBuffer carrying the PCM payload of `audioBuffer`,
    /// timestamped at `time`. Returns nil if any Core Media call fails.
    static func create(from audioBuffer: AVAudioPCMBuffer, format: AVAudioFormat, at time: AVAudioTime) -> CMSampleBuffer? {
        var sampleBuffer: CMSampleBuffer?

        // One timing entry applies to all samples: each lasts 1/sampleRate seconds.
        var timing = CMSampleTimingInfo(
            duration: CMTime(value: 1, timescale: CMTimeScale(format.sampleRate)),
            presentationTimeStamp: time.toCMTime(),
            decodeTimeStamp: .invalid
        )

        // Format description derived from the engine's stream description.
        guard let description = try? CMAudioFormatDescription(audioStreamBasicDescription: format.streamDescription.pointee) else {
            return nil
        }

        // Create the buffer with no data yet (dataReady: false) — the PCM bytes
        // are attached below. The original passed dataBuffer: nil with
        // dataReady: true, which produced a sample buffer with NO audio payload.
        var status = CMSampleBufferCreate(
            allocator: kCFAllocatorDefault,
            dataBuffer: nil,
            dataReady: false,
            makeDataReadyCallback: nil,
            refcon: nil,
            formatDescription: description,
            sampleCount: CMItemCount(audioBuffer.frameLength),
            sampleTimingEntryCount: 1,
            sampleTimingArray: &timing, // `timing` must be mutable
            sampleSizeEntryCount: 0,
            sampleSizeArray: nil,
            sampleBufferOut: &sampleBuffer
        )
        guard status == noErr, let buffer = sampleBuffer else { return nil }

        // Copy the PCM data out of the AVAudioPCMBuffer into the sample buffer;
        // this also marks the buffer's data as ready.
        status = CMSampleBufferSetDataBufferFromAudioBufferList(
            buffer,
            blockBufferAllocator: kCFAllocatorDefault,
            blockBufferMemoryAllocator: kCFAllocatorDefault,
            flags: 0,
            bufferList: audioBuffer.audioBufferList
        )
        guard status == noErr else { return nil }

        return buffer
    }
}
89 |
extension AVAudioTime {
    /// Converts this timestamp to a CMTime.
    ///
    /// Prefers the sample-time representation when present. An AVAudioTime may
    /// carry only a host time (the original read `sampleTime`/`sampleRate`
    /// unconditionally, which yields garbage in that case), so fall back to
    /// converting the mach host time to seconds.
    func toCMTime() -> CMTime {
        if isSampleTimeValid {
            return CMTimeMake(value: Int64(sampleTime), timescale: Int32(sampleRate))
        }
        // Host-time-only timestamp: nanosecond-precision CMTime from host time.
        return CMTime(seconds: AVAudioTime.seconds(forHostTime: hostTime), preferredTimescale: 1_000_000_000)
    }
}
97 |
--------------------------------------------------------------------------------
/avpenterprisetest/ContentView.swift:
--------------------------------------------------------------------------------
1 | import SwiftUI
2 | import RealityKit
3 | import ARKit
4 | import AVFoundation
5 |
/// Main 2D window: shows camera-access status, a live preview of the left main
/// camera, and controls for the immersive space and YouTube streaming.
struct ContentView: View {
    @EnvironmentObject var appModel: AppModel
    @State private var isStreaming = false
    private let streamManager = YouTubeStreamManager()
    @Environment(\.dismissImmersiveSpace) private var dismissImmersiveSpace
    @Environment(\.openImmersiveSpace) private var openImmersiveSpace

    @State private var arkitSession = ARKitSession()
    @State private var cameraAccessStatus: String = "Checking camera access..."
    // Counts processed frames; also drives encoder timestamps (30 fps timescale).
    @State private var frameCount = 0
    @State private var pixelBuffer: CVPixelBuffer?
    @State private var isCameraRunning = false
    @State private var videoEncoder: VideoEncoder?

    let placeholderImage = Image(systemName: "camera")

    var body: some View {
        VStack(spacing: 20) {
            // Status indicator — green only once frames are actually arriving.
            Text(cameraAccessStatus)
                .font(.headline)
                .foregroundColor(cameraAccessStatus == "Frames are displaying!" ? .green : .red)

            // Frame count
            Text("Frames processed: \(frameCount)")
                .font(.subheadline)
                .foregroundColor(.gray)

            // Latest camera frame, or a placeholder before capture starts.
            let displayedImage = pixelBuffer?.image ?? placeholderImage
            displayedImage
                .resizable()
                .scaledToFit()
                .frame(height: 300)
                .cornerRadius(10)
                .padding()

            // Buttons
            HStack {
                Button("Test Camera") {
                    testCamera()
                }
                .padding()
                .background(isCameraRunning ? Color.gray : Color.blue)
                .foregroundColor(.white)
                .cornerRadius(10)
                .disabled(isCameraRunning)

                Button(appModel.immersiveSpaceState == .open ? "Hide Immersive Space" : "Show Immersive Space") {
                    toggleImmersiveSpace()
                }
                .padding()
                .background(Color.blue)
                .foregroundColor(.white)
                .cornerRadius(10)
            }
            // Streaming Button
            Button(isStreaming ? "Stop Streaming" : "Start Streaming") {
                Task {
                    if isStreaming {
                        do {
                            try await streamManager.stopStreaming()
                        } catch {
                            print("Failed to stop streaming: \(error.localizedDescription)")
                        }
                        // Treat the stream as stopped even if teardown reported
                        // an error — best-effort shutdown.
                        isStreaming = false
                    } else {
                        do {
                            setupVideoEncoder()
                            try await streamManager.startStreaming()
                            // Flip to "streaming" only on success. The original
                            // toggled unconditionally after the do/catch, leaving
                            // the UI claiming a live stream after a failed start.
                            isStreaming = true
                        } catch {
                            print("Failed to start streaming: \(error.localizedDescription)")
                        }
                    }
                }
            }
            .padding()
            .background(isStreaming ? Color.red : Color.green)
            .foregroundColor(.white)
            .cornerRadius(10)
        }
        .padding()
        .onAppear {
            checkCameraAccess()
            checkMicrophoneAccess()
        }
    }

    /// Creates a fresh VideoEncoder sized for the stream (854x480 @ 30 fps).
    private func setupVideoEncoder() {
        videoEncoder = VideoEncoder(width: 854, height: 480, frameRate: 30)
        print("[Debug] VideoEncoder initialized.")
    }

    /// Forwards one camera frame to the encoder, stamped on a 30 fps timeline.
    /// Drops the frame when streaming is off or the encoder is missing.
    private func sendFrameToEncoder(pixelBuffer: CVPixelBuffer) {
        guard isStreaming else {
            print("[Debug] Skipping frame as streaming is not active.")
            return
        }

        guard let encoder = videoEncoder else {
            print("[Error] VideoEncoder is not initialized.")
            return
        }

        let timestamp = CMTime(value: Int64(frameCount), timescale: 30)
        frameCount += 1

        print("[Debug] Sending frame to encoder with timestamp: \(timestamp)")
        encoder.encode(pixelBuffer: pixelBuffer, presentationTimeStamp: timestamp)
    }

    /// Updates the status line based on whether this device supports
    /// main-camera access via ARKit's CameraFrameProvider.
    private func checkCameraAccess() {
        if CameraFrameProvider.isSupported {
            cameraAccessStatus = "Camera access is supported!"
        } else {
            cameraAccessStatus = "Camera access is not supported."
        }
    }

    /// Requests microphone permission; the result is only logged.
    private func checkMicrophoneAccess() {
        AVCaptureDevice.requestAccess(for: .audio) { granted in
            if granted {
                print("Microphone access granted.")
            } else {
                print("Microphone access denied.")
            }
        }
    }

    /// Kicks off camera capture, but only if access was reported as supported.
    private func testCamera() {
        guard cameraAccessStatus == "Camera access is supported!" else {
            print("Cannot start camera: access not supported.")
            return
        }

        cameraAccessStatus = "Starting camera..."
        isCameraRunning = true

        Task {
            await configureCameraAndStartFrames()
        }
    }

    /// Opens or dismisses the immersive space, ignoring taps while a
    /// transition is already in flight.
    private func toggleImmersiveSpace() {
        Task { @MainActor in
            switch appModel.immersiveSpaceState {
            case .open:
                print("Dismissing immersive space...")
                appModel.immersiveSpaceState = .inTransition
                await dismissImmersiveSpace()
                appModel.immersiveSpaceState = .closed

            case .closed:
                print("Opening immersive space...")
                appModel.immersiveSpaceState = .inTransition
                let result = await openImmersiveSpace(id: appModel.immersiveSpaceID)
                switch result {
                case .opened:
                    appModel.immersiveSpaceState = .open
                case .userCancelled, .error:
                    appModel.immersiveSpaceState = .closed
                @unknown default:
                    appModel.immersiveSpaceState = .closed
                }

            case .inTransition:
                print("Action ignored during transition.")
            }
        }
    }

    /// Runs an ARKit CameraFrameProvider session, picks the highest-resolution
    /// left-camera format, then pumps frames into the preview and the encoder
    /// until the update stream ends.
    private func configureCameraAndStartFrames() async {
        let cameraFrameProvider = CameraFrameProvider()

        do {
            print("Starting ARKit session...")
            try await arkitSession.run([cameraFrameProvider])

            let formats = CameraVideoFormat.supportedVideoFormats(for: .main, cameraPositions: [.left])
            guard let highResolutionFormat = formats.max(by: { $0.frameSize.height < $1.frameSize.height }),
                  let cameraFrameUpdates = cameraFrameProvider.cameraFrameUpdates(for: highResolutionFormat) else {
                print("Failed to initialize cameraFrameUpdates.")
                cameraAccessStatus = "Failed to start camera."
                isCameraRunning = false
                return
            }

            print("ARKit session started successfully.")
            cameraAccessStatus = "Frames are displaying!"

            for await frame in cameraFrameUpdates {
                if let sample = frame.sample(for: .left) {
                    // Hop to the main queue: both statements mutate @State.
                    DispatchQueue.main.async {
                        pixelBuffer = sample.pixelBuffer
                        sendFrameToEncoder(pixelBuffer: sample.pixelBuffer)
                    }
                }
            }
        } catch {
            print("Failed to start ARKit session: \(error.localizedDescription)")
            cameraAccessStatus = "Failed to start ARKit: \(error.localizedDescription)"
            isCameraRunning = false
        }
    }
}
212 |
/// Shared Core Image context for pixel-buffer → Image conversion. CIContext
/// creation is expensive and Apple documents contexts as reusable; the original
/// allocated a fresh one for every preview frame.
private let pixelBufferRenderContext = CIContext(options: nil)

extension CVPixelBuffer {
    /// Renders this pixel buffer into a SwiftUI Image, or nil if rendering fails.
    var image: Image? {
        let ciImage = CIImage(cvPixelBuffer: self)
        guard let cgImage = pixelBufferRenderContext.createCGImage(ciImage, from: ciImage.extent) else {
            return nil
        }
        let uiImage = UIImage(cgImage: cgImage)
        return Image(uiImage: uiImage)
    }
}
224 |
--------------------------------------------------------------------------------
/avpenterprisetest/FrontCameraCapture.swift:
--------------------------------------------------------------------------------
1 | //
2 | // FrontCameraCapture.swift
3 | // avpenterprisetest
4 | //
5 | // Created by Devon Copley on 12/9/24.
6 | //
7 |
8 | // Capturing frames from the forward-facing camera.
9 | // Encoding frames using VideoEncoder.
10 | // Notifying listeners when encoded frames are available.
11 |
12 |
13 | /*
14 | import AVFoundation
15 | import Foundation
16 |
17 | class FrontCameraCapture: NSObject {
18 | let videoEncoder: VideoEncoder
19 | private let captureSession: AVCaptureSession
20 | private let videoOutput: AVCaptureVideoDataOutput
21 |
22 | init(width: Int, height: Int, frameRate: Int) {
23 | self.videoEncoder = VideoEncoder(width: Int32(width), height: Int32(height), frameRate: frameRate)
24 | self.captureSession = AVCaptureSession()
25 | self.videoOutput = AVCaptureVideoDataOutput()
26 | super.init()
27 |
28 | configureCaptureSession()
29 | }
30 |
31 | private func configureCaptureSession() {
32 | guard let camera = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .front) else {
33 | print("Failed to access the forward-facing camera")
34 | return
35 | }
36 |
37 | do {
38 | // Lock the camera for configuration
39 | try camera.lockForConfiguration()
40 |
41 | // Select the best available format
42 | if let format = camera.formats.first(where: { format in
43 | let dimensions = CMVideoFormatDescriptionGetDimensions(format.formatDescription)
44 | return dimensions.width >= 640 && dimensions.height >= 480
45 | }) {
46 | camera.activeFormat = format
47 | camera.activeVideoMinFrameDuration = CMTime(value: 1, timescale: 30) // 30 FPS
48 | camera.activeVideoMaxFrameDuration = CMTime(value: 1, timescale: 30)
49 | print("Selected camera format: \(format)")
50 | } else {
51 | print("No compatible format found")
52 | }
53 |
54 | camera.unlockForConfiguration()
55 | } catch {
56 | print("Failed to configure the camera: \(error)")
57 | return
58 | }
59 |
60 | // Configure the session
61 | captureSession.beginConfiguration()
62 |
63 | // Add the camera input to the session
64 | guard let videoInput = try? AVCaptureDeviceInput(device: camera),
65 | captureSession.canAddInput(videoInput) else {
66 | print("Cannot add camera input to the session")
67 | captureSession.commitConfiguration() // Commit configuration before returning
68 | return
69 | }
70 | captureSession.addInput(videoInput)
71 |
72 | // Configure video output
73 | videoOutput.setSampleBufferDelegate(self, queue: DispatchQueue(label: "video.capture.queue"))
74 | guard captureSession.canAddOutput(videoOutput) else {
75 | print("Cannot add video output to the session")
76 | captureSession.commitConfiguration() // Commit configuration before returning
77 | return
78 | }
79 | captureSession.addOutput(videoOutput)
80 |
81 | captureSession.commitConfiguration() // Commit configuration before starting the session
82 | }
83 |
84 | func start() {
85 | captureSession.startRunning()
86 | print("Capture session started")
87 | }
88 |
89 | func stop() {
90 | captureSession.stopRunning()
91 | print("Capture session stopped")
92 | }
93 | }
94 |
95 | extension FrontCameraCapture: AVCaptureVideoDataOutputSampleBufferDelegate {
96 | func captureOutput(
97 | _ output: AVCaptureOutput,
98 | didOutput sampleBuffer: CMSampleBuffer,
99 | from connection: AVCaptureConnection
100 | ) {
101 | guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else {
102 | print("Failed to get pixel buffer")
103 | return
104 | }
105 |
106 | let presentationTimeStamp = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)
107 | videoEncoder.encode(pixelBuffer: pixelBuffer, presentationTimeStamp: presentationTimeStamp)
108 | }
109 | }
110 | */
111 |
--------------------------------------------------------------------------------
/avpenterprisetest/Info.plist:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | NSEnterpriseMCAMUsageDescription
6 | For testing of enterprise API functionality, internal use only
7 | NSCameraUsageDescription
8 | For testing of enterprise API functionality, internal use only
9 | NSScreenCaptureUsageDescription
10 | For testing of enterprise API functionality, internal use only
11 | NSMicrophoneUsageDescription
12 | For testing of enterprise API functionality, internal use only
13 | UIApplicationSceneManifest
14 |
15 | UIApplicationPreferredDefaultSceneSessionRole
16 | UIWindowSceneSessionRoleApplication
17 | UIApplicationSupportsMultipleScenes
18 |
19 | UISceneConfigurations
20 |
21 | UISceneSessionRoleImmersiveSpaceApplication
22 |
23 |
24 | UISceneInitialImmersionStyle
25 | UIImmersionStyleProgressive
26 |
27 |
28 |
29 |
30 |
31 |
32 |
--------------------------------------------------------------------------------
/avpenterprisetest/Preview Content/Preview Assets.xcassets/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "info" : {
3 | "author" : "xcode",
4 | "version" : 1
5 | }
6 | }
7 |
--------------------------------------------------------------------------------
/avpenterprisetest/ToggleImmersiveSpaceButton.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ToggleImmersiveSpaceButton.swift
3 | // avpenterprisetest
4 | //
5 | // Created by Devon Copley on 12/3/24.
6 | //
7 |
8 | import SwiftUI
9 |
/// Button that opens or dismisses the app's immersive space, reflecting and
/// driving `appModel.immersiveSpaceState`.
struct ToggleImmersiveSpaceButton: View {

    /// Shared application state driving the immersive-space lifecycle.
    @EnvironmentObject private var appModel: AppModel

    @Environment(\.dismissImmersiveSpace) private var dismissImmersiveSpace
    @Environment(\.openImmersiveSpace) private var openImmersiveSpace

    var body: some View {
        Button {
            print("Button tapped!") // Confirms the button is responding
            Task { @MainActor in
                await toggleImmersiveSpace()
            }
        } label: {
            Text(appModel.immersiveSpaceState == .open ? "Hide Immersive Space" : "Show Immersive Space")
        }
        // Prevent interaction while a transition is in flight.
        .disabled(appModel.immersiveSpaceState == .inTransition)
        .fontWeight(.semibold)
    }

    /// Opens or dismisses the immersive space depending on the current state.
    /// Requests arriving mid-transition are ignored.
    @MainActor
    private func toggleImmersiveSpace() async {
        switch appModel.immersiveSpaceState {
        case .open:
            print("Attempting to dismiss immersive space...")
            appModel.immersiveSpaceState = .inTransition
            await dismissImmersiveSpace() // This triggers Vision Pro to close the immersive space
            print("Immersive space dismissed.")

        case .closed:
            print("Attempting to open immersive space...")
            appModel.immersiveSpaceState = .inTransition
            let result = await openImmersiveSpace(id: appModel.immersiveSpaceID) // This triggers Vision Pro to open the immersive space
            switch result {
            case .opened:
                print("Immersive space successfully opened.")
                // Lifecycle hooks (e.g., onAppear) will finalize state

            case .userCancelled, .error:
                print("Failed to open immersive space: \(result).")
                appModel.immersiveSpaceState = .closed

            @unknown default:
                print("Unexpected result from openImmersiveSpace.")
                appModel.immersiveSpaceState = .closed
            }

        case .inTransition:
            print("Transition in progress. Button action ignored.")
        }
    }
}
57 |
--------------------------------------------------------------------------------
/avpenterprisetest/VideoEncoder.swift:
--------------------------------------------------------------------------------
1 | //
2 | // VideoEncoder.swift
3 | // avpenterprisetest
4 | //
5 | // Created by Devon Copley on 12/4/24.
6 | //
7 |
8 | import VideoToolbox
9 | import Foundation
10 |
extension NSNotification.Name {
    /// Posted by `VideoEncoder` each time an encoded frame (a `CMSampleBuffer`)
    /// is ready; the sample buffer travels as the notification's object.
    static let didEncodeFrame = Self("didEncodeFrame")
}
14 |
/// H.264 hardware encoder wrapping a VideoToolbox `VTCompressionSession`.
///
/// Submit frames via `encode(pixelBuffer:presentationTimeStamp:)`; every
/// successfully encoded frame is broadcast through the `.didEncodeFrame`
/// notification with the `CMSampleBuffer` as the notification object.
class VideoEncoder {
    private var compressionSession: VTCompressionSession?
    /// Presentation timestamp of the previously submitted frame, used to
    /// derive the real inter-frame duration.
    private var lastInputPTS = CMTime.zero
    /// Nominal frame duration (1 / frameRate); used for the first frame only.
    private let frameDuration: CMTime

    /// C callback invoked by VideoToolbox once per encoded frame.
    static let videoOutputCallback: VTCompressionOutputCallback = { refcon, sourceFrameRefCon, status, infoFlags, sampleBuffer in
        guard status == noErr, let sampleBuffer = sampleBuffer else {
            print("Encoding failed with status: \(status)")
            return
        }

        // Retrieve the instance of VideoEncoder using the refcon pointer.
        // FIX: the explicit generic parameter was lost in transit; without it
        // the compiler cannot infer the unmanaged type.
        let videoEncoder = Unmanaged<VideoEncoder>.fromOpaque(refcon!).takeUnretainedValue()

        // Process the encoded frame
        videoEncoder.processEncodedFrame(sampleBuffer)
    }

    /// Creates and configures a real-time H.264 compression session.
    /// - Parameters:
    ///   - width: Output width in pixels.
    ///   - height: Output height in pixels.
    ///   - frameRate: Target frames per second; also used as the max keyframe
    ///     interval (one keyframe per second).
    init(width: Int32, height: Int32, frameRate: Int = 30) {
        self.frameDuration = CMTime(value: 1, timescale: CMTimeScale(frameRate))

        var compressionSessionOrNil: VTCompressionSession?
        let status = VTCompressionSessionCreate(
            allocator: kCFAllocatorDefault,
            width: width,
            height: height,
            codecType: kCMVideoCodecType_H264,
            encoderSpecification: nil,
            imageBufferAttributes: nil,
            compressedDataAllocator: kCFAllocatorDefault,
            outputCallback: VideoEncoder.videoOutputCallback,
            refcon: Unmanaged.passUnretained(self).toOpaque(),
            compressionSessionOut: &compressionSessionOrNil
        )

        guard status == noErr, let session = compressionSessionOrNil else {
            // Leaves `compressionSession` nil; `encode` reports this per call.
            print("Failed to create compression session: \(status)")
            return
        }

        self.compressionSession = session

        // Configure the session for low-latency live streaming.
        VTSessionSetProperty(session, key: kVTCompressionPropertyKey_RealTime, value: kCFBooleanTrue)
        VTSessionSetProperty(session, key: kVTCompressionPropertyKey_ProfileLevel, value: kVTProfileLevel_H264_Baseline_3_1)
        VTSessionSetProperty(session, key: kVTCompressionPropertyKey_AverageBitRate, value: 2_000_000 as CFTypeRef) // 2 Mbps
        VTSessionSetProperty(session, key: kVTCompressionPropertyKey_DataRateLimits, value: [2_000_000, 1] as CFArray)
        VTSessionSetProperty(session, key: kVTCompressionPropertyKey_AllowFrameReordering, value: kCFBooleanFalse)
        VTSessionSetProperty(session, key: kVTCompressionPropertyKey_MaxKeyFrameInterval, value: frameRate as CFTypeRef) // Keyframe interval

        VTCompressionSessionPrepareToEncodeFrames(session)
    }

    /// Encodes one pixel buffer. The frame duration is derived from the gap
    /// between consecutive presentation timestamps, falling back to the
    /// nominal duration for the first frame.
    func encode(pixelBuffer: CVPixelBuffer, presentationTimeStamp: CMTime) {
        guard let session = compressionSession else {
            print("Compression session is not initialized")
            return
        }

        // Calculate the frame duration
        var duration = frameDuration
        if lastInputPTS != CMTime.zero {
            duration = CMTimeSubtract(presentationTimeStamp, lastInputPTS)
        }
        lastInputPTS = presentationTimeStamp

        // Define frame properties (keyframes are left to the session's
        // MaxKeyFrameInterval policy).
        let frameProperties: [NSString: Any] = [
            kVTEncodeFrameOptionKey_ForceKeyFrame: false
        ]

        // Encode the frame
        let status = VTCompressionSessionEncodeFrame(
            session,
            imageBuffer: pixelBuffer,
            presentationTimeStamp: presentationTimeStamp,
            duration: duration,
            frameProperties: frameProperties as CFDictionary,
            sourceFrameRefcon: nil,
            infoFlagsOut: nil
        )

        if status != noErr {
            print("Failed to encode frame: \(status)")
        } else {
            print("Encoded frame with PTS: \(presentationTimeStamp)")
        }
    }

    /// Handles an encoded frame delivered by the VideoToolbox callback:
    /// detects keyframes, logs SPS/PPS for keyframes, and rebroadcasts the
    /// buffer via `.didEncodeFrame`.
    private func processEncodedFrame(_ sampleBuffer: CMSampleBuffer) {
        guard let attachments = CMSampleBufferGetSampleAttachmentsArray(sampleBuffer, createIfNecessary: true) else {
            print("No sample attachments found")
            return
        }

        // A frame is a keyframe when the NotSync attachment is absent.
        // FIX: restored the `<CFDictionary>` generic parameter.
        let dic = Unmanaged<CFDictionary>.fromOpaque(CFArrayGetValueAtIndex(attachments, 0)).takeUnretainedValue()
        let isKeyframe = !CFDictionaryContainsKey(dic, Unmanaged.passUnretained(kCMSampleAttachmentKey_NotSync).toOpaque())
        print("Encoded frame is keyframe: \(isKeyframe)")

        if isKeyframe, let formatDescription = CMSampleBufferGetFormatDescription(sampleBuffer) {
            extractSPSAndPPS(formatDescription)
        }

        NotificationCenter.default.post(name: .didEncodeFrame, object: sampleBuffer)
    }

    /// Logs the H.264 SPS (parameter set index 0) and PPS (index 1) from a
    /// keyframe's format description.
    private func extractSPSAndPPS(_ formatDescription: CMFormatDescription) {
        // FIX: restored the `<UInt8>` generic parameters lost in transit.
        var spsPointer: UnsafePointer<UInt8>?
        var spsLength: Int = 0
        var ppsPointer: UnsafePointer<UInt8>?
        var ppsLength: Int = 0

        CMVideoFormatDescriptionGetH264ParameterSetAtIndex(
            formatDescription,
            parameterSetIndex: 0,
            parameterSetPointerOut: &spsPointer,
            parameterSetSizeOut: &spsLength,
            parameterSetCountOut: nil,
            nalUnitHeaderLengthOut: nil
        )
        CMVideoFormatDescriptionGetH264ParameterSetAtIndex(
            formatDescription,
            parameterSetIndex: 1,
            parameterSetPointerOut: &ppsPointer,
            parameterSetSizeOut: &ppsLength,
            parameterSetCountOut: nil,
            nalUnitHeaderLengthOut: nil
        )

        if let spsPointer = spsPointer, let ppsPointer = ppsPointer {
            let spsData = Data(bytes: spsPointer, count: spsLength)
            let ppsData = Data(bytes: ppsPointer, count: ppsLength)
            print("SPS: \(spsData as NSData), PPS: \(ppsData as NSData)")
        }
    }

    deinit {
        if let session = compressionSession {
            // Flush any in-flight frames before tearing the session down.
            VTCompressionSessionCompleteFrames(session, untilPresentationTimeStamp: .invalid)
            VTCompressionSessionInvalidate(session)
        }
    }
}
155 |
--------------------------------------------------------------------------------
/avpenterprisetest/YouTubeStreamManager.swift:
--------------------------------------------------------------------------------
1 | //
2 | // YouTubeStreamManager.swift
3 | // avpenterprisetest
4 | //
5 | // Created by Devon Copley on 12/4/24.
6 | //
7 | import HaishinKit
8 | import Foundation
9 | import AVFoundation
10 | import VideoToolbox
11 |
12 | // Extend Notification.Name to include a custom name for RTMP status notifications
extension Notification.Name {
    /// Posted for RTMP connection status changes observed by
    /// `YouTubeStreamManager`.
    static let rtmpStatus = Self("rtmpStatus")
}
16 |
17 |
/// Manages a YouTube RTMP live stream using HaishinKit.
///
/// Responsibilities: configures audio/video codec settings, connects and
/// publishes to YouTube's RTMP ingest, monitors connection status, and feeds
/// externally encoded frames (posted via `.didEncodeFrame`) into the mixer.
class YouTubeStreamManager {
    private let rtmpConnection = RTMPConnection()
    private var rtmpStream: RTMPStream!
    private let rtmpURL = "rtmp://a.rtmp.youtube.com/live2"
    // SECURITY: a live stream key is a credential and should not be committed
    // to source control. Load it from the Keychain or an untracked config file.
    private let streamKey = "syw0-13w1-j29p-xumw-43jv" // Replace with your actual stream key
    private var currentCameraPosition: AVCaptureDevice.Position = .front
    //private let audioCapture = AudioCapture()
    private var audioHandler: AudioHandler?
    private let mixer = MediaMixer()
    private var frameCount: Int64 = 0 // Track frame count

    init() {
        rtmpStream = RTMPStream(connection: rtmpConnection)
    }

    // MARK: - Setup Methods

    /// Configures codec settings, subscribes to encoded-frame notifications,
    /// and connects the mixer output to the RTMP stream.
    private func setupRTMPStream() async throws {
        rtmpStream = RTMPStream(connection: rtmpConnection)

        // Configure audio settings
        var audioSettings = AudioCodecSettings()
        audioSettings.bitRate = 64 * 1000
        audioSettings.downmix = true
        await rtmpStream.setAudioSettings(audioSettings)

        // Configure video settings (854x480, ~640 kbps, H.264 Baseline 3.1).
        // `let` rather than `var`: never mutated after construction.
        let videoSettings = VideoCodecSettings(
            videoSize: .init(width: 854, height: 480),
            bitRate: 640 * 1000,
            profileLevel: kVTProfileLevel_H264_Baseline_3_1 as String,
            scalingMode: .trim,
            bitRateMode: .average,
            maxKeyFrameIntervalDuration: 2,
            isHardwareEncoderEnabled: true
        )
        await rtmpStream.setVideoSettings(videoSettings)

        // Initialize the AudioHandler
        let handler = AudioHandler(mixer: mixer)
        self.audioHandler = handler

        // OPTION 1: consume frames encoded elsewhere. `VideoEncoder` posts
        // `.didEncodeFrame` with the CMSampleBuffer as the notification object.
        NotificationCenter.default.addObserver(
            self,
            selector: #selector(handleEncodedFrame(notification:)),
            name: .didEncodeFrame,
            object: nil
        )

        // OPTION 2
        // Attach the Persona Video
        // This works
        // try await attachPersonaVideo()

        // Connect the MediaMixer to RTMPStream
        await mixer.addOutput(rtmpStream)
        print("[Info] Mixer output connected to RTMPStream.")

        // Check stream initialization for SPS/PPS and keyframes
        //validateStreamConfiguration()
    }

    /// Logs SPS/PPS sizes for a notification carrying an encoded sample buffer.
    @objc private func validateStreamConfiguration(notification: Notification) {
        // FIX: safe cast instead of `as!` — a malformed notification should be
        // logged, not crash the stream.
        guard let sampleBuffer = notification.object as? CMSampleBuffer else {
            print("[Error] Notification does not contain a valid CMSampleBuffer.")
            return
        }

        // Proceed with your validation logic
        if let formatDesc = CMSampleBufferGetFormatDescription(sampleBuffer) {
            let parameterSets = self.getParameterSets(from: formatDesc)
            if let sps = parameterSets.sps {
                print("SPS detected with size: \(sps.count)")
            }
            if let pps = parameterSets.pps {
                print("PPS detected with size: \(pps.count)")
            }
        }
    }

    /// Extracts the H.264 SPS (parameter set index 0) and PPS (index 1) from a
    /// format description.
    /// - Returns: `(nil, nil)` when the parameter sets cannot be retrieved.
    private func getParameterSets(from formatDescription: CMFormatDescription) -> (sps: Data?, pps: Data?) {
        // FIX: restored the `<UInt8>` generic parameters lost in transit.
        var sps: UnsafePointer<UInt8>?
        var spsLength: Int = 0
        var pps: UnsafePointer<UInt8>?
        var ppsLength: Int = 0
        var parameterSetCount: Int = 0

        let status = CMVideoFormatDescriptionGetH264ParameterSetAtIndex(
            formatDescription,
            parameterSetIndex: 0,
            parameterSetPointerOut: &sps,
            parameterSetSizeOut: &spsLength,
            parameterSetCountOut: &parameterSetCount, // FIX: was corrupted to "¶meterSetCount"
            nalUnitHeaderLengthOut: nil
        )

        guard status == noErr, parameterSetCount > 1 else {
            print("[Error] Failed to retrieve SPS/PPS from format description. Status: \(status)")
            return (nil, nil)
        }

        CMVideoFormatDescriptionGetH264ParameterSetAtIndex(
            formatDescription,
            parameterSetIndex: 1,
            parameterSetPointerOut: &pps,
            parameterSetSizeOut: &ppsLength,
            parameterSetCountOut: nil,
            nalUnitHeaderLengthOut: nil
        )

        return (
            sps: sps.map { Data(bytes: $0, count: spsLength) },
            pps: pps.map { Data(bytes: $0, count: ppsLength) }
        )
    }

    /// Attaches the front (Persona) camera to the mixer, if available.
    /// - Throws: `StreamConfigurationError` when attaching the camera fails.
    func attachPersonaVideo() async throws {
        do {
            // Optionally configure Video
            if let camera = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .front) {
                try await mixer.attachVideo(camera)
                print("Camera attached successfully.")
            } else {
                print("Front camera not available.")
            }
        } catch {
            throw NSError(domain: "StreamConfigurationError", code: -1, userInfo: [NSLocalizedDescriptionKey: "Error configuring stream: \(error)"])
        }
    }

    /// Appends an encoded frame (delivered via `.didEncodeFrame`) to the mixer.
    @objc private func handleEncodedFrame(notification: Notification) {
        // FIX: safe cast instead of `as! CMSampleBuffer?` — an unexpected
        // object type is reported instead of crashing.
        guard let sampleBuffer = notification.object as? CMSampleBuffer else {
            print("[Error] Notification does not contain a valid CMSampleBuffer.")
            return
        }

        // Pass the sample buffer to the MediaMixer
        Task {
            await mixer.append(sampleBuffer)
            print("[Debug] Encoded frame appended to mixer.")
        }
    }

    // MARK: - Streaming Control

    /// Sets up, connects, and publishes the stream; on failure schedules a
    /// retry in 5 seconds.
    func startStreaming() async {
        print("[Info] Starting video + audio stream...")
        do {
            // Step 1: Set up the RTMP stream (audio, video, mixer setup)
            try await setupRTMPStream()

            // Step 2: Connect to the RTMP server
            print("[Info] Connecting to RTMP server...")
            let connectResponse = try await rtmpConnection.connect(rtmpURL)
            print("[Info] RTMP connection response: \(connectResponse)")

            // Step 3: Publish the stream
            print("[Info] Publishing stream...")
            let publishResponse = try await rtmpStream.publish(streamKey)
            print("[Info] RTMP publish response: \(publishResponse)")

            // Step 4: Start media data flow
            startMediaFlow()
        } catch {
            print("[Error] Starting streaming failed: \(error.localizedDescription)")
            retryStreaming()
        }
    }

    /// Starts audio processing once the stream is published.
    private func startMediaFlow() {

        // Start audio processing
        audioHandler?.configureAudio()
        print("[Info] AudioHandler started.")

        // FUTURE - should check for valid front camera
    }

    /// Retries `startStreaming()` after a 5-second delay. `self` is captured
    /// weakly so a deallocated manager does not keep retrying forever.
    private func retryStreaming() {
        DispatchQueue.main.asyncAfter(deadline: .now() + 5) { [weak self] in
            Task {
                await self?.startStreaming()
            }
        }
    }

    /// Closes the RTMP stream and the underlying connection.
    func stopStreaming() async {
        print("Stopping the stream...")
        do {
            try await rtmpStream.close()
            try await rtmpConnection.close()
            print("Stream stopped successfully.")
        } catch {
            print("Error stopping the stream: \(error)")
        }
    }

    // MARK: - RTMP Monitoring

    /// Subscribes to `.rtmpStatus` notifications from the RTMP connection.
    func startMonitoringRTMPConnection() {
        print("[Debug] Setting up RTMP status monitoring...")
        NotificationCenter.default.addObserver(
            self,
            selector: #selector(handleRTMPStatus(notification:)),
            name: .rtmpStatus,
            object: rtmpConnection
        )
    }

    /// Maps RTMP status codes from the notification payload to log output.
    @objc private func handleRTMPStatus(notification: Notification) {
        guard let data = notification.userInfo as? [String: Any],
              let status = data["code"] as? String else {
            print("[Warning] No RTMP status data available.")
            return
        }

        // Handle different RTMP connection states
        switch status {
        case RTMPConnection.Code.connectSuccess.rawValue:
            print("[Info] RTMP: Connection successful.")
        case RTMPConnection.Code.connectFailed.rawValue:
            print("[Error] RTMP: Connection failed.")
        default:
            print("[Info] RTMP: Status code \(status)")
        }
    }
}
252 |
253 |
254 |
255 |
--------------------------------------------------------------------------------
/avpenterprisetest/avpenterprisetest.entitlements:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | com.apple.developer.screen-capture.include-passthrough
6 |
7 | com.apple.developer.arkit.main-camera-access.allow
8 |
9 |
10 |
11 |
--------------------------------------------------------------------------------
/avpenterprisetest/avpenterprisetestApp.swift:
--------------------------------------------------------------------------------
1 | //
2 | // avpenterprisetestApp.swift
3 | // avpenterprisetest
4 | //
5 | // Created by Devon Copley on 12/3/24.
6 | //
7 |
8 | import SwiftUI
9 |
@main
struct avpenterprisetestApp: App {
    // Shared application state; injected into the environment for all views.
    @StateObject private var appModel = AppModel()

    var body: some Scene {
        // Primary 2D window scene (declared first, so it is the default scene).
        WindowGroup {
            ContentView()
                .environmentObject(appModel)
        }

        // Define the immersive space here
        ImmersiveSpace(id: appModel.immersiveSpaceID) {
            EmptyView() // Minimal placeholder; functionality is handled in ContentView
        }
    }
}
26 |
--------------------------------------------------------------------------------
/avpenterprisetestTests/avpenterprisetestTests.swift:
--------------------------------------------------------------------------------
1 | //
2 | // avpenterprisetestTests.swift
3 | // avpenterprisetestTests
4 | //
5 | // Created by Devon Copley on 12/3/24.
6 | //
7 |
8 | import Testing
9 | @testable import avpenterprisetest
10 |
struct avpenterprisetestTests {

    // Placeholder Swift Testing suite; no assertions yet.
    @Test func example() async throws {
        // Write your test here and use APIs like `#expect(...)` to check expected conditions.
    }

}
18 |
--------------------------------------------------------------------------------