├── .github
└── ISSUE_TEMPLATE.md
├── .gitignore
├── ARKitExample.xcodeproj
├── project.pbxproj
└── project.xcworkspace
│ ├── contents.xcworkspacedata
│ └── xcshareddata
│ └── IDEWorkspaceChecks.plist
├── ARKitExample
├── AppDelegate.swift
├── Assets.xcassets
│ └── AppIcon.appiconset
│ │ └── Contents.json
├── Base.lproj
│ ├── LaunchScreen.storyboard
│ └── Main.storyboard
├── Info.plist
├── README.md
├── ViewController.swift
└── art.scnassets
│ ├── ship.scn
│ └── texture.png
├── AVPlayerExample.xcodeproj
├── project.pbxproj
└── project.xcworkspace
│ └── contents.xcworkspacedata
├── AVPlayerExample
├── AVPlayerView.h
├── AVPlayerView.m
├── AppDelegate.h
├── AppDelegate.m
├── Assets.xcassets
│ └── AppIcon.appiconset
│ │ └── Contents.json
├── Base.lproj
│ ├── LaunchScreen.storyboard
│ └── Main.storyboard
├── Info.plist
├── README.md
├── Utils.h
├── Utils.m
├── ViewController.h
├── ViewController.m
└── main.m
├── AudioDeviceExample.xcodeproj
├── project.pbxproj
└── project.xcworkspace
│ ├── contents.xcworkspacedata
│ └── xcshareddata
│ └── IDEWorkspaceChecks.plist
├── AudioDeviceExample
├── AppDelegate.swift
├── Assets.xcassets
│ └── AppIcon.appiconset
│ │ └── Contents.json
├── AudioDevices
│ ├── AudioDevices-Bridging-Header.h
│ ├── ExampleAVAudioEngineDevice.h
│ ├── ExampleAVAudioEngineDevice.m
│ ├── ExampleCoreAudioDevice.h
│ ├── ExampleCoreAudioDevice.m
│ └── mixLoop.caf
├── Base.lproj
│ ├── LaunchScreen.storyboard
│ └── Main.storyboard
├── Info.plist
├── README.md
└── ViewController.swift
├── AudioSinkExample.xcodeproj
├── project.pbxproj
├── project.xcworkspace
│ ├── contents.xcworkspacedata
│ └── xcshareddata
│ │ └── IDEWorkspaceChecks.plist
└── xcshareddata
│ └── xcschemes
│ └── AudioSinkExample.xcscheme
├── AudioSinkExample
├── AppDelegate.swift
├── Assets.xcassets
│ └── AppIcon.appiconset
│ │ └── Contents.json
├── AudioSinks
│ ├── AudioSinks-Bridging-Header.h
│ ├── ExampleAudioRecorder.h
│ ├── ExampleAudioRecorder.m
│ ├── ExampleSpeechRecognizer.h
│ └── ExampleSpeechRecognizer.m
├── Base.lproj
│ ├── LaunchScreen.storyboard
│ └── Main.storyboard
├── Info.plist
├── README.md
├── RecordingsViewController.swift
└── ViewController.swift
├── DataTrackExample.xcodeproj
├── project.pbxproj
├── project.xcworkspace
│ ├── contents.xcworkspacedata
│ └── xcshareddata
│ │ └── IDEWorkspaceChecks.plist
└── xcshareddata
│ └── xcschemes
│ └── DataTrackExample.xcscheme
├── DataTrackExample
├── AppDelegate.swift
├── Assets.xcassets
│ └── AppIcon.appiconset
│ │ └── Contents.json
├── Base.lproj
│ ├── LaunchScreen.storyboard
│ └── Main.storyboard
├── Info.plist
├── README.md
└── ViewController.swift
├── LICENSE
├── ObjCVideoQuickstart.xcodeproj
├── project.pbxproj
└── project.xcworkspace
│ └── contents.xcworkspacedata
├── ObjCVideoQuickstart
├── AppDelegate.h
├── AppDelegate.m
├── Assets.xcassets
│ └── AppIcon.appiconset
│ │ └── Contents.json
├── Base.lproj
│ ├── LaunchScreen.storyboard
│ └── Main.storyboard
├── Info.plist
├── Utils.h
├── Utils.m
├── ViewController.h
├── ViewController.m
└── main.m
├── README.md
├── ReplayKitExample.xcodeproj
├── project.pbxproj
├── project.xcworkspace
│ ├── contents.xcworkspacedata
│ └── xcshareddata
│ │ └── IDEWorkspaceChecks.plist
└── xcshareddata
│ └── xcschemes
│ ├── BroadcastExtension.xcscheme
│ ├── BroadcastExtensionSetupUI.xcscheme
│ └── ReplayKitExample.xcscheme
├── ReplayKitExample
├── BroadcastExtension
│ ├── BroadcastExtension-Bridging-Header.h
│ ├── BroadcastExtension.entitlements
│ ├── ExampleReplayKitAudioCapturer.h
│ ├── ExampleReplayKitAudioCapturer.m
│ ├── ExampleReplayKitAudioCapturerDispatch.h
│ ├── Info.plist
│ └── SampleHandler.swift
├── BroadcastExtensionSetupUI
│ ├── BroadcastExtensionSetupUI.entitlements
│ ├── BroadcastSetupViewController.swift
│ ├── Info.plist
│ └── MainInterface.storyboard
├── README.md
└── ReplayKitExample
│ ├── AppDelegate.swift
│ ├── Assets.xcassets
│ ├── AppIcon.appiconset
│ │ └── Contents.json
│ └── Contents.json
│ ├── Base.lproj
│ ├── LaunchScreen.storyboard
│ └── Main.storyboard
│ ├── Info.plist
│ ├── ReplayKitExample-Bridging-Header.h
│ ├── ReplayKitExample.entitlements
│ ├── ReplayKitVideoSource.swift
│ ├── Telecine.swift
│ └── ViewController.swift
├── Resources
└── twilio_cloud_com.mov
├── ScreenCapturerExample.xcodeproj
├── project.pbxproj
└── project.xcworkspace
│ └── xcshareddata
│ └── IDEWorkspaceChecks.plist
├── ScreenCapturerExample
├── AppDelegate.swift
├── Assets.xcassets
│ └── AppIcon.appiconset
│ │ └── Contents.json
├── Base.lproj
│ ├── LaunchScreen.storyboard
│ └── Main.storyboard
├── ExampleWebViewSource.swift
├── Info.plist
├── README.md
└── ViewController.swift
├── Utils
├── Colors.swift
├── Settings.swift
├── SettingsTableViewController.swift
└── Utils.swift
├── VideoCallKitQuickStart.xcodeproj
├── project.pbxproj
├── project.xcworkspace
│ └── contents.xcworkspacedata
└── xcshareddata
│ └── xcschemes
│ └── VideoCallKitQuickStart.xcscheme
├── VideoCallKitQuickStart
├── AppDelegate.swift
├── Assets.xcassets
│ ├── AppIcon.appiconset
│ │ └── Contents.json
│ └── Contents.json
├── Base.lproj
│ ├── LaunchScreen.storyboard
│ └── Main.storyboard
├── Info.plist
├── ViewController+CallKit.swift
├── ViewController+SimulateIncomingCall.swift
└── ViewController.swift
├── VideoQuickStart.xcodeproj
├── project.pbxproj
├── project.xcworkspace
│ └── contents.xcworkspacedata
└── xcshareddata
│ └── xcschemes
│ └── VideoQuickStart.xcscheme
├── VideoQuickStart
├── AppDelegate.swift
├── Assets.xcassets
│ ├── AppIcon.appiconset
│ │ └── Contents.json
│ └── Contents.json
├── Base.lproj
│ ├── LaunchScreen.storyboard
│ └── Main.storyboard
├── Info.plist
├── SceneDelegate.swift
└── ViewController.swift
├── VideoQuickstart.xcworkspace
├── contents.xcworkspacedata
└── xcshareddata
│ ├── IDEWorkspaceChecks.plist
│ ├── WorkspaceSettings.xcsettings
│ └── swiftpm
│ └── Package.resolved
├── bump_spm_version.sh
└── images
└── quickstart
├── audio-device-launched.jpg
├── audio-engine-example.jpg
├── audio-sink-launched.jpg
├── audio-sink-recognizing.jpg
├── audio-sink-recordings.png
├── console-room-topology-group.png
├── data-track-drawing.gif
├── data-track-home.png
├── enter-room-name.jpg
├── generate_access_tokens.png
├── home-screen.png
├── multi-party-audio-send-bandwidth.png
├── multi-party-home-screen.png
├── objc-home-screen.png
├── objc-xcode-video-quickstart-token.png
├── objc-xcode-video-quickstart.png
├── replaykit-broadcast-mic-ios13-audio-resource-limit.png
├── replaykit-broadcast-picker-ios-13.0.png
├── replaykit-extension-memory.png
├── replaykit-launch-ios11.png
├── replaykit-launch-ios12.png
├── replaykit-picker-ios12.png
├── replaykit-reset-media-services.png
├── room-connected.png
├── select-audio-device.jpg
├── xcode-video-quickstart-token.png
└── xcode-video-quickstart.png
/.github/ISSUE_TEMPLATE.md:
--------------------------------------------------------------------------------
1 |
2 | > Before filing an issue please check that the issue is not already addressed by the following:
3 | >
4 | > * [Video Guides](https://www.twilio.com/docs/api/video)
5 | > * [Changelog](https://www.twilio.com/docs/api/video/changelog-twilio-video-ios-version-2x).
6 | >
7 | > If this is an issue with the QuickStart itself, file it here. If this is an issue with the SDK or how to do something with the SDK please use [twilio-video-ios](https://github.com/twilio/twilio-video-ios/issues) instead.
8 | >
9 | > Please ensure that you are not sharing any
10 | [Personally Identifiable Information (PII)](https://www.twilio.com/docs/glossary/what-is-personally-identifiable-information-pii)
11 | or sensitive account information (API keys, credentials, etc.) when reporting an issue.
12 |
13 | ### Description
14 |
15 | [Description of the issue]
16 |
17 | ### Steps to Reproduce
18 |
19 | 1. [Step one]
20 | 2. [Step two]
21 | 3. [Insert as many steps as needed]
22 |
23 | #### Code
24 |
25 | ```swift
26 | // Code that helps reproduce the issue
27 | ```
28 |
29 | #### Expected Behavior
30 |
31 | [What you expect to happen]
32 |
33 | #### Actual Behavior
34 |
35 | [What actually happens]
36 |
37 | #### Reproduces How Often
38 |
39 | [What percentage of the time does it reproduce?]
40 |
41 | #### Logs
42 | Debug level logs are helpful when investigating issues. To enable debug level logging, add the following code to your application:
43 |
44 | ```swift
45 | TwilioVideoSDK.setLogLevel(.debug)
46 | ```
47 |
48 | ```
49 | // Log output when the issue occurs
50 | ```
51 |
52 | ### Versions
53 |
54 | All relevant version information for the issue.
55 |
56 | #### Video iOS SDK
57 |
58 | [e.g. 1.3.12 via CocoaPods]
59 |
60 | #### Xcode
61 |
62 | [e.g. 9.2]
63 |
64 | #### iOS Version
65 |
66 | [e.g. 11.2.6]
67 |
68 | #### iOS Device
69 |
70 | [e.g. iPhone 8 Plus]
71 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Xcode
2 | #
3 | # gitignore contributors: remember to update Global/Xcode.gitignore, Objective-C.gitignore & Swift.gitignore
4 | .DS_Store
5 |
6 | ## Build generated
7 | build/
8 | DerivedData
9 |
10 | ## Various settings
11 | *.pbxuser
12 | !default.pbxuser
13 | *.mode1v3
14 | !default.mode1v3
15 | *.mode2v3
16 | !default.mode2v3
17 | *.perspectivev3
18 | !default.perspectivev3
19 | xcuserdata
20 |
21 | ## Other
22 | *.xccheckout
23 | *.moved-aside
24 | *.xcuserstate
25 | *.xcscmblueprint
26 |
27 | ## Obj-C/Swift specific
28 | *.hmap
29 | *.ipa
30 |
31 | # Manual Install
32 | TwilioVideo.framework/
33 | TwilioVideo.xcframework/
34 |
35 | # fastlane
36 | #
37 | # It is recommended to not store the screenshots in the git repo. Instead, use fastlane to re-generate the
38 | # screenshots whenever they are needed.
39 | # For more information about the recommended setup visit:
40 | # https://github.com/fastlane/fastlane/blob/master/docs/Gitignore.md
41 |
42 | fastlane/report.xml
43 | fastlane/screenshots
44 |
--------------------------------------------------------------------------------
/ARKitExample.xcodeproj/project.xcworkspace/contents.xcworkspacedata:
--------------------------------------------------------------------------------
1 |
2 |
4 |
6 |
7 |
8 |
--------------------------------------------------------------------------------
/ARKitExample.xcodeproj/project.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | IDEDidComputeMac32BitWarning
6 |
7 |
8 |
9 |
--------------------------------------------------------------------------------
/ARKitExample/AppDelegate.swift:
--------------------------------------------------------------------------------
1 | //
2 | // AppDelegate.swift
3 | // ARKitExample
4 | //
5 | // Copyright © 2016-2019 Twilio, Inc. All rights reserved.
6 | //
7 |
8 | import UIKit
@UIApplicationMain
class AppDelegate: UIResponder, UIApplicationDelegate {

    // The application's single window, loaded from the main storyboard.
    var window: UIWindow?

    func application(_ application: UIApplication, didFinishLaunchingWithOptions launchOptions: [UIApplication.LaunchOptionsKey: Any]?) -> Bool {
        // This example needs no launch-time customization.
        return true
    }

    func applicationWillResignActive(_ application: UIApplication) {
        // The app is about to become inactive (incoming call, SMS, or a move
        // toward the background). Pause ongoing tasks, timers, and any
        // graphics rendering callbacks here.
    }

    func applicationDidEnterBackground(_ application: UIApplication) {
        // Release shared resources and persist enough state to restore the
        // current session if the app is later terminated. Called instead of
        // applicationWillTerminate(_:) when background execution is supported.
    }

    func applicationWillEnterForeground(_ application: UIApplication) {
        // Transitioning from background to active; undo changes made when
        // entering the background.
    }

    func applicationDidBecomeActive(_ application: UIApplication) {
        // Restart tasks paused while inactive and refresh the UI if the app
        // was previously in the background.
    }

    func applicationWillTerminate(_ application: UIApplication) {
        // The app is about to exit; save data if appropriate.
    }
}
44 |
45 |
--------------------------------------------------------------------------------
/ARKitExample/Assets.xcassets/AppIcon.appiconset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "idiom" : "iphone",
5 | "size" : "20x20",
6 | "scale" : "2x"
7 | },
8 | {
9 | "idiom" : "iphone",
10 | "size" : "20x20",
11 | "scale" : "3x"
12 | },
13 | {
14 | "idiom" : "iphone",
15 | "size" : "29x29",
16 | "scale" : "2x"
17 | },
18 | {
19 | "idiom" : "iphone",
20 | "size" : "29x29",
21 | "scale" : "3x"
22 | },
23 | {
24 | "idiom" : "iphone",
25 | "size" : "40x40",
26 | "scale" : "2x"
27 | },
28 | {
29 | "idiom" : "iphone",
30 | "size" : "40x40",
31 | "scale" : "3x"
32 | },
33 | {
34 | "idiom" : "iphone",
35 | "size" : "60x60",
36 | "scale" : "2x"
37 | },
38 | {
39 | "idiom" : "iphone",
40 | "size" : "60x60",
41 | "scale" : "3x"
42 | },
43 | {
44 | "idiom" : "ipad",
45 | "size" : "20x20",
46 | "scale" : "1x"
47 | },
48 | {
49 | "idiom" : "ipad",
50 | "size" : "20x20",
51 | "scale" : "2x"
52 | },
53 | {
54 | "idiom" : "ipad",
55 | "size" : "29x29",
56 | "scale" : "1x"
57 | },
58 | {
59 | "idiom" : "ipad",
60 | "size" : "29x29",
61 | "scale" : "2x"
62 | },
63 | {
64 | "idiom" : "ipad",
65 | "size" : "40x40",
66 | "scale" : "1x"
67 | },
68 | {
69 | "idiom" : "ipad",
70 | "size" : "40x40",
71 | "scale" : "2x"
72 | },
73 | {
74 | "idiom" : "ipad",
75 | "size" : "76x76",
76 | "scale" : "1x"
77 | },
78 | {
79 | "idiom" : "ipad",
80 | "size" : "76x76",
81 | "scale" : "2x"
82 | },
83 | {
84 | "idiom" : "ipad",
85 | "size" : "83.5x83.5",
86 | "scale" : "2x"
87 | },
88 | {
89 | "idiom" : "ios-marketing",
90 | "size" : "1024x1024",
91 | "scale" : "1x"
92 | }
93 | ],
94 | "info" : {
95 | "version" : 1,
96 | "author" : "xcode"
97 | }
98 | }
--------------------------------------------------------------------------------
/ARKitExample/Base.lproj/LaunchScreen.storyboard:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
--------------------------------------------------------------------------------
/ARKitExample/Base.lproj/Main.storyboard:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
--------------------------------------------------------------------------------
/ARKitExample/Info.plist:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | CFBundleDevelopmentRegion
6 | $(DEVELOPMENT_LANGUAGE)
7 | CFBundleExecutable
8 | $(EXECUTABLE_NAME)
9 | CFBundleIdentifier
10 | $(PRODUCT_BUNDLE_IDENTIFIER)
11 | CFBundleInfoDictionaryVersion
12 | 6.0
13 | CFBundleName
14 | $(PRODUCT_NAME)
15 | CFBundlePackageType
16 | APPL
17 | CFBundleShortVersionString
18 | 1.0
19 | CFBundleVersion
20 | 1
21 | LSRequiresIPhoneOS
22 |
23 | NSCameraUsageDescription
24 | ${PRODUCT_NAME} shares an augmented reality scene with other Participants connected to a Room.
25 | NSMicrophoneUsageDescription
26 | ${PRODUCT_NAME} shares your voice with other Participants connected to a Room.
27 | UIBackgroundModes
28 |
29 | audio
30 | voip
31 |
32 | UILaunchStoryboardName
33 | LaunchScreen
34 | UIMainStoryboardFile
35 | Main
36 | UIRequiredDeviceCapabilities
37 |
38 | armv7
39 | arkit
40 |
41 | UIStatusBarHidden
42 |
43 | UISupportedInterfaceOrientations
44 |
45 | UIInterfaceOrientationPortrait
46 | UIInterfaceOrientationLandscapeLeft
47 | UIInterfaceOrientationLandscapeRight
48 |
49 | UISupportedInterfaceOrientations~ipad
50 |
51 | UIInterfaceOrientationPortrait
52 | UIInterfaceOrientationPortraitUpsideDown
53 | UIInterfaceOrientationLandscapeLeft
54 | UIInterfaceOrientationLandscapeRight
55 |
56 | UIUserInterfaceStyle
57 | Light
58 |
59 |
60 |
--------------------------------------------------------------------------------
/ARKitExample/README.md:
--------------------------------------------------------------------------------
1 | # Twilio Video ARKit Example
2 |
3 | The project demonstrates how to use Twilio's Programmable Video SDK to stream an augmented reality scene created with ARKit and SceneKit. This example was originally written by [Lizzie Siegle](https://github.com/elizabethsiegle/) for her blog post about [ARKit](https://www.twilio.com/blog/2017/10/ios-arkit-swift-twilio-programmable-video.html).
4 |
5 | ### Setup
6 |
7 | See the master [README](https://github.com/twilio/video-quickstart-ios/blob/master/README.md) for instructions on how to generate access tokens and connect to a Room.
8 |
9 | This example requires Xcode 12.0, and the iOS 14.0 SDK. An iOS device with an A9 CPU or greater is needed for ARKit to function properly.
10 |
11 | ### Usage
12 |
13 | At launch the example immediately begins capturing AR content with an `ARSession`. An `ARSCNView` is used to render the SceneKit virtual scene on top of the camera feed.
14 |
15 | At the same time the Client will attempt to connect to a Room named `arkit`. To view the AR content being shared join the same Room using the regular QuickStart example. For this to work properly **you need to generate a new access token with a different identity** otherwise you will kick out the existing ARKit Participant.
16 |
17 | Please note that this project does not demonstrate rendering remote video, but you will be able to hear the audio from other Participants and they will be able to see and hear you.
18 |
19 | ### Known Issues
20 |
21 | The technique used to capture AR content rendered by SceneKit is somewhat un-optimized, and does not use the native sizes produced by `ARSession`. It may be possible to have SceneKit render into developer provided buffers (shared between the CPU and GPU), but we were unable to accomplish this while still using an `ARSCNView` for rendering.
22 |
--------------------------------------------------------------------------------
/ARKitExample/art.scnassets/ship.scn:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/twilio/video-quickstart-ios/d0da993e23f1bc6f632d5807066c702a5ab822be/ARKitExample/art.scnassets/ship.scn
--------------------------------------------------------------------------------
/ARKitExample/art.scnassets/texture.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/twilio/video-quickstart-ios/d0da993e23f1bc6f632d5807066c702a5ab822be/ARKitExample/art.scnassets/texture.png
--------------------------------------------------------------------------------
/AVPlayerExample.xcodeproj/project.xcworkspace/contents.xcworkspacedata:
--------------------------------------------------------------------------------
1 |
2 |
4 |
6 |
7 |
8 |
--------------------------------------------------------------------------------
/AVPlayerExample/AVPlayerView.h:
--------------------------------------------------------------------------------
1 | //
2 | // AVPlayerView.h
3 | // AVPlayerExample
4 | //
5 | // Copyright © 2016-2017 Twilio, Inc. All rights reserved.
6 | //
7 |
#import <UIKit/UIKit.h>

@class AVPlayer;

/**
 *  A UIView whose backing layer is an AVPlayerLayer, used to render
 *  AVPlayer audio/video content inline in the view hierarchy.
 */
@interface AVPlayerView : UIView

/**
 *  Initializes the view and attaches the given player to its layer.
 *
 *  @param player The AVPlayer whose output this view should display.
 */
- (instancetype)initWithPlayer:(AVPlayer *)player;

@end
17 |
--------------------------------------------------------------------------------
/AVPlayerExample/AVPlayerView.m:
--------------------------------------------------------------------------------
1 | //
2 | // AVPlayerView.m
3 | // AVPlayerExample
4 | //
5 | // Copyright © 2016-2017 Twilio, Inc. All rights reserved.
6 | //
7 |
#import "AVPlayerView.h"

#import <AVFoundation/AVFoundation.h>

@implementation AVPlayerView

// Back this view with an AVPlayerLayer instead of a plain CALayer so that
// AVPlayer content renders directly into the view.
+ (Class)layerClass {
    return [AVPlayerLayer class];
}

// Designated initializer: starts with a zero frame (the caller is expected
// to size the view) and hands the player to the backing layer.
- (instancetype)initWithPlayer:(AVPlayer *)player {
    self = [super initWithFrame:CGRectZero];
    if (self) {
        [self playerLayer].player = player;
    }
    return self;
}

// Convenience accessor; safe because +layerClass guarantees the layer type.
- (AVPlayerLayer *)playerLayer {
    return (AVPlayerLayer *)self.layer;
}

@end
31 |
--------------------------------------------------------------------------------
/AVPlayerExample/AppDelegate.h:
--------------------------------------------------------------------------------
1 | //
2 | // AppDelegate.h
3 | // AVPlayerExample
4 | //
5 | // Copyright © 2016-2017 Twilio, Inc. All rights reserved.
6 | //
7 |
#import <UIKit/UIKit.h>

/**
 *  The application delegate for AVPlayerExample.
 */
@interface AppDelegate : UIResponder <UIApplicationDelegate>

// The application's single window.
@property (strong, nonatomic) UIWindow *window;

@end
15 |
16 |
--------------------------------------------------------------------------------
/AVPlayerExample/AppDelegate.m:
--------------------------------------------------------------------------------
1 | //
2 | // AppDelegate.m
3 | // AVPlayerExample
4 | //
5 | // Copyright © 2016-2017 Twilio, Inc. All rights reserved.
6 | //
7 |
8 | #import "AppDelegate.h"
9 |
@interface AppDelegate ()

@end

@implementation AppDelegate

- (BOOL)application:(UIApplication *)application didFinishLaunchingWithOptions:(NSDictionary *)launchOptions {
    // This example needs no launch-time customization.
    return YES;
}

- (void)applicationWillResignActive:(UIApplication *)application {
    // The app is about to become inactive (incoming call, SMS, or a move
    // toward the background). Pause ongoing tasks, timers, and any graphics
    // rendering callbacks here.
}

- (void)applicationDidEnterBackground:(UIApplication *)application {
    // Release shared resources and persist enough state to restore the
    // current session if the app is later terminated. Called instead of
    // applicationWillTerminate: when background execution is supported.
}

- (void)applicationWillEnterForeground:(UIApplication *)application {
    // Transitioning from background to active; undo changes made when
    // entering the background.
}

- (void)applicationDidBecomeActive:(UIApplication *)application {
    // Restart tasks paused while inactive and refresh the UI if the app was
    // previously in the background.
}

- (void)applicationWillTerminate:(UIApplication *)application {
    // The app is about to exit; save data if appropriate.
}

@end
45 |
--------------------------------------------------------------------------------
/AVPlayerExample/Assets.xcassets/AppIcon.appiconset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "idiom" : "iphone",
5 | "size" : "20x20",
6 | "scale" : "2x"
7 | },
8 | {
9 | "idiom" : "iphone",
10 | "size" : "20x20",
11 | "scale" : "3x"
12 | },
13 | {
14 | "idiom" : "iphone",
15 | "size" : "29x29",
16 | "scale" : "2x"
17 | },
18 | {
19 | "idiom" : "iphone",
20 | "size" : "29x29",
21 | "scale" : "3x"
22 | },
23 | {
24 | "idiom" : "iphone",
25 | "size" : "40x40",
26 | "scale" : "2x"
27 | },
28 | {
29 | "idiom" : "iphone",
30 | "size" : "40x40",
31 | "scale" : "3x"
32 | },
33 | {
34 | "idiom" : "iphone",
35 | "size" : "60x60",
36 | "scale" : "2x"
37 | },
38 | {
39 | "idiom" : "iphone",
40 | "size" : "60x60",
41 | "scale" : "3x"
42 | },
43 | {
44 | "idiom" : "ipad",
45 | "size" : "20x20",
46 | "scale" : "1x"
47 | },
48 | {
49 | "idiom" : "ipad",
50 | "size" : "20x20",
51 | "scale" : "2x"
52 | },
53 | {
54 | "idiom" : "ipad",
55 | "size" : "29x29",
56 | "scale" : "1x"
57 | },
58 | {
59 | "idiom" : "ipad",
60 | "size" : "29x29",
61 | "scale" : "2x"
62 | },
63 | {
64 | "idiom" : "ipad",
65 | "size" : "40x40",
66 | "scale" : "1x"
67 | },
68 | {
69 | "idiom" : "ipad",
70 | "size" : "40x40",
71 | "scale" : "2x"
72 | },
73 | {
74 | "idiom" : "ipad",
75 | "size" : "76x76",
76 | "scale" : "1x"
77 | },
78 | {
79 | "idiom" : "ipad",
80 | "size" : "76x76",
81 | "scale" : "2x"
82 | },
83 | {
84 | "idiom" : "ipad",
85 | "size" : "83.5x83.5",
86 | "scale" : "2x"
87 | }
88 | ],
89 | "info" : {
90 | "version" : 1,
91 | "author" : "xcode"
92 | }
93 | }
--------------------------------------------------------------------------------
/AVPlayerExample/Base.lproj/LaunchScreen.storyboard:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
--------------------------------------------------------------------------------
/AVPlayerExample/Info.plist:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | NSAppTransportSecurity
6 |
7 | NSAllowsArbitraryLoads
8 |
9 |
10 | CFBundleDevelopmentRegion
11 | en
12 | CFBundleExecutable
13 | $(EXECUTABLE_NAME)
14 | CFBundleIdentifier
15 | $(PRODUCT_BUNDLE_IDENTIFIER)
16 | CFBundleInfoDictionaryVersion
17 | 6.0
18 | CFBundleName
19 | $(PRODUCT_NAME)
20 | CFBundlePackageType
21 | APPL
22 | CFBundleShortVersionString
23 | 1.0
24 | CFBundleVersion
25 | 1
26 | LSRequiresIPhoneOS
27 |
28 | NSCameraUsageDescription
29 | ${PRODUCT_NAME} uses your camera to capture video which is shared with other Room Participants.
30 | NSMicrophoneUsageDescription
31 | ${PRODUCT_NAME} uses your microphone to capture audio which is shared with other Room Participants.
32 | UIBackgroundModes
33 |
34 | audio
35 |
36 | UILaunchStoryboardName
37 | LaunchScreen
38 | UIMainStoryboardFile
39 | Main
40 | UIRequiredDeviceCapabilities
41 |
42 | armv7
43 |
44 | UISupportedInterfaceOrientations
45 |
46 | UIInterfaceOrientationPortrait
47 | UIInterfaceOrientationLandscapeLeft
48 | UIInterfaceOrientationLandscapeRight
49 |
50 | UISupportedInterfaceOrientations~ipad
51 |
52 | UIInterfaceOrientationPortrait
53 | UIInterfaceOrientationPortraitUpsideDown
54 | UIInterfaceOrientationLandscapeLeft
55 | UIInterfaceOrientationLandscapeRight
56 |
57 | UIUserInterfaceStyle
58 | Light
59 |
60 |
61 |
--------------------------------------------------------------------------------
/AVPlayerExample/README.md:
--------------------------------------------------------------------------------
1 | # AVPlayer example for Objective-C
2 |
3 | This example demonstrates how to use `AVPlayer` to stream Audio & Video content while connected to a `TVIRoom`.
4 |
5 | ### Setup
6 |
7 | See the master [README](https://github.com/twilio/video-quickstart-ios/blob/master/README.md) for instructions on how to generate access tokens and connect to a Room.
8 |
9 | ### Usage
10 |
11 | This example is very similar to the basic Quickstart. However, if you join a Room with no other Participants the app will stream media using `AVPlayer` while you wait. Once the first Participant joins the media content is paused and the remote video is shown in its place.
12 |
13 | In order to use `AVPlayer` along with Twilio Video the `TVIAudioController+CallKit` APIs are used. Unlike normal CallKit operation, the application manually activates and deactivates `AVAudioSession` as needed.
14 |
15 | ### Known Issues
16 |
17 | We are currently experiencing some problems with low output volume when `AVPlayer` content is mixed with remote Participant audio. This occurs when using the built-in device loudspeaker and microphone, but not when using headphones to monitor audio. For more information please refer to issue [#402](https://github.com/twilio/video-quickstart-ios/issues/402).
18 |
--------------------------------------------------------------------------------
/AVPlayerExample/Utils.h:
--------------------------------------------------------------------------------
1 | //
2 | // Utils.h
3 | // AVPlayerExample
4 | //
5 | // Copyright © 2016-2017 Twilio, Inc. All rights reserved.
6 | //
7 |
#import <Foundation/Foundation.h>

/**
 *  Platform-related helpers.
 */
@interface PlatformUtils : NSObject

// YES when the build targets the iOS Simulator, NO on device.
+ (BOOL)isSimulator;

@end

/**
 *  Helpers for fetching Twilio access tokens.
 */
@interface TokenUtils : NSObject

// Fetches an access token string from the given URL, invoking the completion
// handler with either the token or an error.
+ (void)retrieveAccessTokenFromURL:(NSString *)tokenURLStr
                        completion:(void (^)(NSString* token, NSError *err)) completionHandler;

@end
22 |
--------------------------------------------------------------------------------
/AVPlayerExample/Utils.m:
--------------------------------------------------------------------------------
1 | //
2 | // Utils.m
3 | // AVPlayerExample
4 | //
5 | // Copyright © 2016-2017 Twilio, Inc. All rights reserved.
6 | //
7 |
8 | #import "Utils.h"
9 |
@implementation PlatformUtils

// Compile-time platform check: TARGET_IPHONE_SIMULATOR is non-zero only in
// simulator builds, so device builds take the #else branch.
+ (BOOL)isSimulator {
#if TARGET_IPHONE_SIMULATOR
    return YES;
#else
    return NO;
#endif
}

@end
20 |
@implementation TokenUtils

/**
 *  Fetches an access token from `tokenURLStr` over HTTP(S).
 *
 *  The completion handler is invoked on the session's delegate queue with
 *  either the response body decoded as UTF-8, or a non-nil error.
 */
+ (void)retrieveAccessTokenFromURL:(NSString *)tokenURLStr
                        completion:(void (^)(NSString* token, NSError *err)) completionHandler {
    NSURL *tokenURL = [NSURL URLWithString:tokenURLStr];
    if (!tokenURL) {
        // Guard: NSURLSession raises if handed a nil URL. Report a
        // descriptive error instead of crashing on a malformed string.
        NSError *urlError = [NSError errorWithDomain:NSURLErrorDomain
                                                code:NSURLErrorBadURL
                                            userInfo:@{NSLocalizedDescriptionKey : @"Invalid token URL."}];
        completionHandler(nil, urlError);
        return;
    }
    NSURLSessionConfiguration *sessionConfig = [NSURLSessionConfiguration defaultSessionConfiguration];
    NSURLSession *session = [NSURLSession sessionWithConfiguration:sessionConfig];
    NSURLSessionDataTask *task = [session dataTaskWithURL:tokenURL
                                        completionHandler: ^(NSData * _Nullable data,
                                                             NSURLResponse * _Nullable response,
                                                             NSError * _Nullable error) {
        NSString *accessToken = nil;
        if (!error && data) {
            // Decode failure leaves accessToken nil; callers must check both
            // arguments, mirroring the original contract.
            accessToken = [[NSString alloc] initWithData:data
                                                encoding:NSUTF8StringEncoding];
        }
        completionHandler(accessToken, error);
    }];
    [task resume];
    // Let the session tear down once the task finishes; without this every
    // call leaks a session (and its worker resources) until process exit.
    [session finishTasksAndInvalidate];
}

@end
43 |
--------------------------------------------------------------------------------
/AVPlayerExample/ViewController.h:
--------------------------------------------------------------------------------
1 | //
2 | // ViewController.h
3 | // AVPlayerExample
4 | //
5 | // Copyright © 2016-2017 Twilio, Inc. All rights reserved.
6 | //
7 |
#import <UIKit/UIKit.h>

/**
 *  The main view controller for AVPlayerExample.
 */
@interface ViewController : UIViewController

@end
14 |
15 |
--------------------------------------------------------------------------------
/AVPlayerExample/main.m:
--------------------------------------------------------------------------------
1 | //
2 | // main.m
3 | // AVPlayerExample
4 | //
5 | // Created by Chris Eagleston on 5/16/17.
6 | // Copyright © 2017 Twilio Inc. All rights reserved.
7 | //
8 |
#import <UIKit/UIKit.h>
#import "AppDelegate.h"

// Standard UIKit entry point: starts the main run loop with AppDelegate
// installed as the application delegate.
int main(int argc, char * argv[]) {
    @autoreleasepool {
        return UIApplicationMain(argc, argv, nil, NSStringFromClass([AppDelegate class]));
    }
}
17 |
--------------------------------------------------------------------------------
/AudioDeviceExample.xcodeproj/project.xcworkspace/contents.xcworkspacedata:
--------------------------------------------------------------------------------
1 |
2 |
4 |
6 |
7 |
8 |
--------------------------------------------------------------------------------
/AudioDeviceExample.xcodeproj/project.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | IDEDidComputeMac32BitWarning
6 |
7 |
8 |
9 |
--------------------------------------------------------------------------------
/AudioDeviceExample/AppDelegate.swift:
--------------------------------------------------------------------------------
1 | //
2 | // AppDelegate.swift
3 | // AudioDeviceExample
4 | //
5 | // Copyright © 2018-2019 Twilio Inc. All rights reserved.
6 | //
7 |
8 | import UIKit
9 |
@UIApplicationMain
class AppDelegate: UIResponder, UIApplicationDelegate {

    /// The app's main window, installed from the Main storyboard at launch.
    var window: UIWindow?

    func application(_ application: UIApplication,
                     didFinishLaunchingWithOptions launchOptions: [UIApplication.LaunchOptionsKey: Any]?) -> Bool {
        // No launch-time customization is required for this example.
        return true
    }

    // MARK: - UIApplicationDelegate lifecycle (intentionally no-ops in this example)

    /// About to become inactive (e.g. incoming call, SMS, or moving to the app switcher).
    /// Pause ongoing tasks and disable timers here if the app ever needs to.
    func applicationWillResignActive(_ application: UIApplication) {}

    /// Entered the background. Release shared resources and persist enough state to
    /// restore the app should it be terminated later.
    func applicationDidEnterBackground(_ application: UIApplication) {}

    /// Transitioning from the background to the active state; undo changes made on
    /// entering the background.
    func applicationWillEnterForeground(_ application: UIApplication) {}

    /// Active again; restart tasks paused while inactive and refresh the UI if needed.
    func applicationDidBecomeActive(_ application: UIApplication) {}

    /// About to terminate; save data if appropriate.
    func applicationWillTerminate(_ application: UIApplication) {}
}
45 |
46 |
--------------------------------------------------------------------------------
/AudioDeviceExample/Assets.xcassets/AppIcon.appiconset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "idiom" : "iphone",
5 | "size" : "20x20",
6 | "scale" : "2x"
7 | },
8 | {
9 | "idiom" : "iphone",
10 | "size" : "20x20",
11 | "scale" : "3x"
12 | },
13 | {
14 | "idiom" : "iphone",
15 | "size" : "29x29",
16 | "scale" : "2x"
17 | },
18 | {
19 | "idiom" : "iphone",
20 | "size" : "29x29",
21 | "scale" : "3x"
22 | },
23 | {
24 | "idiom" : "iphone",
25 | "size" : "40x40",
26 | "scale" : "2x"
27 | },
28 | {
29 | "idiom" : "iphone",
30 | "size" : "40x40",
31 | "scale" : "3x"
32 | },
33 | {
34 | "idiom" : "iphone",
35 | "size" : "60x60",
36 | "scale" : "2x"
37 | },
38 | {
39 | "idiom" : "iphone",
40 | "size" : "60x60",
41 | "scale" : "3x"
42 | },
43 | {
44 | "idiom" : "ipad",
45 | "size" : "20x20",
46 | "scale" : "1x"
47 | },
48 | {
49 | "idiom" : "ipad",
50 | "size" : "20x20",
51 | "scale" : "2x"
52 | },
53 | {
54 | "idiom" : "ipad",
55 | "size" : "29x29",
56 | "scale" : "1x"
57 | },
58 | {
59 | "idiom" : "ipad",
60 | "size" : "29x29",
61 | "scale" : "2x"
62 | },
63 | {
64 | "idiom" : "ipad",
65 | "size" : "40x40",
66 | "scale" : "1x"
67 | },
68 | {
69 | "idiom" : "ipad",
70 | "size" : "40x40",
71 | "scale" : "2x"
72 | },
73 | {
74 | "idiom" : "ipad",
75 | "size" : "76x76",
76 | "scale" : "1x"
77 | },
78 | {
79 | "idiom" : "ipad",
80 | "size" : "76x76",
81 | "scale" : "2x"
82 | },
83 | {
84 | "idiom" : "ipad",
85 | "size" : "83.5x83.5",
86 | "scale" : "2x"
87 | },
88 | {
89 | "idiom" : "ios-marketing",
90 | "size" : "1024x1024",
91 | "scale" : "1x"
92 | }
93 | ],
94 | "info" : {
95 | "version" : 1,
96 | "author" : "xcode"
97 | }
98 | }
--------------------------------------------------------------------------------
/AudioDeviceExample/AudioDevices/AudioDevices-Bridging-Header.h:
--------------------------------------------------------------------------------
1 | //
2 | // AudioDevices-Bridging-Header.h
3 | // AudioDeviceExample
4 | //
5 | // Copyright © 2018-2019 Twilio Inc. All rights reserved.
6 | //
7 |
8 | #import "ExampleAVAudioEngineDevice.h"
9 | #import "ExampleCoreAudioDevice.h"
10 |
--------------------------------------------------------------------------------
/AudioDeviceExample/AudioDevices/ExampleAVAudioEngineDevice.h:
--------------------------------------------------------------------------------
//
//  ExampleAVAudioEngineDevice.h
//  AudioDeviceExample
//
//  Copyright © 2018-2019 Twilio Inc. All rights reserved.
//

// NOTE(review): the import path and the protocol conformance below were lost during
// extraction and have been reconstructed (this class is a custom audio device for the
// Twilio Video SDK) — confirm against the original project.
#import <TwilioVideo/TwilioVideo.h>

NS_CLASS_AVAILABLE(NA, 11_0)
@interface ExampleAVAudioEngineDevice : NSObject <TVIAudioDevice>

/**
 * @brief This method is invoked when the client wishes to play music using the AVAudioEngine and CoreAudio.
 *
 * @param continuous Continue playing music after the disconnect.
 *
 * @discussion Your app can play music before connecting to a Room, while in a Room, or after the disconnect.
 * If you wish to play music irrespective of whether you are connected to a Room (before [TwilioVideo connect:] or
 * after [room disconnect]), or wish to continue playing music after disconnecting from a Room, set the `continuous`
 * argument to `YES`.
 * If `continuous` is set to `NO`, the audio device will not continue playing the music once you disconnect from the Room.
 */
- (void)playMusic:(BOOL)continuous;

/**
 * @brief Enable audio device
 *
 * @discussion By default, the SDK initializes this property to YES. Setting it to NO entirely disables the audio device. When the device is disabled, both audio capture and playback halt. This toggle should be used in CallKit delegate (CXProviderDelegate) methods (ex: didReset, didActivate, didDeactivate) to negotiate call holding and other events taking place from the iOS dialer
 */

@property (nonatomic, assign, getter=isEnabled) BOOL enabled;

@end
35 |
--------------------------------------------------------------------------------
/AudioDeviceExample/AudioDevices/ExampleCoreAudioDevice.h:
--------------------------------------------------------------------------------
//
//  ExampleCoreAudioDevice.h
//  AudioDeviceExample
//
//  Copyright © 2018-2019 Twilio, Inc. All rights reserved.
//

// NOTE(review): the import path and the protocol conformance below were lost during
// extraction and have been reconstructed (this class is a playback-only custom audio
// device for the Twilio Video SDK) — confirm against the original project.
#import <TwilioVideo/TwilioVideo.h>

/*
 * ExampleCoreAudioDevice uses a RemoteIO audio unit to playback stereo audio at up to 48 kHz.
 * In contrast to `TVIDefaultAudioDevice`, this class does not record audio and is intended for high quality playback.
 * Since full duplex audio is not needed this device does not use the built in echo cancellation provided by
 * CoreAudio's VoiceProcessingIO audio unit.
 */
@interface ExampleCoreAudioDevice : NSObject <TVIAudioDevice>

@end
19 |
--------------------------------------------------------------------------------
/AudioDeviceExample/AudioDevices/mixLoop.caf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/twilio/video-quickstart-ios/d0da993e23f1bc6f632d5807066c702a5ab822be/AudioDeviceExample/AudioDevices/mixLoop.caf
--------------------------------------------------------------------------------
/AudioDeviceExample/Base.lproj/LaunchScreen.storyboard:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
--------------------------------------------------------------------------------
/AudioDeviceExample/Info.plist:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | UIBackgroundModes
6 |
7 | voip
8 | audio
9 |
10 | NSCameraUsageDescription
11 | ${PRODUCT_NAME} uses your camera to capture video which is shared with other Room Participants.
12 | NSMicrophoneUsageDescription
13 | ${PRODUCT_NAME} uses your microphone to capture audio which is shared with other Room Participants.
14 | NSAppTransportSecurity
15 |
16 | NSAllowsArbitraryLoads
17 |
18 |
19 | CFBundleDevelopmentRegion
20 | $(DEVELOPMENT_LANGUAGE)
21 | CFBundleExecutable
22 | $(EXECUTABLE_NAME)
23 | CFBundleIdentifier
24 | $(PRODUCT_BUNDLE_IDENTIFIER)
25 | CFBundleInfoDictionaryVersion
26 | 6.0
27 | CFBundleName
28 | $(PRODUCT_NAME)
29 | CFBundlePackageType
30 | APPL
31 | CFBundleShortVersionString
32 | 1.0
33 | CFBundleVersion
34 | 1
35 | LSRequiresIPhoneOS
36 |
37 | UILaunchStoryboardName
38 | LaunchScreen
39 | UIMainStoryboardFile
40 | Main
41 | UIRequiredDeviceCapabilities
42 |
43 | armv7
44 |
45 | UISupportedInterfaceOrientations
46 |
47 | UIInterfaceOrientationPortrait
48 | UIInterfaceOrientationLandscapeLeft
49 | UIInterfaceOrientationLandscapeRight
50 |
51 | UISupportedInterfaceOrientations~ipad
52 |
53 | UIInterfaceOrientationPortrait
54 | UIInterfaceOrientationPortraitUpsideDown
55 | UIInterfaceOrientationLandscapeLeft
56 | UIInterfaceOrientationLandscapeRight
57 |
58 | UIUserInterfaceStyle
59 | Light
60 |
61 |
62 |
--------------------------------------------------------------------------------
/AudioDeviceExample/README.md:
--------------------------------------------------------------------------------
1 | # Twilio Video TVIAudioDevice Example
2 |
3 | The project demonstrates how to use Twilio's Programmable Video SDK with audio playback and recording functionality provided by a custom `TVIAudioDevice`.
4 |
5 | The example demonstrates the following custom audio devices:
6 |
7 | **ExampleCoreAudioDevice**
8 |
9 | Uses a RemoteIO audio unit to playback stereo audio at up to 48 kHz. In contrast to `TVIDefaultAudioDevice`, this class does not record audio and is intended for high quality playback. Since recording is not needed this device does not use the built in echo cancellation provided by CoreAudio's VoiceProcessingIO audio unit nor does it require microphone permissions from the user.
10 |
11 | **ExampleAVAudioEngineDevice**
12 |
13 | Uses CoreAudio's VoiceProcessingIO audio unit to play back and record audio at up to 48 kHz with built-in echo cancellation. The example uses two AVAudioEngine instances in manual rendering mode:
14 |
15 | 1. The upstream AVAudioEngine is used for mixing the Remote Participant's audio with audio from a file. The AudioUnit receives mixed audio samples from AVAudioEngine's output node.
16 | 2. The downstream AVAudioEngine is used for mixing the Local Participant's microphone audio with audio from a file. The Video SDK receives the mixed audio samples from the AVAudioEngine's output node.
17 |
18 | This diagram describes how ExampleAVAudioEngineDevice uses TwilioVideo, AVAudioEngine, and CoreAudio -
19 |
20 |
21 |
22 | Please note, ExampleAVAudioEngineDevice requires iOS 11.0 or above.
23 |
24 | ### Setup
25 |
26 | See the master [README](https://github.com/twilio/video-quickstart-ios/blob/master/README.md) for instructions on how to generate access tokens and connect to a Room.
27 |
28 | This example requires Xcode 12.0 and the iOS 14.0 SDK, as well as a device running iOS 11.0 or above.
29 |
30 | ### Running
31 |
32 | Once you have configured your access token, build and run the example. You will be presented with the following screen:
33 |
34 |
35 |
36 | Tap the audio device button to select an audio device:
37 |
38 |
39 |
40 | Once the audio device of your choice is selected, enter the room name.
41 |
42 |
43 |
44 | Tap the "Connect" button to join a Room. Once you've joined you will be sharing video and audio if `ExampleAVAudioEngineDevice` is used. However the audio will not be shared if `ExampleCoreAudioDevice` is used. In order to playback audio from a remote Participant you will need a Client which supports audio recording. The easiest way to do this is to build and run the normal QuickStart [example](https://github.com/twilio/video-quickstart-ios/tree/master/VideoQuickStart) and join the same Room.
45 |
46 | After the remote Participant has joined, you should be able to hear their audio. If you are using `ExampleCoreAudioDevice`, beware of audio feedback when both devices are in the same physical space, because `ExampleCoreAudioDevice` does not perform echo cancellation.
47 |
48 | ### Known Issues
49 |
50 | The AVAudioSession is configured and activated at device initialization time. Ideally, it would be better to activate the AVAudioSession only when audio playback or recording is needed.
--------------------------------------------------------------------------------
/AudioSinkExample.xcodeproj/project.xcworkspace/contents.xcworkspacedata:
--------------------------------------------------------------------------------
1 |
2 |
4 |
6 |
7 |
8 |
--------------------------------------------------------------------------------
/AudioSinkExample.xcodeproj/project.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | IDEDidComputeMac32BitWarning
6 |
7 |
8 |
9 |
--------------------------------------------------------------------------------
/AudioSinkExample.xcodeproj/xcshareddata/xcschemes/AudioSinkExample.xcscheme:
--------------------------------------------------------------------------------
1 |
2 |
5 |
8 |
9 |
15 |
21 |
22 |
23 |
24 |
25 |
30 |
31 |
32 |
33 |
43 |
45 |
51 |
52 |
53 |
54 |
60 |
62 |
68 |
69 |
70 |
71 |
73 |
74 |
77 |
78 |
79 |
--------------------------------------------------------------------------------
/AudioSinkExample/AppDelegate.swift:
--------------------------------------------------------------------------------
1 | //
2 | // AppDelegate.swift
3 | // AudioSinkExample
4 | //
5 | // Copyright © 2017-2019 Twilio Inc. All rights reserved.
6 | //
7 |
8 | import UIKit
9 |
@UIApplicationMain
class AppDelegate: UIResponder, UIApplicationDelegate {

    /// The app's main window, installed from the Main storyboard at launch.
    var window: UIWindow?

    func application(_ application: UIApplication,
                     didFinishLaunchingWithOptions launchOptions: [UIApplication.LaunchOptionsKey: Any]?) -> Bool {
        // No launch-time customization is required for this example.
        return true
    }

    // MARK: - UIApplicationDelegate lifecycle (intentionally no-ops in this example)

    /// About to become inactive (e.g. incoming call, SMS, or moving to the app switcher).
    /// Pause ongoing tasks and disable timers here if the app ever needs to.
    func applicationWillResignActive(_ application: UIApplication) {}

    /// Entered the background. Release shared resources and persist enough state to
    /// restore the app should it be terminated later.
    func applicationDidEnterBackground(_ application: UIApplication) {}

    /// Transitioning from the background to the active state; undo changes made on
    /// entering the background.
    func applicationWillEnterForeground(_ application: UIApplication) {}

    /// Active again; restart tasks paused while inactive and refresh the UI if needed.
    func applicationDidBecomeActive(_ application: UIApplication) {}

    /// About to terminate; save data if appropriate.
    func applicationWillTerminate(_ application: UIApplication) {}
}
45 |
46 |
--------------------------------------------------------------------------------
/AudioSinkExample/Assets.xcassets/AppIcon.appiconset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "idiom" : "iphone",
5 | "size" : "20x20",
6 | "scale" : "2x"
7 | },
8 | {
9 | "idiom" : "iphone",
10 | "size" : "20x20",
11 | "scale" : "3x"
12 | },
13 | {
14 | "idiom" : "iphone",
15 | "size" : "29x29",
16 | "scale" : "2x"
17 | },
18 | {
19 | "idiom" : "iphone",
20 | "size" : "29x29",
21 | "scale" : "3x"
22 | },
23 | {
24 | "idiom" : "iphone",
25 | "size" : "40x40",
26 | "scale" : "2x"
27 | },
28 | {
29 | "idiom" : "iphone",
30 | "size" : "40x40",
31 | "scale" : "3x"
32 | },
33 | {
34 | "idiom" : "iphone",
35 | "size" : "60x60",
36 | "scale" : "2x"
37 | },
38 | {
39 | "idiom" : "iphone",
40 | "size" : "60x60",
41 | "scale" : "3x"
42 | },
43 | {
44 | "idiom" : "ipad",
45 | "size" : "20x20",
46 | "scale" : "1x"
47 | },
48 | {
49 | "idiom" : "ipad",
50 | "size" : "20x20",
51 | "scale" : "2x"
52 | },
53 | {
54 | "idiom" : "ipad",
55 | "size" : "29x29",
56 | "scale" : "1x"
57 | },
58 | {
59 | "idiom" : "ipad",
60 | "size" : "29x29",
61 | "scale" : "2x"
62 | },
63 | {
64 | "idiom" : "ipad",
65 | "size" : "40x40",
66 | "scale" : "1x"
67 | },
68 | {
69 | "idiom" : "ipad",
70 | "size" : "40x40",
71 | "scale" : "2x"
72 | },
73 | {
74 | "idiom" : "ipad",
75 | "size" : "76x76",
76 | "scale" : "1x"
77 | },
78 | {
79 | "idiom" : "ipad",
80 | "size" : "76x76",
81 | "scale" : "2x"
82 | },
83 | {
84 | "idiom" : "ipad",
85 | "size" : "83.5x83.5",
86 | "scale" : "2x"
87 | },
88 | {
89 | "idiom" : "ios-marketing",
90 | "size" : "1024x1024",
91 | "scale" : "1x"
92 | }
93 | ],
94 | "info" : {
95 | "version" : 1,
96 | "author" : "xcode"
97 | }
98 | }
--------------------------------------------------------------------------------
/AudioSinkExample/AudioSinks/AudioSinks-Bridging-Header.h:
--------------------------------------------------------------------------------
1 | //
2 | // AudioSinks-Bridging-Header.h
3 | // AudioSinkExample
4 | //
5 | // Copyright © 2017-2019 Twilio Inc. All rights reserved.
6 | //
7 |
8 | #import "ExampleAudioRecorder.h"
9 | #import "ExampleSpeechRecognizer.h"
10 |
--------------------------------------------------------------------------------
/AudioSinkExample/AudioSinks/ExampleAudioRecorder.h:
--------------------------------------------------------------------------------
//
//  ExampleAudioRecorder.h
//  AudioSinkExample
//
//  Copyright © 2017-2019 Twilio, Inc. All rights reserved.
//

@import Foundation;
@import TwilioVideo;

// Records the samples of a TVIAudioTrack to a .wav file in the app's Documents directory.
// NOTE(review): the <TVIAudioSink> conformance appears to have been lost during extraction
// (the implementation provides renderSample: and attaches itself via -[TVIAudioTrack addSink:]);
// it has been restored here — confirm against the original project.
@interface ExampleAudioRecorder : NSObject <TVIAudioSink>

- (null_unspecified instancetype)initWithAudioTrack:(nonnull TVIAudioTrack *)audioTrack
                                          identifier:(nonnull NSString *)identifier;

// Breaks the strong reference from TVIAudioTrack by removing its Sink.
- (void)stopRecording;

// Identifier supplied at init; used to name the recording file.
@property (nonatomic, copy, readonly, nonnull) NSString *identifier;

@end
22 |
--------------------------------------------------------------------------------
/AudioSinkExample/AudioSinks/ExampleAudioRecorder.m:
--------------------------------------------------------------------------------
1 | //
2 | // ExampleAudioRecorder.m
3 | // AudioSinkExample
4 | //
5 | // Copyright © 2017-2019 Twilio, Inc. All rights reserved.
6 | //
7 |
8 | #import "ExampleAudioRecorder.h"
9 |
10 | #import
11 |
@interface ExampleAudioRecorder()

// Writer that produces the .wav file; created lazily on the first rendered sample.
@property (nonatomic, strong) AVAssetWriter *audioRecorder;
@property (nonatomic, strong) AVAssetWriterInput *audioRecorderInput;
// kCMTimeInvalid until the writing session starts; doubles as the "recording started" flag.
@property (nonatomic, assign) CMTime recorderTimestamp;
// Source format captured from the first sample; later samples are asserted against it.
@property (nonatomic, assign) UInt32 numberOfChannels;
@property (nonatomic, assign) Float64 sampleRate;

// Weak back-reference; per the header, the strong reference is broken by removing the sink.
@property (nonatomic, weak) TVIAudioTrack *audioTrack;

@end
23 |
@implementation ExampleAudioRecorder

// Designated initializer. Attaches self to `audioTrack` as a sink; file writing is
// deferred until the first sample arrives in renderSample: so the real format is known.
- (instancetype)initWithAudioTrack:(TVIAudioTrack *)audioTrack identifier:(NSString *)identifier {
    NSParameterAssert(audioTrack);
    NSParameterAssert(identifier);

    self = [super init];
    if (self) {
        // Invalid timestamp marks "not recording yet"; renderSample: checks this.
        _recorderTimestamp = kCMTimeInvalid;
        _audioTrack = audioTrack;
        _identifier = identifier;

        // We will defer recording until the first audio sample is available.
        [_audioTrack addSink:self];
    }
    return self;
}

// Creates the AVAssetWriter/input using the first sample's stream description and
// starts a writing session at `timestamp`. On failure the writer properties are left nil,
// so subsequent appendSampleBuffer: calls no-op (messages to nil) and log.
- (void)startRecordingWithTimestamp:(CMTime)timestamp basicDescription:(const AudioStreamBasicDescription *)basicDescription {
    // Setup Recorder
    NSError *error = nil;
    _audioRecorder = [[AVAssetWriter alloc] initWithURL:[[self class] recordingURLWithIdentifier:_identifier]
                                               fileType:AVFileTypeWAVE
                                                  error:&error];

    if (error) {
        NSLog(@"Error setting up audio recorder: %@", error);
        return;
    }

    // Capture the source format so renderSample: can assert it never changes mid-recording.
    _numberOfChannels = basicDescription->mChannelsPerFrame;
    _sampleRate = basicDescription->mSampleRate;

    NSLog(@"Recorder input is %d %@, %f Hz.",
          _numberOfChannels, _numberOfChannels == 1 ? @"channel" : @"channels", _sampleRate);

    // Assume that TVIAudioTrack will produce interleaved stereo LPCM @ 16-bit / 48khz
    NSDictionary *outputSettings = @{AVFormatIDKey : @(kAudioFormatLinearPCM),
                                     AVSampleRateKey : @(_sampleRate),
                                     AVNumberOfChannelsKey : @(_numberOfChannels),
                                     AVLinearPCMBitDepthKey : @(16),
                                     AVLinearPCMIsFloatKey : @(NO),
                                     AVLinearPCMIsBigEndianKey : @(NO),
                                     AVLinearPCMIsNonInterleaved : @(NO)};

    _audioRecorderInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio outputSettings:outputSettings];
    // Real-time mode: the sink delivers samples live, not from a file.
    _audioRecorderInput.expectsMediaDataInRealTime = YES;

    if ([_audioRecorder canAddInput:_audioRecorderInput]) {
        [_audioRecorder addInput:_audioRecorderInput];
        BOOL success = [_audioRecorder startWriting];

        if (success) {
            NSLog(@"Started recording audio track to: %@", _audioRecorder.outputURL);
            [self.audioRecorder startSessionAtSourceTime:timestamp];
            // A valid timestamp is the signal that recording has started.
            self.recorderTimestamp = timestamp;
        } else {
            NSLog(@"Couldn't start the AVAssetWriter: %@ error: %@", _audioRecorder, _audioRecorder.error);
        }
    } else {
        _audioRecorder = nil;
        _audioRecorderInput = nil;
    }

    // This example does not support backgrounding. Now is a good point to consider kicking off a background
    // task, and handling failures.
}

// Detaches from the track (breaking its strong reference to us) and finalizes the
// WAV file asynchronously.
- (void)stopRecording {
    if (self.audioTrack) {
        [self.audioTrack removeSink:self];
        self.audioTrack = nil;
    }

    [self.audioRecorderInput markAsFinished];

    // Teardown the recorder
    [self.audioRecorder finishWritingWithCompletionHandler:^{
        if (self.audioRecorder.status == AVAssetWriterStatusFailed) {
            NSLog(@"AVAssetWriter failed with error: %@", self.audioRecorder.error);
        } else if (self.audioRecorder.status == AVAssetWriterStatusCompleted) {
            NSLog(@"AVAssetWriter finished writing to: %@", self.audioRecorder.outputURL);
        }
        // Reset state so a future first sample could start a new recording.
        self.audioRecorder = nil;
        self.audioRecorderInput = nil;
        self.recorderTimestamp = kCMTimeInvalid;
    }];
}

// Returns YES when every sample in the buffer is zero, or when the buffer is unreadable.
// Samples are read in 2-byte strides (16-bit integer PCM assumed, per the writer settings).
- (BOOL)detectSilence:(CMSampleBufferRef)audioSample {
    // Get the audio samples. We count a corrupted buffer as silence.
    CMBlockBufferRef blockBuffer = CMSampleBufferGetDataBuffer(audioSample);
    size_t inputBytes = 0;
    char *inputSamples = NULL;
    OSStatus status = CMBlockBufferGetDataPointer(blockBuffer, 0, NULL, &inputBytes, &inputSamples);

    if (status != kCMBlockBufferNoErr) {
        NSLog(@"Failed to get data pointer: %d", status);
        return YES;
    }

    // Check for silence. This technique is not efficient, it might be better to sum the values of the vector instead.
    BOOL silence = YES;
    for (size_t i = 0; i < inputBytes; i+=2) {
        int16_t *sample = (int16_t *)(inputSamples + i);
        if (*sample != 0) {
            silence = NO;
            break;
        }
    }

    return silence;
}

// Builds Documents/<identifier>-<HHmmss>.wav so a reused identifier still yields a
// distinct filename (time-of-day granularity).
+ (NSURL *)recordingURLWithIdentifier:(NSString *)identifier {
    NSURL *documentsDirectory = [[[NSFileManager defaultManager] URLsForDirectory:NSDocumentDirectory inDomains:NSUserDomainMask] lastObject];

    // Choose a filename which will be unique if the `identifier` is reused (Append RFC3339 formatted date).
    NSDateFormatter *dateFormatter = [[NSDateFormatter alloc] init];
    // POSIX locale + fixed GMT zone: stable formatting independent of user settings.
    dateFormatter.locale = [NSLocale localeWithLocaleIdentifier:@"en_US_POSIX"];
    dateFormatter.dateFormat = @"HHmmss";
    dateFormatter.timeZone = [NSTimeZone timeZoneForSecondsFromGMT:0];

    NSString *dateComponent = [dateFormatter stringFromDate:[NSDate date]];
    NSString *filename = [NSString stringWithFormat:@"%@-%@.wav", identifier, dateComponent];

    return [documentsDirectory URLByAppendingPathComponent:filename];
}

#pragma mark - TVIAudioSink

// Invoked for every audio sample the track produces. Starts the recording on the
// first "real" sample and appends each buffer to the writer input afterward.
- (void)renderSample:(CMSampleBufferRef)audioSample {
    CMFormatDescriptionRef formatDescription = CMSampleBufferGetFormatDescription(audioSample);
    const AudioStreamBasicDescription *basicDescription = CMAudioFormatDescriptionGetStreamBasicDescription(formatDescription);
    CMTime presentationTimestamp = CMSampleBufferGetPresentationTimeStamp(audioSample);

    // We defer recording until the first sample in order to determine the appropriate channel layout and sample rate.
    if (CMTIME_IS_INVALID(self.recorderTimestamp)) {
        // Detect and discard initial 16 kHz silence, before the first real samples are received from a remote source.
        if (basicDescription->mSampleRate == 16000. && [self detectSilence:audioSample]) {
            return;
        } else {
            [self startRecordingWithTimestamp:presentationTimestamp basicDescription:basicDescription];
        }
    } else {
        // Sanity check on our assumptions.
        NSAssert(basicDescription->mChannelsPerFrame == _numberOfChannels,
                 @"Channel mismatch. was: %d now: %d", _numberOfChannels, basicDescription->mChannelsPerFrame);
        NSAssert(basicDescription->mSampleRate == _sampleRate,
                 @"Sample rate mismatch. was: %f now: %f", _sampleRate, basicDescription->mSampleRate);
    }

    BOOL success = [self.audioRecorderInput appendSampleBuffer:audioSample];
    if (!success) {
        NSLog(@"Failed to append sample to writer: %@, error: %@", self.audioRecorder, self.audioRecorder.error);
    }
}

@end
183 |
--------------------------------------------------------------------------------
/AudioSinkExample/AudioSinks/ExampleSpeechRecognizer.h:
--------------------------------------------------------------------------------
//
//  ExampleSpeechRecognizer.h
//  AudioSinkExample
//
//  Copyright © 2017-2019 Twilio, Inc. All rights reserved.
//

@import Foundation;
@import Speech;
@import TwilioVideo;

// Streams a TVIAudioTrack's samples into Apple's Speech framework and surfaces
// (partial) transcription results via the result handler and `speechResult`.
// NOTE(review): the <TVIAudioSink> conformance appears to have been lost during extraction
// (the implementation provides renderSample: and attaches itself via -[TVIAudioTrack addSink:]);
// it has been restored here — confirm against the original project.
@interface ExampleSpeechRecognizer : NSObject <TVIAudioSink>

- (null_unspecified instancetype)initWithAudioTrack:(nonnull TVIAudioTrack *)audioTrack
                                         identifier:(nonnull NSString *)identifier
                                      resultHandler:(void (^ _Nonnull)(SFSpeechRecognitionResult * __nullable result, NSError * __nullable error))resultHandler;

// Breaks the strong reference from TVIAudioTrack by removing its Sink.
- (void)stopRecognizing;

// Most recent formatted transcription; updated as partial results arrive.
@property (nonatomic, copy, readonly, nullable) NSString *speechResult;
@property (nonatomic, copy, readonly, nonnull) NSString *identifier;

@end
25 |
--------------------------------------------------------------------------------
/AudioSinkExample/AudioSinks/ExampleSpeechRecognizer.m:
--------------------------------------------------------------------------------
1 | //
2 | // ExampleSpeechRecognizer.m
3 | // AudioSinkExample
4 | //
5 | // Copyright © 2017-2019 Twilio, Inc. All rights reserved.
6 | //
7 |
8 | #import "ExampleSpeechRecognizer.h"
9 |
10 | #import
11 |
12 | static UInt32 kChannelCountMono = 1;
13 |
@interface ExampleSpeechRecognizer()

// Recognizer configured for dictation; owns the recognition task below.
@property (nonatomic, strong) SFSpeechRecognizer *speechRecognizer;
// Buffered-audio request that renderSample: feeds with (mono) PCM.
@property (nonatomic, strong) SFSpeechAudioBufferRecognitionRequest *speechRequest;
@property (nonatomic, strong) SFSpeechRecognitionTask *speechTask;
// Lazily created converter for downmixing multi-channel input to mono.
// This is a manually managed Core Audio resource — it must be freed with
// AudioConverterDispose (see stopRecognizing).
@property (nonatomic, assign) AudioConverterRef speechConverter;

// Readwrite redeclaration of the publicly readonly property.
@property (nonatomic, copy) NSString *speechResult;
// Weak back-reference; per the header, the strong reference is broken by removing the sink.
@property (nonatomic, weak) TVIAudioTrack *audioTrack;

@end
25 |
@implementation ExampleSpeechRecognizer

// Attaches to `audioTrack` as a sink and starts a dictation recognition task.
// `resultHandler` is invoked for every (partial) result or error the task produces.
- (instancetype)initWithAudioTrack:(TVIAudioTrack *)audioTrack
                        identifier:(NSString *)identifier
                     resultHandler:(void (^)(SFSpeechRecognitionResult * result, NSError * error))resultHandler {
    self = [super init];

    if (self != nil) {
        _speechRecognizer = [[SFSpeechRecognizer alloc] init];
        _speechRecognizer.defaultTaskHint = SFSpeechRecognitionTaskHintDictation;

        _speechRequest = [[SFSpeechAudioBufferRecognitionRequest alloc] init];
        _speechRequest.shouldReportPartialResults = YES;

        // Weak capture avoids a retain cycle: self -> speechTask -> handler block -> self.
        __weak typeof(self) weakSelf = self;
        _speechTask = [_speechRecognizer recognitionTaskWithRequest:_speechRequest resultHandler:^(SFSpeechRecognitionResult * _Nullable result, NSError * _Nullable error) {
            __strong typeof(self) strongSelf = weakSelf;
            if (result) {
                strongSelf.speechResult = result.bestTranscription.formattedString;
            } else {
                NSLog(@"Speech recognition error: %@", error);
            }

            resultHandler(result, error);
        }];

        _audioTrack = audioTrack;
        [_audioTrack addSink:self];
        _identifier = identifier;
    }

    return self;
}

- (void)dealloc {
    [self.speechTask cancel];

    // Fix: if stopRecognizing was never called, the AudioConverter leaked.
    // AudioConverterRef is a manually managed Core Audio resource, not an ARC object,
    // so it must be disposed explicitly.
    if (_speechConverter != NULL) {
        AudioConverterDispose(_speechConverter);
        _speechConverter = NULL;
    }
}

// Breaks the strong reference from the TVIAudioTrack and tears down recognition resources.
- (void)stopRecognizing {
    [self.audioTrack removeSink:self];

    // Signal that no further audio is coming so the task can finalize its result,
    // then ask the task to finish.
    [self.speechRequest endAudio];
    [self.speechTask finish];
    self.speechRequest = nil;
    self.speechRecognizer = nil;

    if (self.speechConverter != NULL) {
        AudioConverterDispose(self.speechConverter);
        self.speechConverter = NULL;
    }
}

#pragma mark - TVIAudioSink

// Delivers each track sample to the speech request, downmixing to mono first when the
// source is multi-channel.
- (void)renderSample:(CMSampleBufferRef)audioSample {
    CMAudioFormatDescriptionRef coreMediaFormat = (CMAudioFormatDescriptionRef)CMSampleBufferGetFormatDescription(audioSample);
    const AudioStreamBasicDescription *basicDescription = CMAudioFormatDescriptionGetStreamBasicDescription(coreMediaFormat);
    AVAudioFrameCount frameCount = (AVAudioFrameCount)CMSampleBufferGetNumSamples(audioSample);

    // SFSpeechAudioBufferRecognitionRequest does not handle stereo PCM inputs correctly, so always deliver mono.
    AVAudioFormat *avAudioFormat = [[AVAudioFormat alloc] initWithCommonFormat:AVAudioPCMFormatInt16
                                                                    sampleRate:basicDescription->mSampleRate
                                                                      channels:kChannelCountMono
                                                                   interleaved:YES];

    // Allocate an AudioConverter to perform mono downmixing for us.
    if (self.speechConverter == NULL && basicDescription->mChannelsPerFrame != kChannelCountMono) {
        OSStatus status = AudioConverterNew(basicDescription, avAudioFormat.streamDescription, &_speechConverter);
        if (status != 0) {
            NSLog(@"Failed to create AudioConverter: %d", (int)status);
            return;
        }
    }

    // Perform downmixing if needed, otherwise deliver the original CMSampleBuffer.
    if (basicDescription->mChannelsPerFrame == kChannelCountMono) {
        [self.speechRequest appendAudioSampleBuffer:audioSample];
    } else {
        AVAudioPCMBuffer *pcmBuffer = [[AVAudioPCMBuffer alloc] initWithPCMFormat:avAudioFormat frameCapacity:frameCount];

        // Fill the AVAudioPCMBuffer with downmixed mono audio.
        CMBlockBufferRef blockBuffer = CMSampleBufferGetDataBuffer(audioSample);
        size_t inputBytes = 0;
        char *inputSamples = NULL;
        OSStatus status = CMBlockBufferGetDataPointer(blockBuffer, 0, NULL, &inputBytes, &inputSamples);

        if (status != kCMBlockBufferNoErr) {
            NSLog(@"Failed to get data pointer: %d", (int)status);
            return;
        }

        // Setting frameLength allocates the buffer's backing memory for us.
        pcmBuffer.frameLength = pcmBuffer.frameCapacity;
        AudioBufferList *bufferList = pcmBuffer.mutableAudioBufferList;
        AudioBuffer buffer = bufferList->mBuffers[0];
        void *outputSamples = buffer.mData;
        UInt32 outputBytes = buffer.mDataByteSize;

        status = AudioConverterConvertBuffer(_speechConverter, (UInt32)inputBytes, (const void *)inputSamples, &outputBytes, outputSamples);

        if (status == 0) {
            [self.speechRequest appendAudioPCMBuffer:pcmBuffer];
        } else {
            NSLog(@"Failed to convert audio: %d", (int)status);
        }
    }
}

@end
133 | @end
134 |
--------------------------------------------------------------------------------
/AudioSinkExample/Base.lproj/LaunchScreen.storyboard:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
--------------------------------------------------------------------------------
/AudioSinkExample/Info.plist:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | UIRequiresPersistentWiFi
6 |
7 | UIBackgroundModes
8 |
9 | audio
10 | voip
11 |
12 | NSSpeechRecognitionUsageDescription
13 | ${PRODUCT_NAME} transcribes audio from local and remote Participants.
14 | NSMicrophoneUsageDescription
15 | ${PRODUCT_NAME} uses your microphone to capture audio which is shared with other Participants.
16 | NSCameraUsageDescription
17 | ${PRODUCT_NAME} uses your camera to capture video which is shared with other Participants.
18 | CFBundleDevelopmentRegion
19 | $(DEVELOPMENT_LANGUAGE)
20 | CFBundleExecutable
21 | $(EXECUTABLE_NAME)
22 | CFBundleIdentifier
23 | $(PRODUCT_BUNDLE_IDENTIFIER)
24 | CFBundleInfoDictionaryVersion
25 | 6.0
26 | CFBundleName
27 | $(PRODUCT_NAME)
28 | CFBundlePackageType
29 | APPL
30 | CFBundleShortVersionString
31 | 1.0
32 | CFBundleVersion
33 | 1
34 | LSRequiresIPhoneOS
35 |
36 | UILaunchStoryboardName
37 | LaunchScreen
38 | UIMainStoryboardFile
39 | Main
40 | UIRequiredDeviceCapabilities
41 |
42 | armv7
43 |
44 | UISupportedInterfaceOrientations
45 |
46 | UIInterfaceOrientationPortrait
47 | UIInterfaceOrientationLandscapeLeft
48 | UIInterfaceOrientationLandscapeRight
49 |
50 | UISupportedInterfaceOrientations~ipad
51 |
52 | UIInterfaceOrientationPortrait
53 | UIInterfaceOrientationPortraitUpsideDown
54 | UIInterfaceOrientationLandscapeLeft
55 | UIInterfaceOrientationLandscapeRight
56 |
57 | UIUserInterfaceStyle
58 | Light
59 |
60 |
61 |
--------------------------------------------------------------------------------
/AudioSinkExample/README.md:
--------------------------------------------------------------------------------
1 | # Twilio Video TVIAudioSink Example
2 |
3 | The project demonstrates how to use Twilio's Programmable Video SDK to access raw audio samples using the `TVIAudioSink` API on `TVIAudioTrack`. Local and remote audio is recorded using `AVFoundation.framework` and speech is recognized using `Speech.framework`.
4 |
5 | ### Setup
6 |
7 | See the master [README](https://github.com/twilio/video-quickstart-ios/blob/master/README.md) for instructions on how to generate access tokens and connect to a Room.
8 |
9 | This example requires Xcode 12.0 and the iOS 14.0 SDK, as well as a device running iOS 11.0 or above.
10 |
11 | ### Running
12 |
Once you have set up your access token, install and run the example. You will be presented with the following screen:
14 |
15 |
16 |
After you connect to a Room, tap on your camera preview to begin recognizing local audio. As you speak, `ExampleSpeechRecognizer` will attempt to use `Speech.framework` to transcribe your speech and display the text on screen. Once other Participants join, you can select their video to recognize remote speech.
18 |
19 |
20 |
21 | Audio is automatically recorded when you join a Room. After disconnecting, tap "Recordings" to browse a list of your `TVIAudioTrack`s recorded using `ExampleAudioRecorder`. Select a recording cell to begin playback using `AVPlayerViewController`, or swipe to delete the file.
22 |
23 |
24 |
25 | ### Known Issues
26 |
27 | 1. Local audio samples are not raised until at least one underlying WebRTC PeerConnection is negotiated. In a Peer-to-Peer Room it is not possible to record or recognize audio until at least one other Participant joins. The same limitation does not apply to Group Rooms where there is a persistent PeerConnection with Twilio's media servers.
28 | 2. When a `TVIAudioSink` is added to a `TVIRemoteAudioTrack` and encoded audio has not been received yet, the media engine outputs (1-channel / 16 kHz) silence. In order for `ExampleAudioRecorder` to determine the correct recording format it detects and discards initial silence and waits for the first decoded samples.
29 |
--------------------------------------------------------------------------------
/AudioSinkExample/RecordingsViewController.swift:
--------------------------------------------------------------------------------
1 | //
2 | // RecordingsViewController.swift
3 | // AudioSinkExample
4 | //
5 | // Copyright © 2017-2019 Twilio Inc. All rights reserved.
6 | //
7 |
8 | import Foundation
9 | import AVKit
10 |
/// Lists the ".wav" recordings produced by ExampleAudioRecorder and plays them
/// back with AVPlayerViewController. Recordings can be swiped to delete.
class RecordingsViewController: UITableViewController {

    let kReuseIdentifier = "RecordingsCellId"

    // URLs of the ".wav" files found in the app's Documents directory.
    // (Restored element type: the declaration previously lost its generic
    // parameter, which does not compile.)
    var recordings: [URL] = []

    override func viewDidLoad() {
        super.viewDidLoad()

        self.title = "Recordings"
        tableView.register(UITableViewCell.classForCoder(), forCellReuseIdentifier: kReuseIdentifier)

        let fileManager = FileManager.default
        guard let documentsDirectory = fileManager.urls(for: .documentDirectory, in: .userDomainMask).last else {
            return
        }

        do {
            let directoryContents = try fileManager.contentsOfDirectory(atPath: documentsDirectory.path)

            // Keep only ".wav" files; other Documents content is ignored.
            for path in directoryContents where path.lowercased().hasSuffix("wav") {
                recordings.append(URL(fileURLWithPath: path, relativeTo: documentsDirectory))
            }
        } catch {
            print("Couldn't fetch directory contents. \(error)")
        }
    }

    /// Supports swipe-to-delete: removes the file on disk, then the row.
    override func tableView(_ tableView: UITableView, commit editingStyle: UITableViewCell.EditingStyle, forRowAt indexPath: IndexPath) {
        if editingStyle == .delete {
            let recordingToDelete = self.recordings[indexPath.row]

            do {
                // Delete the file first; only update the model/UI if it succeeds.
                try FileManager.default.removeItem(at: recordingToDelete)
                self.recordings.remove(at: indexPath.row)
                self.tableView.deleteRows(at: [indexPath], with: .automatic)
            } catch {
                print("Couldn't delete recording: \(recordingToDelete)")
            }
        }
    }

    override func tableView(_ tableView: UITableView, numberOfRowsInSection section: Int) -> Int {
        return recordings.count
    }

    override func tableView(_ tableView: UITableView, titleForFooterInSection section: Int) -> String? {
        // Show a hint only when there is nothing to list.
        return self.recordings.count > 0 ? "" : "Enter a Room to record audio Tracks."
    }

    /// Presents a full-screen AVPlayerViewController and begins playback.
    override func tableView(_ tableView: UITableView, didSelectRowAt indexPath: IndexPath) {
        let item = recordings[indexPath.row]

        let player = AVPlayer(url: item)
        let playerVC = AVPlayerViewController()
        playerVC.player = player
        playerVC.entersFullScreenWhenPlaybackBegins = true

        self.showDetailViewController(playerVC, sender: self)

        player.play()
    }

    override func tableView(_ tableView: UITableView, cellForRowAt indexPath: IndexPath) -> UITableViewCell {
        let cell = tableView.dequeueReusableCell(withIdentifier: kReuseIdentifier, for: indexPath)
        let recordingItem = recordings[indexPath.row]

        if let textLabel = cell.textLabel {
            // Long file names shrink rather than truncate.
            textLabel.adjustsFontSizeToFitWidth = true
            textLabel.minimumScaleFactor = 0.75
            textLabel.text = recordingItem.lastPathComponent
        }

        return cell
    }
}
89 |
--------------------------------------------------------------------------------
/DataTrackExample.xcodeproj/project.xcworkspace/contents.xcworkspacedata:
--------------------------------------------------------------------------------
1 |
2 |
4 |
6 |
7 |
9 |
10 |
11 |
--------------------------------------------------------------------------------
/DataTrackExample.xcodeproj/project.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | IDEDidComputeMac32BitWarning
6 |
7 |
8 |
9 |
--------------------------------------------------------------------------------
/DataTrackExample.xcodeproj/xcshareddata/xcschemes/DataTrackExample.xcscheme:
--------------------------------------------------------------------------------
1 |
2 |
5 |
8 |
9 |
15 |
21 |
22 |
23 |
24 |
25 |
30 |
31 |
32 |
33 |
43 |
45 |
51 |
52 |
53 |
54 |
60 |
62 |
68 |
69 |
70 |
71 |
73 |
74 |
77 |
78 |
79 |
--------------------------------------------------------------------------------
/DataTrackExample/AppDelegate.swift:
--------------------------------------------------------------------------------
1 | //
2 | // AppDelegate.swift
3 | // DataTrackExample
4 | //
5 | // Copyright © 2017-2019 Twilio. All rights reserved.
6 | //
7 |
8 | import UIKit
9 |
@UIApplicationMain
class AppDelegate: UIResponder, UIApplicationDelegate {

    var window: UIWindow?

    func application(_ application: UIApplication, didFinishLaunchingWithOptions launchOptions: [UIApplication.LaunchOptionsKey: Any]?) -> Bool {
        // No launch-time customization is required for this example.
        return true
    }

    // MARK: - UIApplicationDelegate lifecycle (intentionally no-ops)

    func applicationWillResignActive(_ application: UIApplication) {
    }

    func applicationDidEnterBackground(_ application: UIApplication) {
    }

    func applicationWillEnterForeground(_ application: UIApplication) {
    }

    func applicationDidBecomeActive(_ application: UIApplication) {
    }

    func applicationWillTerminate(_ application: UIApplication) {
    }
}
45 |
46 |
--------------------------------------------------------------------------------
/DataTrackExample/Assets.xcassets/AppIcon.appiconset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "idiom" : "iphone",
5 | "size" : "20x20",
6 | "scale" : "2x"
7 | },
8 | {
9 | "idiom" : "iphone",
10 | "size" : "20x20",
11 | "scale" : "3x"
12 | },
13 | {
14 | "idiom" : "iphone",
15 | "size" : "29x29",
16 | "scale" : "2x"
17 | },
18 | {
19 | "idiom" : "iphone",
20 | "size" : "29x29",
21 | "scale" : "3x"
22 | },
23 | {
24 | "idiom" : "iphone",
25 | "size" : "40x40",
26 | "scale" : "2x"
27 | },
28 | {
29 | "idiom" : "iphone",
30 | "size" : "40x40",
31 | "scale" : "3x"
32 | },
33 | {
34 | "idiom" : "iphone",
35 | "size" : "60x60",
36 | "scale" : "2x"
37 | },
38 | {
39 | "idiom" : "iphone",
40 | "size" : "60x60",
41 | "scale" : "3x"
42 | },
43 | {
44 | "idiom" : "ipad",
45 | "size" : "20x20",
46 | "scale" : "1x"
47 | },
48 | {
49 | "idiom" : "ipad",
50 | "size" : "20x20",
51 | "scale" : "2x"
52 | },
53 | {
54 | "idiom" : "ipad",
55 | "size" : "29x29",
56 | "scale" : "1x"
57 | },
58 | {
59 | "idiom" : "ipad",
60 | "size" : "29x29",
61 | "scale" : "2x"
62 | },
63 | {
64 | "idiom" : "ipad",
65 | "size" : "40x40",
66 | "scale" : "1x"
67 | },
68 | {
69 | "idiom" : "ipad",
70 | "size" : "40x40",
71 | "scale" : "2x"
72 | },
73 | {
74 | "idiom" : "ipad",
75 | "size" : "76x76",
76 | "scale" : "1x"
77 | },
78 | {
79 | "idiom" : "ipad",
80 | "size" : "76x76",
81 | "scale" : "2x"
82 | },
83 | {
84 | "idiom" : "ipad",
85 | "size" : "83.5x83.5",
86 | "scale" : "2x"
87 | },
88 | {
89 | "idiom" : "ios-marketing",
90 | "size" : "1024x1024",
91 | "scale" : "1x"
92 | }
93 | ],
94 | "info" : {
95 | "version" : 1,
96 | "author" : "xcode"
97 | }
98 | }
--------------------------------------------------------------------------------
/DataTrackExample/Base.lproj/LaunchScreen.storyboard:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
31 |
32 |
--------------------------------------------------------------------------------
/DataTrackExample/Info.plist:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | NSMicrophoneUsageDescription
6 | ${PRODUCT_NAME} does not use your microphone, but requires permission to play audio due to issue #207.
7 | CFBundleDevelopmentRegion
8 | $(DEVELOPMENT_LANGUAGE)
9 | CFBundleExecutable
10 | $(EXECUTABLE_NAME)
11 | CFBundleIdentifier
12 | $(PRODUCT_BUNDLE_IDENTIFIER)
13 | CFBundleInfoDictionaryVersion
14 | 6.0
15 | CFBundleName
16 | $(PRODUCT_NAME)
17 | CFBundlePackageType
18 | APPL
19 | CFBundleShortVersionString
20 | 1.0
21 | CFBundleVersion
22 | 1
23 | LSRequiresIPhoneOS
24 |
25 | UILaunchStoryboardName
26 | LaunchScreen
27 | UIMainStoryboardFile
28 | Main
29 | UIRequiredDeviceCapabilities
30 |
31 | armv7
32 |
33 | UISupportedInterfaceOrientations
34 |
35 | UIInterfaceOrientationPortrait
36 | UIInterfaceOrientationLandscapeLeft
37 | UIInterfaceOrientationLandscapeRight
38 |
39 | UISupportedInterfaceOrientations~ipad
40 |
41 | UIInterfaceOrientationPortrait
42 | UIInterfaceOrientationPortraitUpsideDown
43 | UIInterfaceOrientationLandscapeLeft
44 | UIInterfaceOrientationLandscapeRight
45 |
46 | UIUserInterfaceStyle
47 | Light
48 |
49 |
50 |
--------------------------------------------------------------------------------
/DataTrackExample/README.md:
--------------------------------------------------------------------------------
1 | # Twilio Video Data Tracks Example
2 |
3 |
4 | The DataTrack API lets you create a DataTrack channel which can be used to send low latency messages to zero or more receivers subscribed to the data. For a detailed guide about DataTracks reference [our documentation](https://www.twilio.com/docs/api/video/using-the-datatrack-api).
5 |
6 | This example illustrates how to use the DataTrack API to write a simple collaborative drawing app.
7 |
In this example, participants can join a Room and begin drawing on a UIView. The drawing is shared with all other participants using the DataTrack API to provide a collaborative whiteboard experience. Once disconnected from the Room, your UIView clears and your drawings are removed from all other participants' UIViews as well. The local participant's drawings are presented in black, while remote participants' drawings are presented in light gray.
9 |
10 | ### Setup
11 |
12 | See the master [README](https://github.com/twilio/video-quickstart-ios/blob/master/README.md) for instructions on how to generate access tokens and connect to a Room.
13 |
14 | ### Run the application
15 |
Once you have set up your access token, you can run the application and you will be presented with the following screen:
17 |
18 |
19 |
20 | ### Connect and draw!
21 |
Once connected, you can start drawing and see other connected Participants' drawings.
23 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | The MIT License (MIT)
2 |
3 | Copyright (c) 2015-2017 Twilio, Inc.
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
23 |
--------------------------------------------------------------------------------
/ObjCVideoQuickstart.xcodeproj/project.xcworkspace/contents.xcworkspacedata:
--------------------------------------------------------------------------------
1 |
2 |
4 |
6 |
7 |
8 |
--------------------------------------------------------------------------------
/ObjCVideoQuickstart/AppDelegate.h:
--------------------------------------------------------------------------------
1 | //
2 | // AppDelegate.h
3 | // Twilio Video Conversations Quickstart - Objective C
4 |
5 |
#import <UIKit/UIKit.h>

// App delegate for the Objective-C video quickstart. Conformance to
// UIApplicationDelegate and the UIKit import were lost in extraction; restored.
@interface AppDelegate : UIResponder <UIApplicationDelegate>

// The app's main window, loaded from the storyboard.
@property (strong, nonatomic) UIWindow *window;

@end
14 |
15 |
--------------------------------------------------------------------------------
/ObjCVideoQuickstart/AppDelegate.m:
--------------------------------------------------------------------------------
1 | //
2 | // AppDelegate.m
3 | // Twilio Video Conversations Quickstart - Objective C
4 |
5 |
6 | #import "AppDelegate.h"
7 |
8 | @interface AppDelegate ()
9 |
10 | @end
11 |
@implementation AppDelegate

- (BOOL)application:(UIApplication *)application didFinishLaunchingWithOptions:(NSDictionary *)launchOptions {
    // No launch-time customization is required for this quickstart.
    return YES;
}

#pragma mark - UIApplicationDelegate lifecycle (intentionally empty)

- (void)applicationWillResignActive:(UIApplication *)application {
}

- (void)applicationDidEnterBackground:(UIApplication *)application {
}

- (void)applicationWillEnterForeground:(UIApplication *)application {
}

- (void)applicationDidBecomeActive:(UIApplication *)application {
}

- (void)applicationWillTerminate:(UIApplication *)application {
}

@end
43 |
--------------------------------------------------------------------------------
/ObjCVideoQuickstart/Assets.xcassets/AppIcon.appiconset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "idiom" : "iphone",
5 | "size" : "29x29",
6 | "scale" : "2x"
7 | },
8 | {
9 | "idiom" : "iphone",
10 | "size" : "29x29",
11 | "scale" : "3x"
12 | },
13 | {
14 | "idiom" : "iphone",
15 | "size" : "40x40",
16 | "scale" : "2x"
17 | },
18 | {
19 | "idiom" : "iphone",
20 | "size" : "40x40",
21 | "scale" : "3x"
22 | },
23 | {
24 | "idiom" : "iphone",
25 | "size" : "60x60",
26 | "scale" : "2x"
27 | },
28 | {
29 | "idiom" : "iphone",
30 | "size" : "60x60",
31 | "scale" : "3x"
32 | },
33 | {
34 | "idiom" : "ipad",
35 | "size" : "29x29",
36 | "scale" : "1x"
37 | },
38 | {
39 | "idiom" : "ipad",
40 | "size" : "29x29",
41 | "scale" : "2x"
42 | },
43 | {
44 | "idiom" : "ipad",
45 | "size" : "40x40",
46 | "scale" : "1x"
47 | },
48 | {
49 | "idiom" : "ipad",
50 | "size" : "40x40",
51 | "scale" : "2x"
52 | },
53 | {
54 | "idiom" : "ipad",
55 | "size" : "76x76",
56 | "scale" : "1x"
57 | },
58 | {
59 | "idiom" : "ipad",
60 | "size" : "76x76",
61 | "scale" : "2x"
62 | }
63 | ],
64 | "info" : {
65 | "version" : 1,
66 | "author" : "xcode"
67 | }
68 | }
--------------------------------------------------------------------------------
/ObjCVideoQuickstart/Base.lproj/LaunchScreen.storyboard:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
--------------------------------------------------------------------------------
/ObjCVideoQuickstart/Info.plist:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | CFBundleDevelopmentRegion
6 | en
7 | CFBundleExecutable
8 | $(EXECUTABLE_NAME)
9 | CFBundleIdentifier
10 | $(PRODUCT_BUNDLE_IDENTIFIER)
11 | CFBundleInfoDictionaryVersion
12 | 6.0
13 | CFBundleName
14 | $(PRODUCT_NAME)
15 | CFBundlePackageType
16 | APPL
17 | CFBundleShortVersionString
18 | 1.0
19 | CFBundleSignature
20 | ????
21 | CFBundleVersion
22 | 1
23 | LSRequiresIPhoneOS
24 |
25 | NSAppTransportSecurity
26 |
27 | NSAllowsArbitraryLoads
28 |
29 |
30 | NSCameraUsageDescription
31 | ${PRODUCT_NAME} uses your camera to capture video which is shared with other Room Participants.
32 | NSMicrophoneUsageDescription
33 | ${PRODUCT_NAME} uses your microphone to capture audio which is shared with other Room Participants.
34 | UIBackgroundModes
35 |
36 | audio
37 |
38 | UILaunchStoryboardName
39 | LaunchScreen
40 | UIMainStoryboardFile
41 | Main
42 | UIRequiredDeviceCapabilities
43 |
44 | armv7
45 |
46 | UISupportedInterfaceOrientations
47 |
48 | UIInterfaceOrientationPortrait
49 | UIInterfaceOrientationLandscapeLeft
50 | UIInterfaceOrientationLandscapeRight
51 |
52 | UISupportedInterfaceOrientations~ipad
53 |
54 | UIInterfaceOrientationPortrait
55 | UIInterfaceOrientationPortraitUpsideDown
56 | UIInterfaceOrientationLandscapeLeft
57 | UIInterfaceOrientationLandscapeRight
58 |
59 | UIUserInterfaceStyle
60 | Light
61 |
62 |
63 |
--------------------------------------------------------------------------------
/ObjCVideoQuickstart/Utils.h:
--------------------------------------------------------------------------------
1 | //
2 | // Utils.h
3 | // ObjCVideoQuickstart
4 | //
5 | // Copyright © 2016-2017 Twilio, Inc. All rights reserved.
6 | //
7 |
#import <Foundation/Foundation.h>

// Platform helpers. (The Foundation import was lost in extraction; restored.)
@interface PlatformUtils : NSObject

// YES when running in the iOS Simulator, NO on a physical device.
+ (BOOL)isSimulator;

@end

// Fetches a Twilio access token over HTTP.
@interface TokenUtils : NSObject

// Retrieves the access token from the given URL string.
// The completion handler receives the token text, or nil plus an error.
+ (void)retrieveAccessTokenFromURL:(NSString *)tokenURLStr
                        completion:(void (^)(NSString* token, NSError *err)) completionHandler;

@end
22 |
--------------------------------------------------------------------------------
/ObjCVideoQuickstart/Utils.m:
--------------------------------------------------------------------------------
1 | //
2 | // Utils.m
3 | // ObjCVideoQuickstart
4 | //
5 | // Copyright © 2016-2017 Twilio, Inc. All rights reserved.
6 | //
7 |
8 | #import "Utils.h"
9 |
@implementation PlatformUtils

// Compile-time check: YES when built for the iOS Simulator, NO on device.
+ (BOOL)isSimulator {
#if TARGET_IPHONE_SIMULATOR
    return YES;
#else
    return NO;
#endif
}

@end
20 |
@implementation TokenUtils

// Fetches the access token from tokenURLStr asynchronously.
// Calls completionHandler with the UTF-8 response body on success, or with a
// nil token and an NSError on failure.
+ (void)retrieveAccessTokenFromURL:(NSString *)tokenURLStr
                        completion:(void (^)(NSString* token, NSError *err)) completionHandler {
    NSURL *tokenURL = [NSURL URLWithString:tokenURLStr];
    if (tokenURL == nil) {
        // Malformed URL string: report a failure instead of handing nil to NSURLSession.
        NSError *urlError = [NSError errorWithDomain:NSURLErrorDomain
                                                code:NSURLErrorBadURL
                                            userInfo:@{NSLocalizedDescriptionKey : @"Invalid token URL."}];
        completionHandler(nil, urlError);
        return;
    }

    // Use the shared session. The previous per-call session created with
    // +sessionWithConfiguration: was never invalidated and therefore leaked.
    NSURLSessionDataTask *task =
        [[NSURLSession sharedSession] dataTaskWithURL:tokenURL
                                    completionHandler:^(NSData * _Nullable data,
                                                        NSURLResponse * _Nullable response,
                                                        NSError * _Nullable error) {
        NSString *accessToken = nil;
        if (!error && data) {
            accessToken = [[NSString alloc] initWithData:data
                                                encoding:NSUTF8StringEncoding];
        }
        completionHandler(accessToken, error);
    }];
    [task resume];
}

@end
43 |
--------------------------------------------------------------------------------
/ObjCVideoQuickstart/ViewController.h:
--------------------------------------------------------------------------------
1 | //
2 | // ViewController.h
3 | // ObjCVideoQuickstart
4 | //
5 | // Copyright © 2016-2017 Twilio, Inc. All rights reserved.
6 | //
7 |
#import <UIKit/UIKit.h>

// Main view controller for the quickstart. (The UIKit import was lost in
// extraction; restored.)
@interface ViewController : UIViewController

@end
13 |
--------------------------------------------------------------------------------
/ObjCVideoQuickstart/main.m:
--------------------------------------------------------------------------------
1 | //
2 | // main.m
3 | // ObjCVideoQuickstart
4 | //
5 | // Created by Jeffrey Linwood on 4/27/16.
6 | // Copyright © 2016-2017 Twilio, Inc. All rights reserved.
7 | //
8 |
#import <UIKit/UIKit.h>
#import "AppDelegate.h"

// App entry point. UIApplicationMain starts the run loop and never returns
// under normal operation. (The UIKit import was lost in extraction; restored.)
int main(int argc, char * argv[]) {
    @autoreleasepool {
        return UIApplicationMain(argc, argv, nil, NSStringFromClass([AppDelegate class]));
    }
}
17 |
--------------------------------------------------------------------------------
/ReplayKitExample.xcodeproj/project.xcworkspace/contents.xcworkspacedata:
--------------------------------------------------------------------------------
1 |
2 |
4 |
6 |
7 |
8 |
--------------------------------------------------------------------------------
/ReplayKitExample.xcodeproj/project.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | IDEDidComputeMac32BitWarning
6 |
7 |
8 |
9 |
--------------------------------------------------------------------------------
/ReplayKitExample.xcodeproj/xcshareddata/xcschemes/BroadcastExtension.xcscheme:
--------------------------------------------------------------------------------
1 |
2 |
6 |
9 |
10 |
16 |
22 |
23 |
24 |
30 |
36 |
37 |
38 |
39 |
40 |
45 |
46 |
47 |
48 |
60 |
62 |
68 |
69 |
70 |
71 |
78 |
80 |
86 |
87 |
88 |
89 |
91 |
92 |
95 |
96 |
97 |
--------------------------------------------------------------------------------
/ReplayKitExample.xcodeproj/xcshareddata/xcschemes/BroadcastExtensionSetupUI.xcscheme:
--------------------------------------------------------------------------------
1 |
2 |
6 |
9 |
10 |
16 |
22 |
23 |
24 |
30 |
36 |
37 |
38 |
39 |
40 |
45 |
46 |
47 |
48 |
60 |
62 |
68 |
69 |
70 |
71 |
78 |
80 |
86 |
87 |
88 |
89 |
91 |
92 |
95 |
96 |
97 |
--------------------------------------------------------------------------------
/ReplayKitExample.xcodeproj/xcshareddata/xcschemes/ReplayKitExample.xcscheme:
--------------------------------------------------------------------------------
1 |
2 |
5 |
8 |
9 |
15 |
21 |
22 |
23 |
24 |
25 |
30 |
31 |
32 |
33 |
43 |
45 |
51 |
52 |
53 |
54 |
60 |
62 |
68 |
69 |
70 |
71 |
73 |
74 |
77 |
78 |
79 |
--------------------------------------------------------------------------------
/ReplayKitExample/BroadcastExtension/BroadcastExtension-Bridging-Header.h:
--------------------------------------------------------------------------------
1 | //
2 | // Use this file to import your target's public headers that you would like to expose to Swift.
3 | //
4 |
5 | #import "ExampleReplayKitAudioCapturer.h"
6 |
--------------------------------------------------------------------------------
/ReplayKitExample/BroadcastExtension/BroadcastExtension.entitlements:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | inter-app-audio
6 |
7 |
8 |
9 |
--------------------------------------------------------------------------------
/ReplayKitExample/BroadcastExtension/ExampleReplayKitAudioCapturer.h:
--------------------------------------------------------------------------------
1 | //
2 | // ExampleReplayKitAudioCapturer.h
3 | // ReplayKitExample
4 | //
5 | // Copyright © 2018-2019 Twilio, Inc. All rights reserved.
6 | //
7 |
8 | #import
9 | #import
10 |
// Returns the dispatch queue the audio device is currently using, if any.
// NOTE(review): the implementation is not visible in this chunk — confirm semantics.
dispatch_queue_t _Nullable ExampleCoreAudioDeviceGetCurrentQueue(void);

// State shared between the capturer and its C audio callback.
typedef struct ExampleAudioContext {
    // Opaque Twilio device context used when delivering captured audio.
    TVIAudioDeviceContext _Nullable deviceContext;
    // Upper bound on frames handled per callback invocation.
    size_t maxFramesPerBuffer;
    // Core Audio description of the captured stream's format.
    AudioStreamBasicDescription streamDescription;
} ExampleAudioContext;

/*
 * ExampleReplayKitAudioCapturer consumes audio samples recorded by ReplayKit. Due to limitations of extensions, this
 * device can't playback remote audio.
 */
@interface ExampleReplayKitAudioCapturer : NSObject

// Convenience initializer; see the .m for the default sample type.
- (nonnull instancetype)init;

// Designated initializer. `type` selects app-audio vs. mic-audio capture.
- (nonnull instancetype)initWithSampleType:(RPSampleBufferType)type NS_DESIGNATED_INITIALIZER;

@end

/// Deliver audio samples to the capturer.
/// @param capturer The capturer to deliver the samples to.
/// @param sampleBuffer A CMSampleBuffer which contains an audio sample.
OSStatus ExampleCoreAudioDeviceCapturerCallback(ExampleReplayKitAudioCapturer * _Nonnull capturer,
                                                CMSampleBufferRef _Nonnull sampleBuffer);
36 |
--------------------------------------------------------------------------------
/ReplayKitExample/BroadcastExtension/ExampleReplayKitAudioCapturer.m:
--------------------------------------------------------------------------------
1 | //
2 | // ExampleReplayKitAudioCapturer.m
3 | // ReplayKitExample
4 | //
5 | // Copyright © 2018-2019 Twilio, Inc. All rights reserved.
6 | //
7 |
8 | #import "ExampleReplayKitAudioCapturer.h"
9 |
10 | // Our guess at the maximum slice size used by ReplayKit app audio. We have observed up to 22596 in the field.
11 | static size_t kMaximumFramesPerAppAudioBuffer = 45192;
12 | // Our guess at the maximum slice size used by ReplayKit mic audio. We have observed up to 1024 in the field.
13 | static size_t kMaximumFramesPerMicAudioBuffer = 2048;
14 |
15 | @interface ExampleReplayKitAudioCapturer()
16 |
17 | // The format reported to the SDK via -captureFormat. Replaced at runtime if ReplayKit's actual input differs.
17 | @property (nonatomic, strong, nullable) TVIAudioFormat *capturingFormat;
18 |
19 | // Heap-allocated context shared with the capture callback. Non-NULL only while capturing.
19 | @property (nonatomic, assign, nullable) ExampleAudioContext *capturingContext;
20 |
21 | /**
22 | The maximum number of frames that we will capture at a time. This is determined based upon the RPSampleBufferType.
23 | */
24 | @property (nonatomic, assign, readonly) size_t maxFramesPerBuffer;
25 |
26 | @end
27 |
28 | @implementation ExampleReplayKitAudioCapturer
29 |
30 | #pragma mark - Init & Dealloc
31 |
32 | // Convenience initializer. Defaults to microphone audio, the smaller of the two buffer configurations.
32 | - (instancetype)init {
33 | return [self initWithSampleType:RPSampleBufferTypeAudioMic];
34 | }
35 |
36 | // Designated initializer. Sizes the capture buffers based upon the ReplayKit sample type.
36 | - (instancetype)initWithSampleType:(RPSampleBufferType)type {
37 | NSAssert(type == RPSampleBufferTypeAudioMic || type == RPSampleBufferTypeAudioApp, @"We only support capturing audio samples.");
38 |
39 | self = [super init];
40 | if (self) {
41 | // App audio arrives in much larger slices than mic audio, so reserve more memory for it.
42 | if (type == RPSampleBufferTypeAudioMic) {
42 | _maxFramesPerBuffer = kMaximumFramesPerMicAudioBuffer;
42 | } else {
42 | _maxFramesPerBuffer = kMaximumFramesPerAppAudioBuffer;
42 | }
43 | _capturingFormat = [[self class] defaultCapturingFormat:_maxFramesPerBuffer];
44 | }
45 | return self;
46 | }
47 |
48 | // Class-level description, useful for logging.
48 | + (NSString *)description {
49 | return @"ExampleReplayKitAudioCapturer";
50 | }
51 |
52 | #pragma mark - TVIAudioDeviceRenderer
53 |
53 | // Playback is not supported: per the header, this device can't render remote audio inside an
53 | // extension, so every renderer method reports no format / failure.
54 | - (nullable TVIAudioFormat *)renderFormat {
55 | return nil;
56 | }
57 |
58 | - (BOOL)initializeRenderer {
59 | return NO;
60 | }
61 |
62 | - (BOOL)startRendering:(nonnull TVIAudioDeviceContext)context {
63 | return NO;
64 | }
65 |
66 | - (BOOL)stopRendering {
67 | return NO;
68 | }
69 |
70 | #pragma mark - TVIAudioDeviceCapturer
71 |
72 | // Reports the current (possibly updated at runtime) capture format to the SDK.
72 | - (nullable TVIAudioFormat *)captureFormat {
73 | return _capturingFormat;
74 | }
75 |
76 | // No hardware to prepare; samples are pushed to us by ReplayKit.
76 | - (BOOL)initializeCapturer {
77 | return YES;
78 | }
79 |
80 | // Allocates the context shared with the capture callback and records the expected format.
80 | // Returns YES; actual sample delivery begins when ReplayKit invokes the capturer callback.
80 | - (BOOL)startCapturing:(nonnull TVIAudioDeviceContext)context {
81 | @synchronized (self) {
82 | NSAssert(_capturingContext == NULL, @"Should not have any capturing context.");
83 | // Heap-allocate so the real-time callback can read the context without retaining self.
83 | _capturingContext = malloc(sizeof(ExampleAudioContext));
84 | _capturingContext->deviceContext = context;
85 | _capturingContext->maxFramesPerBuffer = _capturingFormat.framesPerBuffer;
87 | // Represents the expected capture format. If the capturer's guess is incorrect then a restart will occur.
88 | _capturingContext->streamDescription = _capturingFormat.streamDescription;
89 | }
90 | return YES;
91 | }
92 |
93 | // Releases the context allocated in startCapturing:. After this, the callback sees NULL and drops samples.
93 | - (BOOL)stopCapturing {
94 | @synchronized(self) {
95 | NSAssert(_capturingContext != NULL, @"Should have a capturing context.");
96 | free(_capturingContext);
97 | _capturingContext = NULL;
98 | }
99 |
100 | return YES;
101 | }
102 |
103 | #pragma mark - Public
104 |
105 | // Returns the dispatch queue that the caller is currently executing on (i.e. the queue
105 | // ReplayKit delivers samples on, when called from a sample handler).
105 | dispatch_queue_t ExampleCoreAudioDeviceGetCurrentQueue(void) {
106 | /*
107 | * The current dispatch queue is needed in order to synchronize with samples delivered by ReplayKit. Ideally, the
108 | * ReplayKit APIs would support this use case, but since they do not we use a deprecated API to discover the queue.
109 | * The dispatch queue is used for both resource teardown, and to schedule retransmissions (when enabled).
110 | */
111 | #pragma clang diagnostic push
112 | #pragma clang diagnostic ignored "-Wdeprecated"
113 | return dispatch_get_current_queue();
114 | #pragma clang diagnostic pop
115 | }
116 |
117 | // Consumes one audio CMSampleBuffer from ReplayKit and forwards its bytes to the SDK.
117 | // Detects runtime format changes, converts big-endian samples to little-endian in place,
117 | // and silently drops samples when capture is not active. Always returns noErr.
117 | OSStatus ExampleCoreAudioDeviceCapturerCallback(ExampleReplayKitAudioCapturer *capturer,
118 | CMSampleBufferRef sampleBuffer) {
119 | CMFormatDescriptionRef formatDescription = CMSampleBufferGetFormatDescription(sampleBuffer);
120 | const AudioStreamBasicDescription *asbd = CMAudioFormatDescriptionGetStreamBasicDescription(formatDescription);
121 | ExampleAudioContext *context = capturer->_capturingContext;
122 |
122 | // Capture has not started (or has stopped); drop the sample.
123 | if (!context || !context->deviceContext) {
124 | return noErr;
125 | }
126 |
127 | // Update the capture format at runtime in case the input changes, or does not match the capturer's initial guess.
128 | TVIAudioFormat *format = capturer->_capturingFormat;
129 | if (asbd->mChannelsPerFrame != context->streamDescription.mChannelsPerFrame ||
130 | asbd->mSampleRate != context->streamDescription.mSampleRate) {
131 | capturer->_capturingFormat = [[TVIAudioFormat alloc] initWithChannels:asbd->mChannelsPerFrame
132 | sampleRate:asbd->mSampleRate
133 | framesPerBuffer:format.framesPerBuffer];
134 | context->streamDescription = *asbd;
135 | TVIAudioDeviceReinitialize(context->deviceContext);
135 | // This sample is dropped; delivery resumes after the device reinitializes.
136 | return noErr;
137 | }
138 |
139 | CMBlockBufferRef blockBuffer = CMSampleBufferGetDataBuffer(sampleBuffer);
140 | if (blockBuffer == nil) {
141 | NSLog(@"Empty buffer received");
142 | return noErr;
143 | }
144 |
144 | // Re-points blockBuffer at a retained buffer backing bufferList (hence the CFRelease below).
144 | // NOTE(review): the OSStatus returned here is not checked; on failure bufferList may be
144 | // uninitialized — consider bailing out on a non-zero status.
145 | AudioBufferList bufferList;
146 | CMSampleBufferGetAudioBufferListWithRetainedBlockBuffer(sampleBuffer,
147 | NULL,
148 | &bufferList,
149 | sizeof(bufferList),
150 | NULL,
151 | NULL,
152 | kCMSampleBufferFlag_AudioBufferList_Assure16ByteAlignment,
153 | &blockBuffer);
154 |
155 | int8_t *audioBuffer = (int8_t *)bufferList.mBuffers[0].mData;
156 | UInt32 audioBufferSizeInBytes = bufferList.mBuffers[0].mDataByteSize;
157 |
158 | // Perform an endianess conversion, if needed. A TVIAudioDevice should deliver little endian samples.
158 | // NOTE(review): `i < (audioBufferSizeInBytes - 1)` mixes int with UInt32; if the byte size
158 | // were ever 0 the subtraction would wrap — confirm ReplayKit never delivers empty app-audio buffers.
159 | if (asbd->mFormatFlags & kAudioFormatFlagIsBigEndian) {
160 | for (int i = 0; i < (audioBufferSizeInBytes - 1); i += 2) {
161 | int8_t temp = audioBuffer[i];
162 | audioBuffer[i] = audioBuffer[i+1];
163 | audioBuffer[i+1] = temp;
164 | }
165 | }
166 |
167 | TVIAudioDeviceWriteCaptureData(context->deviceContext, (int8_t *)audioBuffer, audioBufferSizeInBytes);
168 |
169 | CFRelease(blockBuffer);
170 |
171 | return noErr;
172 | }
173 |
174 | #pragma mark - Private
175 |
176 | // Builds the initial best-guess capture format: 44.1 kHz mono with the given slice size.
176 | + (nullable TVIAudioFormat *)defaultCapturingFormat:(const size_t)framesPerBuffer {
177 | // It is possible that 44.1 kHz / 1 channel or 44.1 kHz / 2 channel will be encountered at runtime depending on
178 | // the RPSampleBufferType and iOS version.
179 | const double sessionSampleRate = 44100;
180 | size_t rendererChannels = 1;
181 |
182 | return [[TVIAudioFormat alloc] initWithChannels:rendererChannels
183 | sampleRate:sessionSampleRate
184 | framesPerBuffer:framesPerBuffer];
185 | }
186 |
187 | @end
188 |
--------------------------------------------------------------------------------
/ReplayKitExample/BroadcastExtension/ExampleReplayKitAudioCapturerDispatch.h:
--------------------------------------------------------------------------------
1 | //
2 | // ExampleReplayKitAudioCapturerDispatch.h
3 | // ReplayKitExample
4 | //
5 | // Copyright © 2018-2019 Twilio, Inc. All rights reserved.
6 | //
7 |
8 | #import <Foundation/Foundation.h>
9 |
10 | dispatch_queue_t ExampleCoreAudioDeviceGetCurrentQueue(void);
11 |
--------------------------------------------------------------------------------
/ReplayKitExample/BroadcastExtension/Info.plist:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | CFBundleDevelopmentRegion
6 | $(DEVELOPMENT_LANGUAGE)
7 | CFBundleDisplayName
8 | BroadcastExtension
9 | CFBundleExecutable
10 | $(EXECUTABLE_NAME)
11 | CFBundleIdentifier
12 | $(PRODUCT_BUNDLE_IDENTIFIER)
13 | CFBundleInfoDictionaryVersion
14 | 6.0
15 | CFBundleName
16 | $(PRODUCT_NAME)
17 | CFBundlePackageType
18 | XPC!
19 | CFBundleShortVersionString
20 | 1.0
21 | CFBundleVersion
22 | 1
23 | NSExtension
24 |
25 | NSExtensionPointIdentifier
26 | com.apple.broadcast-services-upload
27 | NSExtensionPrincipalClass
28 | $(PRODUCT_MODULE_NAME).SampleHandler
29 | RPBroadcastProcessMode
30 | RPBroadcastProcessModeSampleBuffer
31 |
32 |
33 |
34 |
--------------------------------------------------------------------------------
/ReplayKitExample/BroadcastExtensionSetupUI/BroadcastExtensionSetupUI.entitlements:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | inter-app-audio
6 |
7 |
8 |
9 |
--------------------------------------------------------------------------------
/ReplayKitExample/BroadcastExtensionSetupUI/BroadcastSetupViewController.swift:
--------------------------------------------------------------------------------
1 | //
2 | // BroadcastSetupViewController.swift
3 | // BroadcastExtensionSetupUI
4 | //
5 | // Copyright © 2018-2019 Twilio. All rights reserved.
6 | //
7 |
8 | import ReplayKit
9 |
10 | // Setup UI shown before a broadcast starts; collects the room name and hands it to the upload extension.
10 | class BroadcastSetupViewController: UIViewController {
11 |
12 | @IBOutlet weak var broadcastButton: UIButton!
13 | @IBOutlet weak var roomTextField: UITextField!
14 |
15 | @IBAction func broadcast(_ sender: Any) {
16 | userDidFinishSetup()
17 | }
18 |
19 | // Call this method when the user has finished interacting with the view controller and a broadcast stream can start
20 | func userDidFinishSetup() {
21 | // URL of the resource where broadcast can be viewed that will be returned to the application.
21 | // Cancel rather than crash (previously a force-unwrap) if the bundled movie is missing.
22 | guard let broadcastURL = Bundle.main.url(forResource: "twilio_cloud_com", withExtension: "mov") else {
22 | userDidCancelSetup()
22 | return
22 | }
23 | // Dictionary with setup information that will be provided to broadcast extension when broadcast is started.
24 | // `text` is nil-coalesced so an untouched text field forwards an empty room name instead of crashing.
25 | let setupInfo: [String : NSCoding & NSObjectProtocol] = ["roomName": (roomTextField.text ?? "") as NSString]
26 |
27 | // Tell ReplayKit that the extension is finished setting up and can begin broadcasting
28 | self.extensionContext?.completeRequest(withBroadcast: broadcastURL, setupInfo: setupInfo)
29 | }
30 |
31 | func userDidCancelSetup() {
32 | let error = NSError(domain: "YouAppDomain", code: -1, userInfo: nil)
33 | // Tell ReplayKit that the extension was cancelled by the user
34 | self.extensionContext?.cancelRequest(withError: error)
35 | }
36 | }
37 |
--------------------------------------------------------------------------------
/ReplayKitExample/BroadcastExtensionSetupUI/Info.plist:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | CFBundleDevelopmentRegion
6 | $(DEVELOPMENT_LANGUAGE)
7 | CFBundleDisplayName
8 | BroadcastExtensionSetupUI
9 | CFBundleExecutable
10 | $(EXECUTABLE_NAME)
11 | CFBundleIdentifier
12 | $(PRODUCT_BUNDLE_IDENTIFIER)
13 | CFBundleInfoDictionaryVersion
14 | 6.0
15 | CFBundleName
16 | $(PRODUCT_NAME)
17 | CFBundlePackageType
18 | XPC!
19 | CFBundleShortVersionString
20 | 1.0
21 | CFBundleVersion
22 | 1
23 | NSExtension
24 |
25 | NSExtensionAttributes
26 |
27 | NSExtensionActivationRule
28 |
29 | NSExtensionActivationSupportsReplayKitStreaming
30 |
31 |
32 |
33 | NSExtensionMainStoryboard
34 | MainInterface
35 | NSExtensionPointIdentifier
36 | com.apple.broadcast-services-setupui
37 |
38 |
39 |
40 |
--------------------------------------------------------------------------------
/ReplayKitExample/BroadcastExtensionSetupUI/MainInterface.storyboard:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
30 |
44 |
45 |
46 |
47 |
48 |
49 |
50 |
51 |
52 |
53 |
54 |
55 |
56 |
57 |
58 |
59 |
65 |
66 |
67 |
68 |
69 |
70 |
71 |
72 |
73 |
74 |
75 |
76 |
77 |
78 |
79 |
80 |
81 |
82 |
83 |
84 |
85 |
86 |
87 |
88 |
89 |
90 |
91 |
92 |
93 |
94 |
95 |
96 |
97 |
98 |
99 |
--------------------------------------------------------------------------------
/ReplayKitExample/ReplayKitExample/AppDelegate.swift:
--------------------------------------------------------------------------------
1 | //
2 | // AppDelegate.swift
3 | // ReplayKitExample
4 | //
5 | // Copyright © 2018-2019 Twilio. All rights reserved.
6 | //
7 |
8 | import UIKit
9 |
10 | @UIApplicationMain
11 | class AppDelegate: UIResponder, UIApplicationDelegate {
12 |
13 | var window: UIWindow?
14 |
15 | // No launch-time customization is required for this example.
16 | func application(_ application: UIApplication, didFinishLaunchingWithOptions launchOptions: [UIApplication.LaunchOptionsKey: Any]?) -> Bool {
18 | return true
19 | }
20 |
21 | // The remaining lifecycle callbacks are intentionally empty: the example keeps no
21 | // state that needs saving or restoring across foreground/background transitions.
21 | func applicationWillResignActive(_ application: UIApplication) {
24 | }
25 |
26 | func applicationDidEnterBackground(_ application: UIApplication) {
29 | }
30 |
31 | func applicationWillEnterForeground(_ application: UIApplication) {
33 | }
34 |
35 | func applicationDidBecomeActive(_ application: UIApplication) {
37 | }
38 |
39 | func applicationWillTerminate(_ application: UIApplication) {
41 | }
44 | }
45 |
46 |
--------------------------------------------------------------------------------
/ReplayKitExample/ReplayKitExample/Assets.xcassets/AppIcon.appiconset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "idiom" : "iphone",
5 | "size" : "20x20",
6 | "scale" : "2x"
7 | },
8 | {
9 | "idiom" : "iphone",
10 | "size" : "20x20",
11 | "scale" : "3x"
12 | },
13 | {
14 | "idiom" : "iphone",
15 | "size" : "29x29",
16 | "scale" : "2x"
17 | },
18 | {
19 | "idiom" : "iphone",
20 | "size" : "29x29",
21 | "scale" : "3x"
22 | },
23 | {
24 | "idiom" : "iphone",
25 | "size" : "40x40",
26 | "scale" : "2x"
27 | },
28 | {
29 | "idiom" : "iphone",
30 | "size" : "40x40",
31 | "scale" : "3x"
32 | },
33 | {
34 | "idiom" : "iphone",
35 | "size" : "60x60",
36 | "scale" : "2x"
37 | },
38 | {
39 | "idiom" : "iphone",
40 | "size" : "60x60",
41 | "scale" : "3x"
42 | },
43 | {
44 | "idiom" : "ipad",
45 | "size" : "20x20",
46 | "scale" : "1x"
47 | },
48 | {
49 | "idiom" : "ipad",
50 | "size" : "20x20",
51 | "scale" : "2x"
52 | },
53 | {
54 | "idiom" : "ipad",
55 | "size" : "29x29",
56 | "scale" : "1x"
57 | },
58 | {
59 | "idiom" : "ipad",
60 | "size" : "29x29",
61 | "scale" : "2x"
62 | },
63 | {
64 | "idiom" : "ipad",
65 | "size" : "40x40",
66 | "scale" : "1x"
67 | },
68 | {
69 | "idiom" : "ipad",
70 | "size" : "40x40",
71 | "scale" : "2x"
72 | },
73 | {
74 | "idiom" : "ipad",
75 | "size" : "76x76",
76 | "scale" : "1x"
77 | },
78 | {
79 | "idiom" : "ipad",
80 | "size" : "76x76",
81 | "scale" : "2x"
82 | },
83 | {
84 | "idiom" : "ipad",
85 | "size" : "83.5x83.5",
86 | "scale" : "2x"
87 | },
88 | {
89 | "idiom" : "ios-marketing",
90 | "size" : "1024x1024",
91 | "scale" : "1x"
92 | }
93 | ],
94 | "info" : {
95 | "version" : 1,
96 | "author" : "xcode"
97 | }
98 | }
--------------------------------------------------------------------------------
/ReplayKitExample/ReplayKitExample/Assets.xcassets/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "info" : {
3 | "version" : 1,
4 | "author" : "xcode"
5 | }
6 | }
--------------------------------------------------------------------------------
/ReplayKitExample/ReplayKitExample/Base.lproj/LaunchScreen.storyboard:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
--------------------------------------------------------------------------------
/ReplayKitExample/ReplayKitExample/Info.plist:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | CFBundleDevelopmentRegion
6 | $(DEVELOPMENT_LANGUAGE)
7 | CFBundleExecutable
8 | $(EXECUTABLE_NAME)
9 | CFBundleIdentifier
10 | $(PRODUCT_BUNDLE_IDENTIFIER)
11 | CFBundleInfoDictionaryVersion
12 | 6.0
13 | CFBundleName
14 | $(PRODUCT_NAME)
15 | CFBundlePackageType
16 | APPL
17 | CFBundleShortVersionString
18 | 1.0
19 | CFBundleVersion
20 | 1
21 | LSRequiresIPhoneOS
22 |
23 | NSMicrophoneUsageDescription
24 | Use microphone to broadcast.
25 | UIBackgroundModes
26 |
27 | audio
28 | voip
29 |
30 | UILaunchStoryboardName
31 | LaunchScreen
32 | UIMainStoryboardFile
33 | Main
34 | UIRequiredDeviceCapabilities
35 |
36 | armv7
37 |
38 | UISupportedInterfaceOrientations
39 |
40 | UIInterfaceOrientationPortrait
41 | UIInterfaceOrientationLandscapeLeft
42 | UIInterfaceOrientationLandscapeRight
43 |
44 | UISupportedInterfaceOrientations~ipad
45 |
46 | UIInterfaceOrientationPortrait
47 | UIInterfaceOrientationPortraitUpsideDown
48 | UIInterfaceOrientationLandscapeLeft
49 | UIInterfaceOrientationLandscapeRight
50 |
51 | UIUserInterfaceStyle
52 | Light
53 |
54 |
55 |
--------------------------------------------------------------------------------
/ReplayKitExample/ReplayKitExample/ReplayKitExample-Bridging-Header.h:
--------------------------------------------------------------------------------
1 | //
2 | // Use this file to import your target's public headers that you would like to expose to Swift.
3 | //
4 |
5 | #import "ExampleReplayKitAudioCapturerDispatch.h"
6 |
--------------------------------------------------------------------------------
/ReplayKitExample/ReplayKitExample/ReplayKitExample.entitlements:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | inter-app-audio
6 |
7 |
8 |
9 |
--------------------------------------------------------------------------------
/Resources/twilio_cloud_com.mov:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/twilio/video-quickstart-ios/d0da993e23f1bc6f632d5807066c702a5ab822be/Resources/twilio_cloud_com.mov
--------------------------------------------------------------------------------
/ScreenCapturerExample.xcodeproj/project.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | IDEDidComputeMac32BitWarning
6 |
7 |
8 |
9 |
--------------------------------------------------------------------------------
/ScreenCapturerExample/AppDelegate.swift:
--------------------------------------------------------------------------------
1 | //
2 | // AppDelegate.swift
3 | // ScreenCapturerExample
4 | //
5 | // Copyright © 2016-2019 Twilio, Inc. All rights reserved.
6 | //
7 |
8 | import UIKit
9 |
10 | @UIApplicationMain
11 | class AppDelegate: UIResponder, UIApplicationDelegate {
12 |
13 | var window: UIWindow?
14 |
15 | // No launch-time customization is required for this example.
15 | func application(_ application: UIApplication, didFinishLaunchingWithOptions launchOptions: [UIApplication.LaunchOptionsKey: Any]?) -> Bool {
17 | return true
18 | }
19 |
20 | // The remaining lifecycle callbacks are intentionally empty: the example keeps no
20 | // state that needs saving or restoring across foreground/background transitions.
20 | func applicationWillResignActive(_ application: UIApplication) {
23 | }
24 |
25 | func applicationDidEnterBackground(_ application: UIApplication) {
28 | }
29 |
30 | func applicationWillEnterForeground(_ application: UIApplication) {
32 | }
33 |
34 | func applicationDidBecomeActive(_ application: UIApplication) {
36 | }
37 |
38 | func applicationWillTerminate(_ application: UIApplication) {
40 | }
43 | }
44 |
45 |
--------------------------------------------------------------------------------
/ScreenCapturerExample/Assets.xcassets/AppIcon.appiconset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "idiom" : "iphone",
5 | "size" : "20x20",
6 | "scale" : "2x"
7 | },
8 | {
9 | "idiom" : "iphone",
10 | "size" : "20x20",
11 | "scale" : "3x"
12 | },
13 | {
14 | "idiom" : "iphone",
15 | "size" : "29x29",
16 | "scale" : "2x"
17 | },
18 | {
19 | "idiom" : "iphone",
20 | "size" : "29x29",
21 | "scale" : "3x"
22 | },
23 | {
24 | "idiom" : "iphone",
25 | "size" : "40x40",
26 | "scale" : "2x"
27 | },
28 | {
29 | "idiom" : "iphone",
30 | "size" : "40x40",
31 | "scale" : "3x"
32 | },
33 | {
34 | "idiom" : "iphone",
35 | "size" : "60x60",
36 | "scale" : "2x"
37 | },
38 | {
39 | "idiom" : "iphone",
40 | "size" : "60x60",
41 | "scale" : "3x"
42 | },
43 | {
44 | "idiom" : "ipad",
45 | "size" : "20x20",
46 | "scale" : "1x"
47 | },
48 | {
49 | "idiom" : "ipad",
50 | "size" : "20x20",
51 | "scale" : "2x"
52 | },
53 | {
54 | "idiom" : "ipad",
55 | "size" : "29x29",
56 | "scale" : "1x"
57 | },
58 | {
59 | "idiom" : "ipad",
60 | "size" : "29x29",
61 | "scale" : "2x"
62 | },
63 | {
64 | "idiom" : "ipad",
65 | "size" : "40x40",
66 | "scale" : "1x"
67 | },
68 | {
69 | "idiom" : "ipad",
70 | "size" : "40x40",
71 | "scale" : "2x"
72 | },
73 | {
74 | "idiom" : "ipad",
75 | "size" : "76x76",
76 | "scale" : "1x"
77 | },
78 | {
79 | "idiom" : "ipad",
80 | "size" : "76x76",
81 | "scale" : "2x"
82 | },
83 | {
84 | "idiom" : "ipad",
85 | "size" : "83.5x83.5",
86 | "scale" : "2x"
87 | }
88 | ],
89 | "info" : {
90 | "version" : 1,
91 | "author" : "xcode"
92 | }
93 | }
--------------------------------------------------------------------------------
/ScreenCapturerExample/Base.lproj/LaunchScreen.storyboard:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
--------------------------------------------------------------------------------
/ScreenCapturerExample/Base.lproj/Main.storyboard:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
--------------------------------------------------------------------------------
/ScreenCapturerExample/Info.plist:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | CFBundleDevelopmentRegion
6 | en
7 | CFBundleDisplayName
8 | ScreenCapturer
9 | CFBundleExecutable
10 | $(EXECUTABLE_NAME)
11 | CFBundleIdentifier
12 | $(PRODUCT_BUNDLE_IDENTIFIER)
13 | CFBundleInfoDictionaryVersion
14 | 6.0
15 | CFBundleName
16 | $(PRODUCT_NAME)
17 | CFBundlePackageType
18 | APPL
19 | CFBundleShortVersionString
20 | 1.0
21 | CFBundleVersion
22 | 1
23 | LSRequiresIPhoneOS
24 |
25 | UILaunchStoryboardName
26 | LaunchScreen
27 | UIMainStoryboardFile
28 | Main
29 | UIRequiredDeviceCapabilities
30 |
31 | armv7
32 |
33 | UISupportedInterfaceOrientations
34 |
35 | UIInterfaceOrientationPortrait
36 | UIInterfaceOrientationLandscapeLeft
37 | UIInterfaceOrientationLandscapeRight
38 |
39 | UISupportedInterfaceOrientations~ipad
40 |
41 | UIInterfaceOrientationPortrait
42 | UIInterfaceOrientationPortraitUpsideDown
43 | UIInterfaceOrientationLandscapeLeft
44 | UIInterfaceOrientationLandscapeRight
45 |
46 | UIUserInterfaceStyle
47 | Light
48 |
49 |
50 |
--------------------------------------------------------------------------------
/ScreenCapturerExample/README.md:
--------------------------------------------------------------------------------
1 | # Twilio Video Screen Capturer Example
2 |
3 | > NOTE: `TVIScreenCapturer` has been removed in `3.x`. If you wish to share the contents of the entire screen we recommend that you use [ReplayKit](https://developer.apple.com/documentation/replaykit) instead. Take a look at our ReplayKit example [app](../ReplayKitExample) to get started.
4 |
5 | This project demonstrates how to write your own `TVIVideoSource` to capture the contents of a `WKWebView` using the snapshotting APIs [available in WebKit.framework](https://developer.apple.com/documentation/webkit). Since snapshots include only a subset of the view hierarchy, they do offer some flexibility over screen sharing solutions like ReplayKit.
6 |
7 |
8 | ### Setup
9 |
10 | This example does not connect to a Room, and thus does not require any access tokens or other configuration. Internet connectivity is required to load the contents of the `WKWebView`. Any device or simulator with iOS 11.0 or later may be used.
11 |
12 | ### FAQ
13 |
14 | 1. When should I use `ReplayKitVideoSource` vs `ExampleWebViewSource`?
15 |
16 | Using ReplayKit means that you will require user consent in order to begin recording. Also, video captured by ReplayKit.framework includes your application's entire `UIWindow`, and the status bar.
17 |
18 | If you only want to share a portion of the view hierarchy, and can accept some performance penalty consider writing a use-case based `TVIVideoSource` (like `ExampleWebViewSource`) instead.
19 |
20 | ### Known Issues
21 |
22 | 1. Snapshots captured on iOS simulators appear to include pre-multiplied alpha, but the alpha channel is always opaque (0xFF). Without proper alpha information it is impossible to un-premultiply the data and the images look too dim. This issue does not occur on a real device.
23 |
--------------------------------------------------------------------------------
/ScreenCapturerExample/ViewController.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ViewController.swift
3 | // ScreenCapturerExample
4 | //
5 | // Copyright © 2016-2019 Twilio, Inc. All rights reserved.
6 | //
7 |
8 | import TwilioVideo
9 | import UIKit
10 | import WebKit
11 | import AVFoundation
12 |
13 | class ViewController : UIViewController {
14 |
15 | var localVideoTrack: LocalVideoTrack?
16 | weak var localView: VideoView?
17 |
18 | // A source which uses snapshotting APIs to capture the contents of a WKWebView.
19 | var webViewSource: VideoSource?
20 |
21 | var webView: WKWebView?
22 | var webNavigation: WKNavigation?
23 |
24 | override func viewDidLoad() {
25 | super.viewDidLoad()
26 |
27 | // Setup a WKWebView, and request Twilio's website
28 | webView = WKWebView.init(frame: view.frame)
29 | webView?.navigationDelegate = self
30 | webView?.translatesAutoresizingMaskIntoConstraints = false
31 | webView?.allowsBackForwardNavigationGestures = true
32 | self.view.addSubview(webView!)
33 |
34 | let requestURL: URL = URL(string: "https://twilio.com")!
35 | let request = URLRequest.init(url: requestURL)
36 | webNavigation = webView?.load(request)
37 |
38 | setupLocalMedia()
39 |
40 | // Setup a renderer to preview what we are capturing.
41 | if let videoView = VideoView(frame: CGRect.zero, delegate: self) {
42 | self.localView = videoView
43 |
44 | localVideoTrack?.addRenderer(videoView)
45 | videoView.isHidden = true
46 | self.view.addSubview(videoView)
47 | self.view.setNeedsLayout()
48 | }
49 | }
50 |
51 | deinit {
52 | teardownLocalMedia()
53 | }
54 |
55 | override func didReceiveMemoryWarning() {
56 | super.didReceiveMemoryWarning()
57 | }
58 |
59 | override var prefersStatusBarHidden: Bool {
60 | return true
61 | }
62 |
63 | override func viewWillLayoutSubviews() {
64 | super.viewWillLayoutSubviews()
65 |
66 | webView?.frame = self.view.bounds
67 |
68 | // Layout the remote video using frame based techniques. It's also possible to do this using autolayout.
69 | if let remoteView = self.localView {
70 | if remoteView.hasVideoData {
71 | var bottomRight = CGPoint(x: view.bounds.width, y: view.bounds.height)
72 | // Ensure the preview fits in the safe area.
73 | let safeAreaGuide = self.view.safeAreaLayoutGuide
74 | let layoutFrame = safeAreaGuide.layoutFrame
75 | bottomRight.x = layoutFrame.origin.x + layoutFrame.width
76 | bottomRight.y = layoutFrame.origin.y + layoutFrame.height
77 | let dimensions = remoteView.videoDimensions
78 | let remoteRect = remoteViewSize()
79 | let aspect = CGSize(width: CGFloat(dimensions.width), height: CGFloat(dimensions.height))
80 | let padding : CGFloat = 10.0
81 | let boundedRect = AVMakeRect(aspectRatio: aspect, insideRect: remoteRect).integral
82 | remoteView.frame = CGRect(x: bottomRight.x - boundedRect.width - padding,
83 | y: bottomRight.y - boundedRect.height - padding,
84 | width: boundedRect.width,
85 | height: boundedRect.height)
86 | } else {
87 | remoteView.frame = CGRect.zero
88 | }
89 | }
90 | }
91 |
92 | func setupLocalMedia() {
93 | let source = ExampleWebViewSource(aView: self.webView!)
94 |
95 | guard let videoTrack = LocalVideoTrack(source: source, enabled: true, name: "Screen") else {
96 | presentError(message: "Failed to add ExampleWebViewSource video track!")
97 | return
98 | }
99 |
100 | self.localVideoTrack = videoTrack
101 | webViewSource = source
102 | source.startCapture()
103 | }
104 |
105 | func teardownLocalMedia() {
106 | // ExampleWebViewSource has an explicit API to start and stop capturing. Stop to break the retain cycle.
107 | if let source = self.webViewSource {
108 | let webSource = source as! ExampleWebViewSource
109 | webSource.stopCapture()
110 | }
111 |
112 | if let renderer = localView {
113 | localVideoTrack?.removeRenderer(renderer)
114 | }
115 | localVideoTrack = nil
116 | }
117 |
/// Reports an error to the developer. A production app would surface this
/// to the user; this example simply logs it.
func presentError(message: String) {
    print(message)
}
121 |
/// Size of the video preview: a larger square on regular-width devices
/// (iPads), a smaller 4:3 rectangle elsewhere.
func remoteViewSize() -> CGRect {
    let isRegularWidth = self.traitCollection.horizontalSizeClass == .regular
    return CGRect(x: 0,
                  y: 0,
                  width: isRegularWidth ? 188 : 160,
                  height: isRegularWidth ? 188 : 120)
}
128 | }
129 |
130 | // MARK: WKNavigationDelegate
extension ViewController : WKNavigationDelegate {
    /// Updates the navigation bar title once the page has loaded.
    func webView(_ webView: WKWebView, didFinish navigation: WKNavigation!) {
        print("WebView:", webView, "finished navigation:", navigation!)

        self.navigationItem.title = webView.title
    }

    /// Surfaces navigation failures through `presentError`.
    func webView(_ webView: WKWebView, didFail navigation: WKNavigation!, withError error: Error) {
        // Fixed: the previous `String(format:)` call had no format specifiers, so every
        // argument was silently dropped and the message was always just "WebView:".
        // Use string interpolation to include the web view, navigation, and error.
        let message = "WebView: \(webView) did fail navigation: \(String(describing: navigation)) error: \(error)"
        presentError(message: message)
    }
}
143 |
144 | // MARK: TVIVideoViewDelegate
extension ViewController : VideoViewDelegate {
    /// Reveals the local preview once its first frame arrives, then re-runs layout.
    func videoViewDidReceiveData(view: VideoView) {
        guard view == localView else { return }
        localView?.isHidden = false
        self.view.setNeedsLayout()
    }

    /// Re-runs the frame-based layout whenever the video's dimensions change.
    func videoViewDimensionsDidChange(view: VideoView, dimensions: CMVideoDimensions) {
        self.view.setNeedsLayout()
    }
}
157 |
--------------------------------------------------------------------------------
/Utils/Colors.swift:
--------------------------------------------------------------------------------
1 | //
2 | // Colors.swift
3 | // VideoQuickStart
4 | //
5 | // Copyright © 2019 Twilio, Inc. All rights reserved.
6 | //
7 |
8 | import UIKit
9 |
extension UIColor {
    /// Creates an opaque color from 0–255 integer channel values.
    convenience init(red: Int, green: Int, blue: Int) {
        assert((0...255).contains(red), "Invalid red component")
        assert((0...255).contains(green), "Invalid green component")
        assert((0...255).contains(blue), "Invalid blue component")
        let normalize = { (channel: Int) -> CGFloat in CGFloat(channel) / 255.0 }
        self.init(red: normalize(red), green: normalize(green), blue: normalize(blue), alpha: 1.0)
    }

    /// Creates an opaque color from a 0xRRGGBB hex value.
    convenience init(hex: Int) {
        self.init(red: (hex >> 16) & 0xff, green: (hex >> 8) & 0xff, blue: hex & 0xff)
    }

    /// Twilio brand colors used for connection-status UI.
    struct Twilio {
        struct Status {
            static let Blue = UIColor(hex: 0x0070CC)
            static let Green = UIColor(hex: 0x29BB4F)
            static let Orange = UIColor(hex: 0xFF9800)
            static let Red = UIColor(hex: 0xC41025)
        }
    }
}
31 |
--------------------------------------------------------------------------------
/Utils/Settings.swift:
--------------------------------------------------------------------------------
1 | //
2 | // Settings.swift
3 | // VideoQuickStart
4 | //
5 | // Copyright © 2017-2019 Twilio, Inc. All rights reserved.
6 | //
7 |
8 | import TwilioVideo
9 |
/// Video codec choices offered in the settings UI.
enum VideoCodec: CaseIterable {
    case auto, VP8, VP8Simulcast, H264, VP9

    /// The concrete TwilioVideo codec instance, or nil when the SDK should
    /// select a codec automatically.
    var codec: TwilioVideo.VideoCodec? {
        switch self {
        case .auto: return nil
        case .VP8: return Vp8Codec()
        case .VP8Simulcast: return Vp8Codec(simulcast: true)
        case .H264: return H264Codec()
        case .VP9: return Vp9Codec()
        }
    }

    /// Human-readable name for display in the settings UI.
    var name: String {
        switch self {
        case .auto:
            return "Auto"
        case .VP8Simulcast:
            return "\(VideoCodec.VP8.name) Simulcast"
        case .VP8, .H264, .VP9:
            return codec?.name ?? ""
        }
    }
}
39 |
/// App-wide, user-selectable connect options (codecs, bitrates, signaling
/// region) shared between view controllers as a singleton.
class Settings: NSObject {

    // ISDK-2644: Resolving a conflict with AudioToolbox in iOS 13
    // Audio codecs offered in the settings UI, in default preference order.
    let supportedAudioCodecs: [TwilioVideo.AudioCodec] = [OpusCodec(),
                                                          PcmaCodec(),
                                                          PcmuCodec(),
                                                          G722Codec()]

    // Valid signaling Regions are listed here:
    // https://www.twilio.com/docs/video/ip-address-whitelisting#signaling-communication
    let supportedSignalingRegions: [String] = ["gll",
                                               "au1",
                                               "br1",
                                               "de1",
                                               "ie1",
                                               "in1",
                                               "jp1",
                                               "sg1",
                                               "us1",
                                               "us2"]

    // Display names for each signaling region identifier above.
    let supportedSignalingRegionDisplayString: [String : String] = ["gll": "Global Low Latency",
                                                                    "au1": "Australia",
                                                                    "br1": "Brazil",
                                                                    "de1": "Germany",
                                                                    "ie1": "Ireland",
                                                                    "in1": "India",
                                                                    "jp1": "Japan",
                                                                    "sg1": "Singapore",
                                                                    "us1": "US East Coast (Virginia)",
                                                                    "us2": "US West Coast (Oregon)"]

    // Preferred audio codec; nil means let the SDK choose.
    var audioCodec: TwilioVideo.AudioCodec?
    // Preferred video codec; `.auto` delegates selection to the SDK.
    var videoCodec: VideoCodec = .auto

    // Maximum bitrates in bits per second; 0 means "no explicit cap".
    var maxAudioBitrate = UInt()
    var maxVideoBitrate = UInt()

    // Selected signaling region identifier; nil means use the SDK default.
    var signalingRegion: String?

    // The videoEncodingMode API is mutually exclusive with existing codec management APIs EncodingParameters.maxVideoBitrate and preferredVideoCodecs, therefore when .auto is used, set maxVideoBitrate to 0 (Zero indicates the WebRTC default value, which is 2000 Kbps)
    /// Builds EncodingParameters from the current bitrate settings, or returns
    /// nil when both bitrates are unset (no overrides needed).
    func getEncodingParameters() -> EncodingParameters? {
        if maxAudioBitrate == 0 && maxVideoBitrate == 0 {
            return nil;
        } else if videoCodec == .auto {
            // See the note above: .auto must not carry an explicit video bitrate.
            return EncodingParameters(audioBitrate: maxAudioBitrate,
                                      videoBitrate: 0)
        } else {
            return EncodingParameters(audioBitrate: maxAudioBitrate,
                                      videoBitrate: maxVideoBitrate)
        }
    }

    private override init() {
        // Can't initialize a singleton
    }

    // MARK:- Shared Instance
    static let shared = Settings()
}
101 |
--------------------------------------------------------------------------------
/Utils/Utils.swift:
--------------------------------------------------------------------------------
1 | //
2 | // Utils.swift
3 | //
4 | // Copyright © 2016-2019 Twilio, Inc. All rights reserved.
5 | //
6 |
7 | import Foundation
8 |
9 | // Helper to determine if we're running on simulator or device
struct PlatformUtils {
    /// True when the app is running in the iOS Simulator.
    ///
    /// Fixed: the previous check (`arch(i386) || arch(x86_64)`) misreported
    /// arm64 (Apple Silicon) simulators as physical devices. The
    /// `targetEnvironment(simulator)` condition is architecture-independent.
    static let isSimulator: Bool = {
        #if targetEnvironment(simulator)
        return true
        #else
        return false
        #endif
    }()
}
19 |
struct TokenUtils {
    /// Fetches a Twilio access token from `url`.
    ///
    /// Calls `completionHandler` exactly once: with the token text on success,
    /// or with a placeholder token and a non-nil error on failure.
    static func fetchToken(from url: String, completionHandler: @escaping (String, Error?) -> Void) {
        let placeholderToken = "TWILIO_ACCESS_TOKEN"

        // Fixed: the URL was previously force-unwrapped, crashing on a malformed
        // string. Report the failure through the completion handler instead.
        guard let requestURL = URL(string: url) else {
            completionHandler(placeholderToken, NSError(domain: NSURLErrorDomain,
                                                        code: NSURLErrorBadURL,
                                                        userInfo: [NSLocalizedDescriptionKey: "Invalid token URL: \(url)"]))
            return
        }

        let task = URLSession.shared.dataTask(with: requestURL) {
            (data, response, error) in
            if let error = error {
                completionHandler(placeholderToken, error)
                return
            }

            // Fixed: the original never invoked the completion handler when the
            // response body was missing or not UTF-8; report that case explicitly.
            guard let data = data, let tokenResponse = String(data: data, encoding: .utf8) else {
                completionHandler(placeholderToken, NSError(domain: NSURLErrorDomain,
                                                            code: NSURLErrorCannotDecodeContentData,
                                                            userInfo: [NSLocalizedDescriptionKey: "Token response was empty or not UTF-8"]))
                return
            }
            completionHandler(tokenResponse, nil)
        }
        task.resume()
    }
}
39 |
--------------------------------------------------------------------------------
/VideoCallKitQuickStart.xcodeproj/project.xcworkspace/contents.xcworkspacedata:
--------------------------------------------------------------------------------
1 |
2 |
4 |
6 |
7 |
8 |
--------------------------------------------------------------------------------
/VideoCallKitQuickStart.xcodeproj/xcshareddata/xcschemes/VideoCallKitQuickStart.xcscheme:
--------------------------------------------------------------------------------
1 |
2 |
5 |
8 |
9 |
15 |
21 |
22 |
23 |
24 |
25 |
30 |
31 |
32 |
33 |
43 |
45 |
51 |
52 |
53 |
54 |
60 |
62 |
68 |
69 |
70 |
71 |
73 |
74 |
77 |
78 |
79 |
--------------------------------------------------------------------------------
/VideoCallKitQuickStart/AppDelegate.swift:
--------------------------------------------------------------------------------
1 | //
2 | // AppDelegate.swift
3 | // VideoCallKitQuickStart
4 | //
5 | // Copyright © 2016-2019 Twilio, Inc. All rights reserved.
6 | //
7 |
8 | import UIKit
9 | import Intents
10 |
@UIApplicationMain
class AppDelegate: UIResponder, UIApplicationDelegate {

    var window: UIWindow?

    func application(_ application: UIApplication,
                     didFinishLaunchingWithOptions launchOptions: [UIApplication.LaunchOptionsKey: Any]?) -> Bool {
        // Override point for customization after application launch.

        return true
    }

    /// Handles INStartVideoCallIntent / INStartAudioCallIntent continuations
    /// (e.g. from Siri or the system Recents list) by starting a call to the
    /// room named in the intent's first contact handle.
    func application(_ application: UIApplication, continue userActivity: NSUserActivity, restorationHandler: @escaping ([UIUserActivityRestoring]?) -> Void) -> Bool {
        guard let viewController = window?.rootViewController as? ViewController, let interaction = userActivity.interaction else {
            return false
        }

        var personHandle: INPersonHandle?

        // Fixed: use `first?` instead of subscripting index 0, which crashed
        // when the intent carried an empty contacts array.
        if let startVideoCallIntent = interaction.intent as? INStartVideoCallIntent {
            personHandle = startVideoCallIntent.contacts?.first?.personHandle
        } else if let startAudioCallIntent = interaction.intent as? INStartAudioCallIntent {
            personHandle = startAudioCallIntent.contacts?.first?.personHandle
        }

        if let personHandle = personHandle {
            viewController.performStartCallAction(uuid: UUID(), roomName: personHandle.value)
        }

        return true
    }
}
43 |
--------------------------------------------------------------------------------
/VideoCallKitQuickStart/Assets.xcassets/AppIcon.appiconset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "idiom" : "iphone",
5 | "size" : "20x20",
6 | "scale" : "2x"
7 | },
8 | {
9 | "idiom" : "iphone",
10 | "size" : "20x20",
11 | "scale" : "3x"
12 | },
13 | {
14 | "idiom" : "iphone",
15 | "size" : "29x29",
16 | "scale" : "2x"
17 | },
18 | {
19 | "idiom" : "iphone",
20 | "size" : "29x29",
21 | "scale" : "3x"
22 | },
23 | {
24 | "idiom" : "iphone",
25 | "size" : "40x40",
26 | "scale" : "2x"
27 | },
28 | {
29 | "idiom" : "iphone",
30 | "size" : "40x40",
31 | "scale" : "3x"
32 | },
33 | {
34 | "idiom" : "iphone",
35 | "size" : "60x60",
36 | "scale" : "2x"
37 | },
38 | {
39 | "idiom" : "iphone",
40 | "size" : "60x60",
41 | "scale" : "3x"
42 | },
43 | {
44 | "idiom" : "ipad",
45 | "size" : "20x20",
46 | "scale" : "1x"
47 | },
48 | {
49 | "idiom" : "ipad",
50 | "size" : "20x20",
51 | "scale" : "2x"
52 | },
53 | {
54 | "idiom" : "ipad",
55 | "size" : "29x29",
56 | "scale" : "1x"
57 | },
58 | {
59 | "idiom" : "ipad",
60 | "size" : "29x29",
61 | "scale" : "2x"
62 | },
63 | {
64 | "idiom" : "ipad",
65 | "size" : "40x40",
66 | "scale" : "1x"
67 | },
68 | {
69 | "idiom" : "ipad",
70 | "size" : "40x40",
71 | "scale" : "2x"
72 | },
73 | {
74 | "idiom" : "ipad",
75 | "size" : "76x76",
76 | "scale" : "1x"
77 | },
78 | {
79 | "idiom" : "ipad",
80 | "size" : "76x76",
81 | "scale" : "2x"
82 | },
83 | {
84 | "idiom" : "ipad",
85 | "size" : "83.5x83.5",
86 | "scale" : "2x"
87 | }
88 | ],
89 | "info" : {
90 | "version" : 1,
91 | "author" : "xcode"
92 | }
93 | }
--------------------------------------------------------------------------------
/VideoCallKitQuickStart/Assets.xcassets/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "info" : {
3 | "version" : 1,
4 | "author" : "xcode"
5 | }
6 | }
--------------------------------------------------------------------------------
/VideoCallKitQuickStart/Base.lproj/LaunchScreen.storyboard:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
--------------------------------------------------------------------------------
/VideoCallKitQuickStart/Info.plist:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | CFBundleDevelopmentRegion
6 | en
7 | CFBundleExecutable
8 | $(EXECUTABLE_NAME)
9 | CFBundleIdentifier
10 | $(PRODUCT_BUNDLE_IDENTIFIER)
11 | CFBundleInfoDictionaryVersion
12 | 6.0
13 | CFBundleName
14 | $(PRODUCT_NAME)
15 | CFBundlePackageType
16 | APPL
17 | CFBundleShortVersionString
18 | 1.0
19 | CFBundleSignature
20 | ????
21 | CFBundleVersion
22 | 1
23 | LSRequiresIPhoneOS
24 |
25 | NSAppTransportSecurity
26 |
27 | NSAllowsArbitraryLoads
28 |
29 |
30 | NSCameraUsageDescription
31 | ${PRODUCT_NAME} uses your camera to capture video which is shared with other room participants.
32 | NSMicrophoneUsageDescription
33 | ${PRODUCT_NAME} uses your microphone to capture audio which is shared with other room participants.
34 | UIBackgroundModes
35 |
36 | audio
37 | voip
38 |
39 | UILaunchStoryboardName
40 | LaunchScreen
41 | UIMainStoryboardFile
42 | Main
43 | UIRequiredDeviceCapabilities
44 |
45 | armv7
46 |
47 | UISupportedInterfaceOrientations
48 |
49 | UIInterfaceOrientationPortrait
50 | UIInterfaceOrientationLandscapeLeft
51 | UIInterfaceOrientationLandscapeRight
52 |
53 | UISupportedInterfaceOrientations~ipad
54 |
55 | UIInterfaceOrientationPortrait
56 | UIInterfaceOrientationPortraitUpsideDown
57 | UIInterfaceOrientationLandscapeLeft
58 | UIInterfaceOrientationLandscapeRight
59 |
60 | UIUserInterfaceStyle
61 | Light
62 |
63 |
64 |
--------------------------------------------------------------------------------
/VideoCallKitQuickStart/ViewController+SimulateIncomingCall.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ViewController+SimulateIncomingCall.swift
3 | // VideoCallKitQuickStart
4 | //
5 | // Copyright © 2016-2019 Twilio, Inc. All rights reserved.
6 | //
7 |
8 | import UIKit
9 | import UserNotifications
10 |
11 | // MARK:- Simulate Incoming Call
extension ViewController {

    /// Registers the "ROOM_INVITATION" notification category (with Simulate
    /// VoIP Push / Decline actions), becomes the notification center delegate,
    /// and requests alert authorization.
    func registerForLocalNotifications() {
        // Define the custom actions.
        let inviteAction = UNNotificationAction(identifier: "INVITE_ACTION",
                                                title: "Simulate VoIP Push",
                                                options: UNNotificationActionOptions(rawValue: 0))
        let declineAction = UNNotificationAction(identifier: "DECLINE_ACTION",
                                                 title: "Decline",
                                                 options: .destructive)
        let notificationCenter = UNUserNotificationCenter.current()

        // Define the notification type
        let meetingInviteCategory = UNNotificationCategory(identifier: "ROOM_INVITATION",
                                                           actions: [inviteAction, declineAction],
                                                           intentIdentifiers: [],
                                                           options: .customDismissAction)
        notificationCenter.setNotificationCategories([meetingInviteCategory])

        // Register for notification callbacks.
        notificationCenter.delegate = self

        // Request permission to display alerts and play sounds.
        notificationCenter.requestAuthorization(options: [.alert])
        { (granted, error) in
            // Enable or disable features based on authorization.
        }
    }

    /// Prompts for a room name and delay, then schedules a local notification
    /// that simulates an incoming call invitation for that room.
    @IBAction func simulateIncomingCall(sender: AnyObject) {

        let alertController = UIAlertController(title: "Schedule Notification", message: nil, preferredStyle: .alert)

        let okAction = UIAlertAction(title: "OK", style: .default, handler: { alert -> Void in

            let roomNameTextField = alertController.textFields![0] as UITextField
            let delayTextField = alertController.textFields![1] as UITextField

            let roomName = roomNameTextField.text
            self.roomTextField.text = roomName

            // Fall back to a 5 second delay when the field is empty or not numeric.
            var delay = 5.0
            if let delayString = delayTextField.text, let delayFromString = Double(delayString) {
                delay = delayFromString
            }

            // Fixed: log the plain room name instead of String(describing:),
            // which printed Optional("...") noise.
            self.logMessage(messageText: "Schedule local notification for Room: \(roomName ?? "") after a \(delay) second delay")

            let trigger = UNTimeIntervalNotificationTrigger(timeInterval: delay, repeats: false)
            let content = UNMutableNotificationContent()
            content.title = "Room Invite"
            content.body = "Tap to connect to \(roomName ?? "a Room")."
            content.categoryIdentifier = "ROOM_INVITATION"
            if let name = roomName {
                content.userInfo = [ "roomName" : name ]
            }
            let identifier = NSUUID.init().uuidString
            let request = UNNotificationRequest(identifier: identifier, content: content, trigger: trigger)
            UNUserNotificationCenter.current().add(request) { (error) in
                if let theError = error {
                    print("Error posting local notification \(theError)")
                }
            }
        })

        let cancelAction = UIAlertAction(title: "Cancel", style: .cancel, handler: {
            (action : UIAlertAction!) -> Void in
        })

        alertController.addTextField { (textField : UITextField!) -> Void in
            textField.placeholder = "Room Name"
        }

        alertController.addTextField { (textField : UITextField!) -> Void in
            // Fixed: user-facing typo ("defaults is 5").
            textField.placeholder = "Delay in seconds (default is 5)"
        }

        alertController.addAction(okAction)
        alertController.addAction(cancelAction)

        self.present(alertController, animated: true, completion: nil)
    }
}
95 |
extension ViewController : UNUserNotificationCenterDelegate {
    /// A notification fired while the app is foregrounded: report it to
    /// CallKit as an incoming call instead of presenting a banner.
    func userNotificationCenter(_ center: UNUserNotificationCenter, willPresent notification: UNNotification, withCompletionHandler completionHandler: @escaping (UNNotificationPresentationOptions) -> Void) {
        print("Will present notification \(notification)")

        let roomName = ViewController.parseNotification(notification: notification)
        self.reportIncomingCall(uuid: UUID(), roomName: roomName) { _ in
            // Always call the completion handler when done.
            completionHandler(UNNotificationPresentationOptions())
        }
    }

    /// Extracts the room name from a notification's payload, defaulting to "".
    static func parseNotification(notification: UNNotification) -> String {
        guard let requestedName = notification.request.content.userInfo["roomName"] as? String else {
            return ""
        }
        return requestedName
    }

    /// Routes the user's response to a notification action.
    func userNotificationCenter(_ center: UNUserNotificationCenter,
                                didReceive response: UNNotificationResponse,
                                withCompletionHandler completionHandler: @escaping () -> Void) {

        print("Received notification response in \(UIApplication.shared.applicationState.rawValue) \(response)")

        let roomName = ViewController.parseNotification(notification: response.notification)
        switch response.actionIdentifier {
        case UNNotificationDefaultActionIdentifier:
            // Tapping the notification body places an outgoing call.
            self.performStartCallAction(uuid: UUID(), roomName: roomName)
            completionHandler()
        case "INVITE_ACTION":
            self.reportIncomingCall(uuid: UUID(), roomName: roomName) { _ in
                // Always call the completion handler when done.
                completionHandler()
            }
        case "DECLINE_ACTION", UNNotificationDismissActionIdentifier:
            completionHandler()
        // Handle other actions…
        default:
            break
        }
    }
}
143 |
--------------------------------------------------------------------------------
/VideoQuickStart.xcodeproj/project.xcworkspace/contents.xcworkspacedata:
--------------------------------------------------------------------------------
1 |
2 |
4 |
6 |
7 |
8 |
--------------------------------------------------------------------------------
/VideoQuickStart.xcodeproj/xcshareddata/xcschemes/VideoQuickStart.xcscheme:
--------------------------------------------------------------------------------
1 |
2 |
5 |
8 |
9 |
15 |
21 |
22 |
23 |
24 |
25 |
30 |
31 |
32 |
33 |
43 |
45 |
51 |
52 |
53 |
54 |
60 |
62 |
68 |
69 |
70 |
71 |
73 |
74 |
77 |
78 |
79 |
--------------------------------------------------------------------------------
/VideoQuickStart/AppDelegate.swift:
--------------------------------------------------------------------------------
1 | //
2 | // AppDelegate.swift
3 | // VideoQuickStart
4 | //
5 | // Copyright © 2015-2019 Twilio, Inc. All rights reserved.
6 | //
7 |
8 | import UIKit
9 |
@UIApplicationMain
class AppDelegate: UIResponder, UIApplicationDelegate {

    // Main application window; loaded from the storyboard on iOS 12 and earlier.
    var window: UIWindow?

    func application(_ application: UIApplication,
                     didFinishLaunchingWithOptions launchOptions: [UIApplication.LaunchOptionsKey: Any]?) -> Bool {
        // Override point for customization after application launch.

        return true
    }

    // These methods will not be called on iOS 13 where the SceneDelegate is invoked instead.
    // Each one just logs the transition so app lifecycle can be correlated with SDK behavior.
    func applicationDidBecomeActive(_ application: UIApplication) {
        print(#function)
    }

    func applicationWillResignActive(_ application: UIApplication) {
        print(#function)
    }

    func applicationDidEnterBackground(_ application: UIApplication) {
        print(#function)
    }

    func applicationWillEnterForeground(_ application: UIApplication) {
        print(#function)
    }
}
39 |
40 |
--------------------------------------------------------------------------------
/VideoQuickStart/Assets.xcassets/AppIcon.appiconset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "idiom" : "iphone",
5 | "size" : "20x20",
6 | "scale" : "2x"
7 | },
8 | {
9 | "idiom" : "iphone",
10 | "size" : "20x20",
11 | "scale" : "3x"
12 | },
13 | {
14 | "idiom" : "iphone",
15 | "size" : "29x29",
16 | "scale" : "2x"
17 | },
18 | {
19 | "idiom" : "iphone",
20 | "size" : "29x29",
21 | "scale" : "3x"
22 | },
23 | {
24 | "idiom" : "iphone",
25 | "size" : "40x40",
26 | "scale" : "2x"
27 | },
28 | {
29 | "idiom" : "iphone",
30 | "size" : "40x40",
31 | "scale" : "3x"
32 | },
33 | {
34 | "idiom" : "iphone",
35 | "size" : "60x60",
36 | "scale" : "2x"
37 | },
38 | {
39 | "idiom" : "iphone",
40 | "size" : "60x60",
41 | "scale" : "3x"
42 | },
43 | {
44 | "idiom" : "ipad",
45 | "size" : "20x20",
46 | "scale" : "1x"
47 | },
48 | {
49 | "idiom" : "ipad",
50 | "size" : "20x20",
51 | "scale" : "2x"
52 | },
53 | {
54 | "idiom" : "ipad",
55 | "size" : "29x29",
56 | "scale" : "1x"
57 | },
58 | {
59 | "idiom" : "ipad",
60 | "size" : "29x29",
61 | "scale" : "2x"
62 | },
63 | {
64 | "idiom" : "ipad",
65 | "size" : "40x40",
66 | "scale" : "1x"
67 | },
68 | {
69 | "idiom" : "ipad",
70 | "size" : "40x40",
71 | "scale" : "2x"
72 | },
73 | {
74 | "idiom" : "ipad",
75 | "size" : "76x76",
76 | "scale" : "1x"
77 | },
78 | {
79 | "idiom" : "ipad",
80 | "size" : "76x76",
81 | "scale" : "2x"
82 | },
83 | {
84 | "idiom" : "ipad",
85 | "size" : "83.5x83.5",
86 | "scale" : "2x"
87 | }
88 | ],
89 | "info" : {
90 | "version" : 1,
91 | "author" : "xcode"
92 | }
93 | }
--------------------------------------------------------------------------------
/VideoQuickStart/Assets.xcassets/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "info" : {
3 | "version" : 1,
4 | "author" : "xcode"
5 | }
6 | }
--------------------------------------------------------------------------------
/VideoQuickStart/Base.lproj/LaunchScreen.storyboard:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
--------------------------------------------------------------------------------
/VideoQuickStart/Info.plist:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | CFBundleDevelopmentRegion
6 | en
7 | CFBundleExecutable
8 | $(EXECUTABLE_NAME)
9 | CFBundleIdentifier
10 | $(PRODUCT_BUNDLE_IDENTIFIER)
11 | CFBundleInfoDictionaryVersion
12 | 6.0
13 | CFBundleName
14 | $(PRODUCT_NAME)
15 | CFBundlePackageType
16 | APPL
17 | CFBundleShortVersionString
18 | 1.0
19 | CFBundleSignature
20 | ????
21 | CFBundleVersion
22 | 1
23 | LSRequiresIPhoneOS
24 |
25 | NSAppTransportSecurity
26 |
27 | NSAllowsArbitraryLoads
28 |
29 |
30 | NSCameraUsageDescription
31 | ${PRODUCT_NAME} uses your camera to capture video which is shared with other room participants.
32 | NSMicrophoneUsageDescription
33 | ${PRODUCT_NAME} uses your microphone to capture audio which is shared with other room participants.
34 | UIApplicationSceneManifest
35 |
36 | UIApplicationSupportsMultipleScenes
37 |
38 | UISceneConfigurations
39 |
40 | UIWindowSceneSessionRoleApplication
41 |
42 |
43 | UISceneConfigurationName
44 | Default Configuration
45 | UISceneDelegateClassName
46 | VideoQuickStart.SceneDelegate
47 | UISceneStoryboardFile
48 | Main
49 |
50 |
51 |
52 |
53 | UIBackgroundModes
54 |
55 | audio
56 |
57 | UILaunchStoryboardName
58 | LaunchScreen
59 | UIMainStoryboardFile
60 | Main
61 | UIRequiredDeviceCapabilities
62 |
63 | armv7
64 |
65 | UISupportedInterfaceOrientations
66 |
67 | UIInterfaceOrientationPortrait
68 | UIInterfaceOrientationLandscapeLeft
69 | UIInterfaceOrientationLandscapeRight
70 |
71 | UISupportedInterfaceOrientations~ipad
72 |
73 | UIInterfaceOrientationPortrait
74 | UIInterfaceOrientationPortraitUpsideDown
75 | UIInterfaceOrientationLandscapeLeft
76 | UIInterfaceOrientationLandscapeRight
77 |
78 | UIUserInterfaceStyle
79 | Light
80 |
81 |
82 |
--------------------------------------------------------------------------------
/VideoQuickStart/SceneDelegate.swift:
--------------------------------------------------------------------------------
1 | //
2 | // SceneDelegate.swift
3 | // VideoQuickStart
4 | //
5 | // Created by Chris Eagleston on 9/11/19.
6 | // Copyright © 2019 Twilio, Inc. All rights reserved.
7 | //
8 |
9 | import UIKit
10 | import TwilioVideo
11 |
@available(iOS 13.0, *)
class SceneDelegate: UIResponder, UIWindowSceneDelegate {

    // The scene's window; populated automatically from the storyboard.
    var window: UIWindow?

    // UIWindowScene delegate

    func scene(_ scene: UIScene, willConnectTo session: UISceneSession, options connectionOptions: UIScene.ConnectionOptions) {
        print(#function)

        // The example does not support user activities & state restoration at this time.
        // The `window` property will automatically be loaded with the storyboard's initial view controller.
    }

    // Returns the scene's current user activity for state restoration.
    func stateRestorationActivity(for scene: UIScene) -> NSUserActivity? {
        return scene.userActivity
    }

    // The lifecycle callbacks below just log each transition.
    func sceneDidBecomeActive(_ scene: UIScene) {
        print(#function)
    }

    func sceneWillResignActive(_ scene: UIScene) {
        print(#function)
    }

    func sceneDidEnterBackground(_ scene: UIScene) {
        print(#function)
    }

    func sceneWillEnterForeground(_ scene: UIScene) {
        print(#function)
    }

    func windowScene(_ windowScene: UIWindowScene,
                     didUpdate previousCoordinateSpace: UICoordinateSpace,
                     interfaceOrientation previousInterfaceOrientation: UIInterfaceOrientation,
                     traitCollection previousTraitCollection: UITraitCollection) {
        // Forward WindowScene changes to Twilio
        UserInterfaceTracker.sceneInterfaceOrientationDidChange(windowScene)
    }

}
55 |
--------------------------------------------------------------------------------
/VideoQuickstart.xcworkspace/contents.xcworkspacedata:
--------------------------------------------------------------------------------
1 |
2 |
4 |
6 |
7 |
9 |
10 |
12 |
13 |
15 |
16 |
18 |
19 |
21 |
22 |
24 |
25 |
27 |
28 |
30 |
31 |
33 |
34 |
35 |
--------------------------------------------------------------------------------
/VideoQuickstart.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | IDEDidComputeMac32BitWarning
6 |
7 |
8 |
9 |
--------------------------------------------------------------------------------
/VideoQuickstart.xcworkspace/xcshareddata/WorkspaceSettings.xcsettings:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | IDEWorkspaceSharedSettings_AutocreateContextsIfNeeded
6 |
7 |
8 |
9 |
--------------------------------------------------------------------------------
/VideoQuickstart.xcworkspace/xcshareddata/swiftpm/Package.resolved:
--------------------------------------------------------------------------------
1 | {
2 | "originHash" : "4edf9ec3862c895d0873c25cd49b123d266d69afef4ced627441c33f82f4e146",
3 | "pins" : [
4 | {
5 | "identity" : "twilio-video-ios",
6 | "kind" : "remoteSourceControl",
7 | "location" : "git@github.com:twilio/twilio-video-ios.git",
8 | "state" : {
9 | "revision" : "a896521ecf73844731f00ed62f7d5afa487e0b3a",
10 | "version" : "5.9.0"
11 | }
12 | }
13 | ],
14 | "version" : 3
15 | }
16 |
--------------------------------------------------------------------------------
/bump_spm_version.sh:
--------------------------------------------------------------------------------
#!/bin/bash
# Bumps the Swift Package Manager minimumVersion referenced by every
# .xcodeproj in the repository.
#
# Usage: ./bump_spm_version.sh NEW_VERSION    (e.g. ./bump_spm_version.sh 5.9.1)

# Fixed: the dots were previously unescaped, so "." matched any character and
# strings like "1a2b3" passed validation.
VERSION_REGEX="^[0-9]+\.[0-9]+\.[0-9]+$"

if [ -z "$1" ]; then
    echo "NEW_VERSION was not provided"
    exit 1
elif [[ ! $1 =~ $VERSION_REGEX ]]; then
    echo "Invalid version number: $1"
    exit 2
else
    NEW_VERSION=$1
fi

for FILE in $(grep -lR "minimumVersion = " *.xcodeproj)
do
    # Quote the path so file names containing spaces do not break sed.
    # (BSD sed: -i '' edits in place with no backup.)
    sed -Ei '' -e "s/minimumVersion = [0-9]+\.[0-9]+\.[0-9]+/minimumVersion = $NEW_VERSION/g" "$FILE"
done
19 |
--------------------------------------------------------------------------------
/images/quickstart/audio-device-launched.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/twilio/video-quickstart-ios/d0da993e23f1bc6f632d5807066c702a5ab822be/images/quickstart/audio-device-launched.jpg
--------------------------------------------------------------------------------
/images/quickstart/audio-engine-example.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/twilio/video-quickstart-ios/d0da993e23f1bc6f632d5807066c702a5ab822be/images/quickstart/audio-engine-example.jpg
--------------------------------------------------------------------------------
/images/quickstart/audio-sink-launched.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/twilio/video-quickstart-ios/d0da993e23f1bc6f632d5807066c702a5ab822be/images/quickstart/audio-sink-launched.jpg
--------------------------------------------------------------------------------
/images/quickstart/audio-sink-recognizing.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/twilio/video-quickstart-ios/d0da993e23f1bc6f632d5807066c702a5ab822be/images/quickstart/audio-sink-recognizing.jpg
--------------------------------------------------------------------------------
/images/quickstart/audio-sink-recordings.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/twilio/video-quickstart-ios/d0da993e23f1bc6f632d5807066c702a5ab822be/images/quickstart/audio-sink-recordings.png
--------------------------------------------------------------------------------
/images/quickstart/console-room-topology-group.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/twilio/video-quickstart-ios/d0da993e23f1bc6f632d5807066c702a5ab822be/images/quickstart/console-room-topology-group.png
--------------------------------------------------------------------------------
/images/quickstart/data-track-drawing.gif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/twilio/video-quickstart-ios/d0da993e23f1bc6f632d5807066c702a5ab822be/images/quickstart/data-track-drawing.gif
--------------------------------------------------------------------------------
/images/quickstart/data-track-home.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/twilio/video-quickstart-ios/d0da993e23f1bc6f632d5807066c702a5ab822be/images/quickstart/data-track-home.png
--------------------------------------------------------------------------------
/images/quickstart/enter-room-name.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/twilio/video-quickstart-ios/d0da993e23f1bc6f632d5807066c702a5ab822be/images/quickstart/enter-room-name.jpg
--------------------------------------------------------------------------------
/images/quickstart/generate_access_tokens.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/twilio/video-quickstart-ios/d0da993e23f1bc6f632d5807066c702a5ab822be/images/quickstart/generate_access_tokens.png
--------------------------------------------------------------------------------
/images/quickstart/home-screen.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/twilio/video-quickstart-ios/d0da993e23f1bc6f632d5807066c702a5ab822be/images/quickstart/home-screen.png
--------------------------------------------------------------------------------
/images/quickstart/multi-party-audio-send-bandwidth.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/twilio/video-quickstart-ios/d0da993e23f1bc6f632d5807066c702a5ab822be/images/quickstart/multi-party-audio-send-bandwidth.png
--------------------------------------------------------------------------------
/images/quickstart/multi-party-home-screen.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/twilio/video-quickstart-ios/d0da993e23f1bc6f632d5807066c702a5ab822be/images/quickstart/multi-party-home-screen.png
--------------------------------------------------------------------------------
/images/quickstart/objc-home-screen.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/twilio/video-quickstart-ios/d0da993e23f1bc6f632d5807066c702a5ab822be/images/quickstart/objc-home-screen.png
--------------------------------------------------------------------------------
/images/quickstart/objc-xcode-video-quickstart-token.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/twilio/video-quickstart-ios/d0da993e23f1bc6f632d5807066c702a5ab822be/images/quickstart/objc-xcode-video-quickstart-token.png
--------------------------------------------------------------------------------
/images/quickstart/objc-xcode-video-quickstart.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/twilio/video-quickstart-ios/d0da993e23f1bc6f632d5807066c702a5ab822be/images/quickstart/objc-xcode-video-quickstart.png
--------------------------------------------------------------------------------
/images/quickstart/replaykit-broadcast-mic-ios13-audio-resource-limit.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/twilio/video-quickstart-ios/d0da993e23f1bc6f632d5807066c702a5ab822be/images/quickstart/replaykit-broadcast-mic-ios13-audio-resource-limit.png
--------------------------------------------------------------------------------
/images/quickstart/replaykit-broadcast-picker-ios-13.0.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/twilio/video-quickstart-ios/d0da993e23f1bc6f632d5807066c702a5ab822be/images/quickstart/replaykit-broadcast-picker-ios-13.0.png
--------------------------------------------------------------------------------
/images/quickstart/replaykit-extension-memory.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/twilio/video-quickstart-ios/d0da993e23f1bc6f632d5807066c702a5ab822be/images/quickstart/replaykit-extension-memory.png
--------------------------------------------------------------------------------
/images/quickstart/replaykit-launch-ios11.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/twilio/video-quickstart-ios/d0da993e23f1bc6f632d5807066c702a5ab822be/images/quickstart/replaykit-launch-ios11.png
--------------------------------------------------------------------------------
/images/quickstart/replaykit-launch-ios12.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/twilio/video-quickstart-ios/d0da993e23f1bc6f632d5807066c702a5ab822be/images/quickstart/replaykit-launch-ios12.png
--------------------------------------------------------------------------------
/images/quickstart/replaykit-picker-ios12.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/twilio/video-quickstart-ios/d0da993e23f1bc6f632d5807066c702a5ab822be/images/quickstart/replaykit-picker-ios12.png
--------------------------------------------------------------------------------
/images/quickstart/replaykit-reset-media-services.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/twilio/video-quickstart-ios/d0da993e23f1bc6f632d5807066c702a5ab822be/images/quickstart/replaykit-reset-media-services.png
--------------------------------------------------------------------------------
/images/quickstart/room-connected.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/twilio/video-quickstart-ios/d0da993e23f1bc6f632d5807066c702a5ab822be/images/quickstart/room-connected.png
--------------------------------------------------------------------------------
/images/quickstart/select-audio-device.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/twilio/video-quickstart-ios/d0da993e23f1bc6f632d5807066c702a5ab822be/images/quickstart/select-audio-device.jpg
--------------------------------------------------------------------------------
/images/quickstart/xcode-video-quickstart-token.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/twilio/video-quickstart-ios/d0da993e23f1bc6f632d5807066c702a5ab822be/images/quickstart/xcode-video-quickstart-token.png
--------------------------------------------------------------------------------
/images/quickstart/xcode-video-quickstart.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/twilio/video-quickstart-ios/d0da993e23f1bc6f632d5807066c702a5ab822be/images/quickstart/xcode-video-quickstart.png
--------------------------------------------------------------------------------