├── Podfile
├── LearningOpenTok
│   ├── mic-24.png
│   ├── Images.xcassets
│   │   ├── Contents.json
│   │   ├── mic.imageset
│   │   │   ├── mic.png
│   │   │   ├── mic@2x.png
│   │   │   ├── mic@3x.png
│   │   │   └── Contents.json
│   │   ├── audio.imageset
│   │   │   ├── audio.png
│   │   │   ├── audio@2x.png
│   │   │   ├── audio@3x.png
│   │   │   └── Contents.json
│   │   ├── muted_mic.imageset
│   │   │   ├── muted_mic.png
│   │   │   ├── muted_mic@2x.png
│   │   │   ├── muted_mic@3x.png
│   │   │   └── Contents.json
│   │   ├── noAudio.imageset
│   │   │   ├── noSoundCopy.png
│   │   │   ├── noSoundCopy@2x.png
│   │   │   ├── noSoundCopy@3x.png
│   │   │   └── Contents.json
│   │   ├── switch camera.imageset
│   │   │   ├── switch camera.png
│   │   │   ├── switch camera@2x.png
│   │   │   ├── switch camera@3x.png
│   │   │   └── Contents.json
│   │   └── AppIcon.appiconset
│   │       └── Contents.json
│   ├── mic_muted-24.png
│   ├── camera_switch-33.png
│   ├── Subscriber-Speaker-35.png
│   ├── camera-switch_black-33.png
│   ├── Subscriber-Speaker-35@2x.png
│   ├── Subscriber-Speaker-Mute-35.png
│   ├── Subscriber-Speaker-Mute-35@2x.png
│   ├── AppDelegate.h
│   ├── main.m
│   ├── ViewController.h
│   ├── SampleConfig.h
│   ├── Info.plist
│   ├── AppDelegate.m
│   ├── Base.lproj
│   │   ├── LaunchScreen.xib
│   │   └── Main.storyboard
│   └── ViewController.m
├── LearningOpenTok.xcodeproj
│   ├── project.xcworkspace
│   │   └── contents.xcworkspacedata
│   └── project.pbxproj
├── .gitignore
├── .github
│   └── workflows
│       └── metrics.yml
├── CONTRIBUTING.md
├── test.html
├── CODE_OF_CONDUCT.md
└── README.md
/Podfile:
--------------------------------------------------------------------------------
1 | target 'LearningOpenTok' do
2 | pod 'OpenTok', '= 2.12.0'
3 | end
4 |
--------------------------------------------------------------------------------
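Note: after saving the Podfile, run `pod install` in the project root; CocoaPods will generate a LearningOpenTok.xcworkspace, and you should build from that workspace (not the bare .xcodeproj) so the OpenTok pod is linked into the app target.
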
/LearningOpenTok/mic-24.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/opentok/learning-opentok-ios/HEAD/LearningOpenTok/mic-24.png
--------------------------------------------------------------------------------
/LearningOpenTok/Images.xcassets/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "info" : {
3 | "version" : 1,
4 | "author" : "xcode"
5 | }
6 | }
--------------------------------------------------------------------------------
/LearningOpenTok/mic_muted-24.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/opentok/learning-opentok-ios/HEAD/LearningOpenTok/mic_muted-24.png
--------------------------------------------------------------------------------
/LearningOpenTok/camera_switch-33.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/opentok/learning-opentok-ios/HEAD/LearningOpenTok/camera_switch-33.png
--------------------------------------------------------------------------------
/LearningOpenTok/Subscriber-Speaker-35.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/opentok/learning-opentok-ios/HEAD/LearningOpenTok/Subscriber-Speaker-35.png
--------------------------------------------------------------------------------
/LearningOpenTok/camera-switch_black-33.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/opentok/learning-opentok-ios/HEAD/LearningOpenTok/camera-switch_black-33.png
--------------------------------------------------------------------------------
/LearningOpenTok/Subscriber-Speaker-35@2x.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/opentok/learning-opentok-ios/HEAD/LearningOpenTok/Subscriber-Speaker-35@2x.png
--------------------------------------------------------------------------------
/LearningOpenTok/Subscriber-Speaker-Mute-35.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/opentok/learning-opentok-ios/HEAD/LearningOpenTok/Subscriber-Speaker-Mute-35.png
--------------------------------------------------------------------------------
/LearningOpenTok/Subscriber-Speaker-Mute-35@2x.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/opentok/learning-opentok-ios/HEAD/LearningOpenTok/Subscriber-Speaker-Mute-35@2x.png
--------------------------------------------------------------------------------
/LearningOpenTok/Images.xcassets/mic.imageset/mic.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/opentok/learning-opentok-ios/HEAD/LearningOpenTok/Images.xcassets/mic.imageset/mic.png
--------------------------------------------------------------------------------
/LearningOpenTok/Images.xcassets/audio.imageset/audio.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/opentok/learning-opentok-ios/HEAD/LearningOpenTok/Images.xcassets/audio.imageset/audio.png
--------------------------------------------------------------------------------
/LearningOpenTok/Images.xcassets/mic.imageset/mic@2x.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/opentok/learning-opentok-ios/HEAD/LearningOpenTok/Images.xcassets/mic.imageset/mic@2x.png
--------------------------------------------------------------------------------
/LearningOpenTok/Images.xcassets/mic.imageset/mic@3x.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/opentok/learning-opentok-ios/HEAD/LearningOpenTok/Images.xcassets/mic.imageset/mic@3x.png
--------------------------------------------------------------------------------
/LearningOpenTok/Images.xcassets/audio.imageset/audio@2x.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/opentok/learning-opentok-ios/HEAD/LearningOpenTok/Images.xcassets/audio.imageset/audio@2x.png
--------------------------------------------------------------------------------
/LearningOpenTok/Images.xcassets/audio.imageset/audio@3x.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/opentok/learning-opentok-ios/HEAD/LearningOpenTok/Images.xcassets/audio.imageset/audio@3x.png
--------------------------------------------------------------------------------
/LearningOpenTok/Images.xcassets/muted_mic.imageset/muted_mic.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/opentok/learning-opentok-ios/HEAD/LearningOpenTok/Images.xcassets/muted_mic.imageset/muted_mic.png
--------------------------------------------------------------------------------
/LearningOpenTok/Images.xcassets/noAudio.imageset/noSoundCopy.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/opentok/learning-opentok-ios/HEAD/LearningOpenTok/Images.xcassets/noAudio.imageset/noSoundCopy.png
--------------------------------------------------------------------------------
/LearningOpenTok/Images.xcassets/muted_mic.imageset/muted_mic@2x.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/opentok/learning-opentok-ios/HEAD/LearningOpenTok/Images.xcassets/muted_mic.imageset/muted_mic@2x.png
--------------------------------------------------------------------------------
/LearningOpenTok/Images.xcassets/muted_mic.imageset/muted_mic@3x.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/opentok/learning-opentok-ios/HEAD/LearningOpenTok/Images.xcassets/muted_mic.imageset/muted_mic@3x.png
--------------------------------------------------------------------------------
/LearningOpenTok/Images.xcassets/noAudio.imageset/noSoundCopy@2x.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/opentok/learning-opentok-ios/HEAD/LearningOpenTok/Images.xcassets/noAudio.imageset/noSoundCopy@2x.png
--------------------------------------------------------------------------------
/LearningOpenTok/Images.xcassets/noAudio.imageset/noSoundCopy@3x.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/opentok/learning-opentok-ios/HEAD/LearningOpenTok/Images.xcassets/noAudio.imageset/noSoundCopy@3x.png
--------------------------------------------------------------------------------
/LearningOpenTok/Images.xcassets/switch camera.imageset/switch camera.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/opentok/learning-opentok-ios/HEAD/LearningOpenTok/Images.xcassets/switch camera.imageset/switch camera.png
--------------------------------------------------------------------------------
/LearningOpenTok/Images.xcassets/switch camera.imageset/switch camera@2x.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/opentok/learning-opentok-ios/HEAD/LearningOpenTok/Images.xcassets/switch camera.imageset/switch camera@2x.png
--------------------------------------------------------------------------------
/LearningOpenTok/Images.xcassets/switch camera.imageset/switch camera@3x.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/opentok/learning-opentok-ios/HEAD/LearningOpenTok/Images.xcassets/switch camera.imageset/switch camera@3x.png
--------------------------------------------------------------------------------
/LearningOpenTok.xcodeproj/project.xcworkspace/contents.xcworkspacedata:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <Workspace
3 | version = "1.0">
4 | <FileRef
5 | location = "self:LearningOpenTok.xcodeproj">
6 | </FileRef>
7 | </Workspace>
8 |
--------------------------------------------------------------------------------
/LearningOpenTok/AppDelegate.h:
--------------------------------------------------------------------------------
1 | //
2 | // AppDelegate.h
3 | // Getting Started
4 | //
5 | // Created by Swartz on 11/19/14.
6 | // Copyright (c) 2014 TokBox, Inc. All rights reserved.
7 | //
8 |
9 | #import <UIKit/UIKit.h>
10 |
11 | @interface AppDelegate : UIResponder <UIApplicationDelegate>
12 |
13 | @property (strong, nonatomic) UIWindow *window;
14 |
15 |
16 | @end
17 |
18 |
--------------------------------------------------------------------------------
/LearningOpenTok/main.m:
--------------------------------------------------------------------------------
1 | //
2 | // main.m
3 | // LearningOpenTok
4 | //
5 | // Created by Swartz on 4/17/15.
6 | // Copyright (c) 2015 TokBox. All rights reserved.
7 | //
8 |
9 | #import <UIKit/UIKit.h>
10 | #import "AppDelegate.h"
11 |
12 | int main(int argc, char * argv[]) {
13 | @autoreleasepool {
14 | return UIApplicationMain(argc, argv, nil, NSStringFromClass([AppDelegate class]));
15 | }
16 | }
17 |
--------------------------------------------------------------------------------
/LearningOpenTok/ViewController.h:
--------------------------------------------------------------------------------
1 | //
2 | // ViewController.h
3 | // Getting Started
4 | //
5 | // Created by Jeff Swartz on 11/19/14.
6 | // Copyright (c) 2014 TokBox, Inc. All rights reserved.
7 | //
8 |
9 | #import <UIKit/UIKit.h>
10 | #import <OpenTok/OpenTok.h>
11 | // Save a copy of the SampleConfig.h file named Config.h. See the README file.
12 | #import "Config.h"
13 |
14 | @interface ViewController : UIViewController
15 |
16 |
17 | @end
18 |
--------------------------------------------------------------------------------
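Note that ViewController.h imports Config.h, which is deliberately not checked in: copy SampleConfig.h (below) to a new file named Config.h and set SAMPLE_SERVER_BASE_URL there. Config.h is listed in .gitignore, so your server URL stays out of version control.
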
/.gitignore:
--------------------------------------------------------------------------------
1 | OpenTok.framework/
2 | OpenTok.framework
3 |
4 | untitled folder/
5 | # Xcode
6 | .DS_Store
7 | */build/*
8 | *.pbxuser
9 | !default.pbxuser
10 | *.mode1v3
11 | !default.mode1v3
12 | *.mode2v3
13 | !default.mode2v3
14 | *.perspectivev3
15 | !default.perspectivev3
16 | xcuserdata
17 |
18 | profile
19 | *.moved-aside
20 | DerivedData
21 | .idea/
22 | *.hmap
23 | *.xccheckout
24 | *.xcworkspacedata
25 |
26 | #CocoaPods
27 | Pods
28 | Podfile.lock
29 |
30 | Config.h
31 |
--------------------------------------------------------------------------------
/.github/workflows/metrics.yml:
--------------------------------------------------------------------------------
1 | name: Aggregit
2 |
3 | on:
4 | schedule:
5 | - cron: "0 0 * * *"
6 |
7 | jobs:
8 | recordMetrics:
9 | runs-on: ubuntu-latest
10 | steps:
11 | - uses: michaeljolley/aggregit@v1
12 | with:
13 | githubToken: ${{ secrets.GITHUB_TOKEN }}
14 | project_id: ${{ secrets.project_id }}
15 | private_key: ${{ secrets.private_key }}
16 | client_email: ${{ secrets.client_email }}
17 | firebaseDbUrl: ${{ secrets.firebaseDbUrl }}
18 |
--------------------------------------------------------------------------------
/LearningOpenTok/Images.xcassets/mic.imageset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "idiom" : "universal",
5 | "filename" : "mic.png",
6 | "scale" : "1x"
7 | },
8 | {
9 | "idiom" : "universal",
10 | "filename" : "mic@2x.png",
11 | "scale" : "2x"
12 | },
13 | {
14 | "idiom" : "universal",
15 | "filename" : "mic@3x.png",
16 | "scale" : "3x"
17 | }
18 | ],
19 | "info" : {
20 | "version" : 1,
21 | "author" : "xcode"
22 | }
23 | }
--------------------------------------------------------------------------------
/LearningOpenTok/Images.xcassets/audio.imageset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "idiom" : "universal",
5 | "filename" : "audio.png",
6 | "scale" : "1x"
7 | },
8 | {
9 | "idiom" : "universal",
10 | "filename" : "audio@2x.png",
11 | "scale" : "2x"
12 | },
13 | {
14 | "idiom" : "universal",
15 | "filename" : "audio@3x.png",
16 | "scale" : "3x"
17 | }
18 | ],
19 | "info" : {
20 | "version" : 1,
21 | "author" : "xcode"
22 | }
23 | }
--------------------------------------------------------------------------------
/LearningOpenTok/Images.xcassets/muted_mic.imageset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "idiom" : "universal",
5 | "filename" : "muted_mic.png",
6 | "scale" : "1x"
7 | },
8 | {
9 | "idiom" : "universal",
10 | "filename" : "muted_mic@2x.png",
11 | "scale" : "2x"
12 | },
13 | {
14 | "idiom" : "universal",
15 | "filename" : "muted_mic@3x.png",
16 | "scale" : "3x"
17 | }
18 | ],
19 | "info" : {
20 | "version" : 1,
21 | "author" : "xcode"
22 | }
23 | }
--------------------------------------------------------------------------------
/LearningOpenTok/Images.xcassets/noAudio.imageset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "idiom" : "universal",
5 | "filename" : "noSoundCopy.png",
6 | "scale" : "1x"
7 | },
8 | {
9 | "idiom" : "universal",
10 | "filename" : "noSoundCopy@2x.png",
11 | "scale" : "2x"
12 | },
13 | {
14 | "idiom" : "universal",
15 | "filename" : "noSoundCopy@3x.png",
16 | "scale" : "3x"
17 | }
18 | ],
19 | "info" : {
20 | "version" : 1,
21 | "author" : "xcode"
22 | }
23 | }
--------------------------------------------------------------------------------
/LearningOpenTok/Images.xcassets/switch camera.imageset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "idiom" : "universal",
5 | "filename" : "switch camera.png",
6 | "scale" : "1x"
7 | },
8 | {
9 | "idiom" : "universal",
10 | "filename" : "switch camera@2x.png",
11 | "scale" : "2x"
12 | },
13 | {
14 | "idiom" : "universal",
15 | "filename" : "switch camera@3x.png",
16 | "scale" : "3x"
17 | }
18 | ],
19 | "info" : {
20 | "version" : 1,
21 | "author" : "xcode"
22 | }
23 | }
--------------------------------------------------------------------------------
/LearningOpenTok/SampleConfig.h:
--------------------------------------------------------------------------------
1 | //
2 | // Config.h
3 | // Getting Started
4 | //
5 | // Created by Swartz on 12/1/14.
6 | // Copyright (c) 2014 TokBox, Inc. All rights reserved.
7 | //
8 |
9 | #ifndef Getting_Started_Config_h
10 | #define Getting_Started_Config_h
11 |
12 | /*
13 | Set SAMPLE_SERVER_BASE_URL to the base URL of the web server that implements
14 | the OpenTok PHP Getting Started sample code (see the main README file). This
15 | web service handles the OpenTok-related API calls for obtaining session IDs
16 | and tokens, and for working with archives.
17 | */
18 |
19 | #define SAMPLE_SERVER_BASE_URL @"https://YOUR-SERVER-URL"
20 |
21 | #endif
22 |
--------------------------------------------------------------------------------
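For reference, the credential-fetching code in ViewController.m reads the keys apiKey, sessionId, and token from the GET /session response, so the server is expected to return JSON of roughly this shape (field names come from the parsing code; the values here are placeholders):

{
  "apiKey": "YOUR_API_KEY",
  "sessionId": "YOUR_SESSION_ID",
  "token": "YOUR_TOKEN"
}
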
/LearningOpenTok/Info.plist:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
3 | <plist version="1.0">
4 | <dict>
5 | <key>CFBundleDevelopmentRegion</key>
6 | <string>en</string>
7 | <key>CFBundleExecutable</key>
8 | <string>$(EXECUTABLE_NAME)</string>
9 | <key>CFBundleIdentifier</key>
10 | <string>com.tokbox.$(PRODUCT_NAME:rfc1034identifier)</string>
11 | <key>CFBundleInfoDictionaryVersion</key>
12 | <string>6.0</string>
13 | <key>CFBundleName</key>
14 | <string>$(PRODUCT_NAME)</string>
15 | <key>CFBundleDisplayName</key>
16 | <string>Learn OpenTok</string>
17 | <key>CFBundlePackageType</key>
18 | <string>APPL</string>
19 | <key>CFBundleShortVersionString</key>
20 | <string>1.0</string>
21 | <key>CFBundleSignature</key>
22 | <string>????</string>
23 | <key>CFBundleVersion</key>
24 | <string>1</string>
25 | <key>LSRequiresIPhoneOS</key>
26 | <true/>
27 | <key>UILaunchStoryboardName</key>
28 | <string>LaunchScreen</string>
29 | <key>UIMainStoryboardFile</key>
30 | <string>Main</string>
31 | <key>UIRequiredDeviceCapabilities</key>
32 | <array>
33 | <string>armv7</string>
34 | </array>
35 | <key>UISupportedInterfaceOrientations</key>
36 | <array>
37 | <string>UIInterfaceOrientationPortrait</string>
38 | </array>
39 | <key>UISupportedInterfaceOrientations~ipad</key>
40 | <array>
41 | <string>UIInterfaceOrientationPortrait</string>
42 | <string>UIInterfaceOrientationPortraitUpsideDown</string>
43 | <string>UIInterfaceOrientationLandscapeLeft</string>
44 | <string>UIInterfaceOrientationLandscapeRight</string>
45 | </array>
46 | <key>NSCameraUsageDescription</key>
47 | <string>This app accesses the camera for video communications.</string>
48 | <key>NSMicrophoneUsageDescription</key>
49 | <string>This app accesses the microphone for audio communications.</string>
50 | </dict>
51 | </plist>
52 |
--------------------------------------------------------------------------------
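The NSCameraUsageDescription and NSMicrophoneUsageDescription entries above are required on iOS 10 and later; without them, the system terminates the app as soon as the OpenTok publisher attempts to access the camera or microphone.
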
/LearningOpenTok/Images.xcassets/AppIcon.appiconset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "idiom" : "iphone",
5 | "size" : "20x20",
6 | "scale" : "2x"
7 | },
8 | {
9 | "idiom" : "iphone",
10 | "size" : "20x20",
11 | "scale" : "3x"
12 | },
13 | {
14 | "idiom" : "iphone",
15 | "size" : "29x29",
16 | "scale" : "2x"
17 | },
18 | {
19 | "idiom" : "iphone",
20 | "size" : "29x29",
21 | "scale" : "3x"
22 | },
23 | {
24 | "idiom" : "iphone",
25 | "size" : "40x40",
26 | "scale" : "2x"
27 | },
28 | {
29 | "idiom" : "iphone",
30 | "size" : "40x40",
31 | "scale" : "3x"
32 | },
33 | {
34 | "idiom" : "iphone",
35 | "size" : "60x60",
36 | "scale" : "2x"
37 | },
38 | {
39 | "idiom" : "iphone",
40 | "size" : "60x60",
41 | "scale" : "3x"
42 | },
43 | {
44 | "idiom" : "ipad",
45 | "size" : "20x20",
46 | "scale" : "1x"
47 | },
48 | {
49 | "idiom" : "ipad",
50 | "size" : "20x20",
51 | "scale" : "2x"
52 | },
53 | {
54 | "idiom" : "ipad",
55 | "size" : "29x29",
56 | "scale" : "1x"
57 | },
58 | {
59 | "idiom" : "ipad",
60 | "size" : "29x29",
61 | "scale" : "2x"
62 | },
63 | {
64 | "idiom" : "ipad",
65 | "size" : "40x40",
66 | "scale" : "1x"
67 | },
68 | {
69 | "idiom" : "ipad",
70 | "size" : "40x40",
71 | "scale" : "2x"
72 | },
73 | {
74 | "idiom" : "ipad",
75 | "size" : "76x76",
76 | "scale" : "1x"
77 | },
78 | {
79 | "idiom" : "ipad",
80 | "size" : "76x76",
81 | "scale" : "2x"
82 | },
83 | {
84 | "idiom" : "ipad",
85 | "size" : "83.5x83.5",
86 | "scale" : "2x"
87 | }
88 | ],
89 | "info" : {
90 | "version" : 1,
91 | "author" : "xcode"
92 | }
93 | }
--------------------------------------------------------------------------------
/LearningOpenTok/AppDelegate.m:
--------------------------------------------------------------------------------
1 | //
2 | // AppDelegate.m
3 | // Getting Started
4 | //
5 | // Created by Swartz on 11/19/14.
6 | // Copyright (c) 2014 TokBox, Inc. All rights reserved.
7 | //
8 |
9 | #import "AppDelegate.h"
10 |
11 | @interface AppDelegate ()
12 |
13 | @end
14 |
15 | @implementation AppDelegate
16 |
17 |
18 | - (BOOL)application:(UIApplication *)application didFinishLaunchingWithOptions:(NSDictionary *)launchOptions {
19 | // Override point for customization after application launch.
20 | return YES;
21 | }
22 |
23 | - (void)applicationWillResignActive:(UIApplication *)application {
24 | // Sent when the application is about to move from active to inactive state. This can occur for certain types of temporary interruptions (such as an incoming phone call or SMS message) or when the user quits the application and it begins the transition to the background state.
25 | // Use this method to pause ongoing tasks, disable timers, and throttle down OpenGL ES frame rates. Games should use this method to pause the game.
26 | }
27 |
28 | - (void)applicationDidEnterBackground:(UIApplication *)application {
29 | // Use this method to release shared resources, save user data, invalidate timers, and store enough application state information to restore your application to its current state in case it is terminated later.
30 | // If your application supports background execution, this method is called instead of applicationWillTerminate: when the user quits.
31 | }
32 |
33 | - (void)applicationWillEnterForeground:(UIApplication *)application {
34 | // Called as part of the transition from the background to the inactive state; here you can undo many of the changes made on entering the background.
35 | }
36 |
37 | - (void)applicationDidBecomeActive:(UIApplication *)application {
38 | // Restart any tasks that were paused (or not yet started) while the application was inactive. If the application was previously in the background, optionally refresh the user interface.
39 | }
40 |
41 | - (void)applicationWillTerminate:(UIApplication *)application {
42 | // Called when the application is about to terminate. Save data if appropriate. See also applicationDidEnterBackground:.
43 | }
44 |
45 | @end
46 |
--------------------------------------------------------------------------------
/CONTRIBUTING.md:
--------------------------------------------------------------------------------
1 | # Contributing Guidelines
2 |
3 | For anyone looking to get involved in this project, we are glad to hear from you. Here are a few types of contributions
4 | that we would be interested in hearing about.
5 |
6 | - Bug fixes
7 | - If you find a bug, please first report it using GitHub Issues.
8 | - Issues that have already been identified as a bug will be labelled `bug`.
9 | - If you'd like to submit a fix for a bug, send a Pull Request from your own fork and mention the Issue number.
10 | - Include a test that isolates the bug and verifies that it was fixed.
11 | - New Features
12 | - If you'd like to accomplish something in the library that it doesn't already do, describe the problem in a new GitHub Issue.
13 | - Issues that have been identified as a feature request will be labelled `enhancement`.
14 | - If you'd like to implement the new feature, please wait for feedback from the project maintainers before spending too much time writing the code. In some cases, `enhancement`s may not align well with the project objectives at the time.
15 | - Tests, Documentation, Miscellaneous
16 | - If you think the test coverage could be improved, the documentation could be clearer, you've got an alternative implementation of something that may have more advantages, or you have any other change in mind, we would still be glad to hear about it.
17 | - If it's a trivial change, go ahead and send a Pull Request with the changes you have in mind.
18 | - If not, open a GitHub Issue to discuss the idea first.
19 |
20 | ## Requirements
21 |
22 | For a contribution to be accepted:
23 |
24 | - The test suite must be complete and pass
25 | - Code must follow existing styling conventions
26 | - Commit messages must be descriptive. Related issues should be mentioned by number.
27 |
28 | If the contribution doesn't meet these criteria, a maintainer will discuss it with you on the Issue. You can still continue to add more commits to the branch you have sent the Pull Request from.
29 |
30 | ## How To
31 |
32 | 1. Fork this repository on GitHub.
33 | 1. Clone/fetch your fork to your local development machine.
34 | 1. Create a new branch (e.g. `issue-12`, `feat.add_foo`, etc) and check it out.
35 | 1. Make your changes and commit them. (Did the tests pass?)
36 | 1. Push your new branch to your fork. (e.g. `git push myname issue-12`)
37 | 1. Open a Pull Request from your new branch to the original fork's `master` branch.
38 |
--------------------------------------------------------------------------------
/LearningOpenTok/Base.lproj/LaunchScreen.xib:
--------------------------------------------------------------------------------
(Interface Builder launch-screen XIB; XML contents not captured in this listing.)
--------------------------------------------------------------------------------
/test.html:
--------------------------------------------------------------------------------
(HTML test page; contents not captured in this listing.)
--------------------------------------------------------------------------------
/CODE_OF_CONDUCT.md:
--------------------------------------------------------------------------------
1 | # Contributor Covenant Code of Conduct
2 |
3 | ## Our Pledge
4 |
5 | We as members, contributors, and leaders pledge to make participation in our
6 | community a harassment-free experience for everyone, regardless of age, body
7 | size, visible or invisible disability, ethnicity, sex characteristics, gender
8 | identity and expression, level of experience, education, socio-economic status,
9 | nationality, personal appearance, race, religion, or sexual identity
10 | and orientation.
11 |
12 | We pledge to act and interact in ways that contribute to an open, welcoming,
13 | diverse, inclusive, and healthy community.
14 |
15 | ## Our Standards
16 |
17 | Examples of behavior that contributes to a positive environment for our
18 | community include:
19 |
20 | - Demonstrating empathy and kindness toward other people
21 | - Being respectful of differing opinions, viewpoints, and experiences
22 | - Giving and gracefully accepting constructive feedback
23 | - Accepting responsibility and apologizing to those affected by our mistakes,
24 | and learning from the experience
25 | - Focusing on what is best not just for us as individuals, but for the
26 | overall community
27 |
28 | Examples of unacceptable behavior include:
29 |
30 | - The use of sexualized language or imagery, and sexual attention or
31 | advances of any kind
32 | - Trolling, insulting or derogatory comments, and personal or political attacks
33 | - Public or private harassment
34 | - Publishing others' private information, such as a physical or email
35 | address, without their explicit permission
36 | - Other conduct which could reasonably be considered inappropriate in a
37 | professional setting
38 |
39 | ## Enforcement Responsibilities
40 |
41 | Community leaders are responsible for clarifying and enforcing our standards of
42 | acceptable behavior and will take appropriate and fair corrective action in
43 | response to any behavior that they deem inappropriate, threatening, offensive,
44 | or harmful.
45 |
46 | Community leaders have the right and responsibility to remove, edit, or reject
47 | comments, commits, code, wiki edits, issues, and other contributions that are
48 | not aligned to this Code of Conduct, and will communicate reasons for moderation
49 | decisions when appropriate.
50 |
51 | ## Scope
52 |
53 | This Code of Conduct applies within all community spaces, and also applies when
54 | an individual is officially representing the community in public spaces.
55 | Examples of representing our community include using an official e-mail address,
56 | posting via an official social media account, or acting as an appointed
57 | representative at an online or offline event.
58 |
59 | ## Enforcement
60 |
61 | Instances of abusive, harassing, or otherwise unacceptable behavior may be
62 | reported to the community leaders responsible for enforcement at
63 | devrel@vonage.com.
64 | All complaints will be reviewed and investigated promptly and fairly.
65 |
66 | All community leaders are obligated to respect the privacy and security of the
67 | reporter of any incident.
68 |
69 | ## Enforcement Guidelines
70 |
71 | Community leaders will follow these Community Impact Guidelines in determining
72 | the consequences for any action they deem in violation of this Code of Conduct:
73 |
74 | ### 1. Correction
75 |
76 | **Community Impact**: Use of inappropriate language or other behavior deemed
77 | unprofessional or unwelcome in the community.
78 |
79 | **Consequence**: A private, written warning from community leaders, providing
80 | clarity around the nature of the violation and an explanation of why the
81 | behavior was inappropriate. A public apology may be requested.
82 |
83 | ### 2. Warning
84 |
85 | **Community Impact**: A violation through a single incident or series
86 | of actions.
87 |
88 | **Consequence**: A warning with consequences for continued behavior. No
89 | interaction with the people involved, including unsolicited interaction with
90 | those enforcing the Code of Conduct, for a specified period of time. This
91 | includes avoiding interactions in community spaces as well as external channels
92 | like social media. Violating these terms may lead to a temporary or
93 | permanent ban.
94 |
95 | ### 3. Temporary Ban
96 |
97 | **Community Impact**: A serious violation of community standards, including
98 | sustained inappropriate behavior.
99 |
100 | **Consequence**: A temporary ban from any sort of interaction or public
101 | communication with the community for a specified period of time. No public or
102 | private interaction with the people involved, including unsolicited interaction
103 | with those enforcing the Code of Conduct, is allowed during this period.
104 | Violating these terms may lead to a permanent ban.
105 |
106 | ### 4. Permanent Ban
107 |
108 | **Community Impact**: Demonstrating a pattern of violation of community
109 | standards, including sustained inappropriate behavior, harassment of an
110 | individual, or aggression toward or disparagement of classes of individuals.
111 |
112 | **Consequence**: A permanent ban from any sort of public interaction within
113 | the community.
114 |
115 | ## Attribution
116 |
117 | This Code of Conduct is adapted from the [Contributor Covenant][homepage],
118 | version 2.0, available at
119 | https://www.contributor-covenant.org/version/2/0/code_of_conduct.html.
120 |
121 | Community Impact Guidelines were inspired by [Mozilla's code of conduct
122 | enforcement ladder](https://github.com/mozilla/diversity).
123 |
124 | [homepage]: https://www.contributor-covenant.org
125 |
126 | For answers to common questions about this code of conduct, see the FAQ at
127 | https://www.contributor-covenant.org/faq. Translations are available at
128 | https://www.contributor-covenant.org/translations.
--------------------------------------------------------------------------------
/LearningOpenTok/Base.lproj/Main.storyboard:
--------------------------------------------------------------------------------
(Interface Builder main storyboard; XML contents not captured in this listing.)
--------------------------------------------------------------------------------
/LearningOpenTok/ViewController.m:
--------------------------------------------------------------------------------
1 | //
2 | // ViewController.m
3 | // Getting Started
4 | //
5 | // Created by Jeff Swartz on 11/19/14.
6 | // Copyright (c) 2014 TokBox, Inc. All rights reserved.
7 | //
8 | #import "ViewController.h"
9 | #import <OpenTok/OpenTok.h>
10 |
11 | @interface ViewController () <OTSessionDelegate, OTSubscriberKitDelegate, OTPublisherDelegate>
12 |
13 | @property (weak, nonatomic) IBOutlet UIView *controlsView;
14 | @property (weak, nonatomic) IBOutlet UIView *videoContainerView;
15 | @property (weak, nonatomic) IBOutlet UIView *subscriberView;
16 | @property (weak, nonatomic) IBOutlet UIView *publisherView;
17 | @property (weak, nonatomic) IBOutlet UIButton *swapCameraBtn;
18 | @property (weak, nonatomic) IBOutlet UIButton *publisherAudioBtn;
19 | @property (weak, nonatomic) IBOutlet UIButton *subscriberAudioBtn;
20 |
21 | @end
22 |
23 | @implementation ViewController {
24 | OTSession* _session;
25 | OTPublisher* _publisher;
26 | OTSubscriber* _subscriber;
27 | NSString* _archiveId;
28 | NSString* _apiKey;
29 | NSString* _sessionId;
30 | NSString* _token;
31 | }
32 |
33 | #pragma mark - View lifecycle
34 |
35 | - (void)viewDidLoad
36 | {
37 | [super viewDidLoad];
38 | [self getSessionCredentials];
39 | }
40 |
41 | - (void)getSessionCredentials
42 | {
43 | NSString* urlPath = SAMPLE_SERVER_BASE_URL;
44 | urlPath = [urlPath stringByAppendingString:@"/session"];
45 | NSURL *url = [NSURL URLWithString: urlPath];
46 | NSMutableURLRequest *request = [NSMutableURLRequest requestWithURL:url cachePolicy:NSURLRequestReloadIgnoringLocalAndRemoteCacheData timeoutInterval:10];
47 | [request setHTTPMethod: @"GET"];
48 |
49 | [NSURLConnection sendAsynchronousRequest:request queue:[NSOperationQueue mainQueue] completionHandler:^(NSURLResponse *response, NSData *data, NSError *error){
50 | if (error){
51 | NSLog(@"Error,%@, URL: %@", [error localizedDescription],urlPath);
52 | }
53 | else{
54 | NSDictionary *roomInfo = [NSJSONSerialization JSONObjectWithData:data options:kNilOptions error:nil];
55 | _apiKey = [roomInfo objectForKey:@"apiKey"];
56 | _token = [roomInfo objectForKey:@"token"];
57 | _sessionId = [roomInfo objectForKey:@"sessionId"];
58 |
59 | if(!_apiKey || !_token || !_sessionId) {
60 | NSLog(@"Error invalid response from server, URL: %@",urlPath);
61 | } else {
62 | [self doConnect];
63 | }
64 | }
65 | }];
66 | }
67 |
68 | - (BOOL)prefersStatusBarHidden
69 | {
70 | return YES;
71 | }
72 |
73 | - (BOOL)shouldAutorotateToInterfaceOrientation:
74 | (UIInterfaceOrientation)interfaceOrientation
75 | {
76 | // Return YES for supported orientations
77 | if (UIUserInterfaceIdiomPhone == [[UIDevice currentDevice]
78 | userInterfaceIdiom])
79 | {
80 | return NO;
81 | } else {
82 | return YES;
83 | }
84 | }
85 | #pragma mark - OpenTok methods
86 |
87 | - (void)doConnect
88 | {
89 | // Initialize a new instance of OTSession and begin the connection process.
90 | _session = [[OTSession alloc] initWithApiKey:_apiKey
91 | sessionId:_sessionId
92 | delegate:self];
93 | OTError *error = nil;
94 | [_session connectWithToken:_token error:&error];
95 | if (error)
96 | {
97 | NSLog(@"Unable to connect to session (%@)",
98 | error.localizedDescription);
99 | }
100 | }
101 |
102 | - (void)doPublish
103 | {
104 | OTPublisherSettings *settings = [[OTPublisherSettings alloc] init];
105 | _publisher = [[OTPublisher alloc] initWithDelegate:self settings:settings];
106 |
107 | OTError *error = nil;
108 | [_session publish:_publisher error:&error];
109 | if (error)
110 | {
111 | NSLog(@"Unable to publish (%@)",
112 | error.localizedDescription);
113 | }
114 |
115 | [_publisher.view setFrame:CGRectMake(0, 0, _publisherView.bounds.size.width,
116 | _publisherView.bounds.size.height)];
117 | [_publisherView addSubview:_publisher.view];
118 |
119 |
120 | _publisherAudioBtn.hidden = NO;
121 | [_publisherAudioBtn addTarget:self
122 | action:@selector(togglePublisherMic)
123 | forControlEvents:UIControlEventTouchUpInside];
124 |
125 | _swapCameraBtn.hidden = NO;
126 | [_swapCameraBtn addTarget:self
127 | action:@selector(swapCamera)
128 | forControlEvents:UIControlEventTouchUpInside];
129 | }
130 |
131 |
132 | -(void)togglePublisherMic
133 | {
134 | _publisher.publishAudio = !_publisher.publishAudio;
135 | UIImage *buttonImage;
136 | if (_publisher.publishAudio) {
137 | buttonImage = [UIImage imageNamed: @"mic"];
138 | } else {
139 | buttonImage = [UIImage imageNamed: @"muted_mic"];
140 | }
141 | [_publisherAudioBtn setImage:buttonImage forState:UIControlStateNormal];
142 | }
143 |
144 | -(void)toggleSubscriberAudio
145 | {
146 | _subscriber.subscribeToAudio = !_subscriber.subscribeToAudio;
147 | UIImage *buttonImage;
148 | if (_subscriber.subscribeToAudio) {
149 | buttonImage = [UIImage imageNamed: @"audio"];
150 | } else {
151 | buttonImage = [UIImage imageNamed: @"noAudio"];
152 | }
153 | [_subscriberAudioBtn setImage:buttonImage forState:UIControlStateNormal];
154 | }
155 |
156 | -(void)swapCamera
157 | {
158 | if (_publisher.cameraPosition == AVCaptureDevicePositionFront) {
159 | _publisher.cameraPosition = AVCaptureDevicePositionBack;
160 | } else {
161 | _publisher.cameraPosition = AVCaptureDevicePositionFront;
162 | }
163 | }
164 |
165 | - (void)cleanupPublisher {
166 | [_publisher.view removeFromSuperview];
167 | _publisher = nil;
168 | }
169 |
170 | - (void)doSubscribe:(OTStream*)stream
171 | {
172 | _subscriber = [[OTSubscriber alloc] initWithStream:stream
173 | delegate:self];
174 | OTError *error = nil;
175 | [_session subscribe:_subscriber error:&error];
176 | if (error)
177 | {
178 | NSLog(@"Unable to subscribe (%@)",
179 | error.localizedDescription);
180 | }
181 | }
182 |
183 | - (void)cleanupSubscriber
184 | {
185 | [_subscriber.view removeFromSuperview];
186 | _subscriber = nil;
187 | }
188 |
189 | # pragma mark - OTSession delegate callbacks
190 |
191 | - (void)sessionDidConnect:(OTSession*)session
192 | {
193 | [self doPublish];
194 | }
195 |
196 | - (void)sessionDidDisconnect:(OTSession*)session
197 | {
198 | NSString* alertMessage =
199 | [NSString stringWithFormat:@"Session disconnected: (%@)",
200 | session.sessionId];
201 | NSLog(@"sessionDidDisconnect (%@)", alertMessage);
202 | }
203 |
204 | - (void)session:(OTSession*)session
205 | streamCreated:(OTStream *)stream
206 | {
207 | NSLog(@"session streamCreated (%@)", stream.streamId);
208 |
209 | if (nil == _subscriber)
210 | {
211 | [self doSubscribe:stream];
212 | }
213 | }
214 |
215 | - (void)session:(OTSession*)session
216 | streamDestroyed:(OTStream *)stream
217 | {
218 | NSLog(@"session streamDestroyed (%@)", stream.streamId);
219 |
220 | if ([_subscriber.stream.streamId isEqualToString:stream.streamId])
221 | {
222 | [self cleanupSubscriber];
223 | }
224 | }
225 |
226 | - (void) session:(OTSession *)session
227 | connectionCreated:(OTConnection *)connection
228 | {
229 | NSLog(@"session connectionCreated (%@)", connection.connectionId);
230 | }
231 |
232 | - (void) session:(OTSession *)session
233 | connectionDestroyed:(OTConnection *)connection
234 | {
235 | NSLog(@"session connectionDestroyed (%@)", connection.connectionId);
236 | }
237 |
238 | - (void) session:(OTSession*)session
239 | didFailWithError:(OTError*)error
240 | {
241 | NSLog(@"didFailWithError: (%@)", error);
242 | }
243 |
244 | # pragma mark - OTPublisher delegate callbacks
245 |
246 | - (void)publisher:(OTPublisherKit *)publisher
247 | streamCreated:(OTStream *)stream
248 | {
249 | NSLog(@"Now publishing.");
250 | }
251 |
252 | - (void)publisher:(OTPublisherKit*)publisher
253 | streamDestroyed:(OTStream *)stream
254 | {
255 | [self cleanupPublisher];
256 | }
257 |
258 | - (void)publisher:(OTPublisherKit*)publisher
259 | didFailWithError:(OTError*) error
260 | {
261 | NSLog(@"publisher didFailWithError %@", error);
262 | [self cleanupPublisher];
263 | }
264 |
265 | # pragma mark - OTSubscriber delegate callbacks
266 |
267 | - (void)subscriberDidConnectToStream:(OTSubscriberKit*)subscriber
268 | {
269 | NSLog(@"subscriberDidConnectToStream (%@)",
270 | subscriber.stream.connection.connectionId);
271 | [_subscriber.view setFrame:CGRectMake(0, 0, _subscriberView.bounds.size.width,
272 | _subscriberView.bounds.size.height)];
273 | [_subscriberView addSubview:_subscriber.view];
274 |
275 | _subscriberAudioBtn.hidden = NO;
276 | [_subscriberAudioBtn addTarget:self
277 | action:@selector(toggleSubscriberAudio)
278 | forControlEvents:UIControlEventTouchUpInside];
279 |
280 | }
281 |
282 | - (void)subscriber:(OTSubscriberKit*)subscriber
283 | didFailWithError:(OTError*)error
284 | {
285 | NSLog(@"subscriber %@ didFailWithError %@",
286 | subscriber.stream.streamId,
287 | error);
288 | }
289 |
290 | @end
291 |
--------------------------------------------------------------------------------
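One modernization note on the file above: getSessionCredentials uses NSURLConnection's sendAsynchronousRequest:queue:completionHandler:, which Apple deprecated in iOS 9. A minimal sketch of the same fetch using NSURLSession, assuming the rest of the class is unchanged, might look like this (not part of the sample as shipped):

- (void)getSessionCredentials
{
    NSString *urlPath = [SAMPLE_SERVER_BASE_URL stringByAppendingString:@"/session"];
    NSURL *url = [NSURL URLWithString:urlPath];
    NSURLSessionDataTask *task = [[NSURLSession sharedSession] dataTaskWithURL:url
        completionHandler:^(NSData *data, NSURLResponse *response, NSError *error) {
        if (error) {
            NSLog(@"Error,%@, URL: %@", error.localizedDescription, urlPath);
            return;
        }
        NSDictionary *roomInfo = [NSJSONSerialization JSONObjectWithData:data options:kNilOptions error:nil];
        // NSURLSession completion handlers run on a background queue;
        // hop back to the main thread before touching OTSession or UIKit.
        dispatch_async(dispatch_get_main_queue(), ^{
            self->_apiKey = roomInfo[@"apiKey"];
            self->_token = roomInfo[@"token"];
            self->_sessionId = roomInfo[@"sessionId"];
            if (!self->_apiKey || !self->_token || !self->_sessionId) {
                NSLog(@"Error invalid response from server, URL: %@", urlPath);
            } else {
                [self doConnect];
            }
        });
    }];
    [task resume];
}
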
/LearningOpenTok.xcodeproj/project.pbxproj:
--------------------------------------------------------------------------------
1 | // !$*UTF8*$!
2 | {
3 | archiveVersion = 1;
4 | classes = {
5 | };
6 | objectVersion = 46;
7 | objects = {
8 |
9 | /* Begin PBXBuildFile section */
10 | 1F1C01141AE1E3AD005B4396 /* VideoToolbox.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 1F1C01061AE1E3AD005B4396 /* VideoToolbox.framework */; };
11 | 1F1C01151AE1E3AD005B4396 /* libxml2.dylib in Frameworks */ = {isa = PBXBuildFile; fileRef = 1F1C01071AE1E3AD005B4396 /* libxml2.dylib */; };
12 | 1F1C01161AE1E3AD005B4396 /* libc++.dylib in Frameworks */ = {isa = PBXBuildFile; fileRef = 1F1C01081AE1E3AD005B4396 /* libc++.dylib */; };
13 | 1F1C01171AE1E3AD005B4396 /* libsqlite3.dylib in Frameworks */ = {isa = PBXBuildFile; fileRef = 1F1C01091AE1E3AD005B4396 /* libsqlite3.dylib */; };
14 | 1F1C01181AE1E3AD005B4396 /* OpenGLES.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 1F1C010A1AE1E3AD005B4396 /* OpenGLES.framework */; };
15 | 1F1C01191AE1E3AD005B4396 /* GLKit.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 1F1C010B1AE1E3AD005B4396 /* GLKit.framework */; };
16 | 1F1C011A1AE1E3AD005B4396 /* CoreTelephony.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 1F1C010C1AE1E3AD005B4396 /* CoreTelephony.framework */; };
17 | 1F1C011B1AE1E3AD005B4396 /* SystemConfiguration.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 1F1C010D1AE1E3AD005B4396 /* SystemConfiguration.framework */; };
18 | 1F1C011C1AE1E3AD005B4396 /* AudioToolbox.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 1F1C010E1AE1E3AD005B4396 /* AudioToolbox.framework */; };
19 | 1F1C011D1AE1E3AD005B4396 /* CoreVideo.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 1F1C010F1AE1E3AD005B4396 /* CoreVideo.framework */; };
20 | 1F1C011E1AE1E3AD005B4396 /* CoreMedia.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 1F1C01101AE1E3AD005B4396 /* CoreMedia.framework */; };
21 | 1F1C011F1AE1E3AD005B4396 /* Foundation.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 1F1C01111AE1E3AD005B4396 /* Foundation.framework */; };
22 | 1F1C01201AE1E3AD005B4396 /* CoreGraphics.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 1F1C01121AE1E3AD005B4396 /* CoreGraphics.framework */; };
23 | 1F1C01211AE1E3AD005B4396 /* UIKit.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 1F1C01131AE1E3AD005B4396 /* UIKit.framework */; };
24 | 1F1C01271AE1E5F0005B4396 /* OpenTok.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 1F1C01261AE1E5F0005B4396 /* OpenTok.framework */; };
25 | 1F3FEE661AE19ECA002E2CF8 /* main.m in Sources */ = {isa = PBXBuildFile; fileRef = 1F3FEE651AE19ECA002E2CF8 /* main.m */; };
26 | 1F3FEE691AE19ECA002E2CF8 /* AppDelegate.m in Sources */ = {isa = PBXBuildFile; fileRef = 1F3FEE681AE19ECA002E2CF8 /* AppDelegate.m */; };
27 | 1F3FEE6C1AE19ECA002E2CF8 /* ViewController.m in Sources */ = {isa = PBXBuildFile; fileRef = 1F3FEE6B1AE19ECA002E2CF8 /* ViewController.m */; };
28 | 1F3FEE6F1AE19ECA002E2CF8 /* Main.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 1F3FEE6D1AE19ECA002E2CF8 /* Main.storyboard */; };
29 | 1F3FEE711AE19ECA002E2CF8 /* Images.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 1F3FEE701AE19ECA002E2CF8 /* Images.xcassets */; };
30 | 1F3FEE741AE19ECA002E2CF8 /* LaunchScreen.xib in Resources */ = {isa = PBXBuildFile; fileRef = 1F3FEE721AE19ECA002E2CF8 /* LaunchScreen.xib */; };
31 | 1FB7CCF31AE1F850001F0D1A /* camera_switch-33.png in Resources */ = {isa = PBXBuildFile; fileRef = 1FB7CCEB1AE1F850001F0D1A /* camera_switch-33.png */; };
32 | 1FB7CCF41AE1F850001F0D1A /* camera-switch_black-33.png in Resources */ = {isa = PBXBuildFile; fileRef = 1FB7CCEC1AE1F850001F0D1A /* camera-switch_black-33.png */; };
33 | 1FB7CCF51AE1F850001F0D1A /* mic_muted-24.png in Resources */ = {isa = PBXBuildFile; fileRef = 1FB7CCED1AE1F850001F0D1A /* mic_muted-24.png */; };
34 | 1FB7CCF61AE1F850001F0D1A /* mic-24.png in Resources */ = {isa = PBXBuildFile; fileRef = 1FB7CCEE1AE1F850001F0D1A /* mic-24.png */; };
35 | 1FB7CCF71AE1F850001F0D1A /* Subscriber-Speaker-35.png in Resources */ = {isa = PBXBuildFile; fileRef = 1FB7CCEF1AE1F850001F0D1A /* Subscriber-Speaker-35.png */; };
36 | 1FB7CCF81AE1F850001F0D1A /* Subscriber-Speaker-35@2x.png in Resources */ = {isa = PBXBuildFile; fileRef = 1FB7CCF01AE1F850001F0D1A /* Subscriber-Speaker-35@2x.png */; };
37 | 1FB7CCF91AE1F850001F0D1A /* Subscriber-Speaker-Mute-35.png in Resources */ = {isa = PBXBuildFile; fileRef = 1FB7CCF11AE1F850001F0D1A /* Subscriber-Speaker-Mute-35.png */; };
38 | 1FB7CCFA1AE1F850001F0D1A /* Subscriber-Speaker-Mute-35@2x.png in Resources */ = {isa = PBXBuildFile; fileRef = 1FB7CCF21AE1F850001F0D1A /* Subscriber-Speaker-Mute-35@2x.png */; };
39 | A0C142CF1E95B7E8000120E4 /* Podfile in Resources */ = {isa = PBXBuildFile; fileRef = A0C142CE1E95B7E8000120E4 /* Podfile */; };
40 | /* End PBXBuildFile section */
41 |
42 | /* Begin PBXFileReference section */
43 | 1F1C01061AE1E3AD005B4396 /* VideoToolbox.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = VideoToolbox.framework; path = System/Library/Frameworks/VideoToolbox.framework; sourceTree = SDKROOT; };
44 | 1F1C01071AE1E3AD005B4396 /* libxml2.dylib */ = {isa = PBXFileReference; lastKnownFileType = "compiled.mach-o.dylib"; name = libxml2.dylib; path = usr/lib/libxml2.dylib; sourceTree = SDKROOT; };
45 | 1F1C01081AE1E3AD005B4396 /* libc++.dylib */ = {isa = PBXFileReference; lastKnownFileType = "compiled.mach-o.dylib"; name = "libc++.dylib"; path = "usr/lib/libc++.dylib"; sourceTree = SDKROOT; };
46 | 1F1C01091AE1E3AD005B4396 /* libsqlite3.dylib */ = {isa = PBXFileReference; lastKnownFileType = "compiled.mach-o.dylib"; name = libsqlite3.dylib; path = usr/lib/libsqlite3.dylib; sourceTree = SDKROOT; };
47 | 1F1C010A1AE1E3AD005B4396 /* OpenGLES.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = OpenGLES.framework; path = System/Library/Frameworks/OpenGLES.framework; sourceTree = SDKROOT; };
48 | 1F1C010B1AE1E3AD005B4396 /* GLKit.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = GLKit.framework; path = System/Library/Frameworks/GLKit.framework; sourceTree = SDKROOT; };
49 | 1F1C010C1AE1E3AD005B4396 /* CoreTelephony.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = CoreTelephony.framework; path = System/Library/Frameworks/CoreTelephony.framework; sourceTree = SDKROOT; };
50 | 1F1C010D1AE1E3AD005B4396 /* SystemConfiguration.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = SystemConfiguration.framework; path = System/Library/Frameworks/SystemConfiguration.framework; sourceTree = SDKROOT; };
51 | 1F1C010E1AE1E3AD005B4396 /* AudioToolbox.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = AudioToolbox.framework; path = System/Library/Frameworks/AudioToolbox.framework; sourceTree = SDKROOT; };
52 | 1F1C010F1AE1E3AD005B4396 /* CoreVideo.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = CoreVideo.framework; path = System/Library/Frameworks/CoreVideo.framework; sourceTree = SDKROOT; };
53 | 1F1C01101AE1E3AD005B4396 /* CoreMedia.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = CoreMedia.framework; path = System/Library/Frameworks/CoreMedia.framework; sourceTree = SDKROOT; };
54 | 1F1C01111AE1E3AD005B4396 /* Foundation.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = Foundation.framework; path = System/Library/Frameworks/Foundation.framework; sourceTree = SDKROOT; };
55 | 1F1C01121AE1E3AD005B4396 /* CoreGraphics.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = CoreGraphics.framework; path = System/Library/Frameworks/CoreGraphics.framework; sourceTree = SDKROOT; };
56 | 1F1C01131AE1E3AD005B4396 /* UIKit.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = UIKit.framework; path = System/Library/Frameworks/UIKit.framework; sourceTree = SDKROOT; };
57 | 1F1C01261AE1E5F0005B4396 /* OpenTok.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; path = OpenTok.framework; sourceTree = ""; };
58 | 1F3FEE601AE19ECA002E2CF8 /* LearningOpenTok.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = LearningOpenTok.app; sourceTree = BUILT_PRODUCTS_DIR; };
59 | 1F3FEE641AE19ECA002E2CF8 /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = ""; };
60 | 1F3FEE651AE19ECA002E2CF8 /* main.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = main.m; sourceTree = ""; };
61 | 1F3FEE671AE19ECA002E2CF8 /* AppDelegate.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = AppDelegate.h; sourceTree = ""; };
62 | 1F3FEE681AE19ECA002E2CF8 /* AppDelegate.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = AppDelegate.m; sourceTree = ""; };
63 | 1F3FEE6A1AE19ECA002E2CF8 /* ViewController.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = ViewController.h; sourceTree = ""; };
64 | 1F3FEE6B1AE19ECA002E2CF8 /* ViewController.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = ViewController.m; sourceTree = ""; };
65 | 1F3FEE6E1AE19ECA002E2CF8 /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/Main.storyboard; sourceTree = ""; };
66 | 1F3FEE701AE19ECA002E2CF8 /* Images.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Images.xcassets; sourceTree = ""; };
67 | 1F3FEE731AE19ECA002E2CF8 /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.xib; name = Base; path = Base.lproj/LaunchScreen.xib; sourceTree = ""; };
68 | 1F3FEE891AE1A233002E2CF8 /* Config.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = Config.h; sourceTree = ""; };
69 | 1FB7CCEB1AE1F850001F0D1A /* camera_switch-33.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "camera_switch-33.png"; sourceTree = ""; };
70 | 1FB7CCEC1AE1F850001F0D1A /* camera-switch_black-33.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "camera-switch_black-33.png"; sourceTree = ""; };
71 | 1FB7CCED1AE1F850001F0D1A /* mic_muted-24.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "mic_muted-24.png"; sourceTree = ""; };
72 | 1FB7CCEE1AE1F850001F0D1A /* mic-24.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "mic-24.png"; sourceTree = ""; };
73 | 1FB7CCEF1AE1F850001F0D1A /* Subscriber-Speaker-35.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "Subscriber-Speaker-35.png"; sourceTree = ""; };
74 | 1FB7CCF01AE1F850001F0D1A /* Subscriber-Speaker-35@2x.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "Subscriber-Speaker-35@2x.png"; sourceTree = ""; };
75 | 1FB7CCF11AE1F850001F0D1A /* Subscriber-Speaker-Mute-35.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "Subscriber-Speaker-Mute-35.png"; sourceTree = ""; };
76 | 1FB7CCF21AE1F850001F0D1A /* Subscriber-Speaker-Mute-35@2x.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "Subscriber-Speaker-Mute-35@2x.png"; sourceTree = ""; };
77 | A0C142CE1E95B7E8000120E4 /* Podfile */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text; path = Podfile; sourceTree = ""; };
78 | /* End PBXFileReference section */
79 |
80 | /* Begin PBXFrameworksBuildPhase section */
81 | 1F3FEE5D1AE19ECA002E2CF8 /* Frameworks */ = {
82 | isa = PBXFrameworksBuildPhase;
83 | buildActionMask = 2147483647;
84 | files = (
85 | 1F1C01201AE1E3AD005B4396 /* CoreGraphics.framework in Frameworks */,
86 | 1F1C01271AE1E5F0005B4396 /* OpenTok.framework in Frameworks */,
87 | 1F1C01181AE1E3AD005B4396 /* OpenGLES.framework in Frameworks */,
88 | 1F1C011E1AE1E3AD005B4396 /* CoreMedia.framework in Frameworks */,
89 | 1F1C01141AE1E3AD005B4396 /* VideoToolbox.framework in Frameworks */,
90 | 1F1C011A1AE1E3AD005B4396 /* CoreTelephony.framework in Frameworks */,
91 | 1F1C01211AE1E3AD005B4396 /* UIKit.framework in Frameworks */,
92 | 1F1C01161AE1E3AD005B4396 /* libc++.dylib in Frameworks */,
93 | 1F1C01171AE1E3AD005B4396 /* libsqlite3.dylib in Frameworks */,
94 | 1F1C011C1AE1E3AD005B4396 /* AudioToolbox.framework in Frameworks */,
95 | 1F1C011F1AE1E3AD005B4396 /* Foundation.framework in Frameworks */,
96 | 1F1C01191AE1E3AD005B4396 /* GLKit.framework in Frameworks */,
97 | 1F1C011B1AE1E3AD005B4396 /* SystemConfiguration.framework in Frameworks */,
98 | 1F1C01151AE1E3AD005B4396 /* libxml2.dylib in Frameworks */,
99 | 1F1C011D1AE1E3AD005B4396 /* CoreVideo.framework in Frameworks */,
100 | );
101 | runOnlyForDeploymentPostprocessing = 0;
102 | };
103 | /* End PBXFrameworksBuildPhase section */
104 |
105 | /* Begin PBXGroup section */
106 | 1F1C01051AE1E399005B4396 /* Frameworks */ = {
107 | isa = PBXGroup;
108 | children = (
109 | 1F1C01261AE1E5F0005B4396 /* OpenTok.framework */,
110 | 1F1C01061AE1E3AD005B4396 /* VideoToolbox.framework */,
111 | 1F1C01071AE1E3AD005B4396 /* libxml2.dylib */,
112 | 1F1C01081AE1E3AD005B4396 /* libc++.dylib */,
113 | 1F1C01091AE1E3AD005B4396 /* libsqlite3.dylib */,
114 | 1F1C010A1AE1E3AD005B4396 /* OpenGLES.framework */,
115 | 1F1C010B1AE1E3AD005B4396 /* GLKit.framework */,
116 | 1F1C010C1AE1E3AD005B4396 /* CoreTelephony.framework */,
117 | 1F1C010D1AE1E3AD005B4396 /* SystemConfiguration.framework */,
118 | 1F1C010E1AE1E3AD005B4396 /* AudioToolbox.framework */,
119 | 1F1C010F1AE1E3AD005B4396 /* CoreVideo.framework */,
120 | 1F1C01101AE1E3AD005B4396 /* CoreMedia.framework */,
121 | 1F1C01111AE1E3AD005B4396 /* Foundation.framework */,
122 | 1F1C01121AE1E3AD005B4396 /* CoreGraphics.framework */,
123 | 1F1C01131AE1E3AD005B4396 /* UIKit.framework */,
124 | );
125 | name = Frameworks;
126 | sourceTree = "";
127 | };
128 | 1F3FEE571AE19ECA002E2CF8 = {
129 | isa = PBXGroup;
130 | children = (
131 | A0C142CE1E95B7E8000120E4 /* Podfile */,
132 | 1F3FEE621AE19ECA002E2CF8 /* LearningOpenTok */,
133 | 1F1C01051AE1E399005B4396 /* Frameworks */,
134 | 1F3FEE611AE19ECA002E2CF8 /* Products */,
135 | );
136 | sourceTree = "";
137 | };
138 | 1F3FEE611AE19ECA002E2CF8 /* Products */ = {
139 | isa = PBXGroup;
140 | children = (
141 | 1F3FEE601AE19ECA002E2CF8 /* LearningOpenTok.app */,
142 | );
143 | name = Products;
144 | sourceTree = "";
145 | };
146 | 1F3FEE621AE19ECA002E2CF8 /* LearningOpenTok */ = {
147 | isa = PBXGroup;
148 | children = (
149 | 1F3FEE671AE19ECA002E2CF8 /* AppDelegate.h */,
150 | 1F3FEE681AE19ECA002E2CF8 /* AppDelegate.m */,
151 | 1F3FEE891AE1A233002E2CF8 /* Config.h */,
152 | 1F3FEE6A1AE19ECA002E2CF8 /* ViewController.h */,
153 | 1F3FEE6B1AE19ECA002E2CF8 /* ViewController.m */,
154 | 1F3FEE6D1AE19ECA002E2CF8 /* Main.storyboard */,
155 | 1F3FEE701AE19ECA002E2CF8 /* Images.xcassets */,
156 | 1F3FEE721AE19ECA002E2CF8 /* LaunchScreen.xib */,
157 | 1F3FEE631AE19ECA002E2CF8 /* Supporting Files */,
158 | );
159 | path = LearningOpenTok;
160 | sourceTree = "";
161 | };
162 | 1F3FEE631AE19ECA002E2CF8 /* Supporting Files */ = {
163 | isa = PBXGroup;
164 | children = (
165 | 1F3FEE641AE19ECA002E2CF8 /* Info.plist */,
166 | 1F3FEE651AE19ECA002E2CF8 /* main.m */,
167 | 1FB7CCEB1AE1F850001F0D1A /* camera_switch-33.png */,
168 | 1FB7CCEC1AE1F850001F0D1A /* camera-switch_black-33.png */,
169 | 1FB7CCED1AE1F850001F0D1A /* mic_muted-24.png */,
170 | 1FB7CCEE1AE1F850001F0D1A /* mic-24.png */,
171 | 1FB7CCEF1AE1F850001F0D1A /* Subscriber-Speaker-35.png */,
172 | 1FB7CCF01AE1F850001F0D1A /* Subscriber-Speaker-35@2x.png */,
173 | 1FB7CCF11AE1F850001F0D1A /* Subscriber-Speaker-Mute-35.png */,
174 | 1FB7CCF21AE1F850001F0D1A /* Subscriber-Speaker-Mute-35@2x.png */,
175 | );
176 | name = "Supporting Files";
177 | sourceTree = "<group>";
178 | };
179 | /* End PBXGroup section */
180 |
181 | /* Begin PBXNativeTarget section */
182 | 1F3FEE5F1AE19ECA002E2CF8 /* LearningOpenTok */ = {
183 | isa = PBXNativeTarget;
184 | buildConfigurationList = 1F3FEE831AE19ECA002E2CF8 /* Build configuration list for PBXNativeTarget "LearningOpenTok" */;
185 | buildPhases = (
186 | 1F3FEE5C1AE19ECA002E2CF8 /* Sources */,
187 | 1F3FEE5D1AE19ECA002E2CF8 /* Frameworks */,
188 | 1F3FEE5E1AE19ECA002E2CF8 /* Resources */,
189 | );
190 | buildRules = (
191 | );
192 | dependencies = (
193 | );
194 | name = LearningOpenTok;
195 | productName = LearningOpenTok;
196 | productReference = 1F3FEE601AE19ECA002E2CF8 /* LearningOpenTok.app */;
197 | productType = "com.apple.product-type.application";
198 | };
199 | /* End PBXNativeTarget section */
200 |
201 | /* Begin PBXProject section */
202 | 1F3FEE581AE19ECA002E2CF8 /* Project object */ = {
203 | isa = PBXProject;
204 | attributes = {
205 | LastUpgradeCheck = 0600;
206 | ORGANIZATIONNAME = TokBox;
207 | TargetAttributes = {
208 | 1F3FEE5F1AE19ECA002E2CF8 = {
209 | CreatedOnToolsVersion = 6.0.1;
210 | };
211 | };
212 | };
213 | buildConfigurationList = 1F3FEE5B1AE19ECA002E2CF8 /* Build configuration list for PBXProject "LearningOpenTok" */;
214 | compatibilityVersion = "Xcode 3.2";
215 | developmentRegion = English;
216 | hasScannedForEncodings = 0;
217 | knownRegions = (
218 | en,
219 | Base,
220 | );
221 | mainGroup = 1F3FEE571AE19ECA002E2CF8;
222 | productRefGroup = 1F3FEE611AE19ECA002E2CF8 /* Products */;
223 | projectDirPath = "";
224 | projectRoot = "";
225 | targets = (
226 | 1F3FEE5F1AE19ECA002E2CF8 /* LearningOpenTok */,
227 | );
228 | };
229 | /* End PBXProject section */
230 |
231 | /* Begin PBXResourcesBuildPhase section */
232 | 1F3FEE5E1AE19ECA002E2CF8 /* Resources */ = {
233 | isa = PBXResourcesBuildPhase;
234 | buildActionMask = 2147483647;
235 | files = (
236 | 1FB7CCF61AE1F850001F0D1A /* mic-24.png in Resources */,
237 | 1FB7CCF31AE1F850001F0D1A /* camera_switch-33.png in Resources */,
238 | 1F3FEE6F1AE19ECA002E2CF8 /* Main.storyboard in Resources */,
239 | 1FB7CCF81AE1F850001F0D1A /* Subscriber-Speaker-35@2x.png in Resources */,
240 | 1FB7CCF91AE1F850001F0D1A /* Subscriber-Speaker-Mute-35.png in Resources */,
241 | 1F3FEE741AE19ECA002E2CF8 /* LaunchScreen.xib in Resources */,
242 | A0C142CF1E95B7E8000120E4 /* Podfile in Resources */,
243 | 1FB7CCF51AE1F850001F0D1A /* mic_muted-24.png in Resources */,
244 | 1FB7CCF71AE1F850001F0D1A /* Subscriber-Speaker-35.png in Resources */,
245 | 1FB7CCFA1AE1F850001F0D1A /* Subscriber-Speaker-Mute-35@2x.png in Resources */,
246 | 1F3FEE711AE19ECA002E2CF8 /* Images.xcassets in Resources */,
247 | 1FB7CCF41AE1F850001F0D1A /* camera-switch_black-33.png in Resources */,
248 | );
249 | runOnlyForDeploymentPostprocessing = 0;
250 | };
251 | /* End PBXResourcesBuildPhase section */
252 |
253 | /* Begin PBXSourcesBuildPhase section */
254 | 1F3FEE5C1AE19ECA002E2CF8 /* Sources */ = {
255 | isa = PBXSourcesBuildPhase;
256 | buildActionMask = 2147483647;
257 | files = (
258 | 1F3FEE6C1AE19ECA002E2CF8 /* ViewController.m in Sources */,
259 | 1F3FEE691AE19ECA002E2CF8 /* AppDelegate.m in Sources */,
260 | 1F3FEE661AE19ECA002E2CF8 /* main.m in Sources */,
261 | );
262 | runOnlyForDeploymentPostprocessing = 0;
263 | };
264 | /* End PBXSourcesBuildPhase section */
265 |
266 | /* Begin PBXVariantGroup section */
267 | 1F3FEE6D1AE19ECA002E2CF8 /* Main.storyboard */ = {
268 | isa = PBXVariantGroup;
269 | children = (
270 | 1F3FEE6E1AE19ECA002E2CF8 /* Base */,
271 | );
272 | name = Main.storyboard;
273 | sourceTree = "<group>";
274 | };
275 | 1F3FEE721AE19ECA002E2CF8 /* LaunchScreen.xib */ = {
276 | isa = PBXVariantGroup;
277 | children = (
278 | 1F3FEE731AE19ECA002E2CF8 /* Base */,
279 | );
280 | name = LaunchScreen.xib;
281 | sourceTree = "<group>";
282 | };
283 | /* End PBXVariantGroup section */
284 |
285 | /* Begin XCBuildConfiguration section */
286 | 1F3FEE811AE19ECA002E2CF8 /* Debug */ = {
287 | isa = XCBuildConfiguration;
288 | buildSettings = {
289 | ALWAYS_SEARCH_USER_PATHS = NO;
290 | CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x";
291 | CLANG_CXX_LIBRARY = "libc++";
292 | CLANG_ENABLE_MODULES = YES;
293 | CLANG_ENABLE_OBJC_ARC = YES;
294 | CLANG_WARN_BOOL_CONVERSION = YES;
295 | CLANG_WARN_CONSTANT_CONVERSION = YES;
296 | CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
297 | CLANG_WARN_EMPTY_BODY = YES;
298 | CLANG_WARN_ENUM_CONVERSION = YES;
299 | CLANG_WARN_INT_CONVERSION = YES;
300 | CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
301 | CLANG_WARN_UNREACHABLE_CODE = YES;
302 | CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
303 | "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer";
304 | COPY_PHASE_STRIP = NO;
305 | ENABLE_STRICT_OBJC_MSGSEND = YES;
306 | GCC_C_LANGUAGE_STANDARD = gnu99;
307 | GCC_DYNAMIC_NO_PIC = NO;
308 | GCC_OPTIMIZATION_LEVEL = 0;
309 | GCC_PREPROCESSOR_DEFINITIONS = (
310 | "DEBUG=1",
311 | "$(inherited)",
312 | );
313 | GCC_SYMBOLS_PRIVATE_EXTERN = NO;
314 | GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
315 | GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
316 | GCC_WARN_UNDECLARED_SELECTOR = YES;
317 | GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
318 | GCC_WARN_UNUSED_FUNCTION = YES;
319 | GCC_WARN_UNUSED_VARIABLE = YES;
320 | IPHONEOS_DEPLOYMENT_TARGET = 9.0;
321 | MTL_ENABLE_DEBUG_INFO = YES;
322 | ONLY_ACTIVE_ARCH = YES;
323 | SDKROOT = iphoneos;
324 | TARGETED_DEVICE_FAMILY = "1,2";
325 | };
326 | name = Debug;
327 | };
328 | 1F3FEE821AE19ECA002E2CF8 /* Release */ = {
329 | isa = XCBuildConfiguration;
330 | buildSettings = {
331 | ALWAYS_SEARCH_USER_PATHS = NO;
332 | CLANG_CXX_LANGUAGE_STANDARD = "gnu++0x";
333 | CLANG_CXX_LIBRARY = "libc++";
334 | CLANG_ENABLE_MODULES = YES;
335 | CLANG_ENABLE_OBJC_ARC = YES;
336 | CLANG_WARN_BOOL_CONVERSION = YES;
337 | CLANG_WARN_CONSTANT_CONVERSION = YES;
338 | CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
339 | CLANG_WARN_EMPTY_BODY = YES;
340 | CLANG_WARN_ENUM_CONVERSION = YES;
341 | CLANG_WARN_INT_CONVERSION = YES;
342 | CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
343 | CLANG_WARN_UNREACHABLE_CODE = YES;
344 | CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
345 | "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Developer";
346 | COPY_PHASE_STRIP = YES;
347 | ENABLE_NS_ASSERTIONS = NO;
348 | ENABLE_STRICT_OBJC_MSGSEND = YES;
349 | GCC_C_LANGUAGE_STANDARD = gnu99;
350 | GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
351 | GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
352 | GCC_WARN_UNDECLARED_SELECTOR = YES;
353 | GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
354 | GCC_WARN_UNUSED_FUNCTION = YES;
355 | GCC_WARN_UNUSED_VARIABLE = YES;
356 | IPHONEOS_DEPLOYMENT_TARGET = 9.0;
357 | MTL_ENABLE_DEBUG_INFO = NO;
358 | SDKROOT = iphoneos;
359 | TARGETED_DEVICE_FAMILY = "1,2";
360 | VALIDATE_PRODUCT = YES;
361 | };
362 | name = Release;
363 | };
364 | 1F3FEE841AE19ECA002E2CF8 /* Debug */ = {
365 | isa = XCBuildConfiguration;
366 | buildSettings = {
367 | ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
368 | FRAMEWORK_SEARCH_PATHS = (
369 | "$(inherited)",
370 | "$(PROJECT_DIR)",
371 | );
372 | INFOPLIST_FILE = LearningOpenTok/Info.plist;
373 | LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks";
374 | PRODUCT_NAME = "$(TARGET_NAME)";
375 | };
376 | name = Debug;
377 | };
378 | 1F3FEE851AE19ECA002E2CF8 /* Release */ = {
379 | isa = XCBuildConfiguration;
380 | buildSettings = {
381 | ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
382 | FRAMEWORK_SEARCH_PATHS = (
383 | "$(inherited)",
384 | "$(PROJECT_DIR)",
385 | );
386 | INFOPLIST_FILE = LearningOpenTok/Info.plist;
387 | LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks";
388 | PRODUCT_NAME = "$(TARGET_NAME)";
389 | };
390 | name = Release;
391 | };
392 | /* End XCBuildConfiguration section */
393 |
394 | /* Begin XCConfigurationList section */
395 | 1F3FEE5B1AE19ECA002E2CF8 /* Build configuration list for PBXProject "LearningOpenTok" */ = {
396 | isa = XCConfigurationList;
397 | buildConfigurations = (
398 | 1F3FEE811AE19ECA002E2CF8 /* Debug */,
399 | 1F3FEE821AE19ECA002E2CF8 /* Release */,
400 | );
401 | defaultConfigurationIsVisible = 0;
402 | defaultConfigurationName = Release;
403 | };
404 | 1F3FEE831AE19ECA002E2CF8 /* Build configuration list for PBXNativeTarget "LearningOpenTok" */ = {
405 | isa = XCConfigurationList;
406 | buildConfigurations = (
407 | 1F3FEE841AE19ECA002E2CF8 /* Debug */,
408 | 1F3FEE851AE19ECA002E2CF8 /* Release */,
409 | );
410 | defaultConfigurationIsVisible = 0;
411 | defaultConfigurationName = Release;
412 | };
413 | /* End XCConfigurationList section */
414 | };
415 | rootObject = 1F3FEE581AE19ECA002E2CF8 /* Project object */;
416 | }
417 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Learning OpenTok iOS Sample App
2 |
3 |
4 |
5 | This sample app shows how to accomplish basic tasks using the OpenTok iOS SDK.
6 | It connects the user with another client so that they can share an OpenTok audio-video
7 | chat session. Additionally, the app uses the OpenTok iOS SDK to implement the following:
8 |
9 | * Controls for muting the audio of each participant
10 | * A control for switching the camera used (between the front and back)
11 | * Text chat for the participants
12 | * The ability to record the chat session, stop the recording, and view the recording
13 | * A simple custom audio driver for audio input and output
14 | * A custom video renderer
15 | * A simple custom video capturer
16 | * A custom video capturer that uses the device camera
17 | * Publishing a screen-sharing stream
18 |
19 | The code for this sample is found in the following git branches:
20 |
21 | * *basics* -- This branch shows you how to set up your project to use the OpenTok iOS SDK.
22 | You will also learn how to connect to an OpenTok session, publish a stream to the session,
23 | and subscribe to a stream in the OpenTok session.
24 |
25 | * *archiving* -- This branch shows you how to record the session.
26 |
27 | * *signaling* -- This branch shows you how to implement text chat using the OpenTok
28 | signaling API.
29 |
30 | * *audio-driver* -- This branch shows you how to implement a custom audio driver.
31 |
32 | * *video-renderer-basic* -- This branch shows the basics of implementing a custom video renderer
33 | for an OpenTok publisher.
34 |
35 | * *video-capturer-basic* -- This branch shows the basics of implementing a custom video capturer
36 | for an OpenTok publisher.
37 |
38 | * *video-capturer-camera* -- This branch shows you how to use a custom video capturer using
39 | the device camera as the video source.
40 |
41 | * *screen-sharing* -- This branch shows you how to use the device's screen (instead of a
42 | camera) as the video source for a published stream.
43 |
44 | You will also need to clone the learning-opentok-php repo and run its code on a
45 | PHP-enabled web server. See the Basics section for more information.
46 |
47 | ## Basics
48 |
49 | *Important* -- To view the code for this functionality, switch to the *basics* branch
50 | of this git repository.
51 |
52 | The code in this branch shows you how to:
53 |
54 | * Set up your project to use the OpenTok iOS SDK.
55 | * Connect to an OpenTok session
56 | * Publish a stream to the session
57 | * Subscribe to a stream in the OpenTok session
58 |
59 | ### Starting point
60 |
61 | Before you can test the application, you need to make some settings in Xcode and set up
62 | a web service to handle some OpenTok-related API calls.
63 |
64 | This project uses CocoaPods to load the OpenTok SDK:
65 |
66 | 1. Install CocoaPods as described in [CocoaPods Getting
67 | Started](https://guides.cocoapods.org/using/getting-started.html#getting-started).
68 |
69 | 2. In Terminal, `cd` to your project directory and enter the following:
70 |
71 | `pod install`
72 |
73 | 3. Open the LearningOpenTok.xcworkspace file in Xcode.
74 |
75 | You will notice the project is missing a header file -- Config.h. To fix this,
76 | copy the SampleConfig.h file to a Config.h file.
77 |
78 | * Copy the contents of the SampleConfig.h file to the clipboard. Then select
79 | File > New > File (Command-N). In the dialog that is displayed, select
80 | Header File, click Next, and save the file as Config.h. Then paste in the copied contents.
81 |
82 | We will set values for the constants defined in this file in a later step.
83 |
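For reference, a minimal sketch of what the resulting Config.h might contain (the
authoritative list of constants is in SampleConfig.h):

    #ifndef Config_h
    #define Config_h

    // Base URL of the web service that returns OpenTok session credentials
    // (see "Connecting to the session" below).
    #define SAMPLE_SERVER_BASE_URL @"https://YOUR-SERVER-URL"

    #endif /* Config_h */
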
84 | ### Creating a session and defining archive REST API calls
85 |
86 | Before you can test the application, you need to set up a web service to handle some
87 | OpenTok-related API calls. The web service securely creates an OpenTok session.
88 |
89 | The [Learning OpenTok PHP](https://github.com/opentok/learning-opentok-php) repo includes code
90 | for setting up a web service that handles the following API calls:
91 |
92 | * "/service" -- The iOS client calls this endpoint to get an OpenTok session ID, token,
93 | and API key.
94 |
95 | * "/start" -- The iOS client calls this endpoint to start recording the OpenTok session to
96 | an archive.
97 |
98 | * "/stop" -- The iOS client calls this endpoint to stop recording the archive.
99 |
100 | * "/view" -- The iOS client load this endpoint in a web browser to display the archive
101 | recording.
102 |
106 | Download the repo and run its code on a PHP-enabled web server. You can also deploy and
107 | run the code on Heroku (so you don't have to set up your own PHP server). See the readme
108 | file in the learning-opentok-php repo for instructions.
109 |
110 | The web service also creates a token that the client uses to connect to the OpenTok session.
111 | The HTTP GET request to the /session endpoint returns a response that includes the OpenTok
112 | session ID and token.
113 |
114 | You will want to authenticate each user (using your own server-side authentication techniques)
115 | before sending an OpenTok token. Otherwise, malicious users could call your web service and
116 | use tokens, causing streaming minutes to be charged to your OpenTok developer account. Also,
117 | it is a best practice to use an HTTPS URL for the web service that returns an OpenTok token,
118 | so that it cannot be intercepted and misused.
119 |
120 | ### Connecting to the session
121 |
122 | First, set the app to use the web service described in the previous section:
123 |
124 | * In Xcode, open the Config.h file (see "Starting Point"). Add the base URL
125 | (such as `@"http://example.com"`) in this line:
126 |
127 | #define SAMPLE_SERVER_BASE_URL @"https://YOUR-SERVER-URL"
128 |
129 | In a production application, you will always want to use a web service to obtain a unique token
130 | each time a user connects to an OpenTok session.
131 |
138 | You can now test the app in the debugger. On successfully connecting to the session, the
139 | app logs "Session Connected" to the debug console.
140 |
141 | An OpenTok session connects different clients, letting them share audio-video streams and
142 | send messages. Clients in the same session can include iOS, Android, and web browsers.
143 |
144 | **Session ID** -- Each client that connects to the session needs the session ID, which identifies
145 | the session. Think of a session as a room in which clients meet. Depending on the requirements of your application, you will either reuse the same session (and session ID) repeatedly or generate
146 | new session IDs for new groups of clients.
147 |
148 | *Important:* This demo application assumes that only two clients -- the local iOS client and another
149 | client -- will connect in the same OpenTok session. For test purposes, you can reuse the same
150 | session ID each time two clients connect. However, in a production application, your server-side
151 | code must create a unique session ID for each pair of clients. In other applications, you may want
152 | to connect many clients in one OpenTok session (for instance, a meeting room) and connect others
153 | in another session (another meeting room). For examples of apps that connect users in different
154 | ways, see the OpenTok ScheduleKit, Presence Kit, and Link Kit [Starter Kit apps][3].
155 |
156 | Since this app uses the OpenTok archiving feature to record the session, the session must be set
157 | to use the `routed` media mode, indicating that it will use the OpenTok Media Router. The OpenTok
158 | Media Router provides other advanced features (see [The OpenTok Media Router and media modes][4]).
159 | If your application does not require the features provided by the OpenTok Media Router, you can set
160 | the media mode to `relayed`.
161 |
162 | **Token** -- The client also needs a token, which grants them access to the session. Each client is
163 | issued a unique token when they connect to the session. Since the user publishes an audio-video stream to the session, the token generated must include the publish role (the default). For more
164 | information about tokens, see the OpenTok [Token creation overview][5].
165 |
166 | **API key** -- The API key identifies your OpenTok developer account.
167 |
168 | Upon starting up, the application calls the `[self getSessionCredentials]` method (defined in the
169 | ViewController.m file). This method calls a web service that provides an OpenTok session ID, API key, and token to be used by the client. In the Config.h file (see the previous section), set the
170 | `SAMPLE_SERVER_BASE_URL` constant to the base URL of the web service that handles OpenTok-related
171 | API calls:
172 |
173 | #define SAMPLE_SERVER_BASE_URL @"http://YOUR-SERVER-URL/"
174 |
175 | The "/session" endpoint of the web service returns an HTTP response that includes the session ID,
176 | the token, and the API key, formatted as JSON data:
177 |
178 | {
179 | "sessionId": "2_MX40NDQ0MzEyMn5-fn4",
180 | "apiKey": "12345",
181 | "token": "T1==cGFydG5lcl9pZD00jg="
182 | }
183 |
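The networking details are in ViewController.m. As a rough sketch (not the app's exact
implementation), such a request could be made with NSURLSession; the `_apiKey`,
`_sessionId`, and `_token` names below match the properties used elsewhere in this README,
and the parsing is illustrative:

    - (void)getSessionCredentials
    {
        NSString *urlPath =
            [SAMPLE_SERVER_BASE_URL stringByAppendingString:@"/session"];
        NSURL *url = [NSURL URLWithString:urlPath];
        NSURLSessionDataTask *task = [[NSURLSession sharedSession]
            dataTaskWithURL:url
            completionHandler:^(NSData *data, NSURLResponse *response, NSError *error) {
                if (error) {
                    NSLog(@"Unable to fetch session credentials (%@)",
                          error.localizedDescription);
                    return;
                }
                NSDictionary *json =
                    [NSJSONSerialization JSONObjectWithData:data options:0 error:nil];
                _apiKey = json[@"apiKey"];
                _sessionId = json[@"sessionId"];
                _token = json[@"token"];
                // Connect on the main queue once the credentials are available.
                dispatch_async(dispatch_get_main_queue(), ^{
                    [self doConnect];
                });
            }];
        [task resume];
    }
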
184 | Upon obtaining the session ID, token, and API key, the app calls the `[self doConnect]` method to
185 | initialize an OTSession object and connect to the OpenTok session:
186 |
187 | - (void)doConnect
188 | {
189 | // Initialize a new instance of OTSession and begin the connection process.
190 | _session = [[OTSession alloc] initWithApiKey:_apiKey
191 | sessionId:_sessionId
192 | delegate:self];
193 | OTError *error = nil;
194 | [_session connectWithToken:_token error:&error];
195 | if (error)
196 | {
197 | NSLog(@"Unable to connect to session (%@)",
198 | error.localizedDescription);
199 | }
200 | }
201 |
202 | The OTSession object (`_session`), defined by the OpenTok iOS SDK, represents the OpenTok session
203 | (which connects users).
204 |
205 | The `[OTSession connectWithToken:error]` method connects the iOS app to the OpenTok session.
206 | You must connect before sending or receiving audio-video streams in the session (or before
207 | interacting with the session in any way).
208 |
209 | This app sets `self` to implement the `OTSessionDelegate` protocol to receive session-related
210 | messages. These messages are sent when other clients connect to the session, when they send
211 | audio-video streams to the session, and upon other session-related events, which we will look
212 | at in the following sections.
213 |
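In addition to the delegate methods discussed in the following sections, the
OTSessionDelegate protocol defines callbacks for disconnection and connection failure.
A minimal sketch of these (logging only; the sample's actual implementations may do more):

    - (void)sessionDidDisconnect:(OTSession *)session
    {
        NSLog(@"Session disconnected (%@)", session.sessionId);
    }

    - (void)session:(OTSession *)session didFailWithError:(OTError *)error
    {
        NSLog(@"Session failed to connect (%@)", error.localizedDescription);
    }
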
214 | ### Publishing an audio-video stream to the session
215 |
216 | 1. In Xcode, launch the app in a connected iOS device or in the iOS simulator.
217 |
218 | 2. On first run, the app asks you for access to the camera:
219 |
220 | LearningOpenTok would like to Access the Camera: Don't Allow / OK
221 |
222 | iOS automatically prompts the user to grant the app permission to access the camera.
223 |
224 | The published stream appears in the lower-lefthand corner of the video view. (The main storyboard
225 | of the app defines many of the views and UI controls used by the app.)
226 |
227 | 3. Now close the app. You will use the test.html file (located in the root directory of this
228 | project) to connect to the OpenTok session and publish an audio-video stream from a web
229 | browser:
230 |
231 | * Edit the test.html file and set the `sessionCredentialsUrl` variable to match the
232 | `SAMPLE_SERVER_BASE_URL` property used in the iOS app. Or -- if you are using hard-coded
233 | session ID, token, and API key settings -- set the `apiKey`, `sessionId`, and `token` variables.
234 |
235 | * Add the test.html file to a web server. (You cannot run WebRTC videos in web pages loaded
236 | from the desktop.)
237 |
238 | * In a browser, load the test.html file from the web server.
239 |
240 | 4. Run the iOS app again. The app will send an audio-video stream to the web client and receive
241 | the web client's stream.
242 |
243 | 5. Click the mute mic button (below the video views).
244 |
245 | This mutes the microphone and prevents audio from being published. Click the button again to
246 | resume publishing audio.
247 |
248 | 6. Click the mute audio button in the subscribed stream view.
249 |
250 | This mutes the local playback of the subscribed stream.
251 |
252 | 7. Click the swap camera button (below the video views).
253 |
254 | This toggles the camera used (between front and back) for the published stream.
255 |
256 | Upon successfully connecting to the OpenTok session (see the previous section), the
257 | `[OTSessionDelegate session:didConnect:]` message is sent. The ViewController.m code implements
258 | this delegate method:
259 |
260 | - (void)sessionDidConnect:(OTSession*)session
261 | {
262 | // We have successfully connected, now start pushing an audio-video stream
263 | // to the OpenTok session.
264 | [self doPublish];
265 | }
266 |
267 | The method calls the `[self doPublish]` method, which first initializes an OTPublisher object,
268 | defined by the OpenTok iOS SDK:
269 |
270 | _publisher = [[OTPublisher alloc]
271 | initWithDelegate:self];
272 |
273 | The code calls the `[OTSession publish:error:]` method to publish an audio-video stream
274 | to the session:
275 |
276 | OTError *error = nil;
277 | [_session publish:_publisher error:&error];
278 | if (error)
279 | {
280 | NSLog(@"Unable to publish (%@)",
281 | error.localizedDescription);
282 | }
283 |
284 | It then adds the publisher's view, which contains its video, as a subview of the
285 | `_publisherView` UIView element, defined in the main storyboard:
286 |
287 | [_publisher.view setFrame:CGRectMake(0, 0, _publisherView.bounds.size.width,
288 | _publisherView.bounds.size.height)];
289 | [_publisherView addSubview:_publisher.view];
290 |
291 | This app sets `self` to implement the OTPublisherDelegate interface and receive publisher-related
292 | events.
293 |
294 | Upon successfully publishing the stream, the implementation of the
295 | `[OTPublisherDelegate publisher:streamCreated]` method is called:
296 |
297 | - (void)publisher:(OTPublisherKit *)publisher
298 | streamCreated:(OTStream *)stream
299 | {
300 | NSLog(@"Now publishing.");
301 | }
302 |
303 | If the publisher stops sending its stream to the session, the implementation of the
304 | `[OTPublisherDelegate publisher:streamDestroyed]` method is called:
305 |
306 | - (void)publisher:(OTPublisherKit*)publisher
307 | streamDestroyed:(OTStream *)stream
308 | {
309 | [self cleanupPublisher];
310 | }
311 |
312 | The `[self cleanupPublisher]` method removes the publisher's view (its video) from its
313 | superview:
314 |
315 | - (void)cleanupPublisher {
316 | [_publisher.view removeFromSuperview];
317 | _publisher = nil;
318 | }
319 |
320 | ### Subscribing to another client's audio-video stream
321 |
322 | The [OTSessionDelegate session:streamCreated:] message is sent when a new stream is created in
323 | the session. The app implements this delegate method with the following:
324 |
325 | - (void)session:(OTSession*)session
326 | streamCreated:(OTStream *)stream
327 | {
328 | NSLog(@"session streamCreated (%@)", stream.streamId);
329 |
330 | if (nil == _subscriber)
331 | {
332 | [self doSubscribe:stream];
333 | }
334 | }
335 |
336 | The method is passed an OTStream object (defined by the OpenTok iOS SDK), representing the stream
337 | that another client is publishing. Although this app assumes that only one other client is
338 | connecting to the session and publishing, the method checks to see if the app is already
339 | subscribing to a stream (if the `_subscriber` property is set). If not, the session calls
340 | `[self doSubscribe:stream]`, passing in the OTStream object (for the new stream):
341 |
342 | - (void)doSubscribe:(OTStream*)stream
343 | {
344 | _subscriber = [[OTSubscriber alloc] initWithStream:stream
345 | delegate:self];
346 | OTError *error = nil;
347 | [_session subscribe:_subscriber error:&error];
348 | if (error)
349 | {
350 | NSLog(@"Unable to subscribe (%@)",
351 | error.localizedDescription);
352 | }
353 | }
354 |
355 | The method initializes an OTSubscriber object (`_subscriber`), used to subscribe to the stream,
356 | passing in the OTStream object to the initialization method. It also sets `self` to implement the
357 | OTSubscriberDelegate interface, which is sent messages related to the subscriber.
358 |
359 | It then calls `[OTSession subscribe:error:]` to have the app subscribe to the stream.
360 |
361 | When the app starts receiving the subscribed stream, the
362 | `[OTSubscriberDelegate subscriberDidConnectToStream:]` message is sent. The implementation of the
363 | delegate method adds the view of the subscriber stream (defined by the `view` property of the
364 | OTSubscriber object) as a subview of the `_subscriberView` UIView object, defined in the main
365 | storyboard:
366 |
367 | - (void)subscriberDidConnectToStream:(OTSubscriberKit*)subscriber
368 | {
369 | NSLog(@"subscriberDidConnectToStream (%@)",
370 | subscriber.stream.connection.connectionId);
371 | [_subscriber.view setFrame:CGRectMake(0, 0, _subscriberView.bounds.size.width,
372 | _subscriberView.bounds.size.height)];
373 | [_subscriberView addSubview:_subscriber.view];
374 | _subscriberAudioBtn.hidden = NO;
375 |
376 | _chatTextInputView.hidden = NO;
377 | }
378 |
379 | It also displays the input text field for the text chat. The app hides this field until
380 | you start viewing the other client's audio-video stream.
381 |
382 | If the subscriber's stream is dropped from the session (perhaps because the client stopped
383 | publishing or disconnected), the implementation of the `[OTSessionDelegate session:streamDestroyed:]` method is called:
384 |
385 | - (void)session:(OTSession*)session
386 | streamDestroyed:(OTStream *)stream
387 | {
388 | NSLog(@"session streamDestroyed (%@)", stream.streamId);
389 | if ([_subscriber.stream.streamId isEqualToString:stream.streamId])
390 | {
391 | [self cleanupSubscriber];
392 | }
393 | }
394 |
395 | The `[self cleanupSubscriber]` method removes the subscriber's view (its video) from its
396 | superview:
397 |
398 | - (void)cleanupSubscriber {
399 | [_subscriber.view removeFromSuperview];
400 | _subscriber = nil;
401 | }
402 |
403 | ### Adding user interface controls
404 |
405 | This code adds buttons to mute the publisher and subscriber audio and to toggle the
406 | publisher camera.
407 |
408 | When the user clicks the toggle publisher audio button, the `[self togglePublisherMic]`
409 | method is called:
410 |
411 | -(void)togglePublisherMic
412 | {
413 | _publisher.publishAudio = !_publisher.publishAudio;
414 | UIImage *buttonImage;
415 | if (_publisher.publishAudio) {
416 | buttonImage = [UIImage imageNamed: @"mic-24.png"];
417 | } else {
418 | buttonImage = [UIImage imageNamed: @"mic_muted-24.png"];
419 | }
420 | [_publisherAudioBtn setImage:buttonImage forState:UIControlStateNormal];
421 | }
422 |
423 | The `publishAudio` property of the OTPublisher object is set to a Boolean value indicating whether
424 | the publisher is publishing audio or not. The method toggles the setting when the user clicks the
425 | button.
426 |
427 | Similarly, the `subscribeToAudio` property of the OTSubscriber object is a Boolean value indicating
428 | whether the local iOS device is playing back the subscribed stream's audio or not. When the user
429 | clicks the toggle audio button for the Subscriber, the following method is called:
430 |
431 | -(void)toggleSubscriberAudio
432 | {
433 | _subscriber.subscribeToAudio = !_subscriber.subscribeToAudio;
434 | UIImage *buttonImage;
435 | if (_subscriber.subscribeToAudio) {
436 | buttonImage = [UIImage imageNamed: @"Subscriber-Speaker-35.png"];
437 | } else {
438 | buttonImage = [UIImage imageNamed: @"Subscriber-Speaker-Mute-35.png"];
439 | }
440 | [_subscriberAudioBtn setImage:buttonImage forState:UIControlStateNormal];
441 | }
442 |
443 | When the user clicks the toggle camera button, the `[self swapCamera]` method is called:
444 |
445 | -(void)swapCamera
446 | {
447 | if (_publisher.cameraPosition == AVCaptureDevicePositionFront) {
448 | _publisher.cameraPosition = AVCaptureDevicePositionBack;
449 | } else {
450 | _publisher.cameraPosition = AVCaptureDevicePositionFront;
451 | }
452 | }
453 |
454 | ## Recording an OpenTok session to an archive
455 |
456 | *Important* -- To view the code for this functionality, switch to the *archiving* branch
457 | of this git repository.
458 |
459 | In the archiving branch of this git repository, the following functionality is enabled:
460 |
461 | 1. Tap the *Start recording* button.
462 |
463 | This starts recording the audio video streams on the OpenTok Media Server.
464 |
465 | 2. Click the *Stop recording* button to stop the recording.
466 |
467 | 3. Click the *View recording* button to view the recording in the web browser.
468 |
469 | The OpenTok archiving API lets you record audio-video streams in a session to MP4 files. You use
470 | server-side code to start and stop archive recordings. In the Config.h file, you set the following
471 | constant to the base URL of the web service the app calls to start archive recording, stop
472 | recording, and play back the recorded video:
473 |
474 | #define SAMPLE_SERVER_BASE_URL
475 |
476 | If you do not set this string, the Start Recording, Stop Recording, and View Archive
477 | buttons will not be available in the app.
478 |
479 | When the user clicks the Start Recording and Stop Recording buttons, the app calls the
480 | `[self startArchive]` and `[self stopArchive]` methods. These call web services that invoke
481 | server-side code to start and stop archive recordings.
482 | (See [Setting up the test web service](#setting-up-the-test-web-service).)
483 |
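As a rough sketch (not the app's exact implementation), `startArchive` could simply send
an HTTP request to the web service's "/start" endpoint described earlier; the real method
may also pass along the session ID:

    - (void)startArchive
    {
        NSString *urlPath = [SAMPLE_SERVER_BASE_URL stringByAppendingString:@"/start"];
        NSMutableURLRequest *request =
            [NSMutableURLRequest requestWithURL:[NSURL URLWithString:urlPath]];
        request.HTTPMethod = @"POST";
        NSURLSessionDataTask *task = [[NSURLSession sharedSession]
            dataTaskWithRequest:request
            completionHandler:^(NSData *data, NSURLResponse *response, NSError *error) {
                if (error) {
                    NSLog(@"Unable to start archive (%@)", error.localizedDescription);
                }
            }];
        [task resume];
    }
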
484 | When archive recording starts, the implementation of the
485 | `[OTSessionDelegate session:archiveStartedWithId:name:]` method is called:
486 |
487 | - (void) session:(OTSession*)session
488 | archiveStartedWithId:(NSString *)archiveId
489 | name:(NSString *)name
490 | {
491 | NSLog(@"session archiving started with id:%@ name:%@", archiveId, name);
492 | _archiveId = archiveId;
493 | _archivingIndicatorImg.hidden = NO;
494 | [_archiveControlBtn setTitle: @"Stop recording" forState:UIControlStateNormal];
495 | _archiveControlBtn.hidden = NO;
496 | [_archiveControlBtn addTarget:self
497 | action:@selector(stopArchive)
498 | forControlEvents:UIControlEventTouchUpInside];
499 | }
500 |
501 | This causes the `_archivingIndicatorImg` image (defined in the main storyboard) to be displayed.
502 | The method stores the archive ID (identifying the archive) in the `_archiveId` property.
503 | The method also changes the archiving control button text to "Stop recording".
504 |
505 | When the user clicks the Stop Recording button, the app passes the archive ID along to the
506 | web service that stops the archive recording.
507 |
508 | When archive recording stops, the implementation of the
509 | `[OTSessionDelegate session:archiveStoppedWithId:]` method is called:
510 |
511 | - (void) session:(OTSession*)session
512 | archiveStoppedWithId:(NSString *)archiveId
513 | {
514 | NSLog(@"session archiving stopped with id:%@", archiveId);
515 | _archivingIndicatorImg.hidden = YES;
516 | [_archiveControlBtn setTitle: @"View archive" forState:UIControlStateNormal];
517 | _archiveControlBtn.hidden = NO;
518 | [_archiveControlBtn addTarget:self
519 | action:@selector(loadArchivePlaybackInBrowser)
520 | forControlEvents:UIControlEventTouchUpInside];
521 | }
522 |
523 | This causes the `_archivingIndicatorImg` image (defined in the main storyboard) to be
524 | hidden. It also changes the archiving control button text to "View archive".
525 | When the user clicks this button, the `[self loadArchivePlaybackInBrowser:]` method
526 | opens a web page (in Safari) that displays the archive recording.
527 |
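A minimal sketch of such a method (the exact path format of the "/view" endpoint is an
assumption here):

    - (void)loadArchivePlaybackInBrowser
    {
        // Hand the playback page for the recorded archive off to Safari.
        NSString *urlPath = [NSString stringWithFormat:@"%@/view/%@",
                             SAMPLE_SERVER_BASE_URL, _archiveId];
        [[UIApplication sharedApplication] openURL:[NSURL URLWithString:urlPath]];
    }
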
528 | ## Using the signaling API to implement text chat
529 |
530 | *Important* -- To view the code for this functionality, switch to the *signaling* branch
531 | of this git repository.
532 |
533 | In the signaling branch of this git repository, the following functionality is enabled:
534 |
535 | * Click in the text chat input field (labeled "Enter text chat message here"), enter a text
536 | chat message and tap the Return button.
537 |
538 | The text chat message is sent to the web client. You can also send a chat message from the web
539 | client to the iOS client.
540 |
541 | When the user enters text in the text chat input text field, the `[self sendChatMessage]`
542 | method is called:
543 |
544 | - (void) sendChatMessage
545 | {
546 | OTError* error = nil;
547 | [_session signalWithType:@"chat"
548 | string:_chatInputTextField.text
549 | connection:nil error:&error];
550 | if (error) {
551 | NSLog(@"Signal error: %@", error);
552 | } else {
553 | NSLog(@"Signal sent: %@", _chatInputTextField.text);
554 | }
555 | _chatInputTextField.text = @"";
556 | }
557 |
558 | This method calls the `[OTSession signalWithType:string:connection:error:]` method. This
559 | method sends a message to clients connected to the OpenTok session. Each signal is
560 | defined by a `type` string identifying the type of message (in this case "chat")
561 | and a string containing the message.
562 |
563 | When another client connected to the session (in this app, there is only one) sends
564 | a message, the implementation of the
565 | `[OTSessionDelegate session:receivedSignalType:fromConnection:withString:]` method is called:
566 |
567 | - (void)session:(OTSession*)session receivedSignalType:(NSString*)type fromConnection:(OTConnection*)connection withString:(NSString*)string {
568 | NSLog(@"Received signal %@", string);
569 | Boolean fromSelf = NO;
570 | if ([connection.connectionId isEqualToString:session.connection.connectionId]) {
571 | fromSelf = YES;
572 | }
573 | [self logSignalString:string fromSelf:fromSelf];
574 | }
575 |
576 | This method checks to see if the signal was sent by the local iOS client or by the other
577 | client connected to the session:
578 |
579 | Boolean fromSelf = NO;
580 | if ([connection.connectionId isEqualToString:session.connection.connectionId]) {
581 | fromSelf = YES;
582 | }
583 |
584 | The `session` argument represents your client's OTSession object. The OTSession object has
585 | a `connection` property with a `connectionId` property. The `connection` argument represents
586 | the connection of the client sending the message. If these match, the signal was sent by the
587 | local iOS app.
588 |
589 | The method calls the `[self logSignalString:fromSelf:]` method, which displays the message string in
590 | the text view for chat messages received.
591 |
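A minimal sketch of such a method (the `_chatReceivedTextView` outlet name is
hypothetical):

    - (void)logSignalString:(NSString *)string fromSelf:(Boolean)fromSelf
    {
        // Prefix messages sent by the local client so they can be told apart
        // in the chat log.
        NSString *sender = fromSelf ? @"You" : @"Them";
        NSString *line = [NSString stringWithFormat:@"%@: %@\n", sender, string];
        _chatReceivedTextView.text =
            [_chatReceivedTextView.text stringByAppendingString:line];
    }
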
592 | This app uses the OpenTok signaling API to implement text chat. However, you can use the
593 | signaling API to send messages to other clients (individually or collectively) connected to
594 | the session.
595 |
596 | ## Basic Audio Driver
597 |
598 | To see the code for this sample, switch to the *audio-driver* branch. This branch shows you
599 | how to implement a custom audio driver.
600 |
601 | The OpenTok iOS SDK lets you set up a custom audio driver for publishers and subscribers. You can
602 | use a custom audio driver to customize the audio sent to a publisher's stream. You can also
603 | customize the playback of a subscribed stream's audio.
604 |
605 | This sample application uses the custom audio driver to publish white noise (a random audio signal)
606 | to its audio stream. It also uses the custom audio driver to capture the audio from subscribed
607 | streams and save it to a file.
608 |
609 | ### Setting up the audio device and the audio bus
610 |
611 | In using a custom audio driver, you define a custom audio driver and an audio bus to be
612 | used by the app.
613 |
614 | The OTKBasicAudioDevice class defines a basic audio device interface to be used by the app.
615 | It implements the OTAudioDevice protocol, defined by the OpenTok iOS SDK. To use a custom
616 | audio driver, call the `[OTAudioDeviceManager setAudioDevice:]` method. This sample sets
617 | the audio device to an instance of the OTKBasicAudioDevice class:
618 |
619 | [OTAudioDeviceManager setAudioDevice:[[OTKBasicAudioDevice alloc] init]];
620 |
621 | Use the OTAudioFormat class, defined in the OpenTok iOS SDK, to define the audio format used
622 | by the custom audio driver. The `[OTKBasicAudioDevice init]` method creates an instance
623 | of the OTAudioFormat class, and sets the sample rate and number of channels for the audio format:
624 |
625 | - (id)init
626 | {
627 | self = [super init];
628 | if (self) {
629 | _otAudioFormat = [[OTAudioFormat alloc] init];
630 | _otAudioFormat.sampleRate = kSampleRate;
631 | _otAudioFormat.numChannels = 1;
632 |
633 | // ...
634 | }
635 | return self;
636 | }
640 |
641 | The `init` method also sets up some local properties that report whether the device is capturing,
642 | whether capturing has been initialized, whether it is rendering and whether rendering has been
643 | initialized:
644 |
645 | _isDeviceCapturing = NO;
646 | _isCaptureInitialized = NO;
647 | _isDeviceRendering = NO;
648 | _isRenderingInitialized = NO;
649 |
650 | The `init` method also sets up a file to save the incoming audio to a file. This is done simply
651 | to illustrate a use of the custom audio driver's audio renderer:
652 |
653 | NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory,
654 | NSUserDomainMask,
655 | YES);
656 | NSString *path = [paths[0] stringByAppendingPathComponent:kOutputFileSampleName];
657 |
658 | [[NSFileManager defaultManager] createFileAtPath:path
659 | contents:nil
660 | attributes:nil];
661 | _outFile = [NSFileHandle fileHandleForWritingAtPath:path];
662 |
663 | The `[OTKBasicAudioDevice setAudioBus:]` method (defined by the OTAudioDevice protocol) sets
664 | the audio bus to be used by the audio device (defined by the OTAudioBus protocol). The audio
665 | device uses this object to send and receive audio samples to and from a session. This instance of
666 | the object is retained for the lifetime of the implementing object. The publisher will access the
667 | OTAudioBus object to obtain the audio samples. And subscribers will send audio samples (from
668 | subscribed streams) to the OTAudioBus object. Here is the OTKBasicAudioDevice implementation of the
669 | `[OTAudioDevice setAudioBus:]` method:
670 |
671 | - (BOOL)setAudioBus:(id<OTAudioBus>)audioBus
672 | {
673 | self.otAudioBus = audioBus;
674 | return YES;
675 | }
676 |
677 | The `[OTKBasicAudioDevice renderFormat]` method (defined by the OTAudioDevice protocol)
678 | returns the audio rendering format, the OTAudioFormat instance that was created in the `init`
679 | method:
680 |
681 | - (OTAudioFormat*)renderFormat
682 | {
683 | return self.otAudioFormat;
684 | }
685 |
686 | ### Rendering audio from subscribed streams
687 |
688 | The `[OTAudioDevice startRendering]` method is called when the audio device should start rendering
689 | (playing back) audio from subscribed streams. The OTKBasicAudioDevice implementation of this method calls the `[self consumeSampleCapture]` method after 0.1 seconds:
690 |
691 | - (BOOL)startRendering
692 | {
693 | self.isDeviceRendering = YES;
694 | dispatch_after(
695 | dispatch_time(DISPATCH_TIME_NOW, (int64_t)(0.1 * NSEC_PER_SEC)),
696 | dispatch_get_main_queue(),
697 | ^{
698 | [self consumeSampleCapture];
699 | });
700 | return YES;
701 | }
702 |
703 | The `[OTKBasicAudioDevice consumeSampleCapture]` method gets 1000 samples from the audio
704 | bus by calling the `[OTAudioBus readRenderData:numberOfSamples:]` method (defined by the OpenTok iOS SDK). It then writes the audio data to the file (for sample purposes). And, if the
705 | audio device is still being used to render audio samples, it sets a timer to call the `consumeSampleCapture` method again after 0.1 seconds:
706 |
707 | - (void)consumeSampleCapture
708 | {
709 | static int num_samples = 1000;
710 | int16_t *buffer = malloc(sizeof(int16_t) * num_samples);
711 |
712 | uint32_t samples_get = [self.otAudioBus readRenderData:buffer numberOfSamples:num_samples];
713 |
714 | NSData *data = [NSData dataWithBytes:buffer
715 | length:(sizeof(int16_t) * samples_get)];
716 | [self.outFile seekToEndOfFile];
717 | [self.outFile writeData:data];
718 |
719 | free(buffer);
720 |
721 | if (self.isDeviceRendering) {
722 | dispatch_after(dispatch_time(DISPATCH_TIME_NOW,
723 | (int64_t)(0.1 * NSEC_PER_SEC)),
724 | dispatch_get_main_queue(),
725 | ^{
726 | [self consumeSampleCapture];
727 | });
728 | }
729 | }
730 |
731 | This example is intentionally simple for instructional purposes -- it simply writes the audio data
732 | to a file. In a more practical use of a custom audio driver, you could use the custom audio driver
733 | to play back audio to a Bluetooth device or to process audio before playing it back.
734 |
735 | ### Capturing audio to be used by a publisher
736 |
737 | The `[OTAudioDevice startCapture]` method is called when the audio device should start capturing
738 | audio to be published. The OTKBasicAudioDevice implementation of this method calls the `[self produceSampleCapture]` method after 0.1 seconds:
739 |
740 | - (BOOL)startCapture
741 | {
742 | self.isDeviceCapturing = YES;
743 | dispatch_after(
744 | dispatch_time(DISPATCH_TIME_NOW, (int64_t)(0.1 * NSEC_PER_SEC)),
745 | dispatch_get_main_queue(),
746 | ^{
747 | [self produceSampleCapture];
748 | });
749 |
750 | return YES;
751 | }
752 |
753 | The `[OTKBasicAudioDevice produceSampleCapture]` method produces a buffer containing samples of random data (white noise). It then calls the `[OTAudioBus writeCaptureData:numberOfSamples:]` method of the OTAudioBus object, which sends the samples to the audio bus. The publisher in the
754 | application uses the samples sent to the audio bus to transmit as audio in the published stream.
755 | Then if a capture is still in progress (if the app is publishing), the method calls itself again after 0.1 seconds.
756 |
757 | - (void)produceSampleCapture
758 | {
759 | static int num_frames = 1000;
760 | int16_t *buffer = malloc(sizeof(int16_t) * num_frames);
761 |
762 | for (int frame = 0; frame < num_frames; ++frame) {
763 | Float32 sample = ((double)arc4random() / 0x100000000);
764 | buffer[frame] = (sample * 32767.0f);
765 | }
766 |
767 | [self.otAudioBus writeCaptureData:buffer numberOfSamples:num_frames];
768 |
769 | free(buffer);
770 |
771 | if (self.isDeviceCapturing) {
772 | dispatch_after(dispatch_time(DISPATCH_TIME_NOW,
773 | (int64_t)(0.1 * NSEC_PER_SEC)),
774 | dispatch_get_main_queue(),
775 | ^{
776 | [self produceSampleCapture];
777 | });
778 | }
779 | }
780 |
781 | ### Other notes on the audio driver sample app
782 |
783 | The OTAudioDevice protocol includes other required methods, which are implemented by
784 | the OTKBasicAudioDevice class. However, this sample does not do anything interesting in
785 | these methods, so they are not included in this discussion.
786 |
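For example, the counterpart to `startRendering` can be as simple as clearing the flag
that the `consumeSampleCapture` loop (shown earlier) checks before rescheduling itself.
A minimal sketch:

    - (BOOL)stopRendering
    {
        // The consumeSampleCapture loop stops rescheduling itself once
        // isDeviceRendering is NO.
        self.isDeviceRendering = NO;
        return YES;
    }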
787 |
788 | ## Basic Video Renderer
789 |
790 | To see the code for this sample, switch to the *video-renderer-basic* branch. This branch shows you
791 | how to make minor modifications to the video renderer used by an OTPublisher object. You can also
792 | use the same techniques to modify the video renderer used by an OTSubscriber object (though this
793 | example only illustrates a custom renderer for a publisher).
794 |
795 | In this example, the app uses a custom video renderer to display a black-and-white version of the
796 | OTPublisher object's video.
797 |
798 | In the main ViewController, after initializing the OTPublisher object, the `videoRender` property
799 | of the OTPublisher object is set to an instance of OTKBasicVideoRender:
800 |
801 | _publisher = [[OTPublisher alloc]
802 | initWithDelegate:self];
803 | _renderer = [[OTKBasicVideoRender alloc] init];
804 |
805 | _publisher.videoRender = _renderer;
806 |
807 | OTKBasicVideoRender is a custom class that implements the OTVideoRender protocol (defined
808 | in the OpenTok iOS SDK). This protocol lets you define a custom video renderer to be used
809 | by an OpenTok publisher or subscriber.
810 |
811 | The `[OTKBasicVideoRender init]` method sets a `_renderView` property to a UIView object. This is
812 | the UIView object that will contain the view to be rendered (by the publisher or subscriber).
813 | In this sample, the UIView object is defined by the custom OTKCustomRenderView class, which
814 | extends UIView:
815 |
816 | - (id)init
817 | {
818 | self = [super init];
819 | if (self) {
820 | _renderView = [[OTKCustomRenderView alloc] initWithFrame:CGRectZero];
821 | }
822 | return self;
823 | }
824 |
825 | The OTKCustomRenderView class includes methods (discussed later) that convert a video frame to
826 | a black-and-white representation.
827 |
828 | The `[OTVideoRender renderVideoFrame:]` method is called when the publisher (or subscriber) renders
829 | a video frame to the video renderer. The frame is an OTVideoFrame object (defined by the OpenTok iOS
830 | SDK). In the OTKBasicVideoRender implementation of this method, it simply takes the frame and
831 | passes it along to the `[renderVideoFrame:]` method of the OTKCustomRenderView object:
832 |
833 | - (void)renderVideoFrame:(OTVideoFrame*) frame
834 | {
835 | [(OTKCustomRenderView*)self.renderView renderVideoFrame:frame];
836 | }
837 |
838 | The `[OTKCustomRenderView renderVideoFrame:]` method iterates through the pixels in the plane,
839 | adjusts each pixel to a black-and-white value, and adds the value to a buffer. It then writes
840 | the buffer to a CGImageRef representing the view's image, and calls `[self setNeedsDisplay]` to
841 | render the image view:
842 |
843 | - (void)renderVideoFrame:(OTVideoFrame *)frame
844 | {
845 | __block OTVideoFrame *frameToRender = frame;
846 | dispatch_sync(self.renderQueue, ^{
847 | if (_img != NULL) {
848 | CGImageRelease(_img);
849 | _img = NULL;
850 | }
851 |
852 | size_t bufferSize = frameToRender.format.imageHeight
853 | * frameToRender.format.imageWidth * 3;
854 | uint8_t *buffer = malloc(bufferSize);
855 |
856 | uint8_t *yplane = [frameToRender.planes pointerAtIndex:0];
857 |
858 | for (int i = 0; i < frameToRender.format.imageHeight; i++) {
859 | for (int j = 0; j < frameToRender.format.imageWidth; j++) {
860 | int starting = (i * frameToRender.format.imageWidth * 3) + (j * 3);
861 | uint8_t yvalue = yplane[(i * frameToRender.format.imageWidth) + j];
862 | // If in a RGB image we copy the same Y value for R, G and B
863 | // we will obtain a Black & White image
864 | buffer[starting] = yvalue;
865 | buffer[starting+1] = yvalue;
866 | buffer[starting+2] = yvalue;
867 | }
868 | }
869 |
870 | CGDataProviderRef imgProvider = CGDataProviderCreateWithData(NULL,
871 | buffer,
872 | bufferSize,
873 | release_frame);
874 |
875 | _img = CGImageCreate(frameToRender.format.imageWidth,
876 | frameToRender.format.imageHeight,
877 | 8,
878 | 24,
879 | 3 * frameToRender.format.imageWidth,
880 | CGColorSpaceCreateDeviceRGB(),
881 | kCGBitmapByteOrder32Big | kCGImageAlphaNone,
882 | imgProvider,
883 | NULL,
884 | false,
885 | kCGRenderingIntentDefault);
886 |
887 |
888 | CGDataProviderRelease(imgProvider);
889 | dispatch_async(dispatch_get_main_queue(), ^{
890 | [self setNeedsDisplay];
891 | });
892 | });
893 | }
894 |
895 | ## Basic Video Capturer
896 |
897 | To see the code for this sample, switch to the *video-capturer-basic* branch. This branch shows you
898 | how to make minor modifications to the video capturer used by the OTPublisher class.
899 |
900 | In this example, the app uses a custom video capturer to publish random pixels (white noise).
901 | This is done simply to illustrate the basic principles of setting up a custom video capturer.
902 | (For a more practical example, see the Camera Video Capturer and Screen Video Capturer examples,
903 | described in the sections that follow.)
904 |
905 | In the main ViewController, after calling `[_session publish:_publisher error:&error]` to
906 | initiate publishing of an audio-video stream, the `videoCapture` property of the OTPublisher
907 | object is set to an instance of OTKBasicVideoCapturer:
908 |
909 | _publisher.videoCapture = [[OTKBasicVideoCapturer alloc] init];
910 |
911 | OTKBasicVideoCapturer is a custom class that implements the OTVideoCapture protocol (defined
912 | in the OpenTok iOS SDK). This protocol lets you define a custom video capturer to be used
913 | by an OpenTok publisher.
914 |
915 | The `[OTVideoCapture initCapture]` method initializes capture settings to be used by the custom
916 | video capturer. In this sample's custom implementation of OTVideoCapture (OTKBasicVideoCapturer)
917 | the `initCapture` method sets properties of the `format` property of the OTVideoCapture instance:
918 |
919 | - (void)initCapture
920 | {
921 | self.format = [[OTVideoFormat alloc] init];
922 | self.format.pixelFormat = OTPixelFormatARGB;
923 | self.format.bytesPerRow = [@[@(kImageWidth * 4)] mutableCopy];
924 | self.format.imageHeight = kImageHeight;
925 | self.format.imageWidth = kImageWidth;
926 | }
927 |
928 | The OTVideoFormat class (which defines this `format` property) is defined by the OpenTok iOS SDK.
929 | In this sample code, the format of the video capturer is set to use ARGB as the pixel format,
930 | with a specific number of bytes per row, a specific height, and a specific width.
931 |
932 | The `[OTVideoCapture setVideoCaptureConsumer:]` method sets an OTVideoCaptureConsumer object (defined
933 | by the OpenTok iOS SDK) that the video capturer uses to transmit video frames to the publisher's
934 | stream. In the OTKBasicVideoCapturer, this method sets a local OTVideoCaptureConsumer instance
935 | as the consumer:
936 |
937 | - (void)setVideoCaptureConsumer:(id<OTVideoCaptureConsumer>)videoCaptureConsumer
938 | {
939 | // Save consumer instance in order to use it to send frames to the session
940 | self.consumer = videoCaptureConsumer;
941 | }
942 |
943 | The `[OTVideoCapture startCapture:]` method is called when a publisher starts capturing video
944 | to send as a stream to the OpenTok session. This will occur after the `[OTSession publish:error:]`
945 | method is called. In the OTKBasicVideoCapturer implementation of this method, the `[self produceFrame]` method
946 | is called on a background queue after a set interval:
947 |
948 | - (int32_t)startCapture
949 | {
950 | self.captureStarted = YES;
951 | dispatch_after(kTimerInterval,
952 | dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_BACKGROUND, 0),
953 | ^{
954 | [self produceFrame];
955 | });
956 |
957 | return 0;
958 | }
959 |
960 | The `[self produceFrame]` method generates an OTVideoFrame object (defined by the OpenTok
961 | iOS SDK) that represents a frame of video. In this case, the frame contains random pixels filling
962 | the defined height and width for the sample video format:
963 |
964 | - (void)produceFrame
965 | {
966 | OTVideoFrame *frame = [[OTVideoFrame alloc] initWithFormat:self.format];
967 |
968 | // Generate an image with random pixels
969 | u_int8_t *imageData[1];
970 | imageData[0] = malloc(sizeof(uint8_t) * kImageHeight * kImageWidth * 4);
971 | for (int i = 0; i < kImageWidth * kImageHeight * 4; i+=4) {
972 | imageData[0][i] = rand() % 255; // A
973 | imageData[0][i+1] = rand() % 255; // R
974 | imageData[0][i+2] = rand() % 255; // G
975 | imageData[0][i+3] = rand() % 255; // B
976 | }
977 |
978 | [frame setPlanesWithPointers:imageData numPlanes:1];
979 | [self.consumer consumeFrame:frame];
980 |
981 | free(imageData[0]);
982 |
983 | if (self.captureStarted) {
984 | dispatch_after(kTimerInterval,
985 | dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_BACKGROUND, 0),
986 | ^{
987 | [self produceFrame];
988 | });
989 | }
990 | }
991 |
992 | The method passes the frame to the `[consumeFrame:]` method of the instance of the
993 | OTVideoCaptureConsumer used by this video capturer (described above). This causes the publisher
994 | to send the frame of data to the video stream in the session.
995 |
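For completeness, the corresponding `stopCapture` implementation can be as simple as
clearing the flag that the `produceFrame` loop checks before rescheduling itself. A
minimal sketch:

    - (int32_t)stopCapture
    {
        // The produceFrame loop stops rescheduling itself once
        // captureStarted is NO.
        self.captureStarted = NO;
        return 0;
    }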
996 |
997 | ## Camera video capturer
998 |
999 | To see the code for this sample, switch to the *video-capturer-camera* branch. This branch shows you
1000 | how to use a custom video capturer using the device camera as the video source.
1001 |
1002 | Before studying this sample, see the video-capturer-basic sample.
1003 |
1004 | This sample code uses the Apple AVFoundation framework to capture video from a camera and publish it
1005 | to a connected session. The ViewController class creates a session, instantiates subscribers, and
1006 | sets up the publisher. The `captureOutput` method creates a frame, captures a screenshot, tags the
1007 | frame with a timestamp, and saves it in the consumer instance. The publisher accesses the consumer
1008 | to obtain the video frame.
1009 |
1010 | Note that because this sample needs to access the device's camera, you must test it on an iOS
1011 | device. You cannot test it in the iOS simulator.
1012 |
1013 | ### Initializing and configuring the video capturer
1014 |
1015 | The `[OTKBasicVideoCapturer initWithPreset: andDesiredFrameRate:]` method is an initializer for
1016 | the OTKBasicVideoCapturer class. It calls the `sizeFromAVCapturePreset` method to set the resolution of the image. The image size and frame rate are also set here. A separate queue is created for capturing images, so as not to affect the UI queue.
1017 |
1018 | - (id)initWithPreset:(NSString *)preset andDesiredFrameRate:(NSUInteger)frameRate
1019 | {
1020 | self = [super init];
1021 | if (self) {
1022 | self.sessionPreset = preset;
1023 | CGSize imageSize = [self sizeFromAVCapturePreset:self.sessionPreset];
1024 | _imageHeight = imageSize.height;
1025 | _imageWidth = imageSize.width;
1026 | _desiredFrameRate = frameRate;
1027 |
1028 | _captureQueue = dispatch_queue_create("com.tokbox.OTKBasicVideoCapturer",
1029 | DISPATCH_QUEUE_SERIAL);
1030 | }
1031 | return self;
1032 | }
1033 |
1034 | The `sizeFromAVCapturePreset` method identifies the string value of the image resolution in
1035 | the iOS AVFoundation framework and returns a CGSize representation.
1036 |
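A minimal sketch of such a mapping (the set of presets handled here is illustrative, not
exhaustive):

    - (CGSize)sizeFromAVCapturePreset:(NSString *)preset
    {
        if ([preset isEqualToString:AVCaptureSessionPreset1280x720])
            return CGSizeMake(1280, 720);
        if ([preset isEqualToString:AVCaptureSessionPreset640x480])
            return CGSizeMake(640, 480);
        if ([preset isEqualToString:AVCaptureSessionPreset352x288])
            return CGSizeMake(352, 288);
        // Fall back to a conservative default for presets not listed above.
        return CGSizeMake(352, 288);
    }
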
1037 | The implementation of the `[OTVideoCapture initCapture]` method uses the AVFoundation framework
1038 | to set the camera to capture images. In the first part of the method an instance of the
1039 | AVCaptureVideoDataOutput is used to produce image frames:
1040 |
1041 | - (void)initCapture
1042 | {
1043 | NSError *error;
1044 | self.captureSession = [[AVCaptureSession alloc] init];
1045 |
1046 | [self.captureSession beginConfiguration];
1047 |
1048 | // Set device capture
1049 | self.captureSession.sessionPreset = self.sessionPreset;
1050 | AVCaptureDevice *videoDevice =
1051 | [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
1052 | self.inputDevice =
1053 | [AVCaptureDeviceInput deviceInputWithDevice:videoDevice error:&error];
1054 | [self.captureSession addInput:self.inputDevice];
1055 |
1056 | AVCaptureVideoDataOutput *outputDevice = [[AVCaptureVideoDataOutput alloc] init];
1057 | outputDevice.alwaysDiscardsLateVideoFrames = YES;
1058 | outputDevice.videoSettings =
1059 | @{(NSString *)kCVPixelBufferPixelFormatTypeKey:
1060 | @(kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange
1061 | )};
1062 |
1063 | [outputDevice setSampleBufferDelegate:self queue:self.captureQueue];
1064 |
1065 | [self.captureSession addOutput:outputDevice];
1066 |
1067 | // See the next section ...
1068 | }
1069 |
The frames captured by this session are delivered via the
`[AVCaptureVideoDataOutputSampleBufferDelegate captureOutput:didOutputSampleBuffer:fromConnection:]`
delegate method. The AVCaptureDevice object represents the camera and its properties; it provides
captured images to an AVCaptureSession object.
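
The sample uses the default video device. If you wanted, say, the front-facing camera instead, you
might select it like this (a sketch; `frontFacingCamera` is a hypothetical helper, not part of the
sample):

    // Return the front-facing camera, or fall back to the default device.
    - (AVCaptureDevice *)frontFacingCamera
    {
        for (AVCaptureDevice *device in
             [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]) {
            if (device.position == AVCaptureDevicePositionFront) {
                return device;
            }
        }
        return [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    }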
1073 |
1074 | The second part of the `initCapture` method calls the `bestFrameRateForDevice` method to obtain
1075 | the best frame rate for image capture:
1076 |
1077 | - (void)initCapture
1078 | {
1079 | // See previous section ...
1080 |
1081 | // Set framerate
1082 | double bestFrameRate = [self bestFrameRateForDevice];
1083 |
        CMTime desiredMinFrameDuration = CMTimeMake(1, (int32_t)bestFrameRate);
        CMTime desiredMaxFrameDuration = CMTimeMake(1, (int32_t)bestFrameRate);
1086 |
        if ([self.inputDevice.device lockForConfiguration:&error]) {
            self.inputDevice.device.activeVideoMaxFrameDuration = desiredMaxFrameDuration;
            self.inputDevice.device.activeVideoMinFrameDuration = desiredMinFrameDuration;
            [self.inputDevice.device unlockForConfiguration];
        }
1090 |
1091 | [self.captureSession commitConfiguration];
1092 |
1093 | self.format = [OTVideoFormat videoFormatNV12WithWidth:self.imageWidth
1094 | height:self.imageHeight];
1095 | }
1096 |
1097 | The `[self bestFrameRateForDevice]` method returns the best frame rate for the capturing device:
1098 |
1099 | - (double)bestFrameRateForDevice
1100 | {
1101 | double bestFrameRate = 0;
1102 | for (AVFrameRateRange* range in
1103 | self.inputDevice.device.activeFormat.videoSupportedFrameRateRanges)
1104 | {
            CMTime currentDuration = range.minFrameDuration;
            // Cast to double so the division does not truncate, and allow a
            // rate equal to the desired rate to be selected.
            double currentFrameRate =
                (double)currentDuration.timescale / currentDuration.value;
            if (currentFrameRate > bestFrameRate &&
                currentFrameRate <= self.desiredFrameRate) {
                bestFrameRate = currentFrameRate;
            }
1110 | }
1111 | return bestFrameRate;
1112 | }
1113 |
The AVFoundation framework requires a minimum and maximum frame-rate range to optimize the quality of an image capture. Here that range is collapsed into the single `bestFrameRate` value: for simplicity, the minimum and maximum frame rates are set to the same number. You may want to set different minimum and maximum frame rates to obtain better image quality based on the speed of your network; in this application, the frame rate and resolution are fixed. A sketch of using separate rates follows.
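
For instance, a capturer that tolerates network jitter might allow the rate to float between a
hypothetical 15 and 30 frames per second (remember that a *minimum* frame duration corresponds to a
*maximum* frame rate, and vice versa):

    NSError *error = nil;
    // 1/30 second minimum duration = at most 30 frames per second.
    CMTime desiredMinFrameDuration = CMTimeMake(1, 30);
    // 1/15 second maximum duration = at least 15 frames per second.
    CMTime desiredMaxFrameDuration = CMTimeMake(1, 15);

    if ([self.inputDevice.device lockForConfiguration:&error]) {
        self.inputDevice.device.activeVideoMinFrameDuration = desiredMinFrameDuration;
        self.inputDevice.device.activeVideoMaxFrameDuration = desiredMaxFrameDuration;
        [self.inputDevice.device unlockForConfiguration];
    }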
1115 |
The `setVideoCaptureConsumer:` method sets the video capture consumer, an object that conforms
to the OTVideoCaptureConsumer protocol:

    - (void)setVideoCaptureConsumer:(id<OTVideoCaptureConsumer>)videoCaptureConsumer
1119 | {
1120 | self.consumer = videoCaptureConsumer;
1121 | }
1122 |
The `[OTVideoCapture captureSettings:]` method sets the pixel format and size of the image used
by the video capturer, by setting properties of the OTVideoFormat object passed to it.
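
This implementation is straightforward; a sketch consistent with the NV12 format used by this
capturer might look like this:

    - (int32_t)captureSettings:(OTVideoFormat *)videoFormat
    {
        videoFormat.pixelFormat = OTPixelFormatNV12;
        videoFormat.imageWidth = self.imageWidth;
        videoFormat.imageHeight = self.imageHeight;
        return 0;
    }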
1125 |
The `[OTVideoCapture currentDeviceOrientation]` method queries the orientation of the image in
the AVFoundation framework and returns its equivalent, as defined by the OTVideoOrientation enum
in the OpenTok iOS SDK.
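
The exact mapping depends on the camera in use; a sketch for the back camera (the specific
orientation values chosen here are an assumption, not the sample's exact mapping) might look like
this:

    - (OTVideoOrientation)currentDeviceOrientation
    {
        UIInterfaceOrientation orientation =
            [UIApplication sharedApplication].statusBarOrientation;
        switch (orientation) {
            case UIInterfaceOrientationLandscapeLeft:
                return OTVideoOrientationUp;
            case UIInterfaceOrientationLandscapeRight:
                return OTVideoOrientationDown;
            case UIInterfaceOrientationPortraitUpsideDown:
                return OTVideoOrientationRight;
            default:
                return OTVideoOrientationLeft;
        }
    }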
1129 |
1130 | ### Capturing frames for the publisher's video
1131 |
1132 | The implementation of the `[OTVideoCapture startCapture]` method is called when the
1133 | publisher starts capturing video to publish. It calls the `[AVCaptureSession startRunning]` method
1134 | of the AVCaptureSession object:
1135 |
1136 | - (int32_t)startCapture
1137 | {
1138 | self.captureStarted = YES;
1139 | [self.captureSession startRunning];
1140 |
1141 | return 0;
1142 | }
1143 |
1144 | The
1145 | `[AVCaptureVideoDataOutputSampleBufferDelegate captureOutput:didOutputSampleBuffer:fromConnection:]`
1146 | delegate method is called when a new video frame is available from the camera.
1147 |
1148 | - (void)captureOutput:(AVCaptureOutput *)captureOutput
1149 | didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
1150 | fromConnection:(AVCaptureConnection *)connection
1151 | {
1152 | if (!self.captureStarted)
1153 | return;
1154 |
1155 | CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
1156 | OTVideoFrame *frame = [[OTVideoFrame alloc] initWithFormat:self.format];
1157 |
1158 | NSUInteger planeCount = CVPixelBufferGetPlaneCount(imageBuffer);
1159 |
1160 | uint8_t *buffer = malloc(sizeof(uint8_t) * CVPixelBufferGetDataSize(imageBuffer));
1161 | uint8_t *dst = buffer;
1162 | uint8_t *planes[planeCount];
1163 |
1164 | CVPixelBufferLockBaseAddress(imageBuffer, 0);
1165 | for (int i = 0; i < planeCount; i++) {
1166 | size_t planeSize = CVPixelBufferGetBytesPerRowOfPlane(imageBuffer, i)
1167 | * CVPixelBufferGetHeightOfPlane(imageBuffer, i);
1168 |
1169 | planes[i] = dst;
1170 | dst += planeSize;
1171 |
1172 | memcpy(planes[i],
1173 | CVPixelBufferGetBaseAddressOfPlane(imageBuffer, i),
1174 | planeSize);
1175 | }
1176 |
1177 | CMTime minFrameDuration = self.inputDevice.device.activeVideoMinFrameDuration;
        frame.format.estimatedFramesPerSecond =
            (double)minFrameDuration.timescale / minFrameDuration.value;
1179 | frame.format.estimatedCaptureDelay = 100;
1180 | frame.orientation = [self currentDeviceOrientation];
1181 |
1182 | CMTime time = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
1183 | frame.timestamp = time;
1184 | [frame setPlanesWithPointers:planes numPlanes:planeCount];
1185 |
1186 | [self.consumer consumeFrame:frame];
1187 |
1188 | free(buffer);
1189 | CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
1190 | }
1191 |
1192 | This method does the following:
1193 |
1194 | * Creates an OTVideoFrame instance to define the new video frame.
1195 |
* Allocates a memory buffer based on the size of the image.
1197 |
* Writes the image data from two planes into one memory buffer. Since the image is in NV12 format,
its data is distributed over two planes: one for Y data and one for interleaved UV data. A loop
iterates over both planes and copies their data into the single memory buffer.
1201 |
1202 | * Creates a timestamp to tag a captured image. Every image is tagged with a timestamp so both
1203 | publisher and subscriber are able to create the same timeline and reference the frames in the same
1204 | order.
1205 |
1206 | * Calls the `[OTVideoCaptureConsumer consumeFrame:]` method, passing in the OTVideoFrame object.
1207 | This causes the publisher to send the frame in the stream it publishes.
1208 |
The implementation of the
`[AVCaptureVideoDataOutputSampleBufferDelegate captureOutput:didDropSampleBuffer:fromConnection:]`
delegate method is called whenever a video frame is dropped. Because `alwaysDiscardsLateVideoFrames`
is set to `YES`, late frames are discarded so that publishing to the session continues without
interruption; this implementation simply logs the event:
1213 |
1214 | - (void)captureOutput:(AVCaptureOutput *)captureOutput
1215 | didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer
1216 | fromConnection:(AVCaptureConnection *)connection
1217 | {
1218 | NSLog(@"Frame dropped");
1219 | }
1220 |
1221 | ### Other notes on the camera video capturer sample app
1222 |
1223 | The OTVideoCapture protocol includes other required methods, which are implemented by
1224 | the OTKBasicVideoCapturer class. However, this sample does not do anything interesting in
1225 | these methods, so they are not included in this discussion.
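
For completeness, plausible implementations of two of them, consistent with the `startCapture`
method shown earlier (a sketch, not necessarily the sample's exact code), might look like this:

    - (int32_t)stopCapture
    {
        self.captureStarted = NO;
        [self.captureSession stopRunning];
        return 0;
    }

    - (BOOL)isCaptureStarted
    {
        return self.captureStarted;
    }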
1226 |
1227 |
1228 | ## Screen sharing
1229 |
1230 | To see the code for this sample, switch to the *screen-sharing* branch. This branch shows you
1231 | how to capture the screen (a UIView) using a custom video capturer.
1232 |
Before studying this sample, see the *video-capturer-basic* sample.
1234 |
This sample code demonstrates how to use the OpenTok iOS SDK to publish a screen-sharing video,
using the device screen as the source for the stream's video. The sample implements the
`initCapture`, `releaseCapture`, `startCapture`, `stopCapture`, and `isCaptureStarted` methods of
the OTVideoCapture protocol to manage the capture functions of the application. The ViewController
class creates a session, instantiates subscribers, and sets up the publisher. The
OTKBasicVideoCapturer class creates a frame, captures a screenshot, tags the frame with a
timestamp, and passes it to the video capture consumer. The publisher accesses the consumer to
obtain the frame.
1242 |
The `initCapture` method initializes the capture and sets the pixel format of the OTVideoFormat
object. In this example, it is set to ARGB:
1245 |
1246 | - (void)initCapture
1247 | {
1248 | self.format = [[OTVideoFormat alloc] init];
1249 | self.format.pixelFormat = OTPixelFormatARGB;
1250 | }
1251 |
The `releaseCapture` method releases the video format:
1253 |
1254 | - (void)releaseCapture
1255 | {
1256 | self.format = nil;
1257 | }
1258 |
The `startCapture` method dispatches the `produceFrame` method onto a background queue to start
screen captures:
1261 |
1262 | - (int32_t)startCapture
1263 | {
1264 | self.captureStarted = YES;
1265 | dispatch_after(kTimerInterval,
1266 | dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_BACKGROUND, 0),
1267 | ^{
1268 | @autoreleasepool {
1269 | [self produceFrame];
1270 | }
1271 | });
1272 |
1273 | return 0;
1274 | }
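
The `kTimerInterval` and `kFramesPerSecond` constants are not shown in these excerpts; hypothetical
definitions consistent with a 15-frames-per-second capture rate might look like this:

    static const int kFramesPerSecond = 15;
    // dispatch_after() expects a dispatch_time_t, so compute a point in time
    // one frame interval ahead of "now".
    #define kTimerInterval \
        dispatch_time(DISPATCH_TIME_NOW, NSEC_PER_SEC / kFramesPerSecond)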
1275 |
The `produceFrame` method:

* Defines the frame for captured images.

* Creates a timestamp to tag a captured image.

* Takes a screenshot.

* Converts the screenshot to a readable format.

* Tags the screenshot with the timestamp.

* Calculates the size of the image.

* Passes the frame, now containing the image, to the consumer's `consumeFrame:` method.

* Calls itself 15 times per second for as long as the capture is in progress.
1293 |
The frame for the captured images is an OTVideoFrame object. Properties of OTVideoFrame define the planes, timestamp, orientation, and format of a frame.
1295 |
1296 | OTVideoFrame *frame = [[OTVideoFrame alloc] initWithFormat:self.format];
1297 |
1298 | A timestamp is created to tag the image. Every image is tagged with a timestamp so both publisher and subscriber are able to create the same timeline and reference the frames in the same order.
1299 |
    static mach_timebase_info_data_t time_info;
    uint64_t time_stamp = 0;

    // The timebase must be initialized before use; its denominator is zero
    // until mach_timebase_info() fills it in.
    if (time_info.denom == 0) {
        (void)mach_timebase_info(&time_info);
    }

    time_stamp = mach_absolute_time();
    time_stamp *= time_info.numer;
    time_stamp /= time_info.denom;
1306 |
The `screenshot` method is called to obtain an image of the screen:
1308 |
1309 | CGImageRef screenshot = [[self screenshot] CGImage];
1310 |
The `fillPixelBufferFromCGImage:` method converts the image data of a CGImage into a CVPixelBuffer:
1312 |
1313 | [self fillPixelBufferFromCGImage:screenshot];
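
The implementation of `fillPixelBufferFromCGImage:` is not shown in this walkthrough; a sketch
(assuming `pixelBuffer` is a CVPixelBufferRef instance variable that `produceFrame` reads back and
releases later) might look like this:

    - (void)fillPixelBufferFromCGImage:(CGImageRef)image
    {
        size_t width = CGImageGetWidth(image);
        size_t height = CGImageGetHeight(image);

        // Create an ARGB pixel buffer and draw the screenshot into it.
        CVPixelBufferCreate(kCFAllocatorDefault, width, height,
                            kCVPixelFormatType_32ARGB, NULL, &pixelBuffer);
        CVPixelBufferLockBaseAddress(pixelBuffer, 0);

        CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
        CGContextRef context =
            CGBitmapContextCreate(CVPixelBufferGetBaseAddress(pixelBuffer),
                                  width, height, 8,
                                  CVPixelBufferGetBytesPerRow(pixelBuffer),
                                  colorSpace,
                                  kCGImageAlphaNoneSkipFirst |
                                      kCGBitmapByteOrder32Big);
        CGContextDrawImage(context, CGRectMake(0, 0, width, height), image);

        CGContextRelease(context);
        CGColorSpaceRelease(colorSpace);
        CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
    }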
1314 |
The frame is tagged with the timestamp, and its estimated capture rate (in frames per second) and estimated capture delay are set:
1316 |
1317 | CMTime time = CMTimeMake(time_stamp, 1000);
1318 | frame.timestamp = time;
1319 | frame.format.estimatedFramesPerSecond = kFramesPerSecond;
1320 | frame.format.estimatedCaptureDelay = 100;
1321 |
The image size is the number of bytes in a single row multiplied by the height of the image. Note that the single-element array and the bytes-per-row value are based on a 4-byte-per-pixel, single-plane (ARGB) image:
1323 |
1324 | frame.format.imageWidth = CVPixelBufferGetWidth(pixelBuffer);
1325 | frame.format.imageHeight = CVPixelBufferGetHeight(pixelBuffer);
1326 | frame.format.bytesPerRow = [@[@(frame.format.imageWidth * 4)] mutableCopy];
1327 | frame.orientation = OTVideoOrientationUp;
1328 |
1329 | CVPixelBufferLockBaseAddress(pixelBuffer, 0);
1330 | uint8_t *planes[1];
1331 |
1332 | planes[0] = CVPixelBufferGetBaseAddress(pixelBuffer);
1333 | [frame setPlanesWithPointers:planes numPlanes:1];
1334 |
The frame is passed to the consumer instance; the publisher accesses captured images through this consumer:
1336 |
1337 | [self.consumer consumeFrame:frame];
1338 |
The pixel buffer is then unlocked. A background-priority queue (separate from the queue used by the
UI) is used to capture images; while image capture is in progress, the `produceFrame` method
schedules itself to run again, 15 times per second:
1342 |
1343 | CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
1344 | if (self.captureStarted) {
1345 | dispatch_after(kTimerInterval,
1346 | dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_BACKGROUND, 0),
1347 | ^{
1348 | @autoreleasepool {
1349 | [self produceFrame];
1350 | }
1351 | });
1352 | }
1353 |
1354 | The `screenshot` method takes a screenshot and returns an image. This method is called by the
1355 | `produceFrame` method.
1356 |
1357 | - (UIImage *)screenshot
1358 | {
1359 | CGSize imageSize = CGSizeZero;
1360 |
1361 | imageSize = [UIScreen mainScreen].bounds.size;
1362 |
1363 | UIGraphicsBeginImageContextWithOptions(imageSize, NO, 0);
1364 | UIWindow *window = [UIApplication sharedApplication].keyWindow;
1365 |
1366 | if ([window respondsToSelector:
1367 | @selector(drawViewHierarchyInRect:afterScreenUpdates:)])
1368 | {
1369 | [window drawViewHierarchyInRect:window.bounds afterScreenUpdates:NO];
1370 | }
1371 | else {
1372 | [window.layer renderInContext:UIGraphicsGetCurrentContext()];
1373 | }
1374 |
1375 | UIImage *image = UIGraphicsGetImageFromCurrentImageContext();
1376 | UIGraphicsEndImageContext();
1377 | return image;
1378 | }
1379 |
1380 | ## Development and Contributing
1381 |
Interested in contributing? We :heart: pull requests! See the [Contributing](CONTRIBUTING.md) guidelines.
1383 |
1384 | ## Getting Help
1385 |
1386 | We love to hear from you so if you have questions, comments or find a bug in the project, let us know! You can either:
1387 |
1388 | - Open an issue on this repository
- See <https://support.tokbox.com> for support options
1390 | - Tweet at us! We're [@VonageDev](https://twitter.com/VonageDev) on Twitter
1391 | - Or [join the Vonage Developer Community Slack](https://developer.nexmo.com/community/slack)
1392 |
1393 | ## Further Reading
1394 |
- Check out the Developer Documentation at <https://tokbox.com/developer/>
1396 | - [API reference][7] -- Provides details on the OpenTok iOS SDK API
1397 | - [Tutorials][8] -- Includes conceptual information and code samples for all OpenTok features
1398 | - [Sample code][9] -- Includes sample apps
1399 | that show more features of the OpenTok iOS SDK
1400 |
1401 | [1]: https://tokbox.com/opentok/libraries/client/ios/
1402 | [2]: https://dashboard.tokbox.com
1403 | [3]: https://tokbox.com/opentok/starter-kits/
1404 | [4]: https://tokbox.com/opentok/tutorials/create-session/#media-mode
1405 | [5]: https://tokbox.com/opentok/tutorials/create-token/
1406 | [6]: https://developer.apple.com/library/mac/documentation/AVFoundation/Reference/AVCaptureDevice_Class
1407 | [7]: https://tokbox.com/opentok/libraries/client/ios/reference/
1408 | [8]: https://tokbox.com/opentok/tutorials/
1409 | [9]: https://github.com/opentok/opentok-ios-sdk-samples
1410 |
--------------------------------------------------------------------------------