├── .gitignore ├── .gitmodules ├── LICENSE ├── LiveStreamer.xcodeproj └── project.pbxproj ├── LiveStreamer ├── Capture │ ├── OWAppleEncoder.h │ ├── OWAppleEncoder.m │ ├── OWCaptureController.h │ ├── OWCaptureController.m │ ├── OWCaptureViewController.h │ ├── OWCaptureViewController.m │ ├── OWRecordingActivityIndicatorView.h │ ├── OWRecordingActivityIndicatorView.m │ ├── OWSegmentingAppleEncoder.h │ ├── OWSegmentingAppleEncoder.m │ ├── OWTimerView.h │ ├── OWTimerView.m │ ├── OWUtilities.h │ ├── OWUtilities.m │ ├── OWVideoProcessor.h │ └── OWVideoProcessor.m ├── Default-568h@2x.png ├── Default.png ├── Default@2x.png ├── LiveStreamer-Info.plist ├── LiveStreamer-Prefix.pch ├── OWAppDelegate.h ├── OWAppDelegate.m ├── OWManifestGenerator.h ├── OWManifestGenerator.m ├── OWRootViewController.h ├── OWRootViewController.m ├── OWSharedS3Client.h ├── OWSharedS3Client.m ├── crossdomain.xml ├── en.lproj │ └── InfoPlist.strings ├── ffmpeg │ ├── libavformat │ │ ├── ffm.h │ │ ├── network.h │ │ ├── os_support.h │ │ └── url.h │ ├── libavresample │ │ ├── avresample.h │ │ └── version.h │ └── libavutil │ │ └── libm.h ├── index.html ├── main.m └── playlist.m3u8 ├── Media └── red_dot@2x.png └── README.md /.gitignore: -------------------------------------------------------------------------------- 1 | .DS_Store 2 | build 3 | *.mode1v3 4 | *.pbxuser 5 | project.xcworkspace 6 | xcuserdata 7 | .svn 8 | DerivedData 9 | *.orig 10 | *.mp4 11 | OWSecrets.h 12 | -------------------------------------------------------------------------------- /.gitmodules: -------------------------------------------------------------------------------- 1 | [submodule "Submodules/CocoaHTTPServer"] 2 | path = Submodules/CocoaHTTPServer 3 | url = https://github.com/robbiehanson/CocoaHTTPServer.git 4 | [submodule "Submodules/BButton"] 5 | path = Submodules/BButton 6 | url = https://github.com/mattlawer/BButton.git 7 | [submodule "Submodules/FFmpegWrapper"] 8 | path = Submodules/FFmpegWrapper 9 | url = 
git@github.com:OpenWatch/FFmpegWrapper.git 10 | [submodule "Submodules/aws-sdk-ios"] 11 | path = Submodules/aws-sdk-ios 12 | url = https://github.com/aws/aws-sdk-ios.git 13 | [submodule "Submodules/AFNetworking"] 14 | path = Submodules/AFNetworking 15 | url = https://github.com/AFNetworking/AFNetworking.git 16 | [submodule "Submodules/AFAmazonS3Client"] 17 | path = Submodules/AFAmazonS3Client 18 | url = https://github.com/AFNetworking/AFAmazonS3Client.git 19 | [submodule "Submodules/AFKissXMLRequestOperation"] 20 | path = Submodules/AFKissXMLRequestOperation 21 | url = https://github.com/AFNetworking/AFKissXMLRequestOperation.git 22 | [submodule "Submodules/KissXML"] 23 | path = Submodules/KissXML 24 | url = https://github.com/robbiehanson/KissXML.git 25 | [submodule "Submodules/OWS3Client"] 26 | path = Submodules/OWS3Client 27 | url = git@github.com:OpenWatch/OWS3Client.git 28 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | The MIT License (MIT) 2 | 3 | Created by Christopher Ballinger on 9/14/13. 4 | Copyright (c) 2013 OpenWatch, Inc. All rights reserved. 5 | 6 | Permission is hereby granted, free of charge, to any person obtaining a copy of 7 | this software and associated documentation files (the "Software"), to deal in 8 | the Software without restriction, including without limitation the rights to 9 | use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of 10 | the Software, and to permit persons to whom the Software is furnished to do so, 11 | subject to the following conditions: 12 | 13 | The above copyright notice and this permission notice shall be included in all 14 | copies or substantial portions of the Software. 
15 | 16 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 17 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS 18 | FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR 19 | COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER 20 | IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN 21 | CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -------------------------------------------------------------------------------- /LiveStreamer/Capture/OWAppleEncoder.h: -------------------------------------------------------------------------------- 1 | // 2 | // OWAppleEncoder.h 3 | // OpenWatch 4 | // 5 | // Created by Christopher Ballinger on 11/13/12. 6 | // Copyright (c) 2012 OpenWatch FPC. All rights reserved. 7 | // 8 | 9 | #import 10 | 11 | @interface OWAppleEncoder : NSObject { 12 | unsigned long long fileOffset; 13 | __block dispatch_source_t source; 14 | int fileNumber; 15 | 16 | CMFormatDescriptionRef videoFormatDescription; 17 | CMFormatDescriptionRef audioFormatDescription; 18 | } 19 | 20 | @property (nonatomic, strong) NSString *uuid; 21 | @property (nonatomic, retain) NSURL *movieURL; 22 | 23 | @property (atomic, retain) AVAssetWriterInput *audioEncoder; 24 | @property (atomic, retain) AVAssetWriterInput *videoEncoder; 25 | @property (atomic, retain) AVAssetWriter *assetWriter; 26 | 27 | @property (atomic) BOOL readyToRecordAudio; 28 | @property (atomic) BOOL readyToRecordVideo; 29 | @property (nonatomic) AVCaptureVideoOrientation referenceOrientation; 30 | @property (nonatomic) AVCaptureVideoOrientation videoOrientation; 31 | 32 | - (id) initWithURL:(NSURL*)url; 33 | - (id) initWithURL:(NSURL *)url movieFragmentInterval:(CMTime)fragmentInterval; 34 | - (void) writeSampleBuffer:(CMSampleBufferRef)sampleBuffer ofType:(NSString *)mediaType; 35 | - (void) 
setupAudioEncoderWithFormatDescription:(CMFormatDescriptionRef)formatDescription; 36 | - (void) setupAudioEncoderWithFormatDescription:(CMFormatDescriptionRef)formatDescription bitsPerSecond:(int)bps; 37 | - (void) setupVideoEncoderWithFormatDescription:(CMFormatDescriptionRef)formatDescription; 38 | - (void) setupVideoEncoderWithFormatDescription:(CMFormatDescriptionRef)formatDescription bitsPerSecond:(int)bps; 39 | 40 | - (AVAssetWriterInput*) setupVideoEncoderWithAssetWriter:(AVAssetWriter*)currentAssetWriter formatDescription:(CMFormatDescriptionRef)formatDescription bitsPerSecond:(int)bps; 41 | - (AVAssetWriterInput*) setupAudioEncoderWithAssetWriter:(AVAssetWriter*)currentAssetWriter formatDescription:(CMFormatDescriptionRef)formatDescription bitsPerSecond:(int)bps; 42 | 43 | @property (nonatomic) BOOL watchOutputFile; 44 | 45 | - (void) uploadLocalURL:(NSURL*)url; 46 | 47 | - (void) finishEncoding; 48 | - (void) showError:(NSError*)error; 49 | - (void) handleException:(NSException*)exception; 50 | 51 | @end 52 | -------------------------------------------------------------------------------- /LiveStreamer/Capture/OWAppleEncoder.m: -------------------------------------------------------------------------------- 1 | // 2 | // OWAppleEncoder.m 3 | // OpenWatch 4 | // 5 | // Created by Christopher Ballinger on 11/13/12. 6 | // Copyright (c) 2012 OpenWatch FPC. All rights reserved. 
7 | // 8 | 9 | #import "OWAppleEncoder.h" 10 | #import 11 | #import 12 | #import 13 | #import 14 | 15 | @implementation OWAppleEncoder 16 | @synthesize assetWriter, audioEncoder, videoEncoder, movieURL, readyToRecordAudio, readyToRecordVideo, referenceOrientation, videoOrientation; 17 | @synthesize watchOutputFile; 18 | 19 | - (id) init { 20 | if (self = [super init]) { 21 | self.uuid = [[NSUUID UUID] UUIDString]; 22 | } 23 | return self; 24 | } 25 | 26 | - (id) initWithURL:(NSURL *)url movieFragmentInterval:(CMTime)fragmentInterval { 27 | if (self = [self init]) { 28 | self.movieURL = url; 29 | NSError *error = nil; 30 | self.assetWriter = [[AVAssetWriter alloc] initWithURL:movieURL fileType:(NSString *)kUTTypeMPEG4 error:&error]; 31 | if (error) { 32 | [self showError:error]; 33 | } 34 | assetWriter.movieFragmentInterval = fragmentInterval; 35 | referenceOrientation = UIDeviceOrientationLandscapeRight; 36 | fileOffset = 0; 37 | fileNumber = 0; 38 | source = NULL; 39 | } 40 | return self; 41 | } 42 | 43 | - (id) initWithURL:(NSURL *)url { 44 | if (self = [self initWithURL:url movieFragmentInterval:kCMTimeInvalid]) { 45 | 46 | } 47 | return self; 48 | } 49 | 50 | // Modified from 51 | // http://www.davidhamrick.com/2011/10/13/Monitoring-Files-With-GCD-Being-Edited-With-A-Text-Editor.html 52 | - (void)watchOutputFileHandle 53 | { 54 | dispatch_queue_t queue = dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0); 55 | int fildes = open([[movieURL path] UTF8String], O_EVTONLY); 56 | 57 | source = dispatch_source_create(DISPATCH_SOURCE_TYPE_VNODE,fildes, 58 | DISPATCH_VNODE_DELETE | DISPATCH_VNODE_WRITE | DISPATCH_VNODE_EXTEND | DISPATCH_VNODE_ATTRIB | DISPATCH_VNODE_LINK | DISPATCH_VNODE_RENAME | DISPATCH_VNODE_REVOKE, 59 | queue); 60 | dispatch_source_set_event_handler(source, ^ 61 | { 62 | unsigned long flags = dispatch_source_get_data(source); 63 | if(flags & DISPATCH_VNODE_DELETE) 64 | { 65 | dispatch_source_cancel(source); 66 | //[blockSelf 
watchStyleSheet:path]; 67 | } 68 | if(flags & DISPATCH_VNODE_EXTEND) 69 | { 70 | //NSLog(@"File size changed"); 71 | NSError *error = nil; 72 | NSFileHandle *fileHandle = [NSFileHandle fileHandleForReadingFromURL:movieURL error:&error]; 73 | if (error) { 74 | [self showError:error]; 75 | } 76 | [fileHandle seekToFileOffset:fileOffset]; 77 | NSData *newData = [fileHandle readDataToEndOfFile]; 78 | if ([newData length] > 0) { 79 | NSLog(@"newData (%lld): %d bytes", fileOffset, [newData length]); 80 | NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES); 81 | NSString *basePath = ([paths count] > 0) ? [paths objectAtIndex:0] : nil; 82 | NSString *movieName = [NSString stringWithFormat:@"%d.%lld.%d.mp4", fileNumber, fileOffset, [newData length]]; 83 | NSString *path = [NSString stringWithFormat:@"%@/%@", basePath, movieName]; 84 | [newData writeToFile:path atomically:NO]; 85 | fileNumber++; 86 | fileOffset = [fileHandle offsetInFile]; 87 | } 88 | } 89 | }); 90 | dispatch_source_set_cancel_handler(source, ^(void) 91 | { 92 | close(fildes); 93 | }); 94 | dispatch_resume(source); 95 | } 96 | 97 | 98 | 99 | - (AVAssetWriterInput*) setupVideoEncoderWithAssetWriter:(AVAssetWriter*)currentAssetWriter formatDescription:(CMFormatDescriptionRef)formatDescription bitsPerSecond:(int)bps 100 | { 101 | CMVideoDimensions dimensions = CMVideoFormatDescriptionGetDimensions(formatDescription); 102 | CGFloat width = dimensions.width; 103 | CGFloat height = dimensions.height; 104 | AVAssetWriterInput *currentVideoEncoder = nil; 105 | 106 | NSDictionary *videoCompressionSettings = [NSDictionary dictionaryWithObjectsAndKeys: 107 | AVVideoCodecH264, AVVideoCodecKey, 108 | [NSNumber numberWithInteger:width], AVVideoWidthKey, 109 | [NSNumber numberWithInteger:height], AVVideoHeightKey, 110 | [NSDictionary dictionaryWithObjectsAndKeys: 111 | [NSNumber numberWithInteger:bps], AVVideoAverageBitRateKey, 112 | [NSNumber numberWithInteger:300], 
AVVideoMaxKeyFrameIntervalKey, 113 | nil], AVVideoCompressionPropertiesKey, 114 | nil]; 115 | if ([currentAssetWriter canApplyOutputSettings:videoCompressionSettings forMediaType:AVMediaTypeVideo]) { 116 | currentVideoEncoder = [[AVAssetWriterInput alloc] initWithMediaType:AVMediaTypeVideo outputSettings:videoCompressionSettings]; 117 | currentVideoEncoder.expectsMediaDataInRealTime = YES; 118 | //currentVideoEncoder.transform = [self transformFromCurrentVideoOrientationToOrientation:self.referenceOrientation]; 119 | if ([currentAssetWriter canAddInput:currentVideoEncoder]) { 120 | @try { 121 | [currentAssetWriter addInput:currentVideoEncoder]; 122 | } 123 | @catch (NSException *exception) { 124 | NSLog(@"Couldn't add input: %@", [exception description]); 125 | [self handleException:exception]; 126 | } 127 | } else { 128 | NSLog(@"Couldn't add asset writer video input."); 129 | } 130 | } 131 | else { 132 | NSLog(@"Couldn't apply video output settings."); 133 | } 134 | return currentVideoEncoder; 135 | } 136 | 137 | - (void) setupVideoEncoderWithFormatDescription:(CMFormatDescriptionRef)formatDescription bitsPerSecond:(int)bps { 138 | videoFormatDescription = formatDescription; 139 | videoEncoder = [self setupVideoEncoderWithAssetWriter:assetWriter formatDescription:formatDescription bitsPerSecond:bps]; 140 | self.readyToRecordVideo = YES; 141 | } 142 | 143 | - (void) setupVideoEncoderWithFormatDescription:(CMFormatDescriptionRef)formatDescription; 144 | { 145 | float bitsPerPixel; 146 | 147 | CMVideoDimensions dimensions = CMVideoFormatDescriptionGetDimensions(formatDescription); 148 | CGFloat width = dimensions.width; 149 | CGFloat height = dimensions.height; 150 | 151 | int numPixels = width * height; 152 | int bitsPerSecond; 153 | 154 | bitsPerPixel = 4.05; 155 | 156 | bitsPerSecond = numPixels * bitsPerPixel; 157 | [self setupVideoEncoderWithFormatDescription:formatDescription bitsPerSecond:bitsPerSecond]; 158 | } 159 | 160 | - 
(CGFloat)angleOffsetFromPortraitOrientationToOrientation:(AVCaptureVideoOrientation)orientation 161 | { 162 | CGFloat angle = 0.0; 163 | 164 | switch (orientation) { 165 | case AVCaptureVideoOrientationPortrait: 166 | angle = 0.0; 167 | break; 168 | case AVCaptureVideoOrientationPortraitUpsideDown: 169 | angle = M_PI; 170 | break; 171 | case AVCaptureVideoOrientationLandscapeRight: 172 | angle = -M_PI_2; 173 | break; 174 | case AVCaptureVideoOrientationLandscapeLeft: 175 | angle = M_PI_2; 176 | break; 177 | default: 178 | break; 179 | } 180 | 181 | return angle; 182 | } 183 | 184 | - (CGAffineTransform)transformFromCurrentVideoOrientationToOrientation:(AVCaptureVideoOrientation)orientation 185 | { 186 | CGAffineTransform transform = CGAffineTransformIdentity; 187 | 188 | // Calculate offsets from an arbitrary reference orientation (portrait) 189 | CGFloat orientationAngleOffset = [self angleOffsetFromPortraitOrientationToOrientation:orientation]; 190 | CGFloat videoOrientationAngleOffset = [self angleOffsetFromPortraitOrientationToOrientation:self.videoOrientation]; 191 | 192 | // Find the difference in angle between the passed in orientation and the current video orientation 193 | CGFloat angleOffset = orientationAngleOffset - videoOrientationAngleOffset; 194 | transform = CGAffineTransformMakeRotation(angleOffset); 195 | 196 | return transform; 197 | } 198 | 199 | - (void) setupAudioEncoderWithFormatDescription:(CMFormatDescriptionRef)formatDescription { 200 | [self setupAudioEncoderWithFormatDescription:formatDescription bitsPerSecond:64000]; 201 | } 202 | - (void) setupAudioEncoderWithFormatDescription:(CMFormatDescriptionRef)formatDescription bitsPerSecond:(int)bps { 203 | audioFormatDescription = formatDescription; 204 | audioEncoder = [self setupAudioEncoderWithAssetWriter:assetWriter formatDescription:formatDescription bitsPerSecond:bps]; 205 | self.readyToRecordAudio = YES; 206 | } 207 | 208 | - (AVAssetWriterInput*) 
setupAudioEncoderWithAssetWriter:(AVAssetWriter*)currentAssetWriter formatDescription:(CMFormatDescriptionRef)formatDescription bitsPerSecond:(int)bps 209 | { 210 | const AudioStreamBasicDescription *currentASBD = CMAudioFormatDescriptionGetStreamBasicDescription(formatDescription); 211 | AVAssetWriterInput *currentAudioEncoder = nil; 212 | 213 | size_t aclSize = 0; 214 | const AudioChannelLayout *currentChannelLayout = CMAudioFormatDescriptionGetChannelLayout(formatDescription, &aclSize); 215 | NSData *currentChannelLayoutData = nil; 216 | 217 | // AVChannelLayoutKey must be specified, but if we don't know any better give an empty data and let AVAssetWriter decide. 218 | if ( currentChannelLayout && aclSize > 0 ) 219 | currentChannelLayoutData = [NSData dataWithBytes:currentChannelLayout length:aclSize]; 220 | else 221 | currentChannelLayoutData = [NSData data]; 222 | 223 | NSDictionary *audioCompressionSettings = [NSDictionary dictionaryWithObjectsAndKeys: 224 | [NSNumber numberWithInteger:kAudioFormatMPEG4AAC], AVFormatIDKey, 225 | [NSNumber numberWithFloat:currentASBD->mSampleRate], AVSampleRateKey, 226 | [NSNumber numberWithInt:64000], AVEncoderBitRatePerChannelKey, 227 | [NSNumber numberWithInteger:currentASBD->mChannelsPerFrame], AVNumberOfChannelsKey, 228 | currentChannelLayoutData, AVChannelLayoutKey, 229 | nil]; 230 | if ([currentAssetWriter canApplyOutputSettings:audioCompressionSettings forMediaType:AVMediaTypeAudio]) { 231 | currentAudioEncoder = [[AVAssetWriterInput alloc] initWithMediaType:AVMediaTypeAudio outputSettings:audioCompressionSettings]; 232 | currentAudioEncoder.expectsMediaDataInRealTime = YES; 233 | if ([currentAssetWriter canAddInput:currentAudioEncoder]) { 234 | @try { 235 | [currentAssetWriter addInput:currentAudioEncoder]; 236 | } 237 | @catch (NSException *exception) { 238 | NSLog(@"Couldn't add audio input: %@", [exception description]); 239 | [self handleException:exception]; 240 | } 241 | } else { 242 | NSLog(@"Couldn't add asset 
writer audio input."); 243 | } 244 | } 245 | else { 246 | NSLog(@"Couldn't apply audio output settings."); 247 | } 248 | return currentAudioEncoder; 249 | } 250 | 251 | - (void) writeSampleBuffer:(CMSampleBufferRef)sampleBuffer ofType:(NSString *)mediaType 252 | { 253 | if ( assetWriter.status == AVAssetWriterStatusUnknown ) { 254 | if ([assetWriter startWriting]) { 255 | @try { 256 | [assetWriter startSessionAtSourceTime:CMSampleBufferGetPresentationTimeStamp(sampleBuffer)]; 257 | } 258 | @catch (NSException *exception) { 259 | [self handleException:exception]; 260 | } 261 | } 262 | else { 263 | [self showError:[assetWriter error]]; 264 | } 265 | } 266 | 267 | if ( assetWriter.status == AVAssetWriterStatusWriting ) { 268 | if (watchOutputFile && !source) { 269 | [self watchOutputFileHandle]; 270 | } 271 | 272 | if (mediaType == AVMediaTypeVideo) { 273 | if (videoEncoder.readyForMoreMediaData) { 274 | @try { 275 | if (![videoEncoder appendSampleBuffer:sampleBuffer]) { 276 | [self showError:[assetWriter error]]; 277 | } 278 | } 279 | @catch (NSException *exception) { 280 | NSLog(@"Couldn't append video sample buffer: %@", [exception description]); 281 | [self handleException:exception]; 282 | } 283 | } 284 | } 285 | else if (mediaType == AVMediaTypeAudio) { 286 | if (audioEncoder.readyForMoreMediaData) { 287 | @try { 288 | if (![audioEncoder appendSampleBuffer:sampleBuffer]) { 289 | [self showError:[assetWriter error]]; 290 | } 291 | } 292 | @catch (NSException *exception) { 293 | NSLog(@"Couldn't append audio sample buffer: %@", [exception description]); 294 | [self handleException:exception]; 295 | } 296 | } 297 | } 298 | } 299 | } 300 | 301 | - (void) handleException:(NSException *)exception { 302 | NSLog(@"Exception caught: %@", exception.description); 303 | } 304 | 305 | - (void) finishEncoding { 306 | self.readyToRecordAudio = NO; 307 | self.readyToRecordVideo = NO; 308 | if (assetWriter.status == AVAssetWriterStatusWriting) { 309 | @try { 310 | 
[self.audioEncoder markAsFinished]; 311 | [self.videoEncoder markAsFinished]; 312 | [assetWriter finishWritingWithCompletionHandler:^{ 313 | if (assetWriter.status == AVAssetWriterStatusFailed) { 314 | [self showError:[assetWriter error]]; 315 | } else { 316 | [self uploadLocalURL:assetWriter.outputURL]; 317 | } 318 | }]; 319 | } 320 | @catch (NSException *exception) { 321 | NSLog(@"Error: Caught exception: %@", [exception description]); 322 | [self handleException:exception]; 323 | } 324 | 325 | } 326 | if(source) { 327 | dispatch_source_cancel(source); 328 | source = NULL; 329 | } 330 | } 331 | 332 | - (void) uploadLocalURL:(NSURL*)url { 333 | NSLog(@"upload: %@", url); 334 | } 335 | 336 | - (void) showError:(NSError*)error { 337 | NSLog(@"Error: %@%@", [error localizedDescription], [error userInfo]); 338 | } 339 | 340 | @end 341 | -------------------------------------------------------------------------------- /LiveStreamer/Capture/OWCaptureController.h: -------------------------------------------------------------------------------- 1 | // 2 | // OWCaptureController.h 3 | // OpenWatch 4 | // 5 | // Created by Christopher Ballinger on 11/29/12. 6 | // Copyright (c) 2012 OpenWatch FPC. All rights reserved. 7 | // 8 | 9 | #import 10 | #import "OWVideoProcessor.h" 11 | 12 | @interface OWCaptureController : NSObject 13 | 14 | @property (nonatomic, strong) OWVideoProcessor *videoProcessor; 15 | 16 | + (OWCaptureController *)sharedInstance; 17 | 18 | @end 19 | -------------------------------------------------------------------------------- /LiveStreamer/Capture/OWCaptureController.m: -------------------------------------------------------------------------------- 1 | // 2 | // OWCaptureController.m 3 | // OpenWatch 4 | // 5 | // Created by Christopher Ballinger on 11/29/12. 6 | // Copyright (c) 2012 OpenWatch FPC. All rights reserved. 
7 | // 8 | 9 | #import "OWCaptureController.h" 10 | 11 | @implementation OWCaptureController 12 | @synthesize videoProcessor; 13 | 14 | + (OWCaptureController *)sharedInstance { 15 | static OWCaptureController *_sharedClient = nil; 16 | static dispatch_once_t onceToken; 17 | dispatch_once(&onceToken, ^{ 18 | _sharedClient = [[OWCaptureController alloc] init]; 19 | }); 20 | return _sharedClient; 21 | } 22 | 23 | - (id) init { 24 | if (self = [super init]) { 25 | self.videoProcessor = [[OWVideoProcessor alloc] init]; 26 | } 27 | return self; 28 | } 29 | 30 | @end 31 | -------------------------------------------------------------------------------- /LiveStreamer/Capture/OWCaptureViewController.h: -------------------------------------------------------------------------------- 1 | // 2 | // OWCaptureViewController.h 3 | // OpenWatch 4 | // 5 | // Created by Christopher Ballinger on 11/13/12. 6 | // Copyright (c) 2012 OpenWatch FPC. All rights reserved. 7 | // 8 | 9 | #import 10 | #import "OWVideoProcessor.h" 11 | #import "OWTimerView.h" 12 | #import "OWRecordingActivityIndicatorView.h" 13 | #import "BButton.h" 14 | 15 | @class AVCaptureVideoPreviewLayer, OWCaptureViewController; 16 | 17 | @protocol OWCaptureDelegate 18 | @optional 19 | - (void) captureViewControllerDidFinishRecording:(OWCaptureViewController*)captureViewController; 20 | - (void) captureViewControllerDidCancel:(OWCaptureViewController*)captureViewController; 21 | @end 22 | 23 | @interface OWCaptureViewController : UIViewController { 24 | UIBackgroundTaskIdentifier backgroundRecordingID; 25 | } 26 | 27 | @property (nonatomic, weak) id delegate; 28 | @property (nonatomic, strong) OWTimerView *timerView; 29 | @property (nonatomic, strong) OWRecordingActivityIndicatorView *recordingIndicator; 30 | @property (nonatomic, strong) UILabel *uploadStatusLabel; 31 | 32 | @property (nonatomic, strong) UILabel *startRecordingLabel; 33 | @property (nonatomic, strong) UIButton *fullscreenRecordButton; 34 | @property 
(nonatomic, strong) BButton *finishButton; 35 | 36 | @property (nonatomic, strong) OWVideoProcessor *videoProcessor; 37 | @property (nonatomic, strong) UIView *videoPreviewView; 38 | @property (nonatomic, strong) AVCaptureVideoPreviewLayer *captureVideoPreviewLayer; 39 | 40 | @end 41 | -------------------------------------------------------------------------------- /LiveStreamer/Capture/OWCaptureViewController.m: -------------------------------------------------------------------------------- 1 | // 2 | // OWCaptureViewController.m 3 | // OpenWatch 4 | // 5 | // Created by Christopher Ballinger on 11/13/12. 6 | // Copyright (c) 2012 OpenWatch FPC. All rights reserved. 7 | // 8 | 9 | #import "OWCaptureViewController.h" 10 | #import "OWCaptureController.h" 11 | #import "OWAppDelegate.h" 12 | #import "OWUtilities.h" 13 | 14 | @interface OWCaptureViewController () 15 | @end 16 | 17 | @implementation OWCaptureViewController 18 | @synthesize videoPreviewView, captureVideoPreviewLayer, videoProcessor, fullscreenRecordButton, recordingIndicator, timerView, delegate, uploadStatusLabel, finishButton, startRecordingLabel; 19 | 20 | - (id) init { 21 | if (self = [super init]) { 22 | self.videoProcessor = [OWCaptureController sharedInstance].videoProcessor; 23 | self.videoProcessor.delegate = self; 24 | [self.videoProcessor setupAndStartCaptureSession]; 25 | self.videoPreviewView = [[UIView alloc] init]; 26 | self.title = @"Streaming"; 27 | [self setupFinishButton]; 28 | self.recordingIndicator = [[OWRecordingActivityIndicatorView alloc] init]; 29 | self.timerView = [[OWTimerView alloc] init]; 30 | self.uploadStatusLabel = [[UILabel alloc] init]; 31 | self.uploadStatusLabel.text = @"Streaming..."; 32 | self.uploadStatusLabel.textAlignment = NSTextAlignmentRight; 33 | self.uploadStatusLabel.backgroundColor = [UIColor clearColor]; 34 | 35 | self.fullscreenRecordButton = [UIButton buttonWithType:UIButtonTypeCustom]; 36 | [fullscreenRecordButton addTarget:self 
action:@selector(startRecordingPressed:) forControlEvents:UIControlEventTouchUpInside]; 37 | 38 | [self setupStartRecordingLabel]; 39 | } 40 | return self; 41 | } 42 | 43 | - (void) setupStartRecordingLabel { 44 | self.startRecordingLabel = [[UILabel alloc] init]; 45 | self.startRecordingLabel.font = [UIFont fontWithName:@"HelveticaNeue-Bold" size:30.0f]; 46 | self.startRecordingLabel.numberOfLines = 0; 47 | self.startRecordingLabel.textColor = [UIColor whiteColor]; 48 | self.startRecordingLabel.textAlignment = NSTextAlignmentCenter; 49 | self.startRecordingLabel.text = @"Touch anywhere to start."; 50 | self.startRecordingLabel.backgroundColor = [UIColor clearColor]; 51 | self.startRecordingLabel.layer.shadowRadius = 2.5; 52 | self.startRecordingLabel.layer.masksToBounds = NO; 53 | self.startRecordingLabel.layer.shadowOpacity = 0.7; 54 | self.startRecordingLabel.layer.shouldRasterize = YES; 55 | self.startRecordingLabel.layer.shadowOffset = CGSizeMake(0, 0); 56 | } 57 | 58 | - (void) startRecordingPressed:(id)sender { 59 | [self.fullscreenRecordButton removeFromSuperview]; 60 | [self.startRecordingLabel removeFromSuperview]; 61 | [self.view addSubview:uploadStatusLabel]; 62 | [videoProcessor startRecording]; 63 | [self.finishButton setTitle:@"Stop" forState:UIControlStateNormal]; 64 | } 65 | 66 | - (void) setupFinishButton { 67 | self.finishButton = [[BButton alloc] initWithFrame:CGRectZero type:BButtonTypeDanger]; 68 | self.finishButton.layer.opacity = 0.7; 69 | [finishButton setTitle:@"Cancel" forState:UIControlStateNormal]; 70 | [self.finishButton addTarget:self action:@selector(finishButtonPressed:) forControlEvents:UIControlEventTouchUpInside]; 71 | } 72 | 73 | - (void) loadView { 74 | [super loadView]; 75 | self.videoPreviewView.frame = self.view.bounds; 76 | self.videoPreviewView.autoresizingMask = UIViewAutoresizingFlexibleHeight | UIViewAutoresizingFlexibleWidth | UIViewAutoresizingFlexibleRightMargin | UIViewAutoresizingFlexibleBottomMargin; 77 | 
[self.view addSubview:videoPreviewView]; 78 | [self.view addSubview:startRecordingLabel]; 79 | [self.view addSubview:fullscreenRecordButton]; 80 | [self.view addSubview:finishButton]; 81 | [self.view addSubview:recordingIndicator]; 82 | [self.view addSubview:timerView]; 83 | } 84 | 85 | - (void) finishButtonPressed:(id)sender { 86 | if (![videoProcessor isRecording]) { 87 | OW_APP_DELEGATE.forceLandscapeRight = NO; 88 | [self.delegate captureViewControllerDidCancel:self]; 89 | } else { 90 | UIAlertView *alert = [[UIAlertView alloc] initWithTitle:@"Stop Recording?" message:nil delegate:self cancelButtonTitle:@"No" otherButtonTitles:@"Yes", nil]; 91 | [alert show]; 92 | } 93 | } 94 | 95 | - (void)viewDidLoad 96 | { 97 | [super viewDidLoad]; 98 | [[UIApplication sharedApplication] setStatusBarHidden:YES]; 99 | self.captureVideoPreviewLayer = [AVCaptureVideoPreviewLayer layerWithSession:videoProcessor.captureSession]; 100 | self.captureVideoPreviewLayer.orientation = AVCaptureVideoOrientationLandscapeRight; 101 | 102 | UIView *view = [self videoPreviewView]; 103 | CALayer *viewLayer = [view layer]; 104 | [viewLayer setMasksToBounds:YES]; 105 | 106 | CGRect bounds = [view bounds]; 107 | [captureVideoPreviewLayer setFrame:bounds]; 108 | 109 | /* iOS 6 doesn't like this 110 | if ([captureVideoPreviewLayer isOrientationSupported]) { 111 | [captureVideoPreviewLayer setOrientation:AVCaptureVideoOrientationLandscapeRight]; 112 | } 113 | */ 114 | 115 | [captureVideoPreviewLayer setVideoGravity:AVLayerVideoGravityResizeAspectFill]; 116 | 117 | [videoPreviewView.layer addSublayer:captureVideoPreviewLayer]; 118 | } 119 | 120 | - (void) viewWillDisappear:(BOOL)animated { 121 | [super viewWillDisappear:animated]; 122 | [[UIApplication sharedApplication] setStatusBarHidden:NO]; 123 | } 124 | 125 | - (void) viewWillAppear:(BOOL)animated { 126 | [super viewWillAppear:animated]; 127 | OW_APP_DELEGATE.forceLandscapeRight = YES; 128 | self.videoPreviewView.frame = self.view.bounds; 129 | 
CGFloat buttonWidth = 100.0f; 130 | CGFloat buttonHeight = 45.0f; 131 | CGFloat padding = 10.0f; 132 | CGFloat labelWidth = 100.0f; 133 | CGFloat labelHeight = 30.0f; 134 | 135 | CGFloat frameWidth = self.view.bounds.size.width; 136 | CGFloat frameHeight = self.view.bounds.size.height; 137 | 138 | self.uploadStatusLabel.frame = CGRectMake(frameWidth - labelWidth - padding, padding, labelWidth, labelHeight); 139 | self.recordingIndicator.frame = CGRectMake(padding, padding, 35, 35); 140 | self.finishButton.frame = CGRectMake(frameWidth - buttonWidth - padding, frameHeight - buttonHeight - padding, buttonWidth, buttonHeight); 141 | self.fullscreenRecordButton.frame = self.view.bounds; 142 | self.startRecordingLabel.frame = self.view.bounds; 143 | 144 | self.timerView.frame = CGRectMake([OWUtilities rightOfView:recordingIndicator], padding, 100, 35); 145 | [captureVideoPreviewLayer setFrame:self.view.bounds]; 146 | } 147 | 148 | 149 | - (void)didReceiveMemoryWarning 150 | { 151 | [super didReceiveMemoryWarning]; 152 | // Dispose of any resources that can be recreated. 
153 | } 154 | 155 | #pragma mark OWVideoProcessorDelegate 156 | 157 | - (void)recordingWillStart 158 | { 159 | dispatch_async(dispatch_get_main_queue(), ^{ 160 | [[self finishButton] setEnabled:NO]; 161 | [[self finishButton] setTitle:@"Stop" forState:UIControlStateNormal]; 162 | 163 | // Disable the idle timer while we are recording 164 | [UIApplication sharedApplication].idleTimerDisabled = YES; 165 | 166 | // Make sure we have time to finish saving the movie if the app is backgrounded during recording 167 | if ([[UIDevice currentDevice] isMultitaskingSupported]) 168 | backgroundRecordingID = [[UIApplication sharedApplication] beginBackgroundTaskWithExpirationHandler:^{}]; 169 | }); 170 | } 171 | 172 | - (void)recordingDidStart 173 | { 174 | dispatch_async(dispatch_get_main_queue(), ^{ 175 | [[self finishButton] setEnabled:YES]; 176 | [self.timerView startTimer]; 177 | [self.recordingIndicator startAnimating]; 178 | }); 179 | } 180 | 181 | - (void)recordingWillStop 182 | { 183 | dispatch_async(dispatch_get_main_queue(), ^{ 184 | OW_APP_DELEGATE.forceLandscapeRight = NO; 185 | // Disable until saving to the camera roll is complete 186 | [[self finishButton] setEnabled:NO]; 187 | }); 188 | } 189 | 190 | - (void)recordingDidStop 191 | { 192 | dispatch_async(dispatch_get_main_queue(), ^{ 193 | [self.timerView stopTimer]; 194 | [self.recordingIndicator stopAnimating]; 195 | 196 | [UIApplication sharedApplication].idleTimerDisabled = NO; 197 | 198 | if ([[UIDevice currentDevice] isMultitaskingSupported]) { 199 | [[UIApplication sharedApplication] endBackgroundTask:backgroundRecordingID]; 200 | backgroundRecordingID = UIBackgroundTaskInvalid; 201 | } 202 | 203 | 204 | if (self.delegate && [self.delegate respondsToSelector:@selector(captureViewControllerDidFinishRecording:)]) { 205 | [self.delegate captureViewControllerDidFinishRecording:self]; 206 | } 207 | }); 208 | } 209 | 210 | - (BOOL) 
shouldAutorotateToInterfaceOrientation:(UIInterfaceOrientation)interfaceOrientation { 211 | return UIInterfaceOrientationLandscapeRight == interfaceOrientation; 212 | } 213 | 214 | - (BOOL) shouldAutorotate { 215 | return NO; 216 | } 217 | 218 | -(NSUInteger) supportedInterfaceOrientations { 219 | return UIInterfaceOrientationMaskLandscapeRight; 220 | } 221 | 222 | - (UIInterfaceOrientation)preferredInterfaceOrientationForPresentation 223 | { 224 | return UIInterfaceOrientationLandscapeRight; 225 | } 226 | 227 | -(void)willAnimateRotationToInterfaceOrientation: 228 | (UIInterfaceOrientation)toInterfaceOrientation 229 | duration:(NSTimeInterval)duration { 230 | 231 | [CATransaction begin]; 232 | if (toInterfaceOrientation==UIInterfaceOrientationLandscapeRight){ 233 | captureVideoPreviewLayer.orientation = AVCaptureVideoOrientationLandscapeRight; 234 | } else { 235 | captureVideoPreviewLayer.orientation = AVCaptureVideoOrientationLandscapeRight; 236 | } 237 | 238 | [CATransaction commit]; 239 | [super willAnimateRotationToInterfaceOrientation:toInterfaceOrientation duration:duration]; 240 | } 241 | 242 | 243 | - (void) alertView:(UIAlertView *)alertView didDismissWithButtonIndex:(NSInteger)buttonIndex { 244 | if (buttonIndex != alertView.cancelButtonIndex) { 245 | if ( [videoProcessor isRecording] ) { 246 | [[self finishButton] setEnabled:NO]; 247 | [videoProcessor stopRecording]; 248 | } 249 | } 250 | } 251 | 252 | 253 | @end 254 | -------------------------------------------------------------------------------- /LiveStreamer/Capture/OWRecordingActivityIndicatorView.h: -------------------------------------------------------------------------------- 1 | // 2 | // OWRecordingActivityIndicatorView.h 3 | // OpenWatch 4 | // 5 | // Created by Christopher Ballinger on 5/9/13. 6 | // Copyright (c) 2013 OpenWatch FPC. All rights reserved. 
#import <UIKit/UIKit.h>

/// A pulsing red "recording" dot. Drive it with -startAnimating and
/// -stopAnimating; the dot fades in and out while animating.
@interface OWRecordingActivityIndicatorView : UIView

@property (nonatomic, readonly) BOOL isAnimating;

@property (nonatomic, strong) UIImageView *imageView;
@property (nonatomic, strong) NSTimer *animationTimer;

- (void) startAnimating;
- (void) stopAnimating;

@end

//
//  OWRecordingActivityIndicatorView.m
//  OpenWatch
//
//  Created by Christopher Ballinger on 5/9/13.
//  Copyright (c) 2013 OpenWatch FPC. All rights reserved.
//

#import "OWRecordingActivityIndicatorView.h"
#import <QuartzCore/QuartzCore.h>

// Duration of one fade (in or out); a full pulse is two of these.
#define kAnimationDuration 0.9f

@interface OWRecordingActivityIndicatorView()
// Redeclared readwrite for internal use.
@property (nonatomic) BOOL isAnimating;
@end


@implementation OWRecordingActivityIndicatorView
@synthesize imageView, isAnimating, animationTimer;

- (id)initWithFrame:(CGRect)frame
{
    if (!(self = [super initWithFrame:frame])) {
        return nil;
    }
    UIImageView *dotView = [[UIImageView alloc] initWithImage:[UIImage imageNamed:@"red_dot.png"]];
    dotView.frame = CGRectMake(0, 0, frame.size.width, frame.size.height);
    dotView.contentMode = UIViewContentModeScaleAspectFit;
    dotView.layer.opacity = 0.0f; // hidden until the first pulse fades it in
    self.imageView = dotView;
    [self addSubview:dotView];
    self.isAnimating = NO;
    return self;
}

// Keep the dot sized to our own bounds whenever the frame changes.
- (void) setFrame:(CGRect)frame {
    [super setFrame:frame];
    self.imageView.frame = CGRectMake(0, 0, frame.size.width, frame.size.height);
}

- (void) startAnimating {
    [self scheduleTimer];
    self.isAnimating = YES;
}

// One-shot timer; -animationTimerFired: reschedules while isAnimating is set,
// which keeps the pulse loop going.
- (void) scheduleTimer {
    self.animationTimer = [NSTimer scheduledTimerWithTimeInterval:0.2
                                                           target:self
                                                         selector:@selector(animationTimerFired:)
                                                         userInfo:nil
                                                          repeats:NO];
}

- (void) animationTimerFired:(NSTimer*)timer {
    [self.animationTimer invalidate];
    self.animationTimer = nil;
    // Fade in, then fade back out; loop only if animation is still wanted.
    [UIView animateWithDuration:kAnimationDuration animations:^{
        self.imageView.layer.opacity = 1.0f;
    } completion:^(BOOL finished) {
        [UIView animateWithDuration:kAnimationDuration animations:^{
            self.imageView.layer.opacity = 0.0f;
        } completion:^(BOOL finished) {
            if (isAnimating) {
                [self scheduleTimer];
            }
        }];
    }];
}

- (void) stopAnimating {
    self.isAnimating = NO;
    [self.animationTimer invalidate];
    self.animationTimer = nil;
    // Fade out from whatever opacity the pulse left behind.
    [UIView animateWithDuration:kAnimationDuration animations:^{
        self.imageView.layer.opacity = 0.0f;
    } completion:nil];
}

/*
// Only override drawRect: if you perform custom drawing.
// An empty implementation adversely affects performance during animation.
- (void)drawRect:(CGRect)rect
{
    // Drawing code
}
*/

@end

//
//  OWSegmentingAppleEncoder.h
//  OpenWatch
//
//  Created by Christopher Ballinger on 11/13/12.
//  Copyright (c) 2012 OpenWatch FPC. All rights reserved.
//
7 | // 8 | 9 | #import "OWAppleEncoder.h" 10 | #import "OWManifestGenerator.h" 11 | #import "FFmpegWrapper.h" 12 | 13 | @interface OWSegmentingAppleEncoder : OWAppleEncoder { 14 | dispatch_queue_t segmentingQueue; 15 | NSTimeInterval segmentationInterval; 16 | } 17 | 18 | @property (nonatomic, strong) FFmpegWrapper *ffmpegWrapper; 19 | @property (nonatomic, strong) OWManifestGenerator *manifestGenerator; 20 | @property (atomic, retain) AVAssetWriter *queuedAssetWriter; 21 | @property (atomic, retain) AVAssetWriterInput *queuedAudioEncoder; 22 | @property (atomic, retain) AVAssetWriterInput *queuedVideoEncoder; 23 | @property (atomic) BOOL shouldBeRecording; 24 | @property (atomic) NSUInteger segmentCount; 25 | @property (nonatomic, strong) NSString *basePath; 26 | 27 | @property (atomic) int videoBPS; // bits/sec 28 | @property (atomic) int audioBPS; // bits/sec 29 | 30 | @property (atomic, retain) NSTimer *segmentationTimer; 31 | 32 | - (id) initWithBasePath:(NSString*)basePath segmentationInterval:(NSTimeInterval)timeInterval; 33 | 34 | @end 35 | -------------------------------------------------------------------------------- /LiveStreamer/Capture/OWSegmentingAppleEncoder.m: -------------------------------------------------------------------------------- 1 | // 2 | // OWSegmentingAppleEncoder.m 3 | // OpenWatch 4 | // 5 | // Created by Christopher Ballinger on 11/13/12. 6 | // Copyright (c) 2012 OpenWatch FPC. All rights reserved. 
#import "OWSegmentingAppleEncoder.h"
#import <MobileCoreServices/MobileCoreServices.h>
#import "OWUtilities.h"
#import "OWAppDelegate.h"

#import "OWSharedS3Client.h"

#define kMinVideoBitrate 100000
#define kMaxVideoBitrate 400000

#define BUCKET_NAME @"openwatch-livestreamer"

@implementation OWSegmentingAppleEncoder
@synthesize segmentationTimer, queuedAssetWriter;
@synthesize queuedAudioEncoder, queuedVideoEncoder;
@synthesize audioBPS, videoBPS, shouldBeRecording;
@synthesize segmentCount;
@synthesize manifestGenerator;
@synthesize ffmpegWrapper;

// NOTE(review): a repeating NSTimer retains its target, so -dealloc cannot run
// while segmentationTimer is scheduled; -finishEncoding must be the path that
// actually invalidates the timer. This branch is a defensive fallback.
- (void) dealloc {
    if (self.segmentationTimer) {
        // Timers must be invalidated on the run loop that scheduled them (main).
        [self performSelectorOnMainThread:@selector(invalidateTimer) withObject:nil waitUntilDone:NO];
    }
}

// Stops segment rotation, tears down observers, and finalizes via super.
- (void) finishEncoding {
    self.readyToRecordAudio = NO;
    self.readyToRecordVideo = NO;
    self.shouldBeRecording = NO;
    if (self.segmentationTimer) {
        [self performSelectorOnMainThread:@selector(invalidateTimer) withObject:nil waitUntilDone:NO];
    }
    [[NSNotificationCenter defaultCenter] removeObserver:self];
    [super finishEncoding];
    //[[OWCaptureAPIClient sharedClient] finishedRecording:self.recording];
}

- (void) invalidateTimer {
    [self.segmentationTimer invalidate];
    self.segmentationTimer = nil;
}

// Must run on the main thread so the timer lands on the main run loop.
- (void) createAndScheduleTimer {
    self.segmentationTimer = [NSTimer scheduledTimerWithTimeInterval:segmentationInterval target:self selector:@selector(segmentRecording:) userInfo:nil repeats:YES];
    //[[NSRunLoop mainRunLoop] addTimer:segmentationTimer forMode:NSDefaultRunLoopMode];
}

// Designated initializer. Copies the static site assets (crossdomain.xml,
// playlist.m3u8, index.html) into the recording directory and uploads the
// root playlist to S3 so playback can start as soon as segments appear.
- (id) initWithBasePath:(NSString *)newBasePath segmentationInterval:(NSTimeInterval)timeInterval {
    if (self = [super init]) {
        self.basePath = newBasePath;
        self.shouldBeRecording = YES;
        segmentationInterval = timeInterval;
        [self performSelectorOnMainThread:@selector(createAndScheduleTimer) withObject:nil waitUntilDone:NO];
        //[[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(receivedBandwidthUpdateNotification:) name:kOWCaptureAPIClientBandwidthNotification object:nil];
        segmentingQueue = dispatch_queue_create("Segmenting Queue", DISPATCH_QUEUE_SERIAL);
        self.segmentCount = 0;
        self.ffmpegWrapper = [[FFmpegWrapper alloc] init];
        NSString *manifestFileName = @"chunklist.m3u8";
        NSString *m3u8Path = [newBasePath stringByAppendingPathComponent:manifestFileName];
        self.manifestGenerator = [[OWManifestGenerator alloc] initWithM3U8Path:m3u8Path targetSegmentDuration:(int)timeInterval];

        // FIX: check each call's BOOL/nil return value instead of `if (error)`.
        // Cocoa only guarantees *error is meaningful on failure, and the
        // original code reused a stale NSError across calls, which could log
        // phantom errors for calls that succeeded.
        NSError *error = nil;
        NSString *htmlFilePath = [[NSBundle mainBundle] pathForResource:@"index" ofType:@"html"];
        NSString *crossDomainPath = [[NSBundle mainBundle] pathForResource:@"crossdomain" ofType:@"xml"];
        NSString *crossdomainOutputPath = [self.basePath stringByAppendingPathComponent:@"crossdomain.xml"];
        NSFileManager *fileManager = [NSFileManager defaultManager];
        if (![fileManager copyItemAtPath:crossDomainPath toPath:crossdomainOutputPath error:&error]) {
            NSLog(@"error copying cross domain file: %@", error.userInfo);
        }
        NSString *rootPlaylistPath = [[NSBundle mainBundle] pathForResource:@"playlist" ofType:@"m3u8"];
        NSString *rootPlaylistOutputPath = [self.basePath stringByAppendingPathComponent:@"playlist.m3u8"];
        error = nil;
        if (![fileManager copyItemAtPath:rootPlaylistPath toPath:rootPlaylistOutputPath error:&error]) {
            // FIX: this log was copy-pasted and mislabeled the failure as the
            // cross domain file.
            NSLog(@"error copying root playlist file: %@", error.userInfo);
        }

        error = nil;
        NSString *html = [NSString stringWithContentsOfFile:htmlFilePath encoding:NSUTF8StringEncoding error:&error];
        if (!html) {
            NSLog(@"error loading html: %@", error.userInfo);
        }
        // Substitute the manifest filename into the player page template.
        NSString *newHTML = [html stringByReplacingOccurrencesOfString:@"{% manifest_file_name %}"
                                                            withString:manifestFileName];
        NSString *htmlIndexPath = [self.basePath stringByAppendingPathComponent:@"index.html"];
        error = nil;
        if (![newHTML writeToFile:htmlIndexPath atomically:YES encoding:NSUTF8StringEncoding error:&error]) {
            NSLog(@"error writing index.html: %@", error.userInfo);
        }

        NSString *playlistPath = [[NSBundle mainBundle] pathForResource:@"playlist" ofType:@"m3u8"];
        NSString *playlistKey = [NSString stringWithFormat:@"%@/%@",self.uuid, [playlistPath lastPathComponent]];
        [[OWSharedS3Client sharedClient] postObjectWithFile:playlistPath bucket:BUCKET_NAME key:playlistKey acl:@"public-read" success:^(S3PutObjectResponse *responseObject) {
            NSLog(@"success sending first manifest");
        } failure:^(NSError *error) {
            NSLog(@"error: %@", error.userInfo);
        }];
    }
    return self;
}

// Adapts the video bitrate to roughly half the measured bandwidth (minus the
// audio share), clamped to [kMinVideoBitrate, kMaxVideoBitrate].
- (void) receivedBandwidthUpdateNotification:(NSNotification*)notification {
    double bps = [[[notification userInfo] objectForKey:@"bps"] doubleValue];
    double vbps = (bps*0.5) - audioBPS;
    if (vbps < kMinVideoBitrate) {
        vbps = kMinVideoBitrate;
    }
    if (vbps > kMaxVideoBitrate) {
        vbps = kMaxVideoBitrate;
    }
    self.videoBPS = vbps;
    //self.videoBPS = videoBPS * 0.75;
    NSLog(@"bps: %f\tvideoBPS: %d\taudioBPS: %d", bps, videoBPS, audioBPS);
}

// Timer callback: swap in the queued writer, then finalize and upload the
// segment that just finished on the serial segmenting queue.
- (void) segmentRecording:(NSTimer*)timer {
    if (!shouldBeRecording) {
        // Stop future firings, but deliberately fall through once so the
        // in-flight segment is still finalized and uploaded.
        [timer invalidate];
    }
    AVAssetWriter *tempAssetWriter = self.assetWriter;
    AVAssetWriterInput *tempAudioEncoder = self.audioEncoder;
    AVAssetWriterInput *tempVideoEncoder = self.videoEncoder;
    self.assetWriter = queuedAssetWriter;
    self.audioEncoder = queuedAudioEncoder;
    self.videoEncoder = queuedVideoEncoder;
    NSLog(@"Switching encoders");

    dispatch_async(segmentingQueue, ^{
        if (tempAssetWriter.status == AVAssetWriterStatusWriting) {
            @try {
                [tempAudioEncoder markAsFinished];
                [tempVideoEncoder markAsFinished];
                [tempAssetWriter finishWritingWithCompletionHandler:^{
                    if (tempAssetWriter.status == AVAssetWriterStatusFailed) {
                        [self showError:tempAssetWriter.error];
                    } else {
                        [self uploadLocalURL:tempAssetWriter.outputURL];
                    }
                }];
            }
            @catch (NSException *exception) {
                NSLog(@"Caught exception: %@", [exception description]);
                //[BugSenseController logException:exception withExtraData:nil];
            }
        }
        self.segmentCount++;
        if (self.readyToRecordAudio && self.readyToRecordVideo) {
            NSError *error = nil;
            self.queuedAssetWriter = [[AVAssetWriter alloc] initWithURL:[OWUtilities urlForRecordingSegmentCount:segmentCount basePath:self.basePath] fileType:(NSString *)kUTTypeMPEG4 error:&error];
            // FIX: nil return, not a non-nil error, is the failure signal.
            if (!self.queuedAssetWriter) {
                [self showError:error];
            }
            self.queuedVideoEncoder = [self setupVideoEncoderWithAssetWriter:self.queuedAssetWriter formatDescription:videoFormatDescription bitsPerSecond:videoBPS];
            self.queuedAudioEncoder = [self setupAudioEncoderWithAssetWriter:self.queuedAssetWriter formatDescription:audioFormatDescription bitsPerSecond:audioBPS];
            //NSLog(@"Encoder switch finished");
        }
    });
}

// Lazily creates the active and queued writers the first time video arrives.
// NOTE(review): videoFormatDescription is stored without CFRetain — this
// assumes the capture pipeline keeps the description alive; confirm.
- (void) setupVideoEncoderWithFormatDescription:(CMFormatDescriptionRef)formatDescription bitsPerSecond:(int)bps {
    videoFormatDescription = formatDescription;
    videoBPS = bps;
    if (!self.assetWriter) {
        NSError *error = nil;
        self.assetWriter = [[AVAssetWriter alloc] initWithURL:[OWUtilities urlForRecordingSegmentCount:segmentCount basePath:self.basePath] fileType:(NSString *)kUTTypeMPEG4 error:&error];
        if (!self.assetWriter) {
            [self showError:error];
        }
    }
    self.videoEncoder = [self setupVideoEncoderWithAssetWriter:self.assetWriter formatDescription:formatDescription bitsPerSecond:bps];

    if (!queuedAssetWriter) {
        self.segmentCount++;
        NSError *error = nil;
        self.queuedAssetWriter = [[AVAssetWriter alloc] initWithURL:[OWUtilities urlForRecordingSegmentCount:segmentCount basePath:self.basePath] fileType:(NSString *)kUTTypeMPEG4 error:&error];
        if (!self.queuedAssetWriter) {
            [self showError:error];
        }
    }
    self.queuedVideoEncoder = [self setupVideoEncoderWithAssetWriter:self.queuedAssetWriter formatDescription:formatDescription bitsPerSecond:bps];
    self.readyToRecordVideo = YES;
}

// Audio counterpart of the method above; same lazy writer creation.
- (void) setupAudioEncoderWithFormatDescription:(CMFormatDescriptionRef)formatDescription bitsPerSecond:(int)bps {
    audioFormatDescription = formatDescription;
    audioBPS = bps;
    if (!self.assetWriter) {
        NSError *error = nil;
        self.assetWriter = [[AVAssetWriter alloc] initWithURL:[OWUtilities urlForRecordingSegmentCount:segmentCount basePath:self.basePath] fileType:(NSString *)kUTTypeMPEG4 error:&error];
        if (!self.assetWriter) {
            [self showError:error];
        }
    }
    self.audioEncoder = [self setupAudioEncoderWithAssetWriter:self.assetWriter formatDescription:formatDescription bitsPerSecond:bps];

    if (!queuedAssetWriter) {
        self.segmentCount++;
        NSError *error = nil;
        self.queuedAssetWriter = [[AVAssetWriter alloc] initWithURL:[OWUtilities urlForRecordingSegmentCount:segmentCount basePath:self.basePath] fileType:(NSString *)kUTTypeMPEG4 error:&error];
        if (!self.queuedAssetWriter) {
            [self showError:error];
        }
    }
    self.queuedAudioEncoder = [self setupAudioEncoderWithAssetWriter:self.queuedAssetWriter formatDescription:formatDescription bitsPerSecond:bps];
    self.readyToRecordAudio = YES;
}

// Force an immediate segment rotation after an exception so a broken writer
// is swapped out instead of wedging the pipeline.
- (void) handleException:(NSException *)exception {
    [super handleException:exception];
    [self segmentRecording:nil];
}

// Converts a finished .mp4 segment to MPEG-TS, uploads it, appends it to the
// manifest, then uploads the updated manifest.
- (void) uploadLocalURL:(NSURL*)url {
    NSLog(@"upload local url: %@", url);
    NSString *inputPath = [url path];
    NSString *outputPath = [inputPath stringByReplacingOccurrencesOfString:@".mp4" withString:@".ts"];
    NSString *outputFileName = [outputPath lastPathComponent];
    NSDictionary *options = @{kFFmpegOutputFormatKey: @"mpegts"};
    NSLog(@"%@ conversion...", outputFileName);
    [ffmpegWrapper convertInputPath:inputPath outputPath:outputPath options:options progressBlock:nil completionBlock:^(BOOL success, NSError *error) {
        if (success) {
            NSLog(@"%@ conversion complete", outputFileName);
            NSString *segmentKey = [NSString stringWithFormat:@"%@/%@", self.uuid, outputFileName];
            [[OWSharedS3Client sharedClient] postObjectWithFile:outputPath bucket:BUCKET_NAME key:segmentKey acl:@"public-read" success:^(S3PutObjectResponse *responseObject) {
                [manifestGenerator appendSegmentPath:outputPath duration:(int)segmentationInterval sequence:segmentCount completionBlock:^(BOOL success, NSError *error) {
                    if (success) {
                        NSString *manifestKey = [NSString stringWithFormat:@"%@/%@", self.uuid, [manifestGenerator.manifestPath lastPathComponent]];
                        [[OWSharedS3Client sharedClient] postObjectWithFile:manifestGenerator.manifestPath bucket:BUCKET_NAME key:manifestKey acl:@"public-read" success:^(S3PutObjectResponse *responseObject) {
                            NSLog(@"success updating manifest after uploading %@", outputFileName);
                        } failure:^(NSError *error) {
                            // FIX: typo "uplaoding" corrected.
                            NSLog(@"error uploading manifest after %@", outputFileName);
                        }];
                    } else {
                        NSLog(@"Error creating manifest: %@", error.userInfo);
                    }
                }];
                NSLog(@"%@ upload complete: %@", outputFileName, responseObject.description);
            } failure:^(NSError *error) {
                NSLog(@"error posting segment %@: %@", outputFileName, error.userInfo);
            }];
        } else {
            NSLog(@"conversion error: %@", error.userInfo);
        }
    }];
}

@end
//
//  OWTimerView.h
//  OpenWatch
//
//  Created by Christopher Ballinger on 5/9/13.
//  Copyright (c) 2013 OpenWatch FPC. All rights reserved.
//

#import <UIKit/UIKit.h>

/// Elapsed-time label ("HH:MM:SS") driven by a 1 Hz timer.
@interface OWTimerView : UIView

@property (nonatomic, readonly) BOOL isAnimating;

@property (nonatomic, strong) NSDate *startDate;
@property (nonatomic, strong) NSTimer *animationTimer;
@property (nonatomic, strong) UILabel *timeLabel;

- (void) startTimer;
- (void) stopTimer;
- (void) resetTimer;

@end

//
//  OWTimerView.m
//  OpenWatch
//
//  Created by Christopher Ballinger on 5/9/13.
//  Copyright (c) 2013 OpenWatch FPC. All rights reserved.
//

#import "OWTimerView.h"

@implementation OWTimerView
@synthesize startDate, animationTimer, timeLabel;

- (void) dealloc {
    // Kill the timer so it cannot fire into a deallocated view.
    [self.animationTimer invalidate];
    self.animationTimer = nil;
}

- (id)initWithFrame:(CGRect)frame
{
    if (!(self = [super initWithFrame:frame])) {
        return nil;
    }
    UILabel *label = [[UILabel alloc] initWithFrame:CGRectMake(0, 0, frame.size.width, frame.size.height)];
    label.textAlignment = NSTextAlignmentCenter;
    label.backgroundColor = [UIColor clearColor];
    label.font = [UIFont boldSystemFontOfSize:18.0f];
    label.text = @"00:00:00";
    self.timeLabel = label;
    [self addSubview:label];
    return self;
}

// Keep the label sized to our own bounds whenever the frame changes.
- (void) setFrame:(CGRect)frame {
    [super setFrame:frame];
    self.timeLabel.frame = CGRectMake(0, 0, frame.size.width, frame.size.height);
}

// Renders a whole-second interval as zero-padded HH:MM:SS.
- (void) updateLabel:(UILabel*)label withTime:(NSTimeInterval)time
{
    int totalSeconds = (int)time;
    int hours = totalSeconds / 3600;
    int minutes = (totalSeconds % 3600) / 60;
    int seconds = totalSeconds % 60;
    label.text = [NSString stringWithFormat:@"%02d:%02d:%02d", hours, minutes, seconds];
}

- (void) animationTimerDidFire:(NSTimer*)timer {
    [self refreshLabel];
}

- (void) refreshLabel {
    NSTimeInterval secondsElapsed = [[NSDate date] timeIntervalSinceDate:startDate];
    [self updateLabel:timeLabel withTime:secondsElapsed];
}

// "Animating" simply means a timer is currently scheduled.
- (BOOL) isAnimating {
    return self.animationTimer != nil;
}

- (void) startTimer {
    if (!self.startDate) {
        self.startDate = [NSDate date];
    }
    self.animationTimer = [NSTimer scheduledTimerWithTimeInterval:1.0
                                                           target:self
                                                         selector:@selector(animationTimerDidFire:)
                                                         userInfo:nil
                                                          repeats:YES];
}

- (void) stopTimer {
    [self.animationTimer invalidate];
    self.animationTimer = nil;
}
// Restart the elapsed-time origin and repaint immediately.
- (void) resetTimer {
    self.startDate = [NSDate date];
    [self refreshLabel];
}

/*
// Only override drawRect: if you perform custom drawing.
// An empty implementation adversely affects performance during animation.
- (void)drawRect:(CGRect)rect
{
    // Drawing code
}
*/

@end

//
//  OWUtilities.h
//  OpenWatch
//
//  Created by Christopher Ballinger on 11/29/12.
//  Copyright (c) 2012 OpenWatch FPC. All rights reserved.
//

#import <UIKit/UIKit.h>

/// Small geometry and path helpers shared across the capture stack.
@interface OWUtilities : NSObject

+ (CGFloat) bottomOfView:(UIView*)view;
+ (CGFloat) rightOfView:(UIView*)view;
+ (NSURL*) urlForRecordingSegmentCount:(NSUInteger)count basePath:(NSString*)basePath;
+ (NSString*) applicationDocumentsDirectory;

@end

//
//  OWUtilities.m
//  OpenWatch
//
//  Created by Christopher Ballinger on 11/29/12.
//  Copyright (c) 2012 OpenWatch FPC. All rights reserved.
//

#import "OWUtilities.h"
#import <QuartzCore/QuartzCore.h>

@implementation OWUtilities

// Y coordinate of the view's bottom edge in its superview's space.
+ (CGFloat) bottomOfView:(UIView *)view {
    return view.frame.origin.y + view.frame.size.height;
}

// X coordinate of the view's right edge in its superview's space.
+ (CGFloat) rightOfView:(UIView *)view {
    return view.frame.origin.x + view.frame.size.width;
}

// File URL for segment number `count` (1-indexed on disk: "<count+1>.mp4").
+ (NSURL*) urlForRecordingSegmentCount:(NSUInteger)count basePath:(NSString*)basePath {
    // FIX: %d with an NSUInteger is a format-specifier mismatch (undefined on
    // 64-bit, where NSUInteger is 8 bytes); use %lu with an explicit cast.
    NSString *movieName = [NSString stringWithFormat:@"%lu.mp4", (unsigned long)(count + 1)];
    NSString *path = [basePath stringByAppendingPathComponent:movieName];
    NSURL *newMovieURL = [NSURL fileURLWithPath:path];
    return newMovieURL;
}

// The app's Documents directory, or nil if lookup fails.
+ (NSString*) applicationDocumentsDirectory {
    NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
    NSString *basePath = ([paths count] > 0) ? [paths objectAtIndex:0] : nil;
    return basePath;
}

@end

//
//  OWVideoProcessor.h
//  OpenWatch
//
//  Created by Christopher Ballinger on 11/13/12.
//  Copyright (c) 2012 OpenWatch FPC. All rights reserved.
//
#import <UIKit/UIKit.h>
#import <AVFoundation/AVFoundation.h>
#include <CoreMedia/CoreMedia.h>
#import "OWAppleEncoder.h"
#import "OWSegmentingAppleEncoder.h"

/// Callbacks for recording lifecycle; all four are required.
@protocol OWVideoProcessorDelegate <NSObject>
@required
- (void)recordingWillStart;
- (void)recordingDidStart;
- (void)recordingWillStop;
- (void)recordingDidStop;
@end

// NOTE(review): the angle-bracketed protocol lists below were stripped by the
// text extraction; they are reconstructed from the fact that this class
// implements -captureOutput:didOutputSampleBuffer:fromConnection: — confirm
// against the original file.
@interface OWVideoProcessor : NSObject <AVCaptureAudioDataOutputSampleBufferDelegate, AVCaptureVideoDataOutputSampleBufferDelegate>
{
    id <OWVideoProcessorDelegate> __weak delegate;

    NSMutableArray *previousSecondTimestamps;   // CMTime values within the last second
    Float64 videoFrameRate;
    CMVideoDimensions videoDimensions;
    CMVideoCodecType videoType;

    AVCaptureConnection *audioConnection;
    AVCaptureConnection *videoConnection;

    dispatch_queue_t movieWritingQueue;         // serializes all writer access

    CMFormatDescriptionRef videoFormatDescription;
    CMFormatDescriptionRef audioFormatDescription;

    // Only accessed on movie writing queue
    BOOL recordingWillBeStarted;
    BOOL recordingWillBeStopped;

    BOOL recording;
}

@property (readwrite, weak) id <OWVideoProcessorDelegate> delegate;

@property (readonly) Float64 videoFrameRate;
@property (readonly) CMVideoDimensions videoDimensions;
@property (readonly) CMVideoCodecType videoType;
@property (nonatomic, strong) OWAppleEncoder *appleEncoder1;
@property (nonatomic, strong) OWSegmentingAppleEncoder *appleEncoder2;
@property (nonatomic, strong) AVCaptureSession *captureSession;

@property (nonatomic) AVCaptureVideoOrientation referenceOrientation;
@property (nonatomic) AVCaptureVideoOrientation videoOrientation;

- (void) showError:(NSError*)error;

- (void) setupAndStartCaptureSession;
- (void) stopAndTearDownCaptureSession;

- (void) startRecording;
- (void) stopRecording;

- (void) pauseCaptureSession; // Pausing while a recording is in progress will cause the recording to be stopped and saved.
- (void) resumeCaptureSession;

- (CGAffineTransform)transformFromCurrentVideoOrientationToOrientation:(AVCaptureVideoOrientation)orientation;

@property(readonly, getter=isRecording) BOOL recording;

@end

//
//  OWVideoProcessor.m
//  OpenWatch
//
//  Created by Christopher Ballinger on 11/13/12.
//  Copyright (c) 2012 OpenWatch FPC. All rights reserved.
//

#import <UIKit/UIKit.h>
#import <AVFoundation/AVFoundation.h>
#import "OWVideoProcessor.h"
#import "OWUtilities.h"

@interface OWVideoProcessor ()
// Redeclared readwrite for internal use.
@property (readwrite) Float64 videoFrameRate;
@property (readwrite) CMVideoDimensions videoDimensions;
@property (readwrite) CMVideoCodecType videoType;
@property (readwrite, getter=isRecording) BOOL recording;
@end

@implementation OWVideoProcessor

@synthesize delegate;
@synthesize videoFrameRate, videoDimensions, videoType;
@synthesize videoOrientation;
@synthesize recording;
@synthesize appleEncoder1, appleEncoder2;
@synthesize captureSession;

- (id) init
{
    if (self = [super init]) {
        previousSecondTimestamps = [[NSMutableArray alloc] init];
    }
    return self;
}

// Rotation (radians) from portrait to the given capture orientation.
- (CGFloat)angleOffsetFromPortraitOrientationToOrientation:(AVCaptureVideoOrientation)orientation
{
    CGFloat angle = 0.0;

    switch (orientation) {
        case AVCaptureVideoOrientationPortrait:
            angle = 0.0;
            break;
        case AVCaptureVideoOrientationPortraitUpsideDown:
            angle = M_PI;
            break;
        case AVCaptureVideoOrientationLandscapeRight:
            angle = -M_PI_2;
            break;
        case AVCaptureVideoOrientationLandscapeLeft:
            angle = M_PI_2;
            break;
        default:
            break;
    }

    return angle;
}
angle; 60 | } 61 | 62 | - (CGAffineTransform)transformFromCurrentVideoOrientationToOrientation:(AVCaptureVideoOrientation)orientation 63 | { 64 | CGAffineTransform transform = CGAffineTransformIdentity; 65 | 66 | // Calculate offsets from an arbitrary reference orientation (portrait) 67 | CGFloat orientationAngleOffset = [self angleOffsetFromPortraitOrientationToOrientation:orientation]; 68 | CGFloat videoOrientationAngleOffset = [self angleOffsetFromPortraitOrientationToOrientation:self.videoOrientation]; 69 | 70 | // Find the difference in angle between the passed in orientation and the current video orientation 71 | CGFloat angleOffset = orientationAngleOffset - videoOrientationAngleOffset; 72 | transform = CGAffineTransformMakeRotation(angleOffset); 73 | 74 | return transform; 75 | } 76 | 77 | #pragma mark Utilities 78 | 79 | - (void) calculateFramerateAtTimestamp:(CMTime) timestamp 80 | { 81 | [previousSecondTimestamps addObject:[NSValue valueWithCMTime:timestamp]]; 82 | 83 | CMTime oneSecond = CMTimeMake( 1, 1 ); 84 | CMTime oneSecondAgo = CMTimeSubtract( timestamp, oneSecond ); 85 | 86 | while( CMTIME_COMPARE_INLINE( [[previousSecondTimestamps objectAtIndex:0] CMTimeValue], <, oneSecondAgo ) ) 87 | [previousSecondTimestamps removeObjectAtIndex:0]; 88 | 89 | Float64 newRate = (Float64) [previousSecondTimestamps count]; 90 | self.videoFrameRate = (self.videoFrameRate + newRate) / 2; 91 | } 92 | 93 | - (void)removeFile:(NSURL *)fileURL 94 | { 95 | NSFileManager *fileManager = [NSFileManager defaultManager]; 96 | NSString *filePath = [fileURL path]; 97 | if ([fileManager fileExistsAtPath:filePath]) { 98 | NSError *error; 99 | BOOL success = [fileManager removeItemAtPath:filePath error:&error]; 100 | if (!success) 101 | [self showError:error]; 102 | } 103 | } 104 | 105 | 106 | 107 | #pragma mark Recording 108 | 109 | 110 | 111 | 112 | 113 | 114 | 115 | - (void) startRecording 116 | { 117 | dispatch_async(movieWritingQueue, ^{ 118 | if ( recordingWillBeStarted || 
self.recording ) 119 | return; 120 | 121 | recordingWillBeStarted = YES; 122 | 123 | // recordingDidStart is called from captureOutput:didOutputSampleBuffer:fromConnection: once the asset writer is setup 124 | [self.delegate recordingWillStart]; 125 | 126 | [self initializeAssetWriters]; 127 | }); 128 | } 129 | 130 | 131 | 132 | - (void) initializeAssetWriters { 133 | static NSString *testCountKey = @"test_count"; 134 | // Create an asset writer 135 | NSUserDefaults *defaults = [NSUserDefaults standardUserDefaults]; 136 | NSUInteger testCount = [[defaults objectForKey:testCountKey] unsignedIntegerValue]; 137 | 138 | 139 | NSString *basePath = [OWUtilities applicationDocumentsDirectory]; 140 | NSString *folderName = [NSString stringWithFormat:@"/%d/", testCount]; 141 | NSString *folderPath = [basePath stringByAppendingPathComponent:folderName]; 142 | NSFileManager *fileManager = [NSFileManager defaultManager]; 143 | NSError *error = nil; 144 | 145 | if ([fileManager fileExistsAtPath:folderPath]) { 146 | [fileManager removeItemAtPath:folderPath error:&error]; 147 | } 148 | if (error) { 149 | [self showError:error]; 150 | } 151 | 152 | [fileManager createDirectoryAtPath:folderPath withIntermediateDirectories:YES attributes:nil error:&error]; 153 | if (error) { 154 | [self showError:error]; 155 | } 156 | NSString *hqFilePath = [folderPath stringByAppendingPathComponent:@"hq.mp4"]; 157 | NSURL *hqURL = [NSURL fileURLWithPath:hqFilePath]; 158 | 159 | self.appleEncoder1 = [[OWAppleEncoder alloc] initWithURL:hqURL movieFragmentInterval:CMTimeMakeWithSeconds(5, 30)]; 160 | self.appleEncoder2 = [[OWSegmentingAppleEncoder alloc] initWithBasePath:folderPath segmentationInterval:5.0f]; 161 | 162 | testCount++; 163 | [defaults setObject:@(testCount) forKey:testCountKey]; 164 | } 165 | 166 | - (void) stopRecording 167 | { 168 | 169 | dispatch_async(movieWritingQueue, ^{ 170 | if ( recordingWillBeStopped || self.recording == NO) 171 | return; 172 | recordingWillBeStopped = YES; 
// --- continuation of -stopRecording: delegate notification and encoder
//     teardown, still inside the movieWritingQueue block entered above ---
        // recordingDidStop is called from saveMovieToCameraRoll
        [self.delegate recordingWillStop];
        [appleEncoder1 finishEncoding];
        recordingWillBeStopped = NO;
        self.recording = NO;
        // NOTE(review): recordingDidStop fires before appleEncoder2 has
        // finished encoding — confirm the delegate does not assume both
        // encoders are done at this point.
        [self.delegate recordingDidStop];
        [appleEncoder2 finishEncoding];
        self.appleEncoder1 = nil;
        self.appleEncoder2 = nil;
    });
}

#pragma mark Capture

// AVCapture{Audio,Video}DataOutput delegate callback, invoked on the audio
// and video capture queues for every captured sample buffer. Feeds both
// encoders (full-quality appleEncoder1 and segmenting appleEncoder2),
// lazily initializing each encoder's audio/video inputs from the first
// format description seen on the matching connection, then marks the
// recording as started once an encoder has both inputs ready.
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
    CMFormatDescriptionRef formatDescription = CMSampleBufferGetFormatDescription(sampleBuffer);

    if ( connection == videoConnection ) {

        // Get framerate
        CMTime timestamp = CMSampleBufferGetPresentationTimeStamp( sampleBuffer );
        [self calculateFramerateAtTimestamp:timestamp];

        // Get frame dimensions (for onscreen display)
        if (self.videoDimensions.width == 0 && self.videoDimensions.height == 0)
            self.videoDimensions = CMVideoFormatDescriptionGetDimensions( formatDescription );

        // Get buffer type
        if ( self.videoType == 0 )
            self.videoType = CMFormatDescriptionGetMediaSubType( formatDescription );
    }

    // Retain the buffer (and its format description) across the async hop
    // onto the movie writing queue. FIX: CMSampleBufferGetFormatDescription
    // may return NULL, and CFRetain(NULL) crashes — guard both retain and
    // the matching release below.
    CFRetain(sampleBuffer);
    if (formatDescription)
        CFRetain(formatDescription);

    dispatch_async(movieWritingQueue, ^{
        if ( appleEncoder1 && (self.recording || recordingWillBeStarted)) {

            BOOL wasReadyToRecord = (appleEncoder1.readyToRecordAudio && appleEncoder1.readyToRecordVideo);

            if (connection == videoConnection) {

                // Initialize the video input if this is not done yet
                if (!appleEncoder1.readyToRecordVideo) {
                    [appleEncoder1 setupVideoEncoderWithFormatDescription:formatDescription];
                }

                // Write video data to file
                if (appleEncoder1.readyToRecordVideo && appleEncoder1.readyToRecordAudio) {
                    [appleEncoder1 writeSampleBuffer:sampleBuffer ofType:AVMediaTypeVideo];
                }
            }
            else if (connection == audioConnection) {

                // Initialize the audio input if this is not done yet
                if (!appleEncoder1.readyToRecordAudio) {
                    [appleEncoder1 setupAudioEncoderWithFormatDescription:formatDescription];
                }

                // Write audio data to file
                if (appleEncoder1.readyToRecordAudio && appleEncoder1.readyToRecordVideo)
                    [appleEncoder1 writeSampleBuffer:sampleBuffer ofType:AVMediaTypeAudio];
            }

            // First moment both inputs are ready == recording has started.
            BOOL isReadyToRecord = (appleEncoder1.readyToRecordAudio && appleEncoder1.readyToRecordVideo);
            if ( !wasReadyToRecord && isReadyToRecord ) {
                recordingWillBeStarted = NO;
                self.recording = YES;
                [self.delegate recordingDidStart];
            }
        }
        if ( appleEncoder2 && (self.recording || recordingWillBeStarted)) {

            BOOL wasReadyToRecord = (appleEncoder2.readyToRecordAudio && appleEncoder2.readyToRecordVideo);

            if (connection == videoConnection) {

                // Initialize the video input if this is not done yet
                if (!appleEncoder2.readyToRecordVideo) {
                    [appleEncoder2 setupVideoEncoderWithFormatDescription:formatDescription bitsPerSecond:400000];
                }

                // Write video data to file
                if (appleEncoder2.readyToRecordVideo && appleEncoder2.readyToRecordAudio) {
                    [appleEncoder2 writeSampleBuffer:sampleBuffer ofType:AVMediaTypeVideo];
                }
            }
            else if (connection == audioConnection) {

                // Initialize the audio input if this is not done yet
                if (!appleEncoder2.readyToRecordAudio) {
                    [appleEncoder2 setupAudioEncoderWithFormatDescription:formatDescription];
                }

                // Write audio data to file
                if (appleEncoder2.readyToRecordAudio && appleEncoder2.readyToRecordVideo)
                    [appleEncoder2 writeSampleBuffer:sampleBuffer ofType:AVMediaTypeAudio];
            }

            BOOL isReadyToRecord = (appleEncoder2.readyToRecordAudio && appleEncoder2.readyToRecordVideo);
            if ( !wasReadyToRecord && isReadyToRecord ) {
                recordingWillBeStarted = NO;
                self.recording = YES;
                [self.delegate recordingDidStart];
            }
        }

        CFRelease(sampleBuffer);
        if (formatDescription)
            CFRelease(formatDescription);
    });
}

/// Returns the first capture device at the given position (front/back),
/// or nil if none is available.
- (AVCaptureDevice *)videoDeviceWithPosition:(AVCaptureDevicePosition)position
{
    NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
    for (AVCaptureDevice *device in devices)
        if ([device position] == position)
            return device;

    return nil;
}

/// Returns the default audio capture device, or nil if none is available.
- (AVCaptureDevice *)audioDevice
{
    NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeAudio];
    if ([devices count] > 0)
        return [devices objectAtIndex:0];

    return nil;
}

/// Builds the AVCaptureSession: 640x480 preset, audio + back-camera video
/// inputs, sample-buffer outputs on their own serial queues, and a fixed
/// landscape-right video orientation. Always returns YES.
- (BOOL) setupCaptureSession
{
    /*
     * Create capture session
     */
    captureSession = [[AVCaptureSession alloc] init];
    captureSession.sessionPreset = AVCaptureSessionPreset640x480;

    /*
     * Create audio connection
     */
    // NOTE(review): error:nil ignores device-input failures (e.g. mic
    // permission denied) — consider surfacing the NSError.
    AVCaptureDeviceInput *audioIn = [[AVCaptureDeviceInput alloc] initWithDevice:[self audioDevice] error:nil];
    if ([captureSession canAddInput:audioIn])
        [captureSession addInput:audioIn];

    AVCaptureAudioDataOutput *audioOut = [[AVCaptureAudioDataOutput alloc] init];
    dispatch_queue_t audioCaptureQueue = dispatch_queue_create("Audio Capture Queue", DISPATCH_QUEUE_SERIAL);
    [audioOut setSampleBufferDelegate:self queue:audioCaptureQueue];
    if ([captureSession canAddOutput:audioOut])
        [captureSession addOutput:audioOut];
    audioConnection = [audioOut connectionWithMediaType:AVMediaTypeAudio];

    /*
     * Create video connection
     */
    AVCaptureDeviceInput *videoIn = [[AVCaptureDeviceInput alloc] initWithDevice:[self videoDeviceWithPosition:AVCaptureDevicePositionBack] error:nil];
    if ([captureSession canAddInput:videoIn])
        [captureSession addInput:videoIn];

    AVCaptureVideoDataOutput *videoOut = [[AVCaptureVideoDataOutput alloc] init];

    [videoOut setAlwaysDiscardsLateVideoFrames:YES];

    [videoOut setVideoSettings:[NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_420YpCbCr8BiPlanarFullRange] forKey:(id)kCVPixelBufferPixelFormatTypeKey]];
    dispatch_queue_t videoCaptureQueue = dispatch_queue_create("Video Capture Queue", DISPATCH_QUEUE_SERIAL);
    [videoOut setSampleBufferDelegate:self queue:videoCaptureQueue];
    if ([captureSession canAddOutput:videoOut])
        [captureSession addOutput:videoOut];
    videoConnection = [videoOut connectionWithMediaType:AVMediaTypeVideo];
    // TODO FIXME iOS 6:
    self.videoOrientation = AVCaptureVideoOrientationLandscapeRight;
    videoConnection.videoOrientation = self.videoOrientation;
    return YES;
}

/// Creates the movie-writing queue, lazily builds the capture session,
/// registers for its did-stop notification, and starts it running.
- (void) setupAndStartCaptureSession
{
    // Create serial queue for movie writing
    movieWritingQueue = dispatch_queue_create("Movie Writing Queue", DISPATCH_QUEUE_SERIAL);

    if ( !captureSession )
        [self setupCaptureSession];

    [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(captureSessionStoppedRunningNotification:) name:AVCaptureSessionDidStopRunningNotification object:captureSession];

    if ( !captureSession.isRunning ) {
        [captureSession startRunning];
    }
}

- (void) pauseCaptureSession
{
    if ( captureSession.isRunning )
        [captureSession stopRunning];
}

- (void) resumeCaptureSession
{
    if ( !captureSession.isRunning ) {
        [captureSession startRunning];
    }
}

// If the session stops while a recording is in flight, finish it cleanly.
- (void)captureSessionStoppedRunningNotification:(NSNotification *)notification
{
    dispatch_async(movieWritingQueue, ^{
        if ( [self isRecording] ) {
[self stopRecording]; 384 | } 385 | }); 386 | } 387 | 388 | - (void) stopAndTearDownCaptureSession 389 | { 390 | [captureSession stopRunning]; 391 | if (captureSession) 392 | [[NSNotificationCenter defaultCenter] removeObserver:self name:AVCaptureSessionDidStopRunningNotification object:captureSession]; 393 | captureSession = nil; 394 | if (movieWritingQueue) { 395 | movieWritingQueue = NULL; 396 | } 397 | } 398 | 399 | #pragma mark Error Handling 400 | 401 | - (void)showError:(NSError *)error 402 | { 403 | NSLog(@"Error: %@", [error userInfo]); 404 | } 405 | 406 | @end 407 | -------------------------------------------------------------------------------- /LiveStreamer/Default-568h@2x.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OpenWatch/LiveStreamer-iOS/bdc873b03e7eba3d9d8092b612a73839933d17b7/LiveStreamer/Default-568h@2x.png -------------------------------------------------------------------------------- /LiveStreamer/Default.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OpenWatch/LiveStreamer-iOS/bdc873b03e7eba3d9d8092b612a73839933d17b7/LiveStreamer/Default.png -------------------------------------------------------------------------------- /LiveStreamer/Default@2x.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OpenWatch/LiveStreamer-iOS/bdc873b03e7eba3d9d8092b612a73839933d17b7/LiveStreamer/Default@2x.png -------------------------------------------------------------------------------- /LiveStreamer/LiveStreamer-Info.plist: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | CFBundleDevelopmentRegion 6 | en 7 | CFBundleDisplayName 8 | ${PRODUCT_NAME} 9 | CFBundleExecutable 10 | ${EXECUTABLE_NAME} 11 | CFBundleIdentifier 12 | net.openwatch.${PRODUCT_NAME:rfc1034identifier} 13 | 
CFBundleInfoDictionaryVersion 14 | 6.0 15 | CFBundleName 16 | ${PRODUCT_NAME} 17 | CFBundlePackageType 18 | APPL 19 | CFBundleShortVersionString 20 | 1.0 21 | CFBundleSignature 22 | ???? 23 | CFBundleVersion 24 | 1.0 25 | LSRequiresIPhoneOS 26 | 27 | UIFileSharingEnabled 28 | 29 | UIRequiredDeviceCapabilities 30 | 31 | armv7 32 | 33 | UISupportedInterfaceOrientations 34 | 35 | UIInterfaceOrientationPortrait 36 | UIInterfaceOrientationLandscapeLeft 37 | UIInterfaceOrientationLandscapeRight 38 | 39 | UISupportedInterfaceOrientations~ipad 40 | 41 | UIInterfaceOrientationPortrait 42 | UIInterfaceOrientationPortraitUpsideDown 43 | UIInterfaceOrientationLandscapeLeft 44 | UIInterfaceOrientationLandscapeRight 45 | 46 | 47 | 48 | -------------------------------------------------------------------------------- /LiveStreamer/LiveStreamer-Prefix.pch: -------------------------------------------------------------------------------- 1 | // 2 | // Prefix header for all source files of the 'LiveStreamer' target in the 'LiveStreamer' project 3 | // 4 | 5 | #import 6 | 7 | #ifndef __IPHONE_3_0 8 | #warning "This project uses features only available in iOS SDK 3.0 and later." 9 | #endif 10 | 11 | #ifdef __OBJC__ 12 | #import 13 | #import 14 | #endif 15 | -------------------------------------------------------------------------------- /LiveStreamer/OWAppDelegate.h: -------------------------------------------------------------------------------- 1 | // 2 | // OWAppDelegate.h 3 | // LiveStreamer 4 | // 5 | // Created by Christopher Ballinger on 9/11/13. 6 | // Copyright (c) 2013 OpenWatch, Inc. All rights reserved. 
//

#import <UIKit/UIKit.h>

@class HTTPServer;

// Convenience accessor for the app delegate, pre-cast to its concrete type.
#define OW_APP_DELEGATE ((OWAppDelegate*)[UIApplication sharedApplication].delegate)

// FIX(review): the <UIKit/UIKit.h> import and <UIApplicationDelegate>
// conformance below were restored — the original angle-bracketed tokens
// were stripped when this file was flattened (the .m implements
// application:didFinishLaunchingWithOptions:, so the conformance is required).
@interface OWAppDelegate : UIResponder <UIApplicationDelegate>

@property (strong, nonatomic) UIWindow *window;
// When YES, rotation is restricted to landscape-right (used during capture).
@property (nonatomic) BOOL forceLandscapeRight;

@end
--------------------------------------------------------------------------------
/LiveStreamer/OWAppDelegate.m:
--------------------------------------------------------------------------------
//
//  OWAppDelegate.m
//  LiveStreamer
//
//  Created by Christopher Ballinger on 9/11/13.
//  Copyright (c) 2013 OpenWatch, Inc. All rights reserved.
//

#import "OWAppDelegate.h"
#import "OWRootViewController.h"


@implementation OWAppDelegate

// Creates the window and installs OWRootViewController as its root.
- (BOOL)application:(UIApplication *)application didFinishLaunchingWithOptions:(NSDictionary *)launchOptions
{
    self.window = [[UIWindow alloc] initWithFrame:[[UIScreen mainScreen] bounds]];
    // Override point for customization after application launch.
    self.window.backgroundColor = [UIColor whiteColor];
    self.window.rootViewController = [[OWRootViewController alloc] init];
    [self.window makeKeyAndVisible];
    return YES;
}

- (void)applicationWillResignActive:(UIApplication *)application
{
    // Sent when the application is about to move from active to inactive state. This can occur for certain types of temporary interruptions (such as an incoming phone call or SMS message) or when the user quits the application and it begins the transition to the background state.
    // Use this method to pause ongoing tasks, disable timers, and throttle down OpenGL ES frame rates. Games should use this method to pause the game.
}

- (void)applicationDidEnterBackground:(UIApplication *)application
{
    // Use this method to release shared resources, save user data, invalidate timers, and store enough application state information to restore your application to its current state in case it is terminated later.
    // If your application supports background execution, this method is called instead of applicationWillTerminate: when the user quits.
}

- (void)applicationWillEnterForeground:(UIApplication *)application
{
    // Called as part of the transition from the background to the inactive state; here you can undo many of the changes made on entering the background.
}

- (void)applicationDidBecomeActive:(UIApplication *)application
{
    // Restart any tasks that were paused (or not yet started) while the application was inactive. If the application was previously in the background, optionally refresh the user interface.
}

- (void)applicationWillTerminate:(UIApplication *)application
{
    // Called when the application is about to terminate. Save data if appropriate. See also applicationDidEnterBackground:.
}

// Locks orientation to landscape-right while capture is in progress
// (forceLandscapeRight is set elsewhere), portrait otherwise.
-(NSUInteger)application:(UIApplication *)application supportedInterfaceOrientationsForWindow:(UIWindow *)window
{
    if (self.forceLandscapeRight) {
        return UIInterfaceOrientationMaskLandscapeRight;
    }

    return UIInterfaceOrientationMaskPortrait;
}


@end
--------------------------------------------------------------------------------
/LiveStreamer/OWManifestGenerator.h:
--------------------------------------------------------------------------------
//
//  OWManifestGenerator.h
//  LiveStreamer
//
//  Created by Christopher Ballinger on 10/1/13.
//  Copyright (c) 2013 OpenWatch, Inc. All rights reserved.
//

#import <Foundation/Foundation.h>

// Completion callback for a manifest write; error is only meaningful when
// success is NO.
typedef void(^OWManifestGeneratorCompletionBlock)(BOOL success, NSError *error);

// Incrementally rewrites a live HLS .m3u8 playlist containing a sliding
// window of the three most recent segments.
@interface OWManifestGenerator : NSObject

@property (nonatomic, strong) NSString *manifestPath;
@property (nonatomic, strong) NSMutableArray *segments;
@property (nonatomic, strong) NSString *header;
@property (nonatomic) int currentSegmentNumber;
@property (nonatomic) int targetSegmentDuration;

- (id) initWithM3U8Path:(NSString*)path targetSegmentDuration:(int)duration;

- (void) appendSegmentPath:(NSString *)segmentPath duration:(int)duration sequence:(int)sequence completionBlock:(OWManifestGeneratorCompletionBlock)completionBlock;

@end
--------------------------------------------------------------------------------
/LiveStreamer/OWManifestGenerator.m:
--------------------------------------------------------------------------------
//
//  OWManifestGenerator.m
//  LiveStreamer
//
//  Created by Christopher Ballinger on 10/1/13.
//  Copyright (c) 2013 OpenWatch, Inc. All rights reserved.
//

#import "OWManifestGenerator.h"

@implementation OWManifestGenerator
@synthesize manifestPath, segments, header;

// One playlist entry: "#EXTINF:<duration>,\n<fileName>\n".
- (NSString*) lineForFileName:(NSString*)fileName duration:(int)duration {
    return [NSString stringWithFormat:@"#EXTINF:%d,\n%@\n", duration, fileName];
}

- (id) initWithM3U8Path:(NSString*)path targetSegmentDuration:(int)duration {
    if (self = [super init]) {
        self.currentSegmentNumber = -1;
        self.manifestPath = path;
        self.targetSegmentDuration = duration;
        self.header = [NSString stringWithFormat:@"#EXTM3U\n#EXT-X-VERSION:3\n#EXT-X-ALLOW-CACHE:NO\n#EXT-X-TARGETDURATION:%d\n", duration];
        //#EXT-X-MEDIA-SEQUENCE:25755\n#EXTINF:11,\nmedia_25756.ts?wowzasessionid=2003983250\n#EXTINF:9,\nmedia_25757.ts?wowzasessionid=2003983250";
        self.segments = [NSMutableArray array];
    }
    return self;
}

// Records a newly finished segment and rewrites the playlist on disk with
// the last (up to) three segments, then invokes completionBlock.
- (void) appendSegmentPath:(NSString *)segmentPath duration:(int)duration sequence:(int)sequence completionBlock:(OWManifestGeneratorCompletionBlock)completionBlock {
    NSString *segmentName = [segmentPath lastPathComponent];
    [self.segments addObject:segmentName];

    // Sliding window: only the three most recent segments appear in the
    // live playlist.
    NSMutableArray *lastSegments = [NSMutableArray arrayWithCapacity:3];

    if (segments.count < 3) {
        [lastSegments addObjectsFromArray:segments];
    } else {
        // FIX: explicit cast — count is NSUInteger; the implicit narrowing
        // to int was an unsigned-to-signed conversion.
        int thirdToLastSegmentIndex = (int)segments.count - 3;
        for (int i = 0; i < 3; i++) {
            [lastSegments addObject:[segments objectAtIndex:thirdToLastSegmentIndex + i]];
        }
    }

    // Media sequence is derived from the first file name minus its
    // extension — assumes segments are named by sequence number
    // (e.g. "25756.ts"); TODO confirm against the segmenter.
    NSString *firstFileName = [lastSegments objectAtIndex:0];
    NSString *mediaSequence = [NSString stringWithFormat:@"#EXT-X-MEDIA-SEQUENCE:%@\n", [firstFileName stringByDeletingPathExtension]];

    NSMutableString *manifestFileString = [NSMutableString stringWithString:header];
    [manifestFileString appendString:mediaSequence];

    for (NSUInteger i = 0; i < lastSegments.count; i++) {
        NSString *fileName = [lastSegments objectAtIndex:i];

        [manifestFileString appendString:[self lineForFileName:fileName duration:duration]];
    }
    NSError *error = nil;
    NSLog(@"Manifest:\n%@\n", manifestFileString);
    // FIX: check the method's BOOL return value, not the error pointer —
    // NSError out-params are only guaranteed meaningful on failure.
    BOOL success = [manifestFileString writeToFile:manifestPath atomically:YES encoding:NSUTF8StringEncoding error:&error];
    if (completionBlock) {
        completionBlock(success, error);
    }
}

@end
--------------------------------------------------------------------------------
/LiveStreamer/OWRootViewController.h:
--------------------------------------------------------------------------------
//
//  OWRootViewController.h
//  LiveStreamer
//
//  Created by Christopher Ballinger on 9/11/13.
//  Copyright (c) 2013 OpenWatch, Inc. All rights reserved.
//

#import <UIKit/UIKit.h>
#import "OWCaptureViewController.h"
#import "FFmpegWrapper.h"

// NOTE(review): the conformance below was restored — the implementation
// assigns itself as an OWCaptureViewController delegate and implements its
// callbacks, so the original header almost certainly declared
// <OWCaptureViewControllerDelegate> before flattening stripped it; confirm
// the exact protocol name against OWCaptureViewController.h.
@interface OWRootViewController : UIViewController <OWCaptureViewControllerDelegate>

@property (nonatomic, strong) UIButton *testButton;
@property (nonatomic, strong) FFmpegWrapper *wrapper;

@end
--------------------------------------------------------------------------------
/LiveStreamer/OWRootViewController.m:
--------------------------------------------------------------------------------
//
//  OWRootViewController.m
//  LiveStreamer
//
//  Created by Christopher Ballinger on 9/11/13.
//  Copyright (c) 2013 OpenWatch, Inc. All rights reserved.
//

#import "OWRootViewController.h"
#import "OWCaptureViewController.h"
#import "OWUtilities.h"
#import "FFmpegWrapper.h"

@interface OWRootViewController ()

@end

@implementation OWRootViewController
@synthesize wrapper;

- (id)init
{
    self = [super init];
    if (self) {
        // Custom initialization
        self.testButton = [UIButton buttonWithType:UIButtonTypeRoundedRect];
        [self.testButton setTitle:@"Start Test" forState:UIControlStateNormal];
        [self.testButton addTarget:self action:@selector(testButtonPressed:) forControlEvents:UIControlEventTouchUpInside];
        // NOTE(review): touching self.view here forces the view to load
        // during init — consider moving this into viewDidLoad.
        [self.view addSubview:self.testButton];
        self.wrapper = [[FFmpegWrapper alloc] init];

    }
    return self;
}

// Presents the capture screen. The commented-out blocks below are earlier
// manual tests (FFmpeg remux to MPEG-TS, direct S3 upload) kept for reference.
- (void) testButtonPressed:(id)sender {
    /*
    NSArray *paths = [[NSBundle mainBundle] pathsForResourcesOfType:@"mp4" inDirectory:@"web"];
    NSString *basePath = [OWUtilities applicationDocumentsDirectory];
    NSString *outputFile = [basePath stringByAppendingPathComponent:@"test.ts"];
    NSFileManager *fileManager = [NSFileManager defaultManager];
    if([fileManager fileExistsAtPath:outputFile]) {
        [fileManager removeItemAtPath:outputFile error:nil];
    }
    NSDictionary *options = @{kFFmpegOutputFormatKey: @"mpegts"};

    [paths enumerateObjectsUsingBlock:^(NSString *path, NSUInteger idx, BOOL *stop) {
        NSLog(@"inputPath %@, outputPath %@", path, outputFile);
        [wrapper convertInputPath:path outputPath:outputFile options:options progressBlock:^(NSUInteger bytesRead, uint64_t totalBytesRead, uint64_t totalBytesExpectedToRead) {
            float progress = (float)totalBytesRead / totalBytesExpectedToRead;
            NSLog(@"progress: %f", progress);
        } completionBlock:^(BOOL success, NSError *error) {
            if (success) {
                NSLog(@"success!");
            } else {
                NSLog(@"error: %@", error.userInfo);
            }
        }];
        *stop = YES;
    }];
    */

    OWCaptureViewController *captureViewController = [[OWCaptureViewController alloc] init];
    captureViewController.delegate = self;
    [self presentViewController:captureViewController animated:YES completion:nil];

    /*NSString *testPath = [[NSBundle mainBundle] pathForResource:@"2" ofType:@"ts"];
    [[OWS3Client sharedClient] postObjectWithFile:testPath destinationPath:@"https://openwatch-livestreamer.s3.amazonaws.com/playlist.m3u8" parameters:nil progress:^(NSUInteger bytesWritten, long long totalBytesWritten, long long totalBytesExpectedToWrite) {
        NSLog(@"progress: %f", (float)totalBytesWritten/totalBytesExpectedToWrite);
    } success:^(id responseObject) {
        NSLog(@"success: %@", responseObject);
    } failure:^(NSError *error) {
        NSLog(@"error uploadin: %@", error.userInfo);
    }];
    */
}


// OWCaptureViewController delegate callbacks: dismiss in both cases.
- (void) captureViewControllerDidCancel:(OWCaptureViewController *)captureViewController {
    [captureViewController dismissViewControllerAnimated:YES completion:nil];
}

- (void) captureViewControllerDidFinishRecording:(OWCaptureViewController *)captureViewController {
    [captureViewController dismissViewControllerAnimated:YES completion:nil];
}

- (void)viewDidLoad
{
    [super viewDidLoad];
    // Do any additional setup after loading the view.
}

- (void) viewWillAppear:(BOOL)animated {
    // FIX: the original omitted the mandatory [super viewWillAppear:] call
    // in this lifecycle override.
    [super viewWillAppear:animated];
    self.testButton.frame = CGRectMake(50, 50, 100, 50);
}

- (void)didReceiveMemoryWarning
{
    [super didReceiveMemoryWarning];
    // Dispose of any resources that can be recreated.
}

@end
--------------------------------------------------------------------------------
/LiveStreamer/OWSharedS3Client.h:
--------------------------------------------------------------------------------
//
//  OWSharedS3Client.h
//  LiveStreamer
//
//  Created by Christopher Ballinger on 10/4/13.
6 | // Copyright (c) 2013 OpenWatch, Inc. All rights reserved. 7 | // 8 | 9 | #import "OWS3Client.h" 10 | 11 | @interface OWSharedS3Client : OWS3Client 12 | 13 | + (OWSharedS3Client*) sharedClient; 14 | 15 | @end 16 | -------------------------------------------------------------------------------- /LiveStreamer/OWSharedS3Client.m: -------------------------------------------------------------------------------- 1 | // 2 | // OWSharedS3Client.m 3 | // LiveStreamer 4 | // 5 | // Created by Christopher Ballinger on 10/4/13. 6 | // Copyright (c) 2013 OpenWatch, Inc. All rights reserved. 7 | // 8 | 9 | #import "OWSharedS3Client.h" 10 | #import "OWSecrets.h" 11 | 12 | @implementation OWSharedS3Client 13 | 14 | + (OWSharedS3Client*) sharedClient { 15 | static OWSharedS3Client *_sharedInstance = nil; 16 | static dispatch_once_t onceToken; 17 | dispatch_once(&onceToken, ^{ 18 | _sharedInstance = [[OWSharedS3Client alloc] init]; 19 | }); 20 | return _sharedInstance; 21 | } 22 | 23 | - (id) init { 24 | if (self = [super initWithAccessKey:AWS_ACCESS_KEY_ID secretKey:AWS_SECRET_KEY]) { 25 | self.region = US_EAST_1; 26 | self.useSSL = NO; 27 | } 28 | return self; 29 | } 30 | 31 | @end 32 | -------------------------------------------------------------------------------- /LiveStreamer/crossdomain.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | -------------------------------------------------------------------------------- /LiveStreamer/en.lproj/InfoPlist.strings: -------------------------------------------------------------------------------- 1 | /* Localized versions of Info.plist keys */ 2 | 3 | -------------------------------------------------------------------------------- /LiveStreamer/ffmpeg/libavformat/ffm.h: -------------------------------------------------------------------------------- 1 | /* 2 | * FFM (ffserver live feed) common header 3 | * Copyright (c) 2001 Fabrice Bellard 4 | * 5 | * This file is 
part of FFmpeg. 6 | * 7 | * FFmpeg is free software; you can redistribute it and/or 8 | * modify it under the terms of the GNU Lesser General Public 9 | * License as published by the Free Software Foundation; either 10 | * version 2.1 of the License, or (at your option) any later version. 11 | * 12 | * FFmpeg is distributed in the hope that it will be useful, 13 | * but WITHOUT ANY WARRANTY; without even the implied warranty of 14 | * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU 15 | * Lesser General Public License for more details. 16 | * 17 | * You should have received a copy of the GNU Lesser General Public 18 | * License along with FFmpeg; if not, write to the Free Software 19 | * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA 20 | */ 21 | 22 | #ifndef AVFORMAT_FFM_H 23 | #define AVFORMAT_FFM_H 24 | 25 | #include 26 | #include "avformat.h" 27 | #include "avio.h" 28 | 29 | /* The FFM file is made of blocks of fixed size */ 30 | #define FFM_HEADER_SIZE 14 31 | #define FFM_PACKET_SIZE 4096 32 | #define PACKET_ID 0x666d 33 | 34 | /* each packet contains frames (which can span several packets */ 35 | #define FRAME_HEADER_SIZE 16 36 | #define FLAG_KEY_FRAME 0x01 37 | #define FLAG_DTS 0x02 38 | 39 | enum { 40 | READ_HEADER, 41 | READ_DATA, 42 | }; 43 | 44 | typedef struct FFMContext { 45 | /* only reading mode */ 46 | int64_t write_index, file_size; 47 | int read_state; 48 | uint8_t header[FRAME_HEADER_SIZE+4]; 49 | 50 | /* read and write */ 51 | int first_packet; /* true if first packet, needed to set the discontinuity tag */ 52 | int packet_size; 53 | int frame_offset; 54 | int64_t dts; 55 | uint8_t *packet_ptr, *packet_end; 56 | uint8_t packet[FFM_PACKET_SIZE]; 57 | int64_t start_time; 58 | } FFMContext; 59 | 60 | #endif /* AVFORMAT_FFM_H */ 61 | -------------------------------------------------------------------------------- /LiveStreamer/ffmpeg/libavformat/network.h: 
-------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2007 The FFmpeg Project 3 | * 4 | * This file is part of FFmpeg. 5 | * 6 | * FFmpeg is free software; you can redistribute it and/or 7 | * modify it under the terms of the GNU Lesser General Public 8 | * License as published by the Free Software Foundation; either 9 | * version 2.1 of the License, or (at your option) any later version. 10 | * 11 | * FFmpeg is distributed in the hope that it will be useful, 12 | * but WITHOUT ANY WARRANTY; without even the implied warranty of 13 | * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU 14 | * Lesser General Public License for more details. 15 | * 16 | * You should have received a copy of the GNU Lesser General Public 17 | * License along with FFmpeg; if not, write to the Free Software 18 | * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA 19 | */ 20 | 21 | #ifndef AVFORMAT_NETWORK_H 22 | #define AVFORMAT_NETWORK_H 23 | 24 | #include 25 | #include 26 | 27 | #include "config.h" 28 | #include "libavutil/error.h" 29 | #include "os_support.h" 30 | #include "avio.h" 31 | #include "url.h" 32 | 33 | #if HAVE_UNISTD_H 34 | #include 35 | #endif 36 | 37 | #if HAVE_WINSOCK2_H 38 | #include 39 | #include 40 | 41 | #ifndef EPROTONOSUPPORT 42 | #define EPROTONOSUPPORT WSAEPROTONOSUPPORT 43 | #endif 44 | #ifndef ETIMEDOUT 45 | #define ETIMEDOUT WSAETIMEDOUT 46 | #endif 47 | #ifndef ECONNREFUSED 48 | #define ECONNREFUSED WSAECONNREFUSED 49 | #endif 50 | #ifndef EINPROGRESS 51 | #define EINPROGRESS WSAEINPROGRESS 52 | #endif 53 | 54 | #define getsockopt(a, b, c, d, e) getsockopt(a, b, c, (char*) d, e) 55 | #define setsockopt(a, b, c, d, e) setsockopt(a, b, c, (const char*) d, e) 56 | 57 | int ff_neterrno(void); 58 | #else 59 | #include 60 | #include 61 | #include 62 | #include 63 | 64 | #define ff_neterrno() AVERROR(errno) 65 | #endif 66 | 67 | #if HAVE_ARPA_INET_H 68 | #include 69 | 
#endif 70 | 71 | #if HAVE_POLL_H 72 | #include 73 | #endif 74 | 75 | int ff_socket_nonblock(int socket, int enable); 76 | 77 | extern int ff_network_inited_globally; 78 | int ff_network_init(void); 79 | void ff_network_close(void); 80 | 81 | void ff_tls_init(void); 82 | void ff_tls_deinit(void); 83 | 84 | int ff_network_wait_fd(int fd, int write); 85 | 86 | /** 87 | * This works similarly to ff_network_wait_fd, but waits up to 'timeout' microseconds 88 | * Uses ff_network_wait_fd in a loop 89 | * 90 | * @fd Socket descriptor 91 | * @write Set 1 to wait for socket able to be read, 0 to be written 92 | * @timeout Timeout interval, in microseconds. Actual precision is 100000 mcs, due to ff_network_wait_fd usage 93 | * @param int_cb Interrupt callback, is checked after each ff_network_wait_fd call 94 | * @return 0 if data can be read/written, AVERROR(ETIMEDOUT) if timeout expired, or negative error code 95 | */ 96 | int ff_network_wait_fd_timeout(int fd, int write, int64_t timeout, AVIOInterruptCB *int_cb); 97 | 98 | int ff_inet_aton (const char * str, struct in_addr * add); 99 | 100 | #if !HAVE_STRUCT_SOCKADDR_STORAGE 101 | struct sockaddr_storage { 102 | #if HAVE_STRUCT_SOCKADDR_SA_LEN 103 | uint8_t ss_len; 104 | uint8_t ss_family; 105 | #else 106 | uint16_t ss_family; 107 | #endif 108 | char ss_pad1[6]; 109 | int64_t ss_align; 110 | char ss_pad2[112]; 111 | }; 112 | #endif 113 | 114 | #if !HAVE_STRUCT_ADDRINFO 115 | struct addrinfo { 116 | int ai_flags; 117 | int ai_family; 118 | int ai_socktype; 119 | int ai_protocol; 120 | int ai_addrlen; 121 | struct sockaddr *ai_addr; 122 | char *ai_canonname; 123 | struct addrinfo *ai_next; 124 | }; 125 | #endif 126 | 127 | /* getaddrinfo constants */ 128 | #ifndef EAI_AGAIN 129 | #define EAI_AGAIN 2 130 | #endif 131 | #ifndef EAI_BADFLAGS 132 | #define EAI_BADFLAGS 3 133 | #endif 134 | #ifndef EAI_FAIL 135 | #define EAI_FAIL 4 136 | #endif 137 | #ifndef EAI_FAMILY 138 | #define EAI_FAMILY 5 139 | #endif 140 | #ifndef 
EAI_MEMORY 141 | #define EAI_MEMORY 6 142 | #endif 143 | #ifndef EAI_NODATA 144 | #define EAI_NODATA 7 145 | #endif 146 | #ifndef EAI_NONAME 147 | #define EAI_NONAME 8 148 | #endif 149 | #ifndef EAI_SERVICE 150 | #define EAI_SERVICE 9 151 | #endif 152 | #ifndef EAI_SOCKTYPE 153 | #define EAI_SOCKTYPE 10 154 | #endif 155 | 156 | #ifndef AI_PASSIVE 157 | #define AI_PASSIVE 1 158 | #endif 159 | 160 | #ifndef AI_CANONNAME 161 | #define AI_CANONNAME 2 162 | #endif 163 | 164 | #ifndef AI_NUMERICHOST 165 | #define AI_NUMERICHOST 4 166 | #endif 167 | 168 | #ifndef NI_NOFQDN 169 | #define NI_NOFQDN 1 170 | #endif 171 | 172 | #ifndef NI_NUMERICHOST 173 | #define NI_NUMERICHOST 2 174 | #endif 175 | 176 | #ifndef NI_NAMERQD 177 | #define NI_NAMERQD 4 178 | #endif 179 | 180 | #ifndef NI_NUMERICSERV 181 | #define NI_NUMERICSERV 8 182 | #endif 183 | 184 | #ifndef NI_DGRAM 185 | #define NI_DGRAM 16 186 | #endif 187 | 188 | #if !HAVE_GETADDRINFO 189 | int ff_getaddrinfo(const char *node, const char *service, 190 | const struct addrinfo *hints, struct addrinfo **res); 191 | void ff_freeaddrinfo(struct addrinfo *res); 192 | int ff_getnameinfo(const struct sockaddr *sa, int salen, 193 | char *host, int hostlen, 194 | char *serv, int servlen, int flags); 195 | #define getaddrinfo ff_getaddrinfo 196 | #define freeaddrinfo ff_freeaddrinfo 197 | #define getnameinfo ff_getnameinfo 198 | #endif 199 | #if !HAVE_GETADDRINFO || HAVE_WINSOCK2_H 200 | const char *ff_gai_strerror(int ecode); 201 | #undef gai_strerror 202 | #define gai_strerror ff_gai_strerror 203 | #endif 204 | 205 | #ifndef INADDR_LOOPBACK 206 | #define INADDR_LOOPBACK 0x7f000001 207 | #endif 208 | 209 | #ifndef INET_ADDRSTRLEN 210 | #define INET_ADDRSTRLEN 16 211 | #endif 212 | 213 | #ifndef INET6_ADDRSTRLEN 214 | #define INET6_ADDRSTRLEN INET_ADDRSTRLEN 215 | #endif 216 | 217 | #ifndef IN_MULTICAST 218 | #define IN_MULTICAST(a) ((((uint32_t)(a)) & 0xf0000000) == 0xe0000000) 219 | #endif 220 | #ifndef IN6_IS_ADDR_MULTICAST 221 
| #define IN6_IS_ADDR_MULTICAST(a) (((uint8_t *) (a))[0] == 0xff) 222 | #endif 223 | 224 | int ff_is_multicast_address(struct sockaddr *addr); 225 | 226 | #define POLLING_TIME 100 /// Time in milliseconds between interrupt check 227 | 228 | /** 229 | * Bind to a file descriptor and poll for a connection. 230 | * 231 | * @param fd First argument of bind(). 232 | * @param addr Second argument of bind(). 233 | * @param addrlen Third argument of bind(). 234 | * @param timeout Polling timeout in milliseconds. 235 | * @param h URLContext providing interrupt check 236 | * callback and logging context. 237 | * @return A non-blocking file descriptor on success 238 | * or an AVERROR on failure. 239 | */ 240 | int ff_listen_bind(int fd, const struct sockaddr *addr, 241 | socklen_t addrlen, int timeout, 242 | URLContext *h); 243 | 244 | /** 245 | * Connect to a file descriptor and poll for result. 246 | * 247 | * @param fd First argument of connect(), 248 | * will be set as non-blocking. 249 | * @param addr Second argument of connect(). 250 | * @param addrlen Third argument of connect(). 251 | * @param timeout Polling timeout in milliseconds. 252 | * @param h URLContext providing interrupt check 253 | * callback and logging context. 254 | * @return 0 on success, AVERROR on failure. 255 | */ 256 | int ff_listen_connect(int fd, const struct sockaddr *addr, 257 | socklen_t addrlen, int timeout, 258 | URLContext *h); 259 | 260 | int ff_http_match_no_proxy(const char *no_proxy, const char *hostname); 261 | 262 | #endif /* AVFORMAT_NETWORK_H */ 263 | -------------------------------------------------------------------------------- /LiveStreamer/ffmpeg/libavformat/os_support.h: -------------------------------------------------------------------------------- 1 | /* 2 | * various OS-feature replacement utilities 3 | * copyright (c) 2000, 2001, 2002 Fabrice Bellard 4 | * 5 | * This file is part of FFmpeg. 
6 | * 7 | * FFmpeg is free software; you can redistribute it and/or 8 | * modify it under the terms of the GNU Lesser General Public 9 | * License as published by the Free Software Foundation; either 10 | * version 2.1 of the License, or (at your option) any later version. 11 | * 12 | * FFmpeg is distributed in the hope that it will be useful, 13 | * but WITHOUT ANY WARRANTY; without even the implied warranty of 14 | * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU 15 | * Lesser General Public License for more details. 16 | * 17 | * You should have received a copy of the GNU Lesser General Public 18 | * License along with FFmpeg; if not, write to the Free Software 19 | * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA 20 | */ 21 | 22 | #ifndef AVFORMAT_OS_SUPPORT_H 23 | #define AVFORMAT_OS_SUPPORT_H 24 | 25 | /** 26 | * @file 27 | * miscellaneous OS support macros and functions. 28 | */ 29 | 30 | #include "config.h" 31 | 32 | #include 33 | 34 | #if defined(_WIN32) && !defined(__MINGW32CE__) 35 | # include 36 | # ifdef lseek 37 | # undef lseek 38 | # endif 39 | # define lseek(f,p,w) _lseeki64((f), (p), (w)) 40 | # ifdef stat 41 | # undef stat 42 | # endif 43 | # define stat _stati64 44 | # ifdef fstat 45 | # undef fstat 46 | # endif 47 | # define fstat(f,s) _fstati64((f), (s)) 48 | #endif /* defined(__MINGW32__) && !defined(__MINGW32CE__) */ 49 | 50 | #ifdef _WIN32 51 | #if HAVE_DIRECT_H 52 | #include 53 | #elif HAVE_IO_H 54 | #include 55 | #endif 56 | #define mkdir(a, b) _mkdir(a) 57 | #else 58 | #include 59 | #endif 60 | 61 | static inline int is_dos_path(const char *path) 62 | { 63 | #if HAVE_DOS_PATHS 64 | if (path[0] && path[1] == ':') 65 | return 1; 66 | #endif 67 | return 0; 68 | } 69 | 70 | #if defined(__OS2__) || defined(__Plan9__) 71 | #define SHUT_RD 0 72 | #define SHUT_WR 1 73 | #define SHUT_RDWR 2 74 | #endif 75 | 76 | #if defined(_WIN32) 77 | #define SHUT_RD SD_RECEIVE 78 | #define SHUT_WR SD_SEND 79 | 
#define SHUT_RDWR SD_BOTH 80 | 81 | #ifndef S_IRUSR 82 | #define S_IRUSR S_IREAD 83 | #endif 84 | #ifndef S_IWUSR 85 | #define S_IWUSR S_IWRITE 86 | #endif 87 | #endif 88 | 89 | #if defined(_WIN32) && !defined(__MINGW32CE__) 90 | int ff_win32_open(const char *filename, int oflag, int pmode); 91 | #define open ff_win32_open 92 | #endif 93 | 94 | #if CONFIG_NETWORK 95 | #if !HAVE_SOCKLEN_T 96 | typedef int socklen_t; 97 | #endif 98 | 99 | /* most of the time closing a socket is just closing an fd */ 100 | #if !HAVE_CLOSESOCKET 101 | #define closesocket close 102 | #endif 103 | 104 | #if !HAVE_POLL_H 105 | typedef unsigned long nfds_t; 106 | 107 | #if HAVE_WINSOCK2_H 108 | #include <winsock2.h> 109 | #endif 110 | #if !HAVE_STRUCT_POLLFD 111 | struct pollfd { 112 | int fd; 113 | short events; /* events to look for */ 114 | short revents; /* events that occurred */ 115 | }; 116 | 117 | /* events & revents */ 118 | #define POLLIN 0x0001 /* any readable data available */ 119 | #define POLLOUT 0x0002 /* file descriptor is writeable */ 120 | #define POLLRDNORM POLLIN 121 | #define POLLWRNORM POLLOUT 122 | #define POLLRDBAND 0x0008 /* priority readable data */ 123 | #define POLLWRBAND 0x0010 /* priority data can be written */ 124 | #define POLLPRI 0x0020 /* high priority readable data */ 125 | 126 | /* revents only */ 127 | #define POLLERR 0x0004 /* errors pending */ 128 | #define POLLHUP 0x0080 /* disconnected */ 129 | #define POLLNVAL 0x1000 /* invalid file descriptor */ 130 | #endif 131 | 132 | 133 | int ff_poll(struct pollfd *fds, nfds_t numfds, int timeout); 134 | #define poll ff_poll 135 | #endif /* HAVE_POLL_H */ 136 | #endif /* CONFIG_NETWORK */ 137 | 138 | #endif /* AVFORMAT_OS_SUPPORT_H */ 139 | -------------------------------------------------------------------------------- /LiveStreamer/ffmpeg/libavformat/url.h: -------------------------------------------------------------------------------- 1 | /* 2 | * 3 | * This file is part of FFmpeg. 
4 | * 5 | * FFmpeg is free software; you can redistribute it and/or 6 | * modify it under the terms of the GNU Lesser General Public 7 | * License as published by the Free Software Foundation; either 8 | * version 2.1 of the License, or (at your option) any later version. 9 | * 10 | * FFmpeg is distributed in the hope that it will be useful, 11 | * but WITHOUT ANY WARRANTY; without even the implied warranty of 12 | * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU 13 | * Lesser General Public License for more details. 14 | * 15 | * You should have received a copy of the GNU Lesser General Public 16 | * License along with FFmpeg; if not, write to the Free Software 17 | * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA 18 | */ 19 | 20 | /** 21 | * @file 22 | * unbuffered private I/O API 23 | */ 24 | 25 | #ifndef AVFORMAT_URL_H 26 | #define AVFORMAT_URL_H 27 | 28 | #include "avio.h" 29 | #include "libavformat/version.h" 30 | 31 | #include "libavutil/dict.h" 32 | #include "libavutil/log.h" 33 | 34 | #define URL_PROTOCOL_FLAG_NESTED_SCHEME 1 /*< The protocol name can be the first part of a nested protocol scheme */ 35 | #define URL_PROTOCOL_FLAG_NETWORK 2 /*< The protocol uses network */ 36 | 37 | extern int (*url_interrupt_cb)(void); 38 | 39 | extern const AVClass ffurl_context_class; 40 | 41 | typedef struct URLContext { 42 | const AVClass *av_class; /**< information for av_log(). Set by url_open(). 
*/ 43 | struct URLProtocol *prot; 44 | void *priv_data; 45 | char *filename; /**< specified URL */ 46 | int flags; 47 | int max_packet_size; /**< if non zero, the stream is packetized with this max packet size */ 48 | int is_streamed; /**< true if streamed (no seek possible), default = false */ 49 | int is_connected; 50 | AVIOInterruptCB interrupt_callback; 51 | int64_t rw_timeout; /**< maximum time to wait for (network) read/write operation completion, in mcs */ 52 | } URLContext; 53 | 54 | typedef struct URLProtocol { 55 | const char *name; 56 | int (*url_open)( URLContext *h, const char *url, int flags); 57 | /** 58 | * This callback is to be used by protocols which open further nested 59 | * protocols. options are then to be passed to ffurl_open()/ffurl_connect() 60 | * for those nested protocols. 61 | */ 62 | int (*url_open2)(URLContext *h, const char *url, int flags, AVDictionary **options); 63 | 64 | /** 65 | * Read data from the protocol. 66 | * If data is immediately available (even less than size), EOF is 67 | * reached or an error occurs (including EINTR), return immediately. 68 | * Otherwise: 69 | * In non-blocking mode, return AVERROR(EAGAIN) immediately. 70 | * In blocking mode, wait for data/EOF/error with a short timeout (0.1s), 71 | * and return AVERROR(EAGAIN) on timeout. 72 | * Checking interrupt_callback, looping on EINTR and EAGAIN and until 73 | * enough data has been read is left to the calling function; see 74 | * retry_transfer_wrapper in avio.c. 
75 | */ 76 | int (*url_read)( URLContext *h, unsigned char *buf, int size); 77 | int (*url_write)(URLContext *h, const unsigned char *buf, int size); 78 | int64_t (*url_seek)( URLContext *h, int64_t pos, int whence); 79 | int (*url_close)(URLContext *h); 80 | struct URLProtocol *next; 81 | int (*url_read_pause)(URLContext *h, int pause); 82 | int64_t (*url_read_seek)(URLContext *h, int stream_index, 83 | int64_t timestamp, int flags); 84 | int (*url_get_file_handle)(URLContext *h); 85 | int (*url_get_multi_file_handle)(URLContext *h, int **handles, 86 | int *numhandles); 87 | int (*url_shutdown)(URLContext *h, int flags); 88 | int priv_data_size; 89 | const AVClass *priv_data_class; 90 | int flags; 91 | int (*url_check)(URLContext *h, int mask); 92 | } URLProtocol; 93 | 94 | /** 95 | * Create a URLContext for accessing to the resource indicated by 96 | * url, but do not initiate the connection yet. 97 | * 98 | * @param puc pointer to the location where, in case of success, the 99 | * function puts the pointer to the created URLContext 100 | * @param flags flags which control how the resource indicated by url 101 | * is to be opened 102 | * @param int_cb interrupt callback to use for the URLContext, may be 103 | * NULL 104 | * @return 0 in case of success, a negative value corresponding to an 105 | * AVERROR code in case of failure 106 | */ 107 | int ffurl_alloc(URLContext **puc, const char *filename, int flags, 108 | const AVIOInterruptCB *int_cb); 109 | 110 | /** 111 | * Connect an URLContext that has been allocated by ffurl_alloc 112 | * 113 | * @param options A dictionary filled with options for nested protocols, 114 | * i.e. it will be passed to url_open2() for protocols implementing it. 115 | * This parameter will be destroyed and replaced with a dict containing options 116 | * that were not found. May be NULL. 
117 | */ 118 | int ffurl_connect(URLContext *uc, AVDictionary **options); 119 | 120 | /** 121 | * Create an URLContext for accessing to the resource indicated by 122 | * url, and open it. 123 | * 124 | * @param puc pointer to the location where, in case of success, the 125 | * function puts the pointer to the created URLContext 126 | * @param flags flags which control how the resource indicated by url 127 | * is to be opened 128 | * @param int_cb interrupt callback to use for the URLContext, may be 129 | * NULL 130 | * @param options A dictionary filled with protocol-private options. On return 131 | * this parameter will be destroyed and replaced with a dict containing options 132 | * that were not found. May be NULL. 133 | * @return 0 in case of success, a negative value corresponding to an 134 | * AVERROR code in case of failure 135 | */ 136 | int ffurl_open(URLContext **puc, const char *filename, int flags, 137 | const AVIOInterruptCB *int_cb, AVDictionary **options); 138 | 139 | /** 140 | * Read up to size bytes from the resource accessed by h, and store 141 | * the read bytes in buf. 142 | * 143 | * @return The number of bytes actually read, or a negative value 144 | * corresponding to an AVERROR code in case of error. A value of zero 145 | * indicates that it is not possible to read more from the accessed 146 | * resource (except if the value of the size argument is also zero). 147 | */ 148 | int ffurl_read(URLContext *h, unsigned char *buf, int size); 149 | 150 | /** 151 | * Read as many bytes as possible (up to size), calling the 152 | * read function multiple times if necessary. 153 | * This makes special short-read handling in applications 154 | * unnecessary, if the return value is < size then it is 155 | * certain there was either an error or the end of file was reached. 156 | */ 157 | int ffurl_read_complete(URLContext *h, unsigned char *buf, int size); 158 | 159 | /** 160 | * Write size bytes from buf to the resource accessed by h. 
161 | * 162 | * @return the number of bytes actually written, or a negative value 163 | * corresponding to an AVERROR code in case of failure 164 | */ 165 | int ffurl_write(URLContext *h, const unsigned char *buf, int size); 166 | 167 | /** 168 | * Change the position that will be used by the next read/write 169 | * operation on the resource accessed by h. 170 | * 171 | * @param pos specifies the new position to set 172 | * @param whence specifies how pos should be interpreted, it must be 173 | * one of SEEK_SET (seek from the beginning), SEEK_CUR (seek from the 174 | * current position), SEEK_END (seek from the end), or AVSEEK_SIZE 175 | * (return the filesize of the requested resource, pos is ignored). 176 | * @return a negative value corresponding to an AVERROR code in case 177 | * of failure, or the resulting file position, measured in bytes from 178 | * the beginning of the file. You can use this feature together with 179 | * SEEK_CUR to read the current file position. 180 | */ 181 | int64_t ffurl_seek(URLContext *h, int64_t pos, int whence); 182 | 183 | /** 184 | * Close the resource accessed by the URLContext h, and free the 185 | * memory used by it. Also set the URLContext pointer to NULL. 186 | * 187 | * @return a negative value if an error condition occurred, 0 188 | * otherwise 189 | */ 190 | int ffurl_closep(URLContext **h); 191 | int ffurl_close(URLContext *h); 192 | 193 | /** 194 | * Return the filesize of the resource accessed by h, AVERROR(ENOSYS) 195 | * if the operation is not supported by h, or another negative value 196 | * corresponding to an AVERROR error code in case of failure. 197 | */ 198 | int64_t ffurl_size(URLContext *h); 199 | 200 | /** 201 | * Return the file descriptor associated with this URL. For RTP, this 202 | * will return only the RTP file descriptor, not the RTCP file descriptor. 203 | * 204 | * @return the file descriptor associated with this URL, or <0 on error. 
205 | */ 206 | int ffurl_get_file_handle(URLContext *h); 207 | 208 | /** 209 | * Return the file descriptors associated with this URL. 210 | * 211 | * @return 0 on success or <0 on error. 212 | */ 213 | int ffurl_get_multi_file_handle(URLContext *h, int **handles, int *numhandles); 214 | 215 | /** 216 | * Signal the URLContext that we are done reading or writing the stream. 217 | * 218 | * @param h pointer to the resource 219 | * @param flags flags which control how the resource indicated by url 220 | * is to be shutdown 221 | * 222 | * @return a negative value if an error condition occurred, 0 223 | * otherwise 224 | */ 225 | int ffurl_shutdown(URLContext *h, int flags); 226 | 227 | /** 228 | * Register the URLProtocol protocol. 229 | * 230 | * @param size the size of the URLProtocol struct referenced 231 | */ 232 | int ffurl_register_protocol(URLProtocol *protocol, int size); 233 | 234 | /** 235 | * Check if the user has requested to interrupt a blocking function 236 | * associated with cb. 237 | */ 238 | int ff_check_interrupt(AVIOInterruptCB *cb); 239 | 240 | /** 241 | * Iterate over all available protocols. 242 | * 243 | * @param prev result of the previous call to this function or NULL. 244 | */ 245 | URLProtocol *ffurl_protocol_next(URLProtocol *prev); 246 | 247 | /* udp.c */ 248 | int ff_udp_set_remote_url(URLContext *h, const char *uri); 249 | int ff_udp_get_local_port(URLContext *h); 250 | 251 | /** 252 | * Assemble a URL string from components. This is the reverse operation 253 | * of av_url_split. 254 | * 255 | * Note, this requires networking to be initialized, so the caller must 256 | * ensure ff_network_init has been called. 257 | * 258 | * @see av_url_split 259 | * 260 | * @param str the buffer to fill with the url 261 | * @param size the size of the str buffer 262 | * @param proto the protocol identifier, if null, the separator 263 | * after the identifier is left out, too 264 | * @param authorization an optional authorization string, may be null. 
265 | * An empty string is treated the same as a null string. 266 | * @param hostname the host name string 267 | * @param port the port number, left out from the string if negative 268 | * @param fmt a generic format string for everything to add after the 269 | * host/port, may be null 270 | * @return the number of characters written to the destination buffer 271 | */ 272 | int ff_url_join(char *str, int size, const char *proto, 273 | const char *authorization, const char *hostname, 274 | int port, const char *fmt, ...) av_printf_format(7, 8); 275 | 276 | /** 277 | * Convert a relative url into an absolute url, given a base url. 278 | * 279 | * @param buf the buffer where output absolute url is written 280 | * @param size the size of buf 281 | * @param base the base url, may be equal to buf. 282 | * @param rel the new url, which is interpreted relative to base 283 | */ 284 | void ff_make_absolute_url(char *buf, int size, const char *base, 285 | const char *rel); 286 | 287 | 288 | #endif /* AVFORMAT_URL_H */ 289 | -------------------------------------------------------------------------------- /LiveStreamer/ffmpeg/libavresample/avresample.h: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2012 Justin Ruggles 3 | * 4 | * This file is part of Libav. 5 | * 6 | * Libav is free software; you can redistribute it and/or 7 | * modify it under the terms of the GNU Lesser General Public 8 | * License as published by the Free Software Foundation; either 9 | * version 2.1 of the License, or (at your option) any later version. 10 | * 11 | * Libav is distributed in the hope that it will be useful, 12 | * but WITHOUT ANY WARRANTY; without even the implied warranty of 13 | * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU 14 | * Lesser General Public License for more details. 
15 | * 16 | * You should have received a copy of the GNU Lesser General Public 17 | * License along with Libav; if not, write to the Free Software 18 | * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA 19 | */ 20 | 21 | #ifndef AVRESAMPLE_AVRESAMPLE_H 22 | #define AVRESAMPLE_AVRESAMPLE_H 23 | 24 | /** 25 | * @file 26 | * @ingroup lavr 27 | * external API header 28 | */ 29 | 30 | /** 31 | * @defgroup lavr Libavresample 32 | * @{ 33 | * 34 | * Libavresample (lavr) is a library that handles audio resampling, sample 35 | * format conversion and mixing. 36 | * 37 | * Interaction with lavr is done through AVAudioResampleContext, which is 38 | * allocated with avresample_alloc_context(). It is opaque, so all parameters 39 | * must be set with the @ref avoptions API. 40 | * 41 | * For example the following code will setup conversion from planar float sample 42 | * format to interleaved signed 16-bit integer, downsampling from 48kHz to 43 | * 44.1kHz and downmixing from 5.1 channels to stereo (using the default mixing 44 | * matrix): 45 | * @code 46 | * AVAudioResampleContext *avr = avresample_alloc_context(); 47 | * av_opt_set_int(avr, "in_channel_layout", AV_CH_LAYOUT_5POINT1, 0); 48 | * av_opt_set_int(avr, "out_channel_layout", AV_CH_LAYOUT_STEREO, 0); 49 | * av_opt_set_int(avr, "in_sample_rate", 48000, 0); 50 | * av_opt_set_int(avr, "out_sample_rate", 44100, 0); 51 | * av_opt_set_int(avr, "in_sample_fmt", AV_SAMPLE_FMT_FLTP, 0); 52 | * av_opt_set_int(avr, "out_sample_fmt", AV_SAMPLE_FMT_S16, 0); 53 | * @endcode 54 | * 55 | * Once the context is initialized, it must be opened with avresample_open(). If 56 | * you need to change the conversion parameters, you must close the context with 57 | * avresample_close(), change the parameters as described above, then reopen it 58 | * again. 59 | * 60 | * The conversion itself is done by repeatedly calling avresample_convert(). 61 | * Note that the samples may get buffered in two places in lavr. 
The first one 62 | * is the output FIFO, where the samples end up if the output buffer is not 63 | * large enough. The data stored in there may be retrieved at any time with 64 | * avresample_read(). The second place is the resampling delay buffer, 65 | * applicable only when resampling is done. The samples in it require more input 66 | * before they can be processed. Their current amount is returned by 67 | * avresample_get_delay(). At the end of conversion the resampling buffer can be 68 | * flushed by calling avresample_convert() with NULL input. 69 | * 70 | * The following code demonstrates the conversion loop assuming the parameters 71 | * from above and caller-defined functions get_input() and handle_output(): 72 | * @code 73 | * uint8_t **input; 74 | * int in_linesize, in_samples; 75 | * 76 | * while (get_input(&input, &in_linesize, &in_samples)) { 77 | * uint8_t *output; 78 | * int out_linesize; 79 | * int out_samples = avresample_available(avr) + 80 | * av_rescale_rnd(avresample_get_delay(avr) + 81 | * in_samples, 44100, 48000, AV_ROUND_UP); 82 | * av_samples_alloc(&output, &out_linesize, 2, out_samples, 83 | * AV_SAMPLE_FMT_S16, 0); 84 | * out_samples = avresample_convert(avr, &output, out_linesize, out_samples, 85 | * input, in_linesize, in_samples); 86 | * handle_output(output, out_linesize, out_samples); 87 | * av_freep(&output); 88 | * } 89 | * @endcode 90 | * 91 | * When the conversion is finished and the FIFOs are flushed if required, the 92 | * conversion context and everything associated with it must be freed with 93 | * avresample_free(). 
94 | */ 95 | 96 | #include "libavutil/avutil.h" 97 | #include "libavutil/channel_layout.h" 98 | #include "libavutil/dict.h" 99 | #include "libavutil/log.h" 100 | 101 | #include "libavresample/version.h" 102 | 103 | #define AVRESAMPLE_MAX_CHANNELS 32 104 | 105 | typedef struct AVAudioResampleContext AVAudioResampleContext; 106 | 107 | /** Mixing Coefficient Types */ 108 | enum AVMixCoeffType { 109 | AV_MIX_COEFF_TYPE_Q8, /** 16-bit 8.8 fixed-point */ 110 | AV_MIX_COEFF_TYPE_Q15, /** 32-bit 17.15 fixed-point */ 111 | AV_MIX_COEFF_TYPE_FLT, /** floating-point */ 112 | AV_MIX_COEFF_TYPE_NB, /** Number of coeff types. Not part of ABI */ 113 | }; 114 | 115 | /** Resampling Filter Types */ 116 | enum AVResampleFilterType { 117 | AV_RESAMPLE_FILTER_TYPE_CUBIC, /**< Cubic */ 118 | AV_RESAMPLE_FILTER_TYPE_BLACKMAN_NUTTALL, /**< Blackman Nuttall Windowed Sinc */ 119 | AV_RESAMPLE_FILTER_TYPE_KAISER, /**< Kaiser Windowed Sinc */ 120 | }; 121 | 122 | enum AVResampleDitherMethod { 123 | AV_RESAMPLE_DITHER_NONE, /**< Do not use dithering */ 124 | AV_RESAMPLE_DITHER_RECTANGULAR, /**< Rectangular Dither */ 125 | AV_RESAMPLE_DITHER_TRIANGULAR, /**< Triangular Dither*/ 126 | AV_RESAMPLE_DITHER_TRIANGULAR_HP, /**< Triangular Dither with High Pass */ 127 | AV_RESAMPLE_DITHER_TRIANGULAR_NS, /**< Triangular Dither with Noise Shaping */ 128 | AV_RESAMPLE_DITHER_NB, /**< Number of dither types. Not part of ABI. */ 129 | }; 130 | 131 | /** 132 | * Return the LIBAVRESAMPLE_VERSION_INT constant. 133 | */ 134 | unsigned avresample_version(void); 135 | 136 | /** 137 | * Return the libavresample build-time configuration. 138 | * @return configure string 139 | */ 140 | const char *avresample_configuration(void); 141 | 142 | /** 143 | * Return the libavresample license. 144 | */ 145 | const char *avresample_license(void); 146 | 147 | /** 148 | * Get the AVClass for AVAudioResampleContext. 
149 | * 150 | * Can be used in combination with AV_OPT_SEARCH_FAKE_OBJ for examining options 151 | * without allocating a context. 152 | * 153 | * @see av_opt_find(). 154 | * 155 | * @return AVClass for AVAudioResampleContext 156 | */ 157 | const AVClass *avresample_get_class(void); 158 | 159 | /** 160 | * Allocate AVAudioResampleContext and set options. 161 | * 162 | * @return allocated audio resample context, or NULL on failure 163 | */ 164 | AVAudioResampleContext *avresample_alloc_context(void); 165 | 166 | /** 167 | * Initialize AVAudioResampleContext. 168 | * 169 | * @param avr audio resample context 170 | * @return 0 on success, negative AVERROR code on failure 171 | */ 172 | int avresample_open(AVAudioResampleContext *avr); 173 | 174 | /** 175 | * Close AVAudioResampleContext. 176 | * 177 | * This closes the context, but it does not change the parameters. The context 178 | * can be reopened with avresample_open(). It does, however, clear the output 179 | * FIFO and any remaining leftover samples in the resampling delay buffer. If 180 | * there was a custom matrix being used, that is also cleared. 181 | * 182 | * @see avresample_convert() 183 | * @see avresample_set_matrix() 184 | * 185 | * @param avr audio resample context 186 | */ 187 | void avresample_close(AVAudioResampleContext *avr); 188 | 189 | /** 190 | * Free AVAudioResampleContext and associated AVOption values. 191 | * 192 | * This also calls avresample_close() before freeing. 193 | * 194 | * @param avr audio resample context 195 | */ 196 | void avresample_free(AVAudioResampleContext **avr); 197 | 198 | /** 199 | * Generate a channel mixing matrix. 200 | * 201 | * This function is the one used internally by libavresample for building the 202 | * default mixing matrix. It is made public just as a utility function for 203 | * building custom matrices. 
204 | * 205 | * @param in_layout input channel layout 206 | * @param out_layout output channel layout 207 | * @param center_mix_level mix level for the center channel 208 | * @param surround_mix_level mix level for the surround channel(s) 209 | * @param lfe_mix_level mix level for the low-frequency effects channel 210 | * @param normalize if 1, coefficients will be normalized to prevent 211 | * overflow. if 0, coefficients will not be 212 | * normalized. 213 | * @param[out] matrix mixing coefficients; matrix[i + stride * o] is 214 | * the weight of input channel i in output channel o. 215 | * @param stride distance between adjacent input channels in the 216 | * matrix array 217 | * @param matrix_encoding matrixed stereo downmix mode (e.g. dplii) 218 | * @return 0 on success, negative AVERROR code on failure 219 | */ 220 | int avresample_build_matrix(uint64_t in_layout, uint64_t out_layout, 221 | double center_mix_level, double surround_mix_level, 222 | double lfe_mix_level, int normalize, double *matrix, 223 | int stride, enum AVMatrixEncoding matrix_encoding); 224 | 225 | /** 226 | * Get the current channel mixing matrix. 227 | * 228 | * If no custom matrix has been previously set or the AVAudioResampleContext is 229 | * not open, an error is returned. 230 | * 231 | * @param avr audio resample context 232 | * @param matrix mixing coefficients; matrix[i + stride * o] is the weight of 233 | * input channel i in output channel o. 234 | * @param stride distance between adjacent input channels in the matrix array 235 | * @return 0 on success, negative AVERROR code on failure 236 | */ 237 | int avresample_get_matrix(AVAudioResampleContext *avr, double *matrix, 238 | int stride); 239 | 240 | /** 241 | * Set channel mixing matrix. 242 | * 243 | * Allows for setting a custom mixing matrix, overriding the default matrix 244 | * generated internally during avresample_open(). 
This function can be called 245 | * anytime on an allocated context, either before or after calling 246 | * avresample_open(), as long as the channel layouts have been set. 247 | * avresample_convert() always uses the current matrix. 248 | * Calling avresample_close() on the context will clear the current matrix. 249 | * 250 | * @see avresample_close() 251 | * 252 | * @param avr audio resample context 253 | * @param matrix mixing coefficients; matrix[i + stride * o] is the weight of 254 | * input channel i in output channel o. 255 | * @param stride distance between adjacent input channels in the matrix array 256 | * @return 0 on success, negative AVERROR code on failure 257 | */ 258 | int avresample_set_matrix(AVAudioResampleContext *avr, const double *matrix, 259 | int stride); 260 | 261 | /** 262 | * Set a customized input channel mapping. 263 | * 264 | * This function can only be called when the allocated context is not open. 265 | * Also, the input channel layout must have already been set. 266 | * 267 | * Calling avresample_close() on the context will clear the channel mapping. 268 | * 269 | * The map for each input channel specifies the channel index in the source to 270 | * use for that particular channel, or -1 to mute the channel. Source channels 271 | * can be duplicated by using the same index for multiple input channels. 
272 | * 273 | * Examples: 274 | * 275 | * Reordering 5.1 AAC order (C,L,R,Ls,Rs,LFE) to Libav order (L,R,C,LFE,Ls,Rs): 276 | * { 1, 2, 0, 5, 3, 4 } 277 | * 278 | * Muting the 3rd channel in 4-channel input: 279 | * { 0, 1, -1, 3 } 280 | * 281 | * Duplicating the left channel of stereo input: 282 | * { 0, 0 } 283 | * 284 | * @param avr audio resample context 285 | * @param channel_map customized input channel mapping 286 | * @return 0 on success, negative AVERROR code on failure 287 | */ 288 | int avresample_set_channel_mapping(AVAudioResampleContext *avr, 289 | const int *channel_map); 290 | 291 | /** 292 | * Set compensation for resampling. 293 | * 294 | * This can be called anytime after avresample_open(). If resampling is not 295 | * automatically enabled because of a sample rate conversion, the 296 | * "force_resampling" option must have been set to 1 when opening the context 297 | * in order to use resampling compensation. 298 | * 299 | * @param avr audio resample context 300 | * @param sample_delta compensation delta, in samples 301 | * @param compensation_distance compensation distance, in samples 302 | * @return 0 on success, negative AVERROR code on failure 303 | */ 304 | int avresample_set_compensation(AVAudioResampleContext *avr, int sample_delta, 305 | int compensation_distance); 306 | 307 | /** 308 | * Convert input samples and write them to the output FIFO. 309 | * 310 | * The upper bound on the number of output samples is given by 311 | * avresample_available() + (avresample_get_delay() + number of input samples) * 312 | * output sample rate / input sample rate. 313 | * 314 | * The output data can be NULL or have fewer allocated samples than required. 315 | * In this case, any remaining samples not written to the output will be added 316 | * to an internal FIFO buffer, to be returned at the next call to this function 317 | * or to avresample_read(). 
318 | * 319 | * If converting sample rate, there may be data remaining in the internal 320 | * resampling delay buffer. avresample_get_delay() tells the number of remaining 321 | * samples. To get this data as output, call avresample_convert() with NULL 322 | * input. 323 | * 324 | * At the end of the conversion process, there may be data remaining in the 325 | * internal FIFO buffer. avresample_available() tells the number of remaining 326 | * samples. To get this data as output, either call avresample_convert() with 327 | * NULL input or call avresample_read(). 328 | * 329 | * @see avresample_available() 330 | * @see avresample_read() 331 | * @see avresample_get_delay() 332 | * 333 | * @param avr audio resample context 334 | * @param output output data pointers 335 | * @param out_plane_size output plane size, in bytes. 336 | * This can be 0 if unknown, but that will lead to 337 | * optimized functions not being used directly on the 338 | * output, which could slow down some conversions. 339 | * @param out_samples maximum number of samples that the output buffer can hold 340 | * @param input input data pointers 341 | * @param in_plane_size input plane size, in bytes 342 | * This can be 0 if unknown, but that will lead to 343 | * optimized functions not being used directly on the 344 | * input, which could slow down some conversions. 345 | * @param in_samples number of input samples to convert 346 | * @return number of samples written to the output buffer, 347 | * not including converted samples added to the internal 348 | * output FIFO 349 | */ 350 | int avresample_convert(AVAudioResampleContext *avr, uint8_t **output, 351 | int out_plane_size, int out_samples, uint8_t **input, 352 | int in_plane_size, int in_samples); 353 | 354 | /** 355 | * Return the number of samples currently in the resampling delay buffer. 356 | * 357 | * When resampling, there may be a delay between the input and output. 
Any 358 | * unconverted samples in each call are stored internally in a delay buffer. 359 | * This function allows the user to determine the current number of samples in 360 | * the delay buffer, which can be useful for synchronization. 361 | * 362 | * @see avresample_convert() 363 | * 364 | * @param avr audio resample context 365 | * @return number of samples currently in the resampling delay buffer 366 | */ 367 | int avresample_get_delay(AVAudioResampleContext *avr); 368 | 369 | /** 370 | * Return the number of available samples in the output FIFO. 371 | * 372 | * During conversion, if the user does not specify an output buffer or 373 | * specifies an output buffer that is smaller than what is needed, remaining 374 | * samples that are not written to the output are stored to an internal FIFO 375 | * buffer. The samples in the FIFO can be read with avresample_read() or 376 | * avresample_convert(). 377 | * 378 | * @see avresample_read() 379 | * @see avresample_convert() 380 | * 381 | * @param avr audio resample context 382 | * @return number of samples available for reading 383 | */ 384 | int avresample_available(AVAudioResampleContext *avr); 385 | 386 | /** 387 | * Read samples from the output FIFO. 388 | * 389 | * During conversion, if the user does not specify an output buffer or 390 | * specifies an output buffer that is smaller than what is needed, remaining 391 | * samples that are not written to the output are stored to an internal FIFO 392 | * buffer. This function can be used to read samples from that internal FIFO. 393 | * 394 | * @see avresample_available() 395 | * @see avresample_convert() 396 | * 397 | * @param avr audio resample context 398 | * @param output output data pointers. May be NULL, in which case 399 | * nb_samples of data is discarded from output FIFO. 
400 | * @param nb_samples number of samples to read from the FIFO 401 | * @return the number of samples written to output 402 | */ 403 | int avresample_read(AVAudioResampleContext *avr, uint8_t **output, int nb_samples); 404 | 405 | /** 406 | * @} 407 | */ 408 | 409 | #endif /* AVRESAMPLE_AVRESAMPLE_H */ 410 | -------------------------------------------------------------------------------- /LiveStreamer/ffmpeg/libavresample/version.h: -------------------------------------------------------------------------------- 1 | /* 2 | * This file is part of Libav. 3 | * 4 | * Libav is free software; you can redistribute it and/or 5 | * modify it under the terms of the GNU Lesser General Public 6 | * License as published by the Free Software Foundation; either 7 | * version 2.1 of the License, or (at your option) any later version. 8 | * 9 | * Libav is distributed in the hope that it will be useful, 10 | * but WITHOUT ANY WARRANTY; without even the implied warranty of 11 | * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU 12 | * Lesser General Public License for more details. 13 | * 14 | * You should have received a copy of the GNU Lesser General Public 15 | * License along with Libav; if not, write to the Free Software 16 | * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA 17 | */ 18 | 19 | #ifndef AVRESAMPLE_VERSION_H 20 | #define AVRESAMPLE_VERSION_H 21 | 22 | /** 23 | * @file 24 | * @ingroup lavr 25 | * Libavresample version macros. 
26 | */ 27 | 28 | #define LIBAVRESAMPLE_VERSION_MAJOR 1 29 | #define LIBAVRESAMPLE_VERSION_MINOR 1 30 | #define LIBAVRESAMPLE_VERSION_MICRO 0 31 | 32 | #define LIBAVRESAMPLE_VERSION_INT AV_VERSION_INT(LIBAVRESAMPLE_VERSION_MAJOR, \ 33 | LIBAVRESAMPLE_VERSION_MINOR, \ 34 | LIBAVRESAMPLE_VERSION_MICRO) 35 | #define LIBAVRESAMPLE_VERSION AV_VERSION(LIBAVRESAMPLE_VERSION_MAJOR, \ 36 | LIBAVRESAMPLE_VERSION_MINOR, \ 37 | LIBAVRESAMPLE_VERSION_MICRO) 38 | #define LIBAVRESAMPLE_BUILD LIBAVRESAMPLE_VERSION_INT 39 | 40 | #define LIBAVRESAMPLE_IDENT "Lavr" AV_STRINGIFY(LIBAVRESAMPLE_VERSION) 41 | 42 | /** 43 | * FF_API_* defines may be placed below to indicate public API that will be 44 | * dropped at a future version bump. The defines themselves are not part of 45 | * the public API and may change, break or disappear at any time. 46 | */ 47 | 48 | #ifndef FF_API_RESAMPLE_CLOSE_OPEN 49 | #define FF_API_RESAMPLE_CLOSE_OPEN (LIBAVRESAMPLE_VERSION_MAJOR < 2) 50 | #endif 51 | 52 | #endif /* AVRESAMPLE_VERSION_H */ 53 | -------------------------------------------------------------------------------- /LiveStreamer/ffmpeg/libavutil/libm.h: -------------------------------------------------------------------------------- 1 | /* 2 | * This file is part of FFmpeg. 3 | * 4 | * FFmpeg is free software; you can redistribute it and/or 5 | * modify it under the terms of the GNU Lesser General Public 6 | * License as published by the Free Software Foundation; either 7 | * version 2.1 of the License, or (at your option) any later version. 8 | * 9 | * FFmpeg is distributed in the hope that it will be useful, 10 | * but WITHOUT ANY WARRANTY; without even the implied warranty of 11 | * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU 12 | * Lesser General Public License for more details. 
13 | * 14 | * You should have received a copy of the GNU Lesser General Public 15 | * License along with FFmpeg; if not, write to the Free Software 16 | * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA 17 | */ 18 | 19 | /** 20 | * @file 21 | * Replacements for frequently missing libm functions 22 | */ 23 | 24 | #ifndef AVUTIL_LIBM_H 25 | #define AVUTIL_LIBM_H 26 | 27 | #include 28 | #include "config.h" 29 | #include "attributes.h" 30 | #include "intfloat.h" 31 | 32 | #if HAVE_MIPSFPU && HAVE_INLINE_ASM 33 | #include "libavutil/mips/libm_mips.h" 34 | #endif /* HAVE_MIPSFPU && HAVE_INLINE_ASM*/ 35 | 36 | #if !HAVE_ATANF 37 | #undef atanf 38 | #define atanf(x) ((float)atan(x)) 39 | #endif 40 | 41 | #if !HAVE_ATAN2F 42 | #undef atan2f 43 | #define atan2f(y, x) ((float)atan2(y, x)) 44 | #endif 45 | 46 | #if !HAVE_POWF 47 | #undef powf 48 | #define powf(x, y) ((float)pow(x, y)) 49 | #endif 50 | 51 | #if !HAVE_CBRT 52 | static av_always_inline double cbrt(double x) 53 | { 54 | return x < 0 ? -pow(-x, 1.0 / 3.0) : pow(x, 1.0 / 3.0); 55 | } 56 | #endif 57 | 58 | #if !HAVE_CBRTF 59 | static av_always_inline float cbrtf(float x) 60 | { 61 | return x < 0 ? 
-powf(-x, 1.0 / 3.0) : powf(x, 1.0 / 3.0); 62 | } 63 | #endif 64 | 65 | #if !HAVE_COSF 66 | #undef cosf 67 | #define cosf(x) ((float)cos(x)) 68 | #endif 69 | 70 | #if !HAVE_EXPF 71 | #undef expf 72 | #define expf(x) ((float)exp(x)) 73 | #endif 74 | 75 | #if !HAVE_EXP2 76 | #undef exp2 77 | #define exp2(x) exp((x) * 0.693147180559945) 78 | #endif /* HAVE_EXP2 */ 79 | 80 | #if !HAVE_EXP2F 81 | #undef exp2f 82 | #define exp2f(x) ((float)exp2(x)) 83 | #endif /* HAVE_EXP2F */ 84 | 85 | #if !HAVE_ISINF 86 | static av_always_inline av_const int isinf(float x) 87 | { 88 | uint32_t v = av_float2int(x); 89 | if ((v & 0x7f800000) != 0x7f800000) 90 | return 0; 91 | return !(v & 0x007fffff); 92 | } 93 | #endif /* HAVE_ISINF */ 94 | 95 | #if !HAVE_ISNAN 96 | static av_always_inline av_const int isnan(float x) 97 | { 98 | uint32_t v = av_float2int(x); 99 | if ((v & 0x7f800000) != 0x7f800000) 100 | return 0; 101 | return v & 0x007fffff; 102 | } 103 | #endif /* HAVE_ISNAN */ 104 | 105 | #if !HAVE_LDEXPF 106 | #undef ldexpf 107 | #define ldexpf(x, exp) ((float)ldexp(x, exp)) 108 | #endif 109 | 110 | #if !HAVE_LLRINT 111 | #undef llrint 112 | #define llrint(x) ((long long)rint(x)) 113 | #endif /* HAVE_LLRINT */ 114 | 115 | #if !HAVE_LLRINTF 116 | #undef llrintf 117 | #define llrintf(x) ((long long)rint(x)) 118 | #endif /* HAVE_LLRINT */ 119 | 120 | #if !HAVE_LOG2 121 | #undef log2 122 | #define log2(x) (log(x) * 1.44269504088896340736) 123 | #endif /* HAVE_LOG2 */ 124 | 125 | #if !HAVE_LOG2F 126 | #undef log2f 127 | #define log2f(x) ((float)log2(x)) 128 | #endif /* HAVE_LOG2F */ 129 | 130 | #if !HAVE_LOG10F 131 | #undef log10f 132 | #define log10f(x) ((float)log10(x)) 133 | #endif 134 | 135 | #if !HAVE_SINF 136 | #undef sinf 137 | #define sinf(x) ((float)sin(x)) 138 | #endif 139 | 140 | #if !HAVE_RINT 141 | static inline double rint(double x) 142 | { 143 | return x >= 0 ? 
floor(x + 0.5) : ceil(x - 0.5); 144 | } 145 | #endif /* HAVE_RINT */ 146 | 147 | #if !HAVE_LRINT 148 | static av_always_inline av_const long int lrint(double x) 149 | { 150 | return rint(x); 151 | } 152 | #endif /* HAVE_LRINT */ 153 | 154 | #if !HAVE_LRINTF 155 | static av_always_inline av_const long int lrintf(float x) 156 | { 157 | return (int)(rint(x)); 158 | } 159 | #endif /* HAVE_LRINTF */ 160 | 161 | #if !HAVE_ROUND 162 | static av_always_inline av_const double round(double x) 163 | { 164 | return (x > 0) ? floor(x + 0.5) : ceil(x - 0.5); 165 | } 166 | #endif /* HAVE_ROUND */ 167 | 168 | #if !HAVE_ROUNDF 169 | static av_always_inline av_const float roundf(float x) 170 | { 171 | return (x > 0) ? floor(x + 0.5) : ceil(x - 0.5); 172 | } 173 | #endif /* HAVE_ROUNDF */ 174 | 175 | #if !HAVE_TRUNC 176 | static av_always_inline av_const double trunc(double x) 177 | { 178 | return (x > 0) ? floor(x) : ceil(x); 179 | } 180 | #endif /* HAVE_TRUNC */ 181 | 182 | #if !HAVE_TRUNCF 183 | static av_always_inline av_const float truncf(float x) 184 | { 185 | return (x > 0) ? floor(x) : ceil(x); 186 | } 187 | #endif /* HAVE_TRUNCF */ 188 | 189 | #endif /* AVUTIL_LIBM_H */ 190 | -------------------------------------------------------------------------------- /LiveStreamer/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | iPhone HTTP Server Example 4 | 5 | 6 |

Welcome to CocoaHTTPServer!

7 | 8 | 11 | 12 | 13 | -------------------------------------------------------------------------------- /LiveStreamer/main.m: -------------------------------------------------------------------------------- 1 | // 2 | // main.m 3 | // LiveStreamer 4 | // 5 | // Created by Christopher Ballinger on 9/11/13. 6 | // Copyright (c) 2013 OpenWatch, Inc. All rights reserved. 7 | // 8 | 9 | #import 10 | 11 | #import "OWAppDelegate.h" 12 | 13 | int main(int argc, char *argv[]) 14 | { 15 | @autoreleasepool { 16 | return UIApplicationMain(argc, argv, nil, NSStringFromClass([OWAppDelegate class])); 17 | } 18 | } 19 | -------------------------------------------------------------------------------- /LiveStreamer/playlist.m3u8: -------------------------------------------------------------------------------- 1 | #EXTM3U 2 | #EXT-X-VERSION:3 3 | #EXT-X-STREAM-INF:PROGRAM-ID=1,BANDWIDTH=1007584,CODECS="avc1.77.31, mp4a.40.2",RESOLUTION=640x480 4 | chunklist.m3u8 -------------------------------------------------------------------------------- /Media/red_dot@2x.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OpenWatch/LiveStreamer-iOS/bdc873b03e7eba3d9d8092b612a73839933d17b7/Media/red_dot@2x.png -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # LiveStreamer 2 | 3 | This is a test of the OpenWatch Live Broadcasting System. --------------------------------------------------------------------------------