├── .gitignore ├── .gitmodules ├── FFmpegEncoder.xcodeproj └── project.pbxproj ├── FFmpegEncoder ├── AACEncoder.h ├── AACEncoder.m ├── Android │ ├── FFmpegWrapper.c │ ├── FFmpegWrapper.java │ ├── FileUtils.java │ ├── HLSFileObserver.java │ ├── HLSRecorder.java │ ├── HWRecorderActivity.java │ ├── LiveHLSRecorder.java │ └── S3Client.java ├── CBAppDelegate.h ├── CBAppDelegate.m ├── DirectoryWatcher.h ├── DirectoryWatcher.m ├── FFmpegEncoder-Info.plist ├── FFmpegEncoder-Prefix.pch ├── H264Encoder │ ├── AVEncoder.h │ ├── AVEncoder.mm │ ├── CameraServer.h │ ├── CameraServer.mm │ ├── EncoderDemoAppDelegate.h │ ├── EncoderDemoAppDelegate.m │ ├── EncoderDemoViewController.h │ ├── EncoderDemoViewController.m │ ├── LICENSE.markdown │ ├── MP4Atom.h │ ├── MP4Atom.m │ ├── NALUnit.cpp │ ├── NALUnit.h │ ├── RTSPClientConnection.h │ ├── RTSPClientConnection.mm │ ├── RTSPMessage.h │ ├── RTSPMessage.m │ ├── RTSPServer.h │ ├── RTSPServer.m │ ├── VideoEncoder.h │ └── VideoEncoder.m ├── HLSUploader.h ├── HLSUploader.m ├── HLSWriter.h ├── HLSWriter.m ├── Images.xcassets │ ├── AppIcon.appiconset │ │ └── Contents.json │ └── LaunchImage.launchimage │ │ └── Contents.json ├── Kickflip │ ├── KFAPIClient.h │ ├── KFAPIClient.m │ ├── KFBroadcaster.h │ ├── KFBroadcaster.m │ ├── KFEndpointResponse.h │ ├── KFEndpointResponse.m │ ├── KFHLSMonitor.h │ ├── KFHLSMonitor.m │ ├── KFLogging.h │ ├── KFPreviewView.h │ ├── KFPreviewView.m │ ├── KFRTMPEndpointResponse.h │ ├── KFRTMPEndpointResponse.m │ ├── KFRecorder.h │ ├── KFRecorder.m │ ├── KFRecordingViewController.h │ ├── KFRecordingViewController.m │ ├── KFS3EndpointResponse.h │ ├── KFS3EndpointResponse.m │ ├── KFS3Uploader.h │ ├── KFS3Uploader.m │ ├── KFUploader.h │ ├── KFUploader.m │ ├── KFUser.h │ ├── KFUser.m │ ├── Kickflip.h │ └── Kickflip.m ├── OWSharedS3Client.h ├── OWSharedS3Client.m ├── en.lproj │ └── InfoPlist.strings └── main.m ├── LICENSE └── README.md /.gitignore: 
-------------------------------------------------------------------------------- 1 | .DS_Store 2 | build 3 | *.mode1v3 4 | *.pbxuser 5 | project.xcworkspace 6 | xcuserdata 7 | .svn 8 | DerivedData 9 | *.orig 10 | OWSecrets.h 11 | -------------------------------------------------------------------------------- /.gitmodules: -------------------------------------------------------------------------------- 1 | [submodule "Submodules/FFmpegWrapper"] 2 | path = Submodules/FFmpegWrapper 3 | url = git@github.com:OpenWatch/FFmpegWrapper.git 4 | [submodule "Submodules/CocoaHTTPServer"] 5 | path = Submodules/CocoaHTTPServer 6 | url = git@github.com:robbiehanson/CocoaHTTPServer.git 7 | [submodule "Submodules/OWS3Client"] 8 | path = Submodules/OWS3Client 9 | url = git@github.com:OpenWatch/OWS3Client.git 10 | [submodule "Submodules/AFNetworking"] 11 | path = Submodules/AFNetworking 12 | url = git@github.com:AFNetworking/AFNetworking.git 13 | [submodule "Submodules/AFOAuth2Client"] 14 | path = Submodules/AFOAuth2Client 15 | url = git@github.com:AFNetworking/AFOAuth2Client.git 16 | [submodule "Submodules/SSKeychain"] 17 | path = Submodules/SSKeychain 18 | url = git@github.com:soffes/sskeychain.git 19 | [submodule "Submodules/CocoaLumberjack"] 20 | path = Submodules/CocoaLumberjack 21 | url = git@github.com:CocoaLumberjack/CocoaLumberjack.git 22 | -------------------------------------------------------------------------------- /FFmpegEncoder/AACEncoder.h: -------------------------------------------------------------------------------- 1 | // 2 | // AACEncoder.h 3 | // FFmpegEncoder 4 | // 5 | // Created by Christopher Ballinger on 12/18/13. 6 | // Copyright (c) 2013 Christopher Ballinger. All rights reserved. 
//

#import <Foundation/Foundation.h>  // NOTE(review): angle-bracket header names were stripped from this dump;
#import <CoreMedia/CoreMedia.h>    // Foundation + CoreMedia (for CMSampleBufferRef) are the presumed originals — confirm.

@interface AACEncoder : NSObject

// Serial queue on which PCM -> AAC conversion work runs.
@property (nonatomic) dispatch_queue_t encoderQueue;
// Serial queue on which completion blocks are invoked.
@property (nonatomic) dispatch_queue_t callbackQueue;

/// Asynchronously encodes one PCM sample buffer to AAC framed with an ADTS header.
/// @param sampleBuffer PCM audio from an AVCaptureSession audio output.
/// @param completionBlock Called on callbackQueue with the encoded data, or nil plus an NSError.
- (void) encodeSampleBuffer:(CMSampleBufferRef)sampleBuffer completionBlock:(void (^)(NSData *encodedData, NSError* error))completionBlock;

@end
-------------------------------------------------------------------------------- /FFmpegEncoder/AACEncoder.m: --------------------------------------------------------------------------------
//
//  AACEncoder.m
//  FFmpegEncoder
//
//  Created by Christopher Ballinger on 12/18/13.
//  Copyright (c) 2013 Christopher Ballinger. All rights reserved.
//
//  Based on: http://stackoverflow.com/questions/10817036/can-i-use-avcapturesession-to-encode-an-aac-stream-to-memory

#import "AACEncoder.h"
#import <AudioToolbox/AudioToolbox.h>  // NOTE(review): stripped in dump — AudioConverter API lives here; confirm original imports
#import <AVFoundation/AVFoundation.h>  // NOTE(review): stripped in dump; confirm against the original file

@interface AACEncoder()
@property (nonatomic) AudioConverterRef audioConverter; // lazily created from the first sample buffer's format
@property (nonatomic) uint8_t *aacBuffer;               // scratch buffer that receives one encoded AAC packet
@property (nonatomic) NSUInteger aacBufferSize;
@property (nonatomic) char *pcmBuffer;                  // borrowed pointer into the current CMBlockBuffer — not owned
@property (nonatomic) size_t pcmBufferSize;

@end

@implementation AACEncoder

- (void) dealloc {
    // FIX(review): guard the dispose — if no sample buffer was ever encoded the
    // converter was never created and _audioConverter is still NULL.
    if (_audioConverter) {
        AudioConverterDispose(_audioConverter);
    }
    free(_aacBuffer);
}

- (id) init {
    if (self = [super init]) {
        _encoderQueue = dispatch_queue_create("AAC Encoder Queue", DISPATCH_QUEUE_SERIAL);
        _callbackQueue = dispatch_queue_create("AAC Encoder Callback Queue", DISPATCH_QUEUE_SERIAL);
        _audioConverter = NULL;
        _pcmBufferSize = 0;
        _pcmBuffer = NULL;
        // 1024 bytes is enough for one AAC-LC packet at these settings;
        // the converter is asked for exactly one packet per call.
        _aacBufferSize = 1024;
        _aacBuffer = malloc(_aacBufferSize * sizeof(uint8_t));
        memset(_aacBuffer, 0, _aacBufferSize);
    }
    return self;
}

// Creates the AudioConverter using the incoming sample buffer's PCM format as
// the source description and a hardcoded mono AAC-LC description as the target.
- (void) setupEncoderFromSampleBuffer:(CMSampleBufferRef)sampleBuffer {
    AudioStreamBasicDescription inAudioStreamBasicDescription = *CMAudioFormatDescriptionGetStreamBasicDescription((CMAudioFormatDescriptionRef)CMSampleBufferGetFormatDescription(sampleBuffer));

    AudioStreamBasicDescription outAudioStreamBasicDescription = {0}; // always zero-init a new ASBD
    // Match the input sample rate; for compressed formats this is the rate of
    // the equivalent decompressed data.
    outAudioStreamBasicDescription.mSampleRate = inAudioStreamBasicDescription.mSampleRate;
    outAudioStreamBasicDescription.mFormatID = kAudioFormatMPEG4AAC; // kAudioFormatMPEG4AAC_HE does not work. Can't find `AudioClassDescription`. `mFormatFlags` is set to 0.
    outAudioStreamBasicDescription.mFormatFlags = kMPEG4Object_AAC_LC;
    outAudioStreamBasicDescription.mBytesPerPacket = 0;    // 0 = variable packet size
    outAudioStreamBasicDescription.mFramesPerPacket = 1024; // fixed at 1024 frames/packet for AAC
    outAudioStreamBasicDescription.mBytesPerFrame = 0;     // 0 for compressed formats
    outAudioStreamBasicDescription.mChannelsPerFrame = 1;  // mono
    outAudioStreamBasicDescription.mBitsPerChannel = 0;    // 0 for compressed formats
    outAudioStreamBasicDescription.mReserved = 0;
    AudioClassDescription *description = [self
                                          getAudioClassDescriptionWithType:kAudioFormatMPEG4AAC
                                          fromManufacturer:kAppleSoftwareAudioCodecManufacturer];

    OSStatus status = AudioConverterNewSpecific(&inAudioStreamBasicDescription, &outAudioStreamBasicDescription, 1, description, &_audioConverter);
    if (status != 0) {
        NSLog(@"setup converter: %d", (int)status);
    }
}

// Looks up the AudioClassDescription matching the requested codec type and
// manufacturer (Apple software encoder). Returns a pointer to static storage,
// or nil when no matching encoder is installed.
- (AudioClassDescription *)getAudioClassDescriptionWithType:(UInt32)type
                                           fromManufacturer:(UInt32)manufacturer
{
    static AudioClassDescription desc;

    UInt32 encoderSpecifier = type;
    OSStatus st;

    UInt32 size;
    st = AudioFormatGetPropertyInfo(kAudioFormatProperty_Encoders,
                                    sizeof(encoderSpecifier),
                                    &encoderSpecifier,
                                    &size);
    if (st) {
        NSLog(@"error getting audio format propery info: %d", (int)(st));
        return nil;
    }

    unsigned int count = size / sizeof(AudioClassDescription);
    AudioClassDescription descriptions[count];
    st = AudioFormatGetProperty(kAudioFormatProperty_Encoders,
                                sizeof(encoderSpecifier),
                                &encoderSpecifier,
                                &size,
                                descriptions);
    if (st) {
        NSLog(@"error getting audio format propery: %d", (int)(st));
        return nil;
    }

    for (unsigned int i = 0; i < count; i++) {
        if ((type == descriptions[i].mSubType) &&
            (manufacturer == descriptions[i].mManufacturer)) {
            memcpy(&desc, &(descriptions[i]), sizeof(desc));
            return &desc;
        }
    }

    return nil;
}

// AudioConverter input callback: feeds the pending PCM buffer to the converter.
// NOTE(review): copyPCMSamplesIntoBuffer returns a BYTE count while
// *ioNumberDataPackets is a PACKET count; the comparison below mixes the two
// units. It happens to work for this capture configuration (each sample buffer
// holds at least one packet's worth of bytes) — confirm before reusing elsewhere.
static OSStatus inInputDataProc(AudioConverterRef inAudioConverter, UInt32 *ioNumberDataPackets, AudioBufferList *ioData, AudioStreamPacketDescription **outDataPacketDescription, void *inUserData)
{
    AACEncoder *encoder = (__bridge AACEncoder *)(inUserData);
    UInt32 requestedPackets = *ioNumberDataPackets;
    size_t copiedSamples = [encoder copyPCMSamplesIntoBuffer:ioData];
    if (copiedSamples < requestedPackets) {
        // PCM buffer isn't full enough — tell the converter no data is available.
        *ioNumberDataPackets = 0;
        return -1;
    }
    *ioNumberDataPackets = 1;
    return noErr;
}

// Hands the pending PCM pointer to the converter's buffer list and clears the
// pending state so the same data is never fed twice. Returns the byte count.
- (size_t) copyPCMSamplesIntoBuffer:(AudioBufferList*)ioData {
    size_t originalBufferSize = _pcmBufferSize;
    if (!originalBufferSize) {
        return 0;
    }
    ioData->mBuffers[0].mData = _pcmBuffer;
    ioData->mBuffers[0].mDataByteSize = _pcmBufferSize;
    _pcmBuffer = NULL;
    _pcmBufferSize = 0;
    return originalBufferSize;
}


- (void) encodeSampleBuffer:(CMSampleBufferRef)sampleBuffer completionBlock:(void (^)(NSData * encodedData, NSError* error))completionBlock {
    CFRetain(sampleBuffer);
    dispatch_async(_encoderQueue, ^{
        if (!_audioConverter) {
            [self setupEncoderFromSampleBuffer:sampleBuffer];
        }
        CMBlockBufferRef blockBuffer = CMSampleBufferGetDataBuffer(sampleBuffer);
        // FIX(review): CMSampleBufferGetDataBuffer can return NULL (e.g. a
        // sample buffer whose data is not ready); the old code called
        // CFRetain(NULL), which crashes. Fail over to the completion block.
        if (!blockBuffer) {
            if (completionBlock) {
                dispatch_async(_callbackQueue, ^{
                    completionBlock(nil, [NSError errorWithDomain:NSOSStatusErrorDomain code:kCMSampleBufferError_RequiredParameterMissing userInfo:nil]);
                });
            }
            CFRelease(sampleBuffer);
            return;
        }
        CFRetain(blockBuffer);
        OSStatus status = CMBlockBufferGetDataPointer(blockBuffer, 0, NULL, &_pcmBufferSize, &_pcmBuffer);
        NSError *error = nil;
        if (status != kCMBlockBufferNoErr) {
            error = [NSError errorWithDomain:NSOSStatusErrorDomain code:status userInfo:nil];
        }

        memset(_aacBuffer, 0, _aacBufferSize);
        AudioBufferList outAudioBufferList = {0};
        outAudioBufferList.mNumberBuffers = 1;
        outAudioBufferList.mBuffers[0].mNumberChannels = 1;
        outAudioBufferList.mBuffers[0].mDataByteSize = _aacBufferSize;
        outAudioBufferList.mBuffers[0].mData = _aacBuffer;
        AudioStreamPacketDescription *outPacketDescription = NULL;
        UInt32 ioOutputDataPacketSize = 1; // ask the converter for exactly one packet
        status = AudioConverterFillComplexBuffer(_audioConverter, inInputDataProc, (__bridge void *)(self), &ioOutputDataPacketSize, &outAudioBufferList, outPacketDescription);
        NSData *data = nil;
        if (status == 0) {
            // Prefix the raw AAC packet with an ADTS header so the stream is
            // self-describing (required by the MPEG-TS muxer downstream).
            NSData *rawAAC = [NSData dataWithBytes:outAudioBufferList.mBuffers[0].mData length:outAudioBufferList.mBuffers[0].mDataByteSize];
            NSData *adtsHeader = [self adtsDataForPacketLength:rawAAC.length];
            NSMutableData *fullData = [NSMutableData dataWithData:adtsHeader];
            [fullData appendData:rawAAC];
            data = fullData;
        } else {
            error = [NSError errorWithDomain:NSOSStatusErrorDomain code:status userInfo:nil];
        }
        if (completionBlock) {
            dispatch_async(_callbackQueue, ^{
                completionBlock(data, error);
            });
        }
        CFRelease(sampleBuffer);
        CFRelease(blockBuffer);
    });
}



/**
 *  Add ADTS header at the beginning of each and every AAC packet.
 *  This is needed as MediaCodec encoder generates a packet of raw
 *  AAC data.
 *
 *  Note the packetLen must count in the ADTS header itself.
 *  See: http://wiki.multimedia.cx/index.php?title=ADTS
 *  Also: http://wiki.multimedia.cx/index.php?title=MPEG-4_Audio#Channel_Configurations
 **/
- (NSData*) adtsDataForPacketLength:(NSUInteger)packetLength {
    int adtsLength = 7;
    char *packet = malloc(sizeof(char) * adtsLength);
    // Variables Recycled by addADTStoPacket
    int profile = 2;  // AAC LC (39 = MediaCodecInfo.CodecProfileLevel.AACObjectELD)
    int freqIdx = 4;  // 44.1KHz
    int chanCfg = 1;  // MPEG-4 Audio Channel Configuration. 1 Channel front-center
    NSUInteger fullLength = adtsLength + packetLength;
    // fill in ADTS data
    packet[0] = (char)0xFF; // 11111111 = syncword
    packet[1] = (char)0xF9; // 1111 1 00 1 = syncword MPEG-2 Layer CRC
    packet[2] = (char)(((profile-1)<<6) + (freqIdx<<2) +(chanCfg>>2));
    packet[3] = (char)(((chanCfg&3)<<6) + (fullLength>>11));
    packet[4] = (char)((fullLength&0x7FF) >> 3);
    packet[5] = (char)(((fullLength&7)<<5) + 0x1F);
    packet[6] = (char)0xFC;
    // NSData takes ownership of `packet` and frees it (freeWhenDone:YES).
    NSData *data = [NSData dataWithBytesNoCopy:packet length:adtsLength freeWhenDone:YES];
    return data;
}


@end
-------------------------------------------------------------------------------- /FFmpegEncoder/Android/FFmpegWrapper.c: --------------------------------------------------------------------------------
/*
 * Copyright (c) 2013, David Brodsky. All rights reserved.
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
See the 12 | * GNU General Public License for more details. 13 | * 14 | * You should have received a copy of the GNU General Public License 15 | * along with this program. If not, see . 16 | */ 17 | 18 | #include 19 | #include 20 | #include 21 | #include "libavcodec/avcodec.h" 22 | #include "libavformat/avformat.h" 23 | 24 | #define LOG_TAG "FFmpegWrapper" 25 | 26 | #define LOGI(...) __android_log_print(ANDROID_LOG_INFO, LOG_TAG, __VA_ARGS__) 27 | #define LOGE(...) __android_log_print(ANDROID_LOG_ERROR, LOG_TAG, __VA_ARGS__) 28 | 29 | // Output 30 | const char *outputPath; 31 | const char *outputFormatName = "hls"; 32 | int hlsSegmentDurationSec = 10; 33 | int audioStreamIndex = -1; 34 | int videoStreamIndex = -1; 35 | 36 | // Video 37 | int VIDEO_PIX_FMT = PIX_FMT_YUV420P; 38 | int VIDEO_CODEC_ID = CODEC_ID_H264; 39 | int VIDEO_WIDTH = 1280; 40 | int VIDEO_HEIGHT = 720; 41 | 42 | // Audio 43 | int AUDIO_CODEC_ID = CODEC_ID_AAC; 44 | int AUDIO_SAMPLE_FMT = AV_SAMPLE_FMT_S16; 45 | int AUDIO_SAMPLE_RATE = 44100; 46 | int AUDIO_CHANNELS = 1; 47 | 48 | AVFormatContext *outputFormatContext; 49 | AVStream *audioStream; 50 | AVStream *videoStream; 51 | AVCodec *audioCodec; 52 | AVCodec *videoCodec; 53 | AVRational *videoSourceTimeBase; 54 | AVRational *audioSourceTimeBase; 55 | 56 | AVPacket *packet; // recycled across calls to writeAVPacketFromEncodedData 57 | 58 | // Example h264 file: Used to configure AVFormatContext 59 | const char *sampleFilePath = "/sdcard/sample.ts"; 60 | 61 | // Debugging 62 | int videoFrameCount = 0; 63 | int WRITE_RAW_FILE = 0; // Write raw video packets to file 64 | 65 | FILE *raw_video; 66 | 67 | // FFmpeg Utilities 68 | 69 | void init(){ 70 | av_register_all(); 71 | avformat_network_init(); 72 | avcodec_register_all(); 73 | 74 | if(WRITE_RAW_FILE){ 75 | raw_video = fopen("/sdcard/raw.h264", "w"); 76 | } 77 | } 78 | 79 | char* stringForAVErrorNumber(int errorNumber){ 80 | char *errorBuffer = malloc(sizeof(char) * AV_ERROR_MAX_STRING_SIZE); 81 
| 82 | int strErrorResult = av_strerror(errorNumber, errorBuffer, AV_ERROR_MAX_STRING_SIZE); 83 | if (strErrorResult != 0) { 84 | LOGE("av_strerror error: %d", strErrorResult); 85 | return NULL; 86 | } 87 | return errorBuffer; 88 | } 89 | 90 | void addVideoStream(AVFormatContext *dest){ 91 | AVCodecContext *c; 92 | AVStream *st; 93 | AVCodec *codec; 94 | 95 | /* find the video encoder */ 96 | codec = avcodec_find_encoder(VIDEO_CODEC_ID); 97 | if (!codec) { 98 | LOGI("add_video_stream codec not found, as expected. No encoding necessary"); 99 | } 100 | 101 | st = avformat_new_stream(dest, codec); 102 | if (!st) { 103 | LOGE("add_video_stream could not alloc stream"); 104 | } 105 | 106 | videoStreamIndex = st->index; 107 | LOGI("addVideoStream at index %d", videoStreamIndex); 108 | c = st->codec; 109 | 110 | avcodec_get_context_defaults3(c, codec); 111 | 112 | c->codec_id = VIDEO_CODEC_ID; 113 | 114 | /* Put sample parameters. */ 115 | // c->bit_rate = 400000; 116 | /* Resolution must be a multiple of two. */ 117 | c->width = VIDEO_WIDTH; 118 | c->height = VIDEO_HEIGHT; 119 | 120 | /* timebase: This is the fundamental unit of time (in seconds) in terms 121 | * of which frame timestamps are represented. For fixed-fps content, 122 | * timebase should be 1/framerate and timestamp increments should be 123 | * identical to 1. */ 124 | c->time_base.den = 30; 125 | c->time_base.num = 1; 126 | /* 127 | c->gop_size = 12; // emit one intra frame every twelve frames at most 128 | */ 129 | c->pix_fmt = VIDEO_PIX_FMT; 130 | 131 | /* Not encoding 132 | if(codec_id == CODEC_ID_H264){ 133 | av_opt_set(c->priv_data, "preset", "ultrafast", 0); 134 | if(crf) 135 | av_opt_set_double(c->priv_data, "crf", crf, 0); 136 | else 137 | av_opt_set_double(c->priv_data, "crf", 24.0, 0); 138 | } 139 | */ 140 | 141 | /* Some formats want stream headers to be separate. 
*/ 142 | if (dest->oformat->flags & AVFMT_GLOBALHEADER) 143 | c->flags |= CODEC_FLAG_GLOBAL_HEADER; 144 | 145 | } 146 | 147 | void addAudioStream(AVFormatContext *formatContext){ 148 | AVCodecContext *codecContext; 149 | AVStream *st; 150 | AVCodec *codec; 151 | 152 | /* find the audio encoder */ 153 | codec = avcodec_find_encoder(AUDIO_CODEC_ID); 154 | if (!codec) { 155 | LOGE("add_audio_stream codec not found"); 156 | } 157 | //LOGI("add_audio_stream found codec_id: %d",codec_id); 158 | st = avformat_new_stream(formatContext, codec); 159 | if (!st) { 160 | LOGE("add_audio_stream could not alloc stream"); 161 | } 162 | 163 | audioStreamIndex = st->index; 164 | 165 | //st->id = 1; 166 | codecContext = st->codec; 167 | avcodec_get_context_defaults3(codecContext, codec); 168 | codecContext->strict_std_compliance = FF_COMPLIANCE_UNOFFICIAL; // for native aac support 169 | /* put sample parameters */ 170 | //codecContext->sample_fmt = AV_SAMPLE_FMT_FLT; 171 | codecContext->sample_fmt = AUDIO_SAMPLE_FMT; 172 | codecContext->time_base.den = 44100; 173 | codecContext->time_base.num = 1; 174 | //c->bit_rate = bit_rate; 175 | codecContext->sample_rate = AUDIO_SAMPLE_RATE; 176 | codecContext->channels = AUDIO_CHANNELS; 177 | LOGI("addAudioStream sample_rate %d index %d", codecContext->sample_rate, st->index); 178 | //LOGI("add_audio_stream parameters: sample_fmt: %d bit_rate: %d sample_rate: %d", codec_audio_sample_fmt, bit_rate, audio_sample_rate); 179 | // some formats want stream headers to be separate 180 | if (formatContext->oformat->flags & AVFMT_GLOBALHEADER) 181 | codecContext->flags |= CODEC_FLAG_GLOBAL_HEADER; 182 | } 183 | 184 | void copyAVFormatContext(AVFormatContext **dest, AVFormatContext **source){ 185 | int numStreams = (*source)->nb_streams; 186 | LOGI("copyAVFormatContext source has %d streams", numStreams); 187 | int i; 188 | for (i = 0; i < numStreams; i++) { 189 | // Get input stream 190 | AVStream *inputStream = (*source)->streams[i]; 191 | 
AVCodecContext *inputCodecContext = inputStream->codec; 192 | 193 | // Add new stream to output with codec from input stream 194 | //LOGI("Attempting to find encoder %s", avcodec_get_name(inputCodecContext->codec_id)); 195 | AVCodec *outputCodec = avcodec_find_encoder(inputCodecContext->codec_id); 196 | if(outputCodec == NULL){ 197 | LOGI("Unable to find encoder %s", avcodec_get_name(inputCodecContext->codec_id)); 198 | } 199 | 200 | AVStream *outputStream = avformat_new_stream(*dest, outputCodec); 201 | AVCodecContext *outputCodecContext = outputStream->codec; 202 | 203 | // Copy input stream's codecContext for output stream's codecContext 204 | avcodec_copy_context(outputCodecContext, inputCodecContext); 205 | outputCodecContext->strict_std_compliance = FF_COMPLIANCE_UNOFFICIAL; 206 | 207 | LOGI("copyAVFormatContext Copied stream %d with codec %s sample_fmt %s", i, avcodec_get_name(inputCodecContext->codec_id), av_get_sample_fmt_name(inputCodecContext->sample_fmt)); 208 | } 209 | } 210 | 211 | // FFInputFile functions 212 | // Using these to deduce codec parameters from test file 213 | 214 | AVFormatContext* avFormatContextForInputPath(const char *inputPath, const char *inputFormatString){ 215 | // You can override the detected input format 216 | AVFormatContext *inputFormatContext = NULL; 217 | AVInputFormat *inputFormat = NULL; 218 | //AVDictionary *inputOptions = NULL; 219 | 220 | if (inputFormatString) { 221 | inputFormat = av_find_input_format(inputFormatString); 222 | LOGI("avFormatContextForInputPath got inputFormat from string"); 223 | } 224 | LOGI("avFormatContextForInputPath post av_Find_input_format"); 225 | // It's possible to send more options to the parser 226 | // av_dict_set(&inputOptions, "video_size", "640x480", 0); 227 | // av_dict_set(&inputOptions, "pixel_format", "rgb24", 0); 228 | // av_dict_free(&inputOptions); // Don't forget to free 229 | 230 | LOGI("avFormatContextForInputPath pre avformat_open_input path: %s format: %s", inputPath, 
inputFormatString); 231 | int openInputResult = avformat_open_input(&inputFormatContext, inputPath, inputFormat, /*&inputOptions*/ NULL); 232 | LOGI("avFormatContextForInputPath avformat_open_input result: %d", openInputResult); 233 | if (openInputResult != 0) { 234 | LOGE("avformat_open_input failed: %s", stringForAVErrorNumber(openInputResult)); 235 | avformat_close_input(&inputFormatContext); 236 | return NULL; 237 | } 238 | 239 | int streamInfoResult = avformat_find_stream_info(inputFormatContext, NULL); 240 | LOGI("avFormatContextForInputPath avformat_find_stream_info result: %d", streamInfoResult); 241 | if (streamInfoResult < 0) { 242 | avformat_close_input(&inputFormatContext); 243 | LOGE("avformat_find_stream_info failed: %s", stringForAVErrorNumber(openInputResult)); 244 | return NULL; 245 | } 246 | 247 | LOGI("avFormatContextForInputPath Complete!"); 248 | LOGI("AVInputFormat %s Stream0 codec: %s Stream1 codec: %s", inputFormatContext->iformat->name, avcodec_get_name(inputFormatContext->streams[0]->codec->codec_id), avcodec_get_name(inputFormatContext->streams[1]->codec->codec_id) ); 249 | LOGI("Stream0 time_base: (num: %d, den: %d)", inputFormatContext->streams[0]->codec->time_base.num, inputFormatContext->streams[0]->codec->time_base.den); 250 | LOGI("Stream1 time_base: (num: %d, den: %d)", inputFormatContext->streams[1]->codec->time_base.num, inputFormatContext->streams[1]->codec->time_base.den); 251 | return inputFormatContext; 252 | } 253 | 254 | // FFOutputFile functions 255 | 256 | AVFormatContext* avFormatContextForOutputPath(const char *path, const char *formatName){ 257 | AVFormatContext *outputFormatContext; 258 | LOGI("avFormatContextForOutputPath format: %s path: %s", formatName, path); 259 | int openOutputValue = avformat_alloc_output_context2(&outputFormatContext, NULL, formatName, path); 260 | if (openOutputValue < 0) { 261 | avformat_free_context(outputFormatContext); 262 | } 263 | return outputFormatContext; 264 | } 265 | 266 | int 
openFileForWriting(AVFormatContext *avfc, const char *path){ 267 | if (!(avfc->oformat->flags & AVFMT_NOFILE)) { 268 | LOGI("Opening output file for writing at path %s", path); 269 | return avio_open(&avfc->pb, path, AVIO_FLAG_WRITE); 270 | } 271 | return 0; // This format does not require a file 272 | } 273 | 274 | int writeFileHeader(AVFormatContext *avfc){ 275 | AVDictionary *options = NULL; 276 | 277 | // Write header for output file 278 | int writeHeaderResult = avformat_write_header(avfc, &options); 279 | if (writeHeaderResult < 0) { 280 | LOGE("Error writing header: %s", stringForAVErrorNumber(writeHeaderResult)); 281 | av_dict_free(&options); 282 | } 283 | LOGI("Wrote file header"); 284 | av_dict_free(&options); 285 | return writeHeaderResult; 286 | } 287 | 288 | int writeFileTrailer(AVFormatContext *avfc){ 289 | if(WRITE_RAW_FILE){ 290 | fclose(raw_video); 291 | } 292 | return av_write_trailer(avfc); 293 | } 294 | 295 | ///////////////////// 296 | // JNI FUNCTIONS // 297 | ///////////////////// 298 | 299 | /* 300 | * Prepares an AVFormatContext for output. 301 | * Currently, the output format and codecs are hardcoded in this file. 
302 | */ 303 | void Java_com_example_ffmpegtest_recorder_FFmpegWrapper_prepareAVFormatContext(JNIEnv *env, jobject obj, jstring jOutputPath){ 304 | init(); 305 | 306 | // Create AVRational that expects timestamps in microseconds 307 | videoSourceTimeBase = av_malloc(sizeof(AVRational)); 308 | videoSourceTimeBase->num = 1; 309 | videoSourceTimeBase->den = 1000000; 310 | 311 | audioSourceTimeBase = av_malloc(sizeof(AVRational)); 312 | audioSourceTimeBase->num = 1; 313 | audioSourceTimeBase->den = 1000000; 314 | 315 | AVFormatContext *inputFormatContext; 316 | outputPath = (*env)->GetStringUTFChars(env, jOutputPath, NULL); 317 | 318 | outputFormatContext = avFormatContextForOutputPath(outputPath, outputFormatName); 319 | LOGI("post avFormatContextForOutputPath"); 320 | 321 | // For copying AVFormatContext from sample file: 322 | /* 323 | inputFormatContext = avFormatContextForInputPath(sampleFilePath, outputFormatName); 324 | LOGI("post avFormatContextForInputPath"); 325 | copyAVFormatContext(&outputFormatContext, &inputFormatContext); 326 | LOGI("post copyAVFormatContext"); 327 | */ 328 | 329 | // For manually crafting AVFormatContext 330 | addVideoStream(outputFormatContext); 331 | addAudioStream(outputFormatContext); 332 | av_opt_set_int(outputFormatContext->priv_data, "hls_time", hlsSegmentDurationSec, 0); 333 | 334 | int result = openFileForWriting(outputFormatContext, outputPath); 335 | if(result < 0){ 336 | LOGE("openFileForWriting error: %d", result); 337 | } 338 | 339 | writeFileHeader(outputFormatContext); 340 | } 341 | 342 | /* 343 | * Override default AV Options. Must be called before prepareAVFormatContext 344 | */ 345 | 346 | void Java_com_example_ffmpegtest_recorder_FFmpegWrapper_setAVOptions(JNIEnv *env, jobject obj, jobject jOpts){ 347 | // 1: Get your Java object's "jclass"! 348 | jclass ClassAVOptions = (*env)->GetObjectClass(env, jOpts); 349 | 350 | // 2: Get Java object field ids using the jclasss and field name as **hardcoded** strings! 
351 | jfieldID jVideoHeightId = (*env)->GetFieldID(env, ClassAVOptions, "videoHeight", "I"); 352 | jfieldID jVideoWidthId = (*env)->GetFieldID(env, ClassAVOptions, "videoWidth", "I"); 353 | 354 | jfieldID jAudioSampleRateId = (*env)->GetFieldID(env, ClassAVOptions, "audioSampleRate", "I"); 355 | jfieldID jNumAudioChannelsId = (*env)->GetFieldID(env, ClassAVOptions, "numAudioChannels", "I"); 356 | 357 | jfieldID jHlsSegmentDurationSec = (*env)->GetFieldID(env, ClassAVOptions, "hlsSegmentDurationSec", "I"); 358 | 359 | // 3: Get the Java object field values with the field ids! 360 | VIDEO_HEIGHT = (*env)->GetIntField(env, jOpts, jVideoHeightId); 361 | VIDEO_WIDTH = (*env)->GetIntField(env, jOpts, jVideoWidthId); 362 | 363 | AUDIO_SAMPLE_RATE = (*env)->GetIntField(env, jOpts, jAudioSampleRateId); 364 | AUDIO_CHANNELS = (*env)->GetIntField(env, jOpts, jNumAudioChannelsId); 365 | 366 | hlsSegmentDurationSec = (*env)->GetIntField(env, jOpts, jHlsSegmentDurationSec); 367 | 368 | // that's how easy love can be! 369 | } 370 | 371 | /* 372 | * Consruct an AVPacket from MediaCodec output and call 373 | * av_interleaved_write_frame with our AVFormatContext 374 | */ 375 | void Java_com_example_ffmpegtest_recorder_FFmpegWrapper_writeAVPacketFromEncodedData(JNIEnv *env, jobject obj, jobject jData, jint jIsVideo, jint jOffset, jint jSize, jint jFlags, jlong jPts){ 376 | if(packet == NULL){ 377 | packet = av_malloc(sizeof(AVPacket)); 378 | LOGI("av_malloc packet"); 379 | } 380 | 381 | if( ((int) jIsVideo) == JNI_TRUE ){ 382 | videoFrameCount++; 383 | } 384 | 385 | // jData is a ByteBuffer managed by Android's MediaCodec. 
386 | // Because the audo track of the resulting output mostly works, I'm inclined to rule out this data marshaling being an issue 387 | uint8_t *data = (*env)->GetDirectBufferAddress(env, jData); 388 | 389 | if( WRITE_RAW_FILE && ((int) jIsVideo) == JNI_TRUE ){ 390 | fwrite(data, sizeof(uint8_t), (int)jSize, raw_video); 391 | } 392 | 393 | if(((int) jSize ) < 15){ 394 | if( ((int) jIsVideo) == JNI_TRUE ){ 395 | //LOGI("video: %d data: %s size: %d videoPacket#: %d", (int) jIsVideo, (char*)data, (int) jSize, videoFrameCount); 396 | }else{ 397 | //LOGI("video: %d data: %s size: %d", (int) jIsVideo, data, (int) jSize); 398 | } 399 | //return; 400 | } 401 | 402 | av_init_packet(packet); 403 | 404 | if( ((int) jIsVideo) == JNI_TRUE){ 405 | packet->stream_index = videoStreamIndex; 406 | }else{ 407 | packet->stream_index = audioStreamIndex; 408 | } 409 | 410 | packet->size = (int) jSize; 411 | packet->data = data; 412 | packet->pts = (int) jPts; 413 | 414 | packet->pts = av_rescale_q(packet->pts, *videoSourceTimeBase, (outputFormatContext->streams[packet->stream_index]->time_base)); 415 | 416 | /* Use this to break on specific frame */ 417 | if(videoFrameCount == 3){ 418 | //LOGI("break on frame"); 419 | //LOGI("Payload size: %d", (int) jSize); 420 | } 421 | 422 | 423 | int writeFrameResult = av_interleaved_write_frame(outputFormatContext, packet); 424 | if(writeFrameResult < 0){ 425 | LOGE("av_interleaved_write_frame video: %d pkt: %d size: %d error: %s", ((int) jIsVideo), videoFrameCount, ((int) jSize), stringForAVErrorNumber(writeFrameResult)); 426 | } 427 | av_free_packet(packet); 428 | } 429 | 430 | /* 431 | * Finalize file. 
Basically a wrapper around av_write_trailer 432 | */ 433 | void Java_com_example_ffmpegtest_recorder_FFmpegWrapper_finalizeAVFormatContext(JNIEnv *env, jobject obj){ 434 | LOGI("finalizeAVFormatContext"); 435 | int writeTrailerResult = writeFileTrailer(outputFormatContext); 436 | if(writeTrailerResult < 0){ 437 | LOGE("av_write_trailer error: %d", writeTrailerResult); 438 | } 439 | } 440 | -------------------------------------------------------------------------------- /FFmpegEncoder/Android/FFmpegWrapper.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2013, David Brodsky. All rights reserved. 3 | * 4 | * This program is free software: you can redistribute it and/or modify 5 | * it under the terms of the GNU General Public License as published by 6 | * the Free Software Foundation, either version 3 of the License, or 7 | * (at your option) any later version. 8 | * 9 | * This program is distributed in the hope that it will be useful, 10 | * but WITHOUT ANY WARRANTY; without even the implied warranty of 11 | * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 12 | * GNU General Public License for more details. 13 | * 14 | * You should have received a copy of the GNU General Public License 15 | * along with this program. If not, see . 
16 | */ 17 | 18 | package com.example.ffmpegtest.recorder; 19 | 20 | import java.nio.ByteBuffer; 21 | 22 | import android.util.Log; 23 | 24 | /** 25 | * A wrapper around the FFmpeg C libraries 26 | * designed for muxing encoded AV packets 27 | * into various output formats not supported by 28 | * Android's MediaMuxer, which is currently limited to .mp4 29 | * 30 | * As this is designed to complement Android's MediaCodec class, 31 | * the only supported formats for jData in writeAVPacketFromEncodedData are: 32 | * H264 (YUV420P pixel format) / AAC (16 bit signed integer samples, one center channel) 33 | * 34 | * Methods of this class must be called in the following order: 35 | * 0. (optional) setAVOptions 36 | * 1. prepareAVFormatContext 37 | * 2. (repeat for each packet) writeAVPacketFromEncodedData 38 | * 3. finalizeAVFormatContext 39 | * @author davidbrodsky 40 | * 41 | */ 42 | public class FFmpegWrapper { 43 | 44 | static { 45 | System.loadLibrary("FFmpegWrapper"); 46 | } 47 | 48 | public native void setAVOptions(AVOptions jOpts); 49 | public native void prepareAVFormatContext(String jOutputPath); 50 | public native void writeAVPacketFromEncodedData(ByteBuffer jData, int jIsVideo, int jOffset, int jSize, int jFlags, long jPts); 51 | public native void finalizeAVFormatContext(); 52 | 53 | /** 54 | * Used to configure the muxer's options. 55 | * Note the name of this class's fields 56 | * have to be hardcoded in the native method 57 | * for retrieval. 58 | * @author davidbrodsky 59 | * 60 | */ 61 | static public class AVOptions{ 62 | int videoHeight = 1280; 63 | int videoWidth = 720; 64 | 65 | int audioSampleRate = 44100; 66 | int numAudioChannels = 1; 67 | 68 | int hlsSegmentDurationSec = 10; 69 | } 70 | 71 | } 72 | -------------------------------------------------------------------------------- /FFmpegEncoder/Android/FileUtils.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2013, David Brodsky. 
All rights reserved. 3 | * 4 | * This program is free software: you can redistribute it and/or modify 5 | * it under the terms of the GNU General Public License as published by 6 | * the Free Software Foundation, either version 3 of the License, or 7 | * (at your option) any later version. 8 | * 9 | * This program is distributed in the hope that it will be useful, 10 | * but WITHOUT ANY WARRANTY; without even the implied warranty of 11 | * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 12 | * GNU General Public License for more details. 13 | * 14 | * You should have received a copy of the GNU General Public License 15 | * along with this program. If not, see . 16 | */ 17 | 18 | package com.example.ffmpegtest; 19 | 20 | import android.content.Context; 21 | import android.os.Environment; 22 | import android.util.Log; 23 | 24 | import java.io.BufferedReader; 25 | import java.io.File; 26 | import java.io.FileInputStream; 27 | import java.io.FileOutputStream; 28 | import java.io.IOException; 29 | import java.io.InputStream; 30 | import java.io.InputStreamReader; 31 | import java.io.OutputStream; 32 | 33 | public class FileUtils { 34 | 35 | static final String TAG = "FileUtils"; 36 | 37 | static final String OUTPUT_DIR = "HWEncodingExperiments"; // Directory relative to External or Internal (fallback) Storage 38 | 39 | /** 40 | * Returns a Java File initialized to a directory of given name 41 | * at the root storage location, with preference to external storage. 42 | * If the directory did not exist, it will be created at the conclusion of this call. 
43 | * If a file with conflicting name exists, this method returns null; 44 | * 45 | * @param c the context to determine the internal storage location, if external is unavailable 46 | * @param directory_name the name of the directory desired at the storage location 47 | * @return a File pointing to the storage directory, or null if a file with conflicting name 48 | * exists 49 | */ 50 | public static File getRootStorageDirectory(Context c, String directory_name){ 51 | File result; 52 | // First, try getting access to the sdcard partition 53 | if(Environment.getExternalStorageState().equals(Environment.MEDIA_MOUNTED)){ 54 | Log.d(TAG,"Using sdcard"); 55 | result = new File(Environment.getExternalStorageDirectory(), directory_name); 56 | } else { 57 | // Else, use the internal storage directory for this application 58 | Log.d(TAG,"Using internal storage"); 59 | result = new File(c.getApplicationContext().getFilesDir(), directory_name); 60 | } 61 | 62 | if(!result.exists()) 63 | result.mkdir(); 64 | else if(result.isFile()){ 65 | return null; 66 | } 67 | Log.d("getRootStorageDirectory", result.getAbsolutePath()); 68 | return result; 69 | } 70 | 71 | /** 72 | * Returns a Java File initialized to a directory of given name 73 | * within the given location. 
74 | * 75 | * @param parent_directory a File representing the directory in which the new child will reside 76 | * @return a File pointing to the desired directory, or null if a file with conflicting name 77 | * exists or if getRootStorageDirectory was not called first 78 | */ 79 | public static File getStorageDirectory(File parent_directory, String new_child_directory_name){ 80 | 81 | File result = new File(parent_directory, new_child_directory_name); 82 | if(!result.exists()) 83 | if(result.mkdir()) 84 | return result; 85 | else{ 86 | Log.e("getStorageDirectory", "Error creating " + result.getAbsolutePath()); 87 | return null; 88 | } 89 | else if(result.isFile()){ 90 | return null; 91 | } 92 | 93 | Log.d("getStorageDirectory", "directory ready: " + result.getAbsolutePath()); 94 | return result; 95 | } 96 | 97 | /** 98 | * Returns a TempFile with given root, filename, and extension. 99 | * The resulting TempFile is safe for use with Android's MediaRecorder 100 | * @param c 101 | * @param root 102 | * @param filename 103 | * @param extension 104 | * @return 105 | */ 106 | public static File createTempFile(Context c, File root, String filename, String extension){ 107 | File output = null; 108 | try { 109 | if(filename != null){ 110 | if(!extension.contains(".")) 111 | extension = "." 
+ extension; 112 | output = new File(root, filename + extension); 113 | output.createNewFile(); 114 | //output = File.createTempFile(filename, extension, root); 115 | Log.i(TAG, "Created temp file: " + output.getAbsolutePath()); 116 | } 117 | return output; 118 | } catch (IOException e) { 119 | e.printStackTrace(); 120 | return null; 121 | } 122 | } 123 | 124 | public static File createTempFileInRootAppStorage(Context c, String filename){ 125 | File recordingDir = FileUtils.getRootStorageDirectory(c, OUTPUT_DIR); 126 | return createTempFile(c, recordingDir, filename.split("\\.")[0], filename.split("\\.")[1]); 127 | } 128 | 129 | public static String convertStreamToString(InputStream is) throws Exception { 130 | BufferedReader reader = new BufferedReader(new InputStreamReader(is)); 131 | StringBuilder sb = new StringBuilder(); 132 | String line = null; 133 | while ((line = reader.readLine()) != null) { 134 | sb.append(line).append("\n"); 135 | } 136 | return sb.toString(); 137 | } 138 | 139 | public static String getStringFromFile (String filePath) throws Exception { 140 | File fl = new File(filePath); 141 | FileInputStream fin = new FileInputStream(fl); 142 | String ret = convertStreamToString(fin); 143 | //Make sure you close all streams. 144 | fin.close(); 145 | return ret; 146 | } 147 | 148 | public static void copy(File src, File dst) throws IOException { 149 | InputStream in = new FileInputStream(src); 150 | OutputStream out = new FileOutputStream(dst); 151 | 152 | // Transfer bytes from in to out 153 | byte[] buf = new byte[1024]; 154 | int len; 155 | while ((len = in.read(buf)) > 0) { 156 | out.write(buf, 0, len); 157 | } 158 | in.close(); 159 | out.close(); 160 | } 161 | 162 | } -------------------------------------------------------------------------------- /FFmpegEncoder/Android/HLSFileObserver.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2013, David Brodsky. All rights reserved. 
3 | * 4 | * This program is free software: you can redistribute it and/or modify 5 | * it under the terms of the GNU General Public License as published by 6 | * the Free Software Foundation, either version 3 of the License, or 7 | * (at your option) any later version. 8 | * 9 | * This program is distributed in the hope that it will be useful, 10 | * but WITHOUT ANY WARRANTY; without even the implied warranty of 11 | * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 12 | * GNU General Public License for more details. 13 | * 14 | * You should have received a copy of the GNU General Public License 15 | * along with this program. If not, see . 16 | */ 17 | 18 | package com.example.ffmpegtest; 19 | 20 | import java.io.File; 21 | 22 | import android.os.FileObserver; 23 | 24 | /** 25 | * A FileObserver that listens for actions 26 | * specific to the creation of an HLS stream 27 | * e.g: A .ts segment is written 28 | * or a .m3u8 manifest is modified 29 | * @author davidbrodsky 30 | * 31 | */ 32 | public class HLSFileObserver extends FileObserver{ 33 | 34 | private static final String M3U8_EXT = "m3u8"; 35 | private static final String TS_EXT = "ts"; 36 | String targetDir; 37 | 38 | private HLSCallback callback; 39 | 40 | public interface HLSCallback{ 41 | public void onSegmentComplete(String path); 42 | public void onManifestUpdated(String path); 43 | } 44 | 45 | /** 46 | * Begin observing the given path for changes 47 | * to .ts and .m3u8 files 48 | * @param path the absolute path to observe. 
49 | * @param callback a callback to be notified when HLS files are modified 50 | */ 51 | public HLSFileObserver(String path, HLSCallback callback){ 52 | super(path, CLOSE_WRITE); 53 | this.callback = callback; 54 | targetDir = path; 55 | } 56 | 57 | @Override 58 | public void onEvent(int event, String path) { 59 | String ext = path.substring(path.lastIndexOf('.') + 1); 60 | if(ext.compareTo(M3U8_EXT) == 0){ 61 | callback.onManifestUpdated(targetDir + File.separator + path); 62 | }else if(ext.compareTo(TS_EXT) == 0){ 63 | callback.onSegmentComplete(targetDir + File.separator + path); 64 | } 65 | } 66 | 67 | } 68 | -------------------------------------------------------------------------------- /FFmpegEncoder/Android/HWRecorderActivity.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright (c) 2013, David Brodsky. All rights reserved. 3 | * 4 | * This program is free software: you can redistribute it and/or modify 5 | * it under the terms of the GNU General Public License as published by 6 | * the Free Software Foundation, either version 3 of the License, or 7 | * (at your option) any later version. 8 | * 9 | * This program is distributed in the hope that it will be useful, 10 | * but WITHOUT ANY WARRANTY; without even the implied warranty of 11 | * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 12 | * GNU General Public License for more details. 13 | * 14 | * You should have received a copy of the GNU General Public License 15 | * along with this program. If not, see . 
16 | */ 17 | 18 | package com.example.ffmpegtest; 19 | 20 | import com.example.ffmpegtest.recorder.HLSRecorder; 21 | import com.example.ffmpegtest.recorder.LiveHLSRecorder; 22 | 23 | import android.app.Activity; 24 | import android.content.BroadcastReceiver; 25 | import android.content.Context; 26 | import android.content.Intent; 27 | import android.content.IntentFilter; 28 | import android.graphics.Color; 29 | import android.os.Bundle; 30 | import android.support.v4.content.LocalBroadcastManager; 31 | import android.util.Log; 32 | import android.view.Gravity; 33 | import android.view.LayoutInflater; 34 | import android.view.View; 35 | import android.view.Window; 36 | import android.view.WindowManager; 37 | import android.view.animation.AnimationUtils; 38 | import android.widget.Button; 39 | import android.widget.TextSwitcher; 40 | import android.widget.TextView; 41 | import android.widget.ViewSwitcher.ViewFactory; 42 | 43 | public class HWRecorderActivity extends Activity { 44 | private static final String TAG = "HWRecorderActivity"; 45 | boolean recording = false; 46 | LiveHLSRecorder liveRecorder; 47 | 48 | TextView liveIndicator; 49 | String broadcastUrl; 50 | 51 | //GLSurfaceView glSurfaceView; 52 | //GlSurfaceViewRenderer glSurfaceViewRenderer = new GlSurfaceViewRenderer(); 53 | LayoutInflater inflater; 54 | 55 | protected void onCreate (Bundle savedInstanceState){ 56 | super.onCreate(savedInstanceState); 57 | requestWindowFeature(Window.FEATURE_NO_TITLE); 58 | getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON); 59 | setContentView(R.layout.activity_hwrecorder); 60 | inflater = (LayoutInflater) this.getSystemService(LAYOUT_INFLATER_SERVICE); 61 | liveIndicator = (TextView) findViewById(R.id.liveLabel); 62 | //glSurfaceView = (GLSurfaceView) findViewById(R.id.glSurfaceView); 63 | //glSurfaceView.setRenderer(glSurfaceViewRenderer); 64 | 65 | LocalBroadcastManager.getInstance(this).registerReceiver(mMessageReceiver, 66 | new 
IntentFilter(LiveHLSRecorder.INTENT_ACTION)); 67 | } 68 | 69 | @Override 70 | public void onPause(){ 71 | super.onPause(); 72 | //glSurfaceView.onPause(); 73 | } 74 | 75 | @Override 76 | public void onResume(){ 77 | super.onResume(); 78 | //glSurfaceView.onResume(); 79 | } 80 | 81 | @Override 82 | protected void onDestroy() { 83 | // Unregister since the activity is about to be closed. 84 | LocalBroadcastManager.getInstance(this).unregisterReceiver(mMessageReceiver); 85 | super.onDestroy(); 86 | } 87 | 88 | public void onRecordButtonClicked(View v){ 89 | if(!recording){ 90 | broadcastUrl = null; 91 | 92 | try { 93 | liveRecorder = new LiveHLSRecorder(getApplicationContext()); 94 | liveRecorder.startRecording(null); 95 | ((Button) v).setText("Stop Recording"); 96 | } catch (Throwable throwable) { 97 | throwable.printStackTrace(); 98 | } 99 | }else{ 100 | liveRecorder.stopRecording(); 101 | ((Button) v).setText("Start Recording"); 102 | liveIndicator.startAnimation(AnimationUtils.loadAnimation(this, R.anim.slide_to_left)); 103 | } 104 | recording = !recording; 105 | } 106 | 107 | private BroadcastReceiver mMessageReceiver = new BroadcastReceiver() { 108 | @Override 109 | public void onReceive(Context context, Intent intent) { 110 | // Get extra data included in the Intent 111 | if (LiveHLSRecorder.HLS_STATUS.LIVE == (LiveHLSRecorder.HLS_STATUS) intent.getSerializableExtra("status")){ 112 | broadcastUrl = intent.getStringExtra("url"); 113 | liveIndicator.startAnimation(AnimationUtils.loadAnimation(getApplicationContext(), R.anim.slide_from_left)); 114 | liveIndicator.setVisibility(View.VISIBLE); 115 | } 116 | } 117 | }; 118 | 119 | public void onUrlLabelClick(View v){ 120 | if(broadcastUrl != null){ 121 | shareUrl(broadcastUrl); 122 | } 123 | } 124 | 125 | private void shareUrl(String url) { 126 | Intent shareIntent = new Intent(Intent.ACTION_SEND); 127 | shareIntent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_WHEN_TASK_RESET); 128 | shareIntent.setType("text/plain"); 129 | 
startActivity(Intent.createChooser(shareIntent, "Share Broadcast!")); 130 | } 131 | 132 | /* 133 | static EGLContext context; 134 | 135 | public class GlSurfaceViewRenderer implements GLSurfaceView.Renderer{ 136 | 137 | @Override 138 | public void onSurfaceCreated(GL10 gl, EGLConfig config) { 139 | Log.i(TAG, "GLSurfaceView created"); 140 | context = EGL14.eglGetCurrentContext(); 141 | if(context == EGL14.EGL_NO_CONTEXT) 142 | Log.e(TAG, "failed to get valid EGLContext"); 143 | 144 | EGL14.eglMakeCurrent(EGL14.eglGetCurrentDisplay(), EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_CONTEXT); 145 | } 146 | 147 | @Override 148 | public void onSurfaceChanged(GL10 gl, int width, int height) { 149 | 150 | } 151 | 152 | @Override 153 | public void onDrawFrame(GL10 gl) { 154 | } 155 | } 156 | */ 157 | 158 | } -------------------------------------------------------------------------------- /FFmpegEncoder/Android/LiveHLSRecorder.java: -------------------------------------------------------------------------------- 1 | package com.example.ffmpegtest.recorder; 2 | 3 | import java.io.File; 4 | import java.io.IOException; 5 | import java.util.UUID; 6 | import java.util.concurrent.ExecutorService; 7 | import java.util.concurrent.Executors; 8 | 9 | import android.content.Context; 10 | import android.content.Intent; 11 | import android.os.Trace; 12 | import android.support.v4.content.LocalBroadcastManager; 13 | import android.util.Log; 14 | 15 | import com.amazonaws.auth.BasicAWSCredentials; 16 | import com.amazonaws.services.s3.AmazonS3Client; 17 | import com.amazonaws.services.s3.model.ProgressEvent; 18 | import com.example.ffmpegtest.FileUtils; 19 | import com.example.ffmpegtest.HLSFileObserver; 20 | import com.example.ffmpegtest.HLSFileObserver.HLSCallback; 21 | import com.example.ffmpegtest.S3Client; 22 | import com.example.ffmpegtest.S3Client.S3Callback; 23 | import com.example.ffmpegtest.SECRETS; 24 | import com.readystatesoftware.simpl3r.Uploader; 25 | import 
com.readystatesoftware.simpl3r.Uploader.UploadProgressListener; 26 | 27 | public class LiveHLSRecorder extends HLSRecorder{ 28 | private final String TAG = "LiveHLSRecorder"; 29 | private final boolean VERBOSE = false; // lots of logging 30 | private final boolean TRACE = true; // Enable systrace markers 31 | private final boolean UPLOAD_TO_S3 = true; // live uploading 32 | 33 | private Context c; 34 | private String uuid; // Recording UUID 35 | private HLSFileObserver observer; // Must hold reference to observer to continue receiving events 36 | private ExecutorService uploadService; 37 | 38 | public static final String INTENT_ACTION = "HLS"; // Intent action broadcast to LocalBroadcastManager 39 | public enum HLS_STATUS { OFFLINE, LIVE }; 40 | 41 | private boolean sentIsLiveBroadcast = false; // Only send "broadcast is live" intent once per recording 42 | private int lastSegmentWritten = 0; 43 | File temp; // Temporary directory to store .m3u8s for each upload state 44 | 45 | // Amazon S3 46 | private final String S3_BUCKET = "openwatch-livestreamer"; 47 | private S3Client s3Client; 48 | 49 | public LiveHLSRecorder(Context c){ 50 | super(c); 51 | s3Client = new S3Client(c, SECRETS.AWS_KEY, SECRETS.AWS_SECRET); 52 | s3Client.setBucket(S3_BUCKET); 53 | uploadService = Executors.newSingleThreadExecutor(); 54 | lastSegmentWritten = 0; 55 | this.c = c; 56 | } 57 | 58 | /** 59 | * We'll create a single thread ExecutorService for uploading, and immediately 60 | * submit the .ts and .m3u8 jobs in tick-tock fashion. 
61 | * Currently, the fileObserver callbacks don't return until the entire upload 62 | * is complete, which means by the time the first .ts uploads, the the next callback (the .m3u8 write) 63 | * is called when the underlying action has been negated by future (but uncalled) events 64 | */ 65 | @Override 66 | public void startRecording(final String outputDir){ 67 | super.startRecording(outputDir); 68 | temp = new File(getOutputDirectory(), "temp"); // make temp directory for .m3u8s for each upload state 69 | temp.mkdirs(); 70 | sentIsLiveBroadcast = false; 71 | if (!UPLOAD_TO_S3) return; 72 | observer = new HLSFileObserver(getOutputDirectory().getAbsolutePath(), new HLSCallback(){ 73 | 74 | @Override 75 | public void onSegmentComplete(final String path) { 76 | lastSegmentWritten++; 77 | if (VERBOSE) Log.i(TAG, ".ts segment written: " + path); 78 | uploadService.submit(new Runnable(){ 79 | 80 | @Override 81 | public void run() { 82 | File orig = new File(path); 83 | String url = s3Client.upload(getUUID() + File.separator + orig.getName(), orig, segmentUploadedCallback); 84 | if (VERBOSE) Log.i(TAG, ".ts segment destination url received: " + url); 85 | } 86 | }); 87 | } 88 | 89 | @Override 90 | public void onManifestUpdated(String path) { 91 | if (VERBOSE) Log.i(TAG, ".m3u8 written: " + path); 92 | // Copy m3u8 at this moment and queue it to uploading service 93 | final File orig = new File(path); 94 | final File copy = new File(temp, orig.getName().replace(".m3u8", "_" + lastSegmentWritten + ".m3u8")); 95 | 96 | if (TRACE) Trace.beginSection("copyM3u8"); 97 | try { 98 | FileUtils.copy(orig, copy); 99 | } catch (IOException e) { 100 | e.printStackTrace(); 101 | } 102 | if (TRACE) Trace.endSection(); 103 | uploadService.submit(new Runnable(){ 104 | 105 | @Override 106 | public void run() { 107 | String url = s3Client.upload(getUUID() + File.separator + orig.getName(), copy, manifestUploadedCallback); 108 | // TODO: Delete copy 109 | if (VERBOSE) Log.i(TAG, ".m3u8 
destination url received: " + url); 110 | 111 | if(!sentIsLiveBroadcast){ 112 | broadcastRecordingIsLive(url); 113 | sentIsLiveBroadcast = true; 114 | } 115 | } 116 | }); 117 | } 118 | 119 | }); 120 | observer.startWatching(); 121 | Log.i(TAG, "Watching " + getOutputDirectory() + " for changes"); 122 | } 123 | 124 | S3Callback segmentUploadedCallback = new S3Callback(){ 125 | 126 | @Override 127 | public void onProgress(ProgressEvent progressEvent, long bytesUploaded, 128 | int percentUploaded) { 129 | if (VERBOSE) Log.i(TAG, String.format(".ts segment upload progress: %d event: %d", percentUploaded, progressEvent.getEventCode())); 130 | if(progressEvent.getEventCode() == ProgressEvent.COMPLETED_EVENT_CODE){ 131 | if (VERBOSE) Log.i(TAG, ".ts segment upload success"); 132 | } else if(progressEvent.getEventCode() == ProgressEvent.FAILED_EVENT_CODE){ 133 | if (VERBOSE) Log.i(TAG, ".ts segment upload failed"); 134 | } 135 | } 136 | 137 | }; 138 | 139 | S3Callback manifestUploadedCallback = new S3Callback(){ 140 | 141 | @Override 142 | public void onProgress(ProgressEvent progressEvent, long bytesUploaded, 143 | int percentUploaded) { 144 | if (VERBOSE) Log.i(TAG, String.format(".m3u8 upload progress: %d event: %d", percentUploaded, progressEvent.getEventCode())); 145 | if(progressEvent.getEventCode() == ProgressEvent.COMPLETED_EVENT_CODE){ 146 | if (VERBOSE) Log.i(TAG, ".m3u8 upload success"); 147 | } else if(progressEvent.getEventCode() == ProgressEvent.FAILED_EVENT_CODE){ 148 | if (VERBOSE) Log.i(TAG, ".m3u8 upload failed"); 149 | } 150 | } 151 | 152 | }; 153 | 154 | /** 155 | * Broadcasts a message to the LocalBroadcastManager 156 | * indicating the HLS stream is live. 
157 | * This message is receivable only within the 158 | * hosting application 159 | * @param url address of the HLS stream 160 | */ 161 | private void broadcastRecordingIsLive(String url) { 162 | Log.d(TAG, String.format("Broadcasting Live HLS link: %s", url)); 163 | Intent intent = new Intent(INTENT_ACTION); 164 | intent.putExtra("url", url); 165 | intent.putExtra("status", HLS_STATUS.LIVE); 166 | LocalBroadcastManager.getInstance(c).sendBroadcast(intent); 167 | } 168 | } 169 | -------------------------------------------------------------------------------- /FFmpegEncoder/Android/S3Client.java: -------------------------------------------------------------------------------- 1 | package com.example.ffmpegtest; 2 | 3 | import java.io.File; 4 | 5 | import android.content.Context; 6 | import android.util.Log; 7 | 8 | import com.amazonaws.auth.BasicAWSCredentials; 9 | import com.amazonaws.services.s3.AmazonS3Client; 10 | import com.amazonaws.services.s3.model.ProgressEvent; 11 | import com.readystatesoftware.simpl3r.Uploader; 12 | import com.readystatesoftware.simpl3r.Uploader.UploadProgressListener; 13 | 14 | public class S3Client { 15 | private static final String TAG = "S3Client"; 16 | 17 | AmazonS3Client s3; 18 | Context c; 19 | 20 | String bucket; 21 | 22 | public interface S3Callback{ 23 | public void onProgress(ProgressEvent progressEvent, long bytesUploaded, int percentUploaded); 24 | } 25 | 26 | public S3Client(Context c, String AWS_KEY, String AWS_SECRET){ 27 | s3 = new AmazonS3Client(new BasicAWSCredentials(SECRETS.AWS_KEY, SECRETS.AWS_SECRET)); 28 | this.c = c; 29 | } 30 | 31 | /** 32 | * Set the target S3 bucket of this client. 33 | * Must be set before calling upload. 34 | * @param bucket The name of the target S3 bucket. 35 | */ 36 | public void setBucket(String bucket){ 37 | this.bucket = bucket; 38 | } 39 | 40 | /** 41 | * Begin an upload to S3. Returns the url to the completed upload. 42 | * @param key Path relative to provided S3 bucket. 
43 | * @param source File reference to be uploaded. 44 | * @param callback Callback providing upload progress. 45 | * @return 46 | */ 47 | public String upload(String key, File source, final S3Callback callback){ 48 | if(bucket == null){ 49 | Log.e(TAG, "Bucket not set! Call setBucket(String bucket)"); 50 | return ""; 51 | } 52 | Uploader uploader = new Uploader(c, s3, bucket, key, source); 53 | uploader.setProgressListener(new UploadProgressListener() { 54 | @Override 55 | public void progressChanged(ProgressEvent progressEvent, 56 | long bytesUploaded, int percentUploaded) { 57 | if(callback != null) 58 | callback.onProgress(progressEvent, bytesUploaded, percentUploaded); 59 | } 60 | }); 61 | return uploader.start(); 62 | } 63 | 64 | 65 | } 66 | -------------------------------------------------------------------------------- /FFmpegEncoder/CBAppDelegate.h: -------------------------------------------------------------------------------- 1 | // 2 | // CBAppDelegate.h 3 | // FFmpegEncoder 4 | // 5 | // Created by Christopher Ballinger on 12/15/13. 6 | // Copyright (c) 2013 Christopher Ballinger. All rights reserved. 7 | // 8 | 9 | #import 10 | 11 | @interface CBAppDelegate : UIResponder 12 | 13 | @property (strong, nonatomic) UIWindow *window; 14 | 15 | @end 16 | -------------------------------------------------------------------------------- /FFmpegEncoder/CBAppDelegate.m: -------------------------------------------------------------------------------- 1 | // 2 | // CBAppDelegate.m 3 | // FFmpegEncoder 4 | // 5 | // Created by Christopher Ballinger on 12/15/13. 6 | // Copyright (c) 2013 Christopher Ballinger. All rights reserved. 
7 | // 8 | 9 | #import "CBAppDelegate.h" 10 | 11 | @implementation CBAppDelegate 12 | 13 | - (BOOL)application:(UIApplication *)application didFinishLaunchingWithOptions:(NSDictionary *)launchOptions 14 | { 15 | self.window = [[UIWindow alloc] initWithFrame:[[UIScreen mainScreen] bounds]]; 16 | // Override point for customization after application launch. 17 | self.window.backgroundColor = [UIColor whiteColor]; 18 | [self.window makeKeyAndVisible]; 19 | return YES; 20 | } 21 | 22 | - (void)applicationWillResignActive:(UIApplication *)application 23 | { 24 | // Sent when the application is about to move from active to inactive state. This can occur for certain types of temporary interruptions (such as an incoming phone call or SMS message) or when the user quits the application and it begins the transition to the background state. 25 | // Use this method to pause ongoing tasks, disable timers, and throttle down OpenGL ES frame rates. Games should use this method to pause the game. 26 | } 27 | 28 | - (void)applicationDidEnterBackground:(UIApplication *)application 29 | { 30 | // Use this method to release shared resources, save user data, invalidate timers, and store enough application state information to restore your application to its current state in case it is terminated later. 31 | // If your application supports background execution, this method is called instead of applicationWillTerminate: when the user quits. 32 | } 33 | 34 | - (void)applicationWillEnterForeground:(UIApplication *)application 35 | { 36 | // Called as part of the transition from the background to the inactive state; here you can undo many of the changes made on entering the background. 37 | } 38 | 39 | - (void)applicationDidBecomeActive:(UIApplication *)application 40 | { 41 | // Restart any tasks that were paused (or not yet started) while the application was inactive. If the application was previously in the background, optionally refresh the user interface. 
42 | } 43 | 44 | - (void)applicationWillTerminate:(UIApplication *)application 45 | { 46 | // Called when the application is about to terminate. Save data if appropriate. See also applicationDidEnterBackground:. 47 | } 48 | 49 | @end 50 | -------------------------------------------------------------------------------- /FFmpegEncoder/DirectoryWatcher.h: -------------------------------------------------------------------------------- 1 | /* 2 | File: DirectoryWatcher.h 3 | Abstract: 4 | Object used to monitor the contents of a given directory by using 5 | "kqueue": a kernel event notification mechanism. 6 | 7 | Version: 1.5 8 | 9 | Disclaimer: IMPORTANT: This Apple software is supplied to you by Apple 10 | Inc. ("Apple") in consideration of your agreement to the following 11 | terms, and your use, installation, modification or redistribution of 12 | this Apple software constitutes acceptance of these terms. If you do 13 | not agree with these terms, please do not use, install, modify or 14 | redistribute this Apple software. 15 | 16 | In consideration of your agreement to abide by the following terms, and 17 | subject to these terms, Apple grants you a personal, non-exclusive 18 | license, under Apple's copyrights in this original Apple software (the 19 | "Apple Software"), to use, reproduce, modify and redistribute the Apple 20 | Software, with or without modifications, in source and/or binary forms; 21 | provided that if you redistribute the Apple Software in its entirety and 22 | without modifications, you must retain this notice and the following 23 | text and disclaimers in all such redistributions of the Apple Software. 24 | Neither the name, trademarks, service marks or logos of Apple Inc. may 25 | be used to endorse or promote products derived from the Apple Software 26 | without specific prior written permission from Apple. 
Except as 27 | expressly stated in this notice, no other rights or licenses, express or 28 | implied, are granted by Apple herein, including but not limited to any 29 | patent rights that may be infringed by your derivative works or by other 30 | works in which the Apple Software may be incorporated. 31 | 32 | The Apple Software is provided by Apple on an "AS IS" basis. APPLE 33 | MAKES NO WARRANTIES, EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION 34 | THE IMPLIED WARRANTIES OF NON-INFRINGEMENT, MERCHANTABILITY AND FITNESS 35 | FOR A PARTICULAR PURPOSE, REGARDING THE APPLE SOFTWARE OR ITS USE AND 36 | OPERATION ALONE OR IN COMBINATION WITH YOUR PRODUCTS. 37 | 38 | IN NO EVENT SHALL APPLE BE LIABLE FOR ANY SPECIAL, INDIRECT, INCIDENTAL 39 | OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF 40 | SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS 41 | INTERRUPTION) ARISING IN ANY WAY OUT OF THE USE, REPRODUCTION, 42 | MODIFICATION AND/OR DISTRIBUTION OF THE APPLE SOFTWARE, HOWEVER CAUSED 43 | AND WHETHER UNDER THEORY OF CONTRACT, TORT (INCLUDING NEGLIGENCE), 44 | STRICT LIABILITY OR OTHERWISE, EVEN IF APPLE HAS BEEN ADVISED OF THE 45 | POSSIBILITY OF SUCH DAMAGE. 46 | 47 | Copyright (C) 2013 Apple Inc. All Rights Reserved. 
48 | 49 | */ 50 | 51 | #import 52 | 53 | @class DirectoryWatcher; 54 | 55 | @protocol DirectoryWatcherDelegate 56 | @required 57 | - (void)directoryDidChange:(DirectoryWatcher *)folderWatcher; 58 | @end 59 | 60 | @interface DirectoryWatcher : NSObject 61 | { 62 | id __weak delegate; 63 | 64 | int dirFD; 65 | int kq; 66 | 67 | CFFileDescriptorRef dirKQRef; 68 | } 69 | @property (nonatomic, weak) id delegate; 70 | 71 | + (DirectoryWatcher *)watchFolderWithPath:(NSString *)watchPath delegate:(id)watchDelegate; 72 | - (void)invalidate; 73 | @end 74 | -------------------------------------------------------------------------------- /FFmpegEncoder/DirectoryWatcher.m: -------------------------------------------------------------------------------- 1 | /* 2 | File: DirectoryWatcher.m 3 | Abstract: 4 | Object used to monitor the contents of a given directory by using 5 | "kqueue": a kernel event notification mechanism. 6 | 7 | Version: 1.5 8 | 9 | Disclaimer: IMPORTANT: This Apple software is supplied to you by Apple 10 | Inc. ("Apple") in consideration of your agreement to the following 11 | terms, and your use, installation, modification or redistribution of 12 | this Apple software constitutes acceptance of these terms. If you do 13 | not agree with these terms, please do not use, install, modify or 14 | redistribute this Apple software. 15 | 16 | In consideration of your agreement to abide by the following terms, and 17 | subject to these terms, Apple grants you a personal, non-exclusive 18 | license, under Apple's copyrights in this original Apple software (the 19 | "Apple Software"), to use, reproduce, modify and redistribute the Apple 20 | Software, with or without modifications, in source and/or binary forms; 21 | provided that if you redistribute the Apple Software in its entirety and 22 | without modifications, you must retain this notice and the following 23 | text and disclaimers in all such redistributions of the Apple Software. 
24 | Neither the name, trademarks, service marks or logos of Apple Inc. may 25 | be used to endorse or promote products derived from the Apple Software 26 | without specific prior written permission from Apple. Except as 27 | expressly stated in this notice, no other rights or licenses, express or 28 | implied, are granted by Apple herein, including but not limited to any 29 | patent rights that may be infringed by your derivative works or by other 30 | works in which the Apple Software may be incorporated. 31 | 32 | The Apple Software is provided by Apple on an "AS IS" basis. APPLE 33 | MAKES NO WARRANTIES, EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION 34 | THE IMPLIED WARRANTIES OF NON-INFRINGEMENT, MERCHANTABILITY AND FITNESS 35 | FOR A PARTICULAR PURPOSE, REGARDING THE APPLE SOFTWARE OR ITS USE AND 36 | OPERATION ALONE OR IN COMBINATION WITH YOUR PRODUCTS. 37 | 38 | IN NO EVENT SHALL APPLE BE LIABLE FOR ANY SPECIAL, INDIRECT, INCIDENTAL 39 | OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF 40 | SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS 41 | INTERRUPTION) ARISING IN ANY WAY OUT OF THE USE, REPRODUCTION, 42 | MODIFICATION AND/OR DISTRIBUTION OF THE APPLE SOFTWARE, HOWEVER CAUSED 43 | AND WHETHER UNDER THEORY OF CONTRACT, TORT (INCLUDING NEGLIGENCE), 44 | STRICT LIABILITY OR OTHERWISE, EVEN IF APPLE HAS BEEN ADVISED OF THE 45 | POSSIBILITY OF SUCH DAMAGE. 46 | 47 | Copyright (C) 2013 Apple Inc. All Rights Reserved. 
*/

#import "DirectoryWatcher.h"

// NOTE(review): the original #include/#import targets were lost in extraction.
// Reconstructed from the APIs used below (open/O_EVTONLY, kqueue/kevent,
// close, CFFileDescriptor*) — confirm against the pristine Apple sample.
#include <fcntl.h>
#include <sys/event.h>
#include <sys/param.h>
#include <sys/time.h>
#include <unistd.h>

#import <CoreFoundation/CoreFoundation.h>

// Private methods used internally by the kqueue callback machinery.
@interface DirectoryWatcher (DirectoryWatcherPrivate)
- (BOOL)startMonitoringDirectory:(NSString *)dirPath;
- (void)kqueueFired;
@end


#pragma mark -

@implementation DirectoryWatcher

@synthesize delegate;

// Designated initializer: mark every descriptor invalid so -invalidate can
// be called safely at any point in the object's life.
- (instancetype)init
{
    self = [super init];
    if (self)    // guard added: original assigned ivars even if super failed
    {
        delegate = nil;    // object reference: nil, not NULL

        dirFD = -1;
        kq = -1;
        dirKQRef = NULL;
    }
    return self;
}

- (void)dealloc
{
    // Tear down the kqueue/CFFileDescriptor before the object goes away.
    [self invalidate];
}

/// Factory: returns a watcher monitoring `watchPath`, or nil when either
/// argument is nil or monitoring could not be started.
+ (DirectoryWatcher *)watchFolderWithPath:(NSString *)watchPath delegate:(id)watchDelegate
{
    DirectoryWatcher *retVal = nil;
    if ((watchDelegate != nil) && (watchPath != nil))
    {
        DirectoryWatcher *tempManager = [[DirectoryWatcher alloc] init];
        tempManager.delegate = watchDelegate;
        if ([tempManager startMonitoringDirectory:watchPath])
        {
            // Everything appears to be in order, so return the DirectoryWatcher.
            // Otherwise we'll fall through and return nil.
            retVal = tempManager;
        }
    }
    return retVal;
}

// Stop watching and release all kernel/CF resources. Idempotent.
- (void)invalidate
{
    if (dirKQRef != NULL)
    {
        CFFileDescriptorInvalidate(dirKQRef);
        CFRelease(dirKQRef);
        dirKQRef = NULL;
        // We don't need to close the kq, CFFileDescriptorInvalidate closed it instead.
        // Change the value so no one thinks it's still live.
116 | kq = -1; 117 | } 118 | 119 | if(dirFD != -1) 120 | { 121 | close(dirFD); 122 | dirFD = -1; 123 | } 124 | } 125 | 126 | @end 127 | 128 | 129 | #pragma mark - 130 | 131 | @implementation DirectoryWatcher (DirectoryWatcherPrivate) 132 | 133 | - (void)kqueueFired 134 | { 135 | assert(kq >= 0); 136 | 137 | struct kevent event; 138 | struct timespec timeout = {0, 0}; 139 | int eventCount; 140 | 141 | eventCount = kevent(kq, NULL, 0, &event, 1, &timeout); 142 | assert((eventCount >= 0) && (eventCount < 2)); 143 | 144 | // call our delegate of the directory change 145 | [delegate directoryDidChange:self]; 146 | 147 | CFFileDescriptorEnableCallBacks(dirKQRef, kCFFileDescriptorReadCallBack); 148 | } 149 | 150 | static void KQCallback(CFFileDescriptorRef kqRef, CFOptionFlags callBackTypes, void *info) 151 | { 152 | DirectoryWatcher *obj; 153 | 154 | obj = (__bridge DirectoryWatcher *)info; 155 | assert([obj isKindOfClass:[DirectoryWatcher class]]); 156 | assert(kqRef == obj->dirKQRef); 157 | assert(callBackTypes == kCFFileDescriptorReadCallBack); 158 | 159 | [obj kqueueFired]; 160 | } 161 | 162 | - (BOOL)startMonitoringDirectory:(NSString *)dirPath 163 | { 164 | // Double initializing is not going to work... 165 | if ((dirKQRef == NULL) && (dirFD == -1) && (kq == -1)) 166 | { 167 | // Open the directory we're going to watch 168 | dirFD = open([dirPath fileSystemRepresentation], O_EVTONLY); 169 | if (dirFD >= 0) 170 | { 171 | // Create a kqueue for our event messages... 
172 | kq = kqueue(); 173 | if (kq >= 0) 174 | { 175 | struct kevent eventToAdd; 176 | eventToAdd.ident = dirFD; 177 | eventToAdd.filter = EVFILT_VNODE; 178 | eventToAdd.flags = EV_ADD | EV_CLEAR; 179 | eventToAdd.fflags = NOTE_WRITE; 180 | eventToAdd.data = 0; 181 | eventToAdd.udata = NULL; 182 | 183 | int errNum = kevent(kq, &eventToAdd, 1, NULL, 0, NULL); 184 | if (errNum == 0) 185 | { 186 | CFFileDescriptorContext context = { 0, (__bridge void *)(self), NULL, NULL, NULL }; 187 | CFRunLoopSourceRef rls; 188 | 189 | // Passing true in the third argument so CFFileDescriptorInvalidate will close kq. 190 | dirKQRef = CFFileDescriptorCreate(NULL, kq, true, KQCallback, &context); 191 | if (dirKQRef != NULL) 192 | { 193 | rls = CFFileDescriptorCreateRunLoopSource(NULL, dirKQRef, 0); 194 | if (rls != NULL) 195 | { 196 | CFRunLoopAddSource(CFRunLoopGetCurrent(), rls, kCFRunLoopDefaultMode); 197 | CFRelease(rls); 198 | CFFileDescriptorEnableCallBacks(dirKQRef, kCFFileDescriptorReadCallBack); 199 | 200 | // If everything worked, return early and bypass shutting things down 201 | return YES; 202 | } 203 | // Couldn't create a runloop source, invalidate and release the CFFileDescriptorRef 204 | CFFileDescriptorInvalidate(dirKQRef); 205 | CFRelease(dirKQRef); 206 | dirKQRef = NULL; 207 | } 208 | } 209 | // kq is active, but something failed, close the handle... 210 | close(kq); 211 | kq = -1; 212 | } 213 | // file handle is open, but something failed, close the handle... 
214 | close(dirFD); 215 | dirFD = -1; 216 | } 217 | } 218 | return NO; 219 | } 220 | 221 | @end 222 | -------------------------------------------------------------------------------- /FFmpegEncoder/FFmpegEncoder-Info.plist: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | CFBundleDevelopmentRegion 6 | en 7 | CFBundleDisplayName 8 | ${PRODUCT_NAME} 9 | CFBundleExecutable 10 | ${EXECUTABLE_NAME} 11 | CFBundleIdentifier 12 | com.chrisballinger.${PRODUCT_NAME:rfc1034identifier} 13 | CFBundleInfoDictionaryVersion 14 | 6.0 15 | CFBundleName 16 | ${PRODUCT_NAME} 17 | CFBundlePackageType 18 | APPL 19 | CFBundleShortVersionString 20 | 1.0 21 | CFBundleSignature 22 | ???? 23 | CFBundleVersion 24 | 1.0 25 | LSRequiresIPhoneOS 26 | 27 | UIFileSharingEnabled 28 | 29 | UIRequiredDeviceCapabilities 30 | 31 | armv7 32 | 33 | UISupportedInterfaceOrientations 34 | 35 | UIInterfaceOrientationPortrait 36 | UIInterfaceOrientationLandscapeLeft 37 | UIInterfaceOrientationLandscapeRight 38 | 39 | UISupportedInterfaceOrientations~ipad 40 | 41 | UIInterfaceOrientationPortrait 42 | UIInterfaceOrientationPortraitUpsideDown 43 | UIInterfaceOrientationLandscapeLeft 44 | UIInterfaceOrientationLandscapeRight 45 | 46 | 47 | 48 | -------------------------------------------------------------------------------- /FFmpegEncoder/FFmpegEncoder-Prefix.pch: -------------------------------------------------------------------------------- 1 | // 2 | // Prefix header 3 | // 4 | // The contents of this file are implicitly included at the beginning of every source file. 5 | // 6 | 7 | #import 8 | 9 | #ifndef __IPHONE_3_0 10 | #warning "This project uses features only available in iOS SDK 3.0 and later." 
#endif

#ifdef __OBJC__
    // NOTE(review): import targets were stripped during extraction; these are
    // the stock Xcode iOS prefix-header imports — confirm against the original.
    #import <UIKit/UIKit.h>
    #import <Foundation/Foundation.h>
#endif
-------------------------------------------------------------------------------- /FFmpegEncoder/H264Encoder/AVEncoder.h: --------------------------------------------------------------------------------
//
//  AVEncoder.h
//  Encoder Demo
//
//  Created by Geraint Davies on 14/01/2013.
//  Copyright (c) 2013 GDCL http://www.gdcl.co.uk/license.htm
//

// NOTE(review): the first import's target was stripped during extraction;
// <Foundation/Foundation.h> is the conventional choice here — confirm.
#import <Foundation/Foundation.h>
#import "AVFoundation/AVAssetWriter.h"
#import "AVFoundation/AVAssetWriterInput.h"
#import "AVFoundation/AVMediaFormat.h"
#import "AVFoundation/AVVideoSettings.h"
#import "sys/stat.h"
#import "VideoEncoder.h"
#import "MP4Atom.h"

/// Per-frame callback: `data` is an array of NSData NALUs (no start codes),
/// `pts` the presentation time in seconds.
typedef int (^encoder_handler_t)(NSArray* data, double pts);
/// Called once with the avcC parameter-set blob (contains SPS/PPS).
typedef int (^param_handler_t)(NSData* params);

/// H.264 encoder wrapper: feeds frames through an AVAssetWriter-backed
/// VideoEncoder and extracts elementary-stream NALUs from the growing MP4
/// file (see AVEncoder.mm for the mdat-tailing mechanism).
@interface AVEncoder : NSObject

+ (AVEncoder*) encoderForHeight:(int) height andWidth:(int) width;

- (void) encodeWithBlock:(encoder_handler_t) block onParams: (param_handler_t) paramsHandler;
- (void) encodeFrame:(CMSampleBufferRef) sampleBuffer;
- (NSData*) getConfigData;
- (void) shutdown;

// Bitrate estimate accumulated over the first second of output.
@property (readonly, atomic) int bitspersecond;

@end
-------------------------------------------------------------------------------- /FFmpegEncoder/H264Encoder/AVEncoder.mm: --------------------------------------------------------------------------------
//
//  AVEncoder.m
//  Encoder Demo
//
//  Created by Geraint Davies on 14/01/2013.
6 | // Copyright (c) 2013 GDCL http://www.gdcl.co.uk/license.htm 7 | // 8 | 9 | #import "AVEncoder.h" 10 | #import "NALUnit.h" 11 | 12 | static unsigned int to_host(unsigned char* p) 13 | { 14 | return (p[0] << 24) + (p[1] << 16) + (p[2] << 8) + p[3]; 15 | } 16 | 17 | #define OUTPUT_FILE_SWITCH_POINT (50 * 1024 * 1024) // 50 MB switch point 18 | #define MAX_FILENAME_INDEX 5 // filenames "capture1.mp4" wraps at capture5.mp4 19 | 20 | 21 | @interface AVEncoder () 22 | 23 | { 24 | // initial writer, used to obtain SPS/PPS from header 25 | VideoEncoder* _headerWriter; 26 | 27 | // main encoder/writer 28 | VideoEncoder* _writer; 29 | 30 | // writer output file (input to our extractor) and monitoring 31 | NSFileHandle* _inputFile; 32 | dispatch_queue_t _readQueue; 33 | dispatch_source_t _readSource; 34 | 35 | // index of current file name 36 | BOOL _swapping; 37 | int _currentFile; 38 | int _height; 39 | int _width; 40 | 41 | // param set data 42 | NSData* _avcC; 43 | int _lengthSize; 44 | 45 | // location of mdat 46 | BOOL _foundMDAT; 47 | uint64_t _posMDAT; 48 | int _bytesToNextAtom; 49 | BOOL _needParams; 50 | 51 | // tracking if NALU is next frame 52 | int _prev_nal_idc; 53 | int _prev_nal_type; 54 | // array of NSData comprising a single frame. 
// each data is one nalu with no start code
    NSMutableArray* _pendingNALU;

    // FIFO for frame times
    NSMutableArray* _times;

    encoder_handler_t _outputBlock;
    param_handler_t _paramsBlock;

    // estimate bitrate over first second
    int _bitspersecond;
    double _firstpts;
}

- (void) initForHeight:(int) height andWidth:(int) width;

@end

@implementation AVEncoder

@synthesize bitspersecond = _bitspersecond;

/// Factory. Construction is two-phase: -initForHeight:andWidth: is a plain
/// setup method (returns void), not an NSObject initializer.
+ (AVEncoder*) encoderForHeight:(int) height andWidth:(int) width
{
    // Was bare "[AVEncoder alloc]" with no -init; that only worked because
    // NSObject's -init is a no-op. Call -init explicitly — same behavior,
    // no reliance on that implementation detail.
    AVEncoder* enc = [[AVEncoder alloc] init];
    [enc initForHeight:height andWidth:width];
    return enc;
}

// Path of the current capture file in the temporary directory.
- (NSString*) makeFilename
{
    NSString* filename = [NSString stringWithFormat:@"capture%d.mp4", _currentFile];
    NSString* path = [NSTemporaryDirectory() stringByAppendingPathComponent:filename];
    return path;
}

// One-time setup: create the one-frame header writer (used solely to obtain
// the avcC SPS/PPS record) and the first main capture writer.
- (void) initForHeight:(int)height andWidth:(int)width
{
    _height = height;
    _width = width;
    NSString* path = [NSTemporaryDirectory() stringByAppendingPathComponent:@"params.mp4"];
    _headerWriter = [VideoEncoder encoderForPath:path Height:height andWidth:width];
    _times = [NSMutableArray arrayWithCapacity:10];

    // rotate between MAX_FILENAME_INDEX capture files
    // (old comment said 3; the actual wrap point is MAX_FILENAME_INDEX = 5)
    _currentFile = 1;
    _writer = [VideoEncoder encoderForPath:[self makeFilename] Height:height andWidth:width];
}

// Install the consumer callbacks and reset per-session encoding state.
- (void) encodeWithBlock:(encoder_handler_t) block onParams: (param_handler_t) paramsHandler
{
    _outputBlock = block;
    _paramsBlock = paramsHandler;
    _needParams = YES;
    _pendingNALU = nil;
    _firstpts = -1;
    _bitspersecond = 0;
}

/// Walks the finished one-frame MP4 looking for the avcC record
/// (moov/trak/mdia/minf/stbl/stsd/avc1/avcC). On success stores the record
/// in _avcC, derives _lengthSize from its lengthSizeMinusOne field
/// ((p[4] & 3) + 1, i.e. 1-4 bytes), and returns YES.
- (BOOL) parseParams:(NSString*) path
{
    NSFileHandle* file = [NSFileHandle fileHandleForReadingAtPath:path];
    struct stat s;
    fstat([file fileDescriptor], &s);
    MP4Atom* movie = [MP4Atom atomAt:0 size:s.st_size type:(OSType)('file')
inFile:file]; 118 | MP4Atom* moov = [movie childOfType:(OSType)('moov') startAt:0]; 119 | MP4Atom* trak = nil; 120 | if (moov != nil) 121 | { 122 | for (;;) 123 | { 124 | trak = [moov nextChild]; 125 | if (trak == nil) 126 | { 127 | break; 128 | } 129 | 130 | if (trak.type == (OSType)('trak')) 131 | { 132 | MP4Atom* tkhd = [trak childOfType:(OSType)('tkhd') startAt:0]; 133 | NSData* verflags = [tkhd readAt:0 size:4]; 134 | unsigned char* p = (unsigned char*)[verflags bytes]; 135 | if (p[3] & 1) 136 | { 137 | break; 138 | } 139 | else 140 | { 141 | tkhd = nil; 142 | } 143 | } 144 | } 145 | } 146 | MP4Atom* stsd = nil; 147 | if (trak != nil) 148 | { 149 | MP4Atom* media = [trak childOfType:(OSType)('mdia') startAt:0]; 150 | if (media != nil) 151 | { 152 | MP4Atom* minf = [media childOfType:(OSType)('minf') startAt:0]; 153 | if (minf != nil) 154 | { 155 | MP4Atom* stbl = [minf childOfType:(OSType)('stbl') startAt:0]; 156 | if (stbl != nil) 157 | { 158 | stsd = [stbl childOfType:(OSType)('stsd') startAt:0]; 159 | } 160 | } 161 | } 162 | } 163 | if (stsd != nil) 164 | { 165 | MP4Atom* avc1 = [stsd childOfType:(OSType)('avc1') startAt:8]; 166 | if (avc1 != nil) 167 | { 168 | MP4Atom* esd = [avc1 childOfType:(OSType)('avcC') startAt:78]; 169 | if (esd != nil) 170 | { 171 | // this is the avcC record that we are looking for 172 | _avcC = [esd readAt:0 size:esd.length]; 173 | if (_avcC != nil) 174 | { 175 | // extract size of length field 176 | unsigned char* p = (unsigned char*)[_avcC bytes]; 177 | _lengthSize = (p[4] & 3) + 1; 178 | return YES; 179 | } 180 | } 181 | } 182 | } 183 | return NO; 184 | } 185 | 186 | - (void) onParamsCompletion 187 | { 188 | // the initial one-frame-only file has been completed 189 | // Extract the avcC structure and then start monitoring the 190 | // main file to extract video from the mdat chunk. 
191 | if ([self parseParams:_headerWriter.path]) 192 | { 193 | if (_paramsBlock) 194 | { 195 | _paramsBlock(_avcC); 196 | } 197 | _headerWriter = nil; 198 | _swapping = NO; 199 | _inputFile = [NSFileHandle fileHandleForReadingAtPath:_writer.path]; 200 | _readQueue = dispatch_queue_create("uk.co.gdcl.avencoder.read", DISPATCH_QUEUE_SERIAL); 201 | 202 | _readSource = dispatch_source_create(DISPATCH_SOURCE_TYPE_READ, [_inputFile fileDescriptor], 0, _readQueue); 203 | dispatch_source_set_event_handler(_readSource, ^{ 204 | [self onFileUpdate]; 205 | }); 206 | dispatch_resume(_readSource); 207 | } 208 | } 209 | 210 | - (void) encodeFrame:(CMSampleBufferRef) sampleBuffer 211 | { 212 | @synchronized(self) 213 | { 214 | if (_needParams) 215 | { 216 | // the avcC record is needed for decoding and it's not written to the file until 217 | // completion. We get round that by writing the first frame to two files; the first 218 | // file (containing only one frame) is then finished, so we can extract the avcC record. 219 | // Only when we've got that do we start reading from the main file. 220 | _needParams = NO; 221 | if ([_headerWriter encodeFrame:sampleBuffer]) 222 | { 223 | [_headerWriter finishWithCompletionHandler:^{ 224 | [self onParamsCompletion]; 225 | }]; 226 | } 227 | } 228 | } 229 | CMTime prestime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer); 230 | double dPTS = (double)(prestime.value) / prestime.timescale; 231 | NSNumber* pts = [NSNumber numberWithDouble:dPTS]; 232 | @synchronized(_times) 233 | { 234 | [_times addObject:pts]; 235 | } 236 | @synchronized(self) 237 | { 238 | // switch output files when we reach a size limit 239 | // to avoid runaway storage use. 
240 | if (!_swapping) 241 | { 242 | struct stat st; 243 | fstat([_inputFile fileDescriptor], &st); 244 | if (st.st_size > OUTPUT_FILE_SWITCH_POINT) 245 | { 246 | _swapping = YES; 247 | VideoEncoder* oldVideo = _writer; 248 | 249 | // construct a new writer to the next filename 250 | if (++_currentFile > MAX_FILENAME_INDEX) 251 | { 252 | _currentFile = 1; 253 | } 254 | NSLog(@"Swap to file %d", _currentFile); 255 | _writer = [VideoEncoder encoderForPath:[self makeFilename] Height:_height andWidth:_width]; 256 | 257 | 258 | // to do this seamlessly requires a few steps in the right order 259 | // first, suspend the read source 260 | dispatch_source_cancel(_readSource); 261 | // execute the next step as a block on the same queue, to be sure the suspend is done 262 | dispatch_async(_readQueue, ^{ 263 | // finish the file, writing moov, before reading any more from the file 264 | // since we don't yet know where the mdat ends 265 | _readSource = nil; 266 | [oldVideo finishWithCompletionHandler:^{ 267 | [self swapFiles:oldVideo.path]; 268 | }]; 269 | }); 270 | } 271 | } 272 | [_writer encodeFrame:sampleBuffer]; 273 | } 274 | } 275 | 276 | - (void) swapFiles:(NSString*) oldPath 277 | { 278 | // save current position 279 | uint64_t pos = [_inputFile offsetInFile]; 280 | 281 | // re-read mdat length 282 | [_inputFile seekToFileOffset:_posMDAT]; 283 | NSData* hdr = [_inputFile readDataOfLength:4]; 284 | unsigned char* p = (unsigned char*) [hdr bytes]; 285 | int lenMDAT = to_host(p); 286 | 287 | // extract nalus from saved position to mdat end 288 | uint64_t posEnd = _posMDAT + lenMDAT; 289 | uint32_t cRead = (uint32_t)(posEnd - pos); 290 | [_inputFile seekToFileOffset:pos]; 291 | [self readAndDeliver:cRead]; 292 | 293 | // close and remove file 294 | [_inputFile closeFile]; 295 | _foundMDAT = false; 296 | _bytesToNextAtom = 0; 297 | [[NSFileManager defaultManager] removeItemAtPath:oldPath error:nil]; 298 | 299 | 300 | // open new file and set up dispatch source 301 | 
_inputFile = [NSFileHandle fileHandleForReadingAtPath:_writer.path]; 302 | _readSource = dispatch_source_create(DISPATCH_SOURCE_TYPE_READ, [_inputFile fileDescriptor], 0, _readQueue); 303 | dispatch_source_set_event_handler(_readSource, ^{ 304 | [self onFileUpdate]; 305 | }); 306 | dispatch_resume(_readSource); 307 | _swapping = NO; 308 | } 309 | 310 | 311 | - (void) readAndDeliver:(uint32_t) cReady 312 | { 313 | // Identify the individual NALUs and extract them 314 | while (cReady > _lengthSize) 315 | { 316 | NSData* lenField = [_inputFile readDataOfLength:_lengthSize]; 317 | cReady -= _lengthSize; 318 | unsigned char* p = (unsigned char*) [lenField bytes]; 319 | unsigned int lenNALU = to_host(p); 320 | 321 | if (lenNALU > cReady) 322 | { 323 | // whole NALU not present -- seek back to start of NALU and wait for more 324 | [_inputFile seekToFileOffset:[_inputFile offsetInFile] - 4]; 325 | break; 326 | } 327 | NSData* nalu = [_inputFile readDataOfLength:lenNALU]; 328 | cReady -= lenNALU; 329 | 330 | [self onNALU:nalu]; 331 | } 332 | } 333 | 334 | - (void) onFileUpdate 335 | { 336 | // called whenever there is more data to read in the main encoder output file. 337 | 338 | struct stat s; 339 | fstat([_inputFile fileDescriptor], &s); 340 | int cReady = s.st_size - [_inputFile offsetInFile]; 341 | 342 | // locate the mdat atom if needed 343 | while (!_foundMDAT && (cReady > 8)) 344 | { 345 | if (_bytesToNextAtom == 0) 346 | { 347 | NSData* hdr = [_inputFile readDataOfLength:8]; 348 | cReady -= 8; 349 | unsigned char* p = (unsigned char*) [hdr bytes]; 350 | int lenAtom = to_host(p); 351 | unsigned int nameAtom = to_host(p+4); 352 | if (nameAtom == (unsigned int)('mdat')) 353 | { 354 | _foundMDAT = true; 355 | _posMDAT = [_inputFile offsetInFile] - 8; 356 | } 357 | else 358 | { 359 | _bytesToNextAtom = lenAtom - 8; 360 | } 361 | } 362 | if (_bytesToNextAtom > 0) 363 | { 364 | int cThis = cReady < _bytesToNextAtom ? 
cReady :_bytesToNextAtom; 365 | _bytesToNextAtom -= cThis; 366 | [_inputFile seekToFileOffset:[_inputFile offsetInFile]+cThis]; 367 | cReady -= cThis; 368 | } 369 | } 370 | if (!_foundMDAT) 371 | { 372 | return; 373 | } 374 | 375 | // the mdat must be just encoded video. 376 | [self readAndDeliver:cReady]; 377 | } 378 | 379 | - (void) onEncodedFrame 380 | { 381 | double pts = 0; 382 | @synchronized(_times) 383 | { 384 | if ([_times count] > 0) 385 | { 386 | pts = [_times[0] doubleValue]; 387 | [_times removeObjectAtIndex:0]; 388 | if (_firstpts < 0) 389 | { 390 | _firstpts = pts; 391 | } 392 | if ((pts - _firstpts) < 1) 393 | { 394 | int bytes = 0; 395 | for (NSData* data in _pendingNALU) 396 | { 397 | bytes += [data length]; 398 | } 399 | _bitspersecond += (bytes * 8); 400 | } 401 | } 402 | else 403 | { 404 | NSLog(@"no pts for buffer"); 405 | } 406 | } 407 | if (_outputBlock != nil) 408 | { 409 | _outputBlock(_pendingNALU, pts); 410 | } 411 | } 412 | 413 | // combine multiple NALUs into a single frame, and in the process, convert to BSF 414 | // by adding 00 00 01 startcodes before each NALU. 415 | - (void) onNALU:(NSData*) nalu 416 | { 417 | unsigned char* pNal = (unsigned char*)[nalu bytes]; 418 | int idc = pNal[0] & 0x60; 419 | int naltype = pNal[0] & 0x1f; 420 | 421 | if (_pendingNALU) 422 | { 423 | NALUnit nal(pNal, [nalu length]); 424 | 425 | // we have existing data —is this the same frame? 426 | // typically there are a couple of NALUs per frame in iOS encoding. 427 | // This is not general-purpose: it assumes that arbitrary slice ordering is not allowed. 
428 | BOOL bNew = NO; 429 | if ((idc != _prev_nal_idc) && ((idc * _prev_nal_idc) == 0)) 430 | { 431 | bNew = YES; 432 | } 433 | else if ((naltype != _prev_nal_type) && ((naltype == 5) || (_prev_nal_type == 5))) 434 | { 435 | bNew = YES; 436 | } 437 | else if ((naltype >= 1) && (naltype <= 5)) 438 | { 439 | nal.Skip(8); 440 | int first_mb = nal.GetUE(); 441 | if (first_mb == 0) 442 | { 443 | bNew = YES; 444 | } 445 | } 446 | if (bNew) 447 | { 448 | [self onEncodedFrame]; 449 | _pendingNALU = nil; 450 | } 451 | } 452 | _prev_nal_type = naltype; 453 | _prev_nal_idc = idc; 454 | if (_pendingNALU == nil) 455 | { 456 | _pendingNALU = [NSMutableArray arrayWithCapacity:2]; 457 | } 458 | [_pendingNALU addObject:nalu]; 459 | } 460 | 461 | - (NSData*) getConfigData 462 | { 463 | return [_avcC copy]; 464 | } 465 | 466 | - (void) shutdown 467 | { 468 | @synchronized(self) 469 | { 470 | _readSource = nil; 471 | if (_headerWriter) 472 | { 473 | [_headerWriter finishWithCompletionHandler:^{ 474 | _headerWriter = nil; 475 | }]; 476 | } 477 | if (_writer) 478 | { 479 | [_writer finishWithCompletionHandler:^{ 480 | _writer = nil; 481 | }]; 482 | } 483 | // !! wait for these to finish before returning and delete temp files 484 | } 485 | } 486 | 487 | @end 488 | -------------------------------------------------------------------------------- /FFmpegEncoder/H264Encoder/CameraServer.h: -------------------------------------------------------------------------------- 1 | // 2 | // CameraServer.h 3 | // Encoder Demo 4 | // 5 | // Created by Geraint Davies on 19/02/2013. 
6 | // Copyright (c) 2013 GDCL http://www.gdcl.co.uk/license.htm 7 | // 8 | 9 | #import 10 | #import "AVFoundation/AVCaptureSession.h" 11 | #import "AVFoundation/AVCaptureOutput.h" 12 | #import "AVFoundation/AVCaptureDevice.h" 13 | #import "AVFoundation/AVCaptureInput.h" 14 | #import "AVFoundation/AVCaptureVideoPreviewLayer.h" 15 | #import "AVFoundation/AVMediaFormat.h" 16 | #import "HLSUploader.h" 17 | @class HLSWriter; 18 | 19 | @interface CameraServer : NSObject 20 | 21 | + (CameraServer*) server; 22 | - (void) startup; 23 | - (void) shutdown; 24 | - (NSString*) getURL; 25 | - (AVCaptureVideoPreviewLayer*) getPreviewLayer; 26 | 27 | @property (nonatomic, strong) HLSUploader *hlsUploader; 28 | @property (nonatomic, strong) HLSWriter *hlsWriter; 29 | 30 | @end 31 | -------------------------------------------------------------------------------- /FFmpegEncoder/H264Encoder/CameraServer.mm: -------------------------------------------------------------------------------- 1 | // 2 | // CameraServer.m 3 | // Encoder Demo 4 | // 5 | // Created by Geraint Davies on 19/02/2013. 
6 | // Copyright (c) 2013 GDCL http://www.gdcl.co.uk/license.htm 7 | // 8 | 9 | #import "CameraServer.h" 10 | #import "AVEncoder.h" 11 | #import "RTSPServer.h" 12 | #import "NALUnit.h" 13 | #import "HLSWriter.h" 14 | #import "AACEncoder.h" 15 | #import "HTTPServer.h" 16 | #import "HLSUploader.h" 17 | 18 | static const int VIDEO_WIDTH = 1280; 19 | static const int VIDEO_HEIGHT = 720; 20 | static const int SAMPLE_RATE = 44100; 21 | 22 | static CameraServer* theServer; 23 | 24 | @interface CameraServer () 25 | { 26 | AVCaptureSession* _session; 27 | AVCaptureVideoPreviewLayer* _preview; 28 | AVCaptureVideoDataOutput* _videoOutput; 29 | AVCaptureAudioDataOutput* _audioOutput; 30 | dispatch_queue_t _videoQueue; 31 | dispatch_queue_t _audioQueue; 32 | AVCaptureConnection* _audioConnection; 33 | AVCaptureConnection* _videoConnection; 34 | 35 | AVEncoder* _encoder; 36 | 37 | RTSPServer* _rtsp; 38 | } 39 | 40 | @property (nonatomic, strong) NSData *naluStartCode; 41 | @property (nonatomic, strong) NSMutableData *videoSPSandPPS; 42 | @property (nonatomic, strong) AACEncoder *aacEncoder; 43 | 44 | @property (nonatomic, strong) NSFileHandle *debugFileHandle; 45 | 46 | @property (nonatomic, strong) HTTPServer *httpServer; 47 | 48 | @end 49 | 50 | 51 | @implementation CameraServer 52 | 53 | + (void) initialize 54 | { 55 | // test recommended to avoid duplicate init via subclass 56 | if (self == [CameraServer class]) 57 | { 58 | theServer = [[CameraServer alloc] init]; 59 | } 60 | } 61 | 62 | + (CameraServer*) server 63 | { 64 | return theServer; 65 | } 66 | 67 | - (AVCaptureDevice *)audioDevice 68 | { 69 | NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeAudio]; 70 | if ([devices count] > 0) 71 | return [devices objectAtIndex:0]; 72 | 73 | return nil; 74 | } 75 | 76 | - (void) setupHLSWriter { 77 | NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES); 78 | NSString *basePath = ([paths count] > 0) ? 
[paths objectAtIndex:0] : nil; 79 | NSTimeInterval time = [[NSDate date] timeIntervalSince1970]; 80 | NSString *folderName = [NSString stringWithFormat:@"%f.hls", time]; 81 | NSString *hlsDirectoryPath = [basePath stringByAppendingPathComponent:folderName]; 82 | [[NSFileManager defaultManager] createDirectoryAtPath:hlsDirectoryPath withIntermediateDirectories:YES attributes:nil error:nil]; 83 | self.hlsWriter = [[HLSWriter alloc] initWithDirectoryPath:hlsDirectoryPath]; 84 | } 85 | 86 | - (void) initializeNALUnitStartCode { 87 | NSUInteger naluLength = 4; 88 | uint8_t *nalu = (uint8_t*)malloc(naluLength * sizeof(uint8_t)); 89 | nalu[0] = 0x00; 90 | nalu[1] = 0x00; 91 | nalu[2] = 0x00; 92 | nalu[3] = 0x01; 93 | _naluStartCode = [NSData dataWithBytesNoCopy:nalu length:naluLength freeWhenDone:YES]; 94 | } 95 | 96 | - (void) setupAudioCapture { 97 | _aacEncoder = [[AACEncoder alloc] init]; 98 | // create capture device with video input 99 | 100 | /* 101 | * Create audio connection 102 | */ 103 | AVCaptureDevice *audioDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio]; 104 | NSError *error = nil; 105 | AVCaptureDeviceInput *audioInput = [[AVCaptureDeviceInput alloc] initWithDevice:audioDevice error:&error]; 106 | if (error) { 107 | NSLog(@"Error getting audio input device: %@", error.description); 108 | } 109 | if ([_session canAddInput:audioInput]) { 110 | [_session addInput:audioInput]; 111 | } 112 | 113 | _audioQueue = dispatch_queue_create("Audio Capture Queue", DISPATCH_QUEUE_SERIAL); 114 | _audioOutput = [[AVCaptureAudioDataOutput alloc] init]; 115 | [_audioOutput setSampleBufferDelegate:self queue:_audioQueue]; 116 | if ([_session canAddOutput:_audioOutput]) { 117 | [_session addOutput:_audioOutput]; 118 | } 119 | _audioConnection = [_audioOutput connectionWithMediaType:AVMediaTypeAudio]; 120 | [_hlsWriter addAudioStreamWithSampleRate:SAMPLE_RATE]; 121 | } 122 | 123 | - (void) setupVideoCapture { 124 | NSError *error = nil; 125 | 
AVCaptureDevice* videoDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo]; 126 | AVCaptureDeviceInput* videoInput = [AVCaptureDeviceInput deviceInputWithDevice:videoDevice error:&error]; 127 | if (error) { 128 | NSLog(@"Error getting video input device: %@", error.description); 129 | } 130 | if ([_session canAddInput:videoInput]) { 131 | [_session addInput:videoInput]; 132 | } 133 | 134 | // create an output for YUV output with self as delegate 135 | _videoQueue = dispatch_queue_create("Video Capture Queue", DISPATCH_QUEUE_SERIAL); 136 | _videoOutput = [[AVCaptureVideoDataOutput alloc] init]; 137 | [_videoOutput setSampleBufferDelegate:self queue:_videoQueue]; 138 | NSDictionary *captureSettings = @{(NSString*)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange)}; 139 | _videoOutput.videoSettings = captureSettings; 140 | _videoOutput.alwaysDiscardsLateVideoFrames = YES; 141 | if ([_session canAddOutput:_videoOutput]) { 142 | [_session addOutput:_videoOutput]; 143 | } 144 | _videoConnection = [_videoOutput connectionWithMediaType:AVMediaTypeVideo]; 145 | 146 | [_hlsWriter addVideoStreamWithWidth:VIDEO_WIDTH height:VIDEO_HEIGHT]; 147 | 148 | } 149 | 150 | - (void) startup 151 | { 152 | if (_session == nil) 153 | { 154 | _session = [[AVCaptureSession alloc] init]; 155 | NSLog(@"Starting up server"); 156 | [self initializeNALUnitStartCode]; 157 | [self setupHLSWriter]; 158 | [self setupVideoCapture]; 159 | [self setupAudioCapture]; 160 | NSError *error = nil; 161 | [_hlsWriter prepareForWriting:&error]; 162 | if (error) { 163 | NSLog(@"Error preparing for writing: %@", error); 164 | } 165 | 166 | _httpServer = [[HTTPServer alloc] init]; 167 | [_httpServer setDocumentRoot:_hlsWriter.directoryPath]; 168 | _httpServer.port = 9001; 169 | [_httpServer start:&error]; 170 | if (error) { 171 | NSLog(@"Error starting http server: %@", error.description); 172 | } 173 | 174 | _hlsUploader = [[HLSUploader alloc] 
initWithDirectoryPath:_hlsWriter.directoryPath remoteFolderName:_hlsWriter.uuid]; 175 | 176 | // create an encoder 177 | _encoder = [AVEncoder encoderForHeight:VIDEO_HEIGHT andWidth:VIDEO_WIDTH]; 178 | [_encoder encodeWithBlock:^int(NSArray* dataArray, double pts) { 179 | [self writeVideoFrames:dataArray pts:pts]; 180 | //[self writeDebugFileForDataArray:dataArray pts:pts]; 181 | if (_rtsp != nil) 182 | { 183 | _rtsp.bitrate = _encoder.bitspersecond; 184 | [_rtsp onVideoData:dataArray time:pts]; 185 | } 186 | return 0; 187 | } onParams:^int(NSData *data) { 188 | _rtsp = [RTSPServer setupListener:data]; 189 | return 0; 190 | }]; 191 | 192 | // start capture and a preview layer 193 | [_session startRunning]; 194 | 195 | 196 | _preview = [AVCaptureVideoPreviewLayer layerWithSession:_session]; 197 | _preview.videoGravity = AVLayerVideoGravityResizeAspectFill; 198 | 199 | 200 | } 201 | } 202 | 203 | - (void) writeVideoFrames:(NSArray*)frames pts:(double)pts { 204 | if (pts == 0) { 205 | NSLog(@"PTS of 0, skipping frame"); 206 | return; 207 | } 208 | if (!_videoSPSandPPS) { 209 | NSData* config = _encoder.getConfigData; 210 | 211 | avcCHeader avcC((const BYTE*)[config bytes], [config length]); 212 | SeqParamSet seqParams; 213 | seqParams.Parse(avcC.sps()); 214 | 215 | NSData* spsData = [NSData dataWithBytes:avcC.sps()->Start() length:avcC.sps()->Length()]; 216 | NSData *ppsData = [NSData dataWithBytes:avcC.pps()->Start() length:avcC.pps()->Length()]; 217 | 218 | _videoSPSandPPS = [NSMutableData dataWithCapacity:avcC.sps()->Length() + avcC.pps()->Length() + _naluStartCode.length * 2]; 219 | [_videoSPSandPPS appendData:_naluStartCode]; 220 | [_videoSPSandPPS appendData:spsData]; 221 | [_videoSPSandPPS appendData:_naluStartCode]; 222 | [_videoSPSandPPS appendData:ppsData]; 223 | } 224 | 225 | for (NSData *data in frames) { 226 | unsigned char* pNal = (unsigned char*)[data bytes]; 227 | //int idc = pNal[0] & 0x60; 228 | int naltype = pNal[0] & 0x1f; 229 | NSData *videoData = 
nil; 230 | if (naltype == 5) { // IDR 231 | NSMutableData *IDRData = [NSMutableData dataWithData:_videoSPSandPPS]; 232 | [IDRData appendData:_naluStartCode]; 233 | [IDRData appendData:data]; 234 | videoData = IDRData; 235 | } else { 236 | NSMutableData *regularData = [NSMutableData dataWithData:_naluStartCode]; 237 | [regularData appendData:data]; 238 | videoData = regularData; 239 | } 240 | //NSMutableData *nalu = [[NSMutableData alloc] initWithData:_naluStartCode]; 241 | //[nalu appendData:data]; 242 | //NSLog(@"%f: %@", pts, videoData.description); 243 | [_hlsWriter processEncodedData:videoData presentationTimestamp:pts streamIndex:0]; 244 | } 245 | 246 | } 247 | 248 | - (void) captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection 249 | { 250 | // pass frame to encoder 251 | if (connection == _videoConnection) { 252 | [_encoder encodeFrame:sampleBuffer]; 253 | } else if (connection == _audioConnection) { 254 | CMTime pts = CMSampleBufferGetPresentationTimeStamp(sampleBuffer); 255 | double dPTS = (double)(pts.value) / pts.timescale; 256 | [_aacEncoder encodeSampleBuffer:sampleBuffer completionBlock:^(NSData *encodedData, NSError *error) { 257 | if (encodedData) { 258 | //NSLog(@"Encoded data (%d): %@", encodedData.length, encodedData.description); 259 | [_hlsWriter processEncodedData:encodedData presentationTimestamp:dPTS streamIndex:1]; 260 | //[self writeDebugFileForData:encodedData pts:dPTS]; 261 | } else { 262 | NSLog(@"Error encoding AAC: %@", error); 263 | } 264 | }]; 265 | } 266 | } 267 | 268 | - (void) writeDebugFileForData:(NSData*)data pts:(double)pts { 269 | if (!_debugFileHandle) { 270 | NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES); 271 | NSString *basePath = ([paths count] > 0) ? 
[paths objectAtIndex:0] : nil; 272 | NSTimeInterval time = [[NSDate date] timeIntervalSince1970]; 273 | NSString *folderName = [NSString stringWithFormat:@"%f.aacdebug", time]; 274 | NSString *debugDirectoryPath = [basePath stringByAppendingPathComponent:folderName]; 275 | [[NSFileManager defaultManager] createDirectoryAtPath:debugDirectoryPath withIntermediateDirectories:YES attributes:nil error:nil]; 276 | 277 | 278 | NSString *fileName = @"test.aac"; 279 | NSString *outputFilePath = [debugDirectoryPath stringByAppendingPathComponent:fileName]; 280 | NSURL *fileURL = [NSURL fileURLWithPath:outputFilePath]; 281 | NSError *error = nil; 282 | [[NSFileManager defaultManager] createFileAtPath:outputFilePath contents:nil attributes:nil]; 283 | _debugFileHandle = [NSFileHandle fileHandleForWritingToURL:fileURL error:&error]; 284 | if (!_debugFileHandle) { /* FIX: per Cocoa convention, test the nil return value — the NSError out-param is only meaningful on failure, so "if (error)" could both misfire on stale data and miss a real failure */ 285 | NSLog(@"Error opening file for writing: %@", error.description); 286 | } 287 | } 288 | 289 | [_debugFileHandle writeData:data]; 290 | [_debugFileHandle synchronizeFile]; 291 | } 292 | 293 | - (void) shutdown 294 | { 295 | NSLog(@"shutting down server"); 296 | if (_session) 297 | { 298 | [_session stopRunning]; 299 | _session = nil; 300 | } 301 | if (_rtsp) 302 | { 303 | [_rtsp shutdownServer]; 304 | } 305 | if (_encoder) 306 | { 307 | [ _encoder shutdown]; 308 | } 309 | } 310 | 311 | - (NSString*) getURL 312 | { 313 | NSString* ipaddr = [RTSPServer getIPAddress]; 314 | NSString* url = [NSString stringWithFormat:@"rtsp://%@/", ipaddr]; 315 | return url; 316 | } 317 | 318 | - (AVCaptureVideoPreviewLayer*) getPreviewLayer 319 | { 320 | return _preview; 321 | } 322 | 323 | @end 324 | -------------------------------------------------------------------------------- /FFmpegEncoder/H264Encoder/EncoderDemoAppDelegate.h: -------------------------------------------------------------------------------- 1 | // 2 | // EncoderDemoAppDelegate.h 3 | // Encoder Demo 4 | // 5 | // Created by Geraint Davies on 11/01/2013. 
6 | // Copyright (c) 2013 GDCL http://www.gdcl.co.uk/license.htm 7 | // 8 | 9 | #import 10 | 11 | @interface EncoderDemoAppDelegate : UIResponder 12 | 13 | @property (strong, nonatomic) UIWindow *window; 14 | 15 | @end 16 | -------------------------------------------------------------------------------- /FFmpegEncoder/H264Encoder/EncoderDemoAppDelegate.m: -------------------------------------------------------------------------------- 1 | // 2 | // EncoderDemoAppDelegate.m 3 | // Encoder Demo 4 | // 5 | // Created by Geraint Davies on 11/01/2013. 6 | // Copyright (c) 2013 GDCL http://www.gdcl.co.uk/license.htm 7 | // 8 | 9 | #import "EncoderDemoAppDelegate.h" 10 | #import "CameraServer.h" 11 | #import "EncoderDemoViewController.h" 12 | 13 | @implementation EncoderDemoAppDelegate 14 | 15 | - (BOOL)application:(UIApplication *)application didFinishLaunchingWithOptions:(NSDictionary *)launchOptions 16 | { 17 | self.window = [[UIWindow alloc] initWithFrame:[[UIScreen mainScreen] bounds]]; 18 | self.window.backgroundColor = [UIColor whiteColor]; 19 | 20 | // Override point for customization after application launch. 21 | self.window.rootViewController = [[EncoderDemoViewController alloc] init]; 22 | 23 | //[[CameraServer server] startup]; 24 | 25 | [self.window makeKeyAndVisible]; 26 | 27 | return YES; 28 | } 29 | 30 | - (void)applicationWillResignActive:(UIApplication *)application 31 | { 32 | // Sent when the application is about to move from active to inactive state. This can occur for certain types of temporary interruptions (such as an incoming phone call or SMS message) or when the user quits the application and it begins the transition to the background state. 33 | // Use this method to pause ongoing tasks, disable timers, and throttle down OpenGL ES frame rates. Games should use this method to pause the game. 
34 | } 35 | 36 | - (void)applicationDidEnterBackground:(UIApplication *)application 37 | { 38 | // Use this method to release shared resources, save user data, invalidate timers, and store enough application state information to restore your application to its current state in case it is terminated later. 39 | // If your application supports background execution, this method is called instead of applicationWillTerminate: when the user quits. 40 | [[CameraServer server] shutdown]; 41 | } 42 | 43 | - (void)applicationWillEnterForeground:(UIApplication *)application 44 | { 45 | // Called as part of the transition from the background to the inactive state; here you can undo many of the changes made on entering the background. 46 | } 47 | 48 | - (void)applicationDidBecomeActive:(UIApplication *)application 49 | { 50 | // Restart any tasks that were paused (or not yet started) while the application was inactive. If the application was previously in the background, optionally refresh the user interface. 51 | [[CameraServer server] startup]; 52 | EncoderDemoViewController* view = (EncoderDemoViewController*) self.window.rootViewController; 53 | [view startPreview]; 54 | } 55 | 56 | - (void)applicationWillTerminate:(UIApplication *)application 57 | { 58 | // Called when the application is about to terminate. Save data if appropriate. See also applicationDidEnterBackground:. 59 | } 60 | 61 | @end 62 | -------------------------------------------------------------------------------- /FFmpegEncoder/H264Encoder/EncoderDemoViewController.h: -------------------------------------------------------------------------------- 1 | // 2 | // EncoderDemoViewController.h 3 | // Encoder Demo 4 | // 5 | // Created by Geraint Davies on 11/01/2013. 
6 | // Copyright (c) 2013 GDCL http://www.gdcl.co.uk/license.htm 7 | // 8 | 9 | #import 10 | 11 | @interface EncoderDemoViewController : UIViewController 12 | 13 | @property (strong, nonatomic) UIView *cameraView; 14 | @property (strong, nonatomic) UILabel *serverAddress; 15 | @property (nonatomic, strong) UIButton *shareButton; 16 | 17 | - (void) startPreview; 18 | 19 | @end 20 | -------------------------------------------------------------------------------- /FFmpegEncoder/H264Encoder/EncoderDemoViewController.m: -------------------------------------------------------------------------------- 1 | // 2 | // EncoderDemoViewController.m 3 | // Encoder Demo 4 | // 5 | // Created by Geraint Davies on 11/01/2013. 6 | // Copyright (c) 2013 GDCL http://www.gdcl.co.uk/license.htm 7 | // 8 | 9 | #import "EncoderDemoViewController.h" 10 | #import "CameraServer.h" 11 | #import "HLSWriter.h" 12 | #import "OWSecrets.h" 13 | #import "KFAPIClient.h" 14 | #import "KFUser.h" 15 | 16 | @implementation EncoderDemoViewController 17 | 18 | - (id) init { 19 | if (self = [super init]) { 20 | _cameraView = [[UIView alloc] init]; 21 | _cameraView.autoresizingMask = UIViewAutoresizingFlexibleHeight | UIViewAutoresizingFlexibleWidth | UIViewAutoresizingFlexibleRightMargin | UIViewAutoresizingFlexibleBottomMargin; 22 | _serverAddress = [[UILabel alloc] init]; 23 | _shareButton = [UIButton buttonWithType:UIButtonTypeRoundedRect]; 24 | [_shareButton addTarget:self action:@selector(shareButtonPressed:) forControlEvents:UIControlEventTouchUpInside]; 25 | [_shareButton setTitle:@"Share" forState:UIControlStateNormal]; 26 | } 27 | return self; 28 | } 29 | 30 | - (void) shareButtonPressed:(id)sender { 31 | NSString *kickflipURLString = [NSString stringWithFormat:@"http://kickflip.io/video.html?v=%@", [CameraServer server].hlsWriter.uuid]; 32 | NSURL *kickflipURL = [NSURL URLWithString:kickflipURLString]; 33 | NSURL *manifestURL = [CameraServer server].hlsUploader.manifestURL; 34 | 
UIActivityViewController *activityViewController = [[UIActivityViewController alloc] initWithActivityItems:@[kickflipURL, manifestURL] applicationActivities:nil]; 35 | 36 | UIActivityViewControllerCompletionHandler completionHandler = ^(NSString *activityType, BOOL completed) { 37 | NSLog(@"activity: %@", activityType); 38 | }; 39 | 40 | activityViewController.completionHandler = completionHandler; 41 | 42 | [self presentViewController:activityViewController animated:YES completion:nil]; 43 | } 44 | 45 | - (void)viewDidLoad 46 | { 47 | [super viewDidLoad]; 48 | [self.view addSubview:_cameraView]; 49 | [self.view addSubview:_serverAddress]; 50 | [self.view addSubview:_shareButton]; 51 | 52 | [self startPreview]; 53 | } 54 | 55 | - (void) viewWillAppear:(BOOL)animated { 56 | [super viewWillAppear:animated]; 57 | 58 | _cameraView.frame = self.view.bounds; 59 | _serverAddress.frame = CGRectMake(50, 50, 200, 30); 60 | _shareButton.frame = CGRectMake(50, 100, 200, 30); 61 | 62 | [self testOAuthStuff]; 63 | } 64 | 65 | - (void) testOAuthStuff { 66 | [[KFAPIClient sharedClient] requestRecordingEndpoint:^(KFEndpointResponse *endpointResponse, NSError *error) { 67 | if (!error) { 68 | NSLog(@"broadcast url: %@", endpointResponse.broadcastURL); 69 | } else { 70 | NSLog(@"error getting endpoint: %@", error); 71 | } 72 | }]; 73 | } 74 | 75 | 76 | 77 | - (void) willAnimateRotationToInterfaceOrientation:(UIInterfaceOrientation)toInterfaceOrientation duration:(NSTimeInterval)duration 78 | { 79 | // this is not the most beautiful animation... 
80 | AVCaptureVideoPreviewLayer* preview = [[CameraServer server] getPreviewLayer]; 81 | preview.frame = self.cameraView.bounds; 82 | [[preview connection] setVideoOrientation:toInterfaceOrientation]; 83 | } 84 | 85 | - (void) startPreview 86 | { 87 | AVCaptureVideoPreviewLayer* preview = [[CameraServer server] getPreviewLayer]; 88 | [preview removeFromSuperlayer]; 89 | preview.frame = self.cameraView.bounds; 90 | [[preview connection] setVideoOrientation:UIInterfaceOrientationPortrait]; 91 | 92 | [self.cameraView.layer addSublayer:preview]; 93 | 94 | self.serverAddress.text = [[CameraServer server] getURL]; 95 | } 96 | 97 | - (void)didReceiveMemoryWarning 98 | { 99 | [super didReceiveMemoryWarning]; 100 | // Dispose of any resources that can be recreated. 101 | } 102 | 103 | 104 | 105 | @end 106 | -------------------------------------------------------------------------------- /FFmpegEncoder/H264Encoder/LICENSE.markdown: -------------------------------------------------------------------------------- 1 | # GDCL Source Code License 2 | 3 | Last updated: 20th February 2013 4 | 5 | **License Agreement for Source Code provided by GDCL** 6 | 7 | This software is supplied to you by Geraint Davies Consulting Ltd ('GDCL') in consideration of your agreement to the following terms, and your use, installation, modification or redistribution of this software constitutes acceptance of these terms. If you do not agree with these terms, please do not use, install, modify or redistribute this software. 
8 | 9 | In consideration of your agreement to abide by the following terms, and subject to these terms, GDCL grants you a personal, non-exclusive license, to use, reproduce, modify and redistribute the software, with or without modifications, in source and/or binary forms; provided that if you redistribute the software in its entirety and without modifications, you must retain this notice and the following text and disclaimers in all such redistributions of the software, and that in all cases attribution of GDCL as the original author of the source code shall be included in all such resulting software products or distributions. 10 | 11 | Neither the name, trademarks, service marks or logos of Geraint Davies or GDCL may be used to endorse or promote products derived from the software without specific prior written permission from GDCL. Except as expressly stated in this notice, no other rights or licenses, express or implied, are granted by GDCL herein, including but not limited to any patent rights that may be infringed by your derivative works or by other works in which the software may be incorporated. 12 | 13 | The software is provided by GDCL on an "AS IS" basis. GDCL MAKE NO WARRANTIES, EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION THE IMPLIED WARRANTIES OF NON-INFRINGEMENT, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE, REGARDING THE SOFTWARE OR ITS USE AND OPERATION ALONE OR IN COMBINATION WITH YOUR PRODUCTS. 14 | 15 | IN NO EVENT SHALL GDCL BE LIABLE FOR ANY SPECIAL, INDIRECT, INCIDENTAL OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) ARISING IN ANY WAY OUT OF THE USE, REPRODUCTION, MODIFICATION AND/OR DISTRIBUTION OF THE SOFTWARE, HOWEVER CAUSED AND WHETHER UNDER THEORY OF CONTRACT, TORT (INCLUDING NEGLIGENCE), STRICT LIABILITY OR OTHERWISE, EVEN IF GDCL HAVE BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
-------------------------------------------------------------------------------- /FFmpegEncoder/H264Encoder/MP4Atom.h: -------------------------------------------------------------------------------- 1 | // 2 | // MP4Atom.h 3 | // Encoder Demo 4 | // 5 | // Created by Geraint Davies on 15/01/2013. 6 | // Copyright (c) 2013 GDCL http://www.gdcl.co.uk/license.htm 7 | // 8 | 9 | #import 10 | 11 | @interface MP4Atom : NSObject 12 | 13 | { 14 | NSFileHandle* _file; 15 | int64_t _offset; 16 | int64_t _length; 17 | OSType _type; 18 | int64_t _nextChild; 19 | } 20 | @property OSType type; 21 | @property int64_t length; 22 | 23 | + (MP4Atom*) atomAt:(int64_t) offset size:(int) length type:(OSType) fourcc inFile:(NSFileHandle*) handle; 24 | - (BOOL) init:(int64_t) offset size:(int) length type:(OSType) fourcc inFile:(NSFileHandle*) handle; 25 | - (NSData*) readAt:(int64_t) offset size:(int) length; 26 | - (BOOL) setChildOffset:(int64_t) offset; 27 | - (MP4Atom*) nextChild; 28 | - (MP4Atom*) childOfType:(OSType) fourcc startAt:(int64_t) offset; 29 | 30 | @end 31 | -------------------------------------------------------------------------------- /FFmpegEncoder/H264Encoder/MP4Atom.m: -------------------------------------------------------------------------------- 1 | // 2 | // MP4Atom.m 3 | // Encoder Demo 4 | // 5 | // Created by Geraint Davies on 15/01/2013. 
6 | // Copyright (c) 2013 GDCL http://www.gdcl.co.uk/license.htm 7 | // 8 | 9 | #import "MP4Atom.h" 10 | 11 | static unsigned int to_host(unsigned char* p) 12 | { 13 | return (p[0] << 24) + (p[1] << 16) + (p[2] << 8) + p[3]; 14 | } 15 | 16 | @implementation MP4Atom 17 | 18 | @synthesize type = _type; 19 | @synthesize length = _length; 20 | 21 | + (MP4Atom*) atomAt:(int64_t) offset size:(int) length type:(OSType) fourcc inFile:(NSFileHandle*) handle 22 | { 23 | MP4Atom* atom = [MP4Atom alloc]; 24 | if (![atom init:offset size:length type:fourcc inFile:handle]) 25 | { 26 | return nil; 27 | } 28 | return atom; 29 | } 30 | 31 | - (BOOL) init:(int64_t) offset size:(int) length type:(OSType) fourcc inFile:(NSFileHandle*) handle 32 | { 33 | _file = handle; 34 | _offset = offset; 35 | _length = length; 36 | _type = fourcc; 37 | _nextChild = 0; 38 | 39 | return YES; 40 | } 41 | 42 | - (NSData*) readAt:(int64_t) offset size:(int) length 43 | { 44 | [_file seekToFileOffset:_offset + offset]; 45 | return [_file readDataOfLength:length]; 46 | } 47 | 48 | - (BOOL) setChildOffset:(int64_t) offset 49 | { 50 | _nextChild = offset; 51 | return YES; 52 | } 53 | 54 | - (MP4Atom*) nextChild 55 | { 56 | if (_nextChild <= (_length - 8)) 57 | { 58 | [_file seekToFileOffset:_offset + _nextChild]; 59 | NSData* data = [_file readDataOfLength:8]; 60 | int cHeader = 8; 61 | unsigned char* p = (unsigned char*) [data bytes]; 62 | int64_t len = to_host(p); 63 | OSType fourcc = to_host(p + 4); 64 | if (len == 1) 65 | { 66 | // 64-bit extended length 67 | cHeader+= 8; 68 | data = [_file readDataOfLength:8]; 69 | p = (unsigned char*) [data bytes]; 70 | len = to_host(p); 71 | len = (len << 32) + to_host(p + 4); 72 | } 73 | else if (len == 0) 74 | { 75 | // whole remaining parent space 76 | len = _length - _nextChild; 77 | } 78 | if (fourcc == (OSType)('uuid')) 79 | { 80 | cHeader += 16; 81 | } 82 | if ((len < 0) || ((len + _nextChild) > _length)) 83 | { 84 | return nil; 85 | } 86 | int64_t offset = 
_nextChild + cHeader; 87 | _nextChild += len; 88 | len -= cHeader; 89 | return [MP4Atom atomAt:offset+_offset size:len type:fourcc inFile:_file]; 90 | } 91 | return nil; 92 | } 93 | 94 | - (MP4Atom*) childOfType:(OSType) fourcc startAt:(int64_t) offset 95 | { 96 | [self setChildOffset:offset]; 97 | MP4Atom* child = nil; 98 | do { 99 | child = [self nextChild]; 100 | } while ((child != nil) && (child.type != fourcc)); 101 | return child; 102 | } 103 | 104 | @end 105 | -------------------------------------------------------------------------------- /FFmpegEncoder/H264Encoder/NALUnit.cpp: -------------------------------------------------------------------------------- 1 | 2 | // 3 | // NALUnit.cpp 4 | // 5 | // Implementation of Basic parsing of H.264 NAL Units 6 | // 7 | // Geraint Davies, March 2004 8 | // 9 | // Copyright (c) GDCL 2004-2008 http://www.gdcl.co.uk/license.htm 10 | 11 | 12 | 13 | #include "NALUnit.h" 14 | 15 | 16 | // --- core NAL Unit implementation ------------------------------ 17 | 18 | NALUnit::NALUnit() 19 | : m_pStart(NULL), 20 | m_cBytes(0) 21 | { 22 | } 23 | 24 | bool 25 | NALUnit::GetStartCode(const BYTE*& pBegin, const BYTE*& pStart, int& cRemain) 26 | { 27 | // start code is any number of 00 followed by 00 00 01 28 | // We need to record the first 00 in pBegin and the first byte 29 | // following the startcode in pStart. 30 | // if no start code is found, pStart and cRemain should be unchanged. 
31 | 32 | const BYTE* pThis = pStart; 33 | int cBytes = cRemain; 34 | 35 | pBegin = NULL; 36 | while (cBytes>= 4) 37 | { 38 | if (pThis[0] == 0) 39 | { 40 | // remember first 00 41 | if (pBegin == NULL) 42 | { 43 | pBegin = pThis; 44 | } 45 | if ((pThis[1] == 0) && 46 | (pThis[2] == 1)) 47 | { 48 | // point to type byte of NAL unit 49 | pStart = pThis + 3; 50 | cRemain = cBytes - 3; 51 | return true; 52 | } 53 | } else { 54 | pBegin = NULL; 55 | } 56 | cBytes--; 57 | pThis++; 58 | } 59 | return false; 60 | } 61 | 62 | bool 63 | NALUnit::Parse(const BYTE* pBuffer, int cSpace, int LengthSize, bool bEnd) 64 | { 65 | // if we get the start code but not the whole 66 | // NALU, we can return false but still have the length property valid 67 | m_cBytes = 0; 68 | 69 | ResetBitstream(); 70 | 71 | if (LengthSize > 0) 72 | { 73 | m_pStartCodeStart = pBuffer; 74 | 75 | if (LengthSize > cSpace) 76 | { 77 | return false; 78 | } 79 | 80 | m_cBytes = 0; 81 | for (int i = 0; i < LengthSize; i++) 82 | { 83 | m_cBytes <<= 8; 84 | m_cBytes += *pBuffer++; 85 | } 86 | 87 | if ((m_cBytes+LengthSize) <= cSpace) 88 | { 89 | m_pStart = pBuffer; 90 | return true; 91 | } 92 | } else { 93 | // this is not length-delimited: we must look for start codes 94 | const BYTE* pBegin; 95 | if (GetStartCode(pBegin, pBuffer, cSpace)) 96 | { 97 | m_pStart = pBuffer; 98 | m_pStartCodeStart = pBegin; 99 | 100 | // either we find another startcode, or we continue to the 101 | // buffer end (if this is the last block of data) 102 | if (GetStartCode(pBegin, pBuffer, cSpace)) 103 | { 104 | m_cBytes = int(pBegin - m_pStart); 105 | return true; 106 | } else if (bEnd) 107 | { 108 | // current element extends to end of buffer 109 | m_cBytes = cSpace; 110 | return true; 111 | } 112 | } 113 | } 114 | return false; 115 | } 116 | 117 | // bitwise access to data 118 | void 119 | NALUnit::ResetBitstream() 120 | { 121 | m_idx = 0; 122 | m_nBits = 0; 123 | m_cZeros = 0; 124 | } 125 | 126 | void 127 | NALUnit::Skip(int 
nBits) 128 | { 129 | if (nBits < m_nBits) 130 | { 131 | m_nBits -= nBits; 132 | } else { 133 | nBits -= m_nBits; 134 | while (nBits >= 8) 135 | { 136 | GetBYTE(); 137 | nBits -= 8; 138 | } 139 | if (nBits) 140 | { 141 | m_byte = GetBYTE(); 142 | m_nBits = 8; 143 | 144 | m_nBits -= nBits; 145 | } 146 | } 147 | } 148 | 149 | // get the next byte, removing emulation prevention bytes 150 | BYTE 151 | NALUnit::GetBYTE() 152 | { 153 | if (m_idx >= m_cBytes) 154 | { 155 | return 0; 156 | } 157 | 158 | BYTE b = m_pStart[m_idx++]; 159 | 160 | // to avoid start-code emulation, a byte 0x03 is inserted 161 | // after any 00 00 pair. Discard that here. 162 | if (b == 0) 163 | { 164 | m_cZeros++; 165 | if ((m_idx < m_cBytes) && (m_cZeros == 2) && (m_pStart[m_idx] == 0x03)) 166 | { 167 | m_idx++; 168 | m_cZeros=0; 169 | } 170 | } else { 171 | m_cZeros = 0; 172 | } 173 | return b; 174 | } 175 | 176 | unsigned long 177 | NALUnit::GetBit() 178 | { 179 | if (m_nBits == 0) 180 | { 181 | m_byte = GetBYTE(); 182 | m_nBits = 8; 183 | } 184 | m_nBits--; 185 | return (m_byte >> m_nBits) & 0x1; 186 | } 187 | 188 | unsigned long 189 | NALUnit::GetWord(int nBits) 190 | { 191 | unsigned long u = 0; 192 | while (nBits > 0) 193 | { 194 | u <<= 1; 195 | u |= GetBit(); 196 | nBits--; 197 | } 198 | return u; 199 | } 200 | 201 | unsigned long 202 | NALUnit::GetUE() 203 | { 204 | // Exp-Golomb entropy coding: leading zeros, then a one, then 205 | // the data bits. The number of leading zeros is the number of 206 | // data bits, counting up from that number of 1s as the base. 207 | // That is, if you see 208 | // 0001010 209 | // You have three leading zeros, so there are three data bits (010) 210 | // counting up from a base of 111: thus 111 + 010 = 1001 = 9 211 | int cZeros = 0; 212 | while (GetBit() == 0) 213 | { 214 | cZeros++; 215 | } 216 | return GetWord(cZeros) + ((1 << cZeros)-1); 217 | } 218 | 219 | 220 | long 221 | NALUnit::GetSE() 222 | { 223 | // same as UE but signed. 
224 | // basically the unsigned numbers are used as codes to indicate signed numbers in pairs 225 | // in increasing value. Thus the encoded values 226 | // 0, 1, 2, 3, 4 227 | // mean 228 | // 0, 1, -1, 2, -2 etc 229 | 230 | unsigned long UE = GetUE(); 231 | bool bPositive = UE & 1; 232 | long SE = (UE + 1) >> 1; 233 | if (!bPositive) 234 | { 235 | SE = -SE; 236 | } 237 | return SE; 238 | } 239 | 240 | // --- sequence params parsing --------------- 241 | SeqParamSet::SeqParamSet() 242 | : m_cx(0), 243 | m_cy(0), 244 | m_FrameBits(0) 245 | { 246 | // SetRect(&m_rcFrame, 0, 0, 0, 0); 247 | } 248 | 249 | void 250 | ScalingList(int size, NALUnit* pnalu) 251 | { 252 | long lastScale = 8; 253 | long nextScale = 8; 254 | for (int j = 0 ; j < size; j++) 255 | { 256 | if (nextScale != 0) 257 | { 258 | long delta = pnalu->GetSE(); 259 | nextScale = (lastScale + delta + 256) %256; 260 | } 261 | int scaling_list_j = (nextScale == 0) ? lastScale : nextScale; 262 | lastScale = scaling_list_j; 263 | } 264 | } 265 | 266 | 267 | bool 268 | SeqParamSet::Parse(NALUnit* pnalu) 269 | { 270 | if (pnalu->Type() != NALUnit::NAL_Sequence_Params) 271 | { 272 | return false; 273 | } 274 | 275 | // with the UE/SE type encoding, we must decode all the values 276 | // to get through to the ones we want 277 | pnalu->ResetBitstream(); 278 | pnalu->Skip(8); // type 279 | m_Profile = pnalu->GetWord(8); 280 | m_Compatibility = (BYTE) pnalu->GetWord(8); 281 | m_Level = pnalu->GetWord(8); 282 | 283 | /*int seq_param_id =*/ pnalu->GetUE(); 284 | 285 | if ((m_Profile == 100) || (m_Profile == 110) || (m_Profile == 122) || (m_Profile == 144)) 286 | { 287 | int chroma_fmt = pnalu->GetUE(); 288 | if (chroma_fmt == 3) 289 | { 290 | pnalu->Skip(1); 291 | } 292 | /* int bit_depth_luma_minus8 = */ pnalu->GetUE(); 293 | /* int bit_depth_chroma_minus8 = */ pnalu->GetUE(); 294 | pnalu->Skip(1); 295 | int seq_scaling_matrix_present = pnalu->GetBit(); 296 | if (seq_scaling_matrix_present) 297 | { 298 | for (int i = 
0; i < 8; i++) 299 | { 300 | if (pnalu->GetBit()) 301 | { 302 | if (i < 6) 303 | { 304 | ScalingList(16, pnalu); 305 | } 306 | else 307 | { 308 | ScalingList(64, pnalu); 309 | } 310 | } 311 | } 312 | } 313 | } 314 | 315 | int log2_frame_minus4 = pnalu->GetUE(); 316 | m_FrameBits = log2_frame_minus4 + 4; 317 | int POCtype = pnalu->GetUE(); 318 | if (POCtype == 0) 319 | { 320 | /*int log2_poc_minus4 =*/ pnalu->GetUE(); 321 | } else if (POCtype == 1) 322 | { 323 | pnalu->Skip(1); // delta always zero 324 | /*int nsp_offset =*/ pnalu->GetSE(); 325 | /*int nsp_top_to_bottom = */ pnalu->GetSE(); 326 | int num_ref_in_cycle = pnalu->GetUE(); 327 | for (int i = 0; i < num_ref_in_cycle; i++) 328 | { 329 | /*int sf_offset =*/ pnalu->GetSE(); 330 | } 331 | } 332 | else if (POCtype != 2) 333 | { 334 | return false; 335 | } 336 | // else for POCtype == 2, no additional data in stream 337 | 338 | /*int num_ref_frames =*/ pnalu->GetUE(); 339 | /*int gaps_allowed =*/ pnalu->GetBit(); 340 | 341 | int mbs_width = pnalu->GetUE(); 342 | int mbs_height = pnalu->GetUE(); 343 | m_cx = (mbs_width+1) * 16; 344 | m_cy = (mbs_height+1) * 16; 345 | 346 | // smoke test validation of sps 347 | if ((m_cx > 2000) || (m_cy > 2000)) 348 | { 349 | return false; 350 | } 351 | 352 | // if this is false, then sizes are field sizes and need adjusting 353 | m_bFrameOnly = pnalu->GetBit() ? true : false; 354 | 355 | if (!m_bFrameOnly) 356 | { 357 | pnalu->Skip(1); // adaptive frame/field 358 | } 359 | pnalu->Skip(1); // direct 8x8 360 | 361 | #if 0 362 | SetRect(&m_rcFrame, 0, 0, 0, 0); 363 | bool bCrop = pnalu->GetBit() ? 
true : false; 364 | if (bCrop) { 365 | // get cropping rect 366 | // store as exclusive, pixel parameters relative to frame 367 | m_rcFrame.left = pnalu->GetUE() * 2; 368 | m_rcFrame.right = pnalu->GetUE() * 2; 369 | m_rcFrame.top = pnalu->GetUE() * 2; 370 | m_rcFrame.bottom = pnalu->GetUE() * 2; 371 | } 372 | 373 | if (!IsRectEmpty(&m_rcFrame)) 374 | { 375 | m_rcFrame.right = m_cx - m_rcFrame.right; 376 | m_rcFrame.bottom = m_cy - m_rcFrame.bottom; 377 | } 378 | #endif 379 | // adjust rect from 2x2 units to pixels 380 | 381 | if (!m_bFrameOnly) 382 | { 383 | // adjust heights from field to frame 384 | m_cy *= 2; 385 | #if 0 386 | m_rcFrame.top *= 2; 387 | m_rcFrame.bottom *= 2; 388 | #endif 389 | } 390 | 391 | // .. rest are not interesting yet 392 | m_nalu = *pnalu; 393 | return true; 394 | } 395 | 396 | // --- slice header -------------------- 397 | bool 398 | SliceHeader::Parse(NALUnit* pnalu) 399 | { 400 | switch(pnalu->Type()) 401 | { 402 | case NALUnit::NAL_IDR_Slice: 403 | case NALUnit::NAL_Slice: 404 | case NALUnit::NAL_PartitionA: 405 | // all these begin with a slice header 406 | break; 407 | 408 | default: 409 | return false; 410 | } 411 | 412 | // slice header has the 1-byte type, then one UE value, 413 | // then the frame number. 
414 | pnalu->ResetBitstream(); 415 | pnalu->Skip(8); // NALU type 416 | pnalu->GetUE(); // first mb in slice 417 | pnalu->GetUE(); // slice type 418 | pnalu->GetUE(); // pic param set id 419 | 420 | m_framenum = pnalu->GetWord(m_nBitsFrame); 421 | return true; 422 | } 423 | 424 | // --- SEI ---------------------- 425 | 426 | 427 | SEIMessage::SEIMessage(NALUnit* pnalu) 428 | { 429 | m_pnalu = pnalu; 430 | const BYTE* p = pnalu->Start(); 431 | p++; // nalu type byte 432 | m_type = 0; 433 | while (*p == 0xff) 434 | { 435 | m_type += 255; 436 | p++; 437 | } 438 | m_type += *p; 439 | p++; 440 | m_length = 0; 441 | while (*p == 0xff) 442 | { 443 | m_length += 255; /* FIX: was "m_type += 255;" (copy-paste bug) — the H.264 ff-byte extension for payloadSize must accumulate into m_length; the old code corrupted m_type and truncated m_length for SEI payloads >= 255 bytes */ 444 | p++; 445 | } 446 | m_length += *p; 447 | p++; 448 | m_idxPayload = int(p - m_pnalu->Start()); 449 | } 450 | 451 | avcCHeader::avcCHeader(const BYTE* header, int cBytes) 452 | { 453 | if (cBytes < 8) 454 | { 455 | return; 456 | } 457 | const BYTE* pEnd = header + cBytes; 458 | 459 | int cSeq = header[5] & 0x1f; 460 | header += 6; 461 | for (int i = 0; i < cSeq; i++) 462 | { 463 | if ((header+2) > pEnd) 464 | { 465 | return; 466 | } 467 | int cThis = (header[0] << 8) + header[1]; 468 | header += 2; 469 | if ((header+cThis) > pEnd) 470 | { 471 | return; 472 | } 473 | if (i == 0) 474 | { 475 | NALUnit n(header, cThis); 476 | m_sps = n; 477 | } 478 | header += cThis; 479 | } 480 | if ((header + 3) >= pEnd) 481 | { 482 | return; 483 | } 484 | int cPPS = header[0]; 485 | if (cPPS > 0) 486 | { 487 | int cThis = (header[1] << 8) + header[2]; 488 | header += 3; 489 | NALUnit n(header, cThis); 490 | m_pps = n; 491 | } 492 | } 493 | 494 | 495 | 496 | 497 | 498 | 499 | 500 | -------------------------------------------------------------------------------- /FFmpegEncoder/H264Encoder/NALUnit.h: -------------------------------------------------------------------------------- 1 | 2 | // 3 | // NALUnit.h 4 | // 5 | // Basic parsing of H.264 NAL Units 6 | // 7 | // Geraint Davies, March 2004 8 | // 9 | // Copyright (c) GDCL 
2004-2008 http://www.gdcl.co.uk/license.htm 10 | 11 | 12 | 13 | #pragma once 14 | 15 | typedef unsigned char BYTE; 16 | typedef unsigned long ULONG; 17 | #ifndef NULL 18 | #define NULL 0 19 | #endif 20 | 21 | class NALUnit 22 | { 23 | public: 24 | NALUnit(); 25 | NALUnit(const BYTE* pStart, int len) 26 | { 27 | m_pStart = m_pStartCodeStart = pStart; 28 | m_cBytes = len; 29 | ResetBitstream(); 30 | } 31 | virtual ~NALUnit() {} 32 | 33 | // assignment copies a pointer into a fixed buffer managed elsewhere. We do not copy the data 34 | NALUnit(const NALUnit& r) 35 | { 36 | m_pStart = r.m_pStart; 37 | m_cBytes = r.m_cBytes; 38 | ResetBitstream(); 39 | } 40 | const NALUnit& operator=(const NALUnit& r) 41 | { 42 | m_pStart = r.m_pStart; 43 | m_cBytes = r.m_cBytes; 44 | ResetBitstream(); 45 | return *this; 46 | } 47 | 48 | enum eNALType 49 | { 50 | NAL_Slice = 1, 51 | NAL_PartitionA = 2, 52 | NAL_PartitionB = 3, 53 | NAL_PartitionC = 4, 54 | NAL_IDR_Slice = 5, 55 | NAL_SEI = 6, 56 | NAL_Sequence_Params = 7, 57 | NAL_Picture_Params = 8, 58 | NAL_AUD = 9, 59 | }; 60 | 61 | // identify a NAL unit within a buffer. 62 | // If LengthSize is non-zero, it is the number of bytes 63 | // of length field we expect. Otherwise, we expect start-code 64 | // delimiters. 
65 | bool Parse(const BYTE* pBuffer, int cSpace, int LengthSize, bool bEnd); 66 | 67 | eNALType Type() 68 | { 69 | if (m_pStart == NULL) 70 | { 71 | return eNALType(0); 72 | } 73 | return eNALType(m_pStart[0] & 0x1F); 74 | } 75 | 76 | int Length() 77 | { 78 | return m_cBytes; 79 | } 80 | 81 | const BYTE* Start() 82 | { 83 | return m_pStart; 84 | } 85 | 86 | // bitwise access to data 87 | void ResetBitstream(); 88 | void Skip(int nBits); 89 | 90 | unsigned long GetWord(int nBits); 91 | unsigned long GetUE(); 92 | long GetSE(); 93 | BYTE GetBYTE(); 94 | unsigned long GetBit(); 95 | 96 | const BYTE* StartCodeStart() { return m_pStartCodeStart; } 97 | 98 | 99 | private: 100 | bool GetStartCode(const BYTE*& pBegin, const BYTE*& pStart, int& cRemain); 101 | 102 | private: 103 | const BYTE* m_pStartCodeStart; 104 | const BYTE* m_pStart; 105 | int m_cBytes; 106 | 107 | // bitstream access 108 | int m_idx; 109 | int m_nBits; 110 | BYTE m_byte; 111 | int m_cZeros; 112 | }; 113 | 114 | 115 | 116 | // simple parser for the Sequence parameter set things that we need 117 | class SeqParamSet 118 | { 119 | public: 120 | SeqParamSet(); 121 | bool Parse(NALUnit* pnalu); 122 | int FrameBits() 123 | { 124 | return m_FrameBits; 125 | } 126 | long EncodedWidth() 127 | { 128 | return m_cx; 129 | } 130 | long EncodedHeight() 131 | { 132 | return m_cy; 133 | } 134 | #if 0 135 | long CroppedWidth() 136 | { 137 | if (IsRectEmpty(&m_rcFrame)) 138 | { 139 | return EncodedWidth(); 140 | } 141 | return m_rcFrame.right - m_rcFrame.left; 142 | } 143 | long CroppedHeight() 144 | { 145 | if (IsRectEmpty(&m_rcFrame)) 146 | { 147 | return EncodedHeight(); 148 | } 149 | return m_rcFrame.bottom - m_rcFrame.top; 150 | } 151 | RECT* CropRect() 152 | { 153 | return &m_rcFrame; 154 | } 155 | #endif 156 | bool Interlaced() 157 | { 158 | return !m_bFrameOnly; 159 | } 160 | unsigned int Profile() { return m_Profile; } 161 | unsigned int Level() { return m_Level; } 162 | BYTE Compat() { return m_Compatibility; 
} 163 | NALUnit* NALU() {return &m_nalu; } 164 | 165 | private: 166 | NALUnit m_nalu; 167 | int m_FrameBits; 168 | long m_cx; 169 | long m_cy; 170 | // RECT m_rcFrame; 171 | bool m_bFrameOnly; 172 | 173 | int m_Profile; 174 | int m_Level; 175 | BYTE m_Compatibility; 176 | }; 177 | 178 | // extract frame num from slice headers 179 | class SliceHeader 180 | { 181 | public: 182 | SliceHeader(int nBitsFrame) 183 | : m_framenum(0), 184 | m_nBitsFrame(nBitsFrame) 185 | { 186 | } 187 | 188 | bool Parse(NALUnit* pnalu); 189 | int FrameNum() 190 | { 191 | return m_framenum; 192 | } 193 | 194 | private: 195 | int m_framenum; 196 | int m_nBitsFrame; 197 | }; 198 | 199 | // SEI message structure 200 | class SEIMessage 201 | { 202 | public: 203 | SEIMessage(NALUnit* pnalu); 204 | int Type() { return m_type; } 205 | int Length() { return m_length; } 206 | const BYTE* Payload() { return m_pnalu->Start() + m_idxPayload; } 207 | private: 208 | NALUnit* m_pnalu; 209 | int m_type; 210 | int m_length; 211 | int m_idxPayload; 212 | }; 213 | 214 | // avcC structure from MP4 215 | class avcCHeader 216 | { 217 | public: 218 | avcCHeader(const BYTE* header, int cBytes); 219 | NALUnit* sps() { return &m_sps; } 220 | NALUnit* pps() { return &m_pps; } 221 | 222 | private: 223 | NALUnit m_sps; 224 | NALUnit m_pps; 225 | }; 226 | 227 | -------------------------------------------------------------------------------- /FFmpegEncoder/H264Encoder/RTSPClientConnection.h: -------------------------------------------------------------------------------- 1 | // 2 | // RTSPClientConnection.h 3 | // Encoder Demo 4 | // 5 | // Created by Geraint Davies on 24/01/2013. 
6 | // Copyright (c) 2013 GDCL http://www.gdcl.co.uk/license.htm 7 | // 8 | 9 | #import 10 | #import "RTSPServer.h" 11 | 12 | @interface RTSPClientConnection : NSObject 13 | 14 | 15 | + (RTSPClientConnection*) createWithSocket:(CFSocketNativeHandle) s server:(RTSPServer*) server; 16 | 17 | - (void) onVideoData:(NSArray*) data time:(double) pts; 18 | - (void) shutdown; 19 | 20 | @end 21 | -------------------------------------------------------------------------------- /FFmpegEncoder/H264Encoder/RTSPClientConnection.mm: -------------------------------------------------------------------------------- 1 | // 2 | // RTSPClientConnection.m 3 | // Encoder Demo 4 | // 5 | // Created by Geraint Davies on 24/01/2013. 6 | // Copyright (c) 2013 GDCL http://www.gdcl.co.uk/license.htm 7 | // 8 | 9 | #import "RTSPClientConnection.h" 10 | #import "RTSPMessage.h" 11 | #import "NALUnit.h" 12 | #import "arpa/inet.h" 13 | 14 | void tonet_short(uint8_t* p, unsigned short s) 15 | { 16 | p[0] = (s >> 8) & 0xff; 17 | p[1] = s & 0xff; 18 | } 19 | void tonet_long(uint8_t* p, unsigned long l) 20 | { 21 | p[0] = (l >> 24) & 0xff; 22 | p[1] = (l >> 16) & 0xff; 23 | p[2] = (l >> 8) & 0xff; 24 | p[3] = l & 0xff; 25 | } 26 | 27 | static const char* Base64Mapping = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/"; 28 | static const int max_packet_size = 1200; 29 | 30 | NSString* encodeLong(unsigned long val, int nPad) 31 | { 32 | char ch[4]; 33 | int cch = 4 - nPad; 34 | for (int i = 0; i < cch; i++) 35 | { 36 | int shift = 6 * (cch - (i+1)); 37 | int bits = (val >> shift) & 0x3f; 38 | ch[i] = Base64Mapping[bits]; 39 | } 40 | for (int i = 0; i < nPad; i++) 41 | { 42 | ch[cch + i] = '='; 43 | } 44 | NSString* s = [[NSString alloc] initWithBytes:ch length:4 encoding:NSUTF8StringEncoding]; 45 | return s; 46 | } 47 | 48 | NSString* encodeToBase64(NSData* data) 49 | { 50 | NSString* s = @""; 51 | 52 | const uint8_t* p = (const uint8_t*) [data bytes]; 53 | int cBytes = [data length]; 
54 | while (cBytes >= 3) 55 | { 56 | unsigned long val = (p[0] << 16) + (p[1] << 8) + p[2]; 57 | p += 3; 58 | cBytes -= 3; 59 | 60 | s = [s stringByAppendingString:encodeLong(val, 0)]; 61 | } 62 | if (cBytes > 0) 63 | { 64 | int nPad; 65 | unsigned long val; 66 | if (cBytes == 1) 67 | { 68 | // pad 8 bits to 2 x 6 and add 2 == 69 | nPad = 2; 70 | val = p[0] << 4; 71 | } 72 | else 73 | { 74 | // must be two bytes -- pad 16 bits to 3 x 6 and add one = 75 | nPad = 1; 76 | val = (p[0] << 8) + p[1]; 77 | val = val << 2; 78 | } 79 | s = [s stringByAppendingString:encodeLong(val, nPad)]; 80 | } 81 | return s; 82 | } 83 | 84 | enum ServerState 85 | { 86 | ServerIdle, 87 | Setup, 88 | Playing, 89 | }; 90 | 91 | @interface RTSPClientConnection () 92 | { 93 | CFSocketRef _s; 94 | RTSPServer* _server; 95 | CFRunLoopSourceRef _rls; 96 | 97 | CFDataRef _addrRTP; 98 | CFSocketRef _sRTP; 99 | CFDataRef _addrRTCP; 100 | CFSocketRef _sRTCP; 101 | NSString* _session; 102 | ServerState _state; 103 | long _packets; 104 | long _bytesSent; 105 | long _ssrc; 106 | BOOL _bFirst; 107 | 108 | // time mapping using NTP 109 | uint64_t _ntpBase; 110 | uint64_t _rtpBase; 111 | double _ptsBase; 112 | 113 | // RTCP stats 114 | long _packetsReported; 115 | long _bytesReported; 116 | NSDate* _sentRTCP; 117 | 118 | // reader reports 119 | CFSocketRef _recvRTCP; 120 | CFRunLoopSourceRef _rlsRTCP; 121 | } 122 | 123 | - (RTSPClientConnection*) initWithSocket:(CFSocketNativeHandle) s Server:(RTSPServer*) server; 124 | - (void) onSocketData:(CFDataRef)data; 125 | - (void) onRTCP:(CFDataRef) data; 126 | 127 | @end 128 | 129 | static void onSocket ( 130 | CFSocketRef s, 131 | CFSocketCallBackType callbackType, 132 | CFDataRef address, 133 | const void *data, 134 | void *info 135 | ) 136 | { 137 | RTSPClientConnection* conn = (__bridge RTSPClientConnection*)info; 138 | switch (callbackType) 139 | { 140 | case kCFSocketDataCallBack: 141 | [conn onSocketData:(CFDataRef) data]; 142 | break; 143 | 144 | default: 
145 | NSLog(@"unexpected socket event"); 146 | break; 147 | } 148 | 149 | } 150 | 151 | static void onRTCP(CFSocketRef s, 152 | CFSocketCallBackType callbackType, 153 | CFDataRef address, 154 | const void *data, 155 | void *info 156 | ) 157 | { 158 | RTSPClientConnection* conn = (__bridge RTSPClientConnection*)info; 159 | switch (callbackType) 160 | { 161 | case kCFSocketDataCallBack: 162 | [conn onRTCP:(CFDataRef) data]; 163 | break; 164 | 165 | default: 166 | NSLog(@"unexpected socket event"); 167 | break; 168 | } 169 | } 170 | 171 | @implementation RTSPClientConnection 172 | 173 | + (RTSPClientConnection*) createWithSocket:(CFSocketNativeHandle) s server:(RTSPServer*) server 174 | { 175 | RTSPClientConnection* conn = [RTSPClientConnection alloc]; 176 | if ([conn initWithSocket:s Server:server] != nil) 177 | { 178 | return conn; 179 | } 180 | return nil; 181 | } 182 | 183 | - (RTSPClientConnection*) initWithSocket:(CFSocketNativeHandle)s Server:(RTSPServer *)server 184 | { 185 | _state = ServerIdle; 186 | _server = server; 187 | CFSocketContext info; 188 | memset(&info, 0, sizeof(info)); 189 | info.info = (void*)CFBridgingRetain(self); 190 | 191 | _s = CFSocketCreateWithNative(nil, s, kCFSocketDataCallBack, onSocket, &info); 192 | 193 | _rls = CFSocketCreateRunLoopSource(nil, _s, 0); 194 | CFRunLoopAddSource(CFRunLoopGetMain(), _rls, kCFRunLoopCommonModes); 195 | 196 | return self; 197 | } 198 | 199 | - (void) onSocketData:(CFDataRef)data 200 | { 201 | if (CFDataGetLength(data) == 0) 202 | { 203 | [self tearDown]; 204 | CFSocketInvalidate(_s); 205 | _s = nil; 206 | [_server shutdownConnection:self]; 207 | return; 208 | } 209 | RTSPMessage* msg = [RTSPMessage createWithData:data]; 210 | if (msg != nil) 211 | { 212 | NSString* response = nil; 213 | NSString* cmd = msg.command; 214 | if ([cmd caseInsensitiveCompare:@"options"] == NSOrderedSame) 215 | { 216 | response = [msg createResponse:200 text:@"OK"]; 217 | response = [response stringByAppendingString:@"Server: 
AVEncoderDemo/1.0\r\n"];
                response = [response stringByAppendingString:@"Public: DESCRIBE, SETUP, TEARDOWN, PLAY, OPTIONS\r\n\r\n"];
            }
            else if ([cmd caseInsensitiveCompare:@"describe"] == NSOrderedSame)
            {
                // DESCRIBE: reply with an SDP body describing the H264 stream.
                NSString* sdp = [self makeSDP];
                response = [msg createResponse:200 text:@"OK"];
                NSString* date = [NSDateFormatter localizedStringFromDate:[NSDate date] dateStyle:NSDateFormatterLongStyle timeStyle:NSDateFormatterLongStyle];
                CFDataRef dlocaladdr = CFSocketCopyAddress(_s);
                struct sockaddr_in* localaddr = (struct sockaddr_in*) CFDataGetBytePtr(dlocaladdr);

                response = [response stringByAppendingFormat:@"Content-base: rtsp://%s/\r\n", inet_ntoa(localaddr->sin_addr)];
                CFRelease(dlocaladdr);
                // BUG FIX: cast to int — %d with NSUInteger is undefined on 64-bit.
                response = [response stringByAppendingFormat:@"Date: %@\r\nContent-Type: application/sdp\r\nContent-Length: %d\r\n\r\n", date, (int)[sdp length] ];
                response = [response stringByAppendingString:sdp];
            }
            else if ([cmd caseInsensitiveCompare:@"setup"] == NSOrderedSame)
            {
                // SETUP: extract the client's RTP/RTCP ports from the Transport header.
                NSString* transport = [msg valueForOption:@"transport"];
                NSArray* props = [transport componentsSeparatedByString:@";"];
                NSArray* ports = nil;
                for (NSString* s in props)
                {
                    // BUG FIX: RFC 2326 spells the parameter "client_port"
                    // (underscore) — as this server's own response below does.
                    // The old test compared against "client-port=" and also
                    // failed to compare the result to NSOrderedSame (which is 0,
                    // so the branch fired when the prefix did NOT match); it
                    // only worked by accident and could grab unrelated options.
                    if ([s hasPrefix:@"client_port="])
                    {
                        NSString* val = [s substringFromIndex:12];
                        ports = [val componentsSeparatedByString:@"-"];
                        break;
                    }
                }
                if ([ports count] == 2)
                {
                    int portRTP = (int)[ports[0] integerValue];
                    int portRTCP = (int)[ports[1] integerValue];

                    NSString* session_name = [self createSession:portRTP rtcp:portRTCP];
                    if (session_name != nil)
                    {
                        response = [msg createResponse:200 text:@"OK"];
                        response = [response stringByAppendingFormat:@"Session: %@\r\nTransport: RTP/AVP;unicast;client_port=%d-%d;server_port=6970-6971\r\n\r\n",
                            session_name,
portRTP,portRTCP]; 262 | } 263 | } 264 | if (response == nil) 265 | { 266 | // !! 267 | response = [msg createResponse:451 text:@"Need better error string here"]; 268 | } 269 | } 270 | else if ([cmd caseInsensitiveCompare:@"play"] == NSOrderedSame) 271 | { 272 | @synchronized(self) 273 | { 274 | if (_state != Setup) 275 | { 276 | response = [msg createResponse:451 text:@"Wrong state"]; 277 | } 278 | else 279 | { 280 | _state = Playing; 281 | _bFirst = YES; 282 | response = [msg createResponse:200 text:@"OK"]; 283 | response = [response stringByAppendingFormat:@"Session: %@\r\n\r\n", _session]; 284 | } 285 | } 286 | } 287 | else if ([cmd caseInsensitiveCompare:@"teardown"] == NSOrderedSame) 288 | { 289 | [self tearDown]; 290 | response = [msg createResponse:200 text:@"OK"]; 291 | } 292 | else 293 | { 294 | NSLog(@"RTSP method %@ not handled", cmd); 295 | response = [msg createResponse:451 text:@"Method not recognised"]; 296 | } 297 | if (response != nil) 298 | { 299 | NSData* dataResponse = [response dataUsingEncoding:NSUTF8StringEncoding]; 300 | CFSocketError e = CFSocketSendData(_s, NULL, (__bridge CFDataRef)(dataResponse), 2); 301 | if (e) 302 | { 303 | NSLog(@"send %ld", e); 304 | } 305 | } 306 | } 307 | } 308 | 309 | - (NSString*) makeSDP 310 | { 311 | NSData* config = [_server getConfigData]; 312 | 313 | avcCHeader avcC((const BYTE*)[config bytes], [config length]); 314 | SeqParamSet seqParams; 315 | seqParams.Parse(avcC.sps()); 316 | int cx = seqParams.EncodedWidth(); 317 | int cy = seqParams.EncodedHeight(); 318 | 319 | NSString* profile_level_id = [NSString stringWithFormat:@"%02x%02x%02x", seqParams.Profile(), seqParams.Compat(), seqParams.Level()]; 320 | 321 | NSData* data = [NSData dataWithBytes:avcC.sps()->Start() length:avcC.sps()->Length()]; 322 | NSString* sps = encodeToBase64(data); 323 | data = [NSData dataWithBytes:avcC.pps()->Start() length:avcC.pps()->Length()]; 324 | NSString* pps = encodeToBase64(data); 325 | 326 | // !! o=, s=, u=, c=, b=? 
control for track? 327 | unsigned long verid = random(); 328 | 329 | CFDataRef dlocaladdr = CFSocketCopyAddress(_s); 330 | struct sockaddr_in* localaddr = (struct sockaddr_in*) CFDataGetBytePtr(dlocaladdr); 331 | NSString* sdp = [NSString stringWithFormat:@"v=0\r\no=- %ld %ld IN IP4 %s\r\ns=Live stream from iOS\r\nc=IN IP4 0.0.0.0\r\nt=0 0\r\na=control:*\r\n", verid, verid, inet_ntoa(localaddr->sin_addr)]; 332 | CFRelease(dlocaladdr); 333 | 334 | int packets = (_server.bitrate / (max_packet_size * 8)) + 1; 335 | 336 | sdp = [sdp stringByAppendingFormat:@"m=video 0 RTP/AVP 96\r\nb=TIAS:%d\r\na=maxprate:%d.0000\r\na=control:streamid=1\r\n", _server.bitrate, packets]; 337 | sdp = [sdp stringByAppendingFormat:@"a=rtpmap:96 H264/90000\r\na=mimetype:string;\"video/H264\"\r\na=framesize:96 %d-%d\r\na=Width:integer;%d\r\na=Height:integer;%di\r\n", cx, cy, cx, cy]; 338 | sdp = [sdp stringByAppendingFormat:@"a=fmtp:96 packetization-mode=1;profile-level-id=%@;sprop-parameter-sets=%@,%@\r\n", profile_level_id, sps, pps]; 339 | return sdp; 340 | } 341 | 342 | - (NSString*) createSession:(int) portRTP rtcp:(int) portRTCP 343 | { 344 | // !! 
most basic possible for initial testing 345 | @synchronized(self) 346 | { 347 | CFDataRef data = CFSocketCopyPeerAddress(_s); 348 | struct sockaddr_in* paddr = (struct sockaddr_in*) CFDataGetBytePtr(data); 349 | paddr->sin_port = htons(portRTP); 350 | _addrRTP = CFDataCreate(nil, (uint8_t*) paddr, sizeof(struct sockaddr_in)); 351 | _sRTP = CFSocketCreate(nil, PF_INET, SOCK_DGRAM, IPPROTO_UDP, 0, nil, nil); 352 | 353 | paddr->sin_port = htons(portRTCP); 354 | _addrRTCP = CFDataCreate(nil, (uint8_t*) paddr, sizeof(struct sockaddr_in)); 355 | _sRTCP = CFSocketCreate(nil, PF_INET, SOCK_DGRAM, IPPROTO_UDP, 0, nil, nil); 356 | CFRelease(data); 357 | 358 | // reader reports received here 359 | CFSocketContext info; 360 | memset(&info, 0, sizeof(info)); 361 | info.info = (void*)CFBridgingRetain(self); 362 | _recvRTCP = CFSocketCreate(nil, PF_INET, SOCK_DGRAM, IPPROTO_UDP, kCFSocketDataCallBack, onRTCP, &info); 363 | 364 | struct sockaddr_in addr; 365 | addr.sin_addr.s_addr = INADDR_ANY; 366 | addr.sin_family = AF_INET; 367 | addr.sin_port = htons(6971); 368 | CFDataRef dataAddr = CFDataCreate(nil, (const uint8_t*)&addr, sizeof(addr)); 369 | CFSocketSetAddress(_recvRTCP, dataAddr); 370 | CFRelease(dataAddr); 371 | 372 | _rlsRTCP = CFSocketCreateRunLoopSource(nil, _recvRTCP, 0); 373 | CFRunLoopAddSource(CFRunLoopGetMain(), _rlsRTCP, kCFRunLoopCommonModes); 374 | 375 | // flag that setup is valid 376 | long sessionid = random(); 377 | _session = [NSString stringWithFormat:@"%ld", sessionid]; 378 | _state = Setup; 379 | _ssrc = random(); 380 | _packets = 0; 381 | _bytesSent = 0; 382 | _rtpBase = 0; 383 | 384 | _sentRTCP = nil; 385 | _packetsReported = 0; 386 | _bytesReported = 0; 387 | } 388 | return _session; 389 | } 390 | 391 | - (void) onVideoData:(NSArray*) data time:(double) pts 392 | { 393 | @synchronized(self) 394 | { 395 | if (_state != Playing) 396 | { 397 | return; 398 | } 399 | } 400 | 401 | const int rtp_header_size = 12; 402 | const int max_single_packet = 
max_packet_size - rtp_header_size; 403 | const int max_fragment_packet = max_single_packet - 2; 404 | unsigned char packet[max_packet_size]; 405 | 406 | int nNALUs = [data count]; 407 | for (int i = 0; i < nNALUs; i++) 408 | { 409 | NSData* nalu = [data objectAtIndex:i]; 410 | int cBytes = [nalu length]; 411 | BOOL bLast = (i == nNALUs-1); 412 | 413 | const unsigned char* pSource = (unsigned char*)[nalu bytes]; 414 | 415 | if (_bFirst) 416 | { 417 | if ((pSource[0] & 0x1f) != 5) 418 | { 419 | continue; 420 | } 421 | _bFirst = NO; 422 | NSLog(@"Playback starting at first IDR"); 423 | } 424 | 425 | if (cBytes < max_single_packet) 426 | { 427 | [self writeHeader:packet marker:bLast time:pts]; 428 | memcpy(packet + rtp_header_size, [nalu bytes], cBytes); 429 | [self sendPacket:packet length:(cBytes + rtp_header_size)]; 430 | } 431 | else 432 | { 433 | unsigned char NALU_Header = pSource[0]; 434 | pSource += 1; 435 | cBytes -= 1; 436 | BOOL bStart = YES; 437 | 438 | while (cBytes) 439 | { 440 | int cThis = (cBytes < max_fragment_packet)? 
cBytes : max_fragment_packet; 441 | BOOL bEnd = (cThis == cBytes); 442 | [self writeHeader:packet marker:(bLast && bEnd) time:pts]; 443 | unsigned char* pDest = packet + rtp_header_size; 444 | 445 | pDest[0] = (NALU_Header & 0xe0) + 28; // FU_A type 446 | unsigned char fu_header = (NALU_Header & 0x1f); 447 | if (bStart) 448 | { 449 | fu_header |= 0x80; 450 | bStart = false; 451 | } 452 | else if (bEnd) 453 | { 454 | fu_header |= 0x40; 455 | } 456 | pDest[1] = fu_header; 457 | pDest += 2; 458 | memcpy(pDest, pSource, cThis); 459 | pDest += cThis; 460 | [self sendPacket:packet length:(pDest - packet)]; 461 | 462 | pSource += cThis; 463 | cBytes -= cThis; 464 | } 465 | } 466 | } 467 | } 468 | 469 | - (void) writeHeader:(uint8_t*) packet marker:(BOOL) bMarker time:(double) pts 470 | { 471 | packet[0] = 0x80; // v= 2 472 | if (bMarker) 473 | { 474 | packet[1] = 96 | 0x80; 475 | } 476 | else 477 | { 478 | packet[1] = 96; 479 | } 480 | unsigned short seq = _packets & 0xffff; 481 | tonet_short(packet+2, seq); 482 | 483 | // map time 484 | while (_rtpBase == 0) 485 | { 486 | _rtpBase = random(); 487 | _ptsBase = pts; 488 | NSDate* now = [NSDate date]; 489 | // ntp is based on 1900. There's a known fixed offset from 1900 to 1970. 
NSDate* ref = [NSDate dateWithTimeIntervalSince1970:-2208988800L];
        double interval = [now timeIntervalSinceDate:ref];
        _ntpBase = (uint64_t)(interval * (1LL << 32));
    }
    pts -= _ptsBase;
    uint64_t rtp = (uint64_t)(pts * 90000);     // 90 kHz RTP clock for video
    rtp += _rtpBase;
    tonet_long(packet + 4, rtp);
    tonet_long(packet + 8, _ssrc);
}

// Send one RTP packet over UDP and, roughly once per second, an RTCP sender
// report so receivers can map the RTP timestamps to wall-clock (NTP) time.
- (void) sendPacket:(uint8_t*) packet length:(int) cBytes
{
    @synchronized(self)
    {
        if (_sRTP)
        {
            CFDataRef data = CFDataCreate(nil, packet, cBytes);
            CFSocketSendData(_sRTP, _addrRTP, data, 0);
            CFRelease(data);
        }
        _packets++;
        _bytesSent += cBytes;

        // RTCP sender report (RFC 3550 SR)
        NSDate* now = [NSDate date];
        if ((_sentRTCP == nil) || ([now timeIntervalSinceDate:_sentRTCP] >= 1))
        {
            uint8_t buf[7 * sizeof(uint32_t)];
            buf[0] = 0x80;
            buf[1] = 200;                  // payload type 200 == Sender Report
            tonet_short(buf+2, 6);         // length (count of uint32_t minus 1)
            tonet_long(buf+4, _ssrc);
            tonet_long(buf+8, (_ntpBase >> 32));
            tonet_long(buf+12, _ntpBase);
            tonet_long(buf+16, _rtpBase);
            tonet_long(buf+20, (_packets - _packetsReported));
            tonet_long(buf+24, (_bytesSent - _bytesReported));
            int lenRTCP = 28;
            if (_sRTCP)
            {
                CFDataRef dataRTCP = CFDataCreate(nil, buf, lenRTCP);
                // BUG FIX: the last CFSocketSendData argument is a timeout in
                // seconds, not a byte count — the old code passed lenRTCP (28),
                // risking a long block on a non-writable socket. The payload
                // length is already carried by dataRTCP; use 0 like the RTP
                // send above.
                CFSocketSendData(_sRTCP, _addrRTCP, dataRTCP, 0);
                CFRelease(dataRTCP);
            }

            _sentRTCP = now;
            _packetsReported = _packets;
            _bytesReported = _bytesSent;
        }
    }
}

- (void) onRTCP:(CFDataRef) data
{
    // Receiver reports are currently ignored.
    // NSLog(@"RTCP recv");
}

// Close the RTP/RTCP sockets and clear the session state.
- (void) tearDown
{
    @synchronized(self)
    {
        if (_sRTP)
        {
            CFSocketInvalidate(_sRTP);
            _sRTP = nil;
        }
        if (_sRTCP)
        {
            CFSocketInvalidate(_sRTCP);
            _sRTCP = nil;
        }
        if (_recvRTCP)
        {
            CFSocketInvalidate(_recvRTCP);
_recvRTCP = nil; 566 | } 567 | _session = nil; 568 | } 569 | } 570 | 571 | - (void) shutdown 572 | { 573 | [self tearDown]; 574 | @synchronized(self) 575 | { 576 | CFSocketInvalidate(_s); 577 | _s = nil; 578 | } 579 | } 580 | @end 581 | -------------------------------------------------------------------------------- /FFmpegEncoder/H264Encoder/RTSPMessage.h: -------------------------------------------------------------------------------- 1 | // 2 | // RTSPMessage.h 3 | // Encoder Demo 4 | // 5 | // Created by Geraint Davies on 24/01/2013. 6 | // Copyright (c) 2013 GDCL http://www.gdcl.co.uk/license.htm 7 | // 8 | 9 | #import 10 | 11 | @interface RTSPMessage : NSObject 12 | 13 | 14 | + (RTSPMessage*) createWithData:(CFDataRef) data; 15 | 16 | - (NSString*) valueForOption:(NSString*) option; 17 | - (NSString*) createResponse:(int) code text:(NSString*) desc; 18 | 19 | @property NSString* command; 20 | @property int sequence; 21 | 22 | @end 23 | -------------------------------------------------------------------------------- /FFmpegEncoder/H264Encoder/RTSPMessage.m: -------------------------------------------------------------------------------- 1 | // 2 | // RTSPMessage.m 3 | // Encoder Demo 4 | // 5 | // Created by Geraint Davies on 24/01/2013. 
// Copyright (c) 2013 GDCL http://www.gdcl.co.uk/license.htm
//

#import "RTSPMessage.h"

// Simple parser for one RTSP request: the first line is
// "<METHOD> <url> RTSP/1.0"; the remaining lines are "Name: value" headers.
@interface RTSPMessage ()

{
    NSArray* _lines;     // request split on CRLF; [0] is the request line
    NSString* _request;  // backing store for the `command` property
    int _cseq;           // backing store for the `sequence` property
}

- (RTSPMessage*) initWithData:(CFDataRef) data;

@end

@implementation RTSPMessage

@synthesize command = _request;
@synthesize sequence = _cseq;

+ (RTSPMessage*) createWithData:(CFDataRef) data
{
    RTSPMessage* msg = [[RTSPMessage alloc] initWithData:data];
    return msg;
}

// Parse the raw request bytes; returns nil if the message has no headers
// or lacks the mandatory CSeq header.
- (RTSPMessage*) initWithData:(CFDataRef) data
{
    self = [super init];
    NSString* msg = [[NSString alloc] initWithData:(__bridge NSData*)data encoding:NSUTF8StringEncoding];
    _lines = [msg componentsSeparatedByString:@"\r\n"];
    if ([_lines count] < 2)
    {
        NSLog(@"msg parse error");
        return nil;
    }
    NSArray* lineone = [[_lines objectAtIndex:0] componentsSeparatedByString:@" "];
    _request = [lineone objectAtIndex:0];
    NSString* strSeq = [self valueForOption:@"CSeq"];
    if (strSeq == nil)
    {
        NSLog(@"no cseq");
        return nil;
    }
    _cseq = [strSeq intValue];

    return self;
}

// Case-insensitive header lookup; returns the trimmed value, or nil.
- (NSString*) valueForOption:(NSString*) option
{
    for (NSUInteger i = 1; i < [_lines count]; i++)
    {
        NSString* line = [_lines objectAtIndex:i];
        // BUG FIX: split at the FIRST colon only. The old code used
        // componentsSeparatedByString:@":" and required exactly two parts,
        // so any header whose value itself contained a ':' (e.g. a URL or
        // a time value) was silently skipped.
        NSRange colon = [line rangeOfString:@":"];
        if (colon.location == NSNotFound)
        {
            continue;
        }
        NSString* name = [line substringToIndex:colon.location];
        if ([option caseInsensitiveCompare:name] == NSOrderedSame)
        {
            NSString* val = [line substringFromIndex:colon.location + 1];
            return [val stringByTrimmingCharactersInSet:[NSCharacterSet whitespaceAndNewlineCharacterSet]];
        }
    }
    return nil;
}

// Build the status line + CSeq echo for a response; callers append further
// headers and the terminating blank line themselves.
- (NSString*) createResponse:(int) code text:(NSString*) desc
{
    NSString* val = [NSString stringWithFormat:@"RTSP/1.0 %d %@\r\nCSeq: %d\r\n", code, desc, self.sequence];
    return
val; 80 | } 81 | 82 | @end 83 | -------------------------------------------------------------------------------- /FFmpegEncoder/H264Encoder/RTSPServer.h: -------------------------------------------------------------------------------- 1 | // 2 | // RTSPServer.h 3 | // Encoder Demo 4 | // 5 | // Created by Geraint Davies on 17/01/2013. 6 | // Copyright (c) 2013 GDCL http://www.gdcl.co.uk/license.htm 7 | // 8 | 9 | #import 10 | #import 11 | #include 12 | #include 13 | 14 | @interface RTSPServer : NSObject 15 | 16 | 17 | + (NSString*) getIPAddress; 18 | + (RTSPServer*) setupListener:(NSData*) configData; 19 | 20 | - (NSData*) getConfigData; 21 | - (void) onVideoData:(NSArray*) data time:(double) pts; 22 | - (void) shutdownConnection:(id) conn; 23 | - (void) shutdownServer; 24 | 25 | @property (readwrite, atomic) int bitrate; 26 | 27 | @end 28 | -------------------------------------------------------------------------------- /FFmpegEncoder/H264Encoder/RTSPServer.m: -------------------------------------------------------------------------------- 1 | // 2 | // RTSPServer.m 3 | // Encoder Demo 4 | // 5 | // Created by Geraint Davies on 17/01/2013. 
// Copyright (c) 2013 GDCL http://www.gdcl.co.uk/license.htm
//

#import "RTSPServer.h"
#import "RTSPClientConnection.h"
#import "ifaddrs.h"
#import "arpa/inet.h"

@interface RTSPServer ()

{
    CFSocketRef _listener;          // TCP accept socket on port 554
    NSMutableArray* _connections;   // live RTSPClientConnection objects
    NSData* _configData;            // avcC (SPS/PPS) config for SDP
    int _bitrate;
}

- (RTSPServer*) init:(NSData*) configData;
- (void) onAccept:(CFSocketNativeHandle) childHandle;

@end

// C-level accept callback: forwards the new native handle to the server object.
static void onSocket (
                      CFSocketRef s,
                      CFSocketCallBackType callbackType,
                      CFDataRef address,
                      const void *data,
                      void *info
                      )
{
    RTSPServer* server = (__bridge RTSPServer*)info;
    switch (callbackType)
    {
        case kCFSocketAcceptCallBack:
        {
            CFSocketNativeHandle* pH = (CFSocketNativeHandle*) data;
            [server onAccept:*pH];
            break;
        }
        default:
            NSLog(@"unexpected socket event");
            break;
    }

}

@implementation RTSPServer

@synthesize bitrate = _bitrate;

+ (RTSPServer*) setupListener:(NSData*) configData
{
    RTSPServer* obj = [RTSPServer alloc];
    if (![obj init:configData])
    {
        return nil;
    }
    return obj;
}

// Create the TCP listener on the standard RTSP port (554) and schedule it
// on the main run loop.
- (RTSPServer*) init:(NSData*) configData
{
    _configData = configData;
    _connections = [NSMutableArray arrayWithCapacity:10];

    CFSocketContext info;
    memset(&info, 0, sizeof(info));
    info.info = (void*)CFBridgingRetain(self);

    _listener = CFSocketCreate(nil, PF_INET, SOCK_STREAM, IPPROTO_TCP, kCFSocketAcceptCallBack, onSocket, &info);

    // must set SO_REUSEADDR in case a client is still holding this address
    int t = 1;
    setsockopt(CFSocketGetNative(_listener), SOL_SOCKET, SO_REUSEADDR, &t, sizeof(t));

    // BUG FIX: zero the whole sockaddr_in first — the old code left sin_len
    // and sin_zero as uninitialized stack garbage before binding.
    struct sockaddr_in addr;
    memset(&addr, 0, sizeof(addr));
    addr.sin_addr.s_addr = INADDR_ANY;
    addr.sin_family = AF_INET;
    addr.sin_port = htons(554);
    CFDataRef dataAddr =
CFDataCreate(nil, (const uint8_t*)&addr, sizeof(addr)); 86 | CFSocketError e = CFSocketSetAddress(_listener, dataAddr); 87 | CFRelease(dataAddr); 88 | 89 | if (e) 90 | { 91 | NSLog(@"bind error %d", (int) e); 92 | } 93 | 94 | CFRunLoopSourceRef rls = CFSocketCreateRunLoopSource(nil, _listener, 0); 95 | CFRunLoopAddSource(CFRunLoopGetMain(), rls, kCFRunLoopCommonModes); 96 | CFRelease(rls); 97 | 98 | return self; 99 | } 100 | 101 | - (NSData*) getConfigData 102 | { 103 | return _configData; 104 | } 105 | 106 | - (void) onAccept:(CFSocketNativeHandle) childHandle 107 | { 108 | RTSPClientConnection* conn = [RTSPClientConnection createWithSocket:childHandle server:self]; 109 | if (conn != nil) 110 | { 111 | @synchronized(self) 112 | { 113 | NSLog(@"Client connected"); 114 | [_connections addObject:conn]; 115 | } 116 | } 117 | 118 | } 119 | 120 | - (void) onVideoData:(NSArray*) data time:(double) pts 121 | { 122 | @synchronized(self) 123 | { 124 | for (RTSPClientConnection* conn in _connections) 125 | { 126 | [conn onVideoData:data time:pts]; 127 | } 128 | } 129 | } 130 | 131 | - (void) shutdownConnection:(id)conn 132 | { 133 | @synchronized(self) 134 | { 135 | NSLog(@"Client disconnected"); 136 | [_connections removeObject:conn]; 137 | } 138 | } 139 | 140 | - (void) shutdownServer 141 | { 142 | @synchronized(self) 143 | { 144 | for (RTSPClientConnection* conn in _connections) 145 | { 146 | [conn shutdown]; 147 | } 148 | _connections = [NSMutableArray arrayWithCapacity:10]; 149 | if (_listener != nil) 150 | { 151 | CFSocketInvalidate(_listener); 152 | _listener = nil; 153 | } 154 | } 155 | } 156 | 157 | + (NSString*) getIPAddress 158 | { 159 | NSString* address; 160 | struct ifaddrs *interfaces = nil; 161 | 162 | // get all our interfaces and find the one that corresponds to wifi 163 | if (!getifaddrs(&interfaces)) 164 | { 165 | for (struct ifaddrs* addr = interfaces; addr != NULL; addr = addr->ifa_next) 166 | { 167 | if (([[NSString stringWithUTF8String:addr->ifa_name] 
isEqualToString:@"en0"]) && 168 | (addr->ifa_addr->sa_family == AF_INET)) 169 | { 170 | struct sockaddr_in* sa = (struct sockaddr_in*) addr->ifa_addr; 171 | address = [NSString stringWithUTF8String:inet_ntoa(sa->sin_addr)]; 172 | break; 173 | } 174 | } 175 | } 176 | freeifaddrs(interfaces); 177 | return address; 178 | } 179 | 180 | @end 181 | -------------------------------------------------------------------------------- /FFmpegEncoder/H264Encoder/VideoEncoder.h: -------------------------------------------------------------------------------- 1 | // 2 | // VideoEncoder.h 3 | // Encoder Demo 4 | // 5 | // Created by Geraint Davies on 14/01/2013. 6 | // Copyright (c) 2013 GDCL http://www.gdcl.co.uk/license.htm 7 | // 8 | 9 | #import 10 | #import "AVFoundation/AVAssetWriter.h" 11 | #import "AVFoundation/AVAssetWriterInput.h" 12 | #import "AVFoundation/AVMediaFormat.h" 13 | #import "AVFoundation/AVVideoSettings.h" 14 | 15 | @interface VideoEncoder : NSObject 16 | { 17 | AVAssetWriter* _writer; 18 | AVAssetWriterInput* _writerInput; 19 | NSString* _path; 20 | } 21 | 22 | @property NSString* path; 23 | 24 | + (VideoEncoder*) encoderForPath:(NSString*) path Height:(int) height andWidth:(int) width; 25 | 26 | - (void) initPath:(NSString*)path Height:(int) height andWidth:(int) width; 27 | - (void) finishWithCompletionHandler:(void (^)(void))handler; 28 | - (BOOL) encodeFrame:(CMSampleBufferRef) sampleBuffer; 29 | 30 | 31 | @end 32 | -------------------------------------------------------------------------------- /FFmpegEncoder/H264Encoder/VideoEncoder.m: -------------------------------------------------------------------------------- 1 | // 2 | // VideoEncoder.m 3 | // Encoder Demo 4 | // 5 | // Created by Geraint Davies on 14/01/2013. 
// Copyright (c) 2013 GDCL http://www.gdcl.co.uk/license.htm
//

#import "VideoEncoder.h"

@implementation VideoEncoder

@synthesize path = _path;

+ (VideoEncoder*) encoderForPath:(NSString*) path Height:(int) height andWidth:(int) width
{
    VideoEncoder* enc = [VideoEncoder alloc];
    [enc initPath:path Height:height andWidth:width];
    return enc;
}

// Configure an AVAssetWriter producing baseline H264 into a QuickTime movie
// at `path`. Any existing file at that path is removed first.
- (void) initPath:(NSString*)path Height:(int) height andWidth:(int) width
{
    self.path = path;

    [[NSFileManager defaultManager] removeItemAtPath:self.path error:nil];
    NSURL* url = [NSURL fileURLWithPath:self.path];

    // BUG FIX: don't discard the creation error — previously a nil writer
    // failed silently and every subsequent encodeFrame: was a no-op.
    NSError* error = nil;
    _writer = [AVAssetWriter assetWriterWithURL:url fileType:AVFileTypeQuickTimeMovie error:&error];
    if (_writer == nil)
    {
        NSLog(@"failed to create asset writer: %@", error.localizedDescription);
        return;
    }
    NSDictionary* settings = @{
                               AVVideoCodecKey: AVVideoCodecH264,
                               AVVideoWidthKey: @(width),
                               AVVideoHeightKey: @(height),
                               AVVideoCompressionPropertiesKey: @{
                                       AVVideoAverageBitRateKey: @(1200000),
                                       AVVideoMaxKeyFrameIntervalKey: @(150),
                                       AVVideoProfileLevelKey: AVVideoProfileLevelH264BaselineAutoLevel,
                                       AVVideoAllowFrameReorderingKey: @NO,
                                       //AVVideoH264EntropyModeKey: AVVideoH264EntropyModeCAVLC,
                                       //AVVideoExpectedSourceFrameRateKey: @(30),
                                       //AVVideoAverageNonDroppableFrameRateKey: @(30)
                                       }
                               };
    _writerInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:settings];
    _writerInput.expectsMediaDataInRealTime = YES;
    [_writer addInput:_writerInput];
}

- (void) finishWithCompletionHandler:(void (^)(void))handler
{
    [_writer finishWritingWithCompletionHandler: handler];
}

// Append one sample buffer; the writer session is started lazily on the first
// frame's presentation timestamp. Returns YES only if the frame was appended.
- (BOOL) encodeFrame:(CMSampleBufferRef) sampleBuffer
{
    if (CMSampleBufferDataIsReady(sampleBuffer))
    {
        if (_writer.status == AVAssetWriterStatusUnknown)
        {
            CMTime startTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
            [_writer startWriting];
            [_writer
startSessionAtSourceTime:startTime]; 64 | } 65 | if (_writer.status == AVAssetWriterStatusFailed) 66 | { 67 | NSLog(@"writer error %@", _writer.error.localizedDescription); 68 | return NO; 69 | } 70 | if (_writerInput.readyForMoreMediaData == YES) 71 | { 72 | [_writerInput appendSampleBuffer:sampleBuffer]; 73 | return YES; 74 | } 75 | } 76 | return NO; 77 | } 78 | 79 | @end 80 | -------------------------------------------------------------------------------- /FFmpegEncoder/HLSUploader.h: -------------------------------------------------------------------------------- 1 | // 2 | // HLSUploader.h 3 | // FFmpegEncoder 4 | // 5 | // Created by Christopher Ballinger on 12/20/13. 6 | // Copyright (c) 2013 Christopher Ballinger. All rights reserved. 7 | // 8 | 9 | #import 10 | #import "DirectoryWatcher.h" 11 | 12 | @interface HLSUploader : NSObject 13 | 14 | @property (nonatomic, strong) DirectoryWatcher *directoryWatcher; 15 | @property (nonatomic, strong) NSString *directoryPath; 16 | @property (nonatomic, strong) NSMutableDictionary *files; 17 | @property (nonatomic, strong) NSString *manifestPath; 18 | @property (nonatomic, strong) NSString *remoteFolderName; 19 | @property (nonatomic) dispatch_queue_t scanningQueue; 20 | 21 | - (id) initWithDirectoryPath:(NSString*)directoryPath remoteFolderName:(NSString*)remoteFolderName; 22 | 23 | - (NSURL*) manifestURL; 24 | 25 | @end 26 | -------------------------------------------------------------------------------- /FFmpegEncoder/HLSUploader.m: -------------------------------------------------------------------------------- 1 | // 2 | // HLSUploader.m 3 | // FFmpegEncoder 4 | // 5 | // Created by Christopher Ballinger on 12/20/13. 6 | // Copyright (c) 2013 Christopher Ballinger. All rights reserved. 
//

#import "HLSUploader.h"
#import "OWSharedS3Client.h"

// S3 bucket receiving all live-stream segments and manifests.
static NSString * const kBucketName = @"openwatch-livestreamer";

// Keys into the per-segment info dictionaries stored in queuedSegments.
static NSString * const kManifestKey = @"manifest";
static NSString * const kFileNameKey = @"fileName";

// Per-file upload states tracked in `files`.
static NSString * const kUploadStateQueued = @"queued";
static NSString * const kUploadStateFinished = @"finished";
static NSString * const kUploadStateUploading = @"uploading";

@interface HLSUploader()
// Character offset where the numeric index begins in a segment file prefix.
@property (nonatomic) NSUInteger numbersOffset;
// segmentIndex (NSNumber) -> @{kManifestKey, kFileNameKey}
@property (nonatomic, strong) NSMutableDictionary *queuedSegments;
@property (nonatomic) NSUInteger nextSegmentIndexToUpload;
@end

@implementation HLSUploader

- (id) initWithDirectoryPath:(NSString *)directoryPath remoteFolderName:(NSString *)remoteFolderName {
    if (self = [super init]) {
        _directoryPath = [directoryPath copy];
        _directoryWatcher = [DirectoryWatcher watchFolderWithPath:_directoryPath delegate:self];
        _files = [NSMutableDictionary dictionary];
        _remoteFolderName = [remoteFolderName copy];
        _scanningQueue = dispatch_queue_create("Scanning Queue", DISPATCH_QUEUE_SERIAL);
        _queuedSegments = [NSMutableDictionary dictionaryWithCapacity:5];
        _numbersOffset = 0;
        _nextSegmentIndexToUpload = 0;
    }
    return self;
}

// Uploads the next queued segment in order, then recurses on success to
// drain the queue. The newest file is assumed to still be written by the
// segmenter and is never uploaded. Must run on the scanning queue.
- (void) uploadNextSegment {
    // Fixed: %d is wrong for NSUInteger; use %lu with explicit casts.
    NSLog(@"nextSegmentIndexToUpload: %lu, segmentCount: %lu, queuedSegments: %lu", (unsigned long)_nextSegmentIndexToUpload, (unsigned long)self.files.count, (unsigned long)self.queuedSegments.count);
    // Fixed: guard the empty case — `self.files.count - 1` underflows for an
    // unsigned count of 0, defeating the "don't touch the newest file" check.
    if (self.files.count == 0 || _nextSegmentIndexToUpload >= self.files.count - 1) {
        NSLog(@"Cannot upload file currently being recorded at index: %lu", (unsigned long)_nextSegmentIndexToUpload);
        return;
    }
    NSDictionary *segmentInfo = [_queuedSegments objectForKey:@(_nextSegmentIndexToUpload)];
    NSString *manifest = [segmentInfo objectForKey:kManifestKey];
    NSString *fileName = [segmentInfo objectForKey:kFileNameKey];
    NSString *fileUploadState = [_files objectForKey:fileName];
    if (![fileUploadState isEqualToString:kUploadStateQueued]) {
        NSLog(@"Trying to upload file that isn't queued (%@): %@", fileUploadState, segmentInfo);
        return;
    }
    [_files setObject:kUploadStateUploading forKey:fileName];
    NSString *filePath = [_directoryPath stringByAppendingPathComponent:fileName];
    NSString *key = [NSString stringWithFormat:@"%@/%@", _remoteFolderName, fileName];
    [[OWSharedS3Client sharedClient] postObjectWithFile:filePath bucket:kBucketName key:key acl:@"public-read" success:^(S3PutObjectResponse *responseObject) {
        // Hop back onto the scanning queue before touching shared state.
        dispatch_async(_scanningQueue, ^{
            NSLog(@"Uploaded %@", fileName);
            [_files setObject:kUploadStateFinished forKey:fileName];
            NSError *error = nil;
            // Delete the local copy once it is safely on S3.
            [[NSFileManager defaultManager] removeItemAtPath:filePath error:&error];
            if (error) {
                NSLog(@"Error removing uploaded segment: %@", error.description);
            }
            [_queuedSegments removeObjectForKey:@(_nextSegmentIndexToUpload)];
            // Publish the manifest as it stood when this segment was cut.
            [self updateManifestWithString:manifest];
            _nextSegmentIndexToUpload++;
            [self uploadNextSegment];
        });
    } failure:^(NSError *error) {
        dispatch_async(_scanningQueue, ^{
            // Requeue so the next scan retries this segment.
            [_files setObject:kUploadStateQueued forKey:fileName];
            NSLog(@"Failed to upload segment, requeuing %@: %@", fileName, error.description);
            [self uploadNextSegment];
        });
    }];
}

// Uploads a manifest snapshot to S3 under the remote folder, replacing the
// previous manifest so players see the newly-finished segment.
- (void) updateManifestWithString:(NSString*)manifestString {
    NSData *data = [manifestString dataUsingEncoding:NSUTF8StringEncoding];
    NSString *key = [NSString stringWithFormat:@"%@/%@", _remoteFolderName, [_manifestPath lastPathComponent]];
    [[OWSharedS3Client sharedClient] postObjectWithData:data bucket:kBucketName key:key acl:@"public-read" success:^(S3PutObjectResponse *responseObject) {
        NSLog(@"Manifest updated");
    } failure:^(NSError *error) {
        NSLog(@"Error updating manifest: %@", error.description);
    }];
}

#pragma mark - DirectoryWatcherDelegate

// Called whenever the watched folder changes; rescans on the serial queue so
// segment detection and upload bookkeeping never race.
- (void) directoryDidChange:(DirectoryWatcher *)folderWatcher {
    dispatch_async(_scanningQueue, ^{
        NSError *error = nil;
        NSArray *files = [[NSFileManager defaultManager] contentsOfDirectoryAtPath:_directoryPath error:&error];
        NSLog(@"Directory changed, fileCount: %lu", (unsigned long)files.count);
        if (error) {
            // Fixed: include the underlying NSError instead of a bare message.
            NSLog(@"Error listing directory contents: %@", error);
        }
        if (!_manifestPath) {
            [self initializeManifestPathFromFiles:files];
        }
        [self detectNewSegmentsFromFiles:files];
    });
}

// Queues any .ts files not seen before, snapshotting the manifest as it
// stood when the segment appeared. Must run on the scanning queue.
- (void) detectNewSegmentsFromFiles:(NSArray*)files {
    if (!_manifestPath) {
        NSLog(@"Manifest path not yet available");
        return;
    }
    [files enumerateObjectsUsingBlock:^(NSString *fileName, NSUInteger idx, BOOL *stop) {
        NSArray *components = [fileName componentsSeparatedByString:@"."];
        NSString *filePrefix = [components firstObject];
        NSString *fileExtension = [components lastObject];
        if ([fileExtension isEqualToString:@"ts"]) {
            NSString *uploadState = [_files objectForKey:fileName];
            if (!uploadState) {
                NSString *manifestSnapshot = [self manifestSnapshot];
                // Fixed: a nil snapshot (manifest read race) would crash the
                // dictionary literal below; fall back to an empty manifest.
                if (!manifestSnapshot) {
                    manifestSnapshot = @"";
                }
                NSUInteger segmentIndex = [self indexForFilePrefix:filePrefix];
                NSDictionary *segmentInfo = @{kManifestKey: manifestSnapshot,
                                              kFileNameKey: fileName};
                NSLog(@"new file detected: %@", fileName);
                [_files setObject:kUploadStateQueued forKey:fileName];
                [_queuedSegments setObject:segmentInfo forKey:@(segmentIndex)];
                [self uploadNextSegment];
            }
        }
    }];
}

// Locates the .m3u8 written by the segmenter and records where the numeric
// suffix begins in segment file names (e.g. offset 3 for "hls" in "hls42.ts",
// assuming segments share the manifest's prefix).
- (void) initializeManifestPathFromFiles:(NSArray*)files {
    [files enumerateObjectsUsingBlock:^(NSString *fileName, NSUInteger idx, BOOL *stop) {
        if ([[fileName pathExtension] isEqualToString:@"m3u8"]) {
            NSArray *components = [fileName componentsSeparatedByString:@"."];
            NSString *filePrefix = [components firstObject];
            _manifestPath = [_directoryPath stringByAppendingPathComponent:fileName];
            _numbersOffset = filePrefix.length;
            NSAssert(_numbersOffset > 0, nil);
            *stop = YES;
        }
    }];
}

// Current on-disk manifest contents, or nil if it cannot be read.
- (NSString*) manifestSnapshot {
    return [NSString stringWithContentsOfFile:_manifestPath encoding:NSUTF8StringEncoding error:nil];
}

// Extracts the numeric segment index from a file prefix like "hls42".
- (NSUInteger) indexForFilePrefix:(NSString*)filePrefix {
    NSString *numbers = [filePrefix substringFromIndex:_numbersOffset];
    return [numbers integerValue];
}

// Public S3 URL where the uploaded manifest can be fetched.
- (NSURL*) manifestURL {
    NSString *urlString = [NSString stringWithFormat:@"http://%@.s3.amazonaws.com/%@/%@", kBucketName, _remoteFolderName, [_manifestPath lastPathComponent]];
    return [NSURL URLWithString:urlString];
}

@end
--------------------------------------------------------------------------------
/FFmpegEncoder/HLSWriter.h:
--------------------------------------------------------------------------------
//
//  HLSWriter.h
//  FFmpegEncoder
//
//  Created by Christopher Ballinger on 12/16/13.
//  Copyright (c) 2013 Christopher Ballinger. All rights reserved.
7 | // 8 | 9 | #import 10 | 11 | @interface HLSWriter : NSObject 12 | 13 | @property (nonatomic, strong) NSString *uuid; 14 | @property (nonatomic) dispatch_queue_t conversionQueue; 15 | @property (nonatomic, strong, readonly) NSString *directoryPath; 16 | 17 | - (id) initWithDirectoryPath:(NSString*)directoryPath; 18 | 19 | - (void) addVideoStreamWithWidth:(int)width height:(int)height; 20 | - (void) addAudioStreamWithSampleRate:(int)sampleRate; 21 | 22 | - (BOOL) prepareForWriting:(NSError**)error; 23 | 24 | - (void) processEncodedData:(NSData*)data presentationTimestamp:(double)pts streamIndex:(NSUInteger)streamIndex; 25 | 26 | - (BOOL) finishWriting:(NSError**)error; 27 | 28 | @end 29 | -------------------------------------------------------------------------------- /FFmpegEncoder/HLSWriter.m: -------------------------------------------------------------------------------- 1 | // 2 | // HLSWriter.m 3 | // FFmpegEncoder 4 | // 5 | // Created by Christopher Ballinger on 12/16/13. 6 | // Copyright (c) 2013 Christopher Ballinger. All rights reserved. 
//

#import "HLSWriter.h"
#import "FFOutputFile.h"
#import "FFmpegWrapper.h"
#import "libavformat/avformat.h"
#import "libavcodec/avcodec.h"
#import "libavutil/opt.h"

@interface HLSWriter()
@property (nonatomic, strong) FFOutputFile *outputFile;
@property (nonatomic, strong) FFOutputStream *videoStream;
@property (nonatomic, strong) FFOutputStream *audioStream;
// Reusable packet, av_malloc'd once in -init and released in -dealloc.
@property (nonatomic) AVPacket *packet;
// Incoming timestamps are expressed in nanoseconds (1/1e9 time base).
@property (nonatomic) AVRational videoTimeBase;
@property (nonatomic) AVRational audioTimeBase;
@property (nonatomic) NSUInteger segmentDurationSeconds;
@end

@implementation HLSWriter

- (id) initWithDirectoryPath:(NSString *)directoryPath {
    if (self = [super init]) {
        av_register_all();
        avformat_network_init();
        avcodec_register_all();
        _directoryPath = directoryPath;
        _packet = av_malloc(sizeof(AVPacket));
        _videoTimeBase.num = 1;
        _videoTimeBase.den = 1000000000;
        _audioTimeBase.num = 1;
        _audioTimeBase.den = 1000000000;
        _segmentDurationSeconds = 10;
        [self setupOutputFile];
        _conversionQueue = dispatch_queue_create("HLS Write queue", DISPATCH_QUEUE_SERIAL);
        _uuid = [[NSUUID UUID] UUIDString];
    }
    return self;
}

- (void) dealloc {
    // Fixed: the manually allocated AVPacket was leaked — ARC does not
    // manage av_malloc'd memory.
    if (_packet) {
        av_free(_packet);
        _packet = NULL;
    }
}

// Creates the FFmpeg output file targeting the "hls" muxer.
- (void) setupOutputFile {
    NSString *outputPath = [_directoryPath stringByAppendingPathComponent:@"hls.m3u8"];
    _outputFile = [[FFOutputFile alloc] initWithPath:outputPath options:@{kFFmpegOutputFormatKey: @"hls"}];

    //FFBitstreamFilter *bitstreamFilter = [[FFBitstreamFilter alloc] initWithFilterName:@"h264_mp4toannexb"];
    //[_outputFile addBitstreamFilter:bitstreamFilter];
}

- (void) addVideoStreamWithWidth:(int)width height:(int)height {
    _videoStream = [[FFOutputStream alloc] initWithOutputFile:_outputFile outputCodec:@"h264"];
    [_videoStream setupVideoContextWithWidth:width height:height];
    // Target segment length for the hls muxer's private options.
    av_opt_set_int(_outputFile.formatContext->priv_data, "hls_time", _segmentDurationSeconds, 0);
}

- (void) addAudioStreamWithSampleRate:(int)sampleRate {
    _audioStream = [[FFOutputStream alloc] initWithOutputFile:_outputFile outputCodec:@"aac"];
    [_audioStream setupAudioContextWithSampleRate:sampleRate];
}

- (BOOL) prepareForWriting:(NSError *__autoreleasing *)error {
    // Open the output file for writing and write header
    if (![_outputFile openFileForWritingWithError:error]) {
        return NO;
    }
    if (![_outputFile writeHeaderWithError:error]) {
        return NO;
    }
    return YES;
}

// Rescales `pts` (seconds) into the destination stream's time base and
// writes the packet on the serial conversion queue.
- (void) processEncodedData:(NSData*)data presentationTimestamp:(double)pts streamIndex:(NSUInteger)streamIndex {
    dispatch_async(_conversionQueue, ^{
        av_init_packet(_packet);

        uint64_t originalPTS = (uint64_t)(1000000000 * pts);
        //NSLog(@"*** Writing packet at %lld", originalPTS);

        // `data` is retained by this block, so the byte pointer stays valid
        // for the duration of the write.
        _packet->data = (uint8_t*)data.bytes;
        _packet->size = (int)data.length;
        // Fixed: AVPacket.stream_index is a plain int; cast explicitly.
        _packet->stream_index = (int)streamIndex;
        uint64_t scaledPTS = av_rescale_q(originalPTS, _videoTimeBase, _outputFile.formatContext->streams[_packet->stream_index]->time_base);
        //NSLog(@"*** Scaled PTS: %lld", scaledPTS);

        _packet->pts = scaledPTS;
        _packet->dts = scaledPTS;
        NSError *error = nil;
        [_outputFile writePacket:_packet error:&error];
        if (error) {
            // Fixed: %d is wrong for NSUInteger; use %lu with a cast.
            NSLog(@"Error writing packet at streamIndex %lu and PTS %lld: %@", (unsigned long)streamIndex, originalPTS, error.description);
        } else {
            //NSLog(@"Wrote packet of length %d at streamIndex %d and PTS %lld", data.length, streamIndex, originalPTS);
        }
    });
}

- (BOOL) finishWriting:(NSError *__autoreleasing *)error {
    return [_outputFile writeTrailerWithError:error];
}

@end
--------------------------------------------------------------------------------
/FFmpegEncoder/Images.xcassets/AppIcon.appiconset/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "images" : [ 3 | { 4 | "idiom" : "iphone", 5 | "size" : "29x29", 6 | "scale" : "2x" 7 | }, 8 | { 9 | "idiom" : "iphone", 10 | "size" : "40x40", 11 | "scale" : "2x" 12 | }, 13 | { 14 | "idiom" : "iphone", 15 | "size" : "60x60", 16 | "scale" : "2x" 17 | }, 18 | { 19 | "idiom" : "ipad", 20 | "size" : "29x29", 21 | "scale" : "1x" 22 | }, 23 | { 24 | "idiom" : "ipad", 25 | "size" : "29x29", 26 | "scale" : "2x" 27 | }, 28 | { 29 | "idiom" : "ipad", 30 | "size" : "40x40", 31 | "scale" : "1x" 32 | }, 33 | { 34 | "idiom" : "ipad", 35 | "size" : "40x40", 36 | "scale" : "2x" 37 | }, 38 | { 39 | "idiom" : "ipad", 40 | "size" : "76x76", 41 | "scale" : "1x" 42 | }, 43 | { 44 | "idiom" : "ipad", 45 | "size" : "76x76", 46 | "scale" : "2x" 47 | } 48 | ], 49 | "info" : { 50 | "version" : 1, 51 | "author" : "xcode" 52 | } 53 | } -------------------------------------------------------------------------------- /FFmpegEncoder/Images.xcassets/LaunchImage.launchimage/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "images" : [ 3 | { 4 | "orientation" : "portrait", 5 | "idiom" : "iphone", 6 | "extent" : "full-screen", 7 | "minimum-system-version" : "7.0", 8 | "scale" : "2x" 9 | }, 10 | { 11 | "orientation" : "portrait", 12 | "idiom" : "iphone", 13 | "subtype" : "retina4", 14 | "extent" : "full-screen", 15 | "minimum-system-version" : "7.0", 16 | "scale" : "2x" 17 | }, 18 | { 19 | "orientation" : "portrait", 20 | "idiom" : "ipad", 21 | "extent" : "full-screen", 22 | "minimum-system-version" : "7.0", 23 | "scale" : "1x" 24 | }, 25 | { 26 | "orientation" : "landscape", 27 | "idiom" : "ipad", 28 | "extent" : "full-screen", 29 | "minimum-system-version" : "7.0", 30 | "scale" : "1x" 31 | }, 32 | { 33 | "orientation" : "portrait", 34 | "idiom" : "ipad", 35 | "extent" : "full-screen", 36 
| "minimum-system-version" : "7.0", 37 | "scale" : "2x" 38 | }, 39 | { 40 | "orientation" : "landscape", 41 | "idiom" : "ipad", 42 | "extent" : "full-screen", 43 | "minimum-system-version" : "7.0", 44 | "scale" : "2x" 45 | } 46 | ], 47 | "info" : { 48 | "version" : 1, 49 | "author" : "xcode" 50 | } 51 | } -------------------------------------------------------------------------------- /FFmpegEncoder/Kickflip/KFAPIClient.h: -------------------------------------------------------------------------------- 1 | // 2 | // KFAPIClient.h 3 | // FFmpegEncoder 4 | // 5 | // Created by Christopher Ballinger on 1/16/14. 6 | // Copyright (c) 2014 Christopher Ballinger. All rights reserved. 7 | // 8 | 9 | #import 10 | #import "KFEndpointResponse.h" 11 | #import "AFNetworking.h" 12 | 13 | @interface KFAPIClient : AFHTTPClient 14 | 15 | + (KFAPIClient*) sharedClient; 16 | 17 | - (void) requestRecordingEndpoint:(void (^)(KFEndpointResponse *endpointResponse, NSError *error))endpointCallback; 18 | 19 | @end 20 | -------------------------------------------------------------------------------- /FFmpegEncoder/Kickflip/KFAPIClient.m: -------------------------------------------------------------------------------- 1 | // 2 | // KFAPIClient.m 3 | // FFmpegEncoder 4 | // 5 | // Created by Christopher Ballinger on 1/16/14. 6 | // Copyright (c) 2014 Christopher Ballinger. All rights reserved. 
//

#import "KFAPIClient.h"
#import "OWSecrets.h"
#import "AFOAuth2Client.h"
#import "KFLogging.h"
#import "KFUser.h"
#import "KFS3EndpointResponse.h"

// Error domain for server responses that cannot be parsed into a user.
static NSString* const kKFAPIClientErrorDomain = @"kKFAPIClientErrorDomain";

@implementation KFAPIClient

+ (KFAPIClient*) sharedClient {
    static KFAPIClient *_sharedClient = nil;
    static dispatch_once_t onceToken;
    dispatch_once(&onceToken, ^{
        _sharedClient = [[KFAPIClient alloc] init];
    });
    return _sharedClient;
}

- (instancetype) init {
    NSURL *url = [NSURL URLWithString:KICKFLIP_API_BASE_URL];
    if (self = [super initWithBaseURL:url]) {
        [self registerHTTPOperationClass:[AFJSONRequestOperation class]];
        [self setDefaultHeader:@"Accept" value:@"application/json"];

        // Warm up credentials and the recording endpoint in the background.
        [self checkOAuthCredentialsWithCallback:^(BOOL success, NSError *error) {
            if (success) {
                [self requestRecordingEndpoint:nil];
            }
        }];
    }
    return self;
}

// Ensures a valid (non-expired) OAuth credential is installed on this
// client's Authorization header, fetching a fresh one via the
// client-credentials grant when needed.
- (void) checkOAuthCredentialsWithCallback:(void (^)(BOOL success, NSError * error))callback {
    NSURL *url = [NSURL URLWithString:KICKFLIP_API_BASE_URL];
    AFOAuth2Client *oauthClient = [AFOAuth2Client clientWithBaseURL:url clientID:KICKFLIP_PRODUCTION_API_ID secret:KICKFLIP_PRODUCTION_API_SECRET];

    AFOAuthCredential *credential = [AFOAuthCredential retrieveCredentialWithIdentifier:oauthClient.serviceProviderIdentifier];
    if (credential && !credential.isExpired) {
        [self setAuthorizationHeaderWithCredential:credential];
        if (callback) {
            callback(YES, nil);
        }
        return;
    }

    [oauthClient authenticateUsingOAuthWithPath:@"/o/token/" parameters:@{@"grant_type": kAFOAuthClientCredentialsGrantType} success:^(AFOAuthCredential *credential) {
        // Fixed: never log the raw access token — it is a bearer secret and
        // device logs are not private.
        DDLogVerbose(@"Received new OAuth access token");
        [AFOAuthCredential storeCredential:credential withIdentifier:oauthClient.serviceProviderIdentifier];
        [self setAuthorizationHeaderWithCredential:credential];
        if (callback) {
            callback(YES, nil);
        }
    } failure:^(NSError *error) {
        if (callback) {
            callback(NO, error);
        }
    }];
}

// Installs `credential` as the default Bearer Authorization header.
- (void) setAuthorizationHeaderWithCredential:(AFOAuthCredential*)credential {
    [self setDefaultHeader:@"Authorization" value:[NSString stringWithFormat:@"Bearer %@", credential.accessToken]];
}

// Resolves a recording endpoint for the active user; creates a new user on
// the server when no local user exists yet.
- (void) requestRecordingEndpoint:(void (^)(KFEndpointResponse *, NSError *))endpointCallback {
    [self checkOAuthCredentialsWithCallback:^(BOOL success, NSError *error) {
        if (!success) {
            DDLogError(@"Error fetching OAuth credentials: %@", error);
            if (endpointCallback) {
                endpointCallback(nil, error);
            }
            return;
        }
        KFUser *activeUser = [KFUser activeUser];
        if (activeUser) { // this will change when we support RTMP
            KFS3EndpointResponse *endpointResponse = [KFS3EndpointResponse endpointResponseForUser:activeUser];
            if (endpointCallback) {
                endpointCallback(endpointResponse, nil);
            }
            return;
        }
        [self postPath:@"/api/new/user/" parameters:nil success:^(AFHTTPRequestOperation *operation, id responseObject) {
            if (responseObject && [responseObject isKindOfClass:[NSDictionary class]]) {
                NSDictionary *responseDictionary = (NSDictionary*)responseObject;
                KFUser *activeUser = [KFUser activeUserWithDictionary:responseDictionary];
                if (activeUser) {
                    KFS3EndpointResponse *endpointResponse = [KFS3EndpointResponse endpointResponseForUser:activeUser];
                    if (endpointCallback) {
                        endpointCallback(endpointResponse, nil);
                    }
                    return;
                }
            }
            // Consolidated: both the unparseable-dictionary and the
            // non-dictionary responses produce the identical parse error.
            if (endpointCallback) {
                endpointCallback(nil, [NSError errorWithDomain:kKFAPIClientErrorDomain code:100 userInfo:@{NSLocalizedDescriptionKey: @"User response error", @"operation": operation}]);
            }
        } failure:^(AFHTTPRequestOperation *operation, NSError *error) {
            if (error && endpointCallback) {
                endpointCallback(nil, error);
            }
        }];
    }];
}


@end
--------------------------------------------------------------------------------
/FFmpegEncoder/Kickflip/KFBroadcaster.h:
--------------------------------------------------------------------------------
//
//  KFBroadcaster.h
//  FFmpegEncoder
//
//  Created by Christopher Ballinger on 1/16/14.
//  Copyright (c) 2014 Christopher Ballinger. All rights reserved.
//

#import <Foundation/Foundation.h>
#import "KFRecorder.h"
#import "KFUploader.h"

@class KFBroadcaster;

// Broadcast lifecycle callbacks plus delivery of the public playback URL.
// NOTE(review): angle-bracket tokens (<Foundation/Foundation.h>, <NSObject>,
// id<KFBroadcasterDelegate>) were stripped by text extraction; restored.
@protocol KFBroadcasterDelegate <NSObject>
- (void) broadcasterDidStartBroadcasting:(KFBroadcaster*)recorder;
- (void) broadcasterDidFinishBroadcasting:(KFBroadcaster*)recorder;
- (void) broadcaster:(KFBroadcaster*)broadcaster videoReadyAtURL:(NSURL*)url;
@end

// Pairs a recorder with an uploader to run a live broadcast.
@interface KFBroadcaster : NSObject

@property (nonatomic, strong) KFRecorder *recorder;
@property (nonatomic, strong) KFUploader *uploader;
@property (nonatomic, weak) id<KFBroadcasterDelegate> delegate;

- (void) startBroadcaster;
- (void) stopBroadcaster;


@end
--------------------------------------------------------------------------------
/FFmpegEncoder/Kickflip/KFBroadcaster.m:
--------------------------------------------------------------------------------
//
//  KFBroadcaster.m
//  FFmpegEncoder
//
//  Created by Christopher Ballinger on 1/16/14.
//  Copyright (c) 2014 Christopher Ballinger. All rights reserved.
//

#import "KFBroadcaster.h"

// Stub — recorder/uploader wiring not yet implemented.
@implementation KFBroadcaster

@end
--------------------------------------------------------------------------------
/FFmpegEncoder/Kickflip/KFEndpointResponse.h:
--------------------------------------------------------------------------------
//
//  KFEndpointResponse.h
//  FFmpegEncoder
//
//  Created by Christopher Ballinger on 1/16/14.
//  Copyright (c) 2014 Christopher Ballinger. All rights reserved.
//

#import <Foundation/Foundation.h>

@class KFUser;

// Abstract base for broadcast endpoint responses (S3, RTMP) returned by
// KFAPIClient. Subclasses redeclare these readwrite internally.
// NOTE(review): the <Foundation/Foundation.h> import was stripped by text
// extraction; restored here (same fix applied to the other headers below).
@interface KFEndpointResponse : NSObject

@property (nonatomic, strong, readonly) KFUser *user;
@property (nonatomic, strong, readonly) NSString *uuid;
// Public URL where viewers can watch the broadcast.
@property (nonatomic, strong, readonly) NSURL *broadcastURL;

@end
--------------------------------------------------------------------------------
/FFmpegEncoder/Kickflip/KFEndpointResponse.m:
--------------------------------------------------------------------------------
//
//  KFEndpointResponse.m
//  FFmpegEncoder
//
//  Created by Christopher Ballinger on 1/16/14.
//  Copyright (c) 2014 Christopher Ballinger. All rights reserved.
//

#import "KFEndpointResponse.h"

@implementation KFEndpointResponse


@end
--------------------------------------------------------------------------------
/FFmpegEncoder/Kickflip/KFHLSMonitor.h:
--------------------------------------------------------------------------------
//
//  KFHLSMonitor.h
//  FFmpegEncoder
//
//  Created by Christopher Ballinger on 1/16/14.
//  Copyright (c) 2014 Christopher Ballinger. All rights reserved.
//

#import <Foundation/Foundation.h>

// Stub — will monitor HLS output directories for new segments.
@interface KFHLSMonitor : NSObject

@end
--------------------------------------------------------------------------------
/FFmpegEncoder/Kickflip/KFHLSMonitor.m:
--------------------------------------------------------------------------------
//
//  KFHLSMonitor.m
//  FFmpegEncoder
//
//  Created by Christopher Ballinger on 1/16/14.
//  Copyright (c) 2014 Christopher Ballinger. All rights reserved.
//

#import "KFHLSMonitor.h"

@implementation KFHLSMonitor

@end
--------------------------------------------------------------------------------
/FFmpegEncoder/Kickflip/KFLogging.h:
--------------------------------------------------------------------------------
//
//  KFLogging.h
//  FFmpegEncoder
//
//  Created by Christopher Ballinger on 1/22/14.
//  Copyright (c) 2014 Christopher Ballinger. All rights reserved.
//

#ifndef _KFLogging_h
#define _KFLogging_h

// Route CocoaLumberjack macros through a Kickflip-specific log level.
#ifndef LOG_LEVEL_DEF
#define LOG_LEVEL_DEF ddKickflipLogLevel
#endif

#import "DDLog.h"

// Verbose logging in debug builds, silent in release builds.
#ifdef DEBUG
static const int ddKickflipLogLevel = LOG_LEVEL_VERBOSE;
#else
static const int ddKickflipLogLevel = LOG_LEVEL_OFF;
#endif

#endif
--------------------------------------------------------------------------------
/FFmpegEncoder/Kickflip/KFPreviewView.h:
--------------------------------------------------------------------------------
//
//  KFPreviewView.h
//  FFmpegEncoder
//
//  Created by Christopher Ballinger on 1/16/14.
//  Copyright (c) 2014 Christopher Ballinger. All rights reserved.
//

#import <Foundation/Foundation.h>

// Stub — camera preview container (currently an NSObject, not a view).
@interface KFPreviewView : NSObject

@end
--------------------------------------------------------------------------------
/FFmpegEncoder/Kickflip/KFPreviewView.m:
--------------------------------------------------------------------------------
//
//  KFPreviewView.m
//  FFmpegEncoder
//
//  Created by Christopher Ballinger on 1/16/14.
//  Copyright (c) 2014 Christopher Ballinger. All rights reserved.
//

#import "KFPreviewView.h"

@implementation KFPreviewView

@end
--------------------------------------------------------------------------------
/FFmpegEncoder/Kickflip/KFRTMPEndpointResponse.h:
--------------------------------------------------------------------------------
//
//  KFRTMPEndpointResponse.h
//  FFmpegEncoder
//
//  Created by Christopher Ballinger on 1/16/14.
//  Copyright (c) 2014 Christopher Ballinger. All rights reserved.
//

#import "KFEndpointResponse.h"

// Endpoint response for RTMP ingest (not yet produced by the API client).
@interface KFRTMPEndpointResponse : KFEndpointResponse

@property (nonatomic, strong, readonly) NSURL *rtmpURL;

@end
--------------------------------------------------------------------------------
/FFmpegEncoder/Kickflip/KFRTMPEndpointResponse.m:
--------------------------------------------------------------------------------
//
//  KFRTMPEndpointResponse.m
//  FFmpegEncoder
//
//  Created by Christopher Ballinger on 1/16/14.
//  Copyright (c) 2014 Christopher Ballinger. All rights reserved.
//

#import "KFRTMPEndpointResponse.h"

@implementation KFRTMPEndpointResponse

@end
--------------------------------------------------------------------------------
/FFmpegEncoder/Kickflip/KFRecorder.h:
--------------------------------------------------------------------------------
//
//  KFRecorder.h
//  FFmpegEncoder
//
//  Created by Christopher Ballinger on 1/16/14.
//  Copyright (c) 2014 Christopher Ballinger. All rights reserved.
//

#import <Foundation/Foundation.h>
#import <AVFoundation/AVFoundation.h>

@class KFRecorder;

// Recording lifecycle callbacks.
// NOTE(review): angle-bracket tokens (framework imports, <NSObject>,
// id<KFRecorderDelegate>) were stripped by text extraction; restored —
// AVFoundation is required for AVCaptureVideoPreviewLayer below.
@protocol KFRecorderDelegate <NSObject>
- (void) recorderDidStartRecording:(KFRecorder*)recorder;
- (void) recorderDidFinishRecording:(KFRecorder*)recorder;
@end

// Captures audio/video for broadcast. Stub implementation.
@interface KFRecorder : NSObject

@property (nonatomic, weak) id<KFRecorderDelegate> delegate;

- (void) startRecording;
- (void) stopRecording;
// Layer for displaying the live camera preview.
- (AVCaptureVideoPreviewLayer*) previewLayer;


@end
--------------------------------------------------------------------------------
/FFmpegEncoder/Kickflip/KFRecorder.m:
--------------------------------------------------------------------------------
//
//  KFRecorder.m
//  FFmpegEncoder
//
//  Created by Christopher Ballinger on 1/16/14.
//  Copyright (c) 2014 Christopher Ballinger. All rights reserved.
//

#import "KFRecorder.h"

@implementation KFRecorder

@end
--------------------------------------------------------------------------------
/FFmpegEncoder/Kickflip/KFRecordingViewController.h:
--------------------------------------------------------------------------------
//
//  KFRecordingViewController.h
//  FFmpegEncoder
//
//  Created by Christopher Ballinger on 1/16/14.
//  Copyright (c) 2014 Christopher Ballinger. All rights reserved.
//

#import <UIKit/UIKit.h>
#import "KFPreviewView.h"

// View controller hosting the camera preview and record button.
@interface KFRecordingViewController : UIViewController

@property (nonatomic, strong) UIButton *recordButton;
@property (nonatomic, strong) KFPreviewView *previewView;

@end
--------------------------------------------------------------------------------
/FFmpegEncoder/Kickflip/KFRecordingViewController.m:
--------------------------------------------------------------------------------
//
//  KFRecordingViewController.m
//  FFmpegEncoder
//
//  Created by Christopher Ballinger on 1/16/14.
//  Copyright (c) 2014 Christopher Ballinger. All rights reserved.
//

#import "KFRecordingViewController.h"

@interface KFRecordingViewController ()

@end

@implementation KFRecordingViewController

- (id)initWithNibName:(NSString *)nibNameOrNil bundle:(NSBundle *)nibBundleOrNil
{
    self = [super initWithNibName:nibNameOrNil bundle:nibBundleOrNil];
    if (self) {
        // Custom initialization
    }
    return self;
}

- (void)viewDidLoad
{
    [super viewDidLoad];
    // Do any additional setup after loading the view.
}

- (void)didReceiveMemoryWarning
{
    [super didReceiveMemoryWarning];
    // Dispose of any resources that can be recreated.
}

@end
--------------------------------------------------------------------------------
/FFmpegEncoder/Kickflip/KFS3EndpointResponse.h:
--------------------------------------------------------------------------------
//
//  KFS3EndpointResponse.h
//  FFmpegEncoder
//
//  Created by Christopher Ballinger on 1/16/14.
//  Copyright (c) 2014 Christopher Ballinger. All rights reserved.
//

#import "KFEndpointResponse.h"

@class KFUser;

// Endpoint response for S3-hosted HLS broadcasts.
@interface KFS3EndpointResponse : KFEndpointResponse

// Builds a response (with a fresh broadcast UUID and playback URL) for `user`.
+ (instancetype) endpointResponseForUser:(KFUser*)user;

@end
--------------------------------------------------------------------------------
/FFmpegEncoder/Kickflip/KFS3EndpointResponse.m:
--------------------------------------------------------------------------------
//
//  KFS3EndpointResponse.m
//  FFmpegEncoder
//
//  Created by Christopher Ballinger on 1/16/14.
//  Copyright (c) 2014 Christopher Ballinger. All rights reserved.
//

#import "KFS3EndpointResponse.h"
#import "KFUser.h"

@interface KFS3EndpointResponse()
// Redeclare the superclass's readonly properties readwrite for internal use.
@property (nonatomic, strong, readwrite) NSURL *broadcastURL;
@property (nonatomic, strong, readwrite) KFUser *user;
@property (nonatomic, strong, readwrite) NSString *uuid;

@end

@implementation KFS3EndpointResponse
@synthesize broadcastURL = _broadcastURL;
@synthesize user = _user;
@synthesize uuid = _uuid;

+ (instancetype) endpointResponseForUser:(KFUser*)user {
    KFS3EndpointResponse *response = [[KFS3EndpointResponse alloc] init];
    response.user = user;
    response.uuid = [[NSUUID UUID] UUIDString];

    NSString *broadcastURLString = [NSString stringWithFormat:@"http://%@.s3.amazonaws.com/%@/%@/index.m3u8", user.appName, user.username, response.uuid]; // this should probably be done somewhere else
    response.broadcastURL = [NSURL URLWithString:broadcastURLString];
    return response;
}

@end
--------------------------------------------------------------------------------
/FFmpegEncoder/Kickflip/KFS3Uploader.h:
--------------------------------------------------------------------------------
//
//  KFS3Uploader.h
//  FFmpegEncoder
//
//  Created by Christopher Ballinger on 1/16/14.
//  Copyright (c) 2014 Christopher Ballinger.
// All rights reserved.
//

#import "KFUploader.h"
#import "KFHLSMonitor.h"

/// Uploader that pushes HLS segments to Amazon S3.
@interface KFS3Uploader : KFUploader

/// Watches the local HLS directory for new segments to upload.
@property (nonatomic, strong) KFHLSMonitor *monitor;

@end
--------------------------------------------------------------------------------
/FFmpegEncoder/Kickflip/KFS3Uploader.m:
--------------------------------------------------------------------------------
//
//  KFS3Uploader.m
//  FFmpegEncoder
//
//  Created by Christopher Ballinger on 1/16/14.
//  Copyright (c) 2014 Christopher Ballinger. All rights reserved.
//

#import "KFS3Uploader.h"

// Placeholder implementation: no overrides yet; behavior currently lives in
// KFUploader and the monitor property.
@implementation KFS3Uploader

@end
--------------------------------------------------------------------------------
/FFmpegEncoder/Kickflip/KFUploader.h:
--------------------------------------------------------------------------------
//
//  KFUploader.h
//  FFmpegEncoder
//
//  Created by Christopher Ballinger on 1/16/14.
//  Copyright (c) 2014 Christopher Ballinger. All rights reserved.
//

// NOTE(review): the export shows a bare "#import" here — the angle-bracket
// argument was stripped by the dump tool; <Foundation/Foundation.h> restored.
#import <Foundation/Foundation.h>

@class KFUploader;

// NOTE(review): <NSObject> conformance restored for the same reason — the
// export strips angle-bracketed tokens; confirm against the original source.
@protocol KFUploaderDelegate <NSObject>
/// Notifies the delegate that a finished video is available at url.
- (void) uploader:(KFUploader*)uploader videoReadyAtURL:(NSURL*)url;
@end

/// Abstract base class for upload destinations (see KFS3Uploader).
@interface KFUploader : NSObject

@end
--------------------------------------------------------------------------------
/FFmpegEncoder/Kickflip/KFUploader.m:
--------------------------------------------------------------------------------
//
//  KFUploader.m
//  FFmpegEncoder
//
//  Created by Christopher Ballinger on 1/16/14.
//  Copyright (c) 2014 Christopher Ballinger. All rights reserved.
//

#import "KFUploader.h"

// Abstract base: concrete subclasses (e.g. KFS3Uploader) supply behavior.
@implementation KFUploader

@end
--------------------------------------------------------------------------------
/FFmpegEncoder/Kickflip/KFUser.h:
--------------------------------------------------------------------------------
//
//  KFUser.h
//  FFmpegEncoder
//
//  Created by Christopher Ballinger on 1/22/14.
//  Copyright (c) 2014 Christopher Ballinger. All rights reserved.
//

// NOTE(review): restored <Foundation/Foundation.h>, which the angle-bracket
// stripping in this export dropped from the bare "#import".
#import <Foundation/Foundation.h>

/// Currently-active Kickflip account. Credentials are persisted across
/// launches: username/appName in NSUserDefaults, AWS keys in the keychain
/// (see KFUser.m).
@interface KFUser : NSObject

@property (readonly, nonatomic, strong) NSString *username;
@property (readonly, nonatomic, strong) NSString *awsSecretKey;
@property (readonly, nonatomic, strong) NSString *awsAccessKey;
@property (readonly, nonatomic, strong) NSString *appName;

/// Rehydrates the persisted user; returns nil if any field is missing.
+ (instancetype) activeUser;
/// Persists the fields from a server response dictionary and returns the
/// resulting active user (nil when a required key is absent).
+ (instancetype) activeUserWithDictionary:(NSDictionary*)dictionary;
/// Removes all persisted user state from defaults and the keychain.
+ (void) deactivateUser;

@end
--------------------------------------------------------------------------------
/FFmpegEncoder/Kickflip/KFUser.m:
--------------------------------------------------------------------------------
//
//  KFUser.m
//  FFmpegEncoder
//
//  Created by Christopher Ballinger on 1/22/14.
//  Copyright (c) 2014 Christopher Ballinger. All rights reserved.
//

#import "KFUser.h"
#import "SSKeychain.h"
#import "KFLogging.h"

// NSUserDefaults keys for the non-secret fields.
static NSString * const KFUsernameKey = @"KFUsernameKey";
static NSString * const KFKeychainServiceName = @"io.kickflip.keychainservice";
// Keychain account names for the AWS credentials.
static NSString * const KFAWSAccessKey = @"KFAWSAccessKey";
static NSString * const KFAWSSecretKey = @"KFAWSSecretKey";
static NSString * const KFAppNameKey = @"KFAppNameKey";

@interface KFUser()
@property (readwrite, nonatomic, strong) NSString *username;
@property (readwrite, nonatomic, strong) NSString *awsSecretKey;
@property (readwrite, nonatomic, strong) NSString *awsAccessKey;
@property (readwrite, nonatomic, strong) NSString *appName;
@end

@implementation KFUser

/// Rehydrates the persisted active user. Returns nil unless ALL four fields
/// (username, appName, and both AWS keys) are present in their stores.
+ (instancetype) activeUser {
    NSUserDefaults *defaults = [NSUserDefaults standardUserDefaults];
    NSString *username = [defaults objectForKey:KFUsernameKey];
    if (!username) {
        return nil;
    }
    NSString *appName = [defaults objectForKey:KFAppNameKey];
    if (!appName) {
        return nil;
    }
    // Secrets live in the keychain, not in defaults.
    NSString *awsSecretKey = [SSKeychain passwordForService:KFKeychainServiceName account:KFAWSSecretKey];
    if (!awsSecretKey) {
        return nil;
    }
    NSString *awsAccessKey = [SSKeychain passwordForService:KFKeychainServiceName account:KFAWSAccessKey];
    if (!awsAccessKey) {
        return nil;
    }

    KFUser *user = [[KFUser alloc] init];
    user.username = username;
    user.awsAccessKey = awsAccessKey;
    user.awsSecretKey = awsSecretKey;
    user.appName = appName;
    return user;
}

/// Replaces any existing active user with the fields from a server response
/// dictionary (keys: "name", "app", "aws_access_key", "aws_secret_key").
/// Returns the freshly persisted user, or nil when a field is missing or not
/// a string.
+ (instancetype) activeUserWithDictionary:(NSDictionary*)dictionary {
    [self deactivateUser];
    // Credentials must be readable by background upload tasks after the first
    // unlock of the device.
    [SSKeychain setAccessibilityType:kSecAttrAccessibleAfterFirstUnlock];
    NSUserDefaults *defaults = [NSUserDefaults standardUserDefaults];
    // Validate the class as well as presence: a JSON null arrives as NSNull,
    // and passing that to NSUserDefaults/SSKeychain below would raise.
    NSString *username = dictionary[@"name"];
    if (![username isKindOfClass:[NSString class]]) {
        DDLogError(@"username is nil!");
        return nil;
    }
    NSString *appName = dictionary[@"app"];
    if (![appName isKindOfClass:[NSString class]]) {
        DDLogError(@"appName is nil!");
        return nil;
    }
    NSString *awsAccessKey = dictionary[@"aws_access_key"];
    if (![awsAccessKey isKindOfClass:[NSString class]]) {
        DDLogError(@"awsAccessKey is nil!");
        return nil;
    }
    NSString *awsSecretKey = dictionary[@"aws_secret_key"];
    if (![awsSecretKey isKindOfClass:[NSString class]]) {
        DDLogError(@"awsSecretKey is nil!");
        return nil;
    }
    [defaults setObject:username forKey:KFUsernameKey];
    [defaults setObject:appName forKey:KFAppNameKey];
    [defaults synchronize];

    [SSKeychain setPassword:awsAccessKey forService:KFKeychainServiceName account:KFAWSAccessKey];
    [SSKeychain setPassword:awsSecretKey forService:KFKeychainServiceName account:KFAWSSecretKey];

    // Round-trip through the stores so the caller gets exactly what persisted.
    return [self activeUser];
}

/// Clears every persisted field from both NSUserDefaults and the keychain.
+ (void) deactivateUser {
    NSUserDefaults *defaults = [NSUserDefaults standardUserDefaults];
    [defaults removeObjectForKey:KFUsernameKey];
    [defaults removeObjectForKey:KFAppNameKey];
    [defaults synchronize];
    [SSKeychain deletePasswordForService:KFKeychainServiceName account:KFAWSAccessKey];
    [SSKeychain deletePasswordForService:KFKeychainServiceName account:KFAWSSecretKey];
}


@end
--------------------------------------------------------------------------------
/FFmpegEncoder/Kickflip/Kickflip.h:
--------------------------------------------------------------------------------
//
//  Kickflip.h
//  FFmpegEncoder
//
//  Created by Christopher Ballinger on 1/16/14.
//  Copyright (c) 2014 Christopher Ballinger. All rights reserved.
//

// NOTE(review): the export stripped the angle-bracket import argument here;
// UIKit restored because UIViewController appears in the API below — confirm
// against the original source.
#import <UIKit/UIKit.h>
#import "KFRecordingViewController.h"
#import "KFBroadcaster.h"

/// Static facade for configuring and presenting the Kickflip broadcasting UI.
@interface Kickflip : NSObject

/// Stores the API credentials used by subsequent broadcast calls.
+ (void) setupWithAPIKey:(NSString*)key secret:(NSString*)secret;
/// Presents the broadcast UI; readyBlock fires with the stream URL (or an
/// error), completionBlock when presentation finishes.
+ (void) presentBroadcastViewFromViewController:(UIViewController*)viewController ready:(void (^)(NSURL *streamURL, NSError *error))readyBlock completion:(void (^)(void))completionBlock;

+ (KFBroadcaster*) broadcaster;
+ (NSString*) apiKey;
+ (NSString*) apiSecret;

@end
--------------------------------------------------------------------------------
/FFmpegEncoder/Kickflip/Kickflip.m:
--------------------------------------------------------------------------------
//
//  Kickflip.m
//  FFmpegEncoder
//
//  Created by Christopher Ballinger on 1/16/14.
//  Copyright (c) 2014 Christopher Ballinger. All rights reserved.
//

#import "Kickflip.h"

// NOTE(review): stub — none of the class methods declared in Kickflip.h are
// implemented here, so the compiler will warn about an incomplete
// implementation until they land.
@implementation Kickflip

@end
--------------------------------------------------------------------------------
/FFmpegEncoder/OWSharedS3Client.h:
--------------------------------------------------------------------------------
//
//  OWSharedS3Client.h
//  LiveStreamer
//
//  Created by Christopher Ballinger on 10/4/13.
//  Copyright (c) 2013 OpenWatch, Inc. All rights reserved.
//

#import "OWS3Client.h"

/// Process-wide S3 client preconfigured with the app's AWS credentials.
@interface OWSharedS3Client : OWS3Client

/// Shared singleton instance (created once; safe to call from any thread).
+ (OWSharedS3Client*) sharedClient;

@end
--------------------------------------------------------------------------------
/FFmpegEncoder/OWSharedS3Client.m:
--------------------------------------------------------------------------------
//
//  OWSharedS3Client.m
//  LiveStreamer
//
//  Created by Christopher Ballinger on 10/4/13.
//  Copyright (c) 2013 OpenWatch, Inc. All rights reserved.
//

#import "OWSharedS3Client.h"
#import "OWSecrets.h"

@implementation OWSharedS3Client

/// Singleton accessor; the instance is created exactly once via dispatch_once
/// and is therefore safe to call from any thread.
+ (OWSharedS3Client*) sharedClient {
    static OWSharedS3Client *_sharedInstance = nil;
    static dispatch_once_t onceToken;
    dispatch_once(&onceToken, ^{
        _sharedInstance = [[OWSharedS3Client alloc] init];
    });
    return _sharedInstance;
}

/// Configures the client with the compiled-in AWS credentials (OWSecrets.h).
/// Returns instancetype (rather than id) so the compiler can type-check
/// callers; this is source- and ABI-compatible with the original.
- (instancetype) init {
    if (self = [super initWithAccessKey:AWS_ACCESS_KEY_ID secretKey:AWS_SECRET_KEY]) {
        self.region = US_EAST_1;
        // NOTE(review): SSL is disabled — transfers go over plain HTTP.
        // Confirm this is intentional before shipping.
        self.useSSL = NO;
    }
    return self;
}

@end
--------------------------------------------------------------------------------
/FFmpegEncoder/en.lproj/InfoPlist.strings:
--------------------------------------------------------------------------------
/* Localized versions of Info.plist keys */

--------------------------------------------------------------------------------
/FFmpegEncoder/main.m:
--------------------------------------------------------------------------------
//
//  main.m
//  FFmpegEncoder
//
//  Created by Christopher Ballinger on 12/15/13.
//  Copyright (c) 2013 Christopher Ballinger. All rights reserved.
//

// NOTE(review): the export stripped the angle-bracket import argument;
// <UIKit/UIKit.h> restored — it is required for UIApplicationMain below.
#import <UIKit/UIKit.h>

#import "EncoderDemoAppDelegate.h"

/// Standard UIKit entry point: hands control to UIApplicationMain with
/// EncoderDemoAppDelegate as the application delegate. Never returns in
/// normal operation.
int main(int argc, char * argv[])
{
    @autoreleasepool {
        return UIApplicationMain(argc, argv, nil, NSStringFromClass([EncoderDemoAppDelegate class]));
    }
}
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
The MIT License (MIT)

Copyright (c) 2013 Chris Ballinger

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
22 | 23 | This software includes dependencies that are under terms of different licenses: 24 | 25 | * FFmpeg - LGPL 2.1+ 26 | * FFmpegWrapper - LGPL 2.1+ 27 | * H264-RTSP-Server-iOS - GDCL Source Code License http://www.gdcl.co.uk/license.htm -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # FFmpeg-iOS-Encoder 2 | 3 | Development on this project has been moved to [kickflip-ios-sdk](https://github.com/Kickflip/kickflip-ios-sdk). [Kickflip](http://kickflip.io) has open source live broadcasting SDKs for [iOS](https://github.com/Kickflip/kickflip-ios-sdk) and [Android](https://github.com/kickflip/kickflip-android-sdk). Check it out! --------------------------------------------------------------------------------