├── Classes ├── CoverPlaceholder.png ├── RosyWriterAppDelegate.h ├── RosyWriterAppDelegate.m ├── RosyWriterCIFilterRenderer.h ├── RosyWriterCIFilterRenderer.m ├── RosyWriterCPURenderer.h ├── RosyWriterCPURenderer.m ├── RosyWriterCapturePipeline.h ├── RosyWriterCapturePipeline.m ├── RosyWriterOpenCVRenderer.h ├── RosyWriterOpenCVRenderer.mm ├── RosyWriterOpenGLRenderer.h ├── RosyWriterOpenGLRenderer.m ├── RosyWriterRenderer.h ├── RosyWriterViewController+Helper.h ├── RosyWriterViewController+Helper.m ├── RosyWriterViewController.h ├── RosyWriterViewController.m ├── Utilities │ ├── CameraControlFunctions.h │ ├── CameraControlFunctions.m │ ├── GL │ │ ├── ShaderUtilities.c │ │ ├── ShaderUtilities.h │ │ ├── matrix.c │ │ └── matrix.h │ ├── MovieRecorder.h │ ├── MovieRecorder.m │ ├── OpenGLPixelBufferView.h │ ├── OpenGLPixelBufferView.m │ ├── VideoTimeConverter.h │ └── VideoTimeConverter.m ├── inertialRecorder.h └── inertialRecorder.m ├── LICENSE ├── LICENSE.txt ├── MarsLogger-Info.plist ├── MarsLogger-Prefix.pch ├── MarsLogger.xcodeproj ├── project.pbxproj ├── project.xcworkspace │ ├── contents.xcworkspacedata │ ├── xcshareddata │ │ ├── IDEWorkspaceChecks.plist │ │ └── WorkspaceSettings.xcsettings │ └── xcuserdata │ │ └── feijiulin.xcuserdatad │ │ └── UserInterfaceState.xcuserstate ├── xcshareddata │ └── xcschemes │ │ └── MarsLogger.xcscheme └── xcuserdata │ └── feijiulin.xcuserdatad │ ├── xcdebugger │ └── Breakpoints_v2.xcbkptlist │ └── xcschemes │ └── xcschememanagement.plist ├── MarsLoggerTests ├── Info.plist └── MarsLoggerTests.m ├── README.assets ├── 1.gif ├── 2.png └── 20230213204850.jpg ├── README.md ├── ReadMe.txt ├── Resources ├── Base.lproj │ ├── Launch.storyboard │ └── Main.storyboard ├── Images.xcassets │ ├── AppIcon.appiconset │ │ ├── 1024.png │ │ ├── 120-1.png │ │ ├── 120.png │ │ ├── 152.png │ │ ├── 167.png │ │ ├── 180.png │ │ ├── 20.png │ │ ├── 29.png │ │ ├── 40-1.png │ │ ├── 40-2.png │ │ ├── 40.png │ │ ├── 58-1.png │ │ ├── 58.png │ │ ├── 60.png │ │ ├── 
76.png │ │ ├── 80-1.png │ │ ├── 80.png │ │ ├── 87.png │ │ └── Contents.json │ └── LaunchImage.launchimage │ │ └── Contents.json ├── Shaders │ ├── myFilter.fsh │ └── myFilter.vsh └── en.lproj │ └── InfoPlist.strings └── main.m /Classes/CoverPlaceholder.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/KunruiHuang/ios-data-collector/3ebb82a48574a114ca6b55acd15ded82393aeb3a/Classes/CoverPlaceholder.png -------------------------------------------------------------------------------- /Classes/RosyWriterAppDelegate.h: -------------------------------------------------------------------------------- 1 | 2 | /* 3 | Copyright (C) 2016 Apple Inc. All Rights Reserved. 4 | See LICENSE.txt for this sample’s licensing information 5 | 6 | Abstract: 7 | Application delegate 8 | */ 9 | 10 | 11 | #import 12 | 13 | @interface RosyWriterAppDelegate : UIResponder 14 | 15 | @property(nonatomic, strong) UIWindow *window; 16 | 17 | @end 18 | -------------------------------------------------------------------------------- /Classes/RosyWriterAppDelegate.m: -------------------------------------------------------------------------------- 1 | 2 | /* 3 | Copyright (C) 2016 Apple Inc. All Rights Reserved. 4 | See LICENSE.txt for this sample’s licensing information 5 | 6 | Abstract: 7 | Application delegate 8 | */ 9 | 10 | #import "RosyWriterAppDelegate.h" 11 | 12 | @implementation RosyWriterAppDelegate 13 | 14 | 15 | @end 16 | -------------------------------------------------------------------------------- /Classes/RosyWriterCIFilterRenderer.h: -------------------------------------------------------------------------------- 1 | 2 | /* 3 | Copyright (C) 2016 Apple Inc. All Rights Reserved. 
4 | See LICENSE.txt for this sample’s licensing information 5 | 6 | Abstract: 7 | The RosyWriter CoreImage CIFilter-based effect renderer 8 | */ 9 | 10 | #import "RosyWriterRenderer.h" 11 | 12 | @interface RosyWriterCIFilterRenderer : NSObject 13 | 14 | @end 15 | -------------------------------------------------------------------------------- /Classes/RosyWriterCIFilterRenderer.m: -------------------------------------------------------------------------------- 1 | /* 2 | Copyright (C) 2016 Apple Inc. All Rights Reserved. 3 | See LICENSE.txt for this sample’s licensing information 4 | 5 | Abstract: 6 | The RosyWriter CoreImage CIFilter-based effect renderer 7 | */ 8 | 9 | #import "RosyWriterCIFilterRenderer.h" 10 | 11 | @interface RosyWriterCIFilterRenderer () 12 | { 13 | CIContext *_ciContext; 14 | CIFilter *_rosyFilter; 15 | CGColorSpaceRef _rgbColorSpace; 16 | CVPixelBufferPoolRef _bufferPool; 17 | CFDictionaryRef _bufferPoolAuxAttributes; 18 | CMFormatDescriptionRef _outputFormatDescription; 19 | } 20 | 21 | @end 22 | 23 | @implementation RosyWriterCIFilterRenderer 24 | 25 | #pragma mark API 26 | 27 | - (void)dealloc 28 | { 29 | [self deleteBuffers]; 30 | } 31 | 32 | #pragma mark RosyWriterRenderer 33 | 34 | - (BOOL)operatesInPlace 35 | { 36 | return NO; 37 | } 38 | 39 | - (FourCharCode)inputPixelFormat 40 | { 41 | return kCVPixelFormatType_32BGRA; 42 | } 43 | 44 | - (void)prepareForInputWithFormatDescription:(CMFormatDescriptionRef)inputFormatDescription outputRetainedBufferCountHint:(size_t)outputRetainedBufferCountHint 45 | { 46 | // The input and output dimensions are the same. This renderer doesn't do any scaling. 47 | CMVideoDimensions dimensions = CMVideoFormatDescriptionGetDimensions( inputFormatDescription ); 48 | 49 | [self deleteBuffers]; 50 | if ( ! 
[self initializeBuffersWithOutputDimensions:dimensions retainedBufferCountHint:outputRetainedBufferCountHint] ) { 51 | @throw [NSException exceptionWithName:NSInternalInconsistencyException reason:@"Problem preparing renderer." userInfo:nil]; 52 | } 53 | 54 | _rgbColorSpace = CGColorSpaceCreateDeviceRGB(); 55 | EAGLContext *eaglContext = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2]; 56 | _ciContext = [CIContext contextWithEAGLContext:eaglContext options:@{ kCIContextWorkingColorSpace : [NSNull null] } ]; 57 | 58 | _rosyFilter = [CIFilter filterWithName:@"CIColorMatrix"]; 59 | CGFloat greenCoefficients[4] = { 0, 0, 0, 0 }; 60 | [_rosyFilter setValue:[CIVector vectorWithValues:greenCoefficients count:4] forKey:@"inputGVector"]; 61 | } 62 | 63 | - (void)reset 64 | { 65 | [self deleteBuffers]; 66 | } 67 | 68 | - (CVPixelBufferRef)copyRenderedPixelBuffer:(CVPixelBufferRef)pixelBuffer 69 | { 70 | OSStatus err = noErr; 71 | CVPixelBufferRef renderedOutputPixelBuffer = NULL; 72 | 73 | CIImage *sourceImage = [[CIImage alloc] initWithCVPixelBuffer:pixelBuffer options:nil]; 74 | 75 | // [_rosyFilter setValue:sourceImage forKey:kCIInputImageKey]; 76 | // CIImage *filteredImage = [_rosyFilter valueForKey:kCIOutputImageKey]; // De-green 77 | CIImage *filteredImage = sourceImage; 78 | 79 | err = CVPixelBufferPoolCreatePixelBuffer( kCFAllocatorDefault, _bufferPool, &renderedOutputPixelBuffer ); 80 | if ( err ) { 81 | NSLog( @"Cannot obtain a pixel buffer from the buffer pool (%d)", (int)err ); 82 | goto bail; 83 | } 84 | 85 | // render the filtered image out to a pixel buffer (no locking needed as CIContext's render method will do that) 86 | [_ciContext render:filteredImage toCVPixelBuffer:renderedOutputPixelBuffer bounds:[filteredImage extent] colorSpace:_rgbColorSpace]; 87 | 88 | bail: 89 | 90 | return renderedOutputPixelBuffer; 91 | } 92 | 93 | - (CMFormatDescriptionRef)outputFormatDescription 94 | { 95 | return _outputFormatDescription; 96 | } 97 | 98 | #pragma 
mark Internal 99 | 100 | - (BOOL)initializeBuffersWithOutputDimensions:(CMVideoDimensions)outputDimensions retainedBufferCountHint:(size_t)clientRetainedBufferCountHint 101 | { 102 | BOOL success = YES; 103 | 104 | size_t maxRetainedBufferCount = clientRetainedBufferCountHint; 105 | _bufferPool = createPixelBufferPool( outputDimensions.width, outputDimensions.height, kCVPixelFormatType_32BGRA, (int32_t)maxRetainedBufferCount ); 106 | if ( ! _bufferPool ) { 107 | NSLog( @"Problem initializing a buffer pool." ); 108 | success = NO; 109 | goto bail; 110 | } 111 | 112 | _bufferPoolAuxAttributes = createPixelBufferPoolAuxAttributes( (int32_t)maxRetainedBufferCount ); 113 | preallocatePixelBuffersInPool( _bufferPool, _bufferPoolAuxAttributes ); 114 | 115 | CMFormatDescriptionRef outputFormatDescription = NULL; 116 | CVPixelBufferRef testPixelBuffer = NULL; 117 | CVPixelBufferPoolCreatePixelBufferWithAuxAttributes( kCFAllocatorDefault, _bufferPool, _bufferPoolAuxAttributes, &testPixelBuffer ); 118 | if ( ! testPixelBuffer ) { 119 | NSLog( @"Problem creating a pixel buffer." ); 120 | success = NO; 121 | goto bail; 122 | } 123 | CMVideoFormatDescriptionCreateForImageBuffer( kCFAllocatorDefault, testPixelBuffer, &outputFormatDescription ); 124 | _outputFormatDescription = outputFormatDescription; 125 | CFRelease( testPixelBuffer ); 126 | 127 | bail: 128 | if ( ! 
success ) { 129 | [self deleteBuffers]; 130 | } 131 | return success; 132 | } 133 | 134 | - (void)deleteBuffers 135 | { 136 | if ( _bufferPool ) { 137 | CFRelease( _bufferPool ); 138 | _bufferPool = NULL; 139 | } 140 | if ( _bufferPoolAuxAttributes ) { 141 | CFRelease( _bufferPoolAuxAttributes ); 142 | _bufferPoolAuxAttributes = NULL; 143 | } 144 | if ( _outputFormatDescription ) { 145 | CFRelease( _outputFormatDescription ); 146 | _outputFormatDescription = NULL; 147 | } 148 | if ( _rgbColorSpace ) { 149 | CFRelease( _rgbColorSpace ); 150 | _rgbColorSpace = NULL; 151 | } 152 | 153 | _ciContext = nil; 154 | _rosyFilter = nil; 155 | } 156 | 157 | static CVPixelBufferPoolRef createPixelBufferPool( int32_t width, int32_t height, OSType pixelFormat, int32_t maxBufferCount ) 158 | { 159 | CVPixelBufferPoolRef outputPool = NULL; 160 | 161 | NSDictionary *sourcePixelBufferOptions = @{ (id)kCVPixelBufferPixelFormatTypeKey : @(pixelFormat), 162 | (id)kCVPixelBufferWidthKey : @(width), 163 | (id)kCVPixelBufferHeightKey : @(height), 164 | (id)kCVPixelFormatOpenGLESCompatibility : @(YES), 165 | (id)kCVPixelBufferIOSurfacePropertiesKey : @{ /*empty dictionary*/ } }; 166 | 167 | NSDictionary *pixelBufferPoolOptions = @{ (id)kCVPixelBufferPoolMinimumBufferCountKey : @(maxBufferCount) }; 168 | 169 | CVPixelBufferPoolCreate( kCFAllocatorDefault, (__bridge CFDictionaryRef)pixelBufferPoolOptions, (__bridge CFDictionaryRef)sourcePixelBufferOptions, &outputPool ); 170 | 171 | return outputPool; 172 | } 173 | 174 | static CFDictionaryRef createPixelBufferPoolAuxAttributes( int32_t maxBufferCount ) 175 | { 176 | // CVPixelBufferPoolCreatePixelBufferWithAuxAttributes() will return kCVReturnWouldExceedAllocationThreshold if we have already vended the max number of buffers 177 | NSDictionary *auxAttributes = [[NSDictionary alloc] initWithObjectsAndKeys:@(maxBufferCount), (id)kCVPixelBufferPoolAllocationThresholdKey, nil]; 178 | return CFBridgingRetain( auxAttributes ); 179 | } 180 | 181 | 
static void preallocatePixelBuffersInPool( CVPixelBufferPoolRef pool, CFDictionaryRef auxAttributes ) 182 | { 183 | // Preallocate buffers in the pool, since this is for real-time display/capture 184 | NSMutableArray *pixelBuffers = [[NSMutableArray alloc] init]; 185 | while ( 1 ) 186 | { 187 | CVPixelBufferRef pixelBuffer = NULL; 188 | OSStatus err = CVPixelBufferPoolCreatePixelBufferWithAuxAttributes( kCFAllocatorDefault, pool, auxAttributes, &pixelBuffer ); 189 | 190 | if ( err == kCVReturnWouldExceedAllocationThreshold ) { 191 | break; 192 | } 193 | assert( err == noErr ); 194 | 195 | [pixelBuffers addObject:CFBridgingRelease( pixelBuffer )]; 196 | } 197 | [pixelBuffers removeAllObjects]; 198 | } 199 | 200 | @end 201 | -------------------------------------------------------------------------------- /Classes/RosyWriterCPURenderer.h: -------------------------------------------------------------------------------- 1 | 2 | /* 3 | Copyright (C) 2016 Apple Inc. All Rights Reserved. 4 | See LICENSE.txt for this sample’s licensing information 5 | 6 | Abstract: 7 | The RosyWriter CPU-based effect renderer 8 | */ 9 | 10 | 11 | #import "RosyWriterRenderer.h" 12 | 13 | @interface RosyWriterCPURenderer : NSObject 14 | 15 | @end 16 | -------------------------------------------------------------------------------- /Classes/RosyWriterCPURenderer.m: -------------------------------------------------------------------------------- 1 | 2 | /* 3 | Copyright (C) 2016 Apple Inc. All Rights Reserved. 
4 | See LICENSE.txt for this sample’s licensing information 5 | 6 | Abstract: 7 | The RosyWriter CPU-based effect renderer 8 | */ 9 | 10 | #import "RosyWriterCPURenderer.h" 11 | 12 | @implementation RosyWriterCPURenderer 13 | 14 | #pragma mark RosyWriterRenderer 15 | 16 | - (BOOL)operatesInPlace 17 | { 18 | return YES; 19 | } 20 | 21 | - (FourCharCode)inputPixelFormat 22 | { 23 | return kCVPixelFormatType_32BGRA; 24 | } 25 | 26 | - (void)prepareForInputWithFormatDescription:(CMFormatDescriptionRef)inputFormatDescription outputRetainedBufferCountHint:(size_t)outputRetainedBufferCountHint 27 | { 28 | // nothing to do, we are stateless 29 | } 30 | 31 | - (void)reset 32 | { 33 | // nothing to do, we are stateless 34 | } 35 | 36 | - (CVPixelBufferRef)copyRenderedPixelBuffer:(CVPixelBufferRef)pixelBuffer 37 | { 38 | // const int kBytesPerPixel = 4; 39 | 40 | CVPixelBufferLockBaseAddress( pixelBuffer, 0 ); 41 | 42 | // int bufferWidth = (int)CVPixelBufferGetWidth( pixelBuffer ); 43 | // int bufferHeight = (int)CVPixelBufferGetHeight( pixelBuffer ); 44 | // size_t bytesPerRow = CVPixelBufferGetBytesPerRow( pixelBuffer ); 45 | // uint8_t *baseAddress = CVPixelBufferGetBaseAddress( pixelBuffer ); 46 | 47 | // for ( int row = 0; row < bufferHeight; row++ ) 48 | // { 49 | // uint8_t *pixel = baseAddress + row * bytesPerRow; 50 | // for ( int column = 0; column < bufferWidth; column++ ) 51 | // { 52 | // pixel[1] = 0; // De-green (second pixel in BGRA is green) 53 | // pixel += kBytesPerPixel; 54 | // } 55 | // } 56 | 57 | CVPixelBufferUnlockBaseAddress( pixelBuffer, 0 ); 58 | 59 | return (CVPixelBufferRef)CFRetain( pixelBuffer ); 60 | } 61 | 62 | @end 63 | -------------------------------------------------------------------------------- /Classes/RosyWriterCapturePipeline.h: -------------------------------------------------------------------------------- 1 | 2 | /* 3 | Copyright (C) 2016 Apple Inc. All Rights Reserved. 
4 | See LICENSE.txt for this sample’s licensing information 5 | 6 | Abstract: 7 | The class that creates and manages the AVCaptureSession 8 | */ 9 | 10 | 11 | #import 12 | 13 | @protocol RosyWriterCapturePipelineDelegate; 14 | 15 | @interface RosyWriterCapturePipeline : NSObject 16 | 17 | - (instancetype)initWithDelegate:(id)delegate callbackQueue:(dispatch_queue_t)queue; // delegate is weak referenced 18 | 19 | // These methods are synchronous 20 | - (void)startRunning; 21 | - (void)stopRunning; 22 | 23 | // Must be running before starting recording 24 | // These methods are asynchronous, see the recording delegate callbacks 25 | - (void)startRecording; 26 | - (void)stopRecording; 27 | 28 | - (float)reportLensFocalLenParams; 29 | 30 | // lock focus and exposure at the point of interest 31 | - (void)focusAtPoint:(CGPoint)point; 32 | 33 | // unlock auto focus and auto exposure, this invalidates focusAtPoint 34 | - (void)unlockFocusAndExposure; 35 | 36 | // get the file URL where the inertial data is saved 37 | - (NSURL *)getInertialFileURL; 38 | 39 | @property(atomic) BOOL renderingEnabled; // When set to NO the GPU will not be used after the setRenderingEnabled: call returns. 40 | 41 | @property(atomic) AVCaptureVideoOrientation recordingOrientation; // client can set the orientation for the recorded movie 42 | 43 | - (CGAffineTransform)transformFromVideoBufferOrientationToOrientation:(AVCaptureVideoOrientation)orientation withAutoMirroring:(BOOL)mirroring; // only valid after startRunning has been called 44 | 45 | // Stats 46 | @property(atomic, readonly) float videoFrameRate; 47 | @property(atomic, readonly) float fx; // focal length in horizontal x axis in pixels 48 | @property(atomic, readonly) int64_t exposureDuration; // nanoseconds 49 | @property(atomic, readonly) BOOL autoLocked; // are both auto focus and auto exposure locked? 
50 | @property(atomic, readonly) CMVideoDimensions videoDimensions; 51 | @property(atomic, readonly) AVCaptureDeviceInput *videoDeviceInput; 52 | @property(atomic, readonly) NSURL *metadataFileURL; 53 | @end 54 | 55 | @protocol RosyWriterCapturePipelineDelegate 56 | @required 57 | 58 | - (void)capturePipeline:(RosyWriterCapturePipeline *)capturePipeline didStopRunningWithError:(NSError *)error; 59 | 60 | // Preview 61 | - (void)capturePipeline:(RosyWriterCapturePipeline *)capturePipeline previewPixelBufferReadyForDisplay:(CVPixelBufferRef)previewPixelBuffer; 62 | - (void)capturePipelineDidRunOutOfPreviewBuffers:(RosyWriterCapturePipeline *)capturePipeline; 63 | 64 | // Recording 65 | - (void)capturePipelineRecordingDidStart:(RosyWriterCapturePipeline *)capturePipeline; 66 | - (void)capturePipeline:(RosyWriterCapturePipeline *)capturePipeline recordingDidFailWithError:(NSError *)error; // Can happen at any point after a startRecording call, for example: startRecording->didFail (without a didStart), willStop->didFail (without a didStop) 67 | - (void)capturePipelineRecordingWillStop:(RosyWriterCapturePipeline *)capturePipeline; 68 | - (void)capturePipelineRecordingDidStop:(RosyWriterCapturePipeline *)capturePipeline; 69 | 70 | @end 71 | 72 | union intToFloat 73 | { 74 | uint32_t i; 75 | float fp; 76 | }; 77 | 78 | -------------------------------------------------------------------------------- /Classes/RosyWriterOpenCVRenderer.h: -------------------------------------------------------------------------------- 1 | 2 | /* 3 | Copyright (C) 2016 Apple Inc. All Rights Reserved. 4 | See LICENSE.txt for this sample’s licensing information 5 | 6 | Abstract: 7 | The RosyWriter OpenCV based effect renderer 8 | */ 9 | 10 | #import "RosyWriterRenderer.h" 11 | 12 | // To use the RosyWriterOpenCVRenderer, import this header in RosyWriterCapturePipeline.m 13 | // and intialize _renderer to a RosyWriterOpenCVRenderer. 
14 | 15 | @interface RosyWriterOpenCVRenderer : NSObject 16 | 17 | @end 18 | -------------------------------------------------------------------------------- /Classes/RosyWriterOpenCVRenderer.mm: -------------------------------------------------------------------------------- 1 | 2 | /* 3 | Copyright (C) 2016 Apple Inc. All Rights Reserved. 4 | See LICENSE.txt for this sample’s licensing information 5 | 6 | Abstract: 7 | The RosyWriter OpenCV based effect renderer 8 | */ 9 | 10 | #import "RosyWriterOpenCVRenderer.h" 11 | 12 | // To build OpenCV into the project: 13 | // - Download opencv2.framework for iOS 14 | // - Insert framework into project's Frameworks group 15 | // - Make sure framework is included under the target's Build Phases -> Link Binary With Libraries. 16 | #import 17 | 18 | @implementation RosyWriterOpenCVRenderer 19 | 20 | #pragma mark RosyWriterRenderer 21 | 22 | - (BOOL)operatesInPlace 23 | { 24 | return YES; 25 | } 26 | 27 | - (FourCharCode)inputPixelFormat 28 | { 29 | return kCVPixelFormatType_32BGRA; 30 | } 31 | 32 | - (void)prepareForInputWithFormatDescription:(CMFormatDescriptionRef)inputFormatDescription outputRetainedBufferCountHint:(size_t)outputRetainedBufferCountHint 33 | { 34 | // nothing to do, we are stateless 35 | } 36 | 37 | - (void)reset 38 | { 39 | // nothing to do, we are stateless 40 | } 41 | 42 | - (CVPixelBufferRef)copyRenderedPixelBuffer:(CVPixelBufferRef)pixelBuffer 43 | { 44 | CVPixelBufferLockBaseAddress( pixelBuffer, 0 ); 45 | 46 | unsigned char *base = (unsigned char *)CVPixelBufferGetBaseAddress( pixelBuffer ); 47 | // size_t width = CVPixelBufferGetWidth( pixelBuffer ); 48 | size_t height = CVPixelBufferGetHeight( pixelBuffer ); 49 | size_t stride = CVPixelBufferGetBytesPerRow( pixelBuffer ); 50 | size_t extendedWidth = stride / sizeof( uint32_t ); // each pixel is 4 bytes/32 bits 51 | 52 | // Since the OpenCV Mat is wrapping the CVPixelBuffer's pixel data, we must do all of our modifications while its base address is 
locked. 53 | // If we want to operate on the buffer later, we'll have to do an expensive deep copy of the pixel data, using memcpy or Mat::clone(). 54 | 55 | // Use extendedWidth instead of width to account for possible row extensions (sometimes used for memory alignment). 56 | // We only need to work on columms from [0, width - 1] regardless. 57 | 58 | cv::Mat bgraImage = cv::Mat( (int)height, (int)extendedWidth, CV_8UC4, base ); 59 | 60 | // for ( uint32_t y = 0; y < height; y++ ) 61 | // { 62 | // for ( uint32_t x = 0; x < width; x++ ) 63 | // { 64 | // bgraImage.at >(y,x)[1] = 0; // De-green 65 | // } 66 | // } 67 | 68 | CVPixelBufferUnlockBaseAddress( pixelBuffer, 0 ); 69 | 70 | return (CVPixelBufferRef)CFRetain( pixelBuffer ); 71 | } 72 | 73 | @end 74 | -------------------------------------------------------------------------------- /Classes/RosyWriterOpenGLRenderer.h: -------------------------------------------------------------------------------- 1 | 2 | /* 3 | Copyright (C) 2016 Apple Inc. All Rights Reserved. 4 | See LICENSE.txt for this sample’s licensing information 5 | 6 | Abstract: 7 | The RosyWriter OpenGL effect renderer 8 | */ 9 | 10 | 11 | #import "RosyWriterRenderer.h" 12 | 13 | @interface RosyWriterOpenGLRenderer : NSObject 14 | 15 | @end 16 | -------------------------------------------------------------------------------- /Classes/RosyWriterOpenGLRenderer.m: -------------------------------------------------------------------------------- 1 | 2 | /* 3 | Copyright (C) 2016 Apple Inc. All Rights Reserved. 
4 | See LICENSE.txt for this sample’s licensing information 5 | 6 | Abstract: 7 | The RosyWriter OpenGL effect renderer 8 | */ 9 | 10 | #import "RosyWriterOpenGLRenderer.h" 11 | #import 12 | #import "ShaderUtilities.h" 13 | #import "matrix.h" 14 | #import "OpenGLPixelBufferView.h" // customized opengl program and enums 15 | 16 | 17 | @interface RosyWriterOpenGLRenderer () 18 | { 19 | EAGLContext *_oglContext; 20 | CVOpenGLESTextureCacheRef _textureCache; 21 | CVOpenGLESTextureCacheRef _renderTextureCache; 22 | CVPixelBufferPoolRef _bufferPool; 23 | CFDictionaryRef _bufferPoolAuxAttributes; 24 | CMFormatDescriptionRef _outputFormatDescription; 25 | GLuint _program; 26 | GLint _frame; 27 | GLuint _offscreenBufferHandle; 28 | } 29 | 30 | @end 31 | 32 | @implementation RosyWriterOpenGLRenderer 33 | 34 | #pragma mark API 35 | 36 | - (instancetype)init 37 | { 38 | self = [super init]; 39 | if ( self ) 40 | { 41 | _oglContext = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2]; 42 | if ( ! _oglContext ) { 43 | NSLog( @"Problem with OpenGL context." ); 44 | return nil; 45 | } 46 | } 47 | return self; 48 | } 49 | 50 | - (void)dealloc 51 | { 52 | [self deleteBuffers]; 53 | } 54 | 55 | #pragma mark RosyWriterRenderer 56 | 57 | - (BOOL)operatesInPlace 58 | { 59 | return NO; 60 | } 61 | 62 | - (FourCharCode)inputPixelFormat 63 | { 64 | return kCVPixelFormatType_32BGRA; 65 | } 66 | 67 | - (void)prepareForInputWithFormatDescription:(CMFormatDescriptionRef)inputFormatDescription outputRetainedBufferCountHint:(size_t)outputRetainedBufferCountHint 68 | { 69 | // The input and output dimensions are the same. This renderer doesn't do any scaling. 70 | CMVideoDimensions dimensions = CMVideoFormatDescriptionGetDimensions( inputFormatDescription ); 71 | 72 | [self deleteBuffers]; 73 | if ( ! 
[self initializeBuffersWithOutputDimensions:dimensions retainedBufferCountHint:outputRetainedBufferCountHint] ) { 74 | @throw [NSException exceptionWithName:NSInternalInconsistencyException reason:@"Problem preparing renderer." userInfo:nil]; 75 | } 76 | } 77 | 78 | - (void)reset 79 | { 80 | [self deleteBuffers]; 81 | } 82 | 83 | - (CVPixelBufferRef)copyRenderedPixelBuffer:(CVPixelBufferRef)pixelBuffer 84 | { 85 | static const GLfloat squareVertices[] = { 86 | -1.0f, -1.0f, // bottom left 87 | 1.0f, -1.0f, // bottom right 88 | -1.0f, 1.0f, // top left 89 | 1.0f, 1.0f, // top right 90 | }; 91 | static const float textureVertices[] = { 92 | 0.0f, 0.0f, // bottom left 93 | 1.0f, 0.0f, // bottom right 94 | 0.0f, 1.0f, // top left 95 | 1.0f, 1.0f, // top right 96 | }; 97 | 98 | if ( _offscreenBufferHandle == 0 ) { 99 | @throw [NSException exceptionWithName:NSInternalInconsistencyException reason:@"Unintialized buffer" userInfo:nil]; 100 | return NULL; 101 | } 102 | 103 | if ( pixelBuffer == NULL ) { 104 | @throw [NSException exceptionWithName:NSInvalidArgumentException reason:@"NULL pixel buffer" userInfo:nil]; 105 | return NULL; 106 | } 107 | 108 | const CMVideoDimensions srcDimensions = { (int32_t)CVPixelBufferGetWidth(pixelBuffer), (int32_t)CVPixelBufferGetHeight(pixelBuffer) }; 109 | const CMVideoDimensions dstDimensions = CMVideoFormatDescriptionGetDimensions( _outputFormatDescription ); 110 | if ( srcDimensions.width != dstDimensions.width || srcDimensions.height != dstDimensions.height ) { 111 | @throw [NSException exceptionWithName:NSInvalidArgumentException reason:@"Invalid pixel buffer dimensions" userInfo:nil]; 112 | return NULL; 113 | } 114 | 115 | if ( CVPixelBufferGetPixelFormatType( pixelBuffer ) != kCVPixelFormatType_32BGRA ) { 116 | @throw [NSException exceptionWithName:NSInvalidArgumentException reason:@"Invalid pixel buffer format" userInfo:nil]; 117 | return NULL; 118 | } 119 | 120 | EAGLContext *oldContext = [EAGLContext currentContext]; 121 | if ( 
oldContext != _oglContext ) { 122 | if ( ! [EAGLContext setCurrentContext:_oglContext] ) { 123 | @throw [NSException exceptionWithName:NSInternalInconsistencyException reason:@"Problem with OpenGL context" userInfo:nil]; 124 | return NULL; 125 | } 126 | } 127 | 128 | CVReturn err = noErr; 129 | CVOpenGLESTextureRef srcTexture = NULL; 130 | CVOpenGLESTextureRef dstTexture = NULL; 131 | CVPixelBufferRef dstPixelBuffer = NULL; 132 | 133 | err = CVOpenGLESTextureCacheCreateTextureFromImage( kCFAllocatorDefault, 134 | _textureCache, 135 | pixelBuffer, 136 | NULL, 137 | GL_TEXTURE_2D, 138 | GL_RGBA, 139 | srcDimensions.width, 140 | srcDimensions.height, 141 | GL_BGRA, 142 | GL_UNSIGNED_BYTE, 143 | 0, 144 | &srcTexture ); 145 | if ( ! srcTexture || err ) { 146 | NSLog( @"Error at CVOpenGLESTextureCacheCreateTextureFromImage %d", err ); 147 | goto bail; 148 | } 149 | 150 | err = CVPixelBufferPoolCreatePixelBufferWithAuxAttributes( kCFAllocatorDefault, _bufferPool, _bufferPoolAuxAttributes, &dstPixelBuffer ); 151 | if ( err == kCVReturnWouldExceedAllocationThreshold ) { 152 | // Flush the texture cache to potentially release the retained buffers and try again to create a pixel buffer 153 | CVOpenGLESTextureCacheFlush( _renderTextureCache, 0 ); 154 | err = CVPixelBufferPoolCreatePixelBufferWithAuxAttributes( kCFAllocatorDefault, _bufferPool, _bufferPoolAuxAttributes, &dstPixelBuffer ); 155 | } 156 | if ( err ) { 157 | if ( err == kCVReturnWouldExceedAllocationThreshold ) { 158 | NSLog( @"Pool is out of buffers, dropping frame" ); 159 | } 160 | else { 161 | NSLog( @"Error at CVPixelBufferPoolCreatePixelBuffer %d", err ); 162 | } 163 | goto bail; 164 | } 165 | 166 | err = CVOpenGLESTextureCacheCreateTextureFromImage( kCFAllocatorDefault, 167 | _renderTextureCache, 168 | dstPixelBuffer, 169 | NULL, 170 | GL_TEXTURE_2D, 171 | GL_RGBA, 172 | dstDimensions.width, 173 | dstDimensions.height, 174 | GL_BGRA, 175 | GL_UNSIGNED_BYTE, 176 | 0, 177 | &dstTexture ); 178 | 179 | if ( ! 
dstTexture || err ) { 180 | NSLog( @"Error at CVOpenGLESTextureCacheCreateTextureFromImage %d", err ); 181 | goto bail; 182 | } 183 | 184 | glBindFramebuffer( GL_FRAMEBUFFER, _offscreenBufferHandle ); 185 | glViewport( 0, 0, srcDimensions.width, srcDimensions.height ); 186 | glUseProgram( _program ); 187 | 188 | 189 | // Set up our destination pixel buffer as the framebuffer's render target. 190 | glActiveTexture( GL_TEXTURE0 ); 191 | glBindTexture( CVOpenGLESTextureGetTarget( dstTexture ), CVOpenGLESTextureGetName( dstTexture ) ); 192 | glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR ); 193 | glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR ); 194 | glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE ); 195 | glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE ); 196 | glFramebufferTexture2D( GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, CVOpenGLESTextureGetTarget( dstTexture ), CVOpenGLESTextureGetName( dstTexture ), 0 ); 197 | 198 | 199 | // Render our source pixel buffer. 
200 | glActiveTexture( GL_TEXTURE1 ); 201 | glBindTexture( CVOpenGLESTextureGetTarget( srcTexture ), CVOpenGLESTextureGetName( srcTexture ) ); 202 | glUniform1i( _frame, 1 ); 203 | 204 | glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR ); 205 | glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR ); 206 | glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE ); 207 | glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE ); 208 | 209 | glVertexAttribPointer( ATTRIB_VERTEX, 2, GL_FLOAT, 0, 0, squareVertices ); 210 | glEnableVertexAttribArray( ATTRIB_VERTEX ); 211 | glVertexAttribPointer( ATTRIB_TEXTUREPOSITON, 2, GL_FLOAT, 0, 0, textureVertices ); 212 | glEnableVertexAttribArray( ATTRIB_TEXTUREPOSITON ); 213 | 214 | glDrawArrays( GL_TRIANGLE_STRIP, 0, 4 ); 215 | 216 | glBindTexture( CVOpenGLESTextureGetTarget( srcTexture ), 0 ); 217 | glBindTexture( CVOpenGLESTextureGetTarget( dstTexture ), 0 ); 218 | 219 | // Make sure that outstanding GL commands which render to the destination pixel buffer have been submitted. 220 | // AVAssetWriter, AVSampleBufferDisplayLayer, and GL will block until the rendering is complete when sourcing from this pixel buffer. 221 | glFlush(); 222 | 223 | bail: 224 | if ( oldContext != _oglContext ) { 225 | [EAGLContext setCurrentContext:oldContext]; 226 | } 227 | if ( srcTexture ) { 228 | CFRelease( srcTexture ); 229 | } 230 | if ( dstTexture ) { 231 | CFRelease( dstTexture ); 232 | } 233 | return dstPixelBuffer; 234 | } 235 | 236 | - (CMFormatDescriptionRef)outputFormatDescription 237 | { 238 | return _outputFormatDescription; 239 | } 240 | 241 | #pragma mark Internal 242 | 243 | - (BOOL)initializeBuffersWithOutputDimensions:(CMVideoDimensions)outputDimensions retainedBufferCountHint:(size_t)clientRetainedBufferCountHint 244 | { 245 | BOOL success = YES; 246 | 247 | EAGLContext *oldContext = [EAGLContext currentContext]; 248 | if ( oldContext != _oglContext ) { 249 | if ( ! 
[EAGLContext setCurrentContext:_oglContext] ) { 250 | @throw [NSException exceptionWithName:NSInternalInconsistencyException reason:@"Problem with OpenGL context" userInfo:nil]; 251 | return NO; 252 | } 253 | } 254 | 255 | glDisable( GL_DEPTH_TEST ); 256 | 257 | glGenFramebuffers( 1, &_offscreenBufferHandle ); 258 | glBindFramebuffer( GL_FRAMEBUFFER, _offscreenBufferHandle ); 259 | 260 | CVReturn err = CVOpenGLESTextureCacheCreate( kCFAllocatorDefault, NULL, _oglContext, NULL, &_textureCache ); 261 | if ( err ) { 262 | NSLog( @"Error at CVOpenGLESTextureCacheCreate %d", err ); 263 | success = NO; 264 | goto bail; 265 | } 266 | 267 | err = CVOpenGLESTextureCacheCreate( kCFAllocatorDefault, NULL, _oglContext, NULL, &_renderTextureCache ); 268 | if ( err ) { 269 | NSLog( @"Error at CVOpenGLESTextureCacheCreate %d", err ); 270 | success = NO; 271 | goto bail; 272 | } 273 | 274 | // Load vertex and fragment shaders 275 | GLint attribLocation[NUM_ATTRIBUTES] = { 276 | ATTRIB_VERTEX, ATTRIB_TEXTUREPOSITON, 277 | }; 278 | GLchar *attribName[NUM_ATTRIBUTES] = { 279 | "position", "texturecoordinate", 280 | }; 281 | 282 | // const GLchar *vertSrc = [RosyWriterOpenGLRenderer readFile:@"myFilter.vsh"]; 283 | // const GLchar *fragSrc = [RosyWriterOpenGLRenderer readFile:@"myFilter.fsh"]; 284 | 285 | const GLchar *vertSrc = kPassThruVertex; 286 | const GLchar *fragSrc = kPassThruFragment; 287 | 288 | // shader program 289 | glueCreateProgram( vertSrc, fragSrc, 290 | NUM_ATTRIBUTES, (const GLchar **)&attribName[0], attribLocation, 291 | 0, 0, 0, 292 | &_program ); 293 | if ( ! _program ) { 294 | NSLog( @"Problem initializing the program." 
); 295 | success = NO; 296 | goto bail; 297 | } 298 | _frame = glueGetUniformLocation( _program, "videoframe" ); 299 | 300 | size_t maxRetainedBufferCount = clientRetainedBufferCountHint; 301 | _bufferPool = createPixelBufferPool( outputDimensions.width, outputDimensions.height, kCVPixelFormatType_32BGRA, (int32_t)maxRetainedBufferCount ); 302 | if ( ! _bufferPool ) { 303 | NSLog( @"Problem initializing a buffer pool." ); 304 | success = NO; 305 | goto bail; 306 | } 307 | 308 | _bufferPoolAuxAttributes = createPixelBufferPoolAuxAttributes( (int32_t)maxRetainedBufferCount ); 309 | preallocatePixelBuffersInPool( _bufferPool, _bufferPoolAuxAttributes ); 310 | 311 | CMFormatDescriptionRef outputFormatDescription = NULL; 312 | CVPixelBufferRef testPixelBuffer = NULL; 313 | CVPixelBufferPoolCreatePixelBufferWithAuxAttributes( kCFAllocatorDefault, _bufferPool, _bufferPoolAuxAttributes, &testPixelBuffer ); 314 | if ( ! testPixelBuffer ) { 315 | NSLog( @"Problem creating a pixel buffer." ); 316 | success = NO; 317 | goto bail; 318 | } 319 | CMVideoFormatDescriptionCreateForImageBuffer( kCFAllocatorDefault, testPixelBuffer, &outputFormatDescription ); 320 | _outputFormatDescription = outputFormatDescription; 321 | CFRelease( testPixelBuffer ); 322 | 323 | bail: 324 | if ( ! success ) { 325 | [self deleteBuffers]; 326 | } 327 | if ( oldContext != _oglContext ) { 328 | [EAGLContext setCurrentContext:oldContext]; 329 | } 330 | return success; 331 | } 332 | 333 | - (void)deleteBuffers 334 | { 335 | EAGLContext *oldContext = [EAGLContext currentContext]; 336 | if ( oldContext != _oglContext ) { 337 | if ( ! 
[EAGLContext setCurrentContext:_oglContext] ) { 338 | @throw [NSException exceptionWithName:NSInternalInconsistencyException reason:@"Problem with OpenGL context" userInfo:nil]; 339 | return; 340 | } 341 | } 342 | if ( _offscreenBufferHandle ) { 343 | glDeleteFramebuffers( 1, &_offscreenBufferHandle ); 344 | _offscreenBufferHandle = 0; 345 | } 346 | if ( _program ) { 347 | glDeleteProgram( _program ); 348 | _program = 0; 349 | } 350 | if ( _textureCache ) { 351 | CFRelease( _textureCache ); 352 | _textureCache = 0; 353 | } 354 | if ( _renderTextureCache ) { 355 | CFRelease( _renderTextureCache ); 356 | _renderTextureCache = 0; 357 | } 358 | if ( _bufferPool ) { 359 | CFRelease( _bufferPool ); 360 | _bufferPool = NULL; 361 | } 362 | if ( _bufferPoolAuxAttributes ) { 363 | CFRelease( _bufferPoolAuxAttributes ); 364 | _bufferPoolAuxAttributes = NULL; 365 | } 366 | if ( _outputFormatDescription ) { 367 | CFRelease( _outputFormatDescription ); 368 | _outputFormatDescription = NULL; 369 | } 370 | if ( oldContext != _oglContext ) { 371 | [EAGLContext setCurrentContext:oldContext]; 372 | } 373 | } 374 | 375 | + (const GLchar *)readFile:(NSString *)name 376 | { 377 | NSString *path; 378 | const GLchar *source; 379 | 380 | path = [[NSBundle mainBundle] pathForResource:name ofType: nil]; 381 | source = (GLchar *)[[NSString stringWithContentsOfFile:path encoding:NSUTF8StringEncoding error:nil] UTF8String]; 382 | return source; 383 | } 384 | 385 | static CVPixelBufferPoolRef createPixelBufferPool( int32_t width, int32_t height, FourCharCode pixelFormat, int32_t maxBufferCount ) 386 | { 387 | CVPixelBufferPoolRef outputPool = NULL; 388 | 389 | NSDictionary *sourcePixelBufferOptions = @{ (id)kCVPixelBufferPixelFormatTypeKey : @(pixelFormat), 390 | (id)kCVPixelBufferWidthKey : @(width), 391 | (id)kCVPixelBufferHeightKey : @(height), 392 | (id)kCVPixelFormatOpenGLESCompatibility : @(YES), 393 | (id)kCVPixelBufferIOSurfacePropertiesKey : @{ /*empty dictionary*/ } }; 394 | 395 | 
NSDictionary *pixelBufferPoolOptions = @{ (id)kCVPixelBufferPoolMinimumBufferCountKey : @(maxBufferCount) }; 396 | 397 | CVPixelBufferPoolCreate( kCFAllocatorDefault, (__bridge CFDictionaryRef)pixelBufferPoolOptions, (__bridge CFDictionaryRef)sourcePixelBufferOptions, &outputPool ); 398 | 399 | return outputPool; 400 | } 401 | 402 | static CFDictionaryRef createPixelBufferPoolAuxAttributes( int32_t maxBufferCount ) 403 | { 404 | // CVPixelBufferPoolCreatePixelBufferWithAuxAttributes() will return kCVReturnWouldExceedAllocationThreshold if we have already vended the max number of buffers 405 | return CFBridgingRetain( @{ (id)kCVPixelBufferPoolAllocationThresholdKey : @(maxBufferCount) } ); 406 | } 407 | 408 | static void preallocatePixelBuffersInPool( CVPixelBufferPoolRef pool, CFDictionaryRef auxAttributes ) 409 | { 410 | // Preallocate buffers in the pool, since this is for real-time display/capture 411 | NSMutableArray *pixelBuffers = [[NSMutableArray alloc] init]; 412 | while ( 1 ) 413 | { 414 | CVPixelBufferRef pixelBuffer = NULL; 415 | OSStatus err = CVPixelBufferPoolCreatePixelBufferWithAuxAttributes( kCFAllocatorDefault, pool, auxAttributes, &pixelBuffer ); 416 | 417 | if ( err == kCVReturnWouldExceedAllocationThreshold ) { 418 | break; 419 | } 420 | assert( err == noErr ); 421 | 422 | [pixelBuffers addObject:CFBridgingRelease( pixelBuffer )]; 423 | } 424 | [pixelBuffers removeAllObjects]; 425 | } 426 | 427 | @end 428 | -------------------------------------------------------------------------------- /Classes/RosyWriterRenderer.h: -------------------------------------------------------------------------------- 1 | 2 | /* 3 | Copyright (C) 2016 Apple Inc. All Rights Reserved. 
4 | See LICENSE.txt for this sample’s licensing information 5 | 6 | Abstract: 7 | A generic protocol for renderer objects used by RosyWriterCapturePipeline 8 | */ 9 | 10 | #import 11 | #import 12 | #import 13 | 14 | @protocol RosyWriterRenderer 15 | 16 | @required 17 | 18 | /* Format/Processing Requirements */ 19 | @property(nonatomic, readonly) BOOL operatesInPlace; // When YES the input pixel buffer is written to by the renderer instead of writing the result to a new pixel buffer. 20 | @property(nonatomic, readonly) FourCharCode inputPixelFormat; // One of 420f, 420v, or BGRA 21 | 22 | /* Resource Lifecycle */ 23 | // Prepare and destroy expensive resources inside these callbacks. 24 | // The outputRetainedBufferCountHint tells out of place renderers how many of their output buffers will be held onto by the downstream pipeline at one time. 25 | // This can be used by the renderer to size and preallocate their pools. 26 | - (void)prepareForInputWithFormatDescription:(CMFormatDescriptionRef)inputFormatDescription outputRetainedBufferCountHint:(size_t)outputRetainedBufferCountHint; 27 | - (void)reset; 28 | 29 | /* Rendering */ 30 | // Renderers which operate in place should return the input pixel buffer with a +1 retain count. 31 | // Renderers which operate out of place should create a pixel buffer to return from a pool they own. 32 | // When rendering to a pixel buffer with the GPU it is not necessary to block until rendering has completed before returning. 33 | // It is sufficient to call glFlush() to ensure that the commands have been flushed to the GPU. 34 | - (CVPixelBufferRef)copyRenderedPixelBuffer:(CVPixelBufferRef)pixelBuffer; 35 | 36 | @optional 37 | 38 | // This property must be implemented if operatesInPlace is NO and the output pixel buffers have a different format description than the input. 39 | // If implemented a non-NULL value must be returned once the renderer has been prepared (can be NULL after being reset). 
40 | @property(nonatomic, readonly) CMFormatDescriptionRef __attribute__((NSObject)) outputFormatDescription; 41 | 42 | @end 43 | -------------------------------------------------------------------------------- /Classes/RosyWriterViewController+Helper.h: -------------------------------------------------------------------------------- 1 | 2 | #import "RosyWriterViewController.h" 3 | #import 4 | 5 | @interface RosyWriterViewController (Helper) 6 | 7 | - (CGPoint)convertToPointOfInterestFromViewCoordinates:(CGPoint)viewCoordinates 8 | previewLayer:(AVCaptureVideoPreviewLayer *)previewLayer 9 | ports:(NSArray *)ports; 10 | 11 | - (UIImage *)cropImage:(UIImage *)image usingPreviewLayer:(AVCaptureVideoPreviewLayer *)previewLayer; 12 | 13 | @end 14 | -------------------------------------------------------------------------------- /Classes/RosyWriterViewController+Helper.m: -------------------------------------------------------------------------------- 1 | 2 | #import "RosyWriterViewController+Helper.h" 3 | 4 | @implementation RosyWriterViewController (Helper) 5 | 6 | - (CGPoint)convertToPointOfInterestFromViewCoordinates:(CGPoint)viewCoordinates 7 | previewLayer:(AVCaptureVideoPreviewLayer *)previewLayer 8 | ports:(NSArray *)ports 9 | { 10 | CGPoint pointOfInterest = CGPointMake(.5f, .5f); 11 | CGSize frameSize = previewLayer.frame.size; 12 | 13 | if ( [previewLayer.videoGravity isEqualToString:AVLayerVideoGravityResize] ) { 14 | pointOfInterest = CGPointMake(viewCoordinates.y / frameSize.height, 1.f - (viewCoordinates.x / frameSize.width)); 15 | } else { 16 | CGRect cleanAperture; 17 | for (AVCaptureInputPort *port in ports) { 18 | if (port.mediaType == AVMediaTypeVideo) { 19 | cleanAperture = CMVideoFormatDescriptionGetCleanAperture([port formatDescription], YES); 20 | CGSize apertureSize = cleanAperture.size; 21 | CGPoint point = viewCoordinates; 22 | 23 | CGFloat apertureRatio = apertureSize.height / apertureSize.width; 24 | CGFloat viewRatio = frameSize.width / 
frameSize.height; 25 | CGFloat xc = .5f; 26 | CGFloat yc = .5f; 27 | 28 | if ( [previewLayer.videoGravity isEqualToString:AVLayerVideoGravityResizeAspect] ) { 29 | if (viewRatio > apertureRatio) { 30 | CGFloat y2 = frameSize.height; 31 | CGFloat x2 = frameSize.height * apertureRatio; 32 | CGFloat x1 = frameSize.width; 33 | CGFloat blackBar = (x1 - x2) / 2; 34 | if (point.x >= blackBar && point.x <= blackBar + x2) { 35 | xc = point.y / y2; 36 | yc = 1.f - ((point.x - blackBar) / x2); 37 | } 38 | } else { 39 | CGFloat y2 = frameSize.width / apertureRatio; 40 | CGFloat y1 = frameSize.height; 41 | CGFloat x2 = frameSize.width; 42 | CGFloat blackBar = (y1 - y2) / 2; 43 | if (point.y >= blackBar && point.y <= blackBar + y2) { 44 | xc = ((point.y - blackBar) / y2); 45 | yc = 1.f - (point.x / x2); 46 | } 47 | } 48 | } else if ([previewLayer.videoGravity isEqualToString:AVLayerVideoGravityResizeAspectFill]) { 49 | if (viewRatio > apertureRatio) { 50 | CGFloat y2 = apertureSize.width * (frameSize.width / apertureSize.height); 51 | xc = (point.y + ((y2 - frameSize.height) / 2.f)) / y2; 52 | yc = (frameSize.width - point.x) / frameSize.width; 53 | } else { 54 | CGFloat x2 = apertureSize.height * (frameSize.height / apertureSize.width); 55 | yc = 1.f - ((point.x + ((x2 - frameSize.width) / 2)) / x2); 56 | xc = point.y / frameSize.height; 57 | } 58 | } 59 | 60 | pointOfInterest = CGPointMake(xc, yc); 61 | break; 62 | } 63 | } 64 | } 65 | 66 | return pointOfInterest; 67 | } 68 | 69 | - (UIImage *)cropImage:(UIImage *)image usingPreviewLayer:(AVCaptureVideoPreviewLayer *)previewLayer 70 | { 71 | CGRect previewBounds = previewLayer.bounds; 72 | CGRect outputRect = [previewLayer metadataOutputRectOfInterestForRect:previewBounds]; 73 | 74 | CGImageRef takenCGImage = image.CGImage; 75 | size_t width = CGImageGetWidth(takenCGImage); 76 | size_t height = CGImageGetHeight(takenCGImage); 77 | CGRect cropRect = CGRectMake(outputRect.origin.x * width, outputRect.origin.y * height, 78 | 
outputRect.size.width * width, outputRect.size.height * height); 79 | 80 | CGImageRef cropCGImage = CGImageCreateWithImageInRect(takenCGImage, cropRect); 81 | image = [UIImage imageWithCGImage:cropCGImage scale:1 orientation:image.imageOrientation]; 82 | CGImageRelease(cropCGImage); 83 | 84 | return image; 85 | } 86 | 87 | @end 88 | -------------------------------------------------------------------------------- /Classes/RosyWriterViewController.h: -------------------------------------------------------------------------------- 1 | 2 | /* 3 | Copyright (C) 2016 Apple Inc. All Rights Reserved. 4 | See LICENSE.txt for this sample’s licensing information 5 | 6 | Abstract: 7 | View controller for camera interface 8 | */ 9 | 10 | 11 | #import 12 | 13 | 14 | @interface RosyWriterViewController : UIViewController 15 | 16 | 17 | /** 18 | * Set NO if you don't want ot enable user triggered focusing. Enabled by default. 19 | */ 20 | @property (nonatomic) BOOL tapToFocus; 21 | 22 | /** 23 | * Alter the layer and the animation displayed when the user taps on screen. 24 | * @param layer Layer to be displayed 25 | * @param animation to be applied after the layer is shown 26 | */ 27 | - (void)alterFocusBox:(CALayer *)layer animation:(CAAnimation *)animation; 28 | 29 | @end 30 | -------------------------------------------------------------------------------- /Classes/Utilities/CameraControlFunctions.h: -------------------------------------------------------------------------------- 1 | #import 2 | #import 3 | 4 | void computeExpectedExposureTimeAndIso(AVCaptureDeviceFormat *format, 5 | CMTime *oldDuration, float oldISO, 6 | CMTime *expectedDuration, float *expectedISO); 7 | 8 | /** 9 | Warn: This function does not return meaningful path at the moment. 
10 | */ 11 | NSString *getAssetPath(NSString * assetLocalIdentifier); 12 | -------------------------------------------------------------------------------- /Classes/Utilities/CameraControlFunctions.m: -------------------------------------------------------------------------------- 1 | 2 | #import "CameraControlFunctions.h" 3 | #import 4 | 5 | const int64_t kDesiredExposureTimeMillisec = 5; 6 | 7 | void computeExpectedExposureTimeAndIso(AVCaptureDeviceFormat *format, 8 | CMTime *oldDuration, float oldISO, 9 | CMTime *expectedDuration, float *expectedISO) { 10 | // eg., for iphone 6S format.minExposureDuration 1e-2 ms 11 | // format.maxExposureDuration 333.3 ms format.minISO 23 format.maxISO 736 12 | CMTime desiredDuration = CMTimeMake(kDesiredExposureTimeMillisec, 1000); 13 | float ratio = (float)(CMTimeGetSeconds(*oldDuration)/CMTimeGetSeconds(desiredDuration)); 14 | NSLog(@"Present exposure duration %.5f ms and ISO %.5f", 15 | CMTimeGetSeconds(*oldDuration)*1000, oldISO); 16 | 17 | if (CMTIME_COMPARE_INLINE(desiredDuration, >, *oldDuration)) { 18 | *expectedDuration = *oldDuration; 19 | *expectedISO = oldISO; 20 | } else { 21 | *expectedDuration = desiredDuration; 22 | *expectedISO = oldISO * ratio; 23 | if (*expectedISO > format.maxISO) 24 | *expectedISO = format.maxISO; 25 | else if (*expectedISO < format.minISO) 26 | *expectedISO = format.minISO; 27 | } 28 | NSLog(@"Camera old exposure duration %.5f and ISO %.3f," 29 | " desired exposure duration %.5f and ISO %.3f and ratio %.3f", 30 | CMTimeGetSeconds(*oldDuration), oldISO, 31 | CMTimeGetSeconds(*expectedDuration), *expectedISO, ratio); 32 | } 33 | 34 | NSString *getAssetPath(NSString * assetLocalIdentifier) { 35 | // see: https://stackoverflow.com/questions/27854937/ios8-photos-framework-how-to-get-the-nameor-filename-of-a-phasset 36 | // PHFetchOptions *fetchOptions = [[PHFetchOptions alloc] init]; 37 | // fetchOptions.sortDescriptors = @[[NSSortDescriptor sortDescriptorWithKey:@"creationDate" 
ascending:YES]]; 38 | PHFetchResult *fetchResult = [PHAsset fetchAssetsWithLocalIdentifiers:@[assetLocalIdentifier] options:nil]; 39 | PHAsset *asset = nil; 40 | if (fetchResult != nil && fetchResult.count > 0) { 41 | // get last photo from Photos 42 | asset = [fetchResult lastObject]; 43 | } 44 | __block NSString *path = nil; 45 | if (asset) { 46 | // get photo info from this asset 47 | // for iOS 8 48 | PHImageRequestOptions * imageRequestOptions = [[PHImageRequestOptions alloc] init]; 49 | imageRequestOptions.synchronous = YES; 50 | // Warn: Because by default, requestImageDataForAsset method executes asynchronously, the following way to pass out path will not work. 51 | [[PHImageManager defaultManager] 52 | requestImageDataForAsset:asset 53 | options:imageRequestOptions 54 | resultHandler:^(NSData *imageData, NSString *dataUTI, 55 | UIImageOrientation orientation, 56 | NSDictionary *info) 57 | { 58 | NSURL * fileURL = [info objectForKey:@"PHImageFileURLKey"]; 59 | if (fileURL) { 60 | // path looks like this - 61 | // file:///var/mobile/Media/DCIM/###APPLE/IMG_####.JPG 62 | path = [[NSFileManager defaultManager] displayNameAtPath:[fileURL path]]; 63 | NSLog(@"PHImageFile path %@", path); 64 | } 65 | }]; 66 | // for iOS 9+ 67 | // https://stackoverflow.com/questions/32687403/phasset-get-original-file-name/32706194 68 | // this path looks like "Movie.MP4" 69 | NSArray *resources = [PHAssetResource assetResourcesForAsset:asset]; 70 | path = ((PHAssetResource*)resources[0]).originalFilename; 71 | } 72 | return path; 73 | } 74 | -------------------------------------------------------------------------------- /Classes/Utilities/GL/ShaderUtilities.c: -------------------------------------------------------------------------------- 1 | 2 | /* 3 | Copyright (C) 2016 Apple Inc. All Rights Reserved. 
4 | See LICENSE.txt for this sample’s licensing information 5 | 6 | Abstract: 7 | Shader compiler and linker utilities 8 | */ 9 | 10 | #include 11 | #include 12 | #include 13 | #include 14 | #include "ShaderUtilities.h" 15 | 16 | #define LogInfo printf 17 | #define LogError printf 18 | 19 | /* Compile a shader from the provided source(s) */ 20 | GLint glueCompileShader(GLenum target, GLsizei count, const GLchar **sources, GLuint *shader) 21 | { 22 | GLint status; 23 | 24 | *shader = glCreateShader(target); 25 | glShaderSource(*shader, count, sources, NULL); 26 | glCompileShader(*shader); 27 | 28 | #if defined(DEBUG) 29 | GLint logLength = 0; 30 | glGetShaderiv(*shader, GL_INFO_LOG_LENGTH, &logLength); 31 | if (logLength > 0) 32 | { 33 | GLchar *log = (GLchar *)malloc(logLength); 34 | glGetShaderInfoLog(*shader, logLength, &logLength, log); 35 | LogInfo("Shader compile log:\n%s", log); 36 | free(log); 37 | } 38 | #endif 39 | 40 | glGetShaderiv(*shader, GL_COMPILE_STATUS, &status); 41 | if (status == 0) 42 | { 43 | int i; 44 | 45 | LogError("Failed to compile shader:\n"); 46 | for (i = 0; i < count; i++) 47 | LogInfo("%s", sources[i]); 48 | } 49 | 50 | return status; 51 | } 52 | 53 | 54 | /* Link a program with all currently attached shaders */ 55 | GLint glueLinkProgram(GLuint program) 56 | { 57 | GLint status; 58 | 59 | glLinkProgram(program); 60 | 61 | #if defined(DEBUG) 62 | GLint logLength = 0; 63 | glGetProgramiv(program, GL_INFO_LOG_LENGTH, &logLength); 64 | if (logLength > 0) 65 | { 66 | GLchar *log = (GLchar *)malloc(logLength); 67 | glGetProgramInfoLog(program, logLength, &logLength, log); 68 | LogInfo("Program link log:\n%s", log); 69 | free(log); 70 | } 71 | #endif 72 | 73 | glGetProgramiv(program, GL_LINK_STATUS, &status); 74 | if (status == 0) 75 | LogError("Failed to link program %d", program); 76 | 77 | return status; 78 | } 79 | 80 | 81 | /* Validate a program (for i.e. 
inconsistent samplers) */ 82 | GLint glueValidateProgram(GLuint program) 83 | { 84 | GLint status; 85 | 86 | glValidateProgram(program); 87 | 88 | #if defined(DEBUG) 89 | GLint logLength = 0; 90 | glGetProgramiv(program, GL_INFO_LOG_LENGTH, &logLength); 91 | if (logLength > 0) 92 | { 93 | GLchar *log = (GLchar *)malloc(logLength); 94 | glGetProgramInfoLog(program, logLength, &logLength, log); 95 | LogInfo("Program validate log:\n%s", log); 96 | free(log); 97 | } 98 | #endif 99 | 100 | glGetProgramiv(program, GL_VALIDATE_STATUS, &status); 101 | if (status == 0) 102 | LogError("Failed to validate program %d", program); 103 | 104 | return status; 105 | } 106 | 107 | 108 | /* Return named uniform location after linking */ 109 | GLint glueGetUniformLocation(GLuint program, const GLchar *uniformName) 110 | { 111 | GLint loc; 112 | 113 | loc = glGetUniformLocation(program, uniformName); 114 | 115 | return loc; 116 | } 117 | 118 | 119 | /* Convenience wrapper that compiles, links, enumerates uniforms and attribs */ 120 | GLint glueCreateProgram(const GLchar *vertSource, const GLchar *fragSource, 121 | GLsizei attribNameCt, const GLchar **attribNames, 122 | const GLint *attribLocations, 123 | GLsizei uniformNameCt, const GLchar **uniformNames, 124 | GLint *uniformLocations, 125 | GLuint *program) 126 | { 127 | GLuint vertShader = 0, fragShader = 0, prog = 0, status = 1, i; 128 | 129 | // Create shader program 130 | prog = glCreateProgram(); 131 | 132 | // Create and compile vertex shader 133 | status *= glueCompileShader(GL_VERTEX_SHADER, 1, &vertSource, &vertShader); 134 | 135 | // Create and compile fragment shader 136 | status *= glueCompileShader(GL_FRAGMENT_SHADER, 1, &fragSource, &fragShader); 137 | 138 | // Attach vertex shader to program 139 | glAttachShader(prog, vertShader); 140 | 141 | // Attach fragment shader to program 142 | glAttachShader(prog, fragShader); 143 | 144 | // Bind attribute locations 145 | // This needs to be done prior to linking 146 | for (i = 
0; i < attribNameCt; i++) 147 | { 148 | if(strlen(attribNames[i])) 149 | glBindAttribLocation(prog, attribLocations[i], attribNames[i]); 150 | } 151 | 152 | // Link program 153 | status *= glueLinkProgram(prog); 154 | 155 | // Get locations of uniforms 156 | if (status) 157 | { 158 | for(i = 0; i < uniformNameCt; i++) 159 | { 160 | if(strlen(uniformNames[i])) 161 | uniformLocations[i] = glueGetUniformLocation(prog, uniformNames[i]); 162 | } 163 | *program = prog; 164 | } 165 | 166 | // Release vertex and fragment shaders 167 | if (vertShader) 168 | glDeleteShader(vertShader); 169 | if (fragShader) 170 | glDeleteShader(fragShader); 171 | 172 | return status; 173 | } 174 | -------------------------------------------------------------------------------- /Classes/Utilities/GL/ShaderUtilities.h: -------------------------------------------------------------------------------- 1 | 2 | /* 3 | Copyright (C) 2016 Apple Inc. All Rights Reserved. 4 | See LICENSE.txt for this sample’s licensing information 5 | 6 | Abstract: 7 | Shader compiler and linker utilities 8 | */ 9 | 10 | 11 | #ifndef RosyWriter_ShaderUtilities_h 12 | #define RosyWriter_ShaderUtilities_h 13 | 14 | #include 15 | #include 16 | 17 | GLint glueCompileShader(GLenum target, GLsizei count, const GLchar **sources, GLuint *shader); 18 | GLint glueLinkProgram(GLuint program); 19 | GLint glueValidateProgram(GLuint program); 20 | GLint glueGetUniformLocation(GLuint program, const GLchar *name); 21 | 22 | GLint glueCreateProgram(const GLchar *vertSource, const GLchar *fragSource, 23 | GLsizei attribNameCt, const GLchar **attribNames, 24 | const GLint *attribLocations, 25 | GLsizei uniformNameCt, const GLchar **uniformNames, 26 | GLint *uniformLocations, 27 | GLuint *program); 28 | 29 | #endif 30 | -------------------------------------------------------------------------------- /Classes/Utilities/GL/matrix.c: -------------------------------------------------------------------------------- 1 | 2 | /* 3 | Copyright (C) 
2016 Apple Inc. All Rights Reserved. 4 | See LICENSE.txt for this sample’s licensing information 5 | 6 | Abstract: 7 | Simple 4x4 matrix computations 8 | */ 9 | 10 | #include 11 | #include 12 | #include "matrix.h" 13 | 14 | /* 15 | NOTE: These functions are created for your convenience but the matrix algorithms 16 | are not optimized. You are encouraged to do additional research on your own to 17 | implement a more robust numerical algorithm. 18 | */ 19 | 20 | void mat4f_LoadIdentity(float* m) 21 | { 22 | m[0] = 1.0f; 23 | m[1] = 0.0f; 24 | m[2] = 0.0f; 25 | m[3] = 0.0f; 26 | 27 | m[4] = 0.0f; 28 | m[5] = 1.0f; 29 | m[6] = 0.0f; 30 | m[7] = 0.0f; 31 | 32 | m[8] = 0.0f; 33 | m[9] = 0.0f; 34 | m[10] = 1.0f; 35 | m[11] = 0.0f; 36 | 37 | m[12] = 0.0f; 38 | m[13] = 0.0f; 39 | m[14] = 0.0f; 40 | m[15] = 1.0f; 41 | } 42 | 43 | // s is a 3D vector 44 | void mat4f_LoadScale(float* s, float* m) 45 | { 46 | m[0] = s[0]; 47 | m[1] = 0.0f; 48 | m[2] = 0.0f; 49 | m[3] = 0.0f; 50 | 51 | m[4] = 0.0f; 52 | m[5] = s[1]; 53 | m[6] = 0.0f; 54 | m[7] = 0.0f; 55 | 56 | m[8] = 0.0f; 57 | m[9] = 0.0f; 58 | m[10] = s[2]; 59 | m[11] = 0.0f; 60 | 61 | m[12] = 0.0f; 62 | m[13] = 0.0f; 63 | m[14] = 0.0f; 64 | m[15] = 1.0f; 65 | } 66 | 67 | void mat4f_LoadXRotation(float radians, float* m) 68 | { 69 | float cosrad = cosf(radians); 70 | float sinrad = sinf(radians); 71 | 72 | m[0] = 1.0f; 73 | m[1] = 0.0f; 74 | m[2] = 0.0f; 75 | m[3] = 0.0f; 76 | 77 | m[4] = 0.0f; 78 | m[5] = cosrad; 79 | m[6] = sinrad; 80 | m[7] = 0.0f; 81 | 82 | m[8] = 0.0f; 83 | m[9] = -sinrad; 84 | m[10] = cosrad; 85 | m[11] = 0.0f; 86 | 87 | m[12] = 0.0f; 88 | m[13] = 0.0f; 89 | m[14] = 0.0f; 90 | m[15] = 1.0f; 91 | } 92 | 93 | void mat4f_LoadYRotation(float radians, float* mout) 94 | { 95 | float cosrad = cosf(radians); 96 | float sinrad = sinf(radians); 97 | 98 | mout[0] = cosrad; 99 | mout[1] = 0.0f; 100 | mout[2] = -sinrad; 101 | mout[3] = 0.0f; 102 | 103 | mout[4] = 0.0f; 104 | mout[5] = 1.0f; 105 | mout[6] = 0.0f; 106 
| mout[7] = 0.0f; 107 | 108 | mout[8] = sinrad; 109 | mout[9] = 0.0f; 110 | mout[10] = cosrad; 111 | mout[11] = 0.0f; 112 | 113 | mout[12] = 0.0f; 114 | mout[13] = 0.0f; 115 | mout[14] = 0.0f; 116 | mout[15] = 1.0f; 117 | } 118 | 119 | void mat4f_LoadZRotation(float radians, float* mout) 120 | { 121 | float cosrad = cosf(radians); 122 | float sinrad = sinf(radians); 123 | 124 | mout[0] = cosrad; 125 | mout[1] = sinrad; 126 | mout[2] = 0.0f; 127 | mout[3] = 0.0f; 128 | 129 | mout[4] = -sinrad; 130 | mout[5] = cosrad; 131 | mout[6] = 0.0f; 132 | mout[7] = 0.0f; 133 | 134 | mout[8] = 0.0f; 135 | mout[9] = 0.0f; 136 | mout[10] = 1.0f; 137 | mout[11] = 0.0f; 138 | 139 | mout[12] = 0.0f; 140 | mout[13] = 0.0f; 141 | mout[14] = 0.0f; 142 | mout[15] = 1.0f; 143 | } 144 | 145 | // v is a 3D vector 146 | void mat4f_LoadTranslation(float* v, float* mout) 147 | { 148 | mout[0] = 1.0f; 149 | mout[1] = 0.0f; 150 | mout[2] = 0.0f; 151 | mout[3] = 0.0f; 152 | 153 | mout[4] = 0.0f; 154 | mout[5] = 1.0f; 155 | mout[6] = 0.0f; 156 | mout[7] = 0.0f; 157 | 158 | mout[8] = 0.0f; 159 | mout[9] = 0.0f; 160 | mout[10] = 1.0f; 161 | mout[11] = 0.0f; 162 | 163 | mout[12] = v[0]; 164 | mout[13] = v[1]; 165 | mout[14] = v[2]; 166 | mout[15] = 1.0f; 167 | } 168 | 169 | void mat4f_LoadPerspective(float fov_radians, float aspect, float zNear, float zFar, float* mout) 170 | { 171 | float f = 1.0f / tanf(fov_radians/2.0f); 172 | 173 | mout[0] = f / aspect; 174 | mout[1] = 0.0f; 175 | mout[2] = 0.0f; 176 | mout[3] = 0.0f; 177 | 178 | mout[4] = 0.0f; 179 | mout[5] = f; 180 | mout[6] = 0.0f; 181 | mout[7] = 0.0f; 182 | 183 | mout[8] = 0.0f; 184 | mout[9] = 0.0f; 185 | mout[10] = (zFar+zNear) / (zNear-zFar); 186 | mout[11] = -1.0f; 187 | 188 | mout[12] = 0.0f; 189 | mout[13] = 0.0f; 190 | mout[14] = 2 * zFar * zNear / (zNear-zFar); 191 | mout[15] = 0.0f; 192 | } 193 | 194 | void mat4f_LoadOrtho(float left, float right, float bottom, float top, float near, float far, float* mout) 195 | { 196 | float r_l 
= right - left; 197 | float t_b = top - bottom; 198 | float f_n = far - near; 199 | float tx = - (right + left) / (right - left); 200 | float ty = - (top + bottom) / (top - bottom); 201 | float tz = - (far + near) / (far - near); 202 | 203 | mout[0] = 2.0f / r_l; 204 | mout[1] = 0.0f; 205 | mout[2] = 0.0f; 206 | mout[3] = 0.0f; 207 | 208 | mout[4] = 0.0f; 209 | mout[5] = 2.0f / t_b; 210 | mout[6] = 0.0f; 211 | mout[7] = 0.0f; 212 | 213 | mout[8] = 0.0f; 214 | mout[9] = 0.0f; 215 | mout[10] = -2.0f / f_n; 216 | mout[11] = 0.0f; 217 | 218 | mout[12] = tx; 219 | mout[13] = ty; 220 | mout[14] = tz; 221 | mout[15] = 1.0f; 222 | } 223 | 224 | void mat4f_MultiplyMat4f(const float* a, const float* b, float* mout) 225 | { 226 | mout[0] = a[0] * b[0] + a[4] * b[1] + a[8] * b[2] + a[12] * b[3]; 227 | mout[1] = a[1] * b[0] + a[5] * b[1] + a[9] * b[2] + a[13] * b[3]; 228 | mout[2] = a[2] * b[0] + a[6] * b[1] + a[10] * b[2] + a[14] * b[3]; 229 | mout[3] = a[3] * b[0] + a[7] * b[1] + a[11] * b[2] + a[15] * b[3]; 230 | 231 | mout[4] = a[0] * b[4] + a[4] * b[5] + a[8] * b[6] + a[12] * b[7]; 232 | mout[5] = a[1] * b[4] + a[5] * b[5] + a[9] * b[6] + a[13] * b[7]; 233 | mout[6] = a[2] * b[4] + a[6] * b[5] + a[10] * b[6] + a[14] * b[7]; 234 | mout[7] = a[3] * b[4] + a[7] * b[5] + a[11] * b[6] + a[15] * b[7]; 235 | 236 | mout[8] = a[0] * b[8] + a[4] * b[9] + a[8] * b[10] + a[12] * b[11]; 237 | mout[9] = a[1] * b[8] + a[5] * b[9] + a[9] * b[10] + a[13] * b[11]; 238 | mout[10] = a[2] * b[8] + a[6] * b[9] + a[10] * b[10] + a[14] * b[11]; 239 | mout[11] = a[3] * b[8] + a[7] * b[9] + a[11] * b[10] + a[15] * b[11]; 240 | 241 | mout[12] = a[0] * b[12] + a[4] * b[13] + a[8] * b[14] + a[12] * b[15]; 242 | mout[13] = a[1] * b[12] + a[5] * b[13] + a[9] * b[14] + a[13] * b[15]; 243 | mout[14] = a[2] * b[12] + a[6] * b[13] + a[10] * b[14] + a[14] * b[15]; 244 | mout[15] = a[3] * b[12] + a[7] * b[13] + a[11] * b[14] + a[15] * b[15]; 245 | } 246 | 
-------------------------------------------------------------------------------- /Classes/Utilities/GL/matrix.h: -------------------------------------------------------------------------------- 1 | 2 | /* 3 | Copyright (C) 2016 Apple Inc. All Rights Reserved. 4 | See LICENSE.txt for this sample’s licensing information 5 | 6 | Abstract: 7 | Simple 4x4 matrix computations 8 | */ 9 | 10 | #ifndef MATRIX_H 11 | #define MATRIX_H 12 | 13 | void mat4f_LoadIdentity(float* m); 14 | void mat4f_LoadScale(float* s, float* m); 15 | 16 | void mat4f_LoadXRotation(float radians, float* mout); 17 | void mat4f_LoadYRotation(float radians, float* mout); 18 | void mat4f_LoadZRotation(float radians, float* mout); 19 | 20 | void mat4f_LoadTranslation(float* t, float* mout); 21 | 22 | void mat4f_LoadPerspective(float fov_radians, float aspect, float zNear, float zFar, float* mout); 23 | void mat4f_LoadOrtho(float left, float right, float bottom, float top, float near, float far, float* mout); 24 | 25 | void mat4f_MultiplyMat4f(const float* a, const float* b, float* mout); 26 | 27 | #endif /* MATRIX_H */ 28 | -------------------------------------------------------------------------------- /Classes/Utilities/MovieRecorder.h: -------------------------------------------------------------------------------- 1 | 2 | /* 3 | Copyright (C) 2016 Apple Inc. All Rights Reserved. 
4 | See LICENSE.txt for this sample’s licensing information 5 | 6 | Abstract: 7 | Real-time movie recorder which is totally non-blocking 8 | */ 9 | 10 | 11 | #import 12 | 13 | #import 14 | #import 15 | 16 | @protocol MovieRecorderDelegate; 17 | 18 | @interface MovieRecorder : NSObject 19 | 20 | @property(nonatomic, readonly) NSMutableArray *savedFrameTimestamps; 21 | @property(nonatomic, readonly) NSMutableArray *savedFrameIntrinsics; 22 | @property(nonatomic, readonly) NSMutableArray *savedExposureDurations; 23 | 24 | @property(nonatomic, readonly) NSMutableArray *savedARPose; 25 | 26 | - (instancetype)initWithURL:(NSURL *)URL delegate:(id)delegate callbackQueue:(dispatch_queue_t)queue; // delegate is weak referenced 27 | 28 | // Only one audio and video track each are allowed. 29 | - (void)addVideoTrackWithSourceFormatDescription:(CMFormatDescriptionRef)formatDescription transform:(CGAffineTransform)transform settings:(NSDictionary *)videoSettings; // see AVVideoSettings.h for settings keys/values 30 | - (void)addAudioTrackWithSourceFormatDescription:(CMFormatDescriptionRef)formatDescription settings:(NSDictionary *)audioSettings; // see AVAudioSettings.h for settings keys/values 31 | 32 | // Asynchronous, might take several hundred milliseconds. 33 | // When finished the delegate's recorderDidFinishPreparing: or recorder:didFailWithError: method will be called. 34 | - (void)prepareToRecord; 35 | 36 | // - (void)appendVideoSampleBuffer:(CMSampleBufferRef)sampleBuffer; 37 | 38 | - (void)appendVideoPixelBuffer:(CVPixelBufferRef)pixelBuffer withPresentationTime:(CMTime)presentationTime withIntrinsicMat:(NSArray *)intrinsic3x3 withExposureDuration:(int64_t)exposureDuration; 39 | 40 | - (void)appendAudioSampleBuffer:(CMSampleBufferRef)sampleBuffer; 41 | 42 | // Asynchronous, might take several hundred milliseconds. 43 | // When finished the delegate's recorderDidFinishRecording: or recorder:didFailWithError: method will be called. 
- (void)finishRecording;

@end

// Delegate callbacks; invoked asynchronously on the queue passed to the
// initializer.
@protocol MovieRecorderDelegate
@required
- (void)movieRecorderDidFinishPreparing:(MovieRecorder *)recorder;
- (void)movieRecorder:(MovieRecorder *)recorder didFailWithError:(NSError *)error;
- (void)movieRecorderDidFinishRecording:(MovieRecorder *)recorder;
@end

--------------------------------------------------------------------------------
/Classes/Utilities/MovieRecorder.m:
--------------------------------------------------------------------------------

/*
 Copyright (C) 2016 Apple Inc. All Rights Reserved.
 See LICENSE.txt for this sample’s licensing information
 
 Abstract:
 Real-time movie recorder which is totally non-blocking
 */

#import "MovieRecorder.h"

#import
#import

#import
#import
#import
// NOTE(review): framework names in the #imports above were stripped by the
// export — restore from the original file.

#import "VideoTimeConverter.h"

#define LOG_STATUS_TRANSITIONS 0

typedef NS_ENUM( NSInteger, MovieRecorderStatus ) {
    MovieRecorderStatusIdle = 0,
    MovieRecorderStatusPreparingToRecord,
    MovieRecorderStatusRecording,
    MovieRecorderStatusFinishingRecordingPart1, // waiting for inflight buffers to be appended
    MovieRecorderStatusFinishingRecordingPart2, // calling finish writing on the asset writer
    MovieRecorderStatusFinished, // terminal state
    MovieRecorderStatusFailed // terminal state
}; // internal state machine


@interface MovieRecorder ()
{
    MovieRecorderStatus _status; // guarded by @synchronized( self )
    
    dispatch_queue_t _writingQueue; // serial; all appends and teardown run here
    
    NSURL *_URL;
    
    AVAssetWriter *_assetWriter;
    BOOL _haveStartedSession; // set once the first buffer starts the session
    
    CMFormatDescriptionRef _audioTrackSourceFormatDescription;
    NSDictionary *_audioTrackSettings;
    AVAssetWriterInput *_audioInput;
    
    CMFormatDescriptionRef _videoTrackSourceFormatDescription;
    CGAffineTransform _videoTrackTransform;
    NSDictionary *_videoTrackSettings;
    AVAssetWriterInput
*_videoInput;
    
    __weak id _delegate; // NOTE(review): protocol qualifier likely stripped by export
    dispatch_queue_t _delegateCallbackQueue;
}
@end

@implementation MovieRecorder

#pragma mark -
#pragma mark API

- (instancetype)initWithURL:(NSURL *)URL delegate:(id)delegate callbackQueue:(dispatch_queue_t)queue // delegate is weak referenced
{
    NSParameterAssert( delegate != nil );
    NSParameterAssert( queue != nil );
    NSParameterAssert( URL != nil );
    
    self = [super init];
    if ( self )
    {
        _writingQueue = dispatch_queue_create( "com.apple.sample.movierecorder.writing", DISPATCH_QUEUE_SERIAL );
        _videoTrackTransform = CGAffineTransformIdentity;
        _URL = URL;
        _delegate = delegate;
        _delegateCallbackQueue = queue;
        // Per-session metadata arrays exposed readonly in the header.
        _savedFrameTimestamps = [[NSMutableArray alloc] init];
        _savedFrameIntrinsics = [[NSMutableArray alloc] init];
        _savedExposureDurations = [[NSMutableArray alloc] init];
        _savedARPose = [[NSMutableArray alloc] init];
    }
    return self;
}

// Registers the (single) video track. Must be called while idle, before
// -prepareToRecord; throws on misuse (programmer error).
- (void)addVideoTrackWithSourceFormatDescription:(CMFormatDescriptionRef)formatDescription
                                       transform:(CGAffineTransform)transform
                                        settings:(NSDictionary *)videoSettings
{
    if ( formatDescription == NULL ) {
        @throw [NSException exceptionWithName:NSInvalidArgumentException reason:@"NULL format description" userInfo:nil];
        return;
    }
    
    @synchronized( self )
    {
        if ( _status != MovieRecorderStatusIdle ) {
            @throw [NSException exceptionWithName:NSInternalInconsistencyException reason:@"Cannot add tracks while not idle" userInfo:nil];
            return;
        }
        
        if ( _videoTrackSourceFormatDescription ) {
            @throw [NSException exceptionWithName:NSInternalInconsistencyException reason:@"Cannot add more than one video track" userInfo:nil];
            return;
        }
        
        _videoTrackSourceFormatDescription = (CMFormatDescriptionRef)CFRetain( formatDescription );
        _videoTrackTransform =
transform;
        _videoTrackSettings = [videoSettings copy];
    }
}

// Registers the (single) audio track. Must be called while idle, before
// -prepareToRecord; throws on misuse (programmer error).
- (void)addAudioTrackWithSourceFormatDescription:(CMFormatDescriptionRef)formatDescription settings:(NSDictionary *)audioSettings
{
    if ( formatDescription == NULL ) {
        @throw [NSException exceptionWithName:NSInvalidArgumentException reason:@"NULL format description" userInfo:nil];
        return;
    }
    
    @synchronized( self )
    {
        if ( _status != MovieRecorderStatusIdle ) {
            @throw [NSException exceptionWithName:NSInternalInconsistencyException reason:@"Cannot add tracks while not idle" userInfo:nil];
            return;
        }
        
        if ( _audioTrackSourceFormatDescription ) {
            @throw [NSException exceptionWithName:NSInternalInconsistencyException reason:@"Cannot add more than one audio track" userInfo:nil];
            return;
        }
        
        _audioTrackSourceFormatDescription = (CMFormatDescriptionRef)CFRetain( formatDescription );
        _audioTrackSettings = [audioSettings copy];
    }
}

// Kicks off asynchronous writer setup on a low-priority global queue; the
// delegate hears the result via movieRecorderDidFinishPreparing: or
// movieRecorder:didFailWithError:.
- (void)prepareToRecord
{
    @synchronized( self )
    {
        if ( _status != MovieRecorderStatusIdle ) {
            @throw [NSException exceptionWithName:NSInternalInconsistencyException reason:@"Already prepared, cannot prepare again" userInfo:nil];
            return;
        }
        // Reset all per-session metadata so a recorder reused across sessions
        // does not report stale data.
        if ([_savedFrameTimestamps count])
            [_savedFrameTimestamps removeAllObjects];
        if ([_savedFrameIntrinsics count])
            [_savedFrameIntrinsics removeAllObjects];
        if ([_savedExposureDurations count])
            [_savedExposureDurations removeAllObjects];
        // Bug fix: _savedARPose is allocated in -init and exposed in the
        // header alongside the three arrays above, but was never cleared
        // here, so poses from a previous session leaked into the next one.
        if ([_savedARPose count])
            [_savedARPose removeAllObjects];
        [self transitionToStatus:MovieRecorderStatusPreparingToRecord error:nil];
    }
    
    dispatch_async( dispatch_get_global_queue( DISPATCH_QUEUE_PRIORITY_LOW, 0 ), ^{
        
        @autoreleasepool
        {
            NSError *error = nil;
            // AVAssetWriter will not write over an existing file.
            // Remove any stale file first; AVAssetWriter refuses to overwrite.
            [[NSFileManager defaultManager] removeItemAtURL:self->_URL error:NULL];
            
            self->_assetWriter = [[AVAssetWriter alloc] initWithURL:self->_URL fileType:AVFileTypeQuickTimeMovie error:&error];
            
            // Create and add inputs
            if ( ! error && self->_videoTrackSourceFormatDescription ) {
                [self setupAssetWriterVideoInputWithSourceFormatDescription:self->_videoTrackSourceFormatDescription transform:self->_videoTrackTransform settings:self->_videoTrackSettings error:&error];
            }
            
            if ( ! error && self->_audioTrackSourceFormatDescription ) {
                [self setupAssetWriterAudioInputWithSourceFormatDescription:self->_audioTrackSourceFormatDescription settings:self->_audioTrackSettings error:&error];
            }
            
            if ( ! error ) {
                BOOL success = [self->_assetWriter startWriting];
                if ( ! success ) {
                    error = self->_assetWriter.error;
                }
            }
            
            @synchronized( self )
            {
                if ( error ) {
                    [self transitionToStatus:MovieRecorderStatusFailed error:error];
                }
                else {
                    [self transitionToStatus:MovieRecorderStatusRecording error:nil];
                }
            }
        }
    } );
}

//- (void)appendVideoSampleBuffer:(CMSampleBufferRef)sampleBuffer
//{
//    [self appendSampleBuffer:sampleBuffer ofMediaType:AVMediaTypeVideo];
//}

// Wraps the pixel buffer in a CMSampleBuffer stamped with presentationTime
// and forwards it (plus the per-frame metadata) to the common append path.
- (void)appendVideoPixelBuffer:(CVPixelBufferRef)pixelBuffer
          withPresentationTime:(CMTime)presentationTime
              withIntrinsicMat:(NSArray *)intrinsic3x3
          withExposureDuration:(int64_t)exposureDuration
{
    CMSampleBufferRef sampleBuffer = NULL;
    
    CMSampleTimingInfo timingInfo = {0,};
    timingInfo.duration = kCMTimeInvalid;
    timingInfo.decodeTimeStamp = kCMTimeInvalid;
    timingInfo.presentationTimeStamp = presentationTime;
    
    OSStatus err = CMSampleBufferCreateForImageBuffer( kCFAllocatorDefault, pixelBuffer, true, NULL, NULL, _videoTrackSourceFormatDescription,
&timingInfo, &sampleBuffer );
    if ( sampleBuffer ) {
        [self appendSampleBuffer:sampleBuffer ofMediaType:AVMediaTypeVideo withIntrinsicMat:intrinsic3x3 withExposureDuration:exposureDuration];
        CFRelease( sampleBuffer );
    }
    else {
        NSString *exceptionReason = [NSString stringWithFormat:@"sample buffer create failed (%i)", (int)err];
        @throw [NSException exceptionWithName:NSInvalidArgumentException reason:exceptionReason userInfo:nil];
    }
}

- (void)appendAudioSampleBuffer:(CMSampleBufferRef)sampleBuffer
{
    [self appendSampleBuffer:sampleBuffer ofMediaType:AVMediaTypeAudio];
}

// Validates the state machine, then finishes the writer asynchronously on the
// writing queue; the delegate hears the result via its callbacks.
- (void)finishRecording
{
    @synchronized( self )
    {
        BOOL shouldFinishRecording = NO;
        switch ( _status )
        {
            case MovieRecorderStatusIdle:
            case MovieRecorderStatusPreparingToRecord:
            case MovieRecorderStatusFinishingRecordingPart1:
            case MovieRecorderStatusFinishingRecordingPart2:
            case MovieRecorderStatusFinished:
                @throw [NSException exceptionWithName:NSInternalInconsistencyException reason:@"Not recording" userInfo:nil];
                break;
            case MovieRecorderStatusFailed:
                // From the client's perspective the movie recorder can asynchronously transition to an error state as the result of an append.
                // Because of this we are lenient when finishRecording is called and we are in an error state.
                NSLog( @"Recording has failed, nothing to do" );
                break;
            case MovieRecorderStatusRecording:
                shouldFinishRecording = YES;
                break;
        }
        
        if ( shouldFinishRecording ) {
            [self transitionToStatus:MovieRecorderStatusFinishingRecordingPart1 error:nil];
        }
        else {
            return;
        }
    }
    
    dispatch_async( _writingQueue, ^{
        
        @autoreleasepool
        {
            @synchronized( self )
            {
                // We may have transitioned to an error state as we appended inflight buffers.
// In that case there is nothing to do now.
                if ( self->_status != MovieRecorderStatusFinishingRecordingPart1 ) {
                    return;
                }
                
                // It is not safe to call -[AVAssetWriter finishWriting*] concurrently with -[AVAssetWriterInput appendSampleBuffer:]
                // We transition to MovieRecorderStatusFinishingRecordingPart2 while on _writingQueue, which guarantees that no more buffers will be appended.
                [self transitionToStatus:MovieRecorderStatusFinishingRecordingPart2 error:nil];
            }
            
            [self->_assetWriter finishWritingWithCompletionHandler:^{
                @synchronized( self )
                {
                    NSError *error = self->_assetWriter.error;
                    if ( error ) {
                        [self transitionToStatus:MovieRecorderStatusFailed error:error];
                    }
                    else {
                        [self transitionToStatus:MovieRecorderStatusFinished error:nil];
                    }
                }
            }];
        }
    } );
}

- (void)dealloc
{
    // Under ARC only the CF-typed ivars need manual release.
    if ( _audioTrackSourceFormatDescription ) {
        CFRelease( _audioTrackSourceFormatDescription );
    }
    
    if ( _videoTrackSourceFormatDescription ) {
        CFRelease( _videoTrackSourceFormatDescription );
    }
}

#pragma mark -
#pragma mark Internal

// Convenience overload without per-frame metadata (used by the audio path).
- (void)appendSampleBuffer:(CMSampleBufferRef)sampleBuffer ofMediaType:(NSString *)mediaType
{
    [self appendSampleBuffer:sampleBuffer
                 ofMediaType:mediaType
            withIntrinsicMat:nil
        withExposureDuration:-1];
}

// Common append path for audio and video; hops onto the serial writing queue.
- (void)appendSampleBuffer:(CMSampleBufferRef)sampleBuffer
               ofMediaType:(NSString *)mediaType
          withIntrinsicMat:(NSArray *)intrinsic3x3
      withExposureDuration:(int64_t)exposureDuration
{
    if ( sampleBuffer == NULL ) {
        @throw [NSException exceptionWithName:NSInvalidArgumentException reason:@"NULL sample buffer" userInfo:nil];
        return;
    }
    
    @synchronized( self ) {
        if ( _status < MovieRecorderStatusRecording ) {
            @throw [NSException
exceptionWithName:NSInternalInconsistencyException reason:@"Not ready to record yet" userInfo:nil];
            return;
        }
    }
    
    // Balanced by CFRelease on the writing queue: the buffer must stay alive
    // across the async hop.
    CFRetain( sampleBuffer );
    dispatch_async( _writingQueue, ^{
        
        @autoreleasepool
        {
            @synchronized( self )
            {
                // From the client's perspective the movie recorder can asynchronously transition to an error state as the result of an append.
                // Because of this we are lenient when samples are appended and we are no longer recording.
                // Instead of throwing an exception we just release the sample buffers and return.
                if ( self->_status > MovieRecorderStatusFinishingRecordingPart1 ) {
                    CFRelease( sampleBuffer );
                    return;
                }
            }
            CMTime sampleTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
            int64_t frameTimestamp = CMTimeGetNanoseconds(sampleTime);
            if ( ! self->_haveStartedSession ) {
                [self->_assetWriter startSessionAtSourceTime:sampleTime];
                self->_haveStartedSession = YES;
            }
            
            // Bug fix: the original compared mediaType with == (pointer
            // identity). That happens to work for the AVMediaType constants
            // but breaks for any other equal NSString; compare by value.
            AVAssetWriterInput *input = [mediaType isEqualToString:AVMediaTypeVideo] ? self->_videoInput : self->_audioInput;
            
            if ( input.readyForMoreMediaData )
            {
                BOOL success = [input appendSampleBuffer:sampleBuffer];
                if ( !
success ) {
                    NSError *error = self->_assetWriter.error;
                    @synchronized( self ) {
                        [self transitionToStatus:MovieRecorderStatusFailed error:error];
                    }
                } else {
                    // Record per-frame metadata only for successfully appended
                    // buffers so the arrays stay aligned with written frames.
                    [self->_savedFrameTimestamps addObject:[NSNumber numberWithLongLong:frameTimestamp]];
                    if (intrinsic3x3 != nil)
                        [self->_savedFrameIntrinsics addObject:intrinsic3x3];
                    if (exposureDuration != -1)
                        [self->_savedExposureDurations addObject:[NSNumber numberWithLongLong:exposureDuration]];
                }
            }
            else
            {
                NSLog( @"%@ input not ready for more media data, dropping buffer", mediaType );
            }
            CFRelease( sampleBuffer );
        }
    } );
}

// call under @synchonized( self )
- (void)transitionToStatus:(MovieRecorderStatus)newStatus error:(NSError *)error
{
    BOOL shouldNotifyDelegate = NO;
    
#if LOG_STATUS_TRANSITIONS
    NSLog( @"MovieRecorder state transition: %@->%@", [self stringForStatus:_status], [self stringForStatus:newStatus] );
#endif
    
    if ( newStatus != _status )
    {
        // terminal states
        if ( ( newStatus == MovieRecorderStatusFinished ) || ( newStatus == MovieRecorderStatusFailed ) )
        {
            shouldNotifyDelegate = YES;
            // make sure there are no more sample buffers in flight before we tear down the asset writer and inputs
            
            dispatch_async( _writingQueue, ^{
                [self teardownAssetWriterAndInputs];
                if ( newStatus == MovieRecorderStatusFailed ) {
                    [[NSFileManager defaultManager] removeItemAtURL:self->_URL error:NULL];
                }
            } );
            
#if LOG_STATUS_TRANSITIONS
            if ( error ) {
                NSLog( @"MovieRecorder error: %@, code: %i", error, (int)error.code );
            }
#endif
        }
        else if ( newStatus == MovieRecorderStatusRecording )
        {
            shouldNotifyDelegate = YES;
        }
        
        _status = newStatus;
    }
    
    if ( shouldNotifyDelegate )
    {
        dispatch_async(
_delegateCallbackQueue, ^{
            @autoreleasepool
            {
                switch ( newStatus )
                {
                    // Recording means preparation just completed successfully.
                    case MovieRecorderStatusRecording:
                        [self->_delegate movieRecorderDidFinishPreparing:self];
                        break;
                    case MovieRecorderStatusFinished:
                        [self->_delegate movieRecorderDidFinishRecording:self];
                        break;
                    case MovieRecorderStatusFailed:
                        [self->_delegate movieRecorder:self didFailWithError:error];
                        break;
                    default:
                        NSAssert1( NO, @"Unexpected recording status (%i) for delegate callback", (int)newStatus );
                        break;
                }
            }
        } );
    }
}

#if LOG_STATUS_TRANSITIONS

// Debug-only: human-readable name for a MovieRecorderStatus value.
- (NSString *)stringForStatus:(MovieRecorderStatus)status
{
    NSString *statusString = nil;
    
    switch ( status )
    {
        case MovieRecorderStatusIdle:
            statusString = @"Idle";
            break;
        case MovieRecorderStatusPreparingToRecord:
            statusString = @"PreparingToRecord";
            break;
        case MovieRecorderStatusRecording:
            statusString = @"Recording";
            break;
        case MovieRecorderStatusFinishingRecordingPart1:
            statusString = @"FinishingRecordingPart1";
            break;
        case MovieRecorderStatusFinishingRecordingPart2:
            statusString = @"FinishingRecordingPart2";
            break;
        case MovieRecorderStatusFinished:
            statusString = @"Finished";
            break;
        case MovieRecorderStatusFailed:
            statusString = @"Failed";
            break;
        default:
            statusString = @"Unknown";
            break;
    }
    return statusString;
    
}

#endif // LOG_STATUS_TRANSITIONS

// Creates and attaches the audio AVAssetWriterInput; returns NO (and sets
// *errorOut when provided) on failure.
- (BOOL)setupAssetWriterAudioInputWithSourceFormatDescription:(CMFormatDescriptionRef)audioFormatDescription settings:(NSDictionary *)audioSettings error:(NSError **)errorOut
{
    if ( !
audioSettings ) {
        NSLog( @"No audio settings provided, using default settings" );
        audioSettings = @{ AVFormatIDKey : @(kAudioFormatMPEG4AAC) };
    }
    
    if ( [_assetWriter canApplyOutputSettings:audioSettings forMediaType:AVMediaTypeAudio] )
    {
        _audioInput = [[AVAssetWriterInput alloc] initWithMediaType:AVMediaTypeAudio outputSettings:audioSettings sourceFormatHint:audioFormatDescription];
        _audioInput.expectsMediaDataInRealTime = YES;
        
        if ( [_assetWriter canAddInput:_audioInput] )
        {
            [_assetWriter addInput:_audioInput];
        }
        else
        {
            if ( errorOut ) {
                *errorOut = [[self class] cannotSetupInputError];
            }
            return NO;
        }
    }
    else
    {
        if ( errorOut ) {
            *errorOut = [[self class] cannotSetupInputError];
        }
        return NO;
    }
    
    return YES;
}

// Creates and attaches the video AVAssetWriterInput. When no settings are
// supplied, derives an H.264 bitrate from the source dimensions.
- (BOOL)setupAssetWriterVideoInputWithSourceFormatDescription:(CMFormatDescriptionRef)videoFormatDescription transform:(CGAffineTransform)transform settings:(NSDictionary *)videoSettings error:(NSError **)errorOut
{
    if ( ! videoSettings )
    {
        float bitsPerPixel;
        CMVideoDimensions dimensions = CMVideoFormatDescriptionGetDimensions( videoFormatDescription );
        int numPixels = dimensions.width * dimensions.height;
        int bitsPerSecond;
        
        NSLog( @"No video settings provided, using default settings" );
        
        // Assume that lower-than-SD resolutions are intended for streaming, and use a lower bitrate
        if ( numPixels < ( 640 * 480 ) ) {
            bitsPerPixel = 4.05; // This bitrate approximately matches the quality produced by AVCaptureSessionPresetMedium or Low.
        }
        else {
            bitsPerPixel = 10.1; // This bitrate approximately matches the quality produced by AVCaptureSessionPresetHigh.
}
        
        // Explicit truncation: bitsPerPixel is fractional, so make the
        // float->int narrowing intentional rather than an implicit
        // conversion (silences -Wconversion and documents intent).
        bitsPerSecond = (int)( numPixels * bitsPerPixel );
        
        NSDictionary *compressionProperties = @{ AVVideoAverageBitRateKey : @(bitsPerSecond),
                                                 AVVideoExpectedSourceFrameRateKey : @(30),
                                                 AVVideoMaxKeyFrameIntervalKey : @(30) };
        
        videoSettings = @{ AVVideoCodecKey : AVVideoCodecH264,
                           AVVideoWidthKey : @(dimensions.width),
                           AVVideoHeightKey : @(dimensions.height),
                           AVVideoCompressionPropertiesKey : compressionProperties };
    }
    
    if ( [_assetWriter canApplyOutputSettings:videoSettings forMediaType:AVMediaTypeVideo] )
    {
        _videoInput = [[AVAssetWriterInput alloc] initWithMediaType:AVMediaTypeVideo outputSettings:videoSettings sourceFormatHint:videoFormatDescription];
        _videoInput.expectsMediaDataInRealTime = YES;
        _videoInput.transform = transform;
        
        if ( [_assetWriter canAddInput:_videoInput] )
        {
            [_assetWriter addInput:_videoInput];
        }
        else
        {
            if ( errorOut ) {
                *errorOut = [[self class] cannotSetupInputError];
            }
            return NO;
        }
    }
    else
    {
        if ( errorOut ) {
            *errorOut = [[self class] cannotSetupInputError];
        }
        return NO;
    }
    
    return YES;
}

// Shared NSError describing any asset-writer input setup failure.
+ (NSError *)cannotSetupInputError
{
    NSString *localizedDescription = NSLocalizedString( @"Recording cannot be started", nil );
    NSString *localizedFailureReason = NSLocalizedString( @"Cannot setup asset writer input.", nil );
    NSDictionary *errorDict = @{ NSLocalizedDescriptionKey : localizedDescription,
                                 NSLocalizedFailureReasonErrorKey : localizedFailureReason };
    return [NSError errorWithDomain:@"com.apple.dts.samplecode" code:0 userInfo:errorDict];
}

// Releases writer objects; runs on _writingQueue after all appends complete.
- (void)teardownAssetWriterAndInputs
{
    _videoInput = nil;
    _audioInput = nil;
    _assetWriter = nil;
}

@end
--------------------------------------------------------------------------------
/Classes/Utilities/OpenGLPixelBufferView.h:
--------------------------------------------------------------------------------

/*
 Copyright (C) 2016 Apple Inc. All Rights Reserved.
 See LICENSE.txt for this sample’s licensing information
 
 Abstract:
 The OpenGL ES view
 */

#import
#import
#import
// NOTE(review): framework names in the #imports above were stripped by the
// export — restore from the original file.

// UIView backed by a CAEAGLLayer that displays CVPixelBuffers with GLES2.
@interface OpenGLPixelBufferView : UIView

- (void)displayPixelBuffer:(CVPixelBufferRef)pixelBuffer;
- (void)flushPixelBufferCache;
- (void)reset;

@end

#if !defined(_STRINGIFY)
#define __STRINGIFY( _x ) # _x
#define _STRINGIFY( _x ) __STRINGIFY( _x )
#endif

// Pass-through vertex shader: forwards position and texture coordinate.
static const char * kPassThruVertex = _STRINGIFY(

attribute vec4 position;
attribute mediump vec4 texturecoordinate;
varying mediump vec2 coordinate;

void main()
{
	gl_Position = position;
	coordinate = texturecoordinate.xy;
}

);

// Pass-through fragment shader: samples the bound video-frame texture.
static const char * kPassThruFragment = _STRINGIFY(

varying highp vec2 coordinate;
uniform sampler2D videoframe;

void main()
{
	gl_FragColor = texture2D(videoframe, coordinate);
}

);

// Vertex attribute slots shared by shader setup and draw calls.
enum {
    ATTRIB_VERTEX,
    ATTRIB_TEXTUREPOSITON,
    NUM_ATTRIBUTES
};

--------------------------------------------------------------------------------
/Classes/Utilities/OpenGLPixelBufferView.m:
--------------------------------------------------------------------------------

/*
 Copyright (C) 2016 Apple Inc. All Rights Reserved.
 See LICENSE.txt for this sample’s licensing information
 
 Abstract:
 The OpenGL ES view
 */

#import "OpenGLPixelBufferView.h"
#import
#import
// NOTE(review): framework names in the #imports above were stripped by the
// export — restore from the original file.
#import "ShaderUtilities.h"



@interface OpenGLPixelBufferView ()
{
    EAGLContext *_oglContext;
    CVOpenGLESTextureCacheRef _textureCache;
    GLint _width;  // renderbuffer pixel dimensions
    GLint _height;
    GLuint _frameBufferHandle;
    GLuint _colorBufferHandle;
    GLuint _program;
    GLint _frame; // location of the "videoframe" sampler uniform
}
@end

@implementation OpenGLPixelBufferView

+ (Class)layerClass
{
    return [CAEAGLLayer class];
}

- (instancetype)initWithFrame:(CGRect)frame
{
    self = [super initWithFrame:frame];
    if ( self )
    {
        // On iOS8 and later we use the native scale of the screen as our content scale factor.
        // This allows us to render to the exact pixel resolution of the screen which avoids additional scaling and GPU rendering work.
        // For example the iPhone 6 Plus appears to UIKit as a 736 x 414 pt screen with a 3x scale factor (2208 x 1242 virtual pixels).
        // But the native pixel dimensions are actually 1920 x 1080.
        // Since we are streaming 1080p buffers from the camera we can render to the iPhone 6 Plus screen at 1:1 with no additional scaling if we set everything up correctly.
        // Using the native scale of the screen also allows us to render at full quality when using the display zoom feature on iPhone 6/6 Plus.
        
        // Only try to compile this code if we are using the 8.0 or later SDK.
#if __IPHONE_OS_VERSION_MAX_ALLOWED >= 80000
        if ( [UIScreen instancesRespondToSelector:@selector(nativeScale)] )
        {
            self.contentScaleFactor = [UIScreen mainScreen].nativeScale;
        }
        else
#endif
        {
            self.contentScaleFactor = [UIScreen mainScreen].scale;
        }
        
        // Initialize OpenGL ES 2
        CAEAGLLayer *eaglLayer = (CAEAGLLayer *)self.layer;
        eaglLayer.opaque = YES;
        eaglLayer.drawableProperties = @{ kEAGLDrawablePropertyRetainedBacking : @(NO),
                                          kEAGLDrawablePropertyColorFormat : kEAGLColorFormatRGBA8 };
        
        _oglContext = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2];
        if ( ! _oglContext ) {
            NSLog( @"Problem with OpenGL context." );
            return nil;
        }
    }
    return self;
}

// Lazily creates the FBO/renderbuffer, texture cache and shader program.
// Called from -displayPixelBuffer: with _oglContext already current.
- (BOOL)initializeBuffers
{
    BOOL success = YES;
    
    glDisable( GL_DEPTH_TEST );
    
    glGenFramebuffers( 1, &_frameBufferHandle );
    glBindFramebuffer( GL_FRAMEBUFFER, _frameBufferHandle );
    
    glGenRenderbuffers( 1, &_colorBufferHandle );
    glBindRenderbuffer( GL_RENDERBUFFER, _colorBufferHandle );
    
    [_oglContext renderbufferStorage:GL_RENDERBUFFER fromDrawable:(CAEAGLLayer *)self.layer];
    
    glGetRenderbufferParameteriv( GL_RENDERBUFFER, GL_RENDERBUFFER_WIDTH, &_width );
    glGetRenderbufferParameteriv( GL_RENDERBUFFER, GL_RENDERBUFFER_HEIGHT, &_height );
    
    glFramebufferRenderbuffer( GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_RENDERBUFFER, _colorBufferHandle );
    if ( glCheckFramebufferStatus( GL_FRAMEBUFFER ) != GL_FRAMEBUFFER_COMPLETE ) {
        NSLog( @"Failure with framebuffer generation" );
        success = NO;
        goto bail;
    }
    
    // Create a new CVOpenGLESTexture cache
    CVReturn err = CVOpenGLESTextureCacheCreate( kCFAllocatorDefault, NULL, _oglContext, NULL, &_textureCache );
    if ( err ) {
        NSLog( @"Error at CVOpenGLESTextureCacheCreate %d", err );
        success = NO;
        goto bail;
    }
    // attributes
    GLint attribLocation[NUM_ATTRIBUTES] = {
        ATTRIB_VERTEX, ATTRIB_TEXTUREPOSITON,
    };
    GLchar *attribName[NUM_ATTRIBUTES] = {
        "position", "texturecoordinate",
    };
    
    glueCreateProgram( kPassThruVertex, kPassThruFragment,
                      NUM_ATTRIBUTES, (const GLchar **)&attribName[0], attribLocation,
                      0, 0, 0,
                      &_program );
    
    if ( ! _program ) {
        NSLog( @"Error creating the program" );
        success = NO;
        goto bail;
    }
    
    _frame = glueGetUniformLocation( _program, "videoframe" );
    
bail:
    // On any failure, tear down whatever was partially created.
    if ( ! success ) {
        [self reset];
    }
    return success;
}

// Tears down all GL objects; safe to call repeatedly. Temporarily makes
// _oglContext current and restores the previous context on exit.
- (void)reset
{
    EAGLContext *oldContext = [EAGLContext currentContext];
    if ( oldContext != _oglContext ) {
        if ( ! [EAGLContext setCurrentContext:_oglContext] ) {
            @throw [NSException exceptionWithName:NSInternalInconsistencyException reason:@"Problem with OpenGL context" userInfo:nil];
            return;
        }
    }
    if ( _frameBufferHandle ) {
        glDeleteFramebuffers( 1, &_frameBufferHandle );
        _frameBufferHandle = 0;
    }
    if ( _colorBufferHandle ) {
        glDeleteRenderbuffers( 1, &_colorBufferHandle );
        _colorBufferHandle = 0;
    }
    if ( _program ) {
        glDeleteProgram( _program );
        _program = 0;
    }
    if ( _textureCache ) {
        CFRelease( _textureCache );
        _textureCache = 0;
    }
    if ( oldContext != _oglContext ) {
        [EAGLContext setCurrentContext:oldContext];
    }
}

- (void)dealloc
{
    [self reset];
}

// Renders pixelBuffer into the view's layer, aspect-filling the bounds.
- (void)displayPixelBuffer:(CVPixelBufferRef)pixelBuffer
{
    static const GLfloat squareVertices[] = {
        -1.0f, -1.0f, // bottom left
        1.0f, -1.0f, // bottom right
        -1.0f,  1.0f, // top left
        1.0f,  1.0f, // top right
    };
    
    if ( pixelBuffer == NULL ) {
        @throw [NSException
exceptionWithName:NSInvalidArgumentException reason:@"NULL pixel buffer" userInfo:nil];
        return;
    }
    
    EAGLContext *oldContext = [EAGLContext currentContext];
    if ( oldContext != _oglContext ) {
        if ( ! [EAGLContext setCurrentContext:_oglContext] ) {
            @throw [NSException exceptionWithName:NSInternalInconsistencyException reason:@"Problem with OpenGL context" userInfo:nil];
            return;
        }
    }
    
    if ( _frameBufferHandle == 0 ) {
        BOOL success = [self initializeBuffers];
        if ( ! success ) {
            NSLog( @"Problem initializing OpenGL buffers." );
            // Bug fix: restore the caller's GL context before bailing out;
            // the original early return left _oglContext current on this
            // thread, unlike every other exit path of this method.
            if ( oldContext != _oglContext ) {
                [EAGLContext setCurrentContext:oldContext];
            }
            return;
        }
    }
    
    // Create a CVOpenGLESTexture from a CVPixelBufferRef
    size_t frameWidth = CVPixelBufferGetWidth( pixelBuffer );
    size_t frameHeight = CVPixelBufferGetHeight( pixelBuffer );
    CVOpenGLESTextureRef texture = NULL;
    CVReturn err = CVOpenGLESTextureCacheCreateTextureFromImage( kCFAllocatorDefault,
                                                                _textureCache,
                                                                pixelBuffer,
                                                                NULL,
                                                                GL_TEXTURE_2D,
                                                                GL_RGBA,
                                                                (GLsizei)frameWidth,
                                                                (GLsizei)frameHeight,
                                                                GL_BGRA,
                                                                GL_UNSIGNED_BYTE,
                                                                0,
                                                                &texture );
    
    
    if ( !
texture || err ) {
        NSLog( @"CVOpenGLESTextureCacheCreateTextureFromImage failed (error: %d)", err );
        // Bug fix: restore the caller's GL context before bailing out; the
        // original early return left _oglContext current on this thread,
        // unlike the normal exit path of this method.
        if ( oldContext != _oglContext ) {
            [EAGLContext setCurrentContext:oldContext];
        }
        return;
    }
    
    // Set the view port to the entire view
    glBindFramebuffer( GL_FRAMEBUFFER, _frameBufferHandle );
    glViewport( 0, 0, _width, _height );
    
    glUseProgram( _program );
    glActiveTexture( GL_TEXTURE0 );
    glBindTexture( CVOpenGLESTextureGetTarget( texture ), CVOpenGLESTextureGetName( texture ) );
    glUniform1i( _frame, 0 );
    
    // Set texture parameters
    glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR );
    glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR );
    glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE );
    glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE );
    
    glVertexAttribPointer( ATTRIB_VERTEX, 2, GL_FLOAT, 0, 0, squareVertices );
    glEnableVertexAttribArray( ATTRIB_VERTEX );
    
    // Preserve aspect ratio; fill layer bounds
    CGSize textureSamplingSize;
    CGSize cropScaleAmount = CGSizeMake( self.bounds.size.width / (float)frameWidth, self.bounds.size.height / (float)frameHeight );
    if ( cropScaleAmount.height > cropScaleAmount.width ) {
        textureSamplingSize.width = self.bounds.size.width / ( frameWidth * cropScaleAmount.height );
        textureSamplingSize.height = 1.0;
    }
    else {
        textureSamplingSize.width = 1.0;
        textureSamplingSize.height = self.bounds.size.height / ( frameHeight * cropScaleAmount.width );
    }
    
    // Perform a vertical flip by swapping the top left and the bottom left coordinate.
    // CVPixelBuffers have a top left origin and OpenGL has a bottom left origin.
    GLfloat passThroughTextureVertices[] = {
        ( 1.0 - textureSamplingSize.width ) / 2.0, ( 1.0 + textureSamplingSize.height ) / 2.0, // top left
        ( 1.0 + textureSamplingSize.width ) / 2.0, ( 1.0 + textureSamplingSize.height ) / 2.0, // top right
        ( 1.0 - textureSamplingSize.width ) / 2.0, ( 1.0 - textureSamplingSize.height ) / 2.0, // bottom left
        ( 1.0 + textureSamplingSize.width ) / 2.0, ( 1.0 - textureSamplingSize.height ) / 2.0, // bottom right
    };
    
    glVertexAttribPointer( ATTRIB_TEXTUREPOSITON, 2, GL_FLOAT, 0, 0, passThroughTextureVertices );
    glEnableVertexAttribArray( ATTRIB_TEXTUREPOSITON );
    
    glDrawArrays( GL_TRIANGLE_STRIP, 0, 4 );
    
    glBindRenderbuffer( GL_RENDERBUFFER, _colorBufferHandle );
    [_oglContext presentRenderbuffer:GL_RENDERBUFFER];
    
    glBindTexture( CVOpenGLESTextureGetTarget( texture ), 0 );
    glBindTexture( GL_TEXTURE_2D, 0 );
    CFRelease( texture );
    
    if ( oldContext != _oglContext ) {
        [EAGLContext setCurrentContext:oldContext];
    }
}

// Flushes the CVOpenGLES texture cache (call when the stream stops/resets).
- (void)flushPixelBufferCache
{
    if ( _textureCache ) {
        CVOpenGLESTextureCacheFlush(_textureCache, 0);
    }
}

@end

--------------------------------------------------------------------------------
/Classes/Utilities/VideoTimeConverter.h:
--------------------------------------------------------------------------------

#import
#import
#import
// NOTE(review): framework names in the #imports above were stripped by the
// export — restore from the original file.


// Conversion of time between different clocks.
8 | // see: https://github.com/robovm/apple-ios-samples/blob/master/UsingAVFoundationAPIstorecordamoviewithlocationmetadata/AVCaptureLocation/AAPLCaptureManager.m 9 | // Clocks NSDate sampleBufferClock=captureSession.masterClock motionClock=CMClockGetHostTimeClock() 10 | 11 | @interface VideoTimeConverter : NSObject 12 | 13 | @property(nonatomic, retain) __attribute__((NSObject)) CMClockRef sampleBufferClock; 14 | 15 | - (void)checkStatus; 16 | 17 | // captureSession.masterClock to HostTimeClock 18 | - (void)convertSampleBufferTimeToMotionClock:(CMSampleBufferRef)sampleBuffer; 19 | 20 | // NSDate to captureSession.masterClock 21 | - (CMTime)movieTimeForLocationTime:(NSDate *)date; 22 | 23 | @end 24 | 25 | CMTime getAttachmentTime(CMSampleBufferRef mediaSample); 26 | 27 | int64_t CMTimeGetNanoseconds(CMTime time); 28 | 29 | int64_t CMTimeGetMilliseconds(CMTime time); 30 | 31 | NSString *secDoubleToNanoString(double time); 32 | 33 | // NSDate to HostTimeClock 34 | CMTime CMTimeForNSDate(NSDate *date); 35 | 36 | NSString *NSDateToString(NSDate *date); 37 | -------------------------------------------------------------------------------- /Classes/Utilities/VideoTimeConverter.m: -------------------------------------------------------------------------------- 1 | 2 | #import "VideoTimeConverter.h" 3 | #import 4 | 5 | CFStringRef const VIDEOSNAKE_REMAPPED_PTS = CFSTR("RemappedPTS"); 6 | const int32_t kSecToNanos = 1000000000; 7 | 8 | @interface VideoTimeConverter () { 9 | 10 | } 11 | 12 | @property(nonatomic, retain) __attribute__((NSObject)) CMClockRef motionClock; 13 | 14 | @end 15 | 16 | @implementation VideoTimeConverter 17 | 18 | - (id)init 19 | { 20 | self = [super init]; 21 | if (self != nil) { 22 | _motionClock = CMClockGetHostTimeClock(); 23 | if (_motionClock) 24 | CFRetain(_motionClock); 25 | } 26 | 27 | return self; 28 | } 29 | 30 | - (void)dealloc 31 | { 32 | if (_sampleBufferClock) 33 | CFRelease(_sampleBufferClock); 34 | if (_motionClock) 35 | 
CFRelease(_motionClock);
}

// Programmer-error guard: the sample buffer clock (typically
// captureSession.masterClock) must be assigned before any conversion.
// Throws NSInternalInconsistencyException when it is missing.
- (void)checkStatus
{
    if ( self.sampleBufferClock == NULL ) {
        // Fixed: an unreachable `return;` that followed @throw was removed.
        @throw [NSException exceptionWithName:NSInternalInconsistencyException reason:@"No sample buffer clock. Please set one before calling start." userInfo:nil];
    }
}

// Remaps the buffer's presentation timestamp from the capture-session clock
// onto the host-time (motion) clock and attaches the result to the buffer
// under VIDEOSNAKE_REMAPPED_PTS, so video and CoreMotion samples share one
// time base. If either clock is unset, the original PTS is attached instead.
- (void)convertSampleBufferTimeToMotionClock:(CMSampleBufferRef)sampleBuffer
{
    CMTime originalPTS = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
    CMTime remappedPTS = originalPTS;
    if ( self.sampleBufferClock && self.motionClock ) {
        // Conversion is only meaningful when the two clocks differ.
        if ( !CFEqual(self.sampleBufferClock, self.motionClock) ) {
            remappedPTS = CMSyncConvertTime(originalPTS, self.sampleBufferClock, self.motionClock);
        }
    }
    // Attach the remapped timestamp to the buffer for use in -sync.
    // kCMAttachmentMode_ShouldPropagate keeps it on derived buffers.
    CFDictionaryRef remappedPTSDict = CMTimeCopyAsDictionary(remappedPTS, kCFAllocatorDefault);
    CMSetAttachment(sampleBuffer, VIDEOSNAKE_REMAPPED_PTS, remappedPTSDict, kCMAttachmentMode_ShouldPropagate);

    CFRelease(remappedPTSDict);
}

// Maps a wall-clock NSDate onto the capture session's clock
// (host clock -> sampleBufferClock).
- (CMTime)movieTimeForLocationTime:(NSDate *)date
{
    CMTime locationTime = CMTimeForNSDate(date);
    CMTime locationMovieTime = CMSyncConvertTime(locationTime, CMClockGetHostTimeClock(), self.sampleBufferClock);
    return locationMovieTime;
}

@end

// Reads back the timestamp attached by -convertSampleBufferTimeToMotionClock:,
// falling back to the buffer's own PTS when no attachment is present.
CMTime getAttachmentTime(CMSampleBufferRef mediaSample)
{
    CFDictionaryRef mediaTimeDict = CMGetAttachment(mediaSample, VIDEOSNAKE_REMAPPED_PTS, NULL);
    CMTime mediaTime = (mediaTimeDict) ?
CMTimeMakeFromDictionary(mediaTimeDict) : CMSampleBufferGetPresentationTimeStamp(mediaSample);
    return mediaTime;
}

// CMTime rescaled to integer nanoseconds (may lose precision per the
// CMTimeConvertScale rounding method).
int64_t CMTimeGetNanoseconds(CMTime time) {
    CMTime timenano = CMTimeConvertScale(time, kSecToNanos, kCMTimeRoundingMethod_Default);
    return timenano.value;
}

// CMTime rescaled to integer milliseconds.
int64_t CMTimeGetMilliseconds(CMTime time) {
    CMTime timeMillis = CMTimeConvertScale(time, 1000, kCMTimeRoundingMethod_Default);
    return timeMillis.value;
}

// Formats a time given in (fractional) seconds as a decimal nanosecond
// string, e.g. 1.5 -> "1500000000". Assumes a non-negative input.
//
// Fixed: the previous "%09.0f" formatting could round the fractional part up
// to a full second (e.g. 0.9999999999 s), printing a 10-digit field and
// producing a corrupt timestamp; the carry is now handled explicitly.
NSString *secDoubleToNanoString(double time) {
    double integral;
    double fractional = modf(time, &integral);
    long long nanos = llround(fractional * kSecToNanos);
    if (nanos >= kSecToNanos) {
        // Rounding reached a whole second: carry into the integral part.
        nanos -= kSecToNanos;
        integral += 1.0;
    }
    return [NSString stringWithFormat:@"%.0f%09lld", integral, nanos];
}

// Converts an NSDate to a CMTime on the host-time clock by subtracting the
// elapsed interval since `date` from "now".
CMTime CMTimeForNSDate(NSDate *date) {
    CMTime now = CMClockGetTime(CMClockGetHostTimeClock());
    NSTimeInterval elapsed = -(date.timeIntervalSinceNow); // this will be a negative number if date was in the past (it should be).
    CMTime eventTime = CMTimeSubtract(now, CMTimeMake(elapsed * now.timescale, now.timescale));
    return eventTime;
}

// Formats a date as "yyyy_MM_dd_HH_mm_ss.SSS" (used for log file naming).
NSString *NSDateToString(NSDate *date) {
    NSDateFormatter *dateFormatter = [[NSDateFormatter alloc] init];
    [dateFormatter setDateFormat:@"yyyy_MM_dd_HH_mm_ss.SSS"];
    //Optionally for time zone conversions
    // [formatter setTimeZone:[NSTimeZone timeZoneWithName:@"..."]];
    return [dateFormatter stringFromDate:date];
}
--------------------------------------------------------------------------------
/Classes/inertialRecorder.h:
--------------------------------------------------------------------------------

/*
 Copyright (C) 2016 Apple Inc. All Rights Reserved.
4 | See LICENSE.txt for this sample’s licensing information 5 | 6 | Abstract: 7 | View controller for camera interface 8 | */ 9 | 10 | 11 | #import 12 | 13 | @interface inertialRecorder : NSObject 14 | 15 | - (void)switchRecording; 16 | 17 | @property NSURL *fileURL; 18 | @property NSURL *arURL; 19 | @property NSURL *arFrameURL; 20 | @property NSURL *accelURL; 21 | @property NSURL *GPSURL; 22 | @property NSURL *headURL; 23 | @property NSURL *baroURL; 24 | 25 | @property BOOL isRecording; 26 | 27 | @end 28 | 29 | 30 | @interface NodeWrapper : NSObject 31 | @property NSTimeInterval time; 32 | @property double x; 33 | @property double y; 34 | @property double z; 35 | @property BOOL isGyro; 36 | 37 | - (NSComparisonResult)compare:(NodeWrapper *)otherObject; 38 | 39 | @end 40 | 41 | NSURL *getFileURL(const NSString *filename); 42 | 43 | NSURL *createOutputFolderURL(void); 44 | -------------------------------------------------------------------------------- /Classes/inertialRecorder.m: -------------------------------------------------------------------------------- 1 | 2 | #import "inertialRecorder.h" 3 | #import 4 | #import "VideoTimeConverter.h" 5 | 6 | const double GRAVITY = 9.80; // see https://developer.apple.com/documentation/coremotion/getting_raw_accelerometer_events 7 | const double RATE = 100; // fps for inertial data 8 | 9 | @interface inertialRecorder () 10 | { 11 | 12 | } 13 | @property CMMotionManager *motionManager; 14 | @property(nonatomic,strong) CMAltimeter *altimeter;//气压计 15 | @property NSOperationQueue *queue; 16 | @property NSTimer *timer; 17 | 18 | @property NSMutableArray *rawAccelGyroData; 19 | 20 | @property NSMutableString *logStringAccel; 21 | 22 | @property NSMutableString *logStringbaro; 23 | 24 | 25 | @property BOOL interpolateAccel; // interpolate accelerometer data at gyro timestamps? 
26 | @property NSString *timeStartImu; 27 | 28 | @end 29 | 30 | @implementation inertialRecorder 31 | 32 | - (instancetype)init { 33 | self = [super init]; 34 | if ( self ) 35 | { 36 | _isRecording = false; 37 | _motionManager = [[CMMotionManager alloc] init]; 38 | if (!_motionManager.isDeviceMotionAvailable) { 39 | NSLog(@"Device does not support motion capture."); } 40 | _fileURL = nil; 41 | _interpolateAccel = TRUE; 42 | 43 | } 44 | return self; 45 | } 46 | 47 | - (NSMutableArray *) removeDuplicates:(NSArray *)array { 48 | // see https://stackoverflow.com/questions/1025674/the-best-way-to-remove-duplicate-values-from-nsmutablearray-in-objective-c 49 | NSMutableArray *mutableArray = [array mutableCopy]; 50 | NSInteger index = [array count] - 1; 51 | for (id object in [array reverseObjectEnumerator]) { 52 | if ([mutableArray indexOfObject:object inRange:NSMakeRange(0, index)] != NSNotFound) { 53 | [mutableArray removeObjectAtIndex:index]; 54 | } 55 | index--; 56 | } 57 | return mutableArray; 58 | } 59 | 60 | - (NSMutableString*)interpolate:(NSMutableArray*) accelGyroData startTime:(NSString *) startTime { 61 | 62 | NSMutableArray *gyroArray = [[NSMutableArray alloc] init]; 63 | NSMutableArray *accelArray = [[NSMutableArray alloc] init]; 64 | 65 | for (int i=0;i<[accelGyroData count];i++) { 66 | NodeWrapper *nw =[accelGyroData objectAtIndex:i]; 67 | if (nw.time <= 0) 68 | continue; 69 | if (nw.isGyro) 70 | [gyroArray addObject:nw]; 71 | else 72 | [accelArray addObject:nw]; 73 | } 74 | 75 | NSArray *sortedArrayGyro = [gyroArray sortedArrayUsingSelector:@selector(compare:)]; 76 | NSArray *sortedArrayAccel = [accelArray sortedArrayUsingSelector:@selector(compare:)]; 77 | 78 | NSMutableArray *mutableGyroCopy = [self removeDuplicates:sortedArrayGyro]; 79 | NSMutableArray *mutableAccelCopy = [self removeDuplicates:sortedArrayAccel]; 80 | 81 | // interpolate 82 | NSMutableString *mainString = [[NSMutableString alloc]initWithString:@""]; 83 | int accelIndex = 0; 84 | 
[mainString appendFormat:@"Timestamp[nanosec], gx[rad/s], gy[rad/s], gz[rad/s], ax[m/s^2], ay[m/s^2], az[m/s^2]\n"]; 85 | for (int gyroIndex = 0; gyroIndex < [mutableGyroCopy count]; ++gyroIndex) { 86 | NodeWrapper *nwg = [mutableGyroCopy objectAtIndex:gyroIndex]; 87 | NodeWrapper *nwa = [mutableAccelCopy objectAtIndex:accelIndex]; 88 | if (nwg.time < nwa.time) { 89 | continue; 90 | } else if (nwg.time == nwa.time) { 91 | [mainString appendFormat:@"%@, %.6f, %.6f, %.6f, %.6f, %.6f, %.6f\n", secDoubleToNanoString(nwg.time), nwg.x, nwg.y, nwg.z, nwa.x, nwa.y, nwa.z]; 92 | } else { 93 | int lowerIndex = accelIndex; 94 | int upperIndex = accelIndex + 1; 95 | for (int iterIndex = accelIndex + 1; iterIndex < [mutableAccelCopy count]; ++iterIndex) { 96 | NodeWrapper *nwa1 = [mutableAccelCopy objectAtIndex:iterIndex]; 97 | if (nwa1.time < nwg.time) { 98 | lowerIndex = iterIndex; 99 | } else if (nwa1.time > nwg.time) { 100 | upperIndex = iterIndex; 101 | break; 102 | } else { 103 | lowerIndex = iterIndex; 104 | upperIndex = iterIndex; 105 | break; 106 | } 107 | } 108 | 109 | if (upperIndex >= [mutableAccelCopy count]) 110 | break; 111 | 112 | if (upperIndex == lowerIndex) { 113 | NodeWrapper *nwa1 = [mutableAccelCopy objectAtIndex:upperIndex]; 114 | [mainString appendFormat:@"%@, %.6f, %.6f, %.6f, %.6f, %.6f, %.6f\n", secDoubleToNanoString(nwg.time), nwg.x, nwg.y, nwg.z, nwa1.x, nwa1.y, nwa1.z]; 115 | } else if (upperIndex == lowerIndex + 1) { 116 | //存储的是gyro_accel.cvs 文件 117 | NodeWrapper *nwa = [mutableAccelCopy objectAtIndex:lowerIndex]; 118 | NodeWrapper *nwa1 = [mutableAccelCopy objectAtIndex:upperIndex]; 119 | double ratio = (nwg.time - nwa.time) / (nwa1.time - nwa.time); 120 | double interpax = nwa.x + (nwa1.x - nwa.x) * ratio; 121 | double interpay = nwa.y + (nwa1.y - nwa.y) * ratio; 122 | double interpaz = nwa.z + (nwa1.z - nwa.z) * ratio; 123 | [mainString appendFormat:@"%@, %.6f, %.6f, %.6f, %.6f, %.6f, %.6f\n", secDoubleToNanoString(nwg.time), nwg.x, nwg.y, 
nwg.z, interpax, interpay, interpaz]; 124 | } else { 125 | NSLog(@"Impossible lower and upper bound %d %d for gyro timestamp %.5f", lowerIndex, upperIndex, nwg.time); 126 | } 127 | accelIndex = lowerIndex; 128 | } 129 | } 130 | if ([gyroArray count]) 131 | [gyroArray removeAllObjects]; 132 | if ([accelArray count]) 133 | [accelArray removeAllObjects]; 134 | return mainString; 135 | } 136 | 137 | - (void)switchRecording { 138 | if (_isRecording) { 139 | 140 | _isRecording = false; 141 | [_motionManager stopGyroUpdates]; 142 | [_motionManager stopAccelerometerUpdates]; 143 | if (self.altimeter) { 144 | [self.altimeter stopRelativeAltitudeUpdates];//停止气压值 145 | self.altimeter = nil; 146 | } 147 | NSMutableString *mainString = [[NSMutableString alloc]initWithString:@""]; 148 | if (!_interpolateAccel) { 149 | [mainString appendFormat:@"Timestamp[nanosec], x, y, z[(a:m/s^2)/(g:rad/s)], isGyro?\n"]; 150 | for(int i=0;i<[_rawAccelGyroData count];i++ ) { 151 | NodeWrapper *nw =[_rawAccelGyroData objectAtIndex:i]; 152 | [mainString appendFormat:@"%.7f, %.5f, %.5f, %.5f, %d\n", nw.time, nw.x, nw.y, nw.z, nw.isGyro]; 153 | } 154 | } else { // linearly interpolate acceleration offline 155 | // TODO(jhuai): Though offline interpolation is enough for practical needs, 156 | // eg., 20 min recording, online interpolation may be still desirable. 157 | // It can be implemented referring to Vins Mobile and MarsLogger Android. 
158 | mainString = [self interpolate:_rawAccelGyroData startTime:_timeStartImu]; 159 | } 160 | if ([_rawAccelGyroData count]) 161 | [_rawAccelGyroData removeAllObjects]; 162 | 163 | NSData *settingsData; 164 | settingsData = [mainString dataUsingEncoding: NSUTF8StringEncoding allowLossyConversion:false]; 165 | 166 | NSData *logStringAccelData = [_logStringAccel dataUsingEncoding: NSUTF8StringEncoding allowLossyConversion:false];//磁力计写到本地 167 | 168 | if (_logStringbaro.length>=1) { 169 | 170 | NSData *logStringbaroData = [_logStringbaro dataUsingEncoding: NSUTF8StringEncoding allowLossyConversion:false];//气压计写入到本地 171 | if ([logStringbaroData writeToURL:_baroURL atomically:YES]) { 172 | NSLog(@"Written inertial data to %@", _baroURL); 173 | } 174 | _logStringbaro = [@"" mutableCopy]; 175 | } 176 | 177 | if ([logStringAccelData writeToURL:_accelURL atomically:YES]) { 178 | NSLog(@"Written inertial data to %@", _accelURL); 179 | } 180 | 181 | if ([settingsData writeToURL:_fileURL atomically:YES]) { 182 | NSLog(@"Written inertial data to %@", _fileURL); 183 | } 184 | else { 185 | NSLog(@"Failed to record inertial data at %@", _fileURL); 186 | } 187 | 188 | NSLog(@"Stopped recording inertial data!"); 189 | } else { 190 | _isRecording = true; 191 | 192 | NSLog(@"Start recording inertial data!"); 193 | _rawAccelGyroData = [[NSMutableArray alloc] init]; 194 | _motionManager.gyroUpdateInterval = 1/RATE; 195 | _motionManager.accelerometerUpdateInterval = 1/RATE; 196 | 197 | NSDateFormatter *dateFormatter = [[NSDateFormatter alloc] init]; 198 | [dateFormatter setDateFormat:@"EEE_MM_dd_yyyy_HH_mm_ss"]; 199 | _timeStartImu = [dateFormatter stringFromDate:[NSDate date]]; 200 | _logStringAccel = [@"Timestamp,x,y,z\r\n" mutableCopy]; 201 | 202 | 203 | if (_motionManager.gyroAvailable && _motionManager.accelerometerAvailable) { 204 | // _queue = [NSOperationQueue currentQueue]; // mainQueue, run on main UI thread 205 | _queue = [[NSOperationQueue alloc] init]; // background thread 
[_motionManager startGyroUpdatesToQueue:_queue withHandler: ^ (CMGyroData *gyroData, NSError *error) {
    // Gyro sample: rotation rate in rad/s, timestamped in seconds since boot.
    CMRotationRate rotate = gyroData.rotationRate;

    NodeWrapper *nw = [[NodeWrapper alloc] init];
    nw.isGyro = true;
    nw.time = gyroData.timestamp;
    nw.x = rotate.x;
    nw.y = rotate.y;
    nw.z = rotate.z;
    // NSLog(@"x1======%lf",rotate.x);
    [self->_rawAccelGyroData addObject:nw];

    //NSLog(@"timestamp2-:%@",secDoubleToNanoString(gyroData.timestamp));
}];
[_motionManager startAccelerometerUpdatesToQueue:_queue withHandler: ^ (CMAccelerometerData *accelData, NSError *error) {

    CMAcceleration accel = accelData.acceleration;
    double x = accel.x;
    double y = accel.y;
    double z = accel.z;

    NodeWrapper *nw = [[NodeWrapper alloc] init];
    nw.isGyro = false;
    // The time stamp is the amount of time in seconds since the device booted.
    nw.time = accelData.timestamp;
    // Convert from G-units to m/s^2 and flip sign (see the GRAVITY note at
    // the top of the file for Apple's sign convention).
    nw.x = - x * GRAVITY;
    nw.y = - y * GRAVITY;
    nw.z = - z * GRAVITY;

    // Log the raw accelerometer reading (NOTE(review): the original comment
    // here said "magnetometer" — this is accelerometer data in G-units).
    [self->_logStringAccel appendString: [NSString stringWithFormat:@"%@,%f,%f,%f\r\n",
                                          secDoubleToNanoString(accelData.timestamp),
                                          x, //G-units
                                          y,
                                          z]];

    [self->_rawAccelGyroData addObject:nw];

    // NSLog(@"x2======%lf",accelData.timestamp);
}];

if (![CMAltimeter isRelativeAltitudeAvailable]){// check whether the barometer is available on this device
    NSLog(@"Barometer is not available on this device.
Sorry!"); 249 | return; 250 | } 251 | 252 | _logStringbaro = [@"Timestamp,baro\r\n" mutableCopy]; 253 | self.altimeter = [[CMAltimeter alloc] init]; //获取气压值 254 | [self.altimeter startRelativeAltitudeUpdatesToQueue:_queue withHandler:^(CMAltitudeData * _Nullable altitudeData, NSError * _Nullable error) { 255 | if (error) { 256 | [self.altimeter stopRelativeAltitudeUpdates];//停止气压计 257 | return; 258 | } 259 | 260 | [self.logStringbaro appendString: [NSString stringWithFormat:@"%@,%0.2f\r\n", 261 | secDoubleToNanoString(altitudeData.timestamp), 262 | [altitudeData.pressure floatValue] 263 | ]]; 264 | 265 | NSLog(@"高度:%0.2f m 气压值:%0.2f kPa",[altitudeData.relativeAltitude floatValue],[altitudeData.pressure floatValue]); 266 | }]; 267 | 268 | 269 | 270 | 271 | } else { 272 | NSLog(@"Gyroscope or accelerometer not available"); 273 | } 274 | } 275 | } 276 | 277 | 278 | 279 | 280 | 281 | 282 | @end 283 | 284 | 285 | @implementation NodeWrapper 286 | - (NSComparisonResult)compare:(NodeWrapper *)otherObject { 287 | return [@(self.time) compare:@(otherObject.time)]; // @ converts double to NSNumber 288 | } 289 | @end 290 | 291 | 292 | NSURL *getFileURL(NSString *filename) { 293 | NSArray *paths = [[NSFileManager defaultManager] URLsForDirectory:NSDocumentDirectory inDomains:NSUserDomainMask]; 294 | NSURL *documentsURL = [paths lastObject]; 295 | return [documentsURL URLByAppendingPathComponent:filename isDirectory:NO]; 296 | } 297 | 298 | NSURL *createOutputFolderURL(void) { 299 | NSDate *now = [NSDate date]; 300 | NSDateFormatter *dateFormatter = [[NSDateFormatter alloc] init]; 301 | [dateFormatter setDateFormat:@"yyyy_MM_dd_HH_mm_ss_SS"]; 302 | NSString *dateTimeString = [dateFormatter stringFromDate:now]; 303 | 304 | NSArray *paths = [[NSFileManager defaultManager] URLsForDirectory:NSDocumentDirectory inDomains:NSUserDomainMask]; 305 | NSURL *documentsURL = [paths lastObject]; 306 | NSURL *outputFolderURL = [documentsURL URLByAppendingPathComponent:dateTimeString 
isDirectory:YES]; 307 | 308 | NSError *error = nil; 309 | [[NSFileManager defaultManager] createDirectoryAtURL:outputFolderURL 310 | withIntermediateDirectories:NO 311 | attributes:nil 312 | error:&error]; 313 | if (error != nil) { 314 | NSLog(@"Error creating directory: %@", error); 315 | outputFolderURL = nil; 316 | } 317 | return outputFolderURL; 318 | } 319 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | GNU GENERAL PUBLIC LICENSE 2 | Version 3, 29 June 2007 3 | 4 | Copyright (C) 2007 Free Software Foundation, Inc. 5 | Everyone is permitted to copy and distribute verbatim copies 6 | of this license document, but changing it is not allowed. 7 | 8 | Preamble 9 | 10 | The GNU General Public License is a free, copyleft license for 11 | software and other kinds of works. 12 | 13 | The licenses for most software and other practical works are designed 14 | to take away your freedom to share and change the works. By contrast, 15 | the GNU General Public License is intended to guarantee your freedom to 16 | share and change all versions of a program--to make sure it remains free 17 | software for all its users. We, the Free Software Foundation, use the 18 | GNU General Public License for most of our software; it applies also to 19 | any other work released this way by its authors. You can apply it to 20 | your programs, too. 21 | 22 | When we speak of free software, we are referring to freedom, not 23 | price. Our General Public Licenses are designed to make sure that you 24 | have the freedom to distribute copies of free software (and charge for 25 | them if you wish), that you receive source code or can get it if you 26 | want it, that you can change the software or use pieces of it in new 27 | free programs, and that you know you can do these things. 
28 | 29 | To protect your rights, we need to prevent others from denying you 30 | these rights or asking you to surrender the rights. Therefore, you have 31 | certain responsibilities if you distribute copies of the software, or if 32 | you modify it: responsibilities to respect the freedom of others. 33 | 34 | For example, if you distribute copies of such a program, whether 35 | gratis or for a fee, you must pass on to the recipients the same 36 | freedoms that you received. You must make sure that they, too, receive 37 | or can get the source code. And you must show them these terms so they 38 | know their rights. 39 | 40 | Developers that use the GNU GPL protect your rights with two steps: 41 | (1) assert copyright on the software, and (2) offer you this License 42 | giving you legal permission to copy, distribute and/or modify it. 43 | 44 | For the developers' and authors' protection, the GPL clearly explains 45 | that there is no warranty for this free software. For both users' and 46 | authors' sake, the GPL requires that modified versions be marked as 47 | changed, so that their problems will not be attributed erroneously to 48 | authors of previous versions. 49 | 50 | Some devices are designed to deny users access to install or run 51 | modified versions of the software inside them, although the manufacturer 52 | can do so. This is fundamentally incompatible with the aim of 53 | protecting users' freedom to change the software. The systematic 54 | pattern of such abuse occurs in the area of products for individuals to 55 | use, which is precisely where it is most unacceptable. Therefore, we 56 | have designed this version of the GPL to prohibit the practice for those 57 | products. If such problems arise substantially in other domains, we 58 | stand ready to extend this provision to those domains in future versions 59 | of the GPL, as needed to protect the freedom of users. 60 | 61 | Finally, every program is threatened constantly by software patents. 
62 | States should not allow patents to restrict development and use of 63 | software on general-purpose computers, but in those that do, we wish to 64 | avoid the special danger that patents applied to a free program could 65 | make it effectively proprietary. To prevent this, the GPL assures that 66 | patents cannot be used to render the program non-free. 67 | 68 | The precise terms and conditions for copying, distribution and 69 | modification follow. 70 | 71 | TERMS AND CONDITIONS 72 | 73 | 0. Definitions. 74 | 75 | "This License" refers to version 3 of the GNU General Public License. 76 | 77 | "Copyright" also means copyright-like laws that apply to other kinds of 78 | works, such as semiconductor masks. 79 | 80 | "The Program" refers to any copyrightable work licensed under this 81 | License. Each licensee is addressed as "you". "Licensees" and 82 | "recipients" may be individuals or organizations. 83 | 84 | To "modify" a work means to copy from or adapt all or part of the work 85 | in a fashion requiring copyright permission, other than the making of an 86 | exact copy. The resulting work is called a "modified version" of the 87 | earlier work or a work "based on" the earlier work. 88 | 89 | A "covered work" means either the unmodified Program or a work based 90 | on the Program. 91 | 92 | To "propagate" a work means to do anything with it that, without 93 | permission, would make you directly or secondarily liable for 94 | infringement under applicable copyright law, except executing it on a 95 | computer or modifying a private copy. Propagation includes copying, 96 | distribution (with or without modification), making available to the 97 | public, and in some countries other activities as well. 98 | 99 | To "convey" a work means any kind of propagation that enables other 100 | parties to make or receive copies. Mere interaction with a user through 101 | a computer network, with no transfer of a copy, is not conveying. 
102 | 103 | An interactive user interface displays "Appropriate Legal Notices" 104 | to the extent that it includes a convenient and prominently visible 105 | feature that (1) displays an appropriate copyright notice, and (2) 106 | tells the user that there is no warranty for the work (except to the 107 | extent that warranties are provided), that licensees may convey the 108 | work under this License, and how to view a copy of this License. If 109 | the interface presents a list of user commands or options, such as a 110 | menu, a prominent item in the list meets this criterion. 111 | 112 | 1. Source Code. 113 | 114 | The "source code" for a work means the preferred form of the work 115 | for making modifications to it. "Object code" means any non-source 116 | form of a work. 117 | 118 | A "Standard Interface" means an interface that either is an official 119 | standard defined by a recognized standards body, or, in the case of 120 | interfaces specified for a particular programming language, one that 121 | is widely used among developers working in that language. 122 | 123 | The "System Libraries" of an executable work include anything, other 124 | than the work as a whole, that (a) is included in the normal form of 125 | packaging a Major Component, but which is not part of that Major 126 | Component, and (b) serves only to enable use of the work with that 127 | Major Component, or to implement a Standard Interface for which an 128 | implementation is available to the public in source code form. A 129 | "Major Component", in this context, means a major essential component 130 | (kernel, window system, and so on) of the specific operating system 131 | (if any) on which the executable work runs, or a compiler used to 132 | produce the work, or an object code interpreter used to run it. 
133 | 134 | The "Corresponding Source" for a work in object code form means all 135 | the source code needed to generate, install, and (for an executable 136 | work) run the object code and to modify the work, including scripts to 137 | control those activities. However, it does not include the work's 138 | System Libraries, or general-purpose tools or generally available free 139 | programs which are used unmodified in performing those activities but 140 | which are not part of the work. For example, Corresponding Source 141 | includes interface definition files associated with source files for 142 | the work, and the source code for shared libraries and dynamically 143 | linked subprograms that the work is specifically designed to require, 144 | such as by intimate data communication or control flow between those 145 | subprograms and other parts of the work. 146 | 147 | The Corresponding Source need not include anything that users 148 | can regenerate automatically from other parts of the Corresponding 149 | Source. 150 | 151 | The Corresponding Source for a work in source code form is that 152 | same work. 153 | 154 | 2. Basic Permissions. 155 | 156 | All rights granted under this License are granted for the term of 157 | copyright on the Program, and are irrevocable provided the stated 158 | conditions are met. This License explicitly affirms your unlimited 159 | permission to run the unmodified Program. The output from running a 160 | covered work is covered by this License only if the output, given its 161 | content, constitutes a covered work. This License acknowledges your 162 | rights of fair use or other equivalent, as provided by copyright law. 163 | 164 | You may make, run and propagate covered works that you do not 165 | convey, without conditions so long as your license otherwise remains 166 | in force. 
You may convey covered works to others for the sole purpose 167 | of having them make modifications exclusively for you, or provide you 168 | with facilities for running those works, provided that you comply with 169 | the terms of this License in conveying all material for which you do 170 | not control copyright. Those thus making or running the covered works 171 | for you must do so exclusively on your behalf, under your direction 172 | and control, on terms that prohibit them from making any copies of 173 | your copyrighted material outside their relationship with you. 174 | 175 | Conveying under any other circumstances is permitted solely under 176 | the conditions stated below. Sublicensing is not allowed; section 10 177 | makes it unnecessary. 178 | 179 | 3. Protecting Users' Legal Rights From Anti-Circumvention Law. 180 | 181 | No covered work shall be deemed part of an effective technological 182 | measure under any applicable law fulfilling obligations under article 183 | 11 of the WIPO copyright treaty adopted on 20 December 1996, or 184 | similar laws prohibiting or restricting circumvention of such 185 | measures. 186 | 187 | When you convey a covered work, you waive any legal power to forbid 188 | circumvention of technological measures to the extent such circumvention 189 | is effected by exercising rights under this License with respect to 190 | the covered work, and you disclaim any intention to limit operation or 191 | modification of the work as a means of enforcing, against the work's 192 | users, your or third parties' legal rights to forbid circumvention of 193 | technological measures. 194 | 195 | 4. Conveying Verbatim Copies. 
196 | 197 | You may convey verbatim copies of the Program's source code as you 198 | receive it, in any medium, provided that you conspicuously and 199 | appropriately publish on each copy an appropriate copyright notice; 200 | keep intact all notices stating that this License and any 201 | non-permissive terms added in accord with section 7 apply to the code; 202 | keep intact all notices of the absence of any warranty; and give all 203 | recipients a copy of this License along with the Program. 204 | 205 | You may charge any price or no price for each copy that you convey, 206 | and you may offer support or warranty protection for a fee. 207 | 208 | 5. Conveying Modified Source Versions. 209 | 210 | You may convey a work based on the Program, or the modifications to 211 | produce it from the Program, in the form of source code under the 212 | terms of section 4, provided that you also meet all of these conditions: 213 | 214 | a) The work must carry prominent notices stating that you modified 215 | it, and giving a relevant date. 216 | 217 | b) The work must carry prominent notices stating that it is 218 | released under this License and any conditions added under section 219 | 7. This requirement modifies the requirement in section 4 to 220 | "keep intact all notices". 221 | 222 | c) You must license the entire work, as a whole, under this 223 | License to anyone who comes into possession of a copy. This 224 | License will therefore apply, along with any applicable section 7 225 | additional terms, to the whole of the work, and all its parts, 226 | regardless of how they are packaged. This License gives no 227 | permission to license the work in any other way, but it does not 228 | invalidate such permission if you have separately received it. 
229 | 230 | d) If the work has interactive user interfaces, each must display 231 | Appropriate Legal Notices; however, if the Program has interactive 232 | interfaces that do not display Appropriate Legal Notices, your 233 | work need not make them do so. 234 | 235 | A compilation of a covered work with other separate and independent 236 | works, which are not by their nature extensions of the covered work, 237 | and which are not combined with it such as to form a larger program, 238 | in or on a volume of a storage or distribution medium, is called an 239 | "aggregate" if the compilation and its resulting copyright are not 240 | used to limit the access or legal rights of the compilation's users 241 | beyond what the individual works permit. Inclusion of a covered work 242 | in an aggregate does not cause this License to apply to the other 243 | parts of the aggregate. 244 | 245 | 6. Conveying Non-Source Forms. 246 | 247 | You may convey a covered work in object code form under the terms 248 | of sections 4 and 5, provided that you also convey the 249 | machine-readable Corresponding Source under the terms of this License, 250 | in one of these ways: 251 | 252 | a) Convey the object code in, or embodied in, a physical product 253 | (including a physical distribution medium), accompanied by the 254 | Corresponding Source fixed on a durable physical medium 255 | customarily used for software interchange. 
256 | 257 | b) Convey the object code in, or embodied in, a physical product 258 | (including a physical distribution medium), accompanied by a 259 | written offer, valid for at least three years and valid for as 260 | long as you offer spare parts or customer support for that product 261 | model, to give anyone who possesses the object code either (1) a 262 | copy of the Corresponding Source for all the software in the 263 | product that is covered by this License, on a durable physical 264 | medium customarily used for software interchange, for a price no 265 | more than your reasonable cost of physically performing this 266 | conveying of source, or (2) access to copy the 267 | Corresponding Source from a network server at no charge. 268 | 269 | c) Convey individual copies of the object code with a copy of the 270 | written offer to provide the Corresponding Source. This 271 | alternative is allowed only occasionally and noncommercially, and 272 | only if you received the object code with such an offer, in accord 273 | with subsection 6b. 274 | 275 | d) Convey the object code by offering access from a designated 276 | place (gratis or for a charge), and offer equivalent access to the 277 | Corresponding Source in the same way through the same place at no 278 | further charge. You need not require recipients to copy the 279 | Corresponding Source along with the object code. If the place to 280 | copy the object code is a network server, the Corresponding Source 281 | may be on a different server (operated by you or a third party) 282 | that supports equivalent copying facilities, provided you maintain 283 | clear directions next to the object code saying where to find the 284 | Corresponding Source. Regardless of what server hosts the 285 | Corresponding Source, you remain obligated to ensure that it is 286 | available for as long as needed to satisfy these requirements. 
287 | 288 | e) Convey the object code using peer-to-peer transmission, provided 289 | you inform other peers where the object code and Corresponding 290 | Source of the work are being offered to the general public at no 291 | charge under subsection 6d. 292 | 293 | A separable portion of the object code, whose source code is excluded 294 | from the Corresponding Source as a System Library, need not be 295 | included in conveying the object code work. 296 | 297 | A "User Product" is either (1) a "consumer product", which means any 298 | tangible personal property which is normally used for personal, family, 299 | or household purposes, or (2) anything designed or sold for incorporation 300 | into a dwelling. In determining whether a product is a consumer product, 301 | doubtful cases shall be resolved in favor of coverage. For a particular 302 | product received by a particular user, "normally used" refers to a 303 | typical or common use of that class of product, regardless of the status 304 | of the particular user or of the way in which the particular user 305 | actually uses, or expects or is expected to use, the product. A product 306 | is a consumer product regardless of whether the product has substantial 307 | commercial, industrial or non-consumer uses, unless such uses represent 308 | the only significant mode of use of the product. 309 | 310 | "Installation Information" for a User Product means any methods, 311 | procedures, authorization keys, or other information required to install 312 | and execute modified versions of a covered work in that User Product from 313 | a modified version of its Corresponding Source. The information must 314 | suffice to ensure that the continued functioning of the modified object 315 | code is in no case prevented or interfered with solely because 316 | modification has been made. 
317 | 318 | If you convey an object code work under this section in, or with, or 319 | specifically for use in, a User Product, and the conveying occurs as 320 | part of a transaction in which the right of possession and use of the 321 | User Product is transferred to the recipient in perpetuity or for a 322 | fixed term (regardless of how the transaction is characterized), the 323 | Corresponding Source conveyed under this section must be accompanied 324 | by the Installation Information. But this requirement does not apply 325 | if neither you nor any third party retains the ability to install 326 | modified object code on the User Product (for example, the work has 327 | been installed in ROM). 328 | 329 | The requirement to provide Installation Information does not include a 330 | requirement to continue to provide support service, warranty, or updates 331 | for a work that has been modified or installed by the recipient, or for 332 | the User Product in which it has been modified or installed. Access to a 333 | network may be denied when the modification itself materially and 334 | adversely affects the operation of the network or violates the rules and 335 | protocols for communication across the network. 336 | 337 | Corresponding Source conveyed, and Installation Information provided, 338 | in accord with this section must be in a format that is publicly 339 | documented (and with an implementation available to the public in 340 | source code form), and must require no special password or key for 341 | unpacking, reading or copying. 342 | 343 | 7. Additional Terms. 344 | 345 | "Additional permissions" are terms that supplement the terms of this 346 | License by making exceptions from one or more of its conditions. 347 | Additional permissions that are applicable to the entire Program shall 348 | be treated as though they were included in this License, to the extent 349 | that they are valid under applicable law. 
If additional permissions 350 | apply only to part of the Program, that part may be used separately 351 | under those permissions, but the entire Program remains governed by 352 | this License without regard to the additional permissions. 353 | 354 | When you convey a copy of a covered work, you may at your option 355 | remove any additional permissions from that copy, or from any part of 356 | it. (Additional permissions may be written to require their own 357 | removal in certain cases when you modify the work.) You may place 358 | additional permissions on material, added by you to a covered work, 359 | for which you have or can give appropriate copyright permission. 360 | 361 | Notwithstanding any other provision of this License, for material you 362 | add to a covered work, you may (if authorized by the copyright holders of 363 | that material) supplement the terms of this License with terms: 364 | 365 | a) Disclaiming warranty or limiting liability differently from the 366 | terms of sections 15 and 16 of this License; or 367 | 368 | b) Requiring preservation of specified reasonable legal notices or 369 | author attributions in that material or in the Appropriate Legal 370 | Notices displayed by works containing it; or 371 | 372 | c) Prohibiting misrepresentation of the origin of that material, or 373 | requiring that modified versions of such material be marked in 374 | reasonable ways as different from the original version; or 375 | 376 | d) Limiting the use for publicity purposes of names of licensors or 377 | authors of the material; or 378 | 379 | e) Declining to grant rights under trademark law for use of some 380 | trade names, trademarks, or service marks; or 381 | 382 | f) Requiring indemnification of licensors and authors of that 383 | material by anyone who conveys the material (or modified versions of 384 | it) with contractual assumptions of liability to the recipient, for 385 | any liability that these contractual assumptions directly impose on 
386 | those licensors and authors. 387 | 388 | All other non-permissive additional terms are considered "further 389 | restrictions" within the meaning of section 10. If the Program as you 390 | received it, or any part of it, contains a notice stating that it is 391 | governed by this License along with a term that is a further 392 | restriction, you may remove that term. If a license document contains 393 | a further restriction but permits relicensing or conveying under this 394 | License, you may add to a covered work material governed by the terms 395 | of that license document, provided that the further restriction does 396 | not survive such relicensing or conveying. 397 | 398 | If you add terms to a covered work in accord with this section, you 399 | must place, in the relevant source files, a statement of the 400 | additional terms that apply to those files, or a notice indicating 401 | where to find the applicable terms. 402 | 403 | Additional terms, permissive or non-permissive, may be stated in the 404 | form of a separately written license, or stated as exceptions; 405 | the above requirements apply either way. 406 | 407 | 8. Termination. 408 | 409 | You may not propagate or modify a covered work except as expressly 410 | provided under this License. Any attempt otherwise to propagate or 411 | modify it is void, and will automatically terminate your rights under 412 | this License (including any patent licenses granted under the third 413 | paragraph of section 11). 414 | 415 | However, if you cease all violation of this License, then your 416 | license from a particular copyright holder is reinstated (a) 417 | provisionally, unless and until the copyright holder explicitly and 418 | finally terminates your license, and (b) permanently, if the copyright 419 | holder fails to notify you of the violation by some reasonable means 420 | prior to 60 days after the cessation. 
421 | 422 | Moreover, your license from a particular copyright holder is 423 | reinstated permanently if the copyright holder notifies you of the 424 | violation by some reasonable means, this is the first time you have 425 | received notice of violation of this License (for any work) from that 426 | copyright holder, and you cure the violation prior to 30 days after 427 | your receipt of the notice. 428 | 429 | Termination of your rights under this section does not terminate the 430 | licenses of parties who have received copies or rights from you under 431 | this License. If your rights have been terminated and not permanently 432 | reinstated, you do not qualify to receive new licenses for the same 433 | material under section 10. 434 | 435 | 9. Acceptance Not Required for Having Copies. 436 | 437 | You are not required to accept this License in order to receive or 438 | run a copy of the Program. Ancillary propagation of a covered work 439 | occurring solely as a consequence of using peer-to-peer transmission 440 | to receive a copy likewise does not require acceptance. However, 441 | nothing other than this License grants you permission to propagate or 442 | modify any covered work. These actions infringe copyright if you do 443 | not accept this License. Therefore, by modifying or propagating a 444 | covered work, you indicate your acceptance of this License to do so. 445 | 446 | 10. Automatic Licensing of Downstream Recipients. 447 | 448 | Each time you convey a covered work, the recipient automatically 449 | receives a license from the original licensors, to run, modify and 450 | propagate that work, subject to this License. You are not responsible 451 | for enforcing compliance by third parties with this License. 452 | 453 | An "entity transaction" is a transaction transferring control of an 454 | organization, or substantially all assets of one, or subdividing an 455 | organization, or merging organizations. 
If propagation of a covered 456 | work results from an entity transaction, each party to that 457 | transaction who receives a copy of the work also receives whatever 458 | licenses to the work the party's predecessor in interest had or could 459 | give under the previous paragraph, plus a right to possession of the 460 | Corresponding Source of the work from the predecessor in interest, if 461 | the predecessor has it or can get it with reasonable efforts. 462 | 463 | You may not impose any further restrictions on the exercise of the 464 | rights granted or affirmed under this License. For example, you may 465 | not impose a license fee, royalty, or other charge for exercise of 466 | rights granted under this License, and you may not initiate litigation 467 | (including a cross-claim or counterclaim in a lawsuit) alleging that 468 | any patent claim is infringed by making, using, selling, offering for 469 | sale, or importing the Program or any portion of it. 470 | 471 | 11. Patents. 472 | 473 | A "contributor" is a copyright holder who authorizes use under this 474 | License of the Program or a work on which the Program is based. The 475 | work thus licensed is called the contributor's "contributor version". 476 | 477 | A contributor's "essential patent claims" are all patent claims 478 | owned or controlled by the contributor, whether already acquired or 479 | hereafter acquired, that would be infringed by some manner, permitted 480 | by this License, of making, using, or selling its contributor version, 481 | but do not include claims that would be infringed only as a 482 | consequence of further modification of the contributor version. For 483 | purposes of this definition, "control" includes the right to grant 484 | patent sublicenses in a manner consistent with the requirements of 485 | this License. 
486 | 487 | Each contributor grants you a non-exclusive, worldwide, royalty-free 488 | patent license under the contributor's essential patent claims, to 489 | make, use, sell, offer for sale, import and otherwise run, modify and 490 | propagate the contents of its contributor version. 491 | 492 | In the following three paragraphs, a "patent license" is any express 493 | agreement or commitment, however denominated, not to enforce a patent 494 | (such as an express permission to practice a patent or covenant not to 495 | sue for patent infringement). To "grant" such a patent license to a 496 | party means to make such an agreement or commitment not to enforce a 497 | patent against the party. 498 | 499 | If you convey a covered work, knowingly relying on a patent license, 500 | and the Corresponding Source of the work is not available for anyone 501 | to copy, free of charge and under the terms of this License, through a 502 | publicly available network server or other readily accessible means, 503 | then you must either (1) cause the Corresponding Source to be so 504 | available, or (2) arrange to deprive yourself of the benefit of the 505 | patent license for this particular work, or (3) arrange, in a manner 506 | consistent with the requirements of this License, to extend the patent 507 | license to downstream recipients. "Knowingly relying" means you have 508 | actual knowledge that, but for the patent license, your conveying the 509 | covered work in a country, or your recipient's use of the covered work 510 | in a country, would infringe one or more identifiable patents in that 511 | country that you have reason to believe are valid. 
512 | 513 | If, pursuant to or in connection with a single transaction or 514 | arrangement, you convey, or propagate by procuring conveyance of, a 515 | covered work, and grant a patent license to some of the parties 516 | receiving the covered work authorizing them to use, propagate, modify 517 | or convey a specific copy of the covered work, then the patent license 518 | you grant is automatically extended to all recipients of the covered 519 | work and works based on it. 520 | 521 | A patent license is "discriminatory" if it does not include within 522 | the scope of its coverage, prohibits the exercise of, or is 523 | conditioned on the non-exercise of one or more of the rights that are 524 | specifically granted under this License. You may not convey a covered 525 | work if you are a party to an arrangement with a third party that is 526 | in the business of distributing software, under which you make payment 527 | to the third party based on the extent of your activity of conveying 528 | the work, and under which the third party grants, to any of the 529 | parties who would receive the covered work from you, a discriminatory 530 | patent license (a) in connection with copies of the covered work 531 | conveyed by you (or copies made from those copies), or (b) primarily 532 | for and in connection with specific products or compilations that 533 | contain the covered work, unless you entered into that arrangement, 534 | or that patent license was granted, prior to 28 March 2007. 535 | 536 | Nothing in this License shall be construed as excluding or limiting 537 | any implied license or other defenses to infringement that may 538 | otherwise be available to you under applicable patent law. 539 | 540 | 12. No Surrender of Others' Freedom. 541 | 542 | If conditions are imposed on you (whether by court order, agreement or 543 | otherwise) that contradict the conditions of this License, they do not 544 | excuse you from the conditions of this License. 
If you cannot convey a 545 | covered work so as to satisfy simultaneously your obligations under this 546 | License and any other pertinent obligations, then as a consequence you may 547 | not convey it at all. For example, if you agree to terms that obligate you 548 | to collect a royalty for further conveying from those to whom you convey 549 | the Program, the only way you could satisfy both those terms and this 550 | License would be to refrain entirely from conveying the Program. 551 | 552 | 13. Use with the GNU Affero General Public License. 553 | 554 | Notwithstanding any other provision of this License, you have 555 | permission to link or combine any covered work with a work licensed 556 | under version 3 of the GNU Affero General Public License into a single 557 | combined work, and to convey the resulting work. The terms of this 558 | License will continue to apply to the part which is the covered work, 559 | but the special requirements of the GNU Affero General Public License, 560 | section 13, concerning interaction through a network will apply to the 561 | combination as such. 562 | 563 | 14. Revised Versions of this License. 564 | 565 | The Free Software Foundation may publish revised and/or new versions of 566 | the GNU General Public License from time to time. Such new versions will 567 | be similar in spirit to the present version, but may differ in detail to 568 | address new problems or concerns. 569 | 570 | Each version is given a distinguishing version number. If the 571 | Program specifies that a certain numbered version of the GNU General 572 | Public License "or any later version" applies to it, you have the 573 | option of following the terms and conditions either of that numbered 574 | version or of any later version published by the Free Software 575 | Foundation. If the Program does not specify a version number of the 576 | GNU General Public License, you may choose any version ever published 577 | by the Free Software Foundation. 
578 | 579 | If the Program specifies that a proxy can decide which future 580 | versions of the GNU General Public License can be used, that proxy's 581 | public statement of acceptance of a version permanently authorizes you 582 | to choose that version for the Program. 583 | 584 | Later license versions may give you additional or different 585 | permissions. However, no additional obligations are imposed on any 586 | author or copyright holder as a result of your choosing to follow a 587 | later version. 588 | 589 | 15. Disclaimer of Warranty. 590 | 591 | THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY 592 | APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT 593 | HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY 594 | OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, 595 | THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR 596 | PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM 597 | IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF 598 | ALL NECESSARY SERVICING, REPAIR OR CORRECTION. 599 | 600 | 16. Limitation of Liability. 601 | 602 | IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING 603 | WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS 604 | THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY 605 | GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE 606 | USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF 607 | DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD 608 | PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), 609 | EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF 610 | SUCH DAMAGES. 611 | 612 | 17. Interpretation of Sections 15 and 16. 
613 | 614 | If the disclaimer of warranty and limitation of liability provided 615 | above cannot be given local legal effect according to their terms, 616 | reviewing courts shall apply local law that most closely approximates 617 | an absolute waiver of all civil liability in connection with the 618 | Program, unless a warranty or assumption of liability accompanies a 619 | copy of the Program in return for a fee. 620 | 621 | END OF TERMS AND CONDITIONS 622 | 623 | How to Apply These Terms to Your New Programs 624 | 625 | If you develop a new program, and you want it to be of the greatest 626 | possible use to the public, the best way to achieve this is to make it 627 | free software which everyone can redistribute and change under these terms. 628 | 629 | To do so, attach the following notices to the program. It is safest 630 | to attach them to the start of each source file to most effectively 631 | state the exclusion of warranty; and each file should have at least 632 | the "copyright" line and a pointer to where the full notice is found. 633 | 634 | 635 | Copyright (C) 636 | 637 | This program is free software: you can redistribute it and/or modify 638 | it under the terms of the GNU General Public License as published by 639 | the Free Software Foundation, either version 3 of the License, or 640 | (at your option) any later version. 641 | 642 | This program is distributed in the hope that it will be useful, 643 | but WITHOUT ANY WARRANTY; without even the implied warranty of 644 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 645 | GNU General Public License for more details. 646 | 647 | You should have received a copy of the GNU General Public License 648 | along with this program. If not, see . 649 | 650 | Also add information on how to contact you by electronic and paper mail. 
651 | 652 | If the program does terminal interaction, make it output a short 653 | notice like this when it starts in an interactive mode: 654 | 655 | Copyright (C) 656 | This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'. 657 | This is free software, and you are welcome to redistribute it 658 | under certain conditions; type `show c' for details. 659 | 660 | The hypothetical commands `show w' and `show c' should show the appropriate 661 | parts of the General Public License. Of course, your program's commands 662 | might be different; for a GUI interface, you would use an "about box". 663 | 664 | You should also get your employer (if you work as a programmer) or school, 665 | if any, to sign a "copyright disclaimer" for the program, if necessary. 666 | For more information on this, and how to apply and follow the GNU GPL, see 667 | . 668 | 669 | The GNU General Public License does not permit incorporating your program 670 | into proprietary programs. If your program is a subroutine library, you 671 | may consider it more useful to permit linking proprietary applications with 672 | the library. If this is what you want to do, use the GNU Lesser General 673 | Public License instead of this License. But first, please read 674 | . 675 | -------------------------------------------------------------------------------- /LICENSE.txt: -------------------------------------------------------------------------------- 1 | Sample code project: RosyWriter 2 | Version: 2.2 3 | 4 | IMPORTANT: This Apple software is supplied to you by Apple 5 | Inc. ("Apple") in consideration of your agreement to the following 6 | terms, and your use, installation, modification or redistribution of 7 | this Apple software constitutes acceptance of these terms. If you do 8 | not agree with these terms, please do not use, install, modify or 9 | redistribute this Apple software. 
10 | 11 | In consideration of your agreement to abide by the following terms, and 12 | subject to these terms, Apple grants you a personal, non-exclusive 13 | license, under Apple's copyrights in this original Apple software (the 14 | "Apple Software"), to use, reproduce, modify and redistribute the Apple 15 | Software, with or without modifications, in source and/or binary forms; 16 | provided that if you redistribute the Apple Software in its entirety and 17 | without modifications, you must retain this notice and the following 18 | text and disclaimers in all such redistributions of the Apple Software. 19 | Neither the name, trademarks, service marks or logos of Apple Inc. may 20 | be used to endorse or promote products derived from the Apple Software 21 | without specific prior written permission from Apple. Except as 22 | expressly stated in this notice, no other rights or licenses, express or 23 | implied, are granted by Apple herein, including but not limited to any 24 | patent rights that may be infringed by your derivative works or by other 25 | works in which the Apple Software may be incorporated. 26 | 27 | The Apple Software is provided by Apple on an "AS IS" basis. APPLE 28 | MAKES NO WARRANTIES, EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION 29 | THE IMPLIED WARRANTIES OF NON-INFRINGEMENT, MERCHANTABILITY AND FITNESS 30 | FOR A PARTICULAR PURPOSE, REGARDING THE APPLE SOFTWARE OR ITS USE AND 31 | OPERATION ALONE OR IN COMBINATION WITH YOUR PRODUCTS. 
32 | 33 | IN NO EVENT SHALL APPLE BE LIABLE FOR ANY SPECIAL, INDIRECT, INCIDENTAL 34 | OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF 35 | SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS 36 | INTERRUPTION) ARISING IN ANY WAY OUT OF THE USE, REPRODUCTION, 37 | MODIFICATION AND/OR DISTRIBUTION OF THE APPLE SOFTWARE, HOWEVER CAUSED 38 | AND WHETHER UNDER THEORY OF CONTRACT, TORT (INCLUDING NEGLIGENCE), 39 | STRICT LIABILITY OR OTHERWISE, EVEN IF APPLE HAS BEEN ADVISED OF THE 40 | POSSIBILITY OF SUCH DAMAGE. 41 | 42 | Copyright (C) 2016 Apple Inc. All Rights Reserved. 43 | -------------------------------------------------------------------------------- /MarsLogger-Info.plist: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | CFBundleDevelopmentRegion 6 | en 7 | CFBundleDisplayName 8 | ${PRODUCT_NAME} 9 | CFBundleExecutable 10 | ${EXECUTABLE_NAME} 11 | CFBundleIdentifier 12 | $(PRODUCT_BUNDLE_IDENTIFIER) 13 | CFBundleInfoDictionaryVersion 14 | 6.0 15 | CFBundleName 16 | ${PRODUCT_NAME} 17 | CFBundlePackageType 18 | APPL 19 | CFBundleShortVersionString 20 | 1.0.0 21 | CFBundleSignature 22 | ???? 
23 | CFBundleVersion 24 | 1.0.0 25 | LSRequiresIPhoneOS 26 | 27 | LSSupportsOpeningDocumentsInPlace 28 | 29 | NSCameraUsageDescription 30 | 31 | NSLocationUsageDescription 32 | 获取位置 33 | NSLocationWhenInUseUsageDescription 34 | 获取定位 35 | NSMicrophoneUsageDescription 36 | 37 | NSPhotoLibraryAddUsageDescription 38 | Save Video to the Photos Album 39 | NSPhotoLibraryUsageDescription 40 | 41 | UIFileSharingEnabled 42 | 43 | UILaunchStoryboardName 44 | Launch 45 | UIMainStoryboardFile 46 | Main 47 | UIRequiredDeviceCapabilities 48 | 49 | video-camera 50 | accelerometer 51 | gyroscope 52 | 53 | UIRequiresFullScreen 54 | 55 | UIRequiresFullScreen~ipad 56 | 57 | UIStatusBarHidden 58 | 59 | UISupportedInterfaceOrientations 60 | 61 | UIInterfaceOrientationPortrait 62 | 63 | UISupportedInterfaceOrientations~ipad 64 | 65 | UIInterfaceOrientationPortrait 66 | 67 | 68 | 69 | -------------------------------------------------------------------------------- /MarsLogger-Prefix.pch: -------------------------------------------------------------------------------- 1 | // 2 | // Prefix header for all source files of the 'RosyWriter' target in the 'RosyWriter' project 3 | // 4 | 5 | #import 6 | 7 | #ifndef __IPHONE_7_0 8 | #warning "This project uses features only available in iOS SDK 7.0 and later." 
9 | #endif 10 | 11 | #ifdef __OBJC__ 12 | #import 13 | #import 14 | #endif 15 | -------------------------------------------------------------------------------- /MarsLogger.xcodeproj/project.xcworkspace/contents.xcworkspacedata: -------------------------------------------------------------------------------- 1 | 2 | 4 | 6 | 7 | 8 | -------------------------------------------------------------------------------- /MarsLogger.xcodeproj/project.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | IDEDidComputeMac32BitWarning 6 | 7 | 8 | 9 | -------------------------------------------------------------------------------- /MarsLogger.xcodeproj/project.xcworkspace/xcshareddata/WorkspaceSettings.xcsettings: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | PreviewsEnabled 6 | 7 | 8 | 9 | -------------------------------------------------------------------------------- /MarsLogger.xcodeproj/project.xcworkspace/xcuserdata/feijiulin.xcuserdatad/UserInterfaceState.xcuserstate: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/KunruiHuang/ios-data-collector/3ebb82a48574a114ca6b55acd15ded82393aeb3a/MarsLogger.xcodeproj/project.xcworkspace/xcuserdata/feijiulin.xcuserdatad/UserInterfaceState.xcuserstate -------------------------------------------------------------------------------- /MarsLogger.xcodeproj/xcshareddata/xcschemes/MarsLogger.xcscheme: -------------------------------------------------------------------------------- 1 | 2 | 5 | 8 | 9 | 15 | 21 | 22 | 23 | 24 | 25 | 30 | 31 | 37 | 38 | 39 | 40 | 42 | 48 | 49 | 50 | 51 | 52 | 62 | 64 | 70 | 71 | 72 | 73 | 79 | 81 | 87 | 88 | 89 | 90 | 92 | 93 | 96 | 97 | 98 | -------------------------------------------------------------------------------- 
/MarsLogger.xcodeproj/xcuserdata/feijiulin.xcuserdatad/xcdebugger/Breakpoints_v2.xcbkptlist: -------------------------------------------------------------------------------- 1 | 2 | 6 | 7 | -------------------------------------------------------------------------------- /MarsLogger.xcodeproj/xcuserdata/feijiulin.xcuserdatad/xcschemes/xcschememanagement.plist: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | SchemeUserState 6 | 7 | MarsLogger.xcscheme_^#shared#^_ 8 | 9 | orderHint 10 | 0 11 | 12 | MarsLoggerCIFilter.xcscheme_^#shared#^_ 13 | 14 | orderHint 15 | 1 16 | 17 | MarsLoggerCPU.xcscheme_^#shared#^_ 18 | 19 | orderHint 20 | 2 21 | 22 | MarsLoggerOpenCV.xcscheme_^#shared#^_ 23 | 24 | orderHint 25 | 3 26 | 27 | 28 | 29 | 30 | -------------------------------------------------------------------------------- /MarsLoggerTests/Info.plist: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | CFBundleDevelopmentRegion 6 | $(DEVELOPMENT_LANGUAGE) 7 | CFBundleExecutable 8 | $(EXECUTABLE_NAME) 9 | CFBundleIdentifier 10 | $(PRODUCT_BUNDLE_IDENTIFIER) 11 | CFBundleInfoDictionaryVersion 12 | 6.0 13 | CFBundleName 14 | $(PRODUCT_NAME) 15 | CFBundlePackageType 16 | BNDL 17 | CFBundleShortVersionString 18 | 1.0 19 | CFBundleVersion 20 | 1 21 | 22 | 23 | -------------------------------------------------------------------------------- /MarsLoggerTests/MarsLoggerTests.m: -------------------------------------------------------------------------------- 1 | // 2 | // MarsLoggerTests.m 3 | // MarsLoggerTests 4 | // 5 | // Created by zxc on 2019/12/13. 6 | // 7 | 8 | #import 9 | #import "VideoTimeConverter.h" 10 | 11 | @interface MarsLoggerTests : XCTestCase 12 | 13 | @end 14 | 15 | @implementation MarsLoggerTests 16 | 17 | - (void)setUp { 18 | // Put setup code here. This method is called before the invocation of each test method in the class. 
19 | } 20 | 21 | - (void)tearDown { 22 | // Put teardown code here. This method is called after the invocation of each test method in the class. 23 | } 24 | 25 | - (void)testsecDoubleToNanoString { 26 | // This is an example of a functional test case. 27 | // Use XCTAssert and related functions to verify your tests produce the correct results. 28 | double time1 = 8523.974328432; 29 | NSString *time1s = @"8523974328432"; 30 | NSString *res1 = secDoubleToNanoString(time1); 31 | NSString *warn1 = [NSString stringWithFormat:@"expected %@ return %@", time1s, res1]; 32 | XCTAssertTrue([time1s isEqualToString:res1], @"%@", warn1); 33 | 34 | double time2 = 8523.004328432; 35 | NSString *time2s = @"8523004328432"; 36 | NSString *res2 = secDoubleToNanoString(time2); 37 | NSString *warn2 = [NSString stringWithFormat:@"expected %@ return %@", time2s, res2]; 38 | XCTAssertTrue([time2s isEqualToString:res2], @"%@", warn2); 39 | } 40 | 41 | - (void)testPerformanceExample { 42 | // This is an example of a performance test case. 43 | [self measureBlock:^{ 44 | // Put the code you want to measure the time of here. 
45 | }]; 46 | } 47 | 48 | @end 49 | -------------------------------------------------------------------------------- /README.assets/1.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/KunruiHuang/ios-data-collector/3ebb82a48574a114ca6b55acd15ded82393aeb3a/README.assets/1.gif -------------------------------------------------------------------------------- /README.assets/2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/KunruiHuang/ios-data-collector/3ebb82a48574a114ca6b55acd15ded82393aeb3a/README.assets/2.png -------------------------------------------------------------------------------- /README.assets/20230213204850.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/KunruiHuang/ios-data-collector/3ebb82a48574a114ca6b55acd15ded82393aeb3a/README.assets/20230213204850.jpg -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # ios-data-collector 2 | ![image-20210831102531915](README.assets/1.gif) 3 | 4 | 该仓库参考:https://github.com/OSUPCVLab/marslogger_ios/tree/56bfe7e78a5f7ea5ad10f97c4fe0d7bd032b0ede。 5 | ## 编译运行: 6 | 1. Open MarsLogger.xcodeproj in XCode 7 | 2. In project properties -> General set your team signing and make sure that signing certificate was successfully created 8 | 3. 
Connect your device (you may have to wait for the debugger to be set up), select it (Product -> Destination) and run application (Product -> Run) 9 | ## 增加的功能 10 | 原ios数据采集软件采集相机和IMU的数据,因为项目需要我们在此基础上添加了磁力计、ARKit Pose、气压计和GPS信息 11 | ![image-20210831102531915](README.assets/2.png) 12 | 13 | 14 | ## Dataset format: 15 | * accel.csv: time(s(from 1970)),ax(g-units),ay(g-units),az(g-units) 16 | * baro.csv: time(s(from 1970)),baro(Kpa) 17 | * head.csv: Timestamp,trueHeading,magneticHeading,headingAccuracy 18 | * gyro_accel.csv: Timestamp[nanosec], gx[rad/s], gy[rad/s], gz[rad/s], ax[m/s^2], ay[m/s^2], az[m/s^2] 19 | * gps.csv: Timestamp,currLatitude,currLongitude 20 | * pose.csv:Timestamp,tx,ty,tz,qx,qy,qz,qw 21 | * frames.csv: time(s),frameNumber,_focalLenghtX,focalLenghtY,principalPointX,principalPointY_ 22 | * movie.MP4: frames compressed in video 23 | 24 | 25 | -------------------------------------------------------------------------------- /ReadMe.txt: -------------------------------------------------------------------------------- 1 | 2 | RosyWriter 3 | 4 | This sample demonstrates how to use AVCaptureVideoDataOutput to bring frames from the camera into various processing pipelines, including CPU-based, OpenGL (i.e. on the GPU), CoreImage filters, and OpenCV. It also demonstrates best practices for writing the processed output of these pipelines to a movie file using AVAssetWriter. 5 | 6 | The project includes a different target for each of the different processing pipelines. 7 | 8 | Classes 9 | RosyWriterViewController 10 | -- This file contains the view controller logic, including support for the Record button and video preview. 11 | RosyWriterCapturePipeline 12 | -- This file manages the audio and video capture pipelines, including the AVCaptureSession, the various queues, and resource management. 13 | 14 | Renderers 15 | RosyWriterRenderer 16 | -- This file defines a generic protocol for renderer objects used by RosyWriterCapturePipeline. 
17 | RosyWriterOpenGLRenderer 18 | -- This file manages the OpenGL (GPU) processing for the "rosy" effect and delivers rendered buffers. 19 | RosyWriterCPURenderer 20 | -- This file manages the CPU processing for the "rosy" effect and delivers rendered buffers. 21 | RosyWriterCIFilterRenderer 22 | -- This file manages the CoreImage processing for the "rosy" effect and delivers rendered buffers. 23 | RosyWriterOpenCVRenderer 24 | -- This file manages the delivery of frames to an OpenCV processing block and delivers rendered buffers. 25 | 26 | RosyWriterAppDelegate 27 | -- This file is a standard application delegate class. 28 | 29 | Shaders 30 | myFilter 31 | -- OpenGL shader code for the "rosy" effect 32 | 33 | Utilities 34 | MovieRecorder 35 | -- Illustrates real-time use of AVAssetWriter to record the displayed effect. 36 | OpenGLPixelBufferView 37 | -- This is a view that displays pixel buffers on the screen using OpenGL. 38 | 39 | GL 40 | -- Utilities used by the GL processing pipeline. 41 | 42 | This program has been found to be able to record 12 minutes of 1920 x 1080 video at 30Hz and inertial data at 100Hz on an iPhone 6S. 43 | 44 | TODOs 45 | 1. Customize the video dimension with the user input. 46 | Currently the video frame size is preset with 47 | captionSession.sessionPreset = AVCaptureSessionPreset1280x720 48 | 2. Warning “All interface orientations must be supported unless the app requires full screen” 49 | for a universal app 50 | To resolve this warning, refer to [here](https://stackoverflow.com/questions/37168888/ios-9-warning-all-interface-orientations-must-be-supported-unless-the-app-req). 51 | 52 | =============================================================== 53 | Copyright © 2016 Apple Inc. All rights reserved. 
54 | -------------------------------------------------------------------------------- /Resources/Base.lproj/Launch.storyboard: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 27 | 34 | 41 | 48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57 | 58 | 59 | 60 | 61 | 62 | 63 | 64 | 65 | 66 | 67 | 68 | 69 | 70 | -------------------------------------------------------------------------------- /Resources/Base.lproj/Main.storyboard: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 35 | 42 | 49 | 56 | 57 | 58 | 59 | 60 | 61 | 62 | 63 | 64 | 65 | 66 | 67 | 68 | 69 | 70 | 71 | 72 | 73 | 74 | 75 | 76 | 77 | 78 | 79 | 80 | 81 | 82 | 83 | 84 | 85 | 86 | 87 | 88 | 89 | 90 | 91 | 92 | 93 | 94 | 95 | 96 | 97 | 98 | 99 | 100 | -------------------------------------------------------------------------------- /Resources/Images.xcassets/AppIcon.appiconset/1024.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/KunruiHuang/ios-data-collector/3ebb82a48574a114ca6b55acd15ded82393aeb3a/Resources/Images.xcassets/AppIcon.appiconset/1024.png -------------------------------------------------------------------------------- /Resources/Images.xcassets/AppIcon.appiconset/120-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/KunruiHuang/ios-data-collector/3ebb82a48574a114ca6b55acd15ded82393aeb3a/Resources/Images.xcassets/AppIcon.appiconset/120-1.png -------------------------------------------------------------------------------- /Resources/Images.xcassets/AppIcon.appiconset/120.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/KunruiHuang/ios-data-collector/3ebb82a48574a114ca6b55acd15ded82393aeb3a/Resources/Images.xcassets/AppIcon.appiconset/120.png -------------------------------------------------------------------------------- /Resources/Images.xcassets/AppIcon.appiconset/152.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/KunruiHuang/ios-data-collector/3ebb82a48574a114ca6b55acd15ded82393aeb3a/Resources/Images.xcassets/AppIcon.appiconset/152.png -------------------------------------------------------------------------------- /Resources/Images.xcassets/AppIcon.appiconset/167.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/KunruiHuang/ios-data-collector/3ebb82a48574a114ca6b55acd15ded82393aeb3a/Resources/Images.xcassets/AppIcon.appiconset/167.png -------------------------------------------------------------------------------- /Resources/Images.xcassets/AppIcon.appiconset/180.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/KunruiHuang/ios-data-collector/3ebb82a48574a114ca6b55acd15ded82393aeb3a/Resources/Images.xcassets/AppIcon.appiconset/180.png -------------------------------------------------------------------------------- /Resources/Images.xcassets/AppIcon.appiconset/20.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/KunruiHuang/ios-data-collector/3ebb82a48574a114ca6b55acd15ded82393aeb3a/Resources/Images.xcassets/AppIcon.appiconset/20.png -------------------------------------------------------------------------------- /Resources/Images.xcassets/AppIcon.appiconset/29.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/KunruiHuang/ios-data-collector/3ebb82a48574a114ca6b55acd15ded82393aeb3a/Resources/Images.xcassets/AppIcon.appiconset/29.png -------------------------------------------------------------------------------- /Resources/Images.xcassets/AppIcon.appiconset/40-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/KunruiHuang/ios-data-collector/3ebb82a48574a114ca6b55acd15ded82393aeb3a/Resources/Images.xcassets/AppIcon.appiconset/40-1.png -------------------------------------------------------------------------------- /Resources/Images.xcassets/AppIcon.appiconset/40-2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/KunruiHuang/ios-data-collector/3ebb82a48574a114ca6b55acd15ded82393aeb3a/Resources/Images.xcassets/AppIcon.appiconset/40-2.png -------------------------------------------------------------------------------- /Resources/Images.xcassets/AppIcon.appiconset/40.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/KunruiHuang/ios-data-collector/3ebb82a48574a114ca6b55acd15ded82393aeb3a/Resources/Images.xcassets/AppIcon.appiconset/40.png -------------------------------------------------------------------------------- /Resources/Images.xcassets/AppIcon.appiconset/58-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/KunruiHuang/ios-data-collector/3ebb82a48574a114ca6b55acd15ded82393aeb3a/Resources/Images.xcassets/AppIcon.appiconset/58-1.png -------------------------------------------------------------------------------- /Resources/Images.xcassets/AppIcon.appiconset/58.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/KunruiHuang/ios-data-collector/3ebb82a48574a114ca6b55acd15ded82393aeb3a/Resources/Images.xcassets/AppIcon.appiconset/58.png -------------------------------------------------------------------------------- /Resources/Images.xcassets/AppIcon.appiconset/60.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/KunruiHuang/ios-data-collector/3ebb82a48574a114ca6b55acd15ded82393aeb3a/Resources/Images.xcassets/AppIcon.appiconset/60.png -------------------------------------------------------------------------------- /Resources/Images.xcassets/AppIcon.appiconset/76.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/KunruiHuang/ios-data-collector/3ebb82a48574a114ca6b55acd15ded82393aeb3a/Resources/Images.xcassets/AppIcon.appiconset/76.png -------------------------------------------------------------------------------- /Resources/Images.xcassets/AppIcon.appiconset/80-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/KunruiHuang/ios-data-collector/3ebb82a48574a114ca6b55acd15ded82393aeb3a/Resources/Images.xcassets/AppIcon.appiconset/80-1.png -------------------------------------------------------------------------------- /Resources/Images.xcassets/AppIcon.appiconset/80.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/KunruiHuang/ios-data-collector/3ebb82a48574a114ca6b55acd15ded82393aeb3a/Resources/Images.xcassets/AppIcon.appiconset/80.png -------------------------------------------------------------------------------- /Resources/Images.xcassets/AppIcon.appiconset/87.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/KunruiHuang/ios-data-collector/3ebb82a48574a114ca6b55acd15ded82393aeb3a/Resources/Images.xcassets/AppIcon.appiconset/87.png -------------------------------------------------------------------------------- /Resources/Images.xcassets/AppIcon.appiconset/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "images" : [ 3 | { 4 | "size" : "20x20", 5 | "idiom" : "iphone", 6 | "filename" : "40.png", 7 | "scale" : "2x" 8 | }, 9 | { 10 | "size" : "20x20", 11 | "idiom" : "iphone", 12 | "filename" : "60.png", 13 | "scale" : "3x" 14 | }, 15 | { 16 | "size" : "29x29", 17 | "idiom" : "iphone", 18 | "filename" : "58.png", 19 | "scale" : "2x" 20 | }, 21 | { 22 | "size" : "29x29", 23 | "idiom" : "iphone", 24 | "filename" : "87.png", 25 | "scale" : "3x" 26 | }, 27 | { 28 | "size" : "40x40", 29 | "idiom" : "iphone", 30 | "filename" : "80.png", 31 | "scale" : "2x" 32 | }, 33 | { 34 | "size" : "40x40", 35 | "idiom" : "iphone", 36 | "filename" : "120.png", 37 | "scale" : "3x" 38 | }, 39 | { 40 | "size" : "60x60", 41 | "idiom" : "iphone", 42 | "filename" : "120-1.png", 43 | "scale" : "2x" 44 | }, 45 | { 46 | "size" : "60x60", 47 | "idiom" : "iphone", 48 | "filename" : "180.png", 49 | "scale" : "3x" 50 | }, 51 | { 52 | "size" : "20x20", 53 | "idiom" : "ipad", 54 | "filename" : "20.png", 55 | "scale" : "1x" 56 | }, 57 | { 58 | "size" : "20x20", 59 | "idiom" : "ipad", 60 | "filename" : "40-1.png", 61 | "scale" : "2x" 62 | }, 63 | { 64 | "size" : "29x29", 65 | "idiom" : "ipad", 66 | "filename" : "29.png", 67 | "scale" : "1x" 68 | }, 69 | { 70 | "size" : "29x29", 71 | "idiom" : "ipad", 72 | "filename" : "58-1.png", 73 | "scale" : "2x" 74 | }, 75 | { 76 | "size" : "40x40", 77 | "idiom" : "ipad", 78 | "filename" : "40-2.png", 79 | "scale" : "1x" 80 | }, 81 | { 82 | "size" : "40x40", 83 | "idiom" : "ipad", 84 | "filename" : "80-1.png", 85 | "scale" : "2x" 86 | }, 87 | { 88 | "size" : "76x76", 89 | "idiom" : 
"ipad", 90 | "filename" : "76.png", 91 | "scale" : "1x" 92 | }, 93 | { 94 | "size" : "76x76", 95 | "idiom" : "ipad", 96 | "filename" : "152.png", 97 | "scale" : "2x" 98 | }, 99 | { 100 | "size" : "83.5x83.5", 101 | "idiom" : "ipad", 102 | "filename" : "167.png", 103 | "scale" : "2x" 104 | }, 105 | { 106 | "size" : "1024x1024", 107 | "idiom" : "ios-marketing", 108 | "filename" : "1024.png", 109 | "scale" : "1x" 110 | } 111 | ], 112 | "info" : { 113 | "version" : 1, 114 | "author" : "xcode" 115 | } 116 | } -------------------------------------------------------------------------------- /Resources/Images.xcassets/LaunchImage.launchimage/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "images" : [ 3 | { 4 | "orientation" : "portrait", 5 | "idiom" : "iphone", 6 | "extent" : "full-screen", 7 | "minimum-system-version" : "7.0", 8 | "scale" : "2x" 9 | }, 10 | { 11 | "orientation" : "portrait", 12 | "idiom" : "iphone", 13 | "extent" : "full-screen", 14 | "minimum-system-version" : "7.0", 15 | "subtype" : "retina4", 16 | "scale" : "2x" 17 | }, 18 | { 19 | "orientation" : "portrait", 20 | "idiom" : "ipad", 21 | "extent" : "full-screen", 22 | "minimum-system-version" : "7.0", 23 | "scale" : "1x" 24 | }, 25 | { 26 | "orientation" : "landscape", 27 | "idiom" : "ipad", 28 | "extent" : "full-screen", 29 | "minimum-system-version" : "7.0", 30 | "scale" : "1x" 31 | }, 32 | { 33 | "orientation" : "portrait", 34 | "idiom" : "ipad", 35 | "extent" : "full-screen", 36 | "minimum-system-version" : "7.0", 37 | "scale" : "2x" 38 | }, 39 | { 40 | "orientation" : "landscape", 41 | "idiom" : "ipad", 42 | "extent" : "full-screen", 43 | "minimum-system-version" : "7.0", 44 | "scale" : "2x" 45 | } 46 | ], 47 | "info" : { 48 | "version" : 1, 49 | "author" : "xcode" 50 | } 51 | } -------------------------------------------------------------------------------- /Resources/Shaders/myFilter.fsh: 
-------------------------------------------------------------------------------- 1 | 2 | precision mediump float; 3 | 4 | varying mediump vec2 coordinate; 5 | uniform sampler2D videoframe; 6 | 7 | void main() 8 | { 9 | vec4 color = texture2D(videoframe, coordinate); 10 | gl_FragColor.bgra = vec4(color.b, 0.0 * color.g, color.r, color.a); 11 | } -------------------------------------------------------------------------------- /Resources/Shaders/myFilter.vsh: -------------------------------------------------------------------------------- 1 | 2 | attribute vec4 position; 3 | attribute mediump vec4 texturecoordinate; 4 | varying mediump vec2 coordinate; 5 | 6 | void main() 7 | { 8 | gl_Position = position; 9 | coordinate = texturecoordinate.xy; 10 | } 11 | 12 | -------------------------------------------------------------------------------- /Resources/en.lproj/InfoPlist.strings: -------------------------------------------------------------------------------- 1 | /* Localized versions of Info.plist keys */ 2 | 3 | -------------------------------------------------------------------------------- /main.m: -------------------------------------------------------------------------------- 1 | /* 2 | Copyright (C) 2016 Apple Inc. All Rights Reserved. 3 | See LICENSE.txt for this sample’s licensing information 4 | 5 | Abstract: 6 | Standard main file. 7 | */ 8 | 9 | #import 10 | 11 | #import "RosyWriterAppDelegate.h" 12 | 13 | int main(int argc, char *argv[]) 14 | { 15 | int retVal = 0; 16 | @autoreleasepool { 17 | retVal = UIApplicationMain( argc, argv, nil, NSStringFromClass( [RosyWriterAppDelegate class] ) ); 18 | } 19 | return retVal; 20 | } 21 | --------------------------------------------------------------------------------