├── README.md
├── YXYVideoModel
│   ├── 2017.mp4
│   ├── 2018.mp4
│   ├── ico.jpeg
│   ├── Assets.xcassets
│   │   ├── Contents.json
│   │   └── AppIcon.appiconset
│   │       └── Contents.json
│   ├── ViewController.h
│   ├── YXYViewController.h
│   ├── AppDelegate.h
│   ├── main.m
│   ├── XScratchView.h
│   ├── XRGBTool.h
│   ├── OpenGLView20.h
│   ├── Base.lproj
│   │   ├── Main.storyboard
│   │   └── LaunchScreen.storyboard
│   ├── Info.plist
│   ├── AppDelegate.m
│   ├── XScratchView.m
│   ├── YXYViewController.m
│   ├── XRGBTool.m
│   ├── OpenGLView20.m
│   └── ViewController.m
├── YXYVideoModel.xcodeproj
│   ├── project.xcworkspace
│   │   ├── contents.xcworkspacedata
│   │   └── xcshareddata
│   │       └── IDEWorkspaceChecks.plist
│   └── project.pbxproj
└── .gitignore

--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
# YXYVideoEditor
Video editing: compression, trimming, stitching, filters, watermarks, fast playback, and audio processing.
Email: 939607134@qq.com

--------------------------------------------------------------------------------
/YXYVideoModel/2017.mp4:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/xuyushiguang/YXYVideoEditor/HEAD/YXYVideoModel/2017.mp4
--------------------------------------------------------------------------------
/YXYVideoModel/2018.mp4:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/xuyushiguang/YXYVideoEditor/HEAD/YXYVideoModel/2018.mp4
--------------------------------------------------------------------------------
/YXYVideoModel/ico.jpeg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/xuyushiguang/YXYVideoEditor/HEAD/YXYVideoModel/ico.jpeg
--------------------------------------------------------------------------------
/YXYVideoModel/Assets.xcassets/Contents.json:
--------------------------------------------------------------------------------
{
  "info" : {
    "version" : 1,
    "author" : "xcode"
  }
}
--------------------------------------------------------------------------------
/YXYVideoModel.xcodeproj/project.xcworkspace/contents.xcworkspacedata:
--------------------------------------------------------------------------------
<?xml version="1.0" encoding="UTF-8"?>
<Workspace
   version = "1.0">
   <FileRef
      location = "self:YXYVideoModel.xcodeproj">
   </FileRef>
</Workspace>
--------------------------------------------------------------------------------
/YXYVideoModel/ViewController.h:
--------------------------------------------------------------------------------
//
//  ViewController.h
//  YXYVideoModel
//
//  Created by yxy on 2018/9/13.
//  Copyright © 2018 yxy. All rights reserved.
//

#import <UIKit/UIKit.h>

@interface ViewController : UIViewController


@end

--------------------------------------------------------------------------------
/YXYVideoModel/YXYViewController.h:
--------------------------------------------------------------------------------
//
//  YXYViewController.h
//  YXYVideoModel
//
//  Created by LiuGen on 2018/9/18.
//  Copyright © 2018 Test. All rights reserved.
//

#import <UIKit/UIKit.h>

@interface YXYViewController : UIViewController

@end
--------------------------------------------------------------------------------
/YXYVideoModel.xcodeproj/project.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist:
--------------------------------------------------------------------------------
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
    <key>IDEDidComputeMac32BitWarning</key>
    <true/>
</dict>
</plist>
--------------------------------------------------------------------------------
/YXYVideoModel/AppDelegate.h:
--------------------------------------------------------------------------------
//
//  AppDelegate.h
//  YXYVideoModel
//
//  Created by yxy on 2018/9/13.
//  Copyright © 2018 yxy. All rights reserved.
//

#import <UIKit/UIKit.h>

@interface AppDelegate : UIResponder <UIApplicationDelegate>

@property (strong, nonatomic) UIWindow *window;


@end

--------------------------------------------------------------------------------
/YXYVideoModel/main.m:
--------------------------------------------------------------------------------
//
//  main.m
//  YXYVideoModel
//
//  Created by yxy on 2018/9/13.
//  Copyright © 2018 yxy. All rights reserved.
//

#import <UIKit/UIKit.h>
#import "AppDelegate.h"

int main(int argc, char * argv[]) {
    @autoreleasepool {
        return UIApplicationMain(argc, argv, nil, NSStringFromClass([AppDelegate class]));
    }
}
--------------------------------------------------------------------------------
/YXYVideoModel/XScratchView.h:
--------------------------------------------------------------------------------
//
//  ScratchCardView.h
//  RGBTool
//
//  Created by admin on 21/08/2017.
//  Copyright © 2017 yang. All rights reserved.
//


#import <UIKit/UIKit.h>

@interface XScratchView : UIView

/** mosaicImage (drawn in the masked layer on top, revealed by finger strokes) */
@property (nonatomic, strong) UIImage *mosaicImage;
/** surfaceImage (the plain photo underneath) */
@property (nonatomic, strong) UIImage *surfaceImage;
/** Reset the scratch path */
- (void)recover;

- (UIImage *)didMosaicImage;
@end
--------------------------------------------------------------------------------
/YXYVideoModel/XRGBTool.h:
--------------------------------------------------------------------------------
//
//  XRGBTool.h
//  RGBTool
//
//  Created by c on 21/08/2017.
//  Copyright © 2017 c. All rights reserved.
//


#import <UIKit/UIKit.h>
#import <Foundation/Foundation.h>

#define kScreenWidth [UIScreen mainScreen].bounds.size.width
#define kScreenHeight [UIScreen mainScreen].bounds.size.height

@interface XRGBTool : NSObject

/** Get the RGBA value and coordinate of every pixel */
+ (NSArray *)getRGBsArrFromImage:(UIImage *)image;

// Recolor selected parts of an image
+ (UIImage *)changePicColorPartial:(UIImage *)image;

/** Mosaic by iterating over the pixels; the larger the level, the larger the mosaic cell. A level of 0 defaults to 1/20 of the image's shorter side. */
+ (UIImage *)getMosaicImageWith:(UIImage *)image level:(NSInteger)level;

/** Mosaic via a Core Image filter (only handles .png images) */
+ (UIImage *)getFilterMosaicImageWith:(UIImage *)image;

@end
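A minimal usage sketch for these helpers (illustrative, not part of the repository; `ico.jpeg` is the sample image bundled with this project):

```objc
// Illustrative only: produce two mosaic variants of the bundled sample image.
UIImage *photo = [UIImage imageNamed:@"ico.jpeg"];
// Pixel-iteration mosaic; passing 0 falls back to 1/20 of the shorter side.
UIImage *coarse = [XRGBTool getMosaicImageWith:photo level:30];
// Core Image (CIPixellate) mosaic with the filter's default cell size.
UIImage *filtered = [XRGBTool getFilterMosaicImageWith:photo];
```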
--------------------------------------------------------------------------------
/YXYVideoModel/OpenGLView20.h:
--------------------------------------------------------------------------------
//
//  OpenGLView20.h
//  MyTest
//
//  Created on 12/20/11.
//  Copyright (c) 2011 All rights reserved.
//

#import <UIKit/UIKit.h>
#import <Foundation/Foundation.h>
#import <QuartzCore/QuartzCore.h>
#import <OpenGLES/ES2/gl.h>
#import <OpenGLES/ES2/glext.h>
#include <sys/time.h>

@interface OpenGLView20 : UIView
{
    /**
     OpenGL drawing context
     */
    EAGLContext *_glContext;

    /**
     Framebuffer
     */
    GLuint _framebuffer;

    /**
     Renderbuffer
     */
    GLuint _renderBuffer;

    /**
     Shader program handle
     */
    GLuint _program;

    /**
     YUV texture array
     */
    GLuint _textureYUV[3];

    /**
     Video width
     */
    GLuint _videoW;

    /**
     Video height
     */
    GLuint _videoH;

    GLsizei _viewScale;

    //void *_pYuvData;

#ifdef DEBUG
    struct timeval _time;
    NSInteger _frameRate;
#endif
}
#pragma mark - Public API
- (void)displayYUV420pData:(void *)data width:(NSInteger)w height:(NSInteger)h;
- (void)setVideoSize:(GLuint)width height:(GLuint)height;

/**
 Clear the frame
 */
- (void)clearFrame;

@end
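A hedged sketch of feeding this view one blank I420 (YUV420p) frame; illustrative only, and it assumes the view is already installed in a window, since `-displayYUV420pData:width:height:` returns early otherwise:

```objc
// Illustrative only: push a single gray I420 frame into the view.
OpenGLView20 *glView = [[OpenGLView20 alloc] initWithFrame:CGRectMake(0, 0, 320, 240)];
[self.view addSubview:glView];                 // the view must be on screen first
NSInteger w = 320, h = 240;
size_t frameBytes = w * h * 3 / 2;             // Y plane plus quarter-size U and V planes
uint8_t *frame = malloc(frameBytes);
memset(frame, 0x80, frameBytes);               // mid-gray luma, neutral chroma
[glView displayYUV420pData:frame width:w height:h];
free(frame);
```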
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
# Xcode
#
# gitignore contributors: remember to update Global/Xcode.gitignore, Objective-C.gitignore & Swift.gitignore

## Build generated
build/
DerivedData/

## Various settings
*.pbxuser
!default.pbxuser
*.mode1v3
!default.mode1v3
*.mode2v3
!default.mode2v3
*.perspectivev3
!default.perspectivev3
xcuserdata/

## Other
*.moved-aside
*.xccheckout
*.xcscmblueprint

## Obj-C/Swift specific
*.hmap
*.ipa
*.dSYM.zip
*.dSYM

# CocoaPods
#
# We recommend against adding the Pods directory to your .gitignore. However
# you should judge for yourself, the pros and cons are mentioned at:
# https://guides.cocoapods.org/using/using-cocoapods.html#should-i-check-the-pods-directory-into-source-control
#
# Pods/

# Carthage
#
# Add this line if you want to avoid checking in source code from Carthage dependencies.
# Carthage/Checkouts

Carthage/Build

# fastlane
#
# It is recommended to not store the screenshots in the git repo. Instead, use fastlane to re-generate the
# screenshots whenever they are needed.
50 | # For more information about the recommended setup visit: 51 | # https://docs.fastlane.tools/best-practices/source-control/#source-control 52 | 53 | fastlane/report.xml 54 | fastlane/Preview.html 55 | fastlane/screenshots/**/*.png 56 | fastlane/test_output 57 | 58 | # Code Injection 59 | # 60 | # After new code Injection tools there's a generated folder /iOSInjectionProject 61 | # https://github.com/johnno1962/injectionforxcode 62 | 63 | iOSInjectionProject/ 64 | -------------------------------------------------------------------------------- /YXYVideoModel/Base.lproj/Main.storyboard: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | -------------------------------------------------------------------------------- /YXYVideoModel/Info.plist: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | NSMicrophoneUsageDescription 7 | App需要您的同意,才能访问麦克风 8 | NSCameraUsageDescription 9 | App需要您的同意,才能访问相机 10 | NSPhotoLibraryUsageDescription 11 | App需要您的同意,才能访问相册 12 | CFBundleDevelopmentRegion 13 | $(DEVELOPMENT_LANGUAGE) 14 | CFBundleExecutable 15 | $(EXECUTABLE_NAME) 16 | CFBundleIdentifier 17 | $(PRODUCT_BUNDLE_IDENTIFIER) 18 | CFBundleInfoDictionaryVersion 19 | 6.0 20 | CFBundleName 21 | $(PRODUCT_NAME) 22 | CFBundlePackageType 23 | APPL 24 | CFBundleShortVersionString 25 | 1.0 26 | CFBundleVersion 27 | 1 28 | LSRequiresIPhoneOS 29 | 30 | UILaunchStoryboardName 31 | LaunchScreen 32 | UIMainStoryboardFile 33 | Main 34 | UIRequiredDeviceCapabilities 35 | 36 | armv7 37 | 38 | UISupportedInterfaceOrientations 39 | 40 | UIInterfaceOrientationPortrait 41 | UIInterfaceOrientationLandscapeLeft 42 | UIInterfaceOrientationLandscapeRight 43 | 44 | UISupportedInterfaceOrientations~ipad 45 | 46 | UIInterfaceOrientationPortrait 47 | UIInterfaceOrientationPortraitUpsideDown 48 | UIInterfaceOrientationLandscapeLeft 49 | UIInterfaceOrientationLandscapeRight 50 | 51 | 52 | 53 | -------------------------------------------------------------------------------- /YXYVideoModel/Base.lproj/LaunchScreen.storyboard: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | -------------------------------------------------------------------------------- /YXYVideoModel/Assets.xcassets/AppIcon.appiconset/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "images" : [ 3 | { 4 | "idiom" : "iphone", 5 | "size" : "20x20", 6 | "scale" : "2x" 7 | }, 8 | { 9 | "idiom" : "iphone", 10 | "size" : "20x20", 11 | "scale" : "3x" 12 | }, 13 | { 14 | "idiom" : "iphone", 15 | "size" : "29x29", 16 | "scale" : "2x" 17 | }, 18 | { 19 | "idiom" : "iphone", 20 | "size" : "29x29", 21 | "scale" : "3x" 22 | }, 23 | { 24 | "idiom" : "iphone", 25 | "size" : "40x40", 26 | "scale" : "2x" 27 | }, 28 | { 29 | "idiom" : "iphone", 30 | "size" : "40x40", 31 | "scale" : "3x" 32 | }, 33 | { 34 | "idiom" : "iphone", 35 | "size" : "60x60", 36 | "scale" : "2x" 37 | }, 38 | { 39 | "idiom" : "iphone", 40 | "size" : "60x60", 41 | "scale" : "3x" 42 | }, 43 | { 44 | "idiom" : "ipad", 45 | "size" : "20x20", 46 | "scale" : "1x" 47 | }, 48 | { 49 | "idiom" : "ipad", 50 | "size" : "20x20", 51 | "scale" : "2x" 52 | }, 53 | { 
54 | "idiom" : "ipad", 55 | "size" : "29x29", 56 | "scale" : "1x" 57 | }, 58 | { 59 | "idiom" : "ipad", 60 | "size" : "29x29", 61 | "scale" : "2x" 62 | }, 63 | { 64 | "idiom" : "ipad", 65 | "size" : "40x40", 66 | "scale" : "1x" 67 | }, 68 | { 69 | "idiom" : "ipad", 70 | "size" : "40x40", 71 | "scale" : "2x" 72 | }, 73 | { 74 | "idiom" : "ipad", 75 | "size" : "76x76", 76 | "scale" : "1x" 77 | }, 78 | { 79 | "idiom" : "ipad", 80 | "size" : "76x76", 81 | "scale" : "2x" 82 | }, 83 | { 84 | "idiom" : "ipad", 85 | "size" : "83.5x83.5", 86 | "scale" : "2x" 87 | }, 88 | { 89 | "idiom" : "ios-marketing", 90 | "size" : "1024x1024", 91 | "scale" : "1x" 92 | } 93 | ], 94 | "info" : { 95 | "version" : 1, 96 | "author" : "xcode" 97 | } 98 | } -------------------------------------------------------------------------------- /YXYVideoModel/AppDelegate.m: -------------------------------------------------------------------------------- 1 | // 2 | // AppDelegate.m 3 | // YXYVideoModel 4 | //邮箱:939607134@qq.com 5 | // Created by yxy on 2018/9/13. 6 | // Copyright © 2018年 yxy. All rights reserved. 7 | // 8 | 9 | #import "AppDelegate.h" 10 | 11 | 12 | 13 | @interface AppDelegate () 14 | 15 | @end 16 | 17 | @implementation AppDelegate 18 | 19 | 20 | - (BOOL)application:(UIApplication *)application didFinishLaunchingWithOptions:(NSDictionary *)launchOptions { 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | return YES; 29 | } 30 | 31 | 32 | - (void)applicationWillResignActive:(UIApplication *)application { 33 | // Sent when the application is about to move from active to inactive state. This can occur for certain types of temporary interruptions (such as an incoming phone call or SMS message) or when the user quits the application and it begins the transition to the background state. 34 | // Use this method to pause ongoing tasks, disable timers, and invalidate graphics rendering callbacks. Games should use this method to pause the game. 35 | } 36 | 37 | 38 | - (void)applicationDidEnterBackground:(UIApplication *)application { 39 | // Use this method to release shared resources, save user data, invalidate timers, and store enough application state information to restore your application to its current state in case it is terminated later. 40 | // If your application supports background execution, this method is called instead of applicationWillTerminate: when the user quits. 41 | } 42 | 43 | 44 | - (void)applicationWillEnterForeground:(UIApplication *)application { 45 | // Called as part of the transition from the background to the active state; here you can undo many of the changes made on entering the background. 46 | } 47 | 48 | 49 | - (void)applicationDidBecomeActive:(UIApplication *)application { 50 | // Restart any tasks that were paused (or not yet started) while the application was inactive. If the application was previously in the background, optionally refresh the user interface. 51 | } 52 | 53 | 54 | - (void)applicationWillTerminate:(UIApplication *)application { 55 | // Called when the application is about to terminate. Save data if appropriate. See also applicationDidEnterBackground:. 56 | } 57 | 58 | 59 | @end 60 | -------------------------------------------------------------------------------- /YXYVideoModel/XScratchView.m: -------------------------------------------------------------------------------- 1 | // 2 | // ScratchCardView.m 3 | // RGBTool 4 | // 5 | // Created by admin on 21/08/2017. 6 | // Copyright © 2017 yang. All rights reserved. 
//


#import "XScratchView.h"

@interface XScratchView ()

/** Image view that shows the untouched photo */
@property (nonatomic, strong) UIImageView *surfaceImageView;
/** Layer holding the mosaic image, revealed through the mask */
@property (nonatomic, strong) CALayer *imageLayer;
/** Mask layer built from the finger strokes */
@property (nonatomic, strong) CAShapeLayer *shapeLayer;
/** The finger's smear path */
@property (nonatomic, assign) CGMutablePathRef path;

@end

@implementation XScratchView

- (instancetype)initWithFrame:(CGRect)frame{
    if (self = [super initWithFrame:frame]) {
        self.surfaceImageView = [[UIImageView alloc] initWithFrame:self.bounds];
        [self addSubview:self.surfaceImageView];

        self.imageLayer = [CALayer layer];
        self.imageLayer.frame = self.bounds;
        [self.layer addSublayer:self.imageLayer];

        self.shapeLayer = [CAShapeLayer layer];
        self.shapeLayer.frame = self.bounds;
        self.shapeLayer.lineCap = kCALineCapRound;
        self.shapeLayer.lineJoin = kCALineJoinRound;
        self.shapeLayer.lineWidth = 20;
        self.shapeLayer.strokeColor = [UIColor whiteColor].CGColor;
        self.shapeLayer.fillColor = nil;// Must be nil; otherwise the path gets auto-filled once addLine segments are appended

        self.imageLayer.mask = self.shapeLayer;
        self.path = CGPathCreateMutable();
    }
    return self;
}
#pragma mark --- Touch Events
- (void)touchesBegan:(NSSet *)touches withEvent:(UIEvent *)event{
    [super touchesBegan:touches withEvent:event];
    UITouch *touch = [touches anyObject];
    CGPoint point = [touch locationInView:self];
    CGPathMoveToPoint(self.path, nil, point.x, point.y);
    self.shapeLayer.path = self.path;
}

- (void)touchesMoved:(NSSet *)touches withEvent:(UIEvent *)event{
    [super touchesMoved:touches withEvent:event];
    UITouch *touch = [touches anyObject];
    CGPoint point = [touch locationInView:self];
    CGPathAddLineToPoint(self.path, nil, point.x, point.y);
    self.shapeLayer.path = self.path;
}

- (void)touchesEnded:(NSSet *)touches withEvent:(UIEvent *)event{
    [super touchesEnded:touches withEvent:event];
}

#pragma mark --- Public Methods
- (void)recover{
    CGPathRelease(self.path);
    self.path = CGPathCreateMutable();
    self.shapeLayer.path = nil;
}



- (UIImage *)didMosaicImage
{
    UIGraphicsBeginImageContextWithOptions(self.bounds.size, NO, 0);
    CGContextRef context = UIGraphicsGetCurrentContext();
    [self.layer renderInContext:context];
    UIImage *mImg = UIGraphicsGetImageFromCurrentImageContext();
    UIGraphicsEndImageContext();
    return mImg;
}

#pragma mark --- Property setters

- (void)setMosaicImage:(UIImage *)mosaicImage{
    _mosaicImage = mosaicImage;
    self.imageLayer.contents = (id)mosaicImage.CGImage;// Put the mosaic image into the masked layer on top
}

- (void)setSurfaceImage:(UIImage *)surfaceImage{
    _surfaceImage = surfaceImage;
    self.surfaceImageView.image = surfaceImage;// The plain photo sits underneath; the masked mosaic layer is revealed above it
}


- (void)dealloc{
    if (self.path) {
        CGPathRelease(self.path);
    }
}


@end
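A short wiring sketch (illustrative, not from the repository; assumes it runs inside a view controller): the view composites a mosaic overlay that finger strokes reveal, and `-didMosaicImage` snapshots the result.

```objc
// Illustrative only: let the user smear a mosaic onto a photo with a finger.
UIImage *photo = [UIImage imageNamed:@"ico.jpeg"];
XScratchView *scratch = [[XScratchView alloc] initWithFrame:CGRectMake(0, 0, 300, 300)];
scratch.surfaceImage = photo;                                      // sharp photo underneath
scratch.mosaicImage = [XRGBTool getMosaicImageWith:photo level:0]; // mosaic shown where the finger draws
[self.view addSubview:scratch];
// Later, e.g. when the user taps "done":
UIImage *result = [scratch didMosaicImage];
```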
--------------------------------------------------------------------------------
/YXYVideoModel/YXYViewController.m:
--------------------------------------------------------------------------------
//
//  YXYViewController.m
//  YXYVideoModel
//
//  Created by LiuGen on 2018/9/18.
//  Copyright © 2018 Test. All rights reserved.
//
/**
 Video capture

 */
#import "YXYViewController.h"
#import <AVFoundation/AVFoundation.h>

@interface YXYViewController () <AVCaptureVideoDataOutputSampleBufferDelegate>
{

    UIView *_playView;
    dispatch_queue_t _queue_t;
}
@property (strong,nonatomic) AVCaptureSession *captureSession;// Shuttles data between the configured inputs and outputs
@property (strong,nonatomic) AVCaptureDeviceInput *captureDeviceInput;// Obtains input data from the AVCaptureDevice
@property (strong,nonatomic) AVCaptureMovieFileOutput *captureMovieFileOutput;// Movie file output
@property (strong,nonatomic) AVCaptureVideoPreviewLayer *captureVideoPreviewLayer;// Camera preview layer
@property (nonatomic,strong) AVCaptureVideoDataOutput *videoOutput;

@property(nonatomic,strong) AVSampleBufferDisplayLayer *sampleLayer;

@end

@implementation YXYViewController
- (void)applicationWillEnterForeground
{
    [_sampleLayer flush];
}
- (void)applicationDidEnterBackground
{

}

- (void)viewDidLoad {
    [super viewDidLoad];

    [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(applicationWillEnterForeground) name:UIApplicationWillEnterForegroundNotification object:nil];
    [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(applicationDidEnterBackground) name:UIApplicationDidEnterBackgroundNotification object:nil];
    //
    UIButton *bt = [UIButton buttonWithType:UIButtonTypeCustom];
    bt.frame = CGRectMake(0, 0, 100, 50);
    bt.backgroundColor = [UIColor redColor];
    [bt addTarget:self action:@selector(openCamera) forControlEvents:UIControlEventTouchUpInside];
    [self.view addSubview:bt];

    _playView = [[UIView alloc] initWithFrame:CGRectMake(0, 60, self.view.bounds.size.width, self.view.bounds.size.height-60)];
    [self.view addSubview:_playView];

    _sampleLayer = [AVSampleBufferDisplayLayer layer];
    _sampleLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
    _sampleLayer.frame = _playView.bounds;
#pragma mark =Set the video orientation=
    [_sampleLayer setAffineTransform:CGAffineTransformMakeRotation(M_PI_2)];
    [_playView.layer addSublayer:_sampleLayer];

    _captureSession = [[AVCaptureSession alloc] init];
//    _captureSession.sessionPreset = AVCaptureSessionPreset640x480;
    AVCaptureDevice *captureDevice = [self getCameraDeviceWithPosition:AVCaptureDevicePositionBack];
    _captureDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:captureDevice error:nil];
    [_captureSession addInput:_captureDeviceInput];

    // 2.2 Get the audio input device
    AVCaptureDevice *audioCaptureDevice=[[AVCaptureDevice devicesWithMediaType:AVMediaTypeAudio] firstObject];
    // 2.4 Create the audio input
    AVCaptureDeviceInput * audioInput = [[AVCaptureDeviceInput alloc] initWithDevice:audioCaptureDevice error:nil];
    // 2.6 Add the audio input to the session
    if ([_captureSession canAddInput:audioInput]) {
        [_captureSession addInput:audioInput];
    }


    self.videoOutput = [[AVCaptureVideoDataOutput alloc] init];
    self.videoOutput.alwaysDiscardsLateVideoFrames = YES; // Drop late frames immediately to save memory (YES is the default)
    _queue_t = dispatch_queue_create("com.gdu.123", 0);
    [self.videoOutput setSampleBufferDelegate:self queue:_queue_t];
    if ([_captureSession canAddOutput:self.videoOutput]) {
        [_captureSession addOutput:self.videoOutput];
    }


}
- (void)captureOutput:(AVCaptureOutput *)output didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
    NSLog(@"===%ld",(long)connection.videoOrientation);
    [_sampleLayer enqueueSampleBuffer:sampleBuffer];
}



- (void)openCamera
{
    [self.captureSession startRunning];

}

- (AVCaptureDevice *)getCameraDeviceWithPosition:(AVCaptureDevicePosition )position{
    NSArray *cameras= [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
    for (AVCaptureDevice *camera in cameras) {
        if ([camera position]==position) {
            return camera;
        }
    }
    return nil;
}

#pragma mark =An alternative way to record video=
- (void)builderCamera
{
    _captureSession = [[AVCaptureSession alloc] init];
    _captureSession.sessionPreset = AVCaptureSessionPreset640x480;
    AVCaptureDevice *captureDevice = [self getCameraDeviceWithPosition:AVCaptureDevicePositionBack];

    _captureDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:captureDevice error:nil];
    // Add an audio input device
    AVCaptureDevice *audioCaptureDevice=[[AVCaptureDevice devicesWithMediaType:AVMediaTypeAudio] firstObject];
    AVCaptureDeviceInput *audioCaptureDeviceInput=[[AVCaptureDeviceInput alloc]initWithDevice:audioCaptureDevice error:nil];


    // Initialize the device output object used to obtain the output data
    _captureMovieFileOutput=[[AVCaptureMovieFileOutput alloc]init];

    // Add the device inputs to the session
    if ([_captureSession canAddInput:_captureDeviceInput]) {
        [_captureSession addInput:_captureDeviceInput];
        [_captureSession addInput:audioCaptureDeviceInput];
        AVCaptureConnection *captureConnection=[_captureMovieFileOutput connectionWithMediaType: AVMediaTypeVideo];
        if ([captureConnection isVideoStabilizationSupported ]) {
            captureConnection.preferredVideoStabilizationMode=AVCaptureVideoStabilizationModeAuto;
        }
    }
    // Add the device output to the session
    if ([_captureSession canAddOutput:_captureMovieFileOutput]) {
        [_captureSession addOutput:_captureMovieFileOutput];
    }
    // Create the video preview layer to show the live camera feed
    _captureVideoPreviewLayer=[[AVCaptureVideoPreviewLayer alloc]initWithSession:self.captureSession];

    CALayer *layer=_playView.layer;
    layer.masksToBounds=YES;

    _captureVideoPreviewLayer.frame=layer.bounds;
    _captureVideoPreviewLayer.videoGravity=AVLayerVideoGravityResizeAspectFill;// Fill mode
    // Add the preview layer to the view hierarchy
    [layer addSublayer:_captureVideoPreviewLayer];
}


- (void)didReceiveMemoryWarning {
    [super didReceiveMemoryWarning];
    // Dispose of any resources that can be recreated.
}

/*
#pragma mark - Navigation

// In a storyboard-based application, you will often want to do a little preparation before navigation
- (void)prepareForSegue:(UIStoryboardSegue *)segue sender:(id)sender {
    // Get the new view controller using [segue destinationViewController].
    // Pass the selected object to the new view controller.
}
*/

@end
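The session above only starts when the red button calls `-openCamera`. A hedged sketch (not in the repository) of requesting camera permission first, to pair with the `NSCameraUsageDescription` entry in Info.plist:

```objc
// Illustrative only: ask for camera access before starting the session.
[AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo completionHandler:^(BOOL granted) {
    if (!granted) { return; }            // no permission, leave the UI idle
    dispatch_async(dispatch_get_main_queue(), ^{
        [self openCamera];               // -startRunning then begins delivering frames
    });
}];
```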
--------------------------------------------------------------------------------
/YXYVideoModel/XRGBTool.m:
--------------------------------------------------------------------------------
//
//  XRGBTool.m
//  RGBTool
//
//  Created by c on 21/08/2017.
//  Copyright © 2017 c. All rights reserved.
//

#import "XRGBTool.h"
//#import "XPixelItem.h"  // XPixelItem is not included in this repository; only the commented-out code below used it

@implementation XRGBTool

+ (NSArray *)getRGBsArrFromImage:(UIImage *)image{
    //1. Get the image into your data buffer
    CGImageRef imageRef = [image CGImage];
    NSUInteger imageW = CGImageGetWidth(imageRef);
    NSUInteger imageH = CGImageGetHeight(imageRef);
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    NSUInteger bytesPerPixel = 4;// Four components per pixel: R, G, B and A
    NSUInteger bytesPerRow = bytesPerPixel * imageW;// Row width in pixels times the size of one pixel
    unsigned char *rawData = (unsigned char *)calloc(imageH*imageW*bytesPerPixel, sizeof(unsigned char));
    NSUInteger bitsPerComponent = 8;// 8 bits per component
    /*
     Parameter 1: data buffer
     Parameter 2: image width
     Parameter 3: image height
     Parameter 4: bits per component of a pixel
         In computer graphics a pixel consists of four components (R, G, B, A),
         and each component is 8 bits in size.
     Parameter 5: bytes per row (an image is really just an array of pixels);
         one pixel is 4 components of 1 byte each, so bytes per row = 4 * width
     Parameter 6: color space
     Parameter 7: whether alpha is needed, plus the byte order
     */
    CGContextRef context = CGBitmapContextCreate(rawData, imageW, imageH, bitsPerComponent, bytesPerRow, colorSpace, kCGImageAlphaPremultipliedLast | kCGBitmapByteOrder32Big);
    CGContextDrawImage(context, CGRectMake(0, 0, imageW, imageH), imageRef);

    //2. Now rawData contains the image data in the RGBA8888 pixel format
    NSUInteger blackPixel = 0;// Number of black pixels
    NSMutableArray *pixelsArr = [NSMutableArray array];
    for (int y = 0; y < imageH; y++) {
        for (int x = 0; x < imageW; x++) {
            NSUInteger byteIndex = bytesPerRow*y + bytesPerPixel*x;
            // rawData is a flat array laid out as RGBA (first pixel) RGBA (second pixel) ...
            NSUInteger red = rawData[byteIndex];
            NSUInteger green = rawData[byteIndex+1];
            NSUInteger blue = rawData[byteIndex+2];
            NSUInteger alpha = rawData[byteIndex+3];

//            XPixelItem *pixelItem = [[XPixelItem alloc] init];
//            pixelItem.color = [UIColor colorWithRed:red/255.0 green:green/255.0 blue:blue/255.0 alpha:alpha/255.0];// Pixel color
//            pixelItem.location = CGPointMake(x, y);// Pixel coordinate
//
//            [pixelsArr addObject:pixelItem];
            if (red+green+blue == 0 && (alpha/255.0 >= 0.5)){// Measure the proportion of black
                blackPixel++;
            }
        }
    }
    // Report the black pixel count
    NSLog(@"Black area ratio--%f,%lu",blackPixel*1.0/(imageW*imageH),(unsigned long)pixelsArr.count);

    CGContextRelease(context);
    CGColorSpaceRelease(colorSpace);
    free(rawData);
    return pixelsArr;
}
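// Worked example of the indexing above (illustrative numbers, not from the repo):
// for a 100-pixel-wide RGBA8888 bitmap, bytesPerRow = 4 * 100 = 400, so the
// pixel at (x = 3, y = 2) starts at byteIndex = 400*2 + 4*3 = 812, and
// rawData[812..815] hold its R, G, B and A bytes.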
// Change parts of an image's color by visiting its pixels
+ (UIImage *)changePicColorPartial:(UIImage *)image{
    //1. Get the image into your data buffer
    CGImageRef imageRef = [image CGImage];
    NSUInteger imageW = CGImageGetWidth(imageRef);
    NSUInteger imageH = CGImageGetHeight(imageRef);
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    NSUInteger bytesPerPixel = 4;// Four components per pixel: R, G, B and A
    NSUInteger bytesPerRow = bytesPerPixel * imageW;
    unsigned char *rawData = (unsigned char *)calloc(imageH*imageW*bytesPerPixel, sizeof(unsigned char));
    NSUInteger bitsPerComponent = 8;// 8 bits per component
    CGContextRef context = CGBitmapContextCreate(rawData, imageW, imageH, bitsPerComponent, bytesPerRow, colorSpace, kCGImageAlphaPremultipliedLast | kCGBitmapByteOrder32Big);
    CGContextDrawImage(context, CGRectMake(0, 0, imageW, imageH), imageRef);

    //2. Now rawData contains the image data in the RGBA8888 pixel format
    for (int y = 0; y < imageH; y++) {
        for (int x = 0; x < imageW; x++) {
            NSUInteger byteIndex = bytesPerRow*y + bytesPerPixel*x;
            // rawData is a flat array laid out as RGBA (first pixel) RGBA (second pixel) ...
            NSUInteger red = rawData[byteIndex];// r
            NSUInteger green = rawData[byteIndex+1];// g
            NSUInteger blue = rawData[byteIndex+2];// b
            NSUInteger alpha = rawData[byteIndex+3];// a
            if (red+green+blue == 0 && (alpha/255.0 >= 0.5)) {// Repaint the black parts
                rawData[byteIndex] = 255;
                rawData[byteIndex+1] = 0;
                rawData[byteIndex+2] = 0;
                rawData[byteIndex+3] = 255;
            }else if(red+green+blue == 0 && (alpha/255.0 < 0.5)){// Repaint the transparent parts
                rawData[byteIndex] = 255;
                rawData[byteIndex+1] = 255;
                rawData[byteIndex+2] = 255;
                rawData[byteIndex+3] = 150;
            }else if(red+green+blue == 255*3 && (alpha/255.0 >= 0.5)){// Repaint the white parts
                rawData[byteIndex] = 140;
                rawData[byteIndex+1] = 128;
                rawData[byteIndex+2] = 214;
                rawData[byteIndex+3] = 255;
            }
        }
    }
    imageRef = CGBitmapContextCreateImage(context);
    CGContextRelease(context);
    CGColorSpaceRelease(colorSpace);
    free(rawData);
    UIImage *result = [UIImage imageWithCGImage:imageRef];
    CGImageRelease(imageRef);// The UIImage retains the CGImage, so release our reference
    return result;

}
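// How the pixel mosaic below works (summary added for clarity): rows whose index
// is a multiple of sizeLevel cache the pixel at each sizeLevel-th column as the
// cell's "anchor" and copy it across the rest of that row's cell; every other
// row copies the pixel directly above it, so each sizeLevel x sizeLevel block
// ends up filled with its anchor pixel's color.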
+ (UIImage *)getMosaicImageWith:(UIImage *)image level:(NSInteger)level{
    CGImageRef imageRef = image.CGImage;
    NSUInteger imageW = CGImageGetWidth(imageRef);
    NSUInteger imageH = CGImageGetHeight(imageRef);
    // Create the color space
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    unsigned char *rawData = (unsigned char *)calloc(imageH*imageW*4, sizeof(unsigned char));
    CGContextRef contextRef = CGBitmapContextCreate(rawData, imageW, imageH, 8, imageW*4, colorSpace, kCGImageAlphaPremultipliedLast | kCGBitmapByteOrder32Big);
    CGContextDrawImage(contextRef, CGRectMake(0, 0, imageW, imageH), imageRef);

    unsigned char *bitMapData = CGBitmapContextGetData(contextRef);
    NSUInteger currentIndex,preCurrentIndex;
    NSUInteger sizeLevel = level == 0 ? MIN(imageW, imageH)/20.0 : level;
    // A pixel has four channels; this buffer caches one anchor pixel
    unsigned char pixels[4] = {0};// A plain byte array (not an array of pointers)
    for (int i = 0; i < imageH; i++) {
        for (int j = 0; j < imageW; j++) {
            currentIndex = imageW*i + j;
            NSUInteger red = rawData[currentIndex*4];
            NSUInteger green = rawData[currentIndex*4+1];
            NSUInteger blue = rawData[currentIndex*4+2];
            NSUInteger alpha = rawData[currentIndex*4+3];
            if (red+green+blue == 0 && (alpha/255.0 <= 0.5)) {
                rawData[currentIndex*4] = 255;
                rawData[currentIndex*4+1] = 255;
                rawData[currentIndex*4+2] = 255;
                rawData[currentIndex*4+3] = 0;
                continue;
            }

            if (i % sizeLevel == 0) {
                if (j % sizeLevel == 0) {
                    //dest - src - length
                    memcpy(pixels, bitMapData+4*currentIndex, 4);
                }else{
                    // Reuse the anchor pixel's value for this pixel
                    memcpy(bitMapData+4*currentIndex, pixels, 4);
                }
            }else{
                preCurrentIndex = (i-1)*imageW+j;
                memcpy(bitMapData+4*currentIndex, bitMapData+4*preCurrentIndex, 4);
            }
        }
    }
    // Wrap the transformed image bytes in a data provider
    NSUInteger size = imageW*imageH*4;
    CGDataProviderRef providerRef = CGDataProviderCreateWithData(NULL, bitMapData, size, NULL);
    // Create the mosaic image from the transformed bitMapData pixels
    CGImageRef mosaicImageRef = CGImageCreate(imageW, imageH, 8, 4*8, imageW*4, colorSpace, kCGBitmapByteOrderDefault, providerRef, NULL, NO, kCGRenderingIntentDefault);// Creates a bitmap image from data supplied by a data provider.
    // Create the output context for the mosaic image
    CGContextRef outContextRef = CGBitmapContextCreate(bitMapData, imageW, imageH, 8, imageW*4, colorSpace, kCGImageAlphaPremultipliedLast);
    // Draw the image
    CGContextDrawImage(outContextRef, CGRectMake(0, 0, imageW, imageH), mosaicImageRef);

    CGImageRef resultImageRef = CGBitmapContextCreateImage(contextRef);
    UIImage *mosaicImage = [UIImage imageWithCGImage:resultImageRef];
    // Release memory
    CGImageRelease(resultImageRef);
    CGImageRelease(mosaicImageRef);
    CGColorSpaceRelease(colorSpace);
    CGDataProviderRelease(providerRef);
    CGContextRelease(outContextRef);
    CGContextRelease(contextRef);
    free(rawData);// Free the backing pixel buffer allocated above
    return mosaicImage;
}


+ (UIImage *)getFilterMosaicImageWith:(UIImage *)image{
    CIImage *ciImage = [[CIImage alloc] initWithImage:image];
    CIFilter *filter = [CIFilter filterWithName:@"CIPixellate"];
    [filter setValue:ciImage forKey:kCIInputImageKey];
    [filter setDefaults];
    // Render the output image
    CIImage *outPutImage = [filter valueForKey:kCIOutputImageKey];
    CIContext *context = [CIContext contextWithOptions:nil];
    CGImageRef cgImage = [context createCGImage:outPutImage fromRect:[outPutImage extent]];
    UIImage *showImage = [UIImage imageWithCGImage:cgImage];
    CGImageRelease(cgImage);
    return showImage;
}


@end
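`+getFilterMosaicImageWith:` leaves CIPixellate at its default cell size; the filter also accepts an explicit scale. A hedged variant (illustrative, not part of the repository):

```objc
// Illustrative only: CIPixellate with an explicit cell size.
CIImage *ciImage = [[CIImage alloc] initWithImage:image];
CIFilter *filter = [CIFilter filterWithName:@"CIPixellate"];
[filter setDefaults];
[filter setValue:ciImage forKey:kCIInputImageKey];
[filter setValue:@(30) forKey:kCIInputScaleKey];// mosaic cell size, in pixels
CIImage *outputImage = [filter valueForKey:kCIOutputImageKey];
```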
--------------------------------------------------------------------------------
/YXYVideoModel/OpenGLView20.m:
--------------------------------------------------------------------------------
//
//  OpenGLView.m
//  MyTest
//
//  Created on 12/20/11.
//  Copyright (c) 2011 All rights reserved.
//

#import "OpenGLView20.h"

enum AttribEnum
{
    ATTRIB_VERTEX,
    ATTRIB_TEXTURE,
    ATTRIB_COLOR,
};

enum TextureType
{
    TEXY = 0,
    TEXU,
    TEXV,
    TEXC
};

//#define PRINT_CALL 1

@interface OpenGLView20()

/**
 Initialize the YUV textures
 */
- (void)setupYUVTexture;

/**
 Create the buffers
 @return TRUE on success, FALSE on failure
 */
- (BOOL)createFrameAndRenderBuffer;

/**
 Destroy the buffers
 */
- (void)destoryFrameAndRenderBuffer;

/**
 Load the shaders
 */
- (void)loadShader;

/**
 Compile shader source
 @param shaderCode source code
 @param shaderType shader type
 @return the shader handle on success, -1 on failure
 */
- (GLuint)compileShader:(NSString*)shaderCode withType:(GLenum)shaderType;

/**
 Render
 */
- (void)render;
@end

@implementation OpenGLView20

//- (void)debugGlError
//{
//    GLenum r = glGetError();
//    if (r != 0)
//    {
//        printf("%d \n", r);
//    }
//}
- (BOOL)doInit
{
    CAEAGLLayer *eaglLayer = (CAEAGLLayer*) self.layer;
    //eaglLayer.opaque = YES;

    eaglLayer.opaque = YES;
    eaglLayer.drawableProperties = [NSDictionary dictionaryWithObjectsAndKeys:
                                    [NSNumber numberWithBool:NO], kEAGLDrawablePropertyRetainedBacking,
                                    kEAGLColorFormatRGB565, kEAGLDrawablePropertyColorFormat,
                                    //[NSNumber numberWithBool:YES], kEAGLDrawablePropertyRetainedBacking,
                                    nil];
    self.contentScaleFactor = [UIScreen mainScreen].scale;
    _viewScale = [UIScreen mainScreen].scale;

    _glContext = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2];

    //[self debugGlError];

    if(!_glContext || ![EAGLContext setCurrentContext:_glContext])
    {
        return NO;
    }

    [self setupYUVTexture];
    [self
loadShader]; 101 | glUseProgram(_program); 102 | 103 | GLuint textureUniformY = glGetUniformLocation(_program, "SamplerY"); 104 | GLuint textureUniformU = glGetUniformLocation(_program, "SamplerU"); 105 | GLuint textureUniformV = glGetUniformLocation(_program, "SamplerV"); 106 | glUniform1i(textureUniformY, 0); 107 | glUniform1i(textureUniformU, 1); 108 | glUniform1i(textureUniformV, 2); 109 | 110 | return YES; 111 | } 112 | 113 | - (id)initWithCoder:(NSCoder *)aDecoder 114 | { 115 | self = [super initWithCoder:aDecoder]; 116 | if (self) 117 | { 118 | if (![self doInit]) 119 | { 120 | self = nil; 121 | } 122 | } 123 | return self; 124 | } 125 | 126 | - (id)initWithFrame:(CGRect)frame 127 | { 128 | self = [super initWithFrame:frame]; 129 | if (self) 130 | { 131 | if (![self doInit]) 132 | { 133 | self = nil; 134 | } 135 | } 136 | return self; 137 | } 138 | 139 | - (void)layoutSubviews 140 | { 141 | dispatch_async(dispatch_get_global_queue(0, 0), ^{ 142 | @synchronized(self) 143 | { 144 | [EAGLContext setCurrentContext:_glContext]; 145 | [self destoryFrameAndRenderBuffer]; 146 | [self createFrameAndRenderBuffer]; 147 | } 148 | 149 | glViewport(1, 1, self.bounds.size.width*_viewScale - 2, self.bounds.size.height*_viewScale - 2); 150 | }); 151 | } 152 | 153 | - (void)setupYUVTexture 154 | { 155 | if (_textureYUV[TEXY]) 156 | { 157 | glDeleteTextures(3, _textureYUV); 158 | } 159 | glGenTextures(3, _textureYUV); 160 | if (!_textureYUV[TEXY] || !_textureYUV[TEXU] || !_textureYUV[TEXV]) 161 | { 162 | NSLog(@"<<<<<<<<<<<<纹理创建失败!>>>>>>>>>>>>"); 163 | return; 164 | } 165 | 166 | glActiveTexture(GL_TEXTURE0); 167 | glBindTexture(GL_TEXTURE_2D, _textureYUV[TEXY]); 168 | glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MAG_FILTER,GL_LINEAR); 169 | glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MIN_FILTER,GL_LINEAR); 170 | glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE); 171 | glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE); 172 | 173 | glActiveTexture(GL_TEXTURE1); 174 | glBindTexture(GL_TEXTURE_2D, _textureYUV[TEXU]); 175 | glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MAG_FILTER,GL_LINEAR); 176 | glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MIN_FILTER,GL_LINEAR); 177 | glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE); 178 | glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE); 179 | 180 | glActiveTexture(GL_TEXTURE2); 181 | glBindTexture(GL_TEXTURE_2D, _textureYUV[TEXV]); 182 | glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MAG_FILTER,GL_LINEAR); 183 | glTexParameteri(GL_TEXTURE_2D,GL_TEXTURE_MIN_FILTER,GL_LINEAR); 184 | glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE); 185 | glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE); 186 | } 187 | 188 | - (void)render 189 | { 190 | [EAGLContext setCurrentContext:_glContext]; 191 | CGSize size = self.bounds.size; 192 | glViewport(1, 1, size.width*_viewScale-2, size.height*_viewScale-2); 193 | static const GLfloat squareVertices[] = { 194 | -1.0f, -1.0f, 195 | 1.0f, -1.0f, 196 | -1.0f, 1.0f, 197 | 1.0f, 1.0f, 198 | }; 199 | 200 | 201 | static const GLfloat coordVertices[] = { 202 | 0.0f, 1.0f, 203 | 1.0f, 1.0f, 204 | 0.0f, 0.0f, 205 | 1.0f, 0.0f, 206 | }; 207 | 208 | 209 | // Update attribute values 210 | glVertexAttribPointer(ATTRIB_VERTEX, 2, GL_FLOAT, 0, 0, squareVertices); 211 | glEnableVertexAttribArray(ATTRIB_VERTEX); 212 | 213 | 214 | glVertexAttribPointer(ATTRIB_TEXTURE, 2, GL_FLOAT, 0, 0, coordVertices); 215 | glEnableVertexAttribArray(ATTRIB_TEXTURE); 216 | 217 | 218 | 
// Draw 219 | glDrawArrays(GL_TRIANGLE_STRIP, 0, 4); 220 | glBindRenderbuffer(GL_RENDERBUFFER, _renderBuffer); 221 | [_glContext presentRenderbuffer:GL_RENDERBUFFER]; 222 | } 223 | 224 | #pragma mark - 设置openGL 225 | + (Class)layerClass 226 | { 227 | return [CAEAGLLayer class]; 228 | } 229 | 230 | - (BOOL)createFrameAndRenderBuffer 231 | { 232 | glGenFramebuffers(1, &_framebuffer); 233 | glGenRenderbuffers(1, &_renderBuffer); 234 | 235 | glBindFramebuffer(GL_FRAMEBUFFER, _framebuffer); 236 | glBindRenderbuffer(GL_RENDERBUFFER, _renderBuffer); 237 | 238 | 239 | if (![_glContext renderbufferStorage:GL_RENDERBUFFER fromDrawable:(CAEAGLLayer *)self.layer]) 240 | { 241 | NSLog(@"attach渲染缓冲区失败"); 242 | } 243 | glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_RENDERBUFFER, _renderBuffer); 244 | if (glCheckFramebufferStatus(GL_FRAMEBUFFER) != GL_FRAMEBUFFER_COMPLETE) 245 | { 246 | NSLog(@"创建缓冲区错误 0x%x", glCheckFramebufferStatus(GL_FRAMEBUFFER)); 247 | return NO; 248 | } 249 | return YES; 250 | } 251 | 252 | - (void)destoryFrameAndRenderBuffer 253 | { 254 | if (_framebuffer) 255 | { 256 | glDeleteFramebuffers(1, &_framebuffer); 257 | } 258 | 259 | if (_renderBuffer) 260 | { 261 | glDeleteRenderbuffers(1, &_renderBuffer); 262 | } 263 | 264 | _framebuffer = 0; 265 | _renderBuffer = 0; 266 | } 267 | 268 | #define FSH @"varying lowp vec2 TexCoordOut;\ 269 | \ 270 | uniform sampler2D SamplerY;\ 271 | uniform sampler2D SamplerU;\ 272 | uniform sampler2D SamplerV;\ 273 | \ 274 | void main(void)\ 275 | {\ 276 | mediump vec3 yuv;\ 277 | lowp vec3 rgb;\ 278 | \ 279 | yuv.x = texture2D(SamplerY, TexCoordOut).r;\ 280 | yuv.y = texture2D(SamplerU, TexCoordOut).r - 0.5;\ 281 | yuv.z = texture2D(SamplerV, TexCoordOut).r - 0.5;\ 282 | \ 283 | rgb = mat3( 1, 1, 1,\ 284 | 0, -0.39465, 2.03211,\ 285 | 1.13983, -0.58060, 0) * yuv;\ 286 | \ 287 | gl_FragColor = vec4(rgb, 1);\ 288 | \ 289 | }" 290 | 291 | #define VSH @"attribute vec4 position;\ 292 | attribute vec2 TexCoordIn;\ 293 | varying vec2 TexCoordOut;\ 294 | \ 295 | void main(void)\ 296 | {\ 297 | gl_Position = position;\ 298 | TexCoordOut = TexCoordIn;\ 299 | }" 300 | 301 | /** 302 | 加载着色器 303 | */ 304 | - (void)loadShader 305 | { 306 | /** 307 | 1 308 | */ 309 | GLuint vertexShader = [self compileShader:VSH withType:GL_VERTEX_SHADER]; 310 | GLuint fragmentShader = [self compileShader:FSH withType:GL_FRAGMENT_SHADER]; 311 | 312 | /** 313 | 2 314 | */ 315 | _program = glCreateProgram(); 316 | glAttachShader(_program, vertexShader); 317 | glAttachShader(_program, fragmentShader); 318 | 319 | /** 320 | 绑定需要在link之前 321 | */ 322 | glBindAttribLocation(_program, ATTRIB_VERTEX, "position"); 323 | glBindAttribLocation(_program, ATTRIB_TEXTURE, "TexCoordIn"); 324 | 325 | glLinkProgram(_program); 326 | 327 | /** 328 | 3 329 | */ 330 | GLint linkSuccess; 331 | glGetProgramiv(_program, GL_LINK_STATUS, &linkSuccess); 332 | if (linkSuccess == GL_FALSE) { 333 | GLchar messages[256]; 334 | glGetProgramInfoLog(_program, sizeof(messages), 0, &messages[0]); 335 | NSString *messageString = [NSString stringWithUTF8String:messages]; 336 | NSLog(@"<<<<着色器连接失败 %@>>>", messageString); 337 | //exit(1); 338 | } 339 | 340 | if (vertexShader) 341 | glDeleteShader(vertexShader); 342 | if (fragmentShader) 343 | glDeleteShader(fragmentShader); 344 | } 345 | 346 | - (GLuint)compileShader:(NSString*)shaderString withType:(GLenum)shaderType 347 | { 348 | 349 | /** 350 | 1 351 | */ 352 | if (!shaderString) { 353 | // NSLog(@"Error loading shader: %@", 
error.localizedDescription); 354 | exit(1); 355 | } 356 | else 357 | { 358 | //NSLog(@"shader code-->%@", shaderString); 359 | } 360 | 361 | /** 362 | 2 363 | */ 364 | GLuint shaderHandle = glCreateShader(shaderType); 365 | 366 | /** 367 | 3 368 | */ 369 | const char * shaderStringUTF8 = [shaderString UTF8String]; 370 | int shaderStringLength = [shaderString length]; 371 | glShaderSource(shaderHandle, 1, &shaderStringUTF8, &shaderStringLength); 372 | 373 | /** 374 | 4 375 | */ 376 | glCompileShader(shaderHandle); 377 | 378 | /** 379 | 5 380 | */ 381 | GLint compileSuccess; 382 | glGetShaderiv(shaderHandle, GL_COMPILE_STATUS, &compileSuccess); 383 | if (compileSuccess == GL_FALSE) { 384 | GLchar messages[256]; 385 | glGetShaderInfoLog(shaderHandle, sizeof(messages), 0, &messages[0]); 386 | NSString *messageString = [NSString stringWithUTF8String:messages]; 387 | NSLog(@"%@", messageString); 388 | exit(1); 389 | } 390 | 391 | return shaderHandle; 392 | } 393 | 394 | #pragma mark - 接口 395 | - (void)displayYUV420pData:(void *)data width:(NSInteger)w height:(NSInteger)h 396 | { 397 | //_pYuvData = data; 398 | // if (_offScreen || !self.window) 399 | // { 400 | // return; 401 | // } 402 | if (!self.window) 403 | { 404 | return; 405 | } 406 | @synchronized(self) 407 | { 408 | if (w != _videoW || h != _videoH) 409 | { 410 | [self setVideoSize:w height:h]; 411 | } 412 | [EAGLContext setCurrentContext:_glContext]; 413 | 414 | glBindTexture(GL_TEXTURE_2D, _textureYUV[TEXY]); 415 | glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, w, h, GL_RED_EXT, GL_UNSIGNED_BYTE, data); 416 | 417 | //[self debugGlError]; 418 | 419 | glBindTexture(GL_TEXTURE_2D, _textureYUV[TEXU]); 420 | glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, w/2, h/2, GL_RED_EXT, GL_UNSIGNED_BYTE, data + w * h); 421 | 422 | // [self debugGlError]; 423 | 424 | glBindTexture(GL_TEXTURE_2D, _textureYUV[TEXV]); 425 | glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, w/2, h/2, GL_RED_EXT, GL_UNSIGNED_BYTE, data + w * h * 5 / 4); 426 | 427 | //[self debugGlError]; 428 | 429 | [self render]; 430 | } 431 | 432 | #ifdef DEBUG 433 | 434 | GLenum err = glGetError(); 435 | if (err != GL_NO_ERROR) 436 | { 437 | printf("GL_ERROR=======>%d\n", err); 438 | } 439 | struct timeval nowtime; 440 | gettimeofday(&nowtime, NULL); 441 | if (nowtime.tv_sec != _time.tv_sec) 442 | { 443 | printf("视频 %d 帧率: %d\n", self.tag, _frameRate); 444 | memcpy(&_time, &nowtime, sizeof(struct timeval)); 445 | _frameRate = 1; 446 | } 447 | else 448 | { 449 | _frameRate++; 450 | } 451 | #endif 452 | } 453 | 454 | - (void)setVideoSize:(GLuint)width height:(GLuint)height 455 | { 456 | _videoW = width; 457 | _videoH = height; 458 | 459 | void *blackData = malloc(width * height * 1.5); 460 | if(blackData) 461 | //bzero(blackData, width * height * 1.5); 462 | memset(blackData, 0x0, width * height * 1.5); 463 | 464 | [EAGLContext setCurrentContext:_glContext]; 465 | glBindTexture(GL_TEXTURE_2D, _textureYUV[TEXY]); 466 | glTexImage2D(GL_TEXTURE_2D, 0, GL_RED_EXT, width, height, 0, GL_RED_EXT, GL_UNSIGNED_BYTE, blackData); 467 | glBindTexture(GL_TEXTURE_2D, _textureYUV[TEXU]); 468 | glTexImage2D(GL_TEXTURE_2D, 0, GL_RED_EXT, width/2, height/2, 0, GL_RED_EXT, GL_UNSIGNED_BYTE, blackData + width * height); 469 | 470 | glBindTexture(GL_TEXTURE_2D, _textureYUV[TEXV]); 471 | glTexImage2D(GL_TEXTURE_2D, 0, GL_RED_EXT, width/2, height/2, 0, GL_RED_EXT, GL_UNSIGNED_BYTE, blackData + width * height * 5 / 4); 472 | free(blackData); 473 | } 474 | 475 | 476 | - (void)clearFrame 477 | { 478 | if ([self window]) 479 | { 480 | 
[EAGLContext setCurrentContext:_glContext]; 481 | glClearColor(0.0, 0.0, 0.0, 1.0); 482 | glClear(GL_COLOR_BUFFER_BIT); 483 | glBindRenderbuffer(GL_RENDERBUFFER, _renderBuffer); 484 | [_glContext presentRenderbuffer:GL_RENDERBUFFER]; 485 | } 486 | 487 | } 488 | 489 | @end 490 | -------------------------------------------------------------------------------- /YXYVideoModel.xcodeproj/project.pbxproj: -------------------------------------------------------------------------------- 1 | // !$*UTF8*$! 2 | { 3 | archiveVersion = 1; 4 | classes = { 5 | }; 6 | objectVersion = 50; 7 | objects = { 8 | 9 | /* Begin PBXBuildFile section */ 10 | 03030725218073D7004C029D /* OpenGLView20.m in Sources */ = {isa = PBXBuildFile; fileRef = 03030723218073D7004C029D /* OpenGLView20.m */; }; 11 | 03363E842149F10100FA3EFE /* AppDelegate.m in Sources */ = {isa = PBXBuildFile; fileRef = 03363E832149F10100FA3EFE /* AppDelegate.m */; }; 12 | 03363E872149F10100FA3EFE /* ViewController.m in Sources */ = {isa = PBXBuildFile; fileRef = 03363E862149F10100FA3EFE /* ViewController.m */; }; 13 | 03363E8A2149F10100FA3EFE /* Main.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 03363E882149F10100FA3EFE /* Main.storyboard */; }; 14 | 03363E8C2149F10300FA3EFE /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 03363E8B2149F10300FA3EFE /* Assets.xcassets */; }; 15 | 03363E8F2149F10300FA3EFE /* LaunchScreen.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 03363E8D2149F10300FA3EFE /* LaunchScreen.storyboard */; }; 16 | 03363E922149F10300FA3EFE /* main.m in Sources */ = {isa = PBXBuildFile; fileRef = 03363E912149F10300FA3EFE /* main.m */; }; 17 | 03363E992149F16400FA3EFE /* 2018.mp4 in Resources */ = {isa = PBXBuildFile; fileRef = 03363E982149F16400FA3EFE /* 2018.mp4 */; }; 18 | 03363E9B214A09E100FA3EFE /* 2017.mp4 in Resources */ = {isa = PBXBuildFile; fileRef = 03363E9A214A09E100FA3EFE /* 2017.mp4 */; }; 19 | 03363E9D214A166900FA3EFE /* ico.jpeg in Resources */ = {isa = PBXBuildFile; fileRef = 03363E9C214A166900FA3EFE /* ico.jpeg */; }; 20 | 03954E0D2150972300AF6E06 /* YXYViewController.m in Sources */ = {isa = PBXBuildFile; fileRef = 03954E0C2150972300AF6E06 /* YXYViewController.m */; }; 21 | /* End PBXBuildFile section */ 22 | 23 | /* Begin PBXFileReference section */ 24 | 03030723218073D7004C029D /* OpenGLView20.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = OpenGLView20.m; sourceTree = ""; }; 25 | 03030724218073D7004C029D /* OpenGLView20.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = OpenGLView20.h; sourceTree = ""; }; 26 | 03363E7F2149F10100FA3EFE /* YXYVideoModel.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = YXYVideoModel.app; sourceTree = BUILT_PRODUCTS_DIR; }; 27 | 03363E822149F10100FA3EFE /* AppDelegate.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = AppDelegate.h; sourceTree = ""; }; 28 | 03363E832149F10100FA3EFE /* AppDelegate.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = AppDelegate.m; sourceTree = ""; }; 29 | 03363E852149F10100FA3EFE /* ViewController.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = ViewController.h; sourceTree = ""; }; 30 | 03363E862149F10100FA3EFE /* ViewController.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = ViewController.m; sourceTree = ""; }; 31 | 03363E892149F10100FA3EFE /* Base */ = {isa = 
PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/Main.storyboard; sourceTree = ""; }; 32 | 03363E8B2149F10300FA3EFE /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = ""; }; 33 | 03363E8E2149F10300FA3EFE /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/LaunchScreen.storyboard; sourceTree = ""; }; 34 | 03363E902149F10300FA3EFE /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = ""; }; 35 | 03363E912149F10300FA3EFE /* main.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = main.m; sourceTree = ""; }; 36 | 03363E982149F16400FA3EFE /* 2018.mp4 */ = {isa = PBXFileReference; lastKnownFileType = file; path = 2018.mp4; sourceTree = ""; }; 37 | 03363E9A214A09E100FA3EFE /* 2017.mp4 */ = {isa = PBXFileReference; lastKnownFileType = file; path = 2017.mp4; sourceTree = ""; }; 38 | 03363E9C214A166900FA3EFE /* ico.jpeg */ = {isa = PBXFileReference; lastKnownFileType = image.jpeg; path = ico.jpeg; sourceTree = ""; }; 39 | 03363E9E214A3D7D00FA3EFE /* README.md */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = net.daringfireball.markdown; path = README.md; sourceTree = SOURCE_ROOT; }; 40 | 03954E0B2150972300AF6E06 /* YXYViewController.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = YXYViewController.h; sourceTree = ""; }; 41 | 03954E0C2150972300AF6E06 /* YXYViewController.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = YXYViewController.m; sourceTree = ""; }; 42 | /* End PBXFileReference section */ 43 | 44 | /* Begin PBXFrameworksBuildPhase section */ 45 | 03363E7C2149F10100FA3EFE /* Frameworks */ = { 46 | isa = PBXFrameworksBuildPhase; 47 | buildActionMask = 2147483647; 48 | files = ( 49 | ); 50 | runOnlyForDeploymentPostprocessing = 0; 51 | }; 52 | /* End PBXFrameworksBuildPhase section */ 53 | 54 | /* Begin PBXGroup section */ 55 | 03363E762149F10100FA3EFE = { 56 | isa = PBXGroup; 57 | children = ( 58 | 03363E812149F10100FA3EFE /* YXYVideoModel */, 59 | 03363E802149F10100FA3EFE /* Products */, 60 | ); 61 | sourceTree = ""; 62 | }; 63 | 03363E802149F10100FA3EFE /* Products */ = { 64 | isa = PBXGroup; 65 | children = ( 66 | 03363E7F2149F10100FA3EFE /* YXYVideoModel.app */, 67 | ); 68 | name = Products; 69 | sourceTree = ""; 70 | }; 71 | 03363E812149F10100FA3EFE /* YXYVideoModel */ = { 72 | isa = PBXGroup; 73 | children = ( 74 | 03363E9E214A3D7D00FA3EFE /* README.md */, 75 | 03363E9C214A166900FA3EFE /* ico.jpeg */, 76 | 03363E9A214A09E100FA3EFE /* 2017.mp4 */, 77 | 03363E982149F16400FA3EFE /* 2018.mp4 */, 78 | 03363E822149F10100FA3EFE /* AppDelegate.h */, 79 | 03363E832149F10100FA3EFE /* AppDelegate.m */, 80 | 03363E852149F10100FA3EFE /* ViewController.h */, 81 | 03363E862149F10100FA3EFE /* ViewController.m */, 82 | 03954E0B2150972300AF6E06 /* YXYViewController.h */, 83 | 03954E0C2150972300AF6E06 /* YXYViewController.m */, 84 | 03030724218073D7004C029D /* OpenGLView20.h */, 85 | 03030723218073D7004C029D /* OpenGLView20.m */, 86 | 03363E882149F10100FA3EFE /* Main.storyboard */, 87 | 03363E8B2149F10300FA3EFE /* Assets.xcassets */, 88 | 03363E8D2149F10300FA3EFE /* LaunchScreen.storyboard */, 89 | 03363E902149F10300FA3EFE /* Info.plist */, 90 | 03363E912149F10300FA3EFE /* main.m */, 91 | ); 92 | path = YXYVideoModel; 93 | sourceTree = ""; 94 | }; 95 | /* End PBXGroup section */ 96 | 97 
| /* Begin PBXNativeTarget section */ 98 | 03363E7E2149F10100FA3EFE /* YXYVideoModel */ = { 99 | isa = PBXNativeTarget; 100 | buildConfigurationList = 03363E952149F10300FA3EFE /* Build configuration list for PBXNativeTarget "YXYVideoModel" */; 101 | buildPhases = ( 102 | 03363E7B2149F10100FA3EFE /* Sources */, 103 | 03363E7C2149F10100FA3EFE /* Frameworks */, 104 | 03363E7D2149F10100FA3EFE /* Resources */, 105 | ); 106 | buildRules = ( 107 | ); 108 | dependencies = ( 109 | ); 110 | name = YXYVideoModel; 111 | productName = YXYVideoModel; 112 | productReference = 03363E7F2149F10100FA3EFE /* YXYVideoModel.app */; 113 | productType = "com.apple.product-type.application"; 114 | }; 115 | /* End PBXNativeTarget section */ 116 | 117 | /* Begin PBXProject section */ 118 | 03363E772149F10100FA3EFE /* Project object */ = { 119 | isa = PBXProject; 120 | attributes = { 121 | LastUpgradeCheck = 0940; 122 | ORGANIZATIONNAME = Test; 123 | TargetAttributes = { 124 | 03363E7E2149F10100FA3EFE = { 125 | CreatedOnToolsVersion = 9.4.1; 126 | }; 127 | }; 128 | }; 129 | buildConfigurationList = 03363E7A2149F10100FA3EFE /* Build configuration list for PBXProject "YXYVideoModel" */; 130 | compatibilityVersion = "Xcode 9.3"; 131 | developmentRegion = en; 132 | hasScannedForEncodings = 0; 133 | knownRegions = ( 134 | en, 135 | Base, 136 | ); 137 | mainGroup = 03363E762149F10100FA3EFE; 138 | productRefGroup = 03363E802149F10100FA3EFE /* Products */; 139 | projectDirPath = ""; 140 | projectRoot = ""; 141 | targets = ( 142 | 03363E7E2149F10100FA3EFE /* YXYVideoModel */, 143 | ); 144 | }; 145 | /* End PBXProject section */ 146 | 147 | /* Begin PBXResourcesBuildPhase section */ 148 | 03363E7D2149F10100FA3EFE /* Resources */ = { 149 | isa = PBXResourcesBuildPhase; 150 | buildActionMask = 2147483647; 151 | files = ( 152 | 03363E9B214A09E100FA3EFE /* 2017.mp4 in Resources */, 153 | 03363E8F2149F10300FA3EFE /* LaunchScreen.storyboard in Resources */, 154 | 03363E992149F16400FA3EFE /* 2018.mp4 in Resources */, 155 | 03363E8C2149F10300FA3EFE /* Assets.xcassets in Resources */, 156 | 03363E8A2149F10100FA3EFE /* Main.storyboard in Resources */, 157 | 03363E9D214A166900FA3EFE /* ico.jpeg in Resources */, 158 | ); 159 | runOnlyForDeploymentPostprocessing = 0; 160 | }; 161 | /* End PBXResourcesBuildPhase section */ 162 | 163 | /* Begin PBXSourcesBuildPhase section */ 164 | 03363E7B2149F10100FA3EFE /* Sources */ = { 165 | isa = PBXSourcesBuildPhase; 166 | buildActionMask = 2147483647; 167 | files = ( 168 | 03363E872149F10100FA3EFE /* ViewController.m in Sources */, 169 | 03363E922149F10300FA3EFE /* main.m in Sources */, 170 | 03363E842149F10100FA3EFE /* AppDelegate.m in Sources */, 171 | 03954E0D2150972300AF6E06 /* YXYViewController.m in Sources */, 172 | 03030725218073D7004C029D /* OpenGLView20.m in Sources */, 173 | ); 174 | runOnlyForDeploymentPostprocessing = 0; 175 | }; 176 | /* End PBXSourcesBuildPhase section */ 177 | 178 | /* Begin PBXVariantGroup section */ 179 | 03363E882149F10100FA3EFE /* Main.storyboard */ = { 180 | isa = PBXVariantGroup; 181 | children = ( 182 | 03363E892149F10100FA3EFE /* Base */, 183 | ); 184 | name = Main.storyboard; 185 | sourceTree = ""; 186 | }; 187 | 03363E8D2149F10300FA3EFE /* LaunchScreen.storyboard */ = { 188 | isa = PBXVariantGroup; 189 | children = ( 190 | 03363E8E2149F10300FA3EFE /* Base */, 191 | ); 192 | name = LaunchScreen.storyboard; 193 | sourceTree = ""; 194 | }; 195 | /* End PBXVariantGroup section */ 196 | 197 | /* Begin XCBuildConfiguration section */ 198 | 03363E932149F10300FA3EFE 
/* Debug */ = { 199 | isa = XCBuildConfiguration; 200 | buildSettings = { 201 | ALWAYS_SEARCH_USER_PATHS = NO; 202 | CLANG_ANALYZER_NONNULL = YES; 203 | CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE; 204 | CLANG_CXX_LANGUAGE_STANDARD = "gnu++14"; 205 | CLANG_CXX_LIBRARY = "libc++"; 206 | CLANG_ENABLE_MODULES = YES; 207 | CLANG_ENABLE_OBJC_ARC = YES; 208 | CLANG_ENABLE_OBJC_WEAK = YES; 209 | CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES; 210 | CLANG_WARN_BOOL_CONVERSION = YES; 211 | CLANG_WARN_COMMA = YES; 212 | CLANG_WARN_CONSTANT_CONVERSION = YES; 213 | CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES; 214 | CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; 215 | CLANG_WARN_DOCUMENTATION_COMMENTS = YES; 216 | CLANG_WARN_EMPTY_BODY = YES; 217 | CLANG_WARN_ENUM_CONVERSION = YES; 218 | CLANG_WARN_INFINITE_RECURSION = YES; 219 | CLANG_WARN_INT_CONVERSION = YES; 220 | CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES; 221 | CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES; 222 | CLANG_WARN_OBJC_LITERAL_CONVERSION = YES; 223 | CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; 224 | CLANG_WARN_RANGE_LOOP_ANALYSIS = YES; 225 | CLANG_WARN_STRICT_PROTOTYPES = YES; 226 | CLANG_WARN_SUSPICIOUS_MOVE = YES; 227 | CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE; 228 | CLANG_WARN_UNREACHABLE_CODE = YES; 229 | CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; 230 | CODE_SIGN_IDENTITY = "iPhone Developer"; 231 | COPY_PHASE_STRIP = NO; 232 | DEBUG_INFORMATION_FORMAT = dwarf; 233 | ENABLE_STRICT_OBJC_MSGSEND = YES; 234 | ENABLE_TESTABILITY = YES; 235 | GCC_C_LANGUAGE_STANDARD = gnu11; 236 | GCC_DYNAMIC_NO_PIC = NO; 237 | GCC_NO_COMMON_BLOCKS = YES; 238 | GCC_OPTIMIZATION_LEVEL = 0; 239 | GCC_PREPROCESSOR_DEFINITIONS = ( 240 | "DEBUG=1", 241 | "$(inherited)", 242 | ); 243 | GCC_WARN_64_TO_32_BIT_CONVERSION = YES; 244 | GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; 245 | GCC_WARN_UNDECLARED_SELECTOR = YES; 246 | GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; 247 | GCC_WARN_UNUSED_FUNCTION = YES; 248 | GCC_WARN_UNUSED_VARIABLE = YES; 249 | IPHONEOS_DEPLOYMENT_TARGET = 11.4; 250 | MTL_ENABLE_DEBUG_INFO = YES; 251 | ONLY_ACTIVE_ARCH = YES; 252 | SDKROOT = iphoneos; 253 | }; 254 | name = Debug; 255 | }; 256 | 03363E942149F10300FA3EFE /* Release */ = { 257 | isa = XCBuildConfiguration; 258 | buildSettings = { 259 | ALWAYS_SEARCH_USER_PATHS = NO; 260 | CLANG_ANALYZER_NONNULL = YES; 261 | CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE; 262 | CLANG_CXX_LANGUAGE_STANDARD = "gnu++14"; 263 | CLANG_CXX_LIBRARY = "libc++"; 264 | CLANG_ENABLE_MODULES = YES; 265 | CLANG_ENABLE_OBJC_ARC = YES; 266 | CLANG_ENABLE_OBJC_WEAK = YES; 267 | CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES; 268 | CLANG_WARN_BOOL_CONVERSION = YES; 269 | CLANG_WARN_COMMA = YES; 270 | CLANG_WARN_CONSTANT_CONVERSION = YES; 271 | CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES; 272 | CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; 273 | CLANG_WARN_DOCUMENTATION_COMMENTS = YES; 274 | CLANG_WARN_EMPTY_BODY = YES; 275 | CLANG_WARN_ENUM_CONVERSION = YES; 276 | CLANG_WARN_INFINITE_RECURSION = YES; 277 | CLANG_WARN_INT_CONVERSION = YES; 278 | CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES; 279 | CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES; 280 | CLANG_WARN_OBJC_LITERAL_CONVERSION = YES; 281 | CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; 282 | CLANG_WARN_RANGE_LOOP_ANALYSIS = YES; 283 | CLANG_WARN_STRICT_PROTOTYPES = YES; 284 | CLANG_WARN_SUSPICIOUS_MOVE = YES; 285 | CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE; 286 | CLANG_WARN_UNREACHABLE_CODE = YES; 287 | 
CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; 288 | CODE_SIGN_IDENTITY = "iPhone Developer"; 289 | COPY_PHASE_STRIP = NO; 290 | DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym"; 291 | ENABLE_NS_ASSERTIONS = NO; 292 | ENABLE_STRICT_OBJC_MSGSEND = YES; 293 | GCC_C_LANGUAGE_STANDARD = gnu11; 294 | GCC_NO_COMMON_BLOCKS = YES; 295 | GCC_WARN_64_TO_32_BIT_CONVERSION = YES; 296 | GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; 297 | GCC_WARN_UNDECLARED_SELECTOR = YES; 298 | GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; 299 | GCC_WARN_UNUSED_FUNCTION = YES; 300 | GCC_WARN_UNUSED_VARIABLE = YES; 301 | IPHONEOS_DEPLOYMENT_TARGET = 11.4; 302 | MTL_ENABLE_DEBUG_INFO = NO; 303 | SDKROOT = iphoneos; 304 | VALIDATE_PRODUCT = YES; 305 | }; 306 | name = Release; 307 | }; 308 | 03363E962149F10300FA3EFE /* Debug */ = { 309 | isa = XCBuildConfiguration; 310 | buildSettings = { 311 | ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; 312 | CODE_SIGN_STYLE = Automatic; 313 | DEVELOPMENT_TEAM = 8PLB35PLP3; 314 | INFOPLIST_FILE = YXYVideoModel/Info.plist; 315 | IPHONEOS_DEPLOYMENT_TARGET = 9.0; 316 | LD_RUNPATH_SEARCH_PATHS = ( 317 | "$(inherited)", 318 | "@executable_path/Frameworks", 319 | ); 320 | PRODUCT_BUNDLE_IDENTIFIER = com.test.simpleapp.YXYVideoModel; 321 | PRODUCT_NAME = "$(TARGET_NAME)"; 322 | TARGETED_DEVICE_FAMILY = "1,2"; 323 | }; 324 | name = Debug; 325 | }; 326 | 03363E972149F10300FA3EFE /* Release */ = { 327 | isa = XCBuildConfiguration; 328 | buildSettings = { 329 | ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; 330 | CODE_SIGN_STYLE = Automatic; 331 | DEVELOPMENT_TEAM = 8PLB35PLP3; 332 | INFOPLIST_FILE = YXYVideoModel/Info.plist; 333 | IPHONEOS_DEPLOYMENT_TARGET = 9.0; 334 | LD_RUNPATH_SEARCH_PATHS = ( 335 | "$(inherited)", 336 | "@executable_path/Frameworks", 337 | ); 338 | PRODUCT_BUNDLE_IDENTIFIER = com.test.simpleapp.YXYVideoModel; 339 | PRODUCT_NAME = "$(TARGET_NAME)"; 340 | TARGETED_DEVICE_FAMILY = "1,2"; 341 | }; 342 | name = Release; 343 | }; 344 | /* End XCBuildConfiguration section */ 345 | 346 | /* Begin XCConfigurationList section */ 347 | 03363E7A2149F10100FA3EFE /* Build configuration list for PBXProject "YXYVideoModel" */ = { 348 | isa = XCConfigurationList; 349 | buildConfigurations = ( 350 | 03363E932149F10300FA3EFE /* Debug */, 351 | 03363E942149F10300FA3EFE /* Release */, 352 | ); 353 | defaultConfigurationIsVisible = 0; 354 | defaultConfigurationName = Release; 355 | }; 356 | 03363E952149F10300FA3EFE /* Build configuration list for PBXNativeTarget "YXYVideoModel" */ = { 357 | isa = XCConfigurationList; 358 | buildConfigurations = ( 359 | 03363E962149F10300FA3EFE /* Debug */, 360 | 03363E972149F10300FA3EFE /* Release */, 361 | ); 362 | defaultConfigurationIsVisible = 0; 363 | defaultConfigurationName = Release; 364 | }; 365 | /* End XCConfigurationList section */ 366 | }; 367 | rootObject = 03363E772149F10100FA3EFE /* Project object */; 368 | } 369 | -------------------------------------------------------------------------------- /YXYVideoModel/ViewController.m: --------------------------------------------------------------------------------
1 | //
2 | // ViewController.m
3 | // YXYVideoModel
4 | // Email: 939607134@qq.com
5 | // Created by yxy on 2018/9/13.
6 | // Copyright © 2018 yxy. All rights reserved.
7 | //
8 | // AVAsset: a piece of source material in the "library";
9 | // AVAssetTrack: one track of a piece of source material;
10 | // AVMutableComposition: a "project file" used to assemble a video;
11 | // AVMutableCompositionTrack: a track inside the project file (audio track, video track, ...) into which the corresponding material is inserted
12 | // AVMutableVideoCompositionLayerInstruction: a single video on a video track; it can be scaled, rotated, and so on
13 | // AVMutableVideoCompositionInstruction: a whole video track, covering all the video material on that track
14 | // AVMutableVideoComposition: manages all video tracks and decides the final video size; cropping is done here
15 | // AVAssetExportSession: configures the render parameters and performs the render
16 | // AVMutableAudioMix: manages all audio
17 | // AVMutableAudioMixInputParameters: per-track audio parameter settings (a small track-inspection sketch follows viewDidLoad below)
18 | 
19 | 
20 | 
21 | #import "ViewController.h"
22 | #import <AVFoundation/AVFoundation.h>
23 | #define kPathDocument1 [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory,NSUserDomainMask,YES) firstObject]
24 | #define Cache_PATH_IN_DOMAINS [NSSearchPathForDirectoriesInDomains(NSCachesDirectory, NSUserDomainMask, YES) lastObject]
25 | @interface ViewController ()
26 | 
27 | @end
28 | 
29 | @implementation ViewController
30 | 
31 | - (void)viewDidLoad {
32 |     [super viewDidLoad];
33 |     NSLog(@" \n %@",kPathDocument1);
34 |     // !!!: Run this project in the Simulator if you can; the printed file path (kPathDocument1, the macro defined above) leads to the exported video files;
35 | 
36 |     /**
37 |      First copy a video into a temporary folder (Cache_PATH_IN_DOMAINS, the macro defined above) so the tests below can use it;
38 |      the finished, edited videos are written into the kPathDocument1 directory,
39 |      */
40 |     NSString *filePath = [[NSBundle mainBundle] pathForResource:@"2018" ofType:@"mp4"];
41 |     NSData *data = [NSData dataWithContentsOfFile:filePath];
42 |     NSString *fp = [Cache_PATH_IN_DOMAINS stringByAppendingString:@"/2018.mp4"];
43 |     unlink([fp UTF8String]);
44 |     [data writeToFile:fp atomically:YES];
45 | 
46 |     NSString *filePath2 = [[NSBundle mainBundle] pathForResource:@"2017" ofType:@"mp4"];
47 |     NSData *data2 = [NSData dataWithContentsOfFile:filePath2];
48 |     NSString *fp2 = [Cache_PATH_IN_DOMAINS stringByAppendingString:@"/2017.mp4"];
49 |     unlink([fp2 UTF8String]);
50 |     [data2 writeToFile:fp2 atomically:YES];
51 | 
52 | 
53 |     UIButton *bt = [UIButton buttonWithType:UIButtonTypeCustom];
54 |     bt.backgroundColor = [UIColor redColor];
55 |     bt.frame = CGRectMake(100, 50, 100, 50);
56 |     [bt setTitle:@"2x speed" forState:UIControlStateNormal];
57 |     [bt addTarget:self action:@selector(actionForButton) forControlEvents:UIControlEventTouchUpInside];
58 |     [self.view addSubview:bt];
59 | 
60 |     UIButton *bt2 = [UIButton buttonWithType:UIButtonTypeCustom];
61 |     bt2.backgroundColor = [UIColor redColor];
62 |     bt2.frame = CGRectMake(100, 120, 100, 50);
63 |     [bt2 setTitle:@"Rotate" forState:UIControlStateNormal];
64 |     [bt2 addTarget:self action:@selector(actionForButton2) forControlEvents:UIControlEventTouchUpInside];
65 |     [self.view addSubview:bt2];
66 | 
67 |     UIButton *bt3 = [UIButton buttonWithType:UIButtonTypeCustom];
68 |     bt3.backgroundColor = [UIColor redColor];
69 |     bt3.frame = CGRectMake(100, 190, 100, 50);
70 |     [bt3 setTitle:@"Trim" forState:UIControlStateNormal];
71 |     [bt3 addTarget:self action:@selector(actionForButton3) forControlEvents:UIControlEventTouchUpInside];
72 |     [self.view addSubview:bt3];
73 | 
74 | 
75 |     UIButton *bt4 = [UIButton buttonWithType:UIButtonTypeCustom];
76 |     bt4.backgroundColor = [UIColor redColor];
77 |     bt4.frame = CGRectMake(100, 260, 150, 50);
78 |     [bt4 setTitle:@"Concat + audio" forState:UIControlStateNormal];
79 |     [bt4 addTarget:self action:@selector(actionForButton4) forControlEvents:UIControlEventTouchUpInside];
80 |     [self.view addSubview:bt4];
81 | 
82 |     UIButton *bt5 = [UIButton buttonWithType:UIButtonTypeCustom];
83 |     bt5.backgroundColor = [UIColor redColor];
84 |     bt5.frame = CGRectMake(100, 330, 100, 50);
85 |     [bt5 setTitle:@"Watermark" forState:UIControlStateNormal];
86 |     [bt5 addTarget:self action:@selector(actionForButton5) forControlEvents:UIControlEventTouchUpInside];
87 |     [self.view addSubview:bt5];
88 | 
89 | 
90 |     UIButton *bt6 = [UIButton buttonWithType:UIButtonTypeCustom];
91 |     bt6.backgroundColor = [UIColor redColor];
92 |     bt6.frame = CGRectMake(100, 400, 100, 50);
93 |     [bt6 setTitle:@"Filter" forState:UIControlStateNormal];
94 |     [bt6 addTarget:self action:@selector(actionForButton6) forControlEvents:UIControlEventTouchUpInside];
95 |     [self.view addSubview:bt6];
96 | 
97 |     UIButton *bt7 = [UIButton buttonWithType:UIButtonTypeCustom];
98 |     bt7.backgroundColor = [UIColor redColor];
99 |     bt7.frame = CGRectMake(100, 470, 100, 50);
100 |     [bt7 setTitle:@"Compress" forState:UIControlStateNormal];
101 |     [bt7 addTarget:self action:@selector(actionForButton7) forControlEvents:UIControlEventTouchUpInside];
102 |     [self.view addSubview:bt7];
103 | }
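/* ---- Editor's sketch (not part of the original sample) ----
 A minimal illustration of the first two classes in the glossary above, AVAsset and
 AVAssetTrack, using only standard AVFoundation APIs. The method name logInfoForAsset:
 is hypothetical, added here purely for illustration. */
- (void)logInfoForAsset:(AVAsset *)asset
{
    // Total length of the material, converted from CMTime to seconds
    NSLog(@"duration: %.2f s", CMTimeGetSeconds(asset.duration));
    // Every track (video, audio, ...) the asset contains
    for (AVAssetTrack *track in asset.tracks) {
        NSLog(@"track %d: mediaType=%@ naturalSize=%@",
              (int)track.trackID, track.mediaType, NSStringFromCGSize(track.naturalSize));
    }
}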
104 | #pragma mark =Video compression=
105 | -(void)actionForButton7
106 | {
107 |     NSString *fp = [Cache_PATH_IN_DOMAINS stringByAppendingString:@"/2018.mp4"];
108 |     //    [data writeToFile:fp atomically:YES];
109 |     NSString *filePath = fp;
110 | 
111 |     //1. Drag the source material into the "library"
112 |     AVURLAsset *asset = [AVURLAsset assetWithURL:[NSURL fileURLWithPath:filePath]];
113 | 
114 | 
115 |     NSString *outputFilePath = [kPathDocument1 stringByAppendingString:@"/2018_7.mp4"];
116 |     unlink([outputFilePath UTF8String]);
117 |     //4. Export
118 |     /**
119 |      AVAssetExportPresetMediumQuality
120 |      AVAssetExportPresetLowQuality
121 |      AVAssetExportPresetHighestQuality
122 |      Choose whichever of the three compression presets suits you (see the preset sketch after this method)
123 |      */
124 |     AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:asset presetName:AVAssetExportPresetMediumQuality];
125 | 
126 |     exporter.outputURL = [NSURL fileURLWithPath:outputFilePath isDirectory:NO]; // isDirectory:NO — the output URL points at a file
127 |     exporter.outputFileType = AVFileTypeMPEG4;
128 |     exporter.shouldOptimizeForNetworkUse = YES;
129 |     [exporter exportAsynchronouslyWithCompletionHandler:^{
130 |         if (exporter.error) {
131 |             //...
132 |             NSLog(@"error: %@",exporter.error);
133 |         }else{
134 |             //...
135 |             NSLog(@"success");
136 |         }
137 |     }];
138 | 
139 | }
140 | 
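/* ---- Editor's sketch (not part of the original sample) ----
 Before picking one of the presets listed above, you can ask AVFoundation which
 presets are actually compatible with a given asset. Standard API; the function
 name is hypothetical. */
static void LogCompatiblePresets(AVAsset *asset)
{
    // Presets that an AVAssetExportSession can apply to this particular asset
    NSArray<NSString *> *presets = [AVAssetExportSession exportPresetsCompatibleWithAsset:asset];
    NSLog(@"compatible presets: %@", presets);
}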
141 | #pragma mark =Filter=
142 | -(void)actionForButton6
143 | {
144 |     //    NSString *filePath = [[NSBundle mainBundle] pathForResource:@"2018" ofType:@"mp4"];
145 |     //
146 |     //    NSData *data = [NSData dataWithContentsOfFile:filePath];
147 |     NSString *fp = [Cache_PATH_IN_DOMAINS stringByAppendingString:@"/2018.mp4"];
148 |     //    [data writeToFile:fp atomically:YES];
149 |     NSString *filePath = fp;
150 | 
151 |     //1. Drag the source material into the "library"
152 |     AVURLAsset *asset = [AVURLAsset assetWithURL:[NSURL fileURLWithPath:filePath]];
153 | 
154 |     CIFilter *filter = [CIFilter filterWithName:@"CIMaskToAlpha"];
155 |     //To list the available filter names:
156 |     //    NSLog(@"%@",[CIFilter filterNamesInCategory:kCICategoryVideo]) ;
157 | 
158 |     /**
159 |      CPU usage climbs above 200% — frightening, and very time-consuming (a shared-CIContext sketch follows this method);
160 |      */
161 |     AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoCompositionWithAsset:asset applyingCIFiltersWithHandler:^(AVAsynchronousCIImageFilteringRequest * _Nonnull request) {
162 |         CIImage *source = request.sourceImage.imageByClampingToExtent;
163 |         float currentTime = CMTimeGetSeconds(request.compositionTime); // CMTimeGetSeconds avoids the integer division in value/timescale
164 |         /*
165 |          You can use currentTime to decide during which parts of the video the filter is applied
166 |          **/
167 |         if (currentTime < 3) {
168 |             [request finishWithImage:source context:nil];
169 |         } else {
170 |             [filter setValue:source forKey:kCIInputImageKey];
171 |             //4
172 |             CIImage *output = [filter.outputImage imageByCroppingToRect:request.sourceImage.extent];
173 |             [request finishWithImage:output context:nil];
174 |         }
175 | 
176 |     }];
177 | 
178 | 
179 |     NSString *outputFilePath = [kPathDocument1 stringByAppendingString:@"/2018_6.mp4"];
180 |     unlink([outputFilePath UTF8String]);
181 |     //4. Export
182 |     AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:asset presetName:AVAssetExportPresetMediumQuality];
183 |     exporter.videoComposition = videoComposition;
184 |     exporter.outputURL = [NSURL fileURLWithPath:outputFilePath isDirectory:NO];
185 |     exporter.outputFileType = AVFileTypeMPEG4;
186 |     exporter.shouldOptimizeForNetworkUse = YES;
187 |     [exporter exportAsynchronouslyWithCompletionHandler:^{
188 |         if (exporter.error) {
189 |             //...
190 |             NSLog(@"error: %@",exporter.error);
191 |         }else{
192 |             //...
193 |             NSLog(@"success");
194 |         }
195 |     }];
196 | }
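/* ---- Editor's sketch (not part of the original sample) ----
 One thing to try against the CPU cost complained about above: create a single
 CIContext up front and hand it to every finishWithImage:context: call instead of
 passing nil (with nil a default context is used). Whether it helps depends on the
 device and filter — treat this as something to profile, not a guaranteed fix.
 The helper name is hypothetical. */
- (AVMutableVideoComposition *)filteredCompositionForAsset:(AVAsset *)asset
{
    CIContext *context = [CIContext contextWithOptions:nil]; // created once, reused for every frame
    CIFilter *filter = [CIFilter filterWithName:@"CISepiaTone"];
    return [AVMutableVideoComposition videoCompositionWithAsset:asset applyingCIFiltersWithHandler:^(AVAsynchronousCIImageFilteringRequest * _Nonnull request) {
        [filter setValue:request.sourceImage forKey:kCIInputImageKey];
        [request finishWithImage:filter.outputImage context:context]; // render through the shared context
    }];
}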
197 | 
198 | #pragma mark =Watermark=
199 | -(void)actionForButton5
200 | {
201 | 
202 |     //    NSString *filePath = [[NSBundle mainBundle] pathForResource:@"2018" ofType:@"mp4"];
203 |     //
204 |     //    NSData *data = [NSData dataWithContentsOfFile:filePath];
205 |     NSString *fp = [Cache_PATH_IN_DOMAINS stringByAppendingString:@"/2018.mp4"];
206 |     //    [data writeToFile:fp atomically:YES];
207 |     NSString *filePath = fp;
208 | 
209 |     //1. Drag the source material into the "library"
210 |     AVAsset *asset = [AVAsset assetWithURL:[NSURL fileURLWithPath:filePath]];
211 |     //The material's video track
212 |     AVAssetTrack *videoAssetTrack = [[asset tracksWithMediaType:AVMediaTypeVideo]objectAtIndex:0];
213 |     //The material's audio track
214 |     AVAssetTrack *audioAssertTrack = [[asset tracksWithMediaType:AVMediaTypeAudio]objectAtIndex:0];
215 | 
216 |     //2. Insert the material's video into the video track and its audio into the audio track
217 |     //This is the "project file"
218 |     AVMutableComposition *composition = [AVMutableComposition composition];
219 |     //Video track
220 |     AVMutableCompositionTrack *videoCompositionTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
221 |     //Insert one time range of video into the video track
222 |     [videoCompositionTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, asset.duration) ofTrack:videoAssetTrack atTime:kCMTimeZero error:nil];
223 | 
224 |     //Audio track
225 |     AVMutableCompositionTrack *audioCompositionTrack = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
226 |     //Insert the audio data too, otherwise there is no sound
227 |     [audioCompositionTrack insertTimeRange: CMTimeRangeMake(kCMTimeZero, asset.duration) ofTrack:audioAssertTrack atTime:kCMTimeZero error:nil];
228 | 
229 | 
230 |     //3. Cropping the video means cropping every video track; to get a video track you need all the video material on it
231 |     AVMutableVideoCompositionLayerInstruction *videoCompositionLayerIns = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoAssetTrack];
232 |     [videoCompositionLayerIns setTransform:videoAssetTrack.preferredTransform atTime:kCMTimeZero];
233 |     //Get the video material (there is only one video in this example)
234 |     AVMutableVideoCompositionInstruction *videoCompositionIns = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
235 |     [videoCompositionIns setTimeRange:CMTimeRangeMake(kCMTimeZero, videoAssetTrack.timeRange.duration)];
236 |     videoCompositionIns.layerInstructions = @[videoCompositionLayerIns];
237 | 
238 |     //Get the video track (there is only one track in this example)
239 |     AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoComposition];
240 |     videoComposition.instructions = @[videoCompositionIns];
241 |     videoComposition.renderSize = CGSizeMake(960, 544);
242 |     //Crop to the corresponding size
243 |     videoComposition.frameDuration = CMTimeMake(1, 30);
244 | #pragma mark =Add the watermark=
245 |     /**
246 |      Note: the coordinate origin is the bottom-left corner.
247 |      backgroundLayer is the watermark layer; it can be animated (e.g. [layer1 addAnimation:aCABasicAnimation forKey:nil]) — anything a layer can do works here.
248 |      parentLayer must have the same frame as the video; it is the view that holds the watermark layer and the video layer;
249 |      videoLayer must have the same frame as the video; it is the layer the video plays in. For a nine-up grid, create
250 |      nine videoLayers and adjust their frames — every videoLayer must be added to parentLayer — then call + (instancetype)videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayers:(NSArray *)videoLayers inLayer:(CALayer *)animationLayer;
251 |      its first parameter is the array holding the videoLayers. In the example below I play two pictures on screen — remove the comments to try it. (A text-watermark sketch follows this method.)
252 |      **/
253 |     UIImage *img = [UIImage imageNamed:@"ico.jpeg"];
254 |     CALayer *backgroundLayer = [CALayer layer];
255 |     [backgroundLayer setContents:(id)[img CGImage]];
256 |     backgroundLayer.frame = CGRectMake(CGSizeMake(960, 544).width-img.size.width, 0, img.size.width, img.size.height);
257 |     [backgroundLayer setMasksToBounds:YES];
258 |     CALayer *parentLayer = [CALayer layer];
259 |     CALayer *videoLayer = [CALayer layer];
260 | 
261 |     parentLayer.frame = CGRectMake(0, 0, CGSizeMake(960, 544).width, CGSizeMake(960, 544).height);
262 |     videoLayer.frame = CGRectMake(0, 0, CGSizeMake(960, 544).width, CGSizeMake(960, 544).height);
263 | 
264 |     [parentLayer addSublayer:videoLayer];
265 |     [parentLayer addSublayer:backgroundLayer];
266 |     //Play a single picture
267 |     videoComposition.animationTool = [AVVideoCompositionCoreAnimationTool videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer inLayer:parentLayer];
268 |     //Remove the comments to try playing two pictures; remember to change videoLayer's frame, otherwise it gets covered
269 |     //    CALayer *videoLayer2 = [CALayer layer];
270 |     //    videoLayer2.frame = CGRectMake(CGSizeMake(960, 544).width/2, 0, CGSizeMake(960, 544).width/2, CGSizeMake(960, 544).height);
271 |     //    [parentLayer addSublayer:videoLayer2];
272 |     //    videoComposition.animationTool = [AVVideoCompositionCoreAnimationTool videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayers:@[videoLayer,videoLayer2] inLayer:parentLayer];
273 | 
274 | 
275 |     NSString *outputFilePath = [kPathDocument1 stringByAppendingString:@"/2018_5.mp4"];
276 |     unlink([outputFilePath UTF8String]);
277 |     //4. Export
278 |     AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:composition presetName:AVAssetExportPresetMediumQuality];
279 |     exporter.videoComposition = videoComposition;
280 |     exporter.outputURL = [NSURL fileURLWithPath:outputFilePath isDirectory:NO];
281 |     exporter.outputFileType = AVFileTypeMPEG4;
282 |     exporter.shouldOptimizeForNetworkUse = YES;
283 |     [exporter exportAsynchronouslyWithCompletionHandler:^{
284 |         if (exporter.error) {
285 |             //...
286 |             NSLog(@"error: %@",exporter.error);
287 |         }else{
288 |             //...
289 |             NSLog(@"success");
290 |         }
291 |     }];
292 | }
293 | 
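/* ---- Editor's sketch (not part of the original sample) ----
 The layer tree described above accepts any CALayer subclass, so a text watermark
 is just a CATextLayer added to parentLayer instead of (or next to) the image
 layer. The method name and string are hypothetical; remember the bottom-left
 origin noted above. */
- (CATextLayer *)textWatermarkLayerWithVideoSize:(CGSize)videoSize
{
    CATextLayer *textLayer = [CATextLayer layer];
    textLayer.string = @"YXYVideoEditor";
    textLayer.fontSize = 36;
    textLayer.foregroundColor = [UIColor whiteColor].CGColor;
    textLayer.alignmentMode = kCAAlignmentCenter;
    // The origin is bottom-left, so "along the top edge" means y near videoSize.height
    textLayer.frame = CGRectMake(0, videoSize.height - 50, videoSize.width, 50);
    return textLayer;
}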
294 | #pragma mark =Video concatenation=
295 | -(void)actionForButton4
296 | {
297 |     //    NSString *filePath = [[NSBundle mainBundle] pathForResource:@"2018" ofType:@"mp4"];
298 |     //
299 |     //    NSData *data = [NSData dataWithContentsOfFile:filePath];
300 |     NSString *fp = [Cache_PATH_IN_DOMAINS stringByAppendingString:@"/2018.mp4"];
301 |     NSString *fp2 = [Cache_PATH_IN_DOMAINS stringByAppendingString:@"/2017.mp4"];
302 |     //    [data writeToFile:fp atomically:YES];
303 |     NSString *filePath = fp;
304 |     NSString *filePath2 = fp2;
305 | 
306 |     NSDictionary *opstDict = [NSDictionary dictionaryWithObject:[NSNumber numberWithBool:NO] forKey:AVURLAssetPreferPreciseDurationAndTimingKey];
307 | 
308 |     //1. Drag the source material into the "library"
309 |     AVAsset *asset = [[AVURLAsset alloc] initWithURL:[NSURL fileURLWithPath:filePath] options:opstDict];
310 |     AVAsset *asset2 = [[AVURLAsset alloc] initWithURL:[NSURL fileURLWithPath:filePath2] options:opstDict];
311 | 
312 |     //The first material's video track
313 |     AVAssetTrack *videoAssetTrack = [[asset tracksWithMediaType:AVMediaTypeVideo]objectAtIndex:0];
314 |     //The first material's audio track
315 |     AVAssetTrack *audioAssertTrack = [[asset tracksWithMediaType:AVMediaTypeAudio]objectAtIndex:0];
316 | 
317 |     //The second material's video track
318 |     AVAssetTrack *videoAssetTrack2 = [[asset2 tracksWithMediaType:AVMediaTypeVideo]objectAtIndex:0];
319 | 
320 |     //The second material's audio track
321 |     //    AVAssetTrack *audioAssertTrack2 = [[asset2 tracksWithMediaType:AVMediaTypeAudio]objectAtIndex:0];
322 | 
323 |     //2. Insert the materials' video into the video track and their audio into the audio track
324 |     //This is the "project file"
325 |     AVMutableComposition *composition = [AVMutableComposition composition];
326 |     //Video track
327 |     AVMutableCompositionTrack *videoCompositionTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
328 | #pragma mark =Merge video tracks=
329 |     //Insert one time range of video into the video track
330 |     //Because the insertion times are not accumulated and both clips go in at position 0, the clip added later ends up first in the merged video (see the sequential-insert sketch after this method),
331 |     [videoCompositionTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, asset2.duration) ofTrack:videoAssetTrack2 atTime:kCMTimeZero error:nil];
332 |     [videoCompositionTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, asset.duration) ofTrack:videoAssetTrack atTime:kCMTimeZero error:nil];
333 | #pragma mark =Merge audio tracks=
334 |     //Audio track
335 |     AVMutableCompositionTrack *audioCompositionTrack = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
336 |     //Insert the audio data too, otherwise there is no sound
337 |     [audioCompositionTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, asset2.duration) ofTrack:audioAssertTrack atTime:kCMTimeZero error:nil]; // note: this fills the 2017 clip's duration with the 2018 clip's audio; if 2017.mp4 has its own audio track, audioAssertTrack2 (commented out above) is probably what was meant
338 |     [audioCompositionTrack insertTimeRange: CMTimeRangeMake(kCMTimeZero, asset.duration) ofTrack:audioAssertTrack atTime:kCMTimeZero error:nil];
339 | 
340 | 
341 |     //3. Cropping the video means cropping every video track; to get a video track you need all the video material on it
342 |     AVMutableVideoCompositionLayerInstruction *videoCompositionLayerIns = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoAssetTrack];
343 |     /**
344 |      When two videos are merged onto one video track, a rotation or scale set here changes every video on that track. To affect only part
345 |      of it, set the transform per time range: - (void)setTransformRampFromStartTransform:(CGAffineTransform)startTransform toEndTransform:(CGAffineTransform)endTransform timeRange:(CMTimeRange)timeRange
346 |      */
347 |     [videoCompositionLayerIns setTransform:videoAssetTrack.preferredTransform atTime:kCMTimeZero];
348 | 
349 |     //    [videoCompositionLayerIns setTransformRampFromStartTransform:CGAffineTransformIdentity toEndTransform:CGAffineTransformMakeRotation(M_PI_4) timeRange:CMTimeRangeMake(CMTimeMake(7*30, 30), CMTimeMake(1*30, 30))];
350 | 
351 |     //Get the video material
352 |     AVMutableVideoCompositionInstruction *videoCompositionIns = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
353 | #pragma mark =Total duration=
354 |     /**
355 |      The total duration is the sum of the two clips' durations
356 |      **/
357 |     [videoCompositionIns setTimeRange:CMTimeRangeMake(kCMTimeZero, CMTimeAdd(videoAssetTrack.timeRange.duration, videoAssetTrack2.timeRange.duration))];
358 |     videoCompositionIns.layerInstructions = @[videoCompositionLayerIns];
359 |     //Get the video track (there is only one track in this example)
360 |     AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoComposition];
361 |     videoComposition.instructions = @[videoCompositionIns];
362 |     videoComposition.renderSize = CGSizeMake(960, 544);
363 |     //Crop to the corresponding size
364 |     videoComposition.frameDuration = CMTimeMake(1, 30);
365 | 
366 | 
367 |     NSString *outputFilePath = [kPathDocument1 stringByAppendingString:@"/2018_4.mp4"];
368 |     unlink([outputFilePath UTF8String]);
369 |     //4. Export
370 |     AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:composition presetName:AVAssetExportPresetMediumQuality];
371 |     exporter.videoComposition = videoComposition;
372 |     exporter.outputURL = [NSURL fileURLWithPath:outputFilePath isDirectory:NO];
373 |     exporter.outputFileType = AVFileTypeMPEG4;
374 |     exporter.shouldOptimizeForNetworkUse = YES;
375 |     [exporter exportAsynchronouslyWithCompletionHandler:^{
376 |         if (exporter.error) {
377 |             //...
378 |             NSLog(@"error: %@",exporter.error);
379 |         }else{
380 |             //...
381 |             NSLog(@"success");
382 |         }
383 |     }];
384 | }
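/* ---- Editor's sketch (not part of the original sample) ----
 If you want the clips in the order you insert them, accumulate the insertion time
 instead of always inserting at kCMTimeZero, as the comment above notes. A minimal
 sketch under that assumption (hypothetical helper; error handling omitted, as in
 the sample itself). */
static void AppendTrack(AVMutableCompositionTrack *compTrack, AVAssetTrack *srcTrack, CMTime *cursor)
{
    CMTimeRange range = CMTimeRangeMake(kCMTimeZero, srcTrack.timeRange.duration);
    [compTrack insertTimeRange:range ofTrack:srcTrack atTime:*cursor error:nil];
    *cursor = CMTimeAdd(*cursor, range.duration); // the next clip starts where this one ends
}
// Usage: CMTime cursor = kCMTimeZero;
//        AppendTrack(videoCompositionTrack, videoAssetTrack, &cursor);
//        AppendTrack(videoCompositionTrack, videoAssetTrack2, &cursor);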
385 | 
386 | #pragma mark =Video trimming=
387 | -(void)actionForButton3
388 | {
389 | 
390 |     //    NSString *filePath = [[NSBundle mainBundle] pathForResource:@"2018" ofType:@"mp4"];
391 |     //
392 |     //    NSData *data = [NSData dataWithContentsOfFile:filePath];
393 |     NSString *fp = [Cache_PATH_IN_DOMAINS stringByAppendingString:@"/2018.mp4"];
394 |     //    [data writeToFile:fp atomically:YES];
395 |     NSString *filePath = fp;
396 | 
397 |     //1. Drag the source material into the "library"
398 |     AVAsset *asset = [AVAsset assetWithURL:[NSURL fileURLWithPath:filePath]];
399 | 
400 | 
401 |     NSString *outputFilePath = [kPathDocument1 stringByAppendingString:@"/2018_3.mp4"];
402 |     unlink([outputFilePath UTF8String]);
403 |     //4. Export
404 |     AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:asset presetName:AVAssetExportPresetMediumQuality];
405 | 
406 |     exporter.outputURL = [NSURL fileURLWithPath:outputFilePath isDirectory:NO];
407 |     exporter.outputFileType = AVFileTypeMPEG4;
408 |     exporter.shouldOptimizeForNetworkUse = YES;
409 | #pragma mark =Trim here=
410 |     /**
411 |      The time range to keep — here, the first 4 seconds (a middle-segment variant follows this method),
412 |      */
413 |     exporter.timeRange = CMTimeRangeMake(CMTimeMakeWithSeconds(0, asset.duration.timescale), CMTimeMakeWithSeconds(4, asset.duration.timescale));
414 |     [exporter exportAsynchronouslyWithCompletionHandler:^{
415 |         if (exporter.error) {
416 |             //...
417 |             NSLog(@"error: %@",exporter.error);
418 |         }else{
419 |             //...
420 |             NSLog(@"success");
421 |         }
422 |     }];
423 | }
424 | 
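/* ---- Editor's sketch (not part of the original sample) ----
 timeRange is (start, duration), so keeping a piece out of the middle is just a
 different start. For example, to keep seconds 2 through 5 (hypothetical values): */
// exporter.timeRange = CMTimeRangeMake(CMTimeMakeWithSeconds(2, asset.duration.timescale),
//                                      CMTimeMakeWithSeconds(3, asset.duration.timescale)); // start at 2 s, keep 3 s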
425 | #pragma mark =Image rotation=
426 | -(void)actionForButton2
427 | {
428 |     //    NSString *filePath = [[NSBundle mainBundle] pathForResource:@"2018" ofType:@"mp4"];
429 |     //
430 |     //    NSData *data = [NSData dataWithContentsOfFile:filePath];
431 |     NSString *fp = [Cache_PATH_IN_DOMAINS stringByAppendingString:@"/2018.mp4"];
432 |     //    [data writeToFile:fp atomically:YES];
433 |     NSString *filePath = fp;
434 | 
435 |     //1. Drag the source material into the "library"
436 |     AVAsset *asset = [AVAsset assetWithURL:[NSURL fileURLWithPath:filePath]];
437 |     //The material's video track
438 |     AVAssetTrack *videoAssetTrack = [[asset tracksWithMediaType:AVMediaTypeVideo]objectAtIndex:0];
439 |     //The material's audio track
440 |     AVAssetTrack *audioAssertTrack = [[asset tracksWithMediaType:AVMediaTypeAudio]objectAtIndex:0];
441 | 
442 |     //2. Insert the material's video into the video track and its audio into the audio track
443 |     //This is the "project file"
444 |     AVMutableComposition *composition = [AVMutableComposition composition];
445 |     //Video track
446 |     AVMutableCompositionTrack *videoCompositionTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
447 |     //Insert one time range of video into the video track
448 |     [videoCompositionTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, asset.duration) ofTrack:videoAssetTrack atTime:kCMTimeZero error:nil];
449 |     //    [videoCompositionTrack scaleTimeRange:CMTimeRangeMake(kCMTimeZero, asset.duration) toDuration:CMTimeMake(asset.duration.value/2, asset.duration.timescale)];
450 |     //Audio track
451 |     AVMutableCompositionTrack *audioCompositionTrack = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
452 |     //Insert the audio data too, otherwise there is no sound
453 |     [audioCompositionTrack insertTimeRange: CMTimeRangeMake(kCMTimeZero, asset.duration) ofTrack:audioAssertTrack atTime:kCMTimeZero error:nil];
454 | 
455 |     //    [audioCompositionTrack scaleTimeRange:CMTimeRangeMake(kCMTimeZero, asset.duration) toDuration:CMTimeMake(asset.duration.value/2, asset.duration.timescale)];
456 | 
457 |     //3. Cropping the video means cropping every video track; to get a video track you need all the video material on it
458 |     AVMutableVideoCompositionLayerInstruction *videoCompositionLayerIns = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoAssetTrack];
459 |     //    [videoCompositionLayerIns setTransform:videoAssetTrack.preferredTransform atTime:kCMTimeZero];
460 | #pragma mark =Image rotation=
461 |     /**
462 |      StartTransform: the rotation to start from — 0 degrees here
463 |      EndTransform: the final rotation — 45 degrees clockwise here,
464 |      timeRange: when and for how long the rotation runs. In CMTimeRangeMake(CMTimeMake(4*30, 30), CMTimeMake(1*30, 30)), the first argument CMTimeMake(4*30, 30) means the rotation starts at second 4 of the video, and the second argument CMTimeMake(1*30, 30) is how long the animated rotation takes —
465 |      1 second here, so it completes within that second. With CMTimeMake(2*30, 30) instead, the image would finish rotating in 2 seconds.
466 |      Note: the rotation anchor point is the top-left corner (see the rotate-about-center sketch after this method).
467 |      */
468 |     [videoCompositionLayerIns setTransformRampFromStartTransform:CGAffineTransformIdentity toEndTransform:CGAffineTransformMakeRotation(M_PI_4) timeRange:CMTimeRangeMake(CMTimeMake(4*30, 30), CMTimeMake(1*30, 30))];
469 |     /**
470 |      Sets the opacity, between 0 and 1.0 — the smaller the value, the darker the picture,
471 |      **/
472 |     //    [videoCompositionLayerIns setOpacity:0.1 atTime:kCMTimeZero];
473 |     /**
474 |      Crops the picture to the given CGRect — this crops the image only, not the video's length in time.
475 |      Note: this is not the same as AVMutableVideoComposition's renderSize: renderSize crops the size of the whole video, whereas here
476 |      everything outside the CGRect is rendered black.
477 |      **/
478 |     //    [videoCompositionLayerIns setCropRectangle:CGRectMake(0, 0, 100, 100) atTime:kCMTimeZero];
479 | 
480 | 
481 |     //Get the video material (there is only one video in this example)
482 |     AVMutableVideoCompositionInstruction *videoCompositionIns = [AVMutableVideoCompositionInstruction videoCompositionInstruction]; [videoCompositionIns setTimeRange:CMTimeRangeMake(kCMTimeZero, videoAssetTrack.timeRange.duration)];
483 |     videoCompositionIns.layerInstructions = @[videoCompositionLayerIns];
484 |     //Get the video track (there is only one track in this example)
485 |     AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoComposition];
486 |     videoComposition.instructions = @[videoCompositionIns];
487 |     videoComposition.renderSize = CGSizeMake(960, 544);
488 |     //Crop to the corresponding size
489 |     videoComposition.frameDuration = CMTimeMake(1, 30);
490 | 
491 | 
492 |     NSString *outputFilePath = [kPathDocument1 stringByAppendingString:@"/2018_2.mp4"];
493 |     unlink([outputFilePath UTF8String]);
494 |     //4. Export
495 |     AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:composition presetName:AVAssetExportPresetMediumQuality];
496 |     exporter.videoComposition = videoComposition;
497 |     exporter.outputURL = [NSURL fileURLWithPath:outputFilePath isDirectory:NO];
498 |     exporter.outputFileType = AVFileTypeMPEG4;
499 |     exporter.shouldOptimizeForNetworkUse = YES;
500 |     [exporter exportAsynchronouslyWithCompletionHandler:^{
501 |         if (exporter.error) {
502 |             //...
503 |             NSLog(@"error: %@",exporter.error);
504 |         }else{
505 |             //...
506 |             NSLog(@"success");
507 |         }
508 |     }];
509 | 
510 | }
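/* ---- Editor's sketch (not part of the original sample) ----
 Because the anchor is the top-left corner (noted above), a plain rotation swings
 the whole frame around that corner. To rotate about the center instead, sandwich
 the rotation between two translations — plain CGAffineTransform math; the
 function name and videoSize parameter are hypothetical. */
static CGAffineTransform RotationAboutCenter(CGSize videoSize, CGFloat angle)
{
    CGAffineTransform t = CGAffineTransformMakeTranslation(videoSize.width / 2.0, videoSize.height / 2.0);
    t = CGAffineTransformRotate(t, angle);                                              // rotate around the moved origin
    t = CGAffineTransformTranslate(t, -videoSize.width / 2.0, -videoSize.height / 2.0); // move the origin back
    return t;
}
// e.g. toEndTransform:RotationAboutCenter(CGSizeMake(960, 544), M_PI_4)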
511 | 
512 | #pragma mark =2x fast playback=
513 | -(void)actionForButton
514 | {
515 | 
516 |     //    NSString *filePath = [[NSBundle mainBundle] pathForResource:@"2018" ofType:@"mp4"];
517 |     //
518 |     //    NSData *data = [NSData dataWithContentsOfFile:filePath];
519 |     NSString *fp = [Cache_PATH_IN_DOMAINS stringByAppendingString:@"/2018.mp4"];
520 |     //    [data writeToFile:fp atomically:YES];
521 |     NSString *filePath = fp;
522 | 
523 |     //1. Drag the source material into the "library"
524 |     AVAsset *asset = [AVAsset assetWithURL:[NSURL fileURLWithPath:filePath]];
525 |     //The material's video track
526 |     AVAssetTrack *videoAssetTrack = [[asset tracksWithMediaType:AVMediaTypeVideo]objectAtIndex:0];
527 |     //The material's audio track
528 |     AVAssetTrack *audioAssertTrack = [[asset tracksWithMediaType:AVMediaTypeAudio]objectAtIndex:0];
529 | 
530 |     //2. Insert the material's video into the video track and its audio into the audio track
531 |     //This is the "project file"
532 |     AVMutableComposition *composition = [AVMutableComposition composition];
533 |     //Video track
534 |     AVMutableCompositionTrack *videoCompositionTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
535 |     //Insert one time range of video into the video track
536 |     [videoCompositionTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, asset.duration) ofTrack:videoAssetTrack atTime:kCMTimeZero error:nil];
537 | #pragma mark Change the playback speed here
538 |     [videoCompositionTrack scaleTimeRange:CMTimeRangeMake(kCMTimeZero, asset.duration) toDuration:CMTimeMake(asset.duration.value/2, asset.duration.timescale)]; // halving the duration doubles the speed (a slow-motion variant follows this method)
539 |     //Audio track
540 |     AVMutableCompositionTrack *audioCompositionTrack = [composition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
541 |     //Insert the audio data too, otherwise there is no sound
542 |     [audioCompositionTrack insertTimeRange: CMTimeRangeMake(kCMTimeZero, asset.duration) ofTrack:audioAssertTrack atTime:kCMTimeZero error:nil];
543 | #pragma mark Change the playback speed here
544 |     [audioCompositionTrack scaleTimeRange:CMTimeRangeMake(kCMTimeZero, asset.duration) toDuration:CMTimeMake(asset.duration.value/2, asset.duration.timescale)];
545 | 
546 |     //3. Cropping the video means cropping every video track; to get a video track you need all the video material on it
547 |     AVMutableVideoCompositionLayerInstruction *videoCompositionLayerIns = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoAssetTrack];
548 |     [videoCompositionLayerIns setTransform:videoAssetTrack.preferredTransform atTime:kCMTimeZero];
549 |     //Get the video material (there is only one video in this example)
550 |     AVMutableVideoCompositionInstruction *videoCompositionIns = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
551 |     [videoCompositionIns setTimeRange:CMTimeRangeMake(kCMTimeZero, videoAssetTrack.timeRange.duration)];
552 |     videoCompositionIns.layerInstructions = @[videoCompositionLayerIns];
553 |     //Get the video track (there is only one track in this example)
554 |     AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoComposition];
555 |     videoComposition.instructions = @[videoCompositionIns];
556 |     videoComposition.renderSize = CGSizeMake(960, 544);
557 |     //Crop to the corresponding size
558 |     videoComposition.frameDuration = CMTimeMake(1, 30);
559 | 
560 |     NSString *outputFilePath = [kPathDocument1 stringByAppendingString:@"/2018_1.mp4"];
561 |     unlink([outputFilePath UTF8String]);
562 |     //4. Export
563 |     AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:composition presetName:AVAssetExportPresetMediumQuality];
564 |     exporter.videoComposition = videoComposition;
565 |     exporter.outputURL = [NSURL fileURLWithPath:outputFilePath isDirectory:NO];
566 |     exporter.outputFileType = AVFileTypeMPEG4;
567 |     exporter.shouldOptimizeForNetworkUse = YES;
568 |     [exporter exportAsynchronouslyWithCompletionHandler:^{
569 |         if (exporter.error) {
570 |             //...
571 |             NSLog(@"error: %@",exporter.error);
572 |         }else{
573 |             //...
574 |             NSLog(@"success");
575 |         }
576 |     }];
577 | }
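/* ---- Editor's sketch (not part of the original sample) ----
 scaleTimeRange:toDuration: maps the given range onto a new duration, so the very
 same call produces slow motion when the target duration is longer than the source
 (the audio is stretched as well). Hypothetical values: */
// half speed — the clip now takes twice as long
// [videoCompositionTrack scaleTimeRange:CMTimeRangeMake(kCMTimeZero, asset.duration)
//                            toDuration:CMTimeMake(asset.duration.value * 2, asset.duration.timescale)];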
578 | 
579 | 
580 | 
581 | 
582 | 
583 | 
584 | 
585 | - (void)didReceiveMemoryWarning {
586 |     [super didReceiveMemoryWarning];
587 |     // Dispose of any resources that can be recreated.
588 | }
589 | 
590 | 
591 | @end
592 | 
--------------------------------------------------------------------------------