├── README.md ├── VideoService.h └── VideoService.m /README.md: -------------------------------------------------------------------------------- 1 | # VideoService 2 | 集成视频播放过程中一些常用的功能,例如视频合成、视频添加水印、获取视频时间长度、获取视频大小、获取视频缩略图、获取视频某一帧图片 3 | -------------------------------------------------------------------------------- /VideoService.h: -------------------------------------------------------------------------------- 1 | // 2 | // VideoService.h 3 | // YouJia 4 | // 5 | // Created by aa on 14-4-18. 6 | // 7 | // 8 | 9 | #import 10 | #import 11 | 12 | @interface VideoService : NSObject 13 | 14 | /*用mp4编码格式进行编码 15 | *@param url 原视频存放url 16 | *@param encodeUrl 编码后的视频存放路径 17 | *@return 18 | */ 19 | + (void)encodeMP4WithVideoUrl:(NSURL *)url 20 | outputVideoUrl:(NSString *)encodeUrl 21 | blockHandler:(void (^)(AVAssetExportSession*))handler; 22 | 23 | /*视频压缩并转码成mp4 24 | *@param inputURL 需要压缩的视频地址 25 | *@param outputURL 压缩后的视频存放地址 26 | *@param handler block模块 27 | */ 28 | + (void) lowQuailtyWithInputURL:(NSURL*)inputURL 29 | outputURL:(NSURL*)outputURL 30 | blockHandler:(void (^)(AVAssetExportSession*))handler; 31 | 32 | /*视频合成 33 | *@param firstUrl 第一段视频的url 34 | *@param secondUrl 第二段视频的url 35 | *@param outputUrl 合成后视频存放的url 36 | *@param size 视频size 37 | * 38 | */ 39 | + (void)mergeVideoFromFristVideoUrl:(NSURL *)firstUrl 40 | secondVideoUrl:(NSURL *)secondUrl 41 | withOutputVideoUrl:(NSURL *)outputUrl 42 | andVideoSize:(CGSize)size 43 | blockHandler:(void (^)(AVAssetExportSession*))handler; 44 | 45 | /*视频添加水印 46 | *@param videoUrl 视频的url 47 | *@param img 水印图片 48 | *@param outputPath 处理后视频存放的路径 49 | *@param size 视频size 50 | *@param imgRect 水印图片在视频中的位置 51 | * 52 | */ 53 | + (void) loadVideoByUrl:(NSURL *)videoUrl 54 | andOutputUrl:(NSString *)outputPath 55 | andImage:(UIImage *)img 56 | andVideoSize:(CGSize)size 57 | andImgRect:(CGRect)imgRect 58 | blockHandler:(void (^)(AVAssetExportSession*))handler; 59 | 60 | 61 | #pragma mark------------------------------- 62 | 63 | 
/** Returns the duration of the video at the given URL, in seconds.
 *  @param URL file URL of the video
 *  @return duration as a float value
 */
+ (CGFloat) getVideoDuration:(NSURL*) URL;

/** Returns the size of the video file at the given path.
 *  @param path file-system path of the video
 *  @return size in kilobytes (0 when the file does not exist)
 */
+ (NSInteger) getFileSize:(NSString*) path;

/** Returns a thumbnail image taken from the first frame of the video.
 *  @param videoURL file URL of the video
 *  @return thumbnail image, or nil if a frame could not be generated
 */
+(UIImage *)getImage:(NSURL *)videoURL;

/// Returns the frame of the video at the given time (in seconds), or nil on failure.
+ (UIImage*) thumbnailImageForVideo:(NSURL *)videoURL atTime:(NSTimeInterval)time;

@end
--------------------------------------------------------------------------------
/VideoService.m:
--------------------------------------------------------------------------------
//
//  VideoService.m
//  YouJia
//
//  Created by aa on 14-4-18.
//
//

#import "VideoService.h"
// NOTE(review): the original #import targets were lost when this file was
// extracted; AVFoundation/QuartzCore match the APIs used below (AVAsset,
// export sessions, CALayer) — confirm against the original project.
#import <AVFoundation/AVFoundation.h>
#import <QuartzCore/QuartzCore.h>

@implementation VideoService

#pragma mark - Video file utilities

/* Get the duration of a video.
 * @param URL the url where the video is stored
 * @return duration in seconds as a float value
 */
+ (CGFloat) getVideoDuration:(NSURL*) URL
{
    // Imprecise timing is fine here and avoids loading the whole file.
    NSDictionary *opts = [NSDictionary dictionaryWithObject:[NSNumber numberWithBool:NO]
                                                     forKey:AVURLAssetPreferPreciseDurationAndTimingKey];
    AVURLAsset *urlAsset = [AVURLAsset URLAssetWithURL:URL options:opts];
    // BUG FIX: duration.value / duration.timescale is int64/int32 integer
    // division, which silently truncated the fractional seconds.
    // CMTimeGetSeconds performs the conversion in floating point.
    return (CGFloat)CMTimeGetSeconds(urlAsset.duration);
}

/* Get the size of a video file.
 * @param path the file-system path of the video
 * @return size in kilobytes, or 0 when the file does not exist
 */
+ (NSInteger) getFileSize:(NSString*) path
{
    NSFileManager *filemanager = [[[NSFileManager alloc] init] autorelease];
    if ([filemanager fileExistsAtPath:path]) {
        NSDictionary *attributes = [filemanager attributesOfItemAtPath:path error:nil];
        NSNumber *theFileSize = [attributes objectForKey:NSFileSize];
        if (theFileSize) {
            // BUG FIX: -intValue overflows for files >= 2 GB; read the byte
            // count as long long before converting to kilobytes.
            return (NSInteger)([theFileSize longLongValue] / 1024); // size in KB
        }
    }
    return 0;
}

/* Get a thumbnail of the first frame of a video.
 * @param videoURL the url where the video is stored
 * @return thumbnail image, or nil if frame generation failed
 */
+(UIImage *)getImage:(NSURL *)videoURL
{
    AVURLAsset *asset = [[AVURLAsset alloc] initWithURL:videoURL options:nil];
    AVAssetImageGenerator *gen = [[AVAssetImageGenerator alloc] initWithAsset:asset];
    gen.appliesPreferredTrackTransform = YES; // honour the track's rotation
    CMTime time = CMTimeMakeWithSeconds(0.0, 600);
    NSError *error = nil;
    CMTime actualTime;
    CGImageRef image = [gen copyCGImageAtTime:time actualTime:&actualTime error:&error];
    // BUG FIX: the original wrapped a possibly-NULL CGImageRef in a UIImage
    // without checking; return nil and log the error instead.
    UIImage *thumb = nil;
    if (image) {
        thumb = [[[UIImage alloc] initWithCGImage:image] autorelease];
        CGImageRelease(image);
    } else {
        NSLog(@"getImage error %@", error);
    }
    [asset release];
    [gen release];
    return thumb;
}

/* Get the frame of the video at the given time.
 * @param videoURL the url where the video is stored
 * @param time     position in the video, in seconds
 * @return the frame image, or nil if generation failed
 */
+ (UIImage*) thumbnailImageForVideo:(NSURL *)videoURL atTime:(NSTimeInterval)time {

    AVURLAsset *asset = [[[AVURLAsset alloc] initWithURL:videoURL options:nil] autorelease];
    NSParameterAssert(asset);

    AVAssetImageGenerator *assetImageGenerator = [[[AVAssetImageGenerator alloc] initWithAsset:asset] autorelease];
    assetImageGenerator.appliesPreferredTrackTransform = YES;
    assetImageGenerator.apertureMode = AVAssetImageGeneratorApertureModeEncodedPixels;

    NSError *thumbnailImageGenerationError = nil;
    // BUG FIX: CMTimeMake(time, 60) truncated the NSTimeInterval to a whole
    // int64 before scaling, discarding the fractional seconds.
    // CMTimeMakeWithSeconds keeps the sub-second precision.
    CGImageRef thumbnailImageRef =
        [assetImageGenerator copyCGImageAtTime:CMTimeMakeWithSeconds(time, 60)
                                    actualTime:NULL
                                         error:&thumbnailImageGenerationError];

    if (!thumbnailImageRef) {
        NSLog(@"thumbnailImageGenerationError %@", thumbnailImageGenerationError);
        return nil;
    }
    UIImage *thumbImage = [[[UIImage alloc] initWithCGImage:thumbnailImageRef] autorelease];
    CGImageRelease(thumbnailImageRef);
    return thumbImage;
}


#pragma mark - Export / transcode

/* Encode a video into MP4 format.
 * @param url       url of the source video
 * @param encodeUrl path where the encoded video is written
 * @param handler   called with the export session when the export finishes
 */
+
(void)encodeMP4WithVideoUrl:(NSURL *)url
             outputVideoUrl:(NSString *)encodeUrl
               blockHandler:(void (^)(AVAssetExportSession*))handler
{
    AVURLAsset *avAsset = [AVURLAsset URLAssetWithURL:url options:nil];
    AVAssetExportSession *exportSession =
        [[AVAssetExportSession alloc] initWithAsset:avAsset
                                         presetName:AVAssetExportPresetMediumQuality];

    exportSession.outputURL = [NSURL fileURLWithPath:encodeUrl];
    exportSession.shouldOptimizeForNetworkUse = NO;
    exportSession.outputFileType = XYPreferredOutputFileType(exportSession);
    [exportSession exportAsynchronouslyWithCompletionHandler:^{
        if (handler) handler(exportSession); // BUG FIX: invoking a NULL block crashes

        // BUG FIX: the source was deleted unconditionally; a failed export
        // would lose the original video. Only clean up after success.
        if (exportSession.status == AVAssetExportSessionStatusCompleted) {
            NSFileManager *fm = [NSFileManager defaultManager];
            BOOL isDeleteOk = [fm removeItemAtURL:url error:nil];
            NSLog(@"删除缓存 %d", isDeleteOk);
        }
        [exportSession release];
    }];
}

// Returns AVFileTypeMPEG4 when the session supports it, otherwise falls back
// to QuickTime (.mov). Extracted because this check was duplicated verbatim
// in every export method.
static NSString *XYPreferredOutputFileType(AVAssetExportSession *session)
{
    for (NSString *type in session.supportedFileTypes) {
        if ([type isEqualToString:AVFileTypeMPEG4]) {
            return AVFileTypeMPEG4;
        }
    }
    return AVFileTypeQuickTimeMovie;
}


/* Compress a video and transcode it to MP4.
 * @param inputURL  url of the video to compress
 * @param outputURL url where the compressed video is written
 * @param handler   called with the export session when the export finishes
 */
+ (void) lowQuailtyWithInputURL:(NSURL*)inputURL
                      outputURL:(NSURL*)outputURL
                   blockHandler:(void (^)(AVAssetExportSession*))handler
{
    AVURLAsset *avAsset = [AVURLAsset URLAssetWithURL:inputURL options:nil];
    AVAssetExportSession *exportSession =
        [[AVAssetExportSession alloc] initWithAsset:avAsset
                                         presetName:AVAssetExportPresetMediumQuality];

    exportSession.outputURL = outputURL;
    exportSession.shouldOptimizeForNetworkUse = NO;
    exportSession.outputFileType = XYPreferredOutputFileType(exportSession);
    [exportSession exportAsynchronouslyWithCompletionHandler:^{
        if (handler) handler(exportSession); // BUG FIX: NULL-block guard
        [exportSession release];
    }];
}

/* Merge two videos into one; the second clip plays after the first ends.
 * @param firstUrl  url of the first video
 * @param secondUrl url of the second video
 * @param outputUrl url where the merged video is written
 * @param size      render size of the output video
 * @param handler   called with the export session when the export finishes
 */
+ (void)mergeVideoFromFristVideoUrl:(NSURL *)firstUrl
                     secondVideoUrl:(NSURL *)secondUrl
                 withOutputVideoUrl:(NSURL *)outputUrl
                       andVideoSize:(CGSize)size
                       blockHandler:(void (^)(AVAssetExportSession*))handler
{
    AVAsset *firstAsset = [AVAsset assetWithURL:firstUrl];
    AVAsset *secondAsset = [AVAsset assetWithURL:secondUrl];
    AVMutableComposition *mixComposition = [AVMutableComposition composition];

    // First clip: inserted at time zero.
    AVMutableCompositionTrack *firstTrack =
        [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo
                                    preferredTrackID:kCMPersistentTrackID_Invalid];
    AVAssetTrack *firstAssetTrack = [[firstAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
    [firstTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, firstAsset.duration)
                        ofTrack:firstAssetTrack
                         atTime:kCMTimeZero
                          error:nil];

    // Second clip: inserted right after the first clip ends.
    AVMutableCompositionTrack *secondTrack =
        [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo
                                    preferredTrackID:kCMPersistentTrackID_Invalid];
    AVAssetTrack *secondAssetTrack = [[secondAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
    [secondTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, secondAsset.duration)
                         ofTrack:secondAssetTrack
                          atTime:firstAsset.duration
                           error:nil];

    AVMutableVideoCompositionInstruction *mainInstruction =
        [AVMutableVideoCompositionInstruction videoCompositionInstruction];
    mainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero,
                                                CMTimeAdd(firstAsset.duration, secondAsset.duration));

    AVMutableVideoCompositionLayerInstruction *firstLayerInstruction =
        [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:firstTrack];
    // Hide the first clip once it ends so it does not cover the second clip.
    [firstLayerInstruction setOpacity:0.0 atTime:firstAsset.duration];

    AVMutableVideoCompositionLayerInstruction *secondLayerInstruction =
        [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:secondTrack];

    mainInstruction.layerInstructions =
        [NSArray arrayWithObjects:firstLayerInstruction, secondLayerInstruction, nil];

    AVMutableVideoComposition *mainComposition = [AVMutableVideoComposition videoComposition];
    mainComposition.instructions = [NSArray arrayWithObject:mainInstruction];
    mainComposition.frameDuration = CMTimeMake(1, 30); // 30 fps
    mainComposition.renderSize = size;

    AVAssetExportSession *exportSession =
        [[AVAssetExportSession alloc] initWithAsset:mixComposition
                                         presetName:AVAssetExportPresetMediumQuality];

    // Remove any previous file at the destination; the export fails otherwise.
    NSFileManager *fm = [NSFileManager defaultManager];
    if ([fm fileExistsAtPath:outputUrl.path]) {
        [fm removeItemAtURL:outputUrl error:nil];
    }

    exportSession.outputURL = outputUrl;
    exportSession.shouldOptimizeForNetworkUse = NO;
    exportSession.outputFileType = AVFileTypeQuickTimeMovie;
    exportSession.videoComposition = mainComposition;
    [exportSession exportAsynchronouslyWithCompletionHandler:^{
        if (handler) handler(exportSession); // BUG FIX: NULL-block guard

        // BUG FIX: only delete the source clips once the merge succeeded;
        // the original deleted them even when the export failed.
        if (exportSession.status == AVAssetExportSessionStatusCompleted) {
            NSFileManager *cleanupFm = [NSFileManager defaultManager];
            [cleanupFm removeItemAtURL:firstUrl error:nil];
            [cleanupFm removeItemAtURL:secondUrl error:nil];
        }
        [exportSession release];
    }];
}


/* Add a watermark image to a video.
 * @param videoUrl   url of the video
 * @param outputPath path where the processed video is written
 * @param img        watermark image (may be nil for no watermark)
 * @param size       video size
 * @param imgRect    position of the watermark within the video
 * @param handler    called with the export session when the export finishes
 */
+ (void) loadVideoByUrl:(NSURL *)videoUrl
           andOutputUrl:(NSString *)outputPath
               andImage:(UIImage *)img
           andVideoSize:(CGSize)size
andImgRect:(CGRect)imgRect 291 | blockHandler:(void (^)(AVAssetExportSession*))handler 292 | { 293 | AVAsset *avAsset = [AVAsset assetWithURL:videoUrl]; 294 | CMTime assetTime = [avAsset duration]; 295 | Float64 duration = CMTimeGetSeconds(assetTime); 296 | NSLog(@"视频时长 %f\n",duration); 297 | 298 | AVMutableComposition *avMutableComposition = [AVMutableComposition composition]; 299 | 300 | AVMutableCompositionTrack *avMutableCompositionTrack =[avMutableComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid]; 301 | 302 | AVAssetTrack *avAssetTrack = [[avAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]; 303 | CGSize videoSize=CGSizeMake([avAssetTrack naturalSize].height, [avAssetTrack naturalSize].width); 304 | NSLog(@"videoWidth=%f videoHeight=%f",videoSize.width,videoSize.height); 305 | NSError *error = nil; 306 | // 这块是裁剪,rangtime .前面的是开始时间,后面是裁剪多长 307 | [avMutableCompositionTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, CMTimeMakeWithSeconds(duration, 30)) 308 | ofTrack:avAssetTrack 309 | atTime:kCMTimeZero 310 | error:&error]; 311 | //音频 312 | AVMutableCompositionTrack *audioCompositionTrack =[avMutableComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid]; 313 | NSArray *audioSetList=[avAsset tracksWithMediaType:AVMediaTypeAudio]; 314 | if ([audioSetList count]>0) { //避免用户拍摄没有声音的视频 315 | AVAssetTrack *audioAssetTrack = [[avAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0]; 316 | [audioCompositionTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, CMTimeMakeWithSeconds(duration, 30)) 317 | ofTrack:audioAssetTrack 318 | atTime:kCMTimeZero 319 | error:&error]; 320 | } 321 | 322 | AVMutableVideoComposition *avMutableVideoComposition = [AVMutableVideoComposition videoComposition]; 323 | avMutableVideoComposition.renderSize = videoSize; 324 | avMutableVideoComposition.frameDuration = CMTimeMake(1, 30); 325 | 326 | 327 | CALayer *parentLayer = 
[CALayer layer]; 328 | CALayer *videoLayer = [CALayer layer]; 329 | parentLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height); 330 | videoLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height); 331 | [parentLayer addSublayer:videoLayer]; 332 | if(img) 333 | { 334 | CALayer *waterMarkLayer = [CALayer layer]; 335 | waterMarkLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height); 336 | waterMarkLayer.contents = (id)img.CGImage; 337 | [parentLayer addSublayer:waterMarkLayer]; 338 | } 339 | 340 | avMutableVideoComposition.animationTool = [AVVideoCompositionCoreAnimationTool videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer inLayer:parentLayer]; 341 | 342 | AVMutableVideoCompositionInstruction *avMutableVideoCompositionInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction]; 343 | 344 | [avMutableVideoCompositionInstruction setTimeRange:CMTimeRangeMake(kCMTimeZero, [avMutableComposition duration])]; 345 | 346 | AVMutableVideoCompositionLayerInstruction *avMutableVideoCompositionLayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:avAssetTrack]; 347 | [avMutableVideoCompositionLayerInstruction setTransform:avAssetTrack.preferredTransform atTime:kCMTimeZero]; 348 | 349 | avMutableVideoCompositionInstruction.layerInstructions = [NSArray arrayWithObject:avMutableVideoCompositionLayerInstruction]; 350 | 351 | 352 | avMutableVideoComposition.instructions = [NSArray arrayWithObject:avMutableVideoCompositionInstruction]; 353 | 354 | 355 | NSFileManager *fm = [NSFileManager defaultManager] ; 356 | if ([fm fileExistsAtPath:outputPath]) { 357 | NSLog(@"video is have. 
then delete that"); 358 | if ([fm removeItemAtPath:outputPath error:&error]) { 359 | NSLog(@"delete is ok"); 360 | }else { 361 | NSLog(@"delete is no error = %@",error.description); 362 | } 363 | } 364 | 365 | AVAssetExportSession *avAssetExportSession = [[AVAssetExportSession alloc] initWithAsset:avMutableComposition presetName:AVAssetExportPresetMediumQuality]; 366 | [avAssetExportSession setVideoComposition:avMutableVideoComposition]; 367 | [avAssetExportSession setOutputURL:[NSURL fileURLWithPath:outputPath]]; 368 | BOOL isMp4=NO; 369 | for (NSString *str in avAssetExportSession.supportedFileTypes) 370 | { 371 | NSLog(@"output file type=%@",str); 372 | if ([str isEqualToString:AVFileTypeMPEG4]) { 373 | isMp4=YES; 374 | break; 375 | } 376 | } 377 | if (isMp4) { 378 | avAssetExportSession.outputFileType = AVFileTypeMPEG4; 379 | } 380 | else{ 381 | avAssetExportSession.outputFileType = AVFileTypeQuickTimeMovie; 382 | } 383 | [avAssetExportSession setShouldOptimizeForNetworkUse:NO]; 384 | [avAssetExportSession exportAsynchronouslyWithCompletionHandler:^(void){ 385 | handler(avAssetExportSession); //通过block的方式给其他页面处理 386 | 387 | [avAssetExportSession release]; 388 | 389 | NSFileManager *fm = [NSFileManager defaultManager]; 390 | BOOL isDeleteOk = [fm removeItemAtURL:videoUrl error:nil]; 391 | NSLog(@"删除缓存 %d",isDeleteOk); 392 | }]; 393 | } 394 | 395 | @end 396 | --------------------------------------------------------------------------------