├── README.md
└── WaterMarkDelete
    ├── WaterMarkDelete
    │   ├── Assets.xcassets
    │   │   ├── Contents.json
    │   │   └── AppIcon.appiconset
    │   │       └── Contents.json
    │   ├── ClipView.h
    │   ├── AppDelegate.h
    │   ├── FramesShowViewController.h
    │   ├── main.m
    │   ├── VideoViewController.h
    │   ├── RootViewController.h
    │   ├── PictureViewController.h
    │   ├── ClipView.m
    │   ├── WaterMarkDelete.pch
    │   ├── UIImage+OpenCV.h
    │   ├── Base.lproj
    │   │   ├── Main.storyboard
    │   │   └── LaunchScreen.storyboard
    │   ├── Info.plist
    │   ├── FramesShowViewController.m
    │   ├── AppDelegate.m
    │   ├── HJImagesToVideo
    │   │   ├── HJImagesToVideo.h
    │   │   └── HJImagesToVideo.m
    │   ├── RootViewController.m
    │   ├── PictureViewController.mm
    │   ├── UIImage+OpenCV.mm
    │   └── VideoViewController.mm
    ├── WaterMarkDelete.xcodeproj
    │   ├── project.xcworkspace
    │   │   ├── contents.xcworkspacedata
    │   │   ├── xcuserdata
    │   │   │   └── WangYiming.xcuserdatad
    │   │   │       └── UserInterfaceState.xcuserstate
    │   │   └── xcshareddata
    │   │       └── IDEWorkspaceChecks.plist
    │   ├── xcuserdata
    │   │   └── WangYiming.xcuserdatad
    │   │       └── xcschemes
    │   │           └── xcschememanagement.plist
    │   └── project.pbxproj
    ├── WaterMarkDeleteTests
    │   ├── Info.plist
    │   └── WaterMarkDeleteTests.m
    └── WaterMarkDeleteUITests
        ├── Info.plist
        └── WaterMarkDeleteUITests.m
/README.md:
--------------------------------------------------------------------------------
1 | # WaterMarkDelete
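2 | 
3 | An iOS demo that removes watermarks from photos and videos. The user drags a rectangle over the watermark in `PictureViewController`; the `UIImage (OpenCV)` category builds a mask from that rectangle and repairs the region with OpenCV's `cv::inpaint` (Telea algorithm). `FramesShowViewController` extracts preview frames from a picked video, and the bundled `HJImagesToVideo` class stitches images back into an MP4.
4 | 
5 | A minimal usage sketch of the category (`photo` is any `UIImage`; assumes OpenCV is linked and the calling file is compiled as Objective-C++):
6 | 
7 | ```objc
8 | #import "UIImage+OpenCV.h"
9 | 
10 | CGRect watermarkRect = CGRectMake(40, 40, 200, 80); // watermark region, in image pixel coordinates
11 | UIImage *cleaned = [photo WaterMarkDelete:watermarkRect];
12 | ```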
--------------------------------------------------------------------------------
/WaterMarkDelete/WaterMarkDelete/Assets.xcassets/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "info" : {
3 | "version" : 1,
4 | "author" : "xcode"
5 | }
6 | }
--------------------------------------------------------------------------------
/WaterMarkDelete/WaterMarkDelete.xcodeproj/project.xcworkspace/contents.xcworkspacedata:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <Workspace
3 |    version = "1.0">
4 |    <FileRef
5 |       location = "self:">
6 |    </FileRef>
7 | </Workspace>
--------------------------------------------------------------------------------
/WaterMarkDelete/WaterMarkDelete/ClipView.h:
--------------------------------------------------------------------------------
1 | //
2 | // ClipView.h
3 | // WaterMarkDelete
4 | //
5 | // Created by WangYiming on 2018/4/6.
6 | // Copyright © 2018年 WangYiming. All rights reserved.
7 | //
8 |
9 | #import <UIKit/UIKit.h>
10 |
11 | @interface ClipView : UIView
12 |
13 | @end
14 |
--------------------------------------------------------------------------------
/WaterMarkDelete/WaterMarkDelete.xcodeproj/project.xcworkspace/xcuserdata/WangYiming.xcuserdatad/UserInterfaceState.xcuserstate:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ahgdwang/WaterMarkDelete/HEAD/WaterMarkDelete/WaterMarkDelete.xcodeproj/project.xcworkspace/xcuserdata/WangYiming.xcuserdatad/UserInterfaceState.xcuserstate
--------------------------------------------------------------------------------
/WaterMarkDelete/WaterMarkDelete.xcodeproj/project.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
3 | <plist version="1.0">
4 | <dict>
5 | 	<key>IDEDidComputeMac32BitWarning</key>
6 | 	<true/>
7 | </dict>
8 | </plist>
--------------------------------------------------------------------------------
/WaterMarkDelete/WaterMarkDelete/AppDelegate.h:
--------------------------------------------------------------------------------
1 | //
2 | // AppDelegate.h
3 | // WaterMarkDelete
4 | //
5 | // Created by WangYiming on 2018/4/6.
6 | // Copyright © 2018年 WangYiming. All rights reserved.
7 | //
8 |
9 | #import <UIKit/UIKit.h>
10 |
11 | @interface AppDelegate : UIResponder <UIApplicationDelegate>
12 |
13 | @property (strong, nonatomic) UIWindow *window;
14 |
15 |
16 | @end
17 |
18 |
--------------------------------------------------------------------------------
/WaterMarkDelete/WaterMarkDelete/FramesShowViewController.h:
--------------------------------------------------------------------------------
1 | //
2 | // FramesShowViewController.h
3 | // WaterMarkDelete
4 | //
5 | // Created by WangYiming on 2018/4/10.
6 | // Copyright © 2018年 WangYiming. All rights reserved.
7 | //
8 |
9 | #import <UIKit/UIKit.h>
10 | #import <AVFoundation/AVFoundation.h>
11 | @interface FramesShowViewController : UIViewController
12 | @property(strong,nonatomic) NSURL *videoUrl;
13 | @end
14 |
--------------------------------------------------------------------------------
/WaterMarkDelete/WaterMarkDelete/main.m:
--------------------------------------------------------------------------------
1 | //
2 | // main.m
3 | // WaterMarkDelete
4 | //
5 | // Created by WangYiming on 2018/4/6.
6 | // Copyright © 2018年 WangYiming. All rights reserved.
7 | //
8 |
9 | #import <UIKit/UIKit.h>
10 | #import "AppDelegate.h"
11 |
12 | int main(int argc, char * argv[]) {
13 | @autoreleasepool {
14 | return UIApplicationMain(argc, argv, nil, NSStringFromClass([AppDelegate class]));
15 | }
16 | }
17 |
--------------------------------------------------------------------------------
/WaterMarkDelete/WaterMarkDelete/VideoViewController.h:
--------------------------------------------------------------------------------
1 | //
2 | // VideoViewController.h
3 | // WaterMarkDelete
4 | //
5 | // Created by WangYiming on 2018/4/10.
6 | // Copyright © 2018年 WangYiming. All rights reserved.
7 | //
8 |
9 | #import <UIKit/UIKit.h>
10 | #import <AVFoundation/AVFoundation.h>
11 | @interface VideoViewController : UIViewController
12 | @property(strong,nonatomic) NSURL *videoUrl;
13 | @end
14 |
--------------------------------------------------------------------------------
/WaterMarkDelete/WaterMarkDelete.xcodeproj/xcuserdata/WangYiming.xcuserdatad/xcschemes/xcschememanagement.plist:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
3 | <plist version="1.0">
4 | <dict>
5 | 	<key>SchemeUserState</key>
6 | 	<dict>
7 | 		<key>WaterMarkDelete.xcscheme</key>
8 | 		<dict>
9 | 			<key>orderHint</key>
10 | 			<integer>6</integer>
11 | 		</dict>
12 | 	</dict>
13 | </dict>
14 | </plist>
--------------------------------------------------------------------------------
/WaterMarkDelete/WaterMarkDelete/RootViewController.h:
--------------------------------------------------------------------------------
1 | //
2 | // RootViewController.h
3 | // WaterMarkDelete
4 | //
5 | // Created by WangYiming on 2018/4/6.
6 | // Copyright © 2018年 WangYiming. All rights reserved.
7 | //
8 |
9 | #import <UIKit/UIKit.h>
10 | #import "MBProgressHUD.h"
11 | #import "PictureViewController.h"
12 | #import "VideoViewController.h"
13 | @interface RootViewController : UIViewController <UIImagePickerControllerDelegate, UINavigationControllerDelegate>
14 |
15 | @end
16 |
--------------------------------------------------------------------------------
/WaterMarkDelete/WaterMarkDelete/PictureViewController.h:
--------------------------------------------------------------------------------
1 | //
2 | // PictureViewController.h
3 | // WaterMarkDelete
4 | //
5 | // Created by WangYiming on 2018/4/6.
6 | // Copyright © 2018年 WangYiming. All rights reserved.
7 | //
8 | #import <UIKit/UIKit.h>
9 | #import <Foundation/Foundation.h>
10 | #import "ClipView.h"
11 | #import <CoreGraphics/CoreGraphics.h>
12 | @interface PictureViewController : UIViewController
13 | @property(strong,nonatomic) UIImage *image;
14 | @property(strong,nonatomic) UIImage *imageFinished;
15 | @end
16 |
--------------------------------------------------------------------------------
/WaterMarkDelete/WaterMarkDelete/ClipView.m:
--------------------------------------------------------------------------------
1 | //
2 | // ClipView.m
3 | // WaterMarkDelete
4 | //
5 | // Created by WangYiming on 2018/4/6.
6 | // Copyright © 2018年 WangYiming. All rights reserved.
7 | //
8 |
9 | #import "ClipView.h"
10 |
11 | @implementation ClipView
12 |
13 | /*
14 | // Only override drawRect: if you perform custom drawing.
15 | // An empty implementation adversely affects performance during animation.
16 | - (void)drawRect:(CGRect)rect {
17 | // Drawing code
18 | }
19 | */
20 | -(id)init{
21 | self = [super init];
22 | if (self) {
23 | self.alpha = 0.7;
24 | self.backgroundColor = [UIColor redColor];
25 | }
26 | return self;
27 | }
28 | @end
29 |
--------------------------------------------------------------------------------
/WaterMarkDelete/WaterMarkDelete/WaterMarkDelete.pch:
--------------------------------------------------------------------------------
1 | //
2 | // WaterMarkDelete.pch
3 | // WaterMarkDelete
4 | //
5 | // Created by WangYiming on 2018/4/8.
6 | // Copyright © 2018年 WangYiming. All rights reserved.
7 | //
8 |
9 | #ifndef WaterMarkDelete_pch
10 | #define WaterMarkDelete_pch
11 |
12 | // Include any system framework and library headers here that should be included in all compilation units.
13 | // You will also need to set the Prefix Header build setting of one or more of your targets to reference this file.
14 | #ifdef __cplusplus
15 | #import <opencv2/opencv.hpp>
16 | #endif
17 | #ifdef __OBJC__
18 | #import <UIKit/UIKit.h>
19 | #import <Foundation/Foundation.h>
20 | #endif
21 | #endif /* WaterMarkDelete_pch */
22 |
--------------------------------------------------------------------------------
/WaterMarkDelete/WaterMarkDelete/UIImage+OpenCV.h:
--------------------------------------------------------------------------------
1 | //
2 | // UIImage+OpenCV.h
3 | // WaterMarkDelete
4 | //
5 | // Created by WangYiming on 2018/4/8.
6 | // Copyright © 2018年 WangYiming. All rights reserved.
7 | //
8 |
9 |
10 | #import <opencv2/opencv.hpp>
11 | #import <opencv2/imgproc/imgproc_c.h>
12 | #import <opencv2/photo/photo.hpp>
13 | #import <opencv2/core/core_c.h>
14 | 
15 | #import <UIKit/UIKit.h>
16 | #import <Foundation/Foundation.h>
17 | @interface UIImage (OpenCV)
18 | -(UIImage *)UIImageFromCVMat:(cv::Mat)cvMat;
19 | -(id)WaterMarkDelete:(CGRect) rect;
20 | -(UIImage *)fixOrientation;
21 | @property(nonatomic, readonly) cv::Mat CVMat;
22 | @property(nonatomic, readonly) cv::Mat CVGrayscaleMat;
23 | @end
24 |
--------------------------------------------------------------------------------
/WaterMarkDelete/WaterMarkDeleteTests/Info.plist:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
3 | <plist version="1.0">
4 | <dict>
5 | 	<key>CFBundleDevelopmentRegion</key>
6 | 	<string>$(DEVELOPMENT_LANGUAGE)</string>
7 | 	<key>CFBundleExecutable</key>
8 | 	<string>$(EXECUTABLE_NAME)</string>
9 | 	<key>CFBundleIdentifier</key>
10 | 	<string>$(PRODUCT_BUNDLE_IDENTIFIER)</string>
11 | 	<key>CFBundleInfoDictionaryVersion</key>
12 | 	<string>6.0</string>
13 | 	<key>CFBundleName</key>
14 | 	<string>$(PRODUCT_NAME)</string>
15 | 	<key>CFBundlePackageType</key>
16 | 	<string>BNDL</string>
17 | 	<key>CFBundleShortVersionString</key>
18 | 	<string>1.0</string>
19 | 	<key>CFBundleVersion</key>
20 | 	<string>1</string>
21 | </dict>
22 | </plist>
23 |
--------------------------------------------------------------------------------
/WaterMarkDelete/WaterMarkDeleteUITests/Info.plist:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
3 | <plist version="1.0">
4 | <dict>
5 | 	<key>CFBundleDevelopmentRegion</key>
6 | 	<string>$(DEVELOPMENT_LANGUAGE)</string>
7 | 	<key>CFBundleExecutable</key>
8 | 	<string>$(EXECUTABLE_NAME)</string>
9 | 	<key>CFBundleIdentifier</key>
10 | 	<string>$(PRODUCT_BUNDLE_IDENTIFIER)</string>
11 | 	<key>CFBundleInfoDictionaryVersion</key>
12 | 	<string>6.0</string>
13 | 	<key>CFBundleName</key>
14 | 	<string>$(PRODUCT_NAME)</string>
15 | 	<key>CFBundlePackageType</key>
16 | 	<string>BNDL</string>
17 | 	<key>CFBundleShortVersionString</key>
18 | 	<string>1.0</string>
19 | 	<key>CFBundleVersion</key>
20 | 	<string>1</string>
21 | </dict>
22 | </plist>
23 |
--------------------------------------------------------------------------------
/WaterMarkDelete/WaterMarkDeleteTests/WaterMarkDeleteTests.m:
--------------------------------------------------------------------------------
1 | //
2 | // WaterMarkDeleteTests.m
3 | // WaterMarkDeleteTests
4 | //
5 | // Created by WangYiming on 2018/4/6.
6 | // Copyright © 2018年 WangYiming. All rights reserved.
7 | //
8 |
9 | #import <XCTest/XCTest.h>
10 |
11 | @interface WaterMarkDeleteTests : XCTestCase
12 |
13 | @end
14 |
15 | @implementation WaterMarkDeleteTests
16 |
17 | - (void)setUp {
18 | [super setUp];
19 | // Put setup code here. This method is called before the invocation of each test method in the class.
20 | }
21 |
22 | - (void)tearDown {
23 | // Put teardown code here. This method is called after the invocation of each test method in the class.
24 | [super tearDown];
25 | }
26 |
27 | - (void)testExample {
28 | // This is an example of a functional test case.
29 | // Use XCTAssert and related functions to verify your tests produce the correct results.
30 | }
31 |
32 | - (void)testPerformanceExample {
33 | // This is an example of a performance test case.
34 | [self measureBlock:^{
35 | // Put the code you want to measure the time of here.
36 | }];
37 | }
38 |
39 | @end
40 |
--------------------------------------------------------------------------------
/WaterMarkDelete/WaterMarkDeleteUITests/WaterMarkDeleteUITests.m:
--------------------------------------------------------------------------------
1 | //
2 | // WaterMarkDeleteUITests.m
3 | // WaterMarkDeleteUITests
4 | //
5 | // Created by WangYiming on 2018/4/6.
6 | // Copyright © 2018年 WangYiming. All rights reserved.
7 | //
8 |
9 | #import <XCTest/XCTest.h>
10 |
11 | @interface WaterMarkDeleteUITests : XCTestCase
12 |
13 | @end
14 |
15 | @implementation WaterMarkDeleteUITests
16 |
17 | - (void)setUp {
18 | [super setUp];
19 |
20 | // Put setup code here. This method is called before the invocation of each test method in the class.
21 |
22 | // In UI tests it is usually best to stop immediately when a failure occurs.
23 | self.continueAfterFailure = NO;
24 | // UI tests must launch the application that they test. Doing this in setup will make sure it happens for each test method.
25 | [[[XCUIApplication alloc] init] launch];
26 |
27 | // In UI tests it’s important to set the initial state - such as interface orientation - required for your tests before they run. The setUp method is a good place to do this.
28 | }
29 |
30 | - (void)tearDown {
31 | // Put teardown code here. This method is called after the invocation of each test method in the class.
32 | [super tearDown];
33 | }
34 |
35 | - (void)testExample {
36 | // Use recording to get started writing UI tests.
37 | // Use XCTAssert and related functions to verify your tests produce the correct results.
38 | }
39 |
40 | @end
41 |
--------------------------------------------------------------------------------
/WaterMarkDelete/WaterMarkDelete/Base.lproj/Main.storyboard:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/WaterMarkDelete/WaterMarkDelete/Base.lproj/LaunchScreen.storyboard:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/WaterMarkDelete/WaterMarkDelete/Info.plist:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
3 | <plist version="1.0">
4 | <dict>
5 | 	<key>CFBundleDevelopmentRegion</key>
6 | 	<string>$(DEVELOPMENT_LANGUAGE)</string>
7 | 	<key>CFBundleExecutable</key>
8 | 	<string>$(EXECUTABLE_NAME)</string>
9 | 	<key>CFBundleIdentifier</key>
10 | 	<string>$(PRODUCT_BUNDLE_IDENTIFIER)</string>
11 | 	<key>CFBundleInfoDictionaryVersion</key>
12 | 	<string>6.0</string>
13 | 	<key>CFBundleName</key>
14 | 	<string>$(PRODUCT_NAME)</string>
15 | 	<key>CFBundlePackageType</key>
16 | 	<string>APPL</string>
17 | 	<key>CFBundleShortVersionString</key>
18 | 	<string>1.0</string>
19 | 	<key>CFBundleVersion</key>
20 | 	<string>1</string>
21 | 	<key>LSRequiresIPhoneOS</key>
22 | 	<true/>
23 | 	<key>UILaunchStoryboardName</key>
24 | 	<string>LaunchScreen</string>
25 | 	<key>UIRequiredDeviceCapabilities</key>
26 | 	<array>
27 | 		<string>armv7</string>
28 | 	</array>
29 | 	<key>UISupportedInterfaceOrientations</key>
30 | 	<array>
31 | 		<string>UIInterfaceOrientationPortrait</string>
32 | 		<string>UIInterfaceOrientationLandscapeLeft</string>
33 | 		<string>UIInterfaceOrientationLandscapeRight</string>
34 | 	</array>
35 | 	<key>UISupportedInterfaceOrientations~ipad</key>
36 | 	<array>
37 | 		<string>UIInterfaceOrientationPortrait</string>
38 | 		<string>UIInterfaceOrientationPortraitUpsideDown</string>
39 | 		<string>UIInterfaceOrientationLandscapeLeft</string>
40 | 		<string>UIInterfaceOrientationLandscapeRight</string>
41 | 	</array>
42 | 	<key>NSPhotoLibraryAddUsageDescription</key>
43 | 	<string>APP需要访问你的相册</string>
44 | 	<key>NSCameraUsageDescription</key>
45 | 	<string>APP 需要访问你的相机</string>
46 | 	<key>NSPhotoLibraryUsageDescription</key>
47 | 	<string>此 App 需要您的同意才能读取媒体资料库</string>
48 | </dict>
49 | </plist>
--------------------------------------------------------------------------------
/WaterMarkDelete/WaterMarkDelete/FramesShowViewController.m:
--------------------------------------------------------------------------------
1 | //
2 | // FramesShowViewController.m
3 | // WaterMarkDelete
4 | //
5 | // Created by WangYiming on 2018/4/10.
6 | // Copyright © 2018年 WangYiming. All rights reserved.
7 | //
8 |
9 | #import "FramesShowViewController.h"
10 |
11 | @interface FramesShowViewController ()
12 | @property(strong,nonatomic) UIImageView *imageView;
13 | @end
14 |
15 | @implementation FramesShowViewController
16 |
17 | - (void)viewDidLoad {
18 | [super viewDidLoad];
19 | // Do any additional setup after loading the view.
20 | self.imageView = [[UIImageView alloc] initWithFrame:[[UIScreen mainScreen]bounds]];
21 | self.imageView.contentMode = UIViewContentModeScaleAspectFit;
22 | [self.view addSubview:self.imageView];
23 |
24 | AVAsset *movieAsset = [AVAsset assetWithURL:self.videoUrl]; // self.videoUrl: URL of the video file
25 | int second = (int)(movieAsset.duration.value / movieAsset.duration.timescale); // total duration of the video, in seconds
26 | for (float i = 0.0; i < second;) {
27 | self.imageView.image = [self getVideoPreViewImageByTime:i];
28 | i += 0.1;
29 | }
30 | }
31 |
32 | - (void)didReceiveMemoryWarning {
33 | [super didReceiveMemoryWarning];
34 | // Dispose of any resources that can be recreated.
35 | }
36 |
37 | - (UIImage *)getVideoPreViewImageByTime:(float)t
38 |
39 | {
40 | AVURLAsset *asset = [[AVURLAsset alloc] initWithURL:self.videoUrl options:nil];
41 | AVAssetImageGenerator *gen = [[AVAssetImageGenerator alloc] initWithAsset:asset];
42 | gen.appliesPreferredTrackTransform = YES;
43 | CMTime time = CMTimeMakeWithSeconds(t, 600); // timescale 600 keeps sub-second precision
44 | NSError *error = nil;
45 | CMTime actualTime;
46 | CGImageRef image = [gen copyCGImageAtTime:time actualTime:&actualTime error:&error];
47 | UIImage *img = [[UIImage alloc] initWithCGImage:image];
48 | CGImageRelease(image);
49 |
50 | return img;
51 | }
52 |
53 | @end
54 |
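The loop in `viewDidLoad` above creates a fresh `AVAssetImageGenerator` for every requested time and blocks the main thread in `copyCGImageAtTime:`. For reference, a hedged sketch of the same frame extraction using AVFoundation's asynchronous batch API; the variable names are illustrative and this is not part of the project:

```objc
// Sketch: batch frame extraction with a single AVAssetImageGenerator.
AVURLAsset *asset = [AVURLAsset URLAssetWithURL:self.videoUrl options:nil];
AVAssetImageGenerator *generator = [AVAssetImageGenerator assetImageGeneratorWithAsset:asset];
generator.appliesPreferredTrackTransform = YES;

NSMutableArray<NSValue *> *times = [NSMutableArray array];
Float64 duration = CMTimeGetSeconds(asset.duration);
for (Float64 t = 0.0; t < duration; t += 0.1) {
    [times addObject:[NSValue valueWithCMTime:CMTimeMakeWithSeconds(t, 600)]];
}

[generator generateCGImagesAsynchronouslyForTimes:times
                                completionHandler:^(CMTime requestedTime, CGImageRef image,
                                                    CMTime actualTime, AVAssetImageGeneratorResult result,
                                                    NSError *error) {
    if (result == AVAssetImageGeneratorSucceeded) {
        UIImage *frame = [UIImage imageWithCGImage:image];
        dispatch_async(dispatch_get_main_queue(), ^{
            self.imageView.image = frame; // UI updates stay on the main thread
        });
    }
}];
```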
--------------------------------------------------------------------------------
/WaterMarkDelete/WaterMarkDelete/Assets.xcassets/AppIcon.appiconset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "idiom" : "iphone",
5 | "size" : "20x20",
6 | "scale" : "2x"
7 | },
8 | {
9 | "idiom" : "iphone",
10 | "size" : "20x20",
11 | "scale" : "3x"
12 | },
13 | {
14 | "idiom" : "iphone",
15 | "size" : "29x29",
16 | "scale" : "2x"
17 | },
18 | {
19 | "idiom" : "iphone",
20 | "size" : "29x29",
21 | "scale" : "3x"
22 | },
23 | {
24 | "idiom" : "iphone",
25 | "size" : "40x40",
26 | "scale" : "2x"
27 | },
28 | {
29 | "idiom" : "iphone",
30 | "size" : "40x40",
31 | "scale" : "3x"
32 | },
33 | {
34 | "idiom" : "iphone",
35 | "size" : "60x60",
36 | "scale" : "2x"
37 | },
38 | {
39 | "idiom" : "iphone",
40 | "size" : "60x60",
41 | "scale" : "3x"
42 | },
43 | {
44 | "idiom" : "ipad",
45 | "size" : "20x20",
46 | "scale" : "1x"
47 | },
48 | {
49 | "idiom" : "ipad",
50 | "size" : "20x20",
51 | "scale" : "2x"
52 | },
53 | {
54 | "idiom" : "ipad",
55 | "size" : "29x29",
56 | "scale" : "1x"
57 | },
58 | {
59 | "idiom" : "ipad",
60 | "size" : "29x29",
61 | "scale" : "2x"
62 | },
63 | {
64 | "idiom" : "ipad",
65 | "size" : "40x40",
66 | "scale" : "1x"
67 | },
68 | {
69 | "idiom" : "ipad",
70 | "size" : "40x40",
71 | "scale" : "2x"
72 | },
73 | {
74 | "idiom" : "ipad",
75 | "size" : "76x76",
76 | "scale" : "1x"
77 | },
78 | {
79 | "idiom" : "ipad",
80 | "size" : "76x76",
81 | "scale" : "2x"
82 | },
83 | {
84 | "idiom" : "ipad",
85 | "size" : "83.5x83.5",
86 | "scale" : "2x"
87 | },
88 | {
89 | "idiom" : "ios-marketing",
90 | "size" : "1024x1024",
91 | "scale" : "1x"
92 | }
93 | ],
94 | "info" : {
95 | "version" : 1,
96 | "author" : "xcode"
97 | }
98 | }
--------------------------------------------------------------------------------
/WaterMarkDelete/WaterMarkDelete/AppDelegate.m:
--------------------------------------------------------------------------------
1 | //
2 | // AppDelegate.m
3 | // WaterMarkDelete
4 | //
5 | // Created by WangYiming on 2018/4/6.
6 | // Copyright © 2018年 WangYiming. All rights reserved.
7 | //
8 |
9 | #import "AppDelegate.h"
10 | #import "RootViewController.h"
11 | @interface AppDelegate ()
12 |
13 | @end
14 |
15 | @implementation AppDelegate
16 |
17 |
18 | - (BOOL)application:(UIApplication *)application didFinishLaunchingWithOptions:(NSDictionary *)launchOptions {
19 | // Override point for customization after application launch.
20 | self.window = [[UIWindow alloc] initWithFrame:[[UIScreen mainScreen]bounds]];
21 | RootViewController *rootViewController = [[RootViewController alloc] init];
22 | self.window.rootViewController = [[UINavigationController alloc] initWithRootViewController:rootViewController];
23 | [self.window makeKeyAndVisible];
24 |
25 | return YES;
26 | }
27 |
28 |
29 | - (void)applicationWillResignActive:(UIApplication *)application {
30 | // Sent when the application is about to move from active to inactive state. This can occur for certain types of temporary interruptions (such as an incoming phone call or SMS message) or when the user quits the application and it begins the transition to the background state.
31 | // Use this method to pause ongoing tasks, disable timers, and invalidate graphics rendering callbacks. Games should use this method to pause the game.
32 | }
33 |
34 |
35 | - (void)applicationDidEnterBackground:(UIApplication *)application {
36 | // Use this method to release shared resources, save user data, invalidate timers, and store enough application state information to restore your application to its current state in case it is terminated later.
37 | // If your application supports background execution, this method is called instead of applicationWillTerminate: when the user quits.
38 | }
39 |
40 |
41 | - (void)applicationWillEnterForeground:(UIApplication *)application {
42 | // Called as part of the transition from the background to the active state; here you can undo many of the changes made on entering the background.
43 | }
44 |
45 |
46 | - (void)applicationDidBecomeActive:(UIApplication *)application {
47 | // Restart any tasks that were paused (or not yet started) while the application was inactive. If the application was previously in the background, optionally refresh the user interface.
48 | }
49 |
50 |
51 | - (void)applicationWillTerminate:(UIApplication *)application {
52 | // Called when the application is about to terminate. Save data if appropriate. See also applicationDidEnterBackground:.
53 | }
54 |
55 |
56 | @end
57 |
--------------------------------------------------------------------------------
/WaterMarkDelete/WaterMarkDelete/HJImagesToVideo/HJImagesToVideo.h:
--------------------------------------------------------------------------------
1 | //
2 | // HJImagesToVideo.h
3 | // HJImagesToVideo
4 | //
5 | // Created by Harrison Jackson on 8/4/13.
6 | // Copyright (c) 2013 Harrison Jackson. All rights reserved.
7 | //
8 |
9 | #import <Foundation/Foundation.h>
10 | #import <UIKit/UIKit.h>
11 | #import <AVFoundation/AVFoundation.h>
12 | #import <CoreMedia/CoreMedia.h>
13 | #import <CoreVideo/CoreVideo.h>
14 |
15 | /**
16 | * Determines defaults for transitions
17 | */
18 | FOUNDATION_EXPORT BOOL const DefaultTransitionShouldAnimate;
19 |
20 | /**
21 | * Determines default frame size for videos
22 | */
23 | FOUNDATION_EXPORT CGSize const DefaultFrameSize;
24 |
25 | /**
26 | * Determines default FPS of video - 10 Images at 10FPS results in a 1 second video clip.
27 | */
28 | FOUNDATION_EXPORT NSInteger const DefaultFrameRate;
29 |
30 | /**
31 | * Number of frames to use in transition
32 | */
33 | FOUNDATION_EXPORT NSInteger const TransitionFrameCount;
34 |
35 | /**
36 | * Number of frames to hold each image before beginning alpha fade into the next
37 | */
38 | FOUNDATION_EXPORT NSInteger const FramesToWaitBeforeTransition;
39 |
40 |
41 |
42 | typedef void(^SuccessBlock)(BOOL success);
43 |
44 |
45 | @interface HJImagesToVideo : NSObject
46 |
47 |
48 | /**
49 | * This is the main function for creating a video from a set of images
50 | *
51 | * FPS of 1 with 10 images results in a 10 second video, but not necessarily a 10-frame video. Transitions will add frames, but maintain the expected duration
52 | *
53 | * @param images Images to convert to video
54 | * @param path Path to write video to
55 | * @param size Frame size of image
56 | * @param fps FPS of video
57 | * @param animate Yes results in crossfade between images
58 | * @param callbackBlock Block to execute when video creation completes or fails
59 | */
60 | + (void)videoFromImages:(NSArray *)images
61 | toPath:(NSString *)path
62 | withSize:(CGSize)size
63 | withFPS:(int)fps
64 | animateTransitions:(BOOL)animate
65 | withCallbackBlock:(SuccessBlock)callbackBlock;
66 |
67 | + (void)videoFromImages:(NSArray *)images
68 | toPath:(NSString *)path
69 | withFPS:(int)fps
70 | animateTransitions:(BOOL)animate
71 | withCallbackBlock:(SuccessBlock)callbackBlock;
72 |
73 | + (void)videoFromImages:(NSArray *)images
74 | toPath:(NSString *)path
75 | withSize:(CGSize)size
76 | animateTransitions:(BOOL)animate
77 | withCallbackBlock:(SuccessBlock)callbackBlock;
78 |
79 | + (void)videoFromImages:(NSArray *)images
80 | toPath:(NSString *)path
81 | animateTransitions:(BOOL)animate
82 | withCallbackBlock:(SuccessBlock)callbackBlock;
83 |
84 | + (void)videoFromImages:(NSArray *)images
85 | toPath:(NSString *)path
86 | withCallbackBlock:(SuccessBlock)callbackBlock;
87 |
88 | /**
89 | * Convenience methods for saving to camera roll
90 | *
91 | * @param images Images to convert to video
92 | * @param size Frame size of image
93 | * @param fps FPS of video
94 | * @param animate Yes results in crossfade between images
95 | * @param callbackBlock Block to execute when video creation completes or fails
96 | */
97 | + (void)saveVideoToPhotosWithImages:(NSArray *)images
98 | withSize:(CGSize)size
99 | withFPS:(int)fps
100 | animateTransitions:(BOOL)animate
101 | withCallbackBlock:(SuccessBlock)callbackBlock;
102 |
103 | + (void)saveVideoToPhotosWithImages:(NSArray *)images
104 | withSize:(CGSize)size
105 | animateTransitions:(BOOL)animate
106 | withCallbackBlock:(SuccessBlock)callbackBlock;
107 |
108 | + (void)saveVideoToPhotosWithImages:(NSArray *)images
109 | withFPS:(int)fps
110 | animateTransitions:(BOOL)animate
111 | withCallbackBlock:(SuccessBlock)callbackBlock;
112 |
113 | + (void)saveVideoToPhotosWithImages:(NSArray *)images
114 | animateTransitions:(BOOL)animate
115 | withCallbackBlock:(SuccessBlock)callbackBlock;
116 |
117 | + (void)saveVideoToPhotosWithImages:(NSArray *)images
118 | withCallbackBlock:(SuccessBlock)callbackBlock;
119 |
120 |
121 | @end
122 |
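A hedged usage sketch of the API declared above; the `frames` array and the logging are illustrative, not part of the library:

```objc
// Sketch: stitch a set of UIImages into an MP4 and save it to the photo library.
NSArray<UIImage *> *frames = @[frame1, frame2, frame3]; // illustrative images
[HJImagesToVideo saveVideoToPhotosWithImages:frames
                                    withSize:CGSizeMake(480, 320)
                                     withFPS:1
                          animateTransitions:YES
                           withCallbackBlock:^(BOOL success) {
    NSLog(@"video written: %@", success ? @"yes" : @"no");
}];
```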
--------------------------------------------------------------------------------
/WaterMarkDelete/WaterMarkDelete/RootViewController.m:
--------------------------------------------------------------------------------
1 | //
2 | // RootViewController.m
3 | // WaterMarkDelete
4 | //
5 | // Created by WangYiming on 2018/4/6.
6 | // Copyright © 2018年 WangYiming. All rights reserved.
7 | //
8 |
9 | #import "RootViewController.h"
10 |
11 | @interface RootViewController ()
12 |
13 | @end
14 |
15 | @implementation RootViewController
16 |
17 | - (void)test {
18 | int a = 1;
19 | }
20 |
21 | #pragma mark - Life cycle
22 | - (void)viewDidLoad {
23 | [super viewDidLoad];
24 |
25 | // Do any additional setup after loading the view.
26 | CGRect screen = [[UIScreen mainScreen] bounds];
27 | CGFloat button_width = 100;
28 | CGFloat button_height = 100;
29 | CGFloat button_top_view = 260;
30 | CGColorSpaceRef colorspace = CGColorSpaceCreateDeviceRGB();
31 | CGColorRef colorRef = CGColorCreate(colorspace, (CGFloat[]){0,0,0,1});
32 |
33 | self.view.backgroundColor = [UIColor whiteColor];
34 | self.navigationItem.title = @"去除水印";
35 |
36 |
37 | UIButton *videoDelButton = [UIButton buttonWithType:UIButtonTypeRoundedRect];
38 | videoDelButton.frame = CGRectMake(0.5*(screen.size.width - button_width) - 100, button_top_view, button_width, button_height);
39 | videoDelButton.titleLabel.font = [UIFont systemFontOfSize:20];
40 | [videoDelButton.titleLabel sizeToFit];
41 | [videoDelButton setTitle:@"视频去除" forState:UIControlStateNormal];
42 | [videoDelButton setTitleColor:[UIColor blackColor] forState:UIControlStateNormal];
43 | videoDelButton.layer.borderColor = colorRef;
44 | [videoDelButton.layer setBorderWidth:2.0];
45 | [videoDelButton.layer setCornerRadius:12.0];
46 | [videoDelButton.layer setMasksToBounds:YES];
47 | [videoDelButton addTarget:self action:@selector(onClickVideoButton) forControlEvents:UIControlEventTouchUpInside];
48 | [self.view addSubview:videoDelButton];
49 |
50 | UIButton *imageDelButton = [UIButton buttonWithType:UIButtonTypeRoundedRect];
51 | imageDelButton.frame = CGRectMake(videoDelButton.frame.origin.x + 200, button_top_view, button_width, button_height);
52 | imageDelButton.titleLabel.font = [UIFont systemFontOfSize:20];
53 | [imageDelButton.titleLabel sizeToFit];
54 | [imageDelButton setTitle:@"图片去除" forState:UIControlStateNormal];
55 | [imageDelButton setTitleColor:[UIColor blackColor] forState:UIControlStateNormal];
56 | [imageDelButton.layer setBorderColor:colorRef];
57 | [imageDelButton.layer setBorderWidth:2.0];
58 | [imageDelButton.layer setCornerRadius:12.0];
59 | [imageDelButton.layer setMasksToBounds:YES];
60 | [imageDelButton addTarget:self action:@selector(onClickImageButton) forControlEvents:UIControlEventTouchUpInside];
61 | [self.view addSubview:imageDelButton];
62 |
63 | }
64 |
65 | - (void)didReceiveMemoryWarning {
66 | [super didReceiveMemoryWarning];
67 | // Dispose of any resources that can be recreated.
68 | }
69 |
70 | #pragma mark - Actions
71 | - (void)onClickVideoButton{
72 | // VideoViewController *videoController = [[VideoViewController alloc] init];
73 | // [self.navigationController pushViewController:videoController animated:YES];
74 | // Pick a video from the local photo library
75 | UIImagePickerController *ipc = [[UIImagePickerController alloc] init];
76 | ipc.sourceType = UIImagePickerControllerSourceTypePhotoLibrary; // sourceType can be camera, photoLibrary or savedPhotosAlbum
77 | NSArray *availableMedia = [UIImagePickerController availableMediaTypesForSourceType:UIImagePickerControllerSourceTypeCamera]; // the camera supports two media types: @"public.image" and @"public.movie"
78 | ipc.mediaTypes = [NSArray arrayWithObject:availableMedia[1]]; // restrict the picker to @"public.movie"
79 | ipc.delegate = self; // set the delegate before presenting
80 | [self presentViewController:ipc animated:YES completion:nil];
81 | }
82 |
83 | - (void)onClickImageButton{
84 | UIAlertController *actionSheet = [UIAlertController alertControllerWithTitle:nil message:nil preferredStyle:UIAlertControllerStyleActionSheet];
85 |
86 | UIAlertAction *cancel = [UIAlertAction actionWithTitle:@"取消" style:UIAlertActionStyleCancel handler:^(UIAlertAction * _Nonnull action) {
87 | NSLog(@"取消操作");
88 | [self showToast:@"操作已取消"];
89 | }];
90 |
91 | UIAlertAction *takePhoto = [UIAlertAction actionWithTitle:@"拍照" style:UIAlertActionStyleDefault handler:^(UIAlertAction * _Nonnull action) {
92 | NSLog(@"拍照");
93 | [self takePhoto];
94 | }];
95 |
96 | UIAlertAction *fromPictures = [UIAlertAction actionWithTitle:@"从相册中选择" style:UIAlertActionStyleDefault handler:^(UIAlertAction * _Nonnull action) {
97 | NSLog(@"从相册中选择图像");
98 | [self fromPictures];
99 | }];
100 | [actionSheet addAction:cancel];
101 | [actionSheet addAction:takePhoto];
102 | [actionSheet addAction:fromPictures];
103 |
104 | [self presentViewController:actionSheet animated:YES completion:nil];
105 | }
106 | #pragma mark - UIImagePickerControllerDelegate
107 | -(void)imagePickerController:(UIImagePickerController *)picker didFinishPickingMediaWithInfo:(NSDictionary *)info{
108 | NSString *mediaType=[info objectForKey:UIImagePickerControllerMediaType];
109 |
110 | if ([mediaType isEqualToString:@"public.movie"]){
111 | // The user picked a video
112 | NSURL *url = info[UIImagePickerControllerMediaURL]; // URL of the selected video
113 | NSLog(@"url %@",url);
114 | VideoViewController *videoController = [[VideoViewController alloc] init];
115 | videoController.videoUrl = url;
116 | [self dismissViewControllerAnimated:YES completion:nil];
117 | [picker dismissViewControllerAnimated:YES completion:nil];
118 | [self.navigationController pushViewController:videoController animated:YES];
119 | }else{
120 | [self dismissViewControllerAnimated:YES completion:nil];
121 | [picker dismissViewControllerAnimated:YES completion:nil];
122 | UIImage *image = [info objectForKey:UIImagePickerControllerOriginalImage];
123 | PictureViewController *pictureViewController = [[PictureViewController alloc] init];
124 | pictureViewController.image = image;
125 | [self.navigationController pushViewController:pictureViewController animated:YES];
126 | }
127 |
128 |
129 |
130 | }
131 | #pragma mark - Helpers
132 | -(void)showToast:(NSString *)str{
133 | MBProgressHUD *HUD = [[MBProgressHUD alloc] initWithView:self.view];
134 | HUD.label.text = str;
135 | HUD.mode = MBProgressHUDModeText;
136 | [HUD setOffset:CGPointMake(0.0f, 300.0f)];
137 | HUD.bezelView.style = MBProgressHUDBackgroundStyleSolidColor;
138 | HUD.bezelView.color = [[UIColor whiteColor] colorWithAlphaComponent:0.2];
139 | [self.view addSubview:HUD];
140 | [HUD showAnimated:YES];
141 | dispatch_async(dispatch_get_global_queue(NSQualityOfServiceUserInteractive, 0), ^{
142 | sleep(1);
143 | dispatch_async(dispatch_get_main_queue(), ^{
144 | [HUD hideAnimated:YES];
145 |
146 | });
147 | });
148 | HUD = nil;
149 | }
150 | -(void)takePhoto{
151 | UIImagePickerController *imagePickerController = [[UIImagePickerController alloc] init];
152 | imagePickerController.sourceType = UIImagePickerControllerSourceTypeCamera;
153 | imagePickerController.allowsEditing = NO;
154 | imagePickerController.delegate = self;
155 |
156 | [self presentViewController:imagePickerController animated:YES completion:nil];
157 | }
158 | -(void)fromPictures{
159 | UIImagePickerController *imagePickerController = [[UIImagePickerController alloc] init];
160 | imagePickerController.sourceType = UIImagePickerControllerSourceTypePhotoLibrary;
161 | imagePickerController.allowsEditing = NO;
162 | imagePickerController.delegate = self;
163 |
164 | [self presentViewController:imagePickerController animated:YES completion:nil];
165 | }
166 | @end
167 |
--------------------------------------------------------------------------------
/WaterMarkDelete/WaterMarkDelete/PictureViewController.mm:
--------------------------------------------------------------------------------
1 | //
2 | // PictureViewController.m
3 | // WaterMarkDelete
4 | //
5 | // Created by WangYiming on 2018/4/6.
6 | // Copyright © 2018年 WangYiming. All rights reserved.
7 | //
8 | #import "PictureViewController.h"
9 | #import "UIImage+OpenCV.h"
10 | @interface PictureViewController ()
11 | @property(strong,nonatomic) UIImageView *imageView;
12 | @property(assign,nonatomic) CGPoint startPoint;
13 | @property(strong,nonatomic) ClipView *clipView;
14 | @property(assign,nonatomic) CGFloat factor_scale;
15 | @property(assign,nonatomic) CGPoint offsetImageToImageView;
16 | @end
17 |
18 | @implementation PictureViewController
19 |
20 | - (void)viewDidLoad {
21 | [super viewDidLoad];
22 | // Do any additional setup after loading the view.
23 | CGRect screen = [[UIScreen mainScreen]bounds];
24 | CGFloat imageLayerwidth = screen.size.width;
25 | CGFloat imageLayerTopView = 65;
26 |
27 | self.navigationItem.title = @"图片去水印";
28 | UIBarButtonItem *saveButton = [[UIBarButtonItem alloc] initWithBarButtonSystemItem:UIBarButtonSystemItemDone target:self action:@selector(onSave:)];
29 | self.navigationItem.rightBarButtonItem = saveButton;
30 | self.view.backgroundColor = [UIColor whiteColor];
31 |
32 | self.imageView = [[UIImageView alloc]initWithFrame:CGRectMake(0, imageLayerTopView, imageLayerwidth, screen.size.height - imageLayerTopView)];
33 | self.imageView.contentMode = UIViewContentModeScaleAspectFit;
34 | [self.view addSubview:self.imageView];
35 | self.imageView.image = self.image;
36 |
37 |
38 | }
39 | - (void)viewWillAppear:(BOOL)animated{
40 | [super viewWillAppear:animated];
41 | self.clipView = [[ClipView alloc] init];
42 | [self.imageView addSubview:self.clipView];
43 | UIPanGestureRecognizer *pan = [[UIPanGestureRecognizer alloc] initWithTarget:self action:@selector(pan:)];
44 | [self.imageView addGestureRecognizer:pan];
45 | self.imageView.userInteractionEnabled = YES;
46 | }
47 | - (void)didReceiveMemoryWarning {
48 | [super didReceiveMemoryWarning];
49 | // Dispose of any resources that can be recreated.
50 | }
51 | #pragma mark - Actions
52 | - (void)onSave:(id)sender{
53 | UIImageWriteToSavedPhotosAlbum(self.imageFinished, self, @selector(image:didFinishSavingWithError:contextInfo:), nil);
54 | }
55 | - (void)image:(UIImage *)image didFinishSavingWithError:(NSError *)error contextInfo:(void *)contextInfo{
56 |
57 | if (error == nil) {
58 | NSString *message = @"保存成功";
59 | UIAlertController *alert = [UIAlertController alertControllerWithTitle:@"提示信息" message:message preferredStyle:UIAlertControllerStyleAlert];
60 | UIAlertAction *ok = [UIAlertAction actionWithTitle:@"确定" style:UIAlertActionStyleDefault handler:nil];
61 | [alert addAction:ok];
62 | [self presentViewController:alert animated:YES completion:nil];
63 | }else{
64 | NSString *message = @"保存失败";
65 | UIAlertController *alert = [UIAlertController alertControllerWithTitle:@"提示信息" message:message preferredStyle:UIAlertControllerStyleAlert];
66 | UIAlertAction *ok = [UIAlertAction actionWithTitle:@"确定" style:UIAlertActionStyleDefault handler:nil];
67 | [alert addAction:ok];
68 | [self presentViewController:alert animated:YES completion:nil];
69 | }
70 |
71 | }
72 |
73 | #pragma mark - Helpers
74 | - (CGPoint)pickerPointJudge:(UIImageView*)imageView pointInView:(CGPoint)point{
75 | CGPoint tempPoint = CGPointMake(0, 0);
76 | CGFloat factor_frame = imageView.frame.size.width/imageView.frame.size.height;
77 | CGFloat factor_image = self.image.size.width/self.image.size.height;
78 | if (factor_frame < factor_image) { // the image is scaled to fit the view's width (letterboxed top and bottom)
79 | self.factor_scale = imageView.frame.size.width/self.image.size.width;
80 | tempPoint.x = point.x;
81 | CGPoint offset = CGPointMake(0, 0.5*(imageView.frame.size.height - self.image.size.height * self.factor_scale));
82 | self.offsetImageToImageView = offset;
83 | if (point.y < 0.5*(imageView.frame.size.height - self.image.size.height * self.factor_scale)) {
84 | tempPoint.y = 0.5*(imageView.frame.size.height - self.image.size.height * self.factor_scale);
85 | }else if (point.y > 0.5*(imageView.frame.size.height + self.image.size.height * self.factor_scale)){
86 | tempPoint.y = 0.5*(imageView.frame.size.height + self.image.size.height * self.factor_scale);
87 | }
88 | else{
89 | tempPoint.y = point.y;
90 | }
91 | }else{
92 | self.factor_scale = imageView.frame.size.height/self.image.size.height;
93 | tempPoint.y = point.y;
94 | CGPoint offset = CGPointMake(0.5*(imageView.frame.size.width - self.image.size.width * self.factor_scale),0);
95 | self.offsetImageToImageView = offset;
96 | if (point.x < 0.5*(imageView.frame.size.width - self.image.size.width * self.factor_scale)) {
97 | tempPoint.x = 0.5*(imageView.frame.size.width - self.image.size.width * self.factor_scale);
98 | }else if (point.x > 0.5*(imageView.frame.size.width + self.image.size.width * self.factor_scale)){
99 | tempPoint.x = 0.5*(imageView.frame.size.width + self.image.size.width * self.factor_scale);
100 | }else{
101 | tempPoint.x = point.x;
102 | }
103 | }
104 | return tempPoint;
105 | }
106 | - (UIImage *)cropImage:(UIImage*)image toRect:(CGRect)rect {
107 | CGFloat (^rad)(CGFloat) = ^CGFloat(CGFloat deg) {
108 | return deg / 180.0f * (CGFloat) M_PI;
109 | };
110 | // determine the orientation of the image and apply a transformation to the crop rectangle to shift it to the correct position
111 | CGAffineTransform rectTransform;
112 | switch (image.imageOrientation) {
113 | case UIImageOrientationLeft:
114 | rectTransform = CGAffineTransformTranslate(CGAffineTransformMakeRotation(rad(90)), 0, -image.size.height);
115 | break;
116 | case UIImageOrientationRight:
117 | rectTransform = CGAffineTransformTranslate(CGAffineTransformMakeRotation(rad(-90)), -image.size.width, 0);
118 | break;
119 | case UIImageOrientationDown:
120 | rectTransform = CGAffineTransformTranslate(CGAffineTransformMakeRotation(rad(-180)), -image.size.width, -image.size.height);
121 | break;
122 | default:
123 | rectTransform = CGAffineTransformIdentity;
124 | };
125 |
126 | // adjust the transformation scale based on the image scale
127 | rectTransform = CGAffineTransformScale(rectTransform, image.scale, image.scale);
128 |
129 | // apply the transformation to the rect to create a new, shifted rect
130 | CGRect transformedCropSquare = CGRectApplyAffineTransform(rect, rectTransform);
131 | // use the rect to crop the image
132 | CGImageRef imageRef = CGImageCreateWithImageInRect(image.CGImage, transformedCropSquare);
133 | // create a new UIImage and set the scale and orientation appropriately
134 | UIImage *result = [UIImage imageWithCGImage:imageRef scale:image.scale orientation:image.imageOrientation];
135 | // memory cleanup
136 | CGImageRelease(imageRef);
137 |
138 | return result;
139 | }
140 | -(void)pan:(UIPanGestureRecognizer*)panner{
141 | CGPoint endPoint = CGPointZero;
142 | if (panner.state == UIGestureRecognizerStateBegan) {
143 | self.startPoint = [self pickerPointJudge:self.imageView pointInView:[panner locationInView:self.imageView]];
144 | }
145 | else if (panner.state == UIGestureRecognizerStateChanged){
146 | endPoint = [self pickerPointJudge:self.imageView pointInView:[panner locationInView:self.imageView]];
147 | CGFloat clipWidth = endPoint.x - self.startPoint.x;
148 | CGFloat clipHeight = endPoint.y - self.startPoint.y;
149 |
150 | self.clipView.frame = CGRectMake(self.startPoint.x, self.startPoint.y, clipWidth, clipHeight);
151 |
152 | }
153 | else if (panner.state == UIGestureRecognizerStateEnded){
154 | CGRect rectInImage = CGRectMake((self.clipView.frame.origin.x - self.offsetImageToImageView.x)/ self.factor_scale, (self.clipView.frame.origin.y - self.offsetImageToImageView.y) / self.factor_scale, self.clipView.frame.size.width/ self.factor_scale, self.clipView.frame.size.height/ self.factor_scale);
155 | //UIImage *imageCut = [self cropImage:self.image toRect:rectInImage];
156 | self.imageFinished = [self.image WaterMarkDelete:rectInImage];
157 |
158 | [self.imageView setImage:self.imageFinished];
159 | [self.clipView removeFromSuperview];
160 | self.clipView = nil;
161 | [self viewWillAppear:YES];
162 | }
163 | }
164 | @end
165 |
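`pickerPointJudge:pointInView:` and the pan handler above undo the image view's aspect-fit transform: the letterbox offset is subtracted from the touch point and the result is divided by the scale factor, yielding coordinates in the image's own pixel space for the rect passed to `WaterMarkDelete:`. A minimal standalone sketch of that mapping (the helper name is illustrative, not part of the project):

```objc
// Sketch: map a point from an aspect-fit UIImageView's coordinate space to image pixels.
static CGPoint PointInImagePixels(CGPoint viewPoint, CGSize viewSize, CGSize imageSize) {
    // Aspect-fit scale: the image shrinks until both dimensions fit inside the view.
    CGFloat scale = MIN(viewSize.width / imageSize.width, viewSize.height / imageSize.height);
    // Letterbox offset: the scaled image is centered in the view.
    CGFloat offsetX = 0.5 * (viewSize.width  - imageSize.width  * scale);
    CGFloat offsetY = 0.5 * (viewSize.height - imageSize.height * scale);
    return CGPointMake((viewPoint.x - offsetX) / scale,
                       (viewPoint.y - offsetY) / scale);
}
```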
--------------------------------------------------------------------------------
/WaterMarkDelete/WaterMarkDelete/UIImage+OpenCV.mm:
--------------------------------------------------------------------------------
1 | //
2 | // UIImage+OpenCV.m
3 | // WaterMarkDelete
4 | //
5 | // Created by WangYiming on 2018/4/8.
6 | // Copyright © 2018年 WangYiming. All rights reserved.
7 | //
8 |
9 | #import "UIImage+OpenCV.h"
10 |
11 | static void ProviderReleaseDataNOP(void *info, const void *data, size_t size)
12 | {
13 | // Do not release memory
14 | return;
15 | }
16 |
17 | @implementation UIImage (OpenCV)
18 | -(cv::Mat)CVMat
19 | {
20 | UIImage *imageRotate = [self fixOrientation];
21 |
22 | CGColorSpaceRef colorSpace = CGImageGetColorSpace(imageRotate.CGImage);
23 | CGFloat cols = imageRotate.size.width;
24 | CGFloat rows = imageRotate.size.height;
25 |
26 | cv::Mat cvMat(rows, cols, CV_8UC4); // 8 bits per component, 4 channels
27 |
28 | CGContextRef contextRef = CGBitmapContextCreate(cvMat.data, // Pointer to backing data
29 | cols, // Width of bitmap
30 | rows, // Height of bitmap
31 | 8, // Bits per component
32 | cvMat.step[0], // Bytes per row
33 | colorSpace, // Colorspace
34 | kCGImageAlphaNoneSkipLast |
35 | kCGBitmapByteOrderDefault); // Bitmap info flags
36 |
37 | CGContextDrawImage(contextRef, CGRectMake(0, 0, cols, rows), imageRotate.CGImage);
38 | CGContextRelease(contextRef);
39 |
40 | return cvMat;
41 | }
42 | -(cv::Mat)CVGrayscaleMat
43 | {
44 | CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceGray();
45 | CGFloat cols = self.size.width;
46 | CGFloat rows = self.size.height;
47 |
48 | cv::Mat cvMat = cv::Mat(rows, cols, CV_8UC1); // 8 bits per component, 1 channel
49 |
50 | CGContextRef contextRef = CGBitmapContextCreate(cvMat.data, // Pointer to backing data
51 | cols, // Width of bitmap
52 | rows, // Height of bitmap
53 | 8, // Bits per component
54 | cvMat.step[0], // Bytes per row
55 | colorSpace, // Colorspace
56 | kCGImageAlphaNone |
57 | kCGBitmapByteOrderDefault); // Bitmap info flags
58 |
59 | CGContextDrawImage(contextRef, CGRectMake(0, 0, cols, rows), self.CGImage);
60 | CGContextRelease(contextRef);
61 | CGColorSpaceRelease(colorSpace);
62 |
63 | return cvMat;
64 | }
65 | -(UIImage *)UIImageFromCVMat:(cv::Mat)cvMat
66 | {
67 | NSData *data = [NSData dataWithBytes:cvMat.data length:cvMat.elemSize()*cvMat.total()];
68 | CGColorSpaceRef colorSpace;
69 |
70 | if (cvMat.elemSize() == 1) {
71 | colorSpace = CGColorSpaceCreateDeviceGray();
72 | } else {
73 | colorSpace = CGColorSpaceCreateDeviceRGB();
74 | }
75 |
76 | CGDataProviderRef provider = CGDataProviderCreateWithCFData((__bridge CFDataRef)data);
77 |
78 | // Creating CGImage from cv::Mat
79 | CGImageRef imageRef = CGImageCreate(cvMat.cols, //width
80 | cvMat.rows, //height
81 | 8, //bits per component
82 | 8 * cvMat.elemSize(), //bits per pixel
83 | cvMat.step[0], //bytesPerRow
84 | colorSpace, //colorspace
85 | kCGImageAlphaNone|kCGBitmapByteOrderDefault,// bitmap info
86 | provider, //CGDataProviderRef
87 | NULL, //decode
88 | false, //should interpolate
89 | kCGRenderingIntentDefault //intent
90 | );
91 | UIImage *finalImage = [UIImage imageWithCGImage:imageRef];
92 | CGImageRelease(imageRef);
93 | CGDataProviderRelease(provider);
94 | CGColorSpaceRelease(colorSpace);
95 |
96 | return finalImage;
97 | }
98 | - (id)WaterMarkDelete:(CGRect) rect{
99 |
100 | cv::Mat imageMat = self.CVMat;
101 | cv::Rect roiRect = cv::Rect(cv::Rect(rect.origin.x,rect.origin.y,rect.size.width,rect.size.height));
102 | // Build the inpainting mask: white (255) inside the selected rect, black elsewhere
103 | cv::Mat imageMask = cv::Mat(imageMat.size(),CV_8UC1,cv::Scalar::all(0));
104 | cv::Mat roiImage = cv::Mat(roiRect.size(),CV_8UC1,cv::Scalar::all(255));
105 | roiImage.copyTo(imageMask(roiRect));
106 | // Convert the 4-channel RGBA image to 3 channels, as required by cv::inpaint
107 | IplImage imageIpl = imageMat;
108 | IplImage *img3chan = cvCreateImage(cvGetSize(&imageIpl),imageIpl.depth,3);
109 | cvCvtColor(&imageIpl,img3chan,CV_RGBA2RGB); // CV_RGBA2RGB drops the alpha channel (4 -> 3 channels)
110 | CvMat *cvMat = cvCreateMat(imageIpl.height, imageIpl.width, CV_8UC3); // destination matrix
111 | cvConvert(img3chan, cvMat);
112 | cv::Mat imageMat3chan = cv::cvarrToMat(cvMat);
113 | // Repair (inpaint) the masked region with the Telea algorithm, radius 9
114 | cv::inpaint(imageMat3chan, imageMask, imageMat3chan, 9, cv::INPAINT_TELEA);
115 |
116 | UIImage *imageResult = [self UIImageFromCVMat:imageMat3chan];
117 | return imageResult;
118 | }
119 |
120 | - (UIImage *)fixOrientation {
121 |
122 | // No-op if the orientation is already correct
123 | if (self.imageOrientation == UIImageOrientationUp) return self;
124 |
125 | // We need to calculate the proper transformation to make the image upright.
126 | // We do it in 2 steps: Rotate if Left/Right/Down, and then flip if Mirrored.
127 | CGAffineTransform transform = CGAffineTransformIdentity;
128 |
129 | switch (self.imageOrientation) {
130 | case UIImageOrientationDown:
131 | case UIImageOrientationDownMirrored:
132 | transform = CGAffineTransformTranslate(transform, self.size.width, self.size.height);
133 | transform = CGAffineTransformRotate(transform, M_PI);
134 | break;
135 |
136 | case UIImageOrientationLeft:
137 | case UIImageOrientationLeftMirrored:
138 | transform = CGAffineTransformTranslate(transform, self.size.width, 0);
139 | transform = CGAffineTransformRotate(transform, M_PI_2);
140 | break;
141 |
142 | case UIImageOrientationRight:
143 | case UIImageOrientationRightMirrored:
144 | transform = CGAffineTransformTranslate(transform, 0, self.size.height);
145 | transform = CGAffineTransformRotate(transform, -M_PI_2);
146 | break;
147 | case UIImageOrientationUp:
148 | case UIImageOrientationUpMirrored:
149 | break;
150 | }
151 |
152 | switch (self.imageOrientation) {
153 | case UIImageOrientationUpMirrored:
154 | case UIImageOrientationDownMirrored:
155 | transform = CGAffineTransformTranslate(transform, self.size.width, 0);
156 | transform = CGAffineTransformScale(transform, -1, 1);
157 | break;
158 |
159 | case UIImageOrientationLeftMirrored:
160 | case UIImageOrientationRightMirrored:
161 | transform = CGAffineTransformTranslate(transform, self.size.height, 0);
162 | transform = CGAffineTransformScale(transform, -1, 1);
163 | break;
164 | case UIImageOrientationUp:
165 | case UIImageOrientationDown:
166 | case UIImageOrientationLeft:
167 | case UIImageOrientationRight:
168 | break;
169 | }
170 |
171 | // Now we draw the underlying CGImage into a new context, applying the transform
172 | // calculated above.
173 | CGContextRef ctx = CGBitmapContextCreate(NULL, self.size.width, self.size.height,
174 | CGImageGetBitsPerComponent(self.CGImage), 0,
175 | CGImageGetColorSpace(self.CGImage),
176 | CGImageGetBitmapInfo(self.CGImage));
177 | CGContextConcatCTM(ctx, transform);
178 | switch (self.imageOrientation) {
179 | case UIImageOrientationLeft:
180 | case UIImageOrientationLeftMirrored:
181 | case UIImageOrientationRight:
182 | case UIImageOrientationRightMirrored:
183 | // Grr...
184 | CGContextDrawImage(ctx, CGRectMake(0,0,self.size.height,self.size.width), self.CGImage);
185 | break;
186 |
187 | default:
188 | CGContextDrawImage(ctx, CGRectMake(0,0,self.size.width,self.size.height), self.CGImage);
189 | break;
190 | }
191 |
192 | // And now we just create a new UIImage from the drawing context
193 | CGImageRef cgimg = CGBitmapContextCreateImage(ctx);
194 | UIImage *img = [UIImage imageWithCGImage:cgimg];
195 | CGContextRelease(ctx);
196 | CGImageRelease(cgimg);
197 | return img;
198 | }
199 | @end
200 |
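Stripped of the UIImage/CGImage plumbing, `WaterMarkDelete:` follows the standard OpenCV inpainting pattern. A minimal Objective-C++ sketch of just that core, using the C++ API throughout instead of the legacy IplImage conversion above (the function name and the `cv::cvtColor` route are assumptions, not the project's code):

```objc
// Sketch: inpaint a rectangular region of an RGBA cv::Mat.
#include <opencv2/imgproc/imgproc.hpp>
#include <opencv2/photo/photo.hpp>

static cv::Mat RemoveWatermark(const cv::Mat &rgba, cv::Rect region)
{
    cv::Mat rgb;
    cv::cvtColor(rgba, rgb, cv::COLOR_RGBA2RGB);          // cv::inpaint wants a 1- or 3-channel image
    cv::Mat mask = cv::Mat::zeros(rgb.size(), CV_8UC1);   // 0 = keep, 255 = repair
    mask(region).setTo(cv::Scalar(255));
    cv::Mat repaired;
    cv::inpaint(rgb, mask, repaired, 9.0, cv::INPAINT_TELEA); // Telea algorithm, radius 9
    return repaired;
}
```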
--------------------------------------------------------------------------------
/WaterMarkDelete/WaterMarkDelete/HJImagesToVideo/HJImagesToVideo.m:
--------------------------------------------------------------------------------
1 | //
2 | // HJImagesToVideo.m
3 | // HJImagesToVideo
4 | //
5 | // Created by Harrison Jackson on 8/4/13.
6 | // Copyright (c) 2013 Harrison Jackson. All rights reserved.
7 | //
8 |
9 | #import "HJImagesToVideo.h"
10 |
11 | CGSize const DefaultFrameSize = (CGSize){480, 320};
12 |
13 | NSInteger const DefaultFrameRate = 1;
14 | NSInteger const TransitionFrameCount = 50;
15 | NSInteger const FramesToWaitBeforeTransition = 40;
16 |
17 | BOOL const DefaultTransitionShouldAnimate = YES;
18 |
19 | @implementation HJImagesToVideo
20 |
21 | + (void)videoFromImages:(NSArray *)images
22 | toPath:(NSString *)path
23 | withCallbackBlock:(SuccessBlock)callbackBlock
24 | {
25 | [HJImagesToVideo videoFromImages:images
26 | toPath:path
27 | withSize:DefaultFrameSize
28 | withFPS:DefaultFrameRate
29 | animateTransitions:DefaultTransitionShouldAnimate
30 | withCallbackBlock:callbackBlock];
31 | }
32 |
33 | + (void)videoFromImages:(NSArray *)images
34 | toPath:(NSString *)path
35 | animateTransitions:(BOOL)animate
36 | withCallbackBlock:(SuccessBlock)callbackBlock
37 | {
38 | [HJImagesToVideo videoFromImages:images
39 | toPath:path
40 | withSize:DefaultFrameSize
41 | withFPS:DefaultFrameRate
42 | animateTransitions:animate
43 | withCallbackBlock:callbackBlock];
44 | }
45 |
46 | + (void)videoFromImages:(NSArray *)images
47 | toPath:(NSString *)path
48 | withFPS:(int)fps
49 | animateTransitions:(BOOL)animate
50 | withCallbackBlock:(SuccessBlock)callbackBlock
51 | {
52 | [HJImagesToVideo videoFromImages:images
53 | toPath:path
54 | withSize:DefaultFrameSize
55 | withFPS:fps
56 | animateTransitions:animate
57 | withCallbackBlock:callbackBlock];
58 | }
59 |
60 | + (void)videoFromImages:(NSArray *)images
61 | toPath:(NSString *)path
62 | withSize:(CGSize)size
63 | animateTransitions:(BOOL)animate
64 | withCallbackBlock:(SuccessBlock)callbackBlock
65 | {
66 | [HJImagesToVideo videoFromImages:images
67 | toPath:path
68 | withSize:size
69 | withFPS:DefaultFrameRate
70 | animateTransitions:animate
71 | withCallbackBlock:callbackBlock];
72 | }
73 |
74 | + (void)videoFromImages:(NSArray *)images
75 | toPath:(NSString *)path
76 | withSize:(CGSize)size
77 | withFPS:(int)fps
78 | animateTransitions:(BOOL)animate
79 | withCallbackBlock:(SuccessBlock)callbackBlock
80 | {
81 | [HJImagesToVideo writeImageAsMovie:images
82 | toPath:path
83 | size:size
84 | fps:fps
85 | animateTransitions:animate
86 | withCallbackBlock:callbackBlock];
87 | }
88 |
89 | + (void)saveVideoToPhotosWithImages:(NSArray *)images
90 | withCallbackBlock:(SuccessBlock)callbackBlock
91 | {
92 | [HJImagesToVideo saveVideoToPhotosWithImages:images
93 | withSize:DefaultFrameSize
94 | animateTransitions:DefaultTransitionShouldAnimate
95 | withCallbackBlock:callbackBlock];
96 | }
97 |
98 | + (void)saveVideoToPhotosWithImages:(NSArray *)images
99 | animateTransitions:(BOOL)animate
100 | withCallbackBlock:(SuccessBlock)callbackBlock
101 | {
102 | [HJImagesToVideo saveVideoToPhotosWithImages:images
103 | withSize:DefaultFrameSize
104 | animateTransitions:animate
105 | withCallbackBlock:callbackBlock];
106 | }
107 |
108 | + (void)saveVideoToPhotosWithImages:(NSArray *)images
109 | withSize:(CGSize)size
110 | animateTransitions:(BOOL)animate
111 | withCallbackBlock:(SuccessBlock)callbackBlock
112 | {
113 | [HJImagesToVideo saveVideoToPhotosWithImages:images
114 | withSize:size
115 | withFPS:DefaultFrameRate
116 | animateTransitions:animate
117 | withCallbackBlock:callbackBlock];
118 | }
119 |
120 | + (void)saveVideoToPhotosWithImages:(NSArray *)images
121 | withFPS:(int)fps
122 | animateTransitions:(BOOL)animate
123 | withCallbackBlock:(SuccessBlock)callbackBlock
124 | {
125 | [HJImagesToVideo saveVideoToPhotosWithImages:images
126 | withSize:DefaultFrameSize
127 | withFPS:fps
128 | animateTransitions:animate
129 | withCallbackBlock:callbackBlock];
130 | }
131 |
132 | + (void)saveVideoToPhotosWithImages:(NSArray *)images
133 | withSize:(CGSize)size
134 | withFPS:(int)fps
135 | animateTransitions:(BOOL)animate
136 | withCallbackBlock:(SuccessBlock)callbackBlock
137 | {
138 | NSString *tempPath = [NSTemporaryDirectory() stringByAppendingPathComponent:
139 | [NSString stringWithFormat:@"temp.mp4"]];
140 | [[NSFileManager defaultManager] removeItemAtPath:tempPath error:NULL];
141 |
142 | [HJImagesToVideo videoFromImages:images
143 | toPath:tempPath
144 | withSize:size
145 | withFPS:fps
146 | animateTransitions:animate
147 | withCallbackBlock:^(BOOL success) {
148 |
149 | if (success) {
150 | UISaveVideoAtPathToSavedPhotosAlbum(tempPath, self, nil, nil);
151 | }
152 |
153 | if (callbackBlock) {
154 | callbackBlock(success);
155 | }
156 | }];
157 | }
158 |
159 | + (void)writeImageAsMovie:(NSArray *)array
160 | toPath:(NSString*)path
161 | size:(CGSize)size
162 | fps:(int)fps
163 | animateTransitions:(BOOL)shouldAnimateTransitions
164 | withCallbackBlock:(SuccessBlock)callbackBlock
165 | {
166 | NSLog(@"%@", path);
167 | NSError *error = nil;
168 | AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:path]
169 | fileType:AVFileTypeMPEG4
170 | error:&error];
171 | if (error) {
172 | if (callbackBlock) {
173 | callbackBlock(NO);
174 | }
175 | return;
176 | }
177 | NSParameterAssert(videoWriter);
178 |
179 | NSDictionary *videoSettings = @{AVVideoCodecKey: AVVideoCodecTypeH264,
180 | AVVideoWidthKey: [NSNumber numberWithInt:size.width],
181 | AVVideoHeightKey: [NSNumber numberWithInt:size.height]};
182 |
183 | AVAssetWriterInput* writerInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
184 | outputSettings:videoSettings];
185 |
186 | AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writerInput
187 | sourcePixelBufferAttributes:nil];
188 | NSParameterAssert(writerInput);
189 | NSParameterAssert([videoWriter canAddInput:writerInput]);
190 | [videoWriter addInput:writerInput];
191 |
192 | //Start a session:
193 | [videoWriter startWriting];
194 | [videoWriter startSessionAtSourceTime:kCMTimeZero];
195 |
196 | CVPixelBufferRef buffer;
197 | CVPixelBufferPoolCreatePixelBuffer(NULL, adaptor.pixelBufferPool, &buffer);
198 |
199 | CMTime presentTime = CMTimeMake(0, fps);
200 |
201 | int i = 0;
202 | while (1)
203 | {
204 |
205 | if(writerInput.readyForMoreMediaData){
206 |
207 | presentTime = CMTimeMake(i, fps);
208 |
209 | if (i >= [array count]) {
210 | buffer = NULL;
211 | } else {
212 | buffer = [HJImagesToVideo pixelBufferFromCGImage:[array[i] CGImage] size:CGSizeMake(480, 320)];
213 | }
214 |
215 | if (buffer) {
216 | //append buffer
217 |
218 | BOOL appendSuccess = [HJImagesToVideo appendToAdapter:adaptor
219 | pixelBuffer:buffer
220 | atTime:presentTime
221 | withInput:writerInput];
222 | NSAssert(appendSuccess, @"Failed to append");
223 |
224 | if (shouldAnimateTransitions && i + 1 < array.count) {
225 |
226 | //Create time each fade frame is displayed
227 | CMTime fadeTime = CMTimeMake(1, fps*TransitionFrameCount);
228 |
229 | //Add a delay, causing the base image to have more show time before fade begins.
230 | for (int b = 0; b < FramesToWaitBeforeTransition; b++) {
231 | presentTime = CMTimeAdd(presentTime, fadeTime);
232 | }
233 |
234 | //Adjust fadeFrameCount so that the number and curve of the fade frames and their alpha stay consistent
235 | NSInteger framesToFadeCount = TransitionFrameCount - FramesToWaitBeforeTransition;
236 |
237 | //Apply fade frames
238 | for (double j = 1; j < framesToFadeCount; j++) {
239 |
240 | buffer = [HJImagesToVideo crossFadeImage:[array[i] CGImage]
241 | toImage:[array[i + 1] CGImage]
242 | atSize:CGSizeMake(480, 320)
243 | withAlpha:j/framesToFadeCount];
244 |
245 | BOOL appendSuccess = [HJImagesToVideo appendToAdapter:adaptor
246 | pixelBuffer:buffer
247 | atTime:presentTime
248 | withInput:writerInput];
249 | presentTime = CMTimeAdd(presentTime, fadeTime);
250 |
251 | NSAssert(appendSuccess, @"Failed to append");
252 | }
253 | }
254 |
255 | i++;
256 | } else {
257 |
258 | //Finish the session:
259 | [writerInput markAsFinished];
260 |
261 | [videoWriter finishWritingWithCompletionHandler:^{
262 | NSLog(@"Successfully closed video writer");
263 | if (videoWriter.status == AVAssetWriterStatusCompleted) {
264 | if (callbackBlock) {
265 | callbackBlock(YES);
266 | }
267 | } else {
268 | if (callbackBlock) {
269 | callbackBlock(NO);
270 | }
271 | }
272 | }];
273 |
274 | CVPixelBufferPoolRelease(adaptor.pixelBufferPool);
275 |
276 | NSLog (@"Done");
277 | break;
278 | }
279 | }
280 | }
281 | }
282 |
283 | + (CVPixelBufferRef)pixelBufferFromCGImage:(CGImageRef)image
284 | size:(CGSize)imageSize
285 | {
286 | NSDictionary *options = @{(id)kCVPixelBufferCGImageCompatibilityKey: @YES,
287 | (id)kCVPixelBufferCGBitmapContextCompatibilityKey: @YES};
288 | CVPixelBufferRef pxbuffer = NULL;
289 | CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault, imageSize.width,
290 | imageSize.height, kCVPixelFormatType_32ARGB, (__bridge CFDictionaryRef) options,
291 | &pxbuffer);
292 | NSParameterAssert(status == kCVReturnSuccess && pxbuffer != NULL);
293 |
294 | CVPixelBufferLockBaseAddress(pxbuffer, 0);
295 | void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);
296 | NSParameterAssert(pxdata != NULL);
297 |
298 | CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
299 | CGContextRef context = CGBitmapContextCreate(pxdata, imageSize.width,
300 | imageSize.height, 8, 4*imageSize.width, rgbColorSpace,
301 | kCGImageAlphaNoneSkipFirst);
302 | NSParameterAssert(context);
303 |
304 | CGContextDrawImage(context, CGRectMake(0 + (imageSize.width-CGImageGetWidth(image))/2,
305 | (imageSize.height-CGImageGetHeight(image))/2,
306 | CGImageGetWidth(image),
307 | CGImageGetHeight(image)), image);
308 | CGColorSpaceRelease(rgbColorSpace);
309 | CGContextRelease(context);
310 |
311 | CVPixelBufferUnlockBaseAddress(pxbuffer, 0);
312 |
313 | return pxbuffer;
314 | }
315 |
316 | + (CVPixelBufferRef)crossFadeImage:(CGImageRef)baseImage
317 | toImage:(CGImageRef)fadeInImage
318 | atSize:(CGSize)imageSize
319 | withAlpha:(CGFloat)alpha
320 | {
321 | NSDictionary *options = @{(id)kCVPixelBufferCGImageCompatibilityKey: @YES,
322 | (id)kCVPixelBufferCGBitmapContextCompatibilityKey: @YES};
323 | CVPixelBufferRef pxbuffer = NULL;
324 | CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault, imageSize.width,
325 | imageSize.height, kCVPixelFormatType_32ARGB, (__bridge CFDictionaryRef) options,
326 | &pxbuffer);
327 | NSParameterAssert(status == kCVReturnSuccess && pxbuffer != NULL);
328 |
329 | CVPixelBufferLockBaseAddress(pxbuffer, 0);
330 | void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);
331 | NSParameterAssert(pxdata != NULL);
332 |
333 | CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
334 | CGContextRef context = CGBitmapContextCreate(pxdata, imageSize.width,
335 | imageSize.height, 8, 4*imageSize.width, rgbColorSpace,
336 | kCGImageAlphaNoneSkipFirst);
337 | NSParameterAssert(context);
338 |
339 | CGRect drawRect = CGRectMake(0 + (imageSize.width-CGImageGetWidth(baseImage))/2,
340 | (imageSize.height-CGImageGetHeight(baseImage))/2,
341 | CGImageGetWidth(baseImage),
342 | CGImageGetHeight(baseImage));
343 |
344 | CGContextDrawImage(context, drawRect, baseImage);
345 |
346 | CGContextBeginTransparencyLayer(context, nil);
347 | CGContextSetAlpha( context, alpha );
348 | CGContextDrawImage(context, drawRect, fadeInImage);
349 | CGContextEndTransparencyLayer(context);
350 |
351 | CGColorSpaceRelease(rgbColorSpace);
352 | CGContextRelease(context);
353 |
354 | CVPixelBufferUnlockBaseAddress(pxbuffer, 0);
355 |
356 | return pxbuffer;
357 | }
358 |
359 | + (BOOL)appendToAdapter:(AVAssetWriterInputPixelBufferAdaptor*)adaptor
360 | pixelBuffer:(CVPixelBufferRef)buffer
361 | atTime:(CMTime)presentTime
362 | withInput:(AVAssetWriterInput*)writerInput
363 | {
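     |     // Spin (with a tiny sleep) until the input can take another buffer;
     |     // appendPixelBuffer: fails if called while the input is not ready.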
364 | while (!writerInput.readyForMoreMediaData) {
365 | usleep(1);
366 | }
367 |
368 | return [adaptor appendPixelBuffer:buffer withPresentationTime:presentTime];
369 | }
370 |
371 |
372 |
373 |
374 | @end
375 |
--------------------------------------------------------------------------------
/WaterMarkDelete/WaterMarkDelete/VideoViewController.mm:
--------------------------------------------------------------------------------
1 | //
2 | //  VideoViewController.mm
3 | // WaterMarkDelete
4 | //
5 | // Created by WangYiming on 2018/4/10.
6 | // Copyright © 2018年 WangYiming. All rights reserved.
7 | //
8 |
9 | #import "VideoViewController.h"
10 | #import "ClipView.h"
11 | #import "FramesShowViewController.h"
12 | #import "UIImage+OpenCV.h"
13 | #import <AVKit/AVKit.h>
14 | #import "HJImagesToVideo.h"
15 | #import <Photos/Photos.h>
16 | @interface VideoViewController ()
17 | {
18 | AVPlayer *player;
19 | int second; // 获取视频总时长,单位秒
20 | CGRect rectInImage;
21 | BOOL rectDraw;
22 | }
23 | @property(strong,nonatomic) ClipView *clipView;
24 | @property(strong,nonatomic) UIView *videoView;
25 | @property(strong,nonatomic) AVPlayerViewController *playerVC;
26 | @property(assign,nonatomic) CGPoint startPoint;
27 | @property(assign,nonatomic) CGFloat factor_scale;
28 | @property(assign,nonatomic) CGPoint offsetImageToImageView;
29 | @property(strong,nonatomic) UIImage *image;
30 | @property(strong,nonatomic) AVAssetTrack *srcAudioTrack;
31 | @property(strong,nonatomic) NSURL *picsTovideoPath;
32 | @property(strong,nonatomic) NSMutableArray *imageArray;
33 | @property(strong,nonatomic) AVAsset *movieAsset;
34 |
35 | @property(strong,nonatomic) UIProgressView *progressView;
36 |
37 | @end
38 |
39 | @implementation VideoViewController
40 |
41 | - (void)viewDidLoad {
42 | [super viewDidLoad];
43 | // Do any additional setup after loading the view.
44 | CGFloat bottomheight = 80.0f;
45 | self.view.backgroundColor = [UIColor whiteColor];
46 | CGRect screen = [[UIScreen mainScreen]bounds];
47 | self.videoView = [[UIView alloc] initWithFrame:CGRectMake(0, self.navigationController.navigationBar.frame.size.height, screen.size.width, screen.size.height - self.navigationController.navigationBar.frame.size.height - bottomheight)];
48 | self.videoView.backgroundColor = [UIColor whiteColor];
49 | [self.view addSubview:self.videoView];
50 | self.navigationItem.title = @"视频水印去除";
51 | UIBarButtonItem *saveButton = [[UIBarButtonItem alloc] initWithBarButtonSystemItem:UIBarButtonSystemItemDone target:self action:@selector(onClickSave:)];
52 | saveButton.title = @"执行";
53 | self.navigationItem.rightBarButtonItem = saveButton;
54 | UIView *progressBottomView = [[UIView alloc] initWithFrame:CGRectMake(0, screen.size.height - bottomheight, screen.size.width, bottomheight)];
55 |
56 | self.progressView = [[UIProgressView alloc] initWithFrame:CGRectMake(20, 0, screen.size.width - 40, bottomheight)];
57 |     // Position the progress bar (horizontally centered)
58 |     self.progressView.layer.position = CGPointMake(screen.size.width/2,bottomheight/2);
59 |     // Use a transform to change the progress bar height (width unchanged, height 10x the default)
60 | self.progressView.transform = CGAffineTransformMakeScale(1.0, 10.0);
61 | [progressBottomView addSubview:self.progressView];
62 | [self.view addSubview:progressBottomView];
63 |
64 |     // Load the player
65 | player = [AVPlayer playerWithURL:self.videoUrl];
66 | self.playerVC = [[AVPlayerViewController alloc]init];
67 | self.playerVC.player = player;
68 | self.playerVC.view.frame = CGRectMake(0, 0, self.videoView.frame.size.width, self.videoView.frame.size.height);
69 | player.externalPlaybackVideoGravity = AVLayerVideoGravityResizeAspectFill;
70 | [self.videoView addSubview:self.playerVC.view];
71 | self.playerVC.showsPlaybackControls = NO;
72 |
73 | FramesShowViewController *framesShowCon = [[FramesShowViewController alloc] init];
74 | framesShowCon.videoUrl = self.videoUrl;
75 | //[self presentViewController:framesShowCon animated:YES completion:nil];
76 |     self.movieAsset = [AVAsset assetWithURL:self.videoUrl]; // videoUrl: the file path
77 |     second = (int)self.movieAsset.duration.value / self.movieAsset.duration.timescale; // total video duration, in seconds
78 |
79 |     //Grab the first frame
80 |     self.image = [self getVideoPreViewImage];
81 |     // //Get the audio track
82 | // AVAsset *srcAsset = [AVAsset assetWithURL:self.videoUrl];
83 | // NSArray *trackArray = [srcAsset tracksWithMediaType:AVMediaTypeAudio];
84 | // self.srcAudioTrack = [trackArray objectAtIndex:0];
85 | //self.imageArray = [[NSMutableArray alloc] init];
86 | //self.imageArray = nil;
87 | }
88 | - (void)viewWillAppear:(BOOL)animated{
89 | self.clipView = [[ClipView alloc] init];
90 | [self.videoView addSubview:self.clipView];
91 |
92 | UIPanGestureRecognizer *pan = [[UIPanGestureRecognizer alloc] initWithTarget:self action:@selector(pan:)];
93 | [self.videoView addGestureRecognizer:pan];
94 | self.videoView.userInteractionEnabled = YES;
95 | [self.navigationItem.rightBarButtonItem setEnabled:YES];
96 | [player play];
97 | }
98 | - (void)didReceiveMemoryWarning {
99 | [super didReceiveMemoryWarning];
100 | // Dispose of any resources that can be recreated.
101 | }
102 | #pragma mark --onClickSave
103 | -(void)onClickSave:(id)sender{
104 | if (rectDraw) {
105 | [self.navigationItem.rightBarButtonItem setEnabled:NO];
106 | CGRect waterMarkRect = rectInImage;
107 | [self.progressView setProgress:0.2];
108 |         //Process every frame
109 | [self splitVideo:self.videoUrl fps:30 splitCompleteBlock:^(BOOL success, NSMutableArray *splitimgs) {
110 | if (success && splitimgs.count != 0) {
111 | NSLog(@"----->> success");
112 | NSLog(@"---> splitimgs个数:%lu",(unsigned long)splitimgs.count);
113 | dispatch_async(dispatch_get_main_queue(), ^{
114 | [self.progressView setProgress:0.35];
115 | NSMutableArray *imageArrayFinished = [[NSMutableArray alloc] init];
116 | for (int i=0; i<[splitimgs count];) {
117 | UIImage *imageOir = [splitimgs objectAtIndex:i];
118 | UIImage *imageFinished = [imageOir WaterMarkDelete:waterMarkRect];
119 | [self.progressView setProgress:0.5];
120 | [imageArrayFinished addObject:imageFinished];
121 | imageFinished = nil;
122 | i = i + 1;
123 | }
124 |                     //Assemble the video
125 | [self fromPicsToVideo:imageArrayFinished];
126 | });
127 | }
128 | }];
129 | }
130 | }
131 |
132 | #pragma mark --panGesture
133 | -(void)pan:(UIPanGestureRecognizer*)panner{
134 | CGPoint endPoint = CGPointZero;
135 | if (panner.state == UIGestureRecognizerStateBegan) {
136 | self.startPoint = [self pickerPointJudge:self.videoView pointInView:[panner locationInView:self.videoView]];
137 | }
138 | else if (panner.state == UIGestureRecognizerStateChanged){
139 | endPoint = [self pickerPointJudge:self.videoView pointInView:[panner locationInView:self.videoView]];;
140 |
141 | CGFloat clipWidth = endPoint.x - self.startPoint.x;
142 | CGFloat clipHeight = endPoint.y - self.startPoint.y;
143 |
144 | self.clipView.frame = CGRectMake(self.startPoint.x, self.startPoint.y, clipWidth, clipHeight);
145 | }
146 | else if (panner.state == UIGestureRecognizerStateEnded){
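     |         // Map the selection from view coordinates back into source-image pixel
     |         // coordinates using the scale factor and letterbox offset from pickerPointJudge:.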
147 | rectInImage = CGRectMake((self.clipView.frame.origin.x - self.offsetImageToImageView.x)/ self.factor_scale, (self.clipView.frame.origin.y - self.offsetImageToImageView.y) / self.factor_scale, self.clipView.frame.size.width/ self.factor_scale, self.clipView.frame.size.height/ self.factor_scale);
148 | rectDraw = YES;
149 | // CGRect waterMarkRect = rectInImage;
150 |         // //Process every frame
151 | // [self splitVideo:self.videoUrl fps:30 splitCompleteBlock:^(BOOL success, NSMutableArray *splitimgs) {
152 | // if (success && splitimgs.count != 0) {
153 | // NSLog(@"----->> success");
154 | // NSLog(@"---> splitimgs个数:%lu",(unsigned long)splitimgs.count);
155 | // dispatch_async(dispatch_get_main_queue(), ^{
156 | // NSMutableArray *imageArrayFinished = [[NSMutableArray alloc] init];
157 | // for (int i=0; i<[splitimgs count];) {
158 | // UIImage *imageOir = [splitimgs objectAtIndex:i];
159 | // UIImage *imageFinished = [imageOir WaterMarkDelete:waterMarkRect];
160 | // [imageArrayFinished addObject:imageFinished];
161 | // imageFinished = nil;
162 | // i = i + 1;
163 | // }
164 |         //                //Assemble the video
165 | // [self fromPicsToVideo:imageArrayFinished];
166 | //
167 | // [self.clipView removeFromSuperview];
168 | // self.clipView = nil;
169 | // [self viewWillAppear:YES];
170 | // });
171 | // }
172 | // }];
173 |
174 |
175 |
176 |
177 |
178 |
179 | //[self frameProsess:rectInImage];
180 | //[self savepics];
181 | //[self fromPicsToVideo:self.imageArray];
182 |
183 | //[self addAudioToVideo:self.srcAudioTrack videoURL:self.picsTovideoPath];
184 |
185 | // [self.clipView removeFromSuperview];
186 | // self.clipView = nil;
187 | // [self viewWillAppear:YES];
188 | }
189 | }
190 | #pragma mark --Imagealgorithm
191 | -(void) frameProsess:(CGRect) waterMaskRect{
192 |     //Process every frame
193 | [self splitVideo:self.videoUrl fps:30 splitCompleteBlock:^(BOOL success, NSMutableArray *splitimgs) {
194 | if (success && splitimgs.count != 0) {
195 | NSLog(@"----->> success");
196 | NSLog(@"---> splitimgs个数:%lu",(unsigned long)splitimgs.count);
197 | NSMutableArray *imageArrayFinished = [[NSMutableArray alloc] init];
198 |             for (int i=0; i<[splitimgs count]; i++) {
199 | UIImage *imageFinished = [[splitimgs objectAtIndex:i] WaterMarkDelete:waterMaskRect];
200 | [imageArrayFinished addObject:imageFinished];
201 | }
202 |             //Assemble the video
203 | [self fromPicsToVideo:imageArrayFinished];
204 | }
205 | }];
206 | }
207 | - (void)splitVideo:(NSURL *)fileUrl fps:(float)fps splitCompleteBlock:(void(^)(BOOL success, NSMutableArray *splitimgs))splitCompleteBlock {
208 | if (!fileUrl) {
209 | return;
210 | }
211 | NSMutableArray *splitImages = [NSMutableArray array];
212 | NSDictionary *optDict = [NSDictionary dictionaryWithObject:[NSNumber numberWithBool:NO] forKey:AVURLAssetPreferPreciseDurationAndTimingKey];
213 | AVURLAsset *avasset = [[AVURLAsset alloc] initWithURL:fileUrl options:optDict];
214 |
215 |     CMTime cmtime = avasset.duration; //duration as a CMTime struct
216 |     Float64 durationSeconds = CMTimeGetSeconds(cmtime); //total length in seconds
217 |     NSMutableArray *times = [NSMutableArray array];
218 |     Float64 totalFrames = durationSeconds * fps; //total number of frames
219 | CMTime timeFrame;
220 | for (int i = 1; i <= totalFrames; i++) {
221 |         timeFrame = CMTimeMake(i, fps); //the i-th frame at the fps timescale
222 | NSValue *timeValue = [NSValue valueWithCMTime:timeFrame];
223 | [times addObject:timeValue];
224 | }
225 |     AVAssetImageGenerator *imgGenerator = [[AVAssetImageGenerator alloc] initWithAsset:avasset]; //the zero tolerances below prevent timing drift
226 | imgGenerator.requestedTimeToleranceBefore = kCMTimeZero;
227 | imgGenerator.requestedTimeToleranceAfter = kCMTimeZero;
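     |     // Zero tolerance forces the generator to decode the exact requested times,
     |     // which is slower but yields one distinct image per frame.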
228 |     NSInteger timesCount = [times count]; // extract an image for every frame time
229 | [imgGenerator generateCGImagesAsynchronouslyForTimes:times completionHandler:^(CMTime requestedTime, CGImageRef _Nullable image, CMTime actualTime, AVAssetImageGeneratorResult result, NSError * _Nullable error) {
230 | NSLog(@"current-----: %lld", requestedTime.value);
231 |         NSLog(@"timeScale----: %d",requestedTime.timescale); // timescale (the requested fps)
232 | BOOL isSuccess = NO;
233 | switch (result) {
234 | case AVAssetImageGeneratorCancelled:
235 | NSLog(@"Cancelled");
236 | break;
237 | case AVAssetImageGeneratorFailed:
238 | NSLog(@"Failed");
239 | break;
240 | case AVAssetImageGeneratorSucceeded: {
241 | UIImage *frameImg = [UIImage imageWithCGImage:image];
242 | UIImage *frameFit = [self reSizeImage:frameImg toSize:CGSizeMake((int)frameImg.size.width - (int)frameImg.size.width%16, (int)frameImg.size.height - (int)frameImg.size.height%16)];
243 | [splitImages addObject:frameFit];
244 | if (requestedTime.value == timesCount) {
245 | isSuccess = YES;
246 | NSLog(@"completed");
247 | }
248 | }
249 | break;
250 | }
251 | if (splitCompleteBlock) {
252 | splitCompleteBlock(isSuccess,splitImages);
253 | }
254 | }];
255 | }
256 | - (UIImage*) getVideoPreViewImage
257 | {
258 | AVURLAsset *asset = [[AVURLAsset alloc] initWithURL:self.videoUrl options:nil];
259 | AVAssetImageGenerator *gen = [[AVAssetImageGenerator alloc] initWithAsset:asset];
260 | gen.appliesPreferredTrackTransform = YES;
261 | CMTime time = CMTimeMakeWithSeconds(0.0, 600);
262 | NSError *error = nil;
263 | CMTime actualTime;
264 | CGImageRef image = [gen copyCGImageAtTime:time actualTime:&actualTime error:&error];
265 | UIImage *img = [[UIImage alloc] initWithCGImage:image];
266 | UIImage *imgFit = [self reSizeImage:img toSize:CGSizeMake((int)img.size.width - (int)img.size.width%16, (int)img.size.height - (int)img.size.height%16)];
267 | CGImageRelease(image);
268 | return imgFit;
269 | }
270 | - (CGPoint)pickerPointJudge:(UIView*)imageView pointInView:(CGPoint)point{
271 | CGPoint tempPoint = CGPointMake(0, 0);
272 | CGFloat factor_frame = imageView.frame.size.width/imageView.frame.size.height;
273 | CGFloat factor_image = self.image.size.width/self.image.size.height;
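     |     // Compare aspect ratios to decide whether the video is letterboxed vertically or
     |     // pillarboxed horizontally inside the view; factor_scale and the offset then map
     |     // touch points back into image pixel coordinates.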
274 |     if (factor_frame < factor_image) { //scale to fit the width
275 | self.factor_scale = imageView.frame.size.width/self.image.size.width;
276 | tempPoint.x = point.x;
277 | CGPoint offset = CGPointMake(0, 0.5*(imageView.frame.size.height - self.image.size.height * self.factor_scale));
278 | self.offsetImageToImageView = offset;
279 | if (point.y < 0.5*(imageView.frame.size.height - self.image.size.height * self.factor_scale)) {
280 | tempPoint.y = 0.5*(imageView.frame.size.height - self.image.size.height * self.factor_scale);
281 | }else if (point.y > 0.5*(imageView.frame.size.height + self.image.size.height * self.factor_scale)){
282 | tempPoint.y = 0.5*(imageView.frame.size.height + self.image.size.height * self.factor_scale);
283 | }
284 | else{
285 | tempPoint.y = point.y;
286 | }
287 | }else{
288 | self.factor_scale = imageView.frame.size.height/self.image.size.height;
289 | if (point.y > self.videoView.bounds.origin.y + self.videoView.frame.size.height) {
290 | point.y = self.videoView.bounds.origin.y + self.videoView.frame.size.height;
291 | }
292 | tempPoint.y = point.y;
293 | CGPoint offset = CGPointMake(0.5*(imageView.frame.size.width - self.image.size.width * self.factor_scale),0);
294 | self.offsetImageToImageView = offset;
295 | if (point.x < 0.5*(imageView.frame.size.width - self.image.size.width * self.factor_scale)) {
296 | tempPoint.x = 0.5*(imageView.frame.size.width - self.image.size.width * self.factor_scale);
297 | }else if (point.x > 0.5*(imageView.frame.size.width + self.image.size.width * self.factor_scale)){
298 | tempPoint.x = 0.5*(imageView.frame.size.width + self.image.size.width * self.factor_scale);
299 | }else{
300 | tempPoint.x = point.x;
301 | }
302 | }
303 | return tempPoint;
304 | }
305 | -(void)fromPicsToVideo:(NSMutableArray *)imageArray{
306 |     //Set the movie output path
307 | NSArray *paths =NSSearchPathForDirectoriesInDomains(NSCachesDirectory,NSUserDomainMask,YES);
308 | NSString *moviePath =[[paths objectAtIndex:0]stringByAppendingPathComponent:[NSString stringWithFormat:@"%@.mp4",@"temp"]];
309 | self.picsTovideoPath = [NSURL fileURLWithPath:moviePath];
310 | NSFileManager* fileManager=[NSFileManager defaultManager];
311 | if ([fileManager fileExistsAtPath:moviePath]) {
312 | NSLog(@" have");
313 | BOOL blDele= [fileManager removeItemAtPath:moviePath error:nil];
314 | if (blDele) {
315 | NSLog(@"dele success");
316 | }else {
317 | NSLog(@"dele fail");
318 | }
319 | }
320 |     //Define the output video size
321 |     CGSize size =CGSizeMake(self.image.size.width,self.image.size.height);
322 |     NSError *error =nil;
323 |     // unlink takes a UTF-8 encoded path
324 |     unlink([moviePath UTF8String]);
325 |     NSLog(@"path->%@",moviePath);
326 |     // AVFoundation's AVAssetWriter writes images and audio out as a complete video file
327 | AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:moviePath] fileType:AVFileTypeQuickTimeMovie error:&error];
328 | NSParameterAssert(videoWriter);
329 | if(error)
330 | NSLog(@"error =%@", [error localizedDescription]);
331 |     //Output settings: codec, width, height
332 | NSDictionary *videoSettings =[NSDictionary dictionaryWithObjectsAndKeys:AVVideoCodecTypeH264,AVVideoCodecKey,
333 | [NSNumber numberWithInt:size.width],AVVideoWidthKey,
334 | [NSNumber numberWithInt:size.height],AVVideoHeightKey,nil];
335 |
336 | AVAssetWriterInput *writerInput =[AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoSettings];
337 |
338 | NSDictionary*sourcePixelBufferAttributesDictionary =[NSDictionary dictionaryWithObjectsAndKeys:[NSNumber numberWithInt:kCVPixelFormatType_32ARGB],kCVPixelBufferPixelFormatTypeKey,nil];
339 |     // AVAssetWriterInputPixelBufferAdaptor provides a CVPixelBufferPool;
340 |     // allocating pixel buffers from that pool for writing output is usually
341 |     // more efficient than creating them with a separate pool.
342 | AVAssetWriterInputPixelBufferAdaptor *adaptor =[AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writerInput sourcePixelBufferAttributes:sourcePixelBufferAttributesDictionary];
343 |
344 | NSParameterAssert(writerInput);
345 | NSParameterAssert([videoWriter canAddInput:writerInput]);
346 |
347 | if ([videoWriter canAddInput:writerInput])
348 | {
349 | NSLog(@"videoWriter canAddInput:writerInput");
350 | }
351 | else
352 | {
353 | NSLog(@"videoWriter cannotAddInput:writerInput");
354 | }
355 |
356 | [videoWriter addInput:writerInput];
357 |
358 | [videoWriter startWriting];
359 | [videoWriter startSessionAtSourceTime:kCMTimeZero];
360 |
361 |     //Combine the processed images into a single video file
362 | int total_frame = second * 30;
363 | int frames = (int)self.movieAsset.duration.value;
364 | int step = frames/total_frame;
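     |     // Assumption: step maps the asset's native time units onto ~30 output frames per
     |     // second; each processed image is appended step times at a 600-tick timescale,
     |     // which comes out to 30 fps when the source asset's timescale is the common 600.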
365 | dispatch_queue_t dispatchQueue =dispatch_queue_create("mediaInputQueue",NULL);
366 | int __block frame =0;
367 | [writerInput requestMediaDataWhenReadyOnQueue:dispatchQueue usingBlock:^{
368 |
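     |         // AVFoundation pulls data: this block runs whenever the input can accept more
     |         // buffers; keep appending until every image has been written step times, then
     |         // mark the input finished and close the writer.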
369 | while([writerInput isReadyForMoreMediaData])
370 | {
371 | if(++frame >=[imageArray count] * step)
372 | {
373 | [writerInput markAsFinished];
374 | [videoWriter finishWritingWithCompletionHandler:^(){
375 | NSLog (@"finished writing");
376 | dispatch_async(dispatch_get_main_queue(), ^{
377 | [self.progressView setProgress:0.75];
378 | [self addAudioToVideo:self.srcAudioTrack videoURL:self.picsTovideoPath];
379 | });
380 | }];
381 | break;
382 | }
383 | CVPixelBufferRef buffer =NULL;
384 | int idx =frame / step;
385 | NSLog(@"idx==%d",idx);
386 | buffer = (CVPixelBufferRef)[self pixelBufferFromCGImage:[[imageArray objectAtIndex:idx] CGImage] size:size];
387 |
388 | if (buffer)
389 | {
390 |                 if(![adaptor appendPixelBuffer:buffer withPresentationTime:CMTimeMake(frame,600)])//the presentation timescale controls how many images play per second
391 | {
392 | NSLog(@"FAIL");
393 | }
394 | else
395 | {
396 | NSLog(@"OK");
397 | }
398 |
399 | CFRelease(buffer);
400 | }
401 | }
402 | }];
403 | }
404 | - (CVPixelBufferRef)pixelBufferFromCGImage:(CGImageRef)image size:(CGSize)size
405 | {
406 | NSDictionary *options =[NSDictionary dictionaryWithObjectsAndKeys:
407 | [NSNumber numberWithBool:YES],kCVPixelBufferCGImageCompatibilityKey,
408 | [NSNumber numberWithBool:YES],kCVPixelBufferCGBitmapContextCompatibilityKey,nil];
409 | CVPixelBufferRef pxbuffer =NULL;
410 | CVReturn status =CVPixelBufferCreate(kCFAllocatorDefault,size.width,size.height,kCVPixelFormatType_32ARGB,(__bridge CFDictionaryRef) options,&pxbuffer);
411 |
412 | NSParameterAssert(status ==kCVReturnSuccess && pxbuffer !=NULL);
413 |
414 | CVPixelBufferLockBaseAddress(pxbuffer,0);
415 |
416 | void *pxdata =CVPixelBufferGetBaseAddress(pxbuffer);
417 | NSParameterAssert(pxdata !=NULL);
418 | CGColorSpaceRef rgbColorSpace=CGColorSpaceCreateDeviceRGB();
419 |     // CGBitmapContextCreate builds a bitmap drawing context backed by the given memory block; everything drawn into it is written to that memory as bitmap data. A new bitmap context's pixel format is determined by three parameters: bits per component, color space, and alpha options
420 | CGContextRef context =CGBitmapContextCreate(pxdata,size.width,size.height,8,4*size.width,rgbColorSpace,kCGImageAlphaPremultipliedFirst);
421 | NSParameterAssert(context);
422 |
423 |     //Draw the image with CGContextDrawImage; set this up incorrectly and the video comes out flipped
424 |     // When a UIImage's CGImageRef is drawn into a CG context, the UIKit and Core Graphics y-axes point in opposite directions, so the image can end up upside down
425 | CGContextDrawImage(context,CGRectMake(0,0,CGImageGetWidth(image),CGImageGetHeight(image)), image);
426 |     // Release the color space
427 |     CGColorSpaceRelease(rgbColorSpace);
428 |     // Release the context
429 |     CGContextRelease(context);
430 |     // Unlock the pixel buffer
431 | CVPixelBufferUnlockBaseAddress(pxbuffer,0);
432 |
433 | return pxbuffer;
434 | }
435 |
436 | -(void)addAudioToVideo:(AVAssetTrack*)srcAudioTrack videoURL:(NSURL*)videoURL{
437 |     // MBProgressHUD prompt (disabled)
438 |     //[MBProgressHUD showMessage:@"正在处理中"];
439 |     // Output path
440 | NSArray *paths =NSSearchPathForDirectoriesInDomains(NSCachesDirectory,NSUserDomainMask,YES);
441 | NSString *outPutFilePath =[[paths objectAtIndex:0]stringByAppendingPathComponent:[NSString stringWithFormat:@"%@.mp4",@"merge"]];
442 | NSFileManager* fileManager=[NSFileManager defaultManager];
443 | if ([fileManager fileExistsAtPath:outPutFilePath]) {
444 | NSLog(@" have");
445 | BOOL blDele= [fileManager removeItemAtPath:outPutFilePath error:nil];
446 | if (blDele) {
447 | NSLog(@"dele success");
448 | }else {
449 | NSLog(@"dele fail");
450 | }
451 | }
452 |     // Destination URL for the merged file
453 |     NSURL *outputFileUrl = [NSURL fileURLWithPath:outPutFilePath];
454 |     // Start time
455 |     CMTime nextClistartTime = kCMTimeZero;
456 |     // Create a mutable audio/video composition
457 | AVMutableComposition *comosition = [AVMutableComposition composition];
458 |     // Video source
459 |     AVURLAsset *videoAsset = [[AVURLAsset alloc] initWithURL:self.picsTovideoPath options:nil];
460 |     // Video time range
461 |     CMTimeRange videoTimeRange = CMTimeRangeMake(kCMTimeZero, videoAsset.duration);
462 |     // Video track (kCMPersistentTrackID_Invalid = 0 lets the composition assign an ID)
463 |     AVMutableCompositionTrack *videoTrack = [comosition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
464 |     // Source video track
465 |     AVAssetTrack *videoAssetTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo] firstObject];
466 |     // Insert the source track's data into the mutable track
467 | [videoTrack insertTimeRange:videoTimeRange ofTrack:videoAssetTrack atTime:nextClistartTime error:nil];
468 |
469 |     //Audio source
470 |     // The clip is short, so the video's duration is reused for the audio; add a proper check if this needs to be automated
471 |     CMTimeRange audioTimeRange = videoTimeRange;
472 |     // Audio track
473 |     AVMutableCompositionTrack *audioTrack = [comosition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
474 |     // Insert into the composition track
475 | AVAsset *srcAsset = [AVAsset assetWithURL:self.videoUrl];
476 | NSArray *trackArray = [srcAsset tracksWithMediaType:AVMediaTypeAudio];
477 | [audioTrack insertTimeRange:audioTimeRange ofTrack:[trackArray objectAtIndex:0] atTime:nextClistartTime error:nil];
478 |
479 |     // Create an export session
480 |     AVAssetExportSession *assetExport = [[AVAssetExportSession alloc] initWithAsset:comosition presetName:AVAssetExportPresetMediumQuality];
481 |     // Output file type
482 |     assetExport.outputFileType = AVFileTypeQuickTimeMovie;
483 |     // Output URL
484 |     assetExport.outputURL = outputFileUrl;
485 |     // Optimize for network use
486 |     assetExport.shouldOptimizeForNetworkUse = YES;
487 |     // Export and handle completion
488 | [assetExport exportAsynchronouslyWithCompletionHandler:^{
489 | switch ([assetExport status]) {
490 | case AVAssetExportSessionStatusFailed: {
491 | NSLog(@"合成失败:%@",[[assetExport error] description]);
492 | } break;
493 | case AVAssetExportSessionStatusCancelled: {
494 | } break;
495 | case AVAssetExportSessionStatusCompleted: {
496 | NSLog(@"合成成功");
497 | [self saveVideo:outputFileUrl];
498 | dispatch_async(dispatch_get_main_queue(), ^{
499 | [self.progressView setProgress:1.0];
500 | [self.clipView removeFromSuperview];
501 | self.clipView = nil;
502 | [self viewWillAppear:YES];
503 | });
504 | } break;
505 | default: {
506 | break;
507 | } break;
508 | }
509 | // });
510 | }];
511 | }
512 | -(void)saveVideo:(NSURL*)videoURL{
513 |
514 | //UISaveVideoAtPathToSavedPhotosAlbum([videoURL absoluteString], self, @selector(video:didFinishSavingWithError:contextInfo:), nil);
515 | [[PHPhotoLibrary sharedPhotoLibrary] performChanges:^{
516 | [PHAssetChangeRequest creationRequestForAssetFromVideoAtFileURL:videoURL];
517 | } completionHandler:^(BOOL success, NSError * _Nullable error) {
518 | if (success) {
519 | NSLog(@"保存成功");
520 | UIAlertController * alert = [UIAlertController alertControllerWithTitle:@"提示" message:@"视频保存成功" preferredStyle:UIAlertControllerStyleAlert];
521 | [alert addAction:[UIAlertAction actionWithTitle:@"确定" style:UIAlertActionStyleDefault handler:^(UIAlertAction * _Nonnull action) {
522 | NSFileManager* fileManager=[NSFileManager defaultManager];
523 | BOOL blDele= [fileManager removeItemAtURL:videoURL error:nil];
524 | if (blDele) {
525 | NSLog(@"dele1 success");
526 | }else {
527 | NSLog(@"dele1 fail");
528 | }
529 | blDele = [fileManager removeItemAtURL:self.picsTovideoPath error:nil];
530 | if (blDele) {
531 | NSLog(@"dele2 success");
532 | }else {
533 | NSLog(@"dele2 fail");
534 | }
535 | }]];
536 | [self presentViewController:alert animated:true completion:nil];
537 | }
538 |
539 | if (error) {
540 | NSLog(@"%@",error);
541 | NSLog(@"%@",error.description);
542 | UIAlertController * alert = [UIAlertController alertControllerWithTitle:@"提示" message:@"视频保存失败" preferredStyle:UIAlertControllerStyleAlert];
543 |
544 | [alert addAction:[UIAlertAction actionWithTitle:@"确定" style:UIAlertActionStyleDefault handler:^(UIAlertAction * _Nonnull action) {
545 |
546 | }]];
547 | [self presentViewController:alert animated:true completion:nil];
548 | }
549 | }];
550 | // ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
551 | // [library writeVideoAtPathToSavedPhotosAlbum:videoURL completionBlock:^(NSURL *assetURL, NSError *error) {
552 | // if (error) {
553 | //
554 | // NSLog(@"Save video fail:%@",error);
555 | //
556 | // } else {
557 | //
558 | // NSLog(@"Save video succeed.");
559 | //
560 | // }
561 | //
562 | // }];
563 | }
564 | // Video save completion callback
565 | - (void)video:(NSString *)videoPath didFinishSavingWithError:(NSError *)error contextInfo: (void *)contextInfo
566 | {
567 |
568 | if (error == nil) {
569 |
570 | UIAlertController * alert = [UIAlertController alertControllerWithTitle:@"提示" message:@"视频保存成功" preferredStyle:UIAlertControllerStyleAlert];
571 |
572 | [alert addAction:[UIAlertAction actionWithTitle:@"确定" style:UIAlertActionStyleDefault handler:^(UIAlertAction * _Nonnull action) {
573 |
574 | }]];
575 | [self presentViewController:alert animated:true completion:nil];
576 |
577 | }else{
578 | NSLog(@"%@",error.description);
579 | UIAlertController * alert = [UIAlertController alertControllerWithTitle:@"提示" message:@"视频保存失败" preferredStyle:UIAlertControllerStyleAlert];
580 |
581 | [alert addAction:[UIAlertAction actionWithTitle:@"确定" style:UIAlertActionStyleDefault handler:^(UIAlertAction * _Nonnull action) {
582 |
583 | }]];
584 | [self presentViewController:alert animated:true completion:nil];
585 | }
586 | }
587 | - (UIImage *)reSizeImage:(UIImage *)image toSize:(CGSize)reSize
588 |
589 | {
590 | UIGraphicsBeginImageContext(CGSizeMake(reSize.width, reSize.height));
591 | [image drawInRect:CGRectMake(0, 0, reSize.width, reSize.height)];
592 | UIImage *reSizeImage = UIGraphicsGetImageFromCurrentImageContext();
593 | UIGraphicsEndImageContext();
594 |
595 | return reSizeImage;
596 |
597 | }
598 | //Save the images to disk
599 | -(void)savepics{
600 | NSArray *paths =NSSearchPathForDirectoriesInDomains(NSCachesDirectory,NSUserDomainMask,YES);
601 | for (int i = 0; i < self.imageArray.count; i++)
602 | {
603 | UIImage * imgsave = self.imageArray[i];
604 | NSString *Pathimg =[[paths objectAtIndex:0]stringByAppendingPathComponent:[NSString stringWithFormat:@"%d.png",i]];
605 | [UIImagePNGRepresentation(imgsave) writeToFile:Pathimg atomically:YES];
606 | }
607 | }
608 | @end
609 |
--------------------------------------------------------------------------------
/WaterMarkDelete/WaterMarkDelete.xcodeproj/project.pbxproj:
--------------------------------------------------------------------------------
1 | // !$*UTF8*$!
2 | {
3 | archiveVersion = 1;
4 | classes = {
5 | };
6 | objectVersion = 50;
7 | objects = {
8 |
9 | /* Begin PBXBuildFile section */
10 | 02FFC9287830357AE6912A7D /* Pods_WaterMarkDeleteTests.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = A485555A54FF28FBBB17E6B9 /* Pods_WaterMarkDeleteTests.framework */; };
11 | 3E410AE020775091006DA214 /* AppDelegate.m in Sources */ = {isa = PBXBuildFile; fileRef = 3E410ADF20775091006DA214 /* AppDelegate.m */; };
12 | 3E410AE620775092006DA214 /* Main.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 3E410AE420775092006DA214 /* Main.storyboard */; };
13 | 3E410AE820775099006DA214 /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 3E410AE720775099006DA214 /* Assets.xcassets */; };
14 | 3E410AEB20775099006DA214 /* LaunchScreen.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 3E410AE920775099006DA214 /* LaunchScreen.storyboard */; };
15 | 3E410AEE20775099006DA214 /* main.m in Sources */ = {isa = PBXBuildFile; fileRef = 3E410AED20775099006DA214 /* main.m */; };
16 | 3E410AF820775099006DA214 /* WaterMarkDeleteTests.m in Sources */ = {isa = PBXBuildFile; fileRef = 3E410AF720775099006DA214 /* WaterMarkDeleteTests.m */; };
17 | 3E410B0320775099006DA214 /* WaterMarkDeleteUITests.m in Sources */ = {isa = PBXBuildFile; fileRef = 3E410B0220775099006DA214 /* WaterMarkDeleteUITests.m */; };
18 | 3E410B12207750D8006DA214 /* RootViewController.m in Sources */ = {isa = PBXBuildFile; fileRef = 3E410B11207750D8006DA214 /* RootViewController.m */; };
19 | 3E7FEF44207F08CE00AF773D /* HJImagesToVideo.m in Sources */ = {isa = PBXBuildFile; fileRef = 3E7FEF42207F08CE00AF773D /* HJImagesToVideo.m */; };
20 | 3E8A572C2079E93E00DDA2E0 /* libc++.tbd in Frameworks */ = {isa = PBXBuildFile; fileRef = 3E8A572B2079E93E00DDA2E0 /* libc++.tbd */; };
21 | 3E8A572E2079E94600DDA2E0 /* AVFoundation.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 3E8A572D2079E94600DDA2E0 /* AVFoundation.framework */; };
22 | 3E8A57302079E95000DDA2E0 /* CoreImage.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 3E8A572F2079E94F00DDA2E0 /* CoreImage.framework */; };
23 | 3E8A57322079E96300DDA2E0 /* CoreGraphics.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 3E8A57312079E96300DDA2E0 /* CoreGraphics.framework */; };
24 | 3E8A57342079E96A00DDA2E0 /* QuartzCore.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 3E8A57332079E96A00DDA2E0 /* QuartzCore.framework */; };
25 | 3E8A57362079E97200DDA2E0 /* Accelerate.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 3E8A57352079E97200DDA2E0 /* Accelerate.framework */; };
26 | 3E8A57382079E97C00DDA2E0 /* CoreVideo.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 3E8A57372079E97C00DDA2E0 /* CoreVideo.framework */; };
27 | 3E8A573A2079E98500DDA2E0 /* CoreMedia.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 3E8A57392079E98500DDA2E0 /* CoreMedia.framework */; };
28 | 3E8A573C2079E98E00DDA2E0 /* AssetsLibrary.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 3E8A573B2079E98E00DDA2E0 /* AssetsLibrary.framework */; };
29 | 3E8A57422079ED1E00DDA2E0 /* UIImage+OpenCV.mm in Sources */ = {isa = PBXBuildFile; fileRef = 3E8A57412079ED1E00DDA2E0 /* UIImage+OpenCV.mm */; };
30 | 3E8A574C207A182500DDA2E0 /* ImageIO.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 3E8A574B207A182500DDA2E0 /* ImageIO.framework */; };
31 | 3E8A574E207A183C00DDA2E0 /* libz.tbd in Frameworks */ = {isa = PBXBuildFile; fileRef = 3E8A574D207A183C00DDA2E0 /* libz.tbd */; };
32 | 3E9EF9EC2077886800B18DE4 /* PictureViewController.mm in Sources */ = {isa = PBXBuildFile; fileRef = 3E9EF9EB2077886800B18DE4 /* PictureViewController.mm */; };
33 | 3E9EF9EF20779BE000B18DE4 /* ClipView.m in Sources */ = {isa = PBXBuildFile; fileRef = 3E9EF9EE20779BE000B18DE4 /* ClipView.m */; };
34 | 3EE813E4207CC40400AEA5CD /* VideoViewController.mm in Sources */ = {isa = PBXBuildFile; fileRef = 3EE813E3207CC40400AEA5CD /* VideoViewController.mm */; };
35 | 3EE813E7207CEC5900AEA5CD /* FramesShowViewController.m in Sources */ = {isa = PBXBuildFile; fileRef = 3EE813E6207CEC5900AEA5CD /* FramesShowViewController.m */; };
36 | 7E592CA47C8387039BC92046 /* Pods_WaterMarkDeleteUITests.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 4211EE8D3460798B5048A924 /* Pods_WaterMarkDeleteUITests.framework */; };
37 | 859EA409CB9A2C5B32AF354A /* Pods_WaterMarkDelete.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 71EC092D29E47BEBF873E927 /* Pods_WaterMarkDelete.framework */; };
38 | /* End PBXBuildFile section */
39 |
40 | /* Begin PBXContainerItemProxy section */
41 | 3E410AF420775099006DA214 /* PBXContainerItemProxy */ = {
42 | isa = PBXContainerItemProxy;
43 | containerPortal = 3E410AD320775091006DA214 /* Project object */;
44 | proxyType = 1;
45 | remoteGlobalIDString = 3E410ADA20775091006DA214;
46 | remoteInfo = WaterMarkDelete;
47 | };
48 | 3E410AFF20775099006DA214 /* PBXContainerItemProxy */ = {
49 | isa = PBXContainerItemProxy;
50 | containerPortal = 3E410AD320775091006DA214 /* Project object */;
51 | proxyType = 1;
52 | remoteGlobalIDString = 3E410ADA20775091006DA214;
53 | remoteInfo = WaterMarkDelete;
54 | };
55 | /* End PBXContainerItemProxy section */
56 |
57 | /* Begin PBXFileReference section */
58 | 3E410ADB20775091006DA214 /* WaterMarkDelete.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = WaterMarkDelete.app; sourceTree = BUILT_PRODUCTS_DIR; };
59 | 3E410ADE20775091006DA214 /* AppDelegate.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = AppDelegate.h; sourceTree = ""; };
60 | 3E410ADF20775091006DA214 /* AppDelegate.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = AppDelegate.m; sourceTree = ""; };
61 | 3E410AE520775092006DA214 /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/Main.storyboard; sourceTree = ""; };
62 | 3E410AE720775099006DA214 /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = ""; };
63 | 3E410AEA20775099006DA214 /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/LaunchScreen.storyboard; sourceTree = ""; };
64 | 3E410AEC20775099006DA214 /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = ""; };
65 | 3E410AED20775099006DA214 /* main.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = main.m; sourceTree = ""; };
66 | 3E410AF320775099006DA214 /* WaterMarkDeleteTests.xctest */ = {isa = PBXFileReference; explicitFileType = wrapper.cfbundle; includeInIndex = 0; path = WaterMarkDeleteTests.xctest; sourceTree = BUILT_PRODUCTS_DIR; };
67 | 3E410AF720775099006DA214 /* WaterMarkDeleteTests.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = WaterMarkDeleteTests.m; sourceTree = ""; };
68 | 3E410AF920775099006DA214 /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = ""; };
69 | 3E410AFE20775099006DA214 /* WaterMarkDeleteUITests.xctest */ = {isa = PBXFileReference; explicitFileType = wrapper.cfbundle; includeInIndex = 0; path = WaterMarkDeleteUITests.xctest; sourceTree = BUILT_PRODUCTS_DIR; };
70 | 3E410B0220775099006DA214 /* WaterMarkDeleteUITests.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = WaterMarkDeleteUITests.m; sourceTree = ""; };
71 | 3E410B0420775099006DA214 /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = ""; };
72 | 3E410B10207750D8006DA214 /* RootViewController.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = RootViewController.h; sourceTree = ""; };
73 | 3E410B11207750D8006DA214 /* RootViewController.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = RootViewController.m; sourceTree = ""; };
74 | 3E7FEF42207F08CE00AF773D /* HJImagesToVideo.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = HJImagesToVideo.m; sourceTree = ""; };
75 | 3E7FEF43207F08CE00AF773D /* HJImagesToVideo.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = HJImagesToVideo.h; sourceTree = ""; };
76 | 3E8A572B2079E93E00DDA2E0 /* libc++.tbd */ = {isa = PBXFileReference; lastKnownFileType = "sourcecode.text-based-dylib-definition"; name = "libc++.tbd"; path = "usr/lib/libc++.tbd"; sourceTree = SDKROOT; };
77 | 3E8A572D2079E94600DDA2E0 /* AVFoundation.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = AVFoundation.framework; path = System/Library/Frameworks/AVFoundation.framework; sourceTree = SDKROOT; };
78 | 3E8A572F2079E94F00DDA2E0 /* CoreImage.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = CoreImage.framework; path = System/Library/Frameworks/CoreImage.framework; sourceTree = SDKROOT; };
79 | 3E8A57312079E96300DDA2E0 /* CoreGraphics.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = CoreGraphics.framework; path = System/Library/Frameworks/CoreGraphics.framework; sourceTree = SDKROOT; };
80 | 3E8A57332079E96A00DDA2E0 /* QuartzCore.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = QuartzCore.framework; path = System/Library/Frameworks/QuartzCore.framework; sourceTree = SDKROOT; };
81 | 3E8A57352079E97200DDA2E0 /* Accelerate.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = Accelerate.framework; path = System/Library/Frameworks/Accelerate.framework; sourceTree = SDKROOT; };
82 | 3E8A57372079E97C00DDA2E0 /* CoreVideo.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = CoreVideo.framework; path = System/Library/Frameworks/CoreVideo.framework; sourceTree = SDKROOT; };
83 | 3E8A57392079E98500DDA2E0 /* CoreMedia.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = CoreMedia.framework; path = System/Library/Frameworks/CoreMedia.framework; sourceTree = SDKROOT; };
84 | 3E8A573B2079E98E00DDA2E0 /* AssetsLibrary.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = AssetsLibrary.framework; path = System/Library/Frameworks/AssetsLibrary.framework; sourceTree = SDKROOT; };
85 | 3E8A57402079ED1E00DDA2E0 /* UIImage+OpenCV.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = "UIImage+OpenCV.h"; sourceTree = ""; };
86 | 3E8A57412079ED1E00DDA2E0 /* UIImage+OpenCV.mm */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.objcpp; path = "UIImage+OpenCV.mm"; sourceTree = ""; };
87 | 3E8A5743207A0B2000DDA2E0 /* libc++.1.tbd */ = {isa = PBXFileReference; lastKnownFileType = "sourcecode.text-based-dylib-definition"; name = "libc++.1.tbd"; path = "usr/lib/libc++.1.tbd"; sourceTree = SDKROOT; };
88 | 3E8A574B207A182500DDA2E0 /* ImageIO.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = ImageIO.framework; path = System/Library/Frameworks/ImageIO.framework; sourceTree = SDKROOT; };
89 | 3E8A574D207A183C00DDA2E0 /* libz.tbd */ = {isa = PBXFileReference; lastKnownFileType = "sourcecode.text-based-dylib-definition"; name = libz.tbd; path = usr/lib/libz.tbd; sourceTree = SDKROOT; };
90 | 3E8A574F207A258500DDA2E0 /* WaterMarkDelete.pch */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = WaterMarkDelete.pch; sourceTree = ""; };
91 | 3E9EF9EA2077886700B18DE4 /* PictureViewController.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = PictureViewController.h; sourceTree = ""; };
92 | 3E9EF9EB2077886800B18DE4 /* PictureViewController.mm */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.objcpp; path = PictureViewController.mm; sourceTree = ""; };
93 | 3E9EF9ED20779BE000B18DE4 /* ClipView.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = ClipView.h; sourceTree = ""; };
94 | 3E9EF9EE20779BE000B18DE4 /* ClipView.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = ClipView.m; sourceTree = ""; };
95 | 3EE813E2207CC40400AEA5CD /* VideoViewController.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = VideoViewController.h; sourceTree = ""; };
96 | 3EE813E3207CC40400AEA5CD /* VideoViewController.mm */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.cpp.objcpp; path = VideoViewController.mm; sourceTree = ""; };
97 | 3EE813E5207CEC5900AEA5CD /* FramesShowViewController.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = FramesShowViewController.h; sourceTree = ""; };
98 | 3EE813E6207CEC5900AEA5CD /* FramesShowViewController.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = FramesShowViewController.m; sourceTree = ""; };
99 | 3EE813E8207E0BF000AEA5CD /* MBProgressHUD.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; path = MBProgressHUD.framework; sourceTree = BUILT_PRODUCTS_DIR; };
100 | 4211EE8D3460798B5048A924 /* Pods_WaterMarkDeleteUITests.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; includeInIndex = 0; path = Pods_WaterMarkDeleteUITests.framework; sourceTree = BUILT_PRODUCTS_DIR; };
101 | 4BAAA442F94A7A24C6A3307E /* Pods-WaterMarkDeleteTests.debug.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-WaterMarkDeleteTests.debug.xcconfig"; path = "Pods/Target Support Files/Pods-WaterMarkDeleteTests/Pods-WaterMarkDeleteTests.debug.xcconfig"; sourceTree = ""; };
102 | 600D02087DEA53AE1D7EB913 /* Pods-WaterMarkDeleteUITests.debug.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-WaterMarkDeleteUITests.debug.xcconfig"; path = "Pods/Target Support Files/Pods-WaterMarkDeleteUITests/Pods-WaterMarkDeleteUITests.debug.xcconfig"; sourceTree = ""; };
103 | 71EC092D29E47BEBF873E927 /* Pods_WaterMarkDelete.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; includeInIndex = 0; path = Pods_WaterMarkDelete.framework; sourceTree = BUILT_PRODUCTS_DIR; };
104 | 7AB8ED2930F4DEC749E317FD /* Pods-WaterMarkDeleteUITests.release.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-WaterMarkDeleteUITests.release.xcconfig"; path = "Pods/Target Support Files/Pods-WaterMarkDeleteUITests/Pods-WaterMarkDeleteUITests.release.xcconfig"; sourceTree = ""; };
105 | 959066A9B7D90C48302B0FFD /* Pods-WaterMarkDelete.debug.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-WaterMarkDelete.debug.xcconfig"; path = "Pods/Target Support Files/Pods-WaterMarkDelete/Pods-WaterMarkDelete.debug.xcconfig"; sourceTree = ""; };
106 | A485555A54FF28FBBB17E6B9 /* Pods_WaterMarkDeleteTests.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; includeInIndex = 0; path = Pods_WaterMarkDeleteTests.framework; sourceTree = BUILT_PRODUCTS_DIR; };
107 | CBDE110B2912D1A891931DAA /* Pods-WaterMarkDeleteTests.release.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-WaterMarkDeleteTests.release.xcconfig"; path = "Pods/Target Support Files/Pods-WaterMarkDeleteTests/Pods-WaterMarkDeleteTests.release.xcconfig"; sourceTree = ""; };
108 | CDD8DF8C97DEF91FB4114BE1 /* Pods-WaterMarkDelete.release.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-WaterMarkDelete.release.xcconfig"; path = "Pods/Target Support Files/Pods-WaterMarkDelete/Pods-WaterMarkDelete.release.xcconfig"; sourceTree = ""; };
109 | /* End PBXFileReference section */
110 |
111 | /* Begin PBXFrameworksBuildPhase section */
112 | 3E410AD820775091006DA214 /* Frameworks */ = {
113 | isa = PBXFrameworksBuildPhase;
114 | buildActionMask = 2147483647;
115 | files = (
116 | 3E8A574E207A183C00DDA2E0 /* libz.tbd in Frameworks */,
117 | 3E8A574C207A182500DDA2E0 /* ImageIO.framework in Frameworks */,
118 | 3E8A573C2079E98E00DDA2E0 /* AssetsLibrary.framework in Frameworks */,
119 | 3E8A573A2079E98500DDA2E0 /* CoreMedia.framework in Frameworks */,
120 | 3E8A57382079E97C00DDA2E0 /* CoreVideo.framework in Frameworks */,
121 | 3E8A57362079E97200DDA2E0 /* Accelerate.framework in Frameworks */,
122 | 3E8A57342079E96A00DDA2E0 /* QuartzCore.framework in Frameworks */,
123 | 3E8A57322079E96300DDA2E0 /* CoreGraphics.framework in Frameworks */,
124 | 3E8A57302079E95000DDA2E0 /* CoreImage.framework in Frameworks */,
125 | 3E8A572E2079E94600DDA2E0 /* AVFoundation.framework in Frameworks */,
126 | 3E8A572C2079E93E00DDA2E0 /* libc++.tbd in Frameworks */,
127 | 859EA409CB9A2C5B32AF354A /* Pods_WaterMarkDelete.framework in Frameworks */,
128 | );
129 | runOnlyForDeploymentPostprocessing = 0;
130 | };
131 | 3E410AF020775099006DA214 /* Frameworks */ = {
132 | isa = PBXFrameworksBuildPhase;
133 | buildActionMask = 2147483647;
134 | files = (
135 | 02FFC9287830357AE6912A7D /* Pods_WaterMarkDeleteTests.framework in Frameworks */,
136 | );
137 | runOnlyForDeploymentPostprocessing = 0;
138 | };
139 | 3E410AFB20775099006DA214 /* Frameworks */ = {
140 | isa = PBXFrameworksBuildPhase;
141 | buildActionMask = 2147483647;
142 | files = (
143 | 7E592CA47C8387039BC92046 /* Pods_WaterMarkDeleteUITests.framework in Frameworks */,
144 | );
145 | runOnlyForDeploymentPostprocessing = 0;
146 | };
147 | /* End PBXFrameworksBuildPhase section */
148 |
149 | /* Begin PBXGroup section */
150 | 3E410AD220775091006DA214 = {
151 | isa = PBXGroup;
152 | children = (
153 | 3E410ADD20775091006DA214 /* WaterMarkDelete */,
154 | 3E410AF620775099006DA214 /* WaterMarkDeleteTests */,
155 | 3E410B0120775099006DA214 /* WaterMarkDeleteUITests */,
156 | 3E410ADC20775091006DA214 /* Products */,
157 | B991F3CF7352BB29FF1BECE2 /* Pods */,
158 | BB9473284FE4CB292D1B9F23 /* Frameworks */,
159 | );
160 | sourceTree = "";
161 | };
162 | 3E410ADC20775091006DA214 /* Products */ = {
163 | isa = PBXGroup;
164 | children = (
165 | 3E410ADB20775091006DA214 /* WaterMarkDelete.app */,
166 | 3E410AF320775099006DA214 /* WaterMarkDeleteTests.xctest */,
167 | 3E410AFE20775099006DA214 /* WaterMarkDeleteUITests.xctest */,
168 | );
169 | name = Products;
170 | sourceTree = "";
171 | };
172 | 3E410ADD20775091006DA214 /* WaterMarkDelete */ = {
173 | isa = PBXGroup;
174 | children = (
175 | 3E7FEF41207F08CE00AF773D /* HJImagesToVideo */,
176 | 3E410B10207750D8006DA214 /* RootViewController.h */,
177 | 3E410B11207750D8006DA214 /* RootViewController.m */,
178 | 3E410ADE20775091006DA214 /* AppDelegate.h */,
179 | 3E410ADF20775091006DA214 /* AppDelegate.m */,
180 | 3E410AE420775092006DA214 /* Main.storyboard */,
181 | 3E410AE720775099006DA214 /* Assets.xcassets */,
182 | 3E410AE920775099006DA214 /* LaunchScreen.storyboard */,
183 | 3E410AEC20775099006DA214 /* Info.plist */,
184 | 3E410AED20775099006DA214 /* main.m */,
185 | 3E9EF9EA2077886700B18DE4 /* PictureViewController.h */,
186 | 3E9EF9EB2077886800B18DE4 /* PictureViewController.mm */,
187 | 3E9EF9ED20779BE000B18DE4 /* ClipView.h */,
188 | 3E9EF9EE20779BE000B18DE4 /* ClipView.m */,
189 | 3E8A57402079ED1E00DDA2E0 /* UIImage+OpenCV.h */,
190 | 3E8A57412079ED1E00DDA2E0 /* UIImage+OpenCV.mm */,
191 | 3E8A574F207A258500DDA2E0 /* WaterMarkDelete.pch */,
192 | 3EE813E2207CC40400AEA5CD /* VideoViewController.h */,
193 | 3EE813E3207CC40400AEA5CD /* VideoViewController.mm */,
194 | 3EE813E5207CEC5900AEA5CD /* FramesShowViewController.h */,
195 | 3EE813E6207CEC5900AEA5CD /* FramesShowViewController.m */,
196 | );
197 | path = WaterMarkDelete;
198 | sourceTree = "";
199 | };
200 | 3E410AF620775099006DA214 /* WaterMarkDeleteTests */ = {
201 | isa = PBXGroup;
202 | children = (
203 | 3E410AF720775099006DA214 /* WaterMarkDeleteTests.m */,
204 | 3E410AF920775099006DA214 /* Info.plist */,
205 | );
206 | path = WaterMarkDeleteTests;
207 | sourceTree = "";
208 | };
209 | 3E410B0120775099006DA214 /* WaterMarkDeleteUITests */ = {
210 | isa = PBXGroup;
211 | children = (
212 | 3E410B0220775099006DA214 /* WaterMarkDeleteUITests.m */,
213 | 3E410B0420775099006DA214 /* Info.plist */,
214 | );
215 | path = WaterMarkDeleteUITests;
216 | sourceTree = "";
217 | };
218 | 3E7FEF41207F08CE00AF773D /* HJImagesToVideo */ = {
219 | isa = PBXGroup;
220 | children = (
221 | 3E7FEF42207F08CE00AF773D /* HJImagesToVideo.m */,
222 | 3E7FEF43207F08CE00AF773D /* HJImagesToVideo.h */,
223 | );
224 | path = HJImagesToVideo;
225 | sourceTree = "";
226 | };
227 | B991F3CF7352BB29FF1BECE2 /* Pods */ = {
228 | isa = PBXGroup;
229 | children = (
230 | 959066A9B7D90C48302B0FFD /* Pods-WaterMarkDelete.debug.xcconfig */,
231 | CDD8DF8C97DEF91FB4114BE1 /* Pods-WaterMarkDelete.release.xcconfig */,
232 | 4BAAA442F94A7A24C6A3307E /* Pods-WaterMarkDeleteTests.debug.xcconfig */,
233 | CBDE110B2912D1A891931DAA /* Pods-WaterMarkDeleteTests.release.xcconfig */,
234 | 600D02087DEA53AE1D7EB913 /* Pods-WaterMarkDeleteUITests.debug.xcconfig */,
235 | 7AB8ED2930F4DEC749E317FD /* Pods-WaterMarkDeleteUITests.release.xcconfig */,
236 | );
237 | name = Pods;
238 | sourceTree = "";
239 | };
240 | BB9473284FE4CB292D1B9F23 /* Frameworks */ = {
241 | isa = PBXGroup;
242 | children = (
243 | 3EE813E8207E0BF000AEA5CD /* MBProgressHUD.framework */,
244 | 3E8A574D207A183C00DDA2E0 /* libz.tbd */,
245 | 3E8A574B207A182500DDA2E0 /* ImageIO.framework */,
246 | 3E8A5743207A0B2000DDA2E0 /* libc++.1.tbd */,
247 | 3E8A573B2079E98E00DDA2E0 /* AssetsLibrary.framework */,
248 | 3E8A57392079E98500DDA2E0 /* CoreMedia.framework */,
249 | 3E8A57372079E97C00DDA2E0 /* CoreVideo.framework */,
250 | 3E8A57352079E97200DDA2E0 /* Accelerate.framework */,
251 | 3E8A57332079E96A00DDA2E0 /* QuartzCore.framework */,
252 | 3E8A57312079E96300DDA2E0 /* CoreGraphics.framework */,
253 | 3E8A572F2079E94F00DDA2E0 /* CoreImage.framework */,
254 | 3E8A572D2079E94600DDA2E0 /* AVFoundation.framework */,
255 | 3E8A572B2079E93E00DDA2E0 /* libc++.tbd */,
256 | 71EC092D29E47BEBF873E927 /* Pods_WaterMarkDelete.framework */,
257 | A485555A54FF28FBBB17E6B9 /* Pods_WaterMarkDeleteTests.framework */,
258 | 4211EE8D3460798B5048A924 /* Pods_WaterMarkDeleteUITests.framework */,
259 | );
260 | name = Frameworks;
261 | sourceTree = "";
262 | };
263 | /* End PBXGroup section */
264 |
265 | /* Begin PBXNativeTarget section */
266 | 3E410ADA20775091006DA214 /* WaterMarkDelete */ = {
267 | isa = PBXNativeTarget;
268 | buildConfigurationList = 3E410B0720775099006DA214 /* Build configuration list for PBXNativeTarget "WaterMarkDelete" */;
269 | buildPhases = (
270 | 6F49885E520C9B55AFCE23CC /* [CP] Check Pods Manifest.lock */,
271 | 3E410AD720775091006DA214 /* Sources */,
272 | 3E410AD820775091006DA214 /* Frameworks */,
273 | 3E410AD920775091006DA214 /* Resources */,
274 | 3284F5B162B5D46BB2D07E39 /* [CP] Embed Pods Frameworks */,
275 | );
276 | buildRules = (
277 | );
278 | dependencies = (
279 | );
280 | name = WaterMarkDelete;
281 | productName = WaterMarkDelete;
282 | productReference = 3E410ADB20775091006DA214 /* WaterMarkDelete.app */;
283 | productType = "com.apple.product-type.application";
284 | };
285 | 3E410AF220775099006DA214 /* WaterMarkDeleteTests */ = {
286 | isa = PBXNativeTarget;
287 | buildConfigurationList = 3E410B0A20775099006DA214 /* Build configuration list for PBXNativeTarget "WaterMarkDeleteTests" */;
288 | buildPhases = (
289 | 816E6246C10393202BECC6CD /* [CP] Check Pods Manifest.lock */,
290 | 3E410AEF20775099006DA214 /* Sources */,
291 | 3E410AF020775099006DA214 /* Frameworks */,
292 | 3E410AF120775099006DA214 /* Resources */,
293 | );
294 | buildRules = (
295 | );
296 | dependencies = (
297 | 3E410AF520775099006DA214 /* PBXTargetDependency */,
298 | );
299 | name = WaterMarkDeleteTests;
300 | productName = WaterMarkDeleteTests;
301 | productReference = 3E410AF320775099006DA214 /* WaterMarkDeleteTests.xctest */;
302 | productType = "com.apple.product-type.bundle.unit-test";
303 | };
304 | 3E410AFD20775099006DA214 /* WaterMarkDeleteUITests */ = {
305 | isa = PBXNativeTarget;
306 | buildConfigurationList = 3E410B0D20775099006DA214 /* Build configuration list for PBXNativeTarget "WaterMarkDeleteUITests" */;
307 | buildPhases = (
308 | EC821002F57620F60E4EDCD9 /* [CP] Check Pods Manifest.lock */,
309 | 3E410AFA20775099006DA214 /* Sources */,
310 | 3E410AFB20775099006DA214 /* Frameworks */,
311 | 3E410AFC20775099006DA214 /* Resources */,
312 | );
313 | buildRules = (
314 | );
315 | dependencies = (
316 | 3E410B0020775099006DA214 /* PBXTargetDependency */,
317 | );
318 | name = WaterMarkDeleteUITests;
319 | productName = WaterMarkDeleteUITests;
320 | productReference = 3E410AFE20775099006DA214 /* WaterMarkDeleteUITests.xctest */;
321 | productType = "com.apple.product-type.bundle.ui-testing";
322 | };
323 | /* End PBXNativeTarget section */
324 |
325 | /* Begin PBXProject section */
326 | 3E410AD320775091006DA214 /* Project object */ = {
327 | isa = PBXProject;
328 | attributes = {
329 | LastUpgradeCheck = 0930;
330 | ORGANIZATIONNAME = WangYiming;
331 | TargetAttributes = {
332 | 3E410ADA20775091006DA214 = {
333 | CreatedOnToolsVersion = 9.3;
334 | };
335 | 3E410AF220775099006DA214 = {
336 | CreatedOnToolsVersion = 9.3;
337 | TestTargetID = 3E410ADA20775091006DA214;
338 | };
339 | 3E410AFD20775099006DA214 = {
340 | CreatedOnToolsVersion = 9.3;
341 | TestTargetID = 3E410ADA20775091006DA214;
342 | };
343 | };
344 | };
345 | buildConfigurationList = 3E410AD620775091006DA214 /* Build configuration list for PBXProject "WaterMarkDelete" */;
346 | compatibilityVersion = "Xcode 9.3";
347 | developmentRegion = en;
348 | hasScannedForEncodings = 0;
349 | knownRegions = (
350 | en,
351 | Base,
352 | );
353 | mainGroup = 3E410AD220775091006DA214;
354 | productRefGroup = 3E410ADC20775091006DA214 /* Products */;
355 | projectDirPath = "";
356 | projectRoot = "";
357 | targets = (
358 | 3E410ADA20775091006DA214 /* WaterMarkDelete */,
359 | 3E410AF220775099006DA214 /* WaterMarkDeleteTests */,
360 | 3E410AFD20775099006DA214 /* WaterMarkDeleteUITests */,
361 | );
362 | };
363 | /* End PBXProject section */
364 |
365 | /* Begin PBXResourcesBuildPhase section */
366 | 3E410AD920775091006DA214 /* Resources */ = {
367 | isa = PBXResourcesBuildPhase;
368 | buildActionMask = 2147483647;
369 | files = (
370 | 3E410AEB20775099006DA214 /* LaunchScreen.storyboard in Resources */,
371 | 3E410AE820775099006DA214 /* Assets.xcassets in Resources */,
372 | 3E410AE620775092006DA214 /* Main.storyboard in Resources */,
373 | );
374 | runOnlyForDeploymentPostprocessing = 0;
375 | };
376 | 3E410AF120775099006DA214 /* Resources */ = {
377 | isa = PBXResourcesBuildPhase;
378 | buildActionMask = 2147483647;
379 | files = (
380 | );
381 | runOnlyForDeploymentPostprocessing = 0;
382 | };
383 | 3E410AFC20775099006DA214 /* Resources */ = {
384 | isa = PBXResourcesBuildPhase;
385 | buildActionMask = 2147483647;
386 | files = (
387 | );
388 | runOnlyForDeploymentPostprocessing = 0;
389 | };
390 | /* End PBXResourcesBuildPhase section */
391 |
392 | /* Begin PBXShellScriptBuildPhase section */
393 | 3284F5B162B5D46BB2D07E39 /* [CP] Embed Pods Frameworks */ = {
394 | isa = PBXShellScriptBuildPhase;
395 | buildActionMask = 2147483647;
396 | files = (
397 | );
398 | inputPaths = (
399 | "${SRCROOT}/Pods/Target Support Files/Pods-WaterMarkDelete/Pods-WaterMarkDelete-frameworks.sh",
400 | "${BUILT_PRODUCTS_DIR}/GPUImage/GPUImage.framework",
401 | "${BUILT_PRODUCTS_DIR}/MBProgressHUD/MBProgressHUD.framework",
402 | "${BUILT_PRODUCTS_DIR}/OpenCV/opencv2.framework",
403 | );
404 | name = "[CP] Embed Pods Frameworks";
405 | outputPaths = (
406 | "${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/GPUImage.framework",
407 | "${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/MBProgressHUD.framework",
408 | "${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/opencv2.framework",
409 | );
410 | runOnlyForDeploymentPostprocessing = 0;
411 | shellPath = /bin/sh;
412 | shellScript = "\"${SRCROOT}/Pods/Target Support Files/Pods-WaterMarkDelete/Pods-WaterMarkDelete-frameworks.sh\"\n";
413 | showEnvVarsInLog = 0;
414 | };
415 | 6F49885E520C9B55AFCE23CC /* [CP] Check Pods Manifest.lock */ = {
416 | isa = PBXShellScriptBuildPhase;
417 | buildActionMask = 2147483647;
418 | files = (
419 | );
420 | inputPaths = (
421 | "${PODS_PODFILE_DIR_PATH}/Podfile.lock",
422 | "${PODS_ROOT}/Manifest.lock",
423 | );
424 | name = "[CP] Check Pods Manifest.lock";
425 | outputPaths = (
426 | "$(DERIVED_FILE_DIR)/Pods-WaterMarkDelete-checkManifestLockResult.txt",
427 | );
428 | runOnlyForDeploymentPostprocessing = 0;
429 | shellPath = /bin/sh;
430 | shellScript = "diff \"${PODS_PODFILE_DIR_PATH}/Podfile.lock\" \"${PODS_ROOT}/Manifest.lock\" > /dev/null\nif [ $? != 0 ] ; then\n # print error to STDERR\n echo \"error: The sandbox is not in sync with the Podfile.lock. Run 'pod install' or update your CocoaPods installation.\" >&2\n exit 1\nfi\n# This output is used by Xcode 'outputs' to avoid re-running this script phase.\necho \"SUCCESS\" > \"${SCRIPT_OUTPUT_FILE_0}\"\n";
431 | showEnvVarsInLog = 0;
432 | };
433 | 816E6246C10393202BECC6CD /* [CP] Check Pods Manifest.lock */ = {
434 | isa = PBXShellScriptBuildPhase;
435 | buildActionMask = 2147483647;
436 | files = (
437 | );
438 | inputPaths = (
439 | "${PODS_PODFILE_DIR_PATH}/Podfile.lock",
440 | "${PODS_ROOT}/Manifest.lock",
441 | );
442 | name = "[CP] Check Pods Manifest.lock";
443 | outputPaths = (
444 | "$(DERIVED_FILE_DIR)/Pods-WaterMarkDeleteTests-checkManifestLockResult.txt",
445 | );
446 | runOnlyForDeploymentPostprocessing = 0;
447 | shellPath = /bin/sh;
448 | shellScript = "diff \"${PODS_PODFILE_DIR_PATH}/Podfile.lock\" \"${PODS_ROOT}/Manifest.lock\" > /dev/null\nif [ $? != 0 ] ; then\n # print error to STDERR\n echo \"error: The sandbox is not in sync with the Podfile.lock. Run 'pod install' or update your CocoaPods installation.\" >&2\n exit 1\nfi\n# This output is used by Xcode 'outputs' to avoid re-running this script phase.\necho \"SUCCESS\" > \"${SCRIPT_OUTPUT_FILE_0}\"\n";
449 | showEnvVarsInLog = 0;
450 | };
451 | EC821002F57620F60E4EDCD9 /* [CP] Check Pods Manifest.lock */ = {
452 | isa = PBXShellScriptBuildPhase;
453 | buildActionMask = 2147483647;
454 | files = (
455 | );
456 | inputPaths = (
457 | "${PODS_PODFILE_DIR_PATH}/Podfile.lock",
458 | "${PODS_ROOT}/Manifest.lock",
459 | );
460 | name = "[CP] Check Pods Manifest.lock";
461 | outputPaths = (
462 | "$(DERIVED_FILE_DIR)/Pods-WaterMarkDeleteUITests-checkManifestLockResult.txt",
463 | );
464 | runOnlyForDeploymentPostprocessing = 0;
465 | shellPath = /bin/sh;
466 | shellScript = "diff \"${PODS_PODFILE_DIR_PATH}/Podfile.lock\" \"${PODS_ROOT}/Manifest.lock\" > /dev/null\nif [ $? != 0 ] ; then\n # print error to STDERR\n echo \"error: The sandbox is not in sync with the Podfile.lock. Run 'pod install' or update your CocoaPods installation.\" >&2\n exit 1\nfi\n# This output is used by Xcode 'outputs' to avoid re-running this script phase.\necho \"SUCCESS\" > \"${SCRIPT_OUTPUT_FILE_0}\"\n";
467 | showEnvVarsInLog = 0;
468 | };
469 | /* End PBXShellScriptBuildPhase section */
470 |
471 | /* Begin PBXSourcesBuildPhase section */
472 | 3E410AD720775091006DA214 /* Sources */ = {
473 | isa = PBXSourcesBuildPhase;
474 | buildActionMask = 2147483647;
475 | files = (
476 | 3E410AEE20775099006DA214 /* main.m in Sources */,
477 | 3E9EF9EC2077886800B18DE4 /* PictureViewController.mm in Sources */,
478 | 3E9EF9EF20779BE000B18DE4 /* ClipView.m in Sources */,
479 | 3EE813E7207CEC5900AEA5CD /* FramesShowViewController.m in Sources */,
480 | 3E410AE020775091006DA214 /* AppDelegate.m in Sources */,
481 | 3EE813E4207CC40400AEA5CD /* VideoViewController.mm in Sources */,
482 | 3E8A57422079ED1E00DDA2E0 /* UIImage+OpenCV.mm in Sources */,
483 | 3E7FEF44207F08CE00AF773D /* HJImagesToVideo.m in Sources */,
484 | 3E410B12207750D8006DA214 /* RootViewController.m in Sources */,
485 | );
486 | runOnlyForDeploymentPostprocessing = 0;
487 | };
488 | 3E410AEF20775099006DA214 /* Sources */ = {
489 | isa = PBXSourcesBuildPhase;
490 | buildActionMask = 2147483647;
491 | files = (
492 | 3E410AF820775099006DA214 /* WaterMarkDeleteTests.m in Sources */,
493 | );
494 | runOnlyForDeploymentPostprocessing = 0;
495 | };
496 | 3E410AFA20775099006DA214 /* Sources */ = {
497 | isa = PBXSourcesBuildPhase;
498 | buildActionMask = 2147483647;
499 | files = (
500 | 3E410B0320775099006DA214 /* WaterMarkDeleteUITests.m in Sources */,
501 | );
502 | runOnlyForDeploymentPostprocessing = 0;
503 | };
504 | /* End PBXSourcesBuildPhase section */
505 |
506 | /* Begin PBXTargetDependency section */
507 | 3E410AF520775099006DA214 /* PBXTargetDependency */ = {
508 | isa = PBXTargetDependency;
509 | target = 3E410ADA20775091006DA214 /* WaterMarkDelete */;
510 | targetProxy = 3E410AF420775099006DA214 /* PBXContainerItemProxy */;
511 | };
512 | 3E410B0020775099006DA214 /* PBXTargetDependency */ = {
513 | isa = PBXTargetDependency;
514 | target = 3E410ADA20775091006DA214 /* WaterMarkDelete */;
515 | targetProxy = 3E410AFF20775099006DA214 /* PBXContainerItemProxy */;
516 | };
517 | /* End PBXTargetDependency section */
518 |
519 | /* Begin PBXVariantGroup section */
520 | 3E410AE420775092006DA214 /* Main.storyboard */ = {
521 | isa = PBXVariantGroup;
522 | children = (
523 | 3E410AE520775092006DA214 /* Base */,
524 | );
525 | name = Main.storyboard;
526 | 			sourceTree = "<group>";
527 | };
528 | 3E410AE920775099006DA214 /* LaunchScreen.storyboard */ = {
529 | isa = PBXVariantGroup;
530 | children = (
531 | 3E410AEA20775099006DA214 /* Base */,
532 | );
533 | name = LaunchScreen.storyboard;
534 | 			sourceTree = "<group>";
535 | };
536 | /* End PBXVariantGroup section */
537 |
538 | /* Begin XCBuildConfiguration section */
539 | 3E410B0520775099006DA214 /* Debug */ = {
540 | isa = XCBuildConfiguration;
541 | buildSettings = {
542 | ALWAYS_SEARCH_USER_PATHS = NO;
543 | CLANG_ANALYZER_NONNULL = YES;
544 | CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE;
545 | CLANG_CXX_LANGUAGE_STANDARD = "gnu++17";
546 | CLANG_CXX_LIBRARY = "libc++";
547 | CLANG_ENABLE_MODULES = YES;
548 | CLANG_ENABLE_OBJC_ARC = YES;
549 | CLANG_ENABLE_OBJC_WEAK = YES;
550 | CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES;
551 | CLANG_WARN_BOOL_CONVERSION = YES;
552 | CLANG_WARN_COMMA = YES;
553 | CLANG_WARN_CONSTANT_CONVERSION = YES;
554 | CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES;
555 | CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
556 | CLANG_WARN_DOCUMENTATION_COMMENTS = YES;
557 | CLANG_WARN_EMPTY_BODY = YES;
558 | CLANG_WARN_ENUM_CONVERSION = YES;
559 | CLANG_WARN_INFINITE_RECURSION = YES;
560 | CLANG_WARN_INT_CONVERSION = YES;
561 | CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES;
562 | CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES;
563 | CLANG_WARN_OBJC_LITERAL_CONVERSION = YES;
564 | CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
565 | CLANG_WARN_RANGE_LOOP_ANALYSIS = YES;
566 | CLANG_WARN_STRICT_PROTOTYPES = YES;
567 | CLANG_WARN_SUSPICIOUS_MOVE = YES;
568 | CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE;
569 | CLANG_WARN_UNREACHABLE_CODE = YES;
570 | CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
571 | CODE_SIGN_IDENTITY = "iPhone Developer";
572 | COPY_PHASE_STRIP = NO;
573 | DEBUG_INFORMATION_FORMAT = dwarf;
574 | ENABLE_STRICT_OBJC_MSGSEND = YES;
575 | ENABLE_TESTABILITY = YES;
576 | GCC_C_LANGUAGE_STANDARD = gnu11;
577 | GCC_DYNAMIC_NO_PIC = NO;
578 | GCC_NO_COMMON_BLOCKS = YES;
579 | GCC_OPTIMIZATION_LEVEL = 0;
580 | GCC_PRECOMPILE_PREFIX_HEADER = NO;
581 | GCC_PREFIX_HEADER = "";
582 | GCC_PREPROCESSOR_DEFINITIONS = (
583 | "DEBUG=1",
584 | "$(inherited)",
585 | );
586 | GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
587 | GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
588 | GCC_WARN_UNDECLARED_SELECTOR = YES;
589 | GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
590 | GCC_WARN_UNUSED_FUNCTION = YES;
591 | GCC_WARN_UNUSED_VARIABLE = YES;
592 | IPHONEOS_DEPLOYMENT_TARGET = 11.3;
593 | MTL_ENABLE_DEBUG_INFO = YES;
594 | ONLY_ACTIVE_ARCH = YES;
595 | SDKROOT = iphoneos;
596 | };
597 | name = Debug;
598 | };
599 | 3E410B0620775099006DA214 /* Release */ = {
600 | isa = XCBuildConfiguration;
601 | buildSettings = {
602 | ALWAYS_SEARCH_USER_PATHS = NO;
603 | CLANG_ANALYZER_NONNULL = YES;
604 | CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE;
605 | CLANG_CXX_LANGUAGE_STANDARD = "gnu++17";
606 | CLANG_CXX_LIBRARY = "libc++";
607 | CLANG_ENABLE_MODULES = YES;
608 | CLANG_ENABLE_OBJC_ARC = YES;
609 | CLANG_ENABLE_OBJC_WEAK = YES;
610 | CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES;
611 | CLANG_WARN_BOOL_CONVERSION = YES;
612 | CLANG_WARN_COMMA = YES;
613 | CLANG_WARN_CONSTANT_CONVERSION = YES;
614 | CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES;
615 | CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
616 | CLANG_WARN_DOCUMENTATION_COMMENTS = YES;
617 | CLANG_WARN_EMPTY_BODY = YES;
618 | CLANG_WARN_ENUM_CONVERSION = YES;
619 | CLANG_WARN_INFINITE_RECURSION = YES;
620 | CLANG_WARN_INT_CONVERSION = YES;
621 | CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES;
622 | CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES;
623 | CLANG_WARN_OBJC_LITERAL_CONVERSION = YES;
624 | CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
625 | CLANG_WARN_RANGE_LOOP_ANALYSIS = YES;
626 | CLANG_WARN_STRICT_PROTOTYPES = YES;
627 | CLANG_WARN_SUSPICIOUS_MOVE = YES;
628 | CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE;
629 | CLANG_WARN_UNREACHABLE_CODE = YES;
630 | CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
631 | CODE_SIGN_IDENTITY = "iPhone Developer";
632 | COPY_PHASE_STRIP = NO;
633 | DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym";
634 | ENABLE_NS_ASSERTIONS = NO;
635 | ENABLE_STRICT_OBJC_MSGSEND = YES;
636 | GCC_C_LANGUAGE_STANDARD = gnu11;
637 | GCC_NO_COMMON_BLOCKS = YES;
638 | GCC_PRECOMPILE_PREFIX_HEADER = NO;
639 | GCC_PREFIX_HEADER = "";
640 | GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
641 | GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
642 | GCC_WARN_UNDECLARED_SELECTOR = YES;
643 | GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
644 | GCC_WARN_UNUSED_FUNCTION = YES;
645 | GCC_WARN_UNUSED_VARIABLE = YES;
646 | IPHONEOS_DEPLOYMENT_TARGET = 11.3;
647 | MTL_ENABLE_DEBUG_INFO = NO;
648 | SDKROOT = iphoneos;
649 | VALIDATE_PRODUCT = YES;
650 | };
651 | name = Release;
652 | };
653 | 3E410B0820775099006DA214 /* Debug */ = {
654 | isa = XCBuildConfiguration;
655 | baseConfigurationReference = 959066A9B7D90C48302B0FFD /* Pods-WaterMarkDelete.debug.xcconfig */;
656 | buildSettings = {
657 | ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
658 | CLANG_CXX_LANGUAGE_STANDARD = "gnu++17";
659 | CLANG_CXX_LIBRARY = "libc++";
660 | CODE_SIGN_STYLE = Automatic;
661 | DEVELOPMENT_TEAM = XJ5GDW35L8;
662 | FRAMEWORK_SEARCH_PATHS = (
663 | "$(inherited)",
664 | "$(PROJECT_DIR)",
665 | );
666 | GCC_PRECOMPILE_PREFIX_HEADER = YES;
667 | GCC_PREFIX_HEADER = WaterMarkDelete/WaterMarkDelete.pch;
668 | INFOPLIST_FILE = WaterMarkDelete/Info.plist;
669 | LD_RUNPATH_SEARCH_PATHS = (
670 | "$(inherited)",
671 | "@executable_path/Frameworks",
672 | );
673 | PRODUCT_BUNDLE_IDENTIFIER = WYM.WaterMarkDelete;
674 | PRODUCT_NAME = "$(TARGET_NAME)";
675 | TARGETED_DEVICE_FAMILY = "1,2";
676 | };
677 | name = Debug;
678 | };
679 | 3E410B0920775099006DA214 /* Release */ = {
680 | isa = XCBuildConfiguration;
681 | baseConfigurationReference = CDD8DF8C97DEF91FB4114BE1 /* Pods-WaterMarkDelete.release.xcconfig */;
682 | buildSettings = {
683 | ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
684 | CLANG_CXX_LANGUAGE_STANDARD = "gnu++17";
685 | CLANG_CXX_LIBRARY = "libc++";
686 | CODE_SIGN_STYLE = Automatic;
687 | DEVELOPMENT_TEAM = XJ5GDW35L8;
688 | FRAMEWORK_SEARCH_PATHS = (
689 | "$(inherited)",
690 | "$(PROJECT_DIR)",
691 | );
692 | GCC_PRECOMPILE_PREFIX_HEADER = YES;
693 | GCC_PREFIX_HEADER = WaterMarkDelete/WaterMarkDelete.pch;
694 | INFOPLIST_FILE = WaterMarkDelete/Info.plist;
695 | LD_RUNPATH_SEARCH_PATHS = (
696 | "$(inherited)",
697 | "@executable_path/Frameworks",
698 | );
699 | PRODUCT_BUNDLE_IDENTIFIER = WYM.WaterMarkDelete;
700 | PRODUCT_NAME = "$(TARGET_NAME)";
701 | TARGETED_DEVICE_FAMILY = "1,2";
702 | };
703 | name = Release;
704 | };
705 | 3E410B0B20775099006DA214 /* Debug */ = {
706 | isa = XCBuildConfiguration;
707 | baseConfigurationReference = 4BAAA442F94A7A24C6A3307E /* Pods-WaterMarkDeleteTests.debug.xcconfig */;
708 | buildSettings = {
709 | BUNDLE_LOADER = "$(TEST_HOST)";
710 | CODE_SIGN_STYLE = Automatic;
711 | INFOPLIST_FILE = WaterMarkDeleteTests/Info.plist;
712 | LD_RUNPATH_SEARCH_PATHS = (
713 | "$(inherited)",
714 | "@executable_path/Frameworks",
715 | "@loader_path/Frameworks",
716 | );
717 | PRODUCT_BUNDLE_IDENTIFIER = WYM.WaterMarkDeleteTests;
718 | PRODUCT_NAME = "$(TARGET_NAME)";
719 | TARGETED_DEVICE_FAMILY = "1,2";
720 | TEST_HOST = "$(BUILT_PRODUCTS_DIR)/WaterMarkDelete.app/WaterMarkDelete";
721 | };
722 | name = Debug;
723 | };
724 | 3E410B0C20775099006DA214 /* Release */ = {
725 | isa = XCBuildConfiguration;
726 | baseConfigurationReference = CBDE110B2912D1A891931DAA /* Pods-WaterMarkDeleteTests.release.xcconfig */;
727 | buildSettings = {
728 | BUNDLE_LOADER = "$(TEST_HOST)";
729 | CODE_SIGN_STYLE = Automatic;
730 | INFOPLIST_FILE = WaterMarkDeleteTests/Info.plist;
731 | LD_RUNPATH_SEARCH_PATHS = (
732 | "$(inherited)",
733 | "@executable_path/Frameworks",
734 | "@loader_path/Frameworks",
735 | );
736 | PRODUCT_BUNDLE_IDENTIFIER = WYM.WaterMarkDeleteTests;
737 | PRODUCT_NAME = "$(TARGET_NAME)";
738 | TARGETED_DEVICE_FAMILY = "1,2";
739 | TEST_HOST = "$(BUILT_PRODUCTS_DIR)/WaterMarkDelete.app/WaterMarkDelete";
740 | };
741 | name = Release;
742 | };
743 | 3E410B0E20775099006DA214 /* Debug */ = {
744 | isa = XCBuildConfiguration;
745 | baseConfigurationReference = 600D02087DEA53AE1D7EB913 /* Pods-WaterMarkDeleteUITests.debug.xcconfig */;
746 | buildSettings = {
747 | CODE_SIGN_STYLE = Automatic;
748 | INFOPLIST_FILE = WaterMarkDeleteUITests/Info.plist;
749 | LD_RUNPATH_SEARCH_PATHS = (
750 | "$(inherited)",
751 | "@executable_path/Frameworks",
752 | "@loader_path/Frameworks",
753 | );
754 | PRODUCT_BUNDLE_IDENTIFIER = WYM.WaterMarkDeleteUITests;
755 | PRODUCT_NAME = "$(TARGET_NAME)";
756 | TARGETED_DEVICE_FAMILY = "1,2";
757 | TEST_TARGET_NAME = WaterMarkDelete;
758 | };
759 | name = Debug;
760 | };
761 | 3E410B0F20775099006DA214 /* Release */ = {
762 | isa = XCBuildConfiguration;
763 | baseConfigurationReference = 7AB8ED2930F4DEC749E317FD /* Pods-WaterMarkDeleteUITests.release.xcconfig */;
764 | buildSettings = {
765 | CODE_SIGN_STYLE = Automatic;
766 | INFOPLIST_FILE = WaterMarkDeleteUITests/Info.plist;
767 | LD_RUNPATH_SEARCH_PATHS = (
768 | "$(inherited)",
769 | "@executable_path/Frameworks",
770 | "@loader_path/Frameworks",
771 | );
772 | PRODUCT_BUNDLE_IDENTIFIER = WYM.WaterMarkDeleteUITests;
773 | PRODUCT_NAME = "$(TARGET_NAME)";
774 | TARGETED_DEVICE_FAMILY = "1,2";
775 | TEST_TARGET_NAME = WaterMarkDelete;
776 | };
777 | name = Release;
778 | };
779 | /* End XCBuildConfiguration section */
780 |
781 | /* Begin XCConfigurationList section */
782 | 3E410AD620775091006DA214 /* Build configuration list for PBXProject "WaterMarkDelete" */ = {
783 | isa = XCConfigurationList;
784 | buildConfigurations = (
785 | 3E410B0520775099006DA214 /* Debug */,
786 | 3E410B0620775099006DA214 /* Release */,
787 | );
788 | defaultConfigurationIsVisible = 0;
789 | defaultConfigurationName = Release;
790 | };
791 | 3E410B0720775099006DA214 /* Build configuration list for PBXNativeTarget "WaterMarkDelete" */ = {
792 | isa = XCConfigurationList;
793 | buildConfigurations = (
794 | 3E410B0820775099006DA214 /* Debug */,
795 | 3E410B0920775099006DA214 /* Release */,
796 | );
797 | defaultConfigurationIsVisible = 0;
798 | defaultConfigurationName = Release;
799 | };
800 | 3E410B0A20775099006DA214 /* Build configuration list for PBXNativeTarget "WaterMarkDeleteTests" */ = {
801 | isa = XCConfigurationList;
802 | buildConfigurations = (
803 | 3E410B0B20775099006DA214 /* Debug */,
804 | 3E410B0C20775099006DA214 /* Release */,
805 | );
806 | defaultConfigurationIsVisible = 0;
807 | defaultConfigurationName = Release;
808 | };
809 | 3E410B0D20775099006DA214 /* Build configuration list for PBXNativeTarget "WaterMarkDeleteUITests" */ = {
810 | isa = XCConfigurationList;
811 | buildConfigurations = (
812 | 3E410B0E20775099006DA214 /* Debug */,
813 | 3E410B0F20775099006DA214 /* Release */,
814 | );
815 | defaultConfigurationIsVisible = 0;
816 | defaultConfigurationName = Release;
817 | };
818 | /* End XCConfigurationList section */
819 | };
820 | rootObject = 3E410AD320775091006DA214 /* Project object */;
821 | }
822 |
--------------------------------------------------------------------------------