├── .gitignore ├── TTtest.plist ├── release └── com.if-she.cydia.vcam_0.0.1-1_iphoneos-arm.deb ├── control ├── Makefile ├── README.md ├── bak-snip ├── tweak代码片段.m ├── Untitled-2.mm ├── Untitled-1.mm ├── 尝试修改samplebuffer的.mm └── Tweak.x.bak ├── util.h └── Tweak.x /.gitignore: -------------------------------------------------------------------------------- 1 | .theos 2 | packages -------------------------------------------------------------------------------- /TTtest.plist: -------------------------------------------------------------------------------- 1 | { Filter = { Bundles = ( "com.apple.UIKit" ); }; } 2 | -------------------------------------------------------------------------------- /release/com.if-she.cydia.vcam_0.0.1-1_iphoneos-arm.deb: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/wordgege/iPhone-VCAM/HEAD/release/com.if-she.cydia.vcam_0.0.1-1_iphoneos-arm.deb -------------------------------------------------------------------------------- /control: -------------------------------------------------------------------------------- 1 | Package: com.if-she.cydia.vcam 2 | Name: VCAM4iOS 3 | Version: 0.0.1 4 | Architecture: iphoneos-arm 5 | Description: VCAM4iOS 6 | Maintainer: trizau 7 | Author: trizau 8 | Section: Tweaks 9 | Depends: mobilesubstrate (>= 0.9.5000) 10 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | TARGET := iphone:clang:latest:9 2 | INSTALL_TARGET_PROCESSES = SpringBoard 3 | 4 | THEOS_DEVICE_IP=192.168.1.5 5 | 6 | 7 | include $(THEOS)/makefiles/common.mk 8 | 9 | TWEAK_NAME = TTtest 10 | 11 | TTtest_FILES = Tweak.x 12 | TTtest_CFLAGS = -fobjc-arc 13 | 14 | include $(THEOS_MAKE_PATH)/tweak.mk 15 | -------------------------------------------------------------------------------- /README.md: 
-------------------------------------------------------------------------------- 1 | # iPhone-VCAM 2 | 3 | 基于 *Cydia Substrate* 的虚拟摄像头 4 | 5 | # 作用 6 | 7 | 替换 iOS 系统摄像头画面 8 | 9 | ## 生效软件 10 | - 支持绝大多数App 11 | 12 | ## 支持系统版本 13 | 14 | - 开发测试版本为 iOS13.3、因为手上没有其他版本所以无法真机测试 15 | - 理论支持 iOS11.0 以上 16 | - iOS15可能出点问题,且目前iOS15越狱方案没有 17 | 18 | # 开始使用 19 | 20 | ## 安装 21 | 22 | ## 使用 23 | - 以下 **-** 符号表示 **音量减键**, **+** 表示 **音量加键**, 在一秒内切换点击以触发 24 | 25 | ### 完整模式 26 | 弹窗较多,有些软件会在弹窗后暂停运行 27 | - 快捷键 + - 28 | - 功能见按钮说明 29 | - 下载视频 30 | - *每次下载完成后都会有系统的静音提示框弹出* 31 | 1. 视频文件 32 | 1. 在线视频地址,需要确保这个链接指向了一个可访问的视频 33 | 2. 如果文件损坏或不可播放或不受支持不会发生任何变化 34 | 2. 流式媒体(暂未支持) 35 | 36 | ### 便捷模式 37 | 尽量减少弹窗以防止打断当前程序的运行状态 38 | - 快捷键 - + 触发**选择视频** 39 | - 如果设置了**下载视频**功能,则此快捷键改为触发**下载视频** 40 | - 将**下载视频**的连接设置为空时,继续使用**选择视频** 41 | - 下载完成后会有静音模式弹窗会弹出 42 | - 如果远程文件不可用则禁用替换 43 | 44 | # 常见问题 45 | - 以下 **-** 符号表示 **音量减键**, **+** 表示 **音量加键**, 在一秒内切换点击以触发 46 | 47 | ## Q: 该怎样选择视频分辨率? 48 | A: 使用摄像头后 + -,将会出现详细信息。 当宽度大于高度时,表示视频的方向时旋转的,大部分情况下后置摄像头需要逆时针90度,前置摄像头顺时针90度,有时需要旋转并水平翻转。视频的具体方向因不同软件处理方式不同需要自行观察。**替换预览始终保持正确的方向**,*如果视频的宽高和提示的不一致时,可能出现画面与识别结果偏移、预览拉伸甚至闪退的情况* 49 | - 简单来说,替换视频的宽高必须和 + - 快捷键提示的 W, H一致,根据预览到的画面调整替换视频角度 50 | 51 | ## Q: 拍照后画面旋转? 
52 | A: 预览始终保持了正确的方向,部分软件会直接处理横向的图片,但是给用户预览的时候把预览旋转过来了 53 | - 简单来说,根据被旋转的方向,把替换视频提前往相反的方向旋转一次就好了 54 | 55 | 56 | 57 | # TODO 58 | - 音频支持 59 | - 修复有些软件录像循环后失效问题 60 | -------------------------------------------------------------------------------- /bak-snip/tweak代码片段.m: -------------------------------------------------------------------------------- 1 | %hookf(OSStatus, ){ 2 | NSLog(@"--------------> CMSampleBufferCreate"); 3 | return %orig; 4 | } 5 | 6 | 7 | class_replaceMethod([sampleBufferDelegate class], @selector(captureOutput:didOutputSampleBuffer:fromConnection:), imp_implementationWithBlock(^(id *self, SEL _cmd, AVCaptureOutput *output, CMSampleBufferRef sampleBuffer, AVCaptureConnection *connection){ 8 | NSLog(@"求求你了,出现吧"); 9 | }), NULL); 10 | 11 | 12 | // 先动态hook然后调用原始方法使用这个queue 13 | MSHookMessageEx( 14 | [sampleBufferDelegate class], @selector(captureOutput:didOutputSampleBuffer:fromConnection:), 15 | imp_implementationWithBlock(^(id *self, SEL _cmd, AVCaptureOutput *output, CMSampleBufferRef sampleBuffer, AVCaptureConnection *connection){ 16 | NSLog(@"求求你了,出现吧!!!!"); 17 | }), NULL 18 | ); 19 | 20 | 21 | 22 | 23 | // %subclass MyDelegate : NSObject 24 | // - (void)captureOutput:(AVCaptureOutput *)output didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection{ 25 | // NSLog(@"这是成功了 吗"); 26 | // %orig; 27 | // } 28 | // %end 29 | 30 | 31 | 32 | imp_implementationWithBlock(^(id _self, AVCaptureOutput *output, CMSampleBufferRef sampleBuffer, AVCaptureConnection *connection){ 33 | NSLog(@"求求你了,出现吧! 【self = %@】 params = %@ , thread id = %@]", _self, output, [NSThread currentThread]); 34 | 35 | @try{ 36 | if (original_method == nil) { 37 | NSLog(@"what? 
居然是空 的"); 38 | }else { 39 | original_method(_self, @selector(captureOutput:didOutputSampleBuffer:fromConnection:), output, sampleBuffer, connection); 40 | } 41 | // [_self captureOutput:output didOutputSampleBuffer:sampleBuffer fromConnection:connection]; 42 | // objc_msgSendTyped(_self, @selector(captureOutput:didOutputSampleBuffer:fromConnection:), output, sampleBuffer, connection); 43 | // @throw [NSException exceptionWithName:@"这是我抛出的一场" reason:@"这是怎么回事呢" userInfo:nil]; 44 | }@catch(NSException *except) { 45 | NSLog(@"这里出错了->%@", except); 46 | } 47 | // 如何调用原来的方法? 48 | }) -------------------------------------------------------------------------------- /util.h: --------------------------------------------------------------------------------
// Place these helpers before the @interface of the view controller that uses them.
// @see https://www.jianshu.com/p/c2f8ef80e925
//
// Usage: RotatePixelBufferToAngle(pixelBuffer, radians(90));

// Converts degrees to radians.
static double radians(double degrees) { return degrees * M_PI / 180; }

// Scaling factor so the rotated image's bounding box still covers the
// original width. NOTE(review): width and height are deliberately read
// swapped from originalSize (the buffer is presumably delivered rotated
// 90°) — confirm against the capture pipeline.
static double ScalingFactorForAngle(double angle, CGSize originalSize) {
    double oriWidth = originalSize.height;
    double oriHeight = originalSize.width;
    // Horizontal extent of the rotated image's axis-aligned bounding box.
    double horizontalSpace = fabs(oriWidth * cos(angle)) + fabs(oriHeight * sin(angle));
    double scalingFactor = oriWidth / horizontalSpace;
    return scalingFactor;
}

// Cached rendering state: created once on first use, reused for every frame.
CGColorSpaceRef rgbColorSpace = NULL;
CIContext *context = nil;
CIImage *ci_originalImage = nil;
CIImage *ci_transformedImage = nil;
CIImage *ci_userTempImage = nil;

// Rotates the contents of thePixelBuffer by theAngle (radians), in place.
// The rotated image is scaled (ScalingFactorForAngle) and center-cropped so
// it fills the buffer's original w×h bounds, then rendered back into the
// same CVPixelBuffer.
static inline void RotatePixelBufferToAngle(CVPixelBufferRef thePixelBuffer, double theAngle) {

    @autoreleasepool {

        // Lazily build a CIContext that works entirely in device RGB.
        if (context == nil) {
            rgbColorSpace = CGColorSpaceCreateDeviceRGB();
            context = [CIContext contextWithOptions:@{kCIContextWorkingColorSpace: (__bridge id)rgbColorSpace,
                                                      kCIContextOutputColorSpace : (__bridge id)rgbColorSpace}];
        }

        long int w = CVPixelBufferGetWidth(thePixelBuffer);
        long int h = CVPixelBufferGetHeight(thePixelBuffer);

        ci_originalImage = [CIImage imageWithCVPixelBuffer:thePixelBuffer];
        // Kept for parity with the original code; this scaled copy is never used here.
        ci_userTempImage = [ci_originalImage imageByApplyingTransform:CGAffineTransformMakeScale(0.6, 0.6)];
        // CGImageRef UICG_image = [context createCGImage:ci_userTempImage fromRect:[ci_userTempImage extent]];

        // Extra half-turn compensates for the source orientation — TODO confirm.
        double angle = theAngle;
        angle = angle + M_PI;
        double scalingFact = ScalingFactorForAngle(angle, CGSizeMake(w, h));

        // Rotate around the image center.
        CGAffineTransform transform = CGAffineTransformMakeTranslation(w / 2.0, h / 2.0);
        transform = CGAffineTransformRotate(transform, angle);
        transform = CGAffineTransformTranslate(transform, -w / 2.0, -h / 2.0);

        // Rotate it by applying the transform.
        ci_transformedImage = [ci_originalImage imageByApplyingTransform:transform];

        CVPixelBufferLockBaseAddress(thePixelBuffer, 0);

        // Center-crop a scaledSize window out of the rotated image.
        CGRect extentR = [ci_transformedImage extent];
        CGPoint centerP = CGPointMake(extentR.size.width / 2.0 + extentR.origin.x,
                                      extentR.size.height / 2.0 + extentR.origin.y);
        CGSize scaledSize = CGSizeMake(w * scalingFact, h * scalingFact);
        // FIX: the original passed (scaledSize.height, scaledSize.width) as the
        // CGRectMake size arguments — CGRectMake takes (x, y, width, height),
        // and the origin offsets above already use width for x and height for y,
        // so the size must be (width, height) for the crop to be centered.
        CGRect cropRect = CGRectMake(centerP.x - scaledSize.width / 2.0,
                                     centerP.y - scaledSize.height / 2.0,
                                     scaledSize.width, scaledSize.height);


        CGImageRef cg_img = [context createCGImage:ci_transformedImage fromRect:cropRect];
        ci_transformedImage = [CIImage imageWithCGImage:cg_img];

        // Scale back down to the buffer's native size and write in place.
        ci_transformedImage = [ci_transformedImage imageByApplyingTransform:CGAffineTransformMakeScale(1.0 / scalingFact, 1.0 / scalingFact)];
        [context render:ci_transformedImage toCVPixelBuffer:thePixelBuffer bounds:CGRectMake(0, 0, w, h) colorSpace:NULL];

        CGImageRelease(cg_img);
        CVPixelBufferUnlockBaseAddress(thePixelBuffer, 0);
    }
} -------------------------------------------------------------------------------- /bak-snip/Untitled-2.mm: -------------------------------------------------------------------------------- 1 | #include 2 |
#import 3 | #import 4 | 5 | static NSFileManager *g_fileManager = nil; // 文件管理对象 6 | static UIPasteboard *g_pasteboard = nil; // 剪贴板对象 7 | static BOOL g_canReleaseBuffer = YES; // 根据此标识检测是否可以释放buffer 8 | static BOOL g_bufferReload = YES; // 根据此标识判断是否需要重新刷新视频文件 9 | static AVSampleBufferDisplayLayer *g_previewLayer = nil; // 原生相机预览 10 | static BOOL g_haveVideoDataOutput = NO; // 如果存在 VideoDataOutput, 预览画面会同步VideoDataOutput的画面, 如果没有则会直接读取视频显示 11 | static BOOL g_cameraRunning = NO; 12 | 13 | NSString *g_tempFile = @"/var/mobile/Library/Caches/temp.mov"; // 临时文件位置 14 | 15 | // 原生相机预览处理 16 | /*AVPlayer *g_player = nil; 17 | AVPlayerLayer *g_previewLayer = nil; 18 | AVPlayerItemVideoOutput *g_playerOutput = nil; 19 | CVPixelBufferRef g_pixelBuffer = nil;*/ 20 | 21 | 22 | @interface GetFrame : NSObject 23 | + (CMSampleBufferRef)getCurrentFrame:(CMSampleBufferRef) originSampleBuffer; 24 | + (UIWindow*)getKeyWindow; 25 | @end 26 | 27 | @implementation GetFrame 28 | + (CMSampleBufferRef)getCurrentFrame:(CMSampleBufferRef _Nullable) originSampleBuffer{ 29 | static AVAssetReader *reader = nil; 30 | static AVAssetReaderTrackOutput *trackout = nil; 31 | static CMSampleBufferRef sampleBuffer = nil; 32 | static BOOL previewBuffer = NO; 33 | 34 | if (sampleBuffer != nil && !g_canReleaseBuffer) return sampleBuffer; // 不能释放buffer时返回上一个buffer 35 | 36 | // 如果上一次是预览,但是获得了新的output输出就按照originSampleBuffer生成新的reader pool 37 | if (originSampleBuffer != nil && previewBuffer) { 38 | g_bufferReload = YES; 39 | NSLog(@"新的buffer"); 40 | } 41 | 42 | if (originSampleBuffer == nil) previewBuffer = YES; 43 | else previewBuffer = NO; 44 | 45 | // if (sampleBuffer != nil && originSampleBuffer != nil) { 46 | // CMSampleBufferSetDataBuffer(sampleBuffer, CMSampleBufferGetDataBuffer(originSampleBuffer)); 47 | // } 48 | 49 | // NSLog(@"------>%@", CMSampleBufferGetImageBuffer(sampleBuffer)); 50 | // NSLog(@"======>%@", CMSampleBufferGetImageBuffer(originSampleBuffer)); 51 | 52 | 53 | 54 | static 
NSTimeInterval renewTime = 0; 55 | // 没有替换视频则返回空,使用原来的数据 56 | if ([g_fileManager fileExistsAtPath:g_tempFile] == NO) return nil; 57 | // 选择了新的替换视频 58 | if ([g_fileManager fileExistsAtPath:[NSString stringWithFormat:@"%@.new", g_tempFile]]) { 59 | NSTimeInterval nowTime = [[NSDate date] timeIntervalSince1970]; 60 | if (nowTime - renewTime > 3) { 61 | renewTime = [[NSDate date] timeIntervalSince1970]; 62 | g_bufferReload = YES; 63 | } 64 | } 65 | 66 | // 播放完成重新读取 67 | if (reader != nil && [reader status] != AVAssetReaderStatusReading) { 68 | g_bufferReload = YES; 69 | } 70 | 71 | if (g_bufferReload) { 72 | g_bufferReload = NO; 73 | // AVAsset *asset = [AVAsset assetWithURL: [NSURL URLWithString:downloadFilePath]]; 74 | AVAsset *asset = [AVAsset assetWithURL: [NSURL URLWithString:[NSString stringWithFormat:@"file://%@", g_tempFile]]]; 75 | reader = [AVAssetReader assetReaderWithAsset:asset error:nil]; 76 | 77 | AVAssetTrack *videoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] firstObject]; // 获取轨道 78 | // kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange : YUV420 用于标清视频[420v] 79 | // kCVPixelFormatType_420YpCbCr8BiPlanarFullRange : YUV422 用于高清视频[420f] 80 | // kCVPixelFormatType_32BGRA : 输出的是BGRA的格式,适用于OpenGL和CoreImage 81 | 82 | OSType type = kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange; 83 | if (originSampleBuffer != nil) { 84 | type = CVPixelBufferGetPixelFormatType(CMSampleBufferGetImageBuffer(originSampleBuffer)); 85 | } 86 | NSDictionary *readerOutputSettings = @{(id)kCVPixelBufferPixelFormatTypeKey:@(type)}; // 将视频帧解压缩为 32 位 BGRA 格式 87 | 88 | trackout = [[AVAssetReaderTrackOutput alloc] initWithTrack:videoTrack outputSettings:readerOutputSettings]; 89 | 90 | [reader addOutput:trackout]; 91 | [reader startReading]; 92 | // NSLog(@"这是初始化读取"); 93 | } 94 | // NSLog(@"刷新了"); 95 | 96 | CMSampleBufferRef newsampleBuffer = [trackout copyNextSampleBuffer]; 97 | if (newsampleBuffer != nil) { 98 | if (sampleBuffer != nil) CFRelease(sampleBuffer); 99 | sampleBuffer = 
newsampleBuffer; 100 | } 101 | return sampleBuffer; 102 | } 103 | // 下载文件 104 | -(NSString*)downloadFile:(NSString*)url{ 105 | NSArray *paths = NSSearchPathForDirectoriesInDomains(NSCachesDirectory, NSUserDomainMask, YES); 106 | NSString *documentsDirectory = [paths objectAtIndex:0]; 107 | NSString *filePath = [NSString stringWithFormat:@"%@/%@", documentsDirectory,@"temp.mp4"]; 108 | NSString *downloadFilePath = nil; 109 | if ([g_fileManager fileExistsAtPath:filePath]){ 110 | downloadFilePath = [NSString stringWithFormat:@"file://%@", filePath]; 111 | }else { 112 | if (downloadFilePath == nil) { 113 | NSLog(@"开始下载 url = %@", url); 114 | downloadFilePath = @"正在下载"; 115 | NSData *urlData = [NSData dataWithContentsOfURL:[NSURL URLWithString:url]]; 116 | if (urlData) { 117 | if ([urlData writeToFile:filePath atomically:YES]){ 118 | downloadFilePath = [NSString stringWithFormat:@"file://%@", filePath]; 119 | NSLog(@"保存完成 downloadFilePath = %@", downloadFilePath); 120 | }else { 121 | downloadFilePath = nil; 122 | NSLog(@"保存失败 downloadFilePath = %@", downloadFilePath); 123 | } 124 | } 125 | }else { 126 | NSLog(@"暂停下载 url = %@", url); 127 | } 128 | } 129 | return downloadFilePath; 130 | } 131 | +(UIWindow*)getKeyWindow{ 132 | // need using [GetFrame getKeyWindow].rootViewController 133 | UIWindow *keyWindow = nil; 134 | if (keyWindow == nil) { 135 | NSArray *windows = UIApplication.sharedApplication.windows; 136 | for(UIWindow *window in windows){ 137 | if(window.isKeyWindow) { 138 | keyWindow = window; 139 | break; 140 | } 141 | } 142 | } 143 | return keyWindow; 144 | } 145 | @end 146 | 147 | %hook AVCaptureVideoPreviewLayer 148 | - (void)addSublayer:(CALayer *)layer{ 149 | %orig; 150 | // self.opacity = 0; 151 | // self.borderColor = [UIColor blackColor].CGColor; 152 | 153 | static CADisplayLink *displayLink = nil; 154 | if (displayLink == nil) { 155 | displayLink = [CADisplayLink displayLinkWithTarget:self selector:@selector(step:)]; 156 | [displayLink 
addToRunLoop:[NSRunLoop currentRunLoop] forMode:NSRunLoopCommonModes]; 157 | } 158 | 159 | // 播放条目 160 | if ([g_fileManager fileExistsAtPath:g_tempFile] && ![[self sublayers] containsObject:g_previewLayer]) { 161 | /* 162 | NSURL *url = [NSURL URLWithString:[NSString stringWithFormat:@"file://%@", g_tempFile]]; 163 | AVPlayerItem *item = [AVPlayerItem playerItemWithURL:url]; 164 | // layer 165 | g_player = [AVPlayer playerWithPlayerItem:item]; 166 | g_player.volume = 0; 167 | // 用于获取cmsamplebuffer 168 | g_playerOutput = [[AVPlayerItemVideoOutput alloc] init]; 169 | [item addOutput:g_playerOutput]; 170 | // 输出layer 171 | g_previewLayer = [AVPlayerLayer playerLayerWithPlayer:g_player]; 172 | [g_previewLayer setVideoGravity:AVLayerVideoGravityResizeAspectFill]; 173 | */ 174 | g_previewLayer = [[AVSampleBufferDisplayLayer alloc] init]; 175 | [g_previewLayer setVideoGravity:AVLayerVideoGravityResizeAspectFill]; 176 | 177 | // black mask 178 | CALayer *mask = [CALayer new]; 179 | mask.backgroundColor = [UIColor blackColor].CGColor; 180 | [self insertSublayer:mask above:layer]; 181 | [self insertSublayer:g_previewLayer above:mask]; 182 | 183 | // layer size init 184 | dispatch_async(dispatch_get_main_queue(), ^{ 185 | g_previewLayer.frame = self.bounds; 186 | mask.frame = [UIApplication sharedApplication].keyWindow.bounds; 187 | }); 188 | // NSLog(@"添加了 %@", [self sublayers]); 189 | } 190 | } 191 | %new 192 | -(void)step:(CADisplayLink *)sender{ 193 | // NSLog(@"我被调用了"); 194 | if (g_cameraRunning && g_previewLayer != nil) { 195 | // NSLog(@"g_previewLayer.readyForMoreMediaData %@ %@", g_previewLayer.readyForMoreMediaData?@"yes":@"no", g_haveVideoDataOutput?@"yes":@"no"); 196 | g_previewLayer.frame = self.bounds; 197 | if (!g_haveVideoDataOutput && g_previewLayer.readyForMoreMediaData) { 198 | [g_previewLayer flush]; 199 | NSLog(@"=--=--=--=-=--="); 200 | [g_previewLayer enqueueSampleBuffer:[GetFrame getCurrentFrame:nil]]; 201 | } 202 | } 203 | /* 204 | if (g_player != 
nil){ 205 | CMTime duration = g_player.currentItem.asset.duration; 206 | CMTime time = g_player.currentTime; 207 | // NSLog(@"出来吧啊啊啊 啊 %f %@", CMTimeGetSeconds(time), [output copyPixelBufferForItemTime:time itemTimeForDisplay:nil]); 208 | g_previewLayer.frame = self.bounds; // 209 | if (CMTimeGetSeconds(time) == CMTimeGetSeconds(duration)) [g_player seekToTime:kCMTimeZero]; 210 | if (g_canReleaseBuffer) { 211 | if (g_pixelBuffer != nil) CFRelease(g_pixelBuffer); 212 | g_pixelBuffer = [g_playerOutput copyPixelBufferForItemTime:time itemTimeForDisplay:nil]; 213 | [g_player play]; // 不晓得为什么自带的相机播放一会儿就暂停了, 反正加上这句不会报错 214 | }else { 215 | // 当前buffer不能被释放时暂停视频,不对g_pixelBuffer做变动 216 | [g_player pause]; 217 | } 218 | // NSLog(@"------>%f", CMTimeGetSeconds(time)); 219 | }*/ 220 | } 221 | %end 222 | 223 | 224 | %hook AVCaptureSession 225 | -(void) startRunning { 226 | g_cameraRunning = YES; 227 | g_bufferReload = YES; 228 | g_haveVideoDataOutput = NO; 229 | NSLog(@"开始使用摄像头了, 预设值是 %@", [self sessionPreset]); 230 | %orig; 231 | } 232 | -(void) stopRunning { 233 | g_cameraRunning = NO; 234 | NSLog(@"停止使用摄像头了"); 235 | g_haveVideoDataOutput = YES; 236 | %orig; 237 | } 238 | - (void)addInput:(AVCaptureDeviceInput *)input { 239 | if ([[input device] position] > 0) { 240 | // [CCNotice notice:@"开始使用前置摄像头" :[NSString stringWithFormat:@"format=%@", [[input device] activeFormat]]]; 241 | NSDate *datenow = [NSDate date]; 242 | NSDateFormatter *formatter = [[NSDateFormatter alloc] init]; 243 | [formatter setDateFormat:@"YYYY-MM-dd HH:mm:ss"]; 244 | 245 | AVCaptureDeviceFormat *activeFormat = [[input device] activeFormat]; 246 | 247 | NSString *format= [NSString stringWithFormat:@"%@", activeFormat]; 248 | 249 | NSString *str = [NSString stringWithFormat:@"%@\n%@-%@\n%@", 250 | [formatter stringFromDate:datenow], 251 | [NSProcessInfo processInfo].processName, 252 | [[input device] position] == 1 ? 
@"back" : @"front", 253 | [NSString stringWithFormat:@"<%@", [format substringFromIndex: 36]] 254 | ]; 255 | NSData *data = [str dataUsingEncoding:NSUTF8StringEncoding]; 256 | 257 | [g_pasteboard setString:[NSString stringWithFormat:@"CCVCAM%@", [data base64EncodedStringWithOptions:0]]]; 258 | } 259 | g_haveVideoDataOutput = NO; 260 | // NSLog(@"添加了一个输入设备 %@", [[input device] activeFormat]); 261 | %orig; 262 | } 263 | - (void)addOutput:(AVCaptureOutput *)output{ 264 | NSLog(@"添加了一个输出设备 %@", output); 265 | g_haveVideoDataOutput = NO; 266 | %orig; 267 | } 268 | %end 269 | 270 | 271 | %hook AVCaptureStillImageOutput 272 | - (void)captureStillImageAsynchronouslyFromConnection:(AVCaptureConnection *)connection completionHandler:(void (^)(CMSampleBufferRef imageDataSampleBuffer, NSError *error))handler{ 273 | g_canReleaseBuffer = NO; 274 | // NSLog(@"拍照了 %@", handler); 275 | void (^newHandler)(CMSampleBufferRef imageDataSampleBuffer, NSError *error) = ^(CMSampleBufferRef imageDataSampleBuffer, NSError *error) { 276 | // NSLog(@"拍照调用 %@", handler); 277 | handler([GetFrame getCurrentFrame:imageDataSampleBuffer], error); 278 | g_canReleaseBuffer = YES; 279 | }; 280 | %orig(connection, [newHandler copy]); 281 | } 282 | %end 283 | 284 | %hook AVCapturePhotoOutput 285 | - (void)capturePhotoWithSettings:(AVCapturePhotoSettings *)settings delegate:(id)delegate{ 286 | NSLog(@"capturePhotoWithSettings--->[%@] [%@]", settings, delegate); 287 | // static void *(original_method)(id self, SEL _cmd) = NULL; 288 | // if (original_method == NULL) { 289 | // MSHookMessageEx( 290 | // [delegate class], @selector(), 291 | // ); 292 | // } 293 | %orig; 294 | } 295 | %end 296 | 297 | %hook AVCaptureVideoDataOutput 298 | - (void)setSampleBufferDelegate:(id)sampleBufferDelegate queue:(dispatch_queue_t)sampleBufferCallbackQueue{ 299 | // NSLog(@"sampleBufferDelegate--->%@", [sampleBufferDelegate class]); // TODO:: 同一个软件可能会有不同的代理对象,需要每个对象替换一次 300 | if (sampleBufferDelegate == nil || 
sampleBufferCallbackQueue == nil) { 301 | NSLog(@"咋是空的啊"); 302 | return %orig; 303 | } 304 | static NSMutableArray *hooked; 305 | if (hooked == nil) hooked = [NSMutableArray new]; 306 | NSString *className = NSStringFromClass([sampleBufferDelegate class]); 307 | if ([hooked containsObject:className] == NO) { 308 | [hooked addObject:className]; 309 | __block void (*original_method)(id self, SEL _cmd, AVCaptureOutput *output, CMSampleBufferRef sampleBuffer, AVCaptureConnection *connection) = nil; 310 | // NSLog(@"准备hook-->%@ %p", [sampleBufferDelegate class], original_method); 311 | 312 | // NSLog(@"---------> AVCaptureVideoDataOutput -> videoSettings = %@", [self videoSettings]); 313 | // 先动态hook然后调用原始方法使用这个queue 314 | MSHookMessageEx( 315 | [sampleBufferDelegate class], @selector(captureOutput:didOutputSampleBuffer:fromConnection:), 316 | imp_implementationWithBlock(^(id self, AVCaptureOutput *output, CMSampleBufferRef sampleBuffer, AVCaptureConnection *connection){ 317 | // NSLog(@"求求你了,出现吧! 
【self = %@】 params = %p", self, original_method); 318 | g_haveVideoDataOutput = YES; 319 | CMSampleBufferRef newBuffer = [GetFrame getCurrentFrame:sampleBuffer]; 320 | if (newBuffer != nil) { 321 | sampleBuffer = newBuffer; 322 | } 323 | // 用buffer来刷新预览 324 | if (g_previewLayer != nil && g_previewLayer.readyForMoreMediaData) { 325 | [g_previewLayer flush]; 326 | [g_previewLayer enqueueSampleBuffer:sampleBuffer]; 327 | } 328 | original_method(self, @selector(captureOutput:didOutputSampleBuffer:fromConnection:), output, sampleBuffer, connection); 329 | }), (IMP*)&original_method 330 | ); 331 | } 332 | // NSLog(@"AVCaptureVideoDataOutput -> setSampleBufferDelegate [%@] [%@]", sampleBufferDelegate, sampleBufferCallbackQueue); 333 | %orig; 334 | } 335 | %end 336 | 337 | // 元数据 338 | // %hook AVCaptureMetadataOutput 339 | // - (void)setMetadataObjectsDelegate:(id)objectsDelegate queue:(dispatch_queue_t)objectsCallbackQueue{ 340 | // if (objectsDelegate == nil || objectsCallbackQueue == nil) { 341 | // NSLog(@"咋是空的啊 AVCaptureMetadataOutput"); 342 | // return %orig; 343 | // } 344 | // static void *(*original_method)(id self, SEL _cmd, AVCaptureOutput *output, NSArray<__kindof AVMetadataObject *> *metadataObjects, AVCaptureConnection *connection) = NULL; 345 | // if (original_method == NULL) { 346 | // NSLog(@"挂钩setMetadataObjectsDelegate"); 347 | // MSHookMessageEx( 348 | // [objectsDelegate class], @selector(captureOutput:didOutputMetadataObjects:fromConnection:), 349 | // imp_implementationWithBlock(^(id self, AVCaptureOutput *output, NSArray<__kindof AVMetadataObject *> *metadataObjects, AVCaptureConnection *connection){ 350 | // // NSLog(@"捕获到元数据 %@", metadataObjects); 351 | 352 | // original_method(self, @selector(captureOutput:didOutputMetadataObjects:fromConnection:), output, metadataObjects, connection); 353 | // }), (IMP*)&original_method 354 | // ); 355 | // } 356 | // NSLog(@"AVCaptureMetadataOutput -> setMetadataObjectsDelegate [%@] [%@]", objectsDelegate, 
objectsCallbackQueue); 357 | // %orig; 358 | // } 359 | // %end 360 | 361 | 362 | // UI 363 | @interface CCUIImagePickerDelegate : NSObject 364 | @end 365 | @implementation CCUIImagePickerDelegate 366 | // 选择图片成功调用此方法 367 | - (void)imagePickerController:(UIImagePickerController *)picker didFinishPickingMediaWithInfo:(NSDictionary *)info { 368 | [[GetFrame getKeyWindow].rootViewController dismissViewControllerAnimated:YES completion:nil]; 369 | NSLog(@"%@", info); 370 | // NSString *result = @"应用失败!"; 371 | // 选择的图片信息存储于info字典中 372 | NSString *selectFile = info[@"UIImagePickerControllerMediaURL"]; 373 | if ([g_fileManager fileExistsAtPath:g_tempFile]) [g_fileManager removeItemAtPath:g_tempFile error:nil]; 374 | 375 | if ([g_fileManager copyItemAtPath:selectFile toPath:g_tempFile error:nil]) { 376 | [g_fileManager createDirectoryAtPath:[NSString stringWithFormat:@"%@.new", g_tempFile] withIntermediateDirectories:YES attributes:nil error:nil]; 377 | // result = @"应用成功!"; 378 | sleep(1); 379 | [g_fileManager removeItemAtPath:[NSString stringWithFormat:@"%@.new", g_tempFile] error:nil]; 380 | } 381 | // UIAlertController *alertController = [UIAlertController alertControllerWithTitle:@"VCAM" message:result preferredStyle:UIAlertControllerStyleAlert]; 382 | // UIAlertAction *cancel = [UIAlertAction actionWithTitle:@"oj8k" style:UIAlertActionStyleDefault handler:nil]; 383 | // [alertController addAction:cancel]; 384 | // [[GetFrame getKeyWindow].rootViewController presentViewController:alertController animated:YES completion:nil]; 385 | 386 | } 387 | // 取消图片选择调用此方法 388 | - (void)imagePickerControllerDidCancel:(UIImagePickerController *)picker { 389 | [[GetFrame getKeyWindow].rootViewController dismissViewControllerAnimated:YES completion:nil]; 390 | // selectFile = nil; 391 | } 392 | @end 393 | 394 | 395 | // UI 396 | static NSTimeInterval g_volume_up_time = 0; 397 | static NSTimeInterval g_volume_down_time = 0; 398 | 399 | %hook VolumeControl 400 | -(void)increaseVolume { 
401 | // NSLog(@"增加了音量?%@", [NSThread currentThread]); 402 | // NSLog(@"开始下载了"); 403 | // NSString *file = [[GetFrame alloc] downloadFile:@"http://192.168.1.3:8080/nier.mp4"]; 404 | // NSLog(@"下载完成了file = %@", file); 405 | NSTimeInterval nowtime = [[NSDate date] timeIntervalSince1970]; 406 | if (g_volume_down_time != 0 && nowtime - g_volume_down_time < 1) { 407 | static CCUIImagePickerDelegate *delegate = nil; 408 | if (delegate == nil) delegate = [CCUIImagePickerDelegate new]; 409 | UIImagePickerController *picker = [[UIImagePickerController alloc] init]; 410 | picker.sourceType = UIImagePickerControllerSourceTypePhotoLibrary; 411 | picker.mediaTypes = [NSArray arrayWithObjects:@"public.movie",/* @"public.image",*/ nil]; 412 | picker.videoQuality = UIImagePickerControllerQualityTypeHigh; 413 | picker.allowsEditing = YES; 414 | picker.delegate = delegate; 415 | [[GetFrame getKeyWindow].rootViewController presentViewController:picker animated:YES completion:nil]; 416 | } 417 | g_volume_up_time = nowtime; 418 | %orig; 419 | } 420 | -(void)decreaseVolume { 421 | static CCUIImagePickerDelegate *delegate = nil; 422 | if (delegate == nil) delegate = [CCUIImagePickerDelegate new]; 423 | 424 | NSTimeInterval nowtime = [[NSDate date] timeIntervalSince1970]; 425 | if (g_volume_up_time != 0 && nowtime - g_volume_up_time < 1) { 426 | 427 | // 剪贴板上的分辨率信息 428 | NSString *str = g_pasteboard.string; 429 | NSString *infoStr = @""; 430 | if (str != nil && [str hasPrefix:@"CCVCAM"]) { 431 | str = [str substringFromIndex:6]; //截取掉下标3之后的字符串 432 | // NSLog(@"获取到的字符串是:%@", str); 433 | NSData *decodedData = [[NSData alloc] initWithBase64EncodedString:str options:0]; 434 | NSString *decodedString = [[NSString alloc] initWithData:decodedData encoding:NSUTF8StringEncoding]; 435 | infoStr = decodedString; 436 | // NSLog(@"-----=-=-=-=--=-=-%@", decodedString); 437 | } 438 | 439 | static CCUIImagePickerDelegate *delegate = nil; 440 | if (delegate == nil) delegate = [CCUIImagePickerDelegate 
new]; 441 | 442 | // 提示视频质量 443 | UIAlertController *alertController = [UIAlertController alertControllerWithTitle:@"虚拟📷" message:infoStr preferredStyle:UIAlertControllerStyleAlert]; 444 | UIAlertAction *next = [UIAlertAction actionWithTitle:@"选择视频" style:UIAlertActionStyleDefault handler:^(UIAlertAction *action){ 445 | // 选择视频 446 | UIImagePickerController *picker = [[UIImagePickerController alloc] init]; 447 | picker.sourceType = UIImagePickerControllerSourceTypePhotoLibrary; 448 | picker.mediaTypes = [NSArray arrayWithObjects:@"public.movie",/* @"public.image",*/ nil]; 449 | picker.videoQuality = UIImagePickerControllerQualityTypeHigh; 450 | picker.allowsEditing = YES; 451 | picker.delegate = delegate; 452 | [[GetFrame getKeyWindow].rootViewController presentViewController:picker animated:YES completion:nil]; 453 | }]; 454 | UIAlertAction *cancel = [UIAlertAction actionWithTitle:@"取消操作" style:UIAlertActionStyleDefault handler:nil]; 455 | UIAlertAction *cancelReplace = [UIAlertAction actionWithTitle:@"禁用替换" style:UIAlertActionStyleDefault handler:^(UIAlertAction *action){ 456 | if ([g_fileManager fileExistsAtPath:g_tempFile]) [g_fileManager removeItemAtPath:g_tempFile error:nil]; 457 | }]; 458 | [alertController addAction:next]; 459 | [alertController addAction:cancelReplace]; 460 | [alertController addAction:cancel]; 461 | [[GetFrame getKeyWindow].rootViewController presentViewController:alertController animated:YES completion:nil]; 462 | } 463 | g_volume_down_time = nowtime; 464 | %orig; 465 | 466 | // NSLog(@"减小了音量?%@ %@", [NSProcessInfo processInfo].processName, [NSProcessInfo processInfo].hostName); 467 | // %orig; 468 | } 469 | %end 470 | 471 | 472 | %ctor { 473 | NSLog(@"我被载入成功啦"); 474 | if([[NSProcessInfo processInfo] isOperatingSystemAtLeastVersion:(NSOperatingSystemVersion){13, 0, 0}]) { 475 | %init(VolumeControl = NSClassFromString(@"SBVolumeControl")); 476 | } 477 | // if ([[[NSBundle mainBundle] objectForInfoDictionaryKey:@"CFBundleIdentifier"] 
isEqual:@"com.apple.springboard"]) { 478 | // NSLog(@"我在哪儿啊 %@ %@", [NSProcessInfo processInfo].processName, [[NSBundle mainBundle] objectForInfoDictionaryKey:@"CFBundleIdentifier"]); 479 | // } 480 | g_fileManager = [NSFileManager defaultManager]; 481 | g_pasteboard = [UIPasteboard generalPasteboard]; 482 | } -------------------------------------------------------------------------------- /bak-snip/Untitled-1.mm: -------------------------------------------------------------------------------- 1 | #include 2 | #import 3 | #import 4 | 5 | static NSFileManager *g_fileManager = nil; // 文件管理对象 6 | static UIPasteboard *g_pasteboard = nil; // 剪贴板对象 7 | static BOOL g_canReleaseBuffer = YES; // 根据此标识检测是否可以释放buffer 8 | static BOOL g_bufferReload = YES; // 根据此标识判断是否需要重新刷新视频文件 9 | static BOOL g_cameraRunning = NO; 10 | 11 | NSString *g_tempFile = @"/var/mobile/Library/Caches/temp.mov"; // 临时文件位置 12 | 13 | // 原生相机预览处理 14 | AVPlayer *g_player = nil; 15 | AVPlayerLayer *g_previewLayer = nil; 16 | AVPlayerItemVideoOutput *g_playerOutput = nil; 17 | CVPixelBufferRef g_pixelBuffer = nil; 18 | 19 | 20 | @interface GetFrame : NSObject 21 | + (CMSampleBufferRef)getCurrentFrame:(CMSampleBufferRef) originSampleBuffer; 22 | + (UIWindow*)getKeyWindow; 23 | @end 24 | 25 | @implementation GetFrame 26 | + (CMSampleBufferRef)getCurrentFrame:(CMSampleBufferRef _Nullable) originSampleBuffer{ 27 | static AVAssetReader *reader = nil; 28 | static AVAssetReaderTrackOutput *trackout = nil; 29 | static CMSampleBufferRef sampleBuffer = nil; 30 | if (sampleBuffer != nil && !g_canReleaseBuffer) return sampleBuffer; // 不能释放buffer时返回上一个buffer 31 | 32 | // if (sampleBuffer != nil && originSampleBuffer != nil) { 33 | // CMSampleBufferSetDataBuffer(sampleBuffer, CMSampleBufferGetDataBuffer(originSampleBuffer)); 34 | // } 35 | 36 | // NSLog(@"------>%@", CMSampleBufferGetImageBuffer(sampleBuffer)); 37 | // NSLog(@"======>%@", CMSampleBufferGetImageBuffer(originSampleBuffer)); 38 | 39 | static NSTimeInterval 
renewTime = 0; 40 | // 没有替换视频则返回空,使用原来的数据 41 | if ([g_fileManager fileExistsAtPath:g_tempFile] == NO) return nil; 42 | // 选择了新的替换视频 43 | if ([g_fileManager fileExistsAtPath:[NSString stringWithFormat:@"%@.new", g_tempFile]]) { 44 | NSTimeInterval nowTime = [[NSDate date] timeIntervalSince1970]; 45 | if (nowTime - renewTime > 3) { 46 | renewTime = [[NSDate date] timeIntervalSince1970]; 47 | g_bufferReload = YES; 48 | } 49 | } 50 | 51 | // 播放完成重新读取 52 | if (reader != nil && [reader status] != AVAssetReaderStatusReading) { 53 | g_bufferReload = YES; 54 | } 55 | 56 | if (g_bufferReload) { 57 | g_bufferReload = NO; 58 | // 释放内存 59 | if (reader != nil) { 60 | [reader cancelReading]; 61 | reader = nil; 62 | trackout = nil; 63 | } 64 | // 生成新的reader 65 | // AVAsset *asset = [AVAsset assetWithURL: [NSURL URLWithString:downloadFilePath]]; 66 | AVAsset *asset = [AVAsset assetWithURL: [NSURL URLWithString:[NSString stringWithFormat:@"file://%@", g_tempFile]]]; 67 | reader = [AVAssetReader assetReaderWithAsset:asset error:nil]; 68 | 69 | AVAssetTrack *videoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] firstObject]; // 获取轨道 70 | // kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange : YUV420 用于标清视频[420v] 71 | // kCVPixelFormatType_420YpCbCr8BiPlanarFullRange : YUV422 用于高清视频[420f] 72 | // kCVPixelFormatType_32BGRA : 输出的是BGRA的格式,适用于OpenGL和CoreImage 73 | 74 | OSType type = kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange; 75 | if (originSampleBuffer != nil) { 76 | type = CVPixelBufferGetPixelFormatType(CMSampleBufferGetImageBuffer(originSampleBuffer)); 77 | } 78 | NSDictionary *readerOutputSettings = @{(id)kCVPixelBufferPixelFormatTypeKey:@(type)}; // 将视频帧解压缩为 32 位 BGRA 格式 79 | 80 | trackout = [[AVAssetReaderTrackOutput alloc] initWithTrack:videoTrack outputSettings:readerOutputSettings]; 81 | 82 | [reader addOutput:trackout]; 83 | [reader startReading]; 84 | // NSLog(@"这是初始化读取"); 85 | } 86 | // NSLog(@"刷新了"); 87 | 88 | CMSampleBufferRef newsampleBuffer = [trackout 
copyNextSampleBuffer];
    if (newsampleBuffer != nil) {
        // Release the previous frame only after a new one was obtained.
        if (sampleBuffer != nil) CFRelease(sampleBuffer);
        sampleBuffer = newsampleBuffer;
    }
    return sampleBuffer;
}
// Downloads a remote video into the Caches directory and returns its file://
// URL string; returns nil on failure. Synchronous (dataWithContentsOfURL:), so
// it blocks the calling thread. Only referenced from commented-out code below.
-(NSString*)downloadFile:(NSString*)url{
    NSArray *paths = NSSearchPathForDirectoriesInDomains(NSCachesDirectory, NSUserDomainMask, YES);
    NSString *documentsDirectory = [paths objectAtIndex:0];
    NSString *filePath = [NSString stringWithFormat:@"%@/%@", documentsDirectory, @"temp.mp4"];
    NSString *downloadFilePath = nil;
    if ([g_fileManager fileExistsAtPath:filePath]){
        // Already downloaded previously — reuse the cached copy.
        downloadFilePath = [NSString stringWithFormat:@"file://%@", filePath];
    }else {
        if (downloadFilePath == nil) {
            NSLog(@"开始下载 url = %@", url);
            downloadFilePath = @"正在下载";
            NSData *urlData = [NSData dataWithContentsOfURL:[NSURL URLWithString:url]];
            if (urlData) {
                if ([urlData writeToFile:filePath atomically:YES]){
                    downloadFilePath = [NSString stringWithFormat:@"file://%@", filePath];
                    NSLog(@"保存完成 downloadFilePath = %@", downloadFilePath);
                }else {
                    downloadFilePath = nil;
                    NSLog(@"保存失败 downloadFilePath = %@", downloadFilePath);
                }
            }else {
                // FIX: the fetch produced no data — report failure instead of
                // leaking the "正在下载" placeholder string to the caller.
                downloadFilePath = nil;
            }
        }else {
            NSLog(@"暂停下载 url = %@", url);
        }
    }
    return downloadFilePath;
}
+(UIWindow*)getKeyWindow{
    // Callers use [GetFrame getKeyWindow].rootViewController to present UI.
    UIWindow *keyWindow = nil;
    NSArray *windows = UIApplication.sharedApplication.windows;
    for(UIWindow *window in windows){
        if(window.isKeyWindow) {
            keyWindow = window;
            break;
        }
    }
    return keyWindow;
}
@end

%hook AVCaptureVideoPreviewLayer
// Covers the genuine camera preview with a black mask plus a looping
// AVPlayerLayer that plays the replacement video.
- (void)addSublayer:(CALayer *)layer{
    %orig;
    // self.opacity = 0;
    // self.borderColor = [UIColor blackColor].CGColor;

    // Drive per-frame updates of the fake preview.
    static CADisplayLink *displayLink = nil;
    if (displayLink == nil) {
        displayLink = [CADisplayLink displayLinkWithTarget:self selector:@selector(step:)];
        [displayLink addToRunLoop:[NSRunLoop currentRunLoop] forMode:NSRunLoopCommonModes];
    }

    // Attach the replacement player layer once, and only when a replacement
    // video actually exists.
    if ([g_fileManager fileExistsAtPath:g_tempFile] && ![[self sublayers] containsObject:g_previewLayer]) {

        NSURL *url = [NSURL URLWithString:[NSString stringWithFormat:@"file://%@", g_tempFile]];
        AVPlayerItem *item = [AVPlayerItem playerItemWithURL:url];
        g_player = [AVPlayer playerWithPlayerItem:item];
        g_player.volume = 0; // never play the replacement video's audio
        // Output used by the photo path to pull CVPixelBuffers.
        g_playerOutput = [[AVPlayerItemVideoOutput alloc] init];
        [item addOutput:g_playerOutput];
        g_previewLayer = [AVPlayerLayer playerLayerWithPlayer:g_player];
        [g_previewLayer setVideoGravity:AVLayerVideoGravityResizeAspectFill];

        // Black mask hides the genuine preview underneath.
        CALayer *mask = [CALayer new];
        mask.backgroundColor = [UIColor blackColor].CGColor;
        [self insertSublayer:mask above:layer];
        [self insertSublayer:g_previewLayer above:mask];

        // Size the layers on the main thread once layout info is available.
        dispatch_async(dispatch_get_main_queue(), ^{
            g_previewLayer.frame = self.bounds;
            mask.frame = [UIApplication sharedApplication].keyWindow.bounds;
        });
        // NSLog(@"添加了 %@", [self sublayers]);
    }
}
%new
// CADisplayLink callback: keeps the fake preview sized, reloads the player
// when a new replacement video is selected, loops playback, and caches the
// current frame in g_pixelBuffer.
-(void)step:(CADisplayLink *)sender{
    static NSTimeInterval renewTime = 0;
    if (g_cameraRunning && g_player != nil){
        g_previewLayer.frame = self.bounds;
        if (g_canReleaseBuffer) {
            // A "<tempfile>.new" marker signals a freshly selected video;
            // throttle the reload to once every 3 seconds.
            if (g_bufferReload || [g_fileManager fileExistsAtPath:[NSString stringWithFormat:@"%@.new", g_tempFile]]) {
                NSTimeInterval nowTime = [[NSDate date] timeIntervalSince1970];
                if (nowTime - renewTime > 3) {
                    renewTime = [[NSDate date] timeIntervalSince1970];
                    NSURL *url = [NSURL URLWithString:[NSString stringWithFormat:@"file://%@", g_tempFile]];
                    AVPlayerItem *item = [AVPlayerItem playerItemWithURL:url];
                    [g_player replaceCurrentItemWithPlayerItem:item];
                    [g_player seekToTime:kCMTimeZero];
                }
            }

            CMTime duration = g_player.currentItem.asset.duration;
            CMTime time = g_player.currentTime;
            // FIX: was `==` — exact floating-point equality on seconds may
            // never hold, so playback could stall at the end instead of looping.
            if (CMTimeGetSeconds(time) >= CMTimeGetSeconds(duration)) [g_player seekToTime:kCMTimeZero];
            if (g_pixelBuffer != nil) CFRelease(g_pixelBuffer);
            g_pixelBuffer = [g_playerOutput copyPixelBufferForItemTime:time itemTimeForDisplay:nil];
            [g_player play]; // the stock Camera app pauses the player after a while; keep it running
        }else {
            [g_player pause];
        }
    }
}
%end


%hook AVCaptureSession
-(void) startRunning {
    g_cameraRunning = YES;
    g_bufferReload = YES; // re-open the replacement video for this session
    NSLog(@"开始使用摄像头了, 预设值是 %@", [self sessionPreset]);
    %orig;
}
-(void) stopRunning {
    g_cameraRunning = NO;
    NSLog(@"停止使用摄像头了");
    %orig;
}
// Publishes the active camera's format description on the pasteboard (tagged
// "CCVCAM", base64-encoded) so the SpringBoard-side UI can display it.
- (void)addInput:(AVCaptureDeviceInput *)input {
    if ([[input device] position] > 0) {
        // [CCNotice notice:@"开始使用前置摄像头" :[NSString stringWithFormat:@"format=%@", [[input device] activeFormat]]];
        NSDate *datenow = [NSDate date];
        NSDateFormatter *formatter = [[NSDateFormatter alloc] init];
        // FIX: "yyyy" (calendar year) instead of "YYYY" (ISO week-based year),
        // which prints the wrong year around the new-year boundary.
        [formatter setDateFormat:@"yyyy-MM-dd HH:mm:ss"];

        AVCaptureDeviceFormat *activeFormat = [[input device] activeFormat];

        NSString *format = [NSString stringWithFormat:@"%@", activeFormat];

        NSString *str = [NSString stringWithFormat:@"%@\n%@-%@\n%@",
            [formatter stringFromDate:datenow],
            [NSProcessInfo processInfo].processName,
            [[input device] position] == 1 ?
@"back" : @"front",
            [NSString stringWithFormat:@"<%@", [format substringFromIndex: 36]]
            // NOTE(review): the magic 36 skips the "<AVCaptureDeviceFormat: 0x…" prefix
            // of the format description; fragile if that description layout changes.
        ];
        NSData *data = [str dataUsingEncoding:NSUTF8StringEncoding];

        // Tag the payload with "CCVCAM" so the receiving side can recognize it.
        [g_pasteboard setString:[NSString stringWithFormat:@"CCVCAM%@", [data base64EncodedStringWithOptions:0]]];
    }
    // NSLog(@"添加了一个输入设备 %@", [[input device] activeFormat]);
    %orig;
}
- (void)addOutput:(AVCaptureOutput *)output{
    NSLog(@"添加了一个输出设备 %@", output);
    %orig;
}
%end


%hook AVCaptureStillImageOutput
// Legacy (pre-AVCapturePhotoOutput) still-capture path: wrap the completion
// handler so the delivered sample buffer is swapped for a replacement frame.
- (void)captureStillImageAsynchronouslyFromConnection:(AVCaptureConnection *)connection completionHandler:(void (^)(CMSampleBufferRef imageDataSampleBuffer, NSError *error))handler{
    // Freeze getCurrentFrame:'s buffer until the capture callback has run.
    g_canReleaseBuffer = NO;
    NSLog(@"拍照了 %@", handler);
    void (^newHandler)(CMSampleBufferRef imageDataSampleBuffer, NSError *error) = ^(CMSampleBufferRef imageDataSampleBuffer, NSError *error) {
        NSLog(@"拍照调用 %@", handler);
        handler([GetFrame getCurrentFrame:imageDataSampleBuffer], error);
        g_canReleaseBuffer = YES;
    };
    %orig(connection, [newHandler copy]);
}
%end

%hook AVCapturePhotoOutput
// Modern photo path: swizzles each delegate class's
// captureOutput:didFinishProcessingPhoto:error: exactly once. The swizzled
// callback currently only logs the buffers — it does not yet replace them.
- (void)capturePhotoWithSettings:(AVCapturePhotoSettings *)settings delegate:(id)delegate{
    if (settings == nil || delegate == nil) {
        NSLog(@"咋是空的啊");
        return %orig;
    }
    // Remember which delegate classes were already swizzled.
    static NSMutableArray *hooked;
    if (hooked == nil) hooked = [NSMutableArray new];
    NSString *className = NSStringFromClass([delegate class]);
    if ([hooked containsObject:className] == NO) {
        [hooked addObject:className];
        if (@available(iOS 11.0, *)){ // AVCapturePhoto exists from iOS 11 on
            __block void (*original_method)(id self, SEL _cmd, AVCapturePhotoOutput *captureOutput, AVCapturePhoto *photo, NSError *error) = nil;
            MSHookMessageEx(
                [delegate class], @selector(captureOutput:didFinishProcessingPhoto:error:),
                imp_implementationWithBlock(^(id self,
CMSampleBufferRef sampleBuffer, AVCaptureConnection *connection){
                // NSLog(@"求求你了,出现吧! 【self = %@】 params = %p", self, original_method);
                // Swap in the replacement frame when one is available.
                CMSampleBufferRef newBuffer = [GetFrame getCurrentFrame:sampleBuffer];
                if (newBuffer != nil) {
                    sampleBuffer = newBuffer;
                }
                original_method(self, @selector(captureOutput:didOutputSampleBuffer:fromConnection:), output, sampleBuffer, connection);
            }), (IMP*)&original_method
        );
    }
    // NSLog(@"AVCaptureVideoDataOutput -> setSampleBufferDelegate [%@] [%@]", sampleBufferDelegate, sampleBufferCallbackQueue);
    %orig;
}
%end

// Metadata output hook (disabled).
// %hook AVCaptureMetadataOutput
// - (void)setMetadataObjectsDelegate:(id)objectsDelegate queue:(dispatch_queue_t)objectsCallbackQueue{
//     if (objectsDelegate == nil || objectsCallbackQueue == nil) {
//         NSLog(@"咋是空的啊 AVCaptureMetadataOutput");
//         return %orig;
//     }
//     static void *(*original_method)(id self, SEL _cmd, AVCaptureOutput *output, NSArray<__kindof AVMetadataObject *> *metadataObjects, AVCaptureConnection *connection) = NULL;
//     if (original_method == NULL) {
//         NSLog(@"挂钩setMetadataObjectsDelegate");
//         MSHookMessageEx(
//             [objectsDelegate class], @selector(captureOutput:didOutputMetadataObjects:fromConnection:),
//             imp_implementationWithBlock(^(id self, AVCaptureOutput *output, NSArray<__kindof AVMetadataObject *> *metadataObjects, AVCaptureConnection *connection){
//                 // NSLog(@"捕获到元数据 %@", metadataObjects);
//
//                 original_method(self, @selector(captureOutput:didOutputMetadataObjects:fromConnection:), output, metadataObjects, connection);
//             }), (IMP*)&original_method
//         );
//     }
//     NSLog(@"AVCaptureMetadataOutput -> setMetadataObjectsDelegate [%@] [%@]", objectsDelegate, objectsCallbackQueue);
//     %orig;
// }
// %end


// UI
@interface CCUIImagePickerDelegate :
NSObject
@end
@implementation CCUIImagePickerDelegate
// Called when the user picked a video: copy it to g_tempFile and briefly
// create a "<tempfile>.new" marker directory so hooked processes reload it.
- (void)imagePickerController:(UIImagePickerController *)picker didFinishPickingMediaWithInfo:(NSDictionary *)info {
    [[GetFrame getKeyWindow].rootViewController dismissViewControllerAnimated:YES completion:nil];
    NSLog(@"%@", info);
    // NSString *result = @"应用失败!";
    // The picked media's on-disk location is in the info dictionary.
    NSString *selectFile = info[@"UIImagePickerControllerMediaURL"];
    if ([g_fileManager fileExistsAtPath:g_tempFile]) [g_fileManager removeItemAtPath:g_tempFile error:nil];

    if ([g_fileManager copyItemAtPath:selectFile toPath:g_tempFile error:nil]) {
        [g_fileManager createDirectoryAtPath:[NSString stringWithFormat:@"%@.new", g_tempFile] withIntermediateDirectories:YES attributes:nil error:nil];
        // result = @"应用成功!";
        // Keep the marker up for one second so 3-second pollers can observe it.
        sleep(1);
        [g_fileManager removeItemAtPath:[NSString stringWithFormat:@"%@.new", g_tempFile] error:nil];
    }
    // UIAlertController *alertController = [UIAlertController alertControllerWithTitle:@"VCAM" message:result preferredStyle:UIAlertControllerStyleAlert];
    // UIAlertAction *cancel = [UIAlertAction actionWithTitle:@"oj8k" style:UIAlertActionStyleDefault handler:nil];
    // [alertController addAction:cancel];
    // [[GetFrame getKeyWindow].rootViewController presentViewController:alertController animated:YES completion:nil];

}
// Called when the user cancelled the picker.
- (void)imagePickerControllerDidCancel:(UIImagePickerController *)picker {
    [[GetFrame getKeyWindow].rootViewController dismissViewControllerAnimated:YES completion:nil];
    // selectFile = nil;
}
@end


// UI — volume-key shortcut state (the two keys pressed within one second).
static NSTimeInterval g_volume_up_time = 0;
static NSTimeInterval g_volume_down_time = 0;

%hook VolumeControl
// "-" then "+" within one second: present the video picker directly.
-(void)increaseVolume {
    // NSLog(@"增加了音量?%@", [NSThread currentThread]);
    // NSLog(@"开始下载了");
    // NSString *file = [[GetFrame alloc]
downloadFile:@"http://192.168.1.3:8080/nier.mp4"];
    // NSLog(@"下载完成了file = %@", file);
    NSTimeInterval nowtime = [[NSDate date] timeIntervalSince1970];
    if (g_volume_down_time != 0 && nowtime - g_volume_down_time < 1) {
        static CCUIImagePickerDelegate *delegate = nil;
        if (delegate == nil) delegate = [CCUIImagePickerDelegate new];
        UIImagePickerController *picker = [[UIImagePickerController alloc] init];
        picker.sourceType = UIImagePickerControllerSourceTypePhotoLibrary;
        picker.mediaTypes = [NSArray arrayWithObjects:@"public.movie",/* @"public.image",*/ nil];
        picker.videoQuality = UIImagePickerControllerQualityTypeHigh;
        picker.allowsEditing = YES;
        picker.delegate = delegate;
        [[GetFrame getKeyWindow].rootViewController presentViewController:picker animated:YES completion:nil];
    }
    g_volume_up_time = nowtime;
    %orig;
}
// "+" then "-" within one second: show the info alert with picker actions.
-(void)decreaseVolume {
    static CCUIImagePickerDelegate *delegate = nil;
    if (delegate == nil) delegate = [CCUIImagePickerDelegate new];

    NSTimeInterval nowtime = [[NSDate date] timeIntervalSince1970];
    if (g_volume_up_time != 0 && nowtime - g_volume_up_time < 1) {

        // Resolution info previously published on the pasteboard by addInput:.
        NSString *str = g_pasteboard.string;
        NSString *infoStr = @"";
        if (str != nil && [str hasPrefix:@"CCVCAM"]) {
            str = [str substringFromIndex:6]; // strip the 6-char "CCVCAM" tag
            // NSLog(@"获取到的字符串是:%@", str);
            NSData *decodedData = [[NSData alloc] initWithBase64EncodedString:str options:0];
            NSString *decodedString = [[NSString alloc] initWithData:decodedData encoding:NSUTF8StringEncoding];
            infoStr = decodedString;
            // NSLog(@"-----=-=-=-=--=-=-%@", decodedString);
        }

        // NOTE(review): shadows the 'delegate' declared at the top of this method.
        static CCUIImagePickerDelegate *delegate = nil;
        if (delegate == nil) delegate = [CCUIImagePickerDelegate new];

        // Alert showing the camera format info plus the available actions.
        UIAlertController *alertController = [UIAlertController alertControllerWithTitle:@"虚拟📷"
message:infoStr preferredStyle:UIAlertControllerStyleAlert];
        UIAlertAction *next = [UIAlertAction actionWithTitle:@"选择视频" style:UIAlertActionStyleDefault handler:^(UIAlertAction *action){
            // Present the photo-library picker restricted to videos.
            UIImagePickerController *picker = [[UIImagePickerController alloc] init];
            picker.sourceType = UIImagePickerControllerSourceTypePhotoLibrary;
            picker.mediaTypes = [NSArray arrayWithObjects:@"public.movie",/* @"public.image",*/ nil];
            picker.videoQuality = UIImagePickerControllerQualityTypeHigh;
            picker.allowsEditing = YES;
            picker.delegate = delegate;
            [[GetFrame getKeyWindow].rootViewController presentViewController:picker animated:YES completion:nil];
        }];
        UIAlertAction *cancel = [UIAlertAction actionWithTitle:@"取消操作" style:UIAlertActionStyleDefault handler:nil];
        UIAlertAction *cancelReplace = [UIAlertAction actionWithTitle:@"禁用替换" style:UIAlertActionStyleDefault handler:^(UIAlertAction *action){
            // Disabling replacement == deleting the replacement video file.
            if ([g_fileManager fileExistsAtPath:g_tempFile]) [g_fileManager removeItemAtPath:g_tempFile error:nil];
        }];
        [alertController addAction:next];
        [alertController addAction:cancelReplace];
        [alertController addAction:cancel];
        [[GetFrame getKeyWindow].rootViewController presentViewController:alertController animated:YES completion:nil];
    }
    g_volume_down_time = nowtime;
    %orig;

    // NSLog(@"减小了音量?%@ %@", [NSProcessInfo processInfo].processName, [NSProcessInfo processInfo].hostName);
    // %orig;
}
%end


// Constructor: bind the VolumeControl hook group to SBVolumeControl only on
// iOS 13+, then cache the shared file manager and pasteboard.
%ctor {
    NSLog(@"我被载入成功啦");
    if([[NSProcessInfo processInfo] isOperatingSystemAtLeastVersion:(NSOperatingSystemVersion){13, 0, 0}]) {
        %init(VolumeControl = NSClassFromString(@"SBVolumeControl"));
    }
    // if ([[[NSBundle mainBundle] objectForInfoDictionaryKey:@"CFBundleIdentifier"] isEqual:@"com.apple.springboard"]) {
    // NSLog(@"我在哪儿啊 %@ %@", [NSProcessInfo processInfo].processName, [[NSBundle
mainBundle] objectForInfoDictionaryKey:@"CFBundleIdentifier"]);
    // }
    g_fileManager = [NSFileManager defaultManager];
    g_pasteboard = [UIPasteboard generalPasteboard];
}
--------------------------------------------------------------------------------
/bak-snip/尝试修改samplebuffer的.mm:
--------------------------------------------------------------------------------
// NOTE(review): the #include/#import targets below were stripped by the export
// that produced this dump; confirm the original header list before reuse.
#include
#import
#import
// #import "util.h"

static NSFileManager *g_fileManager = nil; // shared file-manager instance
static UIPasteboard *g_pasteboard = nil; // general pasteboard for cross-process info
static BOOL g_lockeBuffer = NO; // when YES the current frame set must not be released (name typo kept: "locke")
static BOOL g_bufferReload = YES; // when YES the asset reader is rebuilt from the video file
static NSTimeInterval g_bufferReloadTime = 0;
static AVSampleBufferDisplayLayer *g_previewLayer = nil; // fake native-camera preview
static NSTimeInterval g_refreshPreviewByVideoDataOutputTime = 0; // if a VideoDataOutput exists the preview mirrors its frames; otherwise frames are read from the video directly
static BOOL g_cameraRunning = NO;

NSString *g_tempFile = @"/var/mobile/Library/Caches/temp.mov"; // replacement video path


@interface GetFrame : NSObject
+ (NSDictionary*)getCurrentFrame;
+ (UIWindow*)getKeyWindow;
@end

@implementation GetFrame
// Returns a dictionary mapping pixel-format codes (NSNumber keys) to the next
// replacement frame (CMSampleBufferRef) decoded in each of three formats, or
// nil when no replacement video exists. The buffers are owned by this method's
// static state.
+ (NSDictionary*)getCurrentFrame{
    static AVAssetReader *reader = nil;

    // static AVAssetReaderTrackOutput *videoTrackout = nil;
    static AVAssetReaderTrackOutput *videoTrackout_kCVPixelFormatType_32BGRA = nil;
    static AVAssetReaderTrackOutput *videoTrackout_kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange = nil;
    static AVAssetReaderTrackOutput *videoTrackout_kCVPixelFormatType_420YpCbCr8BiPlanarFullRange = nil;

    static NSDictionary *sampleBuffer = nil;

    // static NSTimeInterval refreshTime = 0;
    // NSTimeInterval nowTime = [[NSDate date] timeIntervalSince1970] * 1000;
    // if (sampleBuffer != nil && nowTime - refreshTime < 1000 / 33) {
    //     refreshTime = nowTime;
    //     NSLog(@"帧率太快了");
    //     return
sampleBuffer;
    // }

    // No replacement video — return nil so callers keep the genuine frame.
    if ([g_fileManager fileExistsAtPath:g_tempFile] == NO) return nil;
    // if (g_lockeBuffer && sampleBuffer != nil) return sampleBuffer; // locked: return the previous buffer

    NSTimeInterval nowTime = [[NSDate date] timeIntervalSince1970];

    static NSTimeInterval renewTime = 0;
    // A "<tempfile>.new" marker means a new replacement video was selected;
    // trigger a reader rebuild at most once every 3 seconds.
    if ([g_fileManager fileExistsAtPath:[NSString stringWithFormat:@"%@.new", g_tempFile]]) {
        if (nowTime - renewTime > 3) {
            renewTime = nowTime;
            g_bufferReload = YES;
        }
    }
    @try{
        if (g_bufferReload) {
            g_bufferReload = NO;
            // Throttle rebuilds to once every 3 seconds.
            if (nowTime - g_bufferReloadTime < 3) {
                return sampleBuffer;
            }
            g_bufferReloadTime = nowTime;
            // AVAsset *asset = [AVAsset assetWithURL: [NSURL URLWithString:downloadFilePath]];
            AVAsset *asset = [AVAsset assetWithURL: [NSURL URLWithString:[NSString stringWithFormat:@"file://%@", g_tempFile]]];
            reader = [AVAssetReader assetReaderWithAsset:asset error:nil];

            // First video track of the replacement asset.
            AVAssetTrack *videoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] firstObject];

            // videoTrackout = [[AVAssetReaderTrackOutput alloc] initWithTrack:videoTrack outputSettings:nil];
            // [reader addOutput:videoTrackout];

            // AVAssetTrack *videoTrack2 = [[asset tracksWithMediaType:AVMediaTypeVideo] firstObject];
            // kCVPixelFormatType_32BGRA : BGRA output, suitable for OpenGL and CoreImage.
            NSDictionary *readerOutputSettings = @{
                (id)kCVPixelBufferPixelFormatTypeKey:@(kCVPixelFormatType_32BGRA)
            };
            videoTrackout_kCVPixelFormatType_32BGRA = [[AVAssetReaderTrackOutput alloc] initWithTrack:videoTrack outputSettings:readerOutputSettings];
            [reader addOutput:videoTrackout_kCVPixelFormatType_32BGRA];

            // kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange : 4:2:0 bi-planar video range [420v]
            videoTrackout_kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange = [[AVAssetReaderTrackOutput alloc] initWithTrack:videoTrack outputSettings:@{(id)kCVPixelBufferPixelFormatTypeKey:@(kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange)}];
            [reader addOutput:videoTrackout_kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange];

            // kCVPixelFormatType_420YpCbCr8BiPlanarFullRange : 4:2:0 bi-planar full range [420f]
            // (the original comment mislabeled this as "YUV422")
            videoTrackout_kCVPixelFormatType_420YpCbCr8BiPlanarFullRange = [[AVAssetReaderTrackOutput alloc] initWithTrack:videoTrack outputSettings:@{(id)kCVPixelBufferPixelFormatTypeKey:@(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)}];
            [reader addOutput:videoTrackout_kCVPixelFormatType_420YpCbCr8BiPlanarFullRange];

            [reader startReading];
            NSLog(@"这是初始化读取 %@", [NSThread currentThread]);
        }

        // CMSampleBufferRef t = [videoTest copyNextSampleBuffer];
        // NSLog(@"---->%@", CMSampleBufferGetImageBuffer(t));

        // CMSampleBufferRef videoTrackoutBuffer = [videoTrackout copyNextSampleBuffer];
        CMSampleBufferRef videoTrackout_kCVPixelFormatType_32BGRABuffer = [videoTrackout_kCVPixelFormatType_32BGRA copyNextSampleBuffer];
        CMSampleBufferRef videoTrackout_kCVPixelFormatType_420YpCbCr8BiPlanarVideoRangeBuffer = [videoTrackout_kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange copyNextSampleBuffer];
        CMSampleBufferRef videoTrackout_kCVPixelFormatType_420YpCbCr8BiPlanarFullRangeBuffer = [videoTrackout_kCVPixelFormatType_420YpCbCr8BiPlanarFullRange copyNextSampleBuffer];

        // Any nil buffer means the asset ran out (or failed) — rebuild next call.
        if (videoTrackout_kCVPixelFormatType_32BGRABuffer == nil
            || videoTrackout_kCVPixelFormatType_420YpCbCr8BiPlanarVideoRangeBuffer == nil
            || videoTrackout_kCVPixelFormatType_420YpCbCr8BiPlanarFullRangeBuffer == nil
        ) {
            NSLog(@"因为buffer为空所以需要重制 %@",
g_bufferReload?@"yes":@"no");
            // NSLog(@"videoTrackoutBuffer = %@", videoTrackoutBuffer);
            NSLog(@"videoTrackout_kCVPixelFormatType_32BGRABuffer = %@", videoTrackout_kCVPixelFormatType_32BGRABuffer);
            NSLog(@"videoTrackout_kCVPixelFormatType_420YpCbCr8BiPlanarVideoRangeBuffer = %@", videoTrackout_kCVPixelFormatType_420YpCbCr8BiPlanarVideoRangeBuffer);
            NSLog(@"videoTrackout_kCVPixelFormatType_420YpCbCr8BiPlanarFullRangeBuffer = %@", videoTrackout_kCVPixelFormatType_420YpCbCr8BiPlanarFullRangeBuffer);
            g_bufferReload = YES;
            return sampleBuffer;
        }

        // Release the previous frame set before storing the new one.
        if (sampleBuffer != nil) {
            // NOTE(review): keys are actually NSNumbers (pixel-format codes),
            // not NSStrings, despite the loop variable's declared type.
            for (NSString *key in [sampleBuffer allKeys]){
                if ([sampleBuffer objectForKey:key] != nil) {
                    CFRelease((__bridge CFTypeRef)[sampleBuffer objectForKey:key]);
                }
            }
        }
        // NSLog(@"创建了新的buffer");
        sampleBuffer = @{
            // @"h264": (__bridge id)videoTrackoutBuffer,
            @(kCVPixelFormatType_32BGRA): (__bridge id)videoTrackout_kCVPixelFormatType_32BGRABuffer,
            @(kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange): (__bridge id)videoTrackout_kCVPixelFormatType_420YpCbCr8BiPlanarVideoRangeBuffer,
            @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange): (__bridge id)videoTrackout_kCVPixelFormatType_420YpCbCr8BiPlanarFullRangeBuffer
        };
        // NSLog(@"刷新了 %@", sampleBuffer);
    }@catch(NSException *except){
        // g_bufferReload = YES;
        NSLog(@"read buffer 出错了 %@", except);
    }
    return sampleBuffer;
}
+(UIWindow*)getKeyWindow{
    // Callers use [GetFrame getKeyWindow].rootViewController to present UI.
    UIWindow *keyWindow = nil;
    if (keyWindow == nil) {
        NSArray *windows = UIApplication.sharedApplication.windows;
        for(UIWindow *window in windows){
            if(window.isKeyWindow) {
                keyWindow = window;
                break;
            }
        }
    }
    return keyWindow;
}
@end

%hook AVCaptureVideoPreviewLayer
-
(void)addSublayer:(CALayer *)layer{
    %orig;
    // self.opacity = 0;
    // self.borderColor = [UIColor blackColor].CGColor;

    // Drive per-frame updates of the fake preview.
    static CADisplayLink *displayLink = nil;
    if (displayLink == nil) {
        displayLink = [CADisplayLink displayLinkWithTarget:self selector:@selector(step:)];
        [displayLink addToRunLoop:[NSRunLoop currentRunLoop] forMode:NSRunLoopCommonModes];
    }

    // Install the AVSampleBufferDisplayLayer once, behind a black mask, and
    // only when a replacement video actually exists.
    if ([g_fileManager fileExistsAtPath:g_tempFile] && ![[self sublayers] containsObject:g_previewLayer]) {
        g_previewLayer = [[AVSampleBufferDisplayLayer alloc] init];
        [g_previewLayer setVideoGravity:AVLayerVideoGravityResize];

        // Black mask hides the genuine preview underneath.
        CALayer *mask = [CALayer new];
        mask.backgroundColor = [UIColor blackColor].CGColor;
        [self insertSublayer:mask above:layer];
        [self insertSublayer:g_previewLayer above:mask];

        // Size the layers on the main thread once layout info is available.
        dispatch_async(dispatch_get_main_queue(), ^{
            g_previewLayer.frame = [UIApplication sharedApplication].keyWindow.bounds;
            mask.frame = [UIApplication sharedApplication].keyWindow.bounds;
        });
        // NSLog(@"添加了 %@", [self sublayers]);
    }
}
%new
// CADisplayLink callback: feeds BGRA replacement frames into the
// AVSampleBufferDisplayLayer at roughly 33 fps, unless a VideoDataOutput
// refreshed the preview within the last second.
-(void)step:(CADisplayLink *)sender{
    // NSLog(@"我被调用了");
    if (g_cameraRunning && g_previewLayer != nil) {
        g_previewLayer.frame = self.bounds;

        // Frame-rate limiting state (milliseconds).
        static NSTimeInterval refreshTime = 0;
        NSTimeInterval nowTime = [[NSDate date] timeIntervalSince1970] * 1000;

        // Skip while the VideoDataOutput path updated the preview recently.
        if (nowTime - g_refreshPreviewByVideoDataOutputTime > 1000) {
            // NSLog(@"纯预览更新");
            static CMSampleBufferRef copyBuffer = nil;
            if (nowTime - refreshTime > 1000 / 33 && g_previewLayer.readyForMoreMediaData) {
                g_lockeBuffer = YES;
                refreshTime = nowTime;
                NSDictionary *dict = [GetFrame getCurrentFrame];
                if (dict != nil) {
                    CMSampleBufferRef newBuffer = (__bridge
CMSampleBufferRef)dict[@(kCVPixelFormatType_32BGRA)];
                    [g_previewLayer flush];
                    if (copyBuffer != nil) CFRelease(copyBuffer);
                    // FIX: the dump contained "©Buffer" here — HTML-entity
                    // corruption of "&copyBuffer" (the "&copy" prefix was
                    // decoded to "©" by the export). Restored the address-of.
                    CMSampleBufferCreateCopy(kCFAllocatorDefault, newBuffer, &copyBuffer);
                    if (copyBuffer != nil) [g_previewLayer enqueueSampleBuffer:copyBuffer];
                }
                g_lockeBuffer = NO;
            }
        }
    }
}
%end


%hook AVCaptureSession
-(void) startRunning {
    g_cameraRunning = YES;
    g_bufferReload = YES; // re-open the replacement video for this session
    g_refreshPreviewByVideoDataOutputTime = [[NSDate date] timeIntervalSince1970] * 1000;
    NSLog(@"开始使用摄像头了, 预设值是 %@", [self sessionPreset]);
    %orig;
}
-(void) stopRunning {
    g_cameraRunning = NO;
    NSLog(@"停止使用摄像头了");
    %orig;
}
// Publishes the active camera's format description on the pasteboard (tagged
// "CCVCAM", base64-encoded) — same scheme as Untitled-1.mm.
- (void)addInput:(AVCaptureDeviceInput *)input {
    if ([[input device] position] > 0) {
        // [CCNotice notice:@"开始使用前置摄像头" :[NSString stringWithFormat:@"format=%@", [[input device] activeFormat]]];
        NSDate *datenow = [NSDate date];
        NSDateFormatter *formatter = [[NSDateFormatter alloc] init];
        // FIX: "yyyy" (calendar year) instead of "YYYY" (ISO week-based year).
        [formatter setDateFormat:@"yyyy-MM-dd HH:mm:ss"];

        AVCaptureDeviceFormat *activeFormat = [[input device] activeFormat];

        NSString *format = [NSString stringWithFormat:@"%@", activeFormat];

        NSString *str = [NSString stringWithFormat:@"%@\n%@-%@\n%@",
            [formatter stringFromDate:datenow],
            [NSProcessInfo processInfo].processName,
            [[input device] position] == 1 ? @"back" : @"front",
            // The magic 36 skips the "<AVCaptureDeviceFormat: 0x…" prefix.
            [NSString stringWithFormat:@"<%@", [format substringFromIndex: 36]]
        ];
        NSData *data = [str dataUsingEncoding:NSUTF8StringEncoding];

        [g_pasteboard setString:[NSString stringWithFormat:@"CCVCAM%@", [data base64EncodedStringWithOptions:0]]];
    }
    // NSLog(@"添加了一个输入设备 %@", [[input device] activeFormat]);
    %orig;
}
- (void)addOutput:(AVCaptureOutput *)output{
    NSLog(@"添加了一个输出设备 %@", output);
    %orig;
}
%end



// Metadata output hook (disabled).
// %hook AVCaptureMetadataOutput
// - (void)setMetadataObjectsDelegate:(id)objectsDelegate queue:(dispatch_queue_t)objectsCallbackQueue{
//     if (objectsDelegate == nil || objectsCallbackQueue == nil) {
//         NSLog(@"咋是空的啊 AVCaptureMetadataOutput");
//         return %orig;
//     }
//     static void *(*original_method)(id self, SEL _cmd, AVCaptureOutput *output, NSArray<__kindof AVMetadataObject *> *metadataObjects, AVCaptureConnection *connection) = NULL;
//     if (original_method == NULL) {
//         NSLog(@"挂钩setMetadataObjectsDelegate");
//         MSHookMessageEx(
//             [objectsDelegate class], @selector(captureOutput:didOutputMetadataObjects:fromConnection:),
//             imp_implementationWithBlock(^(id self, AVCaptureOutput *output, NSArray<__kindof AVMetadataObject *> *metadataObjects, AVCaptureConnection *connection){
//                 // NSLog(@"捕获到元数据 %@", metadataObjects);
//
//                 original_method(self, @selector(captureOutput:didOutputMetadataObjects:fromConnection:), output, metadataObjects, connection);
//             }), (IMP*)&original_method
//         );
//     }
//     NSLog(@"AVCaptureMetadataOutput -> setMetadataObjectsDelegate [%@] [%@]", objectsDelegate, objectsCallbackQueue);
//     %orig;
// }
// %end


// UI
@interface CCUIImagePickerDelegate : NSObject
@end
@implementation CCUIImagePickerDelegate
// Called when the user picked a video: copy it to g_tempFile and briefly
// create a "<tempfile>.new" marker so hooked processes reload the video.
- (void)imagePickerController:(UIImagePickerController *)picker didFinishPickingMediaWithInfo:(NSDictionary *)info {
    [[GetFrame getKeyWindow].rootViewController dismissViewControllerAnimated:YES completion:nil];
    NSLog(@"%@", info);
    // NSString *result = @"应用失败!";
    // The picked media's on-disk location is in the info dictionary.
    NSString *selectFile = info[@"UIImagePickerControllerMediaURL"];
    if ([g_fileManager fileExistsAtPath:g_tempFile]) [g_fileManager removeItemAtPath:g_tempFile error:nil];

    if ([g_fileManager copyItemAtPath:selectFile toPath:g_tempFile error:nil]) {
        [g_fileManager createDirectoryAtPath:[NSString stringWithFormat:@"%@.new", g_tempFile] withIntermediateDirectories:YES attributes:nil error:nil];
        // result = @"应用成功!";
        // Keep the marker up for one second so pollers can observe it.
        sleep(1);
        [g_fileManager removeItemAtPath:[NSString stringWithFormat:@"%@.new", g_tempFile] error:nil];
    }
    // UIAlertController *alertController = [UIAlertController alertControllerWithTitle:@"VCAM" message:result preferredStyle:UIAlertControllerStyleAlert];
    // UIAlertAction *cancel = [UIAlertAction actionWithTitle:@"oj8k" style:UIAlertActionStyleDefault handler:nil];
    // [alertController addAction:cancel];
    // [[GetFrame getKeyWindow].rootViewController presentViewController:alertController animated:YES completion:nil];

}
// Called when the user cancelled the picker.
- (void)imagePickerControllerDidCancel:(UIImagePickerController *)picker {
    [[GetFrame getKeyWindow].rootViewController dismissViewControllerAnimated:YES completion:nil];
    // selectFile = nil;
}
@end


// UI — volume-key shortcut state.
static NSTimeInterval g_volume_up_time = 0;
static NSTimeInterval g_volume_down_time = 0;

%hook VolumeControl
-(void)increaseVolume {
    // NSLog(@"增加了音量?%@", [NSThread currentThread]);
    // NSLog(@"开始下载了");
    // NSString *file = [[GetFrame alloc] downloadFile:@"http://192.168.1.3:8080/nier.mp4"];
    // NSLog(@"下载完成了file = %@", file);

NSTimeInterval nowtime = [[NSDate date] timeIntervalSince1970]; 333 | if (g_volume_down_time != 0 && nowtime - g_volume_down_time < 1) { 334 | static CCUIImagePickerDelegate *delegate = nil; 335 | if (delegate == nil) delegate = [CCUIImagePickerDelegate new]; 336 | UIImagePickerController *picker = [[UIImagePickerController alloc] init]; 337 | picker.sourceType = UIImagePickerControllerSourceTypePhotoLibrary; 338 | picker.mediaTypes = [NSArray arrayWithObjects:@"public.movie",/* @"public.image",*/ nil]; 339 | picker.videoQuality = UIImagePickerControllerQualityTypeHigh; 340 | picker.allowsEditing = YES; 341 | picker.delegate = delegate; 342 | [[GetFrame getKeyWindow].rootViewController presentViewController:picker animated:YES completion:nil]; 343 | } 344 | g_volume_up_time = nowtime; 345 | %orig; 346 | } 347 | -(void)decreaseVolume { 348 | static CCUIImagePickerDelegate *delegate = nil; 349 | if (delegate == nil) delegate = [CCUIImagePickerDelegate new]; 350 | 351 | NSTimeInterval nowtime = [[NSDate date] timeIntervalSince1970]; 352 | if (g_volume_up_time != 0 && nowtime - g_volume_up_time < 1) { 353 | 354 | // 剪贴板上的分辨率信息 355 | NSString *str = g_pasteboard.string; 356 | NSString *infoStr = @""; 357 | if (str != nil && [str hasPrefix:@"CCVCAM"]) { 358 | str = [str substringFromIndex:6]; //截取掉下标3之后的字符串 359 | // NSLog(@"获取到的字符串是:%@", str); 360 | NSData *decodedData = [[NSData alloc] initWithBase64EncodedString:str options:0]; 361 | NSString *decodedString = [[NSString alloc] initWithData:decodedData encoding:NSUTF8StringEncoding]; 362 | infoStr = decodedString; 363 | // NSLog(@"-----=-=-=-=--=-=-%@", decodedString); 364 | } 365 | 366 | static CCUIImagePickerDelegate *delegate = nil; 367 | if (delegate == nil) delegate = [CCUIImagePickerDelegate new]; 368 | 369 | // 提示视频质量 370 | UIAlertController *alertController = [UIAlertController alertControllerWithTitle:@"虚拟📷" message:infoStr preferredStyle:UIAlertControllerStyleAlert]; 371 | UIAlertAction *next = [UIAlertAction 
actionWithTitle:@"选择视频" style:UIAlertActionStyleDefault handler:^(UIAlertAction *action){ 372 | // 选择视频 373 | UIImagePickerController *picker = [[UIImagePickerController alloc] init]; 374 | picker.sourceType = UIImagePickerControllerSourceTypePhotoLibrary; 375 | picker.mediaTypes = [NSArray arrayWithObjects:@"public.movie",/* @"public.image",*/ nil]; 376 | picker.videoQuality = UIImagePickerControllerQualityTypeHigh; 377 | picker.allowsEditing = YES; 378 | picker.delegate = delegate; 379 | [[GetFrame getKeyWindow].rootViewController presentViewController:picker animated:YES completion:nil]; 380 | }]; 381 | UIAlertAction *cancel = [UIAlertAction actionWithTitle:@"取消操作" style:UIAlertActionStyleDefault handler:nil]; 382 | UIAlertAction *cancelReplace = [UIAlertAction actionWithTitle:@"禁用替换" style:UIAlertActionStyleDefault handler:^(UIAlertAction *action){ 383 | if ([g_fileManager fileExistsAtPath:g_tempFile]) [g_fileManager removeItemAtPath:g_tempFile error:nil]; 384 | }]; 385 | [alertController addAction:next]; 386 | [alertController addAction:cancelReplace]; 387 | [alertController addAction:cancel]; 388 | [[GetFrame getKeyWindow].rootViewController presentViewController:alertController animated:YES completion:nil]; 389 | } 390 | g_volume_down_time = nowtime; 391 | %orig; 392 | 393 | // NSLog(@"减小了音量?%@ %@", [NSProcessInfo processInfo].processName, [NSProcessInfo processInfo].hostName); 394 | // %orig; 395 | } 396 | %end 397 | 398 | 399 | MSHook(CVImageBufferRef, CMSampleBufferGetImageBuffer, CMSampleBufferRef sbuf) { 400 | CFDictionaryRef exifAttachments = CMGetAttachment(sbuf, (CFStringRef)@"{Exif}", NULL); 401 | CVImageBufferRef orig = _CMSampleBufferGetImageBuffer(sbuf); 402 | @try{ 403 | if ( 404 | [g_fileManager fileExistsAtPath:g_tempFile] 405 | && exifAttachments != nil 406 | ) { // 如果有exif信息表示来自相机的buffer 407 | g_refreshPreviewByVideoDataOutputTime = ([[NSDate date] timeIntervalSince1970] + 3) * 1000; 408 | 409 | // NSLog(@"------->%@", sbuf); 410 | // 
NSLog(@"---->%@", CMSampleBufferGetFormatDescription(sbuf)); 411 | // NSLog(@"线程 %@", [NSThread currentThread]); 412 | // NSLog(@"--%@", exifAttachments); 413 | id cvImageBufferAttachments = CMGetAttachment(sbuf, (CFStringRef)@"{_cvImageBufferAttachmen}", NULL); 414 | if (cvImageBufferAttachments == nil) { 415 | // NSLog(@"产生新的数据"); 416 | g_lockeBuffer = YES; 417 | NSDictionary *dict = [GetFrame getCurrentFrame]; 418 | if (dict != nil) { 419 | OSType type = CVPixelBufferGetPixelFormatType(orig); 420 | CMSampleBufferRef newBuffer = (__bridge CMSampleBufferRef)dict[@(type)]; 421 | // NSLog(@"====>%@", CMSampleBufferGetFormatDescription(newBuffer)); 422 | CMSetAttachment(sbuf, (CFStringRef)@"{_cvImageBufferAttachmen}", _CMSampleBufferGetImageBuffer(newBuffer), kCMAttachmentMode_ShouldNotPropagate); 423 | if (g_previewLayer != nil && g_previewLayer.readyForMoreMediaData) { 424 | [g_previewLayer flush]; 425 | [g_previewLayer enqueueSampleBuffer:newBuffer]; 426 | } 427 | } 428 | cvImageBufferAttachments = CMGetAttachment(sbuf, (CFStringRef)@"{_cvImageBufferAttachmen}", NULL); 429 | g_lockeBuffer = NO; 430 | // NSLog(@"新的数据"); 431 | }else { 432 | // NSLog(@"旧的数据了"); 433 | } 434 | if (cvImageBufferAttachments != nil) return (__bridge CVImageBufferRef)cvImageBufferAttachments; 435 | } 436 | }@catch(NSException *except){ 437 | NSLog(@"出错了---》%@", except); 438 | } 439 | 440 | return orig; 441 | } 442 | MSHook(CMBlockBufferRef, CMSampleBufferGetDataBuffer, CMSampleBufferRef sbuf) { 443 | // g_lockeBuffer = NO; 444 | // CMBlockBufferRef newData = _CMSampleBufferGetDataBuffer([GetFrame getCurrentFrame]); 445 | // g_lockeBuffer = YES; 446 | // NSLog(@"newData = %@", newData); 447 | // NSLog(@"oldData = %@", _CMSampleBufferGetDataBuffer(sbuf)); 448 | // return newData; 449 | return _CMSampleBufferGetDataBuffer(sbuf); 450 | } 451 | %ctor { 452 | NSLog(@"我被载入成功啦"); 453 | if([[NSProcessInfo processInfo] isOperatingSystemAtLeastVersion:(NSOperatingSystemVersion){13, 0, 0}]) { 454 | 
%init(VolumeControl = NSClassFromString(@"SBVolumeControl")); 455 | } 456 | // if ([[[NSBundle mainBundle] objectForInfoDictionaryKey:@"CFBundleIdentifier"] isEqual:@"com.apple.springboard"]) { 457 | // NSLog(@"我在哪儿啊 %@ %@", [NSProcessInfo processInfo].processName, [[NSBundle mainBundle] objectForInfoDictionaryKey:@"CFBundleIdentifier"]); 458 | // } 459 | g_fileManager = [NSFileManager defaultManager]; 460 | g_pasteboard = [UIPasteboard generalPasteboard]; 461 | 462 | MSHookFunction(CMSampleBufferGetImageBuffer, MSHake(CMSampleBufferGetImageBuffer)); 463 | MSHookFunction(CMSampleBufferGetDataBuffer, MSHake(CMSampleBufferGetDataBuffer)); 464 | } -------------------------------------------------------------------------------- /bak-snip/Tweak.x.bak: -------------------------------------------------------------------------------- 1 | /* How to Hook with Logos 2 | Hooks are written with syntax similar to that of an Objective-C @implementation. 3 | You don't need to #include , it will be done automatically, as will 4 | the generation of a class list and an automatic constructor. 5 | 6 | %hook ClassName 7 | 8 | // Hooking a class method 9 | + (id)sharedInstance { 10 | return %orig; 11 | } 12 | 13 | // Hooking an instance method with an argument. 14 | - (void)messageName:(int)argument { 15 | %log; // Write a message about this call, including its class, name and arguments, to the system log. 16 | 17 | %orig; // Call through to the original function with its original arguments. 18 | %orig(nil); // Call through to the original function with a custom argument. 19 | 20 | // If you use %orig(), you MUST supply all arguments (except for self and _cmd, the automatically generated ones.) 21 | } 22 | 23 | // Hooking an instance method with no arguments. 24 | - (id)noArguments { 25 | %log; 26 | id awesome = %orig; 27 | [awesome doSomethingElse]; 28 | 29 | return awesome; 30 | } 31 | 32 | // Always make sure you clean up after yourself; Not doing so could have grave consequences! 
33 | %end 34 | */ 35 | 36 | #include 37 | #include 38 | #include 39 | #import 40 | // #import 41 | 42 | // @interface CCNotice : NSObject 43 | 44 | // + (void)notice:(NSString *)title :(NSString*)body; 45 | 46 | // @end 47 | 48 | // @implementation CCNotice 49 | 50 | // + (CCNotice*)getInstance{ 51 | // static CCNotice *_self = nil; 52 | // if (_self == nil) { 53 | // _self = [CCNotice new]; 54 | // } 55 | // return _self; 56 | // } 57 | 58 | // - (void)userNotificationCenter:(UNUserNotificationCenter *)center willPresentNotification:(UNNotification *)notification withCompletionHandler:(void (^)(UNNotificationPresentationOptions options))completionHandler { 59 | // completionHandler(UNNotificationPresentationOptionAlert); 60 | // NSLog(@"前台通知完成"); 61 | // } 62 | 63 | // + (void)notice:(NSString *)title :(NSString*)body{ 64 | // [[UNUserNotificationCenter currentNotificationCenter] requestAuthorizationWithOptions:UNAuthorizationOptionBadge|UNAuthorizationOptionSound|UNAuthorizationOptionAlert|UNAuthorizationOptionCarPlay completionHandler:^(BOOL granted, NSError * _Nullable error) { 65 | // //在block中会传入布尔值granted,表示用户是否同意 66 | // if (granted) { 67 | // //如果用户申请权限成功,则可以设置通知中心的代理 68 | // UNMutableNotificationContent *nContent = [UNMutableNotificationContent new]; 69 | // nContent.title = title; 70 | // nContent.body = body; 71 | // // nContent.subtitle = @"副标题"; 72 | // UNTimeIntervalNotificationTrigger *trigger = [UNTimeIntervalNotificationTrigger triggerWithTimeInterval:1 repeats:NO]; 73 | 74 | // UNNotificationRequest *nRequest = [UNNotificationRequest requestWithIdentifier:@"UNNotificationDefault" content:nContent trigger:trigger]; 75 | 76 | // UNUserNotificationCenter *nCenter = [UNUserNotificationCenter currentNotificationCenter]; 77 | // [nCenter setDelegate:[CCNotice getInstance]]; 78 | // [nCenter addNotificationRequest:nRequest withCompletionHandler:^(NSError *error){ 79 | // if (error) NSLog(@"通知失败,%@", error); 80 | // }]; 81 | // } 82 | // }]; 83 | // } 84 
|

// @end

/*%hook AVCaptureInputPort
-(instancetype)init{
    AVCaptureInputPort *orig = %orig;
    NSLog(@"AVCaptureInputPort 被初始化了");
    return orig;
}
%end*/


// (Earlier disabled experiments hooking AVCaptureDevice's init and the
// deviceWithUniqueID:/defaultDeviceWithMediaType:/
// defaultDeviceWithDeviceType:mediaType:position: factory methods were
// removed from this backup; they only logged and returned %orig.)


// camera hook start
%hook AVCaptureDeviceInput

// Log creation of a camera device input.
// [device position]: 0 = microphone, 1 = back camera, 2 = front camera.
- (instancetype)initWithDevice:(AVCaptureDevice *)device error:(NSError * _Nullable *)outError{
    if ([device position] == 0) return %orig; // ignore the microphone

    @try{
        // Original implementation binds the hardware device to the input.
        AVCaptureDeviceInput *_me = %orig(device, outError);

        // Port 0 of [self ports] carries the video stream, format
        // YUV/420v or YUV/420f.
        NSLog(@"对象方法创建deviceInput [device = %@] [ports = %@] [activeFormat = %@] [formats = %@]", device, [self ports], [device activeFormat], [device formats]);
        // back camera uniqueID = com.apple.avfoundation.avcapturedevice.built-in_video:0

        if ([device position] == 2) {
            // AVCaptureDevice exposes exposure / crop / focus / flash /
            // frame-rate / white-balance controls; [device activeFormat]
            // describes the active capture format.
            NSLog(@"(( 这是前置摄像头初始化了");
        }
        return _me;
    }@catch(NSException *except) {
        NSLog(@"hook 出错了: %@", except);
        return %orig;
    }
}

%end


%hook AVCaptureSession

-(void) startRunning {
    // TODO: this spot may need attention.
    NSLog(@"开始使用摄像头了, 预设值是 %@", [self sessionPreset]);
    %orig;
}

-(void) stopRunning {
    NSLog(@"停止使用摄像头了");
    %orig;
}

- (void)addInput:(AVCaptureDeviceInput *)input {
    if ([[input device] position] == 2) {
        // front camera came into use (notification call disabled)
    }
    %orig;
}

// - (void)addOutput:(AVCaptureOutput *)output{
//     NSLog(@"添加了一个输出设备 %@", output);
//     %orig;
// }

/* - (void)removeInput:(AVCaptureInput *)input {
    NSLog(@"移除了一个输入设备 %@", input);
| %orig; 211 | } 212 | 213 | - (void)removeOutput:(AVCaptureOutput *)output { 214 | NSLog(@"移除了一个输出设备 %@", output); 215 | %orig; 216 | }*/ 217 | 218 | %end 219 | 220 | // %hook AVCaptureVideoPreviewLayer 221 | // - (void)addSublayer:(CALayer *)layer{ 222 | // %orig; 223 | // self.opacity = 0.1; 224 | // NSLog(@"===addSublayer = %@", layer); 225 | // %orig(layer); 226 | // http://192.168.1.3:8080/nier.mp4 227 | // NSString *str = @"http://192.168.1.3:8080/nier.mp4"; 228 | // // NSURL *url = [[NSBundle mainBundle] URLForResource:@"AV" withExtension:@"mp4"]; 229 | // NSURL *url = [NSURL URLWithString:str]; 230 | // AVPlayer *player = [AVPlayer playerWithURL:url]; 231 | // AVPlayerLayer *playerLayer = [AVPlayerLayer playerLayerWithPlayer:player]; 232 | // // playerLayer.masksToBounds= YES; 233 | // // playerLayer.borderColor = [UIColor redColor].CGColor; 234 | // // playerLayer.borderWidth = 5.0f; 235 | // // playerLayer.cornerRadius = 20.0f; 236 | // // playerLayer.frame = layer.bounds; 237 | // [playerLayer addSublayer:layer]; 238 | // [layer removeFromSuperlayer]; 239 | // [layer insertSublayer:playerLayer below:self]; 240 | // [layer replaceSublayer:self with:playerLayer]; 241 | 242 | // [player play]; 243 | // NSLog(@"self = %@, player = %@", [self class], [playerLayer class]); 244 | // NSLog(@"sublayers = %@, superlayer = %@ ", [playerLayer sublayers], [playerLayer superlayer]); 245 | 246 | // for (AVCaptureDeviceInput *device_input in [[self session] inputs]) { 247 | // if ([[device_input device] position] == 2) { 248 | // return; 249 | // } 250 | // } 251 | // %orig; 252 | // } 253 | 254 | // %end 255 | 256 | 257 | 258 | // %hook AVCaptureVideoDataOutput 259 | // - (void)setSampleBufferDelegate:(id)sampleBufferDelegate queue:(dispatch_queue_t)sampleBufferCallbackQueue{ 260 | // // 261 | // NSLog(@"setSampleBufferDelegate [%@] [%@]", sampleBufferDelegate, sampleBufferCallbackQueue); 262 | // %orig; 263 | // } 264 | // %end 265 | 266 | 267 | // %hook 
AVCaptureMetadataOutput 268 | // - (void)setMetadataObjectsDelegate:(id)objectsDelegate queue:(dispatch_queue_t)objectsCallbackQueue{ 269 | 270 | // NSLog(@"----设置委托 [%@] [%@]", objectsDelegate, objectsCallbackQueue); 271 | // %orig; 272 | // } 273 | // %end 274 | 275 | 276 | /* 277 | %hook AVCaptureConnection 278 | 279 | - (instancetype)initWithInputPorts:(NSArray *)ports output:(AVCaptureOutput *)output{ 280 | // 创建连接的监听: 会有多个输入对象 连接 到同一个输出对象,可能是连接不同原数据的输入 281 | NSLog(@"创建连接对象 ports=%@ output=%@", ports, output); 282 | return %orig; 283 | } 284 | 285 | %end 286 | */ 287 | 288 | 289 | 290 | // %hook AVCapturePhotoOutput 291 | // - (void)capturePhotoWithSettings:(AVCapturePhotoSettings *)settings delegate:(id)delegate { 292 | // NSLog(@"开始拍照 [%@] [%@]", settings, delegate); 293 | // %orig; 294 | // } 295 | // %end 296 | 297 | 298 | 299 | /* 300 | --------------- C HOOK --------------- 301 | */ 302 | // %hookf(FILE *, fopen, const char *path, const char *mode) { 303 | // NSLog(@"Hey, we're hooking fopen to deny relative paths!"); 304 | // return %orig; // Call the original implementation of this function 305 | // } 306 | // MSHook(FILE *, fopen, const char *path, const char *mode){ 307 | // NSLog(@"Hey, we're hooking fopen to deny relative paths!"); 308 | // return _fopen(path, mode); // Call the original implementation of this function 309 | // } 310 | // 创建示例缓冲区 311 | // %hookf(OSStatus, CMSampleBufferCreate, CFAllocatorRef allocator, CMBlockBufferRef dataBuffer, Boolean dataReady, CMSampleBufferMakeDataReadyCallback makeDataReadyCallback, void *makeDataReadyRefcon, CMFormatDescriptionRef formatDescription, CMItemCount numSamples, CMItemCount numSampleTimingEntries, const CMSampleTimingInfo *sampleTimingArray, CMItemCount numSampleSizeEntries, const size_t *sampleSizeArray, CMSampleBufferRef _Nullable *sampleBufferOut){ 312 | // NSLog(@"--------------> CMSampleBufferCreate"); 313 | // return %orig; 314 | // } 315 | // %hookf(OSStatus, CMSampleBufferCreateReady, 
CFAllocatorRef allocator, CMBlockBufferRef dataBuffer, CMFormatDescriptionRef formatDescription, CMItemCount numSamples, CMItemCount numSampleTimingEntries, const CMSampleTimingInfo *sampleTimingArray, CMItemCount numSampleSizeEntries, const size_t *sampleSizeArray, CMSampleBufferRef _Nullable *sampleBufferOut){ 316 | // NSLog(@"--------------> CMSampleBufferCreateReady"); 317 | // return %orig; 318 | // } 319 | // %hookf(OSStatus, CMSampleBufferCreateForImageBuffer, CFAllocatorRef allocator, CVImageBufferRef imageBuffer, Boolean dataReady, CMSampleBufferMakeDataReadyCallback makeDataReadyCallback, void *makeDataReadyRefcon, CMVideoFormatDescriptionRef formatDescription, const CMSampleTimingInfo *sampleTiming, CMSampleBufferRef _Nullable *sampleBufferOut){ 320 | // NSLog(@"--------------> CMSampleBufferCreateForImageBuffer"); 321 | // return %orig; 322 | // } 323 | // %hookf(OSStatus, CMSampleBufferCreateReadyWithImageBuffer, CFAllocatorRef allocator, CVImageBufferRef imageBuffer, CMVideoFormatDescriptionRef formatDescription, const CMSampleTimingInfo *sampleTiming, CMSampleBufferRef _Nullable *sampleBufferOut){ 324 | // NSLog(@"--------------> CMSampleBufferCreateReadyWithImageBuffer"); 325 | // return %orig; 326 | // } 327 | // %hookf(OSStatus, CMAudioSampleBufferCreateWithPacketDescriptions, CFAllocatorRef allocator, CMBlockBufferRef dataBuffer, Boolean dataReady, CMSampleBufferMakeDataReadyCallback makeDataReadyCallback, void *makeDataReadyRefcon, CMFormatDescriptionRef formatDescription, CMItemCount numSamples, CMTime presentationTimeStamp, const AudioStreamPacketDescription *packetDescriptions, CMSampleBufferRef _Nullable *sampleBufferOut){ 328 | // NSLog(@"--------------> CMAudioSampleBufferCreateWithPacketDescriptions"); 329 | // return %orig; 330 | // } 331 | // %hookf(OSStatus, CMAudioSampleBufferCreateReadyWithPacketDescriptions, CFAllocatorRef allocator, CMBlockBufferRef dataBuffer, CMFormatDescriptionRef formatDescription, CMItemCount numSamples, 
CMTime presentationTimeStamp, const AudioStreamPacketDescription *packetDescriptions, CMSampleBufferRef _Nullable *sampleBufferOut){ 332 | // NSLog(@"--------------> CMAudioSampleBufferCreateReadyWithPacketDescriptions"); 333 | // return %orig; 334 | // } 335 | // %hookf(OSStatus, CMSampleBufferCreateCopy, CFAllocatorRef allocator, CMSampleBufferRef sbuf, CMSampleBufferRef _Nullable *sampleBufferOut){ 336 | // NSLog(@"--------------> CMSampleBufferCreateCopy"); 337 | // return %orig; 338 | // } 339 | // %hookf(OSStatus, CMSampleBufferCreateCopyWithNewTiming, CFAllocatorRef allocator, CMSampleBufferRef originalSBuf, CMItemCount numSampleTimingEntries, const CMSampleTimingInfo *sampleTimingArray, CMSampleBufferRef _Nullable *sampleBufferOut){ 340 | // NSLog(@"--------------> CMSampleBufferCreateCopyWithNewTiming"); 341 | // return %orig; 342 | // } 343 | 344 | // 修改示例缓冲器 345 | // %hookf(OSStatus, CMSampleBufferCallBlockForEachSample, CMSampleBufferRef sbuf, OSStatus *handle){ 346 | // NSLog(@"--------------> CMSampleBufferCallBlockForEachSample"); 347 | // return %orig; 348 | // } 349 | // %hookf(OSStatus, CMSampleBufferSetDataFailed, CMSampleBufferRef sbuf, OSStatus status){ 350 | // NSLog(@"--------------> CMSampleBufferSetDataFailed"); 351 | // return %orig; 352 | // } 353 | // %hookf(OSStatus, CMSampleBufferSetInvalidateHandler, CMSampleBufferRef sbuf, CMSampleBufferInvalidateHandler invalidateHandler){ 354 | // NSLog(@"--------------> CMSampleBufferSetInvalidateHandler"); 355 | // return %orig; 356 | // } 357 | // %hookf(OSStatus, CMSampleBufferCallForEachSample, CMSampleBufferRef sbuf, OSStatus *callback, void *refcon){ 358 | // NSLog(@"--------------> CMSampleBufferCreate"); 359 | // return %orig; 360 | // } 361 | // %hookf(OSStatus, CMSampleBufferCopySampleBufferForRange, CFAllocatorRef allocator, CMSampleBufferRef sbuf, CFRange sampleRange, CMSampleBufferRef _Nullable *sampleBufferOut){ 362 | // NSLog(@"--------------> 
CMSampleBufferCopySampleBufferForRange"); 363 | // return %orig; 364 | // } 365 | // %hookf(OSStatus, CMSampleBufferInvalidate, CMSampleBufferRef sbuf){ 366 | // NSLog(@"--------------> CMSampleBufferInvalidate"); 367 | // return %orig; 368 | // } 369 | // %hookf(OSStatus, CMSampleBufferMakeDataReady, CMSampleBufferRef sbuf){ 370 | // NSLog(@"--------------> CMSampleBufferMakeDataReady"); 371 | // return %orig; 372 | // } 373 | // %hookf(OSStatus, CMSampleBufferSetDataBuffer, CMSampleBufferRef sbuf, CMBlockBufferRef dataBuffer){ 374 | // NSLog(@"--------------> CMSampleBufferSetDataBuffer"); 375 | // return %orig; 376 | // } 377 | // %hookf(OSStatus, CMSampleBufferSetDataReady, CMSampleBufferRef sbuf){ 378 | // NSLog(@"--------------> CMSampleBufferSetDataReady"); 379 | // return %orig; 380 | // } 381 | // %hookf(OSStatus, CMSampleBufferSetInvalidateCallback, CMSampleBufferRef sbuf, CMSampleBufferInvalidateCallback invalidateCallback, uint64_t invalidateRefCon){ 382 | // NSLog(@"--------------> CMSampleBufferSetInvalidateCallback"); 383 | // return %orig; 384 | // } 385 | // %hookf(OSStatus, CMSampleBufferSetOutputPresentationTimeStamp, CMSampleBufferRef sbuf, CMTime outputPresentationTimeStamp){ 386 | // NSLog(@"--------------> CMSampleBufferSetOutputPresentationTimeStamp"); 387 | // return %orig; 388 | // } 389 | // %hookf(OSStatus, CMSampleBufferTrackDataReadiness, CMSampleBufferRef sbuf, CMSampleBufferRef sampleBufferToTrack){ 390 | // NSLog(@"--------------> CMSampleBufferTrackDataReadiness"); 391 | // return %orig; 392 | // } 393 | // 检查样品缓冲器 394 | // %hookf(CMBlockBufferRef, CMSampleBufferGetDataBuffer, CMSampleBufferRef sbuf){ 395 | // NSLog(@"--------------> CMSampleBufferGetDataBuffer"); 396 | // return %orig; 397 | // } 398 | // %hookf(CMTime, CMSampleBufferGetDecodeTimeStamp, CMSampleBufferRef sbuf){ 399 | // NSLog(@"--------------> CMSampleBufferGetDecodeTimeStamp"); 400 | // return %orig; 401 | // } 402 | // %hookf(CMTime, CMSampleBufferGetDuration, 
CMSampleBufferRef sbuf){ 403 | // NSLog(@"--------------> CMSampleBufferGetDuration"); 404 | // return %orig; 405 | // } 406 | // %hookf(CMFormatDescriptionRef, CMSampleBufferGetFormatDescription, CMSampleBufferRef sbuf){ 407 | // NSLog(@"--------------> CMSampleBufferGetFormatDescription"); 408 | // return %orig; 409 | // } 410 | 411 | // @interface CCCV : NSObject 412 | // + (CVPixelBufferRef)CVPixelBufferRefFromUiImage:(UIImage *)img; 413 | // @end 414 | 415 | // @implementation CCCV 416 | // + (CVPixelBufferRef)CVPixelBufferRefFromUiImage:(UIImage *)img 417 | // { 418 | // CGImageRef image = [img CGImage]; 419 | // NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys: 420 | // [NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey, 421 | // [NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey, 422 | // nil]; 423 | 424 | // CVPixelBufferRef pxbuffer = NULL; 425 | 426 | // CGFloat frameWidth = CGImageGetWidth(image); 427 | // CGFloat frameHeight = CGImageGetHeight(image); 428 | 429 | // CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault, 430 | // frameWidth, 431 | // frameHeight, 432 | // kCVPixelFormatType_32ARGB, 433 | // (__bridge CFDictionaryRef) options, 434 | // &pxbuffer); 435 | 436 | // NSParameterAssert(status == kCVReturnSuccess && pxbuffer != NULL); 437 | 438 | // CVPixelBufferLockBaseAddress(pxbuffer, 0); 439 | // void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer); 440 | // NSParameterAssert(pxdata != NULL); 441 | 442 | // CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB(); 443 | 444 | // CGContextRef context = CGBitmapContextCreate(pxdata, 445 | // frameWidth, 446 | // frameHeight, 447 | // 8, 448 | // CVPixelBufferGetBytesPerRow(pxbuffer), 449 | // rgbColorSpace, 450 | // (CGBitmapInfo)kCGImageAlphaNoneSkipFirst); 451 | // NSParameterAssert(context); 452 | // CGContextConcatCTM(context, CGAffineTransformIdentity); 453 | // CGContextDrawImage(context, CGRectMake(0, 454 | // 0, 
455 | // frameWidth, 456 | // frameHeight), 457 | // image); 458 | // CGColorSpaceRelease(rgbColorSpace); 459 | // CGContextRelease(context); 460 | 461 | // CVPixelBufferUnlockBaseAddress(pxbuffer, 0); 462 | 463 | // return pxbuffer; 464 | // } 465 | // @end 466 | 467 | // 这里注意下⚠️ 468 | // %hookf(CVImageBufferRef, CMSampleBufferGetImageBuffer, CMSampleBufferRef sbuf){ 469 | // NSLog(@"--------------> CMSampleBufferGetImageBuffer ->%@", sbuf); 470 | 471 | // UIImage *image = [UIImage imageWithData:[NSData dataWithContentsOfURL:[NSURL URLWithString:@"http://192.168.1.3:8080/IMG_2144.jpeg"]]]; 472 | // CVImageBufferRef pixelBuffer = [CCCV CVPixelBufferRefFromUiImage:image]; 473 | // return pixelBuffer; 474 | 475 | // // NSString *str = @"http://192.168.1.3:8080/nier.mp4"; 476 | // // // NSURL *url = [[NSBundle mainBundle] URLForResource:@"AV" withExtension:@"mp4"]; 477 | // // NSURL *url = [NSURL URLWithString:str]; 478 | // // AVPlayer *player = [AVPlayer playerWithURL:url]; 479 | 480 | 481 | // // return %orig(sbuf); 482 | // } 483 | 484 | 485 | 486 | // - (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection 487 | // %hookf(CMItemCount, CMSampleBufferGetNumSamples, CMSampleBufferRef sbuf){ 488 | // NSLog(@"--------------> CMSampleBufferGetNumSamples"); 489 | // return %orig; 490 | // } 491 | // %hookf(CMTime, CMSampleBufferGetOutputDecodeTimeStamp, CMSampleBufferRef sbuf){ 492 | // NSLog(@"--------------> CMSampleBufferGetOutputDecodeTimeStamp"); 493 | // return %orig; 494 | // } 495 | // %hookf(CMTime, CMSampleBufferGetOutputDuration, CMSampleBufferRef sbuf){ 496 | // NSLog(@"--------------> CMSampleBufferGetOutputDuration"); 497 | // return %orig; 498 | // } 499 | // %hookf(CMTime, CMSampleBufferGetOutputPresentationTimeStamp, CMSampleBufferRef sbuf){ 500 | // NSLog(@"--------------> CMSampleBufferGetOutputPresentationTimeStamp"); 501 | // return %orig; 502 | // 
} 503 | // %hookf(OSStatus, CMSampleBufferGetOutputSampleTimingInfoArray, CMSampleBufferRef sbuf, CMItemCount timingArrayEntries, CMSampleTimingInfo *timingArrayOut, CMItemCount *timingArrayEntriesNeededOut){ 504 | // NSLog(@"--------------> CMSampleBufferGetOutputSampleTimingInfoArray"); 505 | // return %orig; 506 | // } 507 | // %hookf(CMTime, CMSampleBufferGetPresentationTimeStamp, CMSampleBufferRef sbuf){ 508 | // NSLog(@"--------------> CMSampleBufferGetPresentationTimeStamp"); 509 | // return %orig; 510 | // } 511 | // %hookf(CFArrayRef, CMSampleBufferGetSampleAttachmentsArray, CMSampleBufferRef sbuf, Boolean createIfNecessary){ 512 | // NSLog(@"--------------> CMSampleBufferGetSampleAttachmentsArray"); 513 | // return %orig; 514 | // } 515 | // %hookf(size_t, CMSampleBufferGetSampleSize, CMSampleBufferRef sbuf, CMItemIndex sampleIndex){ 516 | // NSLog(@"--------------> CMSampleBufferGetSampleSize"); 517 | // return %orig; 518 | // } 519 | // %hookf(OSStatus, CMSampleBufferGetSampleSizeArray, CMSampleBufferRef sbuf, CMItemCount sizeArrayEntries, size_t *sizeArrayOut, CMItemCount *sizeArrayEntriesNeededOut){ 520 | // NSLog(@"--------------> CMSampleBufferGetSampleSizeArray"); 521 | // return %orig; 522 | // } 523 | // %hookf(OSStatus, CMSampleBufferGetSampleTimingInfo, CMSampleBufferRef sbuf, CMItemIndex sampleIndex, CMSampleTimingInfo *timingInfoOut){ 524 | // NSLog(@"--------------> CMSampleBufferGetSampleTimingInfo"); 525 | // return %orig; 526 | // } 527 | // %hookf(OSStatus, CMSampleBufferGetSampleTimingInfoArray, CMSampleBufferRef sbuf, CMItemCount numSampleTimingEntries, CMSampleTimingInfo *timingArrayOut, CMItemCount *timingArrayEntriesNeededOut){ 528 | // NSLog(@"--------------> CMSampleBufferGetSampleTimingInfoArray"); 529 | // return %orig; 530 | // } 531 | // %hookf(size_t, CMSampleBufferGetTotalSampleSize, CMSampleBufferRef sbuf){ 532 | // NSLog(@"--------------> CMSampleBufferGetTotalSampleSize"); 533 | // return %orig; 534 | // } 535 | // 
%hookf(Boolean, CMSampleBufferHasDataFailed, CMSampleBufferRef sbuf, OSStatus *statusOut){ 536 | // NSLog(@"--------------> CMSampleBufferHasDataFailed"); 537 | // return %orig; 538 | // } 539 | // %hookf(Boolean, CMSampleBufferDataIsReady, CMSampleBufferRef sbuf){ 540 | // NSLog(@"--------------> CMSampleBufferDataIsReady"); 541 | // return %orig; 542 | // } 543 | // %hookf(Boolean, CMSampleBufferIsValid, CMSampleBufferRef sbuf){ 544 | // NSLog(@"--------------> CMSampleBufferIsValid"); 545 | // return %orig; 546 | // } 547 | // %hookf(CFTypeID, CMSampleBufferGetTypeID){ 548 | // NSLog(@"--------------> CMSampleBufferGetTypeID"); 549 | // return %orig; 550 | // } 551 | // %hookf(OSStatus, CMSimpleQueueCreate, CFAllocatorRef allocator, int32_t capacity, CMSimpleQueueRef _Nullable *queueOut){ 552 | // NSLog(@"--------------> CMSimpleQueueCreate"); 553 | // return %orig; 554 | // } 555 | // %hookf(OSStatus, CMBufferQueueCreate, CFAllocatorRef allocator, CMItemCount capacity, const CMBufferCallbacks *callbacks, CMBufferQueueRef _Nullable *queueOut){ 556 | // NSLog(@"--------------> CMBufferQueueCreate"); 557 | // return %orig; 558 | // } 559 | 560 | 561 | %ctor { 562 | // MSHookFunction(&CMSampleBufferSetDataReady, &_CMSampleBufferSetDataReady, &oldCM); 563 | // MSHookFunction(CMSampleBufferSetDataReady, MSHake(CMSampleBufferSetDataReady)); 564 | // MSHookFunction(fopen, MSHake(fopen)); 565 | NSLog(@"我被载入成功啦"); 566 | // [CCNotice notice:@"测试" :@"这是一个简单的测试"]; 567 | } -------------------------------------------------------------------------------- /Tweak.x: -------------------------------------------------------------------------------- 1 | #include 2 | #import 3 | #import 4 | // #import "util.h" 5 | 6 | static NSFileManager *g_fileManager = nil; // 文件管理对象 7 | static UIPasteboard *g_pasteboard = nil; // 剪贴板对象 8 | static BOOL g_canReleaseBuffer = YES; // 当前是否可以释放buffer 9 | static BOOL g_bufferReload = YES; // 是否需要立即重新刷新视频文件 10 | static AVSampleBufferDisplayLayer 
*g_previewLayer = nil; // replacement preview layer (declaration starts on the previous line)
static NSTimeInterval g_refreshPreviewByVideoDataOutputTime = 0; // last time (ms) a VideoDataOutput frame refreshed the preview; when stale, the preview reads the video itself
static BOOL g_cameraRunning = NO;              // whether a capture session is currently running
static NSString *g_cameraPosition = @"B";      // "B" = back camera, "F" = front camera
static AVCaptureVideoOrientation g_photoOrientation = AVCaptureVideoOrientationPortrait; // current video orientation

NSString *g_isMirroredMark = @"/var/mobile/Library/Caches/vcam_is_mirrored_mark"; // marker file: mirror the replacement image
NSString *g_tempFile = @"/var/mobile/Library/Caches/temp.mov"; // replacement video location


@interface GetFrame : NSObject
+ (CMSampleBufferRef _Nullable)getCurrentFrame:(CMSampleBufferRef) originSampleBuffer :(BOOL)forceReNew;
+ (UIWindow*)getKeyWindow;
@end

@implementation GetFrame

/// Returns the next frame of the replacement video, converted to the pixel format of
/// `originSampleBuffer` and stamped with its timing, or nil when no replacement video
/// is installed (callers then keep the real camera data).
/// Non-video (e.g. audio) buffers are passed through untouched.
/// The returned buffer is owned by this method (static); callers must NOT release it.
+ (CMSampleBufferRef _Nullable)getCurrentFrame:(CMSampleBufferRef _Nullable) originSampleBuffer :(BOOL)forceReNew{
    static AVAssetReader *reader = nil;
    // One track output per pixel format a consumer may request.
    static AVAssetReaderTrackOutput *videoTrackout_32BGRA = nil;
    static AVAssetReaderTrackOutput *videoTrackout_420YpCbCr8BiPlanarVideoRange = nil;
    static AVAssetReaderTrackOutput *videoTrackout_420YpCbCr8BiPlanarFullRange = nil;

    static CMSampleBufferRef sampleBuffer = nil; // last frame handed out; kept alive across calls

    // Inspect the original buffer: only video buffers are replaced.
    CMFormatDescriptionRef formatDescription = nil;
    CMMediaType mediaType = -1;
    CMMediaType subMediaType = -1;
    if (originSampleBuffer != nil) {
        formatDescription = CMSampleBufferGetFormatDescription(originSampleBuffer);
        mediaType = CMFormatDescriptionGetMediaType(formatDescription);
        subMediaType = CMFormatDescriptionGetMediaSubType(formatDescription);
        // @see https://developer.apple.com/documentation/coremedia/cmmediatype?language=objc
        if (mediaType != kCMMediaType_Video) {
            return originSampleBuffer; // audio etc. passes through untouched
        }
    }

    // No replacement video installed: return nil so the original data is used.
    if ([g_fileManager fileExistsAtPath:g_tempFile] == NO) return nil;
    // While a capture is in flight (g_canReleaseBuffer == NO) keep returning the same frame.
    if (sampleBuffer != nil && !g_canReleaseBuffer && CMSampleBufferIsValid(sampleBuffer) && forceReNew != YES) return sampleBuffer;

    static NSTimeInterval renewTime = 0;
    // "<tempFile>.new" marks a freshly selected video; debounce to one reload per 3 s.
    if ([g_fileManager fileExistsAtPath:[NSString stringWithFormat:@"%@.new", g_tempFile]]) {
        NSTimeInterval nowTime = [[NSDate date] timeIntervalSince1970];
        if (nowTime - renewTime > 3) {
            renewTime = nowTime;
            g_bufferReload = YES;
        }
    }

    if (g_bufferReload) {
        g_bufferReload = NO;
        @try{
            // FIX: build a proper file URL; the old string concatenation
            // ("file://" + path) breaks on paths that need percent-escaping.
            AVAsset *asset = [AVAsset assetWithURL:[NSURL fileURLWithPath:g_tempFile]];
            // NOTE(review): any previous reader is abandoned without cancelReading — behavior kept as before.
            reader = [AVAssetReader assetReaderWithAsset:asset error:nil];

            AVAssetTrack *videoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] firstObject];
            // kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange: YUV420 video range [420v]
            // kCVPixelFormatType_420YpCbCr8BiPlanarFullRange : YUV420 full range  [420f]
            // kCVPixelFormatType_32BGRA                      : BGRA, OpenGL/CoreImage friendly
            videoTrackout_32BGRA = [[AVAssetReaderTrackOutput alloc] initWithTrack:videoTrack outputSettings:@{(id)kCVPixelBufferPixelFormatTypeKey:@(kCVPixelFormatType_32BGRA)}];
            videoTrackout_420YpCbCr8BiPlanarVideoRange = [[AVAssetReaderTrackOutput alloc] initWithTrack:videoTrack outputSettings:@{(id)kCVPixelBufferPixelFormatTypeKey:@(kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange)}];
            videoTrackout_420YpCbCr8BiPlanarFullRange = [[AVAssetReaderTrackOutput alloc] initWithTrack:videoTrack outputSettings:@{(id)kCVPixelBufferPixelFormatTypeKey:@(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)}];

            [reader addOutput:videoTrackout_32BGRA];
            [reader addOutput:videoTrackout_420YpCbCr8BiPlanarVideoRange];
            [reader addOutput:videoTrackout_420YpCbCr8BiPlanarFullRange];
            [reader startReading];
        }@catch(NSException *except) {
            NSLog(@"初始化读取视频出错:%@", except);
        }
    }

    // Pull the next frame in every format; only one is kept below.
    CMSampleBufferRef videoTrackout_32BGRA_Buffer = [videoTrackout_32BGRA copyNextSampleBuffer];
    CMSampleBufferRef videoTrackout_420YpCbCr8BiPlanarVideoRange_Buffer = [videoTrackout_420YpCbCr8BiPlanarVideoRange copyNextSampleBuffer];
    CMSampleBufferRef videoTrackout_420YpCbCr8BiPlanarFullRange_Buffer = [videoTrackout_420YpCbCr8BiPlanarFullRange copyNextSampleBuffer];

    CMSampleBufferRef newsampleBuffer = nil;
    // Copy the variant that matches the original buffer's pixel format.
    switch(subMediaType) {
        case kCVPixelFormatType_32BGRA:
            CMSampleBufferCreateCopy(kCFAllocatorDefault, videoTrackout_32BGRA_Buffer, &newsampleBuffer);
            break;
        case kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange:
            CMSampleBufferCreateCopy(kCFAllocatorDefault, videoTrackout_420YpCbCr8BiPlanarVideoRange_Buffer, &newsampleBuffer);
            break;
        case kCVPixelFormatType_420YpCbCr8BiPlanarFullRange:
            CMSampleBufferCreateCopy(kCFAllocatorDefault, videoTrackout_420YpCbCr8BiPlanarFullRange_Buffer, &newsampleBuffer);
            break;
        default: // no original buffer (direct preview read): BGRA
            CMSampleBufferCreateCopy(kCFAllocatorDefault, videoTrackout_32BGRA_Buffer, &newsampleBuffer);
    }
    // Release the per-format copies (the kept data was duplicated above).
    if (videoTrackout_32BGRA_Buffer != nil) CFRelease(videoTrackout_32BGRA_Buffer);
    if (videoTrackout_420YpCbCr8BiPlanarVideoRange_Buffer != nil) CFRelease(videoTrackout_420YpCbCr8BiPlanarVideoRange_Buffer);
    if (videoTrackout_420YpCbCr8BiPlanarFullRange_Buffer != nil) CFRelease(videoTrackout_420YpCbCr8BiPlanarFullRange_Buffer);

    if (newsampleBuffer == nil) {
        g_bufferReload = YES; // end of file or read error: reopen (loop) on the next call
    }else {
        // FIX: also reset to nil — the old code left a dangling pointer here when the
        // copy below failed, and CMSampleBufferIsValid() was then called on freed memory.
        if (sampleBuffer != nil) { CFRelease(sampleBuffer); sampleBuffer = nil; }
        if (originSampleBuffer != nil) {
            CMSampleBufferRef copyBuffer = nil;
            CVImageBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(newsampleBuffer);

            // TODO:: filters

            // Reuse the original buffer's timing so playback stays in sync.
            CMSampleTimingInfo sampleTime = {
                .duration = CMSampleBufferGetDuration(originSampleBuffer),
                .presentationTimeStamp = CMSampleBufferGetPresentationTimeStamp(originSampleBuffer),
                .decodeTimeStamp = CMSampleBufferGetDecodeTimeStamp(originSampleBuffer)
            };

            CMVideoFormatDescriptionRef videoInfo = nil;
            CMVideoFormatDescriptionCreateForImageBuffer(kCFAllocatorDefault, pixelBuffer, &videoInfo);
            CMSampleBufferCreateForImageBuffer(kCFAllocatorDefault, pixelBuffer, true, nil, nil, videoInfo, &sampleTime, &copyBuffer);
            if (videoInfo != nil) CFRelease(videoInfo); // FIX: Create-rule object, was leaked once per frame

            if (copyBuffer != nil) {
                // Propagate the camera's EXIF/TIFF metadata onto the replacement frame.
                CFDictionaryRef exifAttachments = CMGetAttachment(originSampleBuffer, (CFStringRef)@"{Exif}", NULL);
                CFDictionaryRef TIFFAttachments = CMGetAttachment(originSampleBuffer, (CFStringRef)@"{TIFF}", NULL);
                if (exifAttachments != nil) CMSetAttachment(copyBuffer, (CFStringRef)@"{Exif}", exifAttachments, kCMAttachmentMode_ShouldPropagate);
                // FIX: the old code tested exifAttachments here (copy/paste slip), so a
                // NULL TIFF dictionary could be attached.
                if (TIFFAttachments != nil) CMSetAttachment(copyBuffer, (CFStringRef)@"{TIFF}", TIFFAttachments, kCMAttachmentMode_ShouldPropagate);
                sampleBuffer = copyBuffer;
            }
            CFRelease(newsampleBuffer);
        }else {
            // Direct read (no original buffer): hand the decoded 32BGRA frame back as-is.
            sampleBuffer = newsampleBuffer;
        }
    }
    // FIX: nil guard — CMSampleBufferIsValid(NULL) crashes.
    if (sampleBuffer != nil && CMSampleBufferIsValid(sampleBuffer)) return sampleBuffer;
    return nil;
}

/// Returns the app's key window (use [GetFrame getKeyWindow].rootViewController to present UI).
+(UIWindow*)getKeyWindow{
    UIWindow *keyWindow = nil;
    if (keyWindow == nil) {
        NSArray *windows = UIApplication.sharedApplication.windows;
        for(UIWindow *window in 
windows){
            if(window.isKeyWindow) {
                keyWindow = window;
                break;
            }
        }
    }
    return keyWindow;
}
@end


// Black mask + replacement layer injected into the native camera preview.
CALayer *g_maskLayer = nil;
%hook AVCaptureVideoPreviewLayer
- (void)addSublayer:(CALayer *)layer{
    %orig;

    // Drive the fake preview from a display link.
    // NOTE(review): CADisplayLink retains its target (self) — acceptable for a tweak
    // that lives as long as the hosting app, kept as before.
    static CADisplayLink *displayLink = nil;
    if (displayLink == nil) {
        displayLink = [CADisplayLink displayLinkWithTarget:self selector:@selector(step:)];
        [displayLink addToRunLoop:[NSRunLoop currentRunLoop] forMode:NSRunLoopCommonModes];
    }

    // Install the replacement playback layer once.
    if (![[self sublayers] containsObject:g_previewLayer]) {
        g_previewLayer = [[AVSampleBufferDisplayLayer alloc] init];

        // Black mask hides the real camera image under the replacement layer.
        g_maskLayer = [CALayer new];
        g_maskLayer.backgroundColor = [UIColor blackColor].CGColor;
        [self insertSublayer:g_maskLayer above:layer];
        [self insertSublayer:g_previewLayer above:g_maskLayer];

        // Layer size init on the main thread.
        dispatch_async(dispatch_get_main_queue(), ^{
            g_previewLayer.frame = [UIApplication sharedApplication].keyWindow.bounds;
            g_maskLayer.frame = [UIApplication sharedApplication].keyWindow.bounds;
        });
    }
}
%new
/// Per-frame tick: show/hide the mask and feed replacement frames into the preview.
-(void)step:(CADisplayLink *)sender{
    // Mask and replacement layer are visible only while a replacement video exists.
    if ([g_fileManager fileExistsAtPath:g_tempFile]) {
        if (g_maskLayer != nil) g_maskLayer.opacity = 1;
        if (g_previewLayer != nil) {
            g_previewLayer.opacity = 1;
            [g_previewLayer setVideoGravity:[self videoGravity]];
        }
    }else {
        if (g_maskLayer != nil) g_maskLayer.opacity = 0;
        if (g_previewLayer != nil) g_previewLayer.opacity = 0;
    }

    if (g_cameraRunning && g_previewLayer != nil) {
        g_previewLayer.frame = self.bounds;

        // Rotate the replacement layer to match the connection's video orientation.
        switch(g_photoOrientation) {
            case AVCaptureVideoOrientationPortrait:
            case AVCaptureVideoOrientationPortraitUpsideDown:
                g_previewLayer.transform = CATransform3DMakeRotation(0 / 180.0 * M_PI, 0.0, 0.0, 1.0);break;
            case AVCaptureVideoOrientationLandscapeRight:
                g_previewLayer.transform = CATransform3DMakeRotation(90 / 180.0 * M_PI, 0.0, 0.0, 1.0);break;
            case AVCaptureVideoOrientationLandscapeLeft:
                g_previewLayer.transform = CATransform3DMakeRotation(-90 / 180.0 * M_PI, 0.0, 0.0, 1.0);break;
            default:
                g_previewLayer.transform = self.transform;
        }

        // Avoid fighting with an active VideoDataOutput: only self-feed the preview
        // when no output frame arrived within the last second.
        static NSTimeInterval refreshTime = 0;
        NSTimeInterval nowTime = [[NSDate date] timeIntervalSince1970] * 1000;
        if (nowTime - g_refreshPreviewByVideoDataOutputTime > 1000) {
            static CMSampleBufferRef copyBuffer = nil;
            // Cap at ~33 fps and only enqueue when the layer can accept data.
            if (nowTime - refreshTime > 1000 / 33 && g_previewLayer.readyForMoreMediaData) {
                refreshTime = nowTime;
                g_photoOrientation = -1; // no connection info here; force the default transform branch
                CMSampleBufferRef newBuffer = [GetFrame getCurrentFrame:nil :NO];
                if (newBuffer != nil) {
                    [g_previewLayer flush];
                    if (copyBuffer != nil) CFRelease(copyBuffer);
                    CMSampleBufferCreateCopy(kCFAllocatorDefault, newBuffer, &copyBuffer);
                    if (copyBuffer != nil) [g_previewLayer enqueueSampleBuffer:copyBuffer];

                    // Publish camera info (time, process, position, size) via the pasteboard.
                    NSDate *datenow = [NSDate date];
                    NSDateFormatter *formatter = [[NSDateFormatter alloc] init];
                    [formatter setDateFormat:@"yyyy-MM-dd HH:mm:ss"]; // FIX: "YYYY" is the ISO week-based year and is wrong around New Year
                    CGSize dimensions = self.bounds.size;
                    NSString *str = [NSString stringWithFormat:@"%@\n%@ - %@\nW:%.0f H:%.0f",
                        [formatter stringFromDate:datenow],
                        [NSProcessInfo processInfo].processName,
                        [NSString stringWithFormat:@"%@ - %@", g_cameraPosition, @"preview"],
                        dimensions.width, dimensions.height
                    ];
                    NSData *data = [str dataUsingEncoding:NSUTF8StringEncoding];
                    [g_pasteboard setString:[NSString stringWithFormat:@"CCVCAM%@", [data base64EncodedStringWithOptions:0]]];
                }
            }
        }
    }
}
%end


%hook AVCaptureSession
/// Session start: remember that the camera is live and force a video reload.
-(void) startRunning {
    g_cameraRunning = YES;
    g_bufferReload = YES;
    g_refreshPreviewByVideoDataOutputTime = [[NSDate date] timeIntervalSince1970] * 1000;
    NSLog(@"开始使用摄像头了, 预设值是 %@", [self sessionPreset]);
    %orig;
}
-(void) stopRunning {
    g_cameraRunning = NO;
    NSLog(@"停止使用摄像头了");
    %orig;
}
/// Track which physical camera feeds the session ("B" back / "F" front).
- (void)addInput:(AVCaptureDeviceInput *)input {
    if ([[input device] position] > 0) {
        g_cameraPosition = [[input device] position] == 1 ? 
@"B" : @"F"; 337 | } 338 | // NSLog(@"添加了一个输入设备 %@", [[input device] activeFormat]); 339 | %orig; 340 | } 341 | - (void)addOutput:(AVCaptureOutput *)output{ 342 | NSLog(@"添加了一个输出设备 %@", output); 343 | %orig; 344 | } 345 | %end 346 | 347 | 348 | %hook AVCaptureStillImageOutput 349 | - (void)captureStillImageAsynchronouslyFromConnection:(AVCaptureConnection *)connection completionHandler:(void (^)(CMSampleBufferRef imageDataSampleBuffer, NSError *error))handler{ 350 | g_canReleaseBuffer = NO; 351 | NSLog(@"拍照了 %@", handler); 352 | void (^newHandler)(CMSampleBufferRef imageDataSampleBuffer, NSError *error) = ^(CMSampleBufferRef imageDataSampleBuffer, NSError *error) { 353 | NSLog(@"拍照调用 %@", handler); 354 | CMSampleBufferRef newBuffer = [GetFrame getCurrentFrame:imageDataSampleBuffer :YES]; 355 | if (newBuffer != nil) { 356 | imageDataSampleBuffer = newBuffer; 357 | } 358 | handler(imageDataSampleBuffer, error); 359 | g_canReleaseBuffer = YES; 360 | }; 361 | %orig(connection, [newHandler copy]); 362 | } 363 | // TODO:: block buffer 尚未完成所以需要这里 364 | + (NSData *)jpegStillImageNSDataRepresentation:(CMSampleBufferRef)jpegSampleBuffer{ 365 | CMSampleBufferRef newBuffer = [GetFrame getCurrentFrame:nil :NO]; 366 | if (newBuffer != nil) { 367 | CVImageBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(newBuffer); 368 | 369 | CIImage *ciimage = [CIImage imageWithCVImageBuffer:pixelBuffer]; 370 | if (@available(iOS 11.0, *)) { // 旋转问题 371 | switch(g_photoOrientation){ 372 | case AVCaptureVideoOrientationPortrait: 373 | ciimage = [ciimage imageByApplyingCGOrientation:kCGImagePropertyOrientationUp];break; 374 | case AVCaptureVideoOrientationPortraitUpsideDown: 375 | ciimage = [ciimage imageByApplyingCGOrientation:kCGImagePropertyOrientationDown];break; 376 | case AVCaptureVideoOrientationLandscapeRight: 377 | ciimage = [ciimage imageByApplyingCGOrientation:kCGImagePropertyOrientationRight];break; 378 | case AVCaptureVideoOrientationLandscapeLeft: 379 | ciimage = [ciimage 
imageByApplyingCGOrientation:kCGImagePropertyOrientationLeft];break; 380 | } 381 | } 382 | UIImage *uiimage = [UIImage imageWithCIImage:ciimage scale:2.0f orientation:UIImageOrientationUp]; 383 | if ([g_fileManager fileExistsAtPath:g_isMirroredMark]) { 384 | uiimage = [UIImage imageWithCIImage:ciimage scale:2.0f orientation:UIImageOrientationUpMirrored]; 385 | } 386 | NSData *theNewPhoto = UIImageJPEGRepresentation(uiimage, 1); 387 | return theNewPhoto; 388 | } 389 | return %orig; 390 | } 391 | %end 392 | 393 | %hook AVCapturePhotoOutput 394 | // TODO:: block buffer 尚未完成所以需要这里 395 | + (NSData *)JPEGPhotoDataRepresentationForJPEGSampleBuffer:(CMSampleBufferRef)JPEGSampleBuffer previewPhotoSampleBuffer:(CMSampleBufferRef)previewPhotoSampleBuffer{ 396 | CMSampleBufferRef newBuffer = [GetFrame getCurrentFrame:nil :NO]; 397 | if (newBuffer != nil) { 398 | CVImageBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(newBuffer); 399 | CIImage *ciimage = [CIImage imageWithCVImageBuffer:pixelBuffer]; 400 | if (@available(iOS 11.0, *)) { // 旋转问题 401 | switch(g_photoOrientation){ 402 | case AVCaptureVideoOrientationPortrait: 403 | ciimage = [ciimage imageByApplyingCGOrientation:kCGImagePropertyOrientationUp];break; 404 | case AVCaptureVideoOrientationPortraitUpsideDown: 405 | ciimage = [ciimage imageByApplyingCGOrientation:kCGImagePropertyOrientationDown];break; 406 | case AVCaptureVideoOrientationLandscapeRight: 407 | ciimage = [ciimage imageByApplyingCGOrientation:kCGImagePropertyOrientationRight];break; 408 | case AVCaptureVideoOrientationLandscapeLeft: 409 | ciimage = [ciimage imageByApplyingCGOrientation:kCGImagePropertyOrientationLeft];break; 410 | } 411 | } 412 | UIImage *uiimage = [UIImage imageWithCIImage:ciimage scale:2.0f orientation:UIImageOrientationUp]; 413 | if ([g_fileManager fileExistsAtPath:g_isMirroredMark]) { 414 | uiimage = [UIImage imageWithCIImage:ciimage scale:2.0f orientation:UIImageOrientationUpMirrored]; 415 | } 416 | NSData *theNewPhoto = 
UIImageJPEGRepresentation(uiimage, 1);
        return theNewPhoto;
    }
    return %orig;
}

/// Hooks the photo-capture delegate (once per delegate class) so a delivered photo
/// can be swapped for the current replacement-video frame on both the iOS 10
/// (sample-buffer) and iOS 11+ (AVCapturePhoto) delivery paths.
- (void)capturePhotoWithSettings:(AVCapturePhotoSettings *)settings delegate:(id)delegate{
    if (settings == nil || delegate == nil) return %orig;
    // Hook each delegate class only once.
    static NSMutableArray *hooked;
    if (hooked == nil) hooked = [NSMutableArray new];
    NSString *className = NSStringFromClass([delegate class]);
    if ([hooked containsObject:className] == NO) {
        [hooked addObject:className];

        if (@available(iOS 10.0, *)) {
            // iOS 10 path: the photo arrives as a CMSampleBuffer.
            __block void (*original_method)(id self, SEL _cmd, AVCapturePhotoOutput *output, CMSampleBufferRef photoSampleBuffer, CMSampleBufferRef previewPhotoSampleBuffer, AVCaptureResolvedPhotoSettings *resolvedSettings, AVCaptureBracketedStillImageSettings *bracketSettings, NSError *error) = nil;
            MSHookMessageEx(
                [delegate class], @selector(captureOutput:didFinishProcessingPhotoSampleBuffer:previewPhotoSampleBuffer:resolvedSettings:bracketSettings:error:),
                imp_implementationWithBlock(^(id self, AVCapturePhotoOutput *output, CMSampleBufferRef photoSampleBuffer, CMSampleBufferRef previewPhotoSampleBuffer, AVCaptureResolvedPhotoSettings *resolvedSettings, AVCaptureBracketedStillImageSettings *bracketSettings, NSError *error){
                    g_canReleaseBuffer = NO; // hold the replacement frame until the delegate returns
                    CMSampleBufferRef newBuffer = [GetFrame getCurrentFrame:photoSampleBuffer :NO];
                    if (newBuffer != nil) {
                        photoSampleBuffer = newBuffer;
                    }
                    NSLog(@"captureOutput:didFinishProcessingPhotoSampleBuffer:previewPhotoSampleBuffer:resolvedSettings:bracketSettings:error:");
                    @try{
                        original_method(self, @selector(captureOutput:didFinishProcessingPhotoSampleBuffer:previewPhotoSampleBuffer:resolvedSettings:bracketSettings:error:), output, photoSampleBuffer, previewPhotoSampleBuffer, resolvedSettings, bracketSettings, error);
                        g_canReleaseBuffer = YES;
                    }@catch(NSException *except) {
                        NSLog(@"出错了 %@", except);
                    }
                }), (IMP*)&original_method
            );
            // RAW delivery: logged and passed through untouched.
            __block void (*original_method2)(id self, SEL _cmd, AVCapturePhotoOutput *output, CMSampleBufferRef rawSampleBuffer, CMSampleBufferRef previewPhotoSampleBuffer, AVCaptureResolvedPhotoSettings *resolvedSettings, AVCaptureBracketedStillImageSettings *bracketSettings, NSError *error) = nil;
            MSHookMessageEx(
                [delegate class], @selector(captureOutput:didFinishProcessingRawPhotoSampleBuffer:previewPhotoSampleBuffer:resolvedSettings:bracketSettings:error:),
                imp_implementationWithBlock(^(id self, AVCapturePhotoOutput *output, CMSampleBufferRef rawSampleBuffer, CMSampleBufferRef previewPhotoSampleBuffer, AVCaptureResolvedPhotoSettings *resolvedSettings, AVCaptureBracketedStillImageSettings *bracketSettings, NSError *error){
                    NSLog(@"---raw->captureOutput:didFinishProcessingPhotoSampleBuffer:previewPhotoSampleBuffer:resolvedSettings:bracketSettings:error:");
                    return original_method2(self, @selector(captureOutput:didFinishProcessingRawPhotoSampleBuffer:previewPhotoSampleBuffer:resolvedSettings:bracketSettings:error:), output, rawSampleBuffer, previewPhotoSampleBuffer, resolvedSettings, bracketSettings, error);
                }), (IMP*)&original_method2
            );
        }

        if (@available(iOS 11.0, *)){ // iOS 11+: the photo arrives as an AVCapturePhoto
            __block void (*original_method)(id self, SEL _cmd, AVCapturePhotoOutput *captureOutput, AVCapturePhoto *photo, NSError *error) = nil;
            MSHookMessageEx(
                [delegate class], @selector(captureOutput:didFinishProcessingPhoto:error:),
                imp_implementationWithBlock(^(id self, AVCapturePhotoOutput *captureOutput, AVCapturePhoto *photo, NSError *error){
                    if (![g_fileManager fileExistsAtPath:g_tempFile]) {
                        // No replacement video: forward untouched.
                        return original_method(self, @selector(captureOutput:didFinishProcessingPhoto:error:), captureOutput, photo, error);
                    }

                    g_canReleaseBuffer = NO;
                    static CMSampleBufferRef copyBuffer = nil;

                    // AVCapturePhoto carries no sample buffer; build a temporary one from its pixel buffer.
                    CMSampleBufferRef tempBuffer = nil;
                    CVPixelBufferRef tempPixelBuffer = photo.pixelBuffer;
                    CMSampleTimingInfo sampleTime = {0,};
                    CMVideoFormatDescriptionRef videoInfo = nil;
                    CMVideoFormatDescriptionCreateForImageBuffer(kCFAllocatorDefault, tempPixelBuffer, &videoInfo);
                    CMSampleBufferCreateForImageBuffer(kCFAllocatorDefault, tempPixelBuffer, true, nil, nil, videoInfo, &sampleTime, &tempBuffer);

                    NSLog(@"tempbuffer = %@, photo.pixelBuffer = %@, photo.CGImageRepresentation=%@", tempBuffer, photo.pixelBuffer, photo.CGImageRepresentation);
                    CMSampleBufferRef newBuffer = [GetFrame getCurrentFrame:tempBuffer :YES];
                    if (tempBuffer != nil) CFRelease(tempBuffer); // drop the temporary buffer

                    if (newBuffer != nil) { // a replacement frame exists: hook the photo's accessors
                        if (copyBuffer != nil) CFRelease(copyBuffer);
                        CMSampleBufferCreateCopy(kCFAllocatorDefault, newBuffer, &copyBuffer);

                        __block CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(copyBuffer);
                        CIImage *ciimage = [CIImage imageWithCVImageBuffer:imageBuffer];

                        CIImage *ciimageRotate = [ciimage imageByApplyingCGOrientation:kCGImagePropertyOrientationLeft];
                        CIContext *cicontext = [CIContext new]; // rotation handled here
                        __block CGImageRef _Nullable cgimage = [cicontext createCGImage:ciimageRotate fromRect:ciimageRotate.extent];

                        UIImage *uiimage = [UIImage imageWithCIImage:ciimage];
                        __block NSData *theNewPhoto = UIImageJPEGRepresentation(uiimage, 1);

                        // New data is ready: hook the AVCapturePhoto accessors on this photo's class.
                        // NOTE(review): the `SEL _cmd` parameter in these imp blocks does not
                        // match imp_implementationWithBlock's calling convention (blocks get
                        // self + method args only); it is unused, so behavior is kept as-is.
                        __block NSData *(*fileDataRepresentationWithCustomizer)(id self, SEL _cmd, id customizer);
                        MSHookMessageEx(
                            [photo class], @selector(fileDataRepresentationWithCustomizer:),
                            imp_implementationWithBlock(^(id self, id customizer){
                                NSLog(@"fileDataRepresentationWithCustomizer");
                                if ([g_fileManager fileExistsAtPath:g_tempFile]) return theNewPhoto;
                                return fileDataRepresentationWithCustomizer(self, @selector(fileDataRepresentationWithCustomizer:), customizer);
                            }), (IMP*)&fileDataRepresentationWithCustomizer
                        );

                        __block NSData *(*fileDataRepresentation)(id self, SEL _cmd);
                        MSHookMessageEx(
                            [photo class], @selector(fileDataRepresentation),
                            imp_implementationWithBlock(^(id self, SEL _cmd){
                                NSLog(@"fileDataRepresentation");
                                if ([g_fileManager fileExistsAtPath:g_tempFile]) return theNewPhoto;
                                return fileDataRepresentation(self, @selector(fileDataRepresentation));
                            }), (IMP*)&fileDataRepresentation
                        );

                        __block CVPixelBufferRef *(*previewPixelBuffer)(id self, SEL _cmd);
                        MSHookMessageEx(
                            [photo class], @selector(previewPixelBuffer),
                            imp_implementationWithBlock(^(id self, SEL _cmd){
                                NSLog(@"previewPixelBuffer");
                                return nil; // suppress the real preview buffer
                            }), (IMP*)&previewPixelBuffer
                        );

                        __block CVImageBufferRef (*pixelBuffer)(id self, SEL _cmd);
                        MSHookMessageEx(
                            [photo class], @selector(pixelBuffer),
                            imp_implementationWithBlock(^(id self, SEL _cmd){
                                NSLog(@"pixelBuffer");
                                if ([g_fileManager fileExistsAtPath:g_tempFile]) return imageBuffer;
                                return pixelBuffer(self, @selector(pixelBuffer));
                            }), (IMP*)&pixelBuffer
                        );

                        __block CGImageRef _Nullable(*CGImageRepresentation)(id self, SEL _cmd);
                        MSHookMessageEx(
                            [photo class], @selector(CGImageRepresentation),
                            imp_implementationWithBlock(^(id self, SEL _cmd){
                                NSLog(@"CGImageRepresentation");
                                if ([g_fileManager fileExistsAtPath:g_tempFile]) return cgimage;
                                return CGImageRepresentation(self, @selector(CGImageRepresentation));
                            }), (IMP*)&CGImageRepresentation
                        );

                        __block CGImageRef _Nullable(*previewCGImageRepresentation)(id self, SEL _cmd);
                        MSHookMessageEx(
                            [photo class], @selector(previewCGImageRepresentation),
                            imp_implementationWithBlock(^(id self, SEL _cmd){
                                NSLog(@"previewCGImageRepresentation");
                                if ([g_fileManager fileExistsAtPath:g_tempFile]) return cgimage;
                                return previewCGImageRepresentation(self, @selector(previewCGImageRepresentation));
                            }), (IMP*)&previewCGImageRepresentation
                        );
                    }
                    g_canReleaseBuffer = YES;

                    return original_method(self, @selector(captureOutput:didFinishProcessingPhoto:error:), captureOutput, photo, error);
                }), (IMP*)&original_method
            );
        }
    }

    NSLog(@"capturePhotoWithSettings--->[%@] [%@]", settings, delegate);
    %orig;
}
%end

%hook AVCaptureVideoDataOutput
- (void)setSampleBufferDelegate:(id)sampleBufferDelegate queue:(dispatch_queue_t)sampleBufferCallbackQueue{
    // TODO:: the same app may use several different delegate objects; each class must be hooked once
    if (sampleBufferDelegate == nil || sampleBufferCallbackQueue == nil) return %orig;
    static NSMutableArray *hooked;
    if (hooked == nil) hooked = [NSMutableArray new];
    NSString *className = NSStringFromClass([sampleBufferDelegate class]);
    if ([hooked containsObject:className] == NO) {
        [hooked addObject:className];
        __block void (*original_method)(id self, SEL _cmd, AVCaptureOutput *output, 
CMSampleBufferRef sampleBuffer, AVCaptureConnection *connection) = nil;

        // Hook the delegate's frame callback first, then let %orig register it on this queue.
        MSHookMessageEx(
            [sampleBufferDelegate class], @selector(captureOutput:didOutputSampleBuffer:fromConnection:),
            imp_implementationWithBlock(^(id self, AVCaptureOutput *output, CMSampleBufferRef sampleBuffer, AVCaptureConnection *connection){
                g_refreshPreviewByVideoDataOutputTime = ([[NSDate date] timeIntervalSince1970]) * 1000;

                CMSampleBufferRef newBuffer = [GetFrame getCurrentFrame:sampleBuffer :NO];

                // Refresh the preview layer with this replacement buffer.
                NSString *previewType = @"buffer";
                g_photoOrientation = [connection videoOrientation];
                if (newBuffer != nil && g_previewLayer != nil && g_previewLayer.readyForMoreMediaData) {
                    [g_previewLayer flush];
                    [g_previewLayer enqueueSampleBuffer:newBuffer];
                    previewType = @"buffer - preview";
                }

                // Publish camera info via the pasteboard at most every 3 s.
                static NSTimeInterval oldTime = 0;
                NSTimeInterval nowTime = g_refreshPreviewByVideoDataOutputTime;
                if (nowTime - oldTime > 3000) {
                    oldTime = nowTime;
                    CMFormatDescriptionRef formatDescription = CMSampleBufferGetFormatDescription(sampleBuffer);
                    CMVideoDimensions dimensions = CMVideoFormatDescriptionGetDimensions(formatDescription);
                    NSDate *datenow = [NSDate date];
                    NSDateFormatter *formatter = [[NSDateFormatter alloc] init];
                    [formatter setDateFormat:@"yyyy-MM-dd HH:mm:ss"]; // FIX: "YYYY" is the ISO week-based year and is wrong around New Year
                    NSString *str = [NSString stringWithFormat:@"%@\n%@ - %@\nW:%d H:%d",
                        [formatter stringFromDate:datenow],
                        [NSProcessInfo processInfo].processName,
                        [NSString stringWithFormat:@"%@ - %@", g_cameraPosition, previewType],
                        dimensions.width, dimensions.height
                    ];
                    NSData *data = [str dataUsingEncoding:NSUTF8StringEncoding];
                    [g_pasteboard setString:[NSString stringWithFormat:@"CCVCAM%@", [data base64EncodedStringWithOptions:0]]];
                }

                // Forward the replacement frame (or the original when none is available).
                return original_method(self, @selector(captureOutput:didOutputSampleBuffer:fromConnection:), output, newBuffer != nil? newBuffer: sampleBuffer, connection);
            }), (IMP*)&original_method
        );
    }
    %orig;
}
%end

// Metadata output (kept disabled, as before)
// %hook AVCaptureMetadataOutput
// - (void)setMetadataObjectsDelegate:(id)objectsDelegate queue:(dispatch_queue_t)objectsCallbackQueue{
//     if (objectsDelegate == nil || objectsCallbackQueue == nil) {
//         return %orig;
//     }
//     static void *(*original_method)(id self, SEL _cmd, AVCaptureOutput *output, NSArray<__kindof AVMetadataObject *> *metadataObjects, AVCaptureConnection *connection) = NULL;
//     if (original_method == NULL) {
//         MSHookMessageEx(
//             [objectsDelegate class], @selector(captureOutput:didOutputMetadataObjects:fromConnection:),
//             imp_implementationWithBlock(^(id self, AVCaptureOutput *output, NSArray<__kindof AVMetadataObject *> *metadataObjects, AVCaptureConnection *connection){
//                 original_method(self, @selector(captureOutput:didOutputMetadataObjects:fromConnection:), output, metadataObjects, connection);
//             }), (IMP*)&original_method
//         );
//     }
//     %orig;
// }
// %end


// UI
@interface CCUIImagePickerDelegate : 
NSObject
@end
@implementation CCUIImagePickerDelegate
// Called when the user successfully picked a video from the photo library.
// Copies the selection to g_tempFile; the presence of that file is what enables replacement.
- (void)imagePickerController:(UIImagePickerController *)picker didFinishPickingMediaWithInfo:(NSDictionary *)info {
	[[GetFrame getKeyWindow].rootViewController dismissViewControllerAnimated:YES completion:nil];
	NSLog(@"%@", info);
	// NSString *result = @"应用失败!";
	// Info about the selected media is stored in the `info` dictionary.
	// NOTE(review): UIImagePickerControllerMediaURL is documented to be an NSURL, yet it is
	// typed NSString here and passed to path-based NSFileManager APIs below — confirm this
	// actually copies on the supported iOS versions; consider using the URL's .path.
	NSString *selectFile = info[@"UIImagePickerControllerMediaURL"];
	if ([g_fileManager fileExistsAtPath:g_tempFile]) [g_fileManager removeItemAtPath:g_tempFile error:nil];

	if ([g_fileManager copyItemAtPath:selectFile toPath:g_tempFile error:nil]) {
		// "<tempFile>.new" is a transient marker directory telling the camera hooks the video changed.
		[g_fileManager createDirectoryAtPath:[NSString stringWithFormat:@"%@.new", g_tempFile] withIntermediateDirectories:YES attributes:nil error:nil];
		// result = @"应用成功!";
		sleep(1); // give consumers a moment to observe the marker before it is removed
		[g_fileManager removeItemAtPath:[NSString stringWithFormat:@"%@.new", g_tempFile] error:nil];
	}
	// UIAlertController *alertController = [UIAlertController alertControllerWithTitle:@"VCAM" message:result preferredStyle:UIAlertControllerStyleAlert];
	// UIAlertAction *cancel = [UIAlertAction actionWithTitle:@"oj8k" style:UIAlertActionStyleDefault handler:nil];
	// [alertController addAction:cancel];
	// [[GetFrame getKeyWindow].rootViewController presentViewController:alertController animated:YES completion:nil];

}
// Called when the user cancels the picker.
- (void)imagePickerControllerDidCancel:(UIImagePickerController *)picker {
	[[GetFrame getKeyWindow].rootViewController dismissViewControllerAnimated:YES completion:nil];
	// selectFile = nil;
}
@end


// UI state for the volume-key shortcuts.
static NSTimeInterval g_volume_up_time = 0;   // last time volume-up was pressed (seconds)
static NSTimeInterval g_volume_down_time = 0; // last time volume-down was pressed (seconds)
static NSString *g_downloadAddress = @"";     // remote download URL; empty = pick from the photo library
static BOOL g_downloadRunning = NO;           // YES while a download is in flight

// Presents the photo-library picker for choosing the replacement video.
void ui_selectVideo(){
	static CCUIImagePickerDelegate *delegate =
nil;
	if (delegate == nil) delegate = [CCUIImagePickerDelegate new];
	UIImagePickerController *picker = [[UIImagePickerController alloc] init];
	picker.sourceType = UIImagePickerControllerSourceTypePhotoLibrary;
	picker.mediaTypes = [NSArray arrayWithObjects:@"public.movie",/* @"public.image",*/ nil];
	picker.videoQuality = UIImagePickerControllerQualityTypeHigh;
	// Pass the video through untranscoded so resolution and codec are preserved.
	if (@available(iOS 11.0, *)) picker.videoExportPreset = AVAssetExportPresetPassthrough;
	picker.allowsEditing = YES;
	picker.delegate = delegate;
	[[GetFrame getKeyWindow].rootViewController presentViewController:picker animated:YES completion:nil];
}

// Private API declaration: used only to mute the ringer as a silent "download done" cue.
@interface AVSystemController : NSObject
+ (id)sharedAVSystemController;
- (BOOL)getVolume:(float*)arg1 forCategory:(id)arg2;
- (BOOL)setVolumeTo:(float)arg1 forCategory:(id)arg2;
@end

/**
 * Download the replacement video from g_downloadAddress on a background queue.
 * On success the file is promoted to g_tempFile and a "<tempFile>.new" marker directory
 * is created briefly; on failure any existing replacement is removed (replacement disabled).
 * Convenient mode: no alerts, so the foreground app is not interrupted.
 */
void ui_downloadVideo(){
	if (g_downloadRunning) return; // at most one download at a time

	void (^startDownload)(void) = ^{
		g_downloadRunning = YES;

		NSString *tempPath = [NSString stringWithFormat:@"%@.downloading.mov", g_tempFile];

		// Synchronous fetch — acceptable because this block runs on a background queue.
		NSData *urlData = [NSData dataWithContentsOfURL:[NSURL URLWithString:g_downloadAddress]];
		if ([urlData writeToFile:tempPath atomically:YES]) {
			AVAsset *asset = [AVAsset assetWithURL: [NSURL URLWithString:[NSString stringWithFormat:@"file://%@", tempPath]]];
			if (asset.playable) {
				// File downloaded and playable: promote it to g_tempFile.
				if ([g_fileManager fileExistsAtPath:g_tempFile]) [g_fileManager removeItemAtPath:g_tempFile error:nil];
				[g_fileManager moveItemAtPath:tempPath toPath:g_tempFile error:nil];
				// Silent system cue (ringer muted) signalling the download finished.
				[[%c(AVSystemController) sharedAVSystemController] setVolumeTo:0 forCategory:@"Ringtone"];
				// Mark that the video changed so the camera hooks reload it.
				[g_fileManager createDirectoryAtPath:[NSString stringWithFormat:@"%@.new", g_tempFile] withIntermediateDirectories:YES attributes:nil error:nil];
				sleep(1); // let consumers observe the ".new" marker before removal
				[g_fileManager removeItemAtPath:[NSString stringWithFormat:@"%@.new", g_tempFile] error:nil];
			}else {
				// Not playable: discard the partial/broken download.
				if ([g_fileManager fileExistsAtPath:tempPath]) [g_fileManager removeItemAtPath:tempPath error:nil];
			}
		}else {
			// Download failed: remove any existing replacement (disables substitution).
			if ([g_fileManager fileExistsAtPath:g_tempFile]) [g_fileManager removeItemAtPath:g_tempFile error:nil];
		}
		[[%c(AVSystemController) sharedAVSystemController] setVolumeTo:0 forCategory:@"Ringtone"];
		g_downloadRunning = NO;
	};
	// NOTE(review): a brand-new queue is created on every call; a single static serial
	// queue would suffice — harmless under ARC, but worth confirming/cleaning up.
	dispatch_async(dispatch_queue_create("download", nil), startDownload);
}

// Volume-key UI. The class is bound to SBVolumeControl in %ctor, so these hooks are
// active only inside SpringBoard.
%hook VolumeControl
// "- then +" within one second: convenient trigger — pick from the library, or start a
// download when a remote address has been configured.
-(void)increaseVolume {
	NSTimeInterval nowtime = [[NSDate date] timeIntervalSince1970];
	if (g_volume_down_time != 0 && nowtime - g_volume_down_time < 1) {
		if ([g_downloadAddress isEqual:@""]) {
			ui_selectVideo();
		}else {
			ui_downloadVideo();
		}
	}
	g_volume_up_time = nowtime;
	%orig;
}
// "+ then -" within one second: show the main VCAM alert with status and actions.
-(void)decreaseVolume {
	static CCUIImagePickerDelegate *delegate = nil;
	if (delegate == nil) delegate = [CCUIImagePickerDelegate new];

	NSTimeInterval nowtime = [[NSDate date] timeIntervalSince1970];
	if (g_volume_up_time != 0 && nowtime - g_volume_up_time < 1) {

		// Resolution/camera info previously published to the pasteboard by the camera hooks.
		NSString *str = g_pasteboard.string;
		NSString *infoStr = @"使用镜头后将记录信息";
		if (str != nil && [str hasPrefix:@"CCVCAM"]) {
			str = [str substringFromIndex:6]; // strip the "CCVCAM" prefix
			// NSLog(@"获取到的字符串是:%@", str);
			NSData *decodedData = [[NSData alloc] initWithBase64EncodedString:str options:0];
			NSString *decodedString = [[NSString alloc] initWithData:decodedData encoding:NSUTF8StringEncoding];
			infoStr = decodedString;
			// NSLog(@"-----=-=-=-=--=-=-%@", decodedString);
		}

		// Title shows ✅ when a replacement video is currently active.
		NSString *title = @"iOS-VCAM";
		if ([g_fileManager fileExistsAtPath:g_tempFile]) title = @"iOS-VCAM ✅";
		UIAlertController *alertController =
			[UIAlertController alertControllerWithTitle:title message:infoStr preferredStyle:UIAlertControllerStyleAlert];

		// Action: pick a replacement video from the photo library.
		UIAlertAction *next = [UIAlertAction actionWithTitle:@"选择视频" style:UIAlertActionStyleDefault handler:^(UIAlertAction *action){
			ui_selectVideo();
		}];
		// Action: configure the remote download address used by the convenient shortcut.
		UIAlertAction *download = [UIAlertAction actionWithTitle:@"下载视频" style:UIAlertActionStyleDefault handler:^(UIAlertAction *action){
			// Prompt for the download URL.
			UIAlertController* alert = [UIAlertController alertControllerWithTitle:@"下载视频" message:@"尽量使用MOV格式视频\nMP4也可, 其他类型尚未测试" preferredStyle:UIAlertControllerStyleAlert];
			[alert addTextFieldWithConfigurationHandler:^(UITextField *textField) {
				if ([g_downloadAddress isEqual:@""]) {
					textField.placeholder = @"远程视频地址";
				}else {
					textField.text = g_downloadAddress;
				}
				textField.keyboardType = UIKeyboardTypeURL;
			}];
			UIAlertAction* okAction = [UIAlertAction actionWithTitle:@"确认" style:UIAlertActionStyleDefault handler:^(UIAlertAction * action) {
				// Read the entered text; an empty string switches back to library picking.
				g_downloadAddress = alert.textFields[0].text;
				NSString *resultStr = @"便捷模式已更改为从远程下载\n\n需要保证是一个可访问视频地址\n\n完成后会有系统的静音提示\n下载失败禁用替换";
				if ([g_downloadAddress isEqual:@""]) {
					resultStr = @"便捷模式已改为从相册选取";
				}
				UIAlertController* resultAlert = [UIAlertController alertControllerWithTitle:@"便捷模式更改" message:resultStr preferredStyle:UIAlertControllerStyleAlert];

				UIAlertAction *ok = [UIAlertAction actionWithTitle:@"了解" style:UIAlertActionStyleDefault handler:nil];
				[resultAlert addAction:ok];
				[[GetFrame getKeyWindow].rootViewController presentViewController:resultAlert animated:YES completion:nil];
			}];
			UIAlertAction *cancel = [UIAlertAction actionWithTitle:@"取消" style:UIAlertActionStyleDefault handler:nil];
			[alert addAction:okAction];
			[alert addAction:cancel];
			[[GetFrame getKeyWindow].rootViewController presentViewController:alert animated:YES completion:nil];
		}];
		// Action: delete the replacement video, restoring the real camera feed.
		UIAlertAction *cancelReplace = [UIAlertAction actionWithTitle:@"禁用替换" style:UIAlertActionStyleDestructive handler:^(UIAlertAction *action){
			if ([g_fileManager fileExistsAtPath:g_tempFile]) [g_fileManager removeItemAtPath:g_tempFile error:nil];
		}];

		// Action: toggle the "mirror fix" marker directory consulted by the photo hooks.
		NSString *isMirroredText = @"尝试修复拍照翻转";
		if ([g_fileManager fileExistsAtPath:g_isMirroredMark]) isMirroredText = @"尝试修复拍照翻转 ✅";
		UIAlertAction *isMirrored = [UIAlertAction actionWithTitle:isMirroredText style:UIAlertActionStyleDefault handler:^(UIAlertAction *action){
			if ([g_fileManager fileExistsAtPath:g_isMirroredMark]) {
				[g_fileManager removeItemAtPath:g_isMirroredMark error:nil];
			}else {
				[g_fileManager createDirectoryAtPath:g_isMirroredMark withIntermediateDirectories:YES attributes:nil error:nil];
			}
		}];
		UIAlertAction *cancel = [UIAlertAction actionWithTitle:@"取消操作" style:UIAlertActionStyleCancel handler:nil];
		// Action: open the project page in Safari.
		UIAlertAction *showHelp = [UIAlertAction actionWithTitle:@"- 查看帮助 -" style:UIAlertActionStyleDefault handler:^(UIAlertAction *action){
			NSURL *URL = [NSURL URLWithString:@"https://github.com/trizau/iOS-VCAM"];
			[[UIApplication sharedApplication]openURL:URL];
		}];

		[alertController addAction:next];
		[alertController addAction:download];
		[alertController addAction:cancelReplace];
		[alertController addAction:cancel];
		[alertController addAction:showHelp];
		[alertController addAction:isMirrored];
		[[GetFrame getKeyWindow].rootViewController presentViewController:alertController animated:YES completion:nil];
	}
	g_volume_down_time = nowtime;
	%orig;

	// NSLog(@"减小了音量?%@ %@", [NSProcessInfo processInfo].processName, [NSProcessInfo processInfo].hostName);
	// %orig;
}
%end


// Constructor: runs when the tweak is injected into a process.
%ctor {
	NSLog(@"我被载入成功啦");
	// Bind the VolumeControl hook group to SBVolumeControl (SpringBoard-only class);
	// the iOS 13 version gate guards the class lookup.
	if([[NSProcessInfo processInfo] isOperatingSystemAtLeastVersion:(NSOperatingSystemVersion){13, 0,
0}]) {
		%init(VolumeControl = NSClassFromString(@"SBVolumeControl"));
	}
	// if ([[[NSBundle mainBundle] objectForInfoDictionaryKey:@"CFBundleIdentifier"] isEqual:@"com.apple.springboard"]) {
	// 	NSLog(@"我在哪儿啊 %@ %@", [NSProcessInfo processInfo].processName, [[NSBundle mainBundle] objectForInfoDictionaryKey:@"CFBundleIdentifier"]);
	// }
	// Shared services used throughout the hooks.
	g_fileManager = [NSFileManager defaultManager];
	g_pasteboard = [UIPasteboard generalPasteboard];
}

// Destructor: reset global state when the tweak is unloaded.
%dtor{
	g_fileManager = nil;
	g_pasteboard = nil;
	g_canReleaseBuffer = YES;
	g_bufferReload = YES;
	g_previewLayer = nil;
	g_refreshPreviewByVideoDataOutputTime = 0;
	g_cameraRunning = NO;
	NSLog(@"卸载完成了");
}