├── README.md └── ReplyKitDemo-Socket ├── JWExtensionKit ├── FIAgoraClientBufferSocketManager.h ├── FIAgoraClientBufferSocketManager.m ├── FIAgoraSampleHandlerSocketManager.h ├── FIAgoraSampleHandlerSocketManager.m ├── JWExtensionKit.docc │ └── JWExtensionKit.md ├── JWExtensionKit.h ├── Socket │ ├── GCDAsyncSocket.h │ ├── GCDAsyncSocket.m │ ├── NTESSocket.h │ ├── NTESSocket.m │ ├── NTESSocketPacket.h │ ├── NTESSocketPacket.m │ ├── NTESTPCircularBuffer.c │ └── NTESTPCircularBuffer.h ├── Vendors │ ├── libyuv.a │ ├── libyuv.h │ └── libyuv │ │ ├── basic_types.h │ │ ├── compare.h │ │ ├── compare_row.h │ │ ├── convert.h │ │ ├── convert_argb.h │ │ ├── convert_from.h │ │ ├── convert_from_argb.h │ │ ├── cpu_id.h │ │ ├── macros_msa.h │ │ ├── mjpeg_decoder.h │ │ ├── planar_functions.h │ │ ├── rotate.h │ │ ├── rotate_argb.h │ │ ├── rotate_row.h │ │ ├── row.h │ │ ├── scale.h │ │ ├── scale_argb.h │ │ ├── scale_row.h │ │ ├── version.h │ │ └── video_common.h └── Video │ ├── NTESI420Frame.h │ ├── NTESI420Frame.m │ ├── NTESVideoUtil.h │ ├── NTESVideoUtil.m │ ├── NTESYUVConverter.h │ └── NTESYUVConverter.mm ├── ReplyKitDemo.xcodeproj ├── project.pbxproj ├── project.xcworkspace │ ├── contents.xcworkspacedata │ ├── xcshareddata │ │ └── IDEWorkspaceChecks.plist │ └── xcuserdata │ │ └── summerxx.xcuserdatad │ │ └── UserInterfaceState.xcuserstate ├── xcshareddata │ └── xcschemes │ │ ├── ReplyKitDemo.xcscheme │ │ └── extension-demo.xcscheme └── xcuserdata │ └── summerxx.xcuserdatad │ ├── xcdebugger │ └── Breakpoints_v2.xcbkptlist │ └── xcschemes │ └── xcschememanagement.plist ├── ReplyKitDemo ├── AppDelegate.h ├── AppDelegate.m ├── Assets.xcassets │ ├── AccentColor.colorset │ │ └── Contents.json │ ├── AppIcon.appiconset │ │ └── Contents.json │ ├── Contents.json │ └── focus.imageset │ │ ├── Contents.json │ │ └── focus.png ├── Base.lproj │ ├── LaunchScreen.storyboard │ └── Main.storyboard ├── CameraCapture │ ├── CaptureViewController.h │ └── CaptureViewController.m ├── DisplayLayer │ ├── 
VideoDisplayLayer.h │ └── VideoDisplayLayer.m ├── FJDeepSleepPreventer │ ├── FJDeepSleepPreventer.h │ ├── FJDeepSleepPreventer.m │ ├── FJDeepSleepPreventerPlus.h │ ├── FJDeepSleepPreventerPlus.m │ └── Silence.wav ├── H264DeCode解码 │ ├── VideoH264Decoder.h │ └── VideoH264Decoder.m ├── H264EnCode编码 │ ├── VideoH264EnCode.h │ └── VideoH264EnCode.m ├── Info.plist ├── SceneDelegate.h ├── SceneDelegate.m ├── main.m └── 屏幕共享演示 │ ├── ViewController.h │ └── ViewController.m ├── ReplyKitDemoTests └── ReplyKitDemoTests.m ├── ReplyKitDemoUITests ├── ReplyKitDemoUITests.m └── ReplyKitDemoUITestsLaunchTests.m └── extension-demo ├── Info.plist ├── SampleHandler.h └── SampleHandler.m /README.md: -------------------------------------------------------------------------------- 1 | # ReplayKitShareScreen-socket 2 | 使用replayKit iOS12 之后相关 api 完成系统/app 内 屏幕采集直播视频数据, 采用 socket进行进程间Broadcast Unload Extension 向 宿主 app 传输数据, 后台保活持续采集屏幕数据, 摄像头采集, 数据编码解码 3 | 4 | 编译环境 Xcode14.2, iOS12 5 | 6 | - 系统屏幕数据采集 7 | - app 内屏幕共享 8 | - 使用socket 由 Broadcast Unload Extension 向宿主 app 传输数据 9 | - 视频解码 10 | - 程序永久保活 11 | - 创建 framework 供 Broadcast Unload Extension 和宿主 app 调用共用类 12 | 13 | [TOC] 14 | 15 | ### 1. 第一步创建 Broadcast Unload Extension 16 | 17 | 步骤: File -> new -> Target 18 | 19 | ![截屏2023-01-06 17.39.39](https://p.ipic.vip/hq1jyl.png) 20 | 21 | 创建好之后生成 一个扩展 App, 自动生成如图的一个 sampleHandr类 22 | 23 | ![截屏2023-01-06 17.41.55](https://p.ipic.vip/pai0gm.png) 24 | 25 | ```objective-c 26 | - (void)broadcastStartedWithSetupInfo:(NSDictionary *)setupInfo { 27 | // User has requested to start the broadcast. Setup info from the UI extension can be supplied but optional. 28 | // 宿主 app开始直播屏幕的时候这里会走一次 29 | // 设置 socket 30 | // 其中 FIAgoraSampleHandlerSocketManager这个类可以看 Demo 的实现 31 | [[FIAgoraSampleHandlerSocketManager sharedManager] setUpSocket]; 32 | } 33 | 34 | - (void)broadcastPaused { 35 | // User has requested to pause the broadcast. Samples will stop being delivered. 
36 | } 37 | 38 | - (void)broadcastResumed { 39 | // User has requested to resume the broadcast. Samples delivery will resume. 40 | } 41 | 42 | - (void)broadcastFinished { 43 | // User has requested to finish the broadcast. 44 | } 45 | 46 | // 实时采集数据 47 | - (void)processSampleBuffer:(CMSampleBufferRef)sampleBuffer withType:(RPSampleBufferType)sampleBufferType { 48 | 49 | switch (sampleBufferType) { 50 | case RPSampleBufferTypeVideo: 51 | // Handle video sample buffer 52 | // 发送视频数据导宿主 App 53 | [[FIAgoraSampleHandlerSocketManager sharedManager] sendVideoBufferToHostApp:sampleBuffer]; 54 | break; 55 | case RPSampleBufferTypeAudioApp: 56 | // Handle audio sample buffer for app audio 57 | break; 58 | case RPSampleBufferTypeAudioMic: 59 | // Handle audio sample buffer for mic audio 60 | break; 61 | 62 | default: 63 | break; 64 | } 65 | } 66 | 67 | ``` 68 | 69 | ### 2. FIAgoraSampleHandlerSocketManager 关于数据传输的类 都放到一个framework 当中 70 | 71 | - 步骤: File -> new -> Target 创建 framework, 如图 1 72 | - 创建好之后在宿主 app 和 extension 分别引用, 如图 2 73 | 74 | 75 | 76 | ![截屏2023-01-06 17.46.46](https://p.ipic.vip/e860da.png) 77 | 78 | ![1672998592654](https://p.ipic.vip/14zjj6.jpg) 79 | 80 | ### 3. 宿主 App 81 | 82 | - 手动启动直播 83 | - 需要永久保活 84 | - 监测数据回调 85 | - 编码 86 | - 推流 87 | 88 | 1. 
初始化开启直播的按钮 89 | 90 | ```objective-c 91 | // 设置系统的广播 Picker 视图 92 | - (void)setupSystemBroadcastPickerView 93 | { 94 | // 兼容 iOS12 或更高的版本 95 | if (@available(iOS 12.0, *)) { 96 | self.broadcastPickerView = [[RPSystemBroadcastPickerView alloc] initWithFrame:CGRectMake(50, 200, 100, 100)]; 97 | self.broadcastPickerView.preferredExtension = @"summerxx.com.screen-share-ios.broadcast-extension"; 98 | self.broadcastPickerView.backgroundColor = UIColor.cyanColor; 99 | self.broadcastPickerView.showsMicrophoneButton = NO; 100 | [self.view addSubview:self.broadcastPickerView]; 101 | } 102 | // 改变系统提供的按钮的 UI, 这里有个风险, 以后可能会失效, 暂时用没有什么问题 103 | UIButton *startButton = [UIButton buttonWithType:UIButtonTypeCustom]; 104 | startButton.frame = CGRectMake(50, 310, 100, 100); 105 | startButton.backgroundColor = UIColor.cyanColor; 106 | [startButton setTitle:@"开启摄像头" forState:UIControlStateNormal]; 107 | [startButton setTitleColor:UIColor.blackColor forState:UIControlStateNormal]; 108 | [startButton addTarget:self action:@selector(startAction) forControlEvents:UIControlEventTouchUpInside]; 109 | [self.view addSubview:startButton]; 110 | } 111 | ``` 112 | 113 | 2. 保活 114 | 115 | ![截屏2023-01-06 17.57.10](https://p.ipic.vip/cw7kcy.png) 116 | 117 | ```objective-c 118 | 监听 119 | [[NSNotificationCenter defaultCenter]addObserver:self selector:@selector(didEnterBackGround) name:UIApplicationDidEnterBackgroundNotification object:nil]; 120 | [[NSNotificationCenter defaultCenter]addObserver:self selector:@selector(willEnterForeground) name:UIApplicationWillEnterForegroundNotification object:nil]; 121 | 122 | - (void)willEnterForeground 123 | { 124 | // 这里具体可看 Demo 125 | [[FJDeepSleepPreventerPlus sharedInstance] stop]; 126 | } 127 | 128 | - (void)didEnterBackGround 129 | { 130 | [[FJDeepSleepPreventerPlus sharedInstance] start]; 131 | } 132 | ``` 133 | 134 | 3. 
数据回调 135 | 136 | ```objective-c 137 | __weak __typeof(self) weakSelf = self; 138 | [FIAgoraClientBufferSocketManager sharedManager].testBlock = ^(NSString * testText, CMSampleBufferRef sampleBuffer) { 139 | 140 | // 进行视频编码 141 | [weakSelf.h264code encodeSampleBuffer:sampleBuffer H264DataBlock:^(NSData * data) { 142 | NSLog(@"%@", data); 143 | // 编码后可进行推流流程 144 | }]; 145 | }; 146 | ``` 147 | 148 | 149 | 150 | 到此就基本结束了 151 | 152 | 其中 数据传输的方式, 还可以用 App Group 的方式 我也实现了 153 | 154 | Demo 在这里 [https://github.com/summerxx27/ReplayKitShareScreen](https://github.com/summerxx27/ReplayKitShareScreen) 155 | -------------------------------------------------------------------------------- /ReplyKitDemo-Socket/JWExtensionKit/FIAgoraClientBufferSocketManager.h: -------------------------------------------------------------------------------- 1 | // 2 | // FIAgoraClientBufferSocketManager.h 3 | // FIAgoraVideo 4 | // 5 | // Created by flagadmin on 2020/5/7. 6 | // Copyright © 2020 flagadmin. All rights reserved. 7 | // 8 | #import 9 | #import 10 | 11 | NS_ASSUME_NONNULL_BEGIN 12 | typedef void(^TestBlock) (NSString *testText,CMSampleBufferRef sampleBuffer); 13 | 14 | @interface FIAgoraClientBufferSocketManager : NSObject 15 | + (FIAgoraClientBufferSocketManager *)sharedManager; 16 | - (void)stopSocket; 17 | - (void)setupSocket; 18 | @property(nonatomic, copy) TestBlock testBlock; 19 | 20 | 21 | @end 22 | 23 | NS_ASSUME_NONNULL_END 24 | -------------------------------------------------------------------------------- /ReplyKitDemo-Socket/JWExtensionKit/FIAgoraClientBufferSocketManager.m: -------------------------------------------------------------------------------- 1 | // 2 | // FIAgoraClientBufferSocketManager.m 3 | // FIAgoraVideo 4 | // 5 | // Created by flagadmin on 2020/5/7. 6 | // Copyright © 2020 flagadmin. All rights reserved. 
//
#import <Foundation/Foundation.h> // NOTE(review): the dump stripped the <...> import target; restored — confirm against the original project
#import "NTESYUVConverter.h"
#import "NTESI420Frame.h"
#import "GCDAsyncSocket.h"
#import "NTESSocketPacket.h"
#import "NTESTPCircularBuffer.h"
#import "FIAgoraClientBufferSocketManager.h"

/// Host-app side socket manager: accepts the loopback connection from the
/// broadcast extension, reassembles framed packets from the byte stream and
/// hands each decoded video frame to `testBlock`.
// NOTE(review): the <GCDAsyncSocketDelegate> conformance was stripped by the
// text dump; restored — this class implements the delegate callbacks below.
@interface FIAgoraClientBufferSocketManager () <GCDAsyncSocketDelegate>
/// Listening (server) socket, bound to port 8999.
@property (nonatomic, strong) GCDAsyncSocket *socket;
/// Queue on which GCDAsyncSocket delivers delegate callbacks.
@property (nonatomic, strong) dispatch_queue_t queue;
/// Accepted client sockets (the extension connects here).
@property (nonatomic, strong) NSMutableArray *sockets;
/// Circular buffer used to reassemble one frame at a time.
/// malloc'd in -setupSocket and released in -stopSocket.
@property (nonatomic, assign) NTESTPCircularBuffer *recvBuffer;
// Fix: NSString property must be `copy`, not `strong` (mutable-subclass safety).
@property (nonatomic, copy) NSString *testText;

@end

@implementation FIAgoraClientBufferSocketManager

+ (FIAgoraClientBufferSocketManager *)sharedManager {
    static FIAgoraClientBufferSocketManager *shareInstance = nil;
    static dispatch_once_t onceToken;
    dispatch_once(&onceToken, ^{
        shareInstance = [[self alloc] init];
    });
    return shareInstance;
}

#pragma mark - Screen sharing

/// Tears down the listening socket, drops all accepted clients and frees the
/// receive buffer.
- (void)stopSocket
{
    if (_socket) {
        [_socket disconnect];
        _socket = nil;
        [_sockets removeAllObjects];
    }
    if (_recvBuffer) {
        NTESTPCircularBufferCleanup(_recvBuffer);
        // Fix: the buffer struct itself is malloc'd in -setupSocket and was
        // never freed (the original's own "需要释放" comment acknowledged this),
        // leaking on every stop/start cycle.
        free(_recvBuffer);
        _recvBuffer = NULL;
    }
    [[NSNotificationCenter defaultCenter] removeObserver:self name:NSUserDefaultsDidChangeNotification object:nil];
}

/// Starts listening on port 8999 and observes NSUserDefaults changes so that
/// quality/orientation preferences can be forwarded to the extension.
- (void)setupSocket
{
    self.sockets = [NSMutableArray array];
    if (_recvBuffer == NULL) {
        // Fix: guard so a second call does not leak the previous allocation.
        _recvBuffer = (NTESTPCircularBuffer *)malloc(sizeof(NTESTPCircularBuffer)); // freed in -stopSocket
        NTESTPCircularBufferInit(_recvBuffer, kRecvBufferMaxSize);
    } else {
        NTESTPCircularBufferClear(_recvBuffer);
    }
    // NOTE(review): delegate callbacks are delivered on the main queue (as in
    // the original); move to a private serial queue if frame handling ever
    // becomes heavy.
    self.queue = dispatch_get_main_queue();
    self.socket = [[GCDAsyncSocket alloc] initWithDelegate:self delegateQueue:self.queue];
    self.socket.IPv6Enabled = NO;
    NSError *error = nil;
    // Fix: check the method's return value rather than logging unconditionally.
    if (![self.socket acceptOnPort:8999 error:&error]) {
        NSLog(@"%@", error);
    }
    [self.socket readDataWithTimeout:-1 tag:0];
    [[NSNotificationCenter defaultCenter] addObserver:self
                                             selector:@selector(defaultsChanged:)
                                                 name:NSUserDefaultsDidChangeNotification
                                               object:nil];
}

#pragma mark - GCDAsyncSocketDelegate

- (void)socketDidDisconnect:(GCDAsyncSocket *)sock withError:(nullable NSError *)err
{
    NTESTPCircularBufferClear(self.recvBuffer);
    [self.sockets removeObject:sock];
}

- (void)socketDidCloseReadStream:(GCDAsyncSocket *)sock
{
    NTESTPCircularBufferClear(self.recvBuffer);
    [self.sockets removeObject:sock];
}

- (void)socket:(GCDAsyncSocket *)sock didAcceptNewSocket:(GCDAsyncSocket *)newSocket
{
    NTESTPCircularBufferClear(self.recvBuffer);
    [self.sockets addObject:newSocket];
    [newSocket readDataWithTimeout:-1 tag:0];
}

/// Reassembles framed packets: the extension writes an NTESPacketHead-sized
/// header first, then the payload in one or more chunks.
- (void)socket:(GCDAsyncSocket *)sock didReadData:(NSData *)data withTag:(long)tag
{
    // Fix: the original tracked progress in uint64_t variables and used -1 as
    // a sentinel, silently relying on unsigned wrap-around. Use an explicit
    // completion flag instead.
    static uint64_t currentDataSize = 0;
    static uint64_t targetDataSize = 0;
    static BOOL frameComplete = NO;

    BOOL isHeader = NO;
    if (data.length == sizeof(NTESPacketHead)) { // might be a frame header
        NTESPacketHead *header = (NTESPacketHead *)data.bytes;
        if (header->version == 1 && header->command_id == 1 && header->service_id == 1) {
            isHeader = YES;
            targetDataSize = header->data_len;
            currentDataSize = 0;
            frameComplete = NO;
        }
    } else {
        currentDataSize += data.length;
    }

    if (isHeader) {
        // a. New header arrived: flush/clear whatever is buffered first.
        [self handleRecvBuffer];
        NTESTPCircularBufferProduceBytes(self.recvBuffer,
                                         data.bytes,
                                         (int32_t)data.length);
    } else if (currentDataSize >= targetDataSize && !frameComplete) {
        // b. With this chunk the buffer holds a complete frame.
        NTESTPCircularBufferProduceBytes(self.recvBuffer,
                                         data.bytes,
                                         (int32_t)data.length);
        frameComplete = YES;
        [self handleRecvBuffer];
    } else {
        // c. Not a full frame yet — accumulate only.
        NTESTPCircularBufferProduceBytes(self.recvBuffer,
                                         data.bytes,
                                         (int32_t)data.length);
    }
    [sock readDataWithTimeout:-1 tag:0];
}

/// Pops one complete frame (header + payload) out of the circular buffer and
/// forwards the payload to -onRecvData:. Clears the buffer in every path
/// (one frame is processed per call).
- (void)handleRecvBuffer {
    if (!self.sockets.count) {
        return;
    }

    int32_t availableBytes = 0;
    void *buffer = NTESTPCircularBufferTail(self.recvBuffer, &availableBytes);
    int32_t headSize = sizeof(NTESPacketHead);

    if (buffer == NULL || availableBytes <= headSize) {
        // Not even a full header yet.
        NTESTPCircularBufferClear(self.recvBuffer);
        return;
    }

    NTESPacketHead head;
    memset(&head, 0, sizeof(head));
    memcpy(&head, buffer, headSize);
    uint64_t dataLen = head.data_len;

    if (dataLen > (uint64_t)(availableBytes - headSize) && dataLen > 0) {
        // Payload not fully buffered.
        NTESTPCircularBufferClear(self.recvBuffer);
        return;
    }

    void *data = malloc(dataLen);
    if (data == NULL) { // Fix: malloc result was used unchecked
        NTESTPCircularBufferClear(self.recvBuffer);
        return;
    }
    // Fix: cast before pointer arithmetic — arithmetic on void* is a GNU extension.
    memcpy(data, (uint8_t *)buffer + headSize, dataLen);
    NTESTPCircularBufferClear(self.recvBuffer); // one frame consumed, reset

    // (The original's respondsToSelector: check on a statically compiled-in
    // method was always true and has been removed.)
    @autoreleasepool {
        [self onRecvData:[NSData dataWithBytes:data length:dataLen]];
    }

    free(data);
}

/// Forwards resolution / orientation preference changes to the first
/// connected client (the broadcast extension). Messaging a nil socket is a
/// harmless no-op.
- (void)defaultsChanged:(NSNotification *)notification
{
    GCDAsyncSocket *socket = self.sockets.firstObject; // nil-safe replacement for count ? [0] : nil

    NSUserDefaults *defaults = (NSUserDefaults *)[notification object];
    id setting = nil;

    // Resolution
    static NSInteger quality;
    setting = [defaults objectForKey:@"videochat_preferred_video_quality"];
    if (setting && quality != [setting integerValue]) {
        quality = [setting integerValue];
        NTESPacketHead head;
        // Fix: serial_id was left uninitialized in this branch (stack garbage
        // was sent over the wire); zero the whole struct first.
        memset(&head, 0, sizeof(head));
        head.service_id = 0;
        head.command_id = 1; // 1: resolution  2: crop ratio  3: orientation
        head.data_len = 0;
        head.version = 0;
        NSString *str = [NSString stringWithFormat:@"%d", [setting intValue]];
        [socket writeData:[NTESSocketPacket packetWithBuffer:[str dataUsingEncoding:NSUTF8StringEncoding] head:&head] withTimeout:-1 tag:0];
    }

    // Orientation
    static NSInteger orientation;
    setting = [defaults objectForKey:@"videochat_preferred_video_orientation"];
    if (setting && orientation != [setting integerValue]) {
        orientation = [setting integerValue];
        NTESPacketHead head;
        memset(&head, 0, sizeof(head));
        head.service_id = 0;
        head.command_id = 3; // 1: resolution  2: crop ratio  3: orientation
        head.data_len = 0;
        head.version = 0;
        head.serial_id = 0;
        NSString *str = [NSString stringWithFormat:@"%@", setting];
        [socket writeData:[NTESSocketPacket packetWithBuffer:[str dataUsingEncoding:NSUTF8StringEncoding] head:&head] withTimeout:-1 tag:0];
    }
}

#pragma mark - NTESSocketDelegate

/// Decodes one received I420 frame on a global queue and delivers it via
/// `testBlock`, then releases the created CMSampleBuffer.
- (void)onRecvData:(NSData *)data
{
    static int i = 0;
    i++;
    dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
        NTESI420Frame *frame = [NTESI420Frame initWithData:data];
        CMSampleBufferRef sampleBuffer = [frame convertToSampleBuffer];
        NSLog(@"收到了%d条数据", i);
        NSString *testText = [NSString stringWithFormat:@"收到了%d条数据", i];
        if (self.testBlock) {
            self.testBlock(testText, sampleBuffer);
        }
        if (sampleBuffer) {
            // Balance the +1 reference from -convertToSampleBuffer (this is the
            // leak guard the original comment referred to).
            CFRelease(sampleBuffer);
        }
    });
}

@end
-------------------------------------------------------------------------------- /ReplyKitDemo-Socket/JWExtensionKit/FIAgoraSampleHandlerSocketManager.h: --------------------------------------------------------------------------------
//
//  FIAgoraSocketManager.h
//  FIAgoraVideo
//
//  Created by flagadmin on 2020/5/7.
//  Copyright © 2020 flagadmin. All rights reserved.
//

#import <Foundation/Foundation.h> // NOTE(review): stripped <...> import targets restored — confirm against original project
#import <CoreMedia/CoreMedia.h>

NS_ASSUME_NONNULL_BEGIN

/// Extension-side socket manager: connects to the host app over loopback and
/// streams captured video frames to it.
@interface FIAgoraSampleHandlerSocketManager : NSObject
+ (FIAgoraSampleHandlerSocketManager *)sharedManager;
/// Connects to the host app's listening socket (127.0.0.1:8999).
- (void)setUpSocket;
/// Disconnects and releases socket resources.
- (void)socketDelloc;
/// Converts the sample buffer to I420 and writes it to the host app.
- (void)sendVideoBufferToHostApp:(CMSampleBufferRef)sampleBuffer;

@end

NS_ASSUME_NONNULL_END
-------------------------------------------------------------------------------- /ReplyKitDemo-Socket/JWExtensionKit/FIAgoraSampleHandlerSocketManager.m: --------------------------------------------------------------------------------
//
//  FIAgoraSocketManager.m
//  FIAgoraVideo
//
//  Created by flagadmin on 2020/5/7.
//  Copyright © 2020 flagadmin. All rights reserved.
//
#import "NTESYUVConverter.h"
#import "NTESI420Frame.h"
#import "GCDAsyncSocket.h"
#import "NTESSocketPacket.h"
#import "NTESTPCircularBuffer.h"
#import "FIAgoraSampleHandlerSocketManager.h"

// NOTE(review): the <GCDAsyncSocketDelegate> conformance was stripped by the
// text dump; restored — this class implements the delegate callbacks below.
@interface FIAgoraSampleHandlerSocketManager () <GCDAsyncSocketDelegate>

@property (nonatomic, assign) CGFloat cropRate;            // width/height crop ratio (9:16 by default)
@property (nonatomic, assign) CGSize targetSize;           // output resolution for converted frames
@property (nonatomic, assign) NTESVideoPackOrientation orientation;

@property (nonatomic, copy) NSString *ip;
@property (nonatomic, copy) NSString *clientPort;
@property (nonatomic, copy) NSString *serverPort;
@property (nonatomic, strong) dispatch_queue_t videoQueue; // serial queue for frame conversion
@property (nonatomic, assign) NSUInteger frameCount;       // frames currently in flight (backpressure)
@property (nonatomic, assign) BOOL connected;
@property (nonatomic, strong) dispatch_source_t timer;

@property (nonatomic, strong) GCDAsyncSocket *socket;
@property (nonatomic, strong) dispatch_queue_t queue;
/// malloc'd in -setupSocket, freed in -socketDelloc.
@property (nonatomic, assign) NTESTPCircularBuffer *recvBuffer;

@end

@implementation FIAgoraSampleHandlerSocketManager

+ (FIAgoraSampleHandlerSocketManager *)sharedManager {
    static FIAgoraSampleHandlerSocketManager *shareInstance = nil;
    static dispatch_once_t onceToken;
    dispatch_once(&onceToken, ^{
        shareInstance = [[self alloc] init];
        shareInstance.targetSize = CGSizeMake(540, 960);
        shareInstance.cropRate = 9.0 / 16;
        shareInstance.orientation = NTESVideoPackOrientationPortrait;
        shareInstance.ip = @"127.0.0.1";
        shareInstance.serverPort = @"8898";
        shareInstance.clientPort = [NSString stringWithFormat:@"%d", arc4random() % 9999];
        shareInstance.videoQueue = dispatch_queue_create("com.netease.edu.rp.videoprocess", DISPATCH_QUEUE_SERIAL);
    });
    return shareInstance;
}

/// Public entry point. Fix: the original duplicated the whole body of
/// -setupSocket here; delegate instead so the two can never drift apart.
- (void)setUpSocket {
    [self setupSocket];
}

/// Disconnects and releases socket resources.
- (void)socketDelloc {
    _connected = NO;

    if (_socket) {
        [_socket disconnect];
        _socket = nil;
    }
    if (_recvBuffer) {
        NTESTPCircularBufferCleanup(_recvBuffer);
        // Fix: the malloc'd buffer struct was never freed (the original's own
        // "需要释放" comment acknowledged this).
        free(_recvBuffer);
        _recvBuffer = NULL;
    }
    if (_timer) {
        _timer = nil;
    }
}

#pragma mark - Resolution / orientation control packets

/// Handles control packets from the host app:
/// command_id 1 = resolution, 2 = crop ratio (unused), 3 = orientation.
- (void)onRecvData:(NSData *)data head:(NTESPacketHead *)head
{
    if (!data) {
        return;
    }

    switch (head->command_id) {
        case 1: {
            // Fix: [data bytes] is not NUL-terminated, so stringWithUTF8String:
            // could read past the buffer; decode the NSData directly instead.
            NSString *qualityStr = [[NSString alloc] initWithData:data encoding:NSUTF8StringEncoding];
            int qualit = [qualityStr intValue];
            switch (qualit) {
                case 0: // NOTE(review): 0 and 4 intentionally map to the same size in the original
                    self.targetSize = CGSizeMake(480, 640);
                    break;
                case 1:
                    self.targetSize = CGSizeMake(144, 177);
                    break;
                case 2:
                    self.targetSize = CGSizeMake(288, 352);
                    break;
                case 3:
                    self.targetSize = CGSizeMake(320, 480);
                    break;
                case 4:
                    self.targetSize = CGSizeMake(480, 640);
                    break;
                case 5:
                    self.targetSize = CGSizeMake(540, 960);
                    break;
                case 6:
                    self.targetSize = CGSizeMake(720, 1280);
                    break;
                default:
                    break;
            }
            // Fix: @(CGSize) is not a valid boxed expression; log scalars instead.
            NSLog(@"change target size %@x%@", @(self.targetSize.width), @(self.targetSize.height));
            break;
        }
        case 2:
            break;
        case 3: {
            NSString *orientationStr = [[NSString alloc] initWithData:data encoding:NSUTF8StringEncoding];
            int orient = [orientationStr intValue];
            switch (orient) {
                case 0:
                    self.orientation = NTESVideoPackOrientationPortrait;
                    break;
                case 1:
                    self.orientation = NTESVideoPackOrientationLandscapeLeft;
                    break;
                case 2:
                    self.orientation = NTESVideoPackOrientationPortraitUpsideDown;
                    break;
                case 3:
                    self.orientation = NTESVideoPackOrientationLandscapeRight;
                    break;
                default:
                    break;
            }
            NSLog(@"change orientation %@", @(self.orientation));
            break;
        }
        default:
            break;
    }
}

#pragma mark - Process

/// Converts the captured frame to I420 on the serial video queue and writes
/// it (header packet + raw payload) to the host app.
- (void)sendVideoBufferToHostApp:(CMSampleBufferRef)sampleBuffer {
    if (!self.socket) {
        return;
    }
    CFRetain(sampleBuffer); // keep the buffer alive across the async hop

    dispatch_async(self.videoQueue, ^{
        @autoreleasepool {
            if (self.frameCount > 1000) { // drop frames when the writer falls behind
                CFRelease(sampleBuffer);
                return;
            }
            self.frameCount++;
            CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
            NTESI420Frame *videoFrame = nil;
            if (pixelBuffer) { // Fix: guard against samples without an image buffer
                videoFrame = [NTESYUVConverter pixelBufferToI420:pixelBuffer
                                                        withCrop:self.cropRate
                                                      targetSize:self.targetSize
                                                  andOrientation:self.orientation];
            }
            CFRelease(sampleBuffer);

            // Ship to the host app: header first, payload second.
            if (videoFrame) {
                NSData *raw = [videoFrame bytes];
                NSData *headerData = [NTESSocketPacket packetWithBuffer:raw];
                [self.socket writeData:headerData withTimeout:5 tag:0];
                [self.socket writeData:raw withTimeout:5 tag:0];
            }
            self.frameCount--;
        }
    });
}

/// Builds header + payload for `rawData` (version 1, data_len set).
/// Returns nil for empty input.
- (NSData *)packetWithBuffer:(NSData *)rawData
{
    if (rawData.length == 0) {
        return nil; // Fix: NULL → nil for an object return
    }
    // Fix: the original heap-allocated both a scratch buffer and the head
    // struct (two malloc/free pairs) just to copy a POD — use the stack.
    NTESPacketHead head;
    memset(&head, 0, sizeof(head));
    head.version = 1;
    head.command_id = 0;
    head.service_id = 0;
    head.serial_id = 0;
    head.data_len = (uint32_t)rawData.length;

    NSMutableData *mutableData = [NSMutableData dataWithBytes:&head length:sizeof(head)];
    [mutableData appendData:rawData];
    return [mutableData copy];
}

/// C-buffer variant: returns header + payload, reporting the total size via
/// `packetSize`. Returns nil for empty input.
- (NSData *)packetWithBuffer:(const void *)buffer
                        size:(size_t)size
                  packetSize:(size_t *)packetSize
{
    if (0 == size) {
        return nil;
    }

    size_t headSize = sizeof(NTESPacketHead);
    NTESPacketHead head; // stack struct instead of two heap allocations
    memset(&head, 0, sizeof(head));
    head.version = 1;
    head.command_id = 0;
    head.service_id = 0;
    head.serial_id = 0;
    head.data_len = (uint32_t)size;

    if (packetSize) { // Fix: out-parameter was dereferenced unchecked
        *packetSize = size + headSize;
    }

    NSMutableData *packet = [NSMutableData dataWithCapacity:headSize + size];
    [packet appendBytes:&head length:headSize];
    [packet appendBytes:buffer length:size];
    return [packet copy];
}

#pragma mark - Socket

/// Connects to the host app at 127.0.0.1:8999 and starts the read loop.
- (void)setupSocket
{
    if (_recvBuffer == NULL) {
        // Fix: the reconnect path (-socketDidDisconnect:) calls this again;
        // guard so each reconnect does not leak a fresh allocation.
        _recvBuffer = (NTESTPCircularBuffer *)malloc(sizeof(NTESTPCircularBuffer)); // freed in -socketDelloc
        NTESTPCircularBufferInit(_recvBuffer, kRecvBufferMaxSize);
    }
    self.queue = dispatch_queue_create("com.netease.edu.rp.client", DISPATCH_QUEUE_SERIAL);
    self.socket = [[GCDAsyncSocket alloc] initWithDelegate:self delegateQueue:self.queue];
    NSError *error = nil;
    // Fix: check the return value rather than logging unconditionally.
    if (![self.socket connectToHost:@"127.0.0.1" onPort:8999 error:&error]) {
        NSLog(@"setupSocket:%@", error);
    }
    [self.socket readDataWithTimeout:-1 tag:0];
}

- (void)socket:(GCDAsyncSocket *)sock didConnectToUrl:(NSURL *)url
{
    [self.socket readDataWithTimeout:-1 tag:0];
}

- (void)socket:(GCDAsyncSocket *)sock didConnectToHost:(NSString *)host port:(uint16_t)port
{
    [self.socket readDataWithTimeout:-1 tag:0];
    self.connected = YES;
}

- (void)socket:(GCDAsyncSocket *)sock didWriteDataWithTag:(long)tag
{
    // No-op: writes are fire-and-forget.
}

- (void)socket:(GCDAsyncSocket *)sock didReadData:(NSData *)data withTag:(long)tag
{
    NTESTPCircularBufferProduceBytes(self.recvBuffer, data.bytes, (int32_t)data.length);
    [self handleRecvBuffer];
    [sock readDataWithTimeout:-1 tag:0];
}

/// Reconnects when the host-app side drops the connection.
- (void)socketDidDisconnect:(GCDAsyncSocket *)sock withError:(NSError *)err
{
    self.connected = NO;
    [self.socket disconnect];
    self.socket = nil;
    // NOTE(review): retries immediately and unconditionally; consider a
    // backoff if the host app can be gone for long periods.
    [self setupSocket];
    [self.socket readDataWithTimeout:-1 tag:0];
}

/// Consumes complete packets (header + payload) from the circular buffer and
/// dispatches them to -onRecvData:head:. Recurses while another full packet
/// remains buffered.
- (void)handleRecvBuffer {
    if (!self.socket) {
        return;
    }

    int32_t availableBytes = 0;
    void *buffer = NTESTPCircularBufferTail(self.recvBuffer, &availableBytes);
    int32_t headSize = sizeof(NTESPacketHead);

    if (buffer == NULL || availableBytes <= headSize) {
        return; // not even a full header yet — wait for more bytes
    }

    NTESPacketHead head;
    memset(&head, 0, sizeof(head));
    memcpy(&head, buffer, headSize);
    uint64_t dataLen = head.data_len;

    if (dataLen > (uint64_t)(availableBytes - headSize) && dataLen > 0) {
        return; // payload not fully received yet
    }

    void *data = malloc(dataLen);
    if (data == NULL) { // Fix: malloc result was used unchecked
        return;
    }
    // Fix: cast before pointer arithmetic — arithmetic on void* is a GNU extension.
    memcpy(data, (uint8_t *)buffer + headSize, dataLen);
    NTESTPCircularBufferConsume(self.recvBuffer, (int32_t)(headSize + dataLen));

    // (respondsToSelector: on a statically compiled-in method was always true
    // and has been removed.)
    @autoreleasepool {
        [self onRecvData:[NSData dataWithBytes:data length:dataLen] head:&head];
    }

    free(data);

    if (availableBytes - headSize - dataLen >= headSize) {
        [self handleRecvBuffer]; // another complete packet may already be buffered
    }
}

@end
-------------------------------------------------------------------------------- /ReplyKitDemo-Socket/JWExtensionKit/JWExtensionKit.docc/JWExtensionKit.md: --------------------------------------------------------------------------------
# ``JWExtensionKit``

Summary

## Overview

Text

## Topics

### Group

- ``Symbol``
-------------------------------------------------------------------------------- /ReplyKitDemo-Socket/JWExtensionKit/JWExtensionKit.h: --------------------------------------------------------------------------------
//
//  JWExtensionKit.h
//  JWExtensionKit
//
//  Created by summerxx on 2022/12/29.
//

#import <Foundation/Foundation.h> // NOTE(review): stripped <...> import target restored — confirm against original project

//! Project version number for JWExtensionKit.
FOUNDATION_EXPORT double JWExtensionKitVersionNumber;

//! Project version string for JWExtensionKit.
FOUNDATION_EXPORT const unsigned char JWExtensionKitVersionString[];

// In this header, you should import all the public headers of your framework using statements like #import <JWExtensionKit/PublicHeader.h>
-------------------------------------------------------------------------------- /ReplyKitDemo-Socket/JWExtensionKit/Socket/NTESSocket.h: --------------------------------------------------------------------------------
//
//  NTESSocket.h
//  DailyProj
//
//  Created by He on 2019/1/30.
//  Copyright © 2019 He. All rights reserved.
//

#import <Foundation/Foundation.h> // NOTE(review): stripped <...> import target restored — confirm against original project

/// Wire header prepended to every packet exchanged between the broadcast
/// extension and the host app.
typedef struct {
    uint8_t version;
    uint8_t service_id;
    uint8_t command_id;
    uint8_t serial_id;
    uint64_t data_len; // payload length in bytes
} NTESPacketHead;

// Fix: parenthesize so the macros survive surrounding operators
// (previously `x % kRecvBufferMaxSize` expanded to `x % 1024 * 1024 * 15`).
#define kRecvBufferMaxSize (1024 * 1024 * 15)
#define kRecvBufferPerSize (1024)

NS_ASSUME_NONNULL_BEGIN

// NOTE(review): the <NSObject> conformance was stripped by the text dump;
// restored per standard delegate-protocol convention.
@protocol NTESSocketDelegate <NSObject>

@optional
- (void)onRecvData:(NSData *)data;
- (void)onRecvData:(NSData *)data head:(NTESPacketHead *)head;
- (void)didDisconnected;
@end

/// Thin BSD-socket wrapper used for loopback IPC between the broadcast
/// extension and the host app.
@interface NTESSocket : NSObject
@property (nonatomic, copy) NSString *ip;
@property (nonatomic, copy) NSString *port;
// Fix: the protocol qualifier on the delegate was stripped by the dump; restored.
@property (nonatomic, weak) id<NTESSocketDelegate> delegate;

- (instancetype)initWithPort:(NSString *)port IP:(NSString *)IP;

// Server
- (BOOL)startAcceptClient;

// Client
- (BOOL)connectToServerWithPort:(NSString *)port IP:(NSString *)IP;
- (void)startRecv;

// Common
- (void)stop;
- (void)sendData:(NSData *)data;
- (void)sendData:(NSData *)data head:(NTESPacketHead *)head;

@end

NS_ASSUME_NONNULL_END
-------------------------------------------------------------------------------- /ReplyKitDemo-Socket/JWExtensionKit/Socket/NTESSocket.m: --------------------------------------------------------------------------------
//
//  NTESSocket.m
//  DailyProj
//
//  Created by He on 2019/1/30.
//  Copyright © 2019 He. All rights reserved.
//

#import "NTESSocket.h"
#import <sys/types.h>
#import <sys/socket.h>
#import <netinet/in.h>
#import <arpa/inet.h>
#import <unistd.h>
#import "NTESTPCircularBuffer.h"

@interface NTESSocket()
// Client-mode connected fd, and the listening fd in server mode. -1 when closed.
@property (nonatomic, assign) int socket;
@property (nonatomic, assign) int serverSocket;
// Peer fd produced by accept() in server mode; -1 while no client is attached.
@property (nonatomic, assign) int clientSocket;
// YES once -startAcceptClient succeeded; NO when we connected out as a client.
@property (nonatomic, assign) BOOL isServerSocket;
// Serial queue that orders every outbound send().
@property (nonatomic, strong) dispatch_queue_t queue;
// Master switch keeping the background accept/recv loops alive.
@property (atomic, assign) BOOL isWork;
// Ring buffer accumulating raw bytes until at least one full packet is present.
@property (nonatomic, assign) NTESTPCircularBuffer *recvBuffer;
@end

@implementation NTESSocket

#pragma mark - API

- (void)dealloc {
    _isWork = NO;
    if (_socket != -1) {
        close(_socket);
    }
    if (_serverSocket != -1) {
        close(_serverSocket);
    }
    if (_clientSocket != -1) {
        close(_clientSocket);
    }
    // FIX: the receive ring buffer was malloc'd in -receiveDataFromSocket: but
    // never released, leaking the buffer struct and ~15 MB of mapped VM per
    // NTESSocket instance.
    if (_recvBuffer != NULL) {
        NTESTPCircularBufferCleanup(_recvBuffer);
        free(_recvBuffer);
        _recvBuffer = NULL;
    }
}

/// Designated initializer: creates the socket and binds it to IP:port.
/// Returns nil on socket()/bind() failure.
- (instancetype)initWithPort:(NSString *)port IP:(NSString *)IP
{
    if (self = [super init]) {
        _socket = -1;
        _serverSocket = -1;
        _clientSocket = -1;
        _port = [port copy]; // honor the property's copy semantics
        _ip = [IP copy];
        _queue = dispatch_queue_create("com.netease.ddddaily.send", DISPATCH_QUEUE_SERIAL);
        if (![self setupSocket]) {
            return nil;
        }
        if (![self bindSocket]) {
            return nil;
        }
    }
    return self;
}

// Server
- (BOOL)startAcceptClient {
    return [self listenAndAccept];
}

/// Tears everything down and notifies the delegate exactly like the original:
/// close all fds, then -didDisconnected.
- (void)stop {
    NSLog(@" >> 停止");
    _isWork = NO;

    if (_socket != -1) {
        close(_socket);
        _socket = -1;
    }
    if (_serverSocket != -1) {
        close(_serverSocket);
        _serverSocket = -1;
    }
    if (_clientSocket != -1) {
        close(_clientSocket);
        _clientSocket = -1;
    }
    if (_delegate && [_delegate respondsToSelector:@selector(didDisconnected)]) {
        [_delegate didDisconnected];
    }
}

// Client
- (BOOL)connectToServerWithPort:(NSString *)port IP:(NSString *)IP {
    struct sockaddr_in addr_in;
    // FIX: the struct was previously used uninitialized; zero it so sin_zero
    // and padding do not carry stack garbage into connect().
    memset(&addr_in, 0, sizeof(addr_in));
    addr_in.sin_family = AF_INET;
    addr_in.sin_addr.s_addr = inet_addr([IP UTF8String]);
    addr_in.sin_port = htons([port intValue]);

    int success = connect(self.socket, (const struct sockaddr *)&addr_in, sizeof(struct sockaddr_in));
    if (-1 == success) {
        NSLog(@" > 连接到服务端失败 port:%@ IP:%@", port, IP);
        return NO;
    }
    self.isServerSocket = NO;
    return YES;
}

// Common

/// Starts the background recv loop on whichever fd is active for this role.
- (void)startRecv {
    if (self.isServerSocket) {
        [self receiveDataFromSocket:self.clientSocket];
    } else {
        [self receiveDataFromSocket:self.socket];
    }
}

/// Sends `data` framed behind a default head.
- (void)sendData:(NSData *)data {
    if (self.isServerSocket) {
        [self sendDataToSocket:self.clientSocket buffer:[data bytes] size:data.length];
    } else {
        [self sendDataToSocket:self.socket buffer:[data bytes] size:data.length];
    }
}

/// Sends `data` framed behind the caller-supplied `head`.
- (void)sendData:(NSData *)data head:(NTESPacketHead *)head {
    if (self.isServerSocket) {
        [self sendDataToSocket:self.clientSocket
                        buffer:[data bytes]
                          size:data.length
                          head:head];
    } else {
        // FIX: the client branch was missing, so -sendData:head: silently
        // dropped data whenever this instance was the connecting side
        // (inconsistent with -sendData:).
        [self sendDataToSocket:self.socket
                        buffer:[data bytes]
                          size:data.length
                          head:head];
    }
}

#pragma mark - Internal

/// Creates the TCP socket and enables SO_REUSEADDR so rebinding after a
/// crash/restart does not fail with EADDRINUSE.
- (BOOL)setupSocket {
    _socket = socket(AF_INET, SOCK_STREAM, 0);
    if (_socket == -1) {
        NSLog(@" > 创建socket失败");
        return NO;
    }
    // FIX: setsockopt() was previously issued before the fd was validated.
    int opt = 1;
    setsockopt(_socket, SOL_SOCKET, SO_REUSEADDR, &opt, sizeof(opt));
    return YES;
}

/// Binds the socket to self.ip / self.port.
- (BOOL)bindSocket {
    if (_socket <= 0) {
        NSLog(@" > socket创建失败");
        return NO;
    }
    struct sockaddr_in addr_in;
    // FIX: zero the address struct before populating it (was uninitialized).
    memset(&addr_in, 0, sizeof(addr_in));
    addr_in.sin_family = AF_INET;
    addr_in.sin_addr.s_addr = inet_addr([self.ip UTF8String]);
    addr_in.sin_port = htons([self.port intValue]);

    int bd = bind(_socket, (const struct sockaddr *)&addr_in, sizeof(struct sockaddr_in));
    if (-1 == bd) {
        NSLog(@" > Bind socket失败");
        return NO;
    }
    return YES;
}

/// listen() on the bound fd and accept clients on a background queue.
/// Only one client is served at a time; a new accept replaces the old fd.
- (BOOL)listenAndAccept {
    int success = listen(_socket, 10);
    if (-1 == success) {
        NSLog(@" > Listen socket失败");
        return NO;
    }
    self.isServerSocket = YES;
    // FIX: the do/while below tests isWork, which was still NO on first entry,
    // so a single failed accept() ended the server loop immediately. Mark the
    // socket as working before entering the loop; -stop still breaks it.
    self.isWork = YES;
    dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_BACKGROUND, 0), ^{
        do {
            struct sockaddr_in recvAddr;
            socklen_t recv_size = sizeof(struct sockaddr_in);
            NSLog(@" > 开始监听 %@ %@", self.ip, self.port);
            int client = accept(self.socket, (struct sockaddr *)&recvAddr, &recv_size);
            if (-1 == client) {
                NSLog(@" > 连接 客户端socket失败, 结束 %@", @(self.isWork));
            } else {
                // Single-client server: drop the previous connection, if any.
                if (self.clientSocket != -1) {
                    close(self.clientSocket);
                }
                self.clientSocket = client;
                [self receiveDataFromSocket:self.clientSocket];
            }
        } while (self.isWork);
    });
    return YES;
}

/// Runs a blocking recv() loop on `socket` (background queue), feeding bytes
/// into the ring buffer and draining complete packets to the delegate.
- (void)receiveDataFromSocket:(int)socket
{
    if (-1 == socket) {
        NSLog(@" > 接收 目标socket为空");
        return;
    }
    self.isWork = YES;
    if (_recvBuffer == NULL) {
        _recvBuffer = (NTESTPCircularBuffer *)malloc(sizeof(NTESTPCircularBuffer));
        NTESTPCircularBufferInit(_recvBuffer, kRecvBufferMaxSize);
    }

    dispatch_queue_t queue = dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_BACKGROUND, 0);
    dispatch_async(queue, ^{
        char *buffer = malloc(kRecvBufferPerSize);
        BOOL connected = YES;
        while (self.isWork && connected) {
            ssize_t size = recv(socket, buffer, kRecvBufferPerSize, 0);
            if (size <= 0) {
                // FIX: recv() == -1 (error) previously fell through and spun
                // the loop forever; treat both 0 (orderly shutdown) and -1 as
                // a disconnect. The per-iteration memset was dropped: only the
                // first `size` bytes are ever consumed.
                NSLog(@" > 断开");
                connected = NO;
            } else {
                NTESTPCircularBufferProduceBytes(self.recvBuffer, buffer, (int32_t)size);
                [self handleRecvBuffer];
            }
        }
        free(buffer);
        if (!self.isServerSocket) {
            [self stop];
        } else {
            // FIX: the dead client fd was previously only forgotten, never
            // closed, leaking one fd per disconnect.
            close(socket);
            if (self.clientSocket == socket) {
                self.clientSocket = -1;
            }
        }
    });
}

/// Frames `buffer` behind `head` and sends it on the serial send queue.
/// The packed buffer is heap-allocated here and freed after send().
- (void)sendDataToSocket:(int)socket
                  buffer:(const void *)buffer
                    size:(size_t)size
                    head:(NTESPacketHead *)head
{
    size_t packetSize = 0;
    void *packetBuffer = [self packetWithBuffer:buffer size:size packetSize:&packetSize head:head];

    if (socket <= 0 || packetBuffer == NULL || 0 == packetSize) {
        free(packetBuffer); // free(NULL) is a defined no-op
        return;
    }
    dispatch_async(self.queue, ^{
        // FIX: send() returns ssize_t; the result was previously stored in an
        // unsigned size_t, making the -1 check rely on wraparound.
        ssize_t sent = send(socket, packetBuffer, packetSize, 0);
        free(packetBuffer);
        if (sent == -1) {
            if (!self.isServerSocket) {
                [self stop];
            }
        }
    });
}

/// Frames `buffer` behind a default head and sends it on the serial send queue.
- (void)sendDataToSocket:(int)socket
                  buffer:(const void *)buffer
                    size:(size_t)size {
    if (socket == -1) {
        return;
    }

    size_t packetSize = 0;
    void *packetBuffer = [self packetWithBuffer:buffer size:size packetSize:&packetSize];

    if (socket <= 0 || packetBuffer == NULL || 0 == packetSize) {
        if (packetBuffer) {
            free(packetBuffer);
        }
        return;
    }
    dispatch_async(self.queue, ^{
        ssize_t sent = send(socket, packetBuffer, packetSize, 0);
        free(packetBuffer);
        if (sent == -1) {
            // FIX: this previously tested !self.serverSocket — the fd itself,
            // which is -1 (truthy) here — instead of the isServerSocket role
            // flag, so the client never tore down on a failed send.
            if (!self.isServerSocket) {
                [self stop];
            } else {
                self.clientSocket = -1;
            }
        }
    });
}

/// Drains the ring buffer, emitting every complete [head][payload] packet to
/// the delegate. Recurses while at least one more full head is buffered.
- (void)handleRecvBuffer {
    int32_t availableBytes = 0;
    void *buffer = NTESTPCircularBufferTail(self.recvBuffer, &availableBytes);
    int32_t headSize = sizeof(NTESPacketHead);

    if (buffer == NULL || availableBytes <= headSize) {
        return; // not even a complete head yet
    }

    NTESPacketHead head;
    memset(&head, 0, sizeof(head));
    memcpy(&head, buffer, headSize);
    uint64_t dataLen = head.data_len;

    if (dataLen > (uint64_t)(availableBytes - headSize) && dataLen > 0) {
        return; // head arrived but its payload is still incomplete
    }

    void *data = malloc(dataLen);
    if (data == NULL && dataLen > 0) {
        return; // allocation failure: leave the packet buffered
    }
    memcpy(data, (char *)buffer + headSize, dataLen);
    NTESTPCircularBufferConsume(self.recvBuffer, (int32_t)(headSize + dataLen));

    if (self.delegate && [self.delegate respondsToSelector:@selector(onRecvData:head:)]) {
        @autoreleasepool {
            [self.delegate onRecvData:[NSData dataWithBytes:data length:dataLen] head:&head];
        }
    }
    if (self.delegate && [self.delegate respondsToSelector:@selector(onRecvData:)]) {
        @autoreleasepool {
            [self.delegate onRecvData:[NSData dataWithBytes:data length:dataLen]];
        }
    }

    free(data);

    // More than one packet may have been buffered; keep draining.
    if (availableBytes - headSize - dataLen >= (uint64_t)headSize) {
        [self handleRecvBuffer];
    }
}

#pragma mark - Packet

/// Builds [default NTESPacketHead][payload] into one heap buffer.
/// Caller owns and frees the returned pointer. Returns NULL for empty input.
- (void *)packetWithBuffer:(const void *)buffer
                      size:(size_t)size
                packetSize:(size_t *)packetSize
{
    if (0 == size) {
        return NULL;
    }

    size_t headSize = sizeof(NTESPacketHead);
    void *data = malloc(headSize + size);
    if (data == NULL) {
        return NULL;
    }

    // FIX: build the head on the stack (one heap allocation was leaked-prone
    // boilerplate) and keep data_len at its declared uint64_t width — it was
    // previously truncated through a (uint32_t) cast.
    NTESPacketHead head;
    memset(&head, 0, sizeof(head));
    head.version = 1;
    head.command_id = 0;
    head.service_id = 0;
    head.serial_id = 0;
    head.data_len = (uint64_t)size;

    *packetSize = size + headSize;
    memcpy(data, &head, headSize);
    memcpy((char *)data + headSize, buffer, size);

    return data;
}

/// Builds [caller-supplied head][payload] into one heap buffer; rewrites
/// head->data_len to `size`. Caller owns and frees the returned pointer.
- (void *)packetWithBuffer:(const void *)buffer
                      size:(size_t)size
                packetSize:(size_t *)packetSize
                      head:(NTESPacketHead *)head
{
    if (0 == size) {
        return NULL;
    }
    size_t headSize = sizeof(NTESPacketHead);
    void *data = malloc(headSize + size);
    if (data == NULL) {
        return NULL;
    }

    // FIX: was truncated to uint32_t despite the uint64_t field.
    head->data_len = (uint64_t)size;

    *packetSize = size + headSize;
    memcpy(data, head, headSize);
    memcpy((char *)data + headSize, buffer, size);

    return data;
}

@end
--------------------------------------------------------------------------------
/ReplyKitDemo-Socket/JWExtensionKit/Socket/NTESSocketPacket.h:
--------------------------------------------------------------------------------
//
// NTESSocketPacket.h
// NIMEducationDemo
//
// Created by He on 2019/5/6.
// Copyright © 2019 Netease. All rights reserved.
//

#import <Foundation/Foundation.h>

// NOTE(review): this struct and the two #defines duplicate the identically
// named definitions in NTESSocket.h; importing both headers into one
// translation unit will produce a struct-redefinition error. Consider
// hoisting the shared packet head into a single common header.
typedef struct {
    uint8_t version;
    uint8_t service_id;
    uint8_t command_id;
    uint8_t serial_id;
    uint64_t data_len;   // byte count of the payload that follows this head
} NTESPacketHead;

#define kRecvBufferMaxSize 1024 * 1024 *15
#define kRecvBufferPerSize 1024

NS_ASSUME_NONNULL_BEGIN

/// Stateless helpers that serialise a payload head (and optionally the
/// payload itself) into NSData ready for writing to a socket.
@interface NTESSocketPacket : NSObject

/// Returns a head-only NSData describing `rawData` (see the .m — the payload
/// itself is intentionally not appended; presumably sent separately).
+ (NSData *)packetWithBuffer:(NSData *)rawData;
/// Returns `head` followed by `rawData` as one contiguous NSData.
/// Side effect: rewrites head->data_len to rawData.length.
+ (NSData *)packetWithBuffer:(NSData *)rawData head:(NTESPacketHead *)head;

@end

NS_ASSUME_NONNULL_END
--------------------------------------------------------------------------------
/ReplyKitDemo-Socket/JWExtensionKit/Socket/NTESSocketPacket.m:
--------------------------------------------------------------------------------
//
// NTESSocketPacket.m
// NIMEducationDemo
//
// Created by He on 2019/5/6.
// Copyright © 2019 Netease. All rights reserved.
//

#import "NTESSocketPacket.h"

@implementation NTESSocketPacket

// Builds a packet head describing `rawData` using a default header
// (version 1, service/command 1, auto-incrementing serial number).
// NOTE(review): only the head is appended — the `appendData:rawData` line is
// commented out while data_len is still set to rawData.length, so the result
// is a 16-byte head announcing a payload it does not contain. Presumably the
// caller writes rawData to the socket immediately after this head; confirm
// against the FIAgora socket managers before treating this as a bug.
+ (NSData *)packetWithBuffer:(NSData *)rawData
{
    NSMutableData *mutableData = [NSMutableData data];
    @autoreleasepool {
        if (rawData.length == 0)
        {
            // Returns NULL (nil) for empty input; callers must tolerate nil.
            return NULL;
        }
        // Monotonic per-process serial counter. Not synchronized — assumes all
        // packets are built from a single queue (TODO confirm).
        static uint64_t serial_id = 0;
        size_t size = rawData.length;
        void *data = malloc(sizeof(NTESPacketHead));
        NTESPacketHead *head = (NTESPacketHead *)malloc(sizeof(NTESPacketHead));
        head->version = 1;
        head->command_id = 1;
        head->service_id = 1;
        head->serial_id = serial_id++;
        // NOTE(review): data_len is declared uint64_t but the value is passed
        // through a (uint32_t) cast here, truncating payloads >= 4 GB.
        head->data_len = (uint32_t)size;

        size_t headSize = sizeof(NTESPacketHead);
        memcpy(data, head, headSize);
        NSData *headData = [NSData dataWithBytes:data length:headSize];
        [mutableData appendData:headData];
        // [mutableData appendData:rawData];

        free(data);
        free(head);
    }
    return [mutableData copy];
}

// Serialises `head` followed by `rawData` (when non-nil) into one NSData.
// Side effect: rewrites head->data_len to rawData.length before copying, so
// the caller's struct is mutated.
+ (NSData *)packetWithBuffer:(NSData *)rawData head:(NTESPacketHead *)head
{
    if (rawData)
    {
        head->data_len = rawData.length;
    }

    NSMutableData *mutableData = [NSMutableData data];
    NSData *headData = [NSData dataWithBytes:head length:sizeof(NTESPacketHead)];
    [mutableData appendData:headData];

    if (rawData)
    {
        [mutableData appendData:rawData];
    }
    return mutableData.copy;
}

@end
--------------------------------------------------------------------------------
/ReplyKitDemo-Socket/JWExtensionKit/Socket/NTESTPCircularBuffer.c:
--------------------------------------------------------------------------------
//
// NTESTPCircularBuffer.c
// Circular/Ring buffer implementation
//
// https://github.com/michaeltyson/TPCircularBuffer
//
// Created by Michael Tyson on 10/12/2011.
8 | // 9 | // Copyright (C) 2012-2013 A Tasty Pixel 10 | // 11 | // This software is provided 'as-is', without any express or implied 12 | // warranty. In no event will the authors be held liable for any damages 13 | // arising from the use of this software. 14 | // 15 | // Permission is granted to anyone to use this software for any purpose, 16 | // including commercial applications, and to alter it and redistribute it 17 | // freely, subject to the following restrictions: 18 | // 19 | // 1. The origin of this software must not be misrepresented; you must not 20 | // claim that you wrote the original software. If you use this software 21 | // in a product, an acknowledgment in the product documentation would be 22 | // appreciated but is not required. 23 | // 24 | // 2. Altered source versions must be plainly marked as such, and must not be 25 | // misrepresented as being the original software. 26 | // 27 | // 3. This notice may not be removed or altered from any source distribution. 28 | // 29 | 30 | #include "NTESTPCircularBuffer.h" 31 | #include 32 | #include 33 | #include 34 | 35 | #define reportNTESResult(result,operation) (_reportNTESResult((result),(operation),strrchr(__FILE__, '/')+1,__LINE__)) 36 | static inline bool _reportNTESResult(kern_return_t result, const char *operation, const char* file, int line) { 37 | if ( result != ERR_SUCCESS ) { 38 | printf("%s:%d: %s: %s\n", file, line, operation, mach_error_string(result)); 39 | return false; 40 | } 41 | return true; 42 | } 43 | 44 | bool _NTESTPCircularBufferInit(NTESTPCircularBuffer *buffer, int32_t length, size_t structSize) { 45 | 46 | assert(length > 0); 47 | 48 | if ( structSize != sizeof(NTESTPCircularBuffer) ) { 49 | fprintf(stderr, "NTESTPCircularBuffer: Header version mismatch. 
Check for old versions of NTESTPCircularBuffer in your project\n"); 50 | abort(); 51 | } 52 | 53 | // Keep trying until we get our buffer, needed to handle race conditions 54 | int retries = 3; 55 | while ( true ) { 56 | 57 | buffer->length = (int32_t)round_page(length); // We need whole page sizes 58 | 59 | // Temporarily allocate twice the length, so we have the contiguous address space to 60 | // support a second instance of the buffer directly after 61 | vm_address_t bufferAddress; 62 | kern_return_t result = vm_allocate(mach_task_self(), 63 | &bufferAddress, 64 | buffer->length * 2, 65 | VM_FLAGS_ANYWHERE); // allocate anywhere it'll fit 66 | if ( result != ERR_SUCCESS ) { 67 | if ( retries-- == 0 ) { 68 | reportNTESResult(result, "Buffer allocation"); 69 | return false; 70 | } 71 | // Try again if we fail 72 | continue; 73 | } 74 | 75 | // Now replace the second half of the allocation with a virtual copy of the first half. Deallocate the second half... 76 | result = vm_deallocate(mach_task_self(), 77 | bufferAddress + buffer->length, 78 | buffer->length); 79 | if ( result != ERR_SUCCESS ) { 80 | if ( retries-- == 0 ) { 81 | reportNTESResult(result, "Buffer deallocation"); 82 | return false; 83 | } 84 | // If this fails somehow, deallocate the whole region and try again 85 | vm_deallocate(mach_task_self(), bufferAddress, buffer->length); 86 | continue; 87 | } 88 | 89 | // Re-map the buffer to the address space immediately after the buffer 90 | vm_address_t virtualAddress = bufferAddress + buffer->length; 91 | vm_prot_t cur_prot, max_prot; 92 | result = vm_remap(mach_task_self(), 93 | &virtualAddress, // mirror target 94 | buffer->length, // size of mirror 95 | 0, // auto alignment 96 | 0, // force remapping to virtualAddress 97 | mach_task_self(), // same task 98 | bufferAddress, // mirror source 99 | 0, // MAP READ-WRITE, NOT COPY 100 | &cur_prot, // unused protection struct 101 | &max_prot, // unused protection struct 102 | VM_INHERIT_DEFAULT); 103 | if ( 
result != ERR_SUCCESS ) { 104 | if ( retries-- == 0 ) { 105 | reportNTESResult(result, "Remap buffer memory"); 106 | return false; 107 | } 108 | // If this remap failed, we hit a race condition, so deallocate and try again 109 | vm_deallocate(mach_task_self(), bufferAddress, buffer->length); 110 | continue; 111 | } 112 | 113 | if ( virtualAddress != bufferAddress+buffer->length ) { 114 | // If the memory is not contiguous, clean up both allocated buffers and try again 115 | if ( retries-- == 0 ) { 116 | printf("Couldn't map buffer memory to end of buffer\n"); 117 | return false; 118 | } 119 | 120 | vm_deallocate(mach_task_self(), virtualAddress, buffer->length); 121 | vm_deallocate(mach_task_self(), bufferAddress, buffer->length); 122 | continue; 123 | } 124 | 125 | buffer->buffer = (void*)bufferAddress; 126 | buffer->fillCount = 0; 127 | buffer->head = buffer->tail = 0; 128 | buffer->atomic = true; 129 | 130 | return true; 131 | } 132 | return false; 133 | } 134 | 135 | void NTESTPCircularBufferCleanup(NTESTPCircularBuffer *buffer) { 136 | vm_deallocate(mach_task_self(), (vm_address_t)buffer->buffer, buffer->length * 2); 137 | memset(buffer, 0, sizeof(NTESTPCircularBuffer)); 138 | } 139 | 140 | void NTESTPCircularBufferClear(NTESTPCircularBuffer *buffer) { 141 | int32_t fillCount; 142 | if ( NTESTPCircularBufferTail(buffer, &fillCount) ) { 143 | NTESTPCircularBufferConsume(buffer, fillCount); 144 | } 145 | } 146 | 147 | void NTESTPCircularBufferSetAtomic(NTESTPCircularBuffer *buffer, bool atomic) { 148 | buffer->atomic = atomic; 149 | } 150 | -------------------------------------------------------------------------------- /ReplyKitDemo-Socket/JWExtensionKit/Socket/NTESTPCircularBuffer.h: -------------------------------------------------------------------------------- 1 | // 2 | // NTESTPCircularBuffer.h 3 | // Circular/Ring buffer implementation 4 | // 5 | // https://github.com/michaeltyson/TPCircularBuffer 6 | // 7 | // Created by Michael Tyson on 10/12/2011. 
8 | // 9 | // 10 | // This implementation makes use of a virtual memory mapping technique that inserts a virtual copy 11 | // of the buffer memory directly after the buffer's end, negating the need for any buffer wrap-around 12 | // logic. Clients can simply use the returned memory address as if it were contiguous space. 13 | // 14 | // The implementation is thread-safe in the case of a single producer and single consumer. 15 | // 16 | // Virtual memory technique originally proposed by Philip Howard (http://vrb.slashusr.org/), and 17 | // adapted to Darwin by Kurt Revis (http://www.snoize.com, 18 | // http://www.snoize.com/Code/PlayBufferedSoundFile.tar.gz) 19 | // 20 | // 21 | // Copyright (C) 2012-2013 A Tasty Pixel 22 | // 23 | // This software is provided 'as-is', without any express or implied 24 | // warranty. In no event will the authors be held liable for any damages 25 | // arising from the use of this software. 26 | // 27 | // Permission is granted to anyone to use this software for any purpose, 28 | // including commercial applications, and to alter it and redistribute it 29 | // freely, subject to the following restrictions: 30 | // 31 | // 1. The origin of this software must not be misrepresented; you must not 32 | // claim that you wrote the original software. If you use this software 33 | // in a product, an acknowledgment in the product documentation would be 34 | // appreciated but is not required. 35 | // 36 | // 2. Altered source versions must be plainly marked as such, and must not be 37 | // misrepresented as being the original software. 38 | // 39 | // 3. This notice may not be removed or altered from any source distribution. 
40 | // 41 | 42 | #ifndef NTESTPCircularBuffer_h 43 | #define NTESTPCircularBuffer_h 44 | 45 | #include 46 | #include 47 | #include 48 | #include 49 | 50 | #ifdef __cplusplus 51 | extern "C" { 52 | #endif 53 | 54 | typedef struct { 55 | void *buffer; 56 | int32_t length; 57 | int32_t tail; 58 | int32_t head; 59 | volatile int32_t fillCount; 60 | bool atomic; 61 | } NTESTPCircularBuffer; 62 | 63 | /*! 64 | * Initialise buffer 65 | * 66 | * Note that the length is advisory only: Because of the way the 67 | * memory mirroring technique works, the true buffer length will 68 | * be multiples of the device page size (e.g. 4096 bytes) 69 | * 70 | * If you intend to use the AudioBufferList utilities, you should 71 | * always allocate a bit more space than you need for pure audio 72 | * data, so there's room for the metadata. How much extra is required 73 | * depends on how many AudioBufferList structures are used, which is 74 | * a function of how many audio frames each buffer holds. A good rule 75 | * of thumb is to add 15%, or at least another 2048 bytes or so. 76 | * 77 | * @param buffer Circular buffer 78 | * @param length Length of buffer 79 | */ 80 | #define NTESTPCircularBufferInit(buffer, length) \ 81 | _NTESTPCircularBufferInit(buffer, length, sizeof(*buffer)) 82 | bool _NTESTPCircularBufferInit(NTESTPCircularBuffer *buffer, int32_t length, size_t structSize); 83 | 84 | /*! 85 | * Cleanup buffer 86 | * 87 | * Releases buffer resources. 88 | */ 89 | void NTESTPCircularBufferCleanup(NTESTPCircularBuffer *buffer); 90 | 91 | /*! 92 | * Clear buffer 93 | * 94 | * Resets buffer to original, empty state. 95 | * 96 | * This is safe for use by consumer while producer is accessing 97 | * buffer. 98 | */ 99 | void NTESTPCircularBufferClear(NTESTPCircularBuffer *buffer); 100 | 101 | /*! 102 | * Set the atomicity 103 | * 104 | * If you set the atomiticy to false using this method, the buffer will 105 | * not use atomic operations. 
This can be used to give the compiler a little 106 | * more optimisation opportunities when the buffer is only used on one thread. 107 | * 108 | * Important note: Only set this to false if you know what you're doing! 109 | * 110 | * The default value is true (the buffer will use atomic operations) 111 | * 112 | * @param buffer Circular buffer 113 | * @param atomic Whether the buffer is atomic (default true) 114 | */ 115 | void NTESTPCircularBufferSetAtomic(NTESTPCircularBuffer *buffer, bool atomic); 116 | 117 | // Reading (consuming) 118 | 119 | /*! 120 | * Access end of buffer 121 | * 122 | * This gives you a pointer to the end of the buffer, ready 123 | * for reading, and the number of available bytes to read. 124 | * 125 | * @param buffer Circular buffer 126 | * @param availableBytes On output, the number of bytes ready for reading 127 | * @return Pointer to the first bytes ready for reading, or NULL if buffer is empty 128 | */ 129 | static __inline__ __attribute__((always_inline)) void* NTESTPCircularBufferTail(NTESTPCircularBuffer *buffer, int32_t* availableBytes) { 130 | *availableBytes = buffer->fillCount; 131 | if ( *availableBytes == 0 ) return NULL; 132 | return (void*)((char*)buffer->buffer + buffer->tail); 133 | } 134 | 135 | /*! 136 | * Consume bytes in buffer 137 | * 138 | * This frees up the just-read bytes, ready for writing again. 139 | * 140 | * @param buffer Circular buffer 141 | * @param amount Number of bytes to consume 142 | */ 143 | static __inline__ __attribute__((always_inline)) void NTESTPCircularBufferConsume(NTESTPCircularBuffer *buffer, int32_t amount) { 144 | buffer->tail = (buffer->tail + amount) % buffer->length; 145 | 146 | if ( buffer->atomic ) { 147 | OSAtomicAdd32Barrier(-amount, &buffer->fillCount); 148 | } else { 149 | buffer->fillCount -= amount; 150 | } 151 | 152 | assert(buffer->fillCount >= 0); 153 | } 154 | 155 | /*! 
156 | * Access front of buffer 157 | * 158 | * This gives you a pointer to the front of the buffer, ready 159 | * for writing, and the number of available bytes to write. 160 | * 161 | * @param buffer Circular buffer 162 | * @param availableBytes On output, the number of bytes ready for writing 163 | * @return Pointer to the first bytes ready for writing, or NULL if buffer is full 164 | */ 165 | static __inline__ __attribute__((always_inline)) void* NTESTPCircularBufferHead(NTESTPCircularBuffer *buffer, int32_t* availableBytes) { 166 | *availableBytes = (buffer->length - buffer->fillCount); 167 | if ( *availableBytes == 0 ) return NULL; 168 | return (void*)((char*)buffer->buffer + buffer->head); 169 | } 170 | 171 | // Writing (producing) 172 | 173 | /*! 174 | * Produce bytes in buffer 175 | * 176 | * This marks the given section of the buffer ready for reading. 177 | * 178 | * @param buffer Circular buffer 179 | * @param amount Number of bytes to produce 180 | */ 181 | static __inline__ __attribute__((always_inline)) void NTESTPCircularBufferProduce(NTESTPCircularBuffer *buffer, int32_t amount) { 182 | buffer->head = (buffer->head + amount) % buffer->length; 183 | if ( buffer->atomic ) { 184 | OSAtomicAdd32Barrier(amount, &buffer->fillCount); 185 | } else { 186 | buffer->fillCount += amount; 187 | } 188 | assert(buffer->fillCount <= buffer->length); 189 | } 190 | 191 | /*! 192 | * Helper routine to copy bytes to buffer 193 | * 194 | * This copies the given bytes to the buffer, and marks them ready for reading. 
195 | * 196 | * @param buffer Circular buffer 197 | * @param src Source buffer 198 | * @param len Number of bytes in source buffer 199 | * @return true if bytes copied, false if there was insufficient space 200 | */ 201 | static __inline__ __attribute__((always_inline)) bool NTESTPCircularBufferProduceBytes(NTESTPCircularBuffer *buffer, const void* src, int32_t len) { 202 | int32_t space; 203 | void *ptr = NTESTPCircularBufferHead(buffer, &space); 204 | if ( space < len ) return false; 205 | memcpy(ptr, src, len); 206 | NTESTPCircularBufferProduce(buffer, len); 207 | return true; 208 | } 209 | 210 | /*! 211 | * Deprecated method 212 | */ 213 | static __inline__ __attribute__((always_inline)) __deprecated_msg("use NTESTPCircularBufferSetAtomic(false) and NTESTPCircularBufferConsume instead") 214 | void NTESTPCircularBufferConsumeNoBarrier(NTESTPCircularBuffer *buffer, int32_t amount) { 215 | buffer->tail = (buffer->tail + amount) % buffer->length; 216 | buffer->fillCount -= amount; 217 | assert(buffer->fillCount >= 0); 218 | } 219 | 220 | /*! 
221 | * Deprecated method 222 | */ 223 | static __inline__ __attribute__((always_inline)) __deprecated_msg("use NTESTPCircularBufferSetAtomic(false) and NTESTPCircularBufferProduce instead") 224 | void NTESTPCircularBufferProduceNoBarrier(NTESTPCircularBuffer *buffer, int32_t amount) { 225 | buffer->head = (buffer->head + amount) % buffer->length; 226 | buffer->fillCount += amount; 227 | assert(buffer->fillCount <= buffer->length); 228 | } 229 | 230 | #ifdef __cplusplus 231 | } 232 | #endif 233 | 234 | #endif 235 | -------------------------------------------------------------------------------- /ReplyKitDemo-Socket/JWExtensionKit/Vendors/libyuv.a: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/summerxx27/ReplayKitShareScreen-socket/a2d73805791449f0da780062d58516c75ee9ebf5/ReplyKitDemo-Socket/JWExtensionKit/Vendors/libyuv.a -------------------------------------------------------------------------------- /ReplyKitDemo-Socket/JWExtensionKit/Vendors/libyuv.h: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2011 The LibYuv Project Authors. All rights reserved. 3 | * 4 | * Use of this source code is governed by a BSD-style license 5 | * that can be found in the LICENSE file in the root of the source 6 | * tree. An additional intellectual property rights grant can be found 7 | * in the file PATENTS. All contributing project authors may 8 | * be found in the AUTHORS file in the root of the source tree. 
9 | */ 10 | 11 | #ifndef INCLUDE_LIBYUV_H_ 12 | #define INCLUDE_LIBYUV_H_ 13 | 14 | #include "libyuv/basic_types.h" 15 | #include "libyuv/compare.h" 16 | #include "libyuv/convert.h" 17 | #include "libyuv/convert_argb.h" 18 | #include "libyuv/convert_from.h" 19 | #include "libyuv/convert_from_argb.h" 20 | #include "libyuv/cpu_id.h" 21 | #include "libyuv/mjpeg_decoder.h" 22 | #include "libyuv/planar_functions.h" 23 | #include "libyuv/rotate.h" 24 | #include "libyuv/rotate_argb.h" 25 | #include "libyuv/row.h" 26 | #include "libyuv/scale.h" 27 | #include "libyuv/scale_argb.h" 28 | #include "libyuv/scale_row.h" 29 | #include "libyuv/version.h" 30 | #include "libyuv/video_common.h" 31 | 32 | #endif // INCLUDE_LIBYUV_H_ 33 | -------------------------------------------------------------------------------- /ReplyKitDemo-Socket/JWExtensionKit/Vendors/libyuv/basic_types.h: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2011 The LibYuv Project Authors. All rights reserved. 3 | * 4 | * Use of this source code is governed by a BSD-style license 5 | * that can be found in the LICENSE file in the root of the source 6 | * tree. An additional intellectual property rights grant can be found 7 | * in the file PATENTS. All contributing project authors may 8 | * be found in the AUTHORS file in the root of the source tree. 
9 | */ 10 | 11 | #ifndef INCLUDE_LIBYUV_BASIC_TYPES_H_ 12 | #define INCLUDE_LIBYUV_BASIC_TYPES_H_ 13 | 14 | #include // for NULL, size_t 15 | 16 | #if defined(_MSC_VER) && (_MSC_VER < 1600) 17 | #include // for uintptr_t on x86 18 | #else 19 | #include // for uintptr_t 20 | #endif 21 | 22 | #ifndef GG_LONGLONG 23 | #ifndef INT_TYPES_DEFINED 24 | #define INT_TYPES_DEFINED 25 | #ifdef COMPILER_MSVC 26 | typedef unsigned __int64 uint64; 27 | typedef __int64 int64; 28 | #ifndef INT64_C 29 | #define INT64_C(x) x ## I64 30 | #endif 31 | #ifndef UINT64_C 32 | #define UINT64_C(x) x ## UI64 33 | #endif 34 | #define INT64_F "I64" 35 | #else // COMPILER_MSVC 36 | #if defined(__LP64__) && !defined(__OpenBSD__) && !defined(__APPLE__) 37 | typedef unsigned long uint64; // NOLINT 38 | typedef long int64; // NOLINT 39 | #ifndef INT64_C 40 | #define INT64_C(x) x ## L 41 | #endif 42 | #ifndef UINT64_C 43 | #define UINT64_C(x) x ## UL 44 | #endif 45 | #define INT64_F "l" 46 | #else // defined(__LP64__) && !defined(__OpenBSD__) && !defined(__APPLE__) 47 | typedef unsigned long long uint64; // NOLINT 48 | typedef long long int64; // NOLINT 49 | #ifndef INT64_C 50 | #define INT64_C(x) x ## LL 51 | #endif 52 | #ifndef UINT64_C 53 | #define UINT64_C(x) x ## ULL 54 | #endif 55 | #define INT64_F "ll" 56 | #endif // __LP64__ 57 | #endif // COMPILER_MSVC 58 | typedef unsigned int uint32; 59 | typedef int int32; 60 | typedef unsigned short uint16; // NOLINT 61 | typedef short int16; // NOLINT 62 | typedef unsigned char uint8; 63 | typedef signed char int8; 64 | #endif // INT_TYPES_DEFINED 65 | #endif // GG_LONGLONG 66 | 67 | // Detect compiler is for x86 or x64. 68 | #if defined(__x86_64__) || defined(_M_X64) || \ 69 | defined(__i386__) || defined(_M_IX86) 70 | #define CPU_X86 1 71 | #endif 72 | // Detect compiler is for ARM. 
73 | #if defined(__arm__) || defined(_M_ARM) 74 | #define CPU_ARM 1 75 | #endif 76 | 77 | #ifndef ALIGNP 78 | #ifdef __cplusplus 79 | #define ALIGNP(p, t) \ 80 | (reinterpret_cast(((reinterpret_cast(p) + \ 81 | ((t) - 1)) & ~((t) - 1)))) 82 | #else 83 | #define ALIGNP(p, t) \ 84 | ((uint8*)((((uintptr_t)(p) + ((t) - 1)) & ~((t) - 1)))) /* NOLINT */ 85 | #endif 86 | #endif 87 | 88 | #if !defined(LIBYUV_API) 89 | #if defined(_WIN32) || defined(__CYGWIN__) 90 | #if defined(LIBYUV_BUILDING_SHARED_LIBRARY) 91 | #define LIBYUV_API __declspec(dllexport) 92 | #elif defined(LIBYUV_USING_SHARED_LIBRARY) 93 | #define LIBYUV_API __declspec(dllimport) 94 | #else 95 | #define LIBYUV_API 96 | #endif // LIBYUV_BUILDING_SHARED_LIBRARY 97 | #elif defined(__GNUC__) && (__GNUC__ >= 4) && !defined(__APPLE__) && \ 98 | (defined(LIBYUV_BUILDING_SHARED_LIBRARY) || \ 99 | defined(LIBYUV_USING_SHARED_LIBRARY)) 100 | #define LIBYUV_API __attribute__ ((visibility ("default"))) 101 | #else 102 | #define LIBYUV_API 103 | #endif // __GNUC__ 104 | #endif // LIBYUV_API 105 | 106 | #define LIBYUV_BOOL int 107 | #define LIBYUV_FALSE 0 108 | #define LIBYUV_TRUE 1 109 | 110 | // Visual C x86 or GCC little endian. 111 | #if defined(__x86_64__) || defined(_M_X64) || \ 112 | defined(__i386__) || defined(_M_IX86) || \ 113 | defined(__arm__) || defined(_M_ARM) || \ 114 | (defined(__BYTE_ORDER__) && __BYTE_ORDER__ == __ORDER_LITTLE_ENDIAN__) 115 | #define LIBYUV_LITTLE_ENDIAN 116 | #endif 117 | 118 | #endif // INCLUDE_LIBYUV_BASIC_TYPES_H_ 119 | -------------------------------------------------------------------------------- /ReplyKitDemo-Socket/JWExtensionKit/Vendors/libyuv/compare.h: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2011 The LibYuv Project Authors. All rights reserved. 3 | * 4 | * Use of this source code is governed by a BSD-style license 5 | * that can be found in the LICENSE file in the root of the source 6 | * tree. 
An additional intellectual property rights grant can be found 7 | * in the file PATENTS. All contributing project authors may 8 | * be found in the AUTHORS file in the root of the source tree. 9 | */ 10 | 11 | #ifndef INCLUDE_LIBYUV_COMPARE_H_ 12 | #define INCLUDE_LIBYUV_COMPARE_H_ 13 | 14 | #include "basic_types.h" 15 | 16 | #ifdef __cplusplus 17 | namespace libyuv { 18 | extern "C" { 19 | #endif 20 | 21 | // Compute a hash for specified memory. Seed of 5381 recommended. 22 | LIBYUV_API 23 | uint32 HashDjb2(const uint8* src, uint64 count, uint32 seed); 24 | 25 | // Scan an opaque argb image and return fourcc based on alpha offset. 26 | // Returns FOURCC_ARGB, FOURCC_BGRA, or 0 if unknown. 27 | LIBYUV_API 28 | uint32 ARGBDetect(const uint8* argb, int stride_argb, int width, int height); 29 | 30 | // Sum Square Error - used to compute Mean Square Error or PSNR. 31 | LIBYUV_API 32 | uint64 ComputeSumSquareError(const uint8* src_a, 33 | const uint8* src_b, int count); 34 | 35 | LIBYUV_API 36 | uint64 ComputeSumSquareErrorPlane(const uint8* src_a, int stride_a, 37 | const uint8* src_b, int stride_b, 38 | int width, int height); 39 | 40 | static const int kMaxPsnr = 128; 41 | 42 | LIBYUV_API 43 | double SumSquareErrorToPsnr(uint64 sse, uint64 count); 44 | 45 | LIBYUV_API 46 | double CalcFramePsnr(const uint8* src_a, int stride_a, 47 | const uint8* src_b, int stride_b, 48 | int width, int height); 49 | 50 | LIBYUV_API 51 | double I420Psnr(const uint8* src_y_a, int stride_y_a, 52 | const uint8* src_u_a, int stride_u_a, 53 | const uint8* src_v_a, int stride_v_a, 54 | const uint8* src_y_b, int stride_y_b, 55 | const uint8* src_u_b, int stride_u_b, 56 | const uint8* src_v_b, int stride_v_b, 57 | int width, int height); 58 | 59 | LIBYUV_API 60 | double CalcFrameSsim(const uint8* src_a, int stride_a, 61 | const uint8* src_b, int stride_b, 62 | int width, int height); 63 | 64 | LIBYUV_API 65 | double I420Ssim(const uint8* src_y_a, int stride_y_a, 66 | const uint8* src_u_a, int 
stride_u_a, 67 | const uint8* src_v_a, int stride_v_a, 68 | const uint8* src_y_b, int stride_y_b, 69 | const uint8* src_u_b, int stride_u_b, 70 | const uint8* src_v_b, int stride_v_b, 71 | int width, int height); 72 | 73 | #ifdef __cplusplus 74 | } // extern "C" 75 | } // namespace libyuv 76 | #endif 77 | 78 | #endif // INCLUDE_LIBYUV_COMPARE_H_ 79 | -------------------------------------------------------------------------------- /ReplyKitDemo-Socket/JWExtensionKit/Vendors/libyuv/compare_row.h: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2013 The LibYuv Project Authors. All rights reserved. 3 | * 4 | * Use of this source code is governed by a BSD-style license 5 | * that can be found in the LICENSE file in the root of the source 6 | * tree. An additional intellectual property rights grant can be found 7 | * in the file PATENTS. All contributing project authors may 8 | * be found in the AUTHORS file in the root of the source tree. 9 | */ 10 | 11 | #ifndef INCLUDE_LIBYUV_COMPARE_ROW_H_ 12 | #define INCLUDE_LIBYUV_COMPARE_ROW_H_ 13 | 14 | #include "basic_types.h" 15 | 16 | #ifdef __cplusplus 17 | namespace libyuv { 18 | extern "C" { 19 | #endif 20 | 21 | #if defined(__pnacl__) || defined(__CLR_VER) || \ 22 | (defined(__i386__) && !defined(__SSE2__)) 23 | #define LIBYUV_DISABLE_X86 24 | #endif 25 | // MemorySanitizer does not support assembly code yet. http://crbug.com/344505 26 | #if defined(__has_feature) 27 | #if __has_feature(memory_sanitizer) 28 | #define LIBYUV_DISABLE_X86 29 | #endif 30 | #endif 31 | 32 | // Visual C 2012 required for AVX2. 33 | #if defined(_M_IX86) && !defined(__clang__) && \ 34 | defined(_MSC_VER) && _MSC_VER >= 1700 35 | #define VISUALC_HAS_AVX2 1 36 | #endif // VisualStudio >= 2012 37 | 38 | // clang >= 3.4.0 required for AVX2. 
39 | #if defined(__clang__) && (defined(__x86_64__) || defined(__i386__)) 40 | #if (__clang_major__ > 3) || (__clang_major__ == 3 && (__clang_minor__ >= 4)) 41 | #define CLANG_HAS_AVX2 1 42 | #endif // clang >= 3.4 43 | #endif // __clang__ 44 | 45 | #if !defined(LIBYUV_DISABLE_X86) && \ 46 | defined(_M_IX86) && (defined(VISUALC_HAS_AVX2) || defined(CLANG_HAS_AVX2)) 47 | #define HAS_HASHDJB2_AVX2 48 | #endif 49 | 50 | // The following are available for Visual C and GCC: 51 | #if !defined(LIBYUV_DISABLE_X86) && \ 52 | (defined(__x86_64__) || (defined(__i386__) || defined(_M_IX86))) 53 | #define HAS_HASHDJB2_SSE41 54 | #define HAS_SUMSQUAREERROR_SSE2 55 | #endif 56 | 57 | // The following are available for Visual C and clangcl 32 bit: 58 | #if !defined(LIBYUV_DISABLE_X86) && defined(_M_IX86) && \ 59 | (defined(VISUALC_HAS_AVX2) || defined(CLANG_HAS_AVX2)) 60 | #define HAS_HASHDJB2_AVX2 61 | #define HAS_SUMSQUAREERROR_AVX2 62 | #endif 63 | 64 | // The following are available for Neon: 65 | #if !defined(LIBYUV_DISABLE_NEON) && \ 66 | (defined(__ARM_NEON__) || defined(LIBYUV_NEON) || defined(__aarch64__)) 67 | #define HAS_SUMSQUAREERROR_NEON 68 | #endif 69 | 70 | uint32 SumSquareError_C(const uint8* src_a, const uint8* src_b, int count); 71 | uint32 SumSquareError_SSE2(const uint8* src_a, const uint8* src_b, int count); 72 | uint32 SumSquareError_AVX2(const uint8* src_a, const uint8* src_b, int count); 73 | uint32 SumSquareError_NEON(const uint8* src_a, const uint8* src_b, int count); 74 | 75 | uint32 HashDjb2_C(const uint8* src, int count, uint32 seed); 76 | uint32 HashDjb2_SSE41(const uint8* src, int count, uint32 seed); 77 | uint32 HashDjb2_AVX2(const uint8* src, int count, uint32 seed); 78 | 79 | #ifdef __cplusplus 80 | } // extern "C" 81 | } // namespace libyuv 82 | #endif 83 | 84 | #endif // INCLUDE_LIBYUV_COMPARE_ROW_H_ 85 | -------------------------------------------------------------------------------- 
/ReplyKitDemo-Socket/JWExtensionKit/Vendors/libyuv/convert.h: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2011 The LibYuv Project Authors. All rights reserved. 3 | * 4 | * Use of this source code is governed by a BSD-style license 5 | * that can be found in the LICENSE file in the root of the source 6 | * tree. An additional intellectual property rights grant can be found 7 | * in the file PATENTS. All contributing project authors may 8 | * be found in the AUTHORS file in the root of the source tree. 9 | */ 10 | 11 | #ifndef INCLUDE_LIBYUV_CONVERT_H_ 12 | #define INCLUDE_LIBYUV_CONVERT_H_ 13 | 14 | #include "basic_types.h" 15 | 16 | #include "rotate.h" // For enum RotationMode. 17 | 18 | // TODO(fbarchard): fix WebRTC source to include following libyuv headers: 19 | #include "convert_argb.h" // For WebRTC I420ToARGB. b/620 20 | #include "convert_from.h" // For WebRTC ConvertFromI420. b/620 21 | #include "planar_functions.h" // For WebRTC I420Rect, CopyPlane. b/618 22 | 23 | #ifdef __cplusplus 24 | namespace libyuv { 25 | extern "C" { 26 | #endif 27 | 28 | // Convert I444 to I420. 29 | LIBYUV_API 30 | int I444ToI420(const uint8* src_y, int src_stride_y, 31 | const uint8* src_u, int src_stride_u, 32 | const uint8* src_v, int src_stride_v, 33 | uint8* dst_y, int dst_stride_y, 34 | uint8* dst_u, int dst_stride_u, 35 | uint8* dst_v, int dst_stride_v, 36 | int width, int height); 37 | 38 | // Convert I422 to I420. 39 | LIBYUV_API 40 | int I422ToI420(const uint8* src_y, int src_stride_y, 41 | const uint8* src_u, int src_stride_u, 42 | const uint8* src_v, int src_stride_v, 43 | uint8* dst_y, int dst_stride_y, 44 | uint8* dst_u, int dst_stride_u, 45 | uint8* dst_v, int dst_stride_v, 46 | int width, int height); 47 | 48 | // Copy I420 to I420. 
49 | #define I420ToI420 I420Copy 50 | LIBYUV_API 51 | int I420Copy(const uint8* src_y, int src_stride_y, 52 | const uint8* src_u, int src_stride_u, 53 | const uint8* src_v, int src_stride_v, 54 | uint8* dst_y, int dst_stride_y, 55 | uint8* dst_u, int dst_stride_u, 56 | uint8* dst_v, int dst_stride_v, 57 | int width, int height); 58 | 59 | // Convert I400 (grey) to I420. 60 | LIBYUV_API 61 | int I400ToI420(const uint8* src_y, int src_stride_y, 62 | uint8* dst_y, int dst_stride_y, 63 | uint8* dst_u, int dst_stride_u, 64 | uint8* dst_v, int dst_stride_v, 65 | int width, int height); 66 | 67 | #define J400ToJ420 I400ToI420 68 | 69 | // Convert NV12 to I420. 70 | LIBYUV_API 71 | int NV12ToI420(const uint8* src_y, int src_stride_y, 72 | const uint8* src_uv, int src_stride_uv, 73 | uint8* dst_y, int dst_stride_y, 74 | uint8* dst_u, int dst_stride_u, 75 | uint8* dst_v, int dst_stride_v, 76 | int width, int height); 77 | 78 | // Convert NV21 to I420. 79 | LIBYUV_API 80 | int NV21ToI420(const uint8* src_y, int src_stride_y, 81 | const uint8* src_vu, int src_stride_vu, 82 | uint8* dst_y, int dst_stride_y, 83 | uint8* dst_u, int dst_stride_u, 84 | uint8* dst_v, int dst_stride_v, 85 | int width, int height); 86 | 87 | // Convert YUY2 to I420. 88 | LIBYUV_API 89 | int YUY2ToI420(const uint8* src_yuy2, int src_stride_yuy2, 90 | uint8* dst_y, int dst_stride_y, 91 | uint8* dst_u, int dst_stride_u, 92 | uint8* dst_v, int dst_stride_v, 93 | int width, int height); 94 | 95 | // Convert UYVY to I420. 96 | LIBYUV_API 97 | int UYVYToI420(const uint8* src_uyvy, int src_stride_uyvy, 98 | uint8* dst_y, int dst_stride_y, 99 | uint8* dst_u, int dst_stride_u, 100 | uint8* dst_v, int dst_stride_v, 101 | int width, int height); 102 | 103 | // Convert M420 to I420. 
104 | LIBYUV_API 105 | int M420ToI420(const uint8* src_m420, int src_stride_m420, 106 | uint8* dst_y, int dst_stride_y, 107 | uint8* dst_u, int dst_stride_u, 108 | uint8* dst_v, int dst_stride_v, 109 | int width, int height); 110 | 111 | // Convert Android420 to I420. 112 | LIBYUV_API 113 | int Android420ToI420(const uint8* src_y, int src_stride_y, 114 | const uint8* src_u, int src_stride_u, 115 | const uint8* src_v, int src_stride_v, 116 | int pixel_stride_uv, 117 | uint8* dst_y, int dst_stride_y, 118 | uint8* dst_u, int dst_stride_u, 119 | uint8* dst_v, int dst_stride_v, 120 | int width, int height); 121 | 122 | // ARGB little endian (bgra in memory) to I420. 123 | LIBYUV_API 124 | int ARGBToI420(const uint8* src_frame, int src_stride_frame, 125 | uint8* dst_y, int dst_stride_y, 126 | uint8* dst_u, int dst_stride_u, 127 | uint8* dst_v, int dst_stride_v, 128 | int width, int height); 129 | 130 | // BGRA little endian (argb in memory) to I420. 131 | LIBYUV_API 132 | int BGRAToI420(const uint8* src_frame, int src_stride_frame, 133 | uint8* dst_y, int dst_stride_y, 134 | uint8* dst_u, int dst_stride_u, 135 | uint8* dst_v, int dst_stride_v, 136 | int width, int height); 137 | 138 | // ABGR little endian (rgba in memory) to I420. 139 | LIBYUV_API 140 | int ABGRToI420(const uint8* src_frame, int src_stride_frame, 141 | uint8* dst_y, int dst_stride_y, 142 | uint8* dst_u, int dst_stride_u, 143 | uint8* dst_v, int dst_stride_v, 144 | int width, int height); 145 | 146 | // RGBA little endian (abgr in memory) to I420. 147 | LIBYUV_API 148 | int RGBAToI420(const uint8* src_frame, int src_stride_frame, 149 | uint8* dst_y, int dst_stride_y, 150 | uint8* dst_u, int dst_stride_u, 151 | uint8* dst_v, int dst_stride_v, 152 | int width, int height); 153 | 154 | // RGB little endian (bgr in memory) to I420. 
155 | LIBYUV_API 156 | int RGB24ToI420(const uint8* src_frame, int src_stride_frame, 157 | uint8* dst_y, int dst_stride_y, 158 | uint8* dst_u, int dst_stride_u, 159 | uint8* dst_v, int dst_stride_v, 160 | int width, int height); 161 | 162 | // RGB big endian (rgb in memory) to I420. 163 | LIBYUV_API 164 | int RAWToI420(const uint8* src_frame, int src_stride_frame, 165 | uint8* dst_y, int dst_stride_y, 166 | uint8* dst_u, int dst_stride_u, 167 | uint8* dst_v, int dst_stride_v, 168 | int width, int height); 169 | 170 | // RGB16 (RGBP fourcc) little endian to I420. 171 | LIBYUV_API 172 | int RGB565ToI420(const uint8* src_frame, int src_stride_frame, 173 | uint8* dst_y, int dst_stride_y, 174 | uint8* dst_u, int dst_stride_u, 175 | uint8* dst_v, int dst_stride_v, 176 | int width, int height); 177 | 178 | // RGB15 (RGBO fourcc) little endian to I420. 179 | LIBYUV_API 180 | int ARGB1555ToI420(const uint8* src_frame, int src_stride_frame, 181 | uint8* dst_y, int dst_stride_y, 182 | uint8* dst_u, int dst_stride_u, 183 | uint8* dst_v, int dst_stride_v, 184 | int width, int height); 185 | 186 | // RGB12 (R444 fourcc) little endian to I420. 187 | LIBYUV_API 188 | int ARGB4444ToI420(const uint8* src_frame, int src_stride_frame, 189 | uint8* dst_y, int dst_stride_y, 190 | uint8* dst_u, int dst_stride_u, 191 | uint8* dst_v, int dst_stride_v, 192 | int width, int height); 193 | 194 | #ifdef HAVE_JPEG 195 | // src_width/height provided by capture. 196 | // dst_width/height for clipping determine final size. 197 | LIBYUV_API 198 | int MJPGToI420(const uint8* sample, size_t sample_size, 199 | uint8* dst_y, int dst_stride_y, 200 | uint8* dst_u, int dst_stride_u, 201 | uint8* dst_v, int dst_stride_v, 202 | int src_width, int src_height, 203 | int dst_width, int dst_height); 204 | 205 | // Query size of MJPG in pixels. 
206 | LIBYUV_API 207 | int MJPGSize(const uint8* sample, size_t sample_size, 208 | int* width, int* height); 209 | #endif 210 | 211 | // Convert camera sample to I420 with cropping, rotation and vertical flip. 212 | // "src_size" is needed to parse MJPG. 213 | // "dst_stride_y" number of bytes in a row of the dst_y plane. 214 | // Normally this would be the same as dst_width, with recommended alignment 215 | // to 16 bytes for better efficiency. 216 | // If rotation of 90 or 270 is used, stride is affected. The caller should 217 | // allocate the I420 buffer according to rotation. 218 | // "dst_stride_u" number of bytes in a row of the dst_u plane. 219 | // Normally this would be the same as (dst_width + 1) / 2, with 220 | // recommended alignment to 16 bytes for better efficiency. 221 | // If rotation of 90 or 270 is used, stride is affected. 222 | // "crop_x" and "crop_y" are starting position for cropping. 223 | // To center, crop_x = (src_width - dst_width) / 2 224 | // crop_y = (src_height - dst_height) / 2 225 | // "src_width" / "src_height" is size of src_frame in pixels. 226 | // "src_height" can be negative indicating a vertically flipped image source. 227 | // "crop_width" / "crop_height" is the size to crop the src to. 228 | // Must be less than or equal to src_width/src_height 229 | // Cropping parameters are pre-rotation. 230 | // "rotation" can be 0, 90, 180 or 270. 231 | // "format" is a fourcc. ie 'I420', 'YUY2' 232 | // Returns 0 for successful; -1 for invalid parameter. Non-zero for failure. 
233 | LIBYUV_API 234 | int ConvertToI420(const uint8* src_frame, size_t src_size, 235 | uint8* dst_y, int dst_stride_y, 236 | uint8* dst_u, int dst_stride_u, 237 | uint8* dst_v, int dst_stride_v, 238 | int crop_x, int crop_y, 239 | int src_width, int src_height, 240 | int crop_width, int crop_height, 241 | enum RotationMode rotation, 242 | uint32 format); 243 | 244 | #ifdef __cplusplus 245 | } // extern "C" 246 | } // namespace libyuv 247 | #endif 248 | 249 | #endif // INCLUDE_LIBYUV_CONVERT_H_ 250 | -------------------------------------------------------------------------------- /ReplyKitDemo-Socket/JWExtensionKit/Vendors/libyuv/convert_from.h: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2011 The LibYuv Project Authors. All rights reserved. 3 | * 4 | * Use of this source code is governed by a BSD-style license 5 | * that can be found in the LICENSE file in the root of the source 6 | * tree. An additional intellectual property rights grant can be found 7 | * in the file PATENTS. All contributing project authors may 8 | * be found in the AUTHORS file in the root of the source tree. 9 | */ 10 | 11 | #ifndef INCLUDE_LIBYUV_CONVERT_FROM_H_ 12 | #define INCLUDE_LIBYUV_CONVERT_FROM_H_ 13 | 14 | #include "basic_types.h" 15 | #include "rotate.h" 16 | 17 | #ifdef __cplusplus 18 | namespace libyuv { 19 | extern "C" { 20 | #endif 21 | 22 | // See Also convert.h for conversions from formats to I420. 23 | 24 | // I420Copy in convert to I420ToI420. 
25 | 26 | LIBYUV_API 27 | int I420ToI422(const uint8* src_y, int src_stride_y, 28 | const uint8* src_u, int src_stride_u, 29 | const uint8* src_v, int src_stride_v, 30 | uint8* dst_y, int dst_stride_y, 31 | uint8* dst_u, int dst_stride_u, 32 | uint8* dst_v, int dst_stride_v, 33 | int width, int height); 34 | 35 | LIBYUV_API 36 | int I420ToI444(const uint8* src_y, int src_stride_y, 37 | const uint8* src_u, int src_stride_u, 38 | const uint8* src_v, int src_stride_v, 39 | uint8* dst_y, int dst_stride_y, 40 | uint8* dst_u, int dst_stride_u, 41 | uint8* dst_v, int dst_stride_v, 42 | int width, int height); 43 | 44 | // Copy to I400. Source can be I420, I422, I444, I400, NV12 or NV21. 45 | LIBYUV_API 46 | int I400Copy(const uint8* src_y, int src_stride_y, 47 | uint8* dst_y, int dst_stride_y, 48 | int width, int height); 49 | 50 | LIBYUV_API 51 | int I420ToNV12(const uint8* src_y, int src_stride_y, 52 | const uint8* src_u, int src_stride_u, 53 | const uint8* src_v, int src_stride_v, 54 | uint8* dst_y, int dst_stride_y, 55 | uint8* dst_uv, int dst_stride_uv, 56 | int width, int height); 57 | 58 | LIBYUV_API 59 | int I420ToNV21(const uint8* src_y, int src_stride_y, 60 | const uint8* src_u, int src_stride_u, 61 | const uint8* src_v, int src_stride_v, 62 | uint8* dst_y, int dst_stride_y, 63 | uint8* dst_vu, int dst_stride_vu, 64 | int width, int height); 65 | 66 | LIBYUV_API 67 | int I420ToYUY2(const uint8* src_y, int src_stride_y, 68 | const uint8* src_u, int src_stride_u, 69 | const uint8* src_v, int src_stride_v, 70 | uint8* dst_frame, int dst_stride_frame, 71 | int width, int height); 72 | 73 | LIBYUV_API 74 | int I420ToUYVY(const uint8* src_y, int src_stride_y, 75 | const uint8* src_u, int src_stride_u, 76 | const uint8* src_v, int src_stride_v, 77 | uint8* dst_frame, int dst_stride_frame, 78 | int width, int height); 79 | 80 | LIBYUV_API 81 | int I420ToARGB(const uint8* src_y, int src_stride_y, 82 | const uint8* src_u, int src_stride_u, 83 | const uint8* src_v, int 
src_stride_v, 84 | uint8* dst_argb, int dst_stride_argb, 85 | int width, int height); 86 | 87 | LIBYUV_API 88 | int I420ToBGRA(const uint8* src_y, int src_stride_y, 89 | const uint8* src_u, int src_stride_u, 90 | const uint8* src_v, int src_stride_v, 91 | uint8* dst_argb, int dst_stride_argb, 92 | int width, int height); 93 | 94 | LIBYUV_API 95 | int I420ToABGR(const uint8* src_y, int src_stride_y, 96 | const uint8* src_u, int src_stride_u, 97 | const uint8* src_v, int src_stride_v, 98 | uint8* dst_argb, int dst_stride_argb, 99 | int width, int height); 100 | 101 | LIBYUV_API 102 | int I420ToRGBA(const uint8* src_y, int src_stride_y, 103 | const uint8* src_u, int src_stride_u, 104 | const uint8* src_v, int src_stride_v, 105 | uint8* dst_rgba, int dst_stride_rgba, 106 | int width, int height); 107 | 108 | LIBYUV_API 109 | int I420ToRGB24(const uint8* src_y, int src_stride_y, 110 | const uint8* src_u, int src_stride_u, 111 | const uint8* src_v, int src_stride_v, 112 | uint8* dst_frame, int dst_stride_frame, 113 | int width, int height); 114 | 115 | LIBYUV_API 116 | int I420ToRAW(const uint8* src_y, int src_stride_y, 117 | const uint8* src_u, int src_stride_u, 118 | const uint8* src_v, int src_stride_v, 119 | uint8* dst_frame, int dst_stride_frame, 120 | int width, int height); 121 | 122 | LIBYUV_API 123 | int I420ToRGB565(const uint8* src_y, int src_stride_y, 124 | const uint8* src_u, int src_stride_u, 125 | const uint8* src_v, int src_stride_v, 126 | uint8* dst_frame, int dst_stride_frame, 127 | int width, int height); 128 | 129 | // Convert I420 To RGB565 with 4x4 dither matrix (16 bytes). 130 | // Values in dither matrix from 0 to 7 recommended. 131 | // The order of the dither matrix is first byte is upper left. 
132 | 133 | LIBYUV_API 134 | int I420ToRGB565Dither(const uint8* src_y, int src_stride_y, 135 | const uint8* src_u, int src_stride_u, 136 | const uint8* src_v, int src_stride_v, 137 | uint8* dst_frame, int dst_stride_frame, 138 | const uint8* dither4x4, int width, int height); 139 | 140 | LIBYUV_API 141 | int I420ToARGB1555(const uint8* src_y, int src_stride_y, 142 | const uint8* src_u, int src_stride_u, 143 | const uint8* src_v, int src_stride_v, 144 | uint8* dst_frame, int dst_stride_frame, 145 | int width, int height); 146 | 147 | LIBYUV_API 148 | int I420ToARGB4444(const uint8* src_y, int src_stride_y, 149 | const uint8* src_u, int src_stride_u, 150 | const uint8* src_v, int src_stride_v, 151 | uint8* dst_frame, int dst_stride_frame, 152 | int width, int height); 153 | 154 | // Convert I420 to specified format. 155 | // "dst_sample_stride" is bytes in a row for the destination. Pass 0 if the 156 | // buffer has contiguous rows. Can be negative. A multiple of 16 is optimal. 157 | LIBYUV_API 158 | int ConvertFromI420(const uint8* y, int y_stride, 159 | const uint8* u, int u_stride, 160 | const uint8* v, int v_stride, 161 | uint8* dst_sample, int dst_sample_stride, 162 | int width, int height, 163 | uint32 format); 164 | 165 | #ifdef __cplusplus 166 | } // extern "C" 167 | } // namespace libyuv 168 | #endif 169 | 170 | #endif // INCLUDE_LIBYUV_CONVERT_FROM_H_ 171 | -------------------------------------------------------------------------------- /ReplyKitDemo-Socket/JWExtensionKit/Vendors/libyuv/convert_from_argb.h: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2012 The LibYuv Project Authors. All rights reserved. 3 | * 4 | * Use of this source code is governed by a BSD-style license 5 | * that can be found in the LICENSE file in the root of the source 6 | * tree. An additional intellectual property rights grant can be found 7 | * in the file PATENTS. 
All contributing project authors may 8 | * be found in the AUTHORS file in the root of the source tree. 9 | */ 10 | 11 | #ifndef INCLUDE_LIBYUV_CONVERT_FROM_ARGB_H_ 12 | #define INCLUDE_LIBYUV_CONVERT_FROM_ARGB_H_ 13 | 14 | #include "basic_types.h" 15 | 16 | #ifdef __cplusplus 17 | namespace libyuv { 18 | extern "C" { 19 | #endif 20 | 21 | // Copy ARGB to ARGB. 22 | #define ARGBToARGB ARGBCopy 23 | LIBYUV_API 24 | int ARGBCopy(const uint8* src_argb, int src_stride_argb, 25 | uint8* dst_argb, int dst_stride_argb, 26 | int width, int height); 27 | 28 | // Convert ARGB To BGRA. 29 | LIBYUV_API 30 | int ARGBToBGRA(const uint8* src_argb, int src_stride_argb, 31 | uint8* dst_bgra, int dst_stride_bgra, 32 | int width, int height); 33 | 34 | // Convert ARGB To ABGR. 35 | LIBYUV_API 36 | int ARGBToABGR(const uint8* src_argb, int src_stride_argb, 37 | uint8* dst_abgr, int dst_stride_abgr, 38 | int width, int height); 39 | 40 | // Convert ARGB To RGBA. 41 | LIBYUV_API 42 | int ARGBToRGBA(const uint8* src_argb, int src_stride_argb, 43 | uint8* dst_rgba, int dst_stride_rgba, 44 | int width, int height); 45 | 46 | // Convert ARGB To RGB24. 47 | LIBYUV_API 48 | int ARGBToRGB24(const uint8* src_argb, int src_stride_argb, 49 | uint8* dst_rgb24, int dst_stride_rgb24, 50 | int width, int height); 51 | 52 | // Convert ARGB To RAW. 53 | LIBYUV_API 54 | int ARGBToRAW(const uint8* src_argb, int src_stride_argb, 55 | uint8* dst_rgb, int dst_stride_rgb, 56 | int width, int height); 57 | 58 | // Convert ARGB To RGB565. 59 | LIBYUV_API 60 | int ARGBToRGB565(const uint8* src_argb, int src_stride_argb, 61 | uint8* dst_rgb565, int dst_stride_rgb565, 62 | int width, int height); 63 | 64 | // Convert ARGB To RGB565 with 4x4 dither matrix (16 bytes). 65 | // Values in dither matrix from 0 to 7 recommended. 66 | // The order of the dither matrix is first byte is upper left. 67 | // TODO(fbarchard): Consider pointer to 2d array for dither4x4. 
68 | // const uint8(*dither)[4][4]; 69 | LIBYUV_API 70 | int ARGBToRGB565Dither(const uint8* src_argb, int src_stride_argb, 71 | uint8* dst_rgb565, int dst_stride_rgb565, 72 | const uint8* dither4x4, int width, int height); 73 | 74 | // Convert ARGB To ARGB1555. 75 | LIBYUV_API 76 | int ARGBToARGB1555(const uint8* src_argb, int src_stride_argb, 77 | uint8* dst_argb1555, int dst_stride_argb1555, 78 | int width, int height); 79 | 80 | // Convert ARGB To ARGB4444. 81 | LIBYUV_API 82 | int ARGBToARGB4444(const uint8* src_argb, int src_stride_argb, 83 | uint8* dst_argb4444, int dst_stride_argb4444, 84 | int width, int height); 85 | 86 | // Convert ARGB To I444. 87 | LIBYUV_API 88 | int ARGBToI444(const uint8* src_argb, int src_stride_argb, 89 | uint8* dst_y, int dst_stride_y, 90 | uint8* dst_u, int dst_stride_u, 91 | uint8* dst_v, int dst_stride_v, 92 | int width, int height); 93 | 94 | // Convert ARGB To I422. 95 | LIBYUV_API 96 | int ARGBToI422(const uint8* src_argb, int src_stride_argb, 97 | uint8* dst_y, int dst_stride_y, 98 | uint8* dst_u, int dst_stride_u, 99 | uint8* dst_v, int dst_stride_v, 100 | int width, int height); 101 | 102 | // Convert ARGB To I420. (also in convert.h) 103 | LIBYUV_API 104 | int ARGBToI420(const uint8* src_argb, int src_stride_argb, 105 | uint8* dst_y, int dst_stride_y, 106 | uint8* dst_u, int dst_stride_u, 107 | uint8* dst_v, int dst_stride_v, 108 | int width, int height); 109 | 110 | // Convert ARGB to J420. (JPeg full range I420). 111 | LIBYUV_API 112 | int ARGBToJ420(const uint8* src_argb, int src_stride_argb, 113 | uint8* dst_yj, int dst_stride_yj, 114 | uint8* dst_u, int dst_stride_u, 115 | uint8* dst_v, int dst_stride_v, 116 | int width, int height); 117 | 118 | // Convert ARGB to J422. 
119 | LIBYUV_API 120 | int ARGBToJ422(const uint8* src_argb, int src_stride_argb, 121 | uint8* dst_yj, int dst_stride_yj, 122 | uint8* dst_u, int dst_stride_u, 123 | uint8* dst_v, int dst_stride_v, 124 | int width, int height); 125 | 126 | // Convert ARGB to J400. (JPeg full range). 127 | LIBYUV_API 128 | int ARGBToJ400(const uint8* src_argb, int src_stride_argb, 129 | uint8* dst_yj, int dst_stride_yj, 130 | int width, int height); 131 | 132 | // Convert ARGB to I400. 133 | LIBYUV_API 134 | int ARGBToI400(const uint8* src_argb, int src_stride_argb, 135 | uint8* dst_y, int dst_stride_y, 136 | int width, int height); 137 | 138 | // Convert ARGB to G. (Reverse of J400toARGB, which replicates G back to ARGB) 139 | LIBYUV_API 140 | int ARGBToG(const uint8* src_argb, int src_stride_argb, 141 | uint8* dst_g, int dst_stride_g, 142 | int width, int height); 143 | 144 | // Convert ARGB To NV12. 145 | LIBYUV_API 146 | int ARGBToNV12(const uint8* src_argb, int src_stride_argb, 147 | uint8* dst_y, int dst_stride_y, 148 | uint8* dst_uv, int dst_stride_uv, 149 | int width, int height); 150 | 151 | // Convert ARGB To NV21. 152 | LIBYUV_API 153 | int ARGBToNV21(const uint8* src_argb, int src_stride_argb, 154 | uint8* dst_y, int dst_stride_y, 155 | uint8* dst_vu, int dst_stride_vu, 156 | int width, int height); 157 | 158 | // Convert ARGB To NV21. 159 | LIBYUV_API 160 | int ARGBToNV21(const uint8* src_argb, int src_stride_argb, 161 | uint8* dst_y, int dst_stride_y, 162 | uint8* dst_vu, int dst_stride_vu, 163 | int width, int height); 164 | 165 | // Convert ARGB To YUY2. 166 | LIBYUV_API 167 | int ARGBToYUY2(const uint8* src_argb, int src_stride_argb, 168 | uint8* dst_yuy2, int dst_stride_yuy2, 169 | int width, int height); 170 | 171 | // Convert ARGB To UYVY. 
172 | LIBYUV_API 173 | int ARGBToUYVY(const uint8* src_argb, int src_stride_argb, 174 | uint8* dst_uyvy, int dst_stride_uyvy, 175 | int width, int height); 176 | 177 | #ifdef __cplusplus 178 | } // extern "C" 179 | } // namespace libyuv 180 | #endif 181 | 182 | #endif // INCLUDE_LIBYUV_CONVERT_FROM_ARGB_H_ 183 | -------------------------------------------------------------------------------- /ReplyKitDemo-Socket/JWExtensionKit/Vendors/libyuv/cpu_id.h: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2011 The LibYuv Project Authors. All rights reserved. 3 | * 4 | * Use of this source code is governed by a BSD-style license 5 | * that can be found in the LICENSE file in the root of the source 6 | * tree. An additional intellectual property rights grant can be found 7 | * in the file PATENTS. All contributing project authors may 8 | * be found in the AUTHORS file in the root of the source tree. 9 | */ 10 | 11 | #ifndef INCLUDE_LIBYUV_CPU_ID_H_ 12 | #define INCLUDE_LIBYUV_CPU_ID_H_ 13 | 14 | #include "basic_types.h" 15 | 16 | #ifdef __cplusplus 17 | namespace libyuv { 18 | extern "C" { 19 | #endif 20 | 21 | // Internal flag to indicate cpuid requires initialization. 22 | static const int kCpuInitialized = 0x1; 23 | 24 | // These flags are only valid on ARM processors. 25 | static const int kCpuHasARM = 0x2; 26 | static const int kCpuHasNEON = 0x4; 27 | // 0x8 reserved for future ARM flag. 28 | 29 | // These flags are only valid on x86 processors. 30 | static const int kCpuHasX86 = 0x10; 31 | static const int kCpuHasSSE2 = 0x20; 32 | static const int kCpuHasSSSE3 = 0x40; 33 | static const int kCpuHasSSE41 = 0x80; 34 | static const int kCpuHasSSE42 = 0x100; // unused at this time. 
35 | static const int kCpuHasAVX = 0x200; 36 | static const int kCpuHasAVX2 = 0x400; 37 | static const int kCpuHasERMS = 0x800; 38 | static const int kCpuHasFMA3 = 0x1000; 39 | static const int kCpuHasAVX3 = 0x2000; 40 | static const int kCpuHasF16C = 0x4000; 41 | 42 | // 0x8000 reserved for future X86 flags. 43 | 44 | // These flags are only valid on MIPS processors. 45 | static const int kCpuHasMIPS = 0x10000; 46 | static const int kCpuHasDSPR2 = 0x20000; 47 | static const int kCpuHasMSA = 0x40000; 48 | 49 | // Internal function used to auto-init. 50 | LIBYUV_API 51 | int InitCpuFlags(void); 52 | 53 | // Internal function for parsing /proc/cpuinfo. 54 | LIBYUV_API 55 | int ArmCpuCaps(const char* cpuinfo_name); 56 | 57 | // Detect CPU has SSE2 etc. 58 | // Test_flag parameter should be one of kCpuHas constants above. 59 | // returns non-zero if instruction set is detected 60 | static __inline int TestCpuFlag(int test_flag) { 61 | LIBYUV_API extern int cpu_info_; 62 | return (!cpu_info_ ? InitCpuFlags() : cpu_info_) & test_flag; 63 | } 64 | 65 | // For testing, allow CPU flags to be disabled. 66 | // ie MaskCpuFlags(~kCpuHasSSSE3) to disable SSSE3. 67 | // MaskCpuFlags(-1) to enable all cpu specific optimizations. 68 | // MaskCpuFlags(1) to disable all cpu specific optimizations. 69 | LIBYUV_API 70 | void MaskCpuFlags(int enable_flags); 71 | 72 | // Low level cpuid for X86. Returns zeros on other CPUs. 73 | // eax is the info type that you want. 74 | // ecx is typically the cpu number, and should normally be zero. 
75 | LIBYUV_API 76 | void CpuId(uint32 eax, uint32 ecx, uint32* cpu_info); 77 | 78 | #ifdef __cplusplus 79 | } // extern "C" 80 | } // namespace libyuv 81 | #endif 82 | 83 | #endif // INCLUDE_LIBYUV_CPU_ID_H_ 84 | -------------------------------------------------------------------------------- /ReplyKitDemo-Socket/JWExtensionKit/Vendors/libyuv/macros_msa.h: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2016 The LibYuv Project Authors. All rights reserved. 3 | * 4 | * Use of this source code is governed by a BSD-style license 5 | * that can be found in the LICENSE file in the root of the source 6 | * tree. An additional intellectual property rights grant can be found 7 | * in the file PATENTS. All contributing project authors may 8 | * be found in the AUTHORS file in the root of the source tree. 9 | */ 10 | 11 | #ifndef INCLUDE_LIBYUV_MACROS_MSA_H_ 12 | #define INCLUDE_LIBYUV_MACROS_MSA_H_ 13 | 14 | #if !defined(LIBYUV_DISABLE_MSA) && defined(__mips_msa) 15 | #include 16 | #include 17 | 18 | #if (__mips_isa_rev >= 6) 19 | #define LW(psrc) ({ \ 20 | uint8* psrc_lw_m = (uint8*) (psrc); /* NOLINT */ \ 21 | uint32 val_m; \ 22 | asm volatile ( \ 23 | "lw %[val_m], %[psrc_lw_m] \n\t" \ 24 | : [val_m] "=r" (val_m) \ 25 | : [psrc_lw_m] "m" (*psrc_lw_m) \ 26 | ); \ 27 | val_m; \ 28 | }) 29 | 30 | #if (__mips == 64) 31 | #define LD(psrc) ({ \ 32 | uint8* psrc_ld_m = (uint8*) (psrc); /* NOLINT */ \ 33 | uint64 val_m = 0; \ 34 | asm volatile ( \ 35 | "ld %[val_m], %[psrc_ld_m] \n\t" \ 36 | : [val_m] "=r" (val_m) \ 37 | : [psrc_ld_m] "m" (*psrc_ld_m) \ 38 | ); \ 39 | val_m; \ 40 | }) 41 | #else // !(__mips == 64) 42 | #define LD(psrc) ({ \ 43 | uint8* psrc_ld_m = (uint8*) (psrc); /* NOLINT */ \ 44 | uint32 val0_m, val1_m; \ 45 | uint64 val_m = 0; \ 46 | val0_m = LW(psrc_ld_m); \ 47 | val1_m = LW(psrc_ld_m + 4); \ 48 | val_m = (uint64) (val1_m); /* NOLINT */ \ 49 | val_m = (uint64) ((val_m << 32) & 0xFFFFFFFF00000000); /* 
NOLINT */ \ 50 | val_m = (uint64) (val_m | (uint64) val0_m); /* NOLINT */ \ 51 | val_m; \ 52 | }) 53 | #endif // (__mips == 64) 54 | #else // !(__mips_isa_rev >= 6) 55 | #define LW(psrc) ({ \ 56 | uint8* psrc_lw_m = (uint8*) (psrc); /* NOLINT */ \ 57 | uint32 val_m; \ 58 | asm volatile ( \ 59 | "ulw %[val_m], %[psrc_lw_m] \n\t" \ 60 | : [val_m] "=r" (val_m) \ 61 | : [psrc_lw_m] "m" (*psrc_lw_m) \ 62 | ); \ 63 | val_m; \ 64 | }) 65 | 66 | #if (__mips == 64) 67 | #define LD(psrc) ({ \ 68 | uint8* psrc_ld_m = (uint8*) (psrc); /* NOLINT */ \ 69 | uint64 val_m = 0; \ 70 | asm volatile ( \ 71 | "uld %[val_m], %[psrc_ld_m] \n\t" \ 72 | : [val_m] "=r" (val_m) \ 73 | : [psrc_ld_m] "m" (*psrc_ld_m) \ 74 | ); \ 75 | val_m; \ 76 | }) 77 | #else // !(__mips == 64) 78 | #define LD(psrc) ({ \ 79 | uint8* psrc_ld_m = (uint8*) (psrc); /* NOLINT */ \ 80 | uint32 val0_m, val1_m; \ 81 | uint64 val_m = 0; \ 82 | val0_m = LW(psrc_ld_m); \ 83 | val1_m = LW(psrc_ld_m + 4); \ 84 | val_m = (uint64) (val1_m); /* NOLINT */ \ 85 | val_m = (uint64) ((val_m << 32) & 0xFFFFFFFF00000000); /* NOLINT */ \ 86 | val_m = (uint64) (val_m | (uint64) val0_m); /* NOLINT */ \ 87 | val_m; \ 88 | }) 89 | #endif // (__mips == 64) 90 | #endif // (__mips_isa_rev >= 6) 91 | 92 | // TODO(fbarchard): Consider removing __VAR_ARGS versions. 93 | #define LD_B(RTYPE, psrc) *((RTYPE*)(psrc)) /* NOLINT */ 94 | #define LD_UB(...) LD_B(v16u8, __VA_ARGS__) 95 | 96 | #define ST_B(RTYPE, in, pdst) *((RTYPE*)(pdst)) = (in) /* NOLINT */ 97 | #define ST_UB(...) 
ST_B(v16u8, __VA_ARGS__) 98 | 99 | /* Description : Load two vectors with 16 'byte' sized elements 100 | Arguments : Inputs - psrc, stride 101 | Outputs - out0, out1 102 | Return Type - as per RTYPE 103 | Details : Load 16 byte elements in 'out0' from (psrc) 104 | Load 16 byte elements in 'out1' from (psrc + stride) 105 | */ 106 | #define LD_B2(RTYPE, psrc, stride, out0, out1) { \ 107 | out0 = LD_B(RTYPE, (psrc)); \ 108 | out1 = LD_B(RTYPE, (psrc) + stride); \ 109 | } 110 | #define LD_UB2(...) LD_B2(v16u8, __VA_ARGS__) 111 | 112 | #define LD_B4(RTYPE, psrc, stride, out0, out1, out2, out3) { \ 113 | LD_B2(RTYPE, (psrc), stride, out0, out1); \ 114 | LD_B2(RTYPE, (psrc) + 2 * stride , stride, out2, out3); \ 115 | } 116 | #define LD_UB4(...) LD_B4(v16u8, __VA_ARGS__) 117 | 118 | /* Description : Store two vectors with stride each having 16 'byte' sized 119 | elements 120 | Arguments : Inputs - in0, in1, pdst, stride 121 | Details : Store 16 byte elements from 'in0' to (pdst) 122 | Store 16 byte elements from 'in1' to (pdst + stride) 123 | */ 124 | #define ST_B2(RTYPE, in0, in1, pdst, stride) { \ 125 | ST_B(RTYPE, in0, (pdst)); \ 126 | ST_B(RTYPE, in1, (pdst) + stride); \ 127 | } 128 | #define ST_UB2(...) ST_B2(v16u8, __VA_ARGS__) 129 | 130 | #define ST_B4(RTYPE, in0, in1, in2, in3, pdst, stride) { \ 131 | ST_B2(RTYPE, in0, in1, (pdst), stride); \ 132 | ST_B2(RTYPE, in2, in3, (pdst) + 2 * stride, stride); \ 133 | } 134 | #define ST_UB4(...) ST_B4(v16u8, __VA_ARGS__) 135 | 136 | // TODO(fbarchard): Consider using __msa_vshf_b and __msa_ilvr_b directly. 
137 | /* Description : Shuffle byte vector elements as per mask vector 138 | Arguments : Inputs - in0, in1, in2, in3, mask0, mask1 139 | Outputs - out0, out1 140 | Return Type - as per RTYPE 141 | Details : Byte elements from 'in0' & 'in1' are copied selectively to 142 | 'out0' as per control vector 'mask0' 143 | */ 144 | #define VSHF_B2(RTYPE, in0, in1, in2, in3, mask0, mask1, out0, out1) { \ 145 | out0 = (RTYPE) __msa_vshf_b((v16i8) mask0, (v16i8) in1, (v16i8) in0); \ 146 | out1 = (RTYPE) __msa_vshf_b((v16i8) mask1, (v16i8) in3, (v16i8) in2); \ 147 | } 148 | #define VSHF_B2_UB(...) VSHF_B2(v16u8, __VA_ARGS__) 149 | 150 | /* Description : Interleave both left and right half of input vectors 151 | Arguments : Inputs - in0, in1 152 | Outputs - out0, out1 153 | Return Type - as per RTYPE 154 | Details : Right half of byte elements from 'in0' and 'in1' are 155 | interleaved and written to 'out0' 156 | */ 157 | #define ILVRL_B2(RTYPE, in0, in1, out0, out1) { \ 158 | out0 = (RTYPE) __msa_ilvr_b((v16i8) in0, (v16i8) in1); \ 159 | out1 = (RTYPE) __msa_ilvl_b((v16i8) in0, (v16i8) in1); \ 160 | } 161 | #define ILVRL_B2_UB(...) ILVRL_B2(v16u8, __VA_ARGS__) 162 | 163 | #endif /* !defined(LIBYUV_DISABLE_MSA) && defined(__mips_msa) */ 164 | 165 | #endif // INCLUDE_LIBYUV_MACROS_MSA_H_ 166 | -------------------------------------------------------------------------------- /ReplyKitDemo-Socket/JWExtensionKit/Vendors/libyuv/mjpeg_decoder.h: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2012 The LibYuv Project Authors. All rights reserved. 3 | * 4 | * Use of this source code is governed by a BSD-style license 5 | * that can be found in the LICENSE file in the root of the source 6 | * tree. An additional intellectual property rights grant can be found 7 | * in the file PATENTS. All contributing project authors may 8 | * be found in the AUTHORS file in the root of the source tree. 
9 | */ 10 | 11 | #ifndef INCLUDE_LIBYUV_MJPEG_DECODER_H_ 12 | #define INCLUDE_LIBYUV_MJPEG_DECODER_H_ 13 | 14 | #include "basic_types.h" 15 | 16 | #ifdef __cplusplus 17 | // NOTE: For a simplified public API use convert.h MJPGToI420(). 18 | 19 | struct jpeg_common_struct; 20 | struct jpeg_decompress_struct; 21 | struct jpeg_source_mgr; 22 | 23 | namespace libyuv { 24 | 25 | #ifdef __cplusplus 26 | extern "C" { 27 | #endif 28 | 29 | LIBYUV_BOOL ValidateJpeg(const uint8* sample, size_t sample_size); 30 | 31 | #ifdef __cplusplus 32 | } // extern "C" 33 | #endif 34 | 35 | static const uint32 kUnknownDataSize = 0xFFFFFFFF; 36 | 37 | enum JpegSubsamplingType { 38 | kJpegYuv420, 39 | kJpegYuv422, 40 | kJpegYuv444, 41 | kJpegYuv400, 42 | kJpegUnknown 43 | }; 44 | 45 | struct Buffer { 46 | const uint8* data; 47 | int len; 48 | }; 49 | 50 | struct BufferVector { 51 | Buffer* buffers; 52 | int len; 53 | int pos; 54 | }; 55 | 56 | struct SetJmpErrorMgr; 57 | 58 | // MJPEG ("Motion JPEG") is a pseudo-standard video codec where the frames are 59 | // simply independent JPEG images with a fixed huffman table (which is omitted). 60 | // It is rarely used in video transmission, but is common as a camera capture 61 | // format, especially in Logitech devices. This class implements a decoder for 62 | // MJPEG frames. 63 | // 64 | // See http://tools.ietf.org/html/rfc2435 65 | class LIBYUV_API MJpegDecoder { 66 | public: 67 | typedef void (*CallbackFunction)(void* opaque, 68 | const uint8* const* data, 69 | const int* strides, 70 | int rows); 71 | 72 | static const int kColorSpaceUnknown; 73 | static const int kColorSpaceGrayscale; 74 | static const int kColorSpaceRgb; 75 | static const int kColorSpaceYCbCr; 76 | static const int kColorSpaceCMYK; 77 | static const int kColorSpaceYCCK; 78 | 79 | MJpegDecoder(); 80 | ~MJpegDecoder(); 81 | 82 | // Loads a new frame, reads its headers, and determines the uncompressed 83 | // image format. 
84 | // Returns LIBYUV_TRUE if image looks valid and format is supported. 85 | // If return value is LIBYUV_TRUE, then the values for all the following 86 | // getters are populated. 87 | // src_len is the size of the compressed mjpeg frame in bytes. 88 | LIBYUV_BOOL LoadFrame(const uint8* src, size_t src_len); 89 | 90 | // Returns width of the last loaded frame in pixels. 91 | int GetWidth(); 92 | 93 | // Returns height of the last loaded frame in pixels. 94 | int GetHeight(); 95 | 96 | // Returns format of the last loaded frame. The return value is one of the 97 | // kColorSpace* constants. 98 | int GetColorSpace(); 99 | 100 | // Number of color components in the color space. 101 | int GetNumComponents(); 102 | 103 | // Sample factors of the n-th component. 104 | int GetHorizSampFactor(int component); 105 | 106 | int GetVertSampFactor(int component); 107 | 108 | int GetHorizSubSampFactor(int component); 109 | 110 | int GetVertSubSampFactor(int component); 111 | 112 | // Public for testability. 113 | int GetImageScanlinesPerImcuRow(); 114 | 115 | // Public for testability. 116 | int GetComponentScanlinesPerImcuRow(int component); 117 | 118 | // Width of a component in bytes. 119 | int GetComponentWidth(int component); 120 | 121 | // Height of a component. 122 | int GetComponentHeight(int component); 123 | 124 | // Width of a component in bytes with padding for DCTSIZE. Public for testing. 125 | int GetComponentStride(int component); 126 | 127 | // Size of a component in bytes. 128 | int GetComponentSize(int component); 129 | 130 | // Call this after LoadFrame() if you decide you don't want to decode it 131 | // after all. 132 | LIBYUV_BOOL UnloadFrame(); 133 | 134 | // Decodes the entire image into a one-buffer-per-color-component format. 135 | // dst_width must match exactly. dst_height must be <= to image height; if 136 | // less, the image is cropped. 
"planes" must have size equal to at least 137 | // GetNumComponents() and they must point to non-overlapping buffers of size 138 | // at least GetComponentSize(i). The pointers in planes are incremented 139 | // to point to after the end of the written data. 140 | // TODO(fbarchard): Add dst_x, dst_y to allow specific rect to be decoded. 141 | LIBYUV_BOOL DecodeToBuffers(uint8** planes, int dst_width, int dst_height); 142 | 143 | // Decodes the entire image and passes the data via repeated calls to a 144 | // callback function. Each call will get the data for a whole number of 145 | // image scanlines. 146 | // TODO(fbarchard): Add dst_x, dst_y to allow specific rect to be decoded. 147 | LIBYUV_BOOL DecodeToCallback(CallbackFunction fn, void* opaque, 148 | int dst_width, int dst_height); 149 | 150 | // The helper function which recognizes the jpeg sub-sampling type. 151 | static JpegSubsamplingType JpegSubsamplingTypeHelper( 152 | int* subsample_x, int* subsample_y, int number_of_components); 153 | 154 | private: 155 | void AllocOutputBuffers(int num_outbufs); 156 | void DestroyOutputBuffers(); 157 | 158 | LIBYUV_BOOL StartDecode(); 159 | LIBYUV_BOOL FinishDecode(); 160 | 161 | void SetScanlinePointers(uint8** data); 162 | LIBYUV_BOOL DecodeImcuRow(); 163 | 164 | int GetComponentScanlinePadding(int component); 165 | 166 | // A buffer holding the input data for a frame. 167 | Buffer buf_; 168 | BufferVector buf_vec_; 169 | 170 | jpeg_decompress_struct* decompress_struct_; 171 | jpeg_source_mgr* source_mgr_; 172 | SetJmpErrorMgr* error_mgr_; 173 | 174 | // LIBYUV_TRUE iff at least one component has scanline padding. (i.e., 175 | // GetComponentScanlinePadding() != 0.) 176 | LIBYUV_BOOL has_scanline_padding_; 177 | 178 | // Temporaries used to point to scanline outputs. 179 | int num_outbufs_; // Outermost size of all arrays below. 
180 | uint8*** scanlines_; 181 | int* scanlines_sizes_; 182 | // Temporary buffer used for decoding when we can't decode directly to the 183 | // output buffers. Large enough for just one iMCU row. 184 | uint8** databuf_; 185 | int* databuf_strides_; 186 | }; 187 | 188 | } // namespace libyuv 189 | 190 | #endif // __cplusplus 191 | #endif // INCLUDE_LIBYUV_MJPEG_DECODER_H_ 192 | -------------------------------------------------------------------------------- /ReplyKitDemo-Socket/JWExtensionKit/Vendors/libyuv/rotate.h: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2011 The LibYuv Project Authors. All rights reserved. 3 | * 4 | * Use of this source code is governed by a BSD-style license 5 | * that can be found in the LICENSE file in the root of the source 6 | * tree. An additional intellectual property rights grant can be found 7 | * in the file PATENTS. All contributing project authors may 8 | * be found in the AUTHORS file in the root of the source tree. 9 | */ 10 | 11 | #ifndef INCLUDE_LIBYUV_ROTATE_H_ 12 | #define INCLUDE_LIBYUV_ROTATE_H_ 13 | 14 | #include "basic_types.h" 15 | 16 | #ifdef __cplusplus 17 | namespace libyuv { 18 | extern "C" { 19 | #endif 20 | 21 | // Supported rotation. 22 | typedef enum RotationMode { 23 | kRotate0 = 0, // No rotation. 24 | kRotate90 = 90, // Rotate 90 degrees clockwise. 25 | kRotate180 = 180, // Rotate 180 degrees. 26 | kRotate270 = 270, // Rotate 270 degrees clockwise. 27 | 28 | // Deprecated. 29 | kRotateNone = 0, 30 | kRotateClockwise = 90, 31 | kRotateCounterClockwise = 270, 32 | } RotationModeEnum; 33 | 34 | // Rotate I420 frame. 
35 | LIBYUV_API 36 | int I420Rotate(const uint8* src_y, int src_stride_y, 37 | const uint8* src_u, int src_stride_u, 38 | const uint8* src_v, int src_stride_v, 39 | uint8* dst_y, int dst_stride_y, 40 | uint8* dst_u, int dst_stride_u, 41 | uint8* dst_v, int dst_stride_v, 42 | int src_width, int src_height, enum RotationMode mode); 43 | 44 | // Rotate NV12 input and store in I420. 45 | LIBYUV_API 46 | int NV12ToI420Rotate(const uint8* src_y, int src_stride_y, 47 | const uint8* src_uv, int src_stride_uv, 48 | uint8* dst_y, int dst_stride_y, 49 | uint8* dst_u, int dst_stride_u, 50 | uint8* dst_v, int dst_stride_v, 51 | int src_width, int src_height, enum RotationMode mode); 52 | 53 | // Rotate a plane by 0, 90, 180, or 270. 54 | LIBYUV_API 55 | int RotatePlane(const uint8* src, int src_stride, 56 | uint8* dst, int dst_stride, 57 | int src_width, int src_height, enum RotationMode mode); 58 | 59 | // Rotate planes by 90, 180, 270. Deprecated. 60 | LIBYUV_API 61 | void RotatePlane90(const uint8* src, int src_stride, 62 | uint8* dst, int dst_stride, 63 | int width, int height); 64 | 65 | LIBYUV_API 66 | void RotatePlane180(const uint8* src, int src_stride, 67 | uint8* dst, int dst_stride, 68 | int width, int height); 69 | 70 | LIBYUV_API 71 | void RotatePlane270(const uint8* src, int src_stride, 72 | uint8* dst, int dst_stride, 73 | int width, int height); 74 | 75 | LIBYUV_API 76 | void RotateUV90(const uint8* src, int src_stride, 77 | uint8* dst_a, int dst_stride_a, 78 | uint8* dst_b, int dst_stride_b, 79 | int width, int height); 80 | 81 | // Rotations for when U and V are interleaved. 82 | // These functions take one input pointer and 83 | // split the data into two buffers while 84 | // rotating them. Deprecated. 
85 | LIBYUV_API 86 | void RotateUV180(const uint8* src, int src_stride, 87 | uint8* dst_a, int dst_stride_a, 88 | uint8* dst_b, int dst_stride_b, 89 | int width, int height); 90 | 91 | LIBYUV_API 92 | void RotateUV270(const uint8* src, int src_stride, 93 | uint8* dst_a, int dst_stride_a, 94 | uint8* dst_b, int dst_stride_b, 95 | int width, int height); 96 | 97 | // The 90 and 270 functions are based on transposes. 98 | // Doing a transpose with reversing the read/write 99 | // order will result in a rotation by +- 90 degrees. 100 | // Deprecated. 101 | LIBYUV_API 102 | void TransposePlane(const uint8* src, int src_stride, 103 | uint8* dst, int dst_stride, 104 | int width, int height); 105 | 106 | LIBYUV_API 107 | void TransposeUV(const uint8* src, int src_stride, 108 | uint8* dst_a, int dst_stride_a, 109 | uint8* dst_b, int dst_stride_b, 110 | int width, int height); 111 | 112 | #ifdef __cplusplus 113 | } // extern "C" 114 | } // namespace libyuv 115 | #endif 116 | 117 | #endif // INCLUDE_LIBYUV_ROTATE_H_ 118 | -------------------------------------------------------------------------------- /ReplyKitDemo-Socket/JWExtensionKit/Vendors/libyuv/rotate_argb.h: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2012 The LibYuv Project Authors. All rights reserved. 3 | * 4 | * Use of this source code is governed by a BSD-style license 5 | * that can be found in the LICENSE file in the root of the source 6 | * tree. An additional intellectual property rights grant can be found 7 | * in the file PATENTS. All contributing project authors may 8 | * be found in the AUTHORS file in the root of the source tree. 9 | */ 10 | 11 | #ifndef INCLUDE_LIBYUV_ROTATE_ARGB_H_ 12 | #define INCLUDE_LIBYUV_ROTATE_ARGB_H_ 13 | 14 | #include "basic_types.h" 15 | #include "rotate.h" // For RotationMode. 
16 | 17 | #ifdef __cplusplus 18 | namespace libyuv { 19 | extern "C" { 20 | #endif 21 | 22 | // Rotate ARGB frame 23 | LIBYUV_API 24 | int ARGBRotate(const uint8* src_argb, int src_stride_argb, 25 | uint8* dst_argb, int dst_stride_argb, 26 | int src_width, int src_height, enum RotationMode mode); 27 | 28 | #ifdef __cplusplus 29 | } // extern "C" 30 | } // namespace libyuv 31 | #endif 32 | 33 | #endif // INCLUDE_LIBYUV_ROTATE_ARGB_H_ 34 | -------------------------------------------------------------------------------- /ReplyKitDemo-Socket/JWExtensionKit/Vendors/libyuv/rotate_row.h: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2013 The LibYuv Project Authors. All rights reserved. 3 | * 4 | * Use of this source code is governed by a BSD-style license 5 | * that can be found in the LICENSE file in the root of the source 6 | * tree. An additional intellectual property rights grant can be found 7 | * in the file PATENTS. All contributing project authors may 8 | * be found in the AUTHORS file in the root of the source tree. 9 | */ 10 | 11 | #ifndef INCLUDE_LIBYUV_ROTATE_ROW_H_ 12 | #define INCLUDE_LIBYUV_ROTATE_ROW_H_ 13 | 14 | #include "basic_types.h" 15 | 16 | #ifdef __cplusplus 17 | namespace libyuv { 18 | extern "C" { 19 | #endif 20 | 21 | #if defined(__pnacl__) || defined(__CLR_VER) || \ 22 | (defined(__i386__) && !defined(__SSE2__)) 23 | #define LIBYUV_DISABLE_X86 24 | #endif 25 | // MemorySanitizer does not support assembly code yet. 
http://crbug.com/344505 26 | #if defined(__has_feature) 27 | #if __has_feature(memory_sanitizer) 28 | #define LIBYUV_DISABLE_X86 29 | #endif 30 | #endif 31 | // The following are available for Visual C and clangcl 32 bit: 32 | #if !defined(LIBYUV_DISABLE_X86) && defined(_M_IX86) 33 | #define HAS_TRANSPOSEWX8_SSSE3 34 | #define HAS_TRANSPOSEUVWX8_SSE2 35 | #endif 36 | 37 | // The following are available for GCC 32 or 64 bit but not NaCL for 64 bit: 38 | #if !defined(LIBYUV_DISABLE_X86) && \ 39 | (defined(__i386__) || (defined(__x86_64__) && !defined(__native_client__))) 40 | #define HAS_TRANSPOSEWX8_SSSE3 41 | #endif 42 | 43 | // The following are available for 64 bit GCC but not NaCL: 44 | #if !defined(LIBYUV_DISABLE_X86) && !defined(__native_client__) && \ 45 | defined(__x86_64__) 46 | #define HAS_TRANSPOSEWX8_FAST_SSSE3 47 | #define HAS_TRANSPOSEUVWX8_SSE2 48 | #endif 49 | 50 | #if !defined(LIBYUV_DISABLE_NEON) && !defined(__native_client__) && \ 51 | (defined(__ARM_NEON__) || defined(LIBYUV_NEON) || defined(__aarch64__)) 52 | #define HAS_TRANSPOSEWX8_NEON 53 | #define HAS_TRANSPOSEUVWX8_NEON 54 | #endif 55 | 56 | #if !defined(LIBYUV_DISABLE_MIPS) && !defined(__native_client__) && \ 57 | defined(__mips__) && \ 58 | defined(__mips_dsp) && (__mips_dsp_rev >= 2) 59 | #define HAS_TRANSPOSEWX8_DSPR2 60 | #define HAS_TRANSPOSEUVWX8_DSPR2 61 | #endif // defined(__mips__) 62 | 63 | void TransposeWxH_C(const uint8* src, int src_stride, 64 | uint8* dst, int dst_stride, int width, int height); 65 | 66 | void TransposeWx8_C(const uint8* src, int src_stride, 67 | uint8* dst, int dst_stride, int width); 68 | void TransposeWx8_NEON(const uint8* src, int src_stride, 69 | uint8* dst, int dst_stride, int width); 70 | void TransposeWx8_SSSE3(const uint8* src, int src_stride, 71 | uint8* dst, int dst_stride, int width); 72 | void TransposeWx8_Fast_SSSE3(const uint8* src, int src_stride, 73 | uint8* dst, int dst_stride, int width); 74 | void TransposeWx8_DSPR2(const uint8* src, int 
src_stride, 75 | uint8* dst, int dst_stride, int width); 76 | void TransposeWx8_Fast_DSPR2(const uint8* src, int src_stride, 77 | uint8* dst, int dst_stride, int width); 78 | 79 | void TransposeWx8_Any_NEON(const uint8* src, int src_stride, 80 | uint8* dst, int dst_stride, int width); 81 | void TransposeWx8_Any_SSSE3(const uint8* src, int src_stride, 82 | uint8* dst, int dst_stride, int width); 83 | void TransposeWx8_Fast_Any_SSSE3(const uint8* src, int src_stride, 84 | uint8* dst, int dst_stride, int width); 85 | void TransposeWx8_Any_DSPR2(const uint8* src, int src_stride, 86 | uint8* dst, int dst_stride, int width); 87 | 88 | void TransposeUVWxH_C(const uint8* src, int src_stride, 89 | uint8* dst_a, int dst_stride_a, 90 | uint8* dst_b, int dst_stride_b, 91 | int width, int height); 92 | 93 | void TransposeUVWx8_C(const uint8* src, int src_stride, 94 | uint8* dst_a, int dst_stride_a, 95 | uint8* dst_b, int dst_stride_b, int width); 96 | void TransposeUVWx8_SSE2(const uint8* src, int src_stride, 97 | uint8* dst_a, int dst_stride_a, 98 | uint8* dst_b, int dst_stride_b, int width); 99 | void TransposeUVWx8_NEON(const uint8* src, int src_stride, 100 | uint8* dst_a, int dst_stride_a, 101 | uint8* dst_b, int dst_stride_b, int width); 102 | void TransposeUVWx8_DSPR2(const uint8* src, int src_stride, 103 | uint8* dst_a, int dst_stride_a, 104 | uint8* dst_b, int dst_stride_b, int width); 105 | 106 | void TransposeUVWx8_Any_SSE2(const uint8* src, int src_stride, 107 | uint8* dst_a, int dst_stride_a, 108 | uint8* dst_b, int dst_stride_b, int width); 109 | void TransposeUVWx8_Any_NEON(const uint8* src, int src_stride, 110 | uint8* dst_a, int dst_stride_a, 111 | uint8* dst_b, int dst_stride_b, int width); 112 | void TransposeUVWx8_Any_DSPR2(const uint8* src, int src_stride, 113 | uint8* dst_a, int dst_stride_a, 114 | uint8* dst_b, int dst_stride_b, int width); 115 | 116 | #ifdef __cplusplus 117 | } // extern "C" 118 | } // namespace libyuv 119 | #endif 120 | 121 | #endif // 
INCLUDE_LIBYUV_ROTATE_ROW_H_ 122 | -------------------------------------------------------------------------------- /ReplyKitDemo-Socket/JWExtensionKit/Vendors/libyuv/scale.h: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2011 The LibYuv Project Authors. All rights reserved. 3 | * 4 | * Use of this source code is governed by a BSD-style license 5 | * that can be found in the LICENSE file in the root of the source 6 | * tree. An additional intellectual property rights grant can be found 7 | * in the file PATENTS. All contributing project authors may 8 | * be found in the AUTHORS file in the root of the source tree. 9 | */ 10 | 11 | #ifndef INCLUDE_LIBYUV_SCALE_H_ 12 | #define INCLUDE_LIBYUV_SCALE_H_ 13 | 14 | #include "basic_types.h" 15 | 16 | #ifdef __cplusplus 17 | namespace libyuv { 18 | extern "C" { 19 | #endif 20 | 21 | // Supported filtering. 22 | typedef enum FilterMode { 23 | kFilterNone = 0, // Point sample; Fastest. 24 | kFilterLinear = 1, // Filter horizontally only. 25 | kFilterBilinear = 2, // Faster than box, but lower quality scaling down. 26 | kFilterBox = 3 // Highest quality. 27 | } FilterModeEnum; 28 | 29 | // Scale a YUV plane. 30 | LIBYUV_API 31 | void ScalePlane(const uint8* src, int src_stride, 32 | int src_width, int src_height, 33 | uint8* dst, int dst_stride, 34 | int dst_width, int dst_height, 35 | enum FilterMode filtering); 36 | 37 | LIBYUV_API 38 | void ScalePlane_16(const uint16* src, int src_stride, 39 | int src_width, int src_height, 40 | uint16* dst, int dst_stride, 41 | int dst_width, int dst_height, 42 | enum FilterMode filtering); 43 | 44 | // Scales a YUV 4:2:0 image from the src width and height to the 45 | // dst width and height. 46 | // If filtering is kFilterNone, a simple nearest-neighbor algorithm is 47 | // used. This produces basic (blocky) quality at the fastest speed. 
48 | // If filtering is kFilterBilinear, interpolation is used to produce a better 49 | // quality image, at the expense of speed. 50 | // If filtering is kFilterBox, averaging is used to produce ever better 51 | // quality image, at further expense of speed. 52 | // Returns 0 if successful. 53 | 54 | LIBYUV_API 55 | int I420Scale(const uint8* src_y, int src_stride_y, 56 | const uint8* src_u, int src_stride_u, 57 | const uint8* src_v, int src_stride_v, 58 | int src_width, int src_height, 59 | uint8* dst_y, int dst_stride_y, 60 | uint8* dst_u, int dst_stride_u, 61 | uint8* dst_v, int dst_stride_v, 62 | int dst_width, int dst_height, 63 | enum FilterMode filtering); 64 | 65 | LIBYUV_API 66 | int I420Scale_16(const uint16* src_y, int src_stride_y, 67 | const uint16* src_u, int src_stride_u, 68 | const uint16* src_v, int src_stride_v, 69 | int src_width, int src_height, 70 | uint16* dst_y, int dst_stride_y, 71 | uint16* dst_u, int dst_stride_u, 72 | uint16* dst_v, int dst_stride_v, 73 | int dst_width, int dst_height, 74 | enum FilterMode filtering); 75 | 76 | #ifdef __cplusplus 77 | // Legacy API. Deprecated. 78 | LIBYUV_API 79 | int Scale(const uint8* src_y, const uint8* src_u, const uint8* src_v, 80 | int src_stride_y, int src_stride_u, int src_stride_v, 81 | int src_width, int src_height, 82 | uint8* dst_y, uint8* dst_u, uint8* dst_v, 83 | int dst_stride_y, int dst_stride_u, int dst_stride_v, 84 | int dst_width, int dst_height, 85 | LIBYUV_BOOL interpolate); 86 | 87 | // Legacy API. Deprecated. 88 | LIBYUV_API 89 | int ScaleOffset(const uint8* src_i420, int src_width, int src_height, 90 | uint8* dst_i420, int dst_width, int dst_height, int dst_yoffset, 91 | LIBYUV_BOOL interpolate); 92 | 93 | // For testing, allow disabling of specialized scalers. 
94 | LIBYUV_API 95 | void SetUseReferenceImpl(LIBYUV_BOOL use); 96 | #endif // __cplusplus 97 | 98 | #ifdef __cplusplus 99 | } // extern "C" 100 | } // namespace libyuv 101 | #endif 102 | 103 | #endif // INCLUDE_LIBYUV_SCALE_H_ 104 | -------------------------------------------------------------------------------- /ReplyKitDemo-Socket/JWExtensionKit/Vendors/libyuv/scale_argb.h: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2012 The LibYuv Project Authors. All rights reserved. 3 | * 4 | * Use of this source code is governed by a BSD-style license 5 | * that can be found in the LICENSE file in the root of the source 6 | * tree. An additional intellectual property rights grant can be found 7 | * in the file PATENTS. All contributing project authors may 8 | * be found in the AUTHORS file in the root of the source tree. 9 | */ 10 | 11 | #ifndef INCLUDE_LIBYUV_SCALE_ARGB_H_ 12 | #define INCLUDE_LIBYUV_SCALE_ARGB_H_ 13 | 14 | #include "basic_types.h" 15 | #include "scale.h" // For FilterMode 16 | 17 | #ifdef __cplusplus 18 | namespace libyuv { 19 | extern "C" { 20 | #endif 21 | 22 | LIBYUV_API 23 | int ARGBScale(const uint8* src_argb, int src_stride_argb, 24 | int src_width, int src_height, 25 | uint8* dst_argb, int dst_stride_argb, 26 | int dst_width, int dst_height, 27 | enum FilterMode filtering); 28 | 29 | // Clipped scale takes destination rectangle coordinates for clip values. 30 | LIBYUV_API 31 | int ARGBScaleClip(const uint8* src_argb, int src_stride_argb, 32 | int src_width, int src_height, 33 | uint8* dst_argb, int dst_stride_argb, 34 | int dst_width, int dst_height, 35 | int clip_x, int clip_y, int clip_width, int clip_height, 36 | enum FilterMode filtering); 37 | 38 | // Scale with YUV conversion to ARGB and clipping. 
39 | LIBYUV_API 40 | int YUVToARGBScaleClip(const uint8* src_y, int src_stride_y, 41 | const uint8* src_u, int src_stride_u, 42 | const uint8* src_v, int src_stride_v, 43 | uint32 src_fourcc, 44 | int src_width, int src_height, 45 | uint8* dst_argb, int dst_stride_argb, 46 | uint32 dst_fourcc, 47 | int dst_width, int dst_height, 48 | int clip_x, int clip_y, int clip_width, int clip_height, 49 | enum FilterMode filtering); 50 | 51 | #ifdef __cplusplus 52 | } // extern "C" 53 | } // namespace libyuv 54 | #endif 55 | 56 | #endif // INCLUDE_LIBYUV_SCALE_ARGB_H_ 57 | -------------------------------------------------------------------------------- /ReplyKitDemo-Socket/JWExtensionKit/Vendors/libyuv/version.h: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2012 The LibYuv Project Authors. All rights reserved. 3 | * 4 | * Use of this source code is governed by a BSD-style license 5 | * that can be found in the LICENSE file in the root of the source 6 | * tree. An additional intellectual property rights grant can be found 7 | * in the file PATENTS. All contributing project authors may 8 | * be found in the AUTHORS file in the root of the source tree. 9 | */ 10 | 11 | #ifndef INCLUDE_LIBYUV_VERSION_H_ 12 | #define INCLUDE_LIBYUV_VERSION_H_ 13 | 14 | #define LIBYUV_VERSION 1634 15 | 16 | #endif // INCLUDE_LIBYUV_VERSION_H_ 17 | -------------------------------------------------------------------------------- /ReplyKitDemo-Socket/JWExtensionKit/Vendors/libyuv/video_common.h: -------------------------------------------------------------------------------- 1 | /* 2 | * Copyright 2011 The LibYuv Project Authors. All rights reserved. 3 | * 4 | * Use of this source code is governed by a BSD-style license 5 | * that can be found in the LICENSE file in the root of the source 6 | * tree. An additional intellectual property rights grant can be found 7 | * in the file PATENTS. 
All contributing project authors may 8 | * be found in the AUTHORS file in the root of the source tree. 9 | */ 10 | 11 | // Common definitions for video, including fourcc and VideoFormat. 12 | 13 | #ifndef INCLUDE_LIBYUV_VIDEO_COMMON_H_ 14 | #define INCLUDE_LIBYUV_VIDEO_COMMON_H_ 15 | 16 | #include "basic_types.h" 17 | 18 | #ifdef __cplusplus 19 | namespace libyuv { 20 | extern "C" { 21 | #endif 22 | 23 | ////////////////////////////////////////////////////////////////////////////// 24 | // Definition of FourCC codes 25 | ////////////////////////////////////////////////////////////////////////////// 26 | 27 | // Convert four characters to a FourCC code. 28 | // Needs to be a macro otherwise the OS X compiler complains when the kFormat* 29 | // constants are used in a switch. 30 | #ifdef __cplusplus 31 | #define FOURCC(a, b, c, d) ( \ 32 | (static_cast(a)) | (static_cast(b) << 8) | \ 33 | (static_cast(c) << 16) | (static_cast(d) << 24)) 34 | #else 35 | #define FOURCC(a, b, c, d) ( \ 36 | ((uint32)(a)) | ((uint32)(b) << 8) | /* NOLINT */ \ 37 | ((uint32)(c) << 16) | ((uint32)(d) << 24)) /* NOLINT */ 38 | #endif 39 | 40 | // Some pages discussing FourCC codes: 41 | // http://www.fourcc.org/yuv.php 42 | // http://v4l2spec.bytesex.org/spec/book1.htm 43 | // http://developer.apple.com/quicktime/icefloe/dispatch020.html 44 | // http://msdn.microsoft.com/library/windows/desktop/dd206750.aspx#nv12 45 | // http://people.xiph.org/~xiphmont/containers/nut/nut4cc.txt 46 | 47 | // FourCC codes grouped according to implementation efficiency. 48 | // Primary formats should convert in 1 efficient step. 49 | // Secondary formats are converted in 2 steps. 50 | // Auxilliary formats call primary converters. 51 | enum FourCC { 52 | // 8 Primary YUV formats: 5 planar, 2 biplanar, 2 packed. 
53 | FOURCC_I420 = FOURCC('I', '4', '2', '0'), 54 | FOURCC_I422 = FOURCC('I', '4', '2', '2'), 55 | FOURCC_I444 = FOURCC('I', '4', '4', '4'), 56 | FOURCC_I411 = FOURCC('I', '4', '1', '1'), // deprecated. 57 | FOURCC_I400 = FOURCC('I', '4', '0', '0'), 58 | FOURCC_NV21 = FOURCC('N', 'V', '2', '1'), 59 | FOURCC_NV12 = FOURCC('N', 'V', '1', '2'), 60 | FOURCC_YUY2 = FOURCC('Y', 'U', 'Y', '2'), 61 | FOURCC_UYVY = FOURCC('U', 'Y', 'V', 'Y'), 62 | 63 | // 1 Secondary YUV format: row biplanar. 64 | FOURCC_M420 = FOURCC('M', '4', '2', '0'), 65 | FOURCC_Q420 = FOURCC('Q', '4', '2', '0'), // deprecated. 66 | 67 | // 9 Primary RGB formats: 4 32 bpp, 2 24 bpp, 3 16 bpp. 68 | FOURCC_ARGB = FOURCC('A', 'R', 'G', 'B'), 69 | FOURCC_BGRA = FOURCC('B', 'G', 'R', 'A'), 70 | FOURCC_ABGR = FOURCC('A', 'B', 'G', 'R'), 71 | FOURCC_24BG = FOURCC('2', '4', 'B', 'G'), 72 | FOURCC_RAW = FOURCC('r', 'a', 'w', ' '), 73 | FOURCC_RGBA = FOURCC('R', 'G', 'B', 'A'), 74 | FOURCC_RGBP = FOURCC('R', 'G', 'B', 'P'), // rgb565 LE. 75 | FOURCC_RGBO = FOURCC('R', 'G', 'B', 'O'), // argb1555 LE. 76 | FOURCC_R444 = FOURCC('R', '4', '4', '4'), // argb4444 LE. 77 | 78 | // 4 Secondary RGB formats: 4 Bayer Patterns. deprecated. 79 | FOURCC_RGGB = FOURCC('R', 'G', 'G', 'B'), 80 | FOURCC_BGGR = FOURCC('B', 'G', 'G', 'R'), 81 | FOURCC_GRBG = FOURCC('G', 'R', 'B', 'G'), 82 | FOURCC_GBRG = FOURCC('G', 'B', 'R', 'G'), 83 | 84 | // 1 Primary Compressed YUV format. 85 | FOURCC_MJPG = FOURCC('M', 'J', 'P', 'G'), 86 | 87 | // 5 Auxiliary YUV variations: 3 with U and V planes are swapped, 1 Alias. 88 | FOURCC_YV12 = FOURCC('Y', 'V', '1', '2'), 89 | FOURCC_YV16 = FOURCC('Y', 'V', '1', '6'), 90 | FOURCC_YV24 = FOURCC('Y', 'V', '2', '4'), 91 | FOURCC_YU12 = FOURCC('Y', 'U', '1', '2'), // Linux version of I420. 92 | FOURCC_J420 = FOURCC('J', '4', '2', '0'), 93 | FOURCC_J400 = FOURCC('J', '4', '0', '0'), // unofficial fourcc 94 | FOURCC_H420 = FOURCC('H', '4', '2', '0'), // unofficial fourcc 95 | 96 | // 14 Auxiliary aliases. 
CanonicalFourCC() maps these to canonical fourcc. 97 | FOURCC_IYUV = FOURCC('I', 'Y', 'U', 'V'), // Alias for I420. 98 | FOURCC_YU16 = FOURCC('Y', 'U', '1', '6'), // Alias for I422. 99 | FOURCC_YU24 = FOURCC('Y', 'U', '2', '4'), // Alias for I444. 100 | FOURCC_YUYV = FOURCC('Y', 'U', 'Y', 'V'), // Alias for YUY2. 101 | FOURCC_YUVS = FOURCC('y', 'u', 'v', 's'), // Alias for YUY2 on Mac. 102 | FOURCC_HDYC = FOURCC('H', 'D', 'Y', 'C'), // Alias for UYVY. 103 | FOURCC_2VUY = FOURCC('2', 'v', 'u', 'y'), // Alias for UYVY on Mac. 104 | FOURCC_JPEG = FOURCC('J', 'P', 'E', 'G'), // Alias for MJPG. 105 | FOURCC_DMB1 = FOURCC('d', 'm', 'b', '1'), // Alias for MJPG on Mac. 106 | FOURCC_BA81 = FOURCC('B', 'A', '8', '1'), // Alias for BGGR. 107 | FOURCC_RGB3 = FOURCC('R', 'G', 'B', '3'), // Alias for RAW. 108 | FOURCC_BGR3 = FOURCC('B', 'G', 'R', '3'), // Alias for 24BG. 109 | FOURCC_CM32 = FOURCC(0, 0, 0, 32), // Alias for BGRA kCMPixelFormat_32ARGB 110 | FOURCC_CM24 = FOURCC(0, 0, 0, 24), // Alias for RAW kCMPixelFormat_24RGB 111 | FOURCC_L555 = FOURCC('L', '5', '5', '5'), // Alias for RGBO. 112 | FOURCC_L565 = FOURCC('L', '5', '6', '5'), // Alias for RGBP. 113 | FOURCC_5551 = FOURCC('5', '5', '5', '1'), // Alias for RGBO. 114 | 115 | // 1 Auxiliary compressed YUV format set aside for capturer. 116 | FOURCC_H264 = FOURCC('H', '2', '6', '4'), 117 | 118 | // Match any fourcc. 119 | FOURCC_ANY = -1, 120 | }; 121 | 122 | enum FourCCBpp { 123 | // Canonical fourcc codes used in our code. 
124 | FOURCC_BPP_I420 = 12, 125 | FOURCC_BPP_I422 = 16, 126 | FOURCC_BPP_I444 = 24, 127 | FOURCC_BPP_I411 = 12, 128 | FOURCC_BPP_I400 = 8, 129 | FOURCC_BPP_NV21 = 12, 130 | FOURCC_BPP_NV12 = 12, 131 | FOURCC_BPP_YUY2 = 16, 132 | FOURCC_BPP_UYVY = 16, 133 | FOURCC_BPP_M420 = 12, 134 | FOURCC_BPP_Q420 = 12, 135 | FOURCC_BPP_ARGB = 32, 136 | FOURCC_BPP_BGRA = 32, 137 | FOURCC_BPP_ABGR = 32, 138 | FOURCC_BPP_RGBA = 32, 139 | FOURCC_BPP_24BG = 24, 140 | FOURCC_BPP_RAW = 24, 141 | FOURCC_BPP_RGBP = 16, 142 | FOURCC_BPP_RGBO = 16, 143 | FOURCC_BPP_R444 = 16, 144 | FOURCC_BPP_RGGB = 8, 145 | FOURCC_BPP_BGGR = 8, 146 | FOURCC_BPP_GRBG = 8, 147 | FOURCC_BPP_GBRG = 8, 148 | FOURCC_BPP_YV12 = 12, 149 | FOURCC_BPP_YV16 = 16, 150 | FOURCC_BPP_YV24 = 24, 151 | FOURCC_BPP_YU12 = 12, 152 | FOURCC_BPP_J420 = 12, 153 | FOURCC_BPP_J400 = 8, 154 | FOURCC_BPP_H420 = 12, 155 | FOURCC_BPP_MJPG = 0, // 0 means unknown. 156 | FOURCC_BPP_H264 = 0, 157 | FOURCC_BPP_IYUV = 12, 158 | FOURCC_BPP_YU16 = 16, 159 | FOURCC_BPP_YU24 = 24, 160 | FOURCC_BPP_YUYV = 16, 161 | FOURCC_BPP_YUVS = 16, 162 | FOURCC_BPP_HDYC = 16, 163 | FOURCC_BPP_2VUY = 16, 164 | FOURCC_BPP_JPEG = 1, 165 | FOURCC_BPP_DMB1 = 1, 166 | FOURCC_BPP_BA81 = 8, 167 | FOURCC_BPP_RGB3 = 24, 168 | FOURCC_BPP_BGR3 = 24, 169 | FOURCC_BPP_CM32 = 32, 170 | FOURCC_BPP_CM24 = 24, 171 | 172 | // Match any fourcc. 173 | FOURCC_BPP_ANY = 0, // 0 means unknown. 174 | }; 175 | 176 | // Converts fourcc aliases into canonical ones. 177 | LIBYUV_API uint32 CanonicalFourCC(uint32 fourcc); 178 | 179 | #ifdef __cplusplus 180 | } // extern "C" 181 | } // namespace libyuv 182 | #endif 183 | 184 | #endif // INCLUDE_LIBYUV_VIDEO_COMMON_H_ 185 | -------------------------------------------------------------------------------- /ReplyKitDemo-Socket/JWExtensionKit/Video/NTESI420Frame.h: -------------------------------------------------------------------------------- 1 | // 2 | // 3 | // Created by Netease on 15/4/17. 4 | // Copyright (c) 2017年 Netease. 
All rights reserved. 5 | // 6 | 7 | #import 8 | #import 9 | 10 | typedef NS_ENUM(NSUInteger, NTESI420FramePlane) { 11 | NTESI420FramePlaneY = 0, 12 | NTESI420FramePlaneU = 1, 13 | NTESI420FramePlaneV = 2, 14 | }; 15 | 16 | @interface NTESI420Frame : NSObject 17 | 18 | @property (nonatomic, readonly) int width; 19 | @property (nonatomic, readonly) int height; 20 | @property (nonatomic, readonly) int i420DataLength; 21 | @property (nonatomic, assign) UInt64 timetag; 22 | @property (nonatomic, readonly) UInt8 *data; 23 | 24 | + (instancetype)initWithData:(NSData *)data; 25 | 26 | - (NSData *)bytes; 27 | 28 | - (id)initWithWidth:(int)w height:(int)h; 29 | 30 | - (UInt8 *)dataOfPlane:(NTESI420FramePlane)plane; 31 | 32 | - (NSUInteger)strideOfPlane:(NTESI420FramePlane)plane; 33 | 34 | - (CMSampleBufferRef)convertToSampleBuffer; 35 | 36 | @end 37 | -------------------------------------------------------------------------------- /ReplyKitDemo-Socket/JWExtensionKit/Video/NTESI420Frame.m: -------------------------------------------------------------------------------- 1 | // 2 | // 3 | // Created by fenric on 15/4/17. 4 | // Copyright (c) 2015年 Netease. All rights reserved. 
//

#import "NTESI420Frame.h"
#import "NTESYUVConverter.h"

@interface NTESI420Frame() {
    CFMutableDataRef _cfData;      // owns the contiguous Y/U/V byte buffer
    UInt8 *_planeData[3];          // base pointer of each plane inside _cfData
    NSUInteger _stride[3];         // bytes per row of each plane
}

@end

@implementation NTESI420Frame

/// Deserializes a frame produced by -bytes.
/// Wire layout: int width | int height | int i420DataLength | UInt64 timetag | Y | U | V.
/// Returns nil when `data` is too short for the header or for the payload the
/// dimensions require.
+ (instancetype)initWithData:(NSData *)data {

    int width = 0;
    int height = 0;
    int i420DataLength = 0;
    UInt64 timetag = 0;

    int structSize = sizeof(width) + sizeof(height) + sizeof(i420DataLength) + sizeof(timetag);
    if (structSize > data.length) {
        return nil;
    }

    const void *buffer = [data bytes];
    int offset = 0;

    memcpy(&width, buffer + offset, sizeof(width));
    offset += sizeof(width);

    memcpy(&height, buffer + offset, sizeof(height));
    offset += sizeof(height);

    memcpy(&i420DataLength, buffer + offset, sizeof(i420DataLength));
    offset += sizeof(i420DataLength);

    memcpy(&timetag, buffer + offset, sizeof(timetag));
    offset += sizeof(timetag);

    if (i420DataLength > data.length - structSize) {
        return nil;
    }

    // Fix: the original trusted the *declared* i420DataLength, but the plane
    // copies below read width*height*3/2 bytes — a frame whose declared length
    // was smaller than its dimensions allowed an out-of-bounds read.
    if (width <= 0 || height <= 0 ||
        (NSUInteger)(width * height * 3 / 2) > data.length - structSize) {
        return nil;
    }

    NTESI420Frame *frame = [[[self class] alloc] initWithWidth:width height:height];
    // Fix: timetag was parsed above but never stored on the deserialized frame.
    frame.timetag = timetag;

    memcpy([frame dataOfPlane:NTESI420FramePlaneY], buffer + offset, [frame strideOfPlane:NTESI420FramePlaneY] * height);
    offset += [frame strideOfPlane:NTESI420FramePlaneY] * height;

    memcpy([frame dataOfPlane:NTESI420FramePlaneU], buffer + offset, [frame strideOfPlane:NTESI420FramePlaneU] * height / 2);
    offset += [frame strideOfPlane:NTESI420FramePlaneU] * height / 2;

    memcpy([frame dataOfPlane:NTESI420FramePlaneV], buffer + offset, [frame strideOfPlane:NTESI420FramePlaneV] * height / 2);

    return frame;
}

/// Serializes header + planes into one NSData (inverse of +initWithData:).
- (NSData *)bytes {
    int structSize = sizeof(self.width) + sizeof(self.height) + sizeof(self.i420DataLength) + sizeof(self.timetag);

    void *buffer = malloc(structSize + self.i420DataLength);
    if (buffer == NULL) {
        // Fix: a failed malloc previously crashed in the following memset.
        return nil;
    }
    memset(buffer, 0, structSize + self.i420DataLength);
    int offset = 0;

    memcpy(buffer + offset, &_width, sizeof(_width));
    offset += sizeof(_width);

    memcpy(buffer + offset, &_height, sizeof(_height));
    offset += sizeof(_height);

    memcpy(buffer + offset, &_i420DataLength, sizeof(_i420DataLength));
    offset += sizeof(_i420DataLength);

    memcpy(buffer + offset, &_timetag, sizeof(_timetag));
    offset += sizeof(_timetag);

    memcpy(buffer + offset, [self dataOfPlane:NTESI420FramePlaneY], [self strideOfPlane:NTESI420FramePlaneY] * self.height);
    offset += [self strideOfPlane:NTESI420FramePlaneY] * self.height;

    memcpy(buffer + offset, [self dataOfPlane:NTESI420FramePlaneU], [self strideOfPlane:NTESI420FramePlaneU] * self.height / 2);
    offset += [self strideOfPlane:NTESI420FramePlaneU] * self.height / 2;

    memcpy(buffer + offset, [self dataOfPlane:NTESI420FramePlaneV], [self strideOfPlane:NTESI420FramePlaneV] * self.height / 2);
    offset += [self strideOfPlane:NTESI420FramePlaneV] * self.height / 2;

    NSData *data = [NSData dataWithBytes:buffer length:offset];
    free(buffer);
    return data;
}

/// Designated initializer: allocates one contiguous I420 buffer of
/// width*height*3/2 bytes and slices it into the Y/U/V planes.
- (id)initWithWidth:(int)w
             height:(int)h
{
    if (self = [super init]) {
        _width = w;
        _height = h;
        _i420DataLength = _width * _height * 3 >> 1;
        _cfData = CFDataCreateMutable(kCFAllocatorDefault, _i420DataLength);
        // Fix: CFDataCreateMutable only reserves *capacity*; the data's length
        // is still 0, so writing through CFDataGetMutableBytePtr was undefined
        // behaviour. Grow the length before taking the mutable pointer.
        CFDataSetLength(_cfData, _i420DataLength);
        _data = CFDataGetMutableBytePtr(_cfData);
        _planeData[NTESI420FramePlaneY] = _data;
        _planeData[NTESI420FramePlaneU] = _planeData[NTESI420FramePlaneY] + _width * _height;
        _planeData[NTESI420FramePlaneV] = _planeData[NTESI420FramePlaneU] + _width * _height / 4;
        _stride[NTESI420FramePlaneY] = _width;
        _stride[NTESI420FramePlaneU] = _width >> 1;
        _stride[NTESI420FramePlaneV] = _width >> 1;
    }

    return self;
}

/// Base pointer of the requested plane (Y, U or V) inside the shared buffer.
- (UInt8 *)dataOfPlane:(NTESI420FramePlane)plane
{
    return _planeData[plane];
}

/// Bytes per row of the requested plane (Y = width, U/V = width/2).
- (NSUInteger)strideOfPlane:(NTESI420FramePlane)plane
{
    return _stride[plane];
}

- (void)freeData
{
    // Fix: guard the release — CFRelease(NULL) crashes, and this method is
    // reachable twice (explicit call plus -dealloc).
    if (_cfData) {
        CFRelease(_cfData);
        _cfData = NULL;
    }
    _data = NULL;
    _width = _height = _i420DataLength = 0;
}

- (void)dealloc
{
    [self freeData];
}

/// Wraps this frame's pixels into a CMSampleBuffer via NTESYUVConverter.
/// NOTE(review): the intermediate CVPixelBufferRef returned by
/// +i420FrameToPixelBuffer: is never released here — if that helper follows
/// the Create rule (+1 reference), this leaks one pixel buffer per frame.
/// Confirm the converter's ownership contract before adding a release.
- (CMSampleBufferRef)convertToSampleBuffer
{
    CVPixelBufferRef pixelBuffer = [NTESYUVConverter i420FrameToPixelBuffer:self];
    if (!pixelBuffer) {
        return NULL;  // was `nil`; NULL is the conventional spelling for CF refs
    }
    CMSampleBufferRef sampleBuffer = [NTESYUVConverter pixelBufferToSampleBuffer:pixelBuffer];
    return sampleBuffer;
}

@end
--------------------------------------------------------------------------------
/ReplyKitDemo-Socket/JWExtensionKit/Video/NTESVideoUtil.h:
--------------------------------------------------------------------------------
//
//
// Created by fenric on 17/3/20.
// Copyright © 2017年 Netease. All rights reserved.
//

#import <Foundation/Foundation.h>
#import <CoreMedia/CoreMedia.h>

@interface NTESVideoUtil : NSObject

+ (CMVideoDimensions)outputVideoDimens:(CMVideoDimensions)inputDimens
                                  crop:(float)ratio;

+ (CMVideoDimensions)calculateDiemnsDividedByTwo:(int)width andHeight:(int)height;

+ (CMVideoDimensions)outputVideoDimensEnhanced:(CMVideoDimensions)inputDimens crop:(float)ratio;
@end
--------------------------------------------------------------------------------
/ReplyKitDemo-Socket/JWExtensionKit/Video/NTESVideoUtil.m:
--------------------------------------------------------------------------------
//
//
// Created by fenric on 17/3/20.
// Copyright © 2017年 Netease. All rights reserved.
//

#import "NTESVideoUtil.h"

// Output dimensions are snapped down to a multiple of this alignment.
#define NVSVideoUtilCropWidthAlignment 2

@implementation NTESVideoUtil

/// Brute-force search (in 2-pixel steps) for the largest even-sized crop of
/// `inputDimens` whose width == ratio * height exactly.
/// A `ratio` outside (0, 1] disables cropping and returns the even-aligned input.
/// Returns {0, 0} when no exact match exists (original contract preserved).
+ (CMVideoDimensions)outputVideoDimensEnhanced:(CMVideoDimensions)inputDimens crop:(float)ratio
{
    // Round both dimensions down to even values (required for 4:2:0 planes).
    inputDimens.width >>= 1;
    inputDimens.width <<= 1;
    inputDimens.height >>= 1;
    inputDimens.height <<= 1;
    if (ratio <= 0 || ratio > 1) {
        return inputDimens;
    }

    CMVideoDimensions outputDimens = {0, 0};

    int cropW, cropH, sw, sh;
    sw = inputDimens.width;
    sh = inputDimens.height;

    // Fix: the original compared `sw/sh` — *integer* division — against the
    // float `ratio`, so e.g. 720/1280 evaluated to 0 and both the equality and
    // the ordering tests below were wrong for almost every input.
    float inputAspect = (float)sw / (float)sh;

    if (inputAspect == ratio) {
        outputDimens.width = inputDimens.width;
        outputDimens.height = inputDimens.height;
        return outputDimens;
    }

    if (inputAspect < ratio) {
        // Too tall for the target ratio: keep the largest width possible.
        for (cropW = 0; cropW < sw; cropW += 2) {
            for (cropH = 0; cropH < sh; cropH += 2) {
                if ((sw - cropW) == ratio * (sh - cropH)) {
                    outputDimens.height = sh - cropH;
                    outputDimens.width = sw - cropW;

                    return outputDimens;
                }
            }
        }
    } else {
        // Too wide for the target ratio: keep the largest height possible.
        for (cropH = 0; cropH < sh; cropH += 2) {
            for (cropW = 0; cropW < sw; cropW += 2) {
                if ((sw - cropW) == ratio * (sh - cropH)) {
                    outputDimens.height = sh - cropH;
                    outputDimens.width = sw - cropW;

                    return outputDimens;
                }
            }
        }
    }
    return outputDimens;
}

/// Crops `inputDimens` so the short/long side ratio equals `ratio`, then
/// aligns both dimensions down to a multiple of NVSVideoUtilCropWidthAlignment.
/// A `ratio` outside (0, 1] returns the input unchanged.
+ (CMVideoDimensions)outputVideoDimens:(CMVideoDimensions)inputDimens crop:(float)ratio
{
    if (ratio <= 0 || ratio > 1) {
        return inputDimens;
    }

    CMVideoDimensions outputDimens = inputDimens;

    if (inputDimens.width > inputDimens.height) {
        if (inputDimens.width * ratio > inputDimens.height) {
            outputDimens.width = inputDimens.height / ratio;
        }
        else {
            outputDimens.height = inputDimens.width * ratio;
        }
    }
    else {
        if (inputDimens.height * ratio > inputDimens.width) {
            outputDimens.height = inputDimens.width / ratio;
        }
        else {
            outputDimens.width = inputDimens.height * ratio;
        }
    }

    int32_t mod = outputDimens.width % NVSVideoUtilCropWidthAlignment;

    if (mod) {
        outputDimens.width -= mod;
    }

    mod = outputDimens.height % NVSVideoUtilCropWidthAlignment;

    if (mod) {
        outputDimens.height -= mod;
    }

    return outputDimens;
}

/// Rounds width and height down to even values.
+ (CMVideoDimensions)calculateDiemnsDividedByTwo:(int)width andHeight:(int)height
{
    CMVideoDimensions dimens = {width, height};
    int32_t mod = dimens.width % NVSVideoUtilCropWidthAlignment;
    if (mod) {
        dimens.width -= mod;
    }

    mod = dimens.height % NVSVideoUtilCropWidthAlignment;

    if (mod) {
        dimens.height -= mod;
    }

    return dimens;
}

@end
--------------------------------------------------------------------------------
/ReplyKitDemo-Socket/JWExtensionKit/Video/NTESYUVConverter.h:
--------------------------------------------------------------------------------
//
//
// Created by fenric on 16/3/25.
// Copyright © 2016年 Netease. All rights reserved.
5 | // 6 | 7 | #import 8 | #import 9 | #import "NTESI420Frame.h" 10 | 11 | 12 | typedef NS_ENUM(uint8_t, NTESVideoPackOrientation) { 13 | NTESVideoPackOrientationPortrait = 0, //No rotation 14 | NTESVideoPackOrientationLandscapeLeft = 1, //Rotate 90 degrees clockwise 15 | NTESVideoPackOrientationPortraitUpsideDown = 2, //Rotate 180 degrees 16 | NTESVideoPackOrientationLandscapeRight = 3, //Rotate 270 degrees clockwise 17 | }; 18 | 19 | @interface NTESYUVConverter : NSObject 20 | 21 | + (NTESI420Frame *)pixelBufferToI420:(CVImageBufferRef)pixelBuffer 22 | withCrop:(float)cropRatio 23 | targetSize:(CGSize)size 24 | andOrientation:(NTESVideoPackOrientation)orientation; 25 | 26 | + (CVPixelBufferRef)i420FrameToPixelBuffer:(NTESI420Frame *)i420Frame; 27 | 28 | + (CMSampleBufferRef)pixelBufferToSampleBuffer:(CVPixelBufferRef)pixelBuffer; 29 | 30 | @end 31 | -------------------------------------------------------------------------------- /ReplyKitDemo-Socket/ReplyKitDemo.xcodeproj/project.xcworkspace/contents.xcworkspacedata: -------------------------------------------------------------------------------- 1 | 2 | 4 | 6 | 7 | 8 | -------------------------------------------------------------------------------- /ReplyKitDemo-Socket/ReplyKitDemo.xcodeproj/project.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | IDEDidComputeMac32BitWarning 6 | 7 | 8 | 9 | -------------------------------------------------------------------------------- /ReplyKitDemo-Socket/ReplyKitDemo.xcodeproj/project.xcworkspace/xcuserdata/summerxx.xcuserdatad/UserInterfaceState.xcuserstate: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/summerxx27/ReplayKitShareScreen-socket/a2d73805791449f0da780062d58516c75ee9ebf5/ReplyKitDemo-Socket/ReplyKitDemo.xcodeproj/project.xcworkspace/xcuserdata/summerxx.xcuserdatad/UserInterfaceState.xcuserstate 
-------------------------------------------------------------------------------- /ReplyKitDemo-Socket/ReplyKitDemo.xcodeproj/xcshareddata/xcschemes/ReplyKitDemo.xcscheme: -------------------------------------------------------------------------------- 1 | 2 | 5 | 8 | 9 | 15 | 21 | 22 | 23 | 24 | 25 | 30 | 31 | 32 | 33 | 43 | 45 | 51 | 52 | 53 | 54 | 60 | 62 | 68 | 69 | 70 | 71 | 73 | 74 | 77 | 78 | 79 | -------------------------------------------------------------------------------- /ReplyKitDemo-Socket/ReplyKitDemo.xcodeproj/xcshareddata/xcschemes/extension-demo.xcscheme: -------------------------------------------------------------------------------- 1 | 2 | 6 | 9 | 10 | 16 | 22 | 23 | 24 | 30 | 36 | 37 | 38 | 39 | 40 | 45 | 46 | 47 | 48 | 60 | 64 | 65 | 66 | 72 | 73 | 74 | 75 | 83 | 85 | 91 | 92 | 93 | 94 | 96 | 97 | 100 | 101 | 102 | -------------------------------------------------------------------------------- /ReplyKitDemo-Socket/ReplyKitDemo.xcodeproj/xcuserdata/summerxx.xcuserdatad/xcdebugger/Breakpoints_v2.xcbkptlist: -------------------------------------------------------------------------------- 1 | 2 | 6 | 7 | 9 | 17 | 18 | 19 | 20 | 21 | -------------------------------------------------------------------------------- /ReplyKitDemo-Socket/ReplyKitDemo.xcodeproj/xcuserdata/summerxx.xcuserdatad/xcschemes/xcschememanagement.plist: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | SchemeUserState 6 | 7 | JWExtensionKit.xcscheme_^#shared#^_ 8 | 9 | orderHint 10 | 2 11 | 12 | ReplyKitDemo.xcscheme_^#shared#^_ 13 | 14 | orderHint 15 | 1 16 | 17 | extension-demo.xcscheme_^#shared#^_ 18 | 19 | orderHint 20 | 0 21 | 22 | 23 | SuppressBuildableAutocreation 24 | 25 | B991ACB0295DB6F8007C1D90 26 | 27 | primary 28 | 29 | 30 | B991ACEC295DB721007C1D90 31 | 32 | primary 33 | 34 | 35 | 36 | 37 | 38 | -------------------------------------------------------------------------------- 
/ReplyKitDemo-Socket/ReplyKitDemo/AppDelegate.h: -------------------------------------------------------------------------------- 1 | // 2 | // AppDelegate.h 3 | // ReplyKitDemo 4 | // 5 | // Created by summerxx on 2022/12/29. 6 | // 7 | 8 | #import 9 | 10 | @interface AppDelegate : UIResponder 11 | 12 | @end 13 | 14 | -------------------------------------------------------------------------------- /ReplyKitDemo-Socket/ReplyKitDemo/AppDelegate.m: -------------------------------------------------------------------------------- 1 | // 2 | // AppDelegate.m 3 | // ReplyKitDemo 4 | // 5 | // Created by summerxx on 2022/12/29. 6 | // 7 | 8 | #import "AppDelegate.h" 9 | #import "ViewController.h" 10 | @interface AppDelegate () 11 | 12 | @end 13 | 14 | @implementation AppDelegate 15 | 16 | 17 | - (BOOL)application:(UIApplication *)application didFinishLaunchingWithOptions:(NSDictionary *)launchOptions { 18 | // Override point for customization after application launch. 19 | return YES; 20 | } 21 | 22 | @end 23 | -------------------------------------------------------------------------------- /ReplyKitDemo-Socket/ReplyKitDemo/Assets.xcassets/AccentColor.colorset/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "colors" : [ 3 | { 4 | "idiom" : "universal" 5 | } 6 | ], 7 | "info" : { 8 | "author" : "xcode", 9 | "version" : 1 10 | } 11 | } 12 | -------------------------------------------------------------------------------- /ReplyKitDemo-Socket/ReplyKitDemo/Assets.xcassets/AppIcon.appiconset/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "images" : [ 3 | { 4 | "idiom" : "universal", 5 | "platform" : "ios", 6 | "size" : "1024x1024" 7 | } 8 | ], 9 | "info" : { 10 | "author" : "xcode", 11 | "version" : 1 12 | } 13 | } 14 | -------------------------------------------------------------------------------- /ReplyKitDemo-Socket/ReplyKitDemo/Assets.xcassets/Contents.json: 
-------------------------------------------------------------------------------- 1 | { 2 | "info" : { 3 | "author" : "xcode", 4 | "version" : 1 5 | } 6 | } 7 | -------------------------------------------------------------------------------- /ReplyKitDemo-Socket/ReplyKitDemo/Assets.xcassets/focus.imageset/Contents.json: -------------------------------------------------------------------------------- 1 | { 2 | "images" : [ 3 | { 4 | "filename" : "focus.png", 5 | "idiom" : "universal", 6 | "scale" : "1x" 7 | }, 8 | { 9 | "idiom" : "universal", 10 | "scale" : "2x" 11 | }, 12 | { 13 | "idiom" : "universal", 14 | "scale" : "3x" 15 | } 16 | ], 17 | "info" : { 18 | "author" : "xcode", 19 | "version" : 1 20 | } 21 | } 22 | -------------------------------------------------------------------------------- /ReplyKitDemo-Socket/ReplyKitDemo/Assets.xcassets/focus.imageset/focus.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/summerxx27/ReplayKitShareScreen-socket/a2d73805791449f0da780062d58516c75ee9ebf5/ReplyKitDemo-Socket/ReplyKitDemo/Assets.xcassets/focus.imageset/focus.png -------------------------------------------------------------------------------- /ReplyKitDemo-Socket/ReplyKitDemo/Base.lproj/LaunchScreen.storyboard: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | -------------------------------------------------------------------------------- /ReplyKitDemo-Socket/ReplyKitDemo/Base.lproj/Main.storyboard: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | -------------------------------------------------------------------------------- /ReplyKitDemo-Socket/ReplyKitDemo/CameraCapture/CaptureViewController.h: 
-------------------------------------------------------------------------------- 1 | // 2 | // CaptureViewController.h 3 | // ReplyKitDemo 4 | // 5 | // Created by summerxx on 2022/12/30. 6 | // 7 | 8 | #import 9 | 10 | NS_ASSUME_NONNULL_BEGIN 11 | 12 | @interface CaptureViewController : UIViewController 13 | 14 | @end 15 | 16 | NS_ASSUME_NONNULL_END 17 | -------------------------------------------------------------------------------- /ReplyKitDemo-Socket/ReplyKitDemo/DisplayLayer/VideoDisplayLayer.h: -------------------------------------------------------------------------------- 1 | // 2 | // VideoDisplayLayer.h 3 | // InteractiveScreen 4 | // 5 | // Created by 刘川 on 2018/12/5. 6 | // Copyright © 2018 王楠. All rights reserved. 7 | // 8 | 9 | #warning 用来测试的视频渲染 - iOS12 之后已经废弃了 openGL ES; so... 10 | 11 | #import 12 | #include 13 | #include 14 | 15 | NS_ASSUME_NONNULL_BEGIN 16 | 17 | @interface VideoDisplayLayer : CAEAGLLayer 18 | 19 | @property CVPixelBufferRef pixelBuffer; 20 | 21 | - (id)initWithFrame:(CGRect)frame; 22 | 23 | - (void)resetRenderBuffer; 24 | 25 | @end 26 | 27 | NS_ASSUME_NONNULL_END 28 | -------------------------------------------------------------------------------- /ReplyKitDemo-Socket/ReplyKitDemo/FJDeepSleepPreventer/FJDeepSleepPreventer.h: -------------------------------------------------------------------------------- 1 | // 2 | // FJDeepSleepPreventer.h 3 | // PlaySilentMusicInBackgroundMode 4 | // 5 | // Created by FJ on 2020/1/22. 6 | // Copyright © 2020 FJ. All rights reserved. 
7 | // 进入后台模式调用start方法开始播放无声音乐,返回前台调用stop方法停止播放(这种方法需要一直播放无声音乐保持后台,比较耗电) 8 | 9 | #import 10 | 11 | NS_ASSUME_NONNULL_BEGIN 12 | 13 | @interface FJDeepSleepPreventer : NSObject 14 | + (instancetype)sharedInstance; 15 | - (void)start; 16 | - (void)stop; 17 | @end 18 | 19 | NS_ASSUME_NONNULL_END 20 | -------------------------------------------------------------------------------- /ReplyKitDemo-Socket/ReplyKitDemo/FJDeepSleepPreventer/FJDeepSleepPreventer.m: -------------------------------------------------------------------------------- 1 | // 2 | // FJDeepSleepPreventer.m 3 | // PlaySilentMusicInBackgroundMode 4 | // 5 | // Created by FJ on 2020/1/22. 6 | // Copyright © 2020 FJ. All rights reserved. 7 | // 8 | 9 | #import "FJDeepSleepPreventer.h" 10 | #import 11 | #import 12 | #import 13 | 14 | @interface FJDeepSleepPreventer () 15 | @property (nonatomic, strong) AVAudioPlayer *audioPlayer; 16 | @end 17 | 18 | @implementation FJDeepSleepPreventer 19 | 20 | #pragma mark - life cycle 21 | 22 | + (instancetype)sharedInstance { 23 | static FJDeepSleepPreventer *instance; 24 | static dispatch_once_t onceToken; 25 | dispatch_once(&onceToken, ^{ 26 | instance = [FJDeepSleepPreventer new]; 27 | }); 28 | return instance; 29 | } 30 | 31 | - (instancetype)init { 32 | self = [super init]; 33 | if (self) { 34 | [self setup]; 35 | } 36 | return self; 37 | } 38 | 39 | - (void)setup { 40 | [self setupAudioSession]; 41 | [self setupAudioPlayer]; 42 | } 43 | 44 | - (void)setupAudioSession { 45 | // 新建AudioSession会话 46 | AVAudioSession *audioSession = [AVAudioSession sharedInstance]; 47 | // 设置后台播放 48 | NSError *error = nil; 49 | [audioSession setCategory:AVAudioSessionCategoryPlayback withOptions:AVAudioSessionCategoryOptionMixWithOthers error:&error]; 50 | if (error) { 51 | NSLog(@"Error setCategory AVAudioSession: %@", error); 52 | } 53 | NSLog(@"%d", audioSession.isOtherAudioPlaying); 54 | NSError *activeSetError = nil; 55 | // 启动AudioSession,如果一个前台app正在播放音频则可能会启动失败 56 | 
[audioSession setActive:YES error:&activeSetError]; 57 | if (activeSetError) { 58 | NSLog(@"Error activating AVAudioSession: %@", activeSetError); 59 | } 60 | } 61 | 62 | - (void)setupAudioPlayer { 63 | //静音文件 64 | NSString *filePath = [[NSBundle mainBundle] pathForResource:@"Silence" ofType:@"wav"]; 65 | NSURL *fileURL = [[NSURL alloc] initFileURLWithPath:filePath]; 66 | 67 | self.audioPlayer = [[AVAudioPlayer alloc] initWithContentsOfURL:fileURL error:nil]; 68 | //静音 69 | self.audioPlayer.volume = 0; 70 | //循环播放 71 | self.audioPlayer.numberOfLoops = -1; 72 | [self.audioPlayer prepareToPlay]; 73 | } 74 | 75 | #pragma mark - public method 76 | 77 | - (void)start { 78 | [self.audioPlayer play]; 79 | } 80 | 81 | - (void)stop { 82 | [self.audioPlayer stop]; 83 | } 84 | 85 | @end 86 | -------------------------------------------------------------------------------- /ReplyKitDemo-Socket/ReplyKitDemo/FJDeepSleepPreventer/FJDeepSleepPreventerPlus.h: -------------------------------------------------------------------------------- 1 | // 2 | // FJDeepSleepPreventerPlus.h 3 | // PlaySilentMusicInBackgroundMode 4 | // 5 | // Created by FJ on 2020/1/22. 6 | // Copyright © 2020 FJ. All rights reserved. 7 | // 进入后台模式调用start方法,返回前台调用stop方法 8 | // 通过不断 播放一次无声音乐+申请BackgroundTask达到后台保活的效果(相对省电) 9 | 10 | #import 11 | 12 | NS_ASSUME_NONNULL_BEGIN 13 | 14 | @interface FJDeepSleepPreventerPlus : NSObject 15 | + (instancetype)sharedInstance; 16 | - (void)start; 17 | - (void)stop; 18 | @end 19 | 20 | NS_ASSUME_NONNULL_END 21 | -------------------------------------------------------------------------------- /ReplyKitDemo-Socket/ReplyKitDemo/FJDeepSleepPreventer/FJDeepSleepPreventerPlus.m: -------------------------------------------------------------------------------- 1 | // 2 | // FJDeepSleepPreventerPlus.m 3 | // PlaySilentMusicInBackgroundMode 4 | // 5 | // Created by FJ on 2020/1/22. 6 | // Copyright © 2020 FJ. All rights reserved. 
//

#import "FJDeepSleepPreventerPlus.h"
// NOTE(review): the three framework imports on the original lines 10-12 were
// garbled by extraction; reconstructed from the symbols used below
// (AVAudioPlayer/AVAudioSession, UIApplication) — confirm against the repo.
#import <AVFoundation/AVFoundation.h>
#import <AudioToolbox/AudioToolbox.h>
#import <UIKit/UIKit.h>

@interface FJDeepSleepPreventerPlus ()
// Silent one-shot player used to keep the audio session active.
@property (nonatomic, strong) AVAudioPlayer *audioPlayer;
// Currently held background task (UIBackgroundTaskInvalid when none is held).
@property (nonatomic, assign) UIBackgroundTaskIdentifier backgroundTaskIdentifier;
@end

@implementation FJDeepSleepPreventerPlus

+ (instancetype)sharedInstance {
    static FJDeepSleepPreventerPlus *instance;
    static dispatch_once_t onceToken;
    dispatch_once(&onceToken, ^{
        instance = [FJDeepSleepPreventerPlus new];
    });
    return instance;
}

- (instancetype)init {
    self = [super init];
    if (self) {
        // Fix: make the "no task held" state explicit instead of relying on
        // the zero-initialized ivar happening to equal UIBackgroundTaskInvalid.
        _backgroundTaskIdentifier = UIBackgroundTaskInvalid;
        [self setup];
    }
    return self;
}

- (void)setup {
    [self setupAudioSession];
    [self setupAudioPlayer];
}

// NOTE(review): backgroundTimeRemaining is not documented as KVO-compliant,
// so this observation may never fire; nothing in this file calls -addObserver.
// Kept for API compatibility — verify before relying on it.
- (void)addObserver {
    [[UIApplication sharedApplication] addObserver:self
                                        forKeyPath:@"backgroundTimeRemaining"
                                           options:NSKeyValueObservingOptionNew|NSKeyValueObservingOptionOld|NSKeyValueObservingOptionInitial|NSKeyValueObservingOptionPrior
                                           context:nil];
}

- (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context {
    // Deliberately empty in the original; observations are not forwarded.
}

// Configures a playback session that mixes with other audio so the silent
// player can run in the background without interrupting other apps.
- (void)setupAudioSession {
    AVAudioSession *audioSession = [AVAudioSession sharedInstance];
    // Playback category + MixWithOthers enables background playing.
    NSError *error = nil;
    [audioSession setCategory:AVAudioSessionCategoryPlayback withOptions:AVAudioSessionCategoryOptionMixWithOthers error:&error];
    if (error) {
        NSLog(@"Error setCategory AVAudioSession: %@", error);
    }
    NSLog(@"%d", audioSession.isOtherAudioPlaying);
    NSError *activeSetError = nil;
    // Activation can fail if a foreground app is currently playing audio.
    [audioSession setActive:YES error:&activeSetError];
    if (activeSetError) {
        NSLog(@"Error activating AVAudioSession: %@", activeSetError);
    }
}

// Prepares the silent wav for single playback at zero volume.
- (void)setupAudioPlayer {
    NSString *filePath = [[NSBundle mainBundle] pathForResource:@"Silence" ofType:@"wav"];
    NSURL *fileURL = [[NSURL alloc] initFileURLWithPath:filePath];

    self.audioPlayer = [[AVAudioPlayer alloc] initWithContentsOfURL:fileURL error:nil];
    // Muted.
    self.audioPlayer.volume = 0;
    // Fix: numberOfLoops = 1 plays the file *twice*; 0 plays it exactly once,
    // which is what the original comment ("播放一次" / play once) intended.
    self.audioPlayer.numberOfLoops = 0;
    [self.audioPlayer prepareToPlay];
}

#pragma mark - public method

- (void)start {
    [self.audioPlayer play];
    [self applyforBackgroundTask];
}

- (void)stop {
    [self.audioPlayer stop];
    // Fix: the original never ended the background task on stop, so the app
    // kept holding (and re-acquiring via the expiration handler) background
    // execution time after the caller asked to stop.
    if (self.backgroundTaskIdentifier != UIBackgroundTaskInvalid) {
        [[UIApplication sharedApplication] endBackgroundTask:self.backgroundTaskIdentifier];
        self.backgroundTaskIdentifier = UIBackgroundTaskInvalid;
    }
}

#pragma mark - private method

// Requests background execution time; when the grant is about to expire the
// handler ends the old task and calls -start again, chaining silent playback
// and a fresh task — this loop is the keep-alive mechanism.
- (void)applyforBackgroundTask{
    self.backgroundTaskIdentifier = [[UIApplication sharedApplication] beginBackgroundTaskWithExpirationHandler:^{
        if (self.backgroundTaskIdentifier != UIBackgroundTaskInvalid) {
            [[UIApplication sharedApplication] endBackgroundTask:self.backgroundTaskIdentifier];
            self.backgroundTaskIdentifier = UIBackgroundTaskInvalid;
        }
        [self start];
    }];
}

@end
--------------------------------------------------------------------------------
/ReplyKitDemo-Socket/ReplyKitDemo/FJDeepSleepPreventer/Silence.wav:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/summerxx27/ReplayKitShareScreen-socket/a2d73805791449f0da780062d58516c75ee9ebf5/ReplyKitDemo-Socket/ReplyKitDemo/FJDeepSleepPreventer/Silence.wav
--------------------------------------------------------------------------------
/ReplyKitDemo-Socket/ReplyKitDemo/H264DeCode解码/VideoH264Decoder.h:
--------------------------------------------------------------------------------
//
// Created by summerxx on 2022/12/30.
3 | // 4 | 5 | #import 6 | #import 7 | 8 | @class VideoH264Decoder; 9 | 10 | @protocol VideoH264DecoderDelegate 11 | 12 | @optional 13 | - (void)decoder:(VideoH264Decoder *)decoder didDecodingFrame:(CVImageBufferRef)imageBuffer; 14 | 15 | @end 16 | 17 | @interface VideoH264Decoder : NSObject 18 | 19 | @property (nonatomic, weak) id delegate; 20 | 21 | // 解码NALU 22 | - (void)decodeNalu:(uint8_t *)frame size:(uint32_t)frameSize; 23 | 24 | @end 25 | 26 | -------------------------------------------------------------------------------- /ReplyKitDemo-Socket/ReplyKitDemo/H264DeCode解码/VideoH264Decoder.m: -------------------------------------------------------------------------------- 1 | // 2 | // Created by summerxx on 2022/12/30. 3 | // 4 | 5 | #import "VideoH264Decoder.h" 6 | 7 | @interface VideoH264Decoder() 8 | { 9 | // 解码 session 10 | VTDecompressionSessionRef _deocderSession; 11 | // 解码 format 封装了sps 和 pps 12 | CMVideoFormatDescriptionRef _decoderFormatDescription; 13 | // sps & pps 14 | uint8_t *_sps; 15 | NSInteger _spsSize; 16 | uint8_t *_pps; 17 | NSInteger _ppsSize; 18 | } 19 | @end 20 | 21 | @implementation VideoH264Decoder 22 | 23 | // 解码回调函数 24 | static void didDecompress(void *decompressionOutputRefCon, 25 | void *sourceFrameRefCon, 26 | OSStatus status, 27 | VTDecodeInfoFlags infoFlags, 28 | CVImageBufferRef pixelBuffer, 29 | CMTime presentationTimeStamp, 30 | CMTime presentationDuration ) 31 | { 32 | CVPixelBufferRef *outputPixelBuffer = (CVPixelBufferRef *)sourceFrameRefCon; 33 | 34 | *outputPixelBuffer = CVPixelBufferRetain(pixelBuffer); 35 | 36 | VideoH264Decoder *decoder = (__bridge VideoH264Decoder *)decompressionOutputRefCon; 37 | 38 | if ([decoder.delegate respondsToSelector:@selector(decoder:didDecodingFrame:)]) { 39 | [decoder.delegate decoder: decoder didDecodingFrame:pixelBuffer]; 40 | } 41 | } 42 | 43 | 44 | // 初始化解码器 45 | - (BOOL)initH264Decoder 46 | { 47 | if(_deocderSession) { 48 | return YES; 49 | } 50 | 51 | const uint8_t* const 
parameterSetPointers[2] = { _sps, _pps };
    const size_t parameterSetSizes[2] = { _spsSize, _ppsSize };
    // Build a CMVideoFormatDescription from the cached SPS/PPS so the
    // decompression session knows the stream parameters.
    OSStatus status = CMVideoFormatDescriptionCreateFromH264ParameterSets(kCFAllocatorDefault,
                                                                          2,  // parameter set count (SPS + PPS)
                                                                          parameterSetPointers,
                                                                          parameterSetSizes,
                                                                          4,  // NAL length-field size (AVCC)
                                                                          &_decoderFormatDescription);

    if (status == noErr) {
        // Hardware decode requires kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange
        // or kCVPixelFormatType_420YpCbCr8Planar.
        NSDictionary *destinationPixelBufferAttributes = @{
            (id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange),
            (id)kCVPixelBufferOpenGLCompatibilityKey : @YES
        };

        VTDecompressionOutputCallbackRecord callBackRecord;
        callBackRecord.decompressionOutputCallback = didDecompress;
        callBackRecord.decompressionOutputRefCon = (__bridge void *)self;
        status = VTDecompressionSessionCreate(kCFAllocatorDefault,
                                              _decoderFormatDescription,
                                              NULL,
                                              (__bridge CFDictionaryRef)destinationPixelBufferAttributes,
                                              &callBackRecord,
                                              &_deocderSession);
        VTSessionSetProperty(_deocderSession, kVTDecompressionPropertyKey_ThreadCount, (__bridge CFTypeRef)@(1));
        VTSessionSetProperty(_deocderSession, kVTDecompressionPropertyKey_RealTime, kCFBooleanTrue);
    } else {
        NSLog(@"IOS8VT: reset decoder session failed status=%d", (int)status);
    }

    return YES;
}

/// Wraps one AVCC-framed NAL unit in a CMSampleBuffer and decodes it synchronously.
/// @param frame AVCC data: 4-byte big-endian length followed by the NAL payload.
/// @param frameSize total size of `frame` in bytes.
/// @return the decoded pixel buffer handed back through the didDecompress
///         callback's destination pointer, or NULL on failure.
- (CVPixelBufferRef)decode:(uint8_t *)frame withSize:(uint32_t)frameSize
{
    CVPixelBufferRef outputPixelBuffer = NULL;
    CMBlockBufferRef blockBuffer = NULL;

    // kCFAllocatorNull: the block buffer references `frame` without copying,
    // so `frame` must stay alive until decoding of this sample completes.
    OSStatus status = CMBlockBufferCreateWithMemoryBlock(NULL,
                                                         (void *)frame,
                                                         frameSize,
                                                         kCFAllocatorNull,
                                                         NULL,
                                                         0,
                                                         frameSize,
                                                         FALSE,
                                                         &blockBuffer);
    if (status == kCMBlockBufferNoErr) {
        CMSampleBufferRef sampleBuffer = NULL;
        const size_t sampleSizeArray[] = { frameSize };
        status = CMSampleBufferCreateReady(kCFAllocatorDefault,
                                           blockBuffer,
                                           _decoderFormatDescription,
                                           1, 0, NULL, 1, sampleSizeArray,
                                           &sampleBuffer);
        if (status == noErr && sampleBuffer) {
            VTDecodeFrameFlags flags = 0;  // 0 => synchronous decode
            VTDecodeInfoFlags flagOut = 0;
            OSStatus decodeStatus = VTDecompressionSessionDecodeFrame(_deocderSession,
                                                                      sampleBuffer,
                                                                      flags,
                                                                      &outputPixelBuffer,
                                                                      &flagOut);
            if (decodeStatus == kVTInvalidSessionErr) {
                NSLog(@"IOS8VT: Invalid session, reset decoder session");
                [self resetH264Decoder];
            } else if (decodeStatus == kVTVideoDecoderBadDataErr) {
                NSLog(@"IOS8VT: decode failed status=%d(Bad data)", (int)decodeStatus);
            } else if (decodeStatus != noErr) {
                NSLog(@"IOS8VT: decode failed status=%d", (int)decodeStatus);
            }
            CFRelease(sampleBuffer);
        }
        CFRelease(blockBuffer);
    }
    return outputPixelBuffer;
}

/// Tears down the current decompression session and creates a fresh one with
/// the existing format description (used after kVTInvalidSessionErr).
- (void)resetH264Decoder
{
    if (_deocderSession) {
        VTDecompressionSessionInvalidate(_deocderSession);
        CFRelease(_deocderSession);
        _deocderSession = NULL;
    }

    // kCVPixelFormatType_420YpCbCr8Planar is YUV420 (I420);
    // kCVPixelFormatType_420YpCbCr8BiPlanarFullRange is NV12.
    uint32_t v = kCVPixelFormatType_420YpCbCr8BiPlanarFullRange;
    CFNumberRef pixelFormat = CFNumberCreate(NULL, kCFNumberSInt32Type, &v);
    const void *keys[] = { kCVPixelBufferPixelFormatTypeKey };
    const void *values[] = { pixelFormat };
    // Use CFType callbacks so the dictionary retains/releases its contents
    // (the original passed NULL callbacks and leaked the CFNumber).
    CFDictionaryRef attrs = CFDictionaryCreate(NULL, keys, values, 1,
                                               &kCFTypeDictionaryKeyCallBacks,
                                               &kCFTypeDictionaryValueCallBacks);
    CFRelease(pixelFormat);  // now owned by `attrs`

    // Use self as refCon, consistent with initH264Decoder, so the didDecompress
    // callback can reach the decoder after a reset (the original passed NULL).
    VTDecompressionOutputCallbackRecord callBackRecord;
    callBackRecord.decompressionOutputCallback = didDecompress;
    callBackRecord.decompressionOutputRefCon = (__bridge void *)self;

    OSStatus status = VTDecompressionSessionCreate(kCFAllocatorSystemDefault,
                                                   _decoderFormatDescription,
                                                   NULL, attrs,
                                                   &callBackRecord,
                                                   &_deocderSession);
    if (status != noErr) {
        NSLog(@"IOS8VT: reset decoder session failed status=%d", (int)status);
    }
    CFRelease(attrs);
}

/// Entry point: consumes one Annex-B NAL unit (00 00 00 01 start code) and
/// dispatches it by type. The 4-byte start code is rewritten in place into a
/// big-endian length field (AVCC framing) before decoding.
/// Key frames must not lose data in transit (green screen otherwise);
/// B/P frames may be dropped, causing only stutter.
- (void)decodeNalu:(uint8_t *)frame size:(uint32_t)frameSize
{
    if (frame == NULL || frameSize <= 4) {
        return;  // not a valid start-code-prefixed NAL unit
    }

    int nalu_type = (frame[4] & 0x1F);
    uint32_t nalSize = (uint32_t)(frameSize - 4);
    uint8_t *pNalSize = (uint8_t *)(&nalSize);
    frame[0] = *(pNalSize + 3);
    frame[1] = *(pNalSize + 2);
    frame[2] = *(pNalSize + 1);
    frame[3] = *(pNalSize);

    switch (nalu_type) {
        case 0x05:
            // IDR (key frame)
            if ([self initH264Decoder]) {
                [self decode:frame withSize:frameSize];
            }
            break;
        case 0x07:
            // SPS: cache for the format description.
            if (_sps) {
                free(_sps);  // avoid leaking the previous copy on repeated SPS
            }
            _spsSize = frameSize - 4;
            _sps = malloc(_spsSize);
            memcpy(_sps, &frame[4], _spsSize);
            break;
        case 0x08:
        {
            // PPS: cache for the format description.
            if (_pps) {
                free(_pps);  // avoid leaking the previous copy on repeated PPS
            }
            _ppsSize = frameSize - 4;
            _pps = malloc(_ppsSize);
            memcpy(_pps, &frame[4], _ppsSize);
            break;
        }
        default:
        {
            // B/P and other frame types.
            if ([self initH264Decoder]) {
                [self decode:frame withSize:frameSize];
            }
            break;
        }
    }
}
@end
--------------------------------------------------------------------------------
/ReplyKitDemo-Socket/ReplyKitDemo/H264EnCode编码/VideoH264EnCode.h:
--------------------------------------------------------------------------------
//
// Created by summerxx on 2022/12/30.
//

#import <Foundation/Foundation.h>
#import <AVFoundation/AVFoundation.h>
#import <VideoToolbox/VideoToolbox.h>
// NOTE(review): the angle-bracket framework imports above were stripped from
// this dump (all <...> text was lost); restored from usage — confirm against
// the original file.

NS_ASSUME_NONNULL_BEGIN

@interface VideoH264EnCode : NSObject

/// Hardware-encodes one raw frame to H.264.
/// - Parameters:
///   - sampleBuffer: one raw captured frame (CMSampleBufferRef)
///   - h264DataBlock: called (possibly multiple times per frame) with
///     Annex-B data: a 00000001 start code followed by one NAL unit
- (void)encodeSampleBuffer:(CMSampleBufferRef)sampleBuffer
             H264DataBlock:(void (^)(NSData *data))h264DataBlock;

/// Flushes pending frames and tears down the encoder session.
- (void)endEncode;

@end

NS_ASSUME_NONNULL_END
--------------------------------------------------------------------------------
/ReplyKitDemo-Socket/ReplyKitDemo/H264EnCode编码/VideoH264EnCode.m:
--------------------------------------------------------------------------------
//
// Created by summerxx on 2022/12/30.
//

#import "VideoH264EnCode.h"
#import <VideoToolbox/VideoToolbox.h>
#import <CoreMedia/CoreMedia.h>
// NOTE(review): the two framework imports above were stripped from this dump;
// restored from usage — confirm against the original file.

@interface VideoH264EnCode ()

// Compression session (owned; released in -endEncode / -dealloc).
@property (nonatomic, assign) VTCompressionSessionRef compressionSession;

// Monotonic frame counter used as the presentation timestamp.
@property (nonatomic, assign) NSInteger frameID;

// Callback invoked with each Annex-B NAL unit produced.
@property (nonatomic, copy) void (^h264DataBlock)(NSData *data);

@end

@implementation VideoH264EnCode

// Encodes one captured raw frame (sampleBuffer) to H.264.
- (void)encodeSampleBuffer:(CMSampleBufferRef)sampleBuffer H264DataBlock:(void (^)(NSData * _Nonnull))h264DataBlock
{
    if (!self.compressionSession) {
        return;
    }
    // 1. Keep the callback block.
    self.h264DataBlock = h264DataBlock;

    // 2. Extract the image buffer from the sample buffer.
    CVImageBufferRef imageBuffer = (CVImageBufferRef)CMSampleBufferGetImageBuffer(sampleBuffer);

    // 3. Derive a presentation timestamp from the frame counter.
    CMTime presentationTimeStamp = CMTimeMake(self.frameID++, 1000);

    VTEncodeInfoFlags flags;

    // 4. Encode the frame; didCompressH264 is invoked on completion.
    OSStatus statusCode = VTCompressionSessionEncodeFrame(self.compressionSession,
                                                          imageBuffer,
                                                          presentationTimeStamp,
                                                          kCMTimeInvalid,
                                                          NULL,
                                                          (__bridge void * _Nullable)(self),
                                                          &flags);
    if (statusCode != noErr) {
        NSLog(@"H264: VTCompressionSessionEncodeFrame failed with %d", (int)statusCode);
        // Tear the failed session down before recreating it; the original left
        // this release commented out and leaked the old session on every retry.
        VTCompressionSessionInvalidate(self.compressionSession);
        CFRelease(self.compressionSession);
        self.compressionSession = NULL;
        [self setupVideoSession];
        return;
    }
}

// Flushes pending frames and destroys the session.
- (void)endEncode
{
    if (!self.compressionSession) {
        return;  // guard: CFRelease(NULL) would crash on a double call
    }
    VTCompressionSessionCompleteFrames(self.compressionSession, kCMTimeInvalid);
    VTCompressionSessionInvalidate(self.compressionSession);
    CFRelease(self.compressionSession);
    self.compressionSession = NULL;
}

- (instancetype)init
{
    if (self = [super init]) {
        // Create the compression session up front.
        [self setupVideoSession];
    }
    return self;
}

// Creates and configures the VTCompressionSession.
- (void)setupVideoSession
{
    // 1. Reset the frame counter.
    self.frameID = 0;

    // 2. Recording dimensions; adjust as needed.
    int width = 720;
    int height = 1280;

    // 3. Create the compression session that encodes the frames.
    OSStatus status = VTCompressionSessionCreate(NULL,                    // default allocator
                                                 width,                   // frame width in pixels
                                                 height,                  // frame height in pixels
                                                 kCMVideoCodecType_H264,  // encode as H.264
                                                 NULL,                    // let VideoToolbox pick the encoder
                                                 NULL,                    // no source pixel buffer attributes
                                                 NULL,                    // default compressed-data allocator
                                                 didCompressH264,         // per-frame completion callback
                                                 (__bridge void *)(self), // outputCallbackRefCon
                                                 &_compressionSession);
    if (status != noErr) {
        NSLog(@"H264: session 创建失败");
        return;
    }

    // 4. Real-time output (required for live streaming to avoid latency).
    VTSessionSetProperty(_compressionSession, kVTCompressionPropertyKey_RealTime, kCFBooleanTrue);
    VTSessionSetProperty(_compressionSession, kVTCompressionPropertyKey_ProfileLevel, kVTProfileLevel_H264_Baseline_AutoLevel);

    // 5. Key-frame (GOP) interval.
    int frameInterval = 60;
    CFNumberRef frameIntervalRef = CFNumberCreate(kCFAllocatorDefault, kCFNumberIntType, &frameInterval);
    VTSessionSetProperty(self.compressionSession, kVTCompressionPropertyKey_MaxKeyFrameInterval, frameIntervalRef);
    CFRelease(frameIntervalRef);  // the session holds its own reference; the original leaked this

    // 6. Expected frame rate (too low causes visible stutter).
    int fps = 24;
    CFNumberRef fpsRef = CFNumberCreate(kCFAllocatorDefault, kCFNumberIntType, &fps);
    VTSessionSetProperty(self.compressionSession, kVTCompressionPropertyKey_ExpectedFrameRate, fpsRef);
    CFRelease(fpsRef);  // was leaked

    // 7. Average bitrate in bit/s.
    // NOTE(review): width*height*3*4*8 ≈ 88 Mbps, which is far above typical
    // live-stream bitrates — confirm this constant is intentional.
    int bitRate = width * height * 3 * 4 * 8;
    CFNumberRef bitRateRef = CFNumberCreate(kCFAllocatorDefault, kCFNumberSInt32Type, &bitRate);
    VTSessionSetProperty(self.compressionSession, kVTCompressionPropertyKey_AverageBitRate, bitRateRef);
    CFRelease(bitRateRef);  // was leaked

    // 8. Hard data-rate limit: bytes per window, window length in seconds.
    NSArray *limit = @[@(bitRate * 1.5 / 8), @(1)];
    VTSessionSetProperty(self.compressionSession, kVTCompressionPropertyKey_DataRateLimits, (__bridge CFArrayRef)limit);

    // 9. Ready to encode.
    VTCompressionSessionPrepareToEncodeFrames(_compressionSession);
}

// VideoToolbox completion callback; runs on an encoder-owned thread.
void didCompressH264(void *outputCallbackRefCon,
                     void *sourceFrameRefCon,
                     OSStatus status,
                     VTEncodeInfoFlags infoFlags,
                     CMSampleBufferRef sampleBuffer)
{
    // 1. Bail out on encoder error or not-yet-ready data.
    if (status != noErr) {
        return;
    }
    if (!CMSampleBufferDataIsReady(sampleBuffer)) {
        NSLog(@"didCompressH264 data is not ready ");
        return;
    }

    // 2. Recover the encoder object from the refCon.
    VideoH264EnCode *encoder = (__bridge VideoH264EnCode *)outputCallbackRefCon;

    // 3. A key frame has no kCMSampleAttachmentKey_NotSync attachment.
    bool isKeyframe = !CFDictionaryContainsKey((CFDictionaryRef)CFArrayGetValueAtIndex(CMSampleBufferGetSampleAttachmentsArray(sampleBuffer, true), 0), kCMSampleAttachmentKey_NotSync);

    // On key frames, extract SPS/PPS from the format description first.
    if (isKeyframe) {
        CMFormatDescriptionRef format = CMSampleBufferGetFormatDescription(sampleBuffer);

        // SPS (parameter set index 0).
        size_t sparameterSetSize, sparameterSetCount;
        const uint8_t *sparameterSet;
        CMVideoFormatDescriptionGetH264ParameterSetAtIndex(format, 0, &sparameterSet, &sparameterSetSize, &sparameterSetCount, 0);

        // PPS (parameter set index 1).
        size_t pparameterSetSize, pparameterSetCount;
        const uint8_t *pparameterSet;
        CMVideoFormatDescriptionGetH264ParameterSetAtIndex(format, 1, &pparameterSet, &pparameterSetSize, &pparameterSetCount, 0);

        NSData *sps = [NSData dataWithBytes:sparameterSet length:sparameterSetSize];
        NSData *pps = [NSData dataWithBytes:pparameterSet length:pparameterSetSize];
        [encoder gotSpsPps:sps pps:pps];
    }

    // 4. Walk the AVCC data: each NAL unit is prefixed with a 4-byte
    //    big-endian length, not a 00000001 start code.
    CMBlockBufferRef dataBuffer = CMSampleBufferGetDataBuffer(sampleBuffer);
    if (dataBuffer == NULL) {
        return;  // defensive: no payload to emit
    }
    size_t length, totalLength;
    char *dataPointer;
    OSStatus statusCodeRet = CMBlockBufferGetDataPointer(dataBuffer, 0, &length, &totalLength, &dataPointer);
    if (statusCodeRet == noErr) {
        size_t bufferOffset = 0;
        static const int AVCCHeaderLength = 4;

        while (bufferOffset < totalLength - AVCCHeaderLength) {
            uint32_t NALUnitLength = 0;
            // Read the NAL unit length.
            memcpy(&NALUnitLength, dataPointer + bufferOffset, AVCCHeaderLength);
            // Convert from big-endian to host byte order.
            NALUnitLength = CFSwapInt32BigToHost(NALUnitLength);

            NSData *data = [[NSData alloc] initWithBytes:(dataPointer + bufferOffset + AVCCHeaderLength) length:NALUnitLength];
            [encoder gotEncodedData:data isKeyFrame:isKeyframe];

            // Move to the next NAL unit in the block buffer.
            bufferOffset += AVCCHeaderLength + NALUnitLength;
        }
    }
}

// Emits SPS and PPS, each prefixed with the 00000001 Annex-B start code.
- (void)gotSpsPps:(NSData *)sps pps:(NSData *)pps
{
    const char bytes[] = "\x00\x00\x00\x01";
    size_t length = (sizeof bytes) - 1;  // drop the string literal's trailing '\0'
    NSData *byteHeader = [NSData dataWithBytes:bytes length:length];

    NSMutableData *h264Data = [[NSMutableData alloc] init];
    [h264Data appendData:byteHeader];
    [h264Data appendData:sps];
    if (self.h264DataBlock) {
        self.h264DataBlock(h264Data);
    }

    // setLength:0 alone empties the buffer; the original's preceding
    // resetBytesInRange: was redundant.
    [h264Data setLength:0];
    [h264Data appendData:byteHeader];
    [h264Data appendData:pps];
    if (self.h264DataBlock) {
        self.h264DataBlock(h264Data);
    }
}

// Emits one NAL unit prefixed with the 00000001 Annex-B start code.
- (void)gotEncodedData:(NSData *)data isKeyFrame:(BOOL)isKeyFrame
{
    const char bytes[] = "\x00\x00\x00\x01";
    size_t length = (sizeof bytes) - 1;  // drop the implicit trailing '\0'
    NSData *byteHeader = [NSData dataWithBytes:bytes length:length];

    NSMutableData *h264Data = [[NSMutableData alloc] init];
    [h264Data appendData:byteHeader];
    [h264Data appendData:data];

    if (self.h264DataBlock) {
        self.h264DataBlock(h264Data);
    }
}

// Releases the encoder session if -endEncode was never called.
- (void)dealloc
{
    if (self.compressionSession) {
        VTCompressionSessionInvalidate(self.compressionSession);
        CFRelease(self.compressionSession);
        self.compressionSession = NULL;
    }
}

@end

--------------------------------------------------------------------------------
/ReplyKitDemo-Socket/ReplyKitDemo/Info.plist:
--------------------------------------------------------------------------------
UIApplicationSceneManifest
UIApplicationSupportsMultipleScenes
UISceneConfigurations
UIWindowSceneSessionRoleApplication
UISceneConfigurationName
Default
Configuration
UISceneDelegateClassName
SceneDelegate
UISceneStoryboardFile
Main

UIBackgroundModes
audio
voip
--------------------------------------------------------------------------------
/ReplyKitDemo-Socket/ReplyKitDemo/SceneDelegate.h:
--------------------------------------------------------------------------------
//
//  SceneDelegate.h
//  ReplyKitDemo
//
//  Created by summerxx on 2022/12/29.
//

#import <UIKit/UIKit.h>
// NOTE(review): the framework import above and the <UIWindowSceneDelegate>
// protocol below were stripped from this dump (all <...> text was lost);
// restored from the standard Xcode template — confirm against the original.

@interface SceneDelegate : UIResponder <UIWindowSceneDelegate>

@property (strong, nonatomic) UIWindow *window;

@end

--------------------------------------------------------------------------------
/ReplyKitDemo-Socket/ReplyKitDemo/SceneDelegate.m:
--------------------------------------------------------------------------------
//
//  SceneDelegate.m
//  ReplyKitDemo
//
//  Created by summerxx on 2022/12/29.
//

#import "SceneDelegate.h"

@interface SceneDelegate ()

@end

@implementation SceneDelegate

- (void)scene:(UIScene *)scene willConnectToSession:(UISceneSession *)session options:(UISceneConnectionOptions *)connectionOptions {
    // Use this method to optionally configure and attach the UIWindow `window` to the provided UIWindowScene `scene`.
    // If using a storyboard, the `window` property will automatically be initialized and attached to the scene.
    // This delegate does not imply the connecting scene or session are new (see `application:configurationForConnectingSceneSession` instead).
}

- (void)sceneDidDisconnect:(UIScene *)scene {
    // Called as the scene is being released by the system.
    // This occurs shortly after the scene enters the background, or when its session is discarded.
    // Release any resources associated with this scene that can be re-created the next time the scene connects.
    // The scene may re-connect later, as its session was not necessarily discarded (see `application:didDiscardSceneSessions` instead).
}

- (void)sceneDidBecomeActive:(UIScene *)scene {
    // Called when the scene has moved from an inactive state to an active state.
    // Use this method to restart any tasks that were paused (or not yet started) when the scene was inactive.
}

- (void)sceneWillResignActive:(UIScene *)scene {
    // Called when the scene will move from an active state to an inactive state.
    // This may occur due to temporary interruptions (ex. an incoming phone call).
}

- (void)sceneWillEnterForeground:(UIScene *)scene {
    // Called as the scene transitions from the background to the foreground.
    // Use this method to undo the changes made on entering the background.
}

- (void)sceneDidEnterBackground:(UIScene *)scene {
    // Called as the scene transitions from the foreground to the background.
    // Use this method to save data, release shared resources, and store enough scene-specific state information
    // to restore the scene back to its current state.
}

@end
--------------------------------------------------------------------------------
/ReplyKitDemo-Socket/ReplyKitDemo/main.m:
--------------------------------------------------------------------------------
//
//  main.m
//  ReplyKitDemo
//
//  Created by summerxx on 2022/12/29.
//

#import <UIKit/UIKit.h>
#import "AppDelegate.h"

int main(int argc, char * argv[]) {
    NSString *appDelegateClassName;
    @autoreleasepool {
        // Setup code that might create autoreleased objects goes here.
        appDelegateClassName = NSStringFromClass([AppDelegate class]);
    }
    return UIApplicationMain(argc, argv, nil, appDelegateClassName);
}
--------------------------------------------------------------------------------
/ReplyKitDemo-Socket/ReplyKitDemo/屏幕共享演示/ViewController.h:
--------------------------------------------------------------------------------
//
//  ViewController.h
//  ReplyKitDemo
//
//  Created by summerxx on 2022/12/29.
//

#import <UIKit/UIKit.h>

@interface ViewController : UIViewController

@end

--------------------------------------------------------------------------------
/ReplyKitDemo-Socket/ReplyKitDemo/屏幕共享演示/ViewController.m:
--------------------------------------------------------------------------------
//
//  ViewController.m
//  screen-share-ios
//
//  Created by summerxx on 2022/12/28.
//

#import "ViewController.h"
#import <ReplayKit/ReplayKit.h>
#import "FIAgoraClientBufferSocketManager.h"
#import <AVFoundation/AVFoundation.h>
// NOTE(review): the two angle-bracket imports above were stripped from this
// dump; ReplayKit is required for RPSystemBroadcastPickerView — confirm the
// second import against the original file.
#import "CaptureViewController.h"
#import "VideoH264EnCode.h"
#import "VideoH264Decoder.h"
#import "VideoDisplayLayer.h"
#import "FJDeepSleepPreventer.h"
#import "FJDeepSleepPreventerPlus.h"

// NOTE(review): the protocol list after "ViewController ()" was stripped from
// the dump; this class assigns itself as h264Decoder.delegate and implements
// -decoder:didDecodingFrame:, so it presumably conforms to the decoder's
// delegate protocol — confirm its exact name.
@interface ViewController () <VideoH264DecoderDelegate>

@property (nonatomic, strong) RPSystemBroadcastPickerView *broadcastPickerView;

// H.264 encoder.
@property (nonatomic, strong) VideoH264EnCode *h264code;

// Decoding and playback.
@property (nonatomic, strong) VideoDisplayLayer *playLayer;
@property (nonatomic, strong) VideoH264Decoder *h264Decoder;

@property (nonatomic, assign) UIBackgroundTaskIdentifier backIden;

@end

@implementation ViewController

- (void)viewDidLoad
{
    [super viewDidLoad];  // call super before configuring the view (original set the title first)
    self.navigationItem.title = @"DEMO";
    self.view.backgroundColor = [UIColor orangeColor];
    [self setupSocket];
    [self setupDeCoder];
    [self setupSystemBroadcastPickerView];
[[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(didEnterBackGround) name:UIApplicationDidEnterBackgroundNotification object:nil];
    [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(willEnterForeground) name:UIApplicationWillEnterForegroundNotification object:nil];
}

- (void)dealloc
{
    // Not strictly required on iOS 9+, but removing observers explicitly keeps
    // teardown deterministic (the original never removed them).
    [[NSNotificationCenter defaultCenter] removeObserver:self];
}

// Stop the keep-alive audio hack when returning to the foreground.
- (void)willEnterForeground
{
    [[FJDeepSleepPreventerPlus sharedInstance] stop];
}

// Start the keep-alive audio hack so capture continues in the background.
- (void)didEnterBackGround
{
    [[FJDeepSleepPreventerPlus sharedInstance] start];
}

// Bring up the local socket that receives frames from the broadcast extension,
// and feed incoming sample buffers into the H.264 encoder.
- (void)setupSocket
{
    [[FIAgoraClientBufferSocketManager sharedManager] setupSocket];

    UILabel *label = [UILabel new];
    label.backgroundColor = UIColor.cyanColor;
    label.frame = CGRectMake(0, 100, [UIScreen mainScreen].bounds.size.width, 30);
    label.textColor = [UIColor blackColor];
    [self.view addSubview:label];

    // weakSelf breaks the retain cycle: self -> manager block -> self.
    __weak __typeof(self) weakSelf = self;
    [FIAgoraClientBufferSocketManager sharedManager].testBlock = ^(NSString *testText, CMSampleBufferRef sampleBuffer) {
        // UIKit must only be touched on the main thread.
        dispatch_async(dispatch_get_main_queue(), ^{
            [label setText:testText];
        });

        // Encode the received frame.
        [weakSelf.h264code encodeSampleBuffer:sampleBuffer H264DataBlock:^(NSData *data) {
            NSLog(@"%@", data);
            // [weakSelf didReadData:data];
        }];
    };
}

// System broadcast picker (iOS 12+) plus a button opening the in-app camera demo.
- (void)setupSystemBroadcastPickerView
{
    if (@available(iOS 12.0, *)) {
        self.broadcastPickerView = [[RPSystemBroadcastPickerView alloc] initWithFrame:CGRectMake(50, 200, 100, 100)];
        // Must match the broadcast extension's bundle identifier.
        self.broadcastPickerView.preferredExtension = @"summerxx.com.screen-share-ios.broadcast-extension";
        self.broadcastPickerView.backgroundColor = UIColor.cyanColor;
        self.broadcastPickerView.showsMicrophoneButton = NO;
        [self.view addSubview:self.broadcastPickerView];
    }

    UIButton *startButton = [UIButton buttonWithType:UIButtonTypeCustom];
    startButton.frame = CGRectMake(50, 310, 100, 100);
    startButton.backgroundColor = UIColor.cyanColor;
    [startButton setTitle:@"开启摄像头" forState:UIControlStateNormal];
    [startButton setTitleColor:UIColor.blackColor forState:UIControlStateNormal];
    [startButton addTarget:self action:@selector(startAction) forControlEvents:UIControlEventTouchUpInside];
    [self.view addSubview:startButton];
}

// Present the in-app camera capture demo.
- (void)startAction
{
    CaptureViewController *vc = [CaptureViewController new];
    [self presentViewController:vc animated:YES completion:nil];
}

#pragma mark - Encoding

// Lazily created H.264 encoder.
- (VideoH264EnCode *)h264code
{
    if (!_h264code) {
        _h264code = [[VideoH264EnCode alloc] init];
    }
    return _h264code;
}

#pragma mark - Decoding & playback
// (The original wrote "#pragma 解码以及播放操作…" — a bare #pragma without
// `mark`, which the compiler treats as an unknown pragma; `#pragma mark -`
// is the intended form.)

- (void)setupDeCoder
{
    // Create the decoder and register for decoded-frame callbacks.
    self.h264Decoder = [[VideoH264Decoder alloc] init];
    self.h264Decoder.delegate = self;
    [self setupDisplayLayer];
}

- (void)setupDisplayLayer
{
    self.playLayer = [[VideoDisplayLayer alloc] initWithFrame:CGRectMake(0, self.view.bounds.size.height - 300, self.view.bounds.size.width, 300)];
    self.playLayer.backgroundColor = self.view.backgroundColor.CGColor;
    [self.view.layer addSublayer:self.playLayer];
}

// Feed raw Annex-B data into the decoder.
- (void)didReadData:(NSData *)data
{
    [self.h264Decoder decodeNalu:(uint8_t *)[data bytes] size:(uint32_t)data.length];
}

// Decoder delegate: hand the decoded frame to the display layer.
- (void)decoder:(VideoH264Decoder *)decoder didDecodingFrame:(CVImageBufferRef)imageBuffer
{
    if (!imageBuffer) {
        return;
    }
    // NOTE(review): this release assumes the decoder hands over a +1 retained
    // buffer that remains valid until the async block runs — confirm the
    // ownership convention in VideoH264Decoder before changing it.
    dispatch_async(dispatch_get_main_queue(), ^{
        self.playLayer.pixelBuffer = imageBuffer;
        CVPixelBufferRelease(imageBuffer);
    });
}

@end
--------------------------------------------------------------------------------
/ReplyKitDemo-Socket/ReplyKitDemoTests/ReplyKitDemoTests.m:
--------------------------------------------------------------------------------
//
//  ReplyKitDemoTests.m
//  ReplyKitDemoTests
//
//  Created by summerxx on 2022/12/29.
//

#import <XCTest/XCTest.h>
// NOTE(review): the XCTest import was stripped from this dump (all <...>
// text was lost); restored from the Xcode template.

@interface ReplyKitDemoTests : XCTestCase

@end

@implementation ReplyKitDemoTests

- (void)setUp {
    // Put setup code here. This method is called before the invocation of each test method in the class.
}

- (void)tearDown {
    // Put teardown code here. This method is called after the invocation of each test method in the class.
}

- (void)testExample {
    // This is an example of a functional test case.
    // Use XCTAssert and related functions to verify your tests produce the correct results.
}

- (void)testPerformanceExample {
    // This is an example of a performance test case.
    [self measureBlock:^{
        // Put the code you want to measure the time of here.
    }];
}

@end
--------------------------------------------------------------------------------
/ReplyKitDemo-Socket/ReplyKitDemoUITests/ReplyKitDemoUITests.m:
--------------------------------------------------------------------------------
//
//  ReplyKitDemoUITests.m
//  ReplyKitDemoUITests
//
//  Created by summerxx on 2022/12/29.
//

#import <XCTest/XCTest.h>

@interface ReplyKitDemoUITests : XCTestCase

@end

@implementation ReplyKitDemoUITests

- (void)setUp {
    // In UI tests it is usually best to stop immediately when a failure occurs.
    self.continueAfterFailure = NO;

    // In UI tests it's important to set the initial state - such as interface orientation -
    // required for your tests before they run. The setUp method is a good place to do this.
}

- (void)tearDown {
    // Put teardown code here. This method is called after the invocation of each test method in the class.
}

- (void)testExample {
    // UI tests must launch the application that they test.
    XCUIApplication *app = [[XCUIApplication alloc] init];
    [app launch];

    // Use XCTAssert and related functions to verify your tests produce the correct results.
}

- (void)testLaunchPerformance {
    if (@available(macOS 10.15, iOS 13.0, tvOS 13.0, watchOS 7.0, *)) {
        // This measures how long it takes to launch your application.
        [self measureWithMetrics:@[[[XCTApplicationLaunchMetric alloc] init]] block:^{
            [[[XCUIApplication alloc] init] launch];
        }];
    }
}

@end
--------------------------------------------------------------------------------
/ReplyKitDemo-Socket/ReplyKitDemoUITests/ReplyKitDemoUITestsLaunchTests.m:
--------------------------------------------------------------------------------
//
//  ReplyKitDemoUITestsLaunchTests.m
//  ReplyKitDemoUITests
//
//  Created by summerxx on 2022/12/29.
//

#import <XCTest/XCTest.h>

@interface ReplyKitDemoUITestsLaunchTests : XCTestCase

@end

@implementation ReplyKitDemoUITestsLaunchTests

+ (BOOL)runsForEachTargetApplicationUIConfiguration {
    return YES;
}

- (void)setUp {
    self.continueAfterFailure = NO;
}

- (void)testLaunch {
    XCUIApplication *app = [[XCUIApplication alloc] init];
    [app launch];

    // Insert steps here to perform after app launch but before taking a screenshot,
    // such as logging into a test account or navigating somewhere in the app.

    XCTAttachment *attachment = [XCTAttachment attachmentWithScreenshot:XCUIScreen.mainScreen.screenshot];
    attachment.name = @"Launch Screen";
    attachment.lifetime = XCTAttachmentLifetimeKeepAlways;
    [self addAttachment:attachment];
}

@end
--------------------------------------------------------------------------------
/ReplyKitDemo-Socket/extension-demo/Info.plist:
--------------------------------------------------------------------------------
NSExtension
NSExtensionPointIdentifier
com.apple.broadcast-services-upload
NSExtensionPrincipalClass
SampleHandler
RPBroadcastProcessMode
RPBroadcastProcessModeSampleBuffer
--------------------------------------------------------------------------------
/ReplyKitDemo-Socket/extension-demo/SampleHandler.h:
--------------------------------------------------------------------------------
//
//  SampleHandler.h
//  extension-demo
//
//  Created by summerxx on 2022/12/29.
//

#import <ReplayKit/ReplayKit.h>
// NOTE(review): the ReplayKit import was stripped from this dump; restored
// from the Xcode broadcast-extension template.

@interface SampleHandler : RPBroadcastSampleHandler

@end
--------------------------------------------------------------------------------
/ReplyKitDemo-Socket/extension-demo/SampleHandler.m:
--------------------------------------------------------------------------------
//
//  SampleHandler.m
//  extension-demo
//
//  Created by summerxx on 2022/12/29.
//

#import "FIAgoraSampleHandlerSocketManager.h"
#import "SampleHandler.h"

@implementation SampleHandler

- (void)broadcastStartedWithSetupInfo:(NSDictionary *)setupInfo {
    // User has requested to start the broadcast. Setup info from the UI extension
    // can be supplied but is optional. Open the socket to the host app.
    [[FIAgoraSampleHandlerSocketManager sharedManager] setUpSocket];
}

- (void)broadcastPaused {
    // User has requested to pause the broadcast. Samples will stop being delivered.
}

- (void)broadcastResumed {
    // User has requested to resume the broadcast. Sample delivery will resume.
}

- (void)broadcastFinished {
    // User has requested to finish the broadcast.
    // NOTE(review): the socket opened in -broadcastStartedWithSetupInfo: is
    // never torn down here — confirm whether the manager needs an explicit
    // close on finish.
}

- (void)processSampleBuffer:(CMSampleBufferRef)sampleBuffer withType:(RPSampleBufferType)sampleBufferType {

    switch (sampleBufferType) {
        case RPSampleBufferTypeVideo:
            // Forward video frames to the host app over the local socket.
            [[FIAgoraSampleHandlerSocketManager sharedManager] sendVideoBufferToHostApp:sampleBuffer];
            break;
        case RPSampleBufferTypeAudioApp:
            // Handle audio sample buffer for app audio.
            break;
        case RPSampleBufferTypeAudioMic:
            // Handle audio sample buffer for mic audio.
            break;

        default:
            break;
    }
}

@end
--------------------------------------------------------------------------------