├── .gitignore
├── LFLiveKitDemo
│   ├── .gitignore
│   ├── LFLiveKitDemo
│   │   ├── images
│   │   │   ├── camra_beauty@2x.png
│   │   │   ├── camra_beauty@3x.png
│   │   │   ├── camra_preview@2x.png
│   │   │   ├── camra_preview@3x.png
│   │   │   ├── close_preview@2x.png
│   │   │   ├── close_preview@3x.png
│   │   │   ├── camra_beauty_close@2x.png
│   │   │   └── camra_beauty_close@3x.png
│   │   ├── Podfile
│   │   ├── LFLivePreview.h
│   │   ├── ViewController.h
│   │   ├── AppDelegate.h
│   │   ├── main.m
│   │   ├── ViewController.m
│   │   ├── Assets.xcassets
│   │   │   └── AppIcon.appiconset
│   │   │       └── Contents.json
│   │   ├── Base.lproj
│   │   │   ├── Main.storyboard
│   │   │   └── LaunchScreen.storyboard
│   │   ├── Info.plist
│   │   ├── AppDelegate.m
│   │   └── category
│   │       ├── UIControl+YYAdd.h
│   │       ├── UIControl+YYAdd.m
│   │       └── UIView+YYAdd.h
│   ├── Podfile
│   ├── LFLiveKitDemo.xcworkspace
│   │   ├── xcuserdata
│   │   │   └── admin.xcuserdatad
│   │   │       └── UserInterfaceState.xcuserstate
│   │   └── contents.xcworkspacedata
│   └── LFLiveKitDemo.xcodeproj
│       ├── project.xcworkspace
│       │   ├── contents.xcworkspacedata
│       │   └── xcuserdata
│       │       └── admin.xcuserdatad
│       │           └── UserInterfaceState.xcuserstate
│       └── xcuserdata
│           └── admin.xcuserdatad
│               └── xcschemes
│                   ├── xcschememanagement.plist
│                   └── LFLiveKitDemo.xcscheme
├── LFLiveKit
│   ├── filter
│   │   ├── LFGPUImageEmptyFilter.h
│   │   ├── LFGPUImageBeautyFilter.h
│   │   ├── LFGPUImageEmptyFilter.m
│   │   └── LFGPUImageBeautyFilter.m
│   ├── objects
│   │   ├── LFFrame.m
│   │   ├── LFAudioFrame.m
│   │   ├── LFVideoFrame.m
│   │   ├── LFLiveDebug.m
│   │   ├── LFLiveStreamInfo.m
│   │   ├── LFAudioFrame.h
│   │   ├── LFVideoFrame.h
│   │   ├── LFFrame.h
│   │   ├── LFLiveDebug.h
│   │   └── LFLiveStreamInfo.h
│   ├── LFLiveKit.h
│   ├── upload
│   │   ├── NSMutableArray+LFAdd.h
│   │   ├── NSMutableArray+LFAdd.m
│   │   ├── LFStreamTcpSocket.h
│   │   ├── LFStreamRtmpSocket.h
│   │   ├── LFStreamingBuffer.h
│   │   ├── LFStreamSocket.h
│   │   └── LFStreamingBuffer.m
│   ├── packet
│   │   ├── LFStreamPackage.h
│   │   ├── LFFlvPackage.h
│   │   └── flv
│   │       ├── avc.h
│   │       ├── info.h
│   │       ├── types.c
│   │       ├── types.h
│   │       ├── flv.h
│   │       ├── amf.h
│   │       └── avc.c
│   ├── coder
│   │   ├── LFHardwareAudioEncoder.h
│   │   ├── LFHardwareVideoEncoder.h
│   │   ├── LFVideoEncoding.h
│   │   ├── LFAudioEncoding.h
│   │   ├── LFHardwareAudioEncoder.m
│   │   └── LFHardwareVideoEncoder.m
│   ├── Info.plist
│   ├── capture
│   │   ├── LFAudioCapture.h
│   │   ├── LFVideoCapture.h
│   │   ├── LFVideoCapture.m
│   │   └── LFAudioCapture.m
│   ├── configuration
│   │   ├── LFLiveAudioConfiguration.h
│   │   ├── LFLiveVideoConfiguration.h
│   │   ├── LFLiveAudioConfiguration.m
│   │   └── LFLiveVideoConfiguration.m
│   ├── LFLiveSession.h
│   └── LFLiveSession.m
├── LFLiveKit.xcworkspace
│   ├── xcuserdata
│   │   ├── admin.xcuserdatad
│   │   │   ├── UserInterfaceState.xcuserstate
│   │   │   └── xcdebugger
│   │   │       └── Breakpoints_v2.xcbkptlist
│   │   └── feng.xcuserdatad
│   │       └── UserInterfaceState.xcuserstate
│   └── contents.xcworkspacedata
├── LFLiveKit.xcodeproj
│   ├── project.xcworkspace
│   │   ├── contents.xcworkspacedata
│   │   └── xcuserdata
│   │       └── admin.xcuserdatad
│   │           └── UserInterfaceState.xcuserstate
│   ├── xcuserdata
│   │   ├── feng.xcuserdatad
│   │   │   └── xcschemes
│   │   │       └── xcschememanagement.plist
│   │   └── admin.xcuserdatad
│   │       └── xcschemes
│   │           └── xcschememanagement.plist
│   └── xcshareddata
│       └── xcschemes
│           └── LFLiveKit.xcscheme
├── Podfile
├── .travis.yml
├── LFLiveKitTests
│   ├── Info.plist
│   └── LFLiveKitTests.m
├── LFLiveKit.podspec
├── LICENSE
└── README.md
/.gitignore:
--------------------------------------------------------------------------------
1 |
2 | #CocoaPods
3 | Pods/
4 | Podfile.lock
--------------------------------------------------------------------------------
/LFLiveKitDemo/.gitignore:
--------------------------------------------------------------------------------
1 |
2 | #CocoaPods
3 | Pods/
4 | Podfile.lock
--------------------------------------------------------------------------------
/LFLiveKit/filter/LFGPUImageEmptyFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilter.h"
2 |
3 | @interface LFGPUImageEmptyFilter : GPUImageFilter
4 | {
5 | }
6 |
7 | @end
8 |
--------------------------------------------------------------------------------
/LFLiveKitDemo/LFLiveKitDemo/images/camra_beauty@2x.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/wuyongqiang123/LFLiveKit/HEAD/LFLiveKitDemo/LFLiveKitDemo/images/camra_beauty@2x.png
--------------------------------------------------------------------------------
/LFLiveKitDemo/LFLiveKitDemo/images/camra_beauty@3x.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/wuyongqiang123/LFLiveKit/HEAD/LFLiveKitDemo/LFLiveKitDemo/images/camra_beauty@3x.png
--------------------------------------------------------------------------------
/LFLiveKitDemo/LFLiveKitDemo/images/camra_preview@2x.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/wuyongqiang123/LFLiveKit/HEAD/LFLiveKitDemo/LFLiveKitDemo/images/camra_preview@2x.png
--------------------------------------------------------------------------------
/LFLiveKitDemo/LFLiveKitDemo/images/camra_preview@3x.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/wuyongqiang123/LFLiveKit/HEAD/LFLiveKitDemo/LFLiveKitDemo/images/camra_preview@3x.png
--------------------------------------------------------------------------------
/LFLiveKitDemo/LFLiveKitDemo/images/close_preview@2x.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/wuyongqiang123/LFLiveKit/HEAD/LFLiveKitDemo/LFLiveKitDemo/images/close_preview@2x.png
--------------------------------------------------------------------------------
/LFLiveKitDemo/LFLiveKitDemo/images/close_preview@3x.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/wuyongqiang123/LFLiveKit/HEAD/LFLiveKitDemo/LFLiveKitDemo/images/close_preview@3x.png
--------------------------------------------------------------------------------
/LFLiveKitDemo/Podfile:
--------------------------------------------------------------------------------
1 | source 'https://github.com/CocoaPods/Specs.git'
2 | platform :ios,'8.0'
3 |
4 | target "LFLiveKitDemo" do
5 |
6 | pod 'LFLiveKit', '~> 1.5.2'
7 |
8 | end
--------------------------------------------------------------------------------
/LFLiveKitDemo/LFLiveKitDemo/images/camra_beauty_close@2x.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/wuyongqiang123/LFLiveKit/HEAD/LFLiveKitDemo/LFLiveKitDemo/images/camra_beauty_close@2x.png
--------------------------------------------------------------------------------
/LFLiveKitDemo/LFLiveKitDemo/images/camra_beauty_close@3x.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/wuyongqiang123/LFLiveKit/HEAD/LFLiveKitDemo/LFLiveKitDemo/images/camra_beauty_close@3x.png
--------------------------------------------------------------------------------
/LFLiveKitDemo/LFLiveKitDemo/Podfile:
--------------------------------------------------------------------------------
1 | source 'https://github.com/CocoaPods/Specs.git'
2 | platform :ios,'8.0'
3 |
4 | target "LFLiveKitDemo" do
5 |
6 | pod 'LFLiveKit', '~> 1.5.2'
7 |
8 | end
--------------------------------------------------------------------------------
/LFLiveKit/filter/LFGPUImageBeautyFilter.h:
--------------------------------------------------------------------------------
1 | #import "GPUImageFilter.h"
2 |
3 | @interface LFGPUImageBeautyFilter : GPUImageFilter {
4 | }
5 |
6 | @property (nonatomic, assign) NSInteger beautyLevel;
7 |
8 | @end
9 |
--------------------------------------------------------------------------------
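
A minimal usage sketch for the beauty filter (not part of the repository; it assumes a standard GPUImage/LMGPUImage setup inside a view controller, and the beautyLevel value is illustrative since the header only declares an NSInteger):

    // Sketch (inside a view controller): camera -> beauty filter -> on-screen preview.
    GPUImageVideoCamera *camera =
        [[GPUImageVideoCamera alloc] initWithSessionPreset:AVCaptureSessionPreset1280x720
                                            cameraPosition:AVCaptureDevicePositionFront];
    camera.outputImageOrientation = UIInterfaceOrientationPortrait;

    LFGPUImageBeautyFilter *beautyFilter = [[LFGPUImageBeautyFilter alloc] init];
    beautyFilter.beautyLevel = 3;   // illustrative value

    GPUImageView *previewView = [[GPUImageView alloc] initWithFrame:self.view.bounds];
    [self.view addSubview:previewView];

    [camera addTarget:beautyFilter];        // camera output feeds the beauty filter
    [beautyFilter addTarget:previewView];   // filtered frames are rendered on screen
    [camera startCameraCapture];
    // Note: keep a strong reference to `camera` (e.g. a property) so it is not deallocated.
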
/LFLiveKit.xcworkspace/xcuserdata/admin.xcuserdatad/UserInterfaceState.xcuserstate:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/wuyongqiang123/LFLiveKit/HEAD/LFLiveKit.xcworkspace/xcuserdata/admin.xcuserdatad/UserInterfaceState.xcuserstate
--------------------------------------------------------------------------------
/LFLiveKit.xcworkspace/xcuserdata/feng.xcuserdatad/UserInterfaceState.xcuserstate:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/wuyongqiang123/LFLiveKit/HEAD/LFLiveKit.xcworkspace/xcuserdata/feng.xcuserdatad/UserInterfaceState.xcuserstate
--------------------------------------------------------------------------------
/LFLiveKit.xcodeproj/project.xcworkspace/contents.xcworkspacedata:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/Podfile:
--------------------------------------------------------------------------------
1 | source 'https://github.com/CocoaPods/Specs.git'
2 | platform :ios,'8.0'
3 |
4 | target "LFLiveKit" do
5 |
6 | pod 'CocoaAsyncSocket', '~> 7.4.1'
7 | pod 'pili-librtmp', '~> 1.0.2'
8 | pod 'LMGPUImage', '~> 0.1.9'
9 |
10 | end
--------------------------------------------------------------------------------
/LFLiveKit.xcodeproj/project.xcworkspace/xcuserdata/admin.xcuserdatad/UserInterfaceState.xcuserstate:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/wuyongqiang123/LFLiveKit/HEAD/LFLiveKit.xcodeproj/project.xcworkspace/xcuserdata/admin.xcuserdatad/UserInterfaceState.xcuserstate
--------------------------------------------------------------------------------
/LFLiveKit/objects/LFFrame.m:
--------------------------------------------------------------------------------
1 | //
2 | // LFFrame.m
3 | // LFLiveKit
4 | //
5 | // Created by 倾慕 on 16/5/2.
6 | // Copyright © 2016年 倾慕. All rights reserved.
7 | //
8 |
9 | #import "LFFrame.h"
10 |
11 | @implementation LFFrame
12 |
13 | @end
14 |
--------------------------------------------------------------------------------
/LFLiveKitDemo/LFLiveKitDemo.xcworkspace/xcuserdata/admin.xcuserdatad/UserInterfaceState.xcuserstate:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/wuyongqiang123/LFLiveKit/HEAD/LFLiveKitDemo/LFLiveKitDemo.xcworkspace/xcuserdata/admin.xcuserdatad/UserInterfaceState.xcuserstate
--------------------------------------------------------------------------------
/LFLiveKitDemo/LFLiveKitDemo.xcodeproj/project.xcworkspace/contents.xcworkspacedata:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/.travis.yml:
--------------------------------------------------------------------------------
1 | language: objective-c
2 | osx_image: xcode7
3 | xcode_workspace: LFLiveKit.xcworkspace
4 | xcode_scheme: LFLiveKit
5 |
6 | script:
7 | - xctool -workspace LFLiveKit.xcworkspace -scheme 'LFLiveKit' -configuration Release -sdk iphonesimulator -arch i386 build
--------------------------------------------------------------------------------
/LFLiveKit/objects/LFAudioFrame.m:
--------------------------------------------------------------------------------
1 | //
2 | // LFAudioFrame.m
3 | // LFLiveKit
4 | //
5 | // Created by 倾慕 on 16/5/2.
6 | // Copyright © 2016年 倾慕. All rights reserved.
7 | //
8 |
9 | #import "LFAudioFrame.h"
10 |
11 | @implementation LFAudioFrame
12 |
13 | @end
14 |
--------------------------------------------------------------------------------
/LFLiveKit/objects/LFVideoFrame.m:
--------------------------------------------------------------------------------
1 | //
2 | // LFVideoFrame.m
3 | // LFLiveKit
4 | //
5 | // Created by 倾慕 on 16/5/2.
6 | // Copyright © 2016年 倾慕. All rights reserved.
7 | //
8 |
9 | #import "LFVideoFrame.h"
10 |
11 | @implementation LFVideoFrame
12 |
13 | @end
14 |
--------------------------------------------------------------------------------
/LFLiveKit/objects/LFLiveDebug.m:
--------------------------------------------------------------------------------
1 | //
2 | // LFLiveDebug.m
3 | // LaiFeng
4 | //
5 | // Created by admin on 16/5/19.
6 | // Copyright © 2016年 live Interactive. All rights reserved.
7 | //
8 |
9 | #import "LFLiveDebug.h"
10 |
11 | @implementation LFLiveDebug
12 |
13 |
14 | @end
15 |
--------------------------------------------------------------------------------
/LFLiveKit/objects/LFLiveStreamInfo.m:
--------------------------------------------------------------------------------
1 | //
2 | // LFLiveStreamInfo.m
3 | // LFLiveKit
4 | //
5 | // Created by 倾慕 on 16/5/2.
6 | // Copyright © 2016年 倾慕. All rights reserved.
7 | //
8 |
9 | #import "LFLiveStreamInfo.h"
10 |
11 | @implementation LFLiveStreamInfo
12 |
13 | @end
14 |
--------------------------------------------------------------------------------
/LFLiveKitDemo/LFLiveKitDemo.xcodeproj/project.xcworkspace/xcuserdata/admin.xcuserdatad/UserInterfaceState.xcuserstate:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/wuyongqiang123/LFLiveKit/HEAD/LFLiveKitDemo/LFLiveKitDemo.xcodeproj/project.xcworkspace/xcuserdata/admin.xcuserdatad/UserInterfaceState.xcuserstate
--------------------------------------------------------------------------------
/LFLiveKitDemo/LFLiveKitDemo/LFLivePreview.h:
--------------------------------------------------------------------------------
1 | //
2 | // LFLivePreview.h
3 | // LFLiveKit
4 | //
5 | // Created by 倾慕 on 16/5/2.
6 | // Copyright © 2016年 live Interactive. All rights reserved.
7 | //
8 |
 9 | #import <UIKit/UIKit.h>
10 |
11 | @interface LFLivePreview : UIView
12 |
13 |
14 | @end
15 |
--------------------------------------------------------------------------------
/LFLiveKit.xcworkspace/contents.xcworkspacedata:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/LFLiveKitDemo/LFLiveKitDemo/ViewController.h:
--------------------------------------------------------------------------------
1 | //
2 | // ViewController.h
3 | // LFLiveKitDemo
4 | //
5 | // Created by admin on 16/6/8.
6 | // Copyright © 2016年 admin. All rights reserved.
7 | //
8 |
 9 | #import <UIKit/UIKit.h>
10 |
11 | @interface ViewController : UIViewController
12 |
13 |
14 | @end
15 |
16 |
--------------------------------------------------------------------------------
/LFLiveKitDemo/LFLiveKitDemo.xcworkspace/contents.xcworkspacedata:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/LFLiveKit/objects/LFAudioFrame.h:
--------------------------------------------------------------------------------
1 | //
2 | // LFAudioFrame.h
3 | // LFLiveKit
4 | //
5 | // Created by 倾慕 on 16/5/2.
6 | // Copyright © 2016年 倾慕. All rights reserved.
7 | //
8 |
9 | #import "LFFrame.h"
10 |
11 | @interface LFAudioFrame : LFFrame
12 |
13 | /// AAC header used for FLV packaging
14 | @property (nonatomic, strong) NSData *audioInfo;
15 |
16 | @end
17 |
--------------------------------------------------------------------------------
/LFLiveKitDemo/LFLiveKitDemo/AppDelegate.h:
--------------------------------------------------------------------------------
1 | //
2 | // AppDelegate.h
3 | // LFLiveKitDemo
4 | //
5 | // Created by admin on 16/6/8.
6 | // Copyright © 2016年 admin. All rights reserved.
7 | //
8 |
 9 | #import <UIKit/UIKit.h>
10 |
11 | @interface AppDelegate : UIResponder <UIApplicationDelegate>
12 |
13 | @property (strong, nonatomic) UIWindow *window;
14 |
15 |
16 | @end
17 |
18 |
--------------------------------------------------------------------------------
/LFLiveKit/LFLiveKit.h:
--------------------------------------------------------------------------------
1 | //
2 | // LFLiveKit.h
3 | // LFLiveKit
4 | //
5 | // Created by admin on 16/5/24.
6 | // Copyright © 2016年 admin. All rights reserved.
7 | //
8 |
9 | #import "LFLiveSession.h"
10 | #import "LFLiveAudioConfiguration.h"
11 | #import "LFLiveVideoConfiguration.h"
12 | #import "LFAudioFrame.h"
13 | #import "LFFrame.h"
14 | #import "LFLiveStreamInfo.h"
15 | #import "LFVideoFrame.h"
16 |
--------------------------------------------------------------------------------
/LFLiveKit/objects/LFVideoFrame.h:
--------------------------------------------------------------------------------
1 | //
2 | // LFVideoFrame.h
3 | // LFLiveKit
4 | //
5 | // Created by 倾慕 on 16/5/2.
6 | // Copyright © 2016年 倾慕. All rights reserved.
7 | //
8 |
9 | #import "LFFrame.h"
10 |
11 | @interface LFVideoFrame : LFFrame
12 |
13 | @property (nonatomic, assign) BOOL isKeyFrame;
14 | @property (nonatomic, strong) NSData *sps;
15 | @property (nonatomic, strong) NSData *pps;
16 |
17 | @end
18 |
--------------------------------------------------------------------------------
/LFLiveKitDemo/LFLiveKitDemo/main.m:
--------------------------------------------------------------------------------
1 | //
2 | // main.m
3 | // LFLiveKitDemo
4 | //
5 | // Created by admin on 16/6/8.
6 | // Copyright © 2016年 admin. All rights reserved.
7 | //
8 |
 9 | #import <UIKit/UIKit.h>
10 | #import "AppDelegate.h"
11 |
12 | int main(int argc, char * argv[]) {
13 | @autoreleasepool {
14 | return UIApplicationMain(argc, argv, nil, NSStringFromClass([AppDelegate class]));
15 | }
16 | }
17 |
--------------------------------------------------------------------------------
/LFLiveKit/objects/LFFrame.h:
--------------------------------------------------------------------------------
1 | //
2 | // LFFrame.h
3 | // LFLiveKit
4 | //
5 | // Created by 倾慕 on 16/5/2.
6 | // Copyright © 2016年 倾慕. All rights reserved.
7 | //
8 |
 9 | #import <Foundation/Foundation.h>
10 |
11 | @interface LFFrame : NSObject
12 |
13 | @property (nonatomic, assign) uint64_t timestamp;
14 | @property (nonatomic, strong) NSData *data;
15 | ///< FLV or RTMP packet header
16 | @property (nonatomic, strong) NSData *header;
17 |
18 | @end
19 |
--------------------------------------------------------------------------------
/LFLiveKit/upload/NSMutableArray+LFAdd.h:
--------------------------------------------------------------------------------
1 | //
2 | // NSMutableArray+LFAdd.h
3 | // YYKit
4 | //
5 | // Created by admin on 16/5/20.
6 | // Copyright © 2016年 倾慕. All rights reserved.
7 | //
8 |
 9 | #import <Foundation/Foundation.h>
10 |
11 | @interface NSMutableArray (YYAdd)
12 |
13 | /**
14 | Removes and returns the object with the lowest-valued index in the array.
15 | If the array is empty, it just returns nil.
16 |
17 | @return The first object, or nil.
18 | */
19 | - (nullable id)lfPopFirstObject;
20 |
21 | @end
22 |
--------------------------------------------------------------------------------
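
A minimal sketch of how the category is used as a FIFO queue (not part of the repository; the string objects stand in for LFFrame instances):

    #import "NSMutableArray+LFAdd.h"

    NSMutableArray *queue = [NSMutableArray array];
    [queue addObject:@"frame-1"];
    [queue addObject:@"frame-2"];

    id first  = [queue lfPopFirstObject];   // returns "frame-1" and removes it
    id second = [queue lfPopFirstObject];   // returns "frame-2"
    id empty  = [queue lfPopFirstObject];   // queue is now empty, returns nil
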
/LFLiveKit.xcodeproj/xcuserdata/feng.xcuserdatad/xcschemes/xcschememanagement.plist:
--------------------------------------------------------------------------------
 1 | <?xml version="1.0" encoding="UTF-8"?>
 2 | <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
 3 | <plist version="1.0">
 4 | <dict>
 5 |     <key>SuppressBuildableAutocreation</key>
 6 |     <dict>
 7 |         <key>84001F891D0015D10026C63F</key>
 8 |         <dict>
 9 |             <key>primary</key>
10 |             <true/>
11 |         </dict>
12 |         <key>84001F931D0015D10026C63F</key>
13 |         <dict>
14 |             <key>primary</key>
15 |             <true/>
16 |         </dict>
17 |     </dict>
18 | </dict>
19 | </plist>
20 |
--------------------------------------------------------------------------------
/LFLiveKit/packet/LFStreamPackage.h:
--------------------------------------------------------------------------------
1 | //
2 | // LFStreamPackage.h
3 | // LFLiveKit
4 | //
5 | // Created by 倾慕 on 16/5/2.
6 | // Copyright © 2016年 倾慕. All rights reserved.
7 | //
8 |
 9 | #import <Foundation/Foundation.h>
10 | #import <UIKit/UIKit.h>
11 | #import "LFAudioFrame.h"
12 | #import "LFVideoFrame.h"
13 |
14 | /// Abstract interface for the stream packager
15 | @protocol LFStreamPackage
16 | @required
17 | - (nullable instancetype)initWithVideoSize:(CGSize)videoSize;
18 | - (nullable NSData*)aaCPacket:(nullable LFAudioFrame*)audioFrame;
19 | - (nullable NSData*)h264Packet:(nullable LFVideoFrame*)videoFrame;
20 | @end
21 |
22 |
--------------------------------------------------------------------------------
/LFLiveKit/upload/NSMutableArray+LFAdd.m:
--------------------------------------------------------------------------------
1 | //
2 | // NSMutableArray+LFAdd.m
3 | // YYKit
4 | //
5 | // Created by admin on 16/5/20.
6 | // Copyright © 2016年 倾慕. All rights reserved.
7 | //
8 |
9 | #import "NSMutableArray+LFAdd.h"
10 |
11 | @implementation NSMutableArray (YYAdd)
12 |
13 | - (void)lfRemoveFirstObject {
14 | if (self.count) {
15 | [self removeObjectAtIndex:0];
16 | }
17 | }
18 |
19 | - (id)lfPopFirstObject {
20 | id obj = nil;
21 | if (self.count) {
22 | obj = self.firstObject;
23 | [self lfRemoveFirstObject];
24 | }
25 | return obj;
26 | }
27 |
28 | @end
29 |
--------------------------------------------------------------------------------
/LFLiveKit/packet/LFFlvPackage.h:
--------------------------------------------------------------------------------
1 | //
2 | // LFFlvPackage.h
3 | // LFLiveKit
4 | //
5 | // Created by 倾慕 on 16/5/2.
6 | // Copyright © 2016年 倾慕. All rights reserved.
7 | //
8 |
9 | #import "LFStreamPackage.h"
10 |
11 | @interface LFFlvPackage : NSObject <LFStreamPackage>
12 |
13 | #pragma mark - Initializer
14 | ///=============================================================================
15 | /// @name Initializer
16 | ///=============================================================================
17 | - (nullable instancetype)init UNAVAILABLE_ATTRIBUTE;
18 | + (nullable instancetype)new UNAVAILABLE_ATTRIBUTE;
19 |
20 | @end
21 |
--------------------------------------------------------------------------------
/LFLiveKit/upload/LFStreamTcpSocket.h:
--------------------------------------------------------------------------------
1 | //
2 | // LFStreamTcpSocket.h
3 | // LFLiveKit
4 | //
5 | // Created by admin on 16/5/3.
6 | // Copyright © 2016年 倾慕. All rights reserved.
7 | //
8 |
9 | #import "LFStreamSocket.h"
10 |
11 | @interface LFStreamTcpSocket : NSObject <LFStreamSocket>
12 | #pragma mark - Initializer
13 | ///=============================================================================
14 | /// @name Initializer
15 | ///=============================================================================
16 | - (nullable instancetype)init UNAVAILABLE_ATTRIBUTE;
17 | + (nullable instancetype)new UNAVAILABLE_ATTRIBUTE;
18 |
19 | @end
20 |
--------------------------------------------------------------------------------
/LFLiveKit/coder/LFHardwareAudioEncoder.h:
--------------------------------------------------------------------------------
1 | //
2 | // LFHardwareAudioEncoder.h
3 | // LFLiveKit
4 | //
5 | // Created by 倾慕 on 16/5/2.
6 | // Copyright © 2016年 倾慕. All rights reserved.
7 | //
8 |
9 | #import "LFAudioEncoding.h"
10 |
11 | @interface LFHardwareAudioEncoder : NSObject <LFAudioEncoding>
12 |
13 | #pragma mark - Initializer
14 | ///=============================================================================
15 | /// @name Initializer
16 | ///=============================================================================
17 | - (nullable instancetype)init UNAVAILABLE_ATTRIBUTE;
18 | + (nullable instancetype)new UNAVAILABLE_ATTRIBUTE;
19 |
20 | @end
21 |
--------------------------------------------------------------------------------
/LFLiveKit/coder/LFHardwareVideoEncoder.h:
--------------------------------------------------------------------------------
1 | //
2 | // LFHardwareVideoEncoder.h
3 | // LFLiveKit
4 | //
5 | // Created by 倾慕 on 16/5/2.
6 | // Copyright © 2016年 倾慕. All rights reserved.
7 | //
8 |
9 | #import "LFVideoEncoding.h"
10 |
11 | @interface LFHardwareVideoEncoder : NSObject <LFVideoEncoding>
12 |
13 | #pragma mark - Initializer
14 | ///=============================================================================
15 | /// @name Initializer
16 | ///=============================================================================
17 | - (nullable instancetype)init UNAVAILABLE_ATTRIBUTE;
18 | + (nullable instancetype)new UNAVAILABLE_ATTRIBUTE;
19 |
20 | @end
21 |
--------------------------------------------------------------------------------
/LFLiveKit/upload/LFStreamRtmpSocket.h:
--------------------------------------------------------------------------------
1 | //
2 | // LFStreamRtmpSocket.h
3 | // LaiFeng
4 | //
5 | // Created by admin on 16/5/18.
6 | // Copyright © 2016年 live Interactive. All rights reserved.
7 | //
8 |
9 | #import "LFStreamSocket.h"
10 |
11 | @interface LFStreamRtmpSocket : NSObject <LFStreamSocket>
12 |
13 | #pragma mark - Initializer
14 | ///=============================================================================
15 | /// @name Initializer
16 | ///=============================================================================
17 | - (nullable instancetype)init UNAVAILABLE_ATTRIBUTE;
18 | + (nullable instancetype)new UNAVAILABLE_ATTRIBUTE;
19 |
20 | @end
21 |
--------------------------------------------------------------------------------
/LFLiveKitDemo/LFLiveKitDemo/ViewController.m:
--------------------------------------------------------------------------------
1 | //
2 | // ViewController.m
3 | // LFLiveKitDemo
4 | //
5 | // Created by admin on 16/6/8.
6 | // Copyright © 2016年 admin. All rights reserved.
7 | //
8 |
9 | #import "ViewController.h"
10 | #import "LFLivePreview.h"
11 |
12 | @interface ViewController ()
13 |
14 | @end
15 |
16 | @implementation ViewController
17 |
18 | - (void)viewDidLoad {
19 | [super viewDidLoad];
20 | // Do any additional setup after loading the view, typically from a nib.
21 | [self.view addSubview:[[LFLivePreview alloc] initWithFrame:self.view.bounds]];
22 | }
23 |
24 | - (void)didReceiveMemoryWarning {
25 | [super didReceiveMemoryWarning];
26 | // Dispose of any resources that can be recreated.
27 | }
28 |
29 | @end
30 |
--------------------------------------------------------------------------------
/LFLiveKit.xcodeproj/xcuserdata/admin.xcuserdatad/xcschemes/xcschememanagement.plist:
--------------------------------------------------------------------------------
 1 | <?xml version="1.0" encoding="UTF-8"?>
 2 | <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
 3 | <plist version="1.0">
 4 | <dict>
 5 |     <key>SchemeUserState</key>
 6 |     <dict>
 7 |         <key>LFLiveKit.xcscheme_^#shared#^_</key>
 8 |         <dict>
 9 |             <key>orderHint</key>
10 |             <integer>0</integer>
11 |         </dict>
12 |     </dict>
13 |     <key>SuppressBuildableAutocreation</key>
14 |     <dict>
15 |         <key>84001F891D0015D10026C63F</key>
16 |         <dict>
17 |             <key>primary</key>
18 |             <true/>
19 |         </dict>
20 |         <key>84001F931D0015D10026C63F</key>
21 |         <dict>
22 |             <key>primary</key>
23 |             <true/>
24 |         </dict>
25 |     </dict>
26 | </dict>
27 | </plist>
28 |
--------------------------------------------------------------------------------
/LFLiveKitTests/Info.plist:
--------------------------------------------------------------------------------
 1 | <?xml version="1.0" encoding="UTF-8"?>
 2 | <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
 3 | <plist version="1.0">
 4 | <dict>
 5 |     <key>CFBundleDevelopmentRegion</key>
 6 |     <string>en</string>
 7 |     <key>CFBundleExecutable</key>
 8 |     <string>$(EXECUTABLE_NAME)</string>
 9 |     <key>CFBundleIdentifier</key>
10 |     <string>$(PRODUCT_BUNDLE_IDENTIFIER)</string>
11 |     <key>CFBundleInfoDictionaryVersion</key>
12 |     <string>6.0</string>
13 |     <key>CFBundleName</key>
14 |     <string>$(PRODUCT_NAME)</string>
15 |     <key>CFBundlePackageType</key>
16 |     <string>BNDL</string>
17 |     <key>CFBundleShortVersionString</key>
18 |     <string>1.0</string>
19 |     <key>CFBundleSignature</key>
20 |     <string>????</string>
21 |     <key>CFBundleVersion</key>
22 |     <string>1</string>
23 | </dict>
24 | </plist>
25 |
--------------------------------------------------------------------------------
/LFLiveKitDemo/LFLiveKitDemo.xcodeproj/xcuserdata/admin.xcuserdatad/xcschemes/xcschememanagement.plist:
--------------------------------------------------------------------------------
 1 | <?xml version="1.0" encoding="UTF-8"?>
 2 | <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
 3 | <plist version="1.0">
 4 | <dict>
 5 |     <key>SchemeUserState</key>
 6 |     <dict>
 7 |         <key>LFLiveKitDemo.xcscheme</key>
 8 |         <dict>
 9 |             <key>orderHint</key>
10 |             <integer>0</integer>
11 |         </dict>
12 |     </dict>
13 |     <key>SuppressBuildableAutocreation</key>
14 |     <dict>
15 |         <key>840762EB1D07C7D0000FD0BF</key>
16 |         <dict>
17 |             <key>primary</key>
18 |             <true/>
19 |         </dict>
20 |         <key>840763041D07C7D0000FD0BF</key>
21 |         <dict>
22 |             <key>primary</key>
23 |             <true/>
24 |         </dict>
25 |         <key>8407630F1D07C7D0000FD0BF</key>
26 |         <dict>
27 |             <key>primary</key>
28 |             <true/>
29 |         </dict>
30 |     </dict>
31 | </dict>
32 | </plist>
33 |
--------------------------------------------------------------------------------
/LFLiveKit/Info.plist:
--------------------------------------------------------------------------------
 1 | <?xml version="1.0" encoding="UTF-8"?>
 2 | <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
 3 | <plist version="1.0">
 4 | <dict>
 5 |     <key>CFBundleDevelopmentRegion</key>
 6 |     <string>en</string>
 7 |     <key>CFBundleExecutable</key>
 8 |     <string>$(EXECUTABLE_NAME)</string>
 9 |     <key>CFBundleIdentifier</key>
10 |     <string>$(PRODUCT_BUNDLE_IDENTIFIER)</string>
11 |     <key>CFBundleInfoDictionaryVersion</key>
12 |     <string>6.0</string>
13 |     <key>CFBundleName</key>
14 |     <string>$(PRODUCT_NAME)</string>
15 |     <key>CFBundlePackageType</key>
16 |     <string>FMWK</string>
17 |     <key>CFBundleShortVersionString</key>
18 |     <string>1.6</string>
19 |     <key>CFBundleSignature</key>
20 |     <string>????</string>
21 |     <key>CFBundleVersion</key>
22 |     <string>$(CURRENT_PROJECT_VERSION)</string>
23 |     <key>NSPrincipalClass</key>
24 |     <string></string>
25 | </dict>
26 | </plist>
27 |
--------------------------------------------------------------------------------
/LFLiveKit.podspec:
--------------------------------------------------------------------------------
1 |
2 | Pod::Spec.new do |s|
3 |
4 | s.name = "LFLiveKit"
5 | s.version = "1.6"
6 | s.summary = "LaiFeng ios Live. LFLiveKit."
7 | s.homepage = "https://github.com/chenliming777"
8 | s.license = { :type => "MIT", :file => "LICENSE" }
9 | s.author = { "chenliming" => "chenliming777@qq.com" }
10 | s.platform = :ios, "8.0"
11 | s.ios.deployment_target = "8.0"
12 | s.source = { :git => "https://github.com/LaiFengiOS/LFLiveKit.git", :tag => "#{s.version}" }
13 | s.source_files = "LFLiveKit/**/*.{*}"
14 | s.public_header_files = "LFLiveKit/**/*.h"
15 |
16 | s.frameworks = "VideoToolbox", "AudioToolbox","AVFoundation","Foundation","UIKit"
17 | s.library = "z"
18 |
19 | s.requires_arc = true
20 |
21 | s.dependency "CocoaAsyncSocket", "~> 7.4.1"
22 | s.dependency 'LMGPUImage', '~> 0.1.9'
23 | s.dependency "pili-librtmp", "~> 1.0.2"
24 |
25 | end
26 |
--------------------------------------------------------------------------------
/LFLiveKit/coder/LFVideoEncoding.h:
--------------------------------------------------------------------------------
1 | //
2 | // LFVideoEncoding.h
3 | // LFLiveKit
4 | //
5 | // Created by 倾慕 on 16/5/2.
6 | // Copyright © 2016年 倾慕. All rights reserved.
7 | //
8 |
 9 | #import <Foundation/Foundation.h>
10 | #import "LFVideoFrame.h"
11 | #import "LFLiveVideoConfiguration.h"
12 |
13 | @protocol LFVideoEncoding;
14 | /// Callback fired after the encoder has produced a frame
15 | @protocol LFVideoEncodingDelegate <NSObject>
16 | @required
17 | - (void)videoEncoder:(nullable id<LFVideoEncoding>)encoder videoFrame:(nullable LFVideoFrame*)frame;
18 | @end
19 |
20 | /// Abstract encoder interface
21 | @protocol LFVideoEncoding
22 | @required
23 | - (void)encodeVideoData:(nullable CVImageBufferRef)pixelBuffer timeStamp:(uint64_t)timeStamp;
24 | - (void)stopEncoder;
25 | @optional
26 | @property (nonatomic, assign) NSInteger videoBitRate;
27 | - (nullable instancetype)initWithVideoStreamConfiguration:(nullable LFLiveVideoConfiguration*)configuration;
28 | - (void)setDelegate:(nullable id<LFVideoEncodingDelegate>)delegate;
29 |
30 | @end
31 |
32 |
--------------------------------------------------------------------------------
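
A hedged sketch of the consumer side of this protocol (not part of the repository; MyVideoPipeline and pushPixelBuffer:timeStamp: are hypothetical names):

    #import "LFVideoEncoding.h"

    @interface MyVideoPipeline : NSObject <LFVideoEncodingDelegate>
    @property (nonatomic, strong) id<LFVideoEncoding> videoEncoder;  // e.g. an LFHardwareVideoEncoder
    @end

    @implementation MyVideoPipeline

    // Feed a captured pixel buffer into the encoder (e.g. from LFVideoCaptureDelegate).
    // The encoder's setDelegate: is assumed to point back at this object.
    - (void)pushPixelBuffer:(CVImageBufferRef)pixelBuffer timeStamp:(uint64_t)timeStamp {
        [self.videoEncoder encodeVideoData:pixelBuffer timeStamp:timeStamp];
    }

    #pragma mark - LFVideoEncodingDelegate

    - (void)videoEncoder:(nullable id<LFVideoEncoding>)encoder videoFrame:(nullable LFVideoFrame *)frame {
        // frame.data holds the encoded H.264 payload, frame.sps / frame.pps the parameter sets,
        // and frame.isKeyFrame marks key frames; forward the frame to the packager / socket here.
    }

    @end
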
/LFLiveKit/coder/LFAudioEncoding.h:
--------------------------------------------------------------------------------
1 | //
2 | // LFAudioEncoding.h
3 | // LFLiveKit
4 | //
5 | // Created by 倾慕 on 16/5/2.
6 | // Copyright © 2016年 倾慕. All rights reserved.
7 | //
8 |
 9 | #import <Foundation/Foundation.h>
10 | #import <AudioToolbox/AudioToolbox.h>
11 | #import "LFAudioFrame.h"
12 | #import "LFLiveAudioConfiguration.h"
13 |
14 | @protocol LFAudioEncoding;
15 | /// Callback fired after the encoder has produced a frame
16 | @protocol LFAudioEncodingDelegate <NSObject>
17 | @required
18 | - (void)audioEncoder:(nullable id<LFAudioEncoding>)encoder audioFrame:(nullable LFAudioFrame*)frame;
19 | @end
20 |
21 | /// Abstract encoder interface
22 | @protocol LFAudioEncoding
23 | @required
24 | - (void)encodeAudioData:(AudioBufferList)inBufferList timeStamp:(uint64_t)timeStamp;
25 | - (void)stopEncoder;
26 | @optional
27 | - (nullable instancetype)initWithAudioStreamConfiguration:(nullable LFLiveAudioConfiguration*)configuration;
28 | - (void)setDelegate:(nullable id<LFAudioEncodingDelegate>)delegate;
29 | - (nullable NSData*)adtsData:(NSInteger)channel rawDataLength:(NSInteger)rawDataLength;
30 | @end
31 |
32 |
--------------------------------------------------------------------------------
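
A matching sketch for the audio protocol (not part of the repository; MyAudioPipeline and pushAudioBuffer:timeStamp: are hypothetical names):

    #import "LFAudioEncoding.h"

    @interface MyAudioPipeline : NSObject <LFAudioEncodingDelegate>
    @property (nonatomic, strong) id<LFAudioEncoding> audioEncoder;  // e.g. an LFHardwareAudioEncoder
    @end

    @implementation MyAudioPipeline

    // Feed captured PCM samples into the encoder (e.g. from LFAudioCaptureDelegate).
    - (void)pushAudioBuffer:(AudioBufferList)inBufferList timeStamp:(uint64_t)timeStamp {
        [self.audioEncoder encodeAudioData:inBufferList timeStamp:timeStamp];
    }

    #pragma mark - LFAudioEncodingDelegate

    - (void)audioEncoder:(nullable id<LFAudioEncoding>)encoder audioFrame:(nullable LFAudioFrame *)frame {
        // frame.data holds the AAC payload and frame.audioInfo the AAC header used for FLV packaging;
        // hand the frame to the packager / streaming buffer here.
    }

    @end
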
/LFLiveKitTests/LFLiveKitTests.m:
--------------------------------------------------------------------------------
1 | //
2 | // LFLiveKitTests.m
3 | // LFLiveKitTests
4 | //
5 | // Created by admin on 16/6/2.
6 | // Copyright © 2016年 admin. All rights reserved.
7 | //
8 |
 9 | #import <XCTest/XCTest.h>
10 |
11 | @interface LFLiveKitTests : XCTestCase
12 |
13 | @end
14 |
15 | @implementation LFLiveKitTests
16 |
17 | - (void)setUp {
18 | [super setUp];
19 | // Put setup code here. This method is called before the invocation of each test method in the class.
20 | }
21 |
22 | - (void)tearDown {
23 | // Put teardown code here. This method is called after the invocation of each test method in the class.
24 | [super tearDown];
25 | }
26 |
27 | - (void)testExample {
28 | // This is an example of a functional test case.
29 | // Use XCTAssert and related functions to verify your tests produce the correct results.
30 | }
31 |
32 | - (void)testPerformanceExample {
33 | // This is an example of a performance test case.
34 | [self measureBlock:^{
35 | // Put the code you want to measure the time of here.
36 | }];
37 | }
38 |
39 | @end
40 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | The MIT License (MIT)
2 |
3 | Copyright (c) 2016 LaiFengiOS
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/LFLiveKit/filter/LFGPUImageEmptyFilter.m:
--------------------------------------------------------------------------------
1 | #import "LFGPUImageEmptyFilter.h"
2 |
3 | #if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
4 | NSString *const kLFGPUImageEmptyFragmentShaderString = SHADER_STRING
5 | (
6 | varying highp vec2 textureCoordinate;
7 |
8 | uniform sampler2D inputImageTexture;
9 |
10 | void main()
11 | {
12 | lowp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
13 |
14 | gl_FragColor = vec4((textureColor.rgb), textureColor.w);
15 | }
16 | );
17 | #else
18 | NSString *const kGPUImageInvertFragmentShaderString = SHADER_STRING
19 | (
20 | varying vec2 textureCoordinate;
21 |
22 | uniform sampler2D inputImageTexture;
23 |
24 | void main()
25 | {
26 | vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
27 |
28 | gl_FragColor = vec4((textureColor.rgb), textureColor.w);
29 | }
30 | );
31 | #endif
32 |
33 | @implementation LFGPUImageEmptyFilter
34 |
35 | - (id)init;
36 | {
37 | if (!(self = [super initWithFragmentShaderFromString:kLFGPUImageEmptyFragmentShaderString]))
38 | {
39 | return nil;
40 | }
41 |
42 | return self;
43 | }
44 |
45 | @end
46 |
47 |
--------------------------------------------------------------------------------
/LFLiveKit/packet/flv/avc.h:
--------------------------------------------------------------------------------
1 | /*
2 | $Id: avc.h 231 2011-06-27 13:46:19Z marc.noirot $
3 |
4 | FLV Metadata updater
5 |
6 | Copyright (C) 2007-2012 Marc Noirot
7 |
8 | This file is part of FLVMeta.
9 |
10 | FLVMeta is free software; you can redistribute it and/or modify
11 | it under the terms of the GNU General Public License as published by
12 | the Free Software Foundation; either version 2 of the License, or
13 | (at your option) any later version.
14 |
15 | FLVMeta is distributed in the hope that it will be useful,
16 | but WITHOUT ANY WARRANTY; without even the implied warranty of
17 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
18 | GNU General Public License for more details.
19 |
20 | You should have received a copy of the GNU General Public License
21 | along with FLVMeta; if not, write to the Free Software
22 | Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
23 | */
24 | #ifndef __AVC_H__
25 | #define __AVC_H__
26 |
27 | #include <stdio.h>
28 |
29 | #include "types.h"
30 | #include "flv.h"
31 |
32 | #ifdef __cplusplus
33 | extern "C" {
34 | #endif /* __cplusplus */
35 |
36 | int read_avc_resolution(flv_stream * f, uint32 body_length, uint32 * width, uint32 * height);
37 |
38 | #ifdef __cplusplus
39 | }
40 | #endif /* __cplusplus */
41 |
42 | #endif /* __AVC_H__ */
43 |
--------------------------------------------------------------------------------
/LFLiveKitDemo/LFLiveKitDemo/Assets.xcassets/AppIcon.appiconset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "idiom" : "iphone",
5 | "size" : "29x29",
6 | "scale" : "2x"
7 | },
8 | {
9 | "idiom" : "iphone",
10 | "size" : "29x29",
11 | "scale" : "3x"
12 | },
13 | {
14 | "idiom" : "iphone",
15 | "size" : "40x40",
16 | "scale" : "2x"
17 | },
18 | {
19 | "idiom" : "iphone",
20 | "size" : "40x40",
21 | "scale" : "3x"
22 | },
23 | {
24 | "idiom" : "iphone",
25 | "size" : "60x60",
26 | "scale" : "2x"
27 | },
28 | {
29 | "idiom" : "iphone",
30 | "size" : "60x60",
31 | "scale" : "3x"
32 | },
33 | {
34 | "idiom" : "ipad",
35 | "size" : "29x29",
36 | "scale" : "1x"
37 | },
38 | {
39 | "idiom" : "ipad",
40 | "size" : "29x29",
41 | "scale" : "2x"
42 | },
43 | {
44 | "idiom" : "ipad",
45 | "size" : "40x40",
46 | "scale" : "1x"
47 | },
48 | {
49 | "idiom" : "ipad",
50 | "size" : "40x40",
51 | "scale" : "2x"
52 | },
53 | {
54 | "idiom" : "ipad",
55 | "size" : "76x76",
56 | "scale" : "1x"
57 | },
58 | {
59 | "idiom" : "ipad",
60 | "size" : "76x76",
61 | "scale" : "2x"
62 | }
63 | ],
64 | "info" : {
65 | "version" : 1,
66 | "author" : "xcode"
67 | }
68 | }
--------------------------------------------------------------------------------
/LFLiveKit/objects/LFLiveDebug.h:
--------------------------------------------------------------------------------
1 | //
2 | // LFLiveDebug.h
3 | // LaiFeng
4 | //
5 | // Created by admin on 16/5/19.
6 | // Copyright © 2016年 live Interactive. All rights reserved.
7 | //
8 |
 9 | #import <Foundation/Foundation.h>
10 | #import <UIKit/UIKit.h>
11 |
12 | @interface LFLiveDebug : NSObject
13 |
14 | @property (nonatomic, copy) NSString *streamId; ///< stream id
15 | @property (nonatomic, copy) NSString *uploadUrl; ///< stream URL
16 | @property (nonatomic, assign) CGSize videoSize; ///< resolution being uploaded
17 | @property (nonatomic, assign) BOOL isRtmp; ///< upload transport (TCP or RTMP)
18 |
19 | @property (nonatomic, assign) CGFloat timeStamp; ///< current timestamp, used to compute per-second figures
20 | @property (nonatomic, assign) CGFloat dataFlow; ///< total traffic
21 | @property (nonatomic, assign) CGFloat bandwidth; ///< total bandwidth within the last second
22 | @property (nonatomic, assign) CGFloat currentBandwidth; ///< bandwidth of the previous interval
23 |
24 | @property (nonatomic, assign) NSInteger capturedAudioCount; ///< audio frames captured within the last second
25 | @property (nonatomic, assign) NSInteger capturedVideoCount; ///< video frames captured within the last second
26 | @property (nonatomic, assign) NSInteger currentCapturedAudioCount; ///< audio frames captured in the previous interval
27 | @property (nonatomic, assign) NSInteger currentCapturedVideoCount; ///< video frames captured in the previous interval
28 |
29 | @property (nonatomic, assign) NSInteger unSendCount; ///< frames not yet sent (still waiting in the buffer)
30 |
31 | @end
32 |
--------------------------------------------------------------------------------
/LFLiveKit/objects/LFLiveStreamInfo.h:
--------------------------------------------------------------------------------
1 | //
2 | // LFLiveStreamInfo.h
3 | // LFLiveKit
4 | //
5 | // Created by 倾慕 on 16/5/2.
6 | // Copyright © 2016年 倾慕. All rights reserved.
 7 | // The actual upload address, token, etc.
8 |
 9 | #import <Foundation/Foundation.h>
10 | #import "LFLiveAudioConfiguration.h"
11 | #import "LFLiveVideoConfiguration.h"
12 |
13 | /// Stream state
14 | typedef NS_ENUM(NSUInteger, LFLiveState){
15 |     /// ready
16 |     LFLiveReady = 0,
17 |     /// connecting
18 |     LFLivePending = 1,
19 |     /// connected
20 |     LFLiveStart = 2,
21 |     /// disconnected
22 |     LFLiveStop = 3,
23 |     /// connection error
24 |     LFLiveError = 4
25 | };
26 |
27 | typedef NS_ENUM(NSUInteger,LFLiveSocketErrorCode) {
28 |     LFLiveSocketError_PreView = 201,          ///< preview failed
29 |     LFLiveSocketError_GetStreamInfo = 202,    ///< failed to get the stream info
30 |     LFLiveSocketError_ConnectSocket = 203,    ///< failed to connect the socket
31 |     LFLiveSocketError_Verification = 204,     ///< server verification failed
32 |     LFLiveSocketError_ReConnectTimeOut = 205  ///< reconnecting to the server timed out
33 | };
34 |
35 | @interface LFLiveStreamInfo : NSObject
36 |
37 | @property (nonatomic, copy) NSString *streamId;
38 |
39 | #pragma mark -- FLV
40 | @property (nonatomic, copy) NSString *host;
41 | @property (nonatomic, assign) NSInteger port;
42 | #pragma mark -- RTMP
43 | @property (nonatomic, copy) NSString *url; ///< upload URL (all that RTMP needs)
44 | /// audio configuration
45 | @property (nonatomic, strong) LFLiveAudioConfiguration *audioConfiguration;
46 | /// video configuration
47 | @property (nonatomic, strong) LFLiveVideoConfiguration *videoConfiguration;
48 |
49 |
50 | @end
51 |
--------------------------------------------------------------------------------
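
A minimal sketch of filling in a stream info object (not part of the repository; the RTMP URL is a placeholder):

    #import "LFLiveStreamInfo.h"

    LFLiveStreamInfo *streamInfo = [[LFLiveStreamInfo alloc] init];
    streamInfo.url = @"rtmp://example.com/live/streamName";   // placeholder upload URL (RTMP only needs this)
    streamInfo.audioConfiguration = [LFLiveAudioConfiguration defaultConfiguration];
    // streamInfo.videoConfiguration would be set the same way from LFLiveVideoConfiguration,
    // whose constructors are not shown in this listing.
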
/LFLiveKit/upload/LFStreamingBuffer.h:
--------------------------------------------------------------------------------
1 | //
2 | // LFStreamingBuffer.h
3 | // LFLiveKit
4 | //
5 | // Created by 倾慕 on 16/5/2.
6 | // Copyright © 2016年 倾慕. All rights reserved.
7 | //
8 |
 9 | #import <Foundation/Foundation.h>
10 | #import "LFAudioFrame.h"
11 | #import "LFVideoFrame.h"
12 |
13 | /** current buffer status */
14 | typedef NS_ENUM(NSUInteger, LFLiveBuffferState) {
15 |     LFLiveBuffferUnknown = 0,   //< unknown
16 |     LFLiveBuffferIncrease = 1,  //< the buffer is healthy, the bitrate can be increased
17 |     LFLiveBuffferDecline = 2    //< the buffer is falling behind, the bitrate should be lowered
18 | };
19 |
20 | @class LFStreamingBuffer;
21 | /** this two method will control videoBitRate */
22 | @protocol LFStreamingBufferDelegate <NSObject>
23 | @optional
24 | /** called when the buffer grows or shrinks, at the interval given by the buffer's updateInterval */
25 | - (void)streamingBuffer:(nullable LFStreamingBuffer * )buffer bufferState:(LFLiveBuffferState)state;
26 | @end
27 |
28 | @interface LFStreamingBuffer : NSObject
29 |
30 | /** The delegate of the buffer. buffer callback */
31 | @property (nullable, nonatomic, weak) id<LFStreamingBufferDelegate> delegate;
32 |
33 | /** current frame buffer */
34 | @property (nonatomic, strong, readonly) NSMutableArray * _Nonnull list;
35 |
36 | /** buffer count max size default 1000 */
37 | @property (nonatomic, assign) NSUInteger maxCount;
38 |
39 | /** add frame to buffer */
40 | - (void)appendObject:(nullable LFFrame*)frame;
41 |
42 | /** pop the first frame from the buffer */
43 | - (nullable LFFrame*)popFirstObject;
44 |
45 | /** remove all objects from Buffer */
46 | - (void)removeAllObject;
47 |
48 | @end
49 |
--------------------------------------------------------------------------------
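
A hedged sketch of how the buffer is typically driven (not part of the repository; MySender is a hypothetical class and the bitrate policy is left as comments):

    #import "LFStreamingBuffer.h"

    @interface MySender : NSObject <LFStreamingBufferDelegate>
    @property (nonatomic, strong) LFStreamingBuffer *buffer;
    @end

    @implementation MySender

    - (instancetype)init {
        if (self = [super init]) {
            _buffer = [[LFStreamingBuffer alloc] init];
            _buffer.delegate = self;
        }
        return self;
    }

    - (void)enqueueFrame:(LFFrame *)frame {
        [self.buffer appendObject:frame];               // producer side: encoders push frames in
    }

    - (void)sendNextFrame {
        LFFrame *frame = [self.buffer popFirstObject];  // consumer side: the socket drains the queue
        if (frame) { /* write frame.header + frame.data to the socket */ }
    }

    #pragma mark - LFStreamingBufferDelegate

    - (void)streamingBuffer:(nullable LFStreamingBuffer *)buffer bufferState:(LFLiveBuffferState)state {
        if (state == LFLiveBuffferIncrease) { /* raise the video bitrate */ }
        if (state == LFLiveBuffferDecline)  { /* lower the video bitrate */ }
    }

    @end
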
/LFLiveKit/upload/LFStreamSocket.h:
--------------------------------------------------------------------------------
1 | //
2 | // LFStreamSocket.h
3 | // LFLiveKit
4 | //
5 | // Created by admin on 16/5/3.
6 | // Copyright © 2016年 倾慕. All rights reserved.
7 | //
8 |
 9 | #import <Foundation/Foundation.h>
10 | #import "LFLiveStreamInfo.h"
11 | #import "LFStreamingBuffer.h"
12 | #import "LFLiveDebug.h"
13 |
14 | @protocol LFStreamSocket;
15 | @protocol LFStreamSocketDelegate <NSObject>
16 |
17 | /** callback for the current buffer status (lets the caller adjust frame rate / bitrate strategies) */
18 | - (void)socketBufferStatus:(nullable id<LFStreamSocket>)socket status:(LFLiveBuffferState)status;
19 | /** callback for the current socket/network status */
20 | - (void)socketStatus:(nullable id<LFStreamSocket>)socket status:(LFLiveState)status;
21 | /** callback for socket error codes */
22 | - (void)socketDidError:(nullable id<LFStreamSocket>)socket errorCode:(LFLiveSocketErrorCode)errorCode;
23 | @optional
24 | /** callback for debug info */
25 | - (void)socketDebug:(nullable id<LFStreamSocket>)socket debugInfo:(nullable LFLiveDebug*)debugInfo;
26 | @end
27 |
28 | @protocol LFStreamSocket
29 | - (void) start;
30 | - (void) stop;
31 | - (void) sendFrame:(nullable LFFrame*)frame;
32 | - (void) setDelegate:(nullable id<LFStreamSocketDelegate>)delegate;
33 | @optional
34 | - (nullable instancetype)initWithStream:(nullable LFLiveStreamInfo*)stream;
35 | - (nullable instancetype)initWithStream:(nullable LFLiveStreamInfo*)stream videoSize:(CGSize)videoSize;
36 | - (nullable instancetype)initWithStream:(nullable LFLiveStreamInfo*)stream videoSize:(CGSize)videoSize reconnectInterval:(NSInteger)reconnectInterval reconnectCount:(NSInteger)reconnectCount;
37 | @end
38 |
--------------------------------------------------------------------------------
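
A hedged sketch of the delegate side only (not part of the repository; MySessionObserver is a hypothetical class):

    #import "LFStreamSocket.h"

    @interface MySessionObserver : NSObject <LFStreamSocketDelegate>
    @end

    @implementation MySessionObserver

    - (void)socketStatus:(nullable id<LFStreamSocket>)socket status:(LFLiveState)status {
        if (status == LFLiveStart) { /* connected: start feeding frames */ }
        if (status == LFLiveStop)  { /* disconnected */ }
    }

    - (void)socketDidError:(nullable id<LFStreamSocket>)socket errorCode:(LFLiveSocketErrorCode)errorCode {
        // e.g. LFLiveSocketError_ReConnectTimeOut once the reconnect attempts are exhausted
    }

    - (void)socketBufferStatus:(nullable id<LFStreamSocket>)socket status:(LFLiveBuffferState)status {
        // mirrors LFStreamingBufferDelegate: adjust the encoder bitrate here
    }

    - (void)socketDebug:(nullable id<LFStreamSocket>)socket debugInfo:(nullable LFLiveDebug *)debugInfo {
        // optional: periodic bandwidth / frame-count statistics
    }

    @end
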
/LFLiveKitDemo/LFLiveKitDemo/Base.lproj/Main.storyboard:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/LFLiveKitDemo/LFLiveKitDemo/Info.plist:
--------------------------------------------------------------------------------
 1 | <?xml version="1.0" encoding="UTF-8"?>
 2 | <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
 3 | <plist version="1.0">
 4 | <dict>
 5 |     <key>CFBundleDevelopmentRegion</key>
 6 |     <string>en</string>
 7 |     <key>CFBundleExecutable</key>
 8 |     <string>$(EXECUTABLE_NAME)</string>
 9 |     <key>CFBundleIdentifier</key>
10 |     <string>$(PRODUCT_BUNDLE_IDENTIFIER)</string>
11 |     <key>CFBundleInfoDictionaryVersion</key>
12 |     <string>6.0</string>
13 |     <key>CFBundleName</key>
14 |     <string>$(PRODUCT_NAME)</string>
15 |     <key>CFBundlePackageType</key>
16 |     <string>APPL</string>
17 |     <key>CFBundleShortVersionString</key>
18 |     <string>1.0</string>
19 |     <key>CFBundleSignature</key>
20 |     <string>????</string>
21 |     <key>CFBundleVersion</key>
22 |     <string>1</string>
23 |     <key>LSRequiresIPhoneOS</key>
24 |     <true/>
25 |     <key>UILaunchStoryboardName</key>
26 |     <string>LaunchScreen</string>
27 |     <key>UIMainStoryboardFile</key>
28 |     <string>Main</string>
29 |     <key>UIRequiredDeviceCapabilities</key>
30 |     <array>
31 |         <string>armv7</string>
32 |     </array>
33 |     <key>UISupportedInterfaceOrientations</key>
34 |     <array>
35 |         <string>UIInterfaceOrientationPortrait</string>
36 |         <string>UIInterfaceOrientationLandscapeLeft</string>
37 |         <string>UIInterfaceOrientationLandscapeRight</string>
38 |     </array>
39 |     <key>UISupportedInterfaceOrientations~ipad</key>
40 |     <array>
41 |         <string>UIInterfaceOrientationPortrait</string>
42 |         <string>UIInterfaceOrientationPortraitUpsideDown</string>
43 |         <string>UIInterfaceOrientationLandscapeLeft</string>
44 |         <string>UIInterfaceOrientationLandscapeRight</string>
45 |     </array>
46 | </dict>
47 | </plist>
48 |
--------------------------------------------------------------------------------
/LFLiveKitDemo/LFLiveKitDemo/Base.lproj/LaunchScreen.storyboard:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/LFLiveKit/capture/LFAudioCapture.h:
--------------------------------------------------------------------------------
1 | //
2 | // LFAudioCapture.h
3 | // LFLiveKit
4 | //
5 | // Created by 倾慕 on 16/5/1.
6 | // Copyright © 2016年 倾慕. All rights reserved.
7 | //
8 |
 9 | #import <Foundation/Foundation.h>
10 | #import <AVFoundation/AVFoundation.h>
11 | #import "LFLiveAudioConfiguration.h"
12 |
13 | #pragma mark -- AudioCaptureNotification
14 | /** posted when the audio component fails to be created */
15 | extern NSString *_Nullable const LFAudioComponentFailedToCreateNotification;
16 |
17 | @class LFAudioCapture;
18 | /** LFAudioCapture callback audioData */
19 | @protocol LFAudioCaptureDelegate <NSObject>
20 | - (void)captureOutput:(nullable LFAudioCapture*)capture audioBuffer:(AudioBufferList)inBufferList;
21 | @end
22 |
23 |
24 | @interface LFAudioCapture : NSObject
25 |
26 | #pragma mark - Attribute
27 | ///=============================================================================
28 | /// @name Attribute
29 | ///=============================================================================
30 |
31 | /** The delegate of the capture. captureData callback */
32 | @property (nullable, nonatomic, weak) id<LFAudioCaptureDelegate> delegate;
33 |
34 | /** When muted is YES, the audio data delivered to the callback is zeroed out (memset to 0). */
35 | @property (nonatomic, assign) BOOL muted;
36 |
37 | /** Setting running starts or stops the capture. */
38 | @property (nonatomic, assign) BOOL running;
39 |
40 | #pragma mark - Initializer
41 | ///=============================================================================
42 | /// @name Initializer
43 | ///=============================================================================
44 | - (nullable instancetype)init UNAVAILABLE_ATTRIBUTE;
45 | + (nullable instancetype)new UNAVAILABLE_ATTRIBUTE;
46 |
47 | /**
48 | The designated initializer. Multiple instances with the same configuration will make the
49 | capture unstable.
50 | */
51 | - (nullable instancetype)initWithAudioConfiguration:(nullable LFLiveAudioConfiguration *)configuration NS_DESIGNATED_INITIALIZER;
52 |
53 | @end
54 |
--------------------------------------------------------------------------------
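
A hedged sketch of wiring up audio capture (not part of the repository; MyAudioSource is a hypothetical class):

    #import "LFAudioCapture.h"

    @interface MyAudioSource : NSObject <LFAudioCaptureDelegate>
    @property (nonatomic, strong) LFAudioCapture *audioCapture;
    @end

    @implementation MyAudioSource

    - (instancetype)init {
        if (self = [super init]) {
            LFLiveAudioConfiguration *config = [LFLiveAudioConfiguration defaultConfiguration];
            _audioCapture = [[LFAudioCapture alloc] initWithAudioConfiguration:config];
            _audioCapture.delegate = self;
            _audioCapture.running = YES;   // start capturing
        }
        return self;
    }

    #pragma mark - LFAudioCaptureDelegate

    - (void)captureOutput:(nullable LFAudioCapture *)capture audioBuffer:(AudioBufferList)inBufferList {
        // raw PCM arrives here; typically forwarded to an id<LFAudioEncoding> encoder
    }

    @end
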
/LFLiveKitDemo/LFLiveKitDemo/AppDelegate.m:
--------------------------------------------------------------------------------
1 | //
2 | // AppDelegate.m
3 | // LFLiveKitDemo
4 | //
5 | // Created by admin on 16/6/8.
6 | // Copyright © 2016年 admin. All rights reserved.
7 | //
8 |
9 | #import "AppDelegate.h"
10 |
11 | @interface AppDelegate ()
12 |
13 | @end
14 |
15 | @implementation AppDelegate
16 |
17 |
18 | - (BOOL)application:(UIApplication *)application didFinishLaunchingWithOptions:(NSDictionary *)launchOptions {
19 | // Override point for customization after application launch.
20 | return YES;
21 | }
22 |
23 | - (void)applicationWillResignActive:(UIApplication *)application {
24 | // Sent when the application is about to move from active to inactive state. This can occur for certain types of temporary interruptions (such as an incoming phone call or SMS message) or when the user quits the application and it begins the transition to the background state.
25 | // Use this method to pause ongoing tasks, disable timers, and throttle down OpenGL ES frame rates. Games should use this method to pause the game.
26 | }
27 |
28 | - (void)applicationDidEnterBackground:(UIApplication *)application {
29 | // Use this method to release shared resources, save user data, invalidate timers, and store enough application state information to restore your application to its current state in case it is terminated later.
30 | // If your application supports background execution, this method is called instead of applicationWillTerminate: when the user quits.
31 | }
32 |
33 | - (void)applicationWillEnterForeground:(UIApplication *)application {
34 | // Called as part of the transition from the background to the inactive state; here you can undo many of the changes made on entering the background.
35 | }
36 |
37 | - (void)applicationDidBecomeActive:(UIApplication *)application {
38 | // Restart any tasks that were paused (or not yet started) while the application was inactive. If the application was previously in the background, optionally refresh the user interface.
39 | }
40 |
41 | - (void)applicationWillTerminate:(UIApplication *)application {
42 | // Called when the application is about to terminate. Save data if appropriate. See also applicationDidEnterBackground:.
43 | }
44 |
45 | @end
46 |
--------------------------------------------------------------------------------
/LFLiveKit/capture/LFVideoCapture.h:
--------------------------------------------------------------------------------
1 | //
2 | // LFVideoCapture.h
3 | // LFLiveKit
4 | //
5 | // Created by 倾慕 on 16/5/1.
6 | // Copyright © 2016年 倾慕. All rights reserved.
7 | //
8 |
 9 | #import <Foundation/Foundation.h>
10 | #import <AVFoundation/AVFoundation.h>
11 | #import "LFLiveVideoConfiguration.h"
12 |
13 | @class LFVideoCapture;
14 | /** LFVideoCapture callback videoData */
15 | @protocol LFVideoCaptureDelegate <NSObject>
16 | - (void)captureOutput:(nullable LFVideoCapture*)capture pixelBuffer:(nullable CVImageBufferRef)pixelBuffer;
17 | @end
18 |
19 | @interface LFVideoCapture : NSObject
20 |
21 | #pragma mark - Attribute
22 | ///=============================================================================
23 | /// @name Attribute
24 | ///=============================================================================
25 |
26 | /** The delegate of the capture. captureData callback */
27 | @property (nullable, nonatomic, weak) id<LFVideoCaptureDelegate> delegate;
28 |
29 | /** Setting running starts or stops the capture. */
30 | @property (nonatomic, assign) BOOL running;
31 |
32 | /** The view the OpenGL ES preview is rendered into. */
33 | @property (null_resettable, nonatomic, strong) UIView *preView;
34 |
35 | /** Controls the camera position; defaults to the front camera. */
36 | @property (nonatomic, assign) AVCaptureDevicePosition captureDevicePosition;
37 |
38 | /** Switches the capture shader between the empty filter and the beauty filter. */
39 | @property (nonatomic, assign) BOOL beautyFace;
40 |
41 | /** Controls how many frames per second the video capture outputs. */
42 | @property (nonatomic, assign) NSInteger videoFrameRate;
43 |
44 | #pragma mark - Initializer
45 | ///=============================================================================
46 | /// @name Initializer
47 | ///=============================================================================
48 | - (nullable instancetype)init UNAVAILABLE_ATTRIBUTE;
49 | + (nullable instancetype)new UNAVAILABLE_ATTRIBUTE;
50 |
51 | /**
52 | The designated initializer. Multiple instances with the same configuration will make the
53 | capture unstable.
54 | */
55 | - (nullable instancetype)initWithVideoConfiguration:(nullable LFLiveVideoConfiguration *)configuration NS_DESIGNATED_INITIALIZER;
56 |
57 | @end
58 |
--------------------------------------------------------------------------------
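
A hedged sketch of wiring up video capture (not part of the repository; MyVideoSource is hypothetical, and the LFLiveVideoConfiguration instance is taken as a parameter because its constructors are not shown in this listing):

    #import "LFVideoCapture.h"

    @interface MyVideoSource : NSObject <LFVideoCaptureDelegate>
    @property (nonatomic, strong) LFVideoCapture *videoCapture;
    @end

    @implementation MyVideoSource

    - (instancetype)initWithConfiguration:(LFLiveVideoConfiguration *)config previewIn:(UIView *)container {
        if (self = [super init]) {
            _videoCapture = [[LFVideoCapture alloc] initWithVideoConfiguration:config];
            _videoCapture.delegate = self;
            _videoCapture.preView = container;          // OpenGL ES preview is rendered into this view
            _videoCapture.beautyFace = YES;             // beauty filter instead of the empty filter
            _videoCapture.captureDevicePosition = AVCaptureDevicePositionFront;
            _videoCapture.running = YES;                // start capturing
        }
        return self;
    }

    #pragma mark - LFVideoCaptureDelegate

    - (void)captureOutput:(nullable LFVideoCapture *)capture pixelBuffer:(nullable CVImageBufferRef)pixelBuffer {
        // camera frames arrive here; typically forwarded to an id<LFVideoEncoding> encoder
    }

    @end
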
/LFLiveKit/configuration/LFLiveAudioConfiguration.h:
--------------------------------------------------------------------------------
1 | //
2 | // LFLiveAudioConfiguration.h
3 | // LFLiveKit
4 | //
5 | // Created by 倾慕 on 16/5/1.
6 | // Copyright © 2016年 倾慕. All rights reserved.
7 | //
8 |
 9 | #import <Foundation/Foundation.h>
10 |
11 | /// Audio bitrate
12 | typedef NS_ENUM(NSUInteger, LFLiveAudioBitRate) {
13 |     /// 32 Kbps audio bitrate
14 |     LFLiveAudioBitRate_32Kbps = 32000,
15 |     /// 64 Kbps audio bitrate
16 |     LFLiveAudioBitRate_64Kbps = 64000,
17 |     /// 96 Kbps audio bitrate
18 |     LFLiveAudioBitRate_96Kbps = 96000,
19 |     /// 128 Kbps audio bitrate
20 |     LFLiveAudioBitRate_128Kbps = 128000,
21 |     /// default audio bitrate: 64 Kbps
22 |     LFLiveAudioBitRate_Default = LFLiveAudioBitRate_64Kbps
23 | };
24 |
25 | /// Sample rate (default 44.1 kHz; 48 kHz on iPhone 6 and later)
26 | typedef NS_ENUM(NSUInteger, LFLiveAudioSampleRate){
27 |     /// 44.1 kHz sample rate
28 |     LFLiveAudioSampleRate_44100Hz = 44100,
29 |     /// 48 kHz sample rate
30 |     LFLiveAudioSampleRate_48000Hz = 48000,
31 |     /// default sample rate: 44.1 kHz
32 |     LFLiveAudioSampleRate_Default = LFLiveAudioSampleRate_44100Hz
33 | };
34 |
35 | /// Audio live quality
36 | typedef NS_ENUM(NSUInteger, LFLiveAudioQuality){
37 |     /// low audio quality: 44.1 kHz sample rate (48 kHz on iPhone 6 and later), 32 Kbps audio bitrate
38 |     LFLiveAudioQuality_Low = 0,
39 |     /// medium audio quality: 44.1 kHz sample rate (48 kHz on iPhone 6 and later), 64 Kbps audio bitrate
40 |     LFLiveAudioQuality_Medium = 1,
41 |     /// high audio quality: 44.1 kHz sample rate (48 kHz on iPhone 6 and later), 96 Kbps audio bitrate
42 |     LFLiveAudioQuality_High = 2,
43 |     /// very high audio quality: 44.1 kHz sample rate (48 kHz on iPhone 6 and later), 128 Kbps audio bitrate
44 |     LFLiveAudioQuality_VeryHigh = 3,
45 |     /// default audio quality: 44.1 kHz sample rate (48 kHz on iPhone 6 and later), 64 Kbps audio bitrate
46 |     LFLiveAudioQuality_Default = LFLiveAudioQuality_Medium
47 | };
48 |
49 | @interface LFLiveAudioConfiguration : NSObject
50 |
51 | /// Default audio configuration
52 | + (instancetype)defaultConfiguration;
53 | /// Audio configuration for a given quality preset
54 | + (instancetype)defaultConfigurationForQuality:(LFLiveAudioQuality)audioQuality;
55 |
56 | #pragma mark - Attribute
57 | ///=============================================================================
58 | /// @name Attribute
59 | ///=============================================================================
60 | /// number of channels (default 2)
61 | @property (nonatomic, assign) NSUInteger numberOfChannels;
62 | /// sample rate
63 | @property (nonatomic, assign) LFLiveAudioSampleRate audioSampleRate;
64 | /// bitrate
65 | @property (nonatomic, assign) LFLiveAudioBitRate audioBitrate;
66 | /// FLV AAC audio header; 0x12 0x10 for 44100 Hz
67 | @property (nonatomic, assign, readonly) char *asc;
68 |
69 | @end
70 |
--------------------------------------------------------------------------------
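
A short usage sketch (not part of the repository; the overrides are illustrative):

    #import "LFLiveAudioConfiguration.h"

    // Start from the 96 Kbps preset, then override individual fields.
    LFLiveAudioConfiguration *audioConfig =
        [LFLiveAudioConfiguration defaultConfigurationForQuality:LFLiveAudioQuality_High];
    audioConfig.numberOfChannels = 1;                                  // mono instead of the default 2
    audioConfig.audioSampleRate  = LFLiveAudioSampleRate_44100Hz;
    audioConfig.audioBitrate     = LFLiveAudioBitRate_96Kbps;
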
/LFLiveKitDemo/LFLiveKitDemo/category/UIControl+YYAdd.h:
--------------------------------------------------------------------------------
1 | //
2 | // UIControl+YYAdd.h
3 | //
4 | //
5 | // Created by guoyaoyuan on 13-4-5.
6 | // Copyright (c) 2013 live Interactive. All rights reserved.
7 | //
8 |
 9 | #import <UIKit/UIKit.h>
10 |
11 | /**
12 | Provides extensions for `UIControl`.
13 | */
14 | @interface UIControl (YYAdd)
15 |
16 | /**
17 | Removes all targets and actions for a particular event (or events)
18 | from an internal dispatch table.
19 | */
20 | - (void)removeAllTargets;
21 |
22 | /**
23 | Adds or replaces a target and action for a particular event (or events)
24 | to an internal dispatch table.
25 |
26 | @param target The target object—that is, the object to which the
27 | action message is sent. If this is nil, the responder
28 | chain is searched for an object willing to respond to the
29 | action message.
30 |
31 | @param action A selector identifying an action message. It cannot be NULL.
32 |
33 | @param controlEvents A bitmask specifying the control events for which the
34 | action message is sent.
35 | */
36 | - (void)setTarget:(id)target action:(SEL)action forControlEvents:(UIControlEvents)controlEvents;
37 |
38 | /**
39 | Adds a block for a particular event (or events) to an internal dispatch table.
40 | It will cause a strong reference to @a block.
41 |
42 | @param block         The block which is invoked when the action message is
43 | sent (cannot be nil). The block is retained.
44 |
45 | @param controlEvents A bitmask specifying the control events for which the
46 | action message is sent.
47 | */
48 | - (void)addBlockForControlEvents:(UIControlEvents)controlEvents block:(void (^)(id sender))block;
49 |
50 | /**
51 | Adds or replaces a block for a particular event (or events) to an internal
52 | dispatch table. It will cause a strong reference to @a block.
53 |
54 | @param block         The block which is invoked when the action message is
55 | sent (cannot be nil). The block is retained.
56 |
57 | @param controlEvents A bitmask specifying the control events for which the
58 | action message is sent.
59 | */
60 | - (void)setBlockForControlEvents:(UIControlEvents)controlEvents block:(void (^)(id sender))block;
61 |
62 | /**
63 | Removes all blocks for a particular event (or events) from an internal
64 | dispatch table.
65 |
66 | @param controlEvents A bitmask specifying the control events for which the
67 | action message is sent.
68 | */
69 | - (void)removeAllBlocksForControlEvents:(UIControlEvents)controlEvents;
70 |
71 | @end
72 |
--------------------------------------------------------------------------------
/LFLiveKit/packet/flv/info.h:
--------------------------------------------------------------------------------
1 | /*
2 | $Id: info.h 231 2011-06-27 13:46:19Z marc.noirot $
3 |
4 | FLV Metadata updater
5 |
6 | Copyright (C) 2007-2012 Marc Noirot
7 |
8 | This file is part of FLVMeta.
9 |
10 | FLVMeta is free software; you can redistribute it and/or modify
11 | it under the terms of the GNU General Public License as published by
12 | the Free Software Foundation; either version 2 of the License, or
13 | (at your option) any later version.
14 |
15 | FLVMeta is distributed in the hope that it will be useful,
16 | but WITHOUT ANY WARRANTY; without even the implied warranty of
17 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
18 | GNU General Public License for more details.
19 |
20 | You should have received a copy of the GNU General Public License
21 | along with FLVMeta; if not, write to the Free Software
22 | Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
23 | */
24 | #ifndef __INFO_H__
25 | #define __INFO_H__
26 | #include "flv.h"
27 |
28 | typedef struct __flv_info {
29 | flv_header header;
30 | uint8 have_video;
31 | uint8 have_audio;
32 | uint32 video_width;
33 | uint32 video_height;
34 | uint8 video_codec;
35 | uint32 video_frames_number;
36 | uint8 audio_codec;
37 | uint8 audio_size;
38 | uint8 audio_rate;
39 | uint8 audio_stereo;
40 | file_offset_t video_data_size;
41 | file_offset_t audio_data_size;
42 | file_offset_t meta_data_size;
43 | file_offset_t real_video_data_size;
44 | file_offset_t real_audio_data_size;
45 | uint32 video_first_timestamp;
46 | uint32 audio_first_timestamp;
47 | uint32 first_timestamp;
48 | uint8 can_seek_to_end;
49 | uint8 have_keyframes;
50 | uint32 last_keyframe_timestamp;
51 | uint32 on_metadata_size;
52 | file_offset_t on_metadata_offset;
53 | uint32 biggest_tag_body_size;
54 | uint32 last_timestamp;
55 | uint32 video_frame_duration;
56 | uint32 audio_frame_duration;
57 | file_offset_t total_prev_tags_size;
58 | uint8 have_on_last_second;
59 | amf_data * original_on_metadata;
60 | amf_data * keyframes;
61 | amf_data * times;
62 | amf_data * filepositions;
63 | } flv_info;
64 |
65 | typedef struct __flv_metadata {
66 | amf_data * on_last_second_name;
67 | amf_data * on_last_second;
68 | amf_data * on_metadata_name;
69 | amf_data * on_metadata;
70 | } flv_metadata;
71 |
72 | #ifdef __cplusplus
73 | extern "C" {
74 | #endif /* __cplusplus */
75 |
76 | int get_flv_info(flv_stream * flv_in, flv_info * info);
77 |
78 | void compute_metadata(flv_info * info, flv_metadata * meta);
79 |
80 | void compute_current_metadata(flv_info * info, flv_metadata * meta);
81 |
82 | #ifdef __cplusplus
83 | }
84 | #endif /* __cplusplus */
85 |
86 | #endif /* __INFO_H__ */
87 |
--------------------------------------------------------------------------------
/LFLiveKit/packet/flv/types.c:
--------------------------------------------------------------------------------
1 | /*
2 | $Id: types.c 231 2011-06-27 13:46:19Z marc.noirot $
3 |
4 | FLV Metadata updater
5 |
6 | Copyright (C) 2007-2012 Marc Noirot
7 |
8 | This file is part of FLVMeta.
9 |
10 | FLVMeta is free software; you can redistribute it and/or modify
11 | it under the terms of the GNU General Public License as published by
12 | the Free Software Foundation; either version 2 of the License, or
13 | (at your option) any later version.
14 |
15 | FLVMeta is distributed in the hope that it will be useful,
16 | but WITHOUT ANY WARRANTY; without even the implied warranty of
17 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
18 | GNU General Public License for more details.
19 |
20 | You should have received a copy of the GNU General Public License
21 | along with FLVMeta; if not, write to the Free Software
22 | Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
23 | */
24 | #include "types.h"
25 |
26 | #ifndef WORDS_BIGENDIAN
27 |
28 | /* swap 64 bits doubles */
29 | typedef union __convert_u {
30 | uint64 i;
31 | number64 f;
32 | } convert_u;
33 |
34 | number64 swap_number64(number64 n) {
35 | convert_u c;
36 | c.f = n;
37 | c.i = (((c.i & 0x00000000000000FFULL) << 56) |
38 | ((c.i & 0x000000000000FF00ULL) << 40) |
39 | ((c.i & 0x0000000000FF0000ULL) << 24) |
40 | ((c.i & 0x00000000FF000000ULL) << 8) |
41 | ((c.i & 0x000000FF00000000ULL) >> 8) |
42 | ((c.i & 0x0000FF0000000000ULL) >> 24) |
43 | ((c.i & 0x00FF000000000000ULL) >> 40) |
44 | ((c.i & 0xFF00000000000000ULL) >> 56));
45 | return c.f;
46 | }
47 | #endif /* !defined WORDS_BIGENDIAN */
48 |
49 | /* convert native integers into 24 bits big endian integers */
50 | uint24_be uint32_to_uint24_be(uint32 l) {
51 | uint24_be r;
52 | r.b[0] = (uint8)((l & 0x00FF0000U) >> 16);
53 | r.b[1] = (uint8)((l & 0x0000FF00U) >> 8);
54 | r.b[2] = (uint8) (l & 0x000000FFU);
55 | return r;
56 | }
57 |
58 | #ifdef WIN32
59 |
60 | /*
61 | These functions assume fpos_t is a 64-bit signed integer
62 | */
63 |
64 | file_offset_t lfs_ftell(FILE * stream) {
65 | fpos_t p;
66 | if (fgetpos(stream, &p) == 0) {
67 | return (file_offset_t)p;
68 | }
69 | else {
70 | return -1LL;
71 | }
72 | }
73 |
74 | int lfs_fseek(FILE * stream, file_offset_t offset, int whence) {
75 | fpos_t p;
76 | if (fgetpos(stream, &p) == 0) {
77 | switch (whence) {
78 | case SEEK_CUR: p += offset; break;
79 | case SEEK_SET: p = offset; break;
80 | /*case SEEK_END:; not implemented here */
81 | default:
82 | return -1;
83 | }
84 | fsetpos(stream, &p);
85 | return 0;
86 | }
87 | else {
88 | return -1;
89 | }
90 | }
91 |
92 | #endif /* WIN32 */
93 |
--------------------------------------------------------------------------------
/LFLiveKit.xcworkspace/xcuserdata/admin.xcuserdatad/xcdebugger/Breakpoints_v2.xcbkptlist:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/LFLiveKit/configuration/LFLiveVideoConfiguration.h:
--------------------------------------------------------------------------------
1 | //
2 | // LFLiveVideoConfiguration.h
3 | // LFLiveKit
4 | //
5 | // Created by 倾慕 on 16/5/1.
6 | // Copyright © 2016年 倾慕. All rights reserved.
7 | //
8 |
9 | #import <Foundation/Foundation.h>
10 | #import <UIKit/UIKit.h>
11 |
12 | /// Video capture resolution (all 16:9; if the device does not support the requested resolution, it automatically falls back one level)
13 | typedef NS_ENUM(NSUInteger, LFLiveVideoSessionPreset){
14 |     /// Low resolution (360x640)
15 |     LFCaptureSessionPreset360x640 = 0,
16 |     /// Medium resolution (540x960)
17 |     LFCaptureSessionPreset540x960 = 1,
18 |     /// High resolution (720x1280)
19 |     LFCaptureSessionPreset720x1280 = 2
20 | };
21 |
22 | /// Video quality
23 | typedef NS_ENUM(NSUInteger, LFLiveVideoQuality){
24 |     /// Resolution: 360x640, frame rate: 15fps, bitrate: 500Kbps
25 |     LFLiveVideoQuality_Low1 = 0,
26 |     /// Resolution: 360x640, frame rate: 24fps, bitrate: 800Kbps
27 |     LFLiveVideoQuality_Low2 = 1,
28 |     /// Resolution: 360x640, frame rate: 30fps, bitrate: 800Kbps
29 |     LFLiveVideoQuality_Low3 = 2,
30 |     /// Resolution: 540x960, frame rate: 15fps, bitrate: 800Kbps
31 |     LFLiveVideoQuality_Medium1 = 3,
32 |     /// Resolution: 540x960, frame rate: 24fps, bitrate: 800Kbps
33 |     LFLiveVideoQuality_Medium2 = 4,
34 |     /// Resolution: 540x960, frame rate: 30fps, bitrate: 800Kbps
35 |     LFLiveVideoQuality_Medium3 = 5,
36 |     /// Resolution: 720x1280, frame rate: 15fps, bitrate: 1000Kbps
37 |     LFLiveVideoQuality_High1 = 6,
38 |     /// Resolution: 720x1280, frame rate: 24fps, bitrate: 1200Kbps
39 |     LFLiveVideoQuality_High2 = 7,
40 |     /// Resolution: 720x1280, frame rate: 30fps, bitrate: 1200Kbps
41 |     LFLiveVideoQuality_High3 = 8,
42 |     /// Default configuration (Low2)
43 |     LFLiveVideoQuality_Default = LFLiveVideoQuality_Low2
44 | };
45 |
46 | @interface LFLiveVideoConfiguration : NSObject
47 |
48 | /// Default video configuration
49 | + (instancetype)defaultConfiguration;
50 | /// Video configuration for a given quality preset
51 | + (instancetype)defaultConfigurationForQuality:(LFLiveVideoQuality)videoQuality;
52 |
53 | /// Video configuration for a given quality preset and output orientation
54 | + (instancetype)defaultConfigurationForQuality:(LFLiveVideoQuality)videoQuality orientation:(UIInterfaceOrientation)orientation;
55 |
56 | #pragma mark - Attribute
57 | ///=============================================================================
58 | /// @name Attribute
59 | ///=============================================================================
60 | /// Video resolution; width and height must be multiples of 2, otherwise green edges may appear during decoding/playback
61 | @property (nonatomic, assign) CGSize videoSize;
62 |
63 | /// Video output orientation
64 | @property (nonatomic, assign) UIInterfaceOrientation orientation;
65 |
66 | /// Video frame rate (fps)
67 | @property (nonatomic, assign) NSUInteger videoFrameRate;
68 |
69 | /// Maximum video frame rate (fps)
70 | @property (nonatomic, assign) NSUInteger videoMaxFrameRate;
71 |
72 | /// Minimum video frame rate (fps)
73 | @property (nonatomic, assign) NSUInteger videoMinFrameRate;
74 |
75 | /// Maximum keyframe interval; can be set to 2x the fps, determines the GOP size
76 | @property (nonatomic, assign) NSUInteger videoMaxKeyframeInterval;
77 |
78 | /// Video bitrate, in bps
79 | @property (nonatomic, assign) NSUInteger videoBitRate;
80 |
81 | /// Maximum video bitrate, in bps
82 | @property (nonatomic, assign) NSUInteger videoMaxBitRate;
83 |
84 | /// Minimum video bitrate, in bps
85 | @property (nonatomic, assign) NSUInteger videoMinBitRate;
86 |
87 | /// Capture resolution preset
88 | @property (nonatomic, assign) LFLiveVideoSessionPreset sessionPreset;
89 |
90 | /// The AVCaptureSession preset string derived from sessionPreset
91 | @property (nonatomic, assign, readonly) NSString *avSessionPreset;
92 |
93 | /// Whether the captured video needs to be cropped to fit the target resolution
94 | @property (nonatomic, assign, readonly) BOOL isClipVideo;
95 |
96 | @end
97 |
--------------------------------------------------------------------------------
/LFLiveKitDemo/LFLiveKitDemo/category/UIControl+YYAdd.m:
--------------------------------------------------------------------------------
1 | //
2 | // UIControl+YYAdd.m
3 | //
4 | //
5 | // Created by guoyaoyuan on 13-4-5.
6 | // Copyright (c) 2013 live Interactive. All rights reserved.
7 | //
8 |
9 | #import "UIControl+YYAdd.h"
10 | #import <objc/runtime.h>
11 |
12 |
13 | static const int block_key;
14 |
15 | @interface _LFUIControlBlockTarget : NSObject
16 |
17 | @property (nonatomic, copy) void (^block)(id sender);
18 | @property (nonatomic, assign) UIControlEvents events;
19 |
20 | - (id)initWithBlock:(void (^)(id sender))block events:(UIControlEvents)events;
21 | - (void)invoke:(id)sender;
22 |
23 | @end
24 |
25 | @implementation _LFUIControlBlockTarget
26 |
27 | - (id)initWithBlock:(void (^)(id sender))block events:(UIControlEvents)events {
28 | self = [super init];
29 | if (self) {
30 | self.block = block;
31 | self.events = events;
32 | }
33 | return self;
34 | }
35 |
36 | - (void)invoke:(id)sender {
37 | if (self.block) self.block(sender);
38 | }
39 |
40 | @end
41 |
42 |
43 |
44 | @implementation UIControl (YYAdd)
45 |
46 | - (void)removeAllTargets {
47 | [[self allTargets] enumerateObjectsUsingBlock: ^(id object, BOOL *stop) {
48 | [self removeTarget:object
49 | action:NULL
50 | forControlEvents:UIControlEventAllEvents];
51 | }];
52 | }
53 |
54 | - (void)setTarget:(id)target action:(SEL)action forControlEvents:(UIControlEvents)controlEvents {
55 | NSSet *targets = [self allTargets];
56 | for (id currentTarget in targets) {
57 | NSArray *actions = [self actionsForTarget:currentTarget forControlEvent:controlEvents];
58 | for (NSString *currentAction in actions) {
59 | [self removeTarget:currentTarget action:NSSelectorFromString(currentAction)
60 | forControlEvents:controlEvents];
61 | }
62 | }
63 | [self addTarget:target action:action forControlEvents:controlEvents];
64 | }
65 |
66 | - (void)addBlockForControlEvents:(UIControlEvents)controlEvents
67 | block:(void (^)(id sender))block {
68 | _LFUIControlBlockTarget *target = [[_LFUIControlBlockTarget alloc]
69 | initWithBlock:block events:controlEvents];
70 | [self addTarget:target action:@selector(invoke:) forControlEvents:controlEvents];
71 | NSMutableArray *targets = [self _lf_allUIControlBlockTargets];
72 | [targets addObject:target];
73 | }
74 |
75 | - (void)setBlockForControlEvents:(UIControlEvents)controlEvents
76 | block:(void (^)(id sender))block {
77 | [self removeAllBlocksForControlEvents:controlEvents];
78 | [self addBlockForControlEvents:controlEvents block:block];
79 | }
80 |
81 | - (void)removeAllBlocksForControlEvents:(UIControlEvents)controlEvents {
82 | NSMutableArray *targets = [self _lf_allUIControlBlockTargets];
83 | NSMutableArray *removes = [NSMutableArray array];
84 | [targets enumerateObjectsUsingBlock: ^(id obj, NSUInteger idx, BOOL *stop) {
85 | _LFUIControlBlockTarget *target = (_LFUIControlBlockTarget *)obj;
86 | if (target.events == controlEvents) {
87 | [removes addObject:target];
88 | [self removeTarget:target
89 | action:@selector(invoke:)
90 | forControlEvents:controlEvents];
91 | }
92 | }];
93 | [targets removeObjectsInArray:removes];
94 | }
95 |
96 | - (NSMutableArray *)_lf_allUIControlBlockTargets {
97 | NSMutableArray *targets = objc_getAssociatedObject(self, &block_key);
98 | if (!targets) {
99 | targets = [NSMutableArray array];
100 | objc_setAssociatedObject(self, &block_key, targets, OBJC_ASSOCIATION_RETAIN_NONATOMIC);
101 | }
102 | return targets;
103 | }
104 |
105 | @end
106 |
--------------------------------------------------------------------------------
/LFLiveKit/LFLiveSession.h:
--------------------------------------------------------------------------------
1 | //
2 | // LFLiveSession.h
3 | // LFLiveKit
4 | //
5 | // Created by 倾慕 on 16/5/2.
6 | // Copyright © 2016年 倾慕. All rights reserved.
7 | //
8 |
9 | #import <UIKit/UIKit.h>
10 | #import <AVFoundation/AVFoundation.h>
11 | #import "LFLiveStreamInfo.h"
12 | #import "LFAudioFrame.h"
13 | #import "LFVideoFrame.h"
14 | #import "LFLiveAudioConfiguration.h"
15 | #import "LFLiveVideoConfiguration.h"
16 | #import "LFLiveDebug.h"
17 |
18 | typedef void (^ LFRequestComplete)(_Nullable id info,NSError *_Nullable errorMsg);
19 |
20 | /// Stream type
21 | typedef NS_ENUM(NSUInteger, LFLiveType){
22 |     /// RTMP
23 |     LFLiveRTMP = 0,
24 |     /// FLV over a TCP connection
25 |     LFLiveFLV = 1,
26 | };
27 |
28 | @class LFLiveSession;
29 | @protocol LFLiveSessionDelegate <NSObject>
30 |
31 | @optional
32 | /** Called when the live state changes. */
33 | - (void)liveSession:(nullable LFLiveSession *)session liveStateDidChange:(LFLiveState)state;
34 | /** Called with periodic live debug info. */
35 | - (void)liveSession:(nullable LFLiveSession *)session debugInfo:(nullable LFLiveDebug*)debugInfo;
36 | /** Called when a socket error occurs. */
37 | - (void)liveSession:(nullable LFLiveSession*)session errorCode:(LFLiveSocketErrorCode)errorCode;
38 | @end
39 |
40 | @class LFLiveStreamInfo;
41 |
42 | @interface LFLiveSession : NSObject
43 |
44 | #pragma mark - Attribute
45 | ///=============================================================================
46 | /// @name Attribute
47 | ///=============================================================================
48 | /** The delegate that receives state changes, debug info and error callbacks. */
49 | @property (nullable, nonatomic, weak) id<LFLiveSessionDelegate> delegate;
50 |
51 | /** Controls capture: set to YES to start capturing, NO to stop. */
52 | @property (nonatomic, assign) BOOL running;
53 |
54 | /** The view that hosts the OpenGL ES preview. */
55 | @property (nonatomic, strong, null_resettable) UIView *preView;
56 |
57 | /** The camera position; defaults to the front camera. */
58 | @property (nonatomic, assign) AVCaptureDevicePosition captureDevicePosition;
59 |
60 | /** Switches the capture shader between the beauty filter and an empty (pass-through) filter. */
61 | @property (nonatomic, assign) BOOL beautyFace;
62 |
63 | /** Mutes the audio; while muted, the audio buffers are zero-filled (memset to 0). */
64 | @property (nonatomic, assign) BOOL muted;
65 |
66 | /** The stream info used for packaging and upload. */
67 | @property (nullable, nonatomic, strong, readonly) LFLiveStreamInfo *streamInfo;
68 |
69 | /** The current state of the stream. */
70 | @property (nonatomic, assign, readonly) LFLiveState state;
71 |
72 | /** Whether to emit debug info (stream and upload statistics, every 1s). */
73 | @property (nonatomic, assign) BOOL showDebugInfo;
74 |
75 | /** The reconnect interval, in seconds. */
76 | @property (nonatomic, assign) NSUInteger reconnectInterval;
77 |
78 | /** The maximum number of reconnect attempts. */
79 | @property (nonatomic, assign) NSUInteger reconnectCount;
80 |
81 | #pragma mark - Initializer
82 | ///=============================================================================
83 | /// @name Initializer
84 | ///=============================================================================
85 | - (nullable instancetype)init UNAVAILABLE_ATTRIBUTE;
86 | + (nullable instancetype)new UNAVAILABLE_ATTRIBUTE;
87 |
88 | /**
89 | The designated initializer. Multiple instances with the same configuration will make the
90 | capture unstable.
91 | */
92 | - (nullable instancetype)initWithAudioConfiguration:(nullable LFLiveAudioConfiguration*)audioConfiguration videoConfiguration:(nullable LFLiveVideoConfiguration*)videoConfiguration liveType:(LFLiveType)liveType NS_DESIGNATED_INITIALIZER;
93 |
94 | /** Starts the live stream. */
95 | - (void)startLive:(nonnull LFLiveStreamInfo*)streamInfo;
96 |
97 | /** Stops the live stream. */
98 | - (void)stopLive;
99 |
100 |
101 | @end
102 |
103 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 |
2 | [Build Status](https://travis-ci.org/LaiFengiOS/LFLiveKit)
3 | [License MIT](https://raw.githubusercontent.com/chenliming777/LFLiveKit/master/LICENSE)
4 | [CocoaPods](http://cocoapods.org/?q=LFLiveKit)
5 | [Support iOS 8+](https://www.apple.com/nl/ios/)
6 |
7 | 
8 |
9 | LFLiveKit
10 |
11 | LFLiveKit is an iOS live-streaming (push) framework. It supports RTMP and FLV output by default, and its architecture is easy to extend.
12 |
13 | Podfile
14 | To integrate LFLiveKit into your Xcode project using CocoaPods, specify it in your Podfile:
15 |
16 | source 'https://github.com/CocoaPods/Specs.git'
17 | platform :ios, '8.0'
18 | pod 'LFLiveKit'
19 |
20 | Then, run the following command:
21 | $ pod install
22 |
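After pod install, import the umbrella header wherever you use the framework (assuming the default CocoaPods setup):

    #import "LFLiveKit.h"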
23 |
24 | Features
25 |
26 |     Background recording
27 |     Portrait and landscape recording
28 |     GPUImage beauty filter
29 |     Hardware H.264 encoding
30 |     Hardware AAC encoding
31 |     Frame dropping on weak networks
32 |     Dynamic bitrate switching
33 |     Audio configuration
34 |     Video configuration
35 |     RTMP transport
36 |     Camera switching
37 |     Audio mute
38 |     Send buffer support
39 |     FLV packaging and sending
40 |
41 |
42 | Architecture
43 |
44 | capture: LFAudioCapture and LFVideoCapture
45 | encode: LFHardwareAudioEncoder and LFHardwareVideoEncoder
46 | publish: LFStreamRtmpSocket LFStreamTcpSocket
47 |
48 | Usage
49 |
50 | - (LFLiveSession*)session{
51 | if(!_session){
52 | _session = [[LFLiveSession alloc] initWithAudioConfiguration:[LFLiveAudioConfiguration defaultConfiguration] videoConfiguration:[LFLiveVideoConfiguration defaultConfiguration] liveType:LFLiveRTMP];
53 | _session.running = YES;
54 | _session.preView = self;
55 | }
56 | return _session;
57 | }
58 |
59 | - (LFLiveSession*)session{
60 | if(!_session){
61 | LFLiveAudioConfiguration *audioConfiguration = [LFLiveAudioConfiguration new];
62 | audioConfiguration.numberOfChannels = 2;
63 | audioConfiguration.audioBitrate = LFLiveAudioBitRate_128Kbps;
64 | audioConfiguration.audioSampleRate = LFLiveAudioSampleRate_44100Hz;
65 |
66 | LFLiveVideoConfiguration *videoConfiguration = [LFLiveVideoConfiguration new];
67 | videoConfiguration.videoSize = CGSizeMake(1280, 720);
68 | videoConfiguration.videoBitRate = 800*1024;
69 | videoConfiguration.videoMaxBitRate = 1000*1024;
70 | videoConfiguration.videoMinBitRate = 500*1024;
71 | videoConfiguration.videoFrameRate = 15;
72 | videoConfiguration.videoMaxKeyframeInterval = 30;
73 | videoConfiguration.orientation = UIInterfaceOrientationLandscapeLeft;
74 | videoConfiguration.sessionPreset = LFCaptureSessionPreset720x1280;
75 |
76 | _session = [[LFLiveSession alloc] initWithAudioConfiguration:audioConfiguration videoConfiguration:videoConfiguration liveType:LFLiveRTMP];
77 | _session.running = YES;
78 | _session.preView = self;
79 | }
80 | return _session;
81 | }
82 |
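As a middle ground between the two approaches above, the configuration classes also provide quality presets (declared in LFLiveAudioConfiguration.h and LFLiveVideoConfiguration.h); a minimal sketch:

    - (LFLiveSession*)session{
        if(!_session){
            LFLiveAudioConfiguration *audioConfiguration = [LFLiveAudioConfiguration defaultConfigurationForQuality:LFLiveAudioQuality_High];
            LFLiveVideoConfiguration *videoConfiguration = [LFLiveVideoConfiguration defaultConfigurationForQuality:LFLiveVideoQuality_Medium2 orientation:UIInterfaceOrientationPortrait];
            _session = [[LFLiveSession alloc] initWithAudioConfiguration:audioConfiguration videoConfiguration:videoConfiguration liveType:LFLiveRTMP];
            _session.running = YES;
            _session.preView = self;
        }
        return _session;
    }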
83 | LFLiveStreamInfo *streamInfo = [LFLiveStreamInfo new];
84 | streamInfo.url = @"your server rtmp url";
85 | [self.session startLive:streamInfo];
86 | [self.session stopLive];
87 |
88 | Callbacks:
89 | - (void)liveSession:(nullable LFLiveSession *)session liveStateDidChange: (LFLiveState)state;
90 | - (void)liveSession:(nullable LFLiveSession *)session debugInfo:(nullable LFLiveDebug*)debugInfo;
91 | - (void)liveSession:(nullable LFLiveSession*)session errorCode:(LFLiveSocketErrorCode)errorCode;
92 |
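A minimal sketch of adopting these callbacks (the view controller context is illustrative; only the delegate methods come from LFLiveSessionDelegate):

    @interface ViewController () <LFLiveSessionDelegate>
    @end

    @implementation ViewController

    - (void)viewDidLoad {
        [super viewDidLoad];
        self.session.delegate = self;   // receive state, debug and error callbacks
    }

    - (void)liveSession:(nullable LFLiveSession *)session liveStateDidChange:(LFLiveState)state {
        NSLog(@"live state changed: %lu", (unsigned long)state);
    }

    - (void)liveSession:(nullable LFLiveSession *)session errorCode:(LFLiveSocketErrorCode)errorCode {
        NSLog(@"socket error: %lu", (unsigned long)errorCode);
    }

    @end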
93 | License
94 |
95 | LFLiveKit is released under the MIT license. See LICENSE for details.
96 |
97 |
98 |
99 |
100 |
101 |
102 |
103 |
--------------------------------------------------------------------------------
/LFLiveKit/packet/flv/types.h:
--------------------------------------------------------------------------------
1 | /*
2 | $Id: types.h 231 2011-06-27 13:46:19Z marc.noirot $
3 |
4 | FLV Metadata updater
5 |
6 | Copyright (C) 2007-2012 Marc Noirot
7 |
8 | This file is part of FLVMeta.
9 |
10 | FLVMeta is free software; you can redistribute it and/or modify
11 | it under the terms of the GNU General Public License as published by
12 | the Free Software Foundation; either version 2 of the License, or
13 | (at your option) any later version.
14 |
15 | FLVMeta is distributed in the hope that it will be useful,
16 | but WITHOUT ANY WARRANTY; without even the implied warranty of
17 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
18 | GNU General Public License for more details.
19 |
20 | You should have received a copy of the GNU General Public License
21 | along with FLVMeta; if not, write to the Free Software
22 | Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
23 | */
24 | #ifndef __TYPES_H__
25 | #define __TYPES_H__
26 |
27 | #include <stdio.h>
28 |
29 | #include <stdint.h>
30 |
31 | typedef uint8_t byte, uint8, uint8_bitmask;
32 |
33 | typedef uint16_t uint16, uint16_be, uint16_le;
34 |
35 | typedef int16_t sint16, sint16_be, sint16_le;
36 |
37 | typedef uint32_t uint32, uint32_be, uint32_le;
38 |
39 | typedef int32_t sint32, sint32_be, sint32_le;
40 |
41 | typedef struct __uint24 {
42 | uint8 b[3];
43 | } uint24, uint24_be, uint24_le;
44 |
45 | typedef uint64_t uint64, uint64_le, uint64_be;
46 |
47 | typedef int64_t sint64, sint64_le, sint64_be;
48 |
49 | //typedef
50 | //#if SIZEOF_FLOAT == 8
51 | //float
52 | //#elif SIZEOF_DOUBLE == 8
53 | //double
54 | //#elif SIZEOF_LONG_DOUBLE == 8
55 | //long double
56 | //#else
57 | //uint64_t
58 | //#endif
59 | //number64, number64_le, number64_be;
60 |
61 | typedef double number64, number64_le, number64_be;
62 |
63 | #ifdef __cplusplus
64 | extern "C" {
65 | #endif /* __cplusplus */
66 |
67 | #ifdef WORDS_BIGENDIAN
68 |
69 | # define swap_uint16(x) (x)
70 | # define swap_sint16(x) (x)
71 | # define swap_uint32(x) (x)
72 | # define swap_number64(x) (x)
73 |
74 | #else /* !defined WORDS_BIGENDIAN */
75 |
76 | /* swap 16 bits integers */
77 | # define swap_uint16(x) ((uint16)((((x) & 0x00FFU) << 8) | \
78 | (((x) & 0xFF00U) >> 8)))
79 | # define swap_sint16(x) ((sint16)((((x) & 0x00FF) << 8) | \
80 | (((x) & 0xFF00) >> 8)))
81 |
82 | /* swap 32 bits integers */
83 | # define swap_uint32(x) ((uint32)((((x) & 0x000000FFU) << 24) | \
84 | (((x) & 0x0000FF00U) << 8) | \
85 | (((x) & 0x00FF0000U) >> 8) | \
86 | (((x) & 0xFF000000U) >> 24)))
87 |
88 | /* swap 64 bits doubles */
89 | number64 swap_number64(number64);
90 |
91 | #endif /* WORDS_BIGENDIAN */
92 |
93 | /* convert big endian 24 bits integers to native integers */
94 | # define uint24_be_to_uint32(x) ((uint32)(((x).b[0] << 16) | \
95 | ((x).b[1] << 8) | (x).b[2]))
96 |
97 | /* convert native integers into 24 bits big endian integers */
98 | uint24_be uint32_to_uint24_be(uint32);
99 |
100 | /* large file support */
101 | #ifdef HAVE_FSEEKO
102 | # define lfs_ftell ftello
103 | # define lfs_fseek fseeko
104 |
105 | # define FILE_OFFSET_T_64_BITS 1
106 | typedef off_t file_offset_t;
107 |
108 | #else /* !HAVE_FSEEKO */
109 |
110 | # ifdef WIN32
111 |
112 | # define FILE_OFFSET_T_64_BITS 1
113 | typedef long long int file_offset_t;
114 |
115 | /* Win32 large file support */
116 | file_offset_t lfs_ftell(FILE * stream);
117 | int lfs_fseek(FILE * stream, file_offset_t offset, int whence);
118 |
119 | # else /* !defined WIN32 */
120 |
121 | # define lfs_ftell ftell
122 | # define lfs_fseek fseek
123 |
124 | typedef long file_offset_t;
125 |
126 | # endif /* WIN32 */
127 |
128 | #endif /* HAVE_FSEEKO */
129 |
130 | /* file offset printf specifier */
131 | #ifdef FILE_OFFSET_T_64_BITS
132 | # define FILE_OFFSET_PRINTF_FORMAT "ll"
133 | #else
134 | # define FILE_OFFSET_PRINTF_FORMAT "l"
135 | #endif
136 |
137 | #ifdef __cplusplus
138 | }
139 | #endif /* __cplusplus */
140 |
141 | #endif /* __TYPES_H__ */
142 |
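/*
   Illustrative sketch: a round trip through the 24-bit big-endian helpers
   declared above. FLV stores tag body sizes and timestamps in this layout
   (most significant byte first). The value below is made up.

       uint32 tag_size = 0x000321;                        // fits in 24 bits
       uint24_be packed = uint32_to_uint24_be(tag_size);  // bytes: 0x00 0x03 0x21
       assert(uint24_be_to_uint32(packed) == tag_size);   // requires <assert.h>
*/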
--------------------------------------------------------------------------------
/LFLiveKitDemo/LFLiveKitDemo.xcodeproj/xcuserdata/admin.xcuserdatad/xcschemes/LFLiveKitDemo.xcscheme:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/LFLiveKitDemo/LFLiveKitDemo/category/UIView+YYAdd.h:
--------------------------------------------------------------------------------
1 | //
2 | // UIView+Add.h
3 | //
4 | //
5 | // Created by guoyaoyuan on 13-4-3.
6 | // Copyright (c) 2013 live Interactive. All rights reserved.
7 | //
8 |
9 | #import <UIKit/UIKit.h>
10 |
11 | /**
12 | Provides extensions for `UIView`.
13 | */
14 | @interface UIView (YYAdd)
15 |
16 | /**
17 | Create a snapshot image of the complete view hierarchy.
18 | This method should be called in main thread.
19 | */
20 | - (UIImage *)snapshotImage;
21 |
22 | /**
23 | Create a snapshot PDF of the complete view hierarchy.
24 | This method should be called in main thread.
25 | */
26 | - (NSData *)snapshotPDF;
27 |
28 | /**
29 | Shortcut to set the view.layer's shadow
30 |
31 | @param color Shadow Color
32 | @param offset Shadow offset
33 | @param radius Shadow radius
34 | */
35 | - (void)setLayerShadow:(UIColor*)color offset:(CGSize)offset radius:(CGFloat)radius;
36 | /**
37 |  * Set an inset shadow (added by 郭liyuan).
38 | */
39 | - (void) makeInsetShadow;
40 | - (void) makeInsetShadowWithRadius:(float)radius Alpha:(float)alpha;
41 | - (void) makeInsetShadowWithRadius:(float)radius Color:(UIColor *)color Directions:(NSArray *)directions;
42 |
43 | /**
44 | Remove all subviews.
45 |
46 | @warning Never call this method inside your view's drawRect: method.
47 | */
48 | - (void)removeAllSubviews;
49 |
50 | /**
51 | Returns the view's view controller (may be nil).
52 | */
53 | @property (nonatomic, readonly) UIViewController *viewController;
54 |
55 | @property (nonatomic) CGFloat left; ///< Shortcut for frame.origin.x.
56 | @property (nonatomic) CGFloat top; ///< Shortcut for frame.origin.y
57 | @property (nonatomic) CGFloat right; ///< Shortcut for frame.origin.x + frame.size.width
58 | @property (nonatomic) CGFloat bottom; ///< Shortcut for frame.origin.y + frame.size.height
59 | @property (nonatomic) CGFloat width; ///< Shortcut for frame.size.width.
60 | @property (nonatomic) CGFloat height; ///< Shortcut for frame.size.height.
61 | @property (nonatomic) CGFloat centerX; ///< Shortcut for center.x
62 | @property (nonatomic) CGFloat centerY; ///< Shortcut for center.y
63 | @property (nonatomic) CGPoint origin; ///< Shortcut for frame.origin.
64 | @property (nonatomic) CGSize size; ///< Shortcut for frame.size.
65 | @property (nonatomic, readonly) CGRect screenFrame; ///< View frame on the screen, taking into account scroll views.
66 |
67 | /**
68 | Returns the visible alpha on screen, taking into account superview and window.
69 | */
70 | @property (nonatomic, readonly) CGFloat visibleAlpha;
71 |
72 |
73 | /**
74 | Converts a point from the receiver's coordinate system to that of the specified view or window.
75 |
76 | @param point A point specified in the local coordinate system (bounds) of the receiver.
77 | @param view The view or window into whose coordinate system point is to be converted.
78 | If view is nil, this method instead converts to window base coordinates.
79 | @return The point converted to the coordinate system of view.
80 | */
81 | - (CGPoint)convertPoint:(CGPoint)point toViewOrWindow:(UIView *)view;
82 |
83 | /**
84 | Converts a point from the coordinate system of a given view or window to that of the receiver.
85 |
86 | @param point A point specified in the local coordinate system (bounds) of view.
87 | @param view The view or window with point in its coordinate system.
88 | If view is nil, this method instead converts from window base coordinates.
89 | @return The point converted to the local coordinate system (bounds) of the receiver.
90 | */
91 | - (CGPoint)convertPoint:(CGPoint)point fromViewOrWindow:(UIView *)view;
92 |
93 | /**
94 | Converts a rectangle from the receiver's coordinate system to that of another view or window.
95 |
96 | @param rect A rectangle specified in the local coordinate system (bounds) of the receiver.
97 | @param view The view or window that is the target of the conversion operation. If view is nil, this method instead converts to window base coordinates.
98 | @return The converted rectangle.
99 | */
100 | - (CGRect)convertRect:(CGRect)rect toViewOrWindow:(UIView *)view;
101 |
102 | /**
103 | Converts a rectangle from the coordinate system of another view or window to that of the receiver.
104 |
105 | @param rect A rectangle specified in the local coordinate system (bounds) of view.
106 | @param view The view or window with rect in its coordinate system.
107 | If view is nil, this method instead converts from window base coordinates.
108 | @return The converted rectangle.
109 | */
110 | - (CGRect)convertRect:(CGRect)rect fromViewOrWindow:(UIView *)view;
111 |
112 | /**
113 |  * Returns an object of the given class found on the responder chain.
114 |  *
115 |  * @param viewControllerCls The class of the object to return; when nil, the current view controller is returned.
116 |  *
117 |  * @return The view controller, or the first responder of the requested class.
118 |  */
119 | - (nonnull id)viewControllerWithNeedViewOrViewController:(nullable Class)viewControllerCls
120 | ;
121 |
122 |
123 | /// Clear the delegate and dataSource of every UITableView/UIScrollView among the subviews
124 | - (void)clearScrollViewDelegate;
125 |
126 |
127 | - (void)removeAllGestures;
128 | - (void)removeAllGesturesWithSubViews;
129 |
130 | /// Disable animations while executing the block
131 | + (void)disableAnimationWithBlock:(void (^)(void))block;
132 | @end
133 |
--------------------------------------------------------------------------------
/LFLiveKit.xcodeproj/xcshareddata/xcschemes/LFLiveKit.xcscheme:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/LFLiveKit/capture/LFVideoCapture.m:
--------------------------------------------------------------------------------
1 | //
2 | // LFVideoCapture.m
3 | // LFLiveKit
4 | //
5 | // Created by 倾慕 on 16/5/1.
6 | // Copyright © 2016年 倾慕. All rights reserved.
7 | //
8 |
9 | #import "LFVideoCapture.h"
10 | #import "GPUImage.h"
11 | #import "LFGPUImageBeautyFilter.h"
12 | #import "LFGPUImageEmptyFilter.h"
13 |
14 | @interface LFVideoCapture ()
15 |
16 | @property(nonatomic, strong) GPUImageVideoCamera *videoCamera;
17 | @property(nonatomic, strong) GPUImageOutput *filter;
18 | @property(nonatomic, strong) GPUImageOutput *output;
19 | @property(nonatomic, strong) GPUImageCropFilter *cropfilter;
20 | @property(nonatomic, strong) GPUImageView *gpuImageView;
21 | @property(nonatomic, strong) LFLiveVideoConfiguration *configuration;
22 |
23 | @end
24 |
25 | @implementation LFVideoCapture
26 |
27 | #pragma mark -- LifeCycle
28 | - (instancetype)initWithVideoConfiguration:(LFLiveVideoConfiguration *)configuration{
29 | if(self = [super init]){
30 | _configuration = configuration;
31 | _videoCamera = [[GPUImageVideoCamera alloc] initWithSessionPreset:_configuration.avSessionPreset cameraPosition:AVCaptureDevicePositionFront];
32 | _videoCamera.outputImageOrientation = _configuration.orientation;
33 | _videoCamera.horizontallyMirrorFrontFacingCamera = NO;
34 | _videoCamera.horizontallyMirrorRearFacingCamera = NO;
35 | _videoCamera.frameRate = (int32_t)_configuration.videoFrameRate;
36 |
37 | _gpuImageView = [[GPUImageView alloc] initWithFrame:[UIScreen mainScreen].bounds];
38 | [_gpuImageView setFillMode:kGPUImageFillModePreserveAspectRatioAndFill];
39 | [_gpuImageView setAutoresizingMask:UIViewAutoresizingFlexibleWidth | UIViewAutoresizingFlexibleHeight];
40 | [_gpuImageView setInputRotation:kGPUImageFlipHorizonal atIndex:0];
41 |
42 | [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(willEnterBackground:) name:UIApplicationWillResignActiveNotification object:nil];
43 | [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(willEnterForeground:) name:UIApplicationDidBecomeActiveNotification object:nil];
44 |
45 | self.beautyFace = YES;
46 | }
47 | return self;
48 | }
49 |
50 | - (void)dealloc{
51 | [UIApplication sharedApplication].idleTimerDisabled = NO;
52 | [[NSNotificationCenter defaultCenter] removeObserver:self];
53 | [_videoCamera stopCameraCapture];
54 | }
55 |
56 | #pragma mark -- Setter Getter
57 | - (void)setRunning:(BOOL)running{
58 | if(_running == running) return;
59 | _running = running;
60 |
61 | if(!_running){
62 | [UIApplication sharedApplication].idleTimerDisabled = NO;
63 | [_videoCamera stopCameraCapture];
64 | }else{
65 | [UIApplication sharedApplication].idleTimerDisabled = YES;
66 | [_videoCamera startCameraCapture];
67 | }
68 | }
69 |
70 | - (void)setPreView:(UIView *)preView{
71 | if(_gpuImageView.superview) [_gpuImageView removeFromSuperview];
72 | [preView insertSubview:_gpuImageView atIndex:0];
73 | }
74 |
75 | - (UIView*)preView{
76 | return _gpuImageView.superview;
77 | }
78 |
79 | - (void)setCaptureDevicePosition:(AVCaptureDevicePosition)captureDevicePosition{
80 | [_videoCamera rotateCamera];
81 | _videoCamera.frameRate = (int32_t)_configuration.videoFrameRate;
82 | if (captureDevicePosition == AVCaptureDevicePositionFront) {
83 | [_gpuImageView setInputRotation:kGPUImageFlipHorizonal atIndex:0];
84 | } else {
85 | [_gpuImageView setInputRotation:kGPUImageNoRotation atIndex:0];
86 | }
87 | }
88 |
89 | - (AVCaptureDevicePosition)captureDevicePosition{
90 | return [_videoCamera cameraPosition];
91 | }
92 |
93 | - (void)setVideoFrameRate:(NSInteger)videoFrameRate{
94 | if(videoFrameRate <= 0) return;
95 | if(videoFrameRate == _videoCamera.frameRate) return;
96 | _videoCamera.frameRate = (uint32_t)videoFrameRate;
97 | }
98 |
99 | - (NSInteger)videoFrameRate{
100 | return _videoCamera.frameRate;
101 | }
102 |
103 | - (void)setBeautyFace:(BOOL)beautyFace{
104 | if(_beautyFace == beautyFace) return;
105 |
106 | _beautyFace = beautyFace;
107 | [_filter removeAllTargets];
108 | [_cropfilter removeAllTargets];
109 | [_videoCamera removeAllTargets];
110 |
111 | if (_beautyFace) {
112 | _output = [[LFGPUImageEmptyFilter alloc] init];
113 | _filter = [[LFGPUImageBeautyFilter alloc] init];
114 |
115 | __weak typeof(self) _self = self;
116 | [_output setFrameProcessingCompletionBlock:^(GPUImageOutput *output, CMTime time) {
117 | [_self processVideo:output];
118 | }];
119 | } else {
120 | _filter = [[LFGPUImageEmptyFilter alloc] init];
121 |
122 | __weak typeof(self) _self = self;
123 | [_filter setFrameProcessingCompletionBlock:^(GPUImageOutput *output, CMTime time) {
124 | [_self processVideo:output];
125 | }];
126 | }
127 |
128 | if (_configuration.isClipVideo) {
129 | if (_configuration.orientation == UIInterfaceOrientationPortrait || _configuration.orientation == UIInterfaceOrientationPortraitUpsideDown){
130 | _cropfilter = [[GPUImageCropFilter alloc] initWithCropRegion:CGRectMake(0.125, 0, 0.75, 1)];
131 | } else {
132 | _cropfilter = [[GPUImageCropFilter alloc] initWithCropRegion:CGRectMake(0, 0.125, 1, 0.75)];
133 | }
134 | [_videoCamera addTarget:_cropfilter];
135 | [_cropfilter addTarget:_filter];
136 | } else {
137 | [_videoCamera addTarget:_filter];
138 | }
139 |
140 | if (_beautyFace) {
141 | [_filter addTarget:_output];
142 | [_output addTarget:_gpuImageView];
143 | } else {
144 | [_filter addTarget:_gpuImageView];
145 | }
146 |
147 | if (_videoCamera.cameraPosition == AVCaptureDevicePositionFront) {
148 | [_gpuImageView setInputRotation:kGPUImageFlipHorizonal atIndex:0];
149 | } else {
150 | [_gpuImageView setInputRotation:kGPUImageNoRotation atIndex:0];
151 | }
152 | }
153 |
154 | #pragma mark -- Custom Method
155 | - (void)processVideo:(GPUImageOutput *)output{
156 | __weak typeof(self) _self = self;
157 | @autoreleasepool {
158 | GPUImageFramebuffer *imageFramebuffer = output.framebufferForOutput;
159 | CVPixelBufferRef pixelBuffer = [imageFramebuffer pixelBuffer];
160 |
161 | if(pixelBuffer && _self.delegate && [_self.delegate respondsToSelector:@selector(captureOutput:pixelBuffer:)]){
162 | [_self.delegate captureOutput:_self pixelBuffer:pixelBuffer];
163 | }
164 |
165 | }
166 | }
167 |
168 | #pragma mark Notification
169 |
170 | - (void)willEnterBackground:(NSNotification*)notification{
171 | [UIApplication sharedApplication].idleTimerDisabled = NO;
172 | [_videoCamera pauseCameraCapture];
173 | runSynchronouslyOnVideoProcessingQueue(^{
174 | glFinish();
175 | });
176 | }
177 |
178 | - (void)willEnterForeground:(NSNotification*)notification{
179 | [_videoCamera resumeCameraCapture];
180 | [UIApplication sharedApplication].idleTimerDisabled = YES;
181 | }
182 |
183 | @end
184 |
--------------------------------------------------------------------------------
/LFLiveKit/upload/LFStreamingBuffer.m:
--------------------------------------------------------------------------------
1 | //
2 | // LFStreamingBuffer.m
3 | // LFLiveKit
4 | //
5 | // Created by 倾慕 on 16/5/2.
6 | // Copyright © 2016年 倾慕. All rights reserved.
7 | //
8 |
9 | #import "LFStreamingBuffer.h"
10 | #import "NSMutableArray+LFAdd.h"
11 |
12 | static const NSUInteger defaultSortBufferMaxCount = 10;///< Sort within a window of 10 frames
13 | static const NSUInteger defaultUpdateInterval = 1;///< Sampling interval: 1s
14 | static const NSUInteger defaultCallBackInterval = 5;///< Evaluate the buffer state every 5s
15 | static const NSUInteger defaultSendBufferMaxCount = 600;///< Maximum send buffer size: 600 frames
16 |
17 | @interface LFStreamingBuffer (){
18 | dispatch_semaphore_t _lock;
19 | }
20 |
21 | @property (nonatomic, strong) NSMutableArray *sortList;
22 | @property (nonatomic, strong, readwrite) NSMutableArray *list;
23 | @property (nonatomic, strong) NSMutableArray *thresholdList;
24 |
25 | /** Track the state of the send buffer */
26 | @property (nonatomic, assign) NSInteger currentInterval;
27 | @property (nonatomic, assign) NSInteger callBackInterval;
28 | @property (nonatomic, assign) NSInteger updateInterval;
29 | @property (nonatomic, assign) BOOL startTimer;
30 |
31 | @end
32 |
33 | @implementation LFStreamingBuffer
34 |
35 | - (instancetype)init{
36 | if(self = [super init]){
37 | _lock = dispatch_semaphore_create(1);
38 | self.updateInterval = defaultUpdateInterval;
39 | self.callBackInterval = defaultCallBackInterval;
40 | self.maxCount = defaultSendBufferMaxCount;
41 | }
42 | return self;
43 | }
44 |
45 | - (void)dealloc{
46 | }
47 |
48 | #pragma mark -- Custom
49 | - (void)appendObject:(LFFrame*)frame{
50 | if(!frame) return;
51 | if(!_startTimer){
52 | _startTimer = YES;
53 | [self tick];
54 | }
55 |
56 | dispatch_semaphore_wait(_lock, DISPATCH_TIME_FOREVER);
57 | if(self.sortList.count < defaultSortBufferMaxCount){
58 | [self.sortList addObject:frame];
59 | }else{
60 |             ///< Sort by timestamp
61 |             [self.sortList addObject:frame];
62 |             NSArray *sortedSendQuery = [self.sortList sortedArrayUsingFunction:frameDataCompare context:NULL];
63 |             [self.sortList removeAllObjects];
64 |             [self.sortList addObjectsFromArray:sortedSendQuery];
65 |             /// Drop frames if the send buffer is full
66 |             [self removeExpireFrame];
67 |             /// Move the oldest sorted frame into the send buffer
68 | LFFrame *firstFrame = [self.sortList lfPopFirstObject];
69 |
70 | if(firstFrame) [self.list addObject:firstFrame];
71 | }
72 | dispatch_semaphore_signal(_lock);
73 | }
74 |
75 | - (LFFrame*)popFirstObject{
76 | dispatch_semaphore_wait(_lock, DISPATCH_TIME_FOREVER);
77 | LFFrame *firstFrame = [self.list lfPopFirstObject];
78 | dispatch_semaphore_signal(_lock);
79 | return firstFrame;
80 | }
81 |
82 | - (void)removeAllObject{
83 | dispatch_semaphore_wait(_lock, DISPATCH_TIME_FOREVER);
84 | [self.list removeAllObjects];
85 | dispatch_semaphore_signal(_lock);
86 | }
87 |
88 | - (void)removeExpireFrame{
89 | if(self.list.count < self.maxCount) return;
90 |
91 |     NSArray *pFrames = [self expirePFrames];///< P-frames preceding the next keyframe
92 | if(pFrames && pFrames.count > 0){
93 | [self.list removeObjectsInArray:pFrames];
94 | return;
95 | }
96 |
97 | LFFrame *firstIFrame = [self firstIFrame];
98 | if(firstIFrame){
99 | [self.list removeObject:firstIFrame];
100 | return;
101 | }
102 |
103 | [self.list removeAllObjects];
104 | }
105 |
106 | - (NSArray*)expirePFrames{
107 | NSMutableArray *pframes = [[NSMutableArray alloc] init];
108 | for(NSInteger index = 0;index < self.list.count;index++){
109 | LFFrame *frame = [self.list objectAtIndex:index];
110 | if([frame isKindOfClass:[LFVideoFrame class]]){
111 | LFVideoFrame *videoFrame = (LFVideoFrame*)frame;
112 | if(videoFrame.isKeyFrame && pframes.count > 0){
113 | break;
114 | }else if(!videoFrame.isKeyFrame){
115 | [pframes addObject:frame];
116 | }
117 | }
118 | }
119 | return pframes;
120 | }
121 |
122 | - (LFFrame*)firstIFrame{
123 | for(NSInteger index = 0;index < self.list.count;index++){
124 | LFFrame *frame = [self.list objectAtIndex:index];
125 | if([frame isKindOfClass:[LFVideoFrame class]] && ((LFVideoFrame*)frame).isKeyFrame){
126 | return frame;
127 | }
128 | }
129 | return nil;
130 | }
131 |
132 | NSInteger frameDataCompare(id obj1, id obj2, void *context){
133 | LFFrame* frame1 = (LFFrame*) obj1;
134 | LFFrame *frame2 = (LFFrame*) obj2;
135 |
136 | if (frame1.timestamp == frame2.timestamp)
137 | return NSOrderedSame;
138 | else if(frame1.timestamp > frame2.timestamp)
139 | return NSOrderedDescending;
140 | return NSOrderedAscending;
141 | }
142 |
143 | - (LFLiveBuffferState)currentBufferState{
144 | NSInteger currentCount = 0;
145 | NSInteger increaseCount = 0;
146 | NSInteger decreaseCount = 0;
147 |
148 | for(NSNumber *number in self.thresholdList){
149 | if(number.integerValue >= currentCount){
150 | increaseCount ++;
151 | }else{
152 | decreaseCount ++;
153 | }
154 | currentCount = [number integerValue];
155 | }
156 |
157 | if(increaseCount >= self.callBackInterval){
158 | return LFLiveBuffferIncrease;
159 | }
160 |
161 | if(decreaseCount >= self.callBackInterval){
162 | return LFLiveBuffferDecline;
163 | }
164 |
165 | return LFLiveBuffferUnknown;
166 | }
167 |
168 | #pragma mark -- Setter Getter
169 | - (NSMutableArray*)list{
170 | if(!_list){
171 | _list = [[NSMutableArray alloc] init];
172 | }
173 | return _list;
174 | }
175 |
176 | - (NSMutableArray*)sortList{
177 | if(!_sortList){
178 | _sortList = [[NSMutableArray alloc] init];
179 | }
180 | return _sortList;
181 | }
182 |
183 | - (NSMutableArray*)thresholdList{
184 | if(!_thresholdList){
185 | _thresholdList = [[NSMutableArray alloc] init];
186 | }
187 | return _thresholdList;
188 | }
189 |
190 |
191 | #pragma mark -- Sampling
192 | - (void)tick{
193 |     /** Sample the buffer size; if it consistently grows or shrinks over the window, notify the delegate */
194 | _currentInterval += self.updateInterval;
195 |
196 | dispatch_semaphore_wait(_lock, DISPATCH_TIME_FOREVER);
197 | [self.thresholdList addObject:@(self.list.count)];
198 | dispatch_semaphore_signal(_lock);
199 |
200 | if(self.currentInterval >= self.callBackInterval){
201 | LFLiveBuffferState state = [self currentBufferState];
202 | if(state == LFLiveBuffferIncrease){
203 | if(self.delegate && [self.delegate respondsToSelector:@selector(streamingBuffer:bufferState:)]){
204 | [self.delegate streamingBuffer:self bufferState:LFLiveBuffferIncrease];
205 | }
206 | }else if(state == LFLiveBuffferDecline){
207 | if(self.delegate && [self.delegate respondsToSelector:@selector(streamingBuffer:bufferState:)]){
208 | [self.delegate streamingBuffer:self bufferState:LFLiveBuffferDecline];
209 | }
210 | }
211 |
212 | self.currentInterval = 0;
213 | [self.thresholdList removeAllObjects];
214 | }
215 | __weak typeof(self) _self = self;
216 | dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t)(self.updateInterval * NSEC_PER_SEC)), dispatch_get_main_queue(), ^{
217 | __weak typeof(_self) self = _self;
218 | [self tick];
219 | });
220 | }
221 |
222 | @end
223 |
--------------------------------------------------------------------------------
/LFLiveKit/coder/LFHardwareAudioEncoder.m:
--------------------------------------------------------------------------------
1 | //
2 | // LFHardwareAudioEncoder.m
3 | // LFLiveKit
4 | //
5 | // Created by 倾慕 on 16/5/2.
6 | // Copyright © 2016年 倾慕. All rights reserved.
7 | //
8 |
9 | #import "LFHardwareAudioEncoder.h"
10 |
11 | @interface LFHardwareAudioEncoder (){
12 | AudioConverterRef m_converter;
13 | char *aacBuf;
14 | }
15 | @property (nonatomic, strong) LFLiveAudioConfiguration *configuration;
16 | @property (nonatomic, weak) id aacDeleage;
17 |
18 | @end
19 |
20 | @implementation LFHardwareAudioEncoder
21 |
22 | - (instancetype)initWithAudioStreamConfiguration:(LFLiveAudioConfiguration *)configuration{
23 | if(self = [super init]){
24 | _configuration = configuration;
25 | }
26 | return self;
27 | }
28 |
29 | - (void)dealloc{
30 | if(aacBuf) free(aacBuf);
31 | }
32 |
33 | #pragma mark -- LFAudioEncoder
34 | - (void)setDelegate:(id)delegate{
35 | _aacDeleage = delegate;
36 | }
37 |
38 | - (void)encodeAudioData:(AudioBufferList)inBufferList timeStamp:(uint64_t)timeStamp{
39 | if (![self createAudioConvert]){
40 | return;
41 | }
42 |
43 | if(!aacBuf){
44 | aacBuf = malloc(inBufferList.mBuffers[0].mDataByteSize);
45 | }
46 |
47 |     // Set up the output buffer list
48 |     AudioBufferList outBufferList;
49 |     outBufferList.mNumberBuffers = 1;
50 |     outBufferList.mBuffers[0].mNumberChannels = inBufferList.mBuffers[0].mNumberChannels;
51 |     outBufferList.mBuffers[0].mDataByteSize = inBufferList.mBuffers[0].mDataByteSize; // match the input buffer size
52 |     outBufferList.mBuffers[0].mData = aacBuf; // point the output at the AAC buffer
53 | UInt32 outputDataPacketSize = 1;
54 | if (AudioConverterFillComplexBuffer(m_converter, inputDataProc, &inBufferList, &outputDataPacketSize, &outBufferList, NULL) != noErr){
55 | return;
56 | }
57 | LFAudioFrame *audioFrame = [LFAudioFrame new];
58 | audioFrame.timestamp = timeStamp;
59 | audioFrame.data = [NSData dataWithBytes:aacBuf length:outBufferList.mBuffers[0].mDataByteSize];
60 |
61 | char exeData[2];
62 | exeData[0] = _configuration.asc[0];
63 | exeData[1] = _configuration.asc[1];
64 | audioFrame.audioInfo =[NSData dataWithBytes:exeData length:2];
65 | if(self.aacDeleage && [self.aacDeleage respondsToSelector:@selector(audioEncoder:audioFrame:)]){
66 | [self.aacDeleage audioEncoder:self audioFrame:audioFrame];
67 | }
68 | }
69 |
70 | - (void)stopEncoder{
71 |
72 | }
73 |
74 | #pragma mark -- CustomMethod
75 | -(BOOL)createAudioConvert{ // Create an AAC converter matching the input sample format
76 | if (m_converter != nil){
77 | return TRUE;
78 | }
79 |
80 | AudioStreamBasicDescription inputFormat = {0};
81 | inputFormat.mSampleRate = _configuration.audioSampleRate;
82 | inputFormat.mFormatID = kAudioFormatLinearPCM;
83 | inputFormat.mFormatFlags = kAudioFormatFlagIsSignedInteger | kAudioFormatFlagsNativeEndian | kAudioFormatFlagIsPacked;
84 | inputFormat.mChannelsPerFrame = (UInt32)_configuration.numberOfChannels;
85 | inputFormat.mFramesPerPacket = 1;
86 | inputFormat.mBitsPerChannel = 16;
87 | inputFormat.mBytesPerFrame = inputFormat.mBitsPerChannel / 8 * inputFormat.mChannelsPerFrame;
88 | inputFormat.mBytesPerPacket = inputFormat.mBytesPerFrame * inputFormat.mFramesPerPacket;
89 |
90 |     AudioStreamBasicDescription outputFormat; // Output (encoded) audio format
91 |     memset(&outputFormat, 0, sizeof(outputFormat));
92 |     outputFormat.mSampleRate = inputFormat.mSampleRate; // keep the input sample rate
93 |     outputFormat.mFormatID = kAudioFormatMPEG4AAC; // AAC encoding (kAudioFormatMPEG4AAC; kAudioFormatMPEG4AAC_HE_V2 is also available)
94 |     outputFormat.mChannelsPerFrame = (UInt32)_configuration.numberOfChannels;
95 |     outputFormat.mFramesPerPacket = 1024; // an AAC packet holds 1024 frames (samples per channel)
96 |
97 | const OSType subtype = kAudioFormatMPEG4AAC;
98 | AudioClassDescription requestedCodecs[2] = {
99 | {
100 | kAudioEncoderComponentType,
101 | subtype,
102 | kAppleSoftwareAudioCodecManufacturer
103 | },
104 | {
105 | kAudioEncoderComponentType,
106 | subtype,
107 | kAppleHardwareAudioCodecManufacturer
108 | }
109 | };
110 | OSStatus result = AudioConverterNewSpecific(&inputFormat, &outputFormat, 2, requestedCodecs, &m_converter);
111 |
112 |
113 | if(result != noErr) return NO;
114 |
115 | return YES;
116 | }
117 |
118 | -(AudioClassDescription*)getAudioClassDescriptionWithType:(UInt32)type fromManufacturer:(UInt32)manufacturer { // Find the matching encoder description
119 | static AudioClassDescription audioDesc;
120 |
121 | UInt32 encoderSpecifier = type, size = 0;
122 | OSStatus status;
123 |
124 | memset(&audioDesc, 0, sizeof(audioDesc));
125 | status = AudioFormatGetPropertyInfo(kAudioFormatProperty_Encoders, sizeof(encoderSpecifier), &encoderSpecifier, &size);
126 | if (status) {
127 | return nil;
128 | }
129 |
130 | uint32_t count = size / sizeof(AudioClassDescription);
131 | AudioClassDescription descs[count];
132 | status = AudioFormatGetProperty(kAudioFormatProperty_Encoders, sizeof(encoderSpecifier), &encoderSpecifier, &size, descs);
133 | for (uint32_t i = 0; i < count; i++){
134 | if ((type == descs[i].mSubType) && (manufacturer == descs[i].mManufacturer)){
135 | memcpy(&audioDesc, &descs[i], sizeof(audioDesc));
136 | break;
137 | }
138 | }
139 | return &audioDesc;
140 | }
141 |
142 | #pragma mark -- AudioCallBack
143 | OSStatus inputDataProc(AudioConverterRef inConverter, UInt32 *ioNumberDataPackets, AudioBufferList *ioData,AudioStreamPacketDescription **outDataPacketDescription, void *inUserData) { // AudioConverterFillComplexBuffer calls this to supply the input data, i.e. the raw PCM samples
144 | AudioBufferList bufferList = *(AudioBufferList*)inUserData;
145 | ioData->mBuffers[0].mNumberChannels = 1;
146 | ioData->mBuffers[0].mData = bufferList.mBuffers[0].mData;
147 | ioData->mBuffers[0].mDataByteSize = bufferList.mBuffers[0].mDataByteSize;
148 | return noErr;
149 | }
150 |
151 | /**
152 | * Add ADTS header at the beginning of each and every AAC packet.
153 | * This is needed as MediaCodec encoder generates a packet of raw
154 | * AAC data.
155 | *
156 | * Note the packetLen must count in the ADTS header itself.
157 | * See: http://wiki.multimedia.cx/index.php?title=ADTS
158 | * Also: http://wiki.multimedia.cx/index.php?title=MPEG-4_Audio#Channel_Configurations
159 | **/
160 | - (NSData*)adtsData:(NSInteger)channel rawDataLength:(NSInteger)rawDataLength {
161 | int adtsLength = 7;
162 | char *packet = malloc(sizeof(char) * adtsLength);
163 | // Variables Recycled by addADTStoPacket
164 | int profile = 2; //AAC LC
165 | //39=MediaCodecInfo.CodecProfileLevel.AACObjectELD;
166 | int freqIdx = 4; //44.1KHz
167 | int chanCfg = (int)channel; //MPEG-4 Audio Channel Configuration. 1 Channel front-center
168 | NSUInteger fullLength = adtsLength + rawDataLength;
169 | // fill in ADTS data
170 | packet[0] = (char)0xFF; // 11111111 = syncword
171 | packet[1] = (char)0xF9; // 1111 1 00 1 = syncword MPEG-2 Layer CRC
172 | packet[2] = (char)(((profile-1)<<6) + (freqIdx<<2) +(chanCfg>>2));
173 | packet[3] = (char)(((chanCfg&3)<<6) + (fullLength>>11));
174 | packet[4] = (char)((fullLength&0x7FF) >> 3);
175 | packet[5] = (char)(((fullLength&7)<<5) + 0x1F);
176 | packet[6] = (char)0xFC;
177 | NSData *data = [NSData dataWithBytesNoCopy:packet length:adtsLength freeWhenDone:YES];
178 | return data;
179 | }
180 |
181 | @end
182 |
--------------------------------------------------------------------------------
/LFLiveKit/configuration/LFLiveAudioConfiguration.m:
--------------------------------------------------------------------------------
1 | //
2 | // LFLiveAudioConfiguration.m
3 | // LFLiveKit
4 | //
5 | // Created by 倾慕 on 16/5/1.
6 | // Copyright © 2016年 倾慕. All rights reserved.
7 | //
8 |
9 | #import "LFLiveAudioConfiguration.h"
10 | #import <sys/utsname.h>
11 |
12 | @implementation LFLiveAudioConfiguration
13 |
14 | #pragma mark -- LifeCycle
15 | + (instancetype)defaultConfiguration{
16 | LFLiveAudioConfiguration *audioConfig = [LFLiveAudioConfiguration defaultConfigurationForQuality:LFLiveAudioQuality_Default];
17 | return audioConfig;
18 | }
19 |
20 | + (instancetype)defaultConfigurationForQuality:(LFLiveAudioQuality)audioQuality{
21 | LFLiveAudioConfiguration *audioConfig = [LFLiveAudioConfiguration new];
22 | audioConfig.numberOfChannels = 2;
23 | switch (audioQuality) {
24 | case LFLiveAudioQuality_Default:{
25 | audioConfig.audioBitrate = LFLiveAudioBitRate_64Kbps;
26 | }
27 | break;
28 | case LFLiveAudioQuality_Low:{
29 | audioConfig.audioBitrate = LFLiveAudioBitRate_32Kbps;
30 |         } break;
31 | case LFLiveAudioQuality_High:{
32 | audioConfig.audioBitrate = LFLiveAudioBitRate_96Kbps;
33 |         } break;
34 | case LFLiveAudioQuality_VeryHigh:{
35 | audioConfig.audioBitrate = LFLiveAudioBitRate_128Kbps;
36 | }
37 | break;
38 | default:
39 | break;
40 | }
41 | audioConfig.audioSampleRate = [self.class isNewThaniPhone6] ? LFLiveAudioSampleRate_48000Hz : LFLiveAudioSampleRate_44100Hz;
42 |
43 | return audioConfig;
44 | }
45 |
46 | - (instancetype)init{
47 | if(self = [super init]){
48 | _asc = malloc(2);
49 | }
50 | return self;
51 | }
52 |
53 | - (void)dealloc{
54 | if(_asc) free(_asc);
55 | }
56 |
57 | #pragma mark Setter
58 | - (void)setAudioSampleRate:(LFLiveAudioSampleRate)audioSampleRate{
59 | _audioSampleRate = audioSampleRate;
60 | NSInteger sampleRateIndex = [self sampleRateIndex:audioSampleRate];
61 |     self.asc[0] = 0x10 | ((sampleRateIndex>>1) & 0x3); ///< AudioSpecificConfig byte 0: 5-bit object type (2 = AAC LC) plus the upper bits of the sample-rate index
62 |     self.asc[1] = ((sampleRateIndex & 0x1)<<7) | ((self.numberOfChannels & 0xF) << 3); ///< byte 1: low bit of the sample-rate index, then the 4-bit channel configuration
63 | }
64 |
65 | - (void)setNumberOfChannels:(NSUInteger)numberOfChannels{
66 | _numberOfChannels = numberOfChannels;
67 | NSInteger sampleRateIndex = [self sampleRateIndex:self.audioSampleRate];
68 | self.asc[0] = 0x10 | ((sampleRateIndex>>1) & 0x3);
69 | self.asc[1] = ((sampleRateIndex & 0x1)<<7) | ((numberOfChannels & 0xF) << 3);
70 | }
71 |
72 |
73 | #pragma mark -- CustomMethod
74 | - (NSInteger)sampleRateIndex:(NSInteger)frequencyInHz{
75 | NSInteger sampleRateIndex = 0;
76 | switch(frequencyInHz) {
77 | case 96000:
78 | sampleRateIndex = 0;
79 | break;
80 | case 88200:
81 | sampleRateIndex = 1;
82 | break;
83 | case 64000:
84 | sampleRateIndex = 2;
85 | break;
86 | case 48000:
87 | sampleRateIndex = 3;
88 | break;
89 | case 44100:
90 | sampleRateIndex = 4;
91 | break;
92 | case 32000:
93 | sampleRateIndex = 5;
94 | break;
95 | case 24000:
96 | sampleRateIndex = 6;
97 | break;
98 | case 22050:
99 | sampleRateIndex = 7;
100 | break;
101 | case 16000:
102 | sampleRateIndex = 8;
103 | break;
104 | case 12000:
105 | sampleRateIndex = 9;
106 | break;
107 | case 11025:
108 | sampleRateIndex = 10;
109 | break;
110 | case 8000:
111 | sampleRateIndex = 11;
112 | break;
113 | case 7350:
114 | sampleRateIndex = 12;
115 | break;
116 | default:
117 | sampleRateIndex = 15;
118 | }
119 | return sampleRateIndex;
120 | }
121 |
122 |
123 | #pragma mark -- DeviceCategory
124 | +(NSString*)deviceName{
125 | struct utsname systemInfo;
126 | uname(&systemInfo);
127 |
128 | return [NSString stringWithCString:systemInfo.machine
129 | encoding:NSUTF8StringEncoding];
130 | }
131 |
132 | //@"iPad4,1" on 5th Generation iPad (iPad Air) - Wifi
133 | //@"iPad4,2" on 5th Generation iPad (iPad Air) - Cellular
134 | //@"iPad4,4" on 2nd Generation iPad Mini - Wifi
135 | //@"iPad4,5" on 2nd Generation iPad Mini - Cellular
136 | //@"iPad4,7" on 3rd Generation iPad Mini - Wifi (model A1599)
137 | //@"iPhone7,1" on iPhone 6 Plus
138 | //@"iPhone7,2" on iPhone 6
139 | //@"iPhone8,1" on iPhone 6S
140 | //@"iPhone8,2" on iPhone 6S Plus
141 |
142 | +(BOOL) isNewThaniPhone6{
143 | NSString *device = [self deviceName];
144 | NSLog(@"device %@", device);
145 | if (device == nil) {
146 | return NO;
147 | }
148 | NSArray *array = [device componentsSeparatedByString:@","];
149 | if (array.count <2) {
150 | return NO;
151 | }
152 | NSString *model = [array objectAtIndex:0];
153 | NSLog(@"model %@", model);
154 | if ([model hasPrefix:@"iPhone"]) {
155 | NSString *str1 = [model substringFromIndex:[@"iPhone" length]];
156 | NSUInteger num = [str1 integerValue];
157 | NSLog(@"num %lu", (unsigned long)num);
158 | if (num > 7) {
159 | return YES;
160 | }
161 | }
162 |
163 | if ([model hasPrefix:@"iPad"]) {
164 | NSString *str1 = [model substringFromIndex:[@"iPad" length]];
165 | NSUInteger num = [str1 integerValue];
166 | if (num > 4) {
167 | return YES;
168 | }
169 | }
170 |
171 | return NO;
172 | }
173 |
174 | #pragma mark -- Encoder
175 | - (void)encodeWithCoder:(NSCoder *)aCoder {
176 | [aCoder encodeObject:@(self.numberOfChannels) forKey:@"numberOfChannels"];
177 | [aCoder encodeObject:@(self.audioSampleRate) forKey:@"audioSampleRate"];
178 | [aCoder encodeObject:@(self.audioBitrate) forKey:@"audioBitrate"];
179 | [aCoder encodeObject:[NSString stringWithUTF8String:self.asc] forKey:@"asc"];
180 | }
181 |
182 | - (id)initWithCoder:(NSCoder *)aDecoder {
183 | self = [super init];
184 | _numberOfChannels = [[aDecoder decodeObjectForKey:@"numberOfChannels"] unsignedIntegerValue];
185 | _audioSampleRate = [[aDecoder decodeObjectForKey:@"audioSampleRate"] unsignedIntegerValue];
186 | _audioBitrate = [[aDecoder decodeObjectForKey:@"audioBitrate"] unsignedIntegerValue];
187 | _asc = strdup([[aDecoder decodeObjectForKey:@"asc"] cStringUsingEncoding:NSUTF8StringEncoding]);
188 | return self;
189 | }
190 |
191 | - (BOOL)isEqual:(id)other{
192 | if (other == self) {
193 | return YES;
194 | } else if (![super isEqual:other]) {
195 | return NO;
196 | } else {
197 | LFLiveAudioConfiguration *object = other;
198 | return object.numberOfChannels == self.numberOfChannels &&
199 | object.audioBitrate == self.audioBitrate &&
200 | strcmp(object.asc, self.asc) == 0 &&
201 | object.audioSampleRate == self.audioSampleRate;
202 | }
203 | }
204 |
205 | - (NSUInteger)hash {
206 | NSUInteger hash = 0;
207 | NSArray *values = @[@(_numberOfChannels),
208 | @(_audioSampleRate),
209 | [NSString stringWithUTF8String:self.asc],
210 | @(_audioBitrate)];
211 |
212 | for (NSObject *value in values) {
213 | hash ^= value.hash;
214 | }
215 | return hash;
216 | }
217 |
218 | - (id)copyWithZone:(nullable NSZone *)zone{
219 | LFLiveAudioConfiguration *other = [self.class defaultConfiguration];
220 | return other;
221 | }
222 |
223 | - (NSString *)description{
224 | NSMutableString *desc = @"".mutableCopy;
225 |     [desc appendFormat:@"<LFLiveAudioConfiguration: %p>", self];
226 | [desc appendFormat:@" numberOfChannels:%zi",self.numberOfChannels];
227 | [desc appendFormat:@" audioSampleRate:%zi",self.audioSampleRate];
228 | [desc appendFormat:@" audioBitrate:%zi",self.audioBitrate];
229 | [desc appendFormat:@" audioHeader:%@",[NSString stringWithUTF8String:self.asc]];
230 | return desc;
231 | }
232 |
233 | @end
234 |
--------------------------------------------------------------------------------
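The two `asc` bytes assembled in the setters above form the AAC AudioSpecificConfig carried in the AAC sequence header: 5 bits of audio object type, 4 bits of sample-rate index, 4 bits of channel configuration. A minimal worked sketch of the same bit math, assuming AAC-LC (object type 2, which is what the constant 0x10 encodes) at 44.1 kHz stereo:

    // Worked example of the asc[] computation above for AAC-LC, 44.1 kHz, stereo.
    uint8_t asc[2];
    NSInteger sampleRateIndex = 4;   // 44100 Hz, per -sampleRateIndex:
    NSUInteger channels = 2;
    asc[0] = 0x10 | ((sampleRateIndex >> 1) & 0x3);                                 // 0x12
    asc[1] = (uint8_t)(((sampleRateIndex & 0x1) << 7) | ((channels & 0xF) << 3));   // 0x10
    // 0x12 0x10 is the AudioSpecificConfig that precedes the encoded AAC frames.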
/LFLiveKit/packet/flv/flv.h:
--------------------------------------------------------------------------------
1 | /*
2 | $Id: flv.h 231 2011-06-27 13:46:19Z marc.noirot $
3 |
4 | FLV Metadata updater
5 |
6 | Copyright (C) 2007-2012 Marc Noirot
7 |
8 | This file is part of FLVMeta.
9 |
10 | FLVMeta is free software; you can redistribute it and/or modify
11 | it under the terms of the GNU General Public License as published by
12 | the Free Software Foundation; either version 2 of the License, or
13 | (at your option) any later version.
14 |
15 | FLVMeta is distributed in the hope that it will be useful,
16 | but WITHOUT ANY WARRANTY; without even the implied warranty of
17 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
18 | GNU General Public License for more details.
19 |
20 | You should have received a copy of the GNU General Public License
21 | along with FLVMeta; if not, write to the Free Software
22 | Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
23 | */
24 | #ifndef __FLV_H__
25 | #define __FLV_H__
26 |
27 | /* Configuration of the sources */
28 | #ifdef HAVE_CONFIG_H
29 | # include <config.h>
30 | #endif
31 |
32 | #include "types.h"
33 | #include "amf.h"
34 |
35 | /* error statuses */
36 | #define FLV_OK 0
37 | #define FLV_ERROR_OPEN_READ 1
38 | #define FLV_ERROR_NO_FLV 2
39 | #define FLV_ERROR_EOF 3
40 | #define FLV_ERROR_MEMORY 4
41 | #define FLV_ERROR_EMPTY_TAG 5
42 | #define FLV_ERROR_INVALID_METADATA_NAME 6
43 | #define FLV_ERROR_INVALID_METADATA 7
44 |
45 | /* flv file format structure and definitions */
46 |
47 | /* FLV file header */
48 | #define FLV_SIGNATURE "FLV"
49 | #define FLV_VERSION ((uint8)0x01)
50 |
51 | #define FLV_FLAG_VIDEO ((uint8)0x01)
52 | #define FLV_FLAG_AUDIO ((uint8)0x04)
53 |
54 | typedef struct __flv_header {
55 | byte signature[3]; /* always "FLV" */
56 | uint8 version; /* should be 1 */
57 | uint8_bitmask flags;
58 | uint32_be offset; /* always 9 */
59 | } flv_header;
60 |
61 | #define FLV_HEADER_SIZE 9
62 |
63 | #define flv_header_has_video(header) ((header).flags & FLV_FLAG_VIDEO)
64 | #define flv_header_has_audio(header) ((header).flags & FLV_FLAG_AUDIO)
65 | #define flv_header_get_offset(header) (swap_uint32((header).offset))
66 |
67 | /* FLV tag */
68 | #define FLV_TAG_TYPE_AUDIO ((uint8)0x08)
69 | #define FLV_TAG_TYPE_VIDEO ((uint8)0x09)
70 | #define FLV_TAG_TYPE_META ((uint8)0x12)
71 |
72 | typedef struct __flv_tag {
73 | uint8 type;
74 | uint24_be body_length; /* in bytes, total tag size minus 11 */
75 | uint24_be timestamp; /* milli-seconds */
76 | uint8 timestamp_extended; /* timestamp extension */
77 | uint24_be stream_id; /* reserved, must be "\0\0\0" */
78 | /* body comes next */
79 | } flv_tag;
80 |
81 | #define FLV_TAG_SIZE 11
82 |
83 | #define flv_tag_get_body_length(tag) (uint24_be_to_uint32((tag).body_length))
84 | #define flv_tag_get_timestamp(tag) \
85 | (uint24_be_to_uint32((tag).timestamp) + ((tag).timestamp_extended << 24))
86 | #define flv_tag_get_stream_id(tag) (uint24_be_to_uint32((tag).stream_id))
87 |
88 | /* audio tag */
89 | #define FLV_AUDIO_TAG_SOUND_TYPE_MONO 0
90 | #define FLV_AUDIO_TAG_SOUND_TYPE_STEREO 1
91 |
92 | #define FLV_AUDIO_TAG_SOUND_SIZE_8 0
93 | #define FLV_AUDIO_TAG_SOUND_SIZE_16 1
94 |
95 | #define FLV_AUDIO_TAG_SOUND_RATE_5_5 0
96 | #define FLV_AUDIO_TAG_SOUND_RATE_11 1
97 | #define FLV_AUDIO_TAG_SOUND_RATE_22 2
98 | #define FLV_AUDIO_TAG_SOUND_RATE_44 3
99 |
100 | #define FLV_AUDIO_TAG_SOUND_FORMAT_LINEAR_PCM 0
101 | #define FLV_AUDIO_TAG_SOUND_FORMAT_ADPCM 1
102 | #define FLV_AUDIO_TAG_SOUND_FORMAT_MP3 2
103 | #define FLV_AUDIO_TAG_SOUND_FORMAT_LINEAR_PCM_LE 3
104 | #define FLV_AUDIO_TAG_SOUND_FORMAT_NELLYMOSER_16_MONO 4
105 | #define FLV_AUDIO_TAG_SOUND_FORMAT_NELLYMOSER_8_MONO 5
106 | #define FLV_AUDIO_TAG_SOUND_FORMAT_NELLYMOSER 6
107 | #define FLV_AUDIO_TAG_SOUND_FORMAT_G711_A 7
108 | #define FLV_AUDIO_TAG_SOUND_FORMAT_G711_MU 8
109 | #define FLV_AUDIO_TAG_SOUND_FORMAT_RESERVED 9
110 | #define FLV_AUDIO_TAG_SOUND_FORMAT_AAC 10
111 | #define FLV_AUDIO_TAG_SOUND_FORMAT_SPEEX 11
112 | #define FLV_AUDIO_TAG_SOUND_FORMAT_MP3_8 14
113 | #define FLV_AUDIO_TAG_SOUND_FORMAT_DEVICE_SPECIFIC 15
114 |
115 | typedef byte flv_audio_tag;
116 |
117 | #define flv_audio_tag_sound_type(tag) (((tag) & 0x01) >> 0)
118 | #define flv_audio_tag_sound_size(tag) (((tag) & 0x02) >> 1)
119 | #define flv_audio_tag_sound_rate(tag) (((tag) & 0x0C) >> 2)
120 | #define flv_audio_tag_sound_format(tag) (((tag) & 0xF0) >> 4)
121 |
122 | /* video tag */
123 | #define FLV_VIDEO_TAG_CODEC_JPEG 1
124 | #define FLV_VIDEO_TAG_CODEC_SORENSEN_H263 2
125 | #define FLV_VIDEO_TAG_CODEC_SCREEN_VIDEO 3
126 | #define FLV_VIDEO_TAG_CODEC_ON2_VP6 4
127 | #define FLV_VIDEO_TAG_CODEC_ON2_VP6_ALPHA 5
128 | #define FLV_VIDEO_TAG_CODEC_SCREEN_VIDEO_V2 6
129 | #define FLV_VIDEO_TAG_CODEC_AVC 7
130 |
131 | #define FLV_VIDEO_TAG_FRAME_TYPE_KEYFRAME 1
132 | #define FLV_VIDEO_TAG_FRAME_TYPE_INTERFRAME 2
133 | #define FLV_VIDEO_TAG_FRAME_TYPE_DISPOSABLE_INTERFRAME 3
134 | #define FLV_VIDEO_TAG_FRAME_TYPE_GENERATED_KEYFRAME 4
135 | #define FLV_VIDEO_TAG_FRAME_TYPE_COMMAND_FRAME 5
136 |
137 | typedef byte flv_video_tag;
138 |
139 | #define flv_video_tag_codec_id(tag) (((tag) & 0x0F) >> 0)
140 | #define flv_video_tag_frame_type(tag) (((tag) & 0xF0) >> 4)
141 |
142 | #ifdef __cplusplus
143 | extern "C" {
144 | #endif /* __cplusplus */
145 |
146 | /* FLV helper functions */
147 | void flv_tag_set_timestamp(flv_tag * tag, uint32 timestamp);
148 |
149 | /* FLV stream */
150 | #define FLV_STREAM_STATE_START 0
151 | #define FLV_STREAM_STATE_TAG 1
152 | #define FLV_STREAM_STATE_TAG_BODY 2
153 | #define FLV_STREAM_STATE_PREV_TAG_SIZE 3
154 |
155 | typedef struct __flv_stream {
156 | FILE * flvin;
157 | uint8 state;
158 | flv_tag current_tag;
159 | file_offset_t current_tag_offset;
160 | uint32 current_tag_body_length;
161 | uint32 current_tag_body_overflow;
162 | } flv_stream;
163 |
164 | /* FLV stream functions */
165 | flv_stream * flv_open(const char * file);
166 | int flv_read_header(flv_stream * stream, flv_header * header);
167 | int flv_read_prev_tag_size(flv_stream * stream, uint32 * prev_tag_size);
168 | int flv_read_tag(flv_stream * stream, flv_tag * tag);
169 | int flv_read_audio_tag(flv_stream * stream, flv_audio_tag * tag);
170 | int flv_read_video_tag(flv_stream * stream, flv_video_tag * tag);
171 | int flv_read_metadata(flv_stream * stream, amf_data ** name, amf_data ** data);
172 | size_t flv_read_tag_body(flv_stream * stream, void * buffer, size_t buffer_size);
173 | file_offset_t flv_get_current_tag_offset(flv_stream * stream);
174 | file_offset_t flv_get_offset(flv_stream * stream);
175 | void flv_reset(flv_stream * stream);
176 | void flv_close(flv_stream * stream);
177 |
178 | /* FLV stdio writing helper functions */
179 | size_t flv_write_header(FILE * out, const flv_header * header);
180 | size_t flv_write_tag(FILE * out, const flv_tag * tag);
181 |
182 | /* FLV event based parser */
183 | typedef struct __flv_parser {
184 | flv_stream * stream;
185 | void * user_data;
186 | int (* on_header)(flv_header * header, struct __flv_parser * parser);
187 | int (* on_tag)(flv_tag * tag, struct __flv_parser * parser);
188 | int (* on_metadata_tag)(flv_tag * tag, amf_data * name, amf_data * data, struct __flv_parser * parser);
189 | int (* on_audio_tag)(flv_tag * tag, flv_audio_tag audio_tag, struct __flv_parser * parser);
190 |     int (* on_video_tag)(flv_tag * tag, flv_video_tag video_tag, struct __flv_parser * parser);
191 | int (* on_unknown_tag)(flv_tag * tag, struct __flv_parser * parser);
192 | int (* on_prev_tag_size)(uint32 size, struct __flv_parser * parser);
193 | int (* on_stream_end)(struct __flv_parser * parser);
194 | } flv_parser;
195 |
196 | int flv_parse(const char * file, flv_parser * parser);
197 |
198 | #ifdef __cplusplus
199 | }
200 | #endif /* __cplusplus */
201 |
202 | #endif /* __FLV_H__ */
203 |
--------------------------------------------------------------------------------
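As a quick orientation to the reader API declared in this header, here is a small sketch (illustrative only, not part of LFLiveKit) that walks an FLV file with the event-based parser and prints each tag. It only uses functions and macros declared above; callbacks that are not needed are left NULL.

    #include <stdio.h>
    #include <string.h>
    #include "flv.h"

    /* Print the type, timestamp and body size of every tag in an FLV file. */
    static int on_tag(flv_tag * tag, flv_parser * parser) {
        printf("tag type=%u timestamp=%u ms body=%u bytes\n",
               tag->type,
               flv_tag_get_timestamp(*tag),
               flv_tag_get_body_length(*tag));
        return 0; /* 0 = keep parsing */
    }

    static int dump_flv(const char * path) {
        flv_parser parser;
        memset(&parser, 0, sizeof(parser));
        parser.on_tag = on_tag;   /* unused callbacks stay NULL */
        return flv_parse(path, &parser);
    }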
/LFLiveKit/packet/flv/amf.h:
--------------------------------------------------------------------------------
1 | /*
2 | $Id: amf.h 231 2011-06-27 13:46:19Z marc.noirot $
3 |
4 | FLV Metadata updater
5 |
6 | Copyright (C) 2007-2012 Marc Noirot
7 |
8 | This file is part of FLVMeta.
9 |
10 | FLVMeta is free software; you can redistribute it and/or modify
11 | it under the terms of the GNU General Public License as published by
12 | the Free Software Foundation; either version 2 of the License, or
13 | (at your option) any later version.
14 |
15 | FLVMeta is distributed in the hope that it will be useful,
16 | but WITHOUT ANY WARRANTY; without even the implied warranty of
17 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
18 | GNU General Public License for more details.
19 |
20 | You should have received a copy of the GNU General Public License
21 | along with FLVMeta; if not, write to the Free Software
22 | Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
23 | */
24 | #ifndef __AMF_H__
25 | #define __AMF_H__
26 |
27 | #include <string.h>
28 | #include <stdio.h>
29 | #include <time.h>
30 |
31 | #include "types.h"
32 |
33 | /* AMF data types */
34 | #define AMF_TYPE_NUMBER ((byte)0x00)
35 | #define AMF_TYPE_BOOLEAN ((byte)0x01)
36 | #define AMF_TYPE_STRING ((byte)0x02)
37 | #define AMF_TYPE_OBJECT ((byte)0x03)
38 | #define AMF_TYPE_NULL ((byte)0x05)
39 | #define AMF_TYPE_UNDEFINED ((byte)0x06)
40 | /* #define AMF_TYPE_REFERENCE ((byte)0x07) */
41 | #define AMF_TYPE_ASSOCIATIVE_ARRAY ((byte)0x08)
42 | #define AMF_TYPE_END ((byte)0x09)
43 | #define AMF_TYPE_ARRAY ((byte)0x0A)
44 | #define AMF_TYPE_DATE ((byte)0x0B)
45 | /* #define AMF_TYPE_SIMPLEOBJECT ((byte)0x0D) */
46 | #define AMF_TYPE_XML ((byte)0x0F)
47 | #define AMF_TYPE_CLASS ((byte)0x10)
48 |
49 | /* AMF error codes */
50 | #define AMF_ERROR_OK ((byte)0x00)
51 | #define AMF_ERROR_EOF ((byte)0x01)
52 | #define AMF_ERROR_UNKNOWN_TYPE ((byte)0x02)
53 | #define AMF_ERROR_END_TAG ((byte)0x03)
54 | #define AMF_ERROR_NULL_POINTER ((byte)0x04)
55 | #define AMF_ERROR_MEMORY ((byte)0x05)
56 | #define AMF_ERROR_UNSUPPORTED_TYPE ((byte)0x06)
57 |
58 | typedef struct __amf_node * p_amf_node;
59 |
60 | /* string type */
61 | typedef struct __amf_string {
62 | uint16 size;
63 | byte * mbstr;
64 | } amf_string;
65 |
66 | /* array type */
67 | typedef struct __amf_list {
68 | uint32 size;
69 | p_amf_node first_element;
70 | p_amf_node last_element;
71 | } amf_list;
72 |
73 | /* date type */
74 | typedef struct __amf_date {
75 | number64 milliseconds;
76 | sint16 timezone;
77 | } amf_date;
78 |
79 | /* XML string type */
80 | typedef struct __amf_xmlstring {
81 | uint32 size;
82 | byte * mbstr;
83 | } amf_xmlstring;
84 |
85 | /* class type */
86 | typedef struct __amf_class {
87 | amf_string name;
88 | amf_list elements;
89 | } amf_class;
90 |
91 | /* structure encapsulating the various AMF objects */
92 | typedef struct __amf_data {
93 | byte type;
94 | byte error_code;
95 | union {
96 | number64 number_data;
97 | uint8 boolean_data;
98 | amf_string string_data;
99 | amf_list list_data;
100 | amf_date date_data;
101 | amf_xmlstring xmlstring_data;
102 | amf_class class_data;
103 | };
104 | } amf_data;
105 |
106 | /* node used in lists, relies on amf_data */
107 | typedef struct __amf_node {
108 | amf_data * data;
109 | p_amf_node prev;
110 | p_amf_node next;
111 | } amf_node;
112 |
113 | #ifdef __cplusplus
114 | extern "C" {
115 | #endif /* __cplusplus */
116 |
117 | /* Pluggable backend support */
118 | typedef size_t (*amf_read_proc)(void * out_buffer, size_t size, void * user_data);
119 | typedef size_t (*amf_write_proc)(const void * in_buffer, size_t size, void * user_data);
120 |
121 | /* read AMF data */
122 | amf_data * amf_data_read(amf_read_proc read_proc, void * user_data);
123 |
124 | /* write AMF data */
125 | size_t amf_data_write(const amf_data * data, amf_write_proc write_proc, void * user_data);
126 |
127 | /* generic functions */
128 |
129 | /* allocate an AMF data object */
130 | amf_data * amf_data_new(byte type);
131 | /* load AMF data from buffer */
132 | amf_data * amf_data_buffer_read(byte * buffer, size_t maxbytes);
133 | /* load AMF data from stream */
134 | amf_data * amf_data_file_read(FILE * stream);
135 | /* AMF data size */
136 | size_t amf_data_size(const amf_data * data);
137 | /* write encoded AMF data into a buffer */
138 | size_t amf_data_buffer_write(amf_data * data, byte * buffer, size_t maxbytes);
139 | /* write encoded AMF data into a stream */
140 | size_t amf_data_file_write(const amf_data * data, FILE * stream);
141 | /* get the type of AMF data */
142 | byte amf_data_get_type(const amf_data * data);
143 | /* get the error code of AMF data */
144 | byte amf_data_get_error_code(const amf_data * data);
145 | /* return a new copy of AMF data */
146 | amf_data * amf_data_clone(const amf_data * data);
147 | /* release the memory of AMF data */
148 | void amf_data_free(amf_data * data);
149 | /* dump AMF data into a stream as text */
150 | void amf_data_dump(FILE * stream, const amf_data * data, int indent_level);
151 |
152 | /* return a null AMF object with the specified error code attached to it */
153 | amf_data * amf_data_error(byte error_code);
154 |
155 | /* number functions */
156 | amf_data * amf_number_new(number64 value);
157 | amf_data * amf_number_double(double value);
158 | number64 amf_number_get_value(const amf_data * data);
159 | void amf_number_set_value(amf_data * data, number64 value);
160 |
161 | /* boolean functions */
162 | amf_data * amf_boolean_new(uint8 value);
163 | uint8 amf_boolean_get_value(const amf_data * data);
164 | void amf_boolean_set_value(amf_data * data, uint8 value);
165 |
166 | /* string functions */
167 | amf_data * amf_string_new(byte * str, uint16 size);
168 | amf_data * amf_str(const char * str);
169 | uint16 amf_string_get_size(const amf_data * data);
170 | byte * amf_string_get_bytes(const amf_data * data);
171 |
172 | /* object functions */
173 | amf_data * amf_object_new(void);
174 | uint32 amf_object_size(const amf_data * data);
175 | amf_data * amf_object_add(amf_data * data, const char * name, amf_data * element);
176 | amf_data * amf_object_get(const amf_data * data, const char * name);
177 | amf_data * amf_object_set(amf_data * data, const char * name, amf_data * element);
178 | amf_data * amf_object_delete(amf_data * data, const char * name);
179 | amf_node * amf_object_first(const amf_data * data);
180 | amf_node * amf_object_last(const amf_data * data);
181 | amf_node * amf_object_next(amf_node * node);
182 | amf_node * amf_object_prev(amf_node * node);
183 | amf_data * amf_object_get_name(amf_node * node);
184 | amf_data * amf_object_get_data(amf_node * node);
185 |
186 | /* null functions */
187 | #define amf_null_new() amf_data_new(AMF_TYPE_NULL)
188 |
189 | /* undefined functions */
190 | #define amf_undefined_new() amf_data_new(AMF_TYPE_UNDEFINED)
191 |
192 | /* associative array functions */
193 | amf_data * amf_associative_array_new(void);
194 | #define amf_associative_array_size(d) amf_object_size(d)
195 | #define amf_associative_array_add(d, n, e) amf_object_add(d, n, e)
196 | #define amf_associative_array_get(d, n) amf_object_get(d, n)
197 | #define amf_associative_array_set(d, n, e) amf_object_set(d, n, e)
198 | #define amf_associative_array_delete(d, n) amf_object_delete(d, n)
199 | #define amf_associative_array_first(d) amf_object_first(d)
200 | #define amf_associative_array_last(d) amf_object_last(d)
201 | #define amf_associative_array_next(n) amf_object_next(n)
202 | #define amf_associative_array_prev(n) amf_object_prev(n)
203 | #define amf_associative_array_get_name(n) amf_object_get_name(n)
204 | #define amf_associative_array_get_data(n) amf_object_get_data(n)
205 |
206 | /* array functions */
207 | amf_data * amf_array_new(void);
208 | uint32 amf_array_size(const amf_data * data);
209 | amf_data * amf_array_push(amf_data * data, amf_data * element);
210 | amf_data * amf_array_pop(amf_data * data);
211 | amf_node * amf_array_first(const amf_data * data);
212 | amf_node * amf_array_last(const amf_data * data);
213 | amf_node * amf_array_next(amf_node * node);
214 | amf_node * amf_array_prev(amf_node * node);
215 | amf_data * amf_array_get(amf_node * node);
216 | amf_data * amf_array_get_at(const amf_data * data, uint32 n);
217 | amf_data * amf_array_delete(amf_data * data, amf_node * node);
218 | amf_data * amf_array_insert_before(amf_data * data, amf_node * node, amf_data * element);
219 | amf_data * amf_array_insert_after(amf_data * data, amf_node * node, amf_data * element);
220 |
221 | /* date functions */
222 | amf_data * amf_date_new(number64 milliseconds, sint16 timezone);
223 | number64 amf_date_get_milliseconds(const amf_data * data);
224 | sint16 amf_date_get_timezone(const amf_data * data);
225 | time_t amf_date_to_time_t(const amf_data * data);
226 |
227 | #ifdef __cplusplus
228 | }
229 | #endif /* __cplusplus */
230 |
231 | #endif /* __AMF_H__ */
232 |
--------------------------------------------------------------------------------
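To make the API above concrete, a short sketch (illustrative, not part of LFLiveKit) that builds the kind of `onMetaData` script payload an FLV muxer emits and serializes it with `amf_data_buffer_write`; the metadata values are arbitrary.

    #include "amf.h"

    /* Returns the number of AMF0 bytes written into out (name string + ECMA array). */
    static size_t build_onmetadata(byte * out, size_t out_size) {
        amf_data * name = amf_str("onMetaData");
        amf_data * meta = amf_associative_array_new();
        amf_associative_array_add(meta, "width",     amf_number_new(720));
        amf_associative_array_add(meta, "height",    amf_number_new(1280));
        amf_associative_array_add(meta, "framerate", amf_number_new(24));

        size_t written = amf_data_buffer_write(name, out, out_size);
        written += amf_data_buffer_write(meta, out + written, out_size - written);

        amf_data_free(name);
        amf_data_free(meta);
        return written;
    }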
/LFLiveKit/LFLiveSession.m:
--------------------------------------------------------------------------------
1 | //
2 | // LFLiveSession.m
3 | // LFLiveKit
4 | //
5 | // Created by 倾慕 on 16/5/2.
6 | //  Copyright © 2016 倾慕. All rights reserved.
7 | //
8 |
9 | #import "LFLiveSession.h"
10 | #import "LFVideoCapture.h"
11 | #import "LFAudioCapture.h"
12 | #import "LFHardwareVideoEncoder.h"
13 | #import "LFHardwareAudioEncoder.h"
14 | #import "LFStreamRtmpSocket.h"
15 | #import "LFStreamTcpSocket.h"
16 | #import "LFLiveStreamInfo.h"
17 |
18 | #define LFLiveReportKey @"com.youku.liveSessionReport"
19 |
20 | @interface LFLiveSession ()<LFAudioCaptureDelegate, LFVideoCaptureDelegate, LFAudioEncodingDelegate, LFVideoEncodingDelegate, LFStreamSocketDelegate>
21 | {
22 | dispatch_semaphore_t _lock;
23 | }
24 | /// Streaming type (RTMP or FLV)
25 | @property (nonatomic, assign) LFLiveType liveType;
26 | /// Audio configuration
27 | @property (nonatomic, strong) LFLiveAudioConfiguration *audioConfiguration;
28 | /// Video configuration
29 | @property (nonatomic, strong) LFLiveVideoConfiguration *videoConfiguration;
30 | /// Audio capture
31 | @property (nonatomic, strong) LFAudioCapture *audioCaptureSource;
32 | /// Video capture
33 | @property (nonatomic, strong) LFVideoCapture *videoCaptureSource;
34 | /// Audio encoder
35 | @property (nonatomic, strong) id<LFAudioEncoding> audioEncoder;
36 | /// Video encoder
37 | @property (nonatomic, strong) id<LFVideoEncoding> videoEncoder;
38 | /// Upload socket
39 | @property (nonatomic, strong) id<LFStreamSocket> socket;
40 |
41 | #pragma mark -- Internal flags
42 | /// report block
43 | @property (nonatomic, copy) dispatch_block_t reportBlock;
44 | /// debugInfo
45 | @property (nonatomic, strong) LFLiveDebug *debugInfo;
46 | /// streamInfo
47 | @property (nonatomic, strong) LFLiveStreamInfo *streamInfo;
48 | /// uploading
49 | @property (nonatomic, assign) BOOL uploading;
50 | /// state
51 | @property (nonatomic,assign,readwrite) LFLiveState state;
52 |
53 | @end
54 |
55 | /** Timestamp (ms) */
56 | #define NOW (CACurrentMediaTime()*1000)
57 | @interface LFLiveSession ()
58 |
59 | @property (nonatomic, assign) uint64_t timestamp;
60 | @property (nonatomic, assign) BOOL isFirstFrame;
61 | @property (nonatomic, assign) uint64_t currentTimestamp;
62 |
63 | @end
64 |
65 | @implementation LFLiveSession
66 |
67 | #pragma mark -- LifeCycle
68 | - (instancetype)initWithAudioConfiguration:(LFLiveAudioConfiguration *)audioConfiguration videoConfiguration:(LFLiveVideoConfiguration *)videoConfiguration liveType:(LFLiveType)liveType{
69 | if(!audioConfiguration || !videoConfiguration) @throw [NSException exceptionWithName:@"LFLiveSession init error" reason:@"audioConfiguration or videoConfiguration is nil " userInfo:nil];
70 | if(self = [super init]){
71 | _audioConfiguration = audioConfiguration;
72 | _videoConfiguration = videoConfiguration;
73 | _liveType = liveType;
74 | _lock = dispatch_semaphore_create(1);
75 | }
76 | return self;
77 | }
78 |
79 | - (void)dealloc{
80 | self.audioCaptureSource.running = NO;
81 | self.videoCaptureSource.running = NO;
82 | }
83 |
84 | #pragma mark -- CustomMethod
85 | - (void)startLive:(LFLiveStreamInfo*)streamInfo{
86 | if(!streamInfo) return;
87 | _streamInfo = streamInfo;
88 | _streamInfo.videoConfiguration = _videoConfiguration;
89 | _streamInfo.audioConfiguration = _audioConfiguration;
90 | [self.socket start];
91 | }
92 |
93 | - (void)stopLive{
94 | self.uploading = NO;
95 | [self.socket stop];
96 | }
97 |
98 | #pragma mark -- CaptureDelegate
99 | - (void)captureOutput:(nullable LFAudioCapture*)capture audioBuffer:(AudioBufferList)inBufferList{
100 | [self.audioEncoder encodeAudioData:inBufferList timeStamp:self.currentTimestamp];
101 | }
102 |
103 | - (void)captureOutput:(nullable LFVideoCapture*)capture pixelBuffer:(nullable CVImageBufferRef)pixelBuffer{
104 | [self.videoEncoder encodeVideoData:pixelBuffer timeStamp:self.currentTimestamp];
105 | }
106 |
107 | #pragma mark -- EncoderDelegate
108 | - (void)audioEncoder:(nullable id<LFAudioEncoding>)encoder audioFrame:(nullable LFAudioFrame*)frame{
109 |     if(self.uploading) [self.socket sendFrame:frame];//< upload
110 | }
111 | 
112 | - (void)videoEncoder:(nullable id<LFVideoEncoding>)encoder videoFrame:(nullable LFVideoFrame*)frame{
113 |     if(self.uploading) [self.socket sendFrame:frame];//< upload
114 | }
115 |
116 | #pragma mark -- LFStreamTcpSocketDelegate
117 | - (void)socketStatus:(nullable id<LFStreamSocket>)socket status:(LFLiveState)status{
118 | if(status == LFLiveStart){
119 | if(!self.uploading){
120 | self.timestamp = 0;
121 | self.isFirstFrame = YES;
122 | self.uploading = YES;
123 | }
124 | }
125 | dispatch_async(dispatch_get_main_queue(), ^{
126 | self.state = status;
127 | if(self.delegate && [self.delegate respondsToSelector:@selector(liveSession:liveStateDidChange:)]){
128 | [self.delegate liveSession:self liveStateDidChange:status];
129 | }
130 | });
131 | }
132 |
133 | - (void)socketDidError:(nullable id<LFStreamSocket>)socket errorCode:(LFLiveSocketErrorCode)errorCode{
134 | dispatch_async(dispatch_get_main_queue(), ^{
135 | if(self.delegate && [self.delegate respondsToSelector:@selector(liveSession:errorCode:)]){
136 | [self.delegate liveSession:self errorCode:errorCode];
137 | }
138 | });
139 | }
140 |
141 | - (void)socketDebug:(nullable id<LFStreamSocket>)socket debugInfo:(nullable LFLiveDebug*)debugInfo{
142 | self.debugInfo = debugInfo;
143 | if(self.showDebugInfo){
144 | dispatch_async(dispatch_get_main_queue(), ^{
145 | if(self.delegate && [self.delegate respondsToSelector:@selector(liveSession:debugInfo:)]){
146 | [self.delegate liveSession:self debugInfo:debugInfo];
147 | }
148 | });
149 | }
150 | }
151 |
152 | - (void)socketBufferStatus:(nullable id<LFStreamSocket>)socket status:(LFLiveBuffferState)status{
153 | NSUInteger videoBitRate = [_videoEncoder videoBitRate];
154 | if(status == LFLiveBuffferIncrease){
155 | if(videoBitRate < _videoConfiguration.videoMaxBitRate){
156 | videoBitRate = videoBitRate + 50*1024;
157 | [_videoEncoder setVideoBitRate:videoBitRate];
158 | }
159 | }else{
160 | if(videoBitRate > _videoConfiguration.videoMinBitRate){
161 | videoBitRate = videoBitRate - 100*1024;
162 | [_videoEncoder setVideoBitRate:videoBitRate];
163 | }
164 | }
165 | }
166 |
167 | #pragma mark -- Getter Setter
168 | - (void)setRunning:(BOOL)running{
169 | if(_running == running) return;
170 | [self willChangeValueForKey:@"running"];
171 | _running = running;
172 | [self didChangeValueForKey:@"running"];
173 | self.videoCaptureSource.running = _running;
174 | self.audioCaptureSource.running = _running;
175 | }
176 |
177 | - (void)setPreView:(UIView *)preView{
178 | [self.videoCaptureSource setPreView:preView];
179 | }
180 |
181 | - (UIView*)preView{
182 | return self.videoCaptureSource.preView;
183 | }
184 |
185 | - (void)setCaptureDevicePosition:(AVCaptureDevicePosition)captureDevicePosition{
186 | [self.videoCaptureSource setCaptureDevicePosition:captureDevicePosition];
187 | }
188 |
189 | - (AVCaptureDevicePosition)captureDevicePosition{
190 | return self.videoCaptureSource.captureDevicePosition;
191 | }
192 |
193 | - (void)setBeautyFace:(BOOL)beautyFace{
194 | [self.videoCaptureSource setBeautyFace:beautyFace];
195 | }
196 |
197 | - (BOOL)beautyFace{
198 | return self.videoCaptureSource.beautyFace;
199 | }
200 |
201 | - (void)setMuted:(BOOL)muted{
202 | [self.audioCaptureSource setMuted:muted];
203 | }
204 |
205 | - (BOOL)muted{
206 | return self.audioCaptureSource.muted;
207 | }
208 |
209 | - (LFAudioCapture*)audioCaptureSource{
210 | if(!_audioCaptureSource){
211 | _audioCaptureSource = [[LFAudioCapture alloc] initWithAudioConfiguration:_audioConfiguration];
212 | _audioCaptureSource.delegate = self;
213 | }
214 | return _audioCaptureSource;
215 | }
216 |
217 | - (LFVideoCapture*)videoCaptureSource{
218 | if(!_videoCaptureSource){
219 | _videoCaptureSource = [[LFVideoCapture alloc] initWithVideoConfiguration:_videoConfiguration];
220 | _videoCaptureSource.delegate = self;
221 | }
222 | return _videoCaptureSource;
223 | }
224 |
225 | - (id<LFAudioEncoding>)audioEncoder{
226 | if(!_audioEncoder){
227 | _audioEncoder = [[LFHardwareAudioEncoder alloc] initWithAudioStreamConfiguration:_audioConfiguration];
228 | [_audioEncoder setDelegate:self];
229 | }
230 | return _audioEncoder;
231 | }
232 |
233 | - (id<LFVideoEncoding>)videoEncoder{
234 | if(!_videoEncoder){
235 | _videoEncoder = [[LFHardwareVideoEncoder alloc] initWithVideoStreamConfiguration:_videoConfiguration];
236 | [_videoEncoder setDelegate:self];
237 | }
238 | return _videoEncoder;
239 | }
240 |
241 | - (id<LFStreamSocket>)socket{
242 | if(!_socket){
243 | if(self.liveType == LFLiveRTMP){
244 | _socket = [[LFStreamRtmpSocket alloc] initWithStream:self.streamInfo];
245 | }else if(self.liveType == LFLiveFLV){
246 | _socket = [[LFStreamTcpSocket alloc] initWithStream:self.streamInfo videoSize:self.videoConfiguration.videoSize reconnectInterval:self.reconnectInterval reconnectCount:self.reconnectCount];
247 | }
248 | [_socket setDelegate:self];
249 | }
250 | return _socket;
251 | }
252 |
253 | - (LFLiveStreamInfo*)streamInfo{
254 | if(!_streamInfo){
255 | _streamInfo = [[LFLiveStreamInfo alloc] init];
256 | }
257 | return _streamInfo;
258 | }
259 |
260 | - (uint64_t)currentTimestamp{
261 | dispatch_semaphore_wait(_lock, DISPATCH_TIME_FOREVER);
262 | uint64_t currentts = 0;
263 | if(_isFirstFrame == true) {
264 | _timestamp = NOW;
265 | _isFirstFrame = false;
266 | currentts = 0;
267 | }
268 | else {
269 | currentts = NOW - _timestamp;
270 | }
271 | dispatch_semaphore_signal(_lock);
272 | return currentts;
273 | }
274 |
275 | @end
276 |
--------------------------------------------------------------------------------
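From the caller's side the session boils down to: build the two configurations, create the session, attach a preview view, start capture, then call -startLive: with the stream info. A minimal sketch under stated assumptions: the delegate protocol name `LFLiveSessionDelegate`, the `+defaultConfiguration` factory on the video configuration, and the `url` property on `LFLiveStreamInfo` come from headers not shown in this section and should be checked against them.

    #import <UIKit/UIKit.h>
    #import "LFLiveSession.h"

    @interface LiveViewController : UIViewController <LFLiveSessionDelegate> // protocol name assumed
    @property (nonatomic, strong) LFLiveSession *session;
    @end

    @implementation LiveViewController

    - (void)startBroadcast {
        LFLiveAudioConfiguration *audio = [LFLiveAudioConfiguration defaultConfiguration];
        LFLiveVideoConfiguration *video = [LFLiveVideoConfiguration defaultConfiguration]; // assumed factory

        self.session = [[LFLiveSession alloc] initWithAudioConfiguration:audio
                                                      videoConfiguration:video
                                                                liveType:LFLiveRTMP];
        self.session.delegate = self;
        self.session.preView = self.view;   // camera preview renders into this view
        self.session.running = YES;         // starts audio/video capture

        LFLiveStreamInfo *info = [LFLiveStreamInfo new];
        info.url = @"rtmp://example.com/live/stream"; // property name assumed
        [self.session startLive:info];
    }

    - (void)stopBroadcast {
        [self.session stopLive];
        self.session.running = NO;
    }

    - (void)liveSession:(LFLiveSession *)session liveStateDidChange:(LFLiveState)state {
        NSLog(@"live state changed: %lu", (unsigned long)state);
    }

    @end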
/LFLiveKit/packet/flv/avc.c:
--------------------------------------------------------------------------------
1 | /*
2 | $Id: avc.c 231 2011-06-27 13:46:19Z marc.noirot $
3 |
4 | FLV Metadata updater
5 |
6 | Copyright (C) 2007-2012 Marc Noirot
7 |
8 | This file is part of FLVMeta.
9 |
10 | FLVMeta is free software; you can redistribute it and/or modify
11 | it under the terms of the GNU General Public License as published by
12 | the Free Software Foundation; either version 2 of the License, or
13 | (at your option) any later version.
14 |
15 | FLVMeta is distributed in the hope that it will be useful,
16 | but WITHOUT ANY WARRANTY; without even the implied warranty of
17 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
18 | GNU General Public License for more details.
19 |
20 | You should have received a copy of the GNU General Public License
21 | along with FLVMeta; if not, write to the Free Software
22 | Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
23 | */
24 | #include <stdlib.h>
25 |
26 | #include "avc.h"
27 |
28 | /**
29 | bit buffer handling
30 | */
31 | typedef struct __bit_buffer {
32 | byte * start;
33 | size_t size;
34 | byte * current;
35 | uint8 read_bits;
36 | } bit_buffer;
37 |
38 | static void skip_bits(bit_buffer * bb, size_t nbits) {
39 | bb->current = bb->current + ((nbits + bb->read_bits) / 8);
40 | bb->read_bits = (uint8)((bb->read_bits + nbits) % 8);
41 | }
42 |
43 | static uint8 get_bit(bit_buffer * bb) {
44 | uint8 ret;
45 | ret = (*(bb->current) >> (7 - bb->read_bits)) & 0x1;
46 | if (bb->read_bits == 7) {
47 | bb->read_bits = 0;
48 | bb->current++;
49 | }
50 | else {
51 | bb->read_bits++;
52 | }
53 | return ret;
54 | }
55 |
56 | static uint32 get_bits(bit_buffer * bb, size_t nbits) {
57 | uint32 i, ret;
58 | ret = 0;
59 | for (i = 0; i < nbits; i++) {
60 | ret = (ret << 1) + get_bit(bb);
61 | }
62 | return ret;
63 | }
64 |
65 | static uint32 exp_golomb_ue(bit_buffer * bb) {
66 | uint8 bit, significant_bits;
67 | significant_bits = 0;
68 | bit = get_bit(bb);
69 | while (bit == 0) {
70 | significant_bits++;
71 | bit = get_bit(bb);
72 | }
73 | return (1 << significant_bits) + get_bits(bb, significant_bits) - 1;
74 | }
75 |
76 | static sint32 exp_golomb_se(bit_buffer * bb) {
77 | sint32 ret;
78 | ret = exp_golomb_ue(bb);
79 | if ((ret & 0x1) == 0) {
80 | return -(ret >> 1);
81 | }
82 | else {
83 | return (ret + 1) >> 1;
84 | }
85 | }
86 |
87 | /* AVC type definitions */
88 |
89 | #define AVC_SEQUENCE_HEADER 0
90 | #define AVC_NALU 1
91 | #define AVC_END_OF_SEQUENCE 2
92 |
93 | typedef struct __AVCDecoderConfigurationRecord {
94 | uint8 configurationVersion;
95 | uint8 AVCProfileIndication;
96 | uint8 profile_compatibility;
97 | uint8 AVCLevelIndication;
98 | uint8 lengthSizeMinusOne;
99 | uint8 numOfSequenceParameterSets;
100 | } AVCDecoderConfigurationRecord;
101 |
102 | int read_avc_decoder_configuration_record(flv_stream * f, AVCDecoderConfigurationRecord * adcr) {
103 | if (flv_read_tag_body(f, &adcr->configurationVersion, 1) == 1
104 | && flv_read_tag_body(f, &adcr->AVCProfileIndication, 1) == 1
105 | && flv_read_tag_body(f, &adcr->profile_compatibility, 1) == 1
106 | && flv_read_tag_body(f, &adcr->AVCLevelIndication, 1) == 1
107 | && flv_read_tag_body(f, &adcr->lengthSizeMinusOne, 1) == 1
108 | && flv_read_tag_body(f, &adcr->numOfSequenceParameterSets, 1) == 1) {
109 | return FLV_OK;
110 | }
111 | else {
112 | return FLV_ERROR_EOF;
113 | }
114 | }
115 |
116 |
117 | static void parse_scaling_list(uint32 size, bit_buffer * bb) {
118 | uint32 last_scale, next_scale, i;
119 | sint32 delta_scale;
120 | last_scale = 8;
121 | next_scale = 8;
122 | for (i = 0; i < size; i++) {
123 | if (next_scale != 0) {
124 | delta_scale = exp_golomb_se(bb);
125 | next_scale = (last_scale + delta_scale + 256) % 256;
126 | }
127 | if (next_scale != 0) {
128 | last_scale = next_scale;
129 | }
130 | }
131 | }
132 |
133 | /**
134 | Parses a SPS NALU to retrieve video width and height
135 | */
136 | static void parse_sps(byte * sps, size_t sps_size, uint32 * width, uint32 * height) {
137 | bit_buffer bb;
138 | uint32 profile, pic_order_cnt_type, width_in_mbs, height_in_map_units;
139 | uint32 i, size, left, right, top, bottom;
140 | uint8 frame_mbs_only_flag;
141 |
142 | bb.start = sps;
143 | bb.size = sps_size;
144 | bb.current = sps;
145 | bb.read_bits = 0;
146 |
147 | /* skip first byte, since we already know we're parsing a SPS */
148 | skip_bits(&bb, 8);
149 | /* get profile */
150 | profile = get_bits(&bb, 8);
151 |     /* skip constraint_set flags (4 bits) + reserved zero bits (4 bits) + level_idc (8 bits) */
152 | skip_bits(&bb, 16);
153 |
154 | /* read sps id, first exp-golomb encoded value */
155 | exp_golomb_ue(&bb);
156 |
157 | if (profile == 100 || profile == 110 || profile == 122 || profile == 144) {
158 | /* chroma format idx */
159 | if (exp_golomb_ue(&bb) == 3) {
160 | skip_bits(&bb, 1);
161 | }
162 | /* bit depth luma minus8 */
163 | exp_golomb_ue(&bb);
164 | /* bit depth chroma minus8 */
165 | exp_golomb_ue(&bb);
166 | /* Qpprime Y Zero Transform Bypass flag */
167 | skip_bits(&bb, 1);
168 | /* Seq Scaling Matrix Present Flag */
169 | if (get_bit(&bb)) {
170 | for (i = 0; i < 8; i++) {
171 | /* Seq Scaling List Present Flag */
172 | if (get_bit(&bb)) {
173 | parse_scaling_list(i < 6 ? 16 : 64, &bb);
174 | }
175 | }
176 | }
177 | }
178 | /* log2_max_frame_num_minus4 */
179 | exp_golomb_ue(&bb);
180 | /* pic_order_cnt_type */
181 | pic_order_cnt_type = exp_golomb_ue(&bb);
182 | if (pic_order_cnt_type == 0) {
183 | /* log2_max_pic_order_cnt_lsb_minus4 */
184 | exp_golomb_ue(&bb);
185 | }
186 | else if (pic_order_cnt_type == 1) {
187 | /* delta_pic_order_always_zero_flag */
188 | skip_bits(&bb, 1);
189 | /* offset_for_non_ref_pic */
190 | exp_golomb_se(&bb);
191 | /* offset_for_top_to_bottom_field */
192 | exp_golomb_se(&bb);
193 | size = exp_golomb_ue(&bb);
194 | for (i = 0; i < size; i++) {
195 | /* offset_for_ref_frame */
196 | exp_golomb_se(&bb);
197 | }
198 | }
199 | /* num_ref_frames */
200 | exp_golomb_ue(&bb);
201 | /* gaps_in_frame_num_value_allowed_flag */
202 | skip_bits(&bb, 1);
203 | /* pic_width_in_mbs */
204 | width_in_mbs = exp_golomb_ue(&bb) + 1;
205 | /* pic_height_in_map_units */
206 | height_in_map_units = exp_golomb_ue(&bb) + 1;
207 | /* frame_mbs_only_flag */
208 | frame_mbs_only_flag = get_bit(&bb);
209 | if (!frame_mbs_only_flag) {
210 | /* mb_adaptive_frame_field */
211 | skip_bits(&bb, 1);
212 | }
213 | /* direct_8x8_inference_flag */
214 | skip_bits(&bb, 1);
215 | /* frame_cropping */
216 | left = right = top = bottom = 0;
217 | if (get_bit(&bb)) {
218 | left = exp_golomb_ue(&bb) * 2;
219 | right = exp_golomb_ue(&bb) * 2;
220 | top = exp_golomb_ue(&bb) * 2;
221 | bottom = exp_golomb_ue(&bb) * 2;
222 | if (!frame_mbs_only_flag) {
223 | top *= 2;
224 | bottom *= 2;
225 | }
226 | }
227 | /* width */
228 | *width = width_in_mbs * 16 - (left + right);
229 | /* height */
230 | *height = height_in_map_units * 16 - (top + bottom);
231 | if (!frame_mbs_only_flag) {
232 | *height *= 2;
233 | }
234 | }
235 |
236 | /**
237 | Tries to read the resolution of the current video packet.
238 | We assume to be at the first byte of the video data.
239 | */
240 | int read_avc_resolution(flv_stream * f, uint32 body_length, uint32 * width, uint32 * height) {
241 | byte avc_packet_type;
242 | uint24 composition_time;
243 | AVCDecoderConfigurationRecord adcr;
244 | uint16 sps_size;
245 | byte * sps_buffer;
246 |
247 | /* make sure we have enough bytes to read in the current tag */
248 | if (body_length < sizeof(byte) + sizeof(uint24) + sizeof(AVCDecoderConfigurationRecord)) {
249 | return FLV_OK;
250 | }
251 |
252 | /* determine whether we're reading an AVCDecoderConfigurationRecord */
253 | if (flv_read_tag_body(f, &avc_packet_type, 1) < 1) {
254 | return FLV_ERROR_EOF;
255 | }
256 | if (avc_packet_type != AVC_SEQUENCE_HEADER) {
257 | return FLV_OK;
258 | }
259 |
260 | /* read the composition time */
261 | if (flv_read_tag_body(f, &composition_time, sizeof(uint24)) < sizeof(uint24)) {
262 | return FLV_ERROR_EOF;
263 | }
264 |
265 | /* we need to read an AVCDecoderConfigurationRecord */
266 | if (read_avc_decoder_configuration_record(f, &adcr) == FLV_ERROR_EOF) {
267 | return FLV_ERROR_EOF;
268 | }
269 |
270 | /* number of SequenceParameterSets */
271 | if ((adcr.numOfSequenceParameterSets & 0x1F) == 0) {
272 | /* no SPS, return */
273 | return FLV_OK;
274 | }
275 |
276 | /** read the first SequenceParameterSet found */
277 | /* SPS size */
278 | if (flv_read_tag_body(f, &sps_size, sizeof(uint16)) < sizeof(uint16)) {
279 | return FLV_ERROR_EOF;
280 | }
281 | sps_size = swap_uint16(sps_size);
282 |
283 | /* read the SPS entirely */
284 | sps_buffer = (byte *) malloc((size_t)sps_size);
285 | if (sps_buffer == NULL) {
286 | return FLV_ERROR_MEMORY;
287 | }
288 | if (flv_read_tag_body(f, sps_buffer, (size_t)sps_size) < (size_t)sps_size) {
289 | free(sps_buffer);
290 | return FLV_ERROR_EOF;
291 | }
292 |
293 | /* parse SPS to determine video resolution */
294 | parse_sps(sps_buffer, (size_t)sps_size, width, height);
295 |
296 | free(sps_buffer);
297 | return FLV_OK;
298 | }
299 |
--------------------------------------------------------------------------------
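The SPS parser above leans entirely on unsigned Exp-Golomb decoding. As a worked check of `exp_golomb_ue`: the bit string 00101 has two leading zeros, the terminating 1 is consumed, the next two bits 01 read as 1, and the result is (1 << 2) + 1 - 1 = 4. Below is a self-contained sketch of that decode over a plain byte array (a hypothetical helper mirroring the static functions above, for illustration only).

    #include <stdint.h>
    #include <stddef.h>

    /* Decode one unsigned Exp-Golomb value starting at *bit_pos (MSB-first). */
    static uint32_t ue_decode(const uint8_t * buf, size_t * bit_pos) {
        uint32_t zeros = 0, value = 0, i;
        while (((buf[*bit_pos / 8] >> (7 - (*bit_pos % 8))) & 0x1) == 0) {
            zeros++;
            (*bit_pos)++;
        }
        (*bit_pos)++; /* consume the terminating 1 bit */
        for (i = 0; i < zeros; i++) {
            value = (value << 1) | ((buf[*bit_pos / 8] >> (7 - (*bit_pos % 8))) & 0x1);
            (*bit_pos)++;
        }
        return (1u << zeros) + value - 1;
    }

    /* Example: buf[0] = 0x28 (binary 00101000); ue_decode(buf, &pos) with pos = 0 yields 4. */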
/LFLiveKit/filter/LFGPUImageBeautyFilter.m:
--------------------------------------------------------------------------------
1 | #import "LFGPUImageBeautyFilter.h"
2 |
3 | #if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
4 | NSString *const kLFGPUImageBeautyFragmentShaderString = SHADER_STRING
5 | (
6 | varying highp vec2 textureCoordinate;
7 |
8 | uniform sampler2D inputImageTexture;
9 |
10 | uniform highp vec2 singleStepOffset;
11 | uniform mediump float params;
12 |
13 | const highp vec3 W = vec3(0.299,0.587,0.114);
14 | highp vec2 blurCoordinates[20];
15 |
16 | highp float hardLight(highp float color)
17 | {
18 | if(color <= 0.5)
19 | color = color * color * 2.0;
20 | else
21 | color = 1.0 - ((1.0 - color)*(1.0 - color) * 2.0);
22 | return color;
23 | }
24 |
25 | void main()
26 | {
27 | highp vec3 centralColor = texture2D(inputImageTexture, textureCoordinate).rgb;
28 | blurCoordinates[0] = textureCoordinate.xy + singleStepOffset * vec2(0.0, -10.0);
29 | blurCoordinates[1] = textureCoordinate.xy + singleStepOffset * vec2(0.0, 10.0);
30 | blurCoordinates[2] = textureCoordinate.xy + singleStepOffset * vec2(-10.0, 0.0);
31 | blurCoordinates[3] = textureCoordinate.xy + singleStepOffset * vec2(10.0, 0.0);
32 | blurCoordinates[4] = textureCoordinate.xy + singleStepOffset * vec2(5.0, -8.0);
33 | blurCoordinates[5] = textureCoordinate.xy + singleStepOffset * vec2(5.0, 8.0);
34 | blurCoordinates[6] = textureCoordinate.xy + singleStepOffset * vec2(-5.0, 8.0);
35 | blurCoordinates[7] = textureCoordinate.xy + singleStepOffset * vec2(-5.0, -8.0);
36 | blurCoordinates[8] = textureCoordinate.xy + singleStepOffset * vec2(8.0, -5.0);
37 | blurCoordinates[9] = textureCoordinate.xy + singleStepOffset * vec2(8.0, 5.0);
38 | blurCoordinates[10] = textureCoordinate.xy + singleStepOffset * vec2(-8.0, 5.0);
39 | blurCoordinates[11] = textureCoordinate.xy + singleStepOffset * vec2(-8.0, -5.0);
40 | blurCoordinates[12] = textureCoordinate.xy + singleStepOffset * vec2(0.0, -6.0);
41 | blurCoordinates[13] = textureCoordinate.xy + singleStepOffset * vec2(0.0, 6.0);
42 | blurCoordinates[14] = textureCoordinate.xy + singleStepOffset * vec2(6.0, 0.0);
43 | blurCoordinates[15] = textureCoordinate.xy + singleStepOffset * vec2(-6.0, 0.0);
44 | blurCoordinates[16] = textureCoordinate.xy + singleStepOffset * vec2(-4.0, -4.0);
45 | blurCoordinates[17] = textureCoordinate.xy + singleStepOffset * vec2(-4.0, 4.0);
46 | blurCoordinates[18] = textureCoordinate.xy + singleStepOffset * vec2(4.0, -4.0);
47 | blurCoordinates[19] = textureCoordinate.xy + singleStepOffset * vec2(4.0, 4.0);
48 |
49 | highp float sampleColor = centralColor.g * 20.0;
50 | sampleColor += texture2D(inputImageTexture, blurCoordinates[0]).g;
51 | sampleColor += texture2D(inputImageTexture, blurCoordinates[1]).g;
52 | sampleColor += texture2D(inputImageTexture, blurCoordinates[2]).g;
53 | sampleColor += texture2D(inputImageTexture, blurCoordinates[3]).g;
54 | sampleColor += texture2D(inputImageTexture, blurCoordinates[4]).g;
55 | sampleColor += texture2D(inputImageTexture, blurCoordinates[5]).g;
56 | sampleColor += texture2D(inputImageTexture, blurCoordinates[6]).g;
57 | sampleColor += texture2D(inputImageTexture, blurCoordinates[7]).g;
58 | sampleColor += texture2D(inputImageTexture, blurCoordinates[8]).g;
59 | sampleColor += texture2D(inputImageTexture, blurCoordinates[9]).g;
60 | sampleColor += texture2D(inputImageTexture, blurCoordinates[10]).g;
61 | sampleColor += texture2D(inputImageTexture, blurCoordinates[11]).g;
62 | sampleColor += texture2D(inputImageTexture, blurCoordinates[12]).g * 2.0;
63 | sampleColor += texture2D(inputImageTexture, blurCoordinates[13]).g * 2.0;
64 | sampleColor += texture2D(inputImageTexture, blurCoordinates[14]).g * 2.0;
65 | sampleColor += texture2D(inputImageTexture, blurCoordinates[15]).g * 2.0;
66 | sampleColor += texture2D(inputImageTexture, blurCoordinates[16]).g * 2.0;
67 | sampleColor += texture2D(inputImageTexture, blurCoordinates[17]).g * 2.0;
68 | sampleColor += texture2D(inputImageTexture, blurCoordinates[18]).g * 2.0;
69 | sampleColor += texture2D(inputImageTexture, blurCoordinates[19]).g * 2.0;
70 |
71 | sampleColor = sampleColor / 48.0;
72 |
73 | highp float highPass = centralColor.g - sampleColor + 0.5;
74 |
75 | for(int i = 0; i < 5;i++)
76 | {
77 | highPass = hardLight(highPass);
78 | }
79 | highp float luminance = dot(centralColor, W);
80 |
81 | highp float alpha = pow(luminance, params);
82 |
83 | highp vec3 smoothColor = centralColor + (centralColor-vec3(highPass))*alpha*0.1;
84 |
85 | gl_FragColor = vec4(mix(smoothColor.rgb, max(smoothColor, centralColor), alpha), 1.0);
86 | }
87 | );
88 | #else
89 | NSString *const kLFGPUImageBeautyFragmentShaderString = SHADER_STRING
90 | (
91 | varying vec2 textureCoordinate;
92 |
93 | uniform sampler2D inputImageTexture;
94 |
95 | uniform mediump vec2 singleStepOffset;
96 | uniform mediump float params;
97 |
98 | const mediump vec3 W = vec3(0.299,0.587,0.114);
99 | mediump vec2 blurCoordinates[20];
100 |
101 | mediump float hardLight(mediump float color)
102 | {
103 | if(color <= 0.5)
104 | color = color * color * 2.0;
105 | else
106 | color = 1.0 - ((1.0 - color)*(1.0 - color) * 2.0);
107 | return color;
108 | }
109 |
110 | void main()
111 | {
112 | mediump vec3 centralColor = texture2D(inputImageTexture, textureCoordinate).rgb;
113 | blurCoordinates[0] = textureCoordinate.xy + singleStepOffset * vec2(0.0, -10.0);
114 | blurCoordinates[1] = textureCoordinate.xy + singleStepOffset * vec2(0.0, 10.0);
115 | blurCoordinates[2] = textureCoordinate.xy + singleStepOffset * vec2(-10.0, 0.0);
116 | blurCoordinates[3] = textureCoordinate.xy + singleStepOffset * vec2(10.0, 0.0);
117 | blurCoordinates[4] = textureCoordinate.xy + singleStepOffset * vec2(5.0, -8.0);
118 | blurCoordinates[5] = textureCoordinate.xy + singleStepOffset * vec2(5.0, 8.0);
119 | blurCoordinates[6] = textureCoordinate.xy + singleStepOffset * vec2(-5.0, 8.0);
120 | blurCoordinates[7] = textureCoordinate.xy + singleStepOffset * vec2(-5.0, -8.0);
121 | blurCoordinates[8] = textureCoordinate.xy + singleStepOffset * vec2(8.0, -5.0);
122 | blurCoordinates[9] = textureCoordinate.xy + singleStepOffset * vec2(8.0, 5.0);
123 | blurCoordinates[10] = textureCoordinate.xy + singleStepOffset * vec2(-8.0, 5.0);
124 | blurCoordinates[11] = textureCoordinate.xy + singleStepOffset * vec2(-8.0, -5.0);
125 | blurCoordinates[12] = textureCoordinate.xy + singleStepOffset * vec2(0.0, -6.0);
126 | blurCoordinates[13] = textureCoordinate.xy + singleStepOffset * vec2(0.0, 6.0);
127 | blurCoordinates[14] = textureCoordinate.xy + singleStepOffset * vec2(6.0, 0.0);
128 | blurCoordinates[15] = textureCoordinate.xy + singleStepOffset * vec2(-6.0, 0.0);
129 | blurCoordinates[16] = textureCoordinate.xy + singleStepOffset * vec2(-4.0, -4.0);
130 | blurCoordinates[17] = textureCoordinate.xy + singleStepOffset * vec2(-4.0, 4.0);
131 | blurCoordinates[18] = textureCoordinate.xy + singleStepOffset * vec2(4.0, -4.0);
132 | blurCoordinates[19] = textureCoordinate.xy + singleStepOffset * vec2(4.0, 4.0);
133 |
134 | mediump float sampleColor = centralColor.g * 20.0;
135 | sampleColor += texture2D(inputImageTexture, blurCoordinates[0]).g;
136 | sampleColor += texture2D(inputImageTexture, blurCoordinates[1]).g;
137 | sampleColor += texture2D(inputImageTexture, blurCoordinates[2]).g;
138 | sampleColor += texture2D(inputImageTexture, blurCoordinates[3]).g;
139 | sampleColor += texture2D(inputImageTexture, blurCoordinates[4]).g;
140 | sampleColor += texture2D(inputImageTexture, blurCoordinates[5]).g;
141 | sampleColor += texture2D(inputImageTexture, blurCoordinates[6]).g;
142 | sampleColor += texture2D(inputImageTexture, blurCoordinates[7]).g;
143 | sampleColor += texture2D(inputImageTexture, blurCoordinates[8]).g;
144 | sampleColor += texture2D(inputImageTexture, blurCoordinates[9]).g;
145 | sampleColor += texture2D(inputImageTexture, blurCoordinates[10]).g;
146 | sampleColor += texture2D(inputImageTexture, blurCoordinates[11]).g;
147 | sampleColor += texture2D(inputImageTexture, blurCoordinates[12]).g * 2.0;
148 | sampleColor += texture2D(inputImageTexture, blurCoordinates[13]).g * 2.0;
149 | sampleColor += texture2D(inputImageTexture, blurCoordinates[14]).g * 2.0;
150 | sampleColor += texture2D(inputImageTexture, blurCoordinates[15]).g * 2.0;
151 | sampleColor += texture2D(inputImageTexture, blurCoordinates[16]).g * 2.0;
152 | sampleColor += texture2D(inputImageTexture, blurCoordinates[17]).g * 2.0;
153 | sampleColor += texture2D(inputImageTexture, blurCoordinates[18]).g * 2.0;
154 | sampleColor += texture2D(inputImageTexture, blurCoordinates[19]).g * 2.0;
155 |
156 | sampleColor = sampleColor / 48.0;
157 |
158 | mediump float highPass = centralColor.g - sampleColor + 0.5;
159 |
160 | for(int i = 0; i < 5;i++)
161 | {
162 | highPass = hardLight(highPass);
163 | }
164 | mediump float luminance = dot(centralColor, W);
165 |
166 | mediump float alpha = pow(luminance, params);
167 |
168 | mediump vec3 smoothColor = centralColor + (centralColor-vec3(highPass))*alpha*0.1;
169 |
170 | gl_FragColor = vec4(mix(smoothColor.rgb, max(smoothColor, centralColor), alpha), 1.0);
171 | }
172 | );
173 | #endif
174 |
175 | @implementation LFGPUImageBeautyFilter
176 |
177 | - (id)init;
178 | {
179 | if (!(self = [super initWithFragmentShaderFromString:kLFGPUImageBeautyFragmentShaderString]))
180 | {
181 | return nil;
182 | }
183 |
184 |     self.beautyLevel = 2;
185 | 
186 |
187 | return self;
188 | }
189 |
190 | - (void)setInputSize:(CGSize)newSize atIndex:(NSInteger)textureIndex
191 | {
192 | CGSize oldInputSize = inputTextureSize;
193 | [super setInputSize:newSize atIndex:textureIndex];
194 | inputTextureSize = newSize;
195 |
196 | CGPoint offset = CGPointMake(2.0f / inputTextureSize.width, 2.0 / inputTextureSize.height);
197 | [self setPoint:offset forUniformName:@"singleStepOffset"];
198 | }
199 |
200 | - (void)setBeautyLevel:(NSInteger)level
201 | {
202 | switch (level) {
203 | case 1:
204 | [self setFloat:1.0f forUniformName:@"params"];
205 | break;
206 | case 2:
207 | [self setFloat:0.8f forUniformName:@"params"];
208 | break;
209 | case 3:
210 | [self setFloat:0.6f forUniformName:@"params"];
211 | break;
212 | case 4:
213 | [self setFloat:0.4f forUniformName:@"params"];
214 | break;
215 | case 5:
216 | [self setFloat:0.33f forUniformName:@"params"];
217 | break;
218 | default:
219 | break;
220 | }
221 | }
222 |
223 | @end
224 |
225 |
--------------------------------------------------------------------------------
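For context on how the filter is used: it subclasses GPUImage's filter base class (see LFGPUImageBeautyFilter.h), so it drops into an ordinary GPUImage chain. A minimal preview sketch, assuming GPUImage is available through the project's Podfile and that `beautyLevel` is the public property declared in the header:

    #import <GPUImage/GPUImage.h>
    #import "LFGPUImageBeautyFilter.h"

    static GPUImageVideoCamera *camera; // must outlive the preview

    static void startBeautyPreview(GPUImageView *previewView) {
        camera = [[GPUImageVideoCamera alloc] initWithSessionPreset:AVCaptureSessionPreset1280x720
                                                     cameraPosition:AVCaptureDevicePositionFront];
        camera.outputImageOrientation = UIInterfaceOrientationPortrait;

        LFGPUImageBeautyFilter *beauty = [[LFGPUImageBeautyFilter alloc] init];
        beauty.beautyLevel = 3; // levels 1–5 map to the `params` uniform in the switch above

        [camera addTarget:beauty];
        [beauty addTarget:previewView];
        [camera startCameraCapture];
    }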
/LFLiveKit/coder/LFHardwareVideoEncoder.m:
--------------------------------------------------------------------------------
1 | //
2 | // LFHardwareVideoEncoder.m
3 | // LFLiveKit
4 | //
5 | // Created by 倾慕 on 16/5/2.
6 | //  Copyright © 2016 倾慕. All rights reserved.
7 | //
8 |
9 | #import "LFHardwareVideoEncoder.h"
10 | #import <VideoToolbox/VideoToolbox.h>
11 |
12 | @interface LFHardwareVideoEncoder (){
13 | VTCompressionSessionRef compressionSession;
14 | NSInteger frameCount;
15 | NSData *sps;
16 | NSData *pps;
17 | FILE *fp;
18 | BOOL enabledWriteVideoFile;
19 | }
20 |
21 | @property (nonatomic, strong) LFLiveVideoConfiguration *configuration;
22 | @property (nonatomic, weak) id<LFVideoEncodingDelegate> h264Delegate;
23 | @property (nonatomic) BOOL isBackGround;
24 | @property (nonatomic) NSInteger currentVideoBitRate;
25 |
26 | @end
27 |
28 | @implementation LFHardwareVideoEncoder
29 |
30 | #pragma mark -- LifeCycle
31 | - (instancetype)initWithVideoStreamConfiguration:(LFLiveVideoConfiguration *)configuration{
32 | if(self = [super init]){
33 | _configuration = configuration;
34 | [self initCompressionSession];
35 |
36 | #ifdef DEBUG
37 | enabledWriteVideoFile = NO;
38 | [self initForFilePath];
39 | #endif
40 |
41 | [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(willEnterBackground:) name:UIApplicationWillResignActiveNotification object:nil];
42 | [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(willEnterForeground:) name:UIApplicationDidBecomeActiveNotification object:nil];
43 | }
44 | return self;
45 | }
46 |
47 | - (void)initCompressionSession{
48 | if(compressionSession){
49 | VTCompressionSessionCompleteFrames(compressionSession, kCMTimeInvalid);
50 |
51 | VTCompressionSessionInvalidate(compressionSession);
52 | CFRelease(compressionSession);
53 | compressionSession = NULL;
54 | }
55 |
56 | OSStatus status = VTCompressionSessionCreate(NULL, _configuration.videoSize.width, _configuration.videoSize.height, kCMVideoCodecType_H264, NULL, NULL, NULL, VideoCompressonOutputCallback, (__bridge void *)self, &compressionSession);
57 | if(status != noErr){
58 | return;
59 | }
60 |
61 | _currentVideoBitRate = _configuration.videoBitRate;
62 | status = VTSessionSetProperty(compressionSession, kVTCompressionPropertyKey_MaxKeyFrameInterval,(__bridge CFTypeRef)@(_configuration.videoMaxKeyframeInterval));
63 | status = VTSessionSetProperty(compressionSession, kVTCompressionPropertyKey_MaxKeyFrameIntervalDuration,(__bridge CFTypeRef)@(_configuration.videoMaxKeyframeInterval));
64 | status = VTSessionSetProperty(compressionSession, kVTCompressionPropertyKey_ExpectedFrameRate, (__bridge CFTypeRef)@(_configuration.videoFrameRate));
65 | status = VTSessionSetProperty(compressionSession, kVTCompressionPropertyKey_AverageBitRate, (__bridge CFTypeRef)@(_configuration.videoBitRate));
66 | NSArray *limit = @[@(_configuration.videoBitRate * 1.5/8),@(1)];
67 | status = VTSessionSetProperty(compressionSession, kVTCompressionPropertyKey_DataRateLimits, (__bridge CFArrayRef)limit);
68 | status = VTSessionSetProperty(compressionSession, kVTCompressionPropertyKey_RealTime, kCFBooleanFalse);
69 | status = VTSessionSetProperty(compressionSession, kVTCompressionPropertyKey_ProfileLevel, kVTProfileLevel_H264_Main_AutoLevel);
70 | status = VTSessionSetProperty(compressionSession, kVTCompressionPropertyKey_AllowFrameReordering, kCFBooleanFalse);
71 | status = VTSessionSetProperty(compressionSession, kVTCompressionPropertyKey_H264EntropyMode, kVTH264EntropyMode_CABAC);
72 | VTCompressionSessionPrepareToEncodeFrames(compressionSession);
73 |
74 | }
75 |
76 | - (void)setVideoBitRate:(NSInteger)videoBitRate{
77 | if(_isBackGround) return;
78 | VTSessionSetProperty(compressionSession, kVTCompressionPropertyKey_AverageBitRate, (__bridge CFTypeRef)@(videoBitRate));
79 | NSArray *limit = @[@(videoBitRate * 1.5/8),@(1)];
80 | VTSessionSetProperty(compressionSession, kVTCompressionPropertyKey_DataRateLimits, (__bridge CFArrayRef)limit);
81 | _currentVideoBitRate = videoBitRate;
82 | }
83 |
84 | -(NSInteger)videoBitRate{
85 | return _currentVideoBitRate;
86 | }
87 |
88 | - (void)dealloc{
89 | if(compressionSession != NULL)
90 | {
91 | VTCompressionSessionCompleteFrames(compressionSession, kCMTimeInvalid);
92 |
93 | VTCompressionSessionInvalidate(compressionSession);
94 | CFRelease(compressionSession);
95 | compressionSession = NULL;
96 | }
97 | [[NSNotificationCenter defaultCenter] removeObserver:self];
98 | }
99 |
100 | #pragma mark -- LFVideoEncoder
101 | - (void)encodeVideoData:(CVImageBufferRef)pixelBuffer timeStamp:(uint64_t)timeStamp{
102 | if(_isBackGround) return;
103 |
104 | frameCount ++;
105 | CMTime presentationTimeStamp = CMTimeMake(frameCount, 1000);
106 | VTEncodeInfoFlags flags;
107 | CMTime duration = CMTimeMake(1, (int32_t)_configuration.videoFrameRate);
108 |
109 | NSDictionary *properties = nil;
110 | if(frameCount % (int32_t)_configuration.videoMaxKeyframeInterval == 0){
111 | properties = @{(__bridge NSString *)kVTEncodeFrameOptionKey_ForceKeyFrame: @YES};
112 | }
113 | NSNumber *timeNumber = @(timeStamp);
114 |
115 | VTCompressionSessionEncodeFrame(compressionSession, pixelBuffer, presentationTimeStamp, duration, (__bridge CFDictionaryRef)properties, (__bridge_retained void *)timeNumber, &flags);
116 | }
117 |
118 | - (void)stopEncoder{
119 | VTCompressionSessionCompleteFrames(compressionSession, kCMTimeIndefinite);
120 | }
121 |
122 | - (void)setDelegate:(id<LFVideoEncodingDelegate>)delegate{
123 | _h264Delegate = delegate;
124 | }
125 |
126 | #pragma mark -- NSNotification
127 | - (void)willEnterBackground:(NSNotification*)notification{
128 | _isBackGround = YES;
129 | }
130 |
131 | - (void)willEnterForeground:(NSNotification*)notification{
132 | [self initCompressionSession];
133 | _isBackGround = NO;
134 | }
135 |
136 | #pragma mark -- VideoCallBack
137 | static void VideoCompressonOutputCallback(void *VTref, void *VTFrameRef, OSStatus status, VTEncodeInfoFlags infoFlags, CMSampleBufferRef sampleBuffer)
138 | {
139 | if(!sampleBuffer) return;
140 | CFArrayRef array = CMSampleBufferGetSampleAttachmentsArray(sampleBuffer, true);
141 | if(!array) return;
142 | CFDictionaryRef dic = (CFDictionaryRef)CFArrayGetValueAtIndex(array, 0);
143 | if(!dic) return;
144 |
145 | BOOL keyframe = !CFDictionaryContainsKey(dic, kCMSampleAttachmentKey_NotSync);
146 | uint64_t timeStamp = [((__bridge_transfer NSNumber*)VTFrameRef) longLongValue];
147 |
148 | LFHardwareVideoEncoder *videoEncoder = (__bridge LFHardwareVideoEncoder *)VTref;
149 | if(status != noErr){
150 | return;
151 | }
152 |
153 | if (keyframe && !videoEncoder->sps)
154 | {
155 | CMFormatDescriptionRef format = CMSampleBufferGetFormatDescription(sampleBuffer);
156 |
157 | size_t sparameterSetSize, sparameterSetCount;
158 | const uint8_t *sparameterSet;
159 | OSStatus statusCode = CMVideoFormatDescriptionGetH264ParameterSetAtIndex(format, 0, &sparameterSet, &sparameterSetSize, &sparameterSetCount, 0 );
160 | if (statusCode == noErr)
161 | {
162 | size_t pparameterSetSize, pparameterSetCount;
163 | const uint8_t *pparameterSet;
164 | OSStatus statusCode = CMVideoFormatDescriptionGetH264ParameterSetAtIndex(format, 1, &pparameterSet, &pparameterSetSize, &pparameterSetCount, 0 );
165 | if (statusCode == noErr)
166 | {
167 | videoEncoder->sps = [NSData dataWithBytes:sparameterSet length:sparameterSetSize];
168 | videoEncoder->pps = [NSData dataWithBytes:pparameterSet length:pparameterSetSize];
169 |
170 | if(videoEncoder->enabledWriteVideoFile){
171 | NSMutableData *data = [[NSMutableData alloc] init];
172 | uint8_t header[] = {0x00,0x00,0x00,0x01};
173 | [data appendBytes:header length:4];
174 | [data appendData:videoEncoder->sps];
175 | [data appendBytes:header length:4];
176 | [data appendData:videoEncoder->pps];
177 | fwrite(data.bytes, 1,data.length,videoEncoder->fp);
178 | }
179 |
180 | }
181 | }
182 | }
183 |
184 |
185 | CMBlockBufferRef dataBuffer = CMSampleBufferGetDataBuffer(sampleBuffer);
186 | size_t length, totalLength;
187 | char *dataPointer;
188 | OSStatus statusCodeRet = CMBlockBufferGetDataPointer(dataBuffer, 0, &length, &totalLength, &dataPointer);
189 | if (statusCodeRet == noErr) {
190 | size_t bufferOffset = 0;
191 | static const int AVCCHeaderLength = 4;
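    | // VideoToolbox returns AVCC-formatted samples: each NAL unit carries a 4-byte big-endian length prefix. Walk the block buffer and emit one LFVideoFrame per NAL unit.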
192 | while (bufferOffset < totalLength - AVCCHeaderLength) {
193 | // Read the NAL unit length
194 | uint32_t NALUnitLength = 0;
195 | memcpy(&NALUnitLength, dataPointer + bufferOffset, AVCCHeaderLength);
196 |
197 | NALUnitLength = CFSwapInt32BigToHost(NALUnitLength);
198 |
199 | LFVideoFrame *videoFrame = [LFVideoFrame new];
200 | videoFrame.timestamp = timeStamp;
201 | videoFrame.data = [[NSData alloc] initWithBytes:(dataPointer + bufferOffset + AVCCHeaderLength) length:NALUnitLength];
202 | videoFrame.isKeyFrame = keyframe;
203 | videoFrame.sps = videoEncoder->sps;
204 | videoFrame.pps = videoEncoder->pps;
205 |
206 | if(videoEncoder.h264Delegate && [videoEncoder.h264Delegate respondsToSelector:@selector(videoEncoder:videoFrame:)]){
207 | [videoEncoder.h264Delegate videoEncoder:videoEncoder videoFrame:videoFrame];
208 | }
209 |
210 | if(videoEncoder->enabledWriteVideoFile){
211 | NSMutableData *data = [[NSMutableData alloc] init];
212 | if(keyframe){
213 | uint8_t header[] = {0x00,0x00,0x00,0x01};
214 | [data appendBytes:header length:4];
215 | }else{
216 | uint8_t header[] = {0x00,0x00,0x01};
217 | [data appendBytes:header length:3];
218 | }
219 | [data appendData:videoFrame.data];
220 |
221 | fwrite(data.bytes, 1,data.length,videoEncoder->fp);
222 | }
223 |
224 |
225 | bufferOffset += AVCCHeaderLength + NALUnitLength;
226 |
227 | }
228 |
229 | }
230 | }
231 |
232 | - (void)initForFilePath
233 | {
234 | char *path = [self GetFilePathByfileName:"IOSCamDemo.h264"];
235 | NSLog(@"%s",path);
236 | self->fp = fopen(path,"wb"); free(path); // GetFilePathByfileName returns a malloc'd buffer; release it once the file is open
237 | }
238 |
239 |
240 | - (char*)GetFilePathByfileName:(char*)filename
241 | {
242 | NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask,YES);
243 | NSString *documentsDirectory = [paths objectAtIndex:0];
244 | NSString *strName = [NSString stringWithFormat:@"%s",filename];
245 |
246 | NSString *writablePath = [documentsDirectory stringByAppendingPathComponent:strName];
247 |
248 | NSUInteger len = [writablePath length];
249 |
250 | char *filepath = (char*)malloc(sizeof(char) * (len + 1));
251 |
252 | [writablePath getCString:filepath maxLength:len + 1 encoding:[NSString defaultCStringEncoding]];
253 |
254 | return filepath;
255 | }
256 |
257 | @end
258 |
--------------------------------------------------------------------------------
/LFLiveKit/capture/LFAudioCapture.m:
--------------------------------------------------------------------------------
1 | //
2 | // LFAudioCapture.m
3 | // LFLiveKit
4 | //
5 | // Created by 倾慕 on 16/5/1.
6 | // Copyright © 2016年 倾慕. All rights reserved.
7 | //
8 |
9 | #import "LFAudioCapture.h"
10 | #import <AudioToolbox/AudioToolbox.h>
11 | #import <AVFoundation/AVFoundation.h>
12 |
13 | NSString *const LFAudioComponentFailedToCreateNotification = @"LFAudioComponentFailedToCreateNotification";
14 |
15 | @interface LFAudioCapture ()
16 |
17 | @property (nonatomic, assign) AudioComponentInstance componetInstance;
18 | @property (nonatomic, assign) AudioComponent component;
19 | @property (nonatomic, strong) dispatch_queue_t taskQueue;
20 | @property (nonatomic, assign) BOOL isRunning;
21 | @property (nonatomic, strong) LFLiveAudioConfiguration *configuration;
22 |
23 | @end
24 |
25 | @implementation LFAudioCapture
26 |
27 | #pragma mark -- LifeCycle
28 | - (instancetype)initWithAudioConfiguration:(LFLiveAudioConfiguration *)configuration{
29 | if(self = [super init]){
30 | _configuration = configuration;
31 | self.isRunning = NO;
32 | self.taskQueue = dispatch_queue_create("com.youku.Laifeng.audioCapture.Queue", NULL);
33 |
34 | AVAudioSession *session = [AVAudioSession sharedInstance];
35 | [session setActive:YES withOptions:AVAudioSessionSetActiveOptionNotifyOthersOnDeactivation error:nil];
36 |
37 | [[NSNotificationCenter defaultCenter] addObserver: self
38 | selector: @selector(handleRouteChange:)
39 | name: AVAudioSessionRouteChangeNotification
40 | object: session];
41 | [[NSNotificationCenter defaultCenter] addObserver: self
42 | selector: @selector(handleInterruption:)
43 | name: AVAudioSessionInterruptionNotification
44 | object: session];
45 |
46 | NSError *error = nil;
47 |
48 | [session setCategory:AVAudioSessionCategoryPlayAndRecord withOptions:AVAudioSessionCategoryOptionDefaultToSpeaker | AVAudioSessionCategoryOptionMixWithOthers error:nil];
49 |
50 | [session setMode:AVAudioSessionModeVideoRecording error:&error];
51 |
52 | if (![session setActive:YES error:&error]) {
53 | [self handleAudioComponentCreationFailure];
54 | }
55 |
56 | AudioComponentDescription acd;
57 | acd.componentType = kAudioUnitType_Output;
58 | acd.componentSubType = kAudioUnitSubType_RemoteIO;
59 | acd.componentManufacturer = kAudioUnitManufacturer_Apple;
60 | acd.componentFlags = 0;
61 | acd.componentFlagsMask = 0;
62 |
63 | self.component = AudioComponentFindNext(NULL, &acd);
64 |
65 | OSStatus status = noErr;
66 | status = AudioComponentInstanceNew(self.component, &_componetInstance);
67 |
68 | if (noErr != status) {
69 | [self handleAudioComponentCreationFailure];
70 | }
71 |
72 | UInt32 flagOne = 1;
73 |
74 | AudioUnitSetProperty(self.componetInstance, kAudioOutputUnitProperty_EnableIO, kAudioUnitScope_Input, 1, &flagOne, sizeof(flagOne));
75 |
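    | // Capture format: interleaved 16-bit signed-integer PCM at the configured sample rate and channel count.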
76 | AudioStreamBasicDescription desc = {0};
77 | desc.mSampleRate = _configuration.audioSampleRate;
78 | desc.mFormatID = kAudioFormatLinearPCM;
79 | desc.mFormatFlags = kAudioFormatFlagIsSignedInteger | kAudioFormatFlagsNativeEndian | kAudioFormatFlagIsPacked;
80 | desc.mChannelsPerFrame = (UInt32)_configuration.numberOfChannels;
81 | desc.mFramesPerPacket = 1;
82 | desc.mBitsPerChannel = 16;
83 | desc.mBytesPerFrame = desc.mBitsPerChannel / 8 * desc.mChannelsPerFrame;
84 | desc.mBytesPerPacket = desc.mBytesPerFrame * desc.mFramesPerPacket;
85 |
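    | // Route captured samples to handleInputBuffer via the RemoteIO unit's input callback.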
86 | AURenderCallbackStruct cb;
87 | cb.inputProcRefCon = (__bridge void *)(self);
88 | cb.inputProc = handleInputBuffer;
89 | status = AudioUnitSetProperty(self.componetInstance, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Output, 1, &desc, sizeof(desc));
90 | status = AudioUnitSetProperty(self.componetInstance, kAudioOutputUnitProperty_SetInputCallback, kAudioUnitScope_Global, 1, &cb, sizeof(cb));
91 |
92 | status = AudioUnitInitialize(self.componetInstance);
93 |
94 | if (noErr != status) {
95 | [self handleAudioComponentCreationFailure];
96 | }
97 |
98 | [session setPreferredSampleRate:_configuration.audioSampleRate error:nil];
99 |
100 |
101 | [session setActive:YES error:nil];
102 | }
103 | return self;
104 | }
105 |
106 | - (void)dealloc{
107 | [[NSNotificationCenter defaultCenter] removeObserver:self];
108 |
109 | dispatch_sync(self.taskQueue, ^{
110 | if(self.componetInstance){
111 | AudioOutputUnitStop(self.componetInstance);
112 | AudioComponentInstanceDispose(self.componetInstance);
113 | self.componetInstance = nil;
114 | self.component = nil;
115 | }
116 | });
117 | }
118 |
119 | #pragma mark -- Setter
120 | - (void)setRunning:(BOOL)running{
121 | if(_running == running) return;
122 | _running = running;
123 | if(_running){
124 | dispatch_async(self.taskQueue, ^{
125 | self.isRunning = YES;
126 | NSLog(@"MicrophoneSource: startRunning");
127 | AudioOutputUnitStart(self.componetInstance);
128 | });
129 | }else{
130 | self.isRunning = NO;
131 | }
132 | }
133 |
134 | #pragma mark -- CustomMethod
135 | - (void)handleAudioComponentCreationFailure {
136 | dispatch_async(dispatch_get_main_queue(), ^{
137 | [[NSNotificationCenter defaultCenter] postNotificationName:LFAudioComponentFailedToCreateNotification object:nil];
138 | });
139 | }
140 |
141 | #pragma mark -- NSNotification
142 | - (void)handleRouteChange:(NSNotification *)notification {
143 | AVAudioSession *session = [AVAudioSession sharedInstance];
144 | NSString* seccReason = @"";
145 | NSInteger reason = [[[notification userInfo] objectForKey:AVAudioSessionRouteChangeReasonKey] integerValue];
146 | // AVAudioSessionRouteDescription* prevRoute = [[notification userInfo] objectForKey:AVAudioSessionRouteChangePreviousRouteKey];
147 | switch (reason) {
148 | case AVAudioSessionRouteChangeReasonNoSuitableRouteForCategory:
149 | seccReason = @"The route changed because no suitable route is now available for the specified category.";
150 | break;
151 | case AVAudioSessionRouteChangeReasonWakeFromSleep:
152 | seccReason = @"The route changed when the device woke up from sleep.";
153 | break;
154 | case AVAudioSessionRouteChangeReasonOverride:
155 | seccReason = @"The output route was overridden by the app.";
156 | break;
157 | case AVAudioSessionRouteChangeReasonCategoryChange:
158 | seccReason = @"The category of the session object changed.";
159 | break;
160 | case AVAudioSessionRouteChangeReasonOldDeviceUnavailable:
161 | seccReason = @"The previous audio output path is no longer available.";
162 | break;
163 | case AVAudioSessionRouteChangeReasonNewDeviceAvailable:
164 | seccReason = @"A preferred new audio output path is now available.";
165 | break;
166 | case AVAudioSessionRouteChangeReasonUnknown:
167 | default:
168 | seccReason = @"The reason for the change is unknown.";
169 | break;
170 | }
171 | AVAudioSessionPortDescription *input = session.currentRoute.inputs.firstObject;
172 | if ([input.portType isEqualToString:AVAudioSessionPortHeadsetMic]) {
173 |
174 | }
175 | }
176 |
177 | - (void)handleInterruption:(NSNotification *)notification {
178 | NSInteger reason = 0;
179 | NSString* reasonStr = @"";
180 | if ([notification.name isEqualToString:AVAudioSessionInterruptionNotification]) {
181 | //Posted when an audio interruption occurs.
182 | reason = [[[notification userInfo] objectForKey:AVAudioSessionInterruptionTypeKey] integerValue];
183 | if (reason == AVAudioSessionInterruptionTypeBegan) {
184 | if (self.isRunning) {
185 | dispatch_sync(self.taskQueue, ^{
186 | NSLog(@"MicrophoneSource: stopRunning");
187 | AudioOutputUnitStop(self.componetInstance);
188 | });
189 | }
190 | }
191 |
192 | if (reason == AVAudioSessionInterruptionTypeEnded) {
193 | reasonStr = @"AVAudioSessionInterruptionTypeEnded";
194 | NSNumber *secondReason = [[notification userInfo] objectForKey:AVAudioSessionInterruptionOptionKey];
195 | switch ([secondReason integerValue]) {
196 | case AVAudioSessionInterruptionOptionShouldResume:
197 | if (self.isRunning) {
198 | dispatch_async(self.taskQueue, ^{
199 | NSLog(@"MicrophoneSource: startRunning");
200 | AudioOutputUnitStart(self.componetInstance);
201 | });
202 | }
203 | // Indicates that the audio session is active and immediately ready to be used. Your app can resume the audio operation that was interrupted.
204 | break;
205 | default:
206 | break;
207 | }
208 | }
209 |
210 | }
211 | NSLog(@"handleInterruption: %@ reason %@",[notification name], reasonStr);
212 | }
213 |
214 | #pragma mark -- CallBack
215 | static OSStatus handleInputBuffer(void *inRefCon,
216 | AudioUnitRenderActionFlags *ioActionFlags,
217 | const AudioTimeStamp *inTimeStamp,
218 | UInt32 inBusNumber,
219 | UInt32 inNumberFrames,
220 | AudioBufferList *ioData) {
221 | @autoreleasepool {
222 | LFAudioCapture *source = (__bridge LFAudioCapture *)inRefCon;
223 | if(!source) return -1;
224 |
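    | // Hand AudioUnitRender a single-buffer list; mData == NULL lets the audio unit supply its own buffer for the rendered samples.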
225 | AudioBuffer buffer;
226 | buffer.mData = NULL;
227 | buffer.mDataByteSize = 0;
228 | buffer.mNumberChannels = 1;
229 |
230 | AudioBufferList buffers;
231 | buffers.mNumberBuffers = 1;
232 | buffers.mBuffers[0] = buffer;
233 |
234 | OSStatus status = AudioUnitRender(source.componetInstance,
235 | ioActionFlags,
236 | inTimeStamp,
237 | inBusNumber,
238 | inNumberFrames,
239 | &buffers);
240 |
241 | if (!source.isRunning) {
242 | dispatch_sync(source.taskQueue, ^{
243 | NSLog(@"MicrophoneSource: stopRunning");
244 | AudioOutputUnitStop(source.componetInstance);
245 | });
246 |
247 | return status;
248 | }
249 |
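    | // When muted, keep the capture running but zero the PCM samples instead of dropping the buffer.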
250 | if (source.muted) {
251 | for (int i = 0; i < buffers.mNumberBuffers; i++) {
252 | AudioBuffer ab = buffers.mBuffers[i];
253 | memset(ab.mData, 0, ab.mDataByteSize);
254 | }
255 | }
256 |
257 | if(!status) {
258 | if(source.delegate && [source.delegate respondsToSelector:@selector(captureOutput:audioBuffer:)]){
259 | [source.delegate captureOutput:source audioBuffer:buffers];
260 | }
261 | }
262 | return status;
263 | }
264 | }
265 |
266 | @end
267 |
--------------------------------------------------------------------------------
/LFLiveKit/configuration/LFLiveVideoConfiguration.m:
--------------------------------------------------------------------------------
1 | //
2 | // LFLiveVideoConfiguration.m
3 | // LFLiveKit
4 | //
5 | // Created by 倾慕 on 16/5/1.
6 | // Copyright © 2016年 倾慕. All rights reserved.
7 | //
8 |
9 | #import "LFLiveVideoConfiguration.h"
10 | #import <AVFoundation/AVFoundation.h>
11 |
12 | @implementation LFLiveVideoConfiguration
13 |
14 | #pragma mark -- LifeCycle
15 | + (instancetype)defaultConfiguration{
16 | LFLiveVideoConfiguration *configuration = [LFLiveVideoConfiguration defaultConfigurationForQuality:LFLiveVideoQuality_Default];
17 | return configuration;
18 | }
19 |
20 | + (instancetype)defaultConfigurationForQuality:(LFLiveVideoQuality)videoQuality{
21 | LFLiveVideoConfiguration *configuration = [LFLiveVideoConfiguration defaultConfigurationForQuality:videoQuality orientation:UIInterfaceOrientationPortrait];
22 | return configuration;
23 | }
24 |
25 | + (instancetype)defaultConfigurationForQuality:(LFLiveVideoQuality)videoQuality orientation:(UIInterfaceOrientation)orientation{
26 | LFLiveVideoConfiguration *configuration = [LFLiveVideoConfiguration new];
27 | switch (videoQuality) {
28 | case LFLiveVideoQuality_Low1:
29 | {
30 | configuration.sessionPreset = LFCaptureSessionPreset360x640;
31 | configuration.videoFrameRate = 15;
32 | configuration.videoMaxFrameRate = 15;
33 | configuration.videoMinFrameRate = 10;
34 | configuration.videoBitRate = 500 * 1024;
35 | configuration.videoMaxBitRate = 600 * 1024;
36 | configuration.videoMinBitRate = 250 * 1024;
37 | }
38 | break;
39 | case LFLiveVideoQuality_Low2:
40 | {
41 | configuration.sessionPreset = LFCaptureSessionPreset360x640;
42 | configuration.videoFrameRate = 24;
43 | configuration.videoMaxFrameRate = 24;
44 | configuration.videoMinFrameRate = 12;
45 | configuration.videoBitRate = 800 * 1024;
46 | configuration.videoMaxBitRate = 900 * 1024;
47 | configuration.videoMinBitRate = 500 * 1024;
48 | }
49 | break;
50 | case LFLiveVideoQuality_Low3:
51 | {
52 | configuration.sessionPreset = LFCaptureSessionPreset360x640;
53 | configuration.videoFrameRate = 30;
54 | configuration.videoMaxFrameRate = 30;
55 | configuration.videoMinFrameRate = 15;
56 | configuration.videoBitRate = 800 * 1024;
57 | configuration.videoMaxBitRate = 900 * 1024;
58 | configuration.videoMinBitRate = 500 * 1024;
59 | }
60 | break;
61 | case LFLiveVideoQuality_Medium1:
62 | {
63 | configuration.sessionPreset = LFCaptureSessionPreset540x960;
64 | configuration.videoFrameRate = 15;
65 | configuration.videoMaxFrameRate = 15;
66 | configuration.videoMinFrameRate = 10;
67 | configuration.videoBitRate = 800 * 1024;
68 | configuration.videoMaxBitRate = 900 * 1024;
69 | configuration.videoMinBitRate = 500 * 1024;
70 | }
71 | break;
72 | case LFLiveVideoQuality_Medium2:
73 | {
74 | configuration.sessionPreset = LFCaptureSessionPreset540x960;
75 | configuration.videoFrameRate = 24;
76 | configuration.videoMaxFrameRate = 24;
77 | configuration.videoMinFrameRate = 12;
78 | configuration.videoBitRate = 800 * 1024;
79 | configuration.videoMaxBitRate = 900 * 1024;
80 | configuration.videoMinBitRate = 500 * 1024;
81 | }
82 | break;
83 | case LFLiveVideoQuality_Medium3:
84 | {
85 | configuration.sessionPreset = LFCaptureSessionPreset540x960;
86 | configuration.videoFrameRate = 30;
87 | configuration.videoMaxFrameRate = 30;
88 | configuration.videoMinFrameRate = 15;
89 | configuration.videoBitRate = 1000 * 1024;
90 | configuration.videoMaxBitRate = 1200 * 1024;
91 | configuration.videoMinBitRate = 500 * 1024;
92 | }
93 | break;
94 | case LFLiveVideoQuality_High1:
95 | {
96 | configuration.sessionPreset = LFCaptureSessionPreset720x1280;
97 | configuration.videoFrameRate = 15;
98 | configuration.videoMaxFrameRate = 15;
99 | configuration.videoMinFrameRate = 10;
100 | configuration.videoBitRate = 1000 * 1024;
101 | configuration.videoMaxBitRate = 1200 * 1024;
102 | configuration.videoMinBitRate = 500 * 1024;
103 | }
104 | break;
105 | case LFLiveVideoQuality_High2:
106 | {
107 | configuration.sessionPreset = LFCaptureSessionPreset720x1280;
108 | configuration.videoFrameRate = 24;
109 | configuration.videoMaxFrameRate = 24;
110 | configuration.videoMinFrameRate = 12;
111 | configuration.videoBitRate = 1200 * 1024;
112 | configuration.videoMaxBitRate = 1300 * 1024;
113 | configuration.videoMinBitRate = 800 * 1024;
114 | }
115 | break;
116 | case LFLiveVideoQuality_High3:
117 | {
118 | configuration.sessionPreset = LFCaptureSessionPreset720x1280;
119 | configuration.videoFrameRate = 30;
120 | configuration.videoMaxFrameRate = 30;
121 | configuration.videoMinFrameRate = 15;
122 | configuration.videoBitRate = 1200 * 1024;
123 | configuration.videoMaxBitRate = 1300 * 1024;
124 | configuration.videoMinBitRate = 500 * 1024;
125 | }
126 | break;
127 | default:
128 | break;
129 | }
130 | configuration.sessionPreset = [configuration supportSessionPreset:configuration.sessionPreset];
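    | // Keyframe (GOP) interval: one key frame per two seconds of video at the chosen frame rate.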
131 | configuration.videoMaxKeyframeInterval = configuration.videoFrameRate*2;
132 | configuration.orientation = orientation;
133 | if(orientation == UIInterfaceOrientationPortrait || orientation == UIInterfaceOrientationPortraitUpsideDown){
134 | configuration.videoSize = CGSizeMake(368, 640);
135 | }else{
136 | configuration.videoSize = CGSizeMake(640, 368);
137 | }
138 | return configuration;
139 | }
140 |
141 | #pragma mark -- Setter Getter
142 | - (NSString*)avSessionPreset{
143 | NSString *avSessionPreset = nil;
144 | switch (self.sessionPreset) {
145 | case LFCaptureSessionPreset360x640:
146 | {
147 | avSessionPreset = AVCaptureSessionPreset640x480;
148 | }
149 | break;
150 | case LFCaptureSessionPreset540x960:
151 | {
152 | avSessionPreset = AVCaptureSessionPresetiFrame960x540;
153 | }
154 | break;
155 | case LFCaptureSessionPreset720x1280:
156 | {
157 | avSessionPreset = AVCaptureSessionPreset1280x720;
158 | }
159 | break;
160 | default:{
161 | avSessionPreset = AVCaptureSessionPreset640x480;
162 | }
163 | break;
164 | }
165 | return avSessionPreset;
166 | }
167 |
168 | - (void)setVideoMaxBitRate:(NSUInteger)videoMaxBitRate{
169 | if(videoMaxBitRate <= _videoBitRate) return;
170 | _videoMaxBitRate = videoMaxBitRate;
171 | }
172 |
173 | - (void)setVideoMinBitRate:(NSUInteger)videoMinBitRate{
174 | if(videoMinBitRate >= _videoBitRate) return;
175 | _videoMinBitRate = videoMinBitRate;
176 | }
177 |
178 | - (void)setVideoMaxFrameRate:(NSUInteger)videoMaxFrameRate{
179 | if(videoMaxFrameRate <= _videoFrameRate) return;
180 | _videoMaxFrameRate = videoMaxFrameRate;
181 | }
182 |
183 | - (void)setVideoMinFrameRate:(NSUInteger)videoMinFrameRate{
184 | if(videoMinFrameRate >= _videoFrameRate) return;
185 | _videoMinFrameRate = videoMinFrameRate;
186 | }
187 |
188 |
189 | #pragma mark -- Custom Method
190 | - (LFLiveVideoSessionPreset)supportSessionPreset:(LFLiveVideoSessionPreset)sessionPreset{
191 | NSString *avSessionPreset = [self avSessionPreset];
192 | AVCaptureSession *session = [[AVCaptureSession alloc] init];
193 |
194 | if(![session canSetSessionPreset:avSessionPreset]){
195 | if(sessionPreset == LFCaptureSessionPreset720x1280){
196 | sessionPreset = LFCaptureSessionPreset540x960;
197 | if(![session canSetSessionPreset:AVCaptureSessionPresetiFrame960x540]){ // re-check against the downgraded preset, not the original avSessionPreset
198 | sessionPreset = LFCaptureSessionPreset360x640;
199 | }
200 | }else if(sessionPreset == LFCaptureSessionPreset540x960){
201 | sessionPreset = LFCaptureSessionPreset360x640;
202 | }
203 | }
204 | return sessionPreset;
205 | }
206 |
207 | - (BOOL)isClipVideo{
208 | return self.sessionPreset == LFCaptureSessionPreset360x640;
209 | }
210 |
211 | #pragma mark -- encoder
212 | - (void)encodeWithCoder:(NSCoder *)aCoder {
213 | [aCoder encodeObject:[NSValue valueWithCGSize:self.videoSize] forKey:@"videoSize"];
214 | [aCoder encodeObject:@(self.videoFrameRate) forKey:@"videoFrameRate"];
215 | [aCoder encodeObject:@(self.videoMaxKeyframeInterval) forKey:@"videoMaxKeyframeInterval"];
216 | [aCoder encodeObject:@(self.videoBitRate) forKey:@"videoBitRate"];
217 | [aCoder encodeObject:@(self.sessionPreset) forKey:@"sessionPreset"];
218 | [aCoder encodeObject:@(self.orientation) forKey:@"orientation"];
219 | }
220 |
221 | - (id)initWithCoder:(NSCoder *)aDecoder {
222 | self = [super init];
223 | _videoSize = [[aDecoder decodeObjectForKey:@"videoSize"] CGSizeValue];
224 | _videoFrameRate = [[aDecoder decodeObjectForKey:@"videoFrameRate"] unsignedIntegerValue];
225 | _videoMaxKeyframeInterval = [[aDecoder decodeObjectForKey:@"videoMaxKeyframeInterval"] unsignedIntegerValue];
226 | _videoBitRate = [[aDecoder decodeObjectForKey:@"videoBitRate"] unsignedIntegerValue];
227 | _sessionPreset = [[aDecoder decodeObjectForKey:@"sessionPreset"] unsignedIntegerValue];
228 | _orientation = [[aDecoder decodeObjectForKey:@"orientation"] unsignedIntegerValue];
229 | return self;
230 | }
231 |
232 | - (NSUInteger)hash {
233 | NSUInteger hash = 0;
234 | NSArray *values = @[[NSValue valueWithCGSize:self.videoSize],
235 | @(self.videoFrameRate),
236 | @(self.videoMaxFrameRate),
237 | @(self.videoMinFrameRate),
238 | @(self.videoMaxKeyframeInterval),
239 | @(self.videoBitRate),
240 | @(self.videoMaxBitRate),
241 | @(self.videoMinBitRate),
242 | @(self.isClipVideo),
243 | self.avSessionPreset,
244 | @(self.sessionPreset),
245 | @(self.orientation),];
246 |
247 | for (NSObject *value in values) {
248 | hash ^= value.hash;
249 | }
250 | return hash;
251 | }
252 |
253 | - (BOOL)isEqual:(id)other
254 | {
255 | if (other == self) {
256 | return YES;
257 | } else if (![other isKindOfClass:[LFLiveVideoConfiguration class]]) { // NSObject's isEqual: is identity, which would short-circuit the field comparison below
258 | return NO;
259 | } else {
260 | LFLiveVideoConfiguration *object = other;
261 | return CGSizeEqualToSize(object.videoSize, self.videoSize) &&
262 | object.videoFrameRate == self.videoFrameRate &&
263 | object.videoMaxFrameRate == self.videoMaxFrameRate &&
264 | object.videoMinFrameRate == self.videoMinFrameRate &&
265 | object.videoMaxKeyframeInterval == self.videoMaxKeyframeInterval &&
266 | object.videoBitRate == self.videoBitRate &&
267 | object.videoMaxBitRate == self.videoMaxBitRate &&
268 | object.videoMinBitRate == self.videoMinBitRate &&
269 | object.isClipVideo == self.isClipVideo &&
270 | [object.avSessionPreset isEqualToString:self.avSessionPreset] &&
271 | object.sessionPreset == self.sessionPreset &&
272 | object.orientation == self.orientation;
273 | }
274 | }
275 |
276 | - (id)copyWithZone:(nullable NSZone *)zone{
277 | LFLiveVideoConfiguration *other = [self.class defaultConfiguration];
278 | return other;
279 | }
280 |
281 | - (NSString *)description{
282 | NSMutableString *desc = @"".mutableCopy;
283 | [desc appendFormat:@"<LFLiveVideoConfiguration: %p>",self];
284 | [desc appendFormat:@" videoSize:%@",NSStringFromCGSize(self.videoSize)];
285 | [desc appendFormat:@" videoFrameRate:%zi",self.videoFrameRate];
286 | [desc appendFormat:@" videoMaxFrameRate:%zi",self.videoMaxFrameRate];
287 | [desc appendFormat:@" videoMinFrameRate:%zi",self.videoMinFrameRate];
288 | [desc appendFormat:@" videoMaxKeyframeInterval:%zi",self.videoMaxKeyframeInterval];
289 | [desc appendFormat:@" videoBitRate:%zi",self.videoBitRate];
290 | [desc appendFormat:@" videoMaxBitRate:%zi",self.videoMaxBitRate];
291 | [desc appendFormat:@" videoMinBitRate:%zi",self.videoMinBitRate];
292 | [desc appendFormat:@" isClipVideo:%zi",self.isClipVideo];
293 | [desc appendFormat:@" avSessionPreset:%@",self.avSessionPreset];
294 | [desc appendFormat:@" sessionPreset:%zi",self.sessionPreset];
295 | [desc appendFormat:@" orientation:%zi",self.orientation];
296 | return desc;
297 | }
298 |
299 | @end
300 |
--------------------------------------------------------------------------------