├── .gitignore
├── Example
│   ├── live-demo
│   │   ├── Assets.xcassets
│   │   │   ├── Contents.json
│   │   │   ├── AccentColor.colorset
│   │   │   │   └── Contents.json
│   │   │   └── AppIcon.appiconset
│   │   │       └── Contents.json
│   │   ├── Info.plist
│   │   ├── AppDelegate.swift
│   │   ├── Base.lproj
│   │   │   ├── LaunchScreen.storyboard
│   │   │   └── Main.storyboard
│   │   ├── SceneDelegate.swift
│   │   ├── Controllers
│   │   │   └── TestViewController.swift
│   │   └── ViewController.swift
│   ├── live-demo.xcodeproj
│   │   ├── project.xcworkspace
│   │   │   ├── contents.xcworkspacedata
│   │   │   └── xcshareddata
│   │   │       └── IDEWorkspaceChecks.plist
│   │   ├── xcshareddata
│   │   │   └── xcschemes
│   │   │       └── live-demo.xcscheme
│   │   └── project.pbxproj
│   ├── Podfile
│   ├── live-demo.xcworkspace
│   │   ├── contents.xcworkspacedata
│   │   └── xcshareddata
│   │       └── IDEWorkspaceChecks.plist
│   ├── Podfile.lock
│   └── .gitignore
├── FFLivekit
│   ├── Utils
│   │   ├── FFmpegBlock.swift
│   │   ├── BufferConverter.swift
│   │   └── FFmpegUtils.swift
│   ├── Codecs
│   │   ├── AAC.swift
│   │   ├── H264_VT.swift
│   │   └── Encoder.swift
│   ├── Net
│   │   ├── SRTConnection.swift
│   │   ├── UDPConnection.swift
│   │   ├── RTSPConnection.swift
│   │   ├── RTMPConnection.swift
│   │   └── Connection.swift
│   ├── IO
│   │   ├── FileSource.swift
│   │   ├── Source.swift
│   │   ├── MicrophoneSource.swift
│   │   └── CameraSource.swift
│   └── FFLiveKit.swift
├── LICENSE
├── FFLivekit.podspec
└── README.md
/.gitignore:
--------------------------------------------------------------------------------
1 | .DS_Store
--------------------------------------------------------------------------------
/Example/live-demo/Assets.xcassets/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "info" : {
3 | "author" : "xcode",
4 | "version" : 1
5 | }
6 | }
7 |
--------------------------------------------------------------------------------
/Example/live-demo.xcodeproj/project.xcworkspace/contents.xcworkspacedata:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <Workspace
3 |    version = "1.0">
4 |    <FileRef
5 |       location = "self:">
6 |    </FileRef>
7 | </Workspace>
8 | 
--------------------------------------------------------------------------------
/Example/live-demo/Assets.xcassets/AccentColor.colorset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "colors" : [
3 | {
4 | "idiom" : "universal"
5 | }
6 | ],
7 | "info" : {
8 | "author" : "xcode",
9 | "version" : 1
10 | }
11 | }
12 |
--------------------------------------------------------------------------------
/FFLivekit/Utils/FFmpegBlock.swift:
--------------------------------------------------------------------------------
1 | //
2 | // FFmpegBlock.swift
3 | // FFLivekit
4 | //
5 | // Created by xkal on 11/3/2024.
6 | //
7 |
8 | import Foundation
9 |
10 | public class FFmpegBlock: NSObject {
11 | public var command: String = ""
12 | }
13 |
--------------------------------------------------------------------------------
/FFLivekit/Codecs/AAC.swift:
--------------------------------------------------------------------------------
1 | //
2 | // AAC.swift
3 | // FFLivekit
4 | //
5 | // Created by xkal on 11/3/2024.
6 | //
7 |
8 | import Foundation
9 |
10 | public class AACEncoder: Encoder {
11 | public init() {
12 | super.init(str: "-c:a aac")
13 | }
14 | }
15 |
--------------------------------------------------------------------------------
/Example/live-demo/Assets.xcassets/AppIcon.appiconset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "idiom" : "universal",
5 | "platform" : "ios",
6 | "size" : "1024x1024"
7 | }
8 | ],
9 | "info" : {
10 | "author" : "xcode",
11 | "version" : 1
12 | }
13 | }
14 |
--------------------------------------------------------------------------------
/FFLivekit/Codecs/H264_VT.swift:
--------------------------------------------------------------------------------
1 | //
2 | // H264_VT.swift
3 | // FFLivekit
4 | //
5 | // Created by xkal on 11/3/2024.
6 | //
7 |
8 | import Foundation
9 |
10 | public class H264_VTEncoder: Encoder {
11 | public init() {
12 | super.init(str: "-c:v h264_videotoolbox")
13 | }
14 | }
15 |
--------------------------------------------------------------------------------
/FFLivekit/Codecs/Encoder.swift:
--------------------------------------------------------------------------------
1 | //
2 | // Encoder.swift
3 | // FFLivekit
4 | //
5 | // Created by xkal on 11/3/2024.
6 | //
7 |
8 | import Foundation
9 |
10 | public class Encoder: FFmpegBlock {
11 |
12 | init(str: String) {
13 | super.init()
14 | command = str
15 | }
16 | }
17 |
--------------------------------------------------------------------------------
/Example/Podfile:
--------------------------------------------------------------------------------
1 | # Uncomment the next line to define a global platform for your project
2 | # platform :ios, '9.0'
3 |
4 | target 'live-demo' do
5 | # Comment the next line if you don't want to use dynamic frameworks
6 | use_frameworks!
7 |
8 | # Pods for live-demo
9 | pod 'FFLivekit', :path=> '../'
10 | end
11 |
--------------------------------------------------------------------------------
/Example/live-demo.xcworkspace/contents.xcworkspacedata:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <Workspace
3 |    version = "1.0">
4 |    <FileRef
5 |       location = "group:live-demo.xcodeproj">
6 |    </FileRef>
7 |    <FileRef
8 |       location = "group:Pods/Pods.xcodeproj">
9 |    </FileRef>
10 | </Workspace>
11 | 
--------------------------------------------------------------------------------
/Example/live-demo.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
3 | <plist version="1.0">
4 | <dict>
5 |     <key>IDEDidComputeMac32BitWarning</key>
6 |     <true/>
7 | </dict>
8 | </plist>
9 | 
--------------------------------------------------------------------------------
/Example/live-demo.xcodeproj/project.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
3 | <plist version="1.0">
4 | <dict>
5 |     <key>IDEDidComputeMac32BitWarning</key>
6 |     <true/>
7 | </dict>
8 | </plist>
9 | 
--------------------------------------------------------------------------------
/FFLivekit/Net/SRTConnection.swift:
--------------------------------------------------------------------------------
1 |
2 | import Foundation
3 |
4 | public class SRTConnection: Connection {
5 |
6 | public init(baseUrl: String) throws {
7 | guard let url = URL(string: baseUrl), url.scheme == "srt" else {
8 | throw ConnectionError.SchemeError
9 | }
10 | super.init(fileType: FileType.MPEGTS.rawValue, baseUrl: baseUrl)
11 | }
12 | }
13 |
--------------------------------------------------------------------------------
/FFLivekit/Net/UDPConnection.swift:
--------------------------------------------------------------------------------
1 |
2 | import Foundation
3 |
4 | public class UDPConnection: Connection {
5 |
6 | public init(baseUrl: String) throws {
7 | guard let url = URL(string: baseUrl), url.scheme == "udp" else {
8 | throw ConnectionError.SchemeError
9 | }
10 | super.init(fileType: FileType.MPEGTS.rawValue, baseUrl: baseUrl)
11 | }
12 | }
13 |
--------------------------------------------------------------------------------
/FFLivekit/IO/FileSource.swift:
--------------------------------------------------------------------------------
1 | //
2 | // FileSource.swift
3 | // live-demo
4 | //
5 | // Created by xkal on 11/3/2024.
6 | //
7 |
8 | import Foundation
9 |
10 | public class FileSource: Source {
11 | let path: String
12 |
13 | public init(filetype: String, url: String) {
14 | self.path = url
15 | super.init()
16 | command = "-f \(filetype) -i \(url)"
17 | encoder = Encoder(str: "-c:v h264 -c:a aac")
18 | }
19 | }
20 |
--------------------------------------------------------------------------------
/FFLivekit/Net/RTSPConnection.swift:
--------------------------------------------------------------------------------
1 | //
2 | // RTSPConnection.swift
3 | // live-demo
4 | //
5 | // Created by xkal on 10/3/2024.
6 | //
7 |
8 | import Foundation
9 |
10 | public class RTSPConnection: Connection {
11 | public init(baseUrl: String) throws {
12 | guard let url = URL(string: baseUrl), url.scheme == "rtsp" else {
13 | throw ConnectionError.SchemeError
14 | }
15 | super.init(fileType: FileType.RTSP.rawValue, baseUrl: baseUrl)
16 | }
17 | }
18 |
--------------------------------------------------------------------------------
/FFLivekit/IO/Source.swift:
--------------------------------------------------------------------------------
1 | //
2 | // Source.swift
3 | // live-demo
4 | //
5 | // Created by xkal on 11/3/2024.
6 | //
7 |
8 | import Foundation
9 |
10 | public protocol SourceDelegate: AnyObject {
11 | func _Source(_ source: Source, onData: Data)
12 | func _Source(_ source: Source, extra: [String: Any])
13 | }
14 |
15 | public class Source: FFmpegBlock {
16 | weak var delegate: SourceDelegate?
17 | var encoder: Encoder?
18 |
19 | public func start() {}
20 | public func stop() {}
21 | }
22 |
--------------------------------------------------------------------------------
/FFLivekit/Net/RTMPConnection.swift:
--------------------------------------------------------------------------------
1 | //
2 | // RTMPConnection.swift
3 | // live-demo
4 | //
5 | // Created by xkal on 10/3/2024.
6 | //
7 |
8 | import Foundation
9 |
10 | public class RTMPConnection: Connection {
11 |
12 | public init(baseUrl: String) throws {
13 | guard let url = URL(string: baseUrl), url.scheme == "rtmp" || url.scheme == "rtmps" else {
14 | throw ConnectionError.SchemeError
15 | }
16 | super.init(fileType: FileType.RTMP.rawValue, baseUrl: baseUrl)
17 | }
18 | }
19 |
--------------------------------------------------------------------------------
/Example/Podfile.lock:
--------------------------------------------------------------------------------
1 | PODS:
2 | - FFLivekit (0.1.0):
3 | - ffmpeg-kit-srt
4 | - ffmpeg-kit-srt (2.0.0)
5 |
6 | DEPENDENCIES:
7 | - FFLivekit (from `../`)
8 |
9 | SPEC REPOS:
10 | trunk:
11 | - ffmpeg-kit-srt
12 |
13 | EXTERNAL SOURCES:
14 | FFLivekit:
15 | :path: "../"
16 |
17 | SPEC CHECKSUMS:
18 | FFLivekit: 86d749f291cd8d688de5ededa7c5ef14b04b4463
19 | ffmpeg-kit-srt: 21d77ca2a936c9cc41119832e3bce0eca80c425b
20 |
21 | PODFILE CHECKSUM: 5b0da832d0454c538fea7c3deeb3d8b6cf1a8d12
22 |
23 | COCOAPODS: 1.16.2
24 |
--------------------------------------------------------------------------------
/FFLivekit/Net/Connection.swift:
--------------------------------------------------------------------------------
1 | //
2 | // Connection.swift
3 | // live-demo
4 | //
5 | // Created by xkal on 10/3/2024.
6 | //
7 |
8 | import Foundation
9 |
10 | enum ConnectionError: Error {
11 | case SchemeError
12 | }
13 |
14 | public enum FileType: String {
15 | case RTSP = "rtsp"
16 | case RTMP = "flv"
17 | case MPEGTS = "mpegts"
18 | }
19 |
20 | public class Connection {
21 |
22 | let fileType: String!
23 | let baseUrl: String!
24 |
25 | public init(fileType: String, baseUrl: String) {
26 | self.fileType = fileType
27 | self.baseUrl = baseUrl
28 | }
29 | }
30 |
--------------------------------------------------------------------------------
/Example/.gitignore:
--------------------------------------------------------------------------------
1 | /Pods
2 | .DS_Store
3 |
4 | # Xcode
5 | #
6 | # gitignore contributors: remember to update Global/Xcode.gitignore, Objective-C.gitignore & Swift.gitignore
7 |
8 | ## User settings
9 | xcuserdata/
10 |
11 | ## compatibility with Xcode 8 and earlier (ignoring not required starting Xcode 9)
12 | *.xcscmblueprint
13 | *.xccheckout
14 |
15 | ## compatibility with Xcode 3 and earlier (ignoring not required starting Xcode 4)
16 | build/
17 | DerivedData/
18 | *.moved-aside
19 | *.pbxuser
20 | !default.pbxuser
21 | *.mode1v3
22 | !default.mode1v3
23 | *.mode2v3
24 | !default.mode2v3
25 | *.perspectivev3
26 | !default.perspectivev3
27 |
28 | ## Obj-C/Swift specific
29 | *.hmap
30 |
31 | ## App packaging
32 | *.ipa
33 | *.dSYM.zip
34 | *.dSYM
--------------------------------------------------------------------------------
/Example/live-demo/Info.plist:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
3 | <plist version="1.0">
4 | <dict>
5 |     <key>NSAppTransportSecurity</key>
6 |     <dict>
7 |         <key>NSAllowsArbitraryLoads</key>
8 |         <true/>
9 |     </dict>
10 |     <key>UIApplicationSceneManifest</key>
11 |     <dict>
12 |         <key>UIApplicationSupportsMultipleScenes</key>
13 |         <false/>
14 |         <key>UISceneConfigurations</key>
15 |         <dict>
16 |             <key>UIWindowSceneSessionRoleApplication</key>
17 |             <array>
18 |                 <dict>
19 |                     <key>UISceneConfigurationName</key>
20 |                     <string>Default Configuration</string>
21 |                     <key>UISceneDelegateClassName</key>
22 |                     <string>$(PRODUCT_MODULE_NAME).SceneDelegate</string>
23 |                     <key>UISceneStoryboardFile</key>
24 |                     <string>Main</string>
25 |                 </dict>
26 |             </array>
27 |         </dict>
28 |     </dict>
29 |     <key>UIBackgroundModes</key>
30 |     <array>
31 |         <string>audio</string>
32 |         <string>voip</string>
33 |     </array>
34 | </dict>
35 | </plist>
36 | 
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2024 Sudayn
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/FFLivekit.podspec:
--------------------------------------------------------------------------------
1 | Pod::Spec.new do |s|
2 | s.name = 'FFLivekit'
3 | s.version = '0.1.0'
4 | s.summary = "An FFmpeg-based live streaming package for publishing live streams to RTMP or RTSP servers"
5 | s.description = "FFLivekit is a robust live streaming package that seamlessly integrates with FFmpeg to enable the effortless publishing of live streams to RTMP (Real-Time Messaging Protocol) or RTSP (Real-Time Streaming Protocol) servers. Leveraging the powerful capabilities of FFmpeg, this package empowers developers to create high-quality, real-time video broadcasts with ease. Whether you're building a live streaming platform, video conferencing application, or any real-time video communication tool, FFLivekit simplifies the integration of live streaming features, providing a reliable solution for delivering dynamic content to your audience."
6 | s.homepage = 'https://github.com/sxudan/FFLivekit'
7 | s.license = { :type => 'MIT', :file => 'LICENSE' }
8 | s.author = { 'Sudan Suwal' => 'sudosuwal@gmail.com' }
9 | s.source = { :git => 'https://github.com/sxudan/FFLivekit.git', :tag => s.version.to_s }
10 | s.swift_version = '5.0'
11 | s.platforms = { :ios => '13.0' }
12 | s.source_files = 'FFLivekit/**/*.{h,swift}'
13 | s.dependency 'ffmpeg-kit-srt'
14 | end
15 |
--------------------------------------------------------------------------------
/Example/live-demo/AppDelegate.swift:
--------------------------------------------------------------------------------
1 | //
2 | // AppDelegate.swift
3 | // live-demo
4 | //
5 | // Created by xkal on 26/2/2024.
6 | //
7 |
8 | import UIKit
9 |
10 | @main
11 | class AppDelegate: UIResponder, UIApplicationDelegate {
12 |
13 |
14 |
15 | func application(_ application: UIApplication, didFinishLaunchingWithOptions launchOptions: [UIApplication.LaunchOptionsKey: Any]?) -> Bool {
16 | // Override point for customization after application launch.
17 | return true
18 | }
19 |
20 | // MARK: UISceneSession Lifecycle
21 |
22 | func application(_ application: UIApplication, configurationForConnecting connectingSceneSession: UISceneSession, options: UIScene.ConnectionOptions) -> UISceneConfiguration {
23 | // Called when a new scene session is being created.
24 | // Use this method to select a configuration to create the new scene with.
25 | return UISceneConfiguration(name: "Default Configuration", sessionRole: connectingSceneSession.role)
26 | }
27 |
28 | func application(_ application: UIApplication, didDiscardSceneSessions sceneSessions: Set<UISceneSession>) {
29 | // Called when the user discards a scene session.
30 | // If any sessions were discarded while the application was not running, this will be called shortly after application:didFinishLaunchingWithOptions.
31 | // Use this method to release any resources that were specific to the discarded scenes, as they will not return.
32 | }
33 |
34 |
35 | }
36 |
37 |
--------------------------------------------------------------------------------
/Example/live-demo/Base.lproj/LaunchScreen.storyboard:
--------------------------------------------------------------------------------
(storyboard XML stripped in this dump)
--------------------------------------------------------------------------------
/Example/live-demo/SceneDelegate.swift:
--------------------------------------------------------------------------------
1 | //
2 | // SceneDelegate.swift
3 | // live-demo
4 | //
5 | // Created by xkal on 26/2/2024.
6 | //
7 |
8 | import UIKit
9 |
10 | class SceneDelegate: UIResponder, UIWindowSceneDelegate {
11 |
12 | var window: UIWindow?
13 |
14 |
15 | func scene(_ scene: UIScene, willConnectTo session: UISceneSession, options connectionOptions: UIScene.ConnectionOptions) {
16 | // Use this method to optionally configure and attach the UIWindow `window` to the provided UIWindowScene `scene`.
17 | // If using a storyboard, the `window` property will automatically be initialized and attached to the scene.
18 | // This delegate does not imply the connecting scene or session are new (see `application:configurationForConnectingSceneSession` instead).
19 | guard let _ = (scene as? UIWindowScene) else { return }
20 | }
21 |
22 | func sceneDidDisconnect(_ scene: UIScene) {
23 | // Called as the scene is being released by the system.
24 | // This occurs shortly after the scene enters the background, or when its session is discarded.
25 | // Release any resources associated with this scene that can be re-created the next time the scene connects.
26 | // The scene may re-connect later, as its session was not necessarily discarded (see `application:didDiscardSceneSessions` instead).
27 | }
28 |
29 | func sceneDidBecomeActive(_ scene: UIScene) {
30 | // Called when the scene has moved from an inactive state to an active state.
31 | // Use this method to restart any tasks that were paused (or not yet started) when the scene was inactive.
32 | }
33 |
34 | func sceneWillResignActive(_ scene: UIScene) {
35 | // Called when the scene will move from an active state to an inactive state.
36 | // This may occur due to temporary interruptions (ex. an incoming phone call).
37 | }
38 |
39 | func sceneWillEnterForeground(_ scene: UIScene) {
40 | // Called as the scene transitions from the background to the foreground.
41 | // Use this method to undo the changes made on entering the background.
42 | }
43 |
44 | func sceneDidEnterBackground(_ scene: UIScene) {
45 | // Called as the scene transitions from the foreground to the background.
46 | // Use this method to save data, release shared resources, and store enough scene-specific state information
47 | // to restore the scene back to its current state.
48 | }
49 |
50 |
51 | }
52 |
53 |
--------------------------------------------------------------------------------
/FFLivekit/IO/MicrophoneSource.swift:
--------------------------------------------------------------------------------
1 | //
2 | // MicrophoneSource.swift
3 | // live-demo
4 | //
5 | // Created by xkal on 10/3/2024.
6 | //
7 |
8 | import AVFoundation
9 |
10 | //public protocol MicrophoneSourceDelegate {
11 | // func _MicrophoneSource(onData: Data)
12 | //}
13 |
14 | public class MicrophoneSource: Source {
15 |
16 | private var audioEngine: AVAudioEngine?
17 | let backgroundAudioQueue = DispatchQueue.global(qos: .background)
18 | // var delegate: MicrophoneSourceDelegate?
19 |
20 | public init(sampleRate: Double = 48000, encoder: Encoder = AACEncoder()) throws {
21 | super.init()
22 | command = "-f s16le -ar \(sampleRate) -ac 1 -itsoffset -5 -i %audioPipe%"
23 | self.encoder = encoder
24 | setupSession(sampleRate: sampleRate)
25 | try setupAudioEngine(sampleRate: sampleRate)
26 | }
27 |
28 |
29 | private func setupSession(sampleRate: Double) {
30 | /// Start the capture session
31 | do {
32 | try AVAudioSession.sharedInstance().setCategory(.playAndRecord, mode: .videoChat, options: [.allowAirPlay, .allowBluetooth])
33 | try AVAudioSession.sharedInstance().setPreferredSampleRate(sampleRate) // Set your preferred sample rate here
34 | try AVAudioSession.sharedInstance().setActive(true)
35 | } catch {
36 | print("Failed to set audio session settings: \(error.localizedDescription)")
37 | }
38 | }
39 |
40 | private func setupAudioEngine(sampleRate: Double, channels: Int = 1) throws {
41 | audioEngine = AVAudioEngine()
42 | let inputNode = audioEngine!.inputNode
43 | let defaultFormat = AVAudioFormat(commonFormat: .pcmFormatInt16, sampleRate: inputNode.inputFormat(forBus: 0).sampleRate, channels: 1, interleaved: false)!
44 | print("Default sample rate \(inputNode.inputFormat(forBus: 0).sampleRate)")
45 | let outputFormat = AVAudioFormat(commonFormat: .pcmFormatInt16, sampleRate: sampleRate, channels: 1, interleaved: false)!
46 |
47 |
48 |
49 | inputNode.installTap(onBus: 0, bufferSize: 1024, format: defaultFormat) { buffer, time in
50 | let convertedBuffer = outputFormat.sampleRate != defaultFormat.sampleRate ? BufferConverter.convert(from: defaultFormat, to: outputFormat, buffer: buffer) : buffer
51 |
52 | let audioData = BufferConverter.bufferToData(buffer: convertedBuffer)
53 | self.backgroundAudioQueue.async {
54 | self.delegate?._Source(self, onData: audioData)
55 | }
56 | }
57 | }
58 |
59 | public override func start() {
60 | do {
61 | // audioEngine?.prepare()
62 | try audioEngine?.start()
63 | } catch {
64 | print(error)
65 | }
66 | }
67 |
68 | public override func stop() {
69 | audioEngine?.stop()
70 | }
71 | }
72 |
--------------------------------------------------------------------------------
/FFLivekit/Utils/BufferConverter.swift:
--------------------------------------------------------------------------------
1 | //
2 | // BufferConverter.swift
3 | // live-demo
4 | //
5 | // Created by xkal on 10/3/2024.
6 | //
7 |
8 |
9 | import AVFAudio
10 |
11 | class BufferConverter {
12 | class func bufferToData(buffer: AVAudioPCMBuffer) -> Data {
13 | let channelData = buffer.int16ChannelData![0]
14 | let dataSize = Int(buffer.frameLength) * MemoryLayout<Int16>.size
15 | let data = Data(bytes: channelData, count: dataSize)
16 | return data
17 | }
18 |
19 | class func extractBGRAData(from sampleBuffer: CMSampleBuffer) -> Data? {
20 | guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else {
21 | return nil
22 | }
23 | CVPixelBufferLockBaseAddress(pixelBuffer, .readOnly)
24 | defer {
25 | CVPixelBufferUnlockBaseAddress(pixelBuffer, .readOnly)
26 | }
27 | guard let baseAddress = CVPixelBufferGetBaseAddress(pixelBuffer) else {
28 | return nil
29 | }
30 | let width = CVPixelBufferGetWidth(pixelBuffer)
31 | let height = CVPixelBufferGetHeight(pixelBuffer)
32 | let bytesPerRow = CVPixelBufferGetBytesPerRow(pixelBuffer)
33 | let byteBuffer = UnsafeBufferPointer(start: baseAddress.assumingMemoryBound(to: UInt8.self), count: bytesPerRow * height)
34 | let rawPointer = UnsafeRawPointer(byteBuffer.baseAddress!)
35 | return Data(bytes: rawPointer, count: bytesPerRow * height)
36 | }
37 |
38 | class func createEmptyRGBAData(width: Int, height: Int) -> Data {
39 | let bytesPerPixel = 4 // Assuming BGRA format (8 bits per channel)
40 | let bitsPerComponent = 8
41 | let bytesPerRow = width * bytesPerPixel
42 | let totalBytes = height * bytesPerRow
43 |
44 | // Allocate a single zero-filled Data object (note: sized at twice the computed frame size)
45 | let pixelData = Data(count: totalBytes * 2)
46 | return pixelData
47 | }
48 |
49 | class func convert(from inputFormat: AVAudioFormat, to outputFormat: AVAudioFormat, buffer: AVAudioPCMBuffer) -> AVAudioPCMBuffer {
50 | let converter = AVAudioConverter(from: inputFormat, to: outputFormat)!
51 | var newBufferAvailable = true
52 | let inputCallback: AVAudioConverterInputBlock = { inNumPackets, outStatus in
53 | if newBufferAvailable {
54 | outStatus.pointee = .haveData
55 | newBufferAvailable = false
56 | return buffer
57 | } else {
58 | outStatus.pointee = .noDataNow
59 | return nil
60 | }
61 | }
62 | let convertedBuffer = AVAudioPCMBuffer(pcmFormat: outputFormat, frameCapacity: AVAudioFrameCount(outputFormat.sampleRate) * buffer.frameLength / AVAudioFrameCount(buffer.format.sampleRate))!
63 | var error: NSError?
64 | let status = converter.convert(to: convertedBuffer, error: &error, withInputFrom: inputCallback)
65 | assert(status != .error)
66 | return convertedBuffer
67 | }
68 | }
69 |
--------------------------------------------------------------------------------
/Example/live-demo.xcodeproj/xcshareddata/xcschemes/live-demo.xcscheme:
--------------------------------------------------------------------------------
(Xcode scheme XML stripped in this dump)
--------------------------------------------------------------------------------
/FFLivekit/FFLiveKit.swift:
--------------------------------------------------------------------------------
1 | //
2 | // FFLiveKit.swift
3 | // live-demo
4 | //
5 | // Created by xkal on 10/3/2024.
6 | //
7 |
8 | import Foundation
9 |
10 | public enum FFLiveKitError: Error {
11 | case notInitialized
12 | case emptyUrl
13 | case noConnection
14 | case noSources
15 | case sourceMissingEncoder(Source)
16 | case ioError(message: String)
17 | }
18 |
19 | public protocol FFLiveKitDelegate: FFmpegUtilsDelegate {
20 | // Inherits all methods from FFmpegUtilsDelegate
21 | }
22 |
23 | public class FFLiveKit {
24 |
25 | private var connection: Connection?
26 | private var url = ""
27 | var ffmpegUtil: FFmpegUtils?
28 | private weak var delegate: FFLiveKitDelegate?
29 |
30 | private var sources: [Source] = []
31 |
32 | private var options: [FFLivekitSettings] = []
33 |
34 | public init(options: [FFLivekitSettings] = []) {
35 | self.options = options
36 | }
37 |
38 |
39 | public func connect(connection: Connection) throws {
40 | guard !connection.baseUrl.isEmpty else {
41 | throw FFLiveKitError.emptyUrl
42 | }
43 | self.connection = connection
44 | }
45 |
46 | public func prepare(delegate: FFLiveKitDelegate?) throws {
47 | guard let connection = connection else {
48 | throw FFLiveKitError.noConnection
49 | }
50 |
51 | guard !sources.isEmpty else {
52 | throw FFLiveKitError.noSources
53 | }
54 |
55 | self.delegate = delegate
56 | ffmpegUtil = FFmpegUtils(outputFormat: connection.fileType, url: connection.baseUrl, delegate: delegate, options: options)
57 |
58 | // Connect sources to FFmpeg pipeline
59 | for source in sources {
60 | source.delegate = ffmpegUtil
61 | }
62 | }
63 |
64 | public func addSources(_ sources: [Source]) {
65 | self.sources = sources
66 | }
67 |
68 | // public func addSource(camera: CameraSource?, microphone: MicrophoneSource?, file: FileSource?) {
69 | // self.cameraSource = camera
70 | // self.microphoneSource = microphone
71 | // self.fileSource = file
72 | //
73 | // }
74 |
75 | /// Publishes the stream to the server. The stream path and query string
76 | /// (for example /mystream?pkt_size=1024) come from the connection's baseUrl,
77 | /// so this method takes no parameters. It validates that every source has an
78 | /// encoder, starts all sources, and then starts the FFmpeg pipeline.
79 | public func publish() throws {
80 | guard let connection = connection else {
81 | throw FFLiveKitError.notInitialized
82 | }
83 |
84 | guard let ffmpegUtil = ffmpegUtil else {
85 | throw FFLiveKitError.notInitialized
86 | }
87 |
88 | // Validate all sources have encoders
89 | for source in sources {
90 | guard source.encoder != nil else {
91 | throw FFLiveKitError.sourceMissingEncoder(source)
92 | }
93 | }
94 |
95 | let inputs = sources.map { $0.command }
96 | let encoders = sources.compactMap { $0.encoder?.command }
97 |
98 | guard encoders.count == sources.count else {
99 | throw FFLiveKitError.notInitialized
100 | }
101 |
102 | // Start all sources
103 | sources.forEach { $0.start() }
104 |
105 | // Start FFmpeg pipeline
106 | ffmpegUtil.start(inputcommands: inputs, encoders: encoders)
107 | }
108 |
109 | public func stop() {
110 | for source in sources {
111 | source.stop()
112 | }
113 | ffmpegUtil?.stop()
114 | }
115 | }
116 |
--------------------------------------------------------------------------------
/Example/live-demo/Controllers/TestViewController.swift:
--------------------------------------------------------------------------------
1 | //
2 | // TestViewController.swift
3 | // live-demo
4 | //
5 | // Created by xkal on 10/3/2024.
6 | //
7 |
8 | import UIKit
9 | import FFLivekit
10 |
11 | class TestViewController: UIViewController, FFLiveKitDelegate {
12 |
13 |
14 | @IBOutlet weak var actionBtn: UIControl!
15 |
16 | @IBOutlet weak var fpsLabel: UILabel!
17 | @IBOutlet weak var audioRecLabel: UILabel!
18 | @IBOutlet weak var videoRecLabel: UILabel!
19 |
20 | let cameraSource = CameraSource(position: .front, preset: .hd1280x720)
21 | let microphoneSource = try! MicrophoneSource()
22 | let fileSource = FileSource(filetype: "mp4", url: "http://commondatastorage.googleapis.com/gtv-videos-bucket/sample/BigBuckBunny.mp4")
23 | let ffLiveKit = FFLiveKit(options: [.outputVideoSize((360, 640)), .outputVideoBitrate("400k")])
24 | var isRecording = false
25 |
26 |
27 |
28 | override func viewDidLoad() {
29 | super.viewDidLoad()
30 |
31 | // Setup camera preview
32 | cameraSource.startPreview(previewView: self.view)
33 |
34 | // Add sources
35 | ffLiveKit.addSources([cameraSource, microphoneSource])
36 |
37 | // Initialize connections (example - choose one)
38 | do {
39 | // You can choose any of these connection types:
40 | let srtConnection = try SRTConnection(baseUrl: "srt://192.168.0.135:8890?streamid=publish:mystream&pkt_size=1316")
41 | // let rtmpConnection = try RTMPConnection(baseUrl: "rtmp://192.168.0.135:1935/mystream")
42 | // let rtspConnection = try RTSPConnection(baseUrl: "rtsp://192.168.0.135:8554/mystream")
43 | // let udpConnection = try UDPConnection(baseUrl: "udp://192.168.1.100:1234?pkt_size=1316")
44 |
45 | try ffLiveKit.connect(connection: srtConnection)
46 | try ffLiveKit.prepare(delegate: self)
47 | } catch {
48 | print("Failed to setup FFLiveKit: \(error)")
49 | }
50 |
51 | initStartActionBtn()
52 | }
53 |
54 | func _FFLiveKit(didChange status: RecordingState) {
55 | print(status)
56 | if status == .RequestRecording {
57 | initLoadingActionBtn()
58 | } else if status == .Recording {
59 | isRecording = true
60 | initStopActionBtn()
61 | } else if status == .RequestStop {
62 | initLoadingActionBtn()
63 | } else {
64 | isRecording = false
65 | initStartActionBtn()
66 | }
67 | }
68 |
69 | func _FFLiveKit(onStats stats: FFStat) {
70 | self.fpsLabel.text = "FPS: \(stats.fps)"
71 | }
72 |
73 | func _FFLiveKit(onError error: String) {
74 | print("Error \(error)")
75 | }
76 |
77 | @IBAction func onTap(_ sender: Any) {
78 | if !isRecording {
79 | do {
80 | try ffLiveKit.publish()
81 | } catch {
82 | print("Failed to publish: \(error)")
83 | // Show error to user
84 | let alert = UIAlertController(title: "Error", message: error.localizedDescription, preferredStyle: .alert)
85 | alert.addAction(UIAlertAction(title: "OK", style: .default))
86 | present(alert, animated: true)
87 | }
88 | } else {
89 | ffLiveKit.stop()
90 | }
91 | }
92 |
93 | @IBAction func toggleTorch(_ sender: Any) {
94 | cameraSource.toggleTorch()
95 | }
96 |
97 | @IBAction func onCameraSwitch(_ sender: Any) {
98 | cameraSource.switchCamera()
99 | }
100 |
101 | func initStartActionBtn() {
102 | actionBtn.layer.opacity = 1
103 | actionBtn.layer.cornerRadius = 25
104 | actionBtn.layer.masksToBounds = true
105 | actionBtn.isEnabled = true
106 | }
107 |
108 | func initLoadingActionBtn() {
109 | actionBtn.layer.opacity = 0.5
110 | actionBtn.isEnabled = false
111 | }
112 |
113 | func initStopActionBtn() {
114 | actionBtn.layer.opacity = 1
115 | actionBtn.layer.cornerRadius = 5
116 | actionBtn.layer.masksToBounds = false
117 | actionBtn.isEnabled = true
118 | }
119 | }
120 |
--------------------------------------------------------------------------------
/FFLivekit/IO/CameraSource.swift:
--------------------------------------------------------------------------------
1 | //
2 | // CameraSource.swift
3 | // live-demo
4 | //
5 | // Created by xkal on 10/3/2024.
6 | //
7 |
8 | import AVFoundation
9 | import UIKit
10 |
11 | //public protocol CameraSourceDelegate {
12 | // func _CameraSource(onData: Data)
13 | // func _CameraSource(switchStarted: Bool)
14 | // func _CameraSource(switchEnded: Bool)
15 | //}
16 |
17 | public class CameraSource: Source, AVCaptureVideoDataOutputSampleBufferDelegate {
18 |
19 | let videoOutput = AVCaptureVideoDataOutput();
20 | private let previewLayer = AVCaptureVideoPreviewLayer()
21 | var session: AVCaptureSession?
22 | private var dimensions: (Int32, Int32) = (0 , 0)
23 | let backgroundVideoQueue = DispatchQueue.global(qos: .background)
24 | private var running = false
25 | // public var delegate: CameraSourceDelegate?
26 | var currentCameraPosition: AVCaptureDevice.Position?
27 |
28 | public init(position: AVCaptureDevice.Position, preset: AVCaptureSession.Preset = .hd1920x1080, encoder: Encoder = H264_VTEncoder()) {
29 | super.init()
30 | /// setup session
31 | /// calculates the dimensions
32 | session = setupCaptureSession(position: position, preset: preset)
33 | command = "-f rawvideo -pixel_format bgra -video_size \(dimensions.0)x\(dimensions.1) -framerate 30 -i %videoPipe%"
34 | self.encoder = encoder
35 | ///set delegate
36 | videoOutput.setSampleBufferDelegate(self, queue: backgroundVideoQueue)
37 | DispatchQueue.global().async {
38 | /// Set the session to output video frames
39 | self.session?.startRunning()
40 | }
41 | }
42 |
43 | public func switchCamera() {
44 | // self.delegate?._CameraSource(switchStarted: true)
45 | self.delegate?._Source(self, extra: ["switchStarted": true])
46 | session?.beginConfiguration()
47 | // Remove existing input
48 | if let currentInput = session?.inputs.first as? AVCaptureInput {
49 | session?.removeInput(currentInput)
50 | }
51 |
52 | // Toggle camera position
53 | let position: AVCaptureDevice.Position = currentCameraPosition == .back ? .front : .back
54 | self.currentCameraPosition = position
55 | // Set up new video input
56 | guard let videoDevice = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: position) else {
57 | print("Failed to get AVCaptureDevice for video input.")
58 | return
59 | }
60 | do {
61 | let videoInput = try AVCaptureDeviceInput(device: videoDevice)
62 | if session?.canAddInput(videoInput) ?? false {
63 | session?.addInput(videoInput)
64 | } else {
65 | print("Failed to add video input to session.")
66 | }
67 | } catch {
68 | print("Error creating AVCaptureDeviceInput: \(error.localizedDescription)")
69 | }
70 | session?.commitConfiguration()
71 | self.delegate?._Source(self, extra: ["switchStarted": false])
72 | // self.delegate?._CameraSource(switchEnded: true)
73 | }
74 |
75 | private func addCamera(session: AVCaptureSession, position: AVCaptureDevice.Position) -> AVCaptureDeviceInput? {
76 | self.currentCameraPosition = position
77 | do {
78 | /// Check if the device has a camera
79 | guard let camera = AVCaptureDevice.default(.builtInWideAngleCamera ,for: .video, position: position) else {
80 | print("Camera not available")
81 | return nil
82 | }
83 | /// Create input from the camera
84 | let input = try AVCaptureDeviceInput(device: camera)
85 |
86 | if session.canAddInput(input) {
87 | session.addInput(input)
88 | }
89 | return input
90 | } catch {
91 | print(error)
92 | }
93 | return nil
94 | }
95 |
96 | private func setupCaptureSession(position: AVCaptureDevice.Position, preset: AVCaptureSession.Preset) -> AVCaptureSession? {
97 | do {
98 | // Create a session and add the input
99 | let session = AVCaptureSession()
100 | /// add camera to session input
101 | let cameraInput = addCamera(session: session, position: position)
102 | guard let camera = cameraInput?.device else {
103 | return nil
104 | }
105 | /// add videooutput as session output
106 | videoOutput.videoSettings = [(kCVPixelBufferPixelFormatTypeKey as String) : NSNumber(value: kCVPixelFormatType_32BGRA as UInt32),]
107 | if session.canAddOutput(videoOutput) {
108 | session.sessionPreset = preset
109 | session.addOutput(videoOutput)
110 |
111 | }
112 |
113 | /// set framerate 30
114 | do {
115 | try camera.lockForConfiguration()
116 | let desiredFrameRate = CMTimeMake(value: 1, timescale: 30)
117 | camera.activeVideoMinFrameDuration = desiredFrameRate
118 | camera.activeVideoMaxFrameDuration = desiredFrameRate
119 | camera.unlockForConfiguration()
120 |
121 | } catch {
122 | print("Error accessing video device: \(error)")
123 | }
124 | /// just print the current resolution
125 | let activeFormat = camera.activeFormat.formatDescription
126 | let dimensions = CMVideoFormatDescriptionGetDimensions(activeFormat)
127 | let width = dimensions.width
128 | let height = dimensions.height
129 | print("Resolution: \(width) x \(height)")
130 | self.dimensions = (width , height)
131 |
132 | return session
133 | } catch {
134 | print("Error setting up AVCaptureDeviceInput: \(error)")
135 | return nil
136 | }
137 | }
138 | public func getDimensions() -> (Int, Int) {
139 | return (Int(self.dimensions.0), Int(self.dimensions.1))
140 | }
141 |
142 | public func startPreview(previewView: UIView?) {
143 | /// Set the preview layer to display the camera feed
144 | if let view = previewView {
145 | DispatchQueue.main.async {
146 | self.previewLayer.session = self.session
147 | self.previewLayer.videoGravity = .resizeAspectFill
148 | /// Add the preview layer to your view's layer
149 | view.layer.insertSublayer(self.previewLayer, at: 0)
150 | /// Optional: Adjust the frame of the preview layer
151 | self.previewLayer.frame = view.layer.bounds
152 | }
153 | }
154 | }
155 |
156 | public override func start() {
157 | self.running = true
158 | }
159 |
160 | public override func stop() {
161 | self.running = false
162 | }
163 |
164 | public func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
165 | if output is AVCaptureVideoDataOutput {
166 | if running, let data = BufferConverter.extractBGRAData(from: sampleBuffer) {
167 | self.delegate?._Source(self, onData: data)
168 | }
169 | }
170 | }
171 |
172 | /// check if torch is on or off
173 | public var isTorchOn: Bool {
174 | if let device = AVCaptureDevice.default(for: .video) {
175 | return device.hasTorch && device.isTorchActive
176 | }
177 | return false
178 | }
179 |
180 | /// Toggle torch
181 | public func toggleTorch(level: Float = 1.0) {
182 | guard let device = AVCaptureDevice.default(for: .video) else { return }
183 |
184 | do {
185 | try device.lockForConfiguration()
186 |
187 | if device.isTorchActive {
188 | device.torchMode = .off
189 | } else {
190 | try device.setTorchModeOn(level: level)
191 | }
192 |
193 | device.unlockForConfiguration()
194 | } catch {
195 | print("Error toggling torch: \(error.localizedDescription)")
196 | }
197 | }
198 | }
199 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # FFLiveKit
2 |
3 | A Swift library for live streaming video and audio from iOS devices to RTMP, RTSP, SRT, or UDP servers using FFmpegKit. This project demonstrates how to leverage FFmpegKit to capture, encode, and transmit real-time media streams.
4 |
5 | ## Overview
6 |
7 | FFLiveKit provides a high-level API for live streaming that abstracts away the complexity of FFmpeg command-line operations. It handles:
8 | - Camera and microphone capture
9 | - Real-time encoding (H.264 video, AAC audio)
10 | - Network streaming to various protocols
11 | - Buffer management and data pipeline optimization
12 |
13 | This is also a research project to evaluate how well FFmpegKit performs compared to existing live streaming packages like [Haishinkit](https://github.com/shogo4405/HaishinKit.swift).
14 |
15 | # Features
16 |
17 | Inputs
18 |
19 | - [x] Camera
20 | - [x] Microphone
21 | - [x] File
22 |
23 | RTMP
24 |
25 | - [x] Ingest to RTMP server
26 | - [ ] Playback (Todo)
27 |
28 | RTSP
29 |
30 | - [x] Ingest to RTSP server
31 | - [ ] Playback (Todo)
32 |
33 | SRT
34 |
35 | - [x] Ingest to SRT server
36 |
37 | HLS
38 |
39 | - [ ] Playback (Todo)
40 |
41 | UDP Support
42 |
43 | - [x] Ingest using UDP protocol
44 |
45 | Other minor features
46 | - [x] Toggle Torch
47 | - [x] Switch Camera
48 | - [x] Background Publishing
49 |
50 |
51 | # How It Works
52 |
53 | ## Architecture
54 |
55 | FFLiveKit uses a pipeline architecture that efficiently moves data from iOS capture sources through FFmpeg to streaming servers:
56 |
57 | ```
58 | [Camera/Mic Sources] → [Data Buffers] → [Named Pipes] → [FFmpeg] → [Streaming Server]
59 | ```
60 |
61 | ## Data Flow
62 |
63 | ### 1. Source Capture
64 | - **CameraSource**: Captures video frames using `AVCaptureSession`, converts to BGRA format
65 | - **MicrophoneSource**: Captures audio using `AVAudioEngine`, converts to PCM format
66 | - Data is delivered via delegate callbacks to `FFmpegUtils`
67 |
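A minimal sketch of a custom source following this pattern (the class name and input flags are illustrative; such a source has to live inside the FFLivekit module, since `command`, `encoder`, and `delegate` are internal):

```Swift
import Foundation

// Illustrative custom source that feeds raw s16le audio obtained elsewhere.
// CameraSource and MicrophoneSource in FFLivekit/IO follow the same pattern.
public class RawAudioSource: Source {

    public init(sampleRate: Double = 48000) {
        super.init()
        // FFmpeg input arguments for this source; %audioPipe% is replaced by
        // FFmpegUtils with the named pipe it creates for this input.
        command = "-f s16le -ar \(sampleRate) -ac 1 -i %audioPipe%"
        encoder = AACEncoder()
    }

    func push(_ bytes: Data) {
        // Hand data to FFmpegUtils through the SourceDelegate callback.
        delegate?._Source(self, onData: bytes)
    }
}
```
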
68 | ### 2. State Management
69 | The system operates in four states:
70 | - **Normal**: Idle, no streaming
71 | - **RequestRecording**: Initializing pipes and FFmpeg process
72 | - **Recording**: Actively streaming (data is buffered and fed to FFmpeg)
73 | - **RequestStop**: Cleaning up and shutting down
74 |
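The corresponding `RecordingState` enum is defined in `FFmpegUtils` (not reproduced in this dump); based on the states above and the cases handled in `TestViewController`, its shape is effectively:

```Swift
// Assumed case list; the canonical definition lives in FFmpegUtils.
public enum RecordingState {
    case Normal            // idle, not streaming
    case RequestRecording  // pipes and FFmpeg session being set up
    case Recording         // actively streaming
    case RequestStop       // tearing down
}
```
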
75 | ### 3. Data Pipeline
76 |
77 | #### During RequestRecording State
78 | - Data is written **directly** to pipes (no buffering)
79 | - This ensures FFmpeg receives initial frames immediately
80 | - Helps FFmpeg establish the stream connection faster
81 |
82 | #### During Recording State
83 | - Data is **buffered** in memory (`videoDataBuffer`, `audioDataBuffer`)
84 | - A 10ms timer continuously feeds buffered data to pipes
85 | - This provides smooth, continuous data flow to FFmpeg
86 |
87 | ### 4. Timer-Based Feeding
88 | - A background timer runs every 10ms (`handleFeed()`)
89 | - On each tick:
90 | - Locks buffers
91 | - Writes all buffered video data to video pipe
92 | - Writes all buffered audio data to audio pipe
93 | - Unlocks buffers
94 | - This ensures FFmpeg receives data at a steady rate
95 |
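A condensed sketch of that feed loop (variable names are illustrative; the real logic lives in `FFmpegUtils.handleFeed()`):

```Swift
import Foundation

// Illustrative 10 ms feed loop: drain the in-memory buffers into the pipes.
let bufferLock = NSLock()
var videoDataBuffer = Data()
var audioDataBuffer = Data()
var videoPipeHandle: FileHandle?   // persistent handle to the video FIFO
var audioPipeHandle: FileHandle?   // persistent handle to the audio FIFO

let feedTimer = Timer(timeInterval: 0.01, repeats: true) { _ in
    bufferLock.lock()
    defer { bufferLock.unlock() }

    if !videoDataBuffer.isEmpty {
        videoPipeHandle?.write(videoDataBuffer)
        videoDataBuffer.removeAll(keepingCapacity: true)
    }
    if !audioDataBuffer.isEmpty {
        audioPipeHandle?.write(audioDataBuffer)
        audioDataBuffer.removeAll(keepingCapacity: true)
    }
}
RunLoop.current.add(feedTimer, forMode: .common)
```
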
96 | ### 5. Named Pipes
97 | - FFmpegKit creates named pipes (FIFOs) for video and audio
98 | - File descriptors are kept open to prevent EOF
99 | - Persistent `FileHandle` objects are used for efficient writes
100 | - Falls back to creating new handles if persistent ones fail
101 |
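A sketch of the pipe setup, assuming ffmpeg-kit's pipe helper `FFmpegKitConfig.registerNewFFmpegPipe()` (the module name is an assumption, and the fallback path mentioned above is omitted):

```Swift
import ffmpegkit   // module name of the ffmpeg-kit-srt pod (assumption)
import Foundation

// Ask FFmpegKit for a named pipe (FIFO) and keep one writer handle open on it
// so FFmpeg never sees EOF between feed-timer ticks.
guard let videoPipePath = FFmpegKitConfig.registerNewFFmpegPipe() else {
    fatalError("could not create video pipe")
}

// Opening a FIFO for writing blocks until the reader (FFmpeg) opens it, so this
// is done after the FFmpeg session that uses "-i <videoPipePath>" has started.
let videoPipeHandle = FileHandle(forWritingAtPath: videoPipePath)
```
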
102 | ### 6. FFmpeg Processing
103 | - FFmpeg reads from named pipes as input streams
104 | - Encodes video (H.264 via VideoToolbox) and audio (AAC)
105 | - Applies filters (scaling, rotation, bitrate control)
106 | - Streams to destination server (RTMP/RTSP/SRT/UDP)
107 |
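Put together, the assembled command looks roughly like this (illustrative values; `FFmpegUtils` builds the real string from each source's `command`, each encoder's `command`, the options, and the connection's file type and URL):

```
ffmpeg -f rawvideo -pixel_format bgra -video_size 1280x720 -framerate 30 -i <videoPipe> \
       -f s16le -ar 48000 -ac 1 -i <audioPipe> \
       -c:v h264_videotoolbox -c:a aac \
       -vf scale=360:640 -b:v 400k \
       -f flv rtmp://192.168.1.100:1935/mystream
```
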
108 | ## Buffer Management
109 |
110 | ### Overflow Handling
111 | - Video buffer: Max 100MB, keeps last 50MB if overflow (FIFO)
112 | - Audio buffer: Max 50MB, keeps last 25MB if overflow (FIFO)
113 | - Prevents memory issues while maintaining recent data
114 |
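A sketch of the trim policy (the constants mirror the limits above; the helper name is illustrative):

```Swift
import Foundation

let maxVideoBufferBytes = 100 * 1024 * 1024   // 100 MB cap
let keepVideoBufferBytes = 50 * 1024 * 1024   // keep the newest 50 MB on overflow

func appendTrimmingOverflow(_ data: Data, to buffer: inout Data) {
    buffer.append(data)
    if buffer.count > maxVideoBufferBytes {
        // FIFO behaviour: drop the oldest bytes, keep only the most recent ones.
        buffer = buffer.suffix(keepVideoBufferBytes)
    }
}
```
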
115 | ### Background Mode
116 | - When app goes to background, sends blank frames to maintain stream
117 | - Prevents server disconnection during app backgrounding
118 | - Clears buffers when returning to foreground
119 |
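A sketch of that background handling, reusing `BufferConverter.createEmptyRGBAData` from this repo (observer wiring and frame size are illustrative):

```Swift
import UIKit

// While backgrounded, substitute blank BGRA frames so the 10 ms feed timer keeps
// writing to the pipes and the server does not time the stream out.
var isInBackground = false
var videoDataBuffer = Data()

NotificationCenter.default.addObserver(
    forName: UIApplication.didEnterBackgroundNotification, object: nil, queue: .main
) { _ in isInBackground = true }

NotificationCenter.default.addObserver(
    forName: UIApplication.willEnterForegroundNotification, object: nil, queue: .main
) { _ in
    isInBackground = false
    videoDataBuffer.removeAll()   // drop stale frames captured while backgrounded
}

// In each feed tick:
if isInBackground {
    videoDataBuffer.append(BufferConverter.createEmptyRGBAData(width: 360, height: 640))
}
```
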
120 | ## Key Components
121 |
122 | ### FFLiveKit
123 | - Main entry point
124 | - Manages sources, connection, and FFmpeg utilities
125 | - Provides high-level API for streaming control
126 |
127 | ### FFmpegUtils
128 | - Core pipeline manager
129 | - Handles state transitions
130 | - Manages buffers, pipes, and timer
131 | - Implements `SourceDelegate` to receive data from sources
132 |
133 | ### Source Classes
134 | - **CameraSource**: AVFoundation-based video capture
135 | - **MicrophoneSource**: AVAudioEngine-based audio capture
136 | - **FileSource**: File-based input (for testing)
137 |
138 | ### Connection Classes
139 | - Protocol-specific connection handlers (RTMP, RTSP, SRT, UDP)
140 | - Validates URLs and sets appropriate FFmpeg output format
141 |
142 | ## Performance Optimizations
143 |
144 | 1. **Persistent File Handles**: Reuses file handles instead of creating new ones for each write
145 | 2. **Efficient Buffering**: FIFO-based overflow handling prevents memory bloat
146 | 3. **Thread Safety**: Uses locks and dedicated queues for buffer operations
147 | 4. **Timer-Based Feeding**: Ensures steady data rate to FFmpeg (10ms intervals)
148 | 5. **Direct Writes During Init**: Reduces latency when starting stream
149 |
150 | # Usage
151 |
152 | ### Initialize the Source and FFLiveKit
153 | ```Swift
154 | let cameraSource = CameraSource(position: .front)
155 | let microphoneSource = try! MicrophoneSource()
156 | /// add options
157 | let ffLiveKit = FFLiveKit(options: [.outputVideoSize((360, 640)), .outputVideoBitrate("400k")])
158 | ```
159 |
160 | ### Initialize the connections according to your need
161 | ```Swift
162 | let srtConnection = try! SRTConnection(baseUrl: "srt://192.168.1.100:8890?streamid=publish:mystream&pkt_size=1316")
163 | let rtmpConnection = try! RTMPConnection(baseUrl: "rtmp://192.168.1.100:1935/mystream")
164 | let rtspConnection = try! RTSPConnection(baseUrl: "rtsp://192.168.1.100:8554/mystream")
165 | let udpConnection = try! UDPConnection(baseUrl: "udp://192.168.1.100:1234?pkt_size=1316")
166 | ```
167 |
168 | ### Connect
169 | ```Swift
170 | try! ffLiveKit.connect(connection: rtmpConnection)
171 | ```
172 |
173 | ### Add sources and prepare
174 | ```Swift
175 | ffLiveKit.addSources([cameraSource, microphoneSource])
176 | cameraSource.startPreview(previewView: self.view)
177 | try ffLiveKit.prepare(delegate: self)
178 | ```
179 |
180 | ### Start or Stop
181 |
182 | ```Swift
183 | do {
184 | if !isRecording {
185 | try ffLiveKit.publish()
186 | } else {
187 | ffLiveKit.stop()
188 | }
189 | } catch {
190 | print("Error: \(error)")
191 | }
192 | ```
193 |
194 | ### Delegates
195 |
196 | ```Swift
197 | func _FFLiveKit(didChange status: RecordingState)
198 | func _FFLiveKit(onStats stats: FFStat)
199 | func _FFLiveKit(onError error: String)
200 | ```
201 |
202 | ### Options
203 |
204 | ```Swift
205 | public enum FFLivekitSettings {
206 | case outputVideoFramerate(Int)
207 | case outputVideoPixelFormat(String)
208 | case outputVideoSize((Int, Int))
209 | /// example "500k" or "2M"
210 | case outputVideoBitrate(String)
211 | /// example "128k"
212 | case outputAudioBitrate(String)
213 |
214 | /// nil for no transpose
215 | /// 0 - Rotate 90 degrees counterclockwise and flip vertically.
216 | /// 1 - Rotate 90 degrees clockwise.
217 | /// 2 - Rotate 90 degrees counterclockwise.
218 | /// 3 - Rotate 90 degrees clockwise and flip vertically.
219 | case outputVideoTranspose(Int?)
220 | }
221 | ```
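
For example, a 30 fps portrait 360x640 stream using only the cases shown above:

```Swift
let ffLiveKit = FFLiveKit(options: [
    .outputVideoFramerate(30),
    .outputVideoSize((360, 640)),
    .outputVideoBitrate("500k"),
    .outputAudioBitrate("128k"),
    .outputVideoTranspose(1)   // rotate 90 degrees clockwise
])
```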
222 |
223 | # Demo
224 |
225 | https://private-user-images.githubusercontent.com/31989781/311260826-f0fa60e3-41a7-4ac7-90fb-385a5ab6b97f.mp4?jwt=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpc3MiOiJnaXRodWIuY29tIiwiYXVkIjoicmF3LmdpdGh1YnVzZXJjb250ZW50LmNvbSIsImtleSI6ImtleTUiLCJleHAiOjE3MDk5NjQ0NzcsIm5iZiI6MTcwOTk2NDE3NywicGF0aCI6Ii8zMTk4OTc4MS8zMTEyNjA4MjYtZjBmYTYwZTMtNDFhNy00YWM3LTkwZmItMzg1YTVhYjZiOTdmLm1wND9YLUFtei1BbGdvcml0aG09QVdTNC1ITUFDLVNIQTI1NiZYLUFtei1DcmVkZW50aWFsPUFLSUFWQ09EWUxTQTUzUFFLNFpBJTJGMjAyNDAzMDklMkZ1cy1lYXN0LTElMkZzMyUyRmF3czRfcmVxdWVzdCZYLUFtei1EYXRlPTIwMjQwMzA5VDA2MDI1N1omWC1BbXotRXhwaXJlcz0zMDAmWC1BbXotU2lnbmF0dXJlPWY2OWM4OTg0ZDIxNzdhNmQ3MTU2Yjk2MDdlZjFhZTAzMjc4ZGM5ZDhiN2NjMDNlMjM3ZDJhZDc4MzMzMWZjMTAmWC1BbXotU2lnbmVkSGVhZGVycz1ob3N0JmFjdG9yX2lkPTAma2V5X2lkPTAmcmVwb19pZD0wIn0.gmtVos0Xx--lM74gZzrQ_gSwr3lnWqE5uvaMcOisjyk
226 |
227 |
228 | # Research
229 |
230 | Please find the research at https://github.com/sxudan/FFLivekit/blob/research/README.md
231 |
232 |
233 |
--------------------------------------------------------------------------------
/Example/live-demo/Base.lproj/Main.storyboard:
--------------------------------------------------------------------------------
(storyboard XML stripped in this dump)
--------------------------------------------------------------------------------
/Example/live-demo.xcodeproj/project.pbxproj:
--------------------------------------------------------------------------------
1 | // !$*UTF8*$!
2 | {
3 | archiveVersion = 1;
4 | classes = {
5 | };
6 | objectVersion = 56;
7 | objects = {
8 |
9 | /* Begin PBXBuildFile section */
10 | 7830AB979939E8876AC5B5CF /* Pods_live_demo.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = B46632C18825C406E474741D /* Pods_live_demo.framework */; };
11 | D30E1B1F2B9D666F00D78458 /* TestViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = D30E1B1E2B9D666F00D78458 /* TestViewController.swift */; };
12 | D3A2572D2B8BFD2800B69B54 /* AppDelegate.swift in Sources */ = {isa = PBXBuildFile; fileRef = D3A2572C2B8BFD2800B69B54 /* AppDelegate.swift */; };
13 | D3A2572F2B8BFD2800B69B54 /* SceneDelegate.swift in Sources */ = {isa = PBXBuildFile; fileRef = D3A2572E2B8BFD2800B69B54 /* SceneDelegate.swift */; };
14 | D3A257312B8BFD2800B69B54 /* ViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = D3A257302B8BFD2800B69B54 /* ViewController.swift */; };
15 | D3A257342B8BFD2800B69B54 /* Main.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = D3A257322B8BFD2800B69B54 /* Main.storyboard */; };
16 | D3A257362B8BFD2900B69B54 /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = D3A257352B8BFD2900B69B54 /* Assets.xcassets */; };
17 | D3A257392B8BFD2900B69B54 /* LaunchScreen.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = D3A257372B8BFD2900B69B54 /* LaunchScreen.storyboard */; };
18 | /* End PBXBuildFile section */
19 |
20 | /* Begin PBXFileReference section */
21 | 17AE0299CEBC43D9EFBBEE6D /* Pods-live-demo.release.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-live-demo.release.xcconfig"; path = "Target Support Files/Pods-live-demo/Pods-live-demo.release.xcconfig"; sourceTree = "<group>"; };
22 | B46632C18825C406E474741D /* Pods_live_demo.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; includeInIndex = 0; path = Pods_live_demo.framework; sourceTree = BUILT_PRODUCTS_DIR; };
23 | D30E1B1E2B9D666F00D78458 /* TestViewController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = TestViewController.swift; sourceTree = "<group>"; };
24 | D3A257292B8BFD2800B69B54 /* live-demo.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = "live-demo.app"; sourceTree = BUILT_PRODUCTS_DIR; };
25 | D3A2572C2B8BFD2800B69B54 /* AppDelegate.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AppDelegate.swift; sourceTree = "<group>"; };
26 | D3A2572E2B8BFD2800B69B54 /* SceneDelegate.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SceneDelegate.swift; sourceTree = "<group>"; };
27 | D3A257302B8BFD2800B69B54 /* ViewController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ViewController.swift; sourceTree = "<group>"; };
28 | D3A257332B8BFD2800B69B54 /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/Main.storyboard; sourceTree = "<group>"; };
29 | D3A257352B8BFD2900B69B54 /* Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = "<group>"; };
30 | D3A257382B8BFD2900B69B54 /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/LaunchScreen.storyboard; sourceTree = "<group>"; };
31 | D3A2573A2B8BFD2900B69B54 /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = "<group>"; };
32 | E713281D99B7811BD664DE09 /* Pods-live-demo.debug.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-live-demo.debug.xcconfig"; path = "Target Support Files/Pods-live-demo/Pods-live-demo.debug.xcconfig"; sourceTree = "<group>"; };
33 | /* End PBXFileReference section */
34 |
35 | /* Begin PBXFrameworksBuildPhase section */
36 | D3A257262B8BFD2800B69B54 /* Frameworks */ = {
37 | isa = PBXFrameworksBuildPhase;
38 | buildActionMask = 2147483647;
39 | files = (
40 | 7830AB979939E8876AC5B5CF /* Pods_live_demo.framework in Frameworks */,
41 | );
42 | runOnlyForDeploymentPostprocessing = 0;
43 | };
44 | /* End PBXFrameworksBuildPhase section */
45 |
46 | /* Begin PBXGroup section */
47 | 1CD4C3FCD5D1099670A50651 /* Frameworks */ = {
48 | isa = PBXGroup;
49 | children = (
50 | B46632C18825C406E474741D /* Pods_live_demo.framework */,
51 | );
52 | name = Frameworks;
53 | sourceTree = "<group>";
54 | };
55 | D30E1B0B2B9AA2C100D78458 /* Controllers */ = {
56 | isa = PBXGroup;
57 | children = (
58 | D30E1B1E2B9D666F00D78458 /* TestViewController.swift */,
59 | );
60 | path = Controllers;
61 | sourceTree = "<group>";
62 | };
63 | D3A257202B8BFD2700B69B54 = {
64 | isa = PBXGroup;
65 | children = (
66 | D3A2572B2B8BFD2800B69B54 /* live-demo */,
67 | D3A2572A2B8BFD2800B69B54 /* Products */,
68 | D7B75980CB2C39AF1917AF57 /* Pods */,
69 | 1CD4C3FCD5D1099670A50651 /* Frameworks */,
70 | );
71 | sourceTree = "<group>";
72 | };
73 | D3A2572A2B8BFD2800B69B54 /* Products */ = {
74 | isa = PBXGroup;
75 | children = (
76 | D3A257292B8BFD2800B69B54 /* live-demo.app */,
77 | );
78 | name = Products;
79 | sourceTree = "<group>";
80 | };
81 | D3A2572B2B8BFD2800B69B54 /* live-demo */ = {
82 | isa = PBXGroup;
83 | children = (
84 | D3A2572C2B8BFD2800B69B54 /* AppDelegate.swift */,
85 | D3A2572E2B8BFD2800B69B54 /* SceneDelegate.swift */,
86 | D3A257302B8BFD2800B69B54 /* ViewController.swift */,
87 | D3A257322B8BFD2800B69B54 /* Main.storyboard */,
88 | D3A257352B8BFD2900B69B54 /* Assets.xcassets */,
89 | D3A257372B8BFD2900B69B54 /* LaunchScreen.storyboard */,
90 | D3A2573A2B8BFD2900B69B54 /* Info.plist */,
91 | D30E1B0B2B9AA2C100D78458 /* Controllers */,
92 | );
93 | path = "live-demo";
94 | sourceTree = "<group>";
95 | };
96 | D7B75980CB2C39AF1917AF57 /* Pods */ = {
97 | isa = PBXGroup;
98 | children = (
99 | E713281D99B7811BD664DE09 /* Pods-live-demo.debug.xcconfig */,
100 | 17AE0299CEBC43D9EFBBEE6D /* Pods-live-demo.release.xcconfig */,
101 | );
102 | path = Pods;
103 | sourceTree = "<group>";
104 | };
105 | /* End PBXGroup section */
106 |
107 | /* Begin PBXNativeTarget section */
108 | D3A257282B8BFD2800B69B54 /* live-demo */ = {
109 | isa = PBXNativeTarget;
110 | buildConfigurationList = D3A2573D2B8BFD2900B69B54 /* Build configuration list for PBXNativeTarget "live-demo" */;
111 | buildPhases = (
112 | 3D76742FBB012588D9B58228 /* [CP] Check Pods Manifest.lock */,
113 | D3A257252B8BFD2800B69B54 /* Sources */,
114 | D3A257262B8BFD2800B69B54 /* Frameworks */,
115 | D3A257272B8BFD2800B69B54 /* Resources */,
116 | F2DDB71F21D5BD3446B1463D /* [CP] Embed Pods Frameworks */,
117 | );
118 | buildRules = (
119 | );
120 | dependencies = (
121 | );
122 | name = "live-demo";
123 | productName = "live-demo";
124 | productReference = D3A257292B8BFD2800B69B54 /* live-demo.app */;
125 | productType = "com.apple.product-type.application";
126 | };
127 | /* End PBXNativeTarget section */
128 |
129 | /* Begin PBXProject section */
130 | D3A257212B8BFD2700B69B54 /* Project object */ = {
131 | isa = PBXProject;
132 | attributes = {
133 | BuildIndependentTargetsInParallel = 1;
134 | LastSwiftUpdateCheck = 1520;
135 | LastUpgradeCheck = 1520;
136 | TargetAttributes = {
137 | D3A257282B8BFD2800B69B54 = {
138 | CreatedOnToolsVersion = 15.2;
139 | };
140 | };
141 | };
142 | buildConfigurationList = D3A257242B8BFD2700B69B54 /* Build configuration list for PBXProject "live-demo" */;
143 | compatibilityVersion = "Xcode 14.0";
144 | developmentRegion = en;
145 | hasScannedForEncodings = 0;
146 | knownRegions = (
147 | en,
148 | Base,
149 | );
150 | mainGroup = D3A257202B8BFD2700B69B54;
151 | productRefGroup = D3A2572A2B8BFD2800B69B54 /* Products */;
152 | projectDirPath = "";
153 | projectRoot = "";
154 | targets = (
155 | D3A257282B8BFD2800B69B54 /* live-demo */,
156 | );
157 | };
158 | /* End PBXProject section */
159 |
160 | /* Begin PBXResourcesBuildPhase section */
161 | D3A257272B8BFD2800B69B54 /* Resources */ = {
162 | isa = PBXResourcesBuildPhase;
163 | buildActionMask = 2147483647;
164 | files = (
165 | D3A257392B8BFD2900B69B54 /* LaunchScreen.storyboard in Resources */,
166 | D3A257362B8BFD2900B69B54 /* Assets.xcassets in Resources */,
167 | D3A257342B8BFD2800B69B54 /* Main.storyboard in Resources */,
168 | );
169 | runOnlyForDeploymentPostprocessing = 0;
170 | };
171 | /* End PBXResourcesBuildPhase section */
172 |
173 | /* Begin PBXShellScriptBuildPhase section */
174 | 3D76742FBB012588D9B58228 /* [CP] Check Pods Manifest.lock */ = {
175 | isa = PBXShellScriptBuildPhase;
176 | buildActionMask = 2147483647;
177 | files = (
178 | );
179 | inputFileListPaths = (
180 | );
181 | inputPaths = (
182 | "${PODS_PODFILE_DIR_PATH}/Podfile.lock",
183 | "${PODS_ROOT}/Manifest.lock",
184 | );
185 | name = "[CP] Check Pods Manifest.lock";
186 | outputFileListPaths = (
187 | );
188 | outputPaths = (
189 | "$(DERIVED_FILE_DIR)/Pods-live-demo-checkManifestLockResult.txt",
190 | );
191 | runOnlyForDeploymentPostprocessing = 0;
192 | shellPath = /bin/sh;
193 | shellScript = "diff \"${PODS_PODFILE_DIR_PATH}/Podfile.lock\" \"${PODS_ROOT}/Manifest.lock\" > /dev/null\nif [ $? != 0 ] ; then\n # print error to STDERR\n echo \"error: The sandbox is not in sync with the Podfile.lock. Run 'pod install' or update your CocoaPods installation.\" >&2\n exit 1\nfi\n# This output is used by Xcode 'outputs' to avoid re-running this script phase.\necho \"SUCCESS\" > \"${SCRIPT_OUTPUT_FILE_0}\"\n";
194 | showEnvVarsInLog = 0;
195 | };
196 | F2DDB71F21D5BD3446B1463D /* [CP] Embed Pods Frameworks */ = {
197 | isa = PBXShellScriptBuildPhase;
198 | buildActionMask = 2147483647;
199 | files = (
200 | );
201 | inputFileListPaths = (
202 | "${PODS_ROOT}/Target Support Files/Pods-live-demo/Pods-live-demo-frameworks-${CONFIGURATION}-input-files.xcfilelist",
203 | );
204 | name = "[CP] Embed Pods Frameworks";
205 | outputFileListPaths = (
206 | "${PODS_ROOT}/Target Support Files/Pods-live-demo/Pods-live-demo-frameworks-${CONFIGURATION}-output-files.xcfilelist",
207 | );
208 | runOnlyForDeploymentPostprocessing = 0;
209 | shellPath = /bin/sh;
210 | shellScript = "\"${PODS_ROOT}/Target Support Files/Pods-live-demo/Pods-live-demo-frameworks.sh\"\n";
211 | showEnvVarsInLog = 0;
212 | };
213 | /* End PBXShellScriptBuildPhase section */
214 |
215 | /* Begin PBXSourcesBuildPhase section */
216 | D3A257252B8BFD2800B69B54 /* Sources */ = {
217 | isa = PBXSourcesBuildPhase;
218 | buildActionMask = 2147483647;
219 | files = (
220 | D3A257312B8BFD2800B69B54 /* ViewController.swift in Sources */,
221 | D3A2572D2B8BFD2800B69B54 /* AppDelegate.swift in Sources */,
222 | D3A2572F2B8BFD2800B69B54 /* SceneDelegate.swift in Sources */,
223 | D30E1B1F2B9D666F00D78458 /* TestViewController.swift in Sources */,
224 | );
225 | runOnlyForDeploymentPostprocessing = 0;
226 | };
227 | /* End PBXSourcesBuildPhase section */
228 |
229 | /* Begin PBXVariantGroup section */
230 | D3A257322B8BFD2800B69B54 /* Main.storyboard */ = {
231 | isa = PBXVariantGroup;
232 | children = (
233 | D3A257332B8BFD2800B69B54 /* Base */,
234 | );
235 | name = Main.storyboard;
236 | 			sourceTree = "<group>";
237 | };
238 | D3A257372B8BFD2900B69B54 /* LaunchScreen.storyboard */ = {
239 | isa = PBXVariantGroup;
240 | children = (
241 | D3A257382B8BFD2900B69B54 /* Base */,
242 | );
243 | name = LaunchScreen.storyboard;
244 | 			sourceTree = "<group>";
245 | };
246 | /* End PBXVariantGroup section */
247 |
248 | /* Begin XCBuildConfiguration section */
249 | D3A2573B2B8BFD2900B69B54 /* Debug */ = {
250 | isa = XCBuildConfiguration;
251 | buildSettings = {
252 | ALWAYS_SEARCH_USER_PATHS = NO;
253 | ASSETCATALOG_COMPILER_GENERATE_SWIFT_ASSET_SYMBOL_EXTENSIONS = YES;
254 | CLANG_ANALYZER_NONNULL = YES;
255 | CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE;
256 | CLANG_CXX_LANGUAGE_STANDARD = "gnu++20";
257 | CLANG_ENABLE_MODULES = YES;
258 | CLANG_ENABLE_OBJC_ARC = YES;
259 | CLANG_ENABLE_OBJC_WEAK = YES;
260 | CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES;
261 | CLANG_WARN_BOOL_CONVERSION = YES;
262 | CLANG_WARN_COMMA = YES;
263 | CLANG_WARN_CONSTANT_CONVERSION = YES;
264 | CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES;
265 | CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
266 | CLANG_WARN_DOCUMENTATION_COMMENTS = YES;
267 | CLANG_WARN_EMPTY_BODY = YES;
268 | CLANG_WARN_ENUM_CONVERSION = YES;
269 | CLANG_WARN_INFINITE_RECURSION = YES;
270 | CLANG_WARN_INT_CONVERSION = YES;
271 | CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES;
272 | CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES;
273 | CLANG_WARN_OBJC_LITERAL_CONVERSION = YES;
274 | CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
275 | CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = YES;
276 | CLANG_WARN_RANGE_LOOP_ANALYSIS = YES;
277 | CLANG_WARN_STRICT_PROTOTYPES = YES;
278 | CLANG_WARN_SUSPICIOUS_MOVE = YES;
279 | CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE;
280 | CLANG_WARN_UNREACHABLE_CODE = YES;
281 | CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
282 | COPY_PHASE_STRIP = NO;
283 | DEBUG_INFORMATION_FORMAT = dwarf;
284 | ENABLE_STRICT_OBJC_MSGSEND = YES;
285 | ENABLE_TESTABILITY = YES;
286 | ENABLE_USER_SCRIPT_SANDBOXING = YES;
287 | GCC_C_LANGUAGE_STANDARD = gnu17;
288 | GCC_DYNAMIC_NO_PIC = NO;
289 | GCC_NO_COMMON_BLOCKS = YES;
290 | GCC_OPTIMIZATION_LEVEL = 0;
291 | GCC_PREPROCESSOR_DEFINITIONS = (
292 | "DEBUG=1",
293 | "$(inherited)",
294 | );
295 | GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
296 | GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
297 | GCC_WARN_UNDECLARED_SELECTOR = YES;
298 | GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
299 | GCC_WARN_UNUSED_FUNCTION = YES;
300 | GCC_WARN_UNUSED_VARIABLE = YES;
301 | IPHONEOS_DEPLOYMENT_TARGET = 17.2;
302 | LOCALIZATION_PREFERS_STRING_CATALOGS = YES;
303 | MTL_ENABLE_DEBUG_INFO = INCLUDE_SOURCE;
304 | MTL_FAST_MATH = YES;
305 | ONLY_ACTIVE_ARCH = YES;
306 | SDKROOT = iphoneos;
307 | SWIFT_ACTIVE_COMPILATION_CONDITIONS = "DEBUG $(inherited)";
308 | SWIFT_OPTIMIZATION_LEVEL = "-Onone";
309 | };
310 | name = Debug;
311 | };
312 | D3A2573C2B8BFD2900B69B54 /* Release */ = {
313 | isa = XCBuildConfiguration;
314 | buildSettings = {
315 | ALWAYS_SEARCH_USER_PATHS = NO;
316 | ASSETCATALOG_COMPILER_GENERATE_SWIFT_ASSET_SYMBOL_EXTENSIONS = YES;
317 | CLANG_ANALYZER_NONNULL = YES;
318 | CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE;
319 | CLANG_CXX_LANGUAGE_STANDARD = "gnu++20";
320 | CLANG_ENABLE_MODULES = YES;
321 | CLANG_ENABLE_OBJC_ARC = YES;
322 | CLANG_ENABLE_OBJC_WEAK = YES;
323 | CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES;
324 | CLANG_WARN_BOOL_CONVERSION = YES;
325 | CLANG_WARN_COMMA = YES;
326 | CLANG_WARN_CONSTANT_CONVERSION = YES;
327 | CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES;
328 | CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
329 | CLANG_WARN_DOCUMENTATION_COMMENTS = YES;
330 | CLANG_WARN_EMPTY_BODY = YES;
331 | CLANG_WARN_ENUM_CONVERSION = YES;
332 | CLANG_WARN_INFINITE_RECURSION = YES;
333 | CLANG_WARN_INT_CONVERSION = YES;
334 | CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES;
335 | CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES;
336 | CLANG_WARN_OBJC_LITERAL_CONVERSION = YES;
337 | CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
338 | CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = YES;
339 | CLANG_WARN_RANGE_LOOP_ANALYSIS = YES;
340 | CLANG_WARN_STRICT_PROTOTYPES = YES;
341 | CLANG_WARN_SUSPICIOUS_MOVE = YES;
342 | CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE;
343 | CLANG_WARN_UNREACHABLE_CODE = YES;
344 | CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
345 | COPY_PHASE_STRIP = NO;
346 | DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym";
347 | ENABLE_NS_ASSERTIONS = NO;
348 | ENABLE_STRICT_OBJC_MSGSEND = YES;
349 | ENABLE_USER_SCRIPT_SANDBOXING = YES;
350 | GCC_C_LANGUAGE_STANDARD = gnu17;
351 | GCC_NO_COMMON_BLOCKS = YES;
352 | GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
353 | GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
354 | GCC_WARN_UNDECLARED_SELECTOR = YES;
355 | GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
356 | GCC_WARN_UNUSED_FUNCTION = YES;
357 | GCC_WARN_UNUSED_VARIABLE = YES;
358 | IPHONEOS_DEPLOYMENT_TARGET = 17.2;
359 | LOCALIZATION_PREFERS_STRING_CATALOGS = YES;
360 | MTL_ENABLE_DEBUG_INFO = NO;
361 | MTL_FAST_MATH = YES;
362 | SDKROOT = iphoneos;
363 | SWIFT_COMPILATION_MODE = wholemodule;
364 | VALIDATE_PRODUCT = YES;
365 | };
366 | name = Release;
367 | };
368 | D3A2573E2B8BFD2900B69B54 /* Debug */ = {
369 | isa = XCBuildConfiguration;
370 | baseConfigurationReference = E713281D99B7811BD664DE09 /* Pods-live-demo.debug.xcconfig */;
371 | buildSettings = {
372 | ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
373 | ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor;
374 | CODE_SIGN_IDENTITY = "Apple Development";
375 | CODE_SIGN_STYLE = Automatic;
376 | CURRENT_PROJECT_VERSION = 1;
377 | DEVELOPMENT_TEAM = UY9S8H9G47;
378 | ENABLE_APP_SANDBOX = NO;
379 | ENABLE_USER_SCRIPT_SANDBOXING = NO;
380 | GENERATE_INFOPLIST_FILE = YES;
381 | INFOPLIST_FILE = "live-demo/Info.plist";
382 | INFOPLIST_KEY_NSCameraUsageDescription = "Use camera";
383 | INFOPLIST_KEY_NSFileProviderDomainUsageDescription = "";
384 | INFOPLIST_KEY_NSMicrophoneUsageDescription = "Use microphone";
385 | INFOPLIST_KEY_NSPhotoLibraryUsageDescription = "Use photo library";
386 | INFOPLIST_KEY_UIApplicationSupportsIndirectInputEvents = YES;
387 | INFOPLIST_KEY_UILaunchStoryboardName = LaunchScreen;
388 | INFOPLIST_KEY_UIMainStoryboardFile = Main;
389 | INFOPLIST_KEY_UISupportedInterfaceOrientations_iPad = "UIInterfaceOrientationPortrait UIInterfaceOrientationPortraitUpsideDown UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight";
390 | INFOPLIST_KEY_UISupportedInterfaceOrientations_iPhone = "UIInterfaceOrientationPortrait UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight";
391 | IPHONEOS_DEPLOYMENT_TARGET = 13.1;
392 | LD_RUNPATH_SEARCH_PATHS = (
393 | "$(inherited)",
394 | "@executable_path/Frameworks",
395 | );
396 | MARKETING_VERSION = 1.0;
397 | PRODUCT_BUNDLE_IDENTIFIER = "com.sudayn.livedemotest.live-demo";
398 | PRODUCT_NAME = "$(TARGET_NAME)";
399 | PROVISIONING_PROFILE_SPECIFIER = "";
400 | SWIFT_EMIT_LOC_STRINGS = YES;
401 | SWIFT_VERSION = 5.0;
402 | TARGETED_DEVICE_FAMILY = "1,2";
403 | };
404 | name = Debug;
405 | };
406 | D3A2573F2B8BFD2900B69B54 /* Release */ = {
407 | isa = XCBuildConfiguration;
408 | baseConfigurationReference = 17AE0299CEBC43D9EFBBEE6D /* Pods-live-demo.release.xcconfig */;
409 | buildSettings = {
410 | ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
411 | ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor;
412 | CODE_SIGN_IDENTITY = "Apple Development";
413 | CODE_SIGN_STYLE = Automatic;
414 | CURRENT_PROJECT_VERSION = 1;
415 | DEVELOPMENT_TEAM = UY9S8H9G47;
416 | ENABLE_APP_SANDBOX = NO;
417 | ENABLE_USER_SCRIPT_SANDBOXING = NO;
418 | GENERATE_INFOPLIST_FILE = YES;
419 | INFOPLIST_FILE = "live-demo/Info.plist";
420 | INFOPLIST_KEY_NSCameraUsageDescription = "Use camera";
421 | INFOPLIST_KEY_NSFileProviderDomainUsageDescription = "";
422 | INFOPLIST_KEY_NSMicrophoneUsageDescription = "Use microphone";
423 | INFOPLIST_KEY_NSPhotoLibraryUsageDescription = "Use photo library";
424 | INFOPLIST_KEY_UIApplicationSupportsIndirectInputEvents = YES;
425 | INFOPLIST_KEY_UILaunchStoryboardName = LaunchScreen;
426 | INFOPLIST_KEY_UIMainStoryboardFile = Main;
427 | INFOPLIST_KEY_UISupportedInterfaceOrientations_iPad = "UIInterfaceOrientationPortrait UIInterfaceOrientationPortraitUpsideDown UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight";
428 | INFOPLIST_KEY_UISupportedInterfaceOrientations_iPhone = "UIInterfaceOrientationPortrait UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight";
429 | IPHONEOS_DEPLOYMENT_TARGET = 13.1;
430 | LD_RUNPATH_SEARCH_PATHS = (
431 | "$(inherited)",
432 | "@executable_path/Frameworks",
433 | );
434 | MARKETING_VERSION = 1.0;
435 | PRODUCT_BUNDLE_IDENTIFIER = "com.sudayn.livedemotest.live-demo";
436 | PRODUCT_NAME = "$(TARGET_NAME)";
437 | PROVISIONING_PROFILE_SPECIFIER = "";
438 | SWIFT_EMIT_LOC_STRINGS = YES;
439 | SWIFT_VERSION = 5.0;
440 | TARGETED_DEVICE_FAMILY = "1,2";
441 | };
442 | name = Release;
443 | };
444 | /* End XCBuildConfiguration section */
445 |
446 | /* Begin XCConfigurationList section */
447 | D3A257242B8BFD2700B69B54 /* Build configuration list for PBXProject "live-demo" */ = {
448 | isa = XCConfigurationList;
449 | buildConfigurations = (
450 | D3A2573B2B8BFD2900B69B54 /* Debug */,
451 | D3A2573C2B8BFD2900B69B54 /* Release */,
452 | );
453 | defaultConfigurationIsVisible = 0;
454 | defaultConfigurationName = Release;
455 | };
456 | D3A2573D2B8BFD2900B69B54 /* Build configuration list for PBXNativeTarget "live-demo" */ = {
457 | isa = XCConfigurationList;
458 | buildConfigurations = (
459 | D3A2573E2B8BFD2900B69B54 /* Debug */,
460 | D3A2573F2B8BFD2900B69B54 /* Release */,
461 | );
462 | defaultConfigurationIsVisible = 0;
463 | defaultConfigurationName = Release;
464 | };
465 | /* End XCConfigurationList section */
466 | };
467 | rootObject = D3A257212B8BFD2700B69B54 /* Project object */;
468 | }
469 |
--------------------------------------------------------------------------------
/FFLivekit/Utils/FFmpegUtils.swift:
--------------------------------------------------------------------------------
1 | //
2 | // FFmpegUtils.swift
3 | // live-demo
4 | //
5 | // Created by xkal on 10/3/2024.
6 | //
7 |
8 | import AVFoundation
9 | import ffmpegkit
10 |
11 | enum RecordingType {
12 | case Microphone
13 | case Camera
14 | case Camera_Microphone
15 | case File
16 | }
17 |
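/// Immutable snapshot of an FFmpegKit `Statistics` object, passed to delegates on every progress callback.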
18 | public class FFStat {
19 |
20 | public let bitrate: Double
21 | public let size: Int
22 | public let time: Double
23 | public let speed: Double
24 | public let rate: Double
25 | public let fps: Float
26 | public let quality: Float
27 | public let frameNumber: Int32
28 | public let sessionId: Int
29 |
30 | init(stat: Statistics) {
31 | bitrate = stat.getBitrate()
32 | size = stat.getSize()
33 | time = stat.getTime()
34 | speed = stat.getSpeed()
35 | rate = stat.getBitrate()
36 | fps = stat.getVideoFps()
37 | quality = stat.getVideoQuality()
38 | frameNumber = stat.getVideoFrameNumber()
39 | sessionId = stat.getSessionId()
40 | }
41 |
42 |
43 | }
44 |
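/// Streaming session lifecycle: Normal -> RequestRecording -> Recording -> RequestStop -> back to Normal.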
45 | public enum RecordingState {
46 | case RequestRecording
47 | case Recording
48 | case RequestStop
49 | case Normal
50 | }
51 |
52 | public protocol FFmpegUtilsDelegate: AnyObject {
53 | func _FFLiveKit(didChange status: RecordingState)
54 | func _FFLiveKit(onStats stats: FFStat)
55 | func _FFLiveKit(onError error: String)
56 | }
57 |
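/// Output tuning options; anything not supplied falls back to the defaults in `FFmpegOptions.shared()`.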
58 | public enum FFLivekitSettings {
59 | case outputVideoFramerate(Int)
60 | case outputVideoPixelFormat(String)
61 | case outputVideoSize((Int, Int))
62 | /// example "500k" or "2M"
63 | case outputVideoBitrate(String)
64 | /// example "128k"
65 | case outputAudioBitrate(String)
66 |
67 |     /// nil for no transpose
68 |     /// 0 - Rotate 90 degrees counterclockwise and flip vertically.
69 |     /// 1 - Rotate 90 degrees clockwise.
70 | /// 2 - Rotate 90 degrees counterclockwise.
71 | /// 3 - Rotate 90 degrees clockwise and flip vertically.
72 | case outputVideoTranspose(Int?)
73 | }
74 |
75 |
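/// Resolved set of output options, built by overlaying `FFLivekitSettings` on the defaults from `shared()`.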
76 | struct FFmpegOptions {
77 |     /// output settings
78 | var outputVideoFramerate: Int
79 | var outputVideoPixelFormat: String
80 | var outputVideoSize: (Int, Int)
81 | var outputVideoBitrate: String
82 | var outputAudioBitrate: String
83 | var outputVideoTranspose: Int?
84 |
85 | init(outputVideoFramerate: Int, outputVideoPixelFormat: String, outputVideoSize: (Int, Int), outputVideoBitrate: String, outputAudioBitrate: String, outputVideoTranspose: Int?) {
86 | self.outputVideoFramerate = outputVideoFramerate
87 | self.outputVideoPixelFormat = outputVideoPixelFormat
88 | self.outputVideoSize = outputVideoSize
89 | self.outputVideoBitrate = outputVideoBitrate
90 | self.outputAudioBitrate = outputAudioBitrate
91 | self.outputVideoTranspose = outputVideoTranspose
92 | }
93 |
94 | init(settings: [FFLivekitSettings]) {
95 | self = FFmpegOptions.shared()
96 |
97 | for setting in settings {
98 | switch setting {
99 | case .outputAudioBitrate(let value):
100 | self.outputAudioBitrate = value
101 | break
102 | case .outputVideoBitrate(let value):
103 | self.outputVideoBitrate = value
104 | break
105 | case .outputVideoSize(let value):
106 | self.outputVideoSize = value
107 | case .outputVideoFramerate(let value):
108 | self.outputVideoFramerate = value
109 | case .outputVideoPixelFormat(let value):
110 | self.outputVideoPixelFormat = value
111 | case .outputVideoTranspose(let value):
112 | self.outputVideoTranspose = value
113 | }
114 | }
115 | }
116 |
117 | public static func shared() -> FFmpegOptions {
118 | let option = FFmpegOptions(outputVideoFramerate: 30, outputVideoPixelFormat: "yuv420p", outputVideoSize: (1280, 720), outputVideoBitrate: "640k", outputAudioBitrate: "64k", outputVideoTranspose: 1)
119 | return option
120 | }
121 |
122 | }
123 |
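/// Bridges capture sources and FFmpegKit: buffers raw camera/microphone data, feeds it to FFmpeg through
/// named pipes, and runs the streaming command asynchronously, reporting state, stats and errors to a delegate.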
124 | class FFmpegUtils: NSObject, SourceDelegate {
125 |
126 |
127 | var audioPipe: String?
128 | var videoPipe: String?
129 |
130 | var outputFormat = ""
131 | var baseUrl = ""
132 | // var streamName: String?
133 | // var queryString = ""
134 | let options: FFmpegOptions!
135 |
136 |
137 | var url: String {
138 | get {
139 | return baseUrl
140 | }
141 | }
142 |
143 | var enableWritingToPipe = false
144 | var isInBackground = false
145 |
146 | private var videoTimer: Timer?
147 | private var blankFrames: Data?
148 | private var videoFileDescriptor: Int32!
149 | private var audioFileDescriptor: Int32!
150 |
151 | // Persistent file handles for better performance (instead of creating new ones each write)
152 | private var videoFileHandle: FileHandle?
153 | private var audioFileHandle: FileHandle?
154 |
155 | // var recordingType = RecordingType.Camera_Microphone
156 |
157 | var inputCommands: [String] = []
158 | // var outputCommands: [String] = []
159 | var encoders: [String] = []
160 |
161 | /// threads
162 | private let background = DispatchQueue.global(qos: .background)
163 | private let videoFeedThread = DispatchQueue.global(qos: .background)
164 | private let audioFeedThread = DispatchQueue.global(qos: .background)
165 |
166 | /// buffers and locks
167 | private let videoBufferLock = NSLock()
168 | private var videoDataBuffer = Data()
169 |
170 | private let audioBufferLock = NSLock()
171 | private var audioDataBuffer = Data()
172 |
173 | private weak var delegate: FFmpegUtilsDelegate?
174 |
175 | init(outputFormat: String, url: String, delegate: FFmpegUtilsDelegate?, options: [FFLivekitSettings]) {
176 | self.options = FFmpegOptions(settings: options)
177 | super.init()
178 | self.outputFormat = outputFormat
179 | self.baseUrl = url
180 | self.delegate = delegate
181 | FFmpegKitConfig.enableLogCallback({log in
182 | if let log = log {
183 | print(log.getMessage()!)
184 | }
185 | })
186 | registerForInterruption()
187 | self.recordingState = .Normal
188 | }
189 |
190 | func registerForInterruption() {
191 | // Add observers for AVCaptureSession notifications
192 | NotificationCenter.default.addObserver(self, selector: #selector(sessionRuntimeError), name: .AVCaptureSessionRuntimeError, object: nil)
193 | NotificationCenter.default.addObserver(self, selector: #selector(sessionWasInterrupted), name: .AVCaptureSessionWasInterrupted, object: nil)
194 | NotificationCenter.default.addObserver(self, selector: #selector(sessionInterruptionEnded), name: .AVCaptureSessionInterruptionEnded, object: nil)
195 | }
196 |
197 | // Handle AVCaptureSession runtime error
198 | @objc func sessionRuntimeError(notification: Notification) {
199 | if let error = notification.userInfo?[AVCaptureSessionErrorKey] as? Error {
200 | print("AVCaptureSession runtime error: \(error.localizedDescription)")
201 | // Handle the error as needed
202 | }
203 | }
204 |
205 | // Handle AVCaptureSession interruption
206 | @objc func sessionWasInterrupted(notification: Notification) {
207 | if let reasonValue = notification.userInfo?[AVCaptureSessionInterruptionReasonKey] as? Int,
208 | let reason = AVCaptureSession.InterruptionReason(rawValue: reasonValue) {
209 | print("AVCaptureSession was interrupted. Reason: \(reason)")
210 | // Handle the interruption as needed
211 | if reasonValue == 1 {
212 | blankFrames = BufferConverter.createEmptyRGBAData(width: 1920, height: 1080)
213 | isInBackground = true
214 | }
215 | }
216 | }
217 |
218 | // Handle AVCaptureSession interruption ended
219 | @objc func sessionInterruptionEnded(notification: Notification) {
220 | print("AVCaptureSession interruption ended.")
221 | isInBackground = false
222 | blankFrames = nil
223 | clearVideoBuffer()
224 | clearAudioBuffer()
225 | }
226 |
227 |     // Remove observers and release pipes/buffers when this object is deallocated
228 | deinit {
229 | NotificationCenter.default.removeObserver(self)
230 | stopTimer()
231 | closePipes()
232 | videoDataBuffer.removeAll()
233 | audioDataBuffer.removeAll()
234 | }
235 |
236 |
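    /// State machine driving the session: RequestRecording creates the pipes, launches the FFmpeg command and
    /// starts the feed timer; RequestStop tears everything down and returns to Normal shortly afterwards.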
237 | var recordingState: RecordingState = .Normal {
238 | willSet {
239 | DispatchQueue.main.async {
240 | self.delegate?._FFLiveKit(didChange: newValue)
241 | }
242 | switch newValue {
243 | case .Normal:
244 | enableWritingToPipe = false
245 | break
246 | case .RequestRecording:
247 | clearVideoBuffer()
248 | clearAudioBuffer()
249 | enableWritingToPipe = true
250 | /// initialize pipes
251 | createPipes()
252 | background.async {
253 | self.executeCommand()
254 | }
255 | startTimer()
256 | break
257 | case .Recording:
258 | enableWritingToPipe = true
259 | break
260 | case .RequestStop:
261 | enableWritingToPipe = false
262 | stopTimer()
263 | closePipes()
264 | clearVideoBuffer()
265 | clearAudioBuffer()
266 | FFmpegKit.cancel()
267 | DispatchQueue.main.asyncAfter(deadline: .now() + 0.5, execute: {
268 | self.recordingState = .Normal
269 | })
270 | break
271 | }
272 | }
273 | }
274 |
275 | func start(inputcommands: [String], encoders: [String]) {
276 | self.inputCommands = inputcommands
277 | self.encoders = encoders
278 | recordingState = .RequestRecording
279 | }
280 |
281 | func stop() {
282 | recordingState = .RequestStop
283 | }
284 |
285 | private func stopTimer() {
286 | videoTimer?.invalidate()
287 | videoTimer = nil
288 | }
289 |
290 | private func startTimer() {
291 | DispatchQueue.global().async {
292 | self.videoTimer = Timer.scheduledTimer(timeInterval: 0.010, target: self, selector: #selector(self.handleFeed), userInfo: nil, repeats: true)
293 | RunLoop.current.add(self.videoTimer!, forMode: .default)
294 | RunLoop.current.run()
295 | }
296 | }
297 |
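    /// Timer callback: while interrupted/backgrounded, accumulate blank frames and flush them in bulk;
    /// otherwise drain the video and audio buffers into their pipes.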
298 | @objc func handleFeed() {
299 | if isInBackground {
300 | self.appendToVideoBuffer(data: self.blankFrames!)
301 | if self.videoDataBuffer.count > 10*1000000 {
302 | print("Flushing....")
303 | self.feedToVideoPipe()
304 | }
305 | } else {
306 | feedToVideoPipe()
307 | feedToAudioPipe()
308 | }
309 | }
310 |
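    /// Registers the FFmpeg named pipes and keeps read/write descriptors open for the lifetime of the session.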
311 | private func createPipes() {
312 |         // create named pipes for video and audio
313 | videoPipe = FFmpegKitConfig.registerNewFFmpegPipe()
314 | audioPipe = FFmpegKitConfig.registerNewFFmpegPipe()
315 |         // keep the pipes open read/write so that FFmpeg does not exit when a pipe receives EOF
316 | videoFileDescriptor = open(videoPipe!, O_RDWR)
317 | audioFileDescriptor = open(audioPipe!, O_RDWR)
318 |
319 | // Create persistent file handles for better performance
320 | if videoFileDescriptor >= 0 {
321 | videoFileHandle = FileHandle(fileDescriptor: videoFileDescriptor, closeOnDealloc: false)
322 | }
323 | if audioFileDescriptor >= 0 {
324 | audioFileHandle = FileHandle(fileDescriptor: audioFileDescriptor, closeOnDealloc: false)
325 | }
326 | }
327 |
328 | private func closePipes() {
329 | // Close file handles first
330 | videoFileHandle?.closeFile()
331 | audioFileHandle?.closeFile()
332 | videoFileHandle = nil
333 | audioFileHandle = nil
334 |
335 | // Then close file descriptors
336 | if videoFileDescriptor != nil {
337 | close(videoFileDescriptor)
338 | videoFileDescriptor = nil
339 | }
340 | if audioFileDescriptor != nil {
341 | close(audioFileDescriptor)
342 | audioFileDescriptor = nil
343 | }
344 |
345 | // Finally close FFmpeg pipes
346 | if let videoPipe = videoPipe {
347 | FFmpegKitConfig.closeFFmpegPipe(videoPipe)
348 | }
349 | if let audioPipe = audioPipe {
350 | FFmpegKitConfig.closeFFmpegPipe(audioPipe)
351 | }
352 | }
353 |
354 | func appendToVideoBuffer(data: Data) {
355 | videoFeedThread.sync {
356 | self.videoBufferLock.lock()
357 | /// Max bytes buffer 100MB - drop oldest data if exceeded (FIFO)
358 | let maxBufferSize = 100 * 1000000
359 | if self.videoDataBuffer.count > maxBufferSize {
360 | // Remove oldest data to make room (keep last 50MB)
361 | let keepSize = 50 * 1000000
362 | if self.videoDataBuffer.count > keepSize {
363 | let removeCount = self.videoDataBuffer.count - keepSize
364 | self.videoDataBuffer.removeFirst(removeCount)
365 | }
366 | }
367 | self.videoDataBuffer.append(data)
368 | self.videoBufferLock.unlock()
369 | }
370 | }
371 |
372 | func appendToAudioBuffer(data: Data) {
373 | audioFeedThread.sync {
374 | self.audioBufferLock.lock()
375 | /// Max bytes buffer 50MB - drop oldest data if exceeded (FIFO)
376 | let maxBufferSize = 50 * 1000000
377 | if self.audioDataBuffer.count > maxBufferSize {
378 | // Remove oldest data to make room (keep last 25MB)
379 | let keepSize = 25 * 1000000
380 | if self.audioDataBuffer.count > keepSize {
381 | let removeCount = self.audioDataBuffer.count - keepSize
382 | self.audioDataBuffer.removeFirst(removeCount)
383 | }
384 | }
385 | self.audioDataBuffer.append(data)
386 | self.audioBufferLock.unlock()
387 | }
388 | }
389 |
390 | func writeToVideoPipe(data: Data) {
391 | // Use persistent file handle if available, otherwise fall back to creating new one
392 | if let fileHandle = videoFileHandle {
393 | do {
394 | if #available(iOS 13.4, *) {
395 | try fileHandle.write(contentsOf: data)
396 | } else {
397 | fileHandle.write(data)
398 | }
399 | } catch {
400 | print("Error writing video to pipe: \(error.localizedDescription)")
401 | }
402 | } else if let currentPipe = self.videoPipe, let fileHandle = try? FileHandle(forWritingTo: URL(fileURLWithPath: currentPipe)) {
403 | // Fallback: create new file handle (original behavior)
404 | if #available(iOS 13.4, *) {
405 | try? fileHandle.write(contentsOf: data)
406 | } else {
407 | fileHandle.write(data)
408 | }
409 | fileHandle.closeFile()
410 | } else {
411 | print("Failed to open video file handle for writing")
412 | }
413 | }
414 |
415 | func writeToAudioPipe(data: Data) {
416 | // Use persistent file handle if available, otherwise fall back to creating new one
417 | if let fileHandle = audioFileHandle {
418 | do {
419 | if #available(iOS 13.4, *) {
420 | try fileHandle.write(contentsOf: data)
421 | } else {
422 | fileHandle.write(data)
423 | }
424 | } catch {
425 | print("Error writing audio to pipe: \(error.localizedDescription)")
426 | }
427 | } else if let currentPipe = self.audioPipe, let fileHandle = try? FileHandle(forWritingTo: URL(fileURLWithPath: currentPipe)) {
428 | // Fallback: create new file handle (original behavior)
429 | if #available(iOS 13.4, *) {
430 | try? fileHandle.write(contentsOf: data)
431 | } else {
432 | fileHandle.write(data)
433 | }
434 | fileHandle.closeFile()
435 | } else {
436 | print("Failed to open audio file handle for writing")
437 | }
438 | }
439 |
440 | @objc func feedToVideoPipe() {
441 | // print("Feeding video")
442 | self.videoBufferLock.lock()
443 | // Feed video
444 | if !self.videoDataBuffer.isEmpty {
445 | self.writeToVideoPipe(data: self.videoDataBuffer)
446 | self.videoDataBuffer.removeAll()
447 | }
448 | self.videoBufferLock.unlock()
449 | }
450 |
451 | @objc func feedToAudioPipe() {
452 |         // print("Feeding audio")
453 |         self.audioBufferLock.lock()
454 |         // Feed audio
455 | if !self.audioDataBuffer.isEmpty {
456 | self.writeToAudioPipe(data: self.audioDataBuffer)
457 | self.audioDataBuffer.removeAll()
458 | }
459 | self.audioBufferLock.unlock()
460 | }
461 |
462 | func clearVideoBuffer() {
463 | self.videoBufferLock.lock()
464 | self.videoDataBuffer.removeAll()
465 | self.videoBufferLock.unlock()
466 | }
467 |
468 | func clearAudioBuffer() {
469 | self.audioBufferLock.lock()
470 | self.audioDataBuffer.removeAll()
471 | self.audioBufferLock.unlock()
472 | }
473 |
474 |
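    /// Video output arguments: framerate, pixel format, optional transpose plus scale filter, and bitrate.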
475 | private func generateVideoOutputCommand() -> String {
476 | return "-framerate \(options.outputVideoFramerate) -pixel_format \(options.outputVideoPixelFormat) -vf \"\(options.outputVideoTranspose == nil ? "" : "transpose=\(options.outputVideoTranspose!),")scale=\(options.outputVideoSize.0):\(options.outputVideoSize.1)\" -b:v \(options.outputVideoBitrate)"
477 | }
478 |
479 | private func generateAudioOutputCommand() -> String {
480 | return "-b:a \(options.outputAudioBitrate)"
481 | }
482 |
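    /// Assembles the full FFmpeg command: source input flags (with %videoPipe%/%audioPipe% substituted),
    /// encoder flags, output options, output format and destination URL.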
483 | private func executeCommand() {
484 | let inputs = self.inputCommands.joined(separator: " ").replacingOccurrences(of: "%videoPipe%", with: videoPipe!).replacingOccurrences(of: "%audioPipe%", with: audioPipe!)
485 | let encoders = self.encoders.joined(separator: " ")
486 | let cmd = "-re -thread_queue_size 512 \(inputs) \(encoders) \(generateVideoOutputCommand()) \(generateAudioOutputCommand()) -fps_mode cfr -f \(outputFormat) \"\(url)\""
487 | execute(cmd: cmd)
488 | }
489 |
490 |
491 | private func execute(cmd: String) {
492 | print("Executing \(cmd)..........")
493 | FFmpegKit.executeAsync(cmd, withCompleteCallback: {session in
494 | if let session = session {
495 | if let stats = session.getStatistics().first as? Statistics {
496 | DispatchQueue.main.async {
497 | self.delegate?._FFLiveKit(onStats: FFStat(stat: stats))
498 | }
499 | }
500 | if let code = session.getReturnCode() {
501 | if ReturnCode.isSuccess(code) {
502 | print("Finished")
503 | } else if ReturnCode.isCancel(code) {
504 | print("Cancelled")
505 | } else {
506 | print("Error")
507 | DispatchQueue.main.async {
508 | let output = session.getOutput() ?? ""
509 | self.delegate?._FFLiveKit(onError: output)
510 | }
511 | }
512 | }
513 |
514 | }
515 | self.stop()
516 | }, withLogCallback: nil, withStatisticsCallback: {stats in
517 | guard let stats = stats else {
518 | return
519 | }
520 | /// For Video
521 | if stats.getTime() > 0 {
522 | self.recordingState = .Recording
523 | }
524 | DispatchQueue.main.async {
525 | self.delegate?._FFLiveKit(onStats: FFStat(stat: stats))
526 | }
527 | })
528 | }
529 |
530 |
531 | // func _CameraSource(switchStarted: Bool) {
532 | // startPiping = false
533 | // clearVideoBuffer()
534 | // }
535 | //
536 | // func _CameraSource(switchEnded: Bool) {
537 | // running = true
538 | // }
539 |
540 |
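    /// SourceDelegate callback: camera data goes to the video pipe/buffer, microphone data to the audio
    /// pipe/buffer; writes go directly to the pipe until FFmpeg reports progress, then through the buffers.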
541 | func _Source(_ source: Source, onData: Data) {
542 | if self.enableWritingToPipe {
543 | if source is CameraSource {
544 | if !self.isInBackground, let data = isInBackground ? blankFrames : onData {
545 | if self.recordingState == .RequestRecording {
546 | self.writeToVideoPipe(data: data)
547 | } else if self.recordingState == .Recording {
548 | self.appendToVideoBuffer(data: data)
549 | }
550 | }
551 | } else if source is MicrophoneSource {
552 | if self.recordingState == .RequestRecording {
553 | self.writeToAudioPipe(data: onData)
554 | } else if self.recordingState == .Recording {
555 | if isInBackground {
556 | self.writeToAudioPipe(data: onData)
557 | } else {
558 | self.appendToAudioBuffer(data: onData)
559 | }
560 | }
561 | }
562 | }
563 | }
564 |
565 | func _Source(_ source: Source, extra: [String : Any]) {
566 | if self.recordingState == .Recording {
567 | if source is CameraSource {
568 | if let switchStarted = extra["switchStarted"] as? Bool {
569 | if switchStarted == true {
570 | self.enableWritingToPipe = false
571 | clearVideoBuffer()
572 | clearAudioBuffer()
573 | } else if switchStarted == false {
574 | self.enableWritingToPipe = true
575 | }
576 | }
577 |
578 | }
579 | }
580 |
581 | }
582 | }
583 |
--------------------------------------------------------------------------------
/Example/live-demo/ViewController.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ViewController.swift
3 | // live-publish-demo
4 | //
5 | // Created by xkal on 24/2/2024.
6 | //
7 |
8 | import UIKit
9 | import AVFoundation
10 | import ffmpegkit
11 |
12 | class ViewController: UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate, UIImagePickerControllerDelegate, AVCaptureFileOutputRecordingDelegate, UINavigationControllerDelegate {
13 |
14 | func fileOutput(_ output: AVCaptureFileOutput, didFinishRecordingTo outputFileURL: URL, from connections: [AVCaptureConnection], error: Error?) {
15 | print(outputFileURL)
16 | let cmd = "-re -i \(outputFileURL.path) -c:a aac -c:v h264 -b:v 2M -f flv rtmp://192.168.1.100:1935/mystream"
17 | print(cmd)
18 | background.async {
19 | let session = FFmpegKit.executeAsync(cmd, withCompleteCallback: {data in
20 | print("completed")
21 | self.startPublish()
22 | })
23 | self.sessionId = session?.getId()
24 | }
25 | }
26 |
27 | func fileOutput(_ output: AVCaptureFileOutput, didStartRecordingTo fileURL: URL, from connections: [AVCaptureConnection]) {
28 |
29 | background.asyncAfter(deadline: .now() + 3, execute: {
30 | self.stopPublish()
31 | })
32 |
33 | }
34 |
35 |
36 | @IBOutlet weak var cameraView: UIView!
37 |
38 | let captureSession = AVCaptureSession()
39 | var frontCamera: AVCaptureDevice?
40 | var rearCamera: AVCaptureDevice?
41 | var microphone: AVCaptureDevice?
42 |
43 | var frontCameraInput: AVCaptureDeviceInput?
44 | var backCameraInput: AVCaptureDeviceInput?
45 | var microphoneInput: AVCaptureDeviceInput?
46 | var currentCameraPosition: AVCaptureDevice.Position = .back
47 |
48 | var movieOutput = AVCaptureMovieFileOutput()
49 |
50 |
51 | var sessionOutput = AVCaptureVideoDataOutput();
52 |
53 |
54 |
55 | var _currentWritingStatus: AVAssetWriter.Status = .unknown
56 |
57 | var videoUrl: String?
58 |
59 | var sessionId: Int?
60 |
61 |
62 | let background = DispatchQueue.global(qos: .background)
63 |
64 |
65 |
66 | private let previewLayer = AVCaptureVideoPreviewLayer()
67 |
68 |
69 | // var currentWritingStatus: AVAssetWriter.Status {
70 | // set {
71 | // _currentWritingStatus = newValue
72 | // }
73 | //
74 | // get {
75 | // return _currentWritingStatus
76 | // }
77 | // }
78 |
79 |
80 | var tempVideoFileUrl: URL {
81 | return FileManager.default.temporaryDirectory.appendingPathComponent("temp.mp4")
82 | }
83 |
84 |
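    /// Creates a named pipe at `path` with mkfifo; returns true if the path already exists or creation succeeds.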
85 | func createNamedPipe(atPath path: String) -> Bool {
86 | if FileManager.default.fileExists(atPath: path) {
87 | return true
88 | }
89 | let result = mkfifo(path, S_IRUSR | S_IWUSR | S_IRGRP | S_IWGRP | S_IROTH | S_IWOTH)
90 |
91 | if result == 0 {
92 | print("Named pipe created at: \(path)")
93 | return true
94 | } else {
95 | perror("mkfifo")
96 | return false
97 | }
98 | }
99 |
100 | let pipePath = FileManager.default.temporaryDirectory.appendingPathComponent("fff").path
101 |
102 | @IBAction func onPick(_ sender: Any) {
103 | // imagePickerController.sourceType = .photoLibrary
104 | // imagePickerController.delegate = self
105 | // imagePickerController.mediaTypes = ["public.image", "public.movie"]
106 | //
107 | // present(imagePickerController, animated: true, completion: nil)
108 | background.async {
109 | self.writeToNamedPipe(atPath: self.pipePath, data: "Hello world bitch")
110 | }
111 | }
112 |
113 | func writeToNamedPipe(atPath path: String, data: String) {
114 | if let fileHandle = FileHandle(forWritingAtPath: path) {
115 | // defer {
116 | // fileHandle.closeFile()
117 | // }
118 |
119 | if let data = data.data(using: .utf8) {
120 | fileHandle.write(data)
121 | print("Data written to named pipe: \(data)")
122 | } else {
123 | print("Error converting string to data")
124 | }
125 | } else {
126 | print("Error opening file handle for writing")
127 | }
128 | }
129 |
130 |
131 | func readFromNamedPipe(atPath path: String) {
132 | if let fileHandle = FileHandle(forReadingAtPath: path) {
133 | // defer {
134 | // fileHandle.closeFile()
135 | // }
136 |
137 | fileHandle.readabilityHandler = { handler in
138 | let string = String(data: handler.availableData, encoding: .utf8)
139 | print("Data read from named pipe: \(string)")
140 | }
141 | // let data = fileHandle.readDataToEndOfFile()
142 |
143 | } else {
144 | print("Error opening file handle for reading")
145 |
146 | }
147 | }
148 |
149 |
150 | func imagePickerController(_ picker: UIImagePickerController, didFinishPickingMediaWithInfo info: [UIImagePickerController.InfoKey : Any]) {
151 | let url = (info[UIImagePickerController.InfoKey.mediaURL] as? NSURL)?.path
152 | copyFileToTemporaryDirectory(source: url!)
153 | // copyFileToTemporaryDirectory(source: url)
154 |
155 | imagePickerController.dismiss(animated: true, completion: nil)
156 | }
157 |
158 | func copyFileToTemporaryDirectory(source: String) {
159 |
160 | let fileManager = FileManager.default
161 |
162 | // Replace "sourceFilePath" with the path to your source file
163 | let sourceFilePath = source
164 |
165 | // Create a URL for the source file
166 | let sourceFileURL = URL(fileURLWithPath: sourceFilePath)
167 |
168 | // Get the temporary directory URL
169 | let temporaryDirectoryURL = FileManager.default.temporaryDirectory
170 |
171 | // Create a destination URL in the temporary directory
172 | let destinationFileURL = temporaryDirectoryURL.appendingPathComponent(sourceFileURL.lastPathComponent)
173 |
174 | do {
175 | // Check if the file already exists at the destination
176 | if fileManager.fileExists(atPath: destinationFileURL.path) {
177 | // If it exists, you may want to handle it according to your requirements
178 | print("File already exists in the temporary directory.")
179 | try fileManager.removeItem(at: destinationFileURL)
180 | }
181 |
182 | // Copy the file to the temporary directory
183 | try fileManager.copyItem(at: sourceFileURL, to: destinationFileURL)
184 |
185 | // Print the path to the copied file in the temporary directory
186 | print("File copied to: \(destinationFileURL.path)")
187 |
188 | videoUrl = destinationFileURL.path
189 |
190 | } catch {
191 | // Handle the error if the copy operation fails
192 | print("Error: \(error.localizedDescription)")
193 | }
194 | }
195 |
196 | var writer: AVAssetWriter?
197 | var videoWriterInput: AVAssetWriterInput?
198 |
199 |
200 | let imagePickerController = UIImagePickerController()
201 |
202 | override func viewDidLoad() {
203 | super.viewDidLoad()
204 |
205 | // setupCameraPreview()
206 | // setupDevice()
207 | // setupSessionInput()
208 | // prepare()
209 |
210 | background.async {
211 | self.listen()
212 | }
213 | }
214 |
215 | let pipe = Pipe()
216 |
217 |
218 | func listen() {
219 | // make pipe
220 | signal(SIGPIPE) { _ in
221 | print("Received SIGPIPE signal")
222 | }
223 | if createNamedPipe(atPath: pipePath) {
224 | self.readFromNamedPipe(atPath: self.pipePath)
225 | } else {
226 | print("Failed")
227 | }
228 |
229 | }
230 |
231 | private func setupCameraPreview() {
232 | // Check if the device has a camera
233 | guard let camera = AVCaptureDevice.default(for: .video) else {
234 | print("Camera not available")
235 | return
236 | }
237 |
238 | do {
239 | // Create input from the camera
240 | let input = try AVCaptureDeviceInput(device: camera)
241 |
242 | // Create a session and add the input
243 | let session = AVCaptureSession()
244 | session.addInput(input)
245 |
246 | // Set the session to output video frames
247 | background.async {
248 | session.startRunning()
249 | }
250 |
251 | // Set the preview layer to display the camera feed
252 | previewLayer.session = session
253 | previewLayer.videoGravity = .resizeAspectFill
254 |
255 | // Add the preview layer to your view's layer
256 | cameraView.layer.addSublayer(previewLayer)
257 |
258 | // Optional: Adjust the frame of the preview layer
259 | previewLayer.frame = view.layer.bounds
260 |
261 | } catch {
262 | print("Error setting up AVCaptureDeviceInput: \(error)")
263 | }
264 | }
265 |
266 | override func viewDidLayoutSubviews() {
267 | super.viewDidLayoutSubviews()
268 | // Update the frame of the preview layer when the view's bounds change
269 | previewLayer.frame = view.layer.bounds
270 | }
271 |
272 | func setupDevice() {
273 | let session = AVCaptureDevice.DiscoverySession.init(deviceTypes:[.builtInWideAngleCamera, .builtInMicrophone], mediaType: AVMediaType.video, position: AVCaptureDevice.Position.unspecified)
274 |
275 | let cameras = (session.devices.compactMap{$0})
276 |
277 | for camera in cameras {
278 | if camera.position == .front {
279 |
280 | self.frontCamera = camera
281 | }
282 | if camera.position == .back {
283 | self.rearCamera = camera
284 |
285 | try? camera.lockForConfiguration()
286 | camera.focusMode = .continuousAutoFocus
287 | camera.unlockForConfiguration()
288 | }
289 |
290 | }
291 |
292 | let audioSession = AVCaptureDevice.DiscoverySession.init(deviceTypes:[.builtInMicrophone], mediaType: AVMediaType.audio, position: AVCaptureDevice.Position.unspecified)
293 |
294 | let audioDevices = (audioSession.devices.compactMap{$0})
295 |
296 | for audioDevice in audioDevices {
297 | if audioDevice.hasMediaType(.audio) {
298 | microphone = audioDevice
299 | }
300 | }
301 | }
302 |
303 | func setupSessionInput() {
304 |
305 | do {
306 | if let rearCamera = self.rearCamera {
307 | self.backCameraInput = try AVCaptureDeviceInput(device: rearCamera)
308 | if captureSession.canAddInput(self.backCameraInput!) {
309 | captureSession.addInput(self.backCameraInput!)
310 | self.currentCameraPosition = .back
311 | } else {
312 | return
313 | }
314 | } else if let frontCamera = self.frontCamera {
315 | self.frontCameraInput = try AVCaptureDeviceInput(device: frontCamera)
316 | if captureSession.canAddInput(self.frontCameraInput!) {
317 | captureSession.addInput(self.frontCameraInput!)
318 | self.currentCameraPosition = .front
319 | } else {
320 | return
321 | }
322 | } else {
323 | print("no cameras ")
324 | return
325 | }
326 |
327 | // Add audio input
328 | if let audioDevice = self.microphone {
329 | self.microphoneInput = try AVCaptureDeviceInput(device: audioDevice)
330 | if captureSession.canAddInput(self.microphoneInput!) {
331 | captureSession.addInput(self.microphoneInput!)
332 | } else {
333 | print("cannot add input")
334 | }
335 | }
336 | } catch let error {
337 | print(error)
338 | }
339 | }
340 |
341 | func prepare() {
342 | // let videoSettings: [String: Any] = [
343 | // AVVideoCodecKey: AVVideoCodecType.h264,
344 | // AVVideoWidthKey: 640,
345 | // AVVideoHeightKey: 360,
346 | // ]
347 | // if let connection = movieOutput.connection(with: .video) {
348 | // movieOutput.setOutputSettings(videoSettings, for: connection)
349 | // }
350 | if captureSession.canAddOutput(movieOutput) {
351 | captureSession.addOutput(movieOutput)
352 | }
353 |
354 |
355 | let captureLayer = AVCaptureVideoPreviewLayer(session: captureSession)
356 | captureLayer.bounds = CGRect(x: 0, y: 0, width: self.cameraView.bounds.width, height: self.cameraView.bounds.height)
357 | captureLayer.position = CGPoint(x: self.cameraView.bounds.midX, y: self.cameraView.bounds.midY)
358 | captureLayer.videoGravity = AVLayerVideoGravity.resize
359 | cameraView.layer.insertSublayer(captureLayer, at: 0)
360 |
361 | background.async {[weak self] in
362 | self?.captureSession.startRunning()
363 | }
364 |
365 | // sessionOutput.setSampleBufferDelegate(self, queue: backgroundQueue)
366 |
367 | }
368 |
369 | @IBAction func onStartWriting(_ sender: Any) {
370 |         // print("Start writing")
371 | // setupWriter()
372 | self.startPublish()
373 | }
374 |
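    /// Experimental publish path: runs an FFmpegKit command that captures from avfoundation device 1:0,
    /// encodes with h264_videotoolbox/aac and muxes FLV to stdout; the session id is kept for stopPublish().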
375 | func startPublish() {
376 | // let file = FileManager.default.temporaryDirectory.appendingPathComponent("capture.mp4")
377 | // if FileManager.default.fileExists(atPath: file.path) {
378 | // try? FileManager.default.removeItem(at: file)
379 | // print("file removed")
380 | // }
381 | // movieOutput.startRecording(to: FileManager.default.temporaryDirectory.appendingPathComponent("capture.mp4"), recordingDelegate: self)
382 | //
383 | // var pipe = FFmpegKitConfig.registerNewFFmpegPipe()
384 | //
385 | // print(pipe)
386 |
387 |
388 | // let cmd = "-re -i \(videoUrl!) -c:a aac -c:v h264 -b:v 2M -f mpegts \"srt://192.168.1.100:8890?streamid=publish:mystream&pkt_size=1316\""
389 | let cmd = """
390 | -f avfoundation -r 30 -video_size 1280x720 -pixel_format bgr0 -rtbufsize 2G -i 1:0 -vsync 1 -vf \"transpose=1\" -af \"asetpts=N/SR/TB\" -c:a aac -c:v h264_videotoolbox -b:v 2M -f flv -
391 | """
392 | print(cmd)
393 | background.async {
394 | let session = FFmpegKit.execute(cmd)
395 | self.sessionId = session?.getId()
396 |
397 | }
398 |
399 | }
400 |
401 | func stopPublish() {
402 | // movieOutput.stopRecording()
403 | background.async {[weak self] in
404 | print("stopping session \(self?.sessionId)")
405 | if let sessionId = self?.sessionId {
406 | FFmpegKit.cancel(sessionId)
407 | } else {
408 | FFmpegKit.cancel()
409 | }
410 | }
411 | }
412 |
413 | @IBAction func onStopWriting(_ sender: Any) {
414 | // movieOutput.stopRecording()
415 | // stopPublish()
416 | FFmpegKit.cancel()
417 | // FFmpegKitConfig.closeFFmpegPipe(pipe)
418 | }
419 |
420 | func setupWriter() {
421 | do {
422 | writer = try AVAssetWriter(url: tempVideoFileUrl, fileType: .mp4)
423 | let videoSettings: [String: Any] = [
424 | AVVideoCodecKey: AVVideoCodecType.h264,
425 | AVVideoWidthKey: 640,
426 | AVVideoHeightKey: 480,
427 | // (kCVPixelBufferPixelFormatTypeKey as String) : NSNumber(value: kCVPixelFormatType_32BGRA as UInt32)
428 | // Add more settings as needed
429 | ]
430 | videoWriterInput = AVAssetWriterInput(mediaType: .video, outputSettings: videoSettings)
431 |
432 | guard let videoWriterInput = videoWriterInput, writer!.canAdd(videoWriterInput) else {
433 | fatalError("Cannot add video input to asset writer")
434 | }
435 | videoWriterInput.expectsMediaDataInRealTime = true
436 | writer?.add(videoWriterInput)
437 | } catch let e {
438 | print(e)
439 | }
440 | }
441 |
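    /// Converts a sample buffer's pixel buffer into a UIImage by drawing it through a CGBitmapContext.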
442 | func imageFromSampleBuffer(sampleBuffer : CMSampleBuffer) -> UIImage?
443 | {
444 | // Get a CMSampleBuffer's Core Video image buffer for the media data
445 | let imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
446 | // Lock the base address of the pixel buffer
447 | CVPixelBufferLockBaseAddress(imageBuffer!, CVPixelBufferLockFlags.readOnly);
448 |
449 |
450 | // Get the number of bytes per row for the pixel buffer
451 | let baseAddress = CVPixelBufferGetBaseAddress(imageBuffer!);
452 |
453 | // Get the number of bytes per row for the pixel buffer
454 | let bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer!);
455 | // Get the pixel buffer width and height
456 | let width = CVPixelBufferGetWidth(imageBuffer!);
457 | let height = CVPixelBufferGetHeight(imageBuffer!);
458 |
459 | // Create a device-dependent RGB color space
460 | let colorSpace = CGColorSpaceCreateDeviceRGB();
461 |
462 | // Create a bitmap graphics context with the sample buffer data
463 | var bitmapInfo: UInt32 = CGBitmapInfo.byteOrder32Little.rawValue
464 | bitmapInfo |= CGImageAlphaInfo.premultipliedFirst.rawValue & CGBitmapInfo.alphaInfoMask.rawValue
465 | //let bitmapInfo: UInt32 = CGBitmapInfo.alphaInfoMask.rawValue
466 | let context = CGContext.init(data: baseAddress, width: width, height: height, bitsPerComponent: 8, bytesPerRow: bytesPerRow, space: colorSpace, bitmapInfo: bitmapInfo)
467 | // Create a Quartz image from the pixel data in the bitmap graphics context
468 | let quartzImage = context?.makeImage();
469 | // Unlock the pixel buffer
470 | CVPixelBufferUnlockBaseAddress(imageBuffer!, CVPixelBufferLockFlags.readOnly);
471 | if quartzImage == nil {
472 | return nil
473 | }
474 | // Create an image object from the Quartz image
475 | let image = UIImage.init(cgImage: quartzImage!);
476 |
477 | return (image);
478 | }
479 |
480 | func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
481 |
482 | // let image = imageFromSampleBuffer(sampleBuffer: sampleBuffer)
483 | background.async {[weak self] in
484 | guard let imageBuffer = sampleBuffer.imageBuffer else {
485 | print("no image buffer :(")
486 | return
487 | }
488 | let img = UIImage(ciImage: CIImage(cvImageBuffer: imageBuffer))
489 | guard let jpeg = img.jpegData(compressionQuality: 0.6) else {
490 | print("failed to compress jpeg :(")
491 | return
492 | }
493 |
494 |
495 |
496 |
497 |
498 | }
499 | // print(pipe)
500 | // print(jpeg)
501 |
502 | // if let data = image?.jpegData(compressionQuality: 50) {
503 | // if let str = String(data: jpeg, encoding: .utf8) {
504 | //// pipe!.write(str)
505 | //
506 | // }
507 | // }
508 |
509 | // if writer != nil {
510 | // if CMSampleBufferDataIsReady(sampleBuffer) {
511 | // if writer?.status == .unknown {
512 | // let timestamp = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)
513 | // writer?.startWriting()
514 | // writer?.startSession(atSourceTime: timestamp)
515 | // }
516 | //
517 | //
518 | // if videoWriterInput?.isReadyForMoreMediaData ?? false {
519 | // videoWriterInput?.append(sampleBuffer)
520 | // }
521 | //
522 | //// if _currentWritingStatus != writer?.status && writer?.status == .writing {
523 | //// startPublish()
524 | //// }
525 | //
526 | // _currentWritingStatus = writer?.status ?? .unknown
527 | // }
528 | // }
529 | }
530 |
531 |
532 |
533 | // func prepare() {
534 | // self.videoOutput = AVCaptureVideoDataOutput()
535 | // if captureSession.canAddOutput(self.videoOutput!) {
536 | // captureSession.addOutput(self.videoOutput!)
537 | // captureSession.add
538 | // }
539 | //
540 | // captureSession.startRunning()
541 | // }
542 | //
543 | // func recordVideo(completion: @escaping (URL?, Error?) -> Void) {
544 | // guard let captureSession = self.captureSession, captureSession.isRunning else {
545 | // completion(nil, CameraControllerError.captureSessionIsMissing)
546 | // return
547 | // }
548 | // let paths = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)
549 | // let fileUrl = paths[0].appendingPathComponent("output.mp4")
550 | // try? FileManager.default.removeItem(at: fileUrl)
551 | // videoOutput!.startRecording(to: fileUrl, recordingDelegate: self)
552 | // self.videoRecordCompletionBlock = completion
553 | // }
554 | }
555 |
556 |
557 |
558 | enum VideoError: Error {
559 | case failedToGetParameterSetCount
560 | case failedToGetParameterSet(index: Int)
561 | }
562 |
563 | extension CMSampleBuffer {
564 | /// Convert a CMSampleBuffer holding a CMBlockBuffer in AVCC format into Annex B format.
565 | func dataBufferAsAnnexB() -> Data? {
566 | guard let dataBuffer, let formatDescription else {
567 | return nil
568 | }
569 |
570 | do {
571 | var result = Data()
572 | let startCode = Data([0x00, 0x00, 0x00, 0x01])
573 |
574 | try formatDescription.forEachParameterSet { buf in
575 | result.append(startCode)
576 | result.append(buf)
577 | }
578 |
579 | try dataBuffer.withContiguousStorage { rawBuffer in
580 | // Since the startCode is 4 bytes, we can append the whole AVCC buffer to the output,
581 | // and then replace the 4-byte length values with start codes.
582 | var offset = result.count
583 | result.append(rawBuffer.assumingMemoryBound(to: UInt8.self))
584 | result.withUnsafeMutableBytes { resultBuffer in
585 | while offset + 4 < resultBuffer.count {
586 | let nalUnitLength = Int(UInt32(bigEndian: resultBuffer.loadUnaligned(fromByteOffset: offset, as: UInt32.self)))
587 |                     resultBuffer[offset..<offset + 4].copyBytes(from: startCode)
588 |                     offset += 4 + nalUnitLength
589 |                 }
590 |             }
591 |             }
592 | 
593 |             return result
594 |         } catch {
595 |             return nil
596 |         }
597 |     }
598 | }
599 | 
600 | extension CMFormatDescription {
601 |     /// Call `callback` once for each H.264 parameter set (SPS, PPS) in this format description.
602 |     func forEachParameterSet(_ callback: (UnsafeBufferPointer<UInt8>) -> Void) throws {
603 | var parameterSetCount = 0
604 | var status = CMVideoFormatDescriptionGetH264ParameterSetAtIndex(
605 | self,
606 | parameterSetIndex: 0,
607 | parameterSetPointerOut: nil,
608 | parameterSetSizeOut: nil,
609 |             parameterSetCountOut: &parameterSetCount,
610 | nalUnitHeaderLengthOut: nil
611 | )
612 | guard noErr == status else {
613 | throw VideoError.failedToGetParameterSetCount
614 | }
615 |
616 |         for idx in 0..<parameterSetCount {
617 |             var ptr: UnsafePointer<UInt8>? = nil
618 | var size = 0
619 | status = CMVideoFormatDescriptionGetH264ParameterSetAtIndex(
620 | self,
621 | parameterSetIndex: idx,
622 | parameterSetPointerOut: &ptr,
623 | parameterSetSizeOut: &size,
624 | parameterSetCountOut: nil,
625 | nalUnitHeaderLengthOut: nil
626 | )
627 | guard noErr == status else {
628 | throw VideoError.failedToGetParameterSet(index: idx)
629 | }
630 | callback(UnsafeBufferPointer(start: ptr, count: size))
631 | }
632 | }
633 | }
634 |
--------------------------------------------------------------------------------