diff --git a/README.md b/README.md index 17b1200..71bb804 100644 --- a/README.md +++ b/README.md @@ -91,6 +91,28 @@ let audioFileDescriptor = open(audioPipe!, O_RDWR) This worked very well!! +# Usage + +```Swift +let cameraSource = CameraSource(position: .front) +let microphoneSource = MicrophoneSource() +let ffLiveKit = FFLiveKit() +``` + +```Swift +try? ffLiveKit.connect(connection: RTMPConnection(baseUrl: "rtmp://192.168.1.100:1935")) +ffLiveKit.addSource(camera: cameraSource, microphone: microphoneSource) +cameraSource.startPreview(previewView: self.view) +ffLiveKit.prepare(delegate: self) +``` + +```Swift +if !isRecording { + try? ffLiveKit.publish(name: "mystream") +} else { + ffLiveKit.stop() +} +``` # Demo @@ -113,6 +135,5 @@ https://private-user-images.githubusercontent.com/31989781/311260826-f0fa60e3-41 # TODO - CPU Optimization -- Continue live streaming when app is put in background (Audio) diff --git a/live-demo.xcodeproj/project.pbxproj b/live-demo.xcodeproj/project.pbxproj index 4da5551..8d49319 100644 --- a/live-demo.xcodeproj/project.pbxproj +++ b/live-demo.xcodeproj/project.pbxproj @@ -8,24 +8,39 @@ /* Begin PBXBuildFile section */ 7830AB979939E8876AC5B5CF /* Pods_live_demo.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = B46632C18825C406E474741D /* Pods_live_demo.framework */; }; - D30E1B0A2B9A9F2600D78458 /* LiveFeedController.swift in Sources */ = {isa = PBXBuildFile; fileRef = D30E1B092B9A9F2600D78458 /* LiveFeedController.swift */; }; - D30E1B0E2B9AA30100D78458 /* CameraUtility.swift in Sources */ = {isa = PBXBuildFile; fileRef = D30E1B0D2B9AA30100D78458 /* CameraUtility.swift */; }; - D30E1B102B9AAC2000D78458 /* StreamPublisher.swift in Sources */ = {isa = PBXBuildFile; fileRef = D30E1B0F2B9AAC2000D78458 /* StreamPublisher.swift */; }; + D30E1B122B9D607E00D78458 /* Connection.swift in Sources */ = {isa = PBXBuildFile; fileRef = D30E1B112B9D607E00D78458 /* Connection.swift */; }; + D30E1B152B9D60A400D78458 /* RTMPConnection.swift 
in Sources */ = {isa = PBXBuildFile; fileRef = D30E1B142B9D60A400D78458 /* RTMPConnection.swift */; }; + D30E1B172B9D60B200D78458 /* RTSPConnection.swift in Sources */ = {isa = PBXBuildFile; fileRef = D30E1B162B9D60B200D78458 /* RTSPConnection.swift */; }; + D30E1B1D2B9D639800D78458 /* CameraSource.swift in Sources */ = {isa = PBXBuildFile; fileRef = D30E1B1C2B9D639800D78458 /* CameraSource.swift */; }; + D30E1B1F2B9D666F00D78458 /* TestViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = D30E1B1E2B9D666F00D78458 /* TestViewController.swift */; }; + D30E1B212B9D6A3700D78458 /* MicrophoneSource.swift in Sources */ = {isa = PBXBuildFile; fileRef = D30E1B202B9D6A3700D78458 /* MicrophoneSource.swift */; }; + D30E1B242B9D6B5F00D78458 /* BufferConverter.swift in Sources */ = {isa = PBXBuildFile; fileRef = D30E1B232B9D6B5F00D78458 /* BufferConverter.swift */; }; + D30E1B262B9D6E3B00D78458 /* FFLiveKit.swift in Sources */ = {isa = PBXBuildFile; fileRef = D30E1B252B9D6E3B00D78458 /* FFLiveKit.swift */; }; + D30E1B282B9D914C00D78458 /* FFmpegUtils.swift in Sources */ = {isa = PBXBuildFile; fileRef = D30E1B272B9D914C00D78458 /* FFmpegUtils.swift */; }; + D30E1B2A2B9DEBFA00D78458 /* Source.swift in Sources */ = {isa = PBXBuildFile; fileRef = D30E1B292B9DEBFA00D78458 /* Source.swift */; }; + D30E1B2C2B9DEFC100D78458 /* FileSource.swift in Sources */ = {isa = PBXBuildFile; fileRef = D30E1B2B2B9DEFC100D78458 /* FileSource.swift */; }; D3A2572D2B8BFD2800B69B54 /* AppDelegate.swift in Sources */ = {isa = PBXBuildFile; fileRef = D3A2572C2B8BFD2800B69B54 /* AppDelegate.swift */; }; D3A2572F2B8BFD2800B69B54 /* SceneDelegate.swift in Sources */ = {isa = PBXBuildFile; fileRef = D3A2572E2B8BFD2800B69B54 /* SceneDelegate.swift */; }; D3A257312B8BFD2800B69B54 /* ViewController.swift in Sources */ = {isa = PBXBuildFile; fileRef = D3A257302B8BFD2800B69B54 /* ViewController.swift */; }; D3A257342B8BFD2800B69B54 /* Main.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 
D3A257322B8BFD2800B69B54 /* Main.storyboard */; }; D3A257362B8BFD2900B69B54 /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = D3A257352B8BFD2900B69B54 /* Assets.xcassets */; }; D3A257392B8BFD2900B69B54 /* LaunchScreen.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = D3A257372B8BFD2900B69B54 /* LaunchScreen.storyboard */; }; - D3B4D7002B994E61001CFE6B /* Helper.swift in Sources */ = {isa = PBXBuildFile; fileRef = D3B4D6FF2B994E61001CFE6B /* Helper.swift */; }; /* End PBXBuildFile section */ /* Begin PBXFileReference section */ 17AE0299CEBC43D9EFBBEE6D /* Pods-live-demo.release.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-live-demo.release.xcconfig"; path = "Target Support Files/Pods-live-demo/Pods-live-demo.release.xcconfig"; sourceTree = ""; }; B46632C18825C406E474741D /* Pods_live_demo.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; includeInIndex = 0; path = Pods_live_demo.framework; sourceTree = BUILT_PRODUCTS_DIR; }; - D30E1B092B9A9F2600D78458 /* LiveFeedController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = LiveFeedController.swift; sourceTree = ""; }; - D30E1B0D2B9AA30100D78458 /* CameraUtility.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CameraUtility.swift; sourceTree = ""; }; - D30E1B0F2B9AAC2000D78458 /* StreamPublisher.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = StreamPublisher.swift; sourceTree = ""; }; + D30E1B112B9D607E00D78458 /* Connection.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Connection.swift; sourceTree = ""; }; + D30E1B142B9D60A400D78458 /* RTMPConnection.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = RTMPConnection.swift; sourceTree = ""; }; + D30E1B162B9D60B200D78458 /* RTSPConnection.swift */ = {isa = PBXFileReference; lastKnownFileType = 
sourcecode.swift; path = RTSPConnection.swift; sourceTree = ""; }; + D30E1B1C2B9D639800D78458 /* CameraSource.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CameraSource.swift; sourceTree = ""; }; + D30E1B1E2B9D666F00D78458 /* TestViewController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = TestViewController.swift; sourceTree = ""; }; + D30E1B202B9D6A3700D78458 /* MicrophoneSource.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = MicrophoneSource.swift; sourceTree = ""; }; + D30E1B232B9D6B5F00D78458 /* BufferConverter.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = BufferConverter.swift; sourceTree = ""; }; + D30E1B252B9D6E3B00D78458 /* FFLiveKit.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = FFLiveKit.swift; sourceTree = ""; }; + D30E1B272B9D914C00D78458 /* FFmpegUtils.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = FFmpegUtils.swift; sourceTree = ""; }; + D30E1B292B9DEBFA00D78458 /* Source.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Source.swift; sourceTree = ""; }; + D30E1B2B2B9DEFC100D78458 /* FileSource.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = FileSource.swift; sourceTree = ""; }; D3A257292B8BFD2800B69B54 /* live-demo.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = "live-demo.app"; sourceTree = BUILT_PRODUCTS_DIR; }; D3A2572C2B8BFD2800B69B54 /* AppDelegate.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AppDelegate.swift; sourceTree = ""; }; D3A2572E2B8BFD2800B69B54 /* SceneDelegate.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SceneDelegate.swift; sourceTree = ""; }; @@ -34,7 +49,6 @@ D3A257352B8BFD2900B69B54 /* Assets.xcassets */ = {isa = PBXFileReference; 
lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = ""; }; D3A257382B8BFD2900B69B54 /* Base */ = {isa = PBXFileReference; lastKnownFileType = file.storyboard; name = Base; path = Base.lproj/LaunchScreen.storyboard; sourceTree = ""; }; D3A2573A2B8BFD2900B69B54 /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = ""; }; - D3B4D6FF2B994E61001CFE6B /* Helper.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Helper.swift; sourceTree = ""; }; E713281D99B7811BD664DE09 /* Pods-live-demo.debug.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-live-demo.debug.xcconfig"; path = "Target Support Files/Pods-live-demo/Pods-live-demo.debug.xcconfig"; sourceTree = ""; }; /* End PBXFileReference section */ @@ -61,17 +75,48 @@ D30E1B0B2B9AA2C100D78458 /* Controllers */ = { isa = PBXGroup; children = ( - D30E1B092B9A9F2600D78458 /* LiveFeedController.swift */, + D30E1B1E2B9D666F00D78458 /* TestViewController.swift */, ); path = Controllers; sourceTree = ""; }; - D30E1B0C2B9AA2EC00D78458 /* Utils */ = { + D30E1B132B9D608A00D78458 /* Source */ = { isa = PBXGroup; children = ( - D3B4D6FF2B994E61001CFE6B /* Helper.swift */, - D30E1B0D2B9AA30100D78458 /* CameraUtility.swift */, - D30E1B0F2B9AAC2000D78458 /* StreamPublisher.swift */, + D30E1B222B9D6B4A00D78458 /* Utils */, + D30E1B192B9D61B300D78458 /* IO */, + D30E1B182B9D61A900D78458 /* Net */, + D30E1B252B9D6E3B00D78458 /* FFLiveKit.swift */, + ); + path = Source; + sourceTree = ""; + }; + D30E1B182B9D61A900D78458 /* Net */ = { + isa = PBXGroup; + children = ( + D30E1B112B9D607E00D78458 /* Connection.swift */, + D30E1B142B9D60A400D78458 /* RTMPConnection.swift */, + D30E1B162B9D60B200D78458 /* RTSPConnection.swift */, + ); + path = Net; + sourceTree = ""; + }; + D30E1B192B9D61B300D78458 /* IO */ = { + isa = PBXGroup; + children = ( + D30E1B1C2B9D639800D78458 /* 
CameraSource.swift */, + D30E1B292B9DEBFA00D78458 /* Source.swift */, + D30E1B202B9D6A3700D78458 /* MicrophoneSource.swift */, + D30E1B2B2B9DEFC100D78458 /* FileSource.swift */, + ); + path = IO; + sourceTree = ""; + }; + D30E1B222B9D6B4A00D78458 /* Utils */ = { + isa = PBXGroup; + children = ( + D30E1B232B9D6B5F00D78458 /* BufferConverter.swift */, + D30E1B272B9D914C00D78458 /* FFmpegUtils.swift */, ); path = Utils; sourceTree = ""; @@ -105,7 +150,7 @@ D3A257372B8BFD2900B69B54 /* LaunchScreen.storyboard */, D3A2573A2B8BFD2900B69B54 /* Info.plist */, D30E1B0B2B9AA2C100D78458 /* Controllers */, - D30E1B0C2B9AA2EC00D78458 /* Utils */, + D30E1B132B9D608A00D78458 /* Source */, ); path = "live-demo"; sourceTree = ""; @@ -234,13 +279,20 @@ isa = PBXSourcesBuildPhase; buildActionMask = 2147483647; files = ( + D30E1B212B9D6A3700D78458 /* MicrophoneSource.swift in Sources */, D3A257312B8BFD2800B69B54 /* ViewController.swift in Sources */, + D30E1B2C2B9DEFC100D78458 /* FileSource.swift in Sources */, + D30E1B282B9D914C00D78458 /* FFmpegUtils.swift in Sources */, D3A2572D2B8BFD2800B69B54 /* AppDelegate.swift in Sources */, + D30E1B262B9D6E3B00D78458 /* FFLiveKit.swift in Sources */, D3A2572F2B8BFD2800B69B54 /* SceneDelegate.swift in Sources */, - D30E1B0A2B9A9F2600D78458 /* LiveFeedController.swift in Sources */, - D3B4D7002B994E61001CFE6B /* Helper.swift in Sources */, - D30E1B102B9AAC2000D78458 /* StreamPublisher.swift in Sources */, - D30E1B0E2B9AA30100D78458 /* CameraUtility.swift in Sources */, + D30E1B122B9D607E00D78458 /* Connection.swift in Sources */, + D30E1B1F2B9D666F00D78458 /* TestViewController.swift in Sources */, + D30E1B172B9D60B200D78458 /* RTSPConnection.swift in Sources */, + D30E1B152B9D60A400D78458 /* RTMPConnection.swift in Sources */, + D30E1B242B9D6B5F00D78458 /* BufferConverter.swift in Sources */, + D30E1B1D2B9D639800D78458 /* CameraSource.swift in Sources */, + D30E1B2A2B9DEBFA00D78458 /* Source.swift in Sources */, ); 
runOnlyForDeploymentPostprocessing = 0; }; diff --git a/live-demo/Base.lproj/Main.storyboard b/live-demo/Base.lproj/Main.storyboard index 6690765..7a8d4f2 100644 --- a/live-demo/Base.lproj/Main.storyboard +++ b/live-demo/Base.lproj/Main.storyboard @@ -1,5 +1,5 @@ - + @@ -9,94 +9,113 @@ - - + + - - + + - + - + - + + + + + + + + - + - - + + - + - - - - - - - + + + + + + + - - + - - - - - - - - - + + + + + + + + + - - - - + + + + - + - + + + + + diff --git a/live-demo/Controllers/LiveFeedController.swift b/live-demo/Controllers/LiveFeedController.swift deleted file mode 100644 index a41e967..0000000 --- a/live-demo/Controllers/LiveFeedController.swift +++ /dev/null @@ -1,101 +0,0 @@ -// -// File.swift -// live-demo -// -// Created by xkal on 8/3/2024. -// - -import UIKit -import AVFoundation -import ffmpegkit - -enum RecState { - case RequestRecording - case RequestStop - case Recording - case Normal -} - -class LiveFeedController: UIViewController, StreamPublisherDelegate { - - - @IBOutlet weak var actionBtn: UIControl! - let camera = CameraUtility(useAudioEngine: true) - let publisher = StreamPublisher() - let url = "rtmp://192.168.1.100:1935/mystream" - @IBOutlet weak var fpsLabel: UILabel! - @IBOutlet weak var videoStsLabel: UILabel! - @IBOutlet weak var audioStsLabel: UILabel! 
- - - var recState: RecState = .Normal { - willSet { - print(newValue) - if newValue == .RequestRecording { - initLoadingActionBtn() - publisher.publish(url: url) - } else if newValue == .Recording { - initStopActionBtn() - } else if newValue == .RequestStop { - initLoadingActionBtn() - publisher.stop() - } else { - initStartActionBtn() - } - } - } - - override func viewDidLoad() { - super.viewDidLoad() - recState = .Normal - camera.attach(view: self.view) - publisher.attach(mediaUtil: camera) - publisher.delegate = self - } - - - func initStartActionBtn() { - actionBtn.layer.opacity = 1 - actionBtn.layer.cornerRadius = 25 - actionBtn.layer.masksToBounds = true - actionBtn.isEnabled = true - } - - func initLoadingActionBtn() { - actionBtn.layer.opacity = 0.5 - actionBtn.isEnabled = false - } - - func initStopActionBtn() { - actionBtn.layer.opacity = 1 - actionBtn.layer.cornerRadius = 5 - actionBtn.layer.masksToBounds = false - actionBtn.isEnabled = true - } - - @IBAction func onActionTapped(_ sender: Any) { - if recState == .Normal { - recState = .RequestRecording - } else if recState == .Recording { - recState = .RequestStop - } - } - - func onStats(stats: Statistics) { -// print("\(stats.getSize()) \(stats.getTime()) \(stats.getSpeed()) \(stats.getBitrate()) \(stats.getVideoFps()) \(stats.getVideoFrameNumber()) \(stats.getVideoQuality())") - self.fpsLabel.text = "FPS: \(stats.getVideoFps())" - } - - func didVideoRecordingStatusChanged(isVideoRecording: Bool) { - if isVideoRecording { - self.recState = .Recording - } else { - self.recState = .Normal - } - self.videoStsLabel.text = "Video Recording: \(isVideoRecording)" - } - - func didAudioRecordingStatusChanged(isAudioRecording: Bool) { - self.audioStsLabel.text = "Audio Recording: \(isAudioRecording)" - } -} diff --git a/live-demo/Controllers/TestViewController.swift b/live-demo/Controllers/TestViewController.swift new file mode 100644 index 0000000..73fcf5a --- /dev/null +++ 
b/live-demo/Controllers/TestViewController.swift @@ -0,0 +1,84 @@ +// +// TestViewController.swift +// live-demo +// +// Created by xkal on 10/3/2024. +// + +import UIKit + +class TestViewController: UIViewController, FFLiveKitDelegate { + + @IBOutlet weak var actionBtn: UIControl! + + @IBOutlet weak var fpsLabel: UILabel! + @IBOutlet weak var audioRecLabel: UILabel! + @IBOutlet weak var videoRecLabel: UILabel! + + let cameraSource = CameraSource(position: .front, preset: .hd1280x720) + let microphoneSource = MicrophoneSource() + let fileSource = FileSource(filetype: "rtsp", url: "rtsp://192.168.1.100:8554/mystream1") + let ffLiveKit = FFLiveKit() + var isRecording = false + + override func viewDidLoad() { + super.viewDidLoad() + try? ffLiveKit.connect(connection: RTMPConnection(baseUrl: "rtmp://192.168.1.100:1935")) + ffLiveKit.addSource(camera: cameraSource, microphone: microphoneSource, file: nil) + cameraSource.startPreview(previewView: self.view) + ffLiveKit.prepare(delegate: self) + initStartActionBtn() + } + + func FFmpegUtils(didChange status: RecordingState) { + print(status) + if status == .RequestRecording { + initLoadingActionBtn() + } else if status == .Recording { + isRecording = true + initStopActionBtn() + } else if status == .RequestStop { + initLoadingActionBtn() + } else { + isRecording = false + initStartActionBtn() + } + } + + func FFmpegUtils(onStats stats: FFStat) { + self.fpsLabel.text = "FPS: \(stats.fps)" + self.videoRecLabel.text = "Video Recording: \(stats.isVideoRecording)" + self.audioRecLabel.text = "Audio Recording: \(stats.isAudioRecording)" + } + + @IBAction func onTap(_ sender: Any) { + if !isRecording { + try? 
ffLiveKit.publish(name: "mystream") + } else { + ffLiveKit.stop() + } + } + + @IBAction func onCameraSwitch(_ sender: Any) { + cameraSource.switchCamera() + } + + func initStartActionBtn() { + actionBtn.layer.opacity = 1 + actionBtn.layer.cornerRadius = 25 + actionBtn.layer.masksToBounds = true + actionBtn.isEnabled = true + } + + func initLoadingActionBtn() { + actionBtn.layer.opacity = 0.5 + actionBtn.isEnabled = false + } + + func initStopActionBtn() { + actionBtn.layer.opacity = 1 + actionBtn.layer.cornerRadius = 5 + actionBtn.layer.masksToBounds = false + actionBtn.isEnabled = true + } +} diff --git a/live-demo/Source/FFLiveKit.swift b/live-demo/Source/FFLiveKit.swift new file mode 100644 index 0000000..eb5db25 --- /dev/null +++ b/live-demo/Source/FFLiveKit.swift @@ -0,0 +1,88 @@ +// +// FFLiveKit.swift +// live-demo +// +// Created by xkal on 10/3/2024. +// + +import Foundation + +enum FFLiveKitError: Error { + case NotInitialized + case EmptyUrl + case IOError(message: String) +} + +protocol FFLiveKitDelegate: FFmpegUtilsDelegate { + +} + +class FFLiveKit { + + private var connection: Connection? + private var cameraSource: CameraSource? + private var microphoneSource: MicrophoneSource? + private var fileSource: FileSource? + private var url = "" + var ffmpegUtil: FFmpegUtils? + private var delegate: FFLiveKitDelegate? + + + func connect(connection: Connection) throws { + /// compute url + if connection.baseUrl.isEmpty { + throw FFLiveKitError.EmptyUrl + } + self.connection = connection + } + + func prepare(delegate: FFLiveKitDelegate?) { + self.delegate = delegate + ffmpegUtil = FFmpegUtils(outputFormat: connection!.fileType, url: connection!.baseUrl, options: FFmpegOptions( + inputVideoFileType: fileSource != nil ? (fileSource?.type ?? "") : (cameraSource?.type ?? ""), + inputVideoPixelFormat: "bgra", + inputVideoSize: cameraSource != nil ? 
(cameraSource!.getDimensions().0, cameraSource!.getDimensions().1) : (0, 0), + inputAudioFileType: microphoneSource?.type ?? "", + inputAudioRate: 48000, + inputAudioChannel: 1, + inputAudioItsOffset: -5, + outputVideoFramerate: 30, + outputVideoCodec: "h264", + outputVideoPixelFormat: "bgra", + outputVideoSize: (360, 640), + outputVideoBitrate: "640k", + outputAudioBitrate: "64k", + outputAudioCodec: "aac", inputFilePath: fileSource?.path ?? ""), delegate: delegate) + /// delegate + cameraSource?.delegate = ffmpegUtil + microphoneSource?.delegate = ffmpegUtil + } + + func addSource(camera: CameraSource?, microphone: MicrophoneSource?, file: FileSource?) { + self.cameraSource = camera + self.microphoneSource = microphone + self.fileSource = file + + } + + func publish(name: String?) throws { + guard let connection = self.connection else { + throw FFLiveKitError.NotInitialized + } + self.url = connection.baseUrl + "/" + (name ?? "") + /// start + cameraSource?.start() + do { + try microphoneSource?.start() + } catch { + throw FFLiveKitError.IOError(message: error.localizedDescription) + } + ffmpegUtil?.start(videoRec: self.cameraSource != nil, audioRec: self.microphoneSource != nil, fileRec: self.fileSource != nil, streamName: name) + } + + func stop() { + cameraSource?.stop() + microphoneSource?.stop() + ffmpegUtil?.stop() + } +} diff --git a/live-demo/Source/IO/CameraSource.swift b/live-demo/Source/IO/CameraSource.swift new file mode 100644 index 0000000..aa9ebbc --- /dev/null +++ b/live-demo/Source/IO/CameraSource.swift @@ -0,0 +1,164 @@ +// +// CameraSource.swift +// live-demo +// +// Created by xkal on 10/3/2024. 
+// + +import AVFoundation +import UIKit + +protocol CameraSourceDelegate { + func _CameraSource(onData: Data) + func _CameraSource(switchStarted: Bool) + func _CameraSource(switchEnded: Bool) +} + +class CameraSource: Source, AVCaptureVideoDataOutputSampleBufferDelegate { + + let videoOutput = AVCaptureVideoDataOutput(); + private let previewLayer = AVCaptureVideoPreviewLayer() + var session: AVCaptureSession? + private var dimensions: (Int32, Int32) = (0 , 0) + let backgroundVideoQueue = DispatchQueue.global(qos: .background) + private var running = false + public var delegate: CameraSourceDelegate? + var currentCameraPosition: AVCaptureDevice.Position? + + init(position: AVCaptureDevice.Position, preset: AVCaptureSession.Preset = .hd1920x1080) { + super.init(fileType: "rawvideo") + session = setupCaptureSession(position: position, preset: preset) + ///set delegate + videoOutput.setSampleBufferDelegate(self, queue: backgroundVideoQueue) + DispatchQueue.global().async { + /// Set the session to output video frames + self.session?.startRunning() + } + } + + func switchCamera() { + self.delegate?._CameraSource(switchStarted: true) + session?.beginConfiguration() + // Remove existing input + if let currentInput = session?.inputs.first as? AVCaptureInput { + session?.removeInput(currentInput) + } + // Toggle camera position + let position: AVCaptureDevice.Position = currentCameraPosition == .back ? .front : .back + self.currentCameraPosition = position + // Set up new video input + guard let videoDevice = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: position) else { + print("Failed to get AVCaptureDevice for video input.") + return + } + do { + let videoInput = try AVCaptureDeviceInput(device: videoDevice) + if session?.canAddInput(videoInput) ?? 
false { + session?.addInput(videoInput) + } else { + print("Failed to add video input to session.") + } + } catch { + print("Error creating AVCaptureDeviceInput: \(error.localizedDescription)") + } + session?.commitConfiguration() + self.delegate?._CameraSource(switchEnded: true) + } + + private func addCamera(session: AVCaptureSession, position: AVCaptureDevice.Position) -> AVCaptureDeviceInput? { + self.currentCameraPosition = position + do { + /// Check if the device has a camera + guard let camera = AVCaptureDevice.default(.builtInWideAngleCamera ,for: .video, position: position) else { + print("Camera not available") + return nil + } + /// Create input from the camera + let input = try AVCaptureDeviceInput(device: camera) + + if session.canAddInput(input) { + session.addInput(input) + } + return input + } catch { + print(error) + } + return nil + } + + private func setupCaptureSession(position: AVCaptureDevice.Position, preset: AVCaptureSession.Preset) -> AVCaptureSession? { + do { + // Create a session and add the input + let session = AVCaptureSession() + /// add camera to session input + let cameraInput = addCamera(session: session, position: position) + guard let camera = cameraInput?.device else { + return nil + } + /// add videooutput as session output + videoOutput.videoSettings = [(kCVPixelBufferPixelFormatTypeKey as String) : NSNumber(value: kCVPixelFormatType_32BGRA as UInt32),] + if session.canAddOutput(videoOutput) { + session.sessionPreset = preset + session.addOutput(videoOutput) + + } + + /// set framerate 30 + do { + try camera.lockForConfiguration() + let desiredFrameRate = CMTimeMake(value: 1, timescale: 30) + camera.activeVideoMinFrameDuration = desiredFrameRate + camera.activeVideoMaxFrameDuration = desiredFrameRate + camera.unlockForConfiguration() + + } catch { + print("Error accessing video device: \(error)") + } + /// just print the current resoultion + let activeFormat = camera.activeFormat.formatDescription + let dimensions = 
CMVideoFormatDescriptionGetDimensions(activeFormat) + let width = dimensions.width + let height = dimensions.height + print("Resolution: \(width) x \(height)") + self.dimensions = (width , height) + + return session + } catch { + print("Error setting up AVCaptureDeviceInput: \(error)") + } + } + + func getDimensions() -> (Int, Int) { + return (Int(self.dimensions.0), Int(self.dimensions.1)) + } + + func startPreview(previewView: UIView?) { + /// Set the preview layer to display the camera feed + if let view = previewView { + DispatchQueue.main.async { + self.previewLayer.session = self.session + self.previewLayer.videoGravity = .resizeAspectFill + /// Add the preview layer to your view's layer + view.layer.insertSublayer(self.previewLayer, at: 0) + /// Optional: Adjust the frame of the preview layer + self.previewLayer.frame = view.layer.bounds + } + } + } + + func start() { + self.running = true + } + + func stop() { + self.running = false + } + + func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) { + if output is AVCaptureVideoDataOutput { + if running, let data = BufferConverter.extractBGRAData(from: sampleBuffer) { + self.delegate?._CameraSource(onData: data) + } + } + } +} diff --git a/live-demo/Source/IO/FileSource.swift b/live-demo/Source/IO/FileSource.swift new file mode 100644 index 0000000..d978a22 --- /dev/null +++ b/live-demo/Source/IO/FileSource.swift @@ -0,0 +1,17 @@ +// +// FileSource.swift +// live-demo +// +// Created by xkal on 11/3/2024. 
+// + +import Foundation + +class FileSource: Source { + let path: String + + init(filetype: String, url: String) { + self.path = url + super.init(fileType: filetype) + } +} diff --git a/live-demo/Source/IO/MicrophoneSource.swift b/live-demo/Source/IO/MicrophoneSource.swift new file mode 100644 index 0000000..b510f46 --- /dev/null +++ b/live-demo/Source/IO/MicrophoneSource.swift @@ -0,0 +1,63 @@ +// +// MicrophoneSource.swift +// live-demo +// +// Created by xkal on 10/3/2024. +// + +import AVFoundation + +protocol MicrophoneSourceDelegate { + func _MicrophoneSource(onData: Data) +} + +class MicrophoneSource: Source { + + private var audioEngine: AVAudioEngine? + let backgroundAudioQueue = DispatchQueue.global(qos: .background) + var delegate: MicrophoneSourceDelegate? + + init() { + super.init(fileType: "s16le") + setupSession() + setupAudioEngine() + } + + private func setupSession() { + /// Start the capture session + do { + try AVAudioSession.sharedInstance().setCategory(.playAndRecord, mode: .videoChat, options: [.allowAirPlay, .allowBluetooth]) + try AVAudioSession.sharedInstance().setPreferredSampleRate(48000) // Set your preferred sample rate here + try AVAudioSession.sharedInstance().setActive(true) + } catch { + print("Failed to set audio session settings: \(error.localizedDescription)") + } + } + + private func setupAudioEngine() { + audioEngine = AVAudioEngine() + let inputNode = audioEngine!.inputNode + let defaultFormat = AVAudioFormat(commonFormat: .pcmFormatInt16, sampleRate: 48000, channels: 1, interleaved: false) + + inputNode.installTap(onBus: 0, bufferSize: 1024, format: defaultFormat) { buffer, time in + let audioData = BufferConverter.bufferToData(buffer: buffer) + self.backgroundAudioQueue.async { + self.delegate?._MicrophoneSource(onData: audioData) + } + } + + } + + func start() throws { + do { +// audioEngine?.prepare() + try audioEngine?.start() + } catch { + throw(error) + } + } + + func stop() { + audioEngine?.stop() + } +} diff --git 
a/live-demo/Source/IO/Source.swift b/live-demo/Source/IO/Source.swift new file mode 100644 index 0000000..2d66fee --- /dev/null +++ b/live-demo/Source/IO/Source.swift @@ -0,0 +1,17 @@ +// +// Source.swift +// live-demo +// +// Created by xkal on 11/3/2024. +// + +import Foundation + +class Source: NSObject { + + let type: String! + + init(fileType: String) { + type = fileType + } +} diff --git a/live-demo/Source/Net/Connection.swift b/live-demo/Source/Net/Connection.swift new file mode 100644 index 0000000..c7ae455 --- /dev/null +++ b/live-demo/Source/Net/Connection.swift @@ -0,0 +1,19 @@ +// +// Connection.swift +// live-demo +// +// Created by xkal on 10/3/2024. +// + +import Foundation + +class Connection { + + let fileType: String! + let baseUrl: String! + + init(fileType: String, baseUrl: String) { + self.fileType = fileType + self.baseUrl = baseUrl + } +} diff --git a/live-demo/Source/Net/RTMPConnection.swift b/live-demo/Source/Net/RTMPConnection.swift new file mode 100644 index 0000000..bb3b8dc --- /dev/null +++ b/live-demo/Source/Net/RTMPConnection.swift @@ -0,0 +1,14 @@ +// +// RTMPConnection.swift +// live-demo +// +// Created by xkal on 10/3/2024. +// + +import Foundation + +class RTMPConnection: Connection { + init(baseUrl: String) { + super.init(fileType: "flv", baseUrl: baseUrl) + } +} diff --git a/live-demo/Source/Net/RTSPConnection.swift b/live-demo/Source/Net/RTSPConnection.swift new file mode 100644 index 0000000..4578908 --- /dev/null +++ b/live-demo/Source/Net/RTSPConnection.swift @@ -0,0 +1,14 @@ +// +// RTSPConnection.swift +// live-demo +// +// Created by xkal on 10/3/2024. 
//

// MARK: - live-demo/Source/Connection/RTSPConnection.swift

import Foundation

/// A `Connection` preconfigured for RTSP output (FFmpeg "rtsp" muxer).
class RTSPConnection: Connection {
    init(baseUrl: String) {
        super.init(fileType: "rtsp", baseUrl: baseUrl)
    }
}

// MARK: - live-demo/Source/Utils/BufferConverter.swift
//
// Created by xkal on 10/3/2024.

import AVFAudio

/// Helpers that convert capture buffers into raw `Data` blobs suitable for
/// writing to an FFmpeg named pipe.
class BufferConverter {

    /// Copies the first channel of a 16-bit PCM buffer into a `Data` blob.
    /// NOTE(review): assumes the buffer format is `.pcmFormatInt16` —
    /// `int16ChannelData` is force-unwrapped and any other format will crash;
    /// confirm against the microphone source configuration.
    class func bufferToData(buffer: AVAudioPCMBuffer) -> Data {
        let channelData = buffer.int16ChannelData![0]
        // Mono copy: frameLength samples of 2 bytes each (channel 0 only).
        let dataSize = Int(buffer.frameLength) * MemoryLayout<Int16>.size
        return Data(bytes: channelData, count: dataSize)
    }

    /// Copies the raw pixel bytes of a video sample buffer (configured as
    /// 32BGRA by the capture pipeline). Returns `nil` when the sample carries
    /// no image buffer or the buffer has no base address.
    class func extractBGRAData(from sampleBuffer: CMSampleBuffer) -> Data? {
        guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else {
            return nil
        }
        CVPixelBufferLockBaseAddress(pixelBuffer, .readOnly)
        defer {
            CVPixelBufferUnlockBaseAddress(pixelBuffer, .readOnly)
        }
        guard let baseAddress = CVPixelBufferGetBaseAddress(pixelBuffer) else {
            return nil
        }
        let height = CVPixelBufferGetHeight(pixelBuffer)
        let bytesPerRow = CVPixelBufferGetBytesPerRow(pixelBuffer)
        // Copy bytesPerRow * height bytes; bytesPerRow may include row padding.
        return Data(bytes: baseAddress, count: bytesPerRow * height)
    }

    /// Returns a zero-filled buffer used as "blank" BGRA frames while the app
    /// is in the background.
    /// NOTE(review): allocates TWICE the size of a single width*height*4 frame
    /// (i.e. two black frames). Kept as-is because `FFmpegUtils.handleFeed`'s
    /// flush threshold depends on this sizing — confirm before changing.
    class func createEmptyRGBAData(width: Int, height: Int) -> Data {
        let bytesPerPixel = 4 // BGRA, 8 bits per channel
        let bytesPerRow = width * bytesPerPixel
        let totalBytes = height * bytesPerRow
        return Data(count: totalBytes * 2)
    }
}

// MARK: - live-demo/Source/Utils/FFmpegUtils.swift
//
// Created by xkal on 10/3/2024.

import AVFoundation
import ffmpegkit

/// Which inputs the current FFmpeg session consumes.
enum RecordingType {
    case Microphone
    case Camera
    case Camera_Microphone
    case File
}

/// Immutable snapshot of an FFmpeg `Statistics` callback, plus the locally
/// derived audio/video recording flags at the time of the callback.
class FFStat {

    let bitrate: Double
    let size: Int
    let time: Double
    let speed: Double
    // NOTE(review): `rate` duplicates `bitrate` (both read getBitrate()) —
    // confirm whether a different statistic was intended.
    let rate: Double
    let fps: Float
    let quality: Float
    let frameNumber: Int32
    let sessionId: Int
    let isVideoRecording: Bool
    let isAudioRecording: Bool

    init(stat: Statistics, isVideoRecording: Bool, isAudioRecording: Bool) {
        bitrate = stat.getBitrate()
        size = stat.getSize()
        time = stat.getTime()
        speed = stat.getSpeed()
        rate = stat.getBitrate()
        fps = stat.getVideoFps()
        quality = stat.getVideoQuality()
        frameNumber = stat.getVideoFrameNumber()
        sessionId = stat.getSessionId()
        self.isAudioRecording = isAudioRecording
        self.isVideoRecording = isVideoRecording
    }
}

/// Lifecycle of a streaming session.
enum RecordingState {
    case RequestRecording  // session requested; FFmpeg starting up
    case Recording         // FFmpeg confirmed producing output
    case RequestStop       // teardown in progress
    case Normal            // idle
}

protocol FFmpegUtilsDelegate {
    func FFmpegUtils(didChange status: RecordingState)
    func FFmpegUtils(onStats stats: FFStat)
}

/// All FFmpeg command-line knobs for one session; `input*` describe the raw
/// pipe feeds, `output*` describe the encoded stream.
struct FFmpegOptions {
    /// input settings
    var inputVideoFileType: String
    var inputVideoPixelFormat: String
    var inputVideoSize: (Int, Int)
    var inputAudioFileType: String
    var inputAudioRate: Int
    var inputAudioChannel: Int
    var inputAudioItsOffset: Int
    var outputVideoFramerate: Int
    var outputVideoCodec: String
    var outputVideoPixelFormat: String
    var outputVideoSize: (Int, Int)
    var outputVideoBitrate: String
    var outputAudioBitrate: String
    var outputAudioCodec: String
    var inputFilePath: String
}

/// Drives an FFmpegKit session fed through named pipes: camera frames and
/// microphone PCM are written into the pipes while FFmpeg encodes and
/// publishes them to `url`. Also survives backgrounding by feeding blank
/// frames while the camera is interrupted.
class FFmpegUtils: NSObject, CameraSourceDelegate, MicrophoneSourceDelegate {

    var audioPipe: String?
    var videoPipe: String?

    var outputFormat = ""
    var baseUrl = ""
    var streamName: String?

    // NOTE(review): IUO kept for source compatibility; always assigned in init.
    let options: FFmpegOptions!

    /// Full publish URL: `baseUrl/streamName` when a stream name is set.
    var url: String {
        get {
            if streamName != nil {
                return "\(baseUrl)/\(streamName!)"
            } else {
                return baseUrl
            }
        }
    }

    var running = false
    var isInBackground = false
    var isVideoRecording = false
    var isAudioRecording = false

    private var videoTimer: Timer?
    /// Pre-allocated black BGRA frames fed while backgrounded.
    private var blankFrames: Data?
    private var videoFileDescriptor: Int32!
    private var audioFileDescriptor: Int32!

    var recordingType = RecordingType.Camera_Microphone

    /// threads
    private let background = DispatchQueue.global(qos: .background)
    private let videoFeedThread = DispatchQueue.global(qos: .background)

    /// buffers and locks
    private let videoBufferLock = NSLock()
    private var videoDataBuffer = Data()

    // NOTE(review): strong delegate reference; protocol is not class-bound so
    // `weak` is unavailable — confirm ownership does not create a cycle.
    private var delegate: FFmpegUtilsDelegate?

    init(outputFormat: String, url: String, options: FFmpegOptions, delegate: FFmpegUtilsDelegate?) {
        self.options = options
        super.init()
        self.outputFormat = outputFormat
        self.baseUrl = url
        self.delegate = delegate
        FFmpegKitConfig.enableLogCallback({ log in
            if let log = log {
                print(log.getMessage()!)
            }
        })
        registerForInterruption()
        self.recordingState = .Normal
    }

    /// Observes capture-session interruption notifications so streaming can
    /// continue (with blank frames) while the app is backgrounded.
    func registerForInterruption() {
        NotificationCenter.default.addObserver(self, selector: #selector(sessionRuntimeError), name: .AVCaptureSessionRuntimeError, object: nil)
        NotificationCenter.default.addObserver(self, selector: #selector(sessionWasInterrupted), name: .AVCaptureSessionWasInterrupted, object: nil)
        NotificationCenter.default.addObserver(self, selector: #selector(sessionInterruptionEnded), name: .AVCaptureSessionInterruptionEnded, object: nil)
    }

    /// Logs AVCaptureSession runtime errors.
    @objc func sessionRuntimeError(notification: Notification) {
        if let error = notification.userInfo?[AVCaptureSessionErrorKey] as? Error {
            print("AVCaptureSession runtime error: \(error.localizedDescription)")
        }
    }

    /// On interruption, switches to background mode and allocates blank frames.
    @objc func sessionWasInterrupted(notification: Notification) {
        if let reasonValue = notification.userInfo?[AVCaptureSessionInterruptionReasonKey] as? Int,
           let reason = AVCaptureSession.InterruptionReason(rawValue: reasonValue) {
            print("AVCaptureSession was interrupted. Reason: \(reason)")
            // reasonValue == 1 — presumably .videoDeviceNotAvailableInBackground;
            // TODO confirm and compare against the enum case instead of a raw value.
            if reasonValue == 1 {
                blankFrames = BufferConverter.createEmptyRGBAData(width: 1920, height: 1080)
                isInBackground = true
            }
        }
    }

    /// Restores foreground mode and drops any stale buffered frames.
    @objc func sessionInterruptionEnded(notification: Notification) {
        print("AVCaptureSession interruption ended.")
        isInBackground = false
        blankFrames = nil
        clearVideoBuffer()
    }

    deinit {
        NotificationCenter.default.removeObserver(self)
        videoDataBuffer.removeAll()
    }

    /// State machine: side effects run in `willSet`, before the stored value
    /// changes; the delegate is notified on the main queue.
    var recordingState: RecordingState = .Normal {
        willSet {
            DispatchQueue.main.async {
                self.delegate?.FFmpegUtils(didChange: newValue)
            }
            switch newValue {
            case .Normal:
                running = false
            case .RequestRecording:
                clearVideoBuffer()
                running = true
                /// initialize pipes
                createPipes()
                background.async {
                    if self.recordingType == .Camera_Microphone {
                        self.executeVideo_Audio()
                    } else if self.recordingType == .Camera {
                        self.executeVideoOnly()
                    } else if self.recordingType == .Microphone {
                        self.executeAudioOnly()
                    } else if self.recordingType == .File {
                        self.executeFile()
                    }
                }
                startTimer()
            case .Recording:
                running = true
            case .RequestStop:
                running = false
                stopTimer()
                closePipes()
                clearVideoBuffer()
                FFmpegKit.cancel()
                // Cleared again deliberately: camera callbacks may append
                // between the first clear and the cancel completing.
                clearVideoBuffer()
                DispatchQueue.main.asyncAfter(deadline: .now() + 0.5, execute: {
                    self.recordingState = .Normal
                })
            }
        }
    }

    /// Starts a session. NOTE(review): with the default `videoRec`/`audioRec`
    /// of `true`, the `fileRec` branch is unreachable unless both are
    /// explicitly passed `false` — confirm intended precedence.
    func start(videoRec: Bool = true, audioRec: Bool = true, fileRec: Bool, streamName: String?) {
        self.streamName = streamName
        if videoRec && audioRec {
            self.recordingType = .Camera_Microphone
        } else if videoRec {
            self.recordingType = .Camera
        } else if audioRec {
            self.recordingType = .Microphone
        } else if fileRec {
            self.recordingType = .File
        }
        recordingState = .RequestRecording
    }

    func stop() {
        recordingState = .RequestStop
    }

    private func stopTimer() {
        videoTimer?.invalidate()
        videoTimer = nil
    }

    /// Runs a 5 ms feeder timer on a background run loop.
    private func startTimer() {
        DispatchQueue.global().async {
            self.videoTimer = Timer.scheduledTimer(timeInterval: 0.005, target: self, selector: #selector(self.handleFeed), userInfo: nil, repeats: true)
            RunLoop.current.add(self.videoTimer!, forMode: .default)
            RunLoop.current.run()
        }
    }

    /// Timer tick: in the background, keep the pipe alive with blank frames and
    /// flush in ~10 MB chunks; in the foreground, drain the camera buffer.
    @objc func handleFeed() {
        if isInBackground {
            // Defensive guard (was a force unwrap): skip the tick if blank
            // frames were never allocated for this interruption.
            guard let frames = blankFrames else { return }
            self.appendToVideoBuffer(data: frames)
            if self.videoDataBuffer.count > 10 * 1000000 {
                print("Flushing....")
                self.feedToVideoPipe()
            }
        } else {
            feedToVideoPipe()
        }
    }

    /// Registers the FFmpeg pipes and holds them open read-write so FFmpeg
    /// does not stop when a writer closes its end (EOF).
    private func createPipes() {
        videoPipe = FFmpegKitConfig.registerNewFFmpegPipe()
        audioPipe = FFmpegKitConfig.registerNewFFmpegPipe()
        videoFileDescriptor = open(videoPipe!, O_RDWR)
        audioFileDescriptor = open(audioPipe!, O_RDWR)
    }

    private func closePipes() {
        if videoFileDescriptor != nil {
            close(videoFileDescriptor)
        }
        if audioFileDescriptor != nil {
            close(audioFileDescriptor)
        }
        FFmpegKitConfig.closeFFmpegPipe(videoPipe)
        FFmpegKitConfig.closeFFmpegPipe(audioPipe)
    }

    /// Appends a frame to the staging buffer, dropping everything past a
    /// 100 MB cap so a stalled pipe cannot exhaust memory.
    func appendToVideoBuffer(data: Data) {
        videoFeedThread.sync {
            self.videoBufferLock.lock()
            /// Max bytes buffer 100MB
            if self.videoDataBuffer.count > (100 * 1000000) {
                self.videoDataBuffer.removeAll()
            }
            self.videoDataBuffer.append(data)
            self.videoBufferLock.unlock()
        }
    }

    /// Blocking write of raw frames into the video pipe.
    func writeToVideoPipe(data: Data) {
        if let currentPipe = self.videoPipe, let fileHandle = try? FileHandle(forWritingTo: URL(fileURLWithPath: currentPipe)) {
            if #available(iOS 13.4, *) {
                try? fileHandle.write(contentsOf: data)
            } else {
                fileHandle.write(data)
            }
            fileHandle.closeFile()
            print("Video written successfully")
        } else {
            print("Failed to open video file handle for writing")
        }
    }

    /// Blocking write of raw PCM into the audio pipe.
    func writeToAudioPipe(data: Data) {
        if let currentPipe = self.audioPipe, let fileHandle = try? FileHandle(forWritingTo: URL(fileURLWithPath: currentPipe)) {
            if #available(iOS 13.4, *) {
                try? fileHandle.write(contentsOf: data)
            } else {
                fileHandle.write(data)
            }
            fileHandle.closeFile()
            print("Audio written successfully")
        } else {
            print("Failed to open audio file handle for writing")
        }
    }

    /// Drains the staging buffer into the video pipe under the buffer lock.
    @objc func feedToVideoPipe() {
        self.videoBufferLock.lock()
        if !self.videoDataBuffer.isEmpty {
            self.writeToVideoPipe(data: self.videoDataBuffer)
            self.videoDataBuffer.removeAll()
        }
        self.videoBufferLock.unlock()
    }

    func clearVideoBuffer() {
        self.videoBufferLock.lock()
        self.videoDataBuffer.removeAll()
        self.videoBufferLock.unlock()
    }

    // MARK: - FFmpeg command assembly

    private func generateVideoInputCommand() -> String {
        return "-f \(options.inputVideoFileType) -pixel_format \(options.inputVideoPixelFormat) -video_size \(options.inputVideoSize.0)x\(options.inputVideoSize.1) -framerate 30 -i \(videoPipe!)"
    }

    private func generateAudioInputCommand() -> String {
        return "-f \(options.inputAudioFileType) -ar \(options.inputAudioRate) -ac \(options.inputAudioChannel) -itsoffset \(options.inputAudioItsOffset) -i \(audioPipe!)"
    }

    private func generateVideoOutputCommand() -> String {
        return "-framerate \(options.outputVideoFramerate) -pixel_format \(options.outputVideoPixelFormat) -c:v \(options.outputVideoCodec) -vf \"transpose=1,scale=\(options.outputVideoSize.0):\(options.outputVideoSize.1)\" -b:v \(options.outputVideoBitrate)"
    }

    private func generateAudioOutputCommand() -> String {
        return "-c:a \(options.outputAudioCodec) -b:a \(options.outputAudioBitrate)"
    }

    private func executeVideoOnly() {
        let cmd = "-re \(generateVideoInputCommand()) \(generateVideoOutputCommand()) -f \(outputFormat) \(url)"
        execute(cmd: cmd)
    }

    private func executeAudioOnly() {
        let cmd = "-re \(generateAudioInputCommand()) -vn \(generateAudioOutputCommand()) -f \(outputFormat) \(url)"
        execute(cmd: cmd)
    }

    private func generateFileInputCommand() -> String {
        return "-f \(options.inputVideoFileType) -i \(options.inputFilePath)"
    }

    private func generateFileOutputCommand() -> String {
        return "-c:v \(options.outputVideoCodec) -c:a \(options.outputAudioCodec)"
    }

    private func executeVideo_Audio() {
        let cmd = "-re \(generateVideoInputCommand()) \(generateAudioInputCommand()) \(generateVideoOutputCommand()) \(generateAudioOutputCommand()) -vsync 1 -f \(outputFormat) \(url)"

        execute(cmd: cmd)
    }

    private func executeFile() {
        let cmd = "-re \(generateFileInputCommand()) \(generateFileOutputCommand()) -f \(outputFormat) \(url)"
        execute(cmd: cmd)
    }

    /// Launches FFmpeg asynchronously. Statistics callbacks derive the
    /// video/audio "actually recording" flags: video fps > 0 means video is
    /// flowing; growing size with fps == 0 means audio-only output.
    private func execute(cmd: String) {
        print("Executing \(cmd)..........")
        FFmpegKit.executeAsync(cmd, withCompleteCallback: { session in
            self.stop()
        }, withLogCallback: nil, withStatisticsCallback: { stats in
            guard let stats = stats else {
                return
            }
            /// For Video
            if stats.getVideoFps() > 0 {
                self.isVideoRecording = true
            } else if stats.getSize() > 0, stats.getVideoFps() == 0 {
                self.isAudioRecording = true
            }
            // Promote to .Recording once every requested input is confirmed.
            if self.recordingState == .RequestRecording {
                if self.recordingType == .Camera_Microphone {
                    if self.isVideoRecording && self.isAudioRecording {
                        self.recordingState = .Recording
                    }
                } else if self.recordingType == .Microphone {
                    if self.isAudioRecording {
                        self.recordingState = .Recording
                    }
                } else if self.recordingType == .Camera {
                    if self.isVideoRecording {
                        self.recordingState = .Recording
                    }
                } else if self.recordingType == .File {
                    self.recordingState = .Recording
                }
            }
            DispatchQueue.main.async {
                self.delegate?.FFmpegUtils(onStats: FFStat(stat: stats, isVideoRecording: self.isVideoRecording, isAudioRecording: self.isAudioRecording))
            }
        })
    }

    // MARK: - CameraSourceDelegate / MicrophoneSourceDelegate

    /// Foreground camera frames: written straight to the pipe until FFmpeg
    /// confirms video is flowing, then staged through the buffer. Background
    /// frames are injected by `handleFeed()` instead.
    func _CameraSource(onData: Data) {
        guard !isInBackground, running else { return }
        if !isVideoRecording {
            writeToVideoPipe(data: onData)
        } else {
            appendToVideoBuffer(data: onData)
        }
    }

    /// Pauses feeding and drops stale frames while the camera switches.
    func _CameraSource(switchStarted: Bool) {
        running = false
        clearVideoBuffer()
    }

    func _CameraSource(switchEnded: Bool) {
        running = true
    }

    func _MicrophoneSource(onData: Data) {
        if self.running {
            self.writeToAudioPipe(data: onData)
        }
    }
}

// MARK: - Deleted in this change: live-demo/Utils/CameraUtility.swift
-// - -import AVFoundation -import UIKit - -protocol AudioVideoDelegate: AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAudioDataOutputSampleBufferDelegate { - func onAudioEngine(data didReceived: Data) -} - -class CameraUtility { - - private let previewLayer = AVCaptureVideoPreviewLayer() - let videoOutput = AVCaptureVideoDataOutput(); - let audioOutput = AVCaptureAudioDataOutput(); - let backgroundVideoQueue = DispatchQueue.global(qos: .background) - let backgroundAudioQueue = DispatchQueue.global(qos: .background) - private var sessionOutputDelegate: AudioVideoDelegate? - private var useAudioEngine = false - private var audioEngine: AVAudioEngine? - - init(useAudioEngine: Bool = false) { - self.useAudioEngine = useAudioEngine - if useAudioEngine { - setupAudioEngine() - } - - } - - - func attach(view: UIView) { - let session = self.setupCaptureSession(view: view) - DispatchQueue.global().async { - /// Set the session to output video frames - session?.startRunning() - } - } - - func setDelegate(delegate: AudioVideoDelegate) { - self.sessionOutputDelegate = delegate - - videoOutput.setSampleBufferDelegate(self.sessionOutputDelegate, queue: backgroundVideoQueue) - audioOutput.setSampleBufferDelegate(self.sessionOutputDelegate, queue: backgroundAudioQueue) - } - - private func addCamera(session: AVCaptureSession) -> AVCaptureDeviceInput? { - do { - /// Check if the device has a camera - guard let camera = AVCaptureDevice.default(for: .video) else { - print("Camera not available") - return nil - } - /// Create input from the camera - let input = try AVCaptureDeviceInput(device: camera) - - if session.canAddInput(input) { - session.addInput(input) - } - return input - } catch { - print(error) - } - return nil - } - - private func addMicrophone(session: AVCaptureSession) -> AVCaptureDeviceInput? 
{ - do { - // Check if the device has a microphone - guard let mic = AVCaptureDevice.default(for: .audio) else { - print("Microphone not available") - return nil - } - // Create input from the camera - let input = try AVCaptureDeviceInput(device: mic) - - if session.canAddInput(input) { - session.addInput(input) - } - return input - } catch { - print(error) - } - return nil - } - - private func setupCaptureSession(view: UIView) -> AVCaptureSession? { - do { - // Create a session and add the input - let session = AVCaptureSession() - /// add camera to session input - let cameraInput = addCamera(session: session) - guard let camera = cameraInput?.device else { - return nil - } - /// add videooutput as session output - videoOutput.videoSettings = [(kCVPixelBufferPixelFormatTypeKey as String) : NSNumber(value: kCVPixelFormatType_32BGRA as UInt32),] - if session.canAddOutput(videoOutput) { - session.addOutput(videoOutput) - - } - /// add microphone as session input - if !useAudioEngine { - let audioInput = addMicrophone(session: session) - /// add session output - if session.canAddOutput(audioOutput) { - session.addOutput(audioOutput) - } - } - /// Start the capture session - do { - try AVAudioSession.sharedInstance().setCategory(.playAndRecord, mode: .videoChat, options: [.allowAirPlay, .allowBluetooth]) - try AVAudioSession.sharedInstance().setPreferredSampleRate(48000) // Set your preferred sample rate here - try AVAudioSession.sharedInstance().setActive(true) - } catch { - print("Failed to set audio session settings: \(error.localizedDescription)") - return nil - } - /// Set the preview layer to display the camera feed - DispatchQueue.main.async { - self.previewLayer.session = session - self.previewLayer.videoGravity = .resizeAspectFill - /// Add the preview layer to your view's layer - view.layer.insertSublayer(self.previewLayer, at: 0) - /// Optional: Adjust the frame of the preview layer - self.previewLayer.frame = view.layer.bounds - } - - /// set framerate 30 - 
do { - try camera.lockForConfiguration() - - let desiredFrameRate = CMTimeMake(value: 1, timescale: 30) - camera.activeVideoMinFrameDuration = desiredFrameRate - camera.activeVideoMaxFrameDuration = desiredFrameRate - camera.unlockForConfiguration() - - } catch { - print("Error accessing video device: \(error)") - } - /// just print the current resoultion - let activeFormat = camera.activeFormat.formatDescription - let dimensions = CMVideoFormatDescriptionGetDimensions(activeFormat) - let width = dimensions.width - let height = dimensions.height - print("Resolution: \(width) x \(height)") - - return session - } catch { - print("Error setting up AVCaptureDeviceInput: \(error)") - } - } - - func startAudioCapture() { - if useAudioEngine { - do { - try audioEngine?.start() - } catch { - print("Error starting audio engine: \(error.localizedDescription)") - } - } - } - - func stopAudioCapture() { - if useAudioEngine { - audioEngine?.stop() - } - } - - private func setupAudioEngine() { - audioEngine = AVAudioEngine() - let inputNode = audioEngine!.inputNode - let format = AVAudioFormat(commonFormat: .pcmFormatInt16, sampleRate: 48000, channels: 1, interleaved: false) - inputNode.installTap(onBus: 0, bufferSize: 1024, format: format) { buffer, time in - let audioData = self.bufferToData(buffer: buffer) - self.backgroundAudioQueue.async { - self.sessionOutputDelegate?.onAudioEngine(data: audioData) - } - } - } - - private func bufferToData(buffer: AVAudioPCMBuffer) -> Data { - let channelData = buffer.int16ChannelData![0] - let dataSize = Int(buffer.frameLength) * MemoryLayout.size - let data = Data(bytes: channelData, count: dataSize) - return data - } -} diff --git a/live-demo/Utils/Helper.swift b/live-demo/Utils/Helper.swift deleted file mode 100644 index 7c5be9e..0000000 --- a/live-demo/Utils/Helper.swift +++ /dev/null @@ -1,24 +0,0 @@ -// -// Helper.swift -// live-demo -// -// Created by xkal on 7/3/2024. 
-// - -import Foundation -import CoreGraphics - -class Helper { - static func createEmptyRGBAData(width: Int, height: Int) -> Data { - let bytesPerPixel = 4 // Assuming BGRA format (8 bits per channel) - let bitsPerComponent = 8 - let bytesPerRow = width * bytesPerPixel - let totalBytes = height * bytesPerRow - - // Allocate a single Data object with the total size - var pixelData = Data(count: totalBytes * 2) - return pixelData - } - - -} diff --git a/live-demo/Utils/StreamPublisher.swift b/live-demo/Utils/StreamPublisher.swift deleted file mode 100644 index 23f05af..0000000 --- a/live-demo/Utils/StreamPublisher.swift +++ /dev/null @@ -1,391 +0,0 @@ -// -// StreamPublisher.swift -// live-demo -// -// Created by xkal on 8/3/2024. -// - - -import AVFoundation -import ffmpegkit - -protocol StreamPublisherDelegate { - func onStats(stats: Statistics) - func didVideoRecordingStatusChanged(isVideoRecording: Bool) - func didAudioRecordingStatusChanged(isAudioRecording: Bool) -} - -class StreamPublisher: NSObject, AudioVideoDelegate { - - private let background = DispatchQueue.global(qos: .background) - private let videoFeedThread = DispatchQueue.global(qos: .background) - private let audioFeedThread = DispatchQueue.global(qos: .background) - private var url: String? - private var running = false - private var videoPipe: String? - private var audioPipe: String? - private var videoFileDescriptor: Int32! - private var audioFileDescriptor: Int32! - private var videoTimer: Timer? - private var audioTimer: Timer? - private let videoBufferLock = NSLock() - private var videoDataBuffer = Data() - - private let audioBufferLock = NSLock() - private var audioDataBuffer = Data() - - private var cameraUtility: CameraUtility? - - var delegate: StreamPublisherDelegate? - - private var isVideoRecording = false - private var isAudioRecording = false - - private var isInBackground = false - - var blankFrames: Data? 
- - - override init () { - super.init() - initFFmpeg() - // Add observers for AVCaptureSession notifications - NotificationCenter.default.addObserver(self, selector: #selector(sessionRuntimeError), name: .AVCaptureSessionRuntimeError, object: nil) - NotificationCenter.default.addObserver(self, selector: #selector(sessionWasInterrupted), name: .AVCaptureSessionWasInterrupted, object: nil) - NotificationCenter.default.addObserver(self, selector: #selector(sessionInterruptionEnded), name: .AVCaptureSessionInterruptionEnded, object: nil) - } - - // Handle AVCaptureSession runtime error - @objc func sessionRuntimeError(notification: Notification) { - if let error = notification.userInfo?[AVCaptureSessionErrorKey] as? Error { - print("AVCaptureSession runtime error: \(error.localizedDescription)") - // Handle the error as needed - } - } - - // Handle AVCaptureSession interruption - @objc func sessionWasInterrupted(notification: Notification) { - if let reasonValue = notification.userInfo?[AVCaptureSessionInterruptionReasonKey] as? Int, - let reason = AVCaptureSession.InterruptionReason(rawValue: reasonValue) { - print("AVCaptureSession was interrupted. Reason: \(reason)") - // Handle the interruption as needed - if reasonValue == 1 { - blankFrames = Helper.createEmptyRGBAData(width: 1920, height: 1080) - isInBackground = true - } - } - } - - // Handle AVCaptureSession interruption ended - @objc func sessionInterruptionEnded(notification: Notification) { - print("AVCaptureSession interruption ended.") - isInBackground = false - blankFrames = nil - clearVideoBuffer() - } - - // Remove observers when the view controller is deallocated - deinit { - NotificationCenter.default.removeObserver(self) - } - - func attach(mediaUtil: CameraUtility) { - self.cameraUtility = mediaUtil - self.cameraUtility?.setDelegate(delegate: self) - } - - func initFFmpeg() { - FFmpegKitConfig.enableLogCallback({log in - if let log = log { - print(log.getMessage()!) 
- } - }) - } - - func publish(url: String) { - self.url = url - self.cameraUtility?.startAudioCapture() - print("Audio started") - self.running = true - // create a pipe for video - videoPipe = FFmpegKitConfig.registerNewFFmpegPipe() - audioPipe = FFmpegKitConfig.registerNewFFmpegPipe() - print("Pipes created") - // open the videopipe so that ffempg doesnot closes when the video pipe receives EOF - videoFileDescriptor = open(videoPipe!, O_RDWR) - audioFileDescriptor = open(audioPipe!, O_RDWR) - print("Pipes opened") - /// Start FFMPEG - background.async { - self.executeVideo_Audio() - } - startTimer() - } - - func appendToVideoBuffer(data: Data) { - videoFeedThread.sync { -// print("appending video___") - self.videoBufferLock.lock() - /// Max bytes buffer 100MB - if self.videoDataBuffer.count > (100 * 1000000) { - self.videoDataBuffer.removeAll() - } - self.videoDataBuffer.append(data) - self.videoBufferLock.unlock() - } - } - - func clearVideoBuffer() { - self.videoBufferLock.lock() - self.videoDataBuffer.removeAll() - self.videoBufferLock.unlock() - } - - func appendToAudioBuffer(data: Data) { - audioFeedThread.async { -// print("appending audio___") - self.audioBufferLock.lock() - /// Max bytes buffer 100MB - if self.audioDataBuffer.count > (100 * 1000000) { - self.audioDataBuffer.removeAll() - } - self.audioDataBuffer.append(data) - self.audioBufferLock.unlock() - } - print("Audio Buffer \(self.audioDataBuffer)") - } - - func stop() { - running = false - cameraUtility?.stopAudioCapture() - stopTimer() - closePipes() - stopFFmpeg() - self.audioDataBuffer.removeAll() - self.videoDataBuffer.removeAll() - self.isVideoRecording = false - self.isAudioRecording = false - DispatchQueue.main.async { - self.delegate?.didVideoRecordingStatusChanged(isVideoRecording: false) - self.delegate?.didAudioRecordingStatusChanged(isAudioRecording: false) - } - } - - func closePipes() { - if videoFileDescriptor != nil { - close(videoFileDescriptor) - } - if audioFileDescriptor != 
nil { - close(audioFileDescriptor) - } - FFmpegKitConfig.closeFFmpegPipe(videoPipe) - FFmpegKitConfig.closeFFmpegPipe(audioPipe) - } - - func stopFFmpeg() { - DispatchQueue.main.asyncAfter(deadline: .now() + 1, execute: { - FFmpegKit.cancel() - }) - } - - private func startTimer() { - DispatchQueue.global().async { - self.videoTimer = Timer.scheduledTimer(timeInterval: 0.005, target: self, selector: #selector(self.handleFeed), userInfo: nil, repeats: true) - RunLoop.current.add(self.videoTimer!, forMode: .default) - RunLoop.current.run() - } - } - - private func stopTimer() { - videoTimer?.invalidate() - audioTimer?.invalidate() - videoTimer = nil - audioTimer = nil - } - - - @objc func handleFeed() { - if isInBackground { - self.appendToVideoBuffer(data: self.blankFrames!) - if self.videoDataBuffer.count > 10*1000000 { - print("Flushing....") - self.feedToVideoPipe() - } - } else { - feedToVideoPipe() - } - } - - /// Not using anymore - @objc func mux() { - feedToVideoPipe() - feedToAudioPipe() - } - - @objc func feedToVideoPipe() { -// print("Feeding video") - self.videoBufferLock.lock() - // Feed video - if !self.videoDataBuffer.isEmpty { - self.writeToVideoPipe(data: self.videoDataBuffer) - self.videoDataBuffer.removeAll() - } - self.videoBufferLock.unlock() - } - - @objc func feedToAudioPipe() { - self.audioFeedThread.sync { -// print("Feeding audio") - self.audioBufferLock.lock() - // Feed audio - if !self.audioDataBuffer.isEmpty { - self.writeToAudioPipe(data: self.audioDataBuffer) - self.audioDataBuffer.removeAll() - } - self.audioBufferLock.unlock() - } - } - - private func executeVideoOnly() { - let cmd = "-f rawvideo -pixel_format bgra -video_size 1920x1080 -framerate 30 -i \(videoPipe!) -framerate 30 -pixel_format yuv420p -c:v h264 -an -vf \"transpose=1,scale=360:640\" -b:v 2M -f flv \(url!)" - execute(cmd: cmd) - } - - private func executeAudioOnly() { - let cmd = "-re -f s16le -ar 48000 -ac 1 -itsoffset -5 -i \(audioPipe!) 
-vn -c:a aac -b:a 64k -f flv \(url!)" - execute(cmd: cmd) - } - - private func executeVideo_Audio() { - let cmd = "-re -f rawvideo -pixel_format bgra -video_size 1920x1080 -framerate 30 -i \(videoPipe!) -f s16le -ar 48000 -ac 1 -itsoffset -5 -i \(audioPipe!) -framerate 30 -pixel_format yuv420p -c:v h264 -c:a aac -vf \"transpose=1,scale=360:640\" -b:v 640k -b:a 64k -vsync 1 -f flv \(url!)" - - execute(cmd: cmd) - } - - private func execute(cmd: String) { - print("Executing \(cmd)..........") - FFmpegKit.executeAsync(cmd, withCompleteCallback: {session in - self.stop() - }, withLogCallback: nil, withStatisticsCallback: {stats in - guard let stats = stats else { - return - } - /// For Video - if stats.getVideoFps() > 0 { - if self.isVideoRecording == false { - DispatchQueue.main.async { - self.delegate?.didVideoRecordingStatusChanged(isVideoRecording: true) - } - } - self.isVideoRecording = true - } else if stats.getSize() > 0, stats.getVideoFps() == 0 { - if self.isAudioRecording == false { - DispatchQueue.main.async { - self.delegate?.didAudioRecordingStatusChanged(isAudioRecording: true) - } - } - self.isAudioRecording = true - } - DispatchQueue.main.async { - self.delegate?.onStats(stats: stats) - } - }) - } - - func writeToVideoPipe(data: Data) { - if let currentPipe = self.videoPipe, let fileHandle = try? FileHandle(forWritingTo: URL(fileURLWithPath: currentPipe)) { -// print("writing to the data \(data)") - // Convert the message to data - if #available(iOS 13.4, *) { - try? fileHandle.write(contentsOf: data) - } else { - fileHandle.write(data) - } - fileHandle.closeFile() - print("Video written successfully") - } else { - print("Failed to open video file handle for writing") - } - } - - func writeToAudioPipe(data: Data) { - if let currentPipe = self.audioPipe, let fileHandle = try? FileHandle(forWritingTo: URL(fileURLWithPath: currentPipe)) { -// print("writing audio to the data \(data)") - // Convert the message to data - if #available(iOS 13.4, *) { - try? 
fileHandle.write(contentsOf: data) - } else { - fileHandle.write(data) - } - fileHandle.closeFile() - print("Audio written successfully") - } else { - print("Failed to open audio file handle for writing") - } - } - - func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) { - if output is AVCaptureVideoDataOutput { -// print("Video") - if !self.isInBackground, self.running, let data = isInBackground ? blankFrames : extractBGRAData(from: sampleBuffer) { - if !self.isVideoRecording { - self.writeToVideoPipe(data: data) - } else { - self.appendToVideoBuffer(data: data) - } - } - } else if output is AVCaptureAudioDataOutput { -// print("Audio") -// print(sampleBuffer) - if self.running, let data = convertCMSampleBufferToPCM16Data(sampleBuffer: sampleBuffer) { - if !self.isAudioRecording { - self.writeToAudioPipe(data: data) - } else { - self.appendToAudioBuffer(data: data) - } -// self.writeToAudioPipe(data: data) -// self.appendToAudioBuffer(data: data) - } - } - - } - - func convertCMSampleBufferToPCM16Data(sampleBuffer: CMSampleBuffer) -> Data? { - guard let blockBuffer = CMSampleBufferGetDataBuffer(sampleBuffer) else { - return nil - } - - guard let data = NSMutableData(length: CMBlockBufferGetDataLength(blockBuffer)) else { - return nil - } - - CMBlockBufferCopyDataBytes(blockBuffer, atOffset: 0, dataLength: CMBlockBufferGetDataLength(blockBuffer), destination: data.mutableBytes) - - return data as Data - } - - func extractBGRAData(from sampleBuffer: CMSampleBuffer) -> Data? 
{ - guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { - return nil - } - CVPixelBufferLockBaseAddress(pixelBuffer, .readOnly) - defer { - CVPixelBufferUnlockBaseAddress(pixelBuffer, .readOnly) - } - guard let baseAddress = CVPixelBufferGetBaseAddress(pixelBuffer) else { - return nil - } - let width = CVPixelBufferGetWidth(pixelBuffer) - let height = CVPixelBufferGetHeight(pixelBuffer) - let bytesPerRow = CVPixelBufferGetBytesPerRow(pixelBuffer) - let byteBuffer = UnsafeBufferPointer(start: baseAddress.assumingMemoryBound(to: UInt8.self), count: bytesPerRow * height) - let rawPointer = UnsafeRawPointer(byteBuffer.baseAddress!) - return Data(bytes: rawPointer, count: bytesPerRow * height) - } - - func onAudioEngine(data didReceived: Data) { - if self.running { - self.writeToAudioPipe(data: didReceived) - } - } -}