Creating RTMP onMetaData based on input data. #1304

Merged (1 commit, Sep 26, 2023)
3 changes: 3 additions & 0 deletions Sources/Codec/AudioCodec.swift
@@ -53,6 +53,9 @@ public class AudioCodec {
             }
         }
     }
+    var outputFormat: AVAudioFormat? {
+        return audioConverter?.outputFormat
+    }
     private var cursor: Int = 0
     private var inputBuffers: [AVAudioBuffer] = []
     private var outputBuffers: [AVAudioBuffer] = []
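The new read-only property surfaces the AVAudioConverter's negotiated output format. A minimal sketch of a module-internal caller (the logging function is illustrative, not part of this PR):

import AVFoundation

// Hypothetical consumer: inspect the encoder's negotiated output format.
// `AudioCodec` and `outputFormat` come from this PR; the rest is illustrative.
func logOutputFormat(of codec: AudioCodec) {
    guard let format = codec.outputFormat else {
        return // converter not created yet, so the format is unknown
    }
    print("encoding at \(format.sampleRate) Hz, \(format.channelCount) ch")
}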
2 changes: 1 addition & 1 deletion Sources/Codec/VTSessionMode.swift
@@ -37,7 +37,7 @@ enum VTSessionMode {
             }
             return session
         case .decompression:
-            guard let formatDescription = videoCodec.formatDescription else {
+            guard let formatDescription = videoCodec.outputFormat else {
                 videoCodec.delegate?.videoCodec(videoCodec, errorOccurred: .failedToCreate(status: kVTParameterErr))
                 return nil
             }
10 changes: 5 additions & 5 deletions Sources/Codec/VideoCodec.swift
@@ -62,12 +62,12 @@ public class VideoCodec {

     var lockQueue = DispatchQueue(label: "com.haishinkit.HaishinKit.VideoCodec.lock")
     var expectedFrameRate = IOMixer.defaultFrameRate
-    var formatDescription: CMFormatDescription? {
+    private(set) var outputFormat: CMFormatDescription? {
         didSet {
-            guard !CMFormatDescriptionEqual(formatDescription, otherFormatDescription: oldValue) else {
+            guard !CMFormatDescriptionEqual(outputFormat, otherFormatDescription: oldValue) else {
                 return
             }
-            delegate?.videoCodec(self, didOutput: formatDescription)
+            delegate?.videoCodec(self, didOutput: outputFormat)
         }
     }
     var needsSync: Atomic<Bool> = .init(true)
@@ -108,7 +108,7 @@ public class VideoCodec {
                 delegate?.videoCodec(self, errorOccurred: .failedToFlame(status: status))
                 return
             }
-            formatDescription = sampleBuffer.formatDescription
+            outputFormat = sampleBuffer.formatDescription
             delegate?.videoCodec(self, didOutput: sampleBuffer)
         }
     }
@@ -214,7 +214,7 @@ extension VideoCodec: Running {
             self.session = nil
             self.invalidateSession = true
             self.needsSync.mutate { $0 = true }
-            self.formatDescription = nil
+            self.outputFormat = nil
             #if os(iOS)
             NotificationCenter.default.removeObserver(self, name: AVAudioSession.interruptionNotification, object: nil)
             NotificationCenter.default.removeObserver(self, name: UIApplication.willEnterForegroundNotification, object: nil)
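The rename also makes the setter private, so only the codec itself can publish a new output format, and the didSet fires the delegate only on a real change. A reduced standalone sketch of the same pattern (FormatPublisher and onChange are illustrative names):

import CoreMedia

final class FormatPublisher {
    var onChange: ((CMFormatDescription?) -> Void)?
    private(set) var outputFormat: CMFormatDescription? {
        didSet {
            // Only notify when the description actually changed;
            // redundant assignments produce no callback.
            guard !CMFormatDescriptionEqual(outputFormat, otherFormatDescription: oldValue) else {
                return
            }
            onChange?(outputFormat)
        }
    }
    func update(_ format: CMFormatDescription?) {
        outputFormat = format
    }
}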
10 changes: 0 additions & 10 deletions Sources/Media/HKView.swift
@@ -28,11 +28,6 @@ public class HKView: UIView {
         }
     }

-    /// A value that displays a video format.
-    public var videoFormatDescription: CMVideoFormatDescription? {
-        currentStream?.mixer.videoIO.formatDescription
-    }
-
     #if !os(tvOS)
     public var videoOrientation: AVCaptureVideoOrientation = .portrait {
         didSet {
@@ -139,11 +134,6 @@ public class HKView: NSView {
         }
     }

-    /// A value that displays a video format.
-    public var videoFormatDescription: CMVideoFormatDescription? {
-        currentStream?.mixer.videoIO.formatDescription
-    }
-
     public var videoOrientation: AVCaptureVideoOrientation = .portrait

     private var currentSampleBuffer: CMSampleBuffer?
4 changes: 4 additions & 0 deletions Sources/Media/IOAudioResampler.swift
@@ -76,6 +76,10 @@ final class IOAudioResampler<T: IOAudioResamplerDelegate> {
     }
     weak var delegate: T?

+    var inputFormat: AVAudioFormat? {
+        return audioConverter?.inputFormat
+    }
+
     var outputFormat: AVAudioFormat? {
         return audioConverter?.outputFormat
     }
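Both accessors simply forward the two ends of the underlying AVAudioConverter. A standalone illustration of where these values come from (not library code):

import AVFoundation

// AVAudioConverter exposes both sides of a conversion, which is all the
// resampler forwards through inputFormat/outputFormat.
let input = AVAudioFormat(standardFormatWithSampleRate: 48_000, channels: 2)!
let output = AVAudioFormat(standardFormatWithSampleRate: 44_100, channels: 2)!
let converter = AVAudioConverter(from: input, to: output)
print(converter?.inputFormat.sampleRate ?? 0)   // 48000.0
print(converter?.outputFormat.sampleRate ?? 0)  // 44100.0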
8 changes: 8 additions & 0 deletions Sources/Media/IOAudioUnit.swift
@@ -5,6 +5,8 @@ import SwiftPMSupport
 #endif

 final class IOAudioUnit: NSObject, IOUnit {
+    typealias FormatDescription = CMAudioFormatDescription
+
     lazy var codec: AudioCodec = {
         var codec = AudioCodec()
         codec.lockQueue = lockQueue
@@ -33,6 +35,10 @@ final class IOAudioUnit: NSObject, IOUnit {
             resampler.settings = settings.makeAudioResamplerSettings()
         }
     }
+    var inputFormat: FormatDescription?
+    var outputFormat: FormatDescription? {
+        return codec.outputFormat?.formatDescription
+    }
     private(set) var presentationTimeStamp: CMTime = .invalid
     private lazy var resampler: IOAudioResampler<IOAudioUnit> = {
         var resampler = IOAudioResampler<IOAudioUnit>()
@@ -65,6 +71,7 @@
         guard let device else {
             try capture.attachDevice(nil, audioUnit: self)
             presentationTimeStamp = .invalid
+            inputFormat = nil
             return
         }
         try capture.attachDevice(device, audioUnit: self)
@@ -152,6 +159,7 @@ extension IOAudioUnit: IOAudioResamplerDelegate {
     }

     func resampler(_ resampler: IOAudioResampler<IOAudioUnit>, didOutput audioFormat: AVAudioFormat) {
+        inputFormat = resampler.inputFormat?.formatDescription
        codec.inSourceFormat = audioFormat.formatDescription.audioStreamBasicDescription
        monitor.inSourceFormat = audioFormat.formatDescription.audioStreamBasicDescription
     }
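The resampler delegate callback is where inputFormat gets populated: the resampler's AVAudioFormat is bridged to the CMAudioFormatDescription the unit stores. A standalone sketch of that bridge (values are illustrative):

import AVFoundation

// AVAudioFormat carries an equivalent CMAudioFormatDescription, which is
// the type IOAudioUnit stores as its inputFormat.
let pcm = AVAudioFormat(standardFormatWithSampleRate: 44_100, channels: 1)!
let description: CMAudioFormatDescription = pcm.formatDescription
let asbd = CMAudioFormatDescriptionGetStreamBasicDescription(description)?.pointee
print(asbd?.mSampleRate ?? 0) // 44100.0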
1 change: 0 additions & 1 deletion Sources/Media/IOMixer.swift
@@ -222,7 +222,6 @@ public final class IOMixer {
         case kCMMediaType_Audio:
             audioIO.codec.appendSampleBuffer(sampleBuffer)
         case kCMMediaType_Video:
-            videoIO.codec.formatDescription = sampleBuffer.formatDescription
             videoIO.codec.appendSampleBuffer(sampleBuffer)
         default:
             break
4 changes: 4 additions & 0 deletions Sources/Media/IOUnit.swift
@@ -5,8 +5,12 @@ import Foundation
 public typealias AVCodecDelegate = AudioCodecDelegate & VideoCodecDelegate

 protocol IOUnit {
+    associatedtype FormatDescription
+
     var mixer: IOMixer? { get set }
     var muted: Bool { get set }
+    var inputFormat: FormatDescription? { get }
+    var outputFormat: FormatDescription? { get }

     func appendSampleBuffer(_ sampleBuffer: CMSampleBuffer)
 }
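With the associatedtype, each conforming unit picks a concrete description type (CMAudioFormatDescription for audio, CMVideoFormatDescription for video) while generic code can still query both ends of the pipeline. A reduced sketch, with stub types standing in for the real units:

import CoreMedia

protocol FormatReporting {
    associatedtype FormatDescription
    var inputFormat: FormatDescription? { get }
    var outputFormat: FormatDescription? { get }
}

struct AudioUnitStub: FormatReporting {
    typealias FormatDescription = CMAudioFormatDescription
    var inputFormat: CMAudioFormatDescription?
    var outputFormat: CMAudioFormatDescription?
}

struct VideoUnitStub: FormatReporting {
    typealias FormatDescription = CMVideoFormatDescription
    var inputFormat: CMVideoFormatDescription?
    var outputFormat: CMVideoFormatDescription?
}

// Generic code can ask whether a unit has negotiated its formats yet.
func hasKnownFormats<U: FormatReporting>(_ unit: U) -> Bool {
    unit.inputFormat != nil && unit.outputFormat != nil
}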
67 changes: 27 additions & 40 deletions Sources/Media/IOVideoUnit.swift
@@ -2,6 +2,8 @@ import AVFoundation
 import CoreImage

 final class IOVideoUnit: NSObject, IOUnit {
+    typealias FormatDescription = CMVideoFormatDescription
+
     enum Error: Swift.Error {
         case multiCamNotSupported
     }
@@ -12,61 +14,28 @@ final class IOVideoUnit: NSObject, IOUnit {
     ]

     let lockQueue = DispatchQueue(label: "com.haishinkit.HaishinKit.VideoIOComponent.lock")
-
     var context: CIContext = .init() {
         didSet {
             for effect in effects {
                 effect.ciContext = context
             }
         }
     }
-
     weak var drawable: (any NetStreamDrawable)? {
         didSet {
             #if os(iOS) || os(macOS)
             drawable?.videoOrientation = videoOrientation
             #endif
         }
     }
-
-    var formatDescription: CMVideoFormatDescription? {
-        didSet {
-            codec.formatDescription = formatDescription
-        }
-    }
-
-    var multiCamCaptureSettings: MultiCamCaptureSettings = .default
     lazy var codec: VideoCodec = {
         var codec = VideoCodec()
         codec.lockQueue = lockQueue
         return codec
     }()
-
     weak var mixer: IOMixer?
-
     var muted = false
-
-    private(set) var presentationTimeStamp: CMTime = .invalid
-    private(set) var effects: Set<VideoEffect> = []
-
-    private var extent = CGRect.zero {
-        didSet {
-            guard extent != oldValue else {
-                return
-            }
-            CVPixelBufferPoolCreate(nil, nil, attributes as CFDictionary?, &pixelBufferPool)
-            pixelBufferPool?.createPixelBuffer(&pixelBuffer)
-        }
-    }
-
-    private var attributes: [NSString: NSObject] {
-        var attributes: [NSString: NSObject] = Self.defaultAttributes
-        attributes[kCVPixelBufferWidthKey] = NSNumber(value: Int(extent.width))
-        attributes[kCVPixelBufferHeightKey] = NSNumber(value: Int(extent.height))
-        return attributes
-    }
-
-    private var pixelBufferPool: CVPixelBufferPool?
-
     var frameRate = IOMixer.defaultFrameRate {
         didSet {
             if #available(tvOS 17.0, *) {
@@ -75,7 +44,6 @@ final class IOVideoUnit: NSObject, IOUnit {
             }
         }
     }
-
     #if !os(tvOS)
     var videoOrientation: AVCaptureVideoOrientation = .portrait {
         didSet {
@@ -98,7 +66,6 @@ final class IOVideoUnit: NSObject, IOUnit {
         }
     }
     #endif
-
     var torch = false {
         didSet {
             guard torch != oldValue else {
@@ -109,7 +76,10 @@ final class IOVideoUnit: NSObject, IOUnit {
             }
         }
     }
-
+    var inputFormat: FormatDescription?
+    var outputFormat: FormatDescription? {
+        codec.outputFormat
+    }
     #if os(tvOS)
     private var _capture: Any?
     @available(tvOS 17.0, *)
@@ -132,10 +102,25 @@ final class IOVideoUnit: NSObject, IOUnit {
     private(set) var capture: IOVideoCaptureUnit = .init()
     private(set) var multiCamCapture: IOVideoCaptureUnit = .init()
     #endif
-
+    var multiCamCaptureSettings: MultiCamCaptureSettings = .default
+
+    private(set) var presentationTimeStamp: CMTime = .invalid
+    private(set) var effects: Set<VideoEffect> = []
+    private var extent = CGRect.zero {
+        didSet {
+            guard extent != oldValue else {
+                return
+            }
+            CVPixelBufferPoolCreate(nil, nil, attributes as CFDictionary?, &pixelBufferPool)
+            pixelBufferPool?.createPixelBuffer(&pixelBuffer)
+        }
+    }
+    private var attributes: [NSString: NSObject] {
+        var attributes: [NSString: NSObject] = Self.defaultAttributes
+        attributes[kCVPixelBufferWidthKey] = NSNumber(value: Int(extent.width))
+        attributes[kCVPixelBufferHeightKey] = NSNumber(value: Int(extent.height))
+        return attributes
+    }
     private var pixelBuffer: CVPixelBuffer?
+    private var pixelBufferPool: CVPixelBufferPool?
     private var multiCamSampleBuffer: CMSampleBuffer?

     deinit {
@@ -161,6 +146,7 @@ final class IOVideoUnit: NSObject, IOUnit {
             capture.detachSession(mixer.session)
             try capture.attachDevice(nil, videoUnit: self)
             presentationTimeStamp = .invalid
+            inputFormat = nil
             return
         }
         mixer.session.beginConfiguration()
@@ -248,6 +234,7 @@ final class IOVideoUnit: NSObject, IOUnit {
     }

     func appendSampleBuffer(_ sampleBuffer: CMSampleBuffer) {
+        inputFormat = sampleBuffer.formatDescription
         guard let buffer = sampleBuffer.imageBuffer else {
             return
         }
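The one-line addition to appendSampleBuffer is what makes the metadata change work: the unit remembers the format of whatever is fed to it, so "video is present" no longer depends on a physical capture device being attached. A reduced sketch of that capture point (stub type, illustrative only):

import CoreMedia

final class VideoIngestStub {
    private(set) var inputFormat: CMVideoFormatDescription?

    func appendSampleBuffer(_ sampleBuffer: CMSampleBuffer) {
        // Record the most recent input format before any processing,
        // whether frames come from a camera or are appended manually.
        inputFormat = sampleBuffer.formatDescription
        // ... effects, pixel-buffer pooling, and encoding would follow.
    }
}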
4 changes: 0 additions & 4 deletions Sources/Media/MTHKView.swift
@@ -9,10 +9,6 @@ public class MTHKView: MTKView {
     /// Specifies how the video is displayed within a player layer’s bounds.
     public var videoGravity: AVLayerVideoGravity = .resizeAspect

-    public var videoFormatDescription: CMVideoFormatDescription? {
-        currentStream?.mixer.videoIO.formatDescription
-    }
-
     #if !os(tvOS)
     public var videoOrientation: AVCaptureVideoOrientation = .portrait
     #endif
10 changes: 0 additions & 10 deletions Sources/Media/PiPHKView.swift
@@ -25,11 +25,6 @@ public class PiPHKView: UIView {
         }
     }

-    /// A value that displays a video format.
-    public var videoFormatDescription: CMVideoFormatDescription? {
-        currentStream?.mixer.videoIO.formatDescription
-    }
-
     #if !os(tvOS)
     public var videoOrientation: AVCaptureVideoOrientation = .portrait {
         didSet {
@@ -119,11 +114,6 @@ public class PiPHKView: NSView {
         }
     }

-    /// A value that displays a video format.
-    public var videoFormatDescription: CMVideoFormatDescription? {
-        currentStream?.mixer.videoIO.formatDescription
-    }
-
     public var videoOrientation: AVCaptureVideoOrientation = .portrait {
         didSet {
             if Thread.isMainThread {
3 changes: 0 additions & 3 deletions Sources/Net/NetStreamDrawable.swift
@@ -8,9 +8,6 @@ public protocol NetStreamDrawable: AnyObject {
     var videoOrientation: AVCaptureVideoOrientation { get set }
     #endif

-    /// The videoFormatDescription which is the current CMSampleBuffer.
-    var videoFormatDescription: CMVideoFormatDescription? { get }
-
     /// Attaches a drawable to a new NetStream object.
     func attachStream(_ stream: NetStream?)
6 changes: 3 additions & 3 deletions Sources/RTMP/RTMPMessage.swift
@@ -709,7 +709,7 @@ final class RTMPVideoMessage: RTMPMessage {
             dataReady: true,
             makeDataReadyCallback: nil,
             refcon: nil,
-            formatDescription: stream.mixer.videoIO.formatDescription,
+            formatDescription: stream.mixer.videoIO.inputFormat,
             sampleCount: 1,
             sampleTimingEntryCount: 1,
             sampleTimingArray: &timing,
@@ -728,11 +728,11 @@
         case .h264:
             var config = AVCDecoderConfigurationRecord()
             config.data = payload.subdata(in: FLVTagType.video.headerSize..<payload.count)
-            status = config.makeFormatDescription(&stream.mixer.videoIO.formatDescription)
+            status = config.makeFormatDescription(&stream.mixer.videoIO.inputFormat)
         case .hevc:
             var config = HEVCDecoderConfigurationRecord()
             config.data = payload.subdata(in: FLVTagType.video.headerSize..<payload.count)
-            status = config.makeFormatDescription(&stream.mixer.videoIO.formatDescription)
+            status = config.makeFormatDescription(&stream.mixer.videoIO.inputFormat)
         }
         if status == noErr {
             stream.mixer.mediaLink.hasVideo = true
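On the playback side, the same inputFormat slot is filled from the remote stream's decoder configuration record rather than from a local device. A hedged sketch of that flow, reusing the library's record types shown above (the helper function itself is illustrative):

import CoreMedia
import Foundation

// Illustrative helper: an AVC decoder configuration carried in an FLV tag
// becomes the video unit's *input* format description.
func makeInputFormat(from payload: Data, headerSize: Int) -> CMFormatDescription? {
    var inputFormat: CMFormatDescription?
    var config = AVCDecoderConfigurationRecord()
    config.data = payload.subdata(in: headerSize..<payload.count)
    let status = config.makeFormatDescription(&inputFormat)
    return status == noErr ? inputFormat : nil
}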
20 changes: 10 additions & 10 deletions Sources/RTMP/RTMPStream.swift
@@ -413,24 +413,24 @@ open class RTMPStream: NetStream {
     /// Creates flv metadata for a stream.
     open func makeMetaData() -> ASObject {
         var metadata: [String: Any] = [:]
-        #if os(iOS) || os(macOS)
-        if mixer.videoIO.capture.device != nil {
-            metadata["width"] = mixer.videoIO.codec.settings.videoSize.width
-            metadata["height"] = mixer.videoIO.codec.settings.videoSize.height
-            metadata["framerate"] = mixer.videoIO.frameRate
-            switch mixer.videoIO.codec.settings.format {
+        if mixer.videoIO.inputFormat != nil {
+            metadata["width"] = videoSettings.videoSize.width
+            metadata["height"] = videoSettings.videoSize.height
+            metadata["framerate"] = frameRate
+            switch videoSettings.format {
             case .h264:
                 metadata["videocodecid"] = FLVVideoCodec.avc.rawValue
             case .hevc:
                 metadata["videocodecid"] = FLVVideoFourCC.hevc.rawValue
             }
-            metadata["videodatarate"] = mixer.videoIO.codec.settings.bitRate / 1000
+            metadata["videodatarate"] = videoSettings.bitRate / 1000
         }
-        #endif
-        if let inSourceFormat = mixer.audioIO.codec.inSourceFormat {
+        if mixer.audioIO.inputFormat != nil {
             metadata["audiocodecid"] = FLVAudioCodec.aac.rawValue
             metadata["audiodatarate"] = audioSettings.bitRate / 1000
-            metadata["audiosamplerate"] = inSourceFormat.mSampleRate
+            if let outputFormat = mixer.audioIO.outputFormat?.audioStreamBasicDescription {
+                metadata["audiosamplerate"] = outputFormat.mSampleRate
+            }
         }
         return metadata
     }
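After this change, makeMetaData keys off the units' input formats instead of capture devices and the encoder's inSourceFormat, so streams fed via appendSampleBuffer also produce correct onMetaData. A hedged usage sketch (field values are illustrative, not guaranteed output):

import HaishinKit

let connection = RTMPConnection()
let stream = RTMPStream(connection: connection)
// ... attach devices or append sample buffers so input formats are known ...
let metadata = stream.makeMetaData()
print(metadata)
// Possible shape once video and audio inputs are present:
// ["width": 854, "height": 480, "framerate": 30, "videocodecid": 7,
//  "videodatarate": 160, "audiocodecid": 10, "audiodatarate": 32,
//  "audiosamplerate": 44100.0]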