-
-
Notifications
You must be signed in to change notification settings - Fork 605
/
AudioCodecSettings.swift
211 lines (191 loc) · 6.87 KB
/
AudioCodecSettings.swift
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
import AVFAudio
import Foundation
/// The AudioCodecSettings class specifying audio compression settings.
public struct AudioCodecSettings: Codable {
    /// The default value.
    public static let `default` = AudioCodecSettings()
    /// Maximum number of channels supported by the system.
    public static let maximumNumberOfChannels: UInt32 = 2
    /// Maximum sampleRate supported by the system.
    public static let maximumSampleRate: Float64 = 48000
    /// Maximum sampleRate supported by the system.
    /// - Note: Kept (misspelled) for backward compatibility; use `maximumSampleRate`.
    @available(*, deprecated, renamed: "maximumSampleRate")
    public static let mamimumSampleRate: Float64 = AudioCodecSettings.maximumSampleRate

    /// The type of the AudioCodec supports format.
    enum Format: Codable {
        /// The AAC format.
        case aac
        /// The PCM format.
        case pcm

        /// The CoreAudio format identifier for this format.
        var formatID: AudioFormatID {
            switch self {
            case .aac:
                return kAudioFormatMPEG4AAC
            case .pcm:
                return kAudioFormatLinearPCM
            }
        }

        /// The CoreAudio format flags: AAC-LC object type for AAC,
        /// non-interleaved packed 32-bit float for PCM.
        var formatFlags: UInt32 {
            switch self {
            case .aac:
                return UInt32(MPEG4ObjectID.AAC_LC.rawValue)
            case .pcm:
                return kAudioFormatFlagIsNonInterleaved | kAudioFormatFlagIsPacked | kAudioFormatFlagIsFloat
            }
        }

        /// Frames contained in one packet (1024 for an AAC access unit, 1 for raw PCM).
        var framesPerPacket: UInt32 {
            switch self {
            case .aac:
                return 1024
            case .pcm:
                return 1
            }
        }

        /// Packet capacity used when sizing buffers for this format.
        var packetSize: UInt32 {
            switch self {
            case .aac:
                return 1
            case .pcm:
                return 1024
            }
        }

        /// Bits per channel (0 for compressed AAC, 32-bit float for PCM).
        var bitsPerChannel: UInt32 {
            switch self {
            case .aac:
                return 0
            case .pcm:
                return 32
            }
        }

        /// Bytes per packet (0 for variable-size compressed AAC packets).
        var bytesPerPacket: UInt32 {
            switch self {
            case .aac:
                return 0
            case .pcm:
                return (bitsPerChannel / 8)
            }
        }

        /// Bytes per frame (0 for compressed AAC).
        var bytesPerFrame: UInt32 {
            switch self {
            case .aac:
                return 0
            case .pcm:
                return (bitsPerChannel / 8)
            }
        }

        /// Number of buffers the codec ring keeps for this format.
        var bufferCounts: Int {
            switch self {
            case .aac:
                return 6
            case .pcm:
                return 1
            }
        }

        /// Creates an output buffer appropriate for this format.
        /// - Parameter format: The audio format the buffer is allocated for.
        /// - Returns: A compressed buffer for AAC, a PCM buffer for PCM, or nil on failure.
        func makeAudioBuffer(_ format: AVAudioFormat) -> AVAudioBuffer? {
            switch self {
            case .aac:
                return AVAudioCompressedBuffer(format: format, packetCapacity: 1, maximumPacketSize: 1024 * Int(format.channelCount))
            case .pcm:
                return AVAudioPCMBuffer(pcmFormat: format, frameCapacity: 1024)
            }
        }

        /// Creates the output AVAudioFormat derived from the source stream description.
        /// The channel count is clamped to `AudioCodecSettings.maximumNumberOfChannels`.
        /// - Parameter inSourceFormat: The incoming stream description, or nil.
        /// - Returns: The output format, or nil when no source format is available.
        func makeAudioFormat(_ inSourceFormat: AudioStreamBasicDescription?) -> AVAudioFormat? {
            guard let inSourceFormat else {
                return nil
            }
            switch self {
            case .aac:
                var streamDescription = AudioStreamBasicDescription(
                    mSampleRate: inSourceFormat.mSampleRate,
                    mFormatID: formatID,
                    mFormatFlags: formatFlags,
                    mBytesPerPacket: bytesPerPacket,
                    mFramesPerPacket: framesPerPacket,
                    mBytesPerFrame: bytesPerFrame,
                    mChannelsPerFrame: min(inSourceFormat.mChannelsPerFrame, AudioCodecSettings.maximumNumberOfChannels),
                    mBitsPerChannel: bitsPerChannel,
                    mReserved: 0
                )
                return AVAudioFormat(streamDescription: &streamDescription)
            case .pcm:
                // NOTE(review): `interleaved: true` here while `formatFlags` declares
                // kAudioFormatFlagIsNonInterleaved — confirm this asymmetry is intended.
                return AVAudioFormat(
                    commonFormat: .pcmFormatFloat32,
                    sampleRate: inSourceFormat.mSampleRate,
                    channels: min(inSourceFormat.mChannelsPerFrame, AudioCodecSettings.maximumNumberOfChannels),
                    interleaved: true
                )
            }
        }
    }

    /// Specifies the bitRate of audio output.
    public var bitRate: Int
    /// Specifies the sampleRate of audio output.
    public var sampleRate: Float64
    /// Specifies the channels of audio output.
    public var channels: Int
    /// Map of the output to input channels.
    public var channelMap: [Int: Int]
    /// Specifies the output format.
    var format: AudioCodecSettings.Format = .aac

    /// Create an new AudioCodecSettings instance.
    /// - Parameters:
    ///   - bitRate: The target encode bit rate in bits per second.
    ///   - sampleRate: The output sample rate; 0 means "follow the input".
    ///   - channels: The output channel count; 0 means "follow the input".
    ///   - channelMap: Mapping of output channel index to input channel index.
    public init(
        bitRate: Int = 64 * 1000,
        sampleRate: Float64 = 0,
        channels: Int = 0,
        channelMap: [Int: Int] = [0: 0, 1: 1]
    ) {
        self.bitRate = bitRate
        self.sampleRate = sampleRate
        self.channels = channels
        self.channelMap = channelMap
    }

    /// Returns true when a change from `oldValue` requires rebuilding the converter.
    /// Only sampleRate, channels and channelMap force a rebuild; a bitRate change
    /// is applied in-place by `apply(_:oldValue:)`.
    func invalidateConverter(_ oldValue: AudioCodecSettings) -> Bool {
        return !(
            sampleRate == oldValue.sampleRate &&
            channels == oldValue.channels &&
            channelMap == oldValue.channelMap
        )
    }

    /// Applies a bitRate change to an existing converter, clamping the requested
    /// value into the converter's applicable encode bit-rate range.
    /// - Parameters:
    ///   - converter: The converter to update; no-op when nil.
    ///   - oldValue: The previous settings, used to detect a bitRate change.
    func apply(_ converter: AVAudioConverter?, oldValue: AudioCodecSettings?) {
        guard let converter else {
            return
        }
        if bitRate != oldValue?.bitRate {
            let minAvailableBitRate = converter.applicableEncodeBitRates?.min(by: { a, b in
                return a.intValue < b.intValue
            })?.intValue ?? bitRate
            let maxAvailableBitRate = converter.applicableEncodeBitRates?.max(by: { a, b in
                return a.intValue < b.intValue
            })?.intValue ?? bitRate
            converter.bitRate = min(maxAvailableBitRate, max(minAvailableBitRate, bitRate))
        }
    }

    /// Returns the effective output channel count for `inChannels` input channels,
    /// clamped to `maximumNumberOfChannels`; `channels == 0` follows the input.
    func makeOutputChannels(_ inChannels: Int) -> Int {
        return min(channels == 0 ? inChannels : channels, Int(Self.maximumNumberOfChannels))
    }

    /// Builds the channel map array consumed by AVAudioConverter.
    /// Starts from an identity mapping for overlapping channels (unmapped outputs
    /// are -1, i.e. silent), then overlays the user-supplied `channelMap` entries
    /// that reference a valid input channel.
    func makeChannelMap(_ inChannels: Int) -> [NSNumber] {
        let outChannels = makeOutputChannels(inChannels)
        var result = Array(repeating: -1, count: outChannels)
        for inputIndex in 0..<min(inChannels, outChannels) {
            result[inputIndex] = inputIndex
        }
        for currentIndex in 0..<outChannels {
            if let inputIndex = channelMap[currentIndex], inputIndex < inChannels {
                result[currentIndex] = inputIndex
            }
        }
        return result.map { NSNumber(value: $0) }
    }

    /// Builds the converter's output format from an input format, applying the
    /// configured sampleRate/channels (0 = follow input) and clamping to the
    /// supported maximums. Uses a discrete-in-order channel layout.
    /// - Parameter inputFormat: The converter's input format, or nil.
    /// - Returns: The output format, or nil when it cannot be constructed.
    func makeOutputFormat(_ inputFormat: AVAudioFormat?) -> AVAudioFormat? {
        guard let inputFormat else {
            return nil
        }
        let numberOfChannels = makeOutputChannels(Int(inputFormat.channelCount))
        guard let channelLayout = AVAudioChannelLayout(layoutTag: kAudioChannelLayoutTag_DiscreteInOrder | UInt32(numberOfChannels)) else {
            return nil
        }
        return .init(
            commonFormat: inputFormat.commonFormat,
            sampleRate: min(sampleRate == 0 ? inputFormat.sampleRate : sampleRate, Self.maximumSampleRate),
            interleaved: inputFormat.isInterleaved,
            channelLayout: channelLayout
        )
    }
}