diff --git a/Sources/AudioKitUI/Visualizations/SpectrogramFlatView/SpectrogramFlatView.swift b/Sources/AudioKitUI/Visualizations/SpectrogramFlatView/SpectrogramFlatView.swift
new file mode 100644
index 0000000..2a2da28
--- /dev/null
+++ b/Sources/AudioKitUI/Visualizations/SpectrogramFlatView/SpectrogramFlatView.swift
@@ -0,0 +1,147 @@
+// Copyright AudioKit. All Rights Reserved. Revision History at http://github.com/AudioKit/AudioKitUI/
+/*
+ Dataflow overview:
+ * FFTTap analyzes the sound and creates an array of frequencies and amplitudes
+   several times per second. As soon as the data is ready, a new slice is instantiated.
+   On init, the slice converts the array of measurements to an image and caches it.
+   The conversion of the data and the creation of the image take quite some time
+   and are done only once.
+ * Drawing is done using UIGraphicsImageRenderer with context.fill primitives.
+   The results are cached as UIImage and laid out onto the view.
+
+ Steps involved:
+ * FFTTap calls SpectrogramFlatModel with newly analyzed sound using
+   ``SpectrogramFlatModel/pushData(_ fftFloats: [Float])``.
+ * The model then creates a SpectrogramSlice and puts it into the queue.
+ * The body of this view watches this queue and shows all slices in the queue.
+ * Because the body, and therefore each slice, is redrawn on any update of
+   the queue, the drawing of a slice should be fast. The current implementation
+   of SpectrogramSlice caches an image of itself after drawing.
+ * The image is drawn pixel aligned on a CGContext and then resized
+   to fit into this view.
+
+ Brief history of this class:
+ * The class was created using SpectrogramView as a starting point.
+ * SpectrogramView looked/looks like it came from a 90s Japanese synth,
+   with a kind of 3D surface, which is cool. Most common spectrograms or
+   sonographs have a flat look, though.
+ * The flat look makes it easier to analyze music, create voice fingerprints
+   and compare bird songs.
+ * SpectrogramView had/has a major design flaw: on each update (as soon as new data
+   arrived from the FFT), all slices were completely redrawn from raw data. All recent
+   measurements (80) were converted from arrays of measurements to Paths with all their lines.
+ * Measuring with Instruments showed that this takes a lot of time, which is why
+   this implementation caches the resulting images.
+
+ Causes of inefficiency in this implementation:
+ * Each time a new slice arrives from FFTTap, the view gets a complete layout update.
+ * Rendering of new slices is done on a background thread but involves too many steps.
+ * The frame rate is defined by how many samples come in per second. This looks ugly
+   at less than 25 samples per second.
+ * It somehow doesn't honor the frequency range that is selected, so some CPU time
+   is wasted calculating data that isn't shown.
+ * Some arrays are iterated several times in a row where one enumeration would do.
+
+ Possibilities to consider for a more energy-efficient implementation:
+ * Only calculate what is shown and enumerate each array only once
+   (see the comment on captureAmplitudeFrequencyData()).
+ * Make the layout independent of the sample rate; just move the slices left
+   with a continuous, built-in animation.
+ * Lay out and draw the slices directly on a Canvas (instead of an HStack)
+   and independently move the Canvas left.
+ * To make it look crisp, all images should be drawn and laid out pixel aligned
+   (integral size and position).
+ * Try .drawingGroup() to see whether it improves performance.
+ * Use ImageRenderer and objectWillChange to create a stream of images.
+ * Port Apple's vDSP and Accelerate sample code (macOS) to iOS:
+   https://developer.apple.com/documentation/accelerate/visualizing_sound_as_an_audio_spectrogram
+ * A spectrogram is effectively a kind of heatmap, so SwiftUI.Chart could be used.
+ * Use a factory and emitter to emit new slice images (like in a particle system).
+ * Measure the performance impact of spreading the work across several threads
+   versus combining it on the main thread.
+ * Use the Metal API with shaders, similar to what Apple's aurioTouch sample code did in OpenGL.
+ * Try to replace all CGPoint and CGPoint[] calculations with Accelerate or some
+   other optimized library.
+ * Measure efficiency and compare whether using only opaque colors in the gradient
+   makes a difference.
+ * With all these possibilities to improve energy efficiency, don't forget latency.
+ * It might be easy to make this available in versions earlier than iOS 17,
+   primarily because of .onChange(of:).
+
+ */
+
+import AudioKit
+import SwiftUI
+
+/// Displays a rolling plot of the frequency spectrum.
+///
+/// Each slice represents a point in time, with the frequencies at that moment
+/// shown from bottom to top. Each frequency cell is colored according to its amplitude.
+/// The spectrum is shown logarithmically, so octaves have the same distance.
+///
+/// This implementation is rather energy inefficient. You might not want to use it as
+/// a central feature in your app. Furthermore, it's not scientifically correct: when
+/// displaying white noise, it will not show a uniform distribution.
+public struct SpectrogramFlatView: View {
+    // this static var is a shortcut: better to have this in SpectrogramModel or SpectrogramFFTMetaData
+    public static var gradientUIColors: [UIColor] = [(#colorLiteral(red: 0, green: 0, blue: 0, alpha: 0)), (#colorLiteral(red: 0.1411764771, green: 0.3960784376, blue: 0.5647059083, alpha: 0.6275583187)), (#colorLiteral(red: 0.4217140079, green: 0.6851614118, blue: 0.9599093795, alpha: 0.8245213468)), (#colorLiteral(red: 0.8122602105, green: 0.6033009887, blue: 0.8759307861, alpha: 1)), (#colorLiteral(red: 0.9826132655, green: 0.5594901443, blue: 0.4263145328, alpha: 1)), (#colorLiteral(red: 1, green: 0.2607713342, blue: 0.4242972136, alpha: 1))]
+    @StateObject var spectrogram = SpectrogramFlatModel()
+    let node: Node
+    let backgroundColor: Color
+
+    /// put only one color into the array for a monochrome view
+    public init(node: Node,
+                amplitudeColors: [Color] = [],
+                backgroundColor: Color = Color.black) {
+        self.node = node
+        if amplitudeColors.count > 1 {
+            Self.gradientUIColors = amplitudeColors.map { UIColor($0) }
+        } else if amplitudeColors.count == 1 {
+            Self.gradientUIColors = [UIColor(backgroundColor), UIColor(amplitudeColors[0])]
+        }
+        self.backgroundColor = backgroundColor
+    }
+
+    public var body: some View {
+        GeometryReader { geometry in
+            ZStack {
+                backgroundColor
+                    .onAppear {
+                        spectrogram.updateNode(node)
+                    }
+                HStack(spacing: 0.0) {
+                    ForEach(spectrogram.slices.items) { slice in
+                        // flip it as the slice was drawn in the first quadrant
+                        slice.scaleEffect(x: 1, y: -1)
+                        // .border(.green, width: 2.0)
+                    }
+                    // flip it so the new slices come in on the right and move to the left
+                    .scaleEffect(x: -1, y: 1)
+                }
+                // .border(.red, width: 5.0)
+                .frame(maxWidth: .infinity, maxHeight: .infinity, alignment: .trailing)
+            }.onAppear {
+                spectrogram.sliceSize = calcSliceSize(fromFrameSize: geometry.size)
+            }
+            .onChange(of: geometry.size) { newSize in
+                spectrogram.sliceSize = calcSliceSize(fromFrameSize: newSize)
+            }
+        }
+    }
+
+    func calcSliceSize(fromFrameSize frameSize: CGSize) -> CGSize {
+        let outSize = CGSize(
+            // Even when we have a non-integral width for a slice, the resulting
+            // image will be integral in size, but resizable. The HStack would then
+            // lay the slices out non-pixel-aligned and stretched.
+            // That's why we floor it: ceiling would make the slices a bit more precise,
+            // flooring makes them more energy efficient. We did some measurements;
+            // it's hard to tell the difference visually.
+            width: floor(frameSize.width / CGFloat(spectrogram.slices.maxItems)),
+            height: frameSize.height
+        )
+        return outSize
+    }
+}
+
+// MARK: Preview
+
+struct SpectrogramFlatView_Previews: PreviewProvider {
+    static var previews: some View {
+        SpectrogramFlatView(node: Mixer())
+    }
+}
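For context, a minimal sketch of how this view can be fed from an AudioKit signal chain. This is not part of the diff: `AudioEngine` and `AudioPlayer` are standard AudioKit types, and loading an audio file into the player is omitted here.

```swift
import AudioKit
import AudioKitUI
import SwiftUI

struct SpectrogramDemo: View {
    let engine = AudioEngine()
    let player = AudioPlayer()

    var body: some View {
        // any AudioKit Node works as input; the view installs an FFTTap on it
        SpectrogramFlatView(node: player)
            .onAppear {
                engine.output = player
                try? engine.start()
                player.play()
            }
    }
}
```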
diff --git a/Sources/AudioKitUI/Visualizations/SpectrogramFlatView/SpectrogramModel.swift b/Sources/AudioKitUI/Visualizations/SpectrogramFlatView/SpectrogramModel.swift
new file mode 100644
index 0000000..59ba7de
--- /dev/null
+++ b/Sources/AudioKitUI/Visualizations/SpectrogramFlatView/SpectrogramModel.swift
@@ -0,0 +1,170 @@
+// Copyright AudioKit. All Rights Reserved. Revision History at http://github.com/AudioKit/AudioKitUI/
+//
+
+import AudioKit
+import SwiftUI
+
+/// Considerations for further development, depending on usage and requirements:
+/// make this struct public so the look can be configured; define fftSize as an enum;
+/// also add something like a gain to adjust the sensitivity of the display.
+struct SpectrogramFFTMetaData {
+    // fftSize defines how detailed the music is analyzed in the time domain.
+    // The lower the value, the less detail:
+    // * 1024: will receive about four analyzed frequencies between C2 and C3 (65Hz to 130Hz).
+    //   New data comes roughly 21.5 times per second, every 46ms.
+    // * 2048: will receive about eight analyzed frequencies between C2 and C3 (65Hz to 130Hz).
+    //   New data comes roughly 11 times per second, every 93ms.
+    // * 4096: will receive about 16 analyzed frequencies between C2 and C3 (65Hz to 130Hz).
+    //   New data comes roughly 5.5 times per second, every 186ms.
+    // Choose a higher value when you want to analyze low frequencies,
+    // choose a lower value when you want a fast response and a high frame rate on display.
+    let fftSize = 2048
+
+    // Lowest and highest frequencies shown.
+    // We use 48Hz, which is a bit lower than G1. A1 would be 440Hz/8 = 55Hz.
+    // The lowest human bass voice in choral music reaches down to C1 (32.7 Hz).
+    // Don't go lower than 6.0; it just doesn't make sense and the display gets terribly
+    // distorted. Don't use 0, as it breaks the display: log10(0) is undefined and this
+    // error is not handled.
+    let minFreq: CGFloat = 48.0
+    // We will not show anything above 13500Hz, as it's not music anymore but just overtones and noise.
+    let maxFreq: CGFloat = 13500.0
+
+    // How/why can the sample rate be edited? Shouldn't this come from the node/engine?
+    // If the sample rate is changed, does the displayed frequency range also have to be changed?
+    // Took this from the existing SpectrogramView; will investigate later.
+    let sampleRate: Double = 44100
+}
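A back-of-the-envelope check of the update rates quoted in the comment above; a sketch, not part of the diff. It assumes the `bufferSize = fftSize * 2` relationship used in `updateNode` below, so new FFT data arrives once per buffer:

```swift
let sampleRate = 44100.0
for fftSize in [1024.0, 2048.0, 4096.0] {
    let bufferSize = fftSize * 2                      // FFTTap buffer, as in updateNode
    let updatesPerSecond = sampleRate / bufferSize    // ≈ 21.5, 10.8, 5.4
    let msBetweenUpdates = 1000.0 / updatesPerSecond  // ≈ 46, 93, 186
    print(fftSize, updatesPerSecond, msBetweenUpdates)
}
```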
+
+struct SliceQueue {
+    var maxItems: Int = 120
+    var items: [SpectrogramSlice] = []
+
+    public mutating func pushToQueue(element: SpectrogramSlice) {
+        enqueue(element: element)
+        if items.count > maxItems {
+            dequeue()
+        }
+    }
+
+    private mutating func enqueue(element: SpectrogramSlice) {
+        items.append(element)
+    }
+
+    private mutating func dequeue() {
+        if !items.isEmpty {
+            items.remove(at: 0)
+        }
+    }
+}
+
+/// Model for the SpectrogramFlatView. Connects to the audio node and receives FFT data.
+class SpectrogramFlatModel: ObservableObject {
+    /// A queue full of SpectrogramSlice
+    @Published var slices = SliceQueue()
+    /// Dimensions of the slices. Set prior to rendering to get slices that fit.
+    var sliceSize = CGSize(width: 10, height: 250) {
+        didSet {
+            if xcodePreview { createTestData() }
+        }
+    }
+    let nodeMetaData = SpectrogramFFTMetaData()
+    let xcodePreview = ProcessInfo.processInfo.environment["XCODE_RUNNING_FOR_PREVIEWS"] == "1"
+    var nodeTap: FFTTap!
+    var node: Node?
+
+    // Create a pre-filled queue so it is always full of slices; this looks a bit better.
+    // Otherwise the slices would move fast at the beginning and then press together
+    // until the queue is full (looks funny though :-).
+    // In case of an Xcode preview, the queue is filled in the sliceSize didSet
+    // observer, typically triggered from the geometry reader.
+    init() {
+        if !xcodePreview {
+            createEmptyData()
+        }
+    }
+
+    // fill the queue with empty data so the layout doesn't start in the middle
+    private func createEmptyData() {
+        for _ in 0 ..< slices.maxItems {
+            var points: [CGPoint] = []
+            for index in 0 ..< 10 {
+                let frequency = CGFloat(Float(index) * Float.pi)
+                let amplitude = CGFloat(-200.0)
+                points.append(CGPoint(x: frequency, y: amplitude))
+            }
+            // size and frequency don't really matter as it will all be black
+            let slice = SpectrogramSlice(
+                gradientUIColors: SpectrogramFlatView.gradientUIColors,
+                sliceWidth: sliceSize.width,
+                sliceHeight: sliceSize.height,
+                fftReadingsFrequencyAmplitudePairs: points,
+                fftMetaData: nodeMetaData
+            )
+            slices.pushToQueue(element: slice)
+        }
+    }
+
+    private func createTestData() {
+        let testCellAmount = 200
+        for _ in 0 ..< slices.maxItems {
+            var points: [CGPoint] = []
+            // Lowest and highest frequency at full amplitude, so the rendering shows
+            // the full frequency spectrum.
+            // CGPoint x: frequency, y: amplitude -200 ... 0, where 0 is full volume.
+            for index in 0 ... testCellAmount {
+                // linear frequency range from 48 to 13500 in the amount of steps we generate
+                let frequency = 48.0 + CGFloat( index * (13500 / testCellAmount ))
+                var amplitude = CGFloat.random(in: -200 ... 0)
+                // add some silence to the test data
+                amplitude = amplitude < -80 ? amplitude : -200.0
+                points.append(CGPoint(x: frequency, y: amplitude))
+            }
+            let slice = SpectrogramSlice(
+                gradientUIColors: SpectrogramFlatView.gradientUIColors,
+                sliceWidth: sliceSize.width,
+                sliceHeight: sliceSize.height,
+                fftReadingsFrequencyAmplitudePairs: points,
+                fftMetaData: nodeMetaData
+            )
+            slices.pushToQueue(element: slice)
+        }
+    }
+
+    func updateNode(_ node: Node) {
+        // Using a background thread to get data from FFTTap.
+        // This doesn't make it more efficient, but it keeps the work away from
+        // the main thread and the user.
+        if node !== self.node {
+            self.node = node
+            nodeTap = FFTTap(node, bufferSize: UInt32(nodeMetaData.fftSize * 2), callbackQueue: .global()) { fftData in
+                self.pushData(fftData)
+            }
+            // Normalization would mean that on each slice the loudest frequency gets
+            // amplitude 1.0, independent of what happened before. We don't want that,
+            // as we want absolute measurements that can be compared over time.
+            nodeTap.isNormalized = false
+            nodeTap.zeroPaddingFactor = 1
+            nodeTap.start()
+        }
+    }
+
+    func pushData(_ fftFloats: [Float]) {
+        // Called several times per second, depending on fftSize.
+        // This call pushes new fftReadings into the queue.
+        // The queue is observed by the view, and thus the view is updated.
+        // The incoming array of floats contains 2 * fftSize entries, coded as
+        // real and imaginary parts: the real parts at the even indices and the
+        // imaginary parts at the odd indices of the array.
+        let slice = SpectrogramSlice(
+            gradientUIColors: SpectrogramFlatView.gradientUIColors,
+            sliceWidth: sliceSize.width,
+            sliceHeight: sliceSize.height,
+            fftReadings: fftFloats,
+            fftMetaData: nodeMetaData
+        )
+        // We typically receive the callback on a background thread, where the
+        // slice image was also rendered. To inform the UI, we dispatch it on the main thread.
+        DispatchQueue.main.async {
+            self.slices.pushToQueue(element: slice)
+        }
+    }
+}
diff --git a/Sources/AudioKitUI/Visualizations/SpectrogramFlatView/SpectrogramSlice.swift b/Sources/AudioKitUI/Visualizations/SpectrogramFlatView/SpectrogramSlice.swift
new file mode 100644
index 0000000..58c2269
--- /dev/null
+++ b/Sources/AudioKitUI/Visualizations/SpectrogramFlatView/SpectrogramSlice.swift
@@ -0,0 +1,288 @@
+// Copyright AudioKit. All Rights Reserved. Revision History at http://github.com/AudioKit/AudioKitUI/
+
+import SwiftUI
+
+/// One slice with frequencies from low at the bottom up to high frequencies at the top.
+/// Amplitudes are shown in different colors according to the submitted gradient.
+/// The resulting image has an integral size (dimensions in Int), so it is most of
+/// the time a bit smaller than requested. This is because it is drawn in
+/// a CGContext that doesn't have fractions of pixels to draw.
+struct SpectrogramSlice: View, Identifiable {
+    static var counterSinceStart = 0
+    // A static Int instead of a UUID as identifier. While debugging, it's practical
+    // to see the order, and therefore the time, the slice was created.
+    // Furthermore, as a premature performance optimisation:
+    // incrementing an Int is supposedly faster than creating a UUID.
+    // Depending on the version of swiftlint, this will be marked as a rule
+    // violation for being only 2 characters in length.
+    // swiftlint:disable identifier_name
+    let id: Int
+    // swiftlint:enable identifier_name
+    // we don't provide defaults, the caller really should know about these
+    let gradientUIColors: [UIColor]
+    let sliceWidth: CGFloat
+    let sliceHeight: CGFloat
+    let rawFftReadings: [Float]
+    let fftMetaData: SpectrogramFFTMetaData
+    // These don't contain CGPoints in the graphic sense but in the sense of vectors:
+    // x describes the frequency axis and y the amplitude axis.
+    private var fftReadingFrequencyAmplitudePairs: [CGPoint]
+    private var cachedUIImage: UIImage
+    private var allRects: [CGRect]
+    private var allColors: [Color]
+
+    init(
+        gradientUIColors: [UIColor],
+        sliceWidth: CGFloat,
+        sliceHeight: CGFloat,
+        fftReadings: [Float],
+        fftMetaData: SpectrogramFFTMetaData
+    ) {
+        self.gradientUIColors = gradientUIColors
+        self.sliceWidth = sliceWidth
+        self.sliceHeight = sliceHeight
+        self.rawFftReadings = fftReadings
+        self.fftMetaData = fftMetaData
+        Self.counterSinceStart = Self.counterSinceStart &+ 1
+        id = Self.counterSinceStart
+        allRects = []
+        allColors = []
+        fftReadingFrequencyAmplitudePairs = []
+        cachedUIImage = UIImage(systemName: "pause")!
+
+        self.fftReadingFrequencyAmplitudePairs = captureAmplitudeFrequencyData(fftReadings)
+
+        createSpectrumRects()
+        cachedUIImage = createSpectrumImage()
+
+        // release the data, we don't need it anymore
+        fftReadingFrequencyAmplitudePairs = []
+        allRects = []
+        allColors = []
+    }
+
+    /// convenience initialiser, useful when measurements are created manually
+    init(
+        gradientUIColors: [UIColor],
+        sliceWidth: CGFloat,
+        sliceHeight: CGFloat,
+        fftReadingsFrequencyAmplitudePairs: [CGPoint],
+        fftMetaData: SpectrogramFFTMetaData
+    ) {
+        self.gradientUIColors = gradientUIColors
+        self.sliceWidth = sliceWidth
+        self.sliceHeight = sliceHeight
+        self.fftReadingFrequencyAmplitudePairs = fftReadingsFrequencyAmplitudePairs
+        self.fftMetaData = fftMetaData
+        Self.counterSinceStart = Self.counterSinceStart &+ 1
+        id = Self.counterSinceStart
+        allRects = []
+        allColors = []
+        self.rawFftReadings = []
+        cachedUIImage = UIImage(systemName: "pause")!
+
+        createSpectrumRects()
+        cachedUIImage = createSpectrumImage()
+    }
+
+    public var body: some View {
+        Image(uiImage: cachedUIImage).resizable()
+    }
+
+    // This code draws in the first quadrant; it's much easier to understand
+    // when we can draw from low to high frequency, bottom to top.
+    // The image will have to be flipped when used in a typical spectrogram view.
+    func createSpectrumImage() -> UIImage {
+        // return an empty image when there is no data to visualize
+        guard allRects.count > 0 else { return UIImage() }
+        let renderer = UIGraphicsImageRenderer(size: CGSize(width: sliceWidth, height: sliceHeight))
+        let img = renderer.image { ctx in
+            for index in 0 ..< allRects.count {
+                UIColor(allColors[index]).setFill()
+                ctx.fill(allRects[index])
+            }
+        }
+        return img
+    }
+
+    // Unused method drawing into a Canvas. Might be useful in the future
+    // when doing more energy-efficient drawing.
+    // MARK: createSpectrumSlice()
+    /* func createSpectrumSlice() -> some View {
+        return Canvas { context, _ in
+            for index in 0 ..< allRects.count {
+                context.fill(
+                    Path(allRects[index]),
+                    with: .color(allColors[index])
+                )
+            }
+            // Flip it back. The code is much easier to understand when we can draw
+            // from low to high frequency in the first quadrant, as we did in macOS Core Animation.
+        }.scaleEffect(x: 1, y: -1)
+    } */
+
+    mutating func createSpectrumRects() {
+        guard fftReadingFrequencyAmplitudePairs.count > 0 else { return }
+        // Calculate rects and colors within the initialiser, so the drawing will just use those.
+        // fftReadings typically contains 210 pairs of frequency (x) and amplitude (y);
+        // those are then mapped to a y coordinate and a color.
+        let mappedCells = mapFftReadingsToCells()
+        // size.height is the height of the cell as shown,
+        // size.width is its intensity between 0 and 1
+        var cumulativePosition = 0.0
+        var cellHeight = sliceHeight / CGFloat(fftReadingFrequencyAmplitudePairs.count)
+        // Iterating through the array with an index (instead of enumeration)
+        // as the index is used to calculate the height.
+        for index in 0 ..< mappedCells.count {
+            // index 0 contains the highest y, meaning the lowest frequency
+            cellHeight = mappedCells[index].height
+            let thisRect = CGRect(
+                origin: CGPoint(x: 0, y: cumulativePosition),
+                size: CGSize(width: sliceWidth, height: cellHeight))
+            cumulativePosition += cellHeight
+            allRects.append(thisRect)
+            allColors.append(Color(SpectrogramFlatView.gradientUIColors.intermediate(mappedCells[index].width)))
+        }
+        if cumulativePosition > sliceHeight {
+            // print("Warning: all cells sum up higher than what could fit: " +
+            //     "\(cumulativePosition) should be less or equal than: \(sliceHeight) for ID: \(id)")
+        }
+    }
+
+    // the incoming array of fft readings should be sorted by frequency
+    func mapFftReadingsToCells() -> [CGSize] {
+        guard fftReadingFrequencyAmplitudePairs.count > 0 else { return [] }
+        var outCells: [CGSize] = []
+        // Never return an empty array:
+        // the lowest delimiter at full amplitude but with no height.
+        outCells.append(CGSize(width: 1.0, height: 0.0))
+        // starting at line 1
+        var lastFrequencyPosition = 0.0
+        for index in 1 ..< fftReadingFrequencyAmplitudePairs.count {
+            let amplitude = fftReadingFrequencyAmplitudePairs[index].y.mapped(from: -200 ... 0, to: 0 ... 1.0)
+            // the frequency axis goes from the lowest frequency at 0 to the max frequency at full height
+            let frequency = fftReadingFrequencyAmplitudePairs[index].x
+            let frequencyPosition = frequency.mappedLog10(
+                from: fftMetaData.minFreq ... fftMetaData.maxFreq,
+                to: 0 ... sliceHeight
+            )
+
+            if frequencyPosition < 0.0 {
+                // These frequencies come from the FFT but we don't show them;
+                // they are typically smaller than minFreq.
+                continue
+            }
+            // Calculate the height using the last frequency position and ceil it to prevent
+            // black lines between measurements. It may happen that a cell is less
+            // than 1.0 high: that shouldn't bother us.
+            let cellHeight = ceil(frequencyPosition - lastFrequencyPosition)
+            lastFrequencyPosition += cellHeight
+            outCells.append(CGSize(width: amplitude, height: cellHeight))
+        }
+        // delimiter at the top end at full amplitude but with no height
+        outCells.append(CGSize(width: 1.0, height: 0.0))
+        return outCells
+    }
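The `mappedLog10` call above is what gives each octave the same vertical distance. A playground-style sketch of the underlying math, not part of the diff (`mappedLog10` is an AudioKitUI helper, assumed here to behave like this plain `log10` interpolation), using the defaults minFreq = 48, maxFreq = 13500 and a slice height of 150:

```swift
import Foundation

let minFreq = 48.0, maxFreq = 13500.0, sliceHeight = 150.0

// y position of a frequency on the slice: 0 at minFreq, sliceHeight at maxFreq
func position(_ frequency: Double) -> Double {
    (log10(frequency) - log10(minFreq)) / (log10(maxFreq) - log10(minFreq)) * sliceHeight
}

print(position(48))   // 0.0
print(position(96))   // ≈ 18.4 (one octave up)
print(position(192))  // ≈ 36.9 (each octave adds the same ≈ 18.4 points)
```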
+
+    /// Returns frequency/amplitude pairs after removing unwanted data points;
+    /// there are simply too many in the high frequencies.
+    /// The resulting array has at most fftSize readings. The incoming array is compiled
+    /// into CGPoints containing frequency and amplitude, where x is the frequency
+    /// and y the amplitude.
+    /// The amount of pairs depends on minFreq and maxFreq as well as the fftSize.
+    /// To understand the CGPoint x and y, imagine a chart that spans from left to right
+    /// from the lowest to the highest frequency and shows the amplitude vertically,
+    /// like the equalizer view of an 80s stereo system.
+    /// The FFT slices start at frequency 0, which is odd: the lowest reading means the
+    /// amplitude of all frequencies from 0 to the first other frequency
+    /// (typically 5Hz or 21.533Hz).
+    ///
+    /// Alternative implementation: have this array contain not CGPoints of frequency
+    /// and amplitude but only amplitudes, already color coded for the gradient.
+    /// The frequency axis would then be hardcoded as the plot distance on the y-axis.
+    ///
+    /// Improvement: make the filtering of high frequencies dependent on fftSize.
+    /// The more data, the more filtering is needed.
+    ///
+    /// Make this more energy efficient by combining this function with mapFftReadingsToCells.
+    func captureAmplitudeFrequencyData(_ fftFloats: [Float]) -> [CGPoint] {
+        // need at least two data points
+        guard fftFloats.count > 1 else { return [] }
+        var maxSquared: Float = 0.0
+        var frequencyChosen = 0.0
+        var points: [CGPoint] = []
+        // Frequencies are shown on a logarithmic scale (meaning octaves have the same
+        // distance). Therefore the data points above these levels are reduced.
+        let filterFrequencyHigh = 8000.0
+        let filterFrequencyMid = 4000.0
+        let filterFrequency = 1000.0
+
+        for index in 1 ... (fftFloats.count / 2) {
+            // The compiler will optimize these four following array accesses into two.
+            let real = fftFloats[index-1].isNaN ? 0.0 : fftFloats[index-1]
+            let imaginary = fftFloats[index].isNaN ? 0.0 : fftFloats[index]
+            let frequencyForBin = fftMetaData.sampleRate * 0.5 * Double(index * 2) / Double(fftFloats.count * 2)
+            var squared = real * real + imaginary * imaginary
+
+            // If the frequency is higher than we need: continue.
+            // We don't filter out low frequencies; they are all pushed to the queue.
+            if frequencyForBin > Double(fftMetaData.maxFreq) { continue }
+            frequencyChosen = frequencyForBin
+
+            if frequencyForBin > filterFrequencyHigh {
+                // take the greatest 1 in every 16 points when > 8kHz
+                maxSquared = squared > maxSquared ? squared : maxSquared
+                if index % 16 != 0 {
+                    continue
+                } else {
+                    squared = maxSquared
+                    maxSquared = 0.0
+                }
+            } else if frequencyForBin > filterFrequencyMid {
+                // take the greatest 1 in every 8 points when > 4kHz
+                maxSquared = squared > maxSquared ? squared : maxSquared
+                if index % 8 != 0 {
+                    continue
+                } else {
+                    squared = maxSquared
+                    maxSquared = 0.0
+                }
+            } else if frequencyForBin > filterFrequency {
+                // Take the greatest 1 in every 2 points when > 1kHz.
+                // This might already be too much data, depending on the highest frequency
+                // shown and the height at which this slice is shown.
+                // Might reduce it to show every 4th point.
+                maxSquared = squared > maxSquared ? squared : maxSquared
+                if index % 2 != 0 {
+                    continue
+                } else {
+                    squared = maxSquared
+                    maxSquared = 0.0
+                }
+            }
+            let amplitude = Double(10 * log10(4 * squared / (Float(fftMetaData.fftSize) * Float(fftMetaData.fftSize))))
+            points.append(CGPoint(x: frequencyChosen, y: amplitude))
+        }
+        return points
+    }
+}
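To make the two formulas in captureAmplitudeFrequencyData concrete, a worked example for a single bin; a sketch with made-up values, not part of the diff, assuming fftSize = 2048 and sampleRate = 44100 as in SpectrogramFFTMetaData:

```swift
import Foundation

let fftSize = 2048.0
let sampleRate = 44100.0
let floatCount = 2 * fftSize  // pushData receives 2 * fftSize floats

// one bin with made-up real/imaginary parts
let index = 12.0
let real = 0.02, imaginary = 0.01
let squared = real * real + imaginary * imaginary

// the same formulas as in captureAmplitudeFrequencyData
let frequencyForBin = sampleRate * 0.5 * (index * 2) / (floatCount * 2)  // ≈ 64.6 Hz
let amplitude = 10 * log10(4 * squared / (fftSize * fftSize))            // ≈ -93.2 dB
print(frequencyForBin, amplitude)
```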
+
+// MARK: Preview
+@available(iOS 17.0, *)
+struct SpectrogramSlice_Previews: PreviewProvider {
+    static var previews: some View {
+        // This shows the wrong behaviour of the slice: the lowest frequency isn't shown,
+        // and the lowest amplitude, below -200, should be black but is white.
+        return SpectrogramSlice(gradientUIColors:
+            [(#colorLiteral(red: 0, green: 0, blue: 0, alpha: 1)), (#colorLiteral(red: 0.1411764771, green: 0.3960784376, blue: 0.5647059083, alpha: 1)), (#colorLiteral(red: 0.4217140079, green: 0.6851614118, blue: 0.9599093795, alpha: 1)), (#colorLiteral(red: 0.8122602105, green: 0.6033009887, blue: 0.8759307861, alpha: 1)), (#colorLiteral(red: 0.9826132655, green: 0.5594901443, blue: 0.4263145328, alpha: 1)), (#colorLiteral(red: 1, green: 0.2607713342, blue: 0.4242972136, alpha: 1))],
+            sliceWidth: 40, sliceHeight: 150,
+            fftReadingsFrequencyAmplitudePairs: [
+                CGPoint(x: 150, y: -80),
+                CGPoint(x: 350, y: -50),
+                CGPoint(x: 500, y: -10),
+                CGPoint(x: 1000, y: -160),
+                CGPoint(x: 1500, y: -260),
+                CGPoint(x: 2000, y: -120),
+                CGPoint(x: 3000, y: -80),
+                CGPoint(x: 5000, y: -30),
+                CGPoint(x: 8800, y: -40),
+                CGPoint(x: 8000, y: -10)],
+            fftMetaData: SpectrogramFFTMetaData()
+        ).scaleEffect(x: 1, y: -1)
+    }
+}
diff --git a/Sources/AudioKitUI/Visualizations/SpectrogramFlatView/UIColor+intermediate.swift b/Sources/AudioKitUI/Visualizations/SpectrogramFlatView/UIColor+intermediate.swift
new file mode 100644
index 0000000..dae71f0
--- /dev/null
+++ b/Sources/AudioKitUI/Visualizations/SpectrogramFlatView/UIColor+intermediate.swift
@@ -0,0 +1,40 @@
+// Copyright AudioKit. All Rights Reserved. Revision History at http://github.com/AudioKit/AudioKitUI/
+
+import Foundation
+import UIKit
+
+/// Usage: get an intermediate color between two or more colors:
+///     let color = [.green, .yellow, .red].intermediate(0.7)
+/// inspired by
+/// https://stackoverflow.com/questions/15032562/ios-find-color-at-point-between-two-colors/59996029#59996029
+extension Array where Element: UIColor {
+    public func intermediate(_ percentage: CGFloat) -> UIColor {
+        let percentage = Swift.max(Swift.min(percentage, 1), 0)
+        switch percentage {
+        case 0: return first ?? .clear
+        case 1: return last ?? .clear
+        default:
+            let approxIndex = percentage / (1 / CGFloat(count - 1))
+            let firstIndex = Int(approxIndex.rounded(.down))
+            let secondIndex = Int(approxIndex.rounded(.up))
+            let fallbackIndex = Int(approxIndex.rounded())
+
+            let firstColor = self[firstIndex]
+            let secondColor = self[secondIndex]
+            let fallbackColor = self[fallbackIndex]
+
+            var red1: CGFloat = 0, green1: CGFloat = 0, blue1: CGFloat = 0, alpha1: CGFloat = 0
+            var red2: CGFloat = 0, green2: CGFloat = 0, blue2: CGFloat = 0, alpha2: CGFloat = 0
+            guard firstColor.getRed(&red1, green: &green1, blue: &blue1, alpha: &alpha1) else { return fallbackColor }
+            guard secondColor.getRed(&red2, green: &green2, blue: &blue2, alpha: &alpha2) else { return fallbackColor }
+
+            let intermediatePercentage = approxIndex - CGFloat(firstIndex)
+            return UIColor(
+                red: red1 + (red2 - red1) * intermediatePercentage,
+                green: green1 + (green2 - green1) * intermediatePercentage,
+                blue: blue1 + (blue2 - blue1) * intermediatePercentage,
+                alpha: alpha1 + (alpha2 - alpha1) * intermediatePercentage
+            )
+        }
+    }
+}
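A short sketch of how this gradient lookup behaves, following the index math above (not part of the diff; SpectrogramSlice passes the 0...1 mapped amplitude as the percentage):

```swift
import UIKit

let gradient: [UIColor] = [.black, .blue, .red]

let silent = gradient.intermediate(0.0)    // .black (first element)
let middle = gradient.intermediate(0.5)    // exactly .blue (approxIndex lands on 1)
let louder = gradient.intermediate(0.75)   // halfway between .blue and .red
let loudest = gradient.intermediate(1.0)   // .red (last element)
```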