From 83bacdba297ad921f793ead02f0ebaf8619c7b41 Mon Sep 17 00:00:00 2001 From: Martin Halter Date: Sun, 21 Apr 2024 01:54:27 +0200 Subject: [PATCH] Minor adjustments to documentation and minor adjust to SpectrogramFlatView (#86) * Added AudioKitUI.docc with a markdown so Documentation can easily be built for iOS. * Minor adjustments to documentation * Fixed title in main docc article so it shows directly on home page of AudioKitUI * Added links to other UI related frameworks of AudioKit * Fixed amplitude. It was not sensitive enough, showing full amplitude at half the intensity. * Cleanup of commented .borders * First quadrant flipping only once in the body * Trailing white space --- README.md | 16 +++++----- .../AudioKitUI/AudioKitUI.docc/AudioKitUI.md | 31 +++++++++++++++++++ Sources/AudioKitUI/Controls/ADSRView.swift | 1 + .../AudioKitUI/Helpers/MorphableShape.swift | 7 ++--- .../SpectrogramFlatView.swift | 5 +-- .../SpectrogramModel.swift | 4 +-- .../SpectrogramSlice.swift | 9 ++++-- .../UIColor+intermediate.swift | 12 ++++--- 8 files changed, 61 insertions(+), 24 deletions(-) create mode 100644 Sources/AudioKitUI/AudioKitUI.docc/AudioKitUI.md diff --git a/README.md b/README.md index ad2a8f6..ee3731c 100644 --- a/README.md +++ b/README.md @@ -32,10 +32,12 @@ Just like AudioKit, the example project for AudioKitUI is the [AudioKit Cookbook ## More! -Because some user interfaces are quite complex, and don't really have AudioKit as a dependency, we will be putting them in other repositories under the AudioKit umbrella. 
- -### Piano Roll - -[https://github.com/AudioKit/PianoRoll](https://github.com/AudioKit/PianoRoll) - -piano roll screenshot +Because some user interfaces are quite complex, and don't really have AudioKit as a dependency, they are in other repositories under the AudioKit umbrella: + +* Controls: SwiftUI Knobs, Sliders, X-Y Pads, and more [github.com/AudioKit/Controls](https://github.com/AudioKit/Controls) +* Flow: Generic node graph editor [github.com/AudioKit/Flow](https://github.com/AudioKit/Flow) +* Keyboard: SwiftUI music keyboard [github.com/AudioKit/Keyboard](https://github.com/AudioKit/Keyboard) +* Piano Roll: Touch oriented piano roll [github.com/AudioKit/PianoRoll](https://github.com/AudioKit/PianoRoll) +* PianoRollEditor: Logic Pro like piano roll editor [github.com/AudioKit/PianoRollEditor](https://github.com/AudioKit/PianoRollEditor) +* MIDITrackView: View representing a MIDI Track [github.com/AudioKit/MIDITrackView](https://github.com/AudioKit/MIDITrackView) +* Waveform: GPU accelerated waveform view [github.com/AudioKit/Waveform](https://github.com/AudioKit/Waveform) diff --git a/Sources/AudioKitUI/AudioKitUI.docc/AudioKitUI.md b/Sources/AudioKitUI/AudioKitUI.docc/AudioKitUI.md new file mode 100644 index 0000000..9b94c7f --- /dev/null +++ b/Sources/AudioKitUI/AudioKitUI.docc/AudioKitUI.md @@ -0,0 +1,31 @@ +# ``AudioKitUI`` + +@Metadata { + @TitleHeading("AudioKit User Interfaces") +} + +Waveform plots and controls that can be used to jump start your AudioKit-powered app. + +## Requirements + +We use SwiftUI so you need to target iOS 13+ and macOS 10.15+. + +## Installation via Swift Package Manager + +To add AudioKitUI to your Xcode project, select File -> Swift Packages -> Add Package Dependency. Enter `https://github.com/AudioKit/AudioKitUI` for the URL. + +## Examples + +Just like AudioKit, the example project for AudioKitUI is the [AudioKit Cookbook](https://github.com/AudioKit/Cookbook/). 
+ +## More + +Because some user interfaces are quite complex, and don't really have AudioKit as a dependency, they are in other repositories under the AudioKit umbrella. + +- term Controls: SwiftUI Knobs, Sliders, X-Y Pads, and more [github.com/AudioKit/Controls](https://github.com/AudioKit/Controls) +- term Flow: Generic node graph editor [github.com/AudioKit/Flow](https://github.com/AudioKit/Flow) +- term Keyboard: SwiftUI music keyboard [github.com/AudioKit/Keyboard](https://github.com/AudioKit/Keyboard) +- term Piano Roll: Touch oriented piano roll [github.com/AudioKit/PianoRoll](https://github.com/AudioKit/PianoRoll) +- term PianoRollEditor: Logic Pro like piano roll editor [github.com/AudioKit/PianoRollEditor](https://github.com/AudioKit/PianoRollEditor) +- term MIDITrackView: View representing a MIDI Track [github.com/AudioKit/MIDITrackView](https://github.com/AudioKit/MIDITrackView) +- term Waveform: GPU accelerated waveform view [github.com/AudioKit/Waveform](https://github.com/AudioKit/Waveform) diff --git a/Sources/AudioKitUI/Controls/ADSRView.swift b/Sources/AudioKitUI/Controls/ADSRView.swift index 4c932e0..5af1af7 100644 --- a/Sources/AudioKitUI/Controls/ADSRView.swift +++ b/Sources/AudioKitUI/Controls/ADSRView.swift @@ -7,6 +7,7 @@ import AVFoundation import UIKit /// A click and draggable view of an ADSR Envelope (Atttack, Decay, Sustain, Release) +/// /// All values are normalised 0->1, so scale them how you would like in your callback @IBDesignable public class ADSRView: UIView { diff --git a/Sources/AudioKitUI/Helpers/MorphableShape.swift b/Sources/AudioKitUI/Helpers/MorphableShape.swift index 0889ed9..e431133 100644 --- a/Sources/AudioKitUI/Helpers/MorphableShape.swift +++ b/Sources/AudioKitUI/Helpers/MorphableShape.swift @@ -31,17 +31,16 @@ struct MorphableShape: Shape { } } -// MARK: Path extension - +/// Return points at a given offset and create AnimatableVector for control points extension Path { - // return point at the curve + /// return 
point at the curve func point(at offset: CGFloat) -> CGPoint { let limitedOffset = min(max(offset, 0), 1) guard limitedOffset > 0 else { return cgPath.currentPoint } return trimmedPath(from: 0, to: limitedOffset).cgPath.currentPoint } - // return control points along the path + /// return control points along the path func controlPoints(count: Int) -> AnimatableVector { var retPoints = [Double]() for index in 0 ..< count { diff --git a/Sources/AudioKitUI/Visualizations/SpectrogramFlatView/SpectrogramFlatView.swift b/Sources/AudioKitUI/Visualizations/SpectrogramFlatView/SpectrogramFlatView.swift index 646da37..53b057e 100644 --- a/Sources/AudioKitUI/Visualizations/SpectrogramFlatView/SpectrogramFlatView.swift +++ b/Sources/AudioKitUI/Visualizations/SpectrogramFlatView/SpectrogramFlatView.swift @@ -120,14 +120,11 @@ public struct SpectrogramFlatView: View { } HStack(spacing: 0.0) { ForEach(spectrogram.slices.items) { slice in - // flip it as the slice was drawn in the first quadrant - slice.scaleEffect(x: 1, y: -1) - // .border(.green, width: 2.0) + slice } // flip it so the new slices come in right and move to the left .scaleEffect(x: -1, y: 1) } - // .border(.red, width: 5.0) .frame(maxWidth: .infinity, maxHeight: .infinity, alignment: .trailing) }.onAppear { spectrogram.sliceSize = calcSliceSize(fromFrameSize: geometry.size) diff --git a/Sources/AudioKitUI/Visualizations/SpectrogramFlatView/SpectrogramModel.swift b/Sources/AudioKitUI/Visualizations/SpectrogramFlatView/SpectrogramModel.swift index 8e9c7e5..3946c6d 100644 --- a/Sources/AudioKitUI/Visualizations/SpectrogramFlatView/SpectrogramModel.swift +++ b/Sources/AudioKitUI/Visualizations/SpectrogramFlatView/SpectrogramModel.swift @@ -20,7 +20,7 @@ struct SpectrogramFFTMetaData { // New data comes roughly 5.5 times per second, each 186ms. // Choose a higher value when you want to analyze low frequencies, // choose a lower value when you want fast response and high frame rate on display. 
- let fftSize = 2048 + let fftSize: UInt32 = 2048 // Lowest and highest frequencies shown. // We use 48Hz, which is a bit lower than G1. A1 would be 440Hz/8 = 55Hz. @@ -137,7 +137,7 @@ class SpectrogramFlatModel: ObservableObject { // main thread and user while doing the work if node !== self.node { self.node = node - nodeTap = FFTTap(node, bufferSize: UInt32(nodeMetaData.fftSize * 2), callbackQueue: .global()) { fftData in + nodeTap = FFTTap(node, bufferSize: nodeMetaData.fftSize * 2, callbackQueue: .global()) { fftData in self.pushData(fftData) } // normalization would mean that on each slice, the loudest would have diff --git a/Sources/AudioKitUI/Visualizations/SpectrogramFlatView/SpectrogramSlice.swift b/Sources/AudioKitUI/Visualizations/SpectrogramFlatView/SpectrogramSlice.swift index ad93873..e89fcb0 100644 --- a/Sources/AudioKitUI/Visualizations/SpectrogramFlatView/SpectrogramSlice.swift +++ b/Sources/AudioKitUI/Visualizations/SpectrogramFlatView/SpectrogramSlice.swift @@ -88,6 +88,8 @@ struct SpectrogramSlice: View, Identifiable { public var body: some View { return Image(uiImage: cachedUIImage).resizable() + // flip it as the slice was drawn in the first quadrant + .scaleEffect(x: 1, y: -1) } // This code draws in the first quadrant, it's much easier to understand @@ -222,7 +224,7 @@ struct SpectrogramSlice: View, Identifiable { let real = fftFloats[index-1].isNaN ? 0.0 : fftFloats[index-1] let imaginary = fftFloats[index].isNaN ? 
0.0 : fftFloats[index] let frequencyForBin = fftMetaData.sampleRate * 0.5 * Double(index * 2) / Double(fftFloats.count * 2) - var squared = real * real + imaginary * imaginary + var squared: Float = real * real + imaginary * imaginary // if the frequency is higher as we need: continue // we don't filter low frequencies, they are all pushed to the queue @@ -257,7 +259,8 @@ struct SpectrogramSlice: View, Identifiable { maxSquared = 0.0 } } - let amplitude = Double(10 * log10(4 * squared / (Float(fftMetaData.fftSize) * Float(fftMetaData.fftSize)))) + let fftBins = CGFloat(fftMetaData.fftSize) + let amplitude = Double(10 * log10(4 * CGFloat(squared) / (fftBins * fftBins))) points.append(CGPoint(x: frequencyChosen, y: amplitude)) } return points @@ -285,7 +288,7 @@ struct SpectrogramSlice_Previews: PreviewProvider { CGPoint(x: 8800, y: -40), CGPoint(x: 8000, y: -10)], fftMetaData: SpectrogramFFTMetaData() - ).scaleEffect(x: 1, y: -1) + ) } } diff --git a/Sources/AudioKitUI/Visualizations/SpectrogramFlatView/UIColor+intermediate.swift b/Sources/AudioKitUI/Visualizations/SpectrogramFlatView/UIColor+intermediate.swift index fb41b54..a33b17e 100644 --- a/Sources/AudioKitUI/Visualizations/SpectrogramFlatView/UIColor+intermediate.swift +++ b/Sources/AudioKitUI/Visualizations/SpectrogramFlatView/UIColor+intermediate.swift @@ -5,11 +5,15 @@ import Foundation import UIKit -/// usage You can use it to get an intermediate color between two or more colors: -/// let color = [.green, .yellow, .red].intermediate(0.7) -/// inspired by -/// https://stackoverflow.com/questions/15032562/ios-find-color-at-point-between-two-colors/59996029#59996029 +/// Get an intermediate color between two or more colors +/// +/// Example: `let color = [.green, .yellow, .red].intermediate(0.7)` +/// +/// inspired by [stackoverflow 15032562 answer Nikaaner](https://stackoverflow.com/questions/15032562/ios-find-color-at-point-between-two-colors/59996029#59996029) extension Array where Element: UIColor { + /// 
Get an intermediate color between two or more colors + /// + /// Example: `let color = [.green, .yellow, .red].intermediate(0.7)` public func intermediate(_ percentage: CGFloat) -> UIColor { let percentage = Swift.max(Swift.min(percentage, 1), 0) switch percentage {