
[WIP] working on more complete interface

1 parent f4572b0 commit 5d46854f5e1615a017918e48197fb3d0aebe8463 ndonald2 committed Jun 12, 2013
Showing with 112 additions and 44 deletions.
  1. +4 −0 Novocaine.xcodeproj/project.pbxproj
  2. +3 −2 Novocaine/AudioFileReader.h
  3. +0 −2 Novocaine/AudioFileReader.mm
  4. +2 −0 Novocaine/Novocaine.h
  5. +103 −40 Novocaine/Novocaine.m
Novocaine.xcodeproj/project.pbxproj
@@ -7,6 +7,7 @@
objects = {
/* Begin PBXBuildFile section */
+ 9AE2143E1768D0C60068A4DC /* AVFoundation.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 9AE2143D1768D0C60068A4DC /* AVFoundation.framework */; };
FA0B92E9154ACB100079834E /* AudioFileReader.mm in Sources */ = {isa = PBXBuildFile; fileRef = FA0B92E8154ACB100079834E /* AudioFileReader.mm */; };
FA0B92EA154ACB100079834E /* AudioFileReader.mm in Sources */ = {isa = PBXBuildFile; fileRef = FA0B92E8154ACB100079834E /* AudioFileReader.mm */; };
FA2015DD154B02CC00F8D3AC /* TLC.mp3 in Resources */ = {isa = PBXBuildFile; fileRef = FA2015DC154B02CC00F8D3AC /* TLC.mp3 */; };
@@ -42,6 +43,7 @@
/* End PBXBuildFile section */
/* Begin PBXFileReference section */
+ 9AE2143D1768D0C60068A4DC /* AVFoundation.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = AVFoundation.framework; path = System/Library/Frameworks/AVFoundation.framework; sourceTree = SDKROOT; };
FA0B92E7154ACB100079834E /* AudioFileReader.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = AudioFileReader.h; sourceTree = "<group>"; };
FA0B92E8154ACB100079834E /* AudioFileReader.mm */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.objcpp; path = AudioFileReader.mm; sourceTree = "<group>"; };
FA2015DC154B02CC00F8D3AC /* TLC.mp3 */ = {isa = PBXFileReference; lastKnownFileType = audio.mp3; path = TLC.mp3; sourceTree = "<group>"; };
@@ -105,6 +107,7 @@
isa = PBXFrameworksBuildPhase;
buildActionMask = 2147483647;
files = (
+ 9AE2143E1768D0C60068A4DC /* AVFoundation.framework in Frameworks */,
FA5526EF152A6466003D9601 /* Accelerate.framework in Frameworks */,
FA5526F0152A6466003D9601 /* AudioToolbox.framework in Frameworks */,
FA5526F2152A6466003D9601 /* CoreAudio.framework in Frameworks */,
@@ -146,6 +149,7 @@
FA55269E152A6405003D9601 /* AudioToolbox.framework */,
FA5526ED152A6466003D9601 /* AudioUnit.framework */,
FA55269F152A6405003D9601 /* AudioUnit.framework */,
+ 9AE2143D1768D0C60068A4DC /* AVFoundation.framework */,
FA5526EE152A6466003D9601 /* CoreAudio.framework */,
FA5526A0152A6405003D9601 /* CoreAudio.framework */,
FA55263B152A63A4003D9601 /* CoreGraphics.framework */,
Novocaine/AudioFileReader.h
@@ -32,23 +32,24 @@
@property (nonatomic, assign, getter=getCurrentTime, setter=setCurrentTime:) float currentTime;
@property (nonatomic, copy) NovocaineInputBlock readerBlock;
+@property (nonatomic, assign) float latency;
// ----- Read-only ------
@property (nonatomic, copy, readonly) NSURL *audioFileURL;
@property (nonatomic, assign, readonly, getter=getDuration) float duration;
@property (nonatomic, assign, readonly) float samplingRate;
@property (nonatomic, assign, readonly) UInt32 numChannels;
-@property (nonatomic, assign, readonly) float latency;
@property (nonatomic, assign, readonly) BOOL playing;
- (id)initWithAudioFileURL:(NSURL *)urlToAudioFile samplingRate:(float)thisSamplingRate numChannels:(UInt32)thisNumChannels;
// You use this method to grab audio if you have your own callback.
// The buffer'll fill at the speed the audio is normally being played.
+
- (void)retrieveFreshAudio:(float *)buffer numFrames:(UInt32)thisNumFrames numChannels:(UInt32)thisNumChannels;
-//- (float)getCurrentTime;
+
- (void)play;
- (void)pause;
- (void)stop;
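With latency moved out of the read-only block into the configurable properties, a caller could presumably tune it before starting playback. A minimal sketch, assuming fileURL is an NSURL to a local audio file and the latency value is purely illustrative:

    AudioFileReader *fileReader =
        [[AudioFileReader alloc] initWithAudioFileURL:fileURL
                                         samplingRate:44100.0
                                          numChannels:2];
    fileReader.latency = 0.0232f;   // ~1024 frames at 44.1 kHz; illustrative value only
    [fileReader play];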
Novocaine/AudioFileReader.mm
@@ -37,7 +37,6 @@ @interface AudioFileReader ()
@property (nonatomic, assign, readwrite, getter=getDuration) float duration;
@property (nonatomic, assign, readwrite) float samplingRate;
@property (nonatomic, assign, readwrite) UInt32 numChannels;
-@property (nonatomic, assign, readwrite) float latency;
@property (nonatomic, assign, readwrite) BOOL playing;
@property (nonatomic, assign) AudioStreamBasicDescription outputFormat;
@@ -215,7 +214,6 @@ - (float)getDuration
- (void)configureReaderCallback
{
-
if (!self.callbackTimer)
{
self.callbackTimer = dispatch_source_create(DISPATCH_SOURCE_TYPE_TIMER, 0, 0, dispatch_get_main_queue());
Novocaine/Novocaine.h
@@ -30,6 +30,7 @@
#include <CoreAudio/CoreAudio.h>
#else
#define USING_IOS
+ #include <AVFoundation/AVFoundation.h>
#endif
#include <Block.h>
@@ -101,6 +102,7 @@ typedef void (^NovocaineInputBlock)(float *data, UInt32 numFrames, UInt32 numCha
// ------ These properties/methods are used for configuration -------
@property (nonatomic, copy) NSString *inputRoute;
+@property (nonatomic, assign) BOOL inputEnabled;
// ND: Exposing the block setters this way will create the correct block signature for auto-complete.
// These will map to "copy" property setters in class continuation in source file
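For context, a hedged usage sketch of the new inputEnabled flag from the caller's side. The audioManager singleton and setInputBlock: setter come from Novocaine's existing API; the call ordering is an assumption about the intended flow rather than something this WIP commit pins down:

    Novocaine *audioManager = [Novocaine audioManager];
    [audioManager setInputBlock:^(float *data, UInt32 numFrames, UInt32 numChannels) {
        // consume microphone samples here
    }];
    audioManager.inputEnabled = YES;   // turn capture on only when it is actually needed
    [audioManager play];

Compared with the previous behaviour, where input was configured and started automatically during session setup, this appears to let an output-only app leave the microphone untouched.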
Novocaine/Novocaine.m
@@ -74,8 +74,9 @@ @interface Novocaine()
- (void)enumerateAudioDevices;
#endif
-- (void)setupAudio;
-- (void)ifAudioInputIsAvailableThenSetupAudioSession;
+// must be called prior to playing audio
+- (void)setupAudioSession;
+- (void)setupAudioUnits;
- (NSString *)applicationDocumentsDirectory;
@@ -135,7 +136,7 @@ - (id)init
// self.playThroughEnabled = NO;
// Fire up the audio session ( with steady error checking ... )
- [self ifAudioInputIsAvailableThenSetupAudioSession];
+ [self setupAudioSession];
return self;
@@ -174,12 +175,81 @@ - (void)freeBuffers
}
}
+#pragma mark - Properties
+
+- (void)setInputEnabled:(BOOL)inputEnabled
+{
+ if (inputEnabled != _inputEnabled){
+
+ if (inputEnabled){
+
+ if (self.inputAvailable) {
+
+ // Enable input
+ UInt32 one = 1;
+ OSStatus err = AudioUnitSetProperty(_inputUnit,
+ kAudioOutputUnitProperty_EnableIO,
+ kAudioUnitScope_Input,
+ kInputBus,
+ &one,
+ sizeof(one));
+
+ if (err){
+
+ }
+ else{
+ _inputEnabled = YES;
+ }
+ }
+
+ // If we don't have input, then ask the user to provide some
+ else
+ {
+
+ // TODO: Not sure a
+#if defined ( USING_IOS )
+ UIAlertView *noInputAlert =
+ [[UIAlertView alloc] initWithTitle:@"No Audio Input"
+ message:@"Couldn't find any audio input. Plug in your Apple headphones or another microphone."
+ delegate:self
+ cancelButtonTitle:@"OK"
+ otherButtonTitles:nil];
+
+ [noInputAlert show];
+#endif
+
+ }
+ }
+ else
+ {
+ // Disable input
+ UInt32 zero = 0;
+ OSStatus err = AudioUnitSetProperty(_inputUnit,
+ kAudioOutputUnitProperty_EnableIO,
+ kAudioUnitScope_Input,
+ kInputBus,
+ &zero,
+ sizeof(zero));
+
+ if (err){
+
+ }
+ else{
+ _inputEnabled = NO;
+ }
+ }
+
+ }
+}
+
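The two if (err) branches in the new setter are still empty in this WIP commit; one way they could be filled in, keeping the NSLog-on-error style used for the AVAudioSession call later in this diff (a sketch, not the author's final intent):

    if (err) {
        NSLog(@"Couldn't enable input on the I/O unit (OSStatus: %d)", (int)err);
    }
    else {
        _inputEnabled = YES;
    }

The disable branch would mirror this, clearing _inputEnabled to NO only when the property call succeeds.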
#pragma mark - Audio Methods
-- (void)ifAudioInputIsAvailableThenSetupAudioSession {
- // Initialize and configure the audio session, and add an interuption listener
+- (void)setupAudioSession
+{
+
+ // Initialize and configure the audio session, and add an interuption listener
#if defined ( USING_IOS )
CheckError( AudioSessionInitialize(NULL, NULL, sessionInterruptionListener, (__bridge void *)(self)), "Couldn't initialize audio session");
@@ -189,39 +259,37 @@ - (void)ifAudioInputIsAvailableThenSetupAudioSession {
[self enumerateAudioDevices];
self.inputAvailable = YES;
#endif
+
// Check the session properties (available input routes, number of channels, etc)
-
-
// If we do have input, then let's rock 'n roll.
- if (self.inputAvailable) {
- [self setupAudio];
- [self play];
- }
-
- // If we don't have input, then ask the user to provide some
- else
- {
-#if defined ( USING_IOS )
- UIAlertView *noInputAlert =
- [[UIAlertView alloc] initWithTitle:@"No Audio Input"
- message:@"Couldn't find any audio input. Plug in your Apple headphones or another microphone."
- delegate:self
- cancelButtonTitle:@"OK"
- otherButtonTitles:nil];
-
- [noInputAlert show];
-#endif
-
- }
+// if (self.inputAvailable) {
+// [self setupAudio];
+// [self play];
+// }
+//
+// // If we don't have input, then ask the user to provide some
+// else
+// {
+//#if defined ( USING_IOS )
+// UIAlertView *noInputAlert =
+// [[UIAlertView alloc] initWithTitle:@"No Audio Input"
+// message:@"Couldn't find any audio input. Plug in your Apple headphones or another microphone."
+// delegate:self
+// cancelButtonTitle:@"OK"
+// otherButtonTitles:nil];
+//
+// [noInputAlert show];
+//#endif
+//
+// }
}
-- (void)setupAudio
+- (void)setupAudioUnits
{
-
// --- Audio Session Setup ---
// ---------------------------
@@ -245,7 +313,11 @@ - (void)setupAudio
// Set the audio session active
- CheckError( AudioSessionSetActive(YES), "Couldn't activate the audio session");
+ NSError *err = nil;
+ if (![[AVAudioSession sharedInstance] setActive:YES error:&err]){
+ NSLog(@"Couldn't activate audio session: %@", err);
+ }
+
[self checkSessionProperties];
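This is where the new AVFoundation link and header import come in: AVAudioSession takes over from the older C-based AudioSessionSetActive call. A fuller session setup would presumably also pick a category before activating; a sketch of that extra step under that assumption (the category choice is not part of this commit):

    NSError *categoryErr = nil;
    if (![[AVAudioSession sharedInstance] setCategory:AVAudioSessionCategoryPlayAndRecord
                                                error:&categoryErr]) {
        NSLog(@"Couldn't set audio session category: %@", categoryErr);
    }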
@@ -294,15 +366,6 @@ - (void)setupAudio
#endif
- // Enable input
- UInt32 one = 1;
- CheckError( AudioUnitSetProperty(_inputUnit,
- kAudioOutputUnitProperty_EnableIO,
- kAudioUnitScope_Input,
- kInputBus,
- &one,
- sizeof(one)), "Couldn't enable IO on the input scope of output unit");
-
#if defined ( USING_OSX )
// Disable output on the input unit
// (only on Mac, since on the iPhone, the input unit is also the output unit)
@@ -663,7 +726,6 @@ - (void)play {
#endif
-
self.inputAvailable = isInputAvailable;
if ( self.inputAvailable ) {
@@ -762,6 +824,7 @@ OSStatus renderCallback (void *inRefCon,
UInt32 inNumberFrames,
AudioBufferList * ioData)
{
+ // autorelease pool for much faster ARC performance on repeated calls from separate thread
@autoreleasepool {
Novocaine *sm = (__bridge Novocaine *)inRefCon;
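The @autoreleasepool added to renderCallback matters because the callback runs repeatedly on the audio I/O thread, which never returns to a run loop that would drain autoreleased objects for it. The same pattern in isolation, as a self-contained sketch unrelated to Novocaine's actual render logic:

    #import <AudioToolbox/AudioToolbox.h>
    #include <string.h>

    static OSStatus exampleRenderCallback(void *inRefCon,
                                          AudioUnitRenderActionFlags *ioActionFlags,
                                          const AudioTimeStamp *inTimeStamp,
                                          UInt32 inBusNumber,
                                          UInt32 inNumberFrames,
                                          AudioBufferList *ioData)
    {
        @autoreleasepool {
            // Any Objective-C message sent from here may leave autoreleased objects behind;
            // the pool drains them each render cycle instead of letting them accumulate
            // on the real-time I/O thread.
            for (UInt32 i = 0; i < ioData->mNumberBuffers; i++) {
                // Render silence as a placeholder for real DSP work
                memset(ioData->mBuffers[i].mData, 0, ioData->mBuffers[i].mDataByteSize);
            }
        }
        return noErr;
    }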
