From 1a1fda02da5998943dbb91d078b44edac65ae8be Mon Sep 17 00:00:00 2001 From: Robert Fromont Date: Tue, 31 Jan 2017 17:40:06 -0300 Subject: [PATCH 01/18] Add suppport for saving WAV file directly. --- src/android/AudioInputCapture.java | 55 +++++++- src/android/AudioInputReceiver.java | 189 +++++++++++++++++++++++++--- www/audioInputCapture.js | 16 ++- 3 files changed, 236 insertions(+), 24 deletions(-) diff --git a/src/android/AudioInputCapture.java b/src/android/AudioInputCapture.java index 3a27a88..b968267 100644 --- a/src/android/AudioInputCapture.java +++ b/src/android/AudioInputCapture.java @@ -9,6 +9,10 @@ import org.json.JSONObject; import java.lang.ref.WeakReference; +import java.io.File; +import java.net.URI; +import java.net.URISyntaxException; + import android.os.Handler; import android.os.Message; @@ -28,12 +32,14 @@ public class AudioInputCapture extends CordovaPlugin public static String[] permissions = { Manifest.permission.RECORD_AUDIO }; public static int RECORD_AUDIO = 0; public static final int PERMISSION_DENIED_ERROR = 20; + public static final int INVALID_URL_ERROR = 30; private int sampleRate = 44100; private int bufferSize = 4096; private int channels = 1; private String format = null; private int audioSource = 0; + private URI fileUrl = null; @Override public boolean execute(String action, JSONArray args, CallbackContext callbackContext) throws JSONException { @@ -51,9 +57,29 @@ public boolean execute(String action, JSONArray args, CallbackContext callbackCo this.channels = args.getInt(2); this.format = args.getString(3); this.audioSource = args.getInt(4); + String fileUrlString = args.getString(5); + if (fileUrlString != null) { + this.fileUrl = new URI(fileUrlString); + // ensure it's a file URL + new File(this.fileUrl); + } promptForRecord(); } + catch (URISyntaxException e) { // not a valid URL + receiver.interrupt(); + + this.callbackContext.sendPluginResult(new PluginResult(PluginResult.Status.ERROR, + INVALID_URL_ERROR)); + return 
false; + } + catch (IllegalArgumentException e) { // not a file URL + receiver.interrupt(); + + this.callbackContext.sendPluginResult(new PluginResult(PluginResult.Status.ERROR, + INVALID_URL_ERROR)); + return false; + } catch (Exception e) { receiver.interrupt(); @@ -69,9 +95,13 @@ public boolean execute(String action, JSONArray args, CallbackContext callbackCo return true; } else if (action.equals("stop")) { - receiver.interrupt(); - this.sendUpdate(new JSONObject(), false); // release status callback in JS side - this.callbackContext = null; + receiver.interrupt(); + // only do this if we're not saving to a file, + // otherwise we won't get the event about the file being complete + if (fileUrl == null) { + this.sendUpdate(new JSONObject(), false); // release status callback in JS side + this.callbackContext = null; + } callbackContext.success(); return true; } @@ -120,6 +150,21 @@ public void handleMessage(Message msg) { catch (JSONException e) { Log.e(LOG_TAG, e.getMessage(), e); } + try { + info.put("error", msg.getData().getString("error")); + } + catch (JSONException e) { + Log.e(LOG_TAG, e.getMessage(), e); + } + try { + info.put("file", msg.getData().getString("file")); + activity.sendUpdate(info, false); // release status callback in JS side + activity.callbackContext = null; + return; + } + catch (JSONException e) { + Log.e(LOG_TAG, e.getMessage(), e); + } activity.sendUpdate(info, true); } } @@ -137,7 +182,7 @@ protected void getMicPermission(int requestCode) { */ private void promptForRecord() { if(PermissionHelper.hasPermission(this, permissions[RECORD_AUDIO])) { - receiver = new AudioInputReceiver(this.sampleRate, this.bufferSize, this.channels, this.format, this.audioSource); + receiver = new AudioInputReceiver(this.sampleRate, this.bufferSize, this.channels, this.format, this.audioSource, this.fileUrl); receiver.setHandler(handler); receiver.start(); } @@ -162,4 +207,4 @@ public void onRequestPermissionResult(int requestCode, String[] permissions, 
promptForRecord(); } -} \ No newline at end of file +} diff --git a/src/android/AudioInputReceiver.java b/src/android/AudioInputReceiver.java index 065ba8b..5c36ccf 100644 --- a/src/android/AudioInputReceiver.java +++ b/src/android/AudioInputReceiver.java @@ -8,6 +8,14 @@ import android.os.Handler; import android.os.Message; import java.util.Arrays; +import java.io.File; +import java.io.FileOutputStream; +import java.io.BufferedOutputStream; +import java.io.DataOutputStream; +import java.io.FileInputStream; +import java.io.OutputStream; +import java.io.IOException; +import java.net.URI; import android.util.Base64; @@ -30,12 +38,13 @@ public class AudioInputReceiver extends Thread { private Handler handler; private Message message; private Bundle messageBundle = new Bundle(); + private URI fileUrl; public AudioInputReceiver() { recorder = new AudioRecord(MediaRecorder.AudioSource.DEFAULT, sampleRateInHz, channelConfig, audioFormat, minBufferSize * RECORDING_BUFFER_FACTOR); } - public AudioInputReceiver(int sampleRate, int bufferSizeInBytes, int channels, String format, int audioSource) { + public AudioInputReceiver(int sampleRate, int bufferSizeInBytes, int channels, String format, int audioSource, URI fileUrl) { sampleRateInHz = sampleRate; switch (channels) { @@ -69,6 +78,7 @@ public AudioInputReceiver(int sampleRate, int bufferSizeInBytes, int channels, S } recorder = new AudioRecord(audioSource, sampleRateInHz, channelConfig, audioFormat, recordingBufferSize); + this.fileUrl = fileUrl; } public void setHandler(Handler handler) { @@ -77,39 +87,182 @@ public void setHandler(Handler handler) { @Override public void run() { + if (fileUrl == null) { int numReadBytes = 0; - short audioBuffer[] = new short[readBufferSize]; - + // byte audioBuffer[] = new byte[readBufferSize]; + short audioBuffer[] = new short[readBufferSize]; synchronized(this) { recorder.startRecording(); - while (!isInterrupted()) { - numReadBytes = recorder.read(audioBuffer, 0, readBufferSize); - - 
if (numReadBytes > 0) { - try { + try + { + while (!isInterrupted()) { + numReadBytes = recorder.read(audioBuffer, 0, readBufferSize); + + if (numReadBytes > 0) { + try { String decoded = Arrays.toString(audioBuffer); - + message = handler.obtainMessage(); + messageBundle = new Bundle(); messageBundle.putString("data", decoded); message.setData(messageBundle); handler.sendMessage(message); - } - catch(Exception ex) { + } + catch(Exception ex) { message = handler.obtainMessage(); + messageBundle = new Bundle(); messageBundle.putString("error", ex.toString()); message.setData(messageBundle); handler.sendMessage(message); - } - } - } + } + } + } // loop - if (recorder.getRecordingState() == AudioRecord.RECORDSTATE_RECORDING) { - recorder.stop(); - } + if (recorder.getRecordingState() == AudioRecord.RECORDSTATE_RECORDING) { + recorder.stop(); + } + } + catch(Exception ex) + { + message = handler.obtainMessage(); + messageBundle = new Bundle(); + messageBundle.putString("error", ex.toString()); + message.setData(messageBundle); + handler.sendMessage(message); + } recorder.release(); recorder = null; } + } + else + { // recording to fileUrl + int numReadBytes = 0; + byte audioBuffer[] = new byte[readBufferSize]; + synchronized(this) { + recorder.startRecording(); + + try + { + File audioFile = File.createTempFile("AudioInputReceiver-", ".pcm"); + FileOutputStream os = new FileOutputStream(audioFile.getPath()); + + while (!isInterrupted()) { + numReadBytes = recorder.read(audioBuffer, 0, readBufferSize); + + if (numReadBytes > 0) { + try { + os.write(audioBuffer, 0, numReadBytes); + } + catch(Exception ex) { + message = handler.obtainMessage(); + messageBundle = new Bundle(); + messageBundle.putString("error", ex.toString()); + message.setData(messageBundle); + handler.sendMessage(message); + } + } + } // loop + os.close(); + File wav = new File(fileUrl); + addWavHeader(audioFile, wav); + message = handler.obtainMessage(); + messageBundle = new Bundle(); + 
messageBundle.putString("file", wav.toURI().toString()); + message.setData(messageBundle); + handler.sendMessage(message); + + if (recorder.getRecordingState() == AudioRecord.RECORDSTATE_RECORDING) { + recorder.stop(); + } + } + catch(Throwable ex) + { + message = handler.obtainMessage(); + messageBundle = new Bundle(); + messageBundle.putString("error", ex.toString()); + message.setData(messageBundle); + handler.sendMessage(message); + } + + recorder.release(); + recorder = null; + } + } // recording to fileUrl } -} \ No newline at end of file + + private File addWavHeader(File fPCM, File wav) { + try { + long mySubChunk1Size = 16; + int myBitsPerSample= audioFormat==AudioFormat.ENCODING_PCM_8BIT?8:16; + int myFormat = 1; + long myChannels = channelConfig==AudioFormat.CHANNEL_IN_STEREO?2:1; + long mySampleRate = sampleRateInHz; + long myByteRate = mySampleRate * myChannels * myBitsPerSample/8; + int myBlockAlign = (int) (myChannels * myBitsPerSample/8); + + long myDataSize = fPCM.length(); + long myChunk2Size = myDataSize * myChannels * myBitsPerSample/8; + long myChunkSize = 36 + myChunk2Size; + + OutputStream os = new FileOutputStream(wav); + BufferedOutputStream bos = new BufferedOutputStream(os); + DataOutputStream outFile = new DataOutputStream(bos); + + outFile.writeBytes("RIFF"); // 00 - RIFF + outFile.write(intToByteArray((int)myChunkSize), 0, 4); // 04 - how big is the rest of this file? + outFile.writeBytes("WAVE"); // 08 - WAVE + outFile.writeBytes("fmt "); // 12 - fmt + outFile.write(intToByteArray((int)mySubChunk1Size), 0, 4); // 16 - size of this chunk + outFile.write(shortToByteArray((short)myFormat), 0, 2); // 20 - what is the audio format? 1 for PCM = Pulse Code Modulation + outFile.write(shortToByteArray((short)myChannels), 0, 2); // 22 - mono or stereo? 1 or 2? (or 5 or ???) 
+ outFile.write(intToByteArray((int)mySampleRate), 0, 4); // 24 - samples per second (numbers per second) + outFile.write(intToByteArray((int)myByteRate), 0, 4); // 28 - bytes per second + outFile.write(shortToByteArray((short)myBlockAlign), 0, 2); // 32 - # of bytes in one sample, for all channels + outFile.write(shortToByteArray((short)myBitsPerSample), 0, 2); // 34 - how many bits in a sample(number)? usually 16 or 24 + outFile.writeBytes("data"); // 36 - data + outFile.write(intToByteArray((int)myDataSize), 0, 4); // 40 - how big is this data chunk + //outFile.write(clipData); // 44 - the actual data itself - just a long string of numbers + FileInputStream pcmIn = new FileInputStream(fPCM); + byte buffer[] = new byte[1024]; + int iBytesRead = pcmIn.read(buffer); + while (iBytesRead > 0) + { + outFile.write(buffer, 0, iBytesRead); + iBytesRead = pcmIn.read(buffer); + } // next chunk + pcmIn.close(); + + outFile.flush(); + outFile.close(); + + } catch (IOException e) { + e.printStackTrace(); + } + fPCM.delete(); + return wav; + } + + private static byte[] intToByteArray(int i) + { + byte[] b = new byte[4]; + b[0] = (byte) (i & 0x00FF); + b[1] = (byte) ((i >> 8) & 0x000000FF); + b[2] = (byte) ((i >> 16) & 0x000000FF); + b[3] = (byte) ((i >> 24) & 0x000000FF); + return b; + } + + // convert a short to a byte array + public static byte[] shortToByteArray(short data) + { + /* + * NB have also tried: + * return new byte[]{(byte)(data & 0xff),(byte)((data >> 8) & 0xff)}; + * + */ + + return new byte[]{(byte)(data & 0xff),(byte)((data >>> 8) & 0xff)}; + } +} diff --git a/www/audioInputCapture.js b/www/audioInputCapture.js index fde8a6c..9bee960 100644 --- a/www/audioInputCapture.js +++ b/www/audioInputCapture.js @@ -85,6 +85,7 @@ audioinput.start = function (cfg) { audioinput._cfg.audioContext = cfg.audioContext || null; audioinput._cfg.concatenateMaxChunks = cfg.concatenateMaxChunks || audioinput.DEFAULT.CONCATENATE_MAX_CHUNKS; audioinput._cfg.audioSourceType = 
cfg.audioSourceType || 0; + audioinput._cfg.fileUrl = cfg.fileUrl || null; if (audioinput._cfg.channels < 1 && audioinput._cfg.channels > 2) { throw "Invalid number of channels (" + audioinput._cfg.channels + "). Only mono (1) and stereo (2) is" + @@ -108,7 +109,8 @@ audioinput.start = function (cfg) { audioinput._cfg.bufferSize, audioinput._cfg.channels, audioinput._cfg.format, - audioinput._cfg.audioSourceType]); + audioinput._cfg.audioSourceType, + audioinput._cfg.fileUrl]); audioinput._capturing = true; @@ -224,6 +226,9 @@ audioinput._audioInputEvent = function (audioInputData) { } else if (audioInputData && audioInputData.error) { audioinput._audioInputErrorEvent(audioInputData.error); + } + else if (audioInputData && audioInputData.file) { + audioinput._audioInputFinishedEvent(audioInputData.file); } } catch (ex) { @@ -240,6 +245,15 @@ audioinput._audioInputErrorEvent = function (e) { cordova.fireWindowEvent("audioinputerror", {message: e}); }; +/** + * Finished callback for AudioInputCapture start + * @private + */ + +audioinput._audioInputFinishedEvent = function (fileUrl) { + cordova.fireWindowEvent("audioinputfinished", {file: fileUrl}); +}; + /** * Normalize audio input * From 99f833c705aaa63881f24e855194ffb2422cb363 Mon Sep 17 00:00:00 2001 From: Robert Fromont Date: Tue, 31 Jan 2017 19:25:33 -0300 Subject: [PATCH 02/18] Delete temporary file after we've finished with it. 
--- src/android/AudioInputReceiver.java | 1 + 1 file changed, 1 insertion(+) diff --git a/src/android/AudioInputReceiver.java b/src/android/AudioInputReceiver.java index 5c36ccf..0c66f04 100644 --- a/src/android/AudioInputReceiver.java +++ b/src/android/AudioInputReceiver.java @@ -167,6 +167,7 @@ public void run() { os.close(); File wav = new File(fileUrl); addWavHeader(audioFile, wav); + audioFile.delete(); message = handler.obtainMessage(); messageBundle = new Bundle(); messageBundle.putString("file", wav.toURI().toString()); From 2e06d291fc827dcb93c2f0d16dfa89fe61176c33 Mon Sep 17 00:00:00 2001 From: Robert Fromont Date: Mon, 10 Jul 2017 16:26:42 -0300 Subject: [PATCH 03/18] Initial implementation of native file writing (currently excludes original event based approach). --- src/ios/AudioReceiver.h | 7 ++-- src/ios/AudioReceiver.m | 58 +++++++++++++++++++++++++++++++++- src/ios/CDVAudioInputCapture.m | 26 ++++++++++++++- 3 files changed, 87 insertions(+), 4 deletions(-) diff --git a/src/ios/AudioReceiver.h b/src/ios/AudioReceiver.h index 9746e1d..48a6506 100644 --- a/src/ios/AudioReceiver.h +++ b/src/ios/AudioReceiver.h @@ -31,6 +31,9 @@ typedef struct { @property (nonatomic, assign) id delegate; @property (nonatomic, assign) AQRecordState recordState; + @property (nonatomic, strong) AVAudioRecorder *audioRecorder; + @property (nonatomic, strong) NSString* fileUrl; + @property (nonatomic, strong) NSString* filePath; @property (nonatomic) int mySampleRate; @property (nonatomic) int myBufferSize; @@ -42,7 +45,7 @@ typedef struct { - (void)stop; - (void)pause; - (void)dealloc; -- (AudioReceiver*)init:(int)sampleRate bufferSize:(int)bufferSizeInBytes noOfChannels:(short)channels audioFormat:(NSString*)format sourceType:(int)audioSourceType; +- (AudioReceiver*)init:(int)sampleRate bufferSize:(int)bufferSizeInBytes noOfChannels:(short)channels audioFormat:(NSString*)format sourceType:(int)audioSourceType fileUrl:(NSString*)url; - 
(void)didReceiveAudioData:(short*)samples dataLength:(int)length; - (void)hasError:(int)statusCode:(char*)file:(int)line; @@ -54,4 +57,4 @@ typedef struct { - (void)didReceiveAudioData:(short*)data dataLength:(int)length; - (void)didEncounterError:(NSString*)msg; -@end \ No newline at end of file +@end diff --git a/src/ios/AudioReceiver.m b/src/ios/AudioReceiver.m index 494bab6..569f911 100644 --- a/src/ios/AudioReceiver.m +++ b/src/ios/AudioReceiver.m @@ -54,7 +54,7 @@ @implementation AudioReceiver /** Init instance */ -- (AudioReceiver*)init:(int)sampleRate bufferSize:(int)bufferSizeInBytes noOfChannels:(short)channels audioFormat:(NSString*)format sourceType:(int)audioSourceType +- (AudioReceiver*)init:(int)sampleRate bufferSize:(int)bufferSizeInBytes noOfChannels:(short)channels audioFormat:(NSString*)format sourceType:(int)audioSourceType fileUrl:(NSString*)url { static const int maxBufferSize = 0x100000; @@ -97,6 +97,10 @@ - (AudioReceiver*)init:(int)sampleRate bufferSize:(int)bufferSizeInBytes noOfCha _recordState.mDataFormat.mReserved = 0; _recordState.mDataFormat.mFormatFlags = kLinearPCMFormatFlagIsSignedInteger | kLinearPCMFormatFlagIsPacked; _recordState.bufferByteSize = (UInt32) MIN(bufferSizeInBytes, maxBufferSize); + + // assign fileUrl + _fileUrl = url; + _filePath = _fileUrl.path; } return self; @@ -114,6 +118,7 @@ - (void)start { - (void) startRecording{ OSStatus status = noErr; + /* _recordState.mCurrentPacket = 0; _recordState.mSelf = self; @@ -137,15 +142,57 @@ - (void) startRecording{ _recordState.mIsRunning = YES; status = AudioQueueStart(_recordState.mQueue, NULL); [self hasError:status:__FILE__:__LINE__]; + */ + + NSURL *soundFileURL = [NSURL URLWithString:[_myFileUrl stringByAddingPercentEscapesUsingEncoding:NSUTF8StringEncoding]]; + + if (_audioRecorder != nil) + { + if (_audioRecorder.recording) + { + [_audioRecorder stop]; + } + [_audioRecorder dealloc]; + } + + NSError *error = nil; + + AVAudioSession *audioSession = [AVAudioSession 
sharedInstance]; + [audioSession setCategory:AVAudioSessionCategoryPlayAndRecord + error:nil]; + + _audioRecorder = [[AVAudioRecorder alloc] + initWithURL:soundFileURL + settings:recordingSettings + error:&error]; + + if (error) + { + NSLog(@"[INFO] iosaudiorecorder: error: %@", [error localizedDescription]); + } + else + { + [_audioRecorder prepareToRecord]; + [_audioRecorder record]; + _recordState.mIsRunning = YES; + NSLog(@"[INFO] iosaudiorecorder:Recording..."); + } } /** Stop Audio Input capture */ - (void)stop { + if (_recordState.mIsRunning) { +/* AudioQueueStop(_recordState.mQueue, true); +*/ + [_audioRecorder stop]; _recordState.mIsRunning = false; + + [self didFinish:url]; + } } @@ -162,7 +209,9 @@ - (void)pause { Deallocate audio queue */ - (void)dealloc { +/* AudioQueueDispose(_recordState.mQueue, true); +*/ } @@ -185,5 +234,12 @@ -(void)hasError:(int)statusCode:(char*)file:(int)line } } +/** + Finished + */ +- (void)didFinish:(NSString*)file { + [self.delegate didfinish:file]; +} + @end diff --git a/src/ios/CDVAudioInputCapture.m b/src/ios/CDVAudioInputCapture.m index 65d28f4..d846de8 100644 --- a/src/ios/CDVAudioInputCapture.m +++ b/src/ios/CDVAudioInputCapture.m @@ -14,6 +14,7 @@ - (void)stop:(CDVInvokedUrlCommand*)command; - (void)startRecording:(CDVInvokedUrlCommand*)command; - (void)didReceiveAudioData:(short*)data dataLength:(int)length; - (void)didEncounterError:(NSString*)msg; +- (void)didFinish:(NSString*)url; @end @@ -50,8 +51,9 @@ - (void)startRecording:(CDVInvokedUrlCommand*)command short channels = [[command.arguments objectAtIndex:2] intValue]; NSString* format = [command.arguments objectAtIndex:3]; int audioSourceType = [[command.arguments objectAtIndex:4] intValue]; + NSString* fileUrl = [command.arguments objectAtIndex:5]; - self.audioReceiver = [[AudioReceiver alloc] init:sampleRate bufferSize:bufferSizeInBytes noOfChannels:channels audioFormat:format sourceType:audioSourceType]; + self.audioReceiver = [[AudioReceiver alloc] 
init:sampleRate bufferSize:bufferSizeInBytes noOfChannels:channels audioFormat:format sourceType:audioSourceType fileUrl:fileUrl]; self.audioReceiver.delegate = self; @@ -136,6 +138,28 @@ - (void)didEncounterError:(NSString*)msg }]; } +- (void)didFinish:(NSString*)url +{ + [self.commandDelegate runInBackground:^{ + @try { + if (self.callbackId) { + NSDictionary* messageData = [NSDictionary dictionaryWithObject:[NSString stringWithString:url] forKey:@"file"]; + CDVPluginResult* result = [CDVPluginResult resultWithStatus:CDVCommandStatus_OK messageAsDictionary:messageData]; + [result setKeepCallbackAsBool:YES]; + [self.commandDelegate sendPluginResult:result callbackId:self.callbackId]; + } + } + @catch (NSException *exception) { + if (self.callbackId) { + CDVPluginResult* result = [CDVPluginResult resultWithStatus:CDVCommandStatus_ERROR + messageAsString:@"Exception in didEncounterError"]; + [result setKeepCallbackAsBool:YES]; + [self.commandDelegate sendPluginResult:result callbackId:self.callbackId]; + } + } + }]; +} + From f8148db3739859c2bf7e12bfcc3ca323ccc171df Mon Sep 17 00:00:00 2001 From: Robert Fromont Date: Mon, 10 Jul 2017 16:57:16 -0300 Subject: [PATCH 04/18] Initial implementation of native file writing (currently excludes original event based approach). 
--- src/ios/AudioReceiver.m | 20 +++++++++++++++----- src/ios/CDVAudioInputCapture.m | 2 +- 2 files changed, 16 insertions(+), 6 deletions(-) diff --git a/src/ios/AudioReceiver.m b/src/ios/AudioReceiver.m index 569f911..2b7c7ea 100644 --- a/src/ios/AudioReceiver.m +++ b/src/ios/AudioReceiver.m @@ -100,7 +100,6 @@ - (AudioReceiver*)init:(int)sampleRate bufferSize:(int)bufferSizeInBytes noOfCha // assign fileUrl _fileUrl = url; - _filePath = _fileUrl.path; } return self; @@ -144,7 +143,8 @@ - (void) startRecording{ [self hasError:status:__FILE__:__LINE__]; */ - NSURL *soundFileURL = [NSURL URLWithString:[_myFileUrl stringByAddingPercentEscapesUsingEncoding:NSUTF8StringEncoding]]; + NSURL *soundFileURL = [NSURL URLWithString:[_fileUrl stringByAddingPercentEscapesUsingEncoding:NSUTF8StringEncoding]]; + _filePath = soundFileURL.path; if (_audioRecorder != nil) { @@ -152,9 +152,19 @@ - (void) startRecording{ { [_audioRecorder stop]; } - [_audioRecorder dealloc]; + /* [_audioRecorder dealloc]; TODO */ } + NSDictionary *recordingSettings = @{AVFormatIDKey : @(kAudioFormatLinearPCM), + AVNumberOfChannelsKey : @(_recordState.mDataFormat.mChannelsPerFrame), + AVSampleRateKey : @(_recordState.mDataFormat.mSampleRate), + AVLinearPCMBitDepthKey : @(16), + AVLinearPCMIsBigEndianKey : @NO, + //AVLinearPCMIsNonInterleaved : @YES, + AVLinearPCMIsFloatKey : @NO, + AVEncoderAudioQualityKey : @(AVAudioQualityMax) + }; + NSError *error = nil; AVAudioSession *audioSession = [AVAudioSession sharedInstance]; @@ -191,7 +201,7 @@ - (void)stop { [_audioRecorder stop]; _recordState.mIsRunning = false; - [self didFinish:url]; + [self didFinish:_fileUrl]; } } @@ -238,7 +248,7 @@ -(void)hasError:(int)statusCode:(char*)file:(int)line Finished */ - (void)didFinish:(NSString*)file { - [self.delegate didfinish:file]; + [self.delegate didFinish:file]; } diff --git a/src/ios/CDVAudioInputCapture.m b/src/ios/CDVAudioInputCapture.m index d846de8..8f30f85 100644 --- a/src/ios/CDVAudioInputCapture.m +++ 
b/src/ios/CDVAudioInputCapture.m @@ -68,7 +68,7 @@ - (void)stop:(CDVInvokedUrlCommand*)command if (self.callbackId) { CDVPluginResult* result = [CDVPluginResult resultWithStatus:CDVCommandStatus_OK messageAsDouble:0.0f]; - [result setKeepCallbackAsBool:NO]; + [result setKeepCallbackAsBool:YES]; /* TODO NO]; */ [self.commandDelegate sendPluginResult:result callbackId:self.callbackId]; } From 32b8b702ac4aae58a4fcd45fcc601442ea4b160a Mon Sep 17 00:00:00 2001 From: Robert Fromont Date: Tue, 11 Jul 2017 12:55:57 -0300 Subject: [PATCH 05/18] Ensure that plugin will work with or without fileUrl, validate URL, report errors. --- src/ios/AudioReceiver.h | 2 +- src/ios/AudioReceiver.m | 144 ++++++++++++++++++--------------- src/ios/CDVAudioInputCapture.m | 22 +++-- 3 files changed, 98 insertions(+), 70 deletions(-) diff --git a/src/ios/AudioReceiver.h b/src/ios/AudioReceiver.h index 48a6506..f3ad82f 100644 --- a/src/ios/AudioReceiver.h +++ b/src/ios/AudioReceiver.h @@ -32,7 +32,7 @@ typedef struct { @property (nonatomic, assign) AQRecordState recordState; @property (nonatomic, strong) AVAudioRecorder *audioRecorder; - @property (nonatomic, strong) NSString* fileUrl; + @property (nonatomic, strong) NSURL* fileUrl; @property (nonatomic, strong) NSString* filePath; @property (nonatomic) int mySampleRate; diff --git a/src/ios/AudioReceiver.m b/src/ios/AudioReceiver.m index 2b7c7ea..cb3eba8 100644 --- a/src/ios/AudioReceiver.m +++ b/src/ios/AudioReceiver.m @@ -98,8 +98,18 @@ - (AudioReceiver*)init:(int)sampleRate bufferSize:(int)bufferSizeInBytes noOfCha _recordState.mDataFormat.mFormatFlags = kLinearPCMFormatFlagIsSignedInteger | kLinearPCMFormatFlagIsPacked; _recordState.bufferByteSize = (UInt32) MIN(bufferSizeInBytes, maxBufferSize); - // assign fileUrl - _fileUrl = url; + if (url == nil) { + _fileUrl = nil; + } else { + // assign fileUrl + _fileUrl = [NSURL URLWithString:[url stringByAddingPercentEscapesUsingEncoding:NSUTF8StringEncoding]]; + if (_fileUrl.isFileURL) { + 
_filePath = _fileUrl.path; + NSLog(@"[INFO] iosaudiorecorder:temp file path: %@", _filePath); + } + + } + } return self; @@ -117,45 +127,43 @@ - (void)start { - (void) startRecording{ OSStatus status = noErr; - /* - _recordState.mCurrentPacket = 0; - _recordState.mSelf = self; - - status = AudioQueueNewInput(&_recordState.mDataFormat, - HandleInputBuffer, - &_recordState, - CFRunLoopGetCurrent(), - kCFRunLoopCommonModes, - 0, - &_recordState.mQueue); - [self hasError:status:__FILE__:__LINE__]; - - for (int i = 0; i < kNumberBuffers; i++) { + if (_fileUrl == nil) { + _recordState.mCurrentPacket = 0; + _recordState.mSelf = self; + + status = AudioQueueNewInput(&_recordState.mDataFormat, + HandleInputBuffer, + &_recordState, + CFRunLoopGetCurrent(), + kCFRunLoopCommonModes, + 0, + &_recordState.mQueue); + [self hasError:status:__FILE__:__LINE__]; + + for (int i = 0; i < kNumberBuffers; i++) { status = AudioQueueAllocateBuffer(_recordState.mQueue, _recordState.bufferByteSize, &_recordState.mBuffers[i]); [self hasError:status:__FILE__:__LINE__]; - + status = AudioQueueEnqueueBuffer(_recordState.mQueue, _recordState.mBuffers[i], 0, NULL); [self hasError:status:__FILE__:__LINE__]; - } + } - _recordState.mIsRunning = YES; - status = AudioQueueStart(_recordState.mQueue, NULL); - [self hasError:status:__FILE__:__LINE__]; - */ - - NSURL *soundFileURL = [NSURL URLWithString:[_fileUrl stringByAddingPercentEscapesUsingEncoding:NSUTF8StringEncoding]]; - _filePath = soundFileURL.path; + _recordState.mIsRunning = YES; + status = AudioQueueStart(_recordState.mQueue, NULL); + [self hasError:status:__FILE__:__LINE__]; - if (_audioRecorder != nil) - { - if (_audioRecorder.recording) + } else { /* recording direct to file */ + + if (_audioRecorder != nil) { - [_audioRecorder stop]; + if (_audioRecorder.recording) + { + [_audioRecorder stop]; + } + /* [_audioRecorder dealloc]; TODO */ } - /* [_audioRecorder dealloc]; TODO */ - } - - NSDictionary *recordingSettings = @{AVFormatIDKey : 
@(kAudioFormatLinearPCM), + + NSDictionary *recordingSettings = @{AVFormatIDKey : @(kAudioFormatLinearPCM), AVNumberOfChannelsKey : @(_recordState.mDataFormat.mChannelsPerFrame), AVSampleRateKey : @(_recordState.mDataFormat.mSampleRate), AVLinearPCMBitDepthKey : @(16), @@ -165,28 +173,35 @@ - (void) startRecording{ AVEncoderAudioQualityKey : @(AVAudioQualityMax) }; - NSError *error = nil; - - AVAudioSession *audioSession = [AVAudioSession sharedInstance]; - [audioSession setCategory:AVAudioSessionCategoryPlayAndRecord - error:nil]; - - _audioRecorder = [[AVAudioRecorder alloc] - initWithURL:soundFileURL - settings:recordingSettings - error:&error]; - - if (error) - { - NSLog(@"[INFO] iosaudiorecorder: error: %@", [error localizedDescription]); - } - else - { - [_audioRecorder prepareToRecord]; - [_audioRecorder record]; - _recordState.mIsRunning = YES; - NSLog(@"[INFO] iosaudiorecorder:Recording..."); - } + NSError *error = nil; + + if (_filePath == nil) { + /* URL was not a valid file URL */ + NSString *msg = [NSString stringWithFormat:@"Invalid file URL: %@", _fileUrl]; + [self.delegate didEncounterError:msg]; + } else { + + AVAudioSession *audioSession = [AVAudioSession sharedInstance]; + [audioSession setCategory:AVAudioSessionCategoryPlayAndRecord + error:nil]; + + _audioRecorder = [[AVAudioRecorder alloc] + initWithURL:_fileUrl + settings:recordingSettings + error:&error]; + + if (error) { + NSLog(@"[INFO] iosaudiorecorder: error: %@", [error localizedDescription]); + NSString *msg = [NSString stringWithFormat:@"AVAudioRecorder error: %@", [error localizedDescription]]; + [self.delegate didEncounterError:msg]; + } else { + [_audioRecorder prepareToRecord]; + [_audioRecorder record]; + _recordState.mIsRunning = YES; + NSLog(@"[INFO] iosaudiorecorder:Recording..."); + } + } + } /* recording direct to file */ } /** @@ -195,14 +210,13 @@ - (void) startRecording{ - (void)stop { if (_recordState.mIsRunning) { -/* + if (_fileUrl == nil) { 
AudioQueueStop(_recordState.mQueue, true); -*/ + } else { [_audioRecorder stop]; - _recordState.mIsRunning = false; - - [self didFinish:_fileUrl]; - + [self didFinish:_fileUrl.absoluteString]; + } + _recordState.mIsRunning = false; } } @@ -219,9 +233,9 @@ - (void)pause { Deallocate audio queue */ - (void)dealloc { -/* + if (_fileUrl == nil) { AudioQueueDispose(_recordState.mQueue, true); -*/ + } } @@ -240,7 +254,9 @@ -(void)hasError:(int)statusCode:(char*)file:(int)line { if (statusCode) { NSLog(@"Error Code responded %d in file %s on line %d\n", statusCode, file, line); - exit(-1); + NSString *msg = [NSString stringWithFormat:@"AudioReceiver error [%d]", statusCode]; + /* exit(-1); */ + [self.delegate didEncounterError:msg]; } } diff --git a/src/ios/CDVAudioInputCapture.m b/src/ios/CDVAudioInputCapture.m index 8f30f85..6cf24f6 100644 --- a/src/ios/CDVAudioInputCapture.m +++ b/src/ios/CDVAudioInputCapture.m @@ -7,6 +7,7 @@ @interface CDVAudioInputCapture : CDVPlugin { } @property (strong, nonatomic) AudioReceiver* audioReceiver; +@property (strong, nonatomic) NSString* fileUrl; @property (strong) NSString* callbackId; - (void)start:(CDVInvokedUrlCommand*)command; @@ -51,9 +52,15 @@ - (void)startRecording:(CDVInvokedUrlCommand*)command short channels = [[command.arguments objectAtIndex:2] intValue]; NSString* format = [command.arguments objectAtIndex:3]; int audioSourceType = [[command.arguments objectAtIndex:4] intValue]; - NSString* fileUrl = [command.arguments objectAtIndex:5]; + _fileUrl = [command.arguments objectAtIndex:5]; - self.audioReceiver = [[AudioReceiver alloc] init:sampleRate bufferSize:bufferSizeInBytes noOfChannels:channels audioFormat:format sourceType:audioSourceType fileUrl:fileUrl]; + if (self.audioReceiver != nil) { + [self.audioReceiver stop]; + /* TODO [self.audioReceiver dealloc]; */ + self.audioReceiver = nil; + } + + self.audioReceiver = [[AudioReceiver alloc] init:sampleRate bufferSize:bufferSizeInBytes noOfChannels:channels 
audioFormat:format sourceType:audioSourceType fileUrl:_fileUrl]; self.audioReceiver.delegate = self; @@ -68,11 +75,14 @@ - (void)stop:(CDVInvokedUrlCommand*)command if (self.callbackId) { CDVPluginResult* result = [CDVPluginResult resultWithStatus:CDVCommandStatus_OK messageAsDouble:0.0f]; - [result setKeepCallbackAsBool:YES]; /* TODO NO]; */ + /* if we are recording directly to file, we want to keep the callback */ + [result setKeepCallbackAsBool:(_fileUrl == nil?NO:YES)]; [self.commandDelegate sendPluginResult:result callbackId:self.callbackId]; } - self.callbackId = nil; + if (_fileUrl == nil) { + self.callbackId = nil; + } }]; } @@ -145,8 +155,10 @@ - (void)didFinish:(NSString*)url if (self.callbackId) { NSDictionary* messageData = [NSDictionary dictionaryWithObject:[NSString stringWithString:url] forKey:@"file"]; CDVPluginResult* result = [CDVPluginResult resultWithStatus:CDVCommandStatus_OK messageAsDictionary:messageData]; - [result setKeepCallbackAsBool:YES]; + [result setKeepCallbackAsBool:NO]; [self.commandDelegate sendPluginResult:result callbackId:self.callbackId]; + + self.callbackId = nil; } } @catch (NSException *exception) { From 7bb90a7d3cb009cde70ae979c1cdfb8e55550385 Mon Sep 17 00:00:00 2001 From: Robert Fromont Date: Tue, 18 Jul 2017 15:42:36 -0300 Subject: [PATCH 06/18] Android: Ensure the file URL used for recording is the URL given by the event when recording finishes. 
--- src/android/AudioInputReceiver.java | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/src/android/AudioInputReceiver.java b/src/android/AudioInputReceiver.java index 0c66f04..a14a618 100644 --- a/src/android/AudioInputReceiver.java +++ b/src/android/AudioInputReceiver.java @@ -141,6 +141,7 @@ public void run() { int numReadBytes = 0; byte audioBuffer[] = new byte[readBufferSize]; synchronized(this) { + URI finalUrl = fileUrl; // even if the member changes, we use what we were originally given recorder.startRecording(); try @@ -165,7 +166,7 @@ public void run() { } } // loop os.close(); - File wav = new File(fileUrl); + File wav = new File(finalUrl); addWavHeader(audioFile, wav); audioFile.delete(); message = handler.obtainMessage(); @@ -189,7 +190,7 @@ public void run() { recorder.release(); recorder = null; - } + } // syncrhonized } // recording to fileUrl } From f5655a118ac0fb0b769219b7f2a7adf2cc78df91 Mon Sep 17 00:00:00 2001 From: Robert Fromont Date: Tue, 18 Jul 2017 15:45:26 -0300 Subject: [PATCH 07/18] Change repo URL and version to differentiate plugin from 'standard' version. --- plugin.xml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/plugin.xml b/plugin.xml index dd462a5..f5cabf2 100644 --- a/plugin.xml +++ b/plugin.xml @@ -1,6 +1,6 @@ @@ -9,8 +9,8 @@ Edin Mujkanovic MIT cordova,phonegap,media,microphone,mic,input,audio,waapi,audionode,web,audio,api,audionode,capture,ios,android - https://github.com/edimuj/cordova-plugin-audioinput.git - https://github.com/edimuj/cordova-plugin-audioinput/issues + git://github.com/nzilbb/cordova-plugin-audioinput.git + From d4731b0c984b5df86a1f30a1dfea5a075eb96545 Mon Sep 17 00:00:00 2001 From: Robert Fromont Date: Tue, 18 Jul 2017 16:36:58 -0300 Subject: [PATCH 08/18] Android: Ensure current state is checked on start/stop. 
- if started before stopping previous recording, interrupt previous recording - if stopped before starting, return error --- src/android/AudioInputCapture.java | 39 ++++++++++++++++++++---------- 1 file changed, 26 insertions(+), 13 deletions(-) diff --git a/src/android/AudioInputCapture.java b/src/android/AudioInputCapture.java index b968267..783bf5e 100644 --- a/src/android/AudioInputCapture.java +++ b/src/android/AudioInputCapture.java @@ -33,6 +33,7 @@ public class AudioInputCapture extends CordovaPlugin public static int RECORD_AUDIO = 0; public static final int PERMISSION_DENIED_ERROR = 20; public static final int INVALID_URL_ERROR = 30; + public static final int INVALID_STATE_ERROR = 40; private int sampleRate = 44100; private int bufferSize = 4096; @@ -67,21 +68,21 @@ public boolean execute(String action, JSONArray args, CallbackContext callbackCo promptForRecord(); } catch (URISyntaxException e) { // not a valid URL - receiver.interrupt(); + if (receiver != null) receiver.interrupt(); this.callbackContext.sendPluginResult(new PluginResult(PluginResult.Status.ERROR, INVALID_URL_ERROR)); return false; } catch (IllegalArgumentException e) { // not a file URL - receiver.interrupt(); + if (receiver != null) receiver.interrupt(); this.callbackContext.sendPluginResult(new PluginResult(PluginResult.Status.ERROR, INVALID_URL_ERROR)); return false; } catch (Exception e) { - receiver.interrupt(); + if (receiver != null) receiver.interrupt(); this.callbackContext.sendPluginResult(new PluginResult(PluginResult.Status.ERROR, PERMISSION_DENIED_ERROR)); @@ -95,28 +96,37 @@ public boolean execute(String action, JSONArray args, CallbackContext callbackCo return true; } else if (action.equals("stop")) { - receiver.interrupt(); - // only do this if we're not saving to a file, - // otherwise we won't get the event about the file being complete - if (fileUrl == null) { - this.sendUpdate(new JSONObject(), false); // release status callback in JS side - this.callbackContext = 
null; + if (receiver != null) + { + receiver.interrupt(); + // only do this if we're not saving to a file, + // otherwise we won't get the event about the file being complete + if (fileUrl == null) { + this.sendUpdate(new JSONObject(), false); // release status callback in JS side + this.callbackContext = null; + } + callbackContext.success(); + return true; + } + else + { // not recording, so can't stop + this.callbackContext.sendPluginResult(new PluginResult(PluginResult.Status.ERROR, + INVALID_STATE_ERROR)); + return false; } - callbackContext.success(); - return true; } return false; } public void onDestroy() { - if (!receiver.isInterrupted()) { + if (receiver != null && !receiver.isInterrupted()) { receiver.interrupt(); } } public void onReset() { - if (!receiver.isInterrupted()) { + if (receiver != null && !receiver.isInterrupted()) { receiver.interrupt(); } } @@ -181,6 +191,9 @@ protected void getMicPermission(int requestCode) { * Ensure that we have gotten record audio permission */ private void promptForRecord() { + // if we've already got a receiver, stop it + if (receiver != null) receiver.interrupt(); + if(PermissionHelper.hasPermission(this, permissions[RECORD_AUDIO])) { receiver = new AudioInputReceiver(this.sampleRate, this.bufferSize, this.channels, this.format, this.audioSource, this.fileUrl); receiver.setHandler(handler); From b8ff95c2e2e47838ba249cd2f109e556abe90e06 Mon Sep 17 00:00:00 2001 From: Robert Fromont Date: Thu, 27 Jul 2017 18:16:37 -0300 Subject: [PATCH 09/18] Don't pause/start when going to background/foreground on iOS when recording to a file, because it generates two "finished" events instead of one, and resets the recording when resuming, losing the first half of the recording (the OS appears to pause recording anyway) --- src/ios/CDVAudioInputCapture.m | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/src/ios/CDVAudioInputCapture.m b/src/ios/CDVAudioInputCapture.m index 6cf24f6..7191ca3 100644 --- 
a/src/ios/CDVAudioInputCapture.m +++ b/src/ios/CDVAudioInputCapture.m @@ -192,13 +192,17 @@ - (void)onReset - (void)didEnterBackground { - [self.audioReceiver pause]; + // only pause recording when we go into the background if we're not recording to a file + // (otherwise it generates a spurious finished event, and starting again when in the foreground resets the file) + if (_fileUrl == nil) [self.audioReceiver pause]; } - (void)willEnterForeground { - [self.audioReceiver start]; + // only start recording when we go into the foreground if we're not recording to a file + // (otherwise starting again resets the file) + if (_fileUrl == nil) [self.audioReceiver start]; } @end From cd98e4133ae79383f8dd02cd1eb8954cc99686bc Mon Sep 17 00:00:00 2001 From: Robert Fromont Date: Sun, 12 Nov 2017 16:41:58 -0300 Subject: [PATCH 10/18] Add support for the "browser" platform, and add API calls for initialization, and probing microphone access permissions. --- README.md | 45 +++- plugin.xml | 18 +- src/browser/AudioInputCaptureProxy.js | 340 ++++++++++++++++++++++++++ src/browser/RecorderWorker.js | 185 ++++++++++++++ www/audioInputCapture.js | 77 +++++- 5 files changed, 659 insertions(+), 6 deletions(-) create mode 100644 src/browser/AudioInputCaptureProxy.js create mode 100644 src/browser/RecorderWorker.js diff --git a/README.md b/README.md index 6fbe71f..e2d11aa 100644 --- a/README.md +++ b/README.md @@ -13,6 +13,7 @@ The plugin supports two different methods for microphone capture: ## Supported Platforms * Android * iOS +* browser ## Installation From the Cordova Plugin Repository: @@ -104,9 +105,40 @@ Remember that unfiltered microphone output likely will create a nasty audio feed * file-demo - How to encode recorded data to WAV format and save the resulting blob as a file. To run this demo ```cordova plugin add cordova-plugin-file``` is required. ## API **Prepare for capturing audio** from the microphone.
+Performs any required preparation for recording audio on the given platform. +```javascript +audioinput.initialize( captureCfg, onInitialized ); +``` + +**Check whether the module already has permission to access the microphone. +The callback function has a single boolean argument, which is true if access to the microphone +has been granted, and false otherwise. The check is passive - the user is not asked for permission +if they haven't already granted it. +```javascript +audioinput.checkMicrophonePermission( onComplete ); +``` + +**Obtains permission to access the microphone from the user. +This function will prompt the user for access to the microphone if they haven't already +granted it. +The callback function has two arguments: + * hasPermission - true if access to the microphone has been granted, and false otherwise. + * message - optionally, a reason message, hasPermission is false +```javascript +audioinput.getMicrophonePermission( onComplete ); +``` + **Start capturing audio** from the microphone. If your app doesn't have recording permission on the users device, the plugin will ask for permission when start is called. And the new Android 6.0 runtime permissions are also supported. ```javascript +audioinput.initialize( captureCfg ); +``` + +**Start capturing audio** from the microphone. +Ensure that initialize and at least checkMicrophonePermission have been called before calling this. +The captureCfg parameter can include more configuration than previously passed to initialize. +```javascript audioinput.start( captureCfg ); ``` @@ -158,7 +190,18 @@ var captureCfg = { // -VOICE_COMMUNICATION - Tuned for voice communications such as VoIP. // -MIC - Microphone audio source. (Android only) // -VOICE_RECOGNITION - Tuned for voice recognition if available (Android only) - audioSourceType: audioinput.AUDIOSOURCE_TYPE.DEFAULT + audioSourceType: audioinput.AUDIOSOURCE_TYPE.DEFAULT, + + // Optionally specifies a file://... URL to which the audio should be saved. 
+ // If this is set, then no audioinput events will be raised during recording. + // When stop is called, a single audioinputfinished event will be raised, with + // a "file" argument that contains the URL to which the audio was written, + // and the callback passed into stop() will be invoked. + // Currently, only WAV format files are guaranteed to be supported on all platforms. + // When called initialize(), this should be a URL to the directory in which files will + // be saved when calling start(), so that initialize() can ensure access to the directory + // is available. + fileUrl: null }; diff --git a/plugin.xml b/plugin.xml index f5cabf2..d8e0c47 100644 --- a/plugin.xml +++ b/plugin.xml @@ -1,6 +1,6 @@ @@ -56,4 +56,20 @@ + + + + + + + + + + + + + + + + diff --git a/src/browser/AudioInputCaptureProxy.js b/src/browser/AudioInputCaptureProxy.js new file mode 100644 index 0000000..d18e966 --- /dev/null +++ b/src/browser/AudioInputCaptureProxy.js @@ -0,0 +1,340 @@ +/*License (MIT) + +Copyright © 2013 Matt Diamond + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated +documentation files (the "Software"), to deal in the Software without restriction, including without limitation +the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and +to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of +the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO +THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF +CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. +*/ + +// 2017-10-29 robert@fromont.net.nz Implement as a Cordova plugin for the 'browser' platform + +// Public: + +function initialize(success, error, opts) { + console.log("AudioInputCaptureProxy: initialize: " + JSON.stringify(opts)); + onInitialized = success; + if (!intialized) { + sampleRate = opts[0] || sampleRate; + bufferSize = opts[1] || bufferSize; + channels = opts[2] || channels; + format = opts[3] || format; + audioSourceType = opts[4] || audioSourceType; + fileUrl = opts[5] || fileUrl; + + if (fileUrl) { + window.requestFileSystem = window.requestFileSystem || window.webkitRequestFileSystem; + if (window.webkitStorageInfo && window.webkitStorageInfo.requestQuota) { + // Chrome/Android requires calling requestQuota first + window.webkitStorageInfo.requestQuota( + /file:\/\/\/temporary.*/.test(fileUrl)?window.TEMPORARY:window.PERSISTENT, + 10*1024*1024, + function(grantedBytes) { + console.log("AudioInputCaptureProxy: Granted " + grantedBytes + " bytes storage"); + window.requestFileSystem( + /file:\/\/\/temporary.*/.test(fileUrl)?window.TEMPORARY:window.PERSISTENT, + 10*1024*1024, + function(fs) { + console.log("AudioInputCaptureProxy: Got file system: " + fs.name); + fileSystem = fs; + intialized = true; + onInitialized(); + }, error); + }, error); + } else { + // Firefox and Safari/iOS require calling requestFileSystem directly + window.requestFileSystem( + /file:\/\/\/temporary.*/.test(fileUrl)?window.TEMPORARY:window.PERSISTENT, + 10*1024*1024, + function(fs) { + console.log("AudioInputCaptureProxy: Got file system: " + fs.name); + fileSystem = fs; + intialized = true; + onInitialized(); + }, error); + } + return; + } // fileUrl set + intialized = true; + } // !initialized + 
onInitialized(); +} +function checkMicrophonePermission(success, error, opts) { + console.log("AudioInputCaptureProxy: checkMicrophonePermission"); + success(microphonePermission); +} +function getMicrophonePermission(success, error, opts) { + console.log("AudioInputCaptureProxy: getMicrophonePermission"); + if (microphonePermission) { // already got permission + success(microphonePermission); + } else { // start audio processing + initAudio(success, error); + } +} +function start(success, error, opts) { + console.log("AudioInputCaptureProxy: start: " + JSON.stringify(opts)); + sampleRate = opts[0] || sampleRate; + bufferSize = opts[1] || bufferSize; + channels = opts[2] || channels; + format = opts[3] || format; + audioSourceType = opts[4] || audioSourceType; + fileUrl = opts[5] || fileUrl; + console.log("AudioInputCaptureProxy: start - fileUrl: " + fileUrl); + + if (!audioRecorder) { + error("Not initialized"); + return; + } + audioRecorder.clear(); + audioRecorder.record(); + +} +function stop(success, error, opts) { + console.log("AudioInputCaptureProxy: stop"); + onStopped = success; + onStopError = error; + audioRecorder.stop(); + audioRecorder.getBuffers(gotBuffers); +} + +// Private: + +var sampleRate = 44100; +var bufferSize = 1024; +var channels = 1; +var format = null; +var audioSourceType = null; +var fileUrl = null; +var intialized = false; +var microphonePermission = false; +var audioContext = null; +var onInitialized = null; +var onSuccessGotStream = null; +var onStopped = null; +var onStopError = null; +var audioRecorder = null; +var fileSystem = null; + +function initAudio(onSuccess, onError) { + console.log("AudioInputCaptureProxy: initAudio"); + audioContext = new window.AudioContext(); + if (!navigator.getUserMedia) { + navigator.getUserMedia = navigator.webkitGetUserMedia || navigator.mozGetUserMedia || navigator.msGetUserMedia; + } + if (!navigator.getUserMedia) + { + onSuccess(false, "getUserMedia not supported"); + return; + } + 
onSuccessGotStream = onSuccess; + navigator.getUserMedia( + { + "audio": { + "mandatory": { + "googEchoCancellation": "false", + "googAutoGainControl": "false", + "googNoiseSuppression": "false", + "googHighpassFilter": "false" + }, + "optional": [] + }, + }, gotStream, function(e) { + console.log("AudioInputCaptureProxy: " + e); + onSuccess(false, e); + }); +} // initiAudio + +// callback by web audio when access to the microphone is gained (browser platform) +function gotStream(stream) { + console.log("AudioInputCaptureProxy: gotStream"); + + inputPoint = audioContext.createGain(); + + // Create an AudioNode from the stream. + audioStream = stream; + realAudioInput = audioContext.createMediaStreamSource(stream); + if (channels = 1) { + audioInput = convertToMono( realAudioInput ); + } else { + audioInput = realAudioInput; + } + + // we will end up downsampling, but recorderWorker.js does this by simply dropping samples + // so we use a low pass filter to prevent aliasing of higher frequencies + if (sampleRate < audioContext.sampleRate) { + var lowPassFilter = audioContext.createBiquadFilter(); + audioInput.connect(lowPassFilter); + lowPassFilter.connect(inputPoint); + lowPassFilter.type = lowPassFilter.LOWPASS||"lowpass"; + lowPassFilter.frequency.value = sampleRate/2; + lowPassFilter.connect(inputPoint); + } else { + audioInput.connect(inputPoint); + } + + console.log("AudioInputCaptureProxy: creating audioRecorder"); + audioRecorder = new Recorder( inputPoint, { sampleRate: sampleRate } ); + + // pump through zero gain so that the microphone input doesn't play out the speakers causing feedback + zeroGain = audioContext.createGain(); + zeroGain.gain.value = 0.0; + inputPoint.connect( zeroGain ); + zeroGain.connect( audioContext.destination ); + + microphonePermission = true; + onSuccessGotStream(microphonePermission); +} // gotStream + +function convertToMono( input ) { + var splitter = audioContext.createChannelSplitter(2); + var merger = 
audioContext.createChannelMerger(2); + + input.connect( splitter ); + splitter.connect( merger, 0, 0 ); + splitter.connect( merger, 0, 1 ); + return merger; +} + +// callback from recorder invoked when recording is finished +function gotBuffers(wav) { + if (channels == 1) { + audioRecorder.exportMonoWAV(doneEncoding, wav); + } else { + audioRecorder.exportWAV(doneEncoding, wav); + } +} + +function doneEncoding( blob ) { + console.log("AudioInputCaptureProxy: doneEncoding - write to: " + fileUrl); + var fileName = fileUrl.replace(/.*\//,""); + console.log("AudioInputCaptureProxy: doneEncoding - write to file: " + fileName); + fileSystem.root.getFile(fileName, {create: true}, function(fileEntry) { + fileEntry.createWriter(function(fileWriter) { + fileWriter.onwriteend = function(e) { + onStopped(fileUrl); + }; + fileWriter.onerror = function(e) { + console.log("AudioInputCaptureProxy: " + fileUrl + " failed"); + onStopError(e); + }; + console.log("AudioInputCaptureProxy: Saving " + fileUrl); + fileWriter.write(blob); + }, function(e) { + console.log("AudioInputCaptureProxy: Could not create writer for " + fileUrl); + onStopError(e); + }); // createWriter .wav + }, function(e) { + console.log("AudioInputCaptureProxy: Could not get "+fileUrl+ " - " + e.toString()); + onStopError(e); + }); // getFile .wav +} + +// 2015-01-28 robert@fromont.net.nz Adding a unique query string ensures it's loaded +// which in turn ensures the worker starts (in Firefox) +var WORKER_PATH = 'RecorderWorker.js?'
+ new Date(); + +var Recorder = function(source, cfg){ + var config = cfg || {}; + var bufferLen = config.bufferLen || 4096; + this.context = source.context; + if(!this.context.createScriptProcessor){ + this.node = this.context.createJavaScriptNode(bufferLen, 2, 2); + } else { + this.node = this.context.createScriptProcessor(bufferLen, 2, 2); + } + var worker = new Worker(config.workerPath || WORKER_PATH); + worker.postMessage({ + command: 'init', + config: { + sampleRate: this.context.sampleRate, + downsampleRate: config.sampleRate || this.context.sampleRate + } + }); + var recording = false, + currCallback; + + this.node.onaudioprocess = function(e){ + if (!recording) return; + worker.postMessage({ + command: 'record', + buffer: [ + e.inputBuffer.getChannelData(0), + e.inputBuffer.getChannelData(1) + ] + }); + } + + this.configure = function(cfg){ + for (var prop in cfg){ + if (cfg.hasOwnProperty(prop)){ + config[prop] = cfg[prop]; + } + } + } + + this.record = function(){ + recording = true; + } + + this.stop = function(){ + recording = false; + } + + this.clear = function(){ + worker.postMessage({ command: 'clear' }); + } + + this.getBuffers = function(cb) { + currCallback = cb || config.callback; + worker.postMessage({ command: 'getBuffers' }) + } + + this.exportWAV = function(cb){ + currCallback = cb || config.callback; + type = config.type || 'audio/wav'; + if (!currCallback) throw new Error('Callback not set'); + worker.postMessage({ + command: 'exportWAV', + type: type + }); + } + + this.exportMonoWAV = function(cb){ + currCallback = cb || config.callback; + type = config.type || 'audio/wav'; + if (!currCallback) throw new Error('Callback not set'); + worker.postMessage({ + command: 'exportMonoWAV', + type: type + }); + } + + worker.onmessage = function(e){ + var blob = e.data; + currCallback(blob); + } + + source.connect(this.node); + this.node.connect(this.context.destination); // if the script node is not connected to an output the "onaudioprocess" 
event is not triggered in chrome. +}; + +// 2017-10-29 robert@fromont.net.nz Implement as a Cordova plugin for the 'browser' platform +// Define module exports: +module.exports = { + initialize: initialize, + checkMicrophonePermission: checkMicrophonePermission, + getMicrophonePermission: getMicrophonePermission, + start: start, + stop: stop +}; +require('cordova/exec/proxy').add('AudioInputCapture', module.exports); diff --git a/src/browser/RecorderWorker.js b/src/browser/RecorderWorker.js new file mode 100644 index 0000000..d4fd96f --- /dev/null +++ b/src/browser/RecorderWorker.js @@ -0,0 +1,185 @@ +/*License (MIT) + +Copyright © 2013 Matt Diamond + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated +documentation files (the "Software"), to deal in the Software without restriction, including without limitation +the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and +to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of +the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO +THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF +CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER +DEALINGS IN THE SOFTWARE. 
+*/ +var recLength = 0, + recBuffersL = [], + recBuffersR = [], + sampleRate, + downsampleRate; + +this.onmessage = function(e){ + switch(e.data.command){ + case 'init': + init(e.data.config); + break; + case 'record': + record(e.data.buffer); + break; + case 'exportWAV': + exportWAV(e.data.type); + break; + case 'exportMonoWAV': + exportMonoWAV(e.data.type); + break; + case 'getBuffers': + getBuffers(); + break; + case 'clear': + clear(); + break; + } +}; + +function init(config){ + sampleRate = config.sampleRate; + downsampleRate = config.downsampleRate || config.sampleRate; +} + +function record(inputBuffer){ + recBuffersL.push(inputBuffer[0]); + recBuffersR.push(inputBuffer[1]); + recLength += inputBuffer[0].length; +} + +function exportWAV(type){ + var bufferL = mergeBuffers(recBuffersL, recLength); + var bufferR = mergeBuffers(recBuffersR, recLength); + var interleaved = interleave( + downsample(bufferL, sampleRate, downsampleRate), + downsample(bufferR, sampleRate, downsampleRate)); + var dataview = encodeWAV(interleaved); + var audioBlob = new Blob([dataview], { type: type }); + + this.postMessage(audioBlob); +} + +function exportMonoWAV(type){ + var bufferL = mergeBuffers(recBuffersL, recLength); + var dataview = encodeWAV(downsample(bufferL, sampleRate, downsampleRate), true); + var audioBlob = new Blob([dataview], { type: type }); + + this.postMessage(audioBlob); +} + +function getBuffers() { + var buffers = []; + buffers.push( mergeBuffers(recBuffersL, recLength) ); + buffers.push( mergeBuffers(recBuffersR, recLength) ); + this.postMessage(buffers); +} + +function clear(){ + recLength = 0; + recBuffersL = []; + recBuffersR = []; +} + +function mergeBuffers(recBuffers, recLength){ + var result = new Float32Array(recLength); + var offset = 0; + for (var i = 0; i < recBuffers.length; i++){ + result.set(recBuffers[i], offset); + offset += recBuffers[i].length; + } + return result; +} + +function interleave(inputL, inputR){ + var length = inputL.length + 
inputR.length; + var result = new Float32Array(length); + + var index = 0, + inputIndex = 0; + + while (index < length){ + result[index++] = inputL[inputIndex]; + result[index++] = inputR[inputIndex]; + inputIndex++; + } + return result; +} + +// thanks to http://stackoverflow.com/questions/31818112/downsample-pcm-audio-from-44100-to-8000 +function downsample(e, sampleRate, outputSampleRate){ + if (sampleRate <= outputSampleRate) return e; + + var t = e.length; + sampleRate += 0.0; + outputSampleRate += 0.0; + var s = 0, + o = sampleRate / outputSampleRate, + u = Math.ceil(t * outputSampleRate / sampleRate), + a = new Float32Array(u); + for (i = 0; i < u; i++) { + a[i] = e[Math.floor(s)]; + s += o; + } + + return a; +} + +function floatTo16BitPCM(output, offset, input){ + for (var i = 0; i < input.length; i++, offset+=2){ + var s = Math.max(-1, Math.min(1, input[i])); + output.setInt16(offset, s < 0 ? s * 0x8000 : s * 0x7FFF, true); + } +} + +function writeString(view, offset, string){ + for (var i = 0; i < string.length; i++){ + view.setUint8(offset + i, string.charCodeAt(i)); + } +} + +function encodeWAV(samples, mono){ + var buffer = new ArrayBuffer(44 + samples.length * 2); + var view = new DataView(buffer); + + /* RIFF identifier */ + writeString(view, 0, 'RIFF'); + /* file length */ + view.setUint32(4, 32 + samples.length * 2, true); + /* RIFF type */ + writeString(view, 8, 'WAVE'); + /* format chunk identifier */ + writeString(view, 12, 'fmt '); + /* format chunk length */ + view.setUint32(16, 16, true); + /* sample format (raw) */ + view.setUint16(20, 1, true); + /* channel count */ + var channelCount = mono?1:2; + view.setUint16(22, channelCount, true); + /* sample rate */ + view.setUint32(24, downsampleRate, true); + var blockAlign = channelCount * 2; // 2 = bytes per sample + /* byte rate (sample rate * block align) */ + view.setUint32(28, downsampleRate * blockAlign, true); + /* block align (channel count * bytes per sample) */ + view.setUint16(32, 
blockAlign, true); + /* bits per sample */ + view.setUint16(34, 16, true); + /* data chunk identifier */ + writeString(view, 36, 'data'); + /* data chunk length */ + view.setUint32(40, samples.length * 2, true); + + floatTo16BitPCM(view, 44, samples); + + return view; +} diff --git a/www/audioInputCapture.js b/www/audioInputCapture.js index 9bee960..4c59d22 100644 --- a/www/audioInputCapture.js +++ b/www/audioInputCapture.js @@ -51,6 +51,74 @@ audioinput.DEFAULT = { AUDIOSOURCE_TYPE: audioinput.AUDIOSOURCE_TYPE.DEFAULT }; +/** + * Does any initialization that might be required. + * + * @param {Callback} onComplete + */ +audioinput.initialize = function (cfg, onComplete) { + console.log("audioinput.initialize"); + if (!cfg) { + cfg = {}; + } + audioinput._cfg = {}; + audioinput._cfg.sampleRate = cfg.sampleRate || audioinput.DEFAULT.SAMPLERATE; + audioinput._cfg.bufferSize = cfg.bufferSize || audioinput.DEFAULT.BUFFER_SIZE; + audioinput._cfg.channels = cfg.channels || audioinput.DEFAULT.CHANNELS; + audioinput._cfg.format = cfg.format || audioinput.DEFAULT.FORMAT; + audioinput._cfg.normalize = typeof cfg.normalize == 'boolean' ? cfg.normalize : audioinput.DEFAULT.NORMALIZE; + audioinput._cfg.normalizationFactor = cfg.normalizationFactor || audioinput.DEFAULT.NORMALIZATION_FACTOR; + audioinput._cfg.streamToWebAudio = typeof cfg.streamToWebAudio == 'boolean' ? cfg.streamToWebAudio : audioinput.DEFAULT.STREAM_TO_WEBAUDIO; + audioinput._cfg.audioContext = cfg.audioContext || null; + audioinput._cfg.concatenateMaxChunks = cfg.concatenateMaxChunks || audioinput.DEFAULT.CONCATENATE_MAX_CHUNKS; + audioinput._cfg.audioSourceType = cfg.audioSourceType || 0; + audioinput._cfg.fileUrl = cfg.fileUrl || null; + + if (audioinput._cfg.channels < 1 && audioinput._cfg.channels > 2) { + throw "Invalid number of channels (" + audioinput._cfg.channels + "). 
Only mono (1) and stereo (2) is" + + " supported."; + } + else if (audioinput._cfg.format != "PCM_16BIT" && audioinput._cfg.format != "PCM_8BIT") { + throw "Invalid format (" + audioinput._cfg.format + "). Only 'PCM_8BIT' and 'PCM_16BIT' is" + + " supported."; + } + + if (audioinput._cfg.bufferSize <= 0) { + throw "Invalid bufferSize (" + audioinput._cfg.bufferSize + "). Must be greater than zero."; + } + + if (audioinput._cfg.concatenateMaxChunks <= 0) { + throw "Invalid concatenateMaxChunks (" + audioinput._cfg.concatenateMaxChunks + "). Must be greater than zero."; + } + exec(onComplete, audioinput._audioInputErrorEvent, "AudioInputCapture", "initialize", + [audioinput._cfg.sampleRate, + audioinput._cfg.bufferSize, + audioinput._cfg.channels, + audioinput._cfg.format, + audioinput._cfg.audioSourceType, + audioinput._cfg.fileUrl]); +} + +/** + * Checks (silently) whether the user has already given permission to access the microphone. + * + * @param {Callback} onComplete + */ +audioinput.checkMicrophonePermission = function (onComplete) { + console.log("audioinput.checkMicrophonePermission"); + exec(onComplete, audioinput._audioInputErrorEvent, "AudioInputCapture", "checkMicrophonePermission", []); +} + +/** + * Asks the user for permission to access the microphone. + * + * @param {Callback} onComplete + */ +audioinput.getMicrophonePermission = function (onComplete) { + console.log("audioinput.getMicrophonePermission"); + exec(onComplete, audioinput._audioInputErrorEvent, "AudioInputCapture", "getMicrophonePermission", []); +} + /** * Start capture of Audio input * @@ -68,13 +136,14 @@ audioinput.DEFAULT = { * audioSourceType (Use audioinput.AUDIOSOURCE_TYPE.) 
*/ audioinput.start = function (cfg) { + console.log("audioinput.start"); if (!audioinput._capturing) { if (!cfg) { cfg = {}; } - audioinput._cfg = {}; + if (!audioinput._cfg) audioinput._cfg = {}; audioinput._cfg.sampleRate = cfg.sampleRate || audioinput.DEFAULT.SAMPLERATE; audioinput._cfg.bufferSize = cfg.bufferSize || audioinput.DEFAULT.BUFFER_SIZE; audioinput._cfg.channels = cfg.channels || audioinput.DEFAULT.CHANNELS; @@ -133,9 +202,10 @@ audioinput.start = function (cfg) { /** * Stop capturing audio */ -audioinput.stop = function () { +audioinput.stop = function (onStopped) { + console.log("audioinput.stop"); if (audioinput._capturing) { - exec(null, audioinput._audioInputErrorEvent, "AudioInputCapture", "stop", []); + exec(onStopped, audioinput._audioInputErrorEvent, "AudioInputCapture", "stop", []); audioinput._capturing = false; } @@ -147,7 +217,6 @@ audioinput.stop = function () { } }; - /** * Connect the audio node * From cfbb38cd73d482618ddaaae03e47cb9a1bfb48bc Mon Sep 17 00:00:00 2001 From: Robert Fromont Date: Sun, 12 Nov 2017 17:55:44 -0300 Subject: [PATCH 11/18] Fix copy/paste error in README --- README.md | 6 ------ 1 file changed, 6 deletions(-) diff --git a/README.md b/README.md index e2d11aa..823aa1d 100644 --- a/README.md +++ b/README.md @@ -129,12 +129,6 @@ The callback function has two arguments: audioinput.getMicrophonePermission( onComplete ); ``` -**Start capturing audio** from the microphone. -If your app doesn't have recording permission on the users device, the plugin will ask for permission when start is called. And the new Android 6.0 runtime permissions are also supported. -```javascript -audioinput.initialize( captureCfg ); -``` - **Start capturing audio** from the microphone. Ensure that initialize and at least checkMicrophonePermission have been called before calling this. The captureCfg parameter can include more configuration than previously passed to initialize. 
From f2a194e94bdbbf6b6d75f8eea427baaebf304ccf Mon Sep 17 00:00:00 2001 From: Robert Fromont Date: Sun, 12 Nov 2017 17:56:39 -0300 Subject: [PATCH 12/18] Implement new API functions on Android. --- src/android/AudioInputCapture.java | 116 ++++++++++++++++++++++++++--- 1 file changed, 104 insertions(+), 12 deletions(-) diff --git a/src/android/AudioInputCapture.java b/src/android/AudioInputCapture.java index 783bf5e..3b0311b 100644 --- a/src/android/AudioInputCapture.java +++ b/src/android/AudioInputCapture.java @@ -26,6 +26,7 @@ public class AudioInputCapture extends CordovaPlugin private static final String LOG_TAG = "AudioInputCapture"; private CallbackContext callbackContext = null; + private CallbackContext getPermissionCallbackContext = null; private AudioInputReceiver receiver; private final AudioInputCaptureHandler handler = new AudioInputCaptureHandler(this); @@ -35,23 +36,99 @@ public class AudioInputCapture extends CordovaPlugin public static final int INVALID_URL_ERROR = 30; public static final int INVALID_STATE_ERROR = 40; + private boolean initialized = false; private int sampleRate = 44100; private int bufferSize = 4096; private int channels = 1; private String format = null; private int audioSource = 0; private URI fileUrl = null; - + @Override public boolean execute(String action, JSONArray args, CallbackContext callbackContext) throws JSONException { - if (action.equals("start")) { - if (this.callbackContext != null) { - callbackContext.error( "AudioInputCapture listener already running."); - return true; - } + if (action.equals("initialize") + // allow "start" to be called without "initialize", to keep backward compatibility + || (action.equals("start") && !initialized)) { this.callbackContext = callbackContext; + + try { + this.sampleRate = args.getInt(0); + this.bufferSize = args.getInt(1); + this.channels = args.getInt(2); + this.format = args.getString(3); + this.audioSource = args.getInt(4); + String fileUrlString = args.getString(5); + if 
(fileUrlString != null) { + this.fileUrl = new URI(fileUrlString); + // ensure it's a file URL + new File(this.fileUrl); + } + } + catch (URISyntaxException e) { // not a valid URL + if (receiver != null) receiver.interrupt(); + + this.callbackContext.sendPluginResult( + new PluginResult(PluginResult.Status.ERROR, INVALID_URL_ERROR)); + return false; + } + catch (IllegalArgumentException e) { // not a file URL + if (receiver != null) receiver.interrupt(); + + this.callbackContext.sendPluginResult( + new PluginResult(PluginResult.Status.ERROR, INVALID_URL_ERROR)); + return false; + } + catch (Exception e) { + if (receiver != null) receiver.interrupt(); + + this.callbackContext.sendPluginResult( + new PluginResult(PluginResult.Status.ERROR, PERMISSION_DENIED_ERROR)); + return false; + } + + if (action.equals("initialize")) { + // Invoke callback + PluginResult result = new PluginResult(PluginResult.Status.OK); + callbackContext.sendPluginResult(result); + return true; + } + } // allow fall-through through to "start"... + + if (action.equals("checkMicrophonePermission")) { + if(PermissionHelper.hasPermission(this, permissions[RECORD_AUDIO])) { + PluginResult result = new PluginResult(PluginResult.Status.OK, Boolean.TRUE); + callbackContext.sendPluginResult(result); + } + else { + PluginResult result = new PluginResult(PluginResult.Status.OK, Boolean.FALSE); + callbackContext.sendPluginResult(result); + } + return true; + } + + if (action.equals("getMicrophonePermission")) { + if(PermissionHelper.hasPermission(this, permissions[RECORD_AUDIO])) { + PluginResult result = new PluginResult(PluginResult.Status.OK, Boolean.TRUE); + callbackContext.sendPluginResult(result); + } + else { + // save context for when we know whether they've given permission + getPermissionCallbackContext = callbackContext; + + // return nothing in particular for now... 
+ PluginResult pluginResult = new PluginResult(PluginResult.Status.NO_RESULT); + pluginResult.setKeepCallback(true); + callbackContext.sendPluginResult(pluginResult); + + // ask for permission + getMicPermission(RECORD_AUDIO); + + } + return true; + } + if (action.equals("start")) { try { this.sampleRate = args.getInt(0); this.bufferSize = args.getInt(1); @@ -95,7 +172,8 @@ public boolean execute(String action, JSONArray args, CallbackContext callbackCo callbackContext.sendPluginResult(pluginResult); return true; } - else if (action.equals("stop")) { + + if (action.equals("stop")) { if (receiver != null) { receiver.interrupt(); @@ -209,15 +287,29 @@ private void promptForRecord() { */ public void onRequestPermissionResult(int requestCode, String[] permissions, int[] grantResults) throws JSONException { - + for(int r:grantResults) { if(r == PackageManager.PERMISSION_DENIED) { - this.callbackContext.sendPluginResult(new PluginResult(PluginResult.Status.ERROR, - PERMISSION_DENIED_ERROR)); - return; + if (this.getPermissionCallbackContext == null) { + // called directly from "start" + this.callbackContext.sendPluginResult( + new PluginResult(PluginResult.Status.ERROR, PERMISSION_DENIED_ERROR)); + return; + } + else { // called from "getMicrophonePermission" + PluginResult result = new PluginResult(PluginResult.Status.OK, Boolean.FALSE); + this.getPermissionCallbackContext.sendPluginResult(result); + } } } - promptForRecord(); + if (this.getPermissionCallbackContext == null) { + // called directly from "start" + promptForRecord(); + } + else { // called from "getMicrophonePermission" + PluginResult result = new PluginResult(PluginResult.Status.OK, Boolean.TRUE); + this.getPermissionCallbackContext.sendPluginResult(result); + } } } From cdfdf410807faf292adac0519dcb9bdb5181c965 Mon Sep 17 00:00:00 2001 From: Robert Fromont Date: Sun, 19 Nov 2017 15:53:25 -0300 Subject: [PATCH 13/18] Add NSMicrophoneUsageDescription string to info.plist file, in order to avoid crashes on 
OS X. --- plugin.xml | 32 ++++++++++++++++++-------------- 1 file changed, 18 insertions(+), 14 deletions(-) diff --git a/plugin.xml b/plugin.xml index d8e0c47..caaffa6 100644 --- a/plugin.xml +++ b/plugin.xml @@ -39,21 +39,25 @@ - - - - - - - - - - - - - - + + This app needs microphone access + + + + + + + + + + + + + + + + From 2ff2394da2d953616dcfb5fd8dc08c23bbe7c764 Mon Sep 17 00:00:00 2001 From: Robert Fromont Date: Sun, 19 Nov 2017 15:54:58 -0300 Subject: [PATCH 14/18] iOS: implement new initialize, checkMicrophonePermission, getMicrophonePermission API calls. --- src/ios/CDVAudioInputCapture.m | 31 +++++++++++++++++++++++++++++++ 1 file changed, 31 insertions(+) diff --git a/src/ios/CDVAudioInputCapture.m b/src/ios/CDVAudioInputCapture.m index 7191ca3..759e22d 100644 --- a/src/ios/CDVAudioInputCapture.m +++ b/src/ios/CDVAudioInputCapture.m @@ -36,6 +36,37 @@ - (void)pluginInitialize object:nil]; } +- (void)initialize:(CDVInvokedUrlCommand*)command +{ + _fileUrl = [command.arguments objectAtIndex:5]; + CDVPluginResult* result = [CDVPluginResult resultWithStatus:CDVCommandStatus_OK messageAsDictionary:nil]; + [result setKeepCallbackAsBool:NO]; + [self.commandDelegate sendPluginResult:result callbackId:command.callbackId]; +} + +- (void)checkMicrophonePermission:(CDVInvokedUrlCommand*)command +{ + BOOL hasPermission = FALSE; + if ([[AVAudioSession sharedInstance] recordPermission] == AVAudioSessionRecordPermissionGranted) { + hasPermission = TRUE; + } + CDVPluginResult* result = [CDVPluginResult resultWithStatus:CDVCommandStatus_OK messageAsBool:hasPermission]; + [result setKeepCallbackAsBool:NO]; + [self.commandDelegate sendPluginResult:result callbackId:command.callbackId]; +} + +- (void)getMicrophonePermission:(CDVInvokedUrlCommand*)command +{ + [[AVAudioSession sharedInstance] requestRecordPermission:^(BOOL granted) { + NSLog(@"permission : %d", granted); + + CDVPluginResult* result = [CDVPluginResult resultWithStatus:CDVCommandStatus_OK 
messageAsBool:granted]; + [result setKeepCallbackAsBool:NO]; + [self.commandDelegate sendPluginResult:result callbackId:command.callbackId]; + }]; +} + + - (void)start:(CDVInvokedUrlCommand*)command { From 76becd51b2850085bde091db47d7644b8ab8591c Mon Sep 17 00:00:00 2001 From: Robert Fromont Date: Sun, 19 Nov 2017 15:56:43 -0300 Subject: [PATCH 15/18] iOS: ensure file URL returned when finished recording is the one that was called at start (regardless of whether there's been an intervening start call). --- src/ios/AudioReceiver.h | 1 + src/ios/AudioReceiver.m | 25 ++++++++++++------------- 2 files changed, 13 insertions(+), 13 deletions(-) diff --git a/src/ios/AudioReceiver.h b/src/ios/AudioReceiver.h index f3ad82f..5b1b6f8 100644 --- a/src/ios/AudioReceiver.h +++ b/src/ios/AudioReceiver.h @@ -33,6 +33,7 @@ typedef struct { @property (nonatomic, assign) AQRecordState recordState; @property (nonatomic, strong) AVAudioRecorder *audioRecorder; @property (nonatomic, strong) NSURL* fileUrl; + @property (nonatomic, strong) NSURL* startedFileUrl; @property (nonatomic, strong) NSString* filePath; @property (nonatomic) int mySampleRate; diff --git a/src/ios/AudioReceiver.m b/src/ios/AudioReceiver.m index cb3eba8..1f611ba 100644 --- a/src/ios/AudioReceiver.m +++ b/src/ios/AudioReceiver.m @@ -125,8 +125,13 @@ - (void)start { - (void) startRecording{ + NSLog(@"[INFO] startRecording: %d", _recordState.mIsRunning); OSStatus status = noErr; + if (_recordState.mIsRunning == YES) { + [self stop]; + } + if (_fileUrl == nil) { _recordState.mCurrentPacket = 0; _recordState.mSelf = self; @@ -153,16 +158,7 @@ - (void) startRecording{ [self hasError:status:__FILE__:__LINE__]; } else { /* recording direct to file */ - - if (_audioRecorder != nil) - { - if (_audioRecorder.recording) - { - [_audioRecorder stop]; - } - /* [_audioRecorder dealloc]; TODO */ - } - + NSDictionary *recordingSettings = @{AVFormatIDKey : @(kAudioFormatLinearPCM), AVNumberOfChannelsKey : 
@(_recordState.mDataFormat.mChannelsPerFrame), AVSampleRateKey : @(_recordState.mDataFormat.mSampleRate), @@ -201,6 +197,7 @@ - (void) startRecording{ NSLog(@"[INFO] iosaudiorecorder:Recording..."); } } + _startedFileUrl = _fileUrl; } /* recording direct to file */ } @@ -208,16 +205,18 @@ - (void) startRecording{ Stop Audio Input capture */ - (void)stop { - + NSLog(@"[INFO] stop: %d", _recordState.mIsRunning); + if (_recordState.mIsRunning) { + _recordState.mIsRunning = false; if (_fileUrl == nil) { AudioQueueStop(_recordState.mQueue, true); } else { [_audioRecorder stop]; - [self didFinish:_fileUrl.absoluteString]; + [self didFinish:_startedFileUrl.absoluteString]; } - _recordState.mIsRunning = false; } + NSLog(@"[INFO] stopped: %d", _recordState.mIsRunning); } From e850d9e32ac08a331398e352bba4f85b31d9df48 Mon Sep 17 00:00:00 2001 From: Robert Fromont Date: Mon, 27 Nov 2017 17:52:45 -0300 Subject: [PATCH 16/18] Get fileUrl usage working on Chrome by ensuring URL is of the form cdvfile://localhost/... --- src/browser/AudioInputCaptureProxy.js | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/browser/AudioInputCaptureProxy.js b/src/browser/AudioInputCaptureProxy.js index d18e966..be75994 100644 --- a/src/browser/AudioInputCaptureProxy.js +++ b/src/browser/AudioInputCaptureProxy.js @@ -89,6 +89,8 @@ function start(success, error, opts) { format = opts[3] || format; audioSourceType = opts[4] || audioSourceType; fileUrl = opts[5] || fileUrl; + // the URL must be converted to a cdvfile:... URL to ensure it's readable from the outside + fileUrl = fileUrl.replace("filesystem:file:///", "cdvfile://localhost/"); console.log("AudioInputCaptureProxy: start - fileUrl: " + fileUrl); if (!audioRecorder) { From 39766b63eb9fb38d3fd5a872aeddffc8a27c1ab7 Mon Sep 17 00:00:00 2001 From: Robert Fromont Date: Mon, 27 Nov 2017 17:56:34 -0300 Subject: [PATCH 17/18] Fix RecorderWorker.js target. 
--- plugin.xml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/plugin.xml b/plugin.xml index caaffa6..fa15eb1 100644 --- a/plugin.xml +++ b/plugin.xml @@ -62,7 +62,6 @@ - @@ -71,7 +70,7 @@ - + From 0d7ae97e5db43cd87357696a782278637b7b1f43 Mon Sep 17 00:00:00 2001 From: Robert Fromont Date: Mon, 27 Nov 2017 18:20:44 -0300 Subject: [PATCH 18/18] Update README to include fileUrl example. --- README.md | 80 ++++++++++++++++++++++++++++++++++++++++++++++++++++++- 1 file changed, 79 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index 823aa1d..e2ef6ef 100644 --- a/README.md +++ b/README.md @@ -92,6 +92,81 @@ audioinput.start({ // Stop capturing audio input audioinput.stop() +```
+
+## Advanced Usage Example - saving to files
+Use fileUrl in captureCfg if you want to save audio files directly to the file system.
+
+This requires adding cordova-plugin-file to your project.
+
+```javascript
+
+// get access to the file system
+window.requestFileSystem(window.TEMPORARY, 5*1024*1024, function(fs) {
+    console.log("Got file system: " + fs.name);
+    fileSystem = fs;
+
+    // now you can initialise audio, telling it about the file system you want to use
+    var captureCfg = {
+        sampleRate: 16000,
+        bufferSize: 8192,
+        channels: 1,
+        format: audioinput.FORMAT.PCM_16BIT,
+        audioSourceType: audioinput.AUDIOSOURCE_TYPE.DEFAULT,
+        fileUrl: cordova.file.cacheDirectory
+    };
+    window.audioinput.initialize(captureCfg, function() {
+	
+	// now check whether we already have permission to access the microphone
+	window.audioinput.checkMicrophonePermission(function(hasPermission) {
+	    if (hasPermission) {
+		console.log("already have permission to record");
+	    } else {	    
+		// ask the user for permission to access the microphone
+		window.audioinput.getMicrophonePermission(function(hasPermission, message) {
+		    if (hasPermission) {
+			console.log("granted permission to record");
+		    } else {
+			console.warn("Denied permission to record");
+		    }
+		}); // getMicrophonePermission 
+ } + }); // checkMicrophonePermission + }); // initialize +}, function (e) { + console.log("Couldn't get file system: " + e.message) +}); + + +// then later on, when we want to record to a file... + +var captureCfg = { + fileUrl : cordova.file.cacheDirectory + "temp.wav" +} +audioinput.start(captureCfg); + +// ... and when we're ready to stop recording +audioinput.stop(function(url) { + // now you have the URL (which might be different to the one passed in to start()) + // you might, for example, read the data into a blob + window.resolveLocalFileSystemURL(url, function (tempFile) { + tempFile.file(function (tempWav) { + var reader = new FileReader(); + reader.onloadend = function(e) { + var blob = new Blob([new Uint8Array(this.result)], { type: "audio/wav" }); + // delete the temporary file + tempFile.remove(function (e) { console.log("temporary WAV deleted"); }, fileError); + // do something with the blob: + doSomethingWithWAVData(blob); + } + reader.readAsArrayBuffer(tempWav); + }); + }, function(e) { + console.log("Could not resolveLocalFileSystemURL: " + e.message); + }); +}); + + ``` ## Demos @@ -202,8 +277,11 @@ var captureCfg = { ``` **Stop capturing audio** from the microphone: +The callback function has a single string argument, which is the url where the file was saved, +if a fileUrl was passed in to start as part of captureCfg. +Note that the url passed out from stop is not guaranteed to be the same as the fileUrl passed in. ```javascript -audioinput.stop(); +audioinput.stop( onStopped ); ``` **Check if the plugin is capturing**, i.e. if it is started or not: