Skip to content

Commit

Permalink
Merge pull request #5 from SimformSolutionsPvtLtd/feature/audio-forma…
Browse files Browse the repository at this point in the history
…ts-support

✨ added support for encoders ♻️ refactored code to support different audio formats
  • Loading branch information
DevarshRanpara committed Mar 15, 2022
2 parents 8ab0508 + 29d2e99 commit 04f7608
Show file tree
Hide file tree
Showing 10 changed files with 294 additions and 106 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -17,18 +17,31 @@ import kotlin.math.log10
private const val LOG_TAG = "AudioWave"
private const val RECORD_AUDIO_REQUEST_CODE = 1001

class AudioWaveMethodCall: PluginRegistry.RequestPermissionsResultListener {
class AudioWaveMethodCall : PluginRegistry.RequestPermissionsResultListener {
private var permissions = arrayOf(Manifest.permission.RECORD_AUDIO)
fun getDecibel(result: MethodChannel.Result, recorder: MediaRecorder?) {
fun getDecibel(result: MethodChannel.Result, recorder: MediaRecorder?) {
    // Report the current input level in decibels relative to full scale.
    // FIX: the original wrote `?: 0.0 / 32768.0`, which parses as
    // `amplitude ?: (0.0 / 32768.0)` because `/` binds tighter than `?:` —
    // the amplitude was never normalised. Parenthesising restores the
    // intended 20 * log10(amplitude / 32768.0).
    val amplitude = recorder?.maxAmplitude?.toDouble() ?: 0.0
    val db = 20 * log10(amplitude / 32768.0)
    if (db == Double.NEGATIVE_INFINITY) {
        // log10(0) is -Infinity: no signal was captured, so don't report a bogus value.
        Log.e(LOG_TAG, "Microphone might be turned off")
    } else {
        result.success(db)
    }
}

fun initRecorder(path: String, result: MethodChannel.Result, recorder: MediaRecorder?) {
@RequiresApi(Build.VERSION_CODES.LOLLIPOP)
fun initRecorder(
path: String,
result: MethodChannel.Result,
recorder: MediaRecorder?,
enCoder: Int,
outputFormat: Int,
sampleRate: Int
) {
recorder?.apply {
setAudioSource(MediaRecorder.AudioSource.MIC)
setOutputFormat(MediaRecorder.OutputFormat.MPEG_4)
setAudioEncoder(MediaRecorder.AudioEncoder.AAC)
setOutputFormat(getOutputFormat(outputFormat))
setAudioEncoder(getEncoder(enCoder))
setAudioSamplingRate(sampleRate)
setOutputFile(path)
try {
recorder.prepare()
Expand All @@ -39,13 +52,14 @@ class AudioWaveMethodCall: PluginRegistry.RequestPermissionsResultListener {
}
}

fun stopRecording(result: MethodChannel.Result, recorder: MediaRecorder?, path: String) {
    // Stop and release the recorder, then hand the recorded file's path back
    // to Dart so the caller knows where the audio was written.
    try {
        recorder?.apply {
            stop()
            reset()
            release()
        }
        result.success(path)
    } catch (e: IllegalStateException) {
        // stop() throws if start() was never called. NOTE(review): on this
        // path the method channel is never answered, so the Dart future will
        // hang — consider result.error(...) here. TODO confirm desired contract.
        Log.e(LOG_TAG, "Failed to stop recording")
    }
}
Expand Down Expand Up @@ -79,6 +93,7 @@ class AudioWaveMethodCall: PluginRegistry.RequestPermissionsResultListener {
Log.e(LOG_TAG, "Failed to resume recording")
}
}

override fun onRequestPermissionsResult(
requestCode: Int,
permissions: Array<out String>?,
Expand All @@ -97,7 +112,7 @@ class AudioWaveMethodCall: PluginRegistry.RequestPermissionsResultListener {
return result == PackageManager.PERMISSION_GRANTED
}

fun checkPermission(result: MethodChannel.Result,activity: Activity?) {
fun checkPermission(result: MethodChannel.Result, activity: Activity?) {
if (!isPermissionGranted(activity)) {
activity?.let {
ActivityCompat.requestPermissions(
Expand All @@ -109,4 +124,30 @@ class AudioWaveMethodCall: PluginRegistry.RequestPermissionsResultListener {
result.success(true)
}
}

/**
 * Maps the integer codec index received over the method channel to a
 * [MediaRecorder.AudioEncoder] constant. Index 5 (Opus) requires Android Q;
 * on older devices, and for any unknown index, AAC is used as the fallback.
 */
private fun getEncoder(enCoder: Int): Int = when (enCoder) {
    1 -> MediaRecorder.AudioEncoder.AAC_ELD
    2 -> MediaRecorder.AudioEncoder.HE_AAC
    3 -> MediaRecorder.AudioEncoder.AMR_NB
    4 -> MediaRecorder.AudioEncoder.AMR_WB
    5 ->
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) {
            MediaRecorder.AudioEncoder.OPUS
        } else {
            Log.e(LOG_TAG, "Minimum android Q is required, Setting Acc encoder.")
            MediaRecorder.AudioEncoder.AAC
        }
    else -> MediaRecorder.AudioEncoder.AAC
}

/**
 * Chooses the container format for the recording: indices 3 and 4 (the AMR
 * encoders) are written to a 3GP container, everything else to MPEG-4.
 */
private fun getOutputFormat(format: Int): Int = when (format) {
    3, 4 -> MediaRecorder.OutputFormat.THREE_GPP
    else -> MediaRecorder.OutputFormat.MPEG_4
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,8 @@ import io.flutter.plugin.common.MethodChannel.MethodCallHandler
import io.flutter.plugin.common.MethodChannel.Result
import java.io.File
import java.io.IOException
import java.text.DateFormat.getDateTimeInstance
import java.text.SimpleDateFormat
import java.util.*


/** AudioWavePlugin */
Expand All @@ -26,6 +28,8 @@ class AudioWavePlugin : FlutterPlugin, MethodCallHandler, ActivityAware {
private var activity: Activity? = null
private lateinit var audioWaveMethodCall: AudioWaveMethodCall
private var path: String? = null
private var codec: Int = 0
private var sampleRate: Int = 16000

object Constants {
const val initRecorder = "initRecorder"
Expand All @@ -38,6 +42,9 @@ class AudioWavePlugin : FlutterPlugin, MethodCallHandler, ActivityAware {
const val path = "path"
const val LOG_TAG = "AudioWave"
const val methodChannelName = "simform_audio_wave_plugin/methods"
const val enCoder = "enCoder"
const val sampleRate = "sampleRate"
const val fileNameFormat = "dd-MM-yy-hh-mm-ss"
}

override fun onAttachedToEngine(@NonNull flutterPluginBinding: FlutterPlugin.FlutterPluginBinding) {
Expand All @@ -51,12 +58,14 @@ class AudioWavePlugin : FlutterPlugin, MethodCallHandler, ActivityAware {
when (call.method) {
Constants.initRecorder -> {
path = call.argument(Constants.path) as String?
checkPathAndInitialiseRecorder(result)
codec = (call.argument(Constants.enCoder) as Int?) ?: 0
sampleRate = (call.argument(Constants.sampleRate) as Int?) ?: 16000
checkPathAndInitialiseRecorder(result, codec, sampleRate)
}
Constants.startRecording -> audioWaveMethodCall.startRecorder(result, recorder)
Constants.stopRecording -> {
audioWaveMethodCall.stopRecording(result, recorder, path!!)
recorder = null
audioWaveMethodCall.stopRecording(result, recorder)
}
Constants.pauseRecording -> audioWaveMethodCall.pauseRecording(result, recorder)
Constants.resumeRecording -> audioWaveMethodCall.resumeRecording(result, recorder)
Expand All @@ -66,25 +75,47 @@ class AudioWavePlugin : FlutterPlugin, MethodCallHandler, ActivityAware {
}
}

@RequiresApi(Build.VERSION_CODES.LOLLIPOP)
private fun checkPathAndInitialiseRecorder(
    result: Result,
    enCoder: Int,
    sampleRate: Int
) {
    // When the caller supplied no path, record into a temp file in the app's
    // cache directory named after the current timestamp.
    if (path == null) {
        val outputDir = activity?.cacheDir
        // FIX: DateFormat.getDateTimeInstance().format(Constants.fileNameFormat)
        // passes a String to DateFormat.format(Object), which throws
        // IllegalArgumentException ("Cannot format given Object as a Date").
        // Use SimpleDateFormat with the configured pattern instead.
        val currentDate = SimpleDateFormat(Constants.fileNameFormat, Locale.US).format(Date())
        try {
            val outputFile = File.createTempFile(currentDate, ".aac", outputDir)
            path = outputFile.path
            try {
                recorder = MediaRecorder()
            } catch (e: Exception) {
                Log.e(Constants.LOG_TAG, "Failed to initialise Recorder")
            }
            // NOTE(review): `enCoder` is passed for both the encoder and the
            // output-format argument; getOutputFormat() maps the AMR encoder
            // indices (3, 4) to a 3GP container — TODO confirm this is intended.
            audioWaveMethodCall.initRecorder(
                path!!,
                result,
                recorder,
                enCoder,
                enCoder,
                sampleRate
            )
        } catch (e: IOException) {
            Log.e(Constants.LOG_TAG, "Failed to create file")
        }
    } else {
        audioWaveMethodCall.initRecorder(
            path!!,
            result,
            recorder,
            enCoder,
            enCoder,
            sampleRate
        )
    }
}

Expand Down
98 changes: 60 additions & 38 deletions example/lib/main.dart
Original file line number Diff line number Diff line change
Expand Up @@ -31,7 +31,7 @@ class _HomeState extends State<Home> {
@override
void initState() {
super.initState();
waveController = WaveController();
waveController = WaveController()..encoder = Encoder.aac;
}

@override
Expand All @@ -43,9 +43,7 @@ class _HomeState extends State<Home> {
@override
Widget build(BuildContext context) {
return Scaffold(
appBar: AppBar(
title: const Text("Audio Wave Example"),
),
backgroundColor: const Color(0xFF394253),
body: Column(
crossAxisAlignment: CrossAxisAlignment.center,
children: [
Expand Down Expand Up @@ -74,47 +72,71 @@ class _HomeState extends State<Home> {
),
),
decoration: BoxDecoration(
borderRadius: BorderRadius.circular(14.0),
color: Colors.black
borderRadius: BorderRadius.circular(14.0),
gradient: const LinearGradient(
colors: <Color>[
Color(0xFF615766),
Color(0xFF394253),
Color(0xFF412B4F),
],
begin: Alignment.bottomLeft,
stops: <double>[0.2, 0.45, 0.8],
),
),
),
const SizedBox(height: 40),
Row(
mainAxisAlignment: MainAxisAlignment.center,
children: [
Center(
child: CircleAvatar(
backgroundColor: Colors.black,
child: IconButton(
onPressed: waveController.record,
color: Colors.white,
icon: const Icon(Icons.play_arrow),
Center(
child: Container(
decoration: BoxDecoration(
gradient: const LinearGradient(
colors: [Color(0xff2D3548), Color(0xff151922)],
stops: [0.1, 0.45],
begin: Alignment.topCenter,
end: Alignment.bottomCenter),
borderRadius: BorderRadius.circular(12.0)),
padding: const EdgeInsets.all(12.0),
child: Row(
mainAxisAlignment: MainAxisAlignment.center,
mainAxisSize: MainAxisSize.min,
children: [
CircleAvatar(
backgroundColor: Colors.black,
child: IconButton(
onPressed: waveController.record,
color: Colors.white,
icon: const Icon(Icons.play_arrow),
),
),
),
),
const SizedBox(width: 10),
Center(
child: CircleAvatar(
backgroundColor: Colors.black,
child: IconButton(
onPressed: waveController.pause,
color: Colors.white,
icon: const Icon(Icons.pause),
const SizedBox(width: 10),
CircleAvatar(
backgroundColor: Colors.black,
child: IconButton(
onPressed: waveController.pause,
color: Colors.white,
icon: const Icon(Icons.pause),
),
),
),
),
const SizedBox(width: 10),
Center(
child: CircleAvatar(
backgroundColor: Colors.black,
child: IconButton(
onPressed: waveController.refresh,
color: Colors.white,
icon: const Icon(Icons.refresh),
const SizedBox(width: 10),
CircleAvatar(
backgroundColor: Colors.black,
child: IconButton(
onPressed: waveController.stop,
color: Colors.white,
icon: const Icon(Icons.stop),
),
),
const SizedBox(width: 10),
CircleAvatar(
backgroundColor: Colors.black,
child: IconButton(
onPressed: waveController.refresh,
color: Colors.white,
icon: const Icon(Icons.refresh),
),
),
),
],
),
],
),
)
],
),
Expand Down
30 changes: 25 additions & 5 deletions ios/Classes/AudioWaveMethodCall.swift
Original file line number Diff line number Diff line change
Expand Up @@ -5,17 +5,20 @@ public class AudioWaveMethodCall: NSObject, AVAudioRecorderDelegate{
var path: String?
var hasPermission: Bool = false

public func startRecording(_ result: @escaping FlutterResult,_ path: String?){
public func startRecording(_ result: @escaping FlutterResult,_ path: String?,_ encoder : Int?,_ sampleRate : Int?,_ fileNameFormat: String){
let settings = [
AVFormatIDKey: Int(kAudioFormatMPEG4AAC),
AVSampleRateKey: 16000,
AVFormatIDKey: getEncoder(encoder ?? 0),
AVSampleRateKey: sampleRate ?? 16000,
AVNumberOfChannelsKey: 1,
AVEncoderAudioQualityKey: AVAudioQuality.high.rawValue
]
let options: AVAudioSession.CategoryOptions = [.defaultToSpeaker, .allowBluetooth]
if (path == nil) {
let directory = NSTemporaryDirectory()
let fileName = UUID().uuidString + ".m4a"
let date = Date()
let dateFormatter = DateFormatter()
dateFormatter.dateFormat = fileNameFormat
let fileName = dateFormatter.string(from: date) + ".aac"

self.path = NSURL.fileURL(withPathComponents: [directory, fileName])?.absoluteString
} else {
Expand All @@ -41,7 +44,7 @@ public class AudioWaveMethodCall: NSObject, AVAudioRecorderDelegate{
/// Stops and releases the recorder, then reports the recorded file's path
/// back to Dart (mirrors the Android implementation's contract).
public func stopRecording(_ result: @escaping FlutterResult) {
    audioRecorder?.stop()
    audioRecorder = nil
    result(path)
}

public func pauseRecording(_ result: @escaping FlutterResult) {
Expand Down Expand Up @@ -73,8 +76,25 @@ public class AudioWaveMethodCall: NSObject, AVAudioRecorderDelegate{
hasPermission = true
break
@unknown default:
hasPermission = false
break
}
result(hasPermission)
}
/// Translates the codec index received over the method channel into a
/// CoreAudio `kAudioFormat*` identifier; unknown indices fall back to AAC.
///
/// NOTE(review): this index→codec mapping differs from the Android side
/// (there 3 → AMR-NB, 4 → AMR-WB, 5 → Opus) — verify both platforms agree
/// on the shared Dart encoder enum ordering. TODO confirm.
public func getEncoder(_ enCoder: Int) -> Int {
    let formats: [Int: AudioFormatID] = [
        1: kAudioFormatMPEG4AAC_ELD,
        2: kAudioFormatMPEG4AAC_HE,
        3: kAudioFormatOpus,
        4: kAudioFormatAMR,
        5: kAudioFormatAMR_WB,
    ]
    return Int(formats[enCoder] ?? kAudioFormatMPEG4AAC)
}
}
Loading

0 comments on commit 04f7608

Please sign in to comment.