Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
17 changes: 8 additions & 9 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -28,21 +28,20 @@ Friend is an AI wearable device that records everything you say, gives you proac
## How it works

```mermaid
graph TD
A[Device] -- Streams Audio --> B[Phone App]
B -- Saves Audio --> C[Phone Storage]
C -- After X mins --> D[Send Audio to Whisper API]
D -- Returns Transcript --> B[Phone App]
B -- Saves Transcript --> F[Phone Storage]
graph TD;
A[Device] -- Streams Audio --> B[Phone App];
B -- Transmits --> C[Deepgram];
C -- Returns Transcript --> D[Phone App];
D -- Saves Transcript --> E[Phone Storage];

classDef lightMode fill:#FFFFFF, stroke:#333333, color:#333333;
classDef darkMode fill:#333333, stroke:#FFFFFF, color:#FFFFFF;

classDef lightModeLinks stroke:#333333;
classDef darkModeLinks stroke:#FFFFFF;

class A,B,C,D,F lightMode
class A,B,C,D,F darkMode
class A,B,C,D,E lightMode
class A,B,C,D,E darkMode

linkStyle 0 stroke:#FF4136, stroke-width:2px
linkStyle 1 stroke:#1ABC9C, stroke-width:2px
Expand Down Expand Up @@ -85,7 +84,7 @@ Follow these steps to get started with your Friend.
3. Install [Flutter](https://docs.flutter.dev/get-started/install/macos/mobile-ios?tab=download) and [CocoaPods](https://guides.cocoapods.org/using/getting-started.html)
4. Install your environment variables

- For AppWithWearable, open file api_calls.dart located in `apps/AppWithWearable/lib/backend/api_requests ` Find "Whisper" and instead of "key", provide your own api-key for openai whisper for transcriptions to work
- For AppWithWearable, open the file `ble_receive_w_a_v.dart` located in `apps/AppWithWearable/lib/custom_code/actions/`. Find `DEEPGRAM_API_KEY` and replace it with your own Deepgram API key so that transcriptions work.

<img src="https://github.com/BasedHardware/Friend/assets/43514161/d0fb89d2-07fd-44e3-8563-68f938bb2319" alt="CleanShot 2024-03-25 at 21 58 42" width="400">

Expand Down
9 changes: 8 additions & 1 deletion apps/AppWithWearable/lib/app_state.dart
Original file line number Diff line number Diff line change
Expand Up @@ -350,13 +350,20 @@ class FFAppState extends ChangeNotifier {
_whispers.removeAt(_index);
}

void updateWhispersAtIndex(
/// Applies [updateFn] to the whisper stored at [index] and stores the result
/// back in place.
void updateWhispersAtIndexWithFunction(
  int index,
  String Function(String) updateFn,
) {
  final current = _whispers[index];
  _whispers[index] = updateFn(current);
}

/// Replaces the whisper at [index] with [value].
void updateWhispersAtIndex(int index, String value) {
  _whispers[index] = value;
}

/// Inserts [item] into the whisper list at position [index].
void insertAtIndexInWhispers(int index, String item) =>
    _whispers.insert(index, item);
Expand Down
117 changes: 68 additions & 49 deletions apps/AppWithWearable/lib/custom_code/actions/ble_receive_w_a_v.dart
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,15 @@ import 'package:flutter_blue_plus/flutter_blue_plus.dart';

import '/backend/schema/structs/index.dart';
import '/flutter_flow/flutter_flow_util.dart';
import 'dart:convert';
import 'package:web_socket_channel/io.dart';
import 'package:flutter/material.dart';

const serverUrl =
'wss://api.deepgram.com/v1/listen?encoding=linear16&sample_rate=8000&language=en&model=nova-2-general&no_delay=true&endpointing=100&interim_results=true&smart_format=true&diarize=true';
const apiKey = 'DEEPGRAM_API_KEY';

late IOWebSocketChannel channel;

const int sampleRate = 8000;
const int channelCount = 1;
Expand All @@ -18,27 +27,68 @@ const String audioCharacteristicUuid = "19b10001-e8f2-537e-4f6c-d104768a1214";
const String audioCharacteristicFormatUuid =
"19b10002-e8f2-537e-4f6c-d104768a1214";

/*List<int> filterAudioData(List<int> audioData) {
// Calculate the scaling factor
//
//int maxVal = audioData.reduce((curr, next) => curr > next ? curr : next);
//int minVal = audioData.reduce((curr, next) => curr < next ? curr : next);
//double scalingFactor = 2 * 32768 / (max(0, maxVal) - min(0, minVal));
// for each item in the list subtract 32
double afterSubtraction =
// Apply the scaling factor
List<int> scaledAudioData =
audioData.map((e) => (e * scalingFactor).toInt()).toList();
return scaledAudioData;
/// Opens a Deepgram realtime-transcription websocket (module-level [channel],
/// [serverUrl], [apiKey]) and wires its transcript events to two callbacks.
///
/// [interimCallback] receives the best transcript text so far for the current
/// line. [finalizedCallback] is invoked with an empty string whenever Deepgram
/// marks the end of an utterance, signalling the caller to start a new line
/// (this matches how the device-data widget consumes the callbacks).
Future<void> _initStream(void Function(String) finalizedCallback,
    void Function(String) interimCallback) async {
  print('Websocket Opening');
  channel = IOWebSocketChannel.connect(Uri.parse(serverUrl),
      headers: {'Authorization': 'Token $apiKey'});

  // Fragments that Deepgram has finalized (`is_final`) but that do not yet
  // complete an utterance accumulate here until `speech_final` arrives.
  var finals = <String>[];

  channel.ready.then((_) {
    channel.stream.listen(
      (event) {
        print('Event from Stream: $event');
        final parsed = jsonDecode(event);
        final transcript =
            parsed['channel']['alternatives'][0]['transcript'];
        final isFinal = parsed['is_final'];
        // BUG FIX: this previously read 'is_final' a second time. Deepgram's
        // streaming responses use a separate 'speech_final' flag to mark the
        // end of an utterance (endpointing), which is what this branch needs.
        final speechFinal = parsed['speech_final'];

        print('Transcript: $transcript');
        if (transcript.length > 0) {
          if (speechFinal) {
            // Utterance complete: emit the assembled text for the current
            // line, then ask the UI to open a fresh (empty) line.
            final assembled = finals.isEmpty
                ? transcript
                : '${finals.join(' ')} $transcript';
            interimCallback(assembled);
            finalizedCallback('');
            finals = [];
          } else {
            // Finalized fragments are kept for later assembly; either way
            // the UI shows the newest fragment immediately.
            if (isFinal) finals.add(transcript);
            interimCallback(transcript);
          }
        }
      },
      onError: (err) {
        // Stream-level error; connection is torn down via cancelOnError.
        print('Websocket Error: $err');
      },
      onDone: () {
        print('Websocket Closed');
      },
      cancelOnError: true,
    );
  }).onError((error, stackTrace) {
    print('WebsocketChannel was unable to establish connection');
  });

  try {
    await channel.ready;
  } catch (e) {
    // Surface the failure but do not rethrow: the caller treats transcription
    // as best-effort and continues with the BLE connection.
    print('Websocket was unable to establish connection');
  }
}

Future<FFUploadedFile?> bleReceiveWAV(
BTDeviceStruct btDevice, int recordDuration) async {
Future<String> bleReceiveWAV(
BTDeviceStruct btDevice, void Function(String) finalized_callback, void Function(String) interim_callback) async {
final device = BluetoothDevice.fromId(btDevice.id);
final completer = Completer<FFUploadedFile?>();
final completer = Completer<String>();

try {
_initStream(finalized_callback, interim_callback);
await device.connect();
print('Connected to device: ${device.id}');
List<BluetoothService> services = await device.discoverServices();
Expand All @@ -64,41 +114,10 @@ Future<FFUploadedFile?> bleReceiveWAV(
characteristic.value.listen((value) {
if (value.isEmpty) return;
value.removeRange(0, 3);
// print('values -- ${value[0]}, ${value[1]}');

// Interpret bytes as Int16 directly
for (int i = 0; i < value.length; i += 2) {
int byte1 = value[i];
int byte2 = value[i + 1];
int int16Value = (byte2 << 8) | byte1;
wavData.add(int16Value);

//print('$int16Value');
}

print(
'Received ------ ${value.length ~/ 2} samples, total: ${wavData.length}/$samplesToRead');
if (wavData.length >= samplesToRead && !completer.isCompleted) {
print('Received desired amount of data');
characteristic.setNotifyValue(false);
completer.complete(createWavFile(wavData));
} else {
print('Still need ${samplesToRead - wavData.length} samples');
}
channel.sink.add(value);
});

// Wait for the desired duration
final waitSeconds = recordDuration + 20;
await Future.delayed(Duration(seconds: waitSeconds));

// If the desired amount of data is not received within the duration,
// return null if the completer is not already completed
if (!completer.isCompleted) {
print(
'Recording duration reached without receiving enough data');
await characteristic.setNotifyValue(false);
completer.complete(null);
}


return completer.future;
}
Expand Down
1 change: 1 addition & 0 deletions apps/AppWithWearable/lib/main.dart
Original file line number Diff line number Diff line change
Expand Up @@ -63,6 +63,7 @@ class _MyAppState extends State<MyApp> {
@override
Widget build(BuildContext context) {
return MaterialApp.router(
debugShowCheckedModeBanner: false,
title: 'Friend Private',
localizationsDelegates: [
FFLocalizationsDelegate(),
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -13,9 +13,9 @@ class DeviceDataModel extends FlutterFlowModel<DeviceDataWidget> {
/// Removes the whisper at [index] from the backing list.
void removeAtIndexFromWhispers(int index) => whispers.removeAt(index);
void insertAtIndexInWhispers(int index, String item) =>
whispers.insert(index, item);
void updateWhispersAtIndex(int index, Function(String) updateFn) =>
void updateWhispersAtIndexFunction(int index, Function(String) updateFn) =>
whispers[index] = updateFn(whispers[index]);

/// Replaces the whisper at [index] with [value].
void updateWhispersAtIndex(int index, String value) {
  whispers[index] = value;
}
// Backing list of integers with add/remove mutators.
// NOTE(review): its consumer is not visible in this chunk — confirm usage
// before removing or renaming.
List<int> ints = [];
/// Appends [item] to [ints].
void addToInts(int item) => ints.add(item);
/// Removes the first occurrence of [item] from [ints].
void removeFromInts(int item) => ints.remove(item);
Expand All @@ -27,7 +27,7 @@ class DeviceDataModel extends FlutterFlowModel<DeviceDataWidget> {
/// State fields for stateful widgets in this component.

// Stores action output result for [Custom Action - bleReceiveWAV] action in deviceData widget.
FFUploadedFile? wav;
String wav = '';
// Stores action output result for [Backend Call - API (WHISPER D)] action in deviceData widget.
ApiCallResponse? whsiper;

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -40,58 +40,43 @@ class _DeviceDataWidgetState extends State<DeviceDataWidget> {

// On component load action.
SchedulerBinding.instance.addPostFrameCallback((_) async {
while (true) {
_model.wav = await actions.bleReceiveWAV(
widget.btdevice!,
30,
);
if (_model.wav != null && (_model.wav?.bytes?.isNotEmpty ?? false)) {
_model.whsiper = await WhisperDCall.call(
file: _model.wav,
);
if ((_model.whsiper?.succeeded ?? true)) {
print('Checking for transcript');
_model.wav = await actions.bleReceiveWAV(
widget.btdevice!,
(String receivedData) {
print("Deepgram Finalized Callback received: $receivedData");
setState(() {
_model.addToWhispers(receivedData);
});
setState(() {
FFAppState().addToWhispers(receivedData);
});
// You can perform any action using receivedData here
},
(String receivedData) {
print("Deepgram Interim Callback received: $receivedData");

// We dont have any whispers yet so we need to create the first one to update
if(_model.whispers.length == 0){
setState(() {
_model.addToWhispers(WhisperDCall.text(
(_model.whsiper?.jsonBody ?? ''),
).toString());
_model.addToWhispers(receivedData);
});
setState(() {
FFAppState().addToWhispers(WhisperDCall.text(
(_model.whsiper?.jsonBody ?? ''),
).toString());
FFAppState().addToWhispers(receivedData);
});
} else {
await showDialog(
context: context,
builder: (alertDialogContext) {
return AlertDialog(
title: Text('Error'),
content: Text((_model.whsiper?.jsonBody ?? '').toString()),
actions: [
TextButton(
onPressed: () => Navigator.pop(alertDialogContext),
child: Text('Ok'),
),
],
);
},
);
setState(() {
_model.updateWhispersAtIndex(_model.whispers.length-1, receivedData);
});
setState(() {
FFAppState().updateWhispersAtIndex(_model.whispers.length-1, receivedData);
});
}
} else {
ScaffoldMessenger.of(context).showSnackBar(
SnackBar(
content: Text(
'No audio yet',
style: TextStyle(
color: FlutterFlowTheme.of(context).primary,
),
),
duration: Duration(milliseconds: 4000),
backgroundColor: FlutterFlowTheme.of(context).secondary,
),
);


}
}
);

});

WidgetsBinding.instance.addPostFrameCallback((_) => setState(() {}));
Expand All @@ -104,9 +89,9 @@ class _DeviceDataWidgetState extends State<DeviceDataWidget> {
super.dispose();
}

@override
Widget build(BuildContext context) {
context.watch<FFAppState>();
@override
Widget build(BuildContext context) {
context.watch<FFAppState>();

return Align(
alignment: AlignmentDirectional(0.0, 0.0),
Expand Down
1 change: 1 addition & 0 deletions apps/AppWithWearable/pubspec.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -49,6 +49,7 @@ dependencies:
url_launcher_platform_interface: 2.3.2
wav: ^1.3.0
lottie: 3.1.0
web_socket_channel: ^2.4.0

dependency_overrides:
http: 1.2.0
Expand Down