From 9b6550d3c3036ebe42bae095695c41e1a8bf0fa0 Mon Sep 17 00:00:00 2001 From: enricostrijks Date: Wed, 10 Sep 2025 10:34:25 +0200 Subject: [PATCH 1/2] Update build.gradle --- android/build.gradle | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/android/build.gradle b/android/build.gradle index 61cfc68..09a4776 100644 --- a/android/build.gradle +++ b/android/build.gradle @@ -27,7 +27,7 @@ apply plugin: 'com.android.library' android { // Bumping the plugin compileSdkVersion requires all clients of this plugin // to bump the version in their app. - compileSdkVersion 31 + compileSdkVersion 35 namespace 'org.tensorflow.tflite_flutter' // Bumping the plugin ndkVersion requires all clients of this plugin to bump @@ -55,7 +55,8 @@ android { } defaultConfig { - minSdkVersion 19 + minSdkVersion 23 + targetSdkVersion 35 } } @@ -65,4 +66,4 @@ dependencies { implementation("org.tensorflow:tensorflow-lite:${tflite_version}") implementation("org.tensorflow:tensorflow-lite-gpu:${tflite_version}") -} +} \ No newline at end of file From 1c60d1d85ac6431c55fbe7e1898c4853473b249a Mon Sep 17 00:00:00 2001 From: enricostrijks Date: Wed, 10 Sep 2025 10:35:34 +0200 Subject: [PATCH 2/2] Remove deprecated withOpacity and swap it for withValues --- example/audio_classification/lib/main.dart | 25 +++------ example/bertqa/lib/ui/qa_detail.dart | 53 ++++--------------- example/digit_classification/lib/main.dart | 15 ++---- example/gesture_classification/lib/main.dart | 27 +++------- .../lib/main.dart | 8 ++- example/image_segmentation/lib/main.dart | 48 +++++------------ .../lib/ui/detector_widget.dart | 12 ++--- .../lib/main.dart | 2 +- .../lib/main.dart | 2 +- example/pose_estimation/lib/main.dart | 27 +++------- example/style_transfer/lib/main.dart | 21 +++----- example/super_resolution_esrgan/lib/main.dart | 2 +- 12 files changed, 67 insertions(+), 175 deletions(-) diff --git a/example/audio_classification/lib/main.dart b/example/audio_classification/lib/main.dart 
index e65a6ee..a384d74 100644 --- a/example/audio_classification/lib/main.dart +++ b/example/audio_classification/lib/main.dart @@ -52,16 +52,14 @@ class MyHomePage extends StatefulWidget { } class _MyHomePageState extends State { - static const platform = - MethodChannel('org.tensorflow.audio_classification/audio_record'); + static const platform = MethodChannel('org.tensorflow.audio_classification/audio_record'); // The YAMNet/classifier model used in this code example accepts data that // represent single-channel, or mono, audio clips recorded at 16kHz in 0.975 // second clips (15600 samples). static const _sampleRate = 16000; // 16kHz static const _expectAudioLength = 975; // milliseconds - final int _requiredInputBuffer = - (16000 * (_expectAudioLength / 1000)).toInt(); + final int _requiredInputBuffer = (16000 * (_expectAudioLength / 1000)).toInt(); late AudioClassificationHelper _helper; List> _classification = List.empty(); final List _primaryProgressColorList = [ @@ -102,10 +100,7 @@ class _MyHomePageState extends State { Future _requestPermission() async { try { - return await platform.invokeMethod('requestPermissionAndCreateRecorder', { - "sampleRate": _sampleRate, - "requiredInputBuffer": _requiredInputBuffer - }); + return await platform.invokeMethod('requestPermissionAndCreateRecorder', {"sampleRate": _sampleRate, "requiredInputBuffer": _requiredInputBuffer}); } on Exception catch (e) { log("Failed to create recorder: '${e.toString()}'."); return false; @@ -115,8 +110,7 @@ class _MyHomePageState extends State { Future _getAudioFloatArray() async { var audioFloatArray = Float32List(0); try { - final Float32List result = - await platform.invokeMethod('getAudioFloatArray'); + final Float32List result = await platform.invokeMethod('getAudioFloatArray'); audioFloatArray = result; } on PlatformException catch (e) { log("Failed to get audio array: '${e.message}'."); @@ -160,8 +154,7 @@ class _MyHomePageState extends State { Future _runInference() async { 
Float32List inputArray = await _getAudioFloatArray(); - final result = - await _helper.inference(inputArray.sublist(0, _requiredInputBuffer)); + final result = await _helper.inference(inputArray.sublist(0, _requiredInputBuffer)); setState(() { // take top 3 classification _classification = (result.entries.toList() @@ -186,7 +179,7 @@ class _MyHomePageState extends State { backgroundColor: Colors.white, appBar: AppBar( title: Image.asset('assets/images/tfl_logo.png'), - backgroundColor: Colors.black.withOpacity(0.5), + backgroundColor: Colors.black.withValues(alpha: 0.5), ), body: _buildBody(), ); @@ -216,10 +209,8 @@ class _MyHomePageState extends State { ), Flexible( child: LinearProgressIndicator( - backgroundColor: _backgroundProgressColorList[ - index % _backgroundProgressColorList.length], - color: _primaryProgressColorList[ - index % _primaryProgressColorList.length], + backgroundColor: _backgroundProgressColorList[index % _backgroundProgressColorList.length], + color: _primaryProgressColorList[index % _primaryProgressColorList.length], value: item.value, minHeight: 20, )) diff --git a/example/bertqa/lib/ui/qa_detail.dart b/example/bertqa/lib/ui/qa_detail.dart index 0271923..b74f264 100644 --- a/example/bertqa/lib/ui/qa_detail.dart +++ b/example/bertqa/lib/ui/qa_detail.dart @@ -19,11 +19,7 @@ import 'package:bertqa/ml/qa_client.dart'; import 'package:flutter/material.dart'; class QaDetail extends StatefulWidget { - const QaDetail( - {super.key, - required this.title, - required this.content, - required this.questions}); + const QaDetail({super.key, required this.title, required this.content, required this.questions}); final String title; final String content; @@ -66,8 +62,7 @@ class _QaDetailState extends State { if (!trimQuestion.endsWith("?")) { trimQuestion += "?"; } - List answers = - await _qaClient.runInference(trimQuestion, widget.content); + List answers = await _qaClient.runInference(trimQuestion, widget.content); // Highlight the answer here 
_highlightAnswer(answers.first); } @@ -115,36 +110,15 @@ class _QaDetailState extends State { style: Theme.of(context).textTheme.bodyMedium, ) : RichText( - text: TextSpan( - style: Theme.of(context).textTheme.bodyMedium, - children: [ - if (_answerIndex > 0) - TextSpan( - text: widget.content - .substring(0, _answerIndex)), - TextSpan( - style: TextStyle( - background: Paint() - ..color = Colors.yellow), - text: widget.content.substring(_answerIndex, - _answerIndex + _qaAnswer!.text.length)), - if ((_answerIndex + _qaAnswer!.text.length) < - widget.content.length) - TextSpan( - text: widget.content.substring( - _answerIndex + _qaAnswer!.text.length, - widget.content.length)) - ]), + text: TextSpan(style: Theme.of(context).textTheme.bodyMedium, children: [ + if (_answerIndex > 0) TextSpan(text: widget.content.substring(0, _answerIndex)), + TextSpan(style: TextStyle(background: Paint()..color = Colors.yellow), text: widget.content.substring(_answerIndex, _answerIndex + _qaAnswer!.text.length)), + if ((_answerIndex + _qaAnswer!.text.length) < widget.content.length) TextSpan(text: widget.content.substring(_answerIndex + _qaAnswer!.text.length, widget.content.length)) + ]), ))), Container( padding: const EdgeInsets.all(16), - decoration: BoxDecoration(color: Colors.white, boxShadow: [ - BoxShadow( - color: Colors.grey.withOpacity(0.5), - spreadRadius: 2, - blurRadius: 5, - offset: const Offset(0, 3)) - ]), + decoration: BoxDecoration(color: Colors.white, boxShadow: [BoxShadow(color: Colors.grey.withValues(alpha: 0.5), spreadRadius: 2, blurRadius: 5, offset: const Offset(0, 3))]), // color: Colors.white, child: Column( children: [ @@ -157,8 +131,7 @@ class _QaDetailState extends State { child: ListView.separated( shrinkWrap: true, scrollDirection: Axis.horizontal, - separatorBuilder: (BuildContext context, int index) => - const Divider( + separatorBuilder: (BuildContext context, int index) => const Divider( indent: 16, ), itemCount: widget.questions.length, @@ -175,9 
+148,7 @@ class _QaDetailState extends State { Expanded( child: TextField( controller: _controller, - decoration: const InputDecoration( - border: UnderlineInputBorder(), - labelText: "Text query"), + decoration: const InputDecoration(border: UnderlineInputBorder(), labelText: "Text query"), onChanged: (text) { setState(() { _currentQuestion = text; @@ -194,9 +165,7 @@ class _QaDetailState extends State { _answerQuestion(); } : null, - style: ElevatedButton.styleFrom( - disabledBackgroundColor: Colors.grey, - backgroundColor: const Color(0xFFFFA800)), + style: ElevatedButton.styleFrom(disabledBackgroundColor: Colors.grey, backgroundColor: const Color(0xFFFFA800)), child: const Icon( Icons.east, color: Colors.white, diff --git a/example/digit_classification/lib/main.dart b/example/digit_classification/lib/main.dart index b9c3145..bb40218 100644 --- a/example/digit_classification/lib/main.dart +++ b/example/digit_classification/lib/main.dart @@ -65,15 +65,13 @@ class _MyHomePageState extends State { Future _predictNumber() async { // capture sketch area - RenderRepaintBoundary boundary = - _globalKey.currentContext!.findRenderObject() as RenderRepaintBoundary; + RenderRepaintBoundary boundary = _globalKey.currentContext!.findRenderObject() as RenderRepaintBoundary; ui.Image image = await boundary.toImage(); final byteData = await image.toByteData(format: ui.ImageByteFormat.png); final inputImageData = byteData?.buffer.asUint8List(); final stopwatch = Stopwatch()..start(); - final (number, confidence) = - await _digitClassifierHelper.runInference(inputImageData!); + final (number, confidence) = await _digitClassifierHelper.runInference(inputImageData!); stopwatch.stop(); setState(() { @@ -91,7 +89,7 @@ class _MyHomePageState extends State { title: Center( child: Image.asset('assets/images/tfl_logo.png'), ), - backgroundColor: Colors.black.withOpacity(0.5), + backgroundColor: Colors.black.withValues(alpha: 0.5), ), body: Center( child: Column( @@ -107,9 +105,7 @@ class 
_MyHomePageState extends State { children: [ const Spacer(), const Text("Predicted number:"), - if (_predictedNumber != null && _predictedConfidence != null) - Text( - "$_predictedNumber (${_predictedConfidence?.toStringAsFixed(3)})"), + if (_predictedNumber != null && _predictedConfidence != null) Text("$_predictedNumber (${_predictedConfidence?.toStringAsFixed(3)})"), const Spacer(), Text("Inference Time: $_inferenceTime (ms)"), Padding( @@ -133,8 +129,7 @@ class _MyHomePageState extends State { } Widget sketchArea() { - return LayoutBuilder( - builder: (BuildContext context, BoxConstraints constraints) { + return LayoutBuilder(builder: (BuildContext context, BoxConstraints constraints) { return GestureDetector( onPanUpdate: (DragUpdateDetails details) { final width = constraints.maxWidth; diff --git a/example/gesture_classification/lib/main.dart b/example/gesture_classification/lib/main.dart index 803e967..b21e323 100644 --- a/example/gesture_classification/lib/main.dart +++ b/example/gesture_classification/lib/main.dart @@ -39,9 +39,7 @@ class MyApp extends StatelessWidget { colorScheme: ColorScheme.fromSeed(seedColor: Colors.orange), useMaterial3: true, ), - home: const MyHomePage( - title: - 'An end-to-end example of gesture classification using Flutter and TensorFlow Lite'), + home: const MyHomePage(title: 'An end-to-end example of gesture classification using Flutter and TensorFlow Lite'), ); } } @@ -64,13 +62,8 @@ class _MyHomePageState extends State with WidgetsBindingObserver { // init camera _initCamera() { - _cameraDescription = _cameras.firstWhere( - (element) => element.lensDirection == CameraLensDirection.front); - _cameraController = CameraController( - _cameraDescription, ResolutionPreset.high, - imageFormatGroup: Platform.isIOS - ? 
ImageFormatGroup.bgra8888 - : ImageFormatGroup.yuv420); + _cameraDescription = _cameras.firstWhere((element) => element.lensDirection == CameraLensDirection.front); + _cameraController = CameraController(_cameraDescription, ResolutionPreset.high, imageFormatGroup: Platform.isIOS ? ImageFormatGroup.bgra8888 : ImageFormatGroup.yuv420); _cameraController!.initialize().then((value) { _cameraController!.startImageStream(_imageAnalysis); if (mounted) { @@ -85,8 +78,7 @@ class _MyHomePageState extends State with WidgetsBindingObserver { return; } _isProcessing = true; - _classification = - await _gestureClassificationHelper.inferenceCameraFrame(cameraImage); + _classification = await _gestureClassificationHelper.inferenceCameraFrame(cameraImage); _isProcessing = false; if (mounted) { setState(() {}); @@ -116,8 +108,7 @@ class _MyHomePageState extends State with WidgetsBindingObserver { _cameraController?.stopImageStream(); break; case AppLifecycleState.resumed: - if (_cameraController != null && - !_cameraController!.value.isStreamingImages) { + if (_cameraController != null && !_cameraController!.value.isStreamingImages) { await _cameraController!.startImageStream(_imageAnalysis); } break; @@ -166,7 +157,7 @@ class _MyHomePageState extends State with WidgetsBindingObserver { title: Center( child: Image.asset('assets/images/tfl_logo.png'), ), - backgroundColor: Colors.black.withOpacity(0.5), + backgroundColor: Colors.black.withValues(alpha: 0.5), ), body: Center( // Center is a layout widget. 
It takes a single child and positions it @@ -193,11 +184,7 @@ class _MyHomePageState extends State with WidgetsBindingObserver { padding: const EdgeInsets.all(8), color: Colors.white, child: Row( - children: [ - Text(e.key), - const Spacer(), - Text(e.value.toStringAsFixed(2)) - ], + children: [Text(e.key), const Spacer(), Text(e.value.toStringAsFixed(2))], ), ), ), diff --git a/example/image_classification_mobilenet/lib/main.dart b/example/image_classification_mobilenet/lib/main.dart index fad0275..96634cd 100644 --- a/example/image_classification_mobilenet/lib/main.dart +++ b/example/image_classification_mobilenet/lib/main.dart @@ -41,12 +41,10 @@ class BottomNavigationBarExample extends StatefulWidget { const BottomNavigationBarExample({super.key}); @override - State createState() => - _BottomNavigationBarExampleState(); + State createState() => _BottomNavigationBarExampleState(); } -class _BottomNavigationBarExampleState - extends State { +class _BottomNavigationBarExampleState extends State { late CameraDescription cameraDescription; int _selectedIndex = 0; List? _widgetOptions; @@ -88,7 +86,7 @@ class _BottomNavigationBarExampleState return Scaffold( appBar: AppBar( title: Image.asset('assets/images/tfl_logo.png'), - backgroundColor: Colors.black.withOpacity(0.5), + backgroundColor: Colors.black.withValues(alpha: 0.5), ), body: Center( child: _widgetOptions?.elementAt(_selectedIndex), diff --git a/example/image_segmentation/lib/main.dart b/example/image_segmentation/lib/main.dart index c8c8fbe..92bacf9 100644 --- a/example/image_segmentation/lib/main.dart +++ b/example/image_segmentation/lib/main.dart @@ -67,14 +67,8 @@ class _MyHomePageState extends State with WidgetsBindingObserver { List? 
_labelsIndex; Future _initCamera() async { - _cameraDescription = _cameras.firstWhere( - (element) => element.lensDirection == CameraLensDirection.back); - _cameraController = CameraController( - _cameraDescription, ResolutionPreset.medium, - imageFormatGroup: Platform.isIOS - ? ImageFormatGroup.bgra8888 - : ImageFormatGroup.yuv420, - enableAudio: false); + _cameraDescription = _cameras.firstWhere((element) => element.lensDirection == CameraLensDirection.back); + _cameraController = CameraController(_cameraDescription, ResolutionPreset.medium, imageFormatGroup: Platform.isIOS ? ImageFormatGroup.bgra8888 : ImageFormatGroup.yuv420, enableAudio: false); await _cameraController!.initialize().then((value) { _cameraController!.startImageStream(_imageAnalysis); if (mounted) { @@ -90,16 +84,12 @@ class _MyHomePageState extends State with WidgetsBindingObserver { } _isProcessing = true; // run image segmentation - final masks = - await _imageSegmentationHelper.inferenceCameraFrame(cameraImage); + final masks = await _imageSegmentationHelper.inferenceCameraFrame(cameraImage); _isProcessing = false; if (mounted) { // convert mask to image, if Platform is Android we need to swap width // and height because camera image in android is landscape - _convertToImage( - masks, - Platform.isIOS ? cameraImage.width : cameraImage.height, - Platform.isIOS ? cameraImage.height : cameraImage.width); + _convertToImage(masks, Platform.isIOS ? cameraImage.width : cameraImage.height, Platform.isIOS ? cameraImage.height : cameraImage.width); } } @@ -125,8 +115,7 @@ class _MyHomePageState extends State with WidgetsBindingObserver { } // convert output mask to image - void _convertToImage(List>>? masks, int originImageWidth, - int originImageHeight) async { + void _convertToImage(List>>? 
masks, int originImageWidth, int originImageHeight) async { if (masks == null) return null; final width = masks.length; final height = masks.first.length; @@ -167,15 +156,10 @@ class _MyHomePageState extends State with WidgetsBindingObserver { } // convert image matrix to image - image_lib.Image convertedImage = image_lib.Image.fromBytes( - width: width, - height: height, - bytes: Uint8List.fromList(imageMatrix).buffer, - numChannels: 4); + image_lib.Image convertedImage = image_lib.Image.fromBytes(width: width, height: height, bytes: Uint8List.fromList(imageMatrix).buffer, numChannels: 4); // resize output image to match original image - final resizeImage = image_lib.copyResize(convertedImage, - width: originImageWidth, height: originImageHeight); + final resizeImage = image_lib.copyResize(convertedImage, width: originImageWidth, height: originImageHeight); // convert image to ui.Image to display on screen final bytes = image_lib.encodePng(resizeImage); @@ -192,13 +176,8 @@ class _MyHomePageState extends State with WidgetsBindingObserver { // calculate scale to fit output image to screen var scale = 1.0; if (_displayImage != null) { - final minOutputSize = _displayImage!.width > _displayImage!.height - ? _displayImage!.height - : _displayImage!.width; - final minScreenSize = - MediaQuery.of(context).size.width > MediaQuery.of(context).size.height - ? MediaQuery.of(context).size.height - : MediaQuery.of(context).size.width; + final minOutputSize = _displayImage!.width > _displayImage!.height ? _displayImage!.height : _displayImage!.width; + final minScreenSize = MediaQuery.of(context).size.width > MediaQuery.of(context).size.height ? 
MediaQuery.of(context).size.height : MediaQuery.of(context).size.width; scale = minScreenSize / minOutputSize; } return Stack( @@ -224,14 +203,11 @@ class _MyHomePageState extends State with WidgetsBindingObserver { padding: const EdgeInsets.all(8), decoration: BoxDecoration( // parse color from label color - color: Color(ImageSegmentationHelper - .labelColors[_labelsIndex![index]]) - .withOpacity(0.5), + color: Color(ImageSegmentationHelper.labelColors[_labelsIndex![index]]).withValues(alpha: 0.5), borderRadius: BorderRadius.circular(8), ), child: Text( - _imageSegmentationHelper - .getLabelsName(_labelsIndex![index]), + _imageSegmentationHelper.getLabelsName(_labelsIndex![index]), style: const TextStyle( fontSize: 12, ), @@ -251,7 +227,7 @@ class _MyHomePageState extends State with WidgetsBindingObserver { title: Center( child: Image.asset('assets/images/tfl_logo.png'), ), - backgroundColor: Colors.black.withOpacity(0.5), + backgroundColor: Colors.black.withValues(alpha: 0.5), ), body: cameraWidget(context), ); diff --git a/example/live_object_detection_ssd_mobilenet/lib/ui/detector_widget.dart b/example/live_object_detection_ssd_mobilenet/lib/ui/detector_widget.dart index b9581bf..43df8cd 100644 --- a/example/live_object_detection_ssd_mobilenet/lib/ui/detector_widget.dart +++ b/example/live_object_detection_ssd_mobilenet/lib/ui/detector_widget.dart @@ -18,8 +18,7 @@ class DetectorWidget extends StatefulWidget { State createState() => _DetectorWidgetState(); } -class _DetectorWidgetState extends State - with WidgetsBindingObserver { +class _DetectorWidgetState extends State with WidgetsBindingObserver { /// List of available cameras late List cameras; @@ -114,14 +113,12 @@ class _DetectorWidgetState extends State ? 
Align( alignment: Alignment.bottomCenter, child: Container( - color: Colors.white.withAlpha(150), + color: Colors.white.withValues(alpha: 150 / 255), child: Padding( padding: const EdgeInsets.all(16.0), child: Column( mainAxisSize: MainAxisSize.min, - children: stats!.entries - .map((e) => StatsWidget(e.key, e.value)) - .toList(), + children: stats!.entries.map((e) => StatsWidget(e.key, e.value)).toList(), ), ), ), @@ -133,8 +130,7 @@ class _DetectorWidgetState extends State if (results == null) { return const SizedBox.shrink(); } - return Stack( - children: results!.map((box) => BoxWidget(result: box)).toList()); + return Stack(children: results!.map((box) => BoxWidget(result: box)).toList()); } /// Callback to receive each frame [CameraImage] perform inference on it diff --git a/example/object_detection_ssd_mobilenet/lib/main.dart b/example/object_detection_ssd_mobilenet/lib/main.dart index 12819e7..4556e11 100644 --- a/example/object_detection_ssd_mobilenet/lib/main.dart +++ b/example/object_detection_ssd_mobilenet/lib/main.dart @@ -64,7 +64,7 @@ class _MyHomeState extends State { return Scaffold( appBar: AppBar( title: Image.asset('assets/images/tfl_logo.png'), - backgroundColor: Colors.black.withOpacity(0.5), + backgroundColor: Colors.black.withValues(alpha: 0.5), ), body: SafeArea( child: Column( diff --git a/example/object_detection_ssd_mobilenet_v2/lib/main.dart b/example/object_detection_ssd_mobilenet_v2/lib/main.dart index 8071e39..7027ba5 100644 --- a/example/object_detection_ssd_mobilenet_v2/lib/main.dart +++ b/example/object_detection_ssd_mobilenet_v2/lib/main.dart @@ -49,7 +49,7 @@ class _MyHomeState extends State { return Scaffold( appBar: AppBar( title: Image.asset('assets/images/tfl_logo.png'), - backgroundColor: Colors.black.withOpacity(0.5), + backgroundColor: Colors.black.withValues(alpha: 0.5), ), body: SafeArea( child: Column( diff --git a/example/pose_estimation/lib/main.dart b/example/pose_estimation/lib/main.dart index 82336c0..d5cc600 100644 ---
a/example/pose_estimation/lib/main.dart +++ b/example/pose_estimation/lib/main.dart @@ -66,14 +66,8 @@ class _MyHomePageState extends State with WidgetsBindingObserver { // init camera _initCamera() { - _cameraDescription = _cameras.firstWhere( - (element) => element.lensDirection == CameraLensDirection.back); - _cameraController = CameraController( - _cameraDescription, ResolutionPreset.low, - enableAudio: false, - imageFormatGroup: Platform.isIOS - ? ImageFormatGroup.bgra8888 - : ImageFormatGroup.yuv420); + _cameraDescription = _cameras.firstWhere((element) => element.lensDirection == CameraLensDirection.back); + _cameraController = CameraController(_cameraDescription, ResolutionPreset.low, enableAudio: false, imageFormatGroup: Platform.isIOS ? ImageFormatGroup.bgra8888 : ImageFormatGroup.yuv420); _cameraController!.initialize().then((value) { _cameraController!.startImageStream(_imageAnalysis); if (mounted) { @@ -120,8 +114,7 @@ class _MyHomePageState extends State with WidgetsBindingObserver { _cameraController?.stopImageStream(); break; case AppLifecycleState.resumed: - if (_cameraController != null && - !_cameraController!.value.isStreamingImages) { + if (_cameraController != null && !_cameraController!.value.isStreamingImages) { await _cameraController!.startImageStream(_imageAnalysis); } break; @@ -142,8 +135,7 @@ class _MyHomePageState extends State with WidgetsBindingObserver { Widget resultWidget(context) { if (_cameraController == null) return Container(); - final scale = MediaQuery.of(context).size.width / - _cameraController!.value.previewSize!.height; + final scale = MediaQuery.of(context).size.width / _cameraController!.value.previewSize!.height; return Stack( children: [ @@ -180,7 +172,7 @@ class _MyHomePageState extends State with WidgetsBindingObserver { title: Center( child: Image.asset('assets/images/tfl_logo.png'), ), - backgroundColor: Colors.black.withOpacity(0.5), + backgroundColor: Colors.black.withValues(alpha: 0.5), ), body: 
resultWidget(context), ); @@ -233,19 +225,14 @@ class OverlayView extends CustomPainter { // draw circles if (_persons!.score > _minConfidence) { _persons?.keyPoints.forEach((element) { - canvas.drawCircle( - Offset( - element.coordinate.dx * _scale, element.coordinate.dy * _scale), - 5, - _circlePaint); + canvas.drawCircle(Offset(element.coordinate.dx * _scale, element.coordinate.dy * _scale), 5, _circlePaint); }); for (var index in _bodyJoints) { final pointA = _persons?.keyPoints[index[0].index].coordinate; final pointB = _persons?.keyPoints[index[1].index].coordinate; // drawLine if (pointA != null && pointB != null) { - canvas.drawLine(Offset(pointA.dx * _scale, pointA.dy * _scale), - Offset(pointB.dx * _scale, pointB.dy * _scale), _strokePaint); + canvas.drawLine(Offset(pointA.dx * _scale, pointA.dy * _scale), Offset(pointB.dx * _scale, pointB.dy * _scale), _strokePaint); } } } diff --git a/example/style_transfer/lib/main.dart b/example/style_transfer/lib/main.dart index dbd2ad3..55612de 100644 --- a/example/style_transfer/lib/main.dart +++ b/example/style_transfer/lib/main.dart @@ -51,10 +51,8 @@ class Home extends StatefulWidget { } class _HomeState extends State { - static const predictionModelPath = - 'assets/models/magenta_arbitrary-image-stylization-v1-256_int8_prediction_1.tflite'; - static const transferModelPath = - 'assets/models/magenta_arbitrary-image-stylization-v1-256_int8_transfer_1.tflite'; + static const predictionModelPath = 'assets/models/magenta_arbitrary-image-stylization-v1-256_int8_prediction_1.tflite'; + static const transferModelPath = 'assets/models/magenta_arbitrary-image-stylization-v1-256_int8_transfer_1.tflite'; late final Interpreter predictionInterpreter; late final IsolateInterpreter predictionIsolateInterpreter; @@ -121,16 +119,14 @@ class _HomeState extends State { options: predictionOptions, ); - predictionIsolateInterpreter = - await IsolateInterpreter.create(address: predictionInterpreter.address); + 
predictionIsolateInterpreter = await IsolateInterpreter.create(address: predictionInterpreter.address); transferInterpreter = await Interpreter.fromAsset( transferModelPath, options: transferOptions, ); - transferIsolateInterpreter = - await IsolateInterpreter.create(address: transferInterpreter.address); + transferIsolateInterpreter = await IsolateInterpreter.create(address: transferInterpreter.address); setState(() {}); @@ -278,8 +274,7 @@ class _HomeState extends State { ); // Encode image in jpeg format - img.Image resized = - img.copyResize(image, width: widthOrg, height: heightOrg); + img.Image resized = img.copyResize(image, width: widthOrg, height: heightOrg); imageResult = img.encodeJpg(resized); setState(() {}); @@ -291,7 +286,7 @@ class _HomeState extends State { return Scaffold( appBar: AppBar( title: Image.asset('assets/images/tfl_logo.png'), - backgroundColor: Colors.black.withOpacity(0.5), + backgroundColor: Colors.black.withValues(alpha: 0.5), ), body: SafeArea( child: Center( @@ -310,9 +305,7 @@ class _HomeState extends State { children: [ Padding( padding: const EdgeInsets.all(24), - child: imageResult != null - ? Image.memory(imageResult!) - : Image.file(File(imagePath!)), + child: imageResult != null ? Image.memory(imageResult!) : Image.file(File(imagePath!)), ), if (stylePath != null) Positioned( diff --git a/example/super_resolution_esrgan/lib/main.dart b/example/super_resolution_esrgan/lib/main.dart index 72bb7c3..9964e2d 100644 --- a/example/super_resolution_esrgan/lib/main.dart +++ b/example/super_resolution_esrgan/lib/main.dart @@ -181,7 +181,7 @@ class _HomeState extends State { return Scaffold( appBar: AppBar( title: Image.asset('assets/images/tfl_logo.png'), - backgroundColor: Colors.black.withOpacity(0.5), + backgroundColor: Colors.black.withValues(alpha: 0.5), ), body: SafeArea( child: Center(