-
-
Notifications
You must be signed in to change notification settings - Fork 597
/
just_audio.dart
3626 lines (3195 loc) · 129 KB
/
just_audio.dart
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814
815
816
817
818
819
820
821
822
823
824
825
826
827
828
829
830
831
832
833
834
835
836
837
838
839
840
841
842
843
844
845
846
847
848
849
850
851
852
853
854
855
856
857
858
859
860
861
862
863
864
865
866
867
868
869
870
871
872
873
874
875
876
877
878
879
880
881
882
883
884
885
886
887
888
889
890
891
892
893
894
895
896
897
898
899
900
901
902
903
904
905
906
907
908
909
910
911
912
913
914
915
916
917
918
919
920
921
922
923
924
925
926
927
928
929
930
931
932
933
934
935
936
937
938
939
940
941
942
943
944
945
946
947
948
949
950
951
952
953
954
955
956
957
958
959
960
961
962
963
964
965
966
967
968
969
970
971
972
973
974
975
976
977
978
979
980
981
982
983
984
985
986
987
988
989
990
991
992
993
994
995
996
997
998
999
1000
import 'dart:async';
import 'dart:convert';
import 'dart:io';
import 'dart:math';
import 'package:audio_session/audio_session.dart';
import 'package:crypto/crypto.dart';
import 'package:flutter/foundation.dart';
import 'package:flutter/services.dart';
import 'package:flutter/widgets.dart';
import 'package:just_audio_platform_interface/just_audio_platform_interface.dart';
import 'package:meta/meta.dart' show experimental;
import 'package:path/path.dart' as p;
import 'package:path_provider/path_provider.dart';
import 'package:rxdart/rxdart.dart';
import 'package:uuid/uuid.dart';
// Generator for the v4 (random) UUIDs used to give each [AudioPlayer] a
// unique id (see the `_id` initializer in [AudioPlayer]'s constructor).
final _uuid = Uuid();
/// An object to manage playing audio from a URL, a local file or an asset.
///
/// ```
/// final player = AudioPlayer();
/// await player.setUrl('https://foo.com/bar.mp3');
/// player.play();
/// await player.pause();
/// await player.setClip(start: Duration(seconds: 10), end: Duration(seconds: 20));
/// await player.play();
/// await player.setUrl('https://foo.com/baz.mp3');
/// await player.seek(Duration(minutes: 5));
/// player.play();
/// await player.pause();
/// await player.dispose();
/// ```
///
/// You must call [dispose] to release the resources used by this player,
/// including any temporary files created to cache assets.
class AudioPlayer {
/// The user agent to set on all HTTP requests.
final String? _userAgent;
/// The audio loading/buffering configuration forwarded to the platform
/// implementation, or `null` for the platform defaults.
final AudioLoadConfiguration? _audioLoadConfiguration;
/// This is `true` when the audio player needs to engage the native platform
/// side of the plugin to decode or play audio, and is `false` when the native
/// resources are not needed (i.e. after initial instantiation and after [stop]).
bool _active = false;
/// This is set to [_nativePlatform] when [_active] is `true` and
/// [_idlePlatform] otherwise.
late Future<AudioPlayerPlatform> _platform;
/// Reflects the current platform immediately after it is set.
AudioPlayerPlatform? _platformValue;
/// The interface to the native portion of the plugin. This will be disposed
/// and set to `null` when not in use.
Future<AudioPlayerPlatform>? _nativePlatform;
/// A pure Dart implementation of the platform interface for use when the
/// native platform is not needed.
_IdleAudioPlayer? _idlePlatform;
/// The subscription to the event channel of the current platform
/// implementation. When switching between active and inactive modes, this is
/// used to cancel the subscription to the previous platform's events and
/// subscribe to the new platform's events.
StreamSubscription? _playbackEventSubscription;
/// The subscription to the data event channel of the current platform
/// implementation. When switching between active and inactive modes, this is
/// used to cancel the subscription to the previous platform's events and
/// subscribe to the new platform's events.
StreamSubscription? _playerDataSubscription;
/// Unique (UUID v4) identifier for this player instance.
final String _id;
/// Local HTTP proxy used to forward requests that need extra headers (e.g.
/// a custom user agent) — see the constructor documentation.
final _proxy = _ProxyHttpServer();
/// The currently set audio source, or `null` if none has been set.
AudioSource? _audioSource;
/// Registry of audio sources belonging to this player, keyed by source id
/// (see [_registerAudioSource]).
final Map<String, AudioSource> _audioSources = {};
/// Whether [dispose] has been called; public methods no-op once set.
bool _disposed = false;
/// Pending initial position/index to apply on the next load, if any.
_InitialSeekValues? _initialSeekValues;
final AudioPipeline _audioPipeline;
/// The most recent playback event; also broadcast via [_playbackEventSubject].
PlaybackEvent _playbackEvent = PlaybackEvent();
final _playbackEventSubject = BehaviorSubject<PlaybackEvent>(sync: true);
/// Completes with the duration of the current audio once known.
Future<Duration?>? _durationFuture;
// Subjects backing the public streams; each BehaviorSubject replays its
// latest value to new listeners.
final _durationSubject = BehaviorSubject<Duration?>();
final _processingStateSubject = BehaviorSubject<ProcessingState>();
final _playingSubject = BehaviorSubject.seeded(false);
final _volumeSubject = BehaviorSubject.seeded(1.0);
final _speedSubject = BehaviorSubject.seeded(1.0);
final _pitchSubject = BehaviorSubject.seeded(1.0);
final _skipSilenceEnabledSubject = BehaviorSubject.seeded(false);
final _bufferedPositionSubject = BehaviorSubject<Duration>();
final _icyMetadataSubject = BehaviorSubject<IcyMetadata?>();
final _playerStateSubject = BehaviorSubject<PlayerState>();
final _sequenceSubject = BehaviorSubject<List<IndexedAudioSource>?>();
final _shuffleIndicesSubject = BehaviorSubject<List<int>?>();
// Inverse of the shuffle permutation: maps a sequence index to its position
// within the shuffle indices. Kept in sync by [_updateShuffleIndices].
final _shuffleIndicesInv = <int>[];
final _currentIndexSubject = BehaviorSubject<int?>(sync: true);
final _sequenceStateSubject = BehaviorSubject<SequenceState?>();
final _loopModeSubject = BehaviorSubject.seeded(LoopMode.off);
final _shuffleModeEnabledSubject = BehaviorSubject.seeded(false);
final _androidAudioSessionIdSubject = BehaviorSubject<int?>();
// Created lazily by [positionStream].
// ignore: close_sinks
BehaviorSubject<Duration>? _positionSubject;
bool _automaticallyWaitsToMinimizeStalling = true;
bool _canUseNetworkResourcesForLiveStreamingWhilePaused = false;
double _preferredPeakBitRate = 0;
/// Whether playback was interrupted (e.g. by a phone call) while playing,
/// so that it can be resumed when the interruption ends.
bool _playInterrupted = false;
bool _platformLoading = false;
AndroidAudioAttributes? _androidAudioAttributes;
final bool _androidApplyAudioAttributes;
final bool _handleAudioSessionActivation;
/// Counts how many times [_setPlatformActive] is called.
int _activationCount = 0;
/// Creates an [AudioPlayer].
///
/// If [userAgent] is specified, it will be included in the header of all HTTP
/// requests on Android, iOS and macOS to identify your agent to the server.
/// If set, just_audio will create a cleartext local HTTP proxy on your device
/// to forward HTTP requests with headers included. If [userAgent] is not
/// specified, this will default to Apple's Core Audio user agent on iOS/macOS
/// and to just_audio's own user agent on Android. On Web, the browser will
/// override any specified user-agent string with its own.
///
/// The player will automatically pause/duck and resume/unduck when audio
/// interruptions occur (e.g. a phone call) or when headphones are unplugged.
/// If you wish to handle audio interruptions manually, set
/// [handleInterruptions] to `false` and interface directly with the audio
/// session via the [audio_session](https://pub.dev/packages/audio_session)
/// package. If you do not wish just_audio to automatically activate the audio
/// session when playing audio, set [handleAudioSessionActivation] to `false`.
/// If you do not want just_audio to respect the global
/// [AndroidAudioAttributes] configured by audio_session, set
/// [androidApplyAudioAttributes] to `false`.
///
/// The default audio loading and buffering behaviour can be configured via
/// the [audioLoadConfiguration] parameter.
AudioPlayer({
String? userAgent,
bool handleInterruptions = true,
bool androidApplyAudioAttributes = true,
bool handleAudioSessionActivation = true,
AudioLoadConfiguration? audioLoadConfiguration,
AudioPipeline? audioPipeline,
}) : _id = _uuid.v4(),
_userAgent = userAgent,
_androidApplyAudioAttributes =
androidApplyAudioAttributes && _isAndroid(),
_handleAudioSessionActivation = handleAudioSessionActivation,
_audioLoadConfiguration = audioLoadConfiguration,
_audioPipeline = audioPipeline ?? AudioPipeline() {
// Attach the audio pipeline to this player.
_audioPipeline._setup(this);
// Mirror the configured Darwin stalling behaviour locally so the
// corresponding getter reflects it before the platform is created.
if (_audioLoadConfiguration?.darwinLoadControl != null) {
_automaticallyWaitsToMinimizeStalling = _audioLoadConfiguration!
.darwinLoadControl!.automaticallyWaitsToMinimizeStalling;
}
// Seed the event subject with the initial (idle) playback event.
_playbackEventSubject.add(_playbackEvent);
// Derive each single-property stream from the playback event stream.
// Errors are deliberately swallowed in these derived streams; they remain
// observable on [playbackEventStream] itself.
_processingStateSubject.addStream(playbackEventStream
.map((event) => event.processingState)
.distinct()
.handleError((Object err, StackTrace stackTrace) {/* noop */}));
_bufferedPositionSubject.addStream(playbackEventStream
.map((event) => event.bufferedPosition)
.distinct()
.handleError((Object err, StackTrace stackTrace) {/* noop */}));
_icyMetadataSubject.addStream(playbackEventStream
.map((event) => event.icyMetadata)
.distinct()
.handleError((Object err, StackTrace stackTrace) {/* noop */}));
_currentIndexSubject.addStream(playbackEventStream
.map((event) => event.currentIndex)
.distinct()
.handleError((Object err, StackTrace stackTrace) {/* noop */}));
_androidAudioSessionIdSubject.addStream(playbackEventStream
.map((event) => event.androidAudioSessionId)
.distinct()
.handleError((Object err, StackTrace stackTrace) {/* noop */}));
// Combine the component streams into a single [SequenceState] stream.
_sequenceStateSubject.addStream(Rx.combineLatest5<List<IndexedAudioSource>?,
List<int>?, int?, bool, LoopMode, SequenceState?>(
sequenceStream,
shuffleIndicesStream,
currentIndexStream,
shuffleModeEnabledStream,
loopModeStream,
(sequence, shuffleIndices, currentIndex, shuffleModeEnabled, loopMode) {
if (sequence == null) return null;
if (shuffleIndices == null) return null;
currentIndex ??= 0;
// Clamp currentIndex into the valid range of the sequence.
currentIndex = max(min(sequence.length - 1, max(0, currentIndex)), 0);
return SequenceState(
sequence,
currentIndex,
shuffleIndices,
shuffleModeEnabled,
loopMode,
);
},
).distinct().handleError((Object err, StackTrace stackTrace) {/* noop */}));
// Combine playing + processing state into the public [PlayerState] stream.
_playerStateSubject.addStream(
Rx.combineLatest2<bool, PlaybackEvent, PlayerState>(
playingStream,
playbackEventStream,
(playing, event) => PlayerState(playing, event.processingState))
.distinct()
.handleError((Object err, StackTrace stackTrace) {/* noop */}));
_shuffleModeEnabledSubject.add(false);
_loopModeSubject.add(LoopMode.off);
// Start in the idle (non-native) platform state; errors during this
// initial switch are ignored.
_setPlatformActive(false, force: true)?.catchError((dynamic e) {});
_sequenceSubject.add(null);
// Respond to changes to AndroidAudioAttributes configuration.
if (androidApplyAudioAttributes && _isAndroid()) {
AudioSession.instance.then((audioSession) {
audioSession.configurationStream
.map((conf) => conf.androidAudioAttributes)
.where((attributes) => attributes != null)
.cast<AndroidAudioAttributes>()
.distinct()
.listen(setAndroidAudioAttributes);
});
}
if (handleInterruptions) {
AudioSession.instance.then((session) {
// Pause when headphones are unplugged ("becoming noisy").
session.becomingNoisyEventStream.listen((_) {
pause();
});
session.interruptionEventStream.listen((event) {
if (event.begin) {
switch (event.type) {
case AudioInterruptionType.duck:
// Duck events only occur on Android.
assert(_isAndroid());
// NOTE(review): volume is only halved for game usage here;
// other usages appear to rely on the OS to duck — confirm.
if (session.androidAudioAttributes!.usage ==
AndroidAudioUsage.game) {
setVolume(volume / 2);
}
_playInterrupted = false;
break;
case AudioInterruptionType.pause:
case AudioInterruptionType.unknown:
if (playing) {
pause();
// Although pause is async and sets _playInterrupted = false,
// this is done in the sync portion.
_playInterrupted = true;
}
break;
}
} else {
// Interruption ended: undo ducking / resume if we paused above.
switch (event.type) {
case AudioInterruptionType.duck:
assert(_isAndroid());
setVolume(min(1.0, volume * 2));
_playInterrupted = false;
break;
case AudioInterruptionType.pause:
if (_playInterrupted) play();
_playInterrupted = false;
break;
case AudioInterruptionType.unknown:
_playInterrupted = false;
break;
}
}
});
});
}
// Clean up the cache directory format used by older just_audio versions.
_removeOldAssetCacheDir();
}
/// Old versions of just_audio used an asset caching system that created a
/// separate cache file per asset per player instance, and was highly
/// dependent on the app calling [dispose] to clean up afterwards. If the app
/// is upgrading from an old version of just_audio, this will delete the old
/// cache directory.
///
/// Best effort: all failures are swallowed (a failed deletion is logged in
/// debug builds) since a leftover cache is harmless.
Future<void> _removeOldAssetCacheDir() async {
  // The web has no file system to clean up.
  if (kIsWeb) return;
  try {
    final oldAssetCacheDir = Directory(p.join(
        (await getTemporaryDirectory()).path, 'just_audio_asset_cache'));
    if (oldAssetCacheDir.existsSync()) {
      try {
        oldAssetCacheDir.deleteSync(recursive: true);
      } catch (e) {
        // debugPrint rather than print: it is throttled and compiled out of
        // release builds (avoid_print lint).
        debugPrint("Failed to delete old asset cache dir: $e");
      }
    }
  } catch (e) {
    // There is no temporary directory for this platform.
  }
}
/// The previously set [AudioSource], if any.
AudioSource? get audioSource => _audioSource;
/// The latest [PlaybackEvent].
PlaybackEvent get playbackEvent => _playbackEvent;
/// A stream of [PlaybackEvent]s.
Stream<PlaybackEvent> get playbackEventStream => _playbackEventSubject.stream;
/// The duration of the current audio or `null` if unknown.
Duration? get duration => _playbackEvent.duration;
/// A future completing with the duration of the current audio, or `null` if
/// unknown.
Future<Duration?>? get durationFuture => _durationFuture;
/// A stream of the duration of the current audio.
Stream<Duration?> get durationStream => _durationSubject.stream;
/// The current [ProcessingState].
ProcessingState get processingState => _playbackEvent.processingState;
/// A stream of [ProcessingState]s.
Stream<ProcessingState> get processingStateStream =>
_processingStateSubject.stream;
/// Whether the player is playing.
bool get playing => _playingSubject.nvalue!;
/// A stream of changing [playing] states.
Stream<bool> get playingStream => _playingSubject.stream;
/// The current volume of the player.
double get volume => _volumeSubject.nvalue!;
/// A stream of [volume] changes.
Stream<double> get volumeStream => _volumeSubject.stream;
/// The current speed of the player.
double get speed => _speedSubject.nvalue!;
/// A stream of current speed values.
Stream<double> get speedStream => _speedSubject.stream;
/// The current pitch factor of the player.
double get pitch => _pitchSubject.nvalue!;
/// A stream of current pitch factor values.
Stream<double> get pitchStream => _pitchSubject.stream;
/// Whether silence skipping is currently enabled.
bool get skipSilenceEnabled => _skipSilenceEnabledSubject.nvalue!;
/// A stream of [skipSilenceEnabled] values.
Stream<bool> get skipSilenceEnabledStream =>
_skipSilenceEnabledSubject.stream;
/// The position up to which buffered audio is available.
Duration get bufferedPosition =>
_bufferedPositionSubject.nvalue ?? Duration.zero;
/// A stream of buffered positions.
Stream<Duration> get bufferedPositionStream =>
_bufferedPositionSubject.stream;
/// The latest ICY metadata received through the audio source, or `null` if no
/// metadata is available.
IcyMetadata? get icyMetadata => _playbackEvent.icyMetadata;
/// A stream of ICY metadata received through the audio source.
Stream<IcyMetadata?> get icyMetadataStream => _icyMetadataSubject.stream;
/// The current player state containing only the processing and playing
/// states.
PlayerState get playerState =>
_playerStateSubject.nvalue ?? PlayerState(false, ProcessingState.idle);
/// A stream of [PlayerState]s.
Stream<PlayerState> get playerStateStream => _playerStateSubject.stream;
/// The current sequence of indexed audio sources, or `null` if no audio
/// source is set.
List<IndexedAudioSource>? get sequence => _sequenceSubject.nvalue;
/// A stream broadcasting the current sequence of indexed audio sources.
Stream<List<IndexedAudioSource>?> get sequenceStream =>
_sequenceSubject.stream;
/// The indices of [sequence] in shuffled play order, or `null` if no audio
/// source is set.
List<int>? get shuffleIndices => _shuffleIndicesSubject.nvalue;
/// A stream broadcasting the current shuffle indices.
Stream<List<int>?> get shuffleIndicesStream => _shuffleIndicesSubject.stream;
//List<IndexedAudioSource> get _effectiveSequence =>
// shuffleModeEnabled ? shuffleIndices : sequence;
/// The index of the current item, or `null` if either no audio source is set,
/// or the current audio source has an empty sequence.
int? get currentIndex => _currentIndexSubject.nvalue;
/// A stream broadcasting the current item.
Stream<int?> get currentIndexStream => _currentIndexSubject.stream;
/// The current [SequenceState], or `null` if either [sequence] or
/// [currentIndex] is `null`.
SequenceState? get sequenceState => _sequenceStateSubject.nvalue;
/// A stream broadcasting the current [SequenceState].
Stream<SequenceState?> get sequenceStateStream =>
_sequenceStateSubject.stream;
/// Whether there is another item after the current index.
bool get hasNext => nextIndex != null;
/// Whether there is another item before the current index.
bool get hasPrevious => previousIndex != null;
/// Returns [shuffleIndices] if [shuffleModeEnabled] is `true`, otherwise
/// returns the unshuffled indices. When no current audio source is set, this
/// returns `null`.
List<int>? get effectiveIndices {
if (shuffleIndices == null || sequence == null) return null;
return shuffleModeEnabled
? shuffleIndices
: List.generate(sequence!.length, (i) => i);
}
/// The inverse of [effectiveIndices]: maps an index in [sequence] to its
/// position in play order. Returns `null` when no audio source is set.
List<int>? get _effectiveIndicesInv {
if (shuffleIndices == null || sequence == null) return null;
return shuffleModeEnabled
? _shuffleIndicesInv
: List.generate(sequence!.length, (i) => i);
}
/// The index of the next item to be played, or `null` if there is no next
/// item.
int? get nextIndex => _getRelativeIndex(1);

/// The index of the previous item in play order, or `null` if there is no
/// previous item.
int? get previousIndex => _getRelativeIndex(-1);

/// Returns the index of the item [offset] steps away from the current item
/// in play order, or `null` if no such item exists.
int? _getRelativeIndex(int offset) {
  if (_audioSource == null || currentIndex == null) return null;
  // When looping a single item, the relative item is always the current one.
  if (loopMode == LoopMode.one) return currentIndex;
  final indices = effectiveIndices;
  if (indices == null || indices.isEmpty) return null;
  final inverse = _effectiveIndicesInv!;
  final current = currentIndex!;
  if (current >= inverse.length) return null;
  var targetPos = inverse[current] + offset;
  if (targetPos < 0 || targetPos >= indices.length) {
    // Out of range: wrap around in LoopMode.all, otherwise there is no item.
    if (loopMode != LoopMode.all) return null;
    // Dart's % always yields a non-negative result for a positive modulus.
    targetPos %= indices.length;
  }
  return indices[targetPos];
}
/// The current loop mode.
LoopMode get loopMode => _loopModeSubject.nvalue!;
/// A stream of [LoopMode]s.
Stream<LoopMode> get loopModeStream => _loopModeSubject.stream;
/// Whether shuffle mode is currently enabled.
bool get shuffleModeEnabled => _shuffleModeEnabledSubject.nvalue!;
/// A stream of the shuffle mode status.
Stream<bool> get shuffleModeEnabledStream =>
_shuffleModeEnabledSubject.stream;
/// The current Android AudioSession ID or `null` if not set.
int? get androidAudioSessionId => _playbackEvent.androidAudioSessionId;
/// Broadcasts the current Android AudioSession ID or `null` if not set.
Stream<int?> get androidAudioSessionIdStream =>
_androidAudioSessionIdSubject.stream;
/// Whether the player should automatically delay playback in order to
/// minimize stalling. (iOS 10.0 or later only)
bool get automaticallyWaitsToMinimizeStalling =>
_automaticallyWaitsToMinimizeStalling;
/// Whether the player can use the network for live streaming while paused on
/// iOS/macOS.
bool get canUseNetworkResourcesForLiveStreamingWhilePaused =>
_canUseNetworkResourcesForLiveStreamingWhilePaused;
/// The preferred peak bit rate (in bits per second) of bandwidth usage on iOS/macOS.
// NOTE(review): the field is initialized to 0 — presumably meaning "no
// preference"; confirm against the platform interface documentation.
double get preferredPeakBitRate => _preferredPeakBitRate;
/// The current position of the player.
///
/// While playing in the ready state, this extrapolates from the most recent
/// playback event; otherwise it reports the last known position directly.
Duration get position {
  // While not actively playing, the last reported position is exact.
  if (!playing || processingState != ProcessingState.ready) {
    return _playbackEvent.updatePosition;
  }
  // Extrapolate from the last event, scaled by the playback speed.
  final elapsed = DateTime.now().difference(_playbackEvent.updateTime);
  final extrapolated = _playbackEvent.updatePosition + elapsed * speed;
  final total = _playbackEvent.duration;
  // Clamp to the total duration when it is known.
  if (total != null && extrapolated > total) return total;
  return extrapolated;
}
/// A stream tracking the current position of this player, suitable for
/// animating a seek bar. To ensure a smooth animation, this stream emits
/// values more frequently on short items where the seek bar moves more
/// quickly, and less frequenly on long items where the seek bar moves more
/// slowly. The interval between each update will be no quicker than once
/// every 16ms and no slower than once every 200ms.
///
/// See [createPositionStream] for more control over the stream parameters.
Stream<Duration> get positionStream {
// Lazily create the backing subject on first access.
if (_positionSubject == null) {
_positionSubject = BehaviorSubject<Duration>();
// If the player is already disposed, return a subject that never emits
// rather than starting a new timer-backed position stream.
if (!_disposed) {
_positionSubject!.addStream(createPositionStream(
steps: 800,
minPeriod: Duration(milliseconds: 16),
maxPeriod: Duration(milliseconds: 200)));
}
}
return _positionSubject!.stream;
}
/// Creates a new stream periodically tracking the current position of this
/// player. The stream will aim to emit [steps] position updates from the
/// beginning to the end of the current audio source, at intervals of
/// [duration] / [steps]. This interval will be clipped between [minPeriod]
/// and [maxPeriod]. This stream will not emit values while audio playback is
/// paused or stalled.
///
/// Note: each time this method is called, a new stream is created. If you
/// intend to use this stream multiple times, you should hold a reference to
/// the returned stream and close it once you are done.
Stream<Duration> createPositionStream({
int steps = 800,
Duration minPeriod = const Duration(milliseconds: 200),
Duration maxPeriod = const Duration(milliseconds: 200),
}) {
assert(minPeriod <= maxPeriod);
assert(minPeriod > Duration.zero);
// Broadcast so that multiple listeners may subscribe to the result.
final controller = StreamController<Duration>.broadcast();
// When disposed, return a stream that never emits.
if (_disposed) return controller.stream;
Duration duration() => this.duration ?? Duration.zero;
// The emission period: duration/steps, clipped to [minPeriod, maxPeriod].
Duration step() {
var s = duration() ~/ steps;
if (s < minPeriod) s = minPeriod;
if (s > maxPeriod) s = maxPeriod;
return s;
}
Timer? currentTimer;
StreamSubscription? durationSubscription;
StreamSubscription? playbackEventSubscription;
// Timer callback: emits the current position while playing, and tears
// everything down once the controller or the player itself is closed.
void yieldPosition(Timer timer) {
if (controller.isClosed) {
timer.cancel();
durationSubscription?.cancel();
playbackEventSubscription?.cancel();
return;
}
if (_durationSubject.isClosed) {
timer.cancel();
durationSubscription?.cancel();
playbackEventSubscription?.cancel();
// This will in turn close _positionSubject.
controller.close();
return;
}
if (playing) {
controller.add(position);
}
}
// Restart the timer whenever the duration (and hence the step) changes.
durationSubscription = durationStream.listen((duration) {
currentTimer?.cancel();
currentTimer = Timer.periodic(step(), yieldPosition);
}, onError: (Object e, StackTrace stackTrace) {});
// Also emit immediately on each playback event (e.g. after a seek).
playbackEventSubscription = playbackEventStream.listen((event) {
controller.add(position);
}, onError: (Object e, StackTrace stackTrace) {});
return controller.stream.distinct();
}
/// Convenience method to set the audio source to a URL with optional headers,
/// preloaded by default, with an initial position of zero by default.
/// If headers are set, just_audio will create a cleartext local HTTP proxy on
/// your device to forward HTTP requests with headers included.
///
/// This is equivalent to:
///
/// ```
/// setAudioSource(AudioSource.uri(Uri.parse(url), headers: headers),
///     initialPosition: Duration.zero, preload: true);
/// ```
///
/// See [setAudioSource] for a detailed explanation of the options.
Future<Duration?> setUrl(
  String url, {
  Map<String, String>? headers,
  Duration? initialPosition,
  bool preload = true,
}) {
  final source = AudioSource.uri(Uri.parse(url), headers: headers);
  return setAudioSource(source,
      initialPosition: initialPosition, preload: preload);
}
/// Convenience method to set the audio source to a file, preloaded by
/// default, with an initial position of zero by default.
///
/// ```
/// setAudioSource(AudioSource.uri(Uri.file(filePath)),
///     initialPosition: Duration.zero, preload: true);
/// ```
///
/// See [setAudioSource] for a detailed explanation of the options.
Future<Duration?> setFilePath(
  String filePath, {
  Duration? initialPosition,
  bool preload = true,
}) {
  final source = AudioSource.uri(Uri.file(filePath));
  return setAudioSource(source,
      initialPosition: initialPosition, preload: preload);
}
/// Convenience method to set the audio source to an asset, preloaded by
/// default, with an initial position of zero by default.
///
/// ```
/// setAudioSource(AudioSource.uri(Uri.parse('asset:///$assetPath')),
///     initialPosition: Duration.zero, preload: true);
/// ```
///
/// See [setAudioSource] for a detailed explanation of the options.
Future<Duration?> setAsset(
  String assetPath, {
  bool preload = true,
  Duration? initialPosition,
}) {
  final source = AudioSource.uri(Uri.parse('asset:///$assetPath'));
  return setAudioSource(source,
      initialPosition: initialPosition, preload: preload);
}
/// Sets the source from which this audio player should fetch audio.
///
/// By default, this method will immediately start loading audio and return
/// its duration as soon as it is known, or `null` if that information is
/// unavailable. Set [preload] to `false` if you would prefer to delay loading
/// until some later point, either via an explicit call to [load] or via a
/// call to [play] which implicitly loads the audio. If [preload] is `false`,
/// a `null` duration will be returned. Note that the [preload] option will
/// automatically be assumed as `true` if `playing` is currently `true`.
///
/// Optionally specify [initialPosition] and [initialIndex] to seek to an
/// initial position within a particular item (defaulting to position zero of
/// the first item).
///
/// When [preload] is `true`, this method may throw:
///
/// * [Exception] if no audio source has been previously set.
/// * [PlayerException] if the audio source was unable to be loaded.
/// * [PlayerInterruptedException] if another audio source was loaded before
/// this call completed or the player was stopped or disposed of before the
/// call completed.
Future<Duration?> setAudioSource(
AudioSource source, {
bool preload = true,
int? initialIndex,
Duration? initialPosition,
}) async {
if (_disposed) return null;
// NOTE(review): the old source is cleared before the reset event is
// broadcast — presumably so observers never pair the new event with the
// old source; confirm the ordering is intentional before changing it.
_audioSource = null;
// Record the requested starting point for the upcoming load.
_initialSeekValues =
_InitialSeekValues(position: initialPosition, index: initialIndex);
// Reset the playback event to reflect the requested initial index/position.
_playbackEventSubject.add(_playbackEvent = PlaybackEvent(
currentIndex: initialIndex ?? 0,
updatePosition: initialPosition ?? Duration.zero));
_audioSource = source;
_broadcastSequence();
Duration? duration;
// A prior play request (possibly made before a source was set) forces an
// immediate load.
if (playing) preload = true;
if (preload) {
duration = await load();
} else {
// Not preloading: switch to the idle platform; errors are ignored here.
await _setPlatformActive(false)?.catchError((dynamic e) {});
}
return duration;
}
/// Starts loading the current audio source and returns the audio duration as
/// soon as it is known, or `null` if unavailable.
///
/// This method throws:
///
/// * [Exception] if no audio source has been previously set.
/// * [PlayerException] if the audio source was unable to be loaded.
/// * [PlayerInterruptedException] if another call to [load] happened before
/// this call completed or the player was stopped or disposed of before the
/// call could complete.
Future<Duration?> load() async {
  if (_disposed) return null;
  if (_audioSource == null) {
    throw Exception('Must set AudioSource before loading');
  }
  if (!_active) {
    // Activating the platform implicitly loads the current audio source.
    return await _setPlatformActive(true);
  }
  // Consume any pending initial seek values exactly once.
  final seekValues = _initialSeekValues;
  _initialSeekValues = null;
  return await _load(await _platform, _audioSource!,
      initialSeekValues: seekValues);
}
/// Broadcasts the current audio source's sequence and refreshes the shuffle
/// index mappings to match.
void _broadcastSequence() {
// TODO: update currentIndex first if it's out of range as a result of
// removing items from the playlist.
_sequenceSubject.add(_audioSource?.sequence);
_updateShuffleIndices();
}
/// Broadcasts the current shuffle indices and rebuilds [_shuffleIndicesInv]
/// as their inverse permutation (inv[shuffle[i]] == i).
void _updateShuffleIndices() {
  _shuffleIndicesSubject.add(_audioSource?.shuffleIndices);
  final length = shuffleIndices?.length ?? 0;
  // Resize the inverse mapping in place to match the shuffle indices.
  while (_shuffleIndicesInv.length > length) {
    _shuffleIndicesInv.removeLast();
  }
  while (_shuffleIndicesInv.length < length) {
    _shuffleIndicesInv.add(0);
  }
  // Rebuild the inverse permutation.
  for (var i = 0; i < length; i++) {
    _shuffleIndicesInv[shuffleIndices![i]] = i;
  }
}
/// Registers [source] in this player's source registry, keyed by its id.
void _registerAudioSource(AudioSource source) {
_audioSources[source._id] = source;
}
/// Loads [source] on [platform], optionally applying [initialSeekValues],
/// and returns its duration once known.
///
/// Throws [PlayerInterruptedException] if the platform is switched while
/// loading, and [PlayerException] if the platform reports a load failure.
Future<Duration?> _load(AudioPlayerPlatform platform, AudioSource source,
{_InitialSeekValues? initialSeekValues}) async {
// Snapshot the activation counter so we can detect whether the platform
// was switched (activated/deactivated) while the load was in flight.
final activationNumber = _activationCount;
void checkInterruption() {
if (_activationCount != activationNumber) {
// the platform has changed since we started loading, so abort.
throw PlatformException(code: 'abort', message: 'Loading interrupted');
}
}
try {
await source._setup(this);
checkInterruption();
source._shuffle(initialIndex: initialSeekValues?.index ?? 0);
_broadcastSequence();
_durationFuture = platform
.load(LoadRequest(
audioSourceMessage: source._toMessage(),
initialPosition: initialSeekValues?.position,
initialIndex: initialSeekValues?.index,
))
.then((response) => response.duration);
final duration = await _durationFuture;
checkInterruption();
_durationSubject.add(duration);
if (platform != _platformValue) {
// the platform has changed since we started loading, so abort.
throw PlatformException(code: 'abort', message: 'Loading interrupted');
}
// Wait for loading state to pass.
await processingStateStream
.firstWhere((state) => state != ProcessingState.loading);
checkInterruption();
return duration;
} on PlatformException catch (e) {
// Numeric platform error codes map directly onto [PlayerException].
try {
throw PlayerException(int.parse(e.code), e.message);
} on FormatException catch (_) {
// Non-numeric codes: 'abort' signals an interruption; anything else
// becomes a generic [PlayerException] with a sentinel error code.
if (e.code == 'abort') {
throw PlayerInterruptedException(e.message);
} else {
throw PlayerException(9999999, e.message);
}
}
}
}
  /// Clips the current [AudioSource] to the given [start] and [end]
  /// timestamps. If [start] is null, it will be reset to the start of the
  /// original [AudioSource]. If [end] is null, it will be reset to the end of
  /// the original [AudioSource]. This method cannot be called from the
  /// [ProcessingState.idle] state.
  Future<Duration?> setClip({Duration? start, Duration? end}) async {
    if (_disposed) return null;
    // Ensure the platform is active; any activation error will also surface
    // through the _load below, so it is swallowed here.
    _setPlatformActive(true)?.catchError((dynamic e) {});
    final duration = await _load(
        await _platform,
        // Passing no bounds restores the original (unclipped) source.
        start == null && end == null
            ? _audioSource!
            // NOTE(review): this cast assumes the current source is a
            // UriAudioSource; clipping any other source type would throw a
            // TypeError here — confirm intended.
            : ClippingAudioSource(
                child: _audioSource as UriAudioSource,
                start: start,
                end: end,
              ));
    return duration;
  }
  /// Tells the player to play audio at the current [speed] and [volume] as soon
  /// as an audio source is loaded and ready to play. If an audio source has
  /// been set but not preloaded, this method will also initiate the loading.
  /// The [Future] returned by this method completes when the playback completes
  /// or is paused or stopped. If the player is already playing, this method
  /// completes immediately.
  ///
  /// This method causes [playing] to become true, and it will remain true
  /// until [pause] or [stop] is called. This means that if playback completes,
  /// and then you [seek] to an earlier position in the audio, playback will
  /// continue playing from that position. If you instead wish to [pause] or
  /// [stop] playback on completion, you can call either method as soon as
  /// [processingState] becomes [ProcessingState.completed] by listening to
  /// [processingStateStream].
  ///
  /// This method activates the audio session before playback, and will do
  /// nothing if activation of the audio session fails for any reason.
  Future<void> play() async {
    if (_disposed) return;
    if (playing) return;
    _playInterrupted = false;
    // Broadcast to clients immediately, but revert to false if we fail to
    // activate the audio session. This allows setAudioSource to be aware of a
    // prior play request.
    _playbackEvent = _playbackEvent.copyWith(
      updatePosition: position,
      updateTime: DateTime.now(),
    );
    _playingSubject.add(true);
    _playbackEventSubject.add(_playbackEvent);
    // Completed (or completed with an error) once the platform play request
    // finishes.
    final playCompleter = Completer<dynamic>();
    final audioSession = await AudioSession.instance;
    if (!_handleAudioSessionActivation || await audioSession.setActive(true)) {
      // Re-check playing: a pause()/stop() may have raced in while we were
      // awaiting audio session activation above.
      if (!playing) return;
      // TODO: rewrite this to more cleanly handle simultaneous load/play
      // requests which each may result in platform play requests.
      final requireActive = _audioSource != null;
      if (requireActive) {
        if (_active) {
          // If the native platform is already active, send it a play request.
          // NOTE: If a load() request happens simultaneously, this may result
          // in two play requests being sent. The platform implementation should
          // ignore the second play request since it is already playing.
          _sendPlayRequest(await _platform, playCompleter);
        } else {
          // If the native platform wasn't already active, activating it will
          // implicitly restore the playing state and send a play request.
          _setPlatformActive(true, playCompleter: playCompleter)
              ?.catchError((dynamic e) {});
        }
      }
      // NOTE(review): when _audioSource is null, nothing visible here
      // completes playCompleter, so the await below may never return —
      // confirm whether callers rely on this.
    } else {
      // Revert if we fail to activate the audio session.
      _playingSubject.add(false);
    }
    await playCompleter.future;
  }
/// Pauses the currently playing media. This method does nothing if
/// ![playing].
Future<void> pause() async {
if (_disposed) return;
if (!playing) return;
//_setPlatformActive(true);
_playInterrupted = false;
// Update local state immediately so that queries aren't surprised.
_playbackEvent = _playbackEvent.copyWith(
updatePosition: position,
updateTime: DateTime.now(),
);
_playingSubject.add(false);
_playbackEventSubject.add(_playbackEvent);
// TODO: perhaps modify platform side to ensure new state is broadcast
// before this method returns.
await (await _platform).pause(PauseRequest());
}
Future<void> _sendPlayRequest(
AudioPlayerPlatform platform, Completer<void>? playCompleter) async {
try {
if (!playing) return; // defensive
await platform.play(PlayRequest());
playCompleter?.complete();
} catch (e, stackTrace) {
playCompleter?.completeError(e, stackTrace);
}
}
/// Stops playing audio and releases decoders and other native platform
/// resources needed to play audio. The current audio source state will be
/// retained and playback can be resumed at a later point in time.
///
/// Use [stop] if the app is done playing audio for now but may need still
/// want to resume playback later. Use [dispose] when the app is completely
/// finished playing audio. Use [pause] instead if you would like to keep the
/// decoders alive so that the app can quickly resume audio playback.
Future<void> stop() async {
if (_disposed) return;
final future = _setPlatformActive(false)?.catchError((dynamic e) {});
_playInterrupted = false;
// Update local state immediately so that queries aren't surprised.
_playingSubject.add(false);
await future;
}
/// Sets the volume of this player, where 1.0 is normal volume.
Future<void> setVolume(final double volume) async {
if (_disposed) return;
_volumeSubject.add(volume);
await (await _platform).setVolume(SetVolumeRequest(volume: volume));
}
/// Sets whether silence should be skipped in audio playback. (Currently
/// Android only).
Future<void> setSkipSilenceEnabled(bool enabled) async {
if (_disposed) return;
final previouslyEnabled = skipSilenceEnabled;
if (enabled == previouslyEnabled) return;
_skipSilenceEnabledSubject.add(enabled);
try {
await (await _platform)
.setSkipSilence(SetSkipSilenceRequest(enabled: enabled));
} catch (e) {
_skipSilenceEnabledSubject.add(previouslyEnabled);
rethrow;
}
}
/// Sets the playback speed to use when [playing] is `true`, where 1.0 is
/// normal speed. Note that values in excess of 1.0 may result in stalls if
/// the playback speed is faster than the player is able to downloaded the
/// audio.
Future<void> setSpeed(final double speed) async {
if (_disposed) return;
_playbackEvent = _playbackEvent.copyWith(
updatePosition: position,
updateTime: DateTime.now(),
);
_playbackEventSubject.add(_playbackEvent);
_speedSubject.add(speed);
await (await _platform).setSpeed(SetSpeedRequest(speed: speed));
}
/// Sets the factor by which pitch will be shifted.
Future<void> setPitch(final double pitch) async {
if (_disposed) return;
_playbackEvent = _playbackEvent.copyWith(
updatePosition: position,
updateTime: DateTime.now(),
);
_playbackEventSubject.add(_playbackEvent);
_pitchSubject.add(pitch);
await (await _platform).setPitch(SetPitchRequest(pitch: pitch));
}
/// Sets the [LoopMode]. Looping will be gapless on Android, iOS and macOS. On
/// web, there will be a slight gap at the loop point.
Future<void> setLoopMode(LoopMode mode) async {
if (_disposed) return;
_loopModeSubject.add(mode);
await (await _platform).setLoopMode(
SetLoopModeRequest(loopMode: LoopModeMessage.values[mode.index]));
}
/// Sets whether shuffle mode is enabled.
Future<void> setShuffleModeEnabled(bool enabled) async {
if (_disposed) return;
_shuffleModeEnabledSubject.add(enabled);
await (await _platform).setShuffleMode(SetShuffleModeRequest(
shuffleMode:
enabled ? ShuffleModeMessage.all : ShuffleModeMessage.none));
}
/// Recursively shuffles the children of the currently loaded [AudioSource].
/// Each [ConcatenatingAudioSource] will be shuffled according to its
/// configured [ShuffleOrder].
Future<void> shuffle() async {
if (_disposed) return;
if (_audioSource == null) return;
_audioSource!._shuffle(initialIndex: currentIndex);
_updateShuffleIndices();
await (await _platform).setShuffleOrder(
SetShuffleOrderRequest(audioSourceMessage: _audioSource!._toMessage()));
}
/// Sets automaticallyWaitsToMinimizeStalling for AVPlayer in iOS 10.0 or later, defaults to true.
/// Has no effect on Android clients
Future<void> setAutomaticallyWaitsToMinimizeStalling(
final bool automaticallyWaitsToMinimizeStalling) async {
if (_disposed) return;
_automaticallyWaitsToMinimizeStalling =