just_audio.dart 48 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
7127812791280128112821283128412851286128712881289129012911292129312941295129612971298129913001301130213031304130513061307130813091310131113121313131413151316131713181319132013211322132313241325132613271328132913301331133213331334133513361337133813391340134113421343134413451346134713481349135013511352135313541355135613571358135913601361136213631364136513661367136813691370137113721373137413751376137713781379138013811382138313841385138613871388138913901391139213931394139513961397139813991400140114021403140414051406140714081409141014111412141314141415141614171418141914201421142214231424142514261427142814291430143114321433143414351436143714381439144014411442144314441445144614471448
  1. import 'dart:async';
  2. import 'dart:io';
  3. import 'dart:math';
  4. import 'package:audio_session/audio_session.dart';
  5. import 'package:flutter/foundation.dart';
  6. import 'package:flutter/services.dart';
  7. import 'package:flutter/widgets.dart';
  8. import 'package:path/path.dart' as p;
  9. import 'package:path_provider/path_provider.dart';
  10. import 'package:rxdart/rxdart.dart';
  11. import 'package:uuid/uuid.dart';
// Generates the unique per-player IDs used to name platform channels.
final _uuid = Uuid();
/// An object to manage playing audio from a URL, a local file or an asset.
  14. ///
  15. /// ```
  16. /// final player = AudioPlayer();
  17. /// await player.setUrl('https://foo.com/bar.mp3');
  18. /// player.play();
  19. /// await player.pause();
  20. /// await player.setClip(start: Duration(seconds: 10), end: Duration(seconds: 20));
  21. /// await player.play();
  22. /// await player.setUrl('https://foo.com/baz.mp3');
  23. /// await player.seek(Duration(minutes: 5));
  24. /// player.play();
  25. /// await player.pause();
  26. /// await player.dispose();
  27. /// ```
  28. ///
  29. /// You must call [dispose] to release the resources used by this player,
  30. /// including any temporary files created to cache assets.
  31. class AudioPlayer {
  // Shared bootstrap channel; only used to ask the platform to create a
  // player instance for a given id.
  static final _mainChannel = MethodChannel('com.ryanheise.just_audio.methods');

  /// Creates the platform-side player for [id] and returns the
  /// player-specific method channel to talk to it.
  static Future<MethodChannel> _init(String id) async {
    await _mainChannel.invokeMethod('init', [id]);
    return MethodChannel('com.ryanheise.just_audio.methods.$id');
  }
  // Player-specific method channel; resolved asynchronously by _init.
  final Future<MethodChannel> _channel;
  // Unique id generated by _uuid; also names the event channel.
  final String _id;
  // Local HTTP proxy, started lazily in _load when a source requires
  // request headers (not supported natively on all platforms).
  _ProxyHttpServer _proxy;
  // Decoded platform events; subscribed in the constructor.
  Stream<PlaybackEvent> _eventChannelStream;
  // The currently loaded source, or null if none/failed.
  AudioSource _audioSource;
  // All sources ever registered with this player, by id; disposed in dispose().
  Map<String, AudioSource> _audioSources = {};
  // Most recent event; the synchronous source of truth for most getters.
  PlaybackEvent _playbackEvent;
  StreamSubscription<PlaybackEvent> _eventChannelStreamSubscription;
  final _playbackEventSubject = BehaviorSubject<PlaybackEvent>();
  // Completes with the duration of the most recently loaded audio.
  Future<Duration> _durationFuture;
  final _durationSubject = BehaviorSubject<Duration>();
  // The subjects below are derived from playbackEventStream in the
  // constructor (via addStream), except where seeded/added directly.
  final _processingStateSubject = BehaviorSubject<ProcessingState>();
  final _playingSubject = BehaviorSubject.seeded(false);
  final _volumeSubject = BehaviorSubject.seeded(1.0);
  final _speedSubject = BehaviorSubject.seeded(1.0);
  final _bufferedPositionSubject = BehaviorSubject<Duration>();
  final _icyMetadataSubject = BehaviorSubject<IcyMetadata>();
  final _playerStateSubject = BehaviorSubject<PlayerState>();
  final _sequenceSubject = BehaviorSubject<List<IndexedAudioSource>>();
  final _currentIndexSubject = BehaviorSubject<int>();
  final _sequenceStateSubject = BehaviorSubject<SequenceState>();
  final _loopModeSubject = BehaviorSubject<LoopMode>();
  final _shuffleModeEnabledSubject = BehaviorSubject<bool>();
  final _androidAudioSessionIdSubject = BehaviorSubject<int>();
  // Lazily created by the positionStream getter.
  BehaviorSubject<Duration> _positionSubject;
  bool _automaticallyWaitsToMinimizeStalling = true;
  // True when an interruption paused playback, so we know to resume after.
  bool _playInterrupted = false;
  /// Creates an [AudioPlayer]. The player will automatically pause/duck and
  /// resume/unduck when audio interruptions occur (e.g. a phone call) or when
  /// headphones are unplugged. If you wish to handle audio interruptions
  /// manually, set [handleInterruptions] to `false` and interface directly
  /// with the audio session via the
  /// [audio_session](https://pub.dev/packages/audio_session) package.
  factory AudioPlayer({bool handleInterruptions = true}) =>
      AudioPlayer._internal(_uuid.v4(), handleInterruptions);

  AudioPlayer._internal(this._id, bool handleInterruptions)
      : _channel = _init(_id) {
    // Seed with an inert "none" event so getters work before any platform
    // event arrives.
    _playbackEvent = PlaybackEvent(
      processingState: ProcessingState.none,
      updatePosition: Duration.zero,
      updateTime: DateTime.now(),
      bufferedPosition: Duration.zero,
      duration: null,
      icyMetadata: null,
      currentIndex: null,
      androidAudioSessionId: null,
      qualityString: ''
    );
    _playbackEventSubject.add(_playbackEvent);
    // Decode raw maps from the per-player event channel into PlaybackEvents.
    _eventChannelStream = EventChannel('com.ryanheise.just_audio.events.$_id')
        .receiveBroadcastStream()
        .map((data) {
      try {
        //print("received raw event: $data");
        // A negative (or absent) duration from the platform means "unknown".
        final duration = (data['duration'] ?? -1) < 0
            ? null
            : Duration(milliseconds: data['duration']);
        _durationFuture = Future.value(duration);
        if (duration != _playbackEvent.duration) {
          _durationSubject.add(duration);
        }
        _playbackEvent = PlaybackEvent(
          processingState: ProcessingState.values[data['processingState']],
          updatePosition: Duration(milliseconds: data['updatePosition']),
          updateTime: DateTime.fromMillisecondsSinceEpoch(data['updateTime']),
          bufferedPosition: Duration(milliseconds: data['bufferedPosition']),
          duration: duration,
          icyMetadata: data['icyMetadata'] == null
              ? null
              : IcyMetadata.fromJson(data['icyMetadata']),
          currentIndex: data['currentIndex'],
          androidAudioSessionId: data['androidAudioSessionId'],
          qualityString: data['qualityString']
        );
        //print("created event object with state: ${_playbackEvent.state}");
        return _playbackEvent;
      } catch (e, stacktrace) {
        print("Error parsing event: $e");
        print("$stacktrace");
        rethrow;
      }
    });
    // Derive the narrower broadcast streams from the main event stream.
    // Errors are dropped here because they already surface on
    // playbackEventStream itself.
    _processingStateSubject.addStream(playbackEventStream
        .map((event) => event.processingState)
        .distinct()
        .handleError((err, stack) {/* noop */}));
    _bufferedPositionSubject.addStream(playbackEventStream
        .map((event) => event.bufferedPosition)
        .distinct()
        .handleError((err, stack) {/* noop */}));
    _icyMetadataSubject.addStream(playbackEventStream
        .map((event) => event.icyMetadata)
        .distinct()
        .handleError((err, stack) {/* noop */}));
    _currentIndexSubject.addStream(playbackEventStream
        .map((event) => event.currentIndex)
        .distinct()
        .handleError((err, stack) {/* noop */}));
    _androidAudioSessionIdSubject.addStream(playbackEventStream
        .map((event) => event.androidAudioSessionId)
        .distinct()
        .handleError((err, stack) {/* noop */}));
    // SequenceState combines sequence + current index, clamping the index
    // into the sequence's valid range.
    _sequenceStateSubject.addStream(
        Rx.combineLatest2<List<IndexedAudioSource>, int, SequenceState>(
      sequenceStream,
      currentIndexStream,
      (sequence, currentIndex) {
        if (sequence == null) return null;
        if (currentIndex == null) currentIndex = 0;
        currentIndex = min(sequence.length - 1, max(0, currentIndex));
        return SequenceState(sequence, currentIndex);
      },
    ).distinct().handleError((err, stack) {/* noop */}));
    _playerStateSubject.addStream(
        Rx.combineLatest2<bool, PlaybackEvent, PlayerState>(
            playingStream,
            playbackEventStream,
            (playing, event) => PlayerState(playing, event.processingState))
        .distinct()
        .handleError((err, stack) {/* noop */}));
    _eventChannelStreamSubscription = _eventChannelStream.listen(
      _playbackEventSubject.add,
      onError: _playbackEventSubject.addError,
    );
    // No sequence loaded yet.
    _sequenceSubject.add(null);
    // Respond to changes to AndroidAudioAttributes configuration.
    AudioSession.instance.then((audioSession) {
      audioSession.configurationStream
          .map((conf) => conf?.androidAudioAttributes)
          .where((attributes) => attributes != null)
          .distinct()
          .listen(setAndroidAudioAttributes);
    });
    if (handleInterruptions) {
      AudioSession.instance.then((session) {
        // Headphones unplugged, etc. — pause rather than blast the speaker.
        session.becomingNoisyEventStream.listen((_) {
          pause();
        });
        session.interruptionEventStream.listen((event) {
          if (event.begin) {
            switch (event.type) {
              case AudioInterruptionType.duck:
                // Android sends duck for transient notifications; halve the
                // volume for game audio only.
                if (session.androidAudioAttributes.usage ==
                    AndroidAudioUsage.game) {
                  setVolume(volume / 2);
                }
                _playInterrupted = false;
                break;
              case AudioInterruptionType.pause:
              case AudioInterruptionType.unknown:
                if (playing) {
                  pause();
                  // Although pause is async and sets _playInterrupted = false,
                  // this is done in the sync portion.
                  _playInterrupted = true;
                }
                break;
            }
          } else {
            // Interruption ended: undo whatever we did at the start.
            switch (event.type) {
              case AudioInterruptionType.duck:
                setVolume(min(1.0, volume * 2));
                _playInterrupted = false;
                break;
              case AudioInterruptionType.pause:
                if (_playInterrupted) play();
                _playInterrupted = false;
                break;
              case AudioInterruptionType.unknown:
                _playInterrupted = false;
                break;
            }
          }
        });
      });
    }
  }
  /// The latest [PlaybackEvent].
  PlaybackEvent get playbackEvent => _playbackEvent;

  /// A stream of [PlaybackEvent]s.
  Stream<PlaybackEvent> get playbackEventStream => _playbackEventSubject.stream;

  /// The duration of the current audio or null if unknown.
  Duration get duration => _playbackEvent.duration;

  /// The duration of the current audio or null if unknown.
  Future<Duration> get durationFuture => _durationFuture;

  /// The duration of the current audio.
  Stream<Duration> get durationStream => _durationSubject.stream;

  /// The current [ProcessingState].
  ProcessingState get processingState => _playbackEvent.processingState;

  /// A stream of [ProcessingState]s.
  Stream<ProcessingState> get processingStateStream =>
      _processingStateSubject.stream;

  /// Whether the player is playing.
  bool get playing => _playingSubject.value;

  /// A stream of changing [playing] states.
  Stream<bool> get playingStream => _playingSubject.stream;

  /// The current volume of the player.
  double get volume => _volumeSubject.value;

  /// A stream of [volume] changes.
  Stream<double> get volumeStream => _volumeSubject.stream;

  /// The current speed of the player.
  double get speed => _speedSubject.value;

  /// A stream of current speed values.
  Stream<double> get speedStream => _speedSubject.stream;

  /// The position up to which buffered audio is available.
  Duration get bufferedPosition => _bufferedPositionSubject.value;

  /// A stream of buffered positions.
  Stream<Duration> get bufferedPositionStream =>
      _bufferedPositionSubject.stream;

  /// The latest ICY metadata received through the audio source.
  IcyMetadata get icyMetadata => _playbackEvent.icyMetadata;

  /// A stream of ICY metadata received through the audio source.
  Stream<IcyMetadata> get icyMetadataStream => _icyMetadataSubject.stream;

  /// The current player state containing only the processing and playing
  /// states.
  PlayerState get playerState => _playerStateSubject.value;

  /// A stream of [PlayerState]s.
  Stream<PlayerState> get playerStateStream => _playerStateSubject.stream;

  /// The current sequence of indexed audio sources.
  List<IndexedAudioSource> get sequence => _sequenceSubject.value;

  /// A stream broadcasting the current sequence of indexed audio sources.
  Stream<List<IndexedAudioSource>> get sequenceStream =>
      _sequenceSubject.stream;

  /// The index of the current item.
  int get currentIndex => _currentIndexSubject.value;

  /// A stream broadcasting the current item.
  Stream<int> get currentIndexStream => _currentIndexSubject.stream;

  /// The current [SequenceState], or `null` if either [sequence] or
  /// [currentIndex] is `null`.
  SequenceState get sequenceState => _sequenceStateSubject.value;

  /// A stream broadcasting the current [SequenceState].
  Stream<SequenceState> get sequenceStateStream => _sequenceStateSubject.stream;

  /// Whether there is another item after the current index.
  bool get hasNext =>
      _audioSource != null &&
      currentIndex != null &&
      currentIndex + 1 < sequence.length;

  /// Whether there is another item before the current index.
  bool get hasPrevious =>
      _audioSource != null && currentIndex != null && currentIndex > 0;

  /// The current loop mode.
  LoopMode get loopMode => _loopModeSubject.value;

  /// A stream of [LoopMode]s.
  Stream<LoopMode> get loopModeStream => _loopModeSubject.stream;

  /// Whether shuffle mode is currently enabled.
  bool get shuffleModeEnabled => _shuffleModeEnabledSubject.value;

  /// A stream of the shuffle mode status.
  Stream<bool> get shuffleModeEnabledStream =>
      _shuffleModeEnabledSubject.stream;

  /// The current Android AudioSession ID or `null` if not set.
  int get androidAudioSessionId => _playbackEvent.androidAudioSessionId;

  /// Broadcasts the current Android AudioSession ID or `null` if not set.
  Stream<int> get androidAudioSessionIdStream =>
      _androidAudioSessionIdSubject.stream;

  /// Whether the player should automatically delay playback in order to
  /// minimize stalling. (iOS 10.0 or later only)
  bool get automaticallyWaitsToMinimizeStalling =>
      _automaticallyWaitsToMinimizeStalling;
  /// The current position of the player.
  ///
  /// While playing, the position is extrapolated forward from the last
  /// platform event using the elapsed wall-clock time and the current speed.
  Duration get position {
    final event = _playbackEvent;
    // While paused or not ready, the last reported position is exact.
    if (!playing || processingState != ProcessingState.ready) {
      return event.updatePosition;
    }
    final elapsed = DateTime.now().difference(event.updateTime) * speed;
    final extrapolated = event.updatePosition + elapsed;
    final total = event.duration;
    // Never report a position beyond the known duration.
    if (total != null && extrapolated > total) return total;
    return extrapolated;
  }

  /// A stream tracking the current position of this player, suitable for
  /// animating a seek bar. To ensure a smooth animation, this stream emits
  /// values more frequently on short items where the seek bar moves more
  /// quickly, and less frequently on long items where the seek bar moves more
  /// slowly. The interval between each update will be no quicker than once
  /// every 16ms and no slower than once every 200ms.
  ///
  /// See [createPositionStream] for more control over the stream parameters.
  Stream<Duration> get positionStream {
    // Created lazily on first access and shared by all listeners.
    _positionSubject ??= BehaviorSubject<Duration>()
      ..addStream(createPositionStream(
          steps: 800,
          minPeriod: Duration(milliseconds: 16),
          maxPeriod: Duration(milliseconds: 200)));
    return _positionSubject.stream;
  }
  /// Creates a new stream periodically tracking the current position of this
  /// player. The stream will aim to emit [steps] position updates from the
  /// beginning to the end of the current audio source, at intervals of
  /// [duration] / [steps]. This interval will be clipped between [minPeriod]
  /// and [maxPeriod]. This stream will not emit values while audio playback is
  /// paused or stalled.
  ///
  /// Note: each time this method is called, a new stream is created. If you
  /// intend to use this stream multiple times, you should hold a reference to
  /// the returned stream and close it once you are done.
  Stream<Duration> createPositionStream({
    int steps = 800,
    Duration minPeriod = const Duration(milliseconds: 200),
    Duration maxPeriod = const Duration(milliseconds: 200),
  }) {
    assert(minPeriod <= maxPeriod);
    assert(minPeriod > Duration.zero);
    // Unknown duration counts as zero, which clamps the period to minPeriod.
    Duration duration() => this.duration ?? Duration.zero;
    // Emission period: duration/steps, clipped to [minPeriod, maxPeriod].
    Duration step() {
      var s = duration() ~/ steps;
      if (s < minPeriod) s = minPeriod;
      if (s > maxPeriod) s = maxPeriod;
      return s;
    }

    StreamController<Duration> controller = StreamController.broadcast();
    Timer currentTimer;
    StreamSubscription durationSubscription;
    StreamSubscription playbackEventSubscription;
    // Periodic tick: emit the current position, or tear everything down if
    // the consumer or the player itself has shut down. Cleanup happens on
    // the tick AFTER closure, not immediately.
    void yieldPosition(Timer timer) {
      if (controller.isClosed) {
        timer.cancel();
        durationSubscription?.cancel();
        playbackEventSubscription?.cancel();
        return;
      }
      if (_durationSubject.isClosed) {
        // Player was disposed (dispose() closes _durationSubject).
        timer.cancel();
        durationSubscription?.cancel();
        playbackEventSubscription?.cancel();
        // This will in turn close _positionSubject.
        controller.close();
        return;
      }
      controller.add(position);
    }

    currentTimer = Timer.periodic(step(), yieldPosition);
    // Re-derive the period whenever the duration changes.
    durationSubscription = durationStream.listen((duration) {
      currentTimer.cancel();
      currentTimer = Timer.periodic(step(), yieldPosition);
    });
    // Emit immediately on discontinuities (seek, pause, etc.), not just on
    // timer ticks.
    playbackEventSubscription = playbackEventStream.listen((event) {
      controller.add(position);
    });
    // distinct() suppresses duplicate positions while paused/stalled.
    return controller.stream.distinct();
  }
  /// Convenience method to load audio from a URL with optional headers,
  /// equivalent to:
  ///
  /// ```
  /// load(AudioSource.uri(Uri.parse(url), headers: headers));
  /// ```
  Future<Duration> setUrl(String url, {Map headers}) {
    final uri = Uri.parse(url);
    return load(AudioSource.uri(uri, headers: headers));
  }

  /// Convenience method to load audio from a file, equivalent to:
  ///
  /// ```
  /// load(AudioSource.uri(Uri.file(filePath)));
  /// ```
  Future<Duration> setFilePath(String filePath) {
    return load(AudioSource.uri(Uri.file(filePath)));
  }

  /// Convenience method to load audio from an asset, equivalent to:
  ///
  /// ```
  /// load(AudioSource.uri(Uri.parse('asset:///$assetPath')));
  /// ```
  Future<Duration> setAsset(String assetPath) {
    return load(AudioSource.uri(Uri.parse('asset:///$assetPath')));
  }
  /// Loads audio from an [AudioSource] and completes when the audio is ready
  /// to play with the duration of that audio, or null if the duration is unknown.
  ///
  /// This method throws:
  ///
  /// * [PlayerException] if the audio source was unable to be loaded.
  /// * [PlayerInterruptedException] if another call to [load] happened before
  /// this call completed.
  Future<Duration> load(AudioSource source) async {
    try {
      _audioSource = source;
      _broadcastSequence();
      final duration = await _load(source);
      // Wait for loading state to pass.
      await processingStateStream
          .firstWhere((state) => state != ProcessingState.loading);
      return duration;
    } catch (e) {
      _audioSource = null;
      // Fix: re-broadcast so sequenceStream doesn't keep advertising the
      // sequence of the source that failed to load.
      _broadcastSequence();
      rethrow;
    }
  }
  // Publishes the current source's sequence (or null) on sequenceStream.
  void _broadcastSequence() {
    _sequenceSubject.add(_audioSource?.sequence);
  }

  // Tracks [source] so its resources can be released in dispose().
  // Fix: declare the previously-implicit `dynamic` return type as void.
  void _registerAudioSource(AudioSource source) {
    _audioSources[source._id] = source;
  }
  // Performs the platform-side load of [source], returning its duration (or
  // null if unknown) and translating platform errors into typed exceptions.
  Future<Duration> _load(AudioSource source) async {
    try {
      // Start the header-injecting proxy lazily, the first time a source
      // needs headers (never on web).
      if (!kIsWeb && source._requiresHeaders) {
        if (_proxy == null) {
          _proxy = _ProxyHttpServer();
          await _proxy.start();
        }
      }
      await source._setup(this);
      // Platform reports duration in milliseconds; negative/null = unknown.
      _durationFuture = _invokeMethod('load', [source.toJson()]).then(
          (ms) => (ms == null || ms < 0) ? null : Duration(milliseconds: ms));
      final duration = await _durationFuture;
      _durationSubject.add(duration);
      return duration;
    } on PlatformException catch (e) {
      try {
        // Numeric codes map directly to PlayerException codes.
        throw PlayerException(int.parse(e.code), e.message);
      } on FormatException catch (_) {
        // Non-numeric code: 'abort' means a concurrent load interrupted us;
        // anything else becomes a PlayerException with a sentinel code.
        if (e.code == 'abort') {
          throw PlayerInterruptedException(e.message);
        } else {
          throw PlayerException(9999999, e.message);
        }
      }
    }
  }
  /// Clips the current [AudioSource] to the given [start] and [end]
  /// timestamps. If [start] is null, it will be reset to the start of the
  /// original [AudioSource]. If [end] is null, it will be reset to the end of
  /// the original [AudioSource]. This method cannot be called from the
  /// [ProcessingState.none] state.
  Future<Duration> setClip({Duration start, Duration end}) async {
    // With no bounds given, reload the original source unclipped.
    final duration = await _load(start == null && end == null
        ? _audioSource
        : ClippingAudioSource(
            child: _audioSource,
            start: start,
            end: end,
          ));
    // Wait for loading state to pass.
    await processingStateStream
        .firstWhere((state) => state != ProcessingState.loading);
    return duration;
  }
  /// Tells the player to play audio as soon as an audio source is loaded and
  /// ready to play. The [Future] returned by this method completes when the
  /// playback completes or is paused or stopped. If the player is already
  /// playing, this method completes immediately.
  ///
  /// This method causes [playing] to become true, and it will remain true
  /// until [pause] or [stop] is called. This means that if playback completes,
  /// and then you [seek] to an earlier position in the audio, playback will
  /// continue playing from that position. If you instead wish to [pause] or
  /// [stop] playback on completion, you can call either method as soon as
  /// [processingState] becomes [ProcessingState.completed] by listening to
  /// [processingStateStream].
  ///
  /// This method activates the audio session before playback, and will do
  /// nothing if activation of the audio session fails for any reason.
  Future<void> play() async {
    if (playing) return;
    _playInterrupted = false;
    final audioSession = await AudioSession.instance;
    // Only start playback if the OS grants us the audio session.
    if (await audioSession.setActive(true)) {
      // playing becomes true before the platform call so UI updates promptly.
      _playingSubject.add(true);
      await _invokeMethod('play');
    }
  }
  /// Pauses the currently playing media. This method does nothing if
  /// ![playing].
  Future<void> pause() async {
    if (!playing) return;
    _playInterrupted = false;
    // Update local state immediately so that queries aren't surprised.
    // Freezing updatePosition at the extrapolated `position` stops the
    // position getter from drifting while paused.
    _playbackEvent = _playbackEvent.copyWith(
      updatePosition: position,
      updateTime: DateTime.now(),
    );
    _playbackEventSubject.add(_playbackEvent);
    _playingSubject.add(false);
    // TODO: perhaps modify platform side to ensure new state is broadcast
    // before this method returns.
    await _invokeMethod('pause');
  }
  /// Convenience method to pause and seek to zero.
  Future<void> stop() async {
    await pause();
    await seek(Duration.zero);
  }

  /// Sets the volume of this player, where 1.0 is normal volume.
  // NOTE(review): the value is not clamped here; the platform presumably
  // handles out-of-range values — confirm before adding validation.
  Future<void> setVolume(final double volume) async {
    _volumeSubject.add(volume);
    await _invokeMethod('setVolume', [volume]);
  }
  /// Sets the playback speed of this player, where 1.0 is normal speed.
  Future<void> setSpeed(final double speed) async {
    // Snapshot the position before the speed changes, so the position
    // getter extrapolates with the new speed from this point only.
    _playbackEvent = _playbackEvent.copyWith(
      updatePosition: position,
      updateTime: DateTime.now(),
    );
    _playbackEventSubject.add(_playbackEvent);
    _speedSubject.add(speed);
    await _invokeMethod('setSpeed', [speed]);
  }
  /// Sets the [LoopMode]. The gapless looping support is as follows:
  ///
  /// * Android: supported
  /// * iOS/macOS: not supported, however, gapless looping can be achieved by
  /// using [LoopingAudioSource].
  /// * Web: not supported
  Future<void> setLoopMode(LoopMode mode) async {
    // Local state is updated eagerly, before the platform acknowledges.
    _loopModeSubject.add(mode);
    await _invokeMethod('setLoopMode', [mode.index]);
  }

  /// Sets whether shuffle mode is enabled.
  Future<void> setShuffleModeEnabled(bool enabled) async {
    _shuffleModeEnabledSubject.add(enabled);
    await _invokeMethod('setShuffleModeEnabled', [enabled]);
  }
  /// Sets automaticallyWaitsToMinimizeStalling for AVPlayer in iOS 10.0 or later, defaults to true.
  /// Has no effect on Android clients
  Future<void> setAutomaticallyWaitsToMinimizeStalling(
      final bool automaticallyWaitsToMinimizeStalling) async {
    // Cache locally so the getter reflects the value without a platform call.
    _automaticallyWaitsToMinimizeStalling =
        automaticallyWaitsToMinimizeStalling;
    await _invokeMethod('setAutomaticallyWaitsToMinimizeStalling',
        [automaticallyWaitsToMinimizeStalling]);
  }
  /// Seeks to a particular [position]. If a composition of multiple
  /// [AudioSource]s has been loaded, you may also specify [index] to seek to a
  /// particular item within that sequence. This method has no effect unless
  /// an audio source has been loaded.
  Future<void> seek(final Duration position, {int index}) async {
    // Ignore seeks before a source is loaded or while one is still loading.
    if (processingState == ProcessingState.none ||
        processingState == ProcessingState.loading) {
      return;
    }
    // Reflect the target position locally right away so UI queries don't
    // lag behind the platform round trip.
    _playbackEvent = _playbackEvent.copyWith(
      updatePosition: position,
      updateTime: DateTime.now(),
    );
    _playbackEventSubject.add(_playbackEvent);
    await _invokeMethod('seek', [position?.inMilliseconds, index]);
  }
  /// Seek to the next item.
  Future<void> seekToNext() async {
    if (!hasNext) return;
    await seek(Duration.zero, index: currentIndex + 1);
  }

  /// Seek to the previous item.
  Future<void> seekToPrevious() async {
    if (!hasPrevious) return;
    await seek(Duration.zero, index: currentIndex - 1);
  }
  /// Set the Android audio attributes for this player. Has no effect on other
  /// platforms. This will cause a new Android AudioSession ID to be generated.
  Future<void> setAndroidAudioAttributes(
      AndroidAudioAttributes audioAttributes) async {
    // Guard: nothing to do without attributes.
    if (audioAttributes == null) return;
    final args = [audioAttributes.toJson()];
    await _invokeMethod('setAndroidAudioAttributes', args);
  }
  /// Release all resources associated with this player. You must invoke this
  /// after you are done with the player.
  Future<void> dispose() async {
    await _invokeMethod('dispose');
    _audioSource = null;
    // Release temporary files/resources held by every registered source.
    _audioSources.values.forEach((s) => s._dispose());
    _audioSources.clear();
    _proxy?.stop();
    // Closing _durationSubject also signals createPositionStream timers to
    // shut down on their next tick (see yieldPosition).
    await _durationSubject.close();
    await _eventChannelStreamSubscription.cancel();
    await _loopModeSubject.close();
    await _shuffleModeEnabledSubject.close();
    await _playingSubject.close();
    await _volumeSubject.close();
    await _speedSubject.close();
    await _sequenceSubject.close();
    // NOTE(review): several subjects (_playbackEventSubject,
    // _processingStateSubject, _playerStateSubject, _positionSubject, ...)
    // are not closed here. Most are fed via addStream and cannot be closed
    // while that stream is pending — confirm this omission is intentional.
  }
  // Invokes [method] on this player's platform channel, first awaiting the
  // channel itself since it is created asynchronously in _init.
  Future<dynamic> _invokeMethod(String method, [dynamic args]) async =>
      (await _channel).invokeMethod(method, args);
  618. }
  619. /// Captures the details of any error accessing, loading or playing an audio
  620. /// source, including an invalid or inaccessible URL, or an audio encoding that
  621. /// could not be understood.
  622. class PlayerException {
  623. /// On iOS and macOS, maps to `NSError.code`. On Android, maps to
  624. /// `ExoPlaybackException.type`. On Web, maps to `MediaError.code`.
  625. final int code;
  626. /// On iOS and macOS, maps to `NSError.localizedDescription`. On Android,
  627. /// maps to `ExoPlaybackException.getMessage()`. On Web, a generic message
  628. /// is provided.
  629. final String message;
  630. PlayerException(this.code, this.message);
  631. @override
  632. String toString() => "($code) $message";
  633. }
  634. /// An error that occurs when one operation on the player has been interrupted
  635. /// (e.g. by another simultaneous operation).
  636. class PlayerInterruptedException {
  637. final String message;
  638. PlayerInterruptedException(this.message);
  639. @override
  640. String toString() => "$message";
  641. }
  642. /// Encapsulates the playback state and current position of the player.
  643. class PlaybackEvent {
  644. /// The current processing state.
  645. final ProcessingState processingState;
  646. /// When the last time a position discontinuity happened, as measured in time
  647. /// since the epoch.
  648. final DateTime updateTime;
  649. /// The position at [updateTime].
  650. final Duration updatePosition;
  651. /// The buffer position.
  652. final Duration bufferedPosition;
  653. /// The media duration, or null if unknown.
  654. final Duration duration;
  655. /// The latest ICY metadata received through the audio stream.
  656. final IcyMetadata icyMetadata;
  657. /// The index of the currently playing item.
  658. final int currentIndex;
  659. /// The current Android AudioSession ID.
  660. final int androidAudioSessionId;
  661. String qualityString;
  662. PlaybackEvent({
  663. @required this.processingState,
  664. @required this.updateTime,
  665. @required this.updatePosition,
  666. @required this.bufferedPosition,
  667. @required this.duration,
  668. @required this.icyMetadata,
  669. @required this.currentIndex,
  670. @required this.androidAudioSessionId,
  671. this.qualityString
  672. });
  673. PlaybackEvent copyWith({
  674. ProcessingState processingState,
  675. DateTime updateTime,
  676. Duration updatePosition,
  677. Duration bufferedPosition,
  678. double speed,
  679. Duration duration,
  680. IcyMetadata icyMetadata,
  681. UriAudioSource currentIndex,
  682. int androidAudioSessionId,
  683. }) =>
  684. PlaybackEvent(
  685. processingState: processingState ?? this.processingState,
  686. updateTime: updateTime ?? this.updateTime,
  687. updatePosition: updatePosition ?? this.updatePosition,
  688. bufferedPosition: bufferedPosition ?? this.bufferedPosition,
  689. duration: duration ?? this.duration,
  690. icyMetadata: icyMetadata ?? this.icyMetadata,
  691. currentIndex: currentIndex ?? this.currentIndex,
  692. androidAudioSessionId:
  693. androidAudioSessionId ?? this.androidAudioSessionId,
  694. qualityString: this.qualityString
  695. );
  696. @override
  697. String toString() =>
  698. "{processingState=$processingState, updateTime=$updateTime, updatePosition=$updatePosition}";
  699. }
/// Enumerates the different processing states of a player.
enum ProcessingState {
  /// The player has not loaded an [AudioSource].
  none,

  /// The player is loading an [AudioSource].
  loading,

  /// The player is buffering audio and unable to play.
  buffering,

  /// The player has enough audio buffered and is able to play.
  ready,

  /// The player has reached the end of the audio.
  completed,
}
  713. /// Encapsulates the playing and processing states. These two states vary
  714. /// orthogonally, and so if [processingState] is [ProcessingState.buffering],
  715. /// you can check [playing] to determine whether the buffering occurred while
  716. /// the player was playing or while the player was paused.
  717. class PlayerState {
  718. /// Whether the player will play when [processingState] is
  719. /// [ProcessingState.ready].
  720. final bool playing;
  721. /// The current processing state of the player.
  722. final ProcessingState processingState;
  723. PlayerState(this.playing, this.processingState);
  724. @override
  725. String toString() => 'playing=$playing,processingState=$processingState';
  726. @override
  727. int get hashCode => toString().hashCode;
  728. @override
  729. bool operator ==(dynamic other) =>
  730. other is PlayerState &&
  731. other?.playing == playing &&
  732. other?.processingState == processingState;
  733. }
  734. class IcyInfo {
  735. final String title;
  736. final String url;
  737. IcyInfo({@required this.title, @required this.url});
  738. IcyInfo.fromJson(Map json) : this(title: json['title'], url: json['url']);
  739. @override
  740. String toString() => 'title=$title,url=$url';
  741. @override
  742. int get hashCode => toString().hashCode;
  743. @override
  744. bool operator ==(dynamic other) =>
  745. other is IcyInfo && other?.toString() == toString();
  746. }
  747. class IcyHeaders {
  748. final int bitrate;
  749. final String genre;
  750. final String name;
  751. final int metadataInterval;
  752. final String url;
  753. final bool isPublic;
  754. IcyHeaders({
  755. @required this.bitrate,
  756. @required this.genre,
  757. @required this.name,
  758. @required this.metadataInterval,
  759. @required this.url,
  760. @required this.isPublic,
  761. });
  762. IcyHeaders.fromJson(Map json)
  763. : this(
  764. bitrate: json['bitrate'],
  765. genre: json['genre'],
  766. name: json['name'],
  767. metadataInterval: json['metadataInterval'],
  768. url: json['url'],
  769. isPublic: json['isPublic'],
  770. );
  771. @override
  772. String toString() =>
  773. 'bitrate=$bitrate,genre=$genre,name=$name,metadataInterval=$metadataInterval,url=$url,isPublic=$isPublic';
  774. @override
  775. int get hashCode => toString().hashCode;
  776. @override
  777. bool operator ==(dynamic other) =>
  778. other is IcyHeaders && other?.toString() == toString();
  779. }
  780. class IcyMetadata {
  781. final IcyInfo info;
  782. final IcyHeaders headers;
  783. IcyMetadata({@required this.info, @required this.headers});
  784. IcyMetadata.fromJson(Map json)
  785. : this(
  786. info: json['info'] == null ? null : IcyInfo.fromJson(json['info']),
  787. headers: json['headers'] == null
  788. ? null
  789. : IcyHeaders.fromJson(json['headers']),
  790. );
  791. @override
  792. int get hashCode => info.hashCode ^ headers.hashCode;
  793. @override
  794. bool operator ==(dynamic other) =>
  795. other is IcyMetadata && other?.info == info && other?.headers == headers;
  796. }
  797. /// Encapsulates the [sequence] and [currentIndex] state and ensures
  798. /// consistency such that [currentIndex] is within the range of
  799. /// [sequence.length]. If [sequence.length] is 0, then [currentIndex] is also
  800. /// 0.
  801. class SequenceState {
  802. /// The sequence of the current [AudioSource].
  803. final List<IndexedAudioSource> sequence;
  804. /// The index of the current source in the sequence.
  805. final int currentIndex;
  806. SequenceState(this.sequence, this.currentIndex);
  807. /// The current source in the sequence.
  808. IndexedAudioSource get currentSource => sequence[currentIndex];
  809. }
/// A local proxy HTTP server for making remote GET requests with headers.
///
/// TODO: Recursively attach headers to items in playlists like m3u8.
class _ProxyHttpServer {
  HttpServer _server;

  /// Maps request keys to [_ProxyRequest]s.
  final Map<String, _ProxyRequest> _uriMap = {};

  /// The port this server is bound to on localhost. This is set only after
  /// [start] has completed.
  int get port => _server.port;

  /// Associate headers with a URL. This may be called only after [start] has
  /// completed.
  ///
  /// Returns a localhost URL pointing at this proxy; requests made to it are
  /// forwarded to the original [url] with [headers] attached.
  Uri addUrl(Uri url, Map<String, String> headers) {
    final path = _requestKey(url);
    _uriMap[path] = _ProxyRequest(url, headers);
    return url.replace(
      scheme: 'http',
      host: InternetAddress.loopbackIPv4.address,
      port: port,
    );
  }

  /// A unique key for each request that can be processed by this proxy,
  /// made up of the URL path and query string. It is not possible to
  /// simultaneously track requests that have the same URL path and query
  /// but differ in other respects such as the port or headers.
  String _requestKey(Uri uri) => '${uri.path}?${uri.query}';

  /// Starts the server.
  ///
  /// Binds to an ephemeral port on the IPv4 loopback interface and serves GET
  /// requests by replaying them against the registered origin URL.
  Future start() async {
    _server = await HttpServer.bind(InternetAddress.loopbackIPv4, 0);
    _server.listen((request) async {
      if (request.method == 'GET') {
        // NOTE(review): if the URL was never registered via [addUrl],
        // proxyRequest is null and the access below will throw — confirm
        // whether unregistered paths can reach this server.
        final path = _requestKey(request.uri);
        final proxyRequest = _uriMap[path];
        final originRequest = await HttpClient().getUrl(proxyRequest.uri);
        // Rewrite request headers: forward the client's headers, then apply
        // the registered headers on top (registered headers win), while
        // preserving the origin's own host header.
        final host = originRequest.headers.value('host');
        originRequest.headers.clear();
        request.headers.forEach((name, value) {
          originRequest.headers.set(name, value);
        });
        for (var name in proxyRequest.headers.keys) {
          originRequest.headers.set(name, proxyRequest.headers[name]);
        }
        originRequest.headers.set('host', host);
        // Try to make normal request
        try {
          final originResponse = await originRequest.close();
          request.response.headers.clear();
          originResponse.headers.forEach((name, value) {
            request.response.headers.set(name, value);
          });
          // Pipe response
          await originResponse.pipe(request.response);
        } on HttpException {
          // We likely are dealing with a streaming protocol
          if (proxyRequest.uri.scheme == 'http') {
            // Try parsing HTTP 0.9 response: speak to the origin over a raw
            // socket and relay bytes directly to the detached client socket.
            //request.response.headers.clear();
            final socket = await Socket.connect(
                proxyRequest.uri.host, proxyRequest.uri.port);
            final clientSocket =
                await request.response.detachSocket(writeHeaders: false);
            // Completes once the origin socket closes and all buffered bytes
            // have been flushed to the client.
            Completer done = Completer();
            socket.listen(
              clientSocket.add,
              onDone: () async {
                await clientSocket.flush();
                socket.close();
                clientSocket.close();
                done.complete();
              },
            );
            // Rewrite headers: drop the client's host header, then overlay
            // the registered headers.
            final headers = <String, String>{};
            request.headers.forEach((name, value) {
              if (name.toLowerCase() != 'host') {
                headers[name] = value.join(",");
              }
            });
            for (var name in proxyRequest.headers.keys) {
              headers[name] = proxyRequest.headers[name];
            }
            // NOTE(review): the request line uses only uri.path — the query
            // string is not forwarded here; confirm whether origin URLs with
            // queries need it.
            socket.write("GET ${proxyRequest.uri.path} HTTP/1.1\n");
            if (host != null) {
              socket.write("Host: $host\n");
            }
            for (var name in headers.keys) {
              socket.write("$name: ${headers[name]}\n");
            }
            socket.write("\n");
            await socket.flush();
            await done.future;
          }
        }
      }
    });
  }

  /// Stops the server
  Future stop() => _server.close();
}
/// A request for a URL and headers made by a [_ProxyHttpServer].
class _ProxyRequest {
  /// The original (remote) URL to request.
  final Uri uri;

  /// The headers to attach when requesting [uri].
  final Map<String, String> headers;

  _ProxyRequest(this.uri, this.headers);
}
  916. /// Specifies a source of audio to be played. Audio sources are composable
  917. /// using the subclasses of this class. The same [AudioSource] instance should
  918. /// not be used simultaneously by more than one [AudioPlayer].
  919. abstract class AudioSource {
  920. final String _id;
  921. AudioPlayer _player;
  922. /// Creates an [AudioSource] from a [Uri] with optional headers by
  923. /// attempting to guess the type of stream. On iOS, this uses Apple's SDK to
  924. /// automatically detect the stream type. On Android, the type of stream will
  925. /// be guessed from the extension.
  926. ///
  927. /// If you are loading DASH or HLS streams that do not have standard "mpd" or
  928. /// "m3u8" extensions in their URIs, this method will fail to detect the
  929. /// stream type on Android. If you know in advance what type of audio stream
  930. /// it is, you should instantiate [DashAudioSource] or [HlsAudioSource]
  931. /// directly.
  932. static AudioSource uri(Uri uri, {Map headers, dynamic tag}) {
  933. bool hasExtension(Uri uri, String extension) =>
  934. uri.path.toLowerCase().endsWith('.$extension') ||
  935. uri.fragment.toLowerCase().endsWith('.$extension');
  936. if (hasExtension(uri, 'mpd')) {
  937. return DashAudioSource(uri, headers: headers, tag: tag);
  938. } else if (hasExtension(uri, 'm3u8')) {
  939. return HlsAudioSource(uri, headers: headers, tag: tag);
  940. } else {
  941. return ProgressiveAudioSource(uri, headers: headers, tag: tag);
  942. }
  943. }
  944. static AudioSource fromJson(Map json) {
  945. switch (json['type']) {
  946. case 'progressive':
  947. return ProgressiveAudioSource(Uri.parse(json['uri']),
  948. headers: json['headers']);
  949. case "dash":
  950. return DashAudioSource(Uri.parse(json['uri']),
  951. headers: json['headers']);
  952. case "hls":
  953. return HlsAudioSource(Uri.parse(json['uri']), headers: json['headers']);
  954. case "concatenating":
  955. return ConcatenatingAudioSource(
  956. children: (json['audioSources'] as List)
  957. .map((s) => AudioSource.fromJson(s))
  958. .toList());
  959. case "clipping":
  960. return ClippingAudioSource(
  961. child: AudioSource.fromJson(json['audioSource']),
  962. start: Duration(milliseconds: json['start']),
  963. end: Duration(milliseconds: json['end']));
  964. default:
  965. throw Exception("Unknown AudioSource type: " + json['type']);
  966. }
  967. }
  968. AudioSource() : _id = _uuid.v4();
  969. @mustCallSuper
  970. Future<void> _setup(AudioPlayer player) async {
  971. _player = player;
  972. player._registerAudioSource(this);
  973. }
  974. @mustCallSuper
  975. void _dispose() {
  976. _player = null;
  977. }
  978. bool get _requiresHeaders;
  979. List<IndexedAudioSource> get sequence;
  980. Map toJson();
  981. @override
  982. int get hashCode => _id.hashCode;
  983. @override
  984. bool operator ==(dynamic other) => other is AudioSource && other._id == _id;
  985. }
  986. /// An [AudioSource] that can appear in a sequence.
  987. abstract class IndexedAudioSource extends AudioSource {
  988. final dynamic tag;
  989. IndexedAudioSource(this.tag);
  990. @override
  991. List<IndexedAudioSource> get sequence => [this];
  992. }
/// An abstract class representing audio sources that are loaded from a URI.
abstract class UriAudioSource extends IndexedAudioSource {
  /// The original URI supplied by the app.
  final Uri uri;

  /// Optional HTTP headers to send when requesting [uri].
  final Map headers;

  /// The type identifier serialized in [toJson] (e.g. 'progressive', 'dash',
  /// 'hls').
  final String _type;

  /// A substitute URI used in place of [uri] when the source is an asset
  /// (copied to a cache file) or when headers must be routed via the proxy.
  Uri _overrideUri;

  /// The temporary file holding a copy of the asset, if [uri] is an asset URI.
  File _cacheFile;

  UriAudioSource(this.uri, {this.headers, dynamic tag, @required String type})
      : _type = type,
        super(tag);

  @override
  Future<void> _setup(AudioPlayer player) async {
    await super._setup(player);
    if (uri.scheme == 'asset') {
      // Copy the Flutter asset into a cache file and play from its file URI.
      _overrideUri = Uri.file(
          (await _loadAsset(uri.path.replaceFirst(RegExp(r'^/'), ''))).path);
    } else if (headers != null) {
      // Route the request through the local proxy so headers can be attached.
      _overrideUri = player._proxy.addUrl(uri, headers);
    }
  }

  @override
  void _dispose() {
    // Remove the cached asset copy, if one was created.
    if (_cacheFile?.existsSync() == true) {
      _cacheFile?.deleteSync();
    }
    super._dispose();
  }

  /// Copies the Flutter asset at [assetPath] into the cache file and returns
  /// it, reusing an existing copy when present.
  Future<File> _loadAsset(String assetPath) async {
    final file = await _getCacheFile(assetPath);
    this._cacheFile = file;
    if (!file.existsSync()) {
      await file.create(recursive: true);
      await file.writeAsBytes(
          (await rootBundle.load(assetPath)).buffer.asUint8List());
    }
    return file;
  }

  /// Get file for caching asset media with proper extension
  Future<File> _getCacheFile(final String assetPath) async => File(p.join(
      (await getTemporaryDirectory()).path,
      'just_audio_asset_cache',
      '${_player._id}_$_id${p.extension(assetPath)}'));

  @override
  bool get _requiresHeaders => headers != null;

  @override
  Map toJson() => {
        'id': _id,
        'type': _type,
        'uri': (_overrideUri ?? uri).toString(),
        'headers': headers,
      };
}
/// An [AudioSource] representing a regular media file such as an MP3 or M4A
/// file. The following URI schemes are supported:
///
/// * file: loads from a local file (provided you give your app permission to
/// access that file).
/// * asset: loads from a Flutter asset (not supported on Web).
/// * http(s): loads from an HTTP(S) resource.
///
/// On platforms except for the web, the supplied [headers] will be passed with
/// the HTTP(S) request.
class ProgressiveAudioSource extends UriAudioSource {
  /// Creates a progressive audio source for [uri] with optional [headers]
  /// and an app-defined [tag].
  ProgressiveAudioSource(Uri uri, {Map headers, dynamic tag})
      : super(uri, headers: headers, tag: tag, type: 'progressive');
}
/// An [AudioSource] representing a DASH stream. The following URI schemes are
/// supported:
///
/// * file: loads from a local file (provided you give your app permission to
/// access that file).
/// * asset: loads from a Flutter asset (not supported on Web).
/// * http(s): loads from an HTTP(S) resource.
///
/// On platforms except for the web, the supplied [headers] will be passed with
/// the HTTP(S) request. Currently headers are not applied recursively.
class DashAudioSource extends UriAudioSource {
  /// Creates a DASH audio source for [uri] with optional [headers] and an
  /// app-defined [tag].
  DashAudioSource(Uri uri, {Map headers, dynamic tag})
      : super(uri, headers: headers, tag: tag, type: 'dash');
}
/// An [AudioSource] representing an HLS stream. The following URI schemes are
/// supported:
///
/// * file: loads from a local file (provided you give your app permission to
/// access that file).
/// * asset: loads from a Flutter asset (not supported on Web).
/// * http(s): loads from an HTTP(S) resource.
///
/// On platforms except for the web, the supplied [headers] will be passed with
/// the HTTP(S) request. Currently headers are not applied recursively.
class HlsAudioSource extends UriAudioSource {
  /// Creates an HLS audio source for [uri] with optional [headers] and an
  /// app-defined [tag].
  HlsAudioSource(Uri uri, {Map headers, dynamic tag})
      : super(uri, headers: headers, tag: tag, type: 'hls');
}
  1088. /// An [AudioSource] representing a concatenation of multiple audio sources to
  1089. /// be played in succession. This can be used to create playlists. Playback
  1090. /// between items will be gapless on Android, iOS and macOS, while there will
  1091. /// be a slight gap on Web.
  1092. ///
  1093. /// (Untested) Audio sources can be dynamically added, removed and reordered
  1094. /// while the audio is playing.
  1095. class ConcatenatingAudioSource extends AudioSource {
  1096. final List<AudioSource> children;
  1097. final bool useLazyPreparation;
  1098. ConcatenatingAudioSource({
  1099. @required this.children,
  1100. this.useLazyPreparation = false,
  1101. });
  1102. @override
  1103. Future<void> _setup(AudioPlayer player) async {
  1104. await super._setup(player);
  1105. for (var source in children) {
  1106. await source._setup(player);
  1107. }
  1108. }
  1109. /// (Untested) Appends an [AudioSource].
  1110. Future<void> add(AudioSource audioSource) async {
  1111. children.add(audioSource);
  1112. _player._broadcastSequence();
  1113. if (_player != null) {
  1114. await _player
  1115. ._invokeMethod('concatenating.add', [_id, audioSource.toJson()]);
  1116. }
  1117. }
  1118. /// (Untested) Inserts an [AudioSource] at [index].
  1119. Future<void> insert(int index, AudioSource audioSource) async {
  1120. children.insert(index, audioSource);
  1121. _player._broadcastSequence();
  1122. if (_player != null) {
  1123. await _player._invokeMethod(
  1124. 'concatenating.insert', [_id, index, audioSource.toJson()]);
  1125. }
  1126. }
  1127. /// (Untested) Appends multiple [AudioSource]s.
  1128. Future<void> addAll(List<AudioSource> children) async {
  1129. this.children.addAll(children);
  1130. _player._broadcastSequence();
  1131. if (_player != null) {
  1132. await _player._invokeMethod('concatenating.addAll',
  1133. [_id, children.map((s) => s.toJson()).toList()]);
  1134. }
  1135. }
  1136. /// (Untested) Insert multiple [AudioSource]s at [index].
  1137. Future<void> insertAll(int index, List<AudioSource> children) async {
  1138. this.children.insertAll(index, children);
  1139. _player._broadcastSequence();
  1140. if (_player != null) {
  1141. await _player._invokeMethod('concatenating.insertAll',
  1142. [_id, index, children.map((s) => s.toJson()).toList()]);
  1143. }
  1144. }
  1145. /// (Untested) Dynmaically remove an [AudioSource] at [index] after this
  1146. /// [ConcatenatingAudioSource] has already been loaded.
  1147. Future<void> removeAt(int index) async {
  1148. children.removeAt(index);
  1149. _player._broadcastSequence();
  1150. if (_player != null) {
  1151. await _player._invokeMethod('concatenating.removeAt', [_id, index]);
  1152. }
  1153. }
  1154. /// (Untested) Removes a range of [AudioSource]s from index [start] inclusive
  1155. /// to [end] exclusive.
  1156. Future<void> removeRange(int start, int end) async {
  1157. children.removeRange(start, end);
  1158. _player._broadcastSequence();
  1159. if (_player != null) {
  1160. await _player
  1161. ._invokeMethod('concatenating.removeRange', [_id, start, end]);
  1162. }
  1163. }
  1164. /// (Untested) Moves an [AudioSource] from [currentIndex] to [newIndex].
  1165. Future<void> move(int currentIndex, int newIndex) async {
  1166. children.insert(newIndex, children.removeAt(currentIndex));
  1167. _player._broadcastSequence();
  1168. if (_player != null) {
  1169. await _player
  1170. ._invokeMethod('concatenating.move', [_id, currentIndex, newIndex]);
  1171. }
  1172. }
  1173. /// (Untested) Removes all [AudioSources].
  1174. Future<void> clear() async {
  1175. children.clear();
  1176. _player._broadcastSequence();
  1177. if (_player != null) {
  1178. await _player._invokeMethod('concatenating.clear', [_id]);
  1179. }
  1180. }
  1181. /// The number of [AudioSource]s.
  1182. int get length => children.length;
  1183. operator [](int index) => children[index];
  1184. @override
  1185. List<IndexedAudioSource> get sequence =>
  1186. children.expand((s) => s.sequence).toList();
  1187. @override
  1188. bool get _requiresHeaders =>
  1189. children.any((source) => source._requiresHeaders);
  1190. @override
  1191. Map toJson() => {
  1192. 'id': _id,
  1193. 'type': 'concatenating',
  1194. 'audioSources': children.map((source) => source.toJson()).toList(),
  1195. 'useLazyPreparation': useLazyPreparation,
  1196. };
  1197. }
/// An [AudioSource] that clips the audio of a [UriAudioSource] between a
/// certain start and end time.
class ClippingAudioSource extends IndexedAudioSource {
  /// The source to be clipped.
  final UriAudioSource child;

  /// The start position of the clip, or null for the beginning of [child].
  final Duration start;

  /// The end position of the clip, or null for the end of [child].
  final Duration end;

  /// Creates an audio source that clips [child] to the range [start]..[end],
  /// where [start] and [end] default to the beginning and end of the original
  /// [child] source.
  ClippingAudioSource({
    @required this.child,
    this.start,
    this.end,
    dynamic tag,
  }) : super(tag);

  @override
  Future<void> _setup(AudioPlayer player) async {
    await super._setup(player);
    // The child must also be attached so its asset/header URIs are resolved.
    await child._setup(player);
  }

  @override
  bool get _requiresHeaders => child._requiresHeaders;

  @override
  Map toJson() => {
        'id': _id,
        'type': 'clipping',
        'audioSource': child.toJson(),
        'start': start?.inMilliseconds,
        'end': end?.inMilliseconds,
      };
}
/// An [AudioSource] that loops a nested [AudioSource] a finite number of
/// times.
/// NOTE: this can be inefficient when using a large loop count. If you wish to
/// loop an infinite number of times, use [AudioPlayer.setLoopMode].
///
/// On iOS and macOS, note that [LoopingAudioSource] will provide gapless
/// playback while [AudioPlayer.setLoopMode] will not. (This will be supported
/// in a future release.)
class LoopingAudioSource extends AudioSource {
  /// The source to be looped.
  AudioSource child;

  /// The number of times to loop [child].
  // NOTE(review): count is optional with no default — if left null,
  // [sequence]'s List.generate will throw. Confirm whether callers always
  // supply it or whether null should mean some default.
  final int count;

  LoopingAudioSource({
    @required this.child,
    this.count,
  }) : super();

  @override
  Future<void> _setup(AudioPlayer player) async {
    await super._setup(player);
    await child._setup(player);
  }

  /// The same [child] instance repeated [count] times, flattened.
  @override
  List<IndexedAudioSource> get sequence =>
      List.generate(count, (i) => child).expand((s) => s.sequence).toList();

  @override
  bool get _requiresHeaders => child._requiresHeaders;

  @override
  Map toJson() => {
        'id': _id,
        'type': 'looping',
        'audioSource': child.toJson(),
        'count': count,
      };
}
/// The loop modes accepted by [AudioPlayer.setLoopMode].
enum LoopMode { off, one, all }