import 'dart:async';

import 'package:collection/collection.dart';
import 'package:flutter_desktop_tools/flutter_desktop_tools.dart';
import 'package:just_audio/just_audio.dart' as ja;
import 'package:media_kit/media_kit.dart' as mk;

import 'package:spotube/models/spotube_track.dart';
import 'package:spotube/services/audio_player/loop_mode.dart';
import 'package:spotube/services/audio_player/mk_state_player.dart';
import 'package:spotube/services/audio_player/playback_state.dart';

/// Process-wide shared player instance.
final audioPlayer = SpotubeAudioPlayer();

/// Uniform playback facade over two backends:
///
///  * [MkPlayerWithState] (media_kit) on Windows and Linux, and
///  * [ja.AudioPlayer] (just_audio) everywhere else.
///
/// Exactly one of [_mkPlayer]/[_justAudio] is non-null, decided once at
/// construction by [mkSupportedPlatform]; every member branches on the same
/// flag, so the non-null assertions (`!`) inside each branch are safe.
class SpotubeAudioPlayer {
  final MkPlayerWithState? _mkPlayer;
  final ja.AudioPlayer? _justAudio;

  SpotubeAudioPlayer()
      : _mkPlayer = mkSupportedPlatform ? MkPlayerWithState() : null,
        _justAudio = !mkSupportedPlatform ? ja.AudioPlayer() : null;

  /// Whether the current platform uses the media_kit backend.
  static final bool mkSupportedPlatform =
      DesktopTools.platform.isWindows || DesktopTools.platform.isLinux;

  // ---------------------------------------------------------------- streams

  /// Emits the active source's duration whenever it becomes known or changes.
  Stream<Duration> get durationStream {
    if (mkSupportedPlatform) {
      return _mkPlayer!.streams.duration.asBroadcastStream();
    } else {
      // just_audio emits null until the duration is known; filter those out
      // so both backends expose the same non-null contract.
      return _justAudio!.durationStream
          .where((event) => event != null)
          .map((event) => event!)
          .asBroadcastStream();
    }
  }

  /// Emits the current playback position.
  Stream<Duration> get positionStream {
    if (mkSupportedPlatform) {
      return _mkPlayer!.streams.position.asBroadcastStream();
    } else {
      return _justAudio!.positionStream.asBroadcastStream();
    }
  }

  /// Emits how far ahead of the playhead data has been buffered.
  Stream<Duration> get bufferedPositionStream {
    if (mkSupportedPlatform) {
      return _mkPlayer!.streams.buffer.asBroadcastStream();
    } else {
      return _justAudio!.bufferedPositionStream.asBroadcastStream();
    }
  }

  /// Emits when playback of the current source completes.
  ///
  /// NOTE(review): the element type differs per backend (media_kit emits
  /// bools, just_audio emits the matching [ja.PlayerState]); listeners should
  /// only react to the event, not its value.
  Stream get completedStream {
    if (mkSupportedPlatform) {
      return _mkPlayer!.streams.completed.asBroadcastStream();
    } else {
      return _justAudio!.playerStateStream
          .where(
              (event) => event.processingState == ja.ProcessingState.completed)
          .asBroadcastStream();
    }
  }

  /// Stream that emits when the player is almost (80%) complete.
  ///
  /// Each event is a `[position, duration]` pair.
  Stream get almostCompleteStream {
    return positionStream
        .asyncMap((event) async => [event, await duration])
        .where((event) {
      final position = event[0] as Duration;
      final duration = event[1] as Duration?;
      // Guard: just_audio reports a null duration before the source loads,
      // and a zero duration would make every position "almost complete".
      if (duration == null || duration == Duration.zero) return false;
      return position.inSeconds > (duration.inSeconds * .8).toInt();
    }).asBroadcastStream();
  }

  /// Emits true while audio is actively playing.
  Stream<bool> get playingStream {
    if (mkSupportedPlatform) {
      return _mkPlayer!.streams.playing.asBroadcastStream();
    } else {
      return _justAudio!.playingStream.asBroadcastStream();
    }
  }

  /// Emits true while the backend is buffering/loading.
  ///
  /// The media_kit backend has no buffering signal here, so it emits a
  /// single `false`.
  Stream<bool> get bufferingStream {
    if (mkSupportedPlatform) {
      return Stream.value(false).asBroadcastStream();
    } else {
      return _justAudio!.playerStateStream
          .map(
            (event) =>
                event.processingState == ja.ProcessingState.buffering ||
                event.processingState == ja.ProcessingState.loading,
          )
          .asBroadcastStream();
    }
  }

  /// Emits the unified [AudioPlaybackState] of whichever backend is active.
  Stream<AudioPlaybackState> get playerStateStream {
    if (mkSupportedPlatform) {
      return _mkPlayer!.playerStateStream.asBroadcastStream();
    } else {
      return _justAudio!.playerStateStream
          .map(AudioPlaybackState.fromJaPlayerState)
          .asBroadcastStream();
    }
  }

  // ------------------------------------------------------- point-in-time info

  /// Total duration of the active source, or null when not yet known
  /// (just_audio before the source has loaded).
  Future<Duration?> get duration async {
    if (mkSupportedPlatform) {
      return _mkPlayer!.state.duration;
    } else {
      return _justAudio!.duration;
    }
  }

  /// Current playback position.
  Future<Duration> get position async {
    if (mkSupportedPlatform) {
      return _mkPlayer!.state.position;
    } else {
      return _justAudio!.position;
    }
  }

  /// Buffered position of the active source.
  ///
  /// Fixed: this previously returned null on both branches even though both
  /// backends expose the value (media_kit via `state.buffer`, just_audio via
  /// `bufferedPosition`).
  Future<Duration?> get bufferedPosition async {
    if (mkSupportedPlatform) {
      return _mkPlayer!.state.buffer;
    } else {
      return _justAudio!.bufferedPosition;
    }
  }

  /// Whether any media source is currently loaded.
  bool get hasSource {
    if (mkSupportedPlatform) {
      return _mkPlayer!.state.playlist.medias.isNotEmpty;
    } else {
      return _justAudio!.audioSource != null;
    }
  }

  // ------------------------------------------------------------------ states

  bool get isPlaying {
    if (mkSupportedPlatform) {
      return _mkPlayer!.state.playing;
    } else {
      return _justAudio!.playing;
    }
  }

  bool get isPaused {
    if (mkSupportedPlatform) {
      return !_mkPlayer!.state.playing;
    } else {
      return !isPlaying;
    }
  }

  bool get isStopped {
    if (mkSupportedPlatform) {
      // media_kit has no idle state; "no source loaded" is the closest proxy.
      return !hasSource;
    } else {
      return _justAudio!.processingState == ja.ProcessingState.idle;
    }
  }

  Future<bool> get isCompleted async {
    if (mkSupportedPlatform) {
      return _mkPlayer!.state.completed;
    } else {
      return _justAudio!.processingState == ja.ProcessingState.completed;
    }
  }

  bool get isBuffering {
    if (mkSupportedPlatform) {
      // media_kit doesn't expose a synchronous buffering flag here.
      return false;
    } else {
      return _justAudio!.processingState == ja.ProcessingState.buffering ||
          _justAudio!.processingState == ja.ProcessingState.loading;
    }
  }

  // ----------------------------------------------------------------- control

  /// Converts a URL/path into the backend-specific source object:
  /// [mk.Media] for media_kit, [ja.AudioSource] for just_audio.
  Object _resolveUrlType(String url) {
    if (mkSupportedPlatform) {
      return mk.Media(url);
    } else {
      if (url.startsWith("https")) {
        return ja.AudioSource.uri(Uri.parse(url));
      } else {
        return ja.AudioSource.file(url);
      }
    }
  }

  /// Not supported by either backend yet.
  Future<void> preload(String url) async {
    throw UnimplementedError();
  }

  /// Opens [url] and starts playback.
  ///
  /// On just_audio, replaying the source that is already loaded resumes it
  /// instead of re-opening (avoids a needless stop/reload cycle).
  Future<void> play(String url) async {
    final urlType = _resolveUrlType(url);
    if (mkSupportedPlatform && urlType is mk.Media) {
      await _mkPlayer?.open(urlType, play: true);
    } else {
      if (_justAudio?.audioSource is ja.ProgressiveAudioSource &&
          (_justAudio?.audioSource as ja.ProgressiveAudioSource)
                  .uri
                  .toString() ==
              url) {
        await _justAudio?.play();
      } else {
        await _justAudio?.stop();
        await _justAudio?.setAudioSource(
          urlType as ja.AudioSource,
          preload: true,
        );
        await _justAudio?.play();
      }
    }
  }

  Future<void> pause() async {
    await _mkPlayer?.pause();
    await _justAudio?.pause();
  }

  Future<void> resume() async {
    await _mkPlayer?.play();
    await _justAudio?.play();
  }

  /// Stops playback and resets the locally tracked shuffle/loop flags.
  ///
  /// NOTE(review): the media_kit branch only pauses — the wrapper exposes no
  /// stop; the source stays loaded, so [isStopped] remains false there.
  Future<void> stop() async {
    _mkLooped = PlaybackLoopMode.none;
    _mkShuffled = false;
    await _mkPlayer?.pause();
    await _justAudio?.stop();
  }

  Future<void> seek(Duration position) async {
    await _mkPlayer?.seek(position);
    await _justAudio?.seek(position);
  }

  Future<void> setVolume(double volume) async {
    await _mkPlayer?.setVolume(volume);
    await _justAudio?.setVolume(volume);
  }

  Future<void> setSpeed(double speed) async {
    await _mkPlayer?.setRate(speed);
    await _justAudio?.setSpeed(speed);
  }

  Future<void> dispose() async {
    await _mkPlayer?.dispose();
    await _justAudio?.dispose();
  }

  // --------------------------------------------------------- playlist related

  /// Replaces the current queue with [tracks] (a list of URL/path strings).
  Future<void> openPlaylist(
    List tracks, {
    bool autoPlay = true,
    int initialIndex = 0,
  }) async {
    assert(tracks.isNotEmpty);
    assert(initialIndex <= tracks.length - 1);
    if (mkSupportedPlatform) {
      await _mkPlayer!.open(
        mk.Playlist(
          tracks.map((e) => mk.Media(e)).toList(),
          index: initialIndex,
        ),
        play: autoPlay,
      );
    } else {
      await _justAudio!.setAudioSource(
        ja.ConcatenatingAudioSource(
          useLazyPreparation: true,
          children:
              tracks.map((e) => ja.AudioSource.uri(Uri.parse(e))).toList(),
        ),
        preload: true,
        initialIndex: initialIndex,
      );
      if (autoPlay) {
        await _justAudio!.play();
      }
    }
  }

  /// Returns the subset of [tracks] (SpotubeTrack-likes, matched by `ytUri`)
  /// whose URLs are present in the active queue.
  List resolveTracksForSource(List tracks) {
    if (mkSupportedPlatform) {
      final urls = _mkPlayer!.state.playlist.medias.map((e) => e.uri).toList();
      return tracks.where((e) => urls.contains(e.ytUri)).toList();
    } else {
      final urls = (_justAudio!.audioSource as ja.ConcatenatingAudioSource)
          .children
          .map((e) => (e as ja.UriAudioSource).uri.toString())
          .toList();
      return tracks.where((e) => urls.contains(e.ytUri)).toList();
    }
  }

  /// Whether every track in [tracks] is already queued.
  bool tracksExistsInPlaylist(List tracks) {
    return resolveTracksForSource(tracks).length == tracks.length;
  }

  int get currentIndex {
    if (mkSupportedPlatform) {
      return _mkPlayer!.state.playlist.index;
    } else {
      return _justAudio!.sequenceState!.currentIndex;
    }
  }

  Future<void> skipToNext() async {
    if (mkSupportedPlatform) {
      await _mkPlayer!.next();
    } else {
      await _justAudio!.seekToNext();
    }
  }

  Future<void> skipToPrevious() async {
    if (mkSupportedPlatform) {
      await _mkPlayer!.previous();
    } else {
      await _justAudio!.seekToPrevious();
    }
  }

  Future<void> skipToIndex(int index) async {
    if (mkSupportedPlatform) {
      await _mkPlayer!.jump(index);
    } else {
      await _justAudio!.seek(Duration.zero, index: index);
    }
  }

  Future<void> addTrack(String url) async {
    final urlType = _resolveUrlType(url);
    if (mkSupportedPlatform && urlType is mk.Media) {
      await _mkPlayer!.add(urlType);
    } else {
      await (_justAudio!.audioSource as ja.ConcatenatingAudioSource)
          .add(urlType as ja.AudioSource);
    }
  }

  Future<void> removeTrack(int index) async {
    if (mkSupportedPlatform) {
      await _mkPlayer!.remove(index);
    } else {
      await (_justAudio!.audioSource as ja.ConcatenatingAudioSource)
          .removeAt(index);
    }
  }

  Future<void> moveTrack(int from, int to) async {
    if (mkSupportedPlatform) {
      await _mkPlayer!.move(from, to);
    } else {
      await (_justAudio!.audioSource as ja.ConcatenatingAudioSource)
          .move(from, to);
    }
  }

  /// Removes every queued item.
  ///
  /// Fixed: the media_kit branch previously issued concurrent `remove(i)`
  /// calls for every ORIGINAL index — each removal shifts the indices of the
  /// remaining items, so wrong items were removed and trailing indices went
  /// out of range. Removing sequentially from the highest index down keeps
  /// every index valid.
  Future<void> clearPlaylist() async {
    if (mkSupportedPlatform) {
      for (var i = _mkPlayer!.state.playlist.medias.length - 1; i >= 0; i--) {
        await _mkPlayer!.remove(i);
      }
    } else {
      await (_justAudio!.audioSource as ja.ConcatenatingAudioSource).clear();
    }
  }

  // media_kit does not report shuffle state, so mirror it locally.
  bool _mkShuffled = false;

  Future<void> setShuffle(bool shuffle) async {
    if (mkSupportedPlatform) {
      await _mkPlayer!.setShuffle(shuffle);
      _mkShuffled = shuffle;
    } else {
      await _justAudio!.setShuffleModeEnabled(shuffle);
    }
  }

  Future<bool> isShuffled() async {
    if (mkSupportedPlatform) {
      return _mkShuffled;
    } else {
      return _justAudio!.shuffleModeEnabled;
    }
  }

  // media_kit does not report loop mode, so mirror it locally.
  PlaybackLoopMode _mkLooped = PlaybackLoopMode.none;

  Future<void> setLoopMode(PlaybackLoopMode loop) async {
    if (mkSupportedPlatform) {
      await _mkPlayer!.setPlaylistMode(loop.toPlaylistMode());
      _mkLooped = loop;
    } else {
      await _justAudio!.setLoopMode(loop.toLoopMode());
    }
  }

  Future<PlaybackLoopMode> getLoopMode() async {
    if (mkSupportedPlatform) {
      return _mkLooped;
    } else {
      return PlaybackLoopMode.fromLoopMode(_justAudio!.loopMode);
    }
  }
}