🔀 Merge some services from spotube

2024-08-26 23:21:22 +08:00
parent 80771e84ce
commit 84d66fbc4b
28 changed files with 2517 additions and 4 deletions

@@ -0,0 +1,163 @@
import 'dart:developer';
import 'dart:io';
import 'package:media_kit/media_kit.dart' hide Track;
import 'package:flutter/foundation.dart';
import 'package:rhythm_box/platform.dart';
import 'package:rhythm_box/services/local_track.dart';
import 'package:spotify/spotify.dart' hide Playlist;
import 'package:rhythm_box/services/audio_player/custom_player.dart';
import 'dart:async';
import 'package:media_kit/media_kit.dart' as mk;
import 'package:rhythm_box/services/audio_player/playback_state.dart';
import 'package:rhythm_box/services/sourced_track/sourced_track.dart';
part 'audio_players_streams_mixin.dart';
part 'audio_player_impl.dart';
class RhythmMedia extends mk.Media {
  final Track track;

  static int serverPort = 0;

  RhythmMedia(
    this.track, {
    Map<String, dynamic>? extras,
    super.httpHeaders,
  }) : super(
          track is LocalTrack
              ? track.path
              : "http://${PlatformInfo.isWindows ? "localhost" : InternetAddress.anyIPv4.address}:$serverPort/stream/${track.id}",
          extras: {
            ...?extras,
            "track": switch (track) {
              LocalTrack() => track.toJson(),
              SourcedTrack() => track.toJson(),
              _ => track.toJson(),
            },
          },
        );

  @override
  String get uri {
    return switch (track) {
      /// [super.uri] must be used instead of [track.path] to prevent wrong
      /// path format exceptions in Windows causing [extras] to be null
      LocalTrack() => super.uri,
      _ =>
        "http://${PlatformInfo.isWindows ? "localhost" : InternetAddress.anyIPv4.address}:"
            "$serverPort/stream/${track.id}",
    };
  }

  factory RhythmMedia.fromMedia(mk.Media media) {
    final track = media.uri.startsWith("http")
        ? Track.fromJson(media.extras?["track"])
        : LocalTrack.fromJson(media.extras?["track"]);

    return RhythmMedia(
      track,
      extras: media.extras,
      httpHeaders: media.httpHeaders,
    );
  }

  // @override
  // operator ==(Object other) {
  //   if (other is! RhythmMedia) return false;
  //   final isLocal = track is LocalTrack && other.track is LocalTrack;
  //   return isLocal
  //       ? (other.track as LocalTrack).path == (track as LocalTrack).path
  //       : other.track.id == track.id;
  // }

  // @override
  // int get hashCode => track is LocalTrack
  //     ? (track as LocalTrack).path.hashCode
  //     : track.id.hashCode;
}
abstract class AudioPlayerInterface {
  final CustomPlayer _mkPlayer;

  AudioPlayerInterface()
      : _mkPlayer = CustomPlayer(
          configuration: const mk.PlayerConfiguration(
            title: "Rhythm",
            logLevel: kDebugMode ? mk.MPVLogLevel.info : mk.MPVLogLevel.error,
          ),
        ) {
    _mkPlayer.stream.error.listen((event) {
      log("[Playback] Error: $event");
    });
  }

  /// Whether the current platform is supported by the media_kit backend
  static const bool _mkSupportedPlatform = true;

  bool get mkSupportedPlatform => _mkSupportedPlatform;

  Duration get duration {
    return _mkPlayer.state.duration;
  }

  Playlist get playlist {
    return _mkPlayer.state.playlist;
  }

  Duration get position {
    return _mkPlayer.state.position;
  }

  Duration get bufferedPosition {
    return _mkPlayer.state.buffer;
  }

  Future<mk.AudioDevice> get selectedDevice async {
    return _mkPlayer.state.audioDevice;
  }

  Future<List<mk.AudioDevice>> get devices async {
    return _mkPlayer.state.audioDevices;
  }

  bool get hasSource {
    return _mkPlayer.state.playlist.medias.isNotEmpty;
  }

  // states
  bool get isPlaying {
    return _mkPlayer.state.playing;
  }

  bool get isPaused {
    return !_mkPlayer.state.playing;
  }

  bool get isStopped {
    return !hasSource;
  }

  Future<bool> get isCompleted async {
    return _mkPlayer.state.completed;
  }

  bool get isShuffled {
    return _mkPlayer.shuffled;
  }

  PlaylistMode get loopMode {
    return _mkPlayer.state.playlistMode;
  }

  /// Returns the current volume of the player, between 0 and 1
  double get volume {
    return _mkPlayer.state.volume / 100;
  }

  bool get isBuffering {
    return _mkPlayer.state.buffering;
  }
}
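
For orientation (editorial note, not part of the diff): the sketch below shows how a RhythmMedia resolves its playback URI, local tracks play straight from their file path while everything else is routed through the in-app proxy at /stream/<track id>. It assumes the same imports as the file above; the describeMedia helper and the port value are hypothetical.

// Editorial sketch, not part of the commit. `describeMedia` and the port
// value below are made up for illustration.
void describeMedia(Track track) {
  RhythmMedia.serverPort = 8080; // presumably assigned by the app's local streaming server
  final media = RhythmMedia(track);
  // Local tracks resolve to their on-disk path; anything else resolves to
  // http://<host>:<serverPort>/stream/<track id>.
  log(media.uri);
}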

@@ -0,0 +1,134 @@
part of 'audio_player.dart';

final audioPlayer = RhythmAudioPlayer();

class RhythmAudioPlayer extends AudioPlayerInterface
    with RhythmAudioPlayersStreams {
  Future<void> pause() async {
    await _mkPlayer.pause();
  }

  Future<void> resume() async {
    await _mkPlayer.play();
  }

  Future<void> stop() async {
    await _mkPlayer.stop();
  }

  Future<void> seek(Duration position) async {
    await _mkPlayer.seek(position);
  }

  /// Volume is between 0 and 1
  Future<void> setVolume(double volume) async {
    assert(volume >= 0 && volume <= 1);
    await _mkPlayer.setVolume(volume * 100);
  }

  Future<void> setSpeed(double speed) async {
    await _mkPlayer.setRate(speed);
  }

  Future<void> setAudioDevice(mk.AudioDevice device) async {
    await _mkPlayer.setAudioDevice(device);
  }

  Future<void> dispose() async {
    await _mkPlayer.dispose();
  }

  // Playlist related
  Future<void> openPlaylist(
    List<mk.Media> tracks, {
    bool autoPlay = true,
    int initialIndex = 0,
  }) async {
    assert(tracks.isNotEmpty);
    assert(initialIndex <= tracks.length - 1);

    await _mkPlayer.open(
      mk.Playlist(tracks, index: initialIndex),
      play: autoPlay,
    );
  }

  List<String> get sources {
    return _mkPlayer.state.playlist.medias.map((e) => e.uri).toList();
  }

  String? get currentSource {
    if (_mkPlayer.state.playlist.index == -1) return null;
    return _mkPlayer.state.playlist.medias
        .elementAtOrNull(_mkPlayer.state.playlist.index)
        ?.uri;
  }

  String? get nextSource {
    if (loopMode == PlaylistMode.loop &&
        _mkPlayer.state.playlist.index ==
            _mkPlayer.state.playlist.medias.length - 1) {
      return sources.first;
    }
    return _mkPlayer.state.playlist.medias
        .elementAtOrNull(_mkPlayer.state.playlist.index + 1)
        ?.uri;
  }

  String? get previousSource {
    if (loopMode == PlaylistMode.loop && _mkPlayer.state.playlist.index == 0) {
      return sources.last;
    }
    return _mkPlayer.state.playlist.medias
        .elementAtOrNull(_mkPlayer.state.playlist.index - 1)
        ?.uri;
  }

  int get currentIndex => _mkPlayer.state.playlist.index;

  Future<void> skipToNext() async {
    await _mkPlayer.next();
  }

  Future<void> skipToPrevious() async {
    await _mkPlayer.previous();
  }

  Future<void> jumpTo(int index) async {
    await _mkPlayer.jump(index);
  }

  Future<void> addTrack(mk.Media media) async {
    await _mkPlayer.add(media);
  }

  Future<void> addTrackAt(mk.Media media, int index) async {
    await _mkPlayer.insert(index, media);
  }

  Future<void> removeTrack(int index) async {
    await _mkPlayer.remove(index);
  }

  Future<void> moveTrack(int from, int to) async {
    await _mkPlayer.move(from, to);
  }

  Future<void> clearPlaylist() async {
    await _mkPlayer.stop();
  }

  Future<void> setShuffle(bool shuffle) async {
    await _mkPlayer.setShuffle(shuffle);
  }

  Future<void> setLoopMode(PlaylistMode loop) async {
    await _mkPlayer.setPlaylistMode(loop);
  }

  Future<void> setAudioNormalization(bool normalize) async {
    await _mkPlayer.setAudioNormalization(normalize);
  }
}
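
For orientation (editorial note, not part of the diff): a minimal sketch of driving the audioPlayer singleton defined above. The startPlayback helper and the myTracks queue are hypothetical; only the calls on audioPlayer come from this file.

// Editorial sketch, not part of the commit. `myTracks` is a hypothetical
// queue of RhythmMedia built elsewhere in the app.
Future<void> startPlayback(List<RhythmMedia> myTracks) async {
  await audioPlayer.openPlaylist(myTracks, initialIndex: 0);
  await audioPlayer.setVolume(0.8); // takes 0..1, scaled to 0..100 internally
  await audioPlayer.setLoopMode(PlaylistMode.loop);
  await audioPlayer.skipToNext();
}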

@@ -0,0 +1,152 @@
part of 'audio_player.dart';

mixin RhythmAudioPlayersStreams on AudioPlayerInterface {
  // Stream getters
  Stream<Duration> get durationStream {
    // if (mkSupportedPlatform) {
    return _mkPlayer.stream.duration;
    // } else {
    //   return _justAudio!.durationStream
    //       .where((event) => event != null)
    //       .map((event) => event!);
    // }
  }

  Stream<Duration> get positionStream {
    // if (mkSupportedPlatform) {
    return _mkPlayer.stream.position;
    // } else {
    //   return _justAudio!.positionStream;
    // }
  }

  Stream<Duration> get bufferedPositionStream {
    // if (mkSupportedPlatform) {
    // audioplayers doesn't have the capability to get buffered position
    return _mkPlayer.stream.buffer;
    // } else {
    //   return _justAudio!.bufferedPositionStream;
    // }
  }

  Stream<void> get completedStream {
    // if (mkSupportedPlatform) {
    return _mkPlayer.stream.completed;
    // } else {
    //   return _justAudio!.playerStateStream.where(
    //       (event) => event.processingState == ja.ProcessingState.completed);
    // }
  }

  /// Emits the playback progress (0-100) once it reaches at least [percent]
  /// percent of the track's duration
  Stream<int> percentCompletedStream(double percent) {
    return positionStream
        .asyncMap(
          (position) async => duration == Duration.zero
              ? 0
              : (position.inSeconds / duration.inSeconds * 100).toInt(),
        )
        .where((event) => event >= percent);
  }

  Stream<bool> get playingStream {
    // if (mkSupportedPlatform) {
    return _mkPlayer.stream.playing;
    // } else {
    //   return _justAudio!.playingStream;
    // }
  }

  Stream<bool> get shuffledStream {
    // if (mkSupportedPlatform) {
    return _mkPlayer.shuffleStream;
    // } else {
    //   return _justAudio!.shuffleModeEnabledStream;
    // }
  }

  Stream<PlaylistMode> get loopModeStream {
    // if (mkSupportedPlatform) {
    return _mkPlayer.stream.playlistMode;
    // } else {
    //   return _justAudio!.loopModeStream.map(PlaylistMode.fromLoopMode);
    // }
  }

  Stream<double> get volumeStream {
    // if (mkSupportedPlatform) {
    return _mkPlayer.stream.volume.map((event) => event / 100);
    // } else {
    //   return _justAudio!.volumeStream;
    // }
  }

  Stream<bool> get bufferingStream {
    // if (mkSupportedPlatform) {
    // Always emits false on the media_kit path.
    return Stream.value(false);
    // } else {
    //   return _justAudio!.playerStateStream.map(
    //     (event) =>
    //         event.processingState == ja.ProcessingState.buffering ||
    //         event.processingState == ja.ProcessingState.loading,
    //   );
    // }
  }

  Stream<AudioPlaybackState> get playerStateStream {
    // if (mkSupportedPlatform) {
    return _mkPlayer.playerStateStream;
    // } else {
    //   return _justAudio!.playerStateStream
    //       .map(AudioPlaybackState.fromJaPlayerState);
    // }
  }

  Stream<int> get currentIndexChangedStream {
    // if (mkSupportedPlatform) {
    return _mkPlayer.indexChangeStream;
    // } else {
    //   return _justAudio!.sequenceStateStream
    //       .map((event) => event?.currentIndex ?? -1);
    // }
  }

  Stream<String> get activeSourceChangedStream {
    // if (mkSupportedPlatform) {
    return _mkPlayer.indexChangeStream
        .map((event) {
          return _mkPlayer.state.playlist.medias.elementAtOrNull(event)?.uri;
        })
        .where((event) => event != null)
        .cast<String>();
    // } else {
    //   return _justAudio!.sequenceStateStream
    //       .map((event) {
    //         return (event?.currentSource as ja.UriAudioSource?)?.uri.toString();
    //       })
    //       .where((event) => event != null)
    //       .cast<String>();
    // }
  }

  Stream<List<mk.AudioDevice>> get devicesStream =>
      _mkPlayer.stream.audioDevices.asBroadcastStream();

  Stream<mk.AudioDevice> get selectedDeviceStream =>
      _mkPlayer.stream.audioDevice.asBroadcastStream();

  Stream<String> get errorStream => _mkPlayer.stream.error;

  Stream<mk.Playlist> get playlistStream => _mkPlayer.stream.playlist;
}
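
For orientation (editorial note, not part of the diff): a sketch of how the streams exposed by this mixin might be wired into UI or business logic. The bindPlayerStreams helper and the listener bodies are placeholders; only the stream getters come from this file.

// Editorial sketch, not part of the commit.
void bindPlayerStreams() {
  audioPlayer.playerStateStream.listen((state) {
    // e.g. toggle the play/pause button
  });
  audioPlayer.activeSourceChangedStream.listen((uri) {
    // e.g. refresh the "now playing" view when the queue advances
  });
  // Emits once playback passes 70% of the track's duration; useful for
  // "mark as played" style logic.
  audioPlayer.percentCompletedStream(70).listen((_) {
    // ...
  });
}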

@@ -0,0 +1,148 @@
import 'dart:async';
import 'dart:developer';
import 'package:media_kit/media_kit.dart';
import 'package:flutter_broadcasts/flutter_broadcasts.dart';
import 'package:package_info_plus/package_info_plus.dart';
import 'package:audio_session/audio_session.dart';
import 'package:rhythm_box/platform.dart';
import 'package:rhythm_box/services/audio_player/playback_state.dart';
/// MediaKit [Player] by default doesn't have a state stream.
/// This class adds a state stream to the [Player] class.
class CustomPlayer extends Player {
  final StreamController<AudioPlaybackState> _playerStateStream;
  final StreamController<bool> _shuffleStream;

  late final List<StreamSubscription> _subscriptions;

  bool _shuffled;
  int _androidAudioSessionId = 0;
  String _packageName = "";
  AndroidAudioManager? _androidAudioManager;

  CustomPlayer({super.configuration})
      : _playerStateStream = StreamController.broadcast(),
        _shuffleStream = StreamController.broadcast(),
        _shuffled = false {
    nativePlayer.setProperty("network-timeout", "120");

    _subscriptions = [
      stream.buffering.listen((event) {
        _playerStateStream.add(AudioPlaybackState.buffering);
      }),
      stream.playing.listen((playing) {
        if (playing) {
          _playerStateStream.add(AudioPlaybackState.playing);
        } else {
          _playerStateStream.add(AudioPlaybackState.paused);
        }
      }),
      stream.completed.listen((isCompleted) async {
        if (!isCompleted) return;
        _playerStateStream.add(AudioPlaybackState.completed);
      }),
      stream.playlist.listen((event) {
        if (event.medias.isEmpty) {
          _playerStateStream.add(AudioPlaybackState.stopped);
        }
      }),
      stream.error.listen((event) {
        log('[MediaKitError] $event');
      }),
    ];

    PackageInfo.fromPlatform().then((packageInfo) {
      _packageName = packageInfo.packageName;
    });

    if (PlatformInfo.isAndroid) {
      _androidAudioManager = AndroidAudioManager();
      AudioSession.instance.then((s) async {
        _androidAudioSessionId =
            await _androidAudioManager!.generateAudioSessionId();
        notifyAudioSessionUpdate(true);

        await nativePlayer.setProperty(
          "audiotrack-session-id",
          _androidAudioSessionId.toString(),
        );
        await nativePlayer.setProperty("ao", "audiotrack,opensles,");
      });
    }
  }

  Future<void> notifyAudioSessionUpdate(bool active) async {
    if (PlatformInfo.isAndroid) {
      sendBroadcast(
        BroadcastMessage(
          name: active
              ? "android.media.action.OPEN_AUDIO_EFFECT_CONTROL_SESSION"
              : "android.media.action.CLOSE_AUDIO_EFFECT_CONTROL_SESSION",
          data: {
            "android.media.extra.AUDIO_SESSION": _androidAudioSessionId,
            "android.media.extra.PACKAGE_NAME": _packageName,
          },
        ),
      );
    }
  }

  bool get shuffled => _shuffled;

  Stream<AudioPlaybackState> get playerStateStream => _playerStateStream.stream;

  Stream<bool> get shuffleStream => _shuffleStream.stream;

  /// Emits the playlist index only when it actually changes.
  Stream<int> get indexChangeStream {
    int oldIndex = state.playlist.index;
    return stream.playlist.map((event) => event.index).where((newIndex) {
      if (newIndex != oldIndex) {
        oldIndex = newIndex;
        return true;
      }
      return false;
    });
  }

  @override
  Future<void> setShuffle(bool shuffle) async {
    _shuffled = shuffle;
    await super.setShuffle(shuffle);
    _shuffleStream.add(shuffle);

    await Future.delayed(const Duration(milliseconds: 100));
    if (shuffle) {
      // Move the entry at the current playlist index to the front of the
      // shuffled queue.
      await move(state.playlist.index, 0);
    }
  }

  @override
  Future<void> stop() async {
    await super.stop();
    _shuffled = false;
    _playerStateStream.add(AudioPlaybackState.stopped);
    _shuffleStream.add(false);
  }

  @override
  Future<void> dispose() async {
    for (final subscription in _subscriptions) {
      await subscription.cancel();
    }
    await notifyAudioSessionUpdate(false);
    return super.dispose();
  }

  NativePlayer get nativePlayer => platform as NativePlayer;

  Future<void> insert(int index, Media media) async {
    await add(media);
    await move(state.playlist.medias.length, index);
  }

  Future<void> setAudioNormalization(bool normalize) async {
    if (normalize) {
      await nativePlayer.setProperty('af', 'dynaudnorm=g=5:f=250:r=0.9:p=0.5');
    } else {
      await nativePlayer.setProperty('af', '');
    }
  }
}
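
For orientation (editorial note, not part of the diff): CustomPlayer is used like a plain media_kit Player plus the extras above. The demoCustomPlayer helper below is hypothetical and assumes the same imports as this file; only the CustomPlayer members it calls come from the commit.

// Editorial sketch, not part of the commit.
Future<void> demoCustomPlayer() async {
  final player = CustomPlayer(
    configuration: const PlayerConfiguration(title: "Demo"),
  );

  player.playerStateStream.listen((state) => log('state: $state'));
  player.indexChangeStream.listen((index) => log('queue index: $index'));

  await player.setAudioNormalization(true); // applies mpv's dynaudnorm filter
  // ...open media and play exactly as with a stock media_kit Player...
  await player.dispose();
}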

@@ -0,0 +1,28 @@
// import 'package:just_audio/just_audio.dart';

/// A unified playback state enum
enum AudioPlaybackState {
  playing,
  paused,
  completed,
  buffering,
  stopped;

  // static AudioPlaybackState fromJaPlayerState(PlayerState state) {
  //   if (state.playing) {
  //     return AudioPlaybackState.playing;
  //   }
  //   switch (state.processingState) {
  //     case ProcessingState.idle:
  //       return AudioPlaybackState.stopped;
  //     case ProcessingState.ready:
  //       return AudioPlaybackState.paused;
  //     case ProcessingState.completed:
  //       return AudioPlaybackState.completed;
  //     case ProcessingState.loading:
  //     case ProcessingState.buffering:
  //       return AudioPlaybackState.buffering;
  //   }
  // }
}
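
For orientation (editorial note, not part of the diff): since AudioPlaybackState is a plain Dart 3 enum, consumers can switch over it exhaustively. The describeState helper and its labels are hypothetical.

// Editorial sketch, not part of the commit.
String describeState(AudioPlaybackState state) {
  return switch (state) {
    AudioPlaybackState.playing => 'Playing',
    AudioPlaybackState.paused => 'Paused',
    AudioPlaybackState.buffering => 'Buffering…',
    AudioPlaybackState.completed => 'Finished',
    AudioPlaybackState.stopped => 'Stopped',
  };
}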