深色模式、首页设置页面和功能优化

This commit is contained in:
Developer
2026-04-02 07:06:55 +08:00
parent f0a62ed68b
commit 954d173329
88 changed files with 12157 additions and 7578 deletions

View File

@@ -0,0 +1,21 @@
MIT License
Copyright (c) 2017 Blue Fire
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

View File

@@ -0,0 +1,16 @@
export 'package:audioplayers_platform_interface/src/api/audio_context.dart';
export 'package:audioplayers_platform_interface/src/api/audio_context_config.dart';
export 'package:audioplayers_platform_interface/src/api/audio_event.dart';
export 'package:audioplayers_platform_interface/src/api/global_audio_event.dart';
export 'package:audioplayers_platform_interface/src/api/player_mode.dart';
export 'package:audioplayers_platform_interface/src/api/player_state.dart';
export 'package:audioplayers_platform_interface/src/api/release_mode.dart';
export 'src/audio_cache.dart';
export 'src/audio_log_level.dart';
export 'src/audio_logger.dart';
export 'src/audio_pool.dart';
export 'src/audioplayer.dart';
export 'src/global_audio_scope.dart';
export 'src/position_updater.dart';
export 'src/source.dart';

View File

@@ -0,0 +1,181 @@
import 'dart:async';
import 'package:audioplayers/src/uri_ext.dart';
import 'package:file/file.dart';
import 'package:file/local.dart';
import 'package:flutter/foundation.dart';
import 'package:flutter/services.dart' show rootBundle;
import 'package:http/http.dart' as http;
import 'package:path_provider/path_provider.dart';
import 'package:uuid/uuid.dart';
// Shared UUID generator used to mint a unique [AudioCache.cacheId] per
// instance, so each cache writes to its own temp sub-directory.
const _uuid = Uuid();
/// This class represents a cache for Local Assets to be played.
///
/// On desktop/mobile, Flutter can only play audios on device folders, so first
/// this class copies asset files to a temporary folder, and then holds a
/// reference to the file.
///
/// On web, it just stores a reference to the URL of the audio, but it gets
/// preloaded by making a simple GET request (the browser then takes care of
/// caching).
///
/// You can pre-cache your audio, or clear the cache, as desired.
/// For most normal uses, the static instance is used. But if you want to
/// control multiple caches, you can create your own instances.
class AudioCache {
  /// A globally accessible instance used by default by all players.
  static AudioCache instance = AudioCache();

  // File system used to write and delete cached copies; swappable so tests
  // can run against an in-memory implementation.
  @visibleForTesting
  static FileSystem fileSystem = const LocalFileSystem();

  /// A reference to the loaded files absolute URLs.
  ///
  /// This is a map of fileNames to pre-loaded URIs.
  /// On mobile/desktop, the URIs are from local files where the bytes have been
  /// copied.
  /// On web, the URIs are external links for pre-loaded files.
  final Map<String, Uri> loadedFiles = {};

  /// This is the path inside your assets folder where your files lie.
  ///
  /// For example, Flame uses the prefix 'assets/audio/'
  /// (you must include the final slash!).
  /// The default prefix (if not provided) is 'assets/'
  /// Your files will be found at `<prefix><fileName>` (so the trailing slash is
  /// crucial).
  String prefix;

  /// An unique ID generated for this instance of [AudioCache].
  ///
  /// This is used to load a file into an unique location in the temporary
  /// directory.
  String? cacheId;

  AudioCache({this.prefix = 'assets/', String? cacheId})
      : cacheId = cacheId ?? _uuid.v4();

  /// Clears the cache for the file [fileName].
  ///
  /// Does nothing if the file was not on cache.
  /// Note: web relies on the browser cache which is handled entirely by the
  /// browser, thus this will no-op.
  Future<void> clear(String fileName) async {
    await _clearFile(fileName);
    loadedFiles.remove(fileName);
  }

  // Deletes the temporary file backing [fileName], if one was cached.
  // On web there is no local copy, so nothing is deleted.
  Future<void> _clearFile(String fileName) async {
    final uri = loadedFiles[fileName];
    if (uri != null && !kIsWeb) {
      await fileSystem.file(uri.toFilePath(windows: false)).delete();
    }
  }

  /// Clears the whole cache.
  Future<void> clearAll() async {
    await Future.wait(loadedFiles.keys.map(_clearFile));
    loadedFiles.clear();
  }

  /// Reads the raw bytes of the asset at [path] from the app bundle.
  @visibleForTesting
  Future<ByteData> loadAsset(String path) => rootBundle.load(path);

  /// Returns the path of the platform's temporary directory.
  @visibleForTesting
  Future<String> getTempDir() async => (await getTemporaryDirectory()).path;

  /// Makes [fileName] accessible to the native side and returns its URI.
  ///
  /// On web this issues a GET so the browser caches the URL; on other
  /// platforms the asset bytes are copied into a temp file under [cacheId].
  Future<Uri> fetchToMemory(String fileName) async {
    if (kIsWeb) {
      final uri = _sanitizeURLForWeb(fileName);
      // We rely on browser caching here. Once the browser downloads this file,
      // the native side implementation should be able to access it from cache.
      await http.get(uri);
      return uri;
    }

    // read local asset from rootBundle
    final byteData = await loadAsset('$prefix$fileName');

    // create a temporary file on the device to be read by the native side
    final file = fileSystem.file('${await getTempDir()}/$cacheId/$fileName');
    await file.create(recursive: true);
    await file.writeAsBytes(byteData.buffer.asUint8List());

    // returns the local file uri
    return file.uri;
  }

  // Builds a web-safe URI for [fileName]: absolute URLs pass through
  // unchanged, relative asset paths are resolved under 'assets/<prefix>'.
  Uri _sanitizeURLForWeb(String fileName) {
    final tryAbsolute = Uri.tryParse(fileName);
    if (tryAbsolute?.isAbsolute ?? false) {
      return tryAbsolute!;
    }
    // Relative Asset path
    // URL-encode twice, see:
    // https://github.com/flutter/engine/blob/2d39e672c95efc6c539d9b48b2cccc65df290cc4/lib/web_ui/lib/ui_web/src/ui_web/asset_manager.dart#L61
    // Parsing an already encoded string to an Uri does not encode it a second
    // time, so we have to do it manually:
    final encoded = UriCoder.encodeOnce(fileName);
    return Uri.parse(Uri.encodeFull('assets/$prefix$encoded'));
  }

  /// Loads a single [fileName] to the cache.
  ///
  /// Returns a [Uri] to access that file.
  Future<Uri> load(String fileName) async {
    var needsFetch = !loadedFiles.containsKey(fileName);
    // On Android, verify that the cached file still exists. It can be removed
    // by the system when the storage is almost full
    // see https://developer.android.com/training/data-storage/app-specific#internal-remove-cache
    if (!needsFetch &&
        defaultTargetPlatform == TargetPlatform.android &&
        !await fileSystem.file(loadedFiles[fileName]).exists()) {
      needsFetch = true;
    }
    if (needsFetch) {
      loadedFiles[fileName] = await fetchToMemory(fileName);
    }
    return loadedFiles[fileName]!;
  }

  /// Loads a single [fileName] to the cache.
  ///
  /// Returns a decoded [String] to access that file.
  Future<String> loadPath(String fileName) async {
    final encodedPath = (await load(fileName)).path;
    // Web needs an url double-encoded path.
    // Darwin needs a decoded path for local files.
    return kIsWeb ? encodedPath : Uri.decodeFull(encodedPath);
  }

  /// Loads a single [fileName] to the cache but returns it as a File.
  ///
  /// Note: this is not available for web, as File doesn't make sense on the
  /// browser!
  Future<File> loadAsFile(String fileName) async {
    if (kIsWeb) {
      throw 'This method cannot be used on web!';
    }
    final uri = await load(fileName);
    return fileSystem.file(
      uri.toFilePath(windows: defaultTargetPlatform == TargetPlatform.windows),
    );
  }

  /// Loads a single [fileName] to the cache but returns it as a list of bytes.
  Future<Uint8List> loadAsBytes(String fileName) async {
    return (await loadAsFile(fileName)).readAsBytes();
  }

  /// Loads all the [fileNames] provided to the cache.
  ///
  /// Returns a [Future] that completes with the list of cached [Uri]s once
  /// every file has been loaded.
  Future<List<Uri>> loadAll(List<String> fileNames) async {
    return Future.wait(fileNames.map(load));
  }
}

View File

@@ -0,0 +1,16 @@
/// Severity levels for audioplayers log output.
///
/// Ordering follows the numeric [level]: a higher level includes the output
/// of every lower one.
enum AudioLogLevel implements Comparable<AudioLogLevel> {
  /// No log output at all.
  none(0),

  /// Errors only.
  error(1),

  /// Errors plus informational messages.
  info(2);

  const AudioLogLevel(this.level);

  /// Returns the level whose numeric value equals [level].
  ///
  /// Throws if no enum value matches.
  factory AudioLogLevel.fromInt(int level) =>
      values.firstWhere((candidate) => candidate.level == level);

  /// Numeric severity used for ordering levels.
  final int level;

  @override
  int compareTo(AudioLogLevel other) => level - other.level;
}

View File

@@ -0,0 +1,46 @@
import 'package:audioplayers/audioplayers.dart';
/// Static logging facade for the audioplayers package.
///
/// Output is filtered by [logLevel]: messages below the configured severity
/// are dropped.
class AudioLogger {
  /// The minimum severity that will actually be printed.
  static AudioLogLevel logLevel = AudioLogLevel.error;

  /// Prints an informational [message] when [logLevel] admits info output.
  static void log(String message) {
    if (logLevel.level >= AudioLogLevel.info.level) {
      // ignore: avoid_print
      print('AudioPlayers Log: $message');
    }
  }

  /// Prints [o] (with optional [stacktrace]) in red when [logLevel] admits
  /// error output.
  static void error(Object o, [StackTrace? stacktrace]) {
    if (logLevel.level >= AudioLogLevel.error.level) {
      // ignore: avoid_print
      print(_errorColor(errorToString(o, stacktrace)));
    }
  }

  /// Renders [o] as a log string, distinguishing [Error]s (which carry their
  /// own stack trace) from [Exception]s and arbitrary thrown objects, and
  /// appending [stackTrace] when it is non-empty.
  static String errorToString(Object o, [StackTrace? stackTrace]) {
    final buffer = StringBuffer();
    if (o is Error) {
      buffer.write('AudioPlayers Error: $o\n${o.stackTrace}');
    } else if (o is Exception) {
      buffer.write('AudioPlayers Exception: $o');
    } else {
      buffer.write('AudioPlayers throw: $o');
    }
    if (stackTrace != null && stackTrace.toString().isNotEmpty) {
      buffer.write('\n$stackTrace');
    }
    return buffer.toString();
  }

  // Wraps [text] in ANSI escape codes so terminals render it in red.
  static String _errorColor(String text) => '\x1B[31m$text\x1B[0m';
}
/// Exception that couples an error [cause] with the [player] on which it
/// occurred, so log output can identify the failing source.
class AudioPlayerException implements Exception {
  /// The underlying error or exception, if any.
  Object? cause;

  /// The player on which the failure occurred.
  AudioPlayer player;

  AudioPlayerException(this.player, {this.cause});

  @override
  // Fix: the original string opened 'AudioPlayerException(' but never closed
  // the parenthesis, producing unbalanced log output.
  String toString() =>
      'AudioPlayerException(\n\t${player.source}, \n\t$cause\n)';
}

View File

@@ -0,0 +1,165 @@
import 'dart:async';
import 'package:audioplayers/audioplayers.dart';
import 'package:flutter/foundation.dart';
import 'package:synchronized/synchronized.dart';
/// Represents a function that can stop an audio playing.
typedef StopFunction = Future<void> Function();
/// An AudioPool is a provider of AudioPlayers that are pre-loaded with an asset
/// to minimize delays.
///
/// All AudioPlayers are loaded with the same audio [source].
/// If you want multiple sounds use multiple [AudioPool]s.
///
/// Use this class if you for example have extremely quick firing, repetitive
/// or simultaneous sounds.
class AudioPool {
  /// Players that are currently playing, keyed by their player id.
  @visibleForTesting
  final Map<String, AudioPlayer> currentPlayers = {};

  /// Idle, pre-loaded players ready to be handed out by [start].
  @visibleForTesting
  final List<AudioPlayer> availablePlayers = [];

  /// Instance of [AudioCache] to be used by all players.
  final AudioCache audioCache;

  /// Platform specific configuration.
  final AudioContext? audioContext;

  /// The source of the sound for this pool.
  final Source source;

  /// The minimum numbers of players, this is the amount of players that the
  /// pool is initialized with.
  final int minPlayers;

  /// The maximum number of players to be kept in the pool.
  ///
  /// If `start` is called after the pool is full there will still be new
  /// [AudioPlayer]s created, but once they are stopped they will not be
  /// returned to the pool.
  final int maxPlayers;

  /// Whether the players in this pool use low latency mode.
  final PlayerMode playerMode;

  /// Lock to synchronize access to the pool.
  final Lock _lock = Lock();

  AudioPool._({
    required this.minPlayers,
    required this.maxPlayers,
    required this.source,
    required this.audioContext,
    this.playerMode = PlayerMode.mediaPlayer,
    AudioCache? audioCache,
  }) : audioCache = audioCache ?? AudioCache.instance;

  /// Creates an [AudioPool] instance with the given parameters.
  /// You will have to manage disposing the players if you choose
  /// PlayerMode.lowLatency.
  static Future<AudioPool> create({
    required Source source,
    required int maxPlayers,
    AudioCache? audioCache,
    AudioContext? audioContext,
    int minPlayers = 1,
    PlayerMode playerMode = PlayerMode.mediaPlayer,
  }) async {
    final instance = AudioPool._(
      source: source,
      audioCache: audioCache,
      maxPlayers: maxPlayers,
      minPlayers: minPlayers,
      playerMode: playerMode,
      audioContext: audioContext,
    );
    // Pre-load the minimum number of players sequentially before the pool is
    // handed to the caller.
    final players = <AudioPlayer>[];
    for (var i = 0; i < minPlayers; i++) {
      players.add(await instance._createNewAudioPlayer());
    }
    return instance..availablePlayers.addAll(players);
  }

  /// Creates an [AudioPool] instance with the asset from the given [path].
  static Future<AudioPool> createFromAsset({
    required String path,
    required int maxPlayers,
    AudioCache? audioCache,
    int minPlayers = 1,
    PlayerMode playerMode = PlayerMode.mediaPlayer,
  }) async {
    return create(
      source: AssetSource(path),
      audioCache: audioCache,
      minPlayers: minPlayers,
      maxPlayers: maxPlayers,
      playerMode: playerMode,
    );
  }

  /// Starts playing the audio, returns a function that can stop the audio.
  /// You must dispose the audio player yourself if using PlayerMode.lowLatency.
  Future<StopFunction> start({double volume = 1.0}) async {
    return _lock.synchronized(() async {
      // Grow the pool on demand when every pre-loaded player is busy.
      if (availablePlayers.isEmpty) {
        availablePlayers.add(await _createNewAudioPlayer());
      }
      final player = availablePlayers.removeAt(0);
      currentPlayers[player.playerId] = player;
      await player.setVolume(volume);
      await player.resume();
      StreamSubscription<void>? subscription;

      // Stops this player and returns it to the pool, or releases it when the
      // pool already holds [maxPlayers]. Safe to call more than once: the
      // map removal makes subsequent calls no-ops.
      Future<void> stop() {
        return _lock.synchronized(() async {
          final removedPlayer = currentPlayers.remove(player.playerId);
          if (removedPlayer != null) {
            subscription?.cancel();
            await removedPlayer.stop();
            if (availablePlayers.length >= maxPlayers) {
              await removedPlayer.release();
            } else {
              availablePlayers.add(removedPlayer);
            }
          }
        });
      }

      // Low-latency players don't emit completion events, so the caller must
      // invoke the returned stop function themselves.
      if (playerMode != PlayerMode.lowLatency) {
        subscription = player.onPlayerComplete.listen((_) => stop());
      }
      return stop;
    });
  }

  // Builds and configures a player for this pool: same cache, mode, audio
  // context and source, with ReleaseMode.stop so the source survives stops.
  Future<AudioPlayer> _createNewAudioPlayer() async {
    final player = AudioPlayer()..audioCache = audioCache;
    await player.setPlayerMode(playerMode);
    if (audioContext != null) {
      await player.setAudioContext(audioContext!);
    }
    await player.setSource(source);
    await player.setReleaseMode(ReleaseMode.stop);
    return player;
  }

  /// Disposes the audio pool. Then it cannot be used anymore.
  Future<void> dispose() async {
    // Dispose all players
    await Future.wait([
      ...currentPlayers.values.map((e) => e.dispose()),
      ...availablePlayers.map((e) => e.dispose()),
    ]);
    currentPlayers.clear();
    availablePlayers.clear();
  }
}

View File

@@ -0,0 +1,517 @@
import 'dart:async';
import 'dart:io';
import 'package:audioplayers/audioplayers.dart';
import 'package:audioplayers/src/uri_ext.dart';
import 'package:audioplayers_platform_interface/audioplayers_platform_interface.dart';
import 'package:flutter/foundation.dart';
import 'package:flutter/services.dart';
import 'package:path_provider/path_provider.dart';
import 'package:uuid/uuid.dart';
// Shared UUID generator used to assign each [AudioPlayer] a unique playerId
// when none is supplied by the caller.
const _uuid = Uuid();
/// This represents a single AudioPlayer, which can play one audio at a time.
/// To play several audios at the same time, you must create several instances
/// of this class.
///
/// It holds methods to play, loop, pause, stop, seek the audio, and some useful
/// hooks for handlers and callbacks.
class AudioPlayer {
  /// Scope for calls and events that concern all players globally.
  static final global = GlobalAudioScope();

  /// Maximum time to wait for a source to report it is prepared.
  static Duration preparationTimeout = const Duration(seconds: 30);

  /// Maximum time to wait for a seek operation to complete.
  static Duration seekingTimeout = const Duration(seconds: 30);

  // Platform implementation this player delegates all native calls to.
  final _platform = AudioplayersPlatformInterface.instance;

  /// This is the [AudioCache] instance used by this player.
  /// Unless you want to control multiple caches separately, you don't need to
  /// change anything as the global instance will be used by default.
  AudioCache audioCache = AudioCache.instance;

  /// An unique ID generated for this instance of [AudioPlayer].
  ///
  /// This is used to properly exchange messages with the [MethodChannel].
  final String playerId;

  Source? _source;

  /// The currently set source, or `null` (e.g. after [release]).
  Source? get source => _source;

  double _volume = 1.0;

  /// The current volume; 0 is mute, 1 is max.
  double get volume => _volume;

  double _balance = 0.0;

  /// The current stereo balance; -1 full left, 0 center, 1 full right.
  double get balance => _balance;

  double _playbackRate = 1.0;

  /// The current playback rate; 1.0 is normal speed.
  double get playbackRate => _playbackRate;

  /// Current mode of the audio player. Can be updated at any time, but is going
  /// to take effect only at the next time you play the audio.
  PlayerMode _mode = PlayerMode.mediaPlayer;

  PlayerMode get mode => _mode;

  ReleaseMode _releaseMode = ReleaseMode.release;

  ReleaseMode get releaseMode => _releaseMode;

  /// Auxiliary variable to re-check the volatile player state during async
  /// operations.
  @visibleForTesting
  PlayerState desiredState = PlayerState.stopped;

  PlayerState _playerState = PlayerState.stopped;

  PlayerState get state => _playerState;

  /// The current playback state.
  /// It is only set, when the corresponding action succeeds.
  set state(PlayerState state) {
    if (_playerState == PlayerState.disposed) {
      throw Exception('AudioPlayer has been disposed');
    }
    if (!_playerStateController.isClosed) {
      _playerStateController.add(state);
    }
    // Keep the desired state in sync with the confirmed state.
    _playerState = desiredState = state;
  }

  PositionUpdater? _positionUpdater;

  /// Completer to wait until the native player and its event stream are
  /// created.
  @visibleForTesting
  final creatingCompleter = Completer<void>();

  late final StreamSubscription _onPlayerCompleteStreamSubscription;

  late final StreamSubscription _onLogStreamSubscription;

  /// Stream controller to be able to get a stream on initialization, before the
  /// native event stream is ready via [_create] method.
  final _eventStreamController = StreamController<AudioEvent>.broadcast();

  late final StreamSubscription _eventStreamSubscription;

  /// Stream of all events emitted for this player.
  Stream<AudioEvent> get eventStream => _eventStreamController.stream;

  final StreamController<PlayerState> _playerStateController =
      StreamController<PlayerState>.broadcast();

  /// Stream of changes on player state.
  Stream<PlayerState> get onPlayerStateChanged => _playerStateController.stream;

  /// Stream of changes on audio position.
  ///
  /// Roughly fires every 200 milliseconds. Will continuously update the
  /// position of the playback if the status is [PlayerState.playing].
  ///
  /// You can use it on a progress bar, for instance.
  Stream<Duration> get onPositionChanged =>
      _positionUpdater?.positionStream ?? const Stream.empty();

  /// Stream of changes on audio duration.
  ///
  /// An event is going to be sent as soon as the audio duration is available
  /// (it might take a while to download or buffer it).
  Stream<Duration> get onDurationChanged => eventStream
      .where((event) => event.eventType == AudioEventType.duration)
      .map((event) => event.duration!);

  /// Stream of player completions.
  ///
  /// Events are sent every time an audio is finished, therefore no event is
  /// sent when an audio is paused or stopped.
  ///
  /// [ReleaseMode.loop] also sends events to this stream.
  Stream<void> get onPlayerComplete =>
      eventStream.where((event) => event.eventType == AudioEventType.complete);

  /// Stream of seek completions.
  ///
  /// An event is going to be sent as soon as the audio seek is finished.
  Stream<void> get onSeekComplete => eventStream
      .where((event) => event.eventType == AudioEventType.seekComplete);

  // Internal stream of 'prepared' events; used to await source readiness.
  Stream<bool> get _onPrepared => eventStream
      .where((event) => event.eventType == AudioEventType.prepared)
      .map((event) => event.isPrepared!);

  /// Stream of log events.
  Stream<String> get onLog => eventStream
      .where((event) => event.eventType == AudioEventType.log)
      .map((event) => event.logMessage!);

  /// Creates a new instance and assigns an unique id to it.
  AudioPlayer({String? playerId}) : playerId = playerId ?? _uuid.v4() {
    // Forward native log events (and their errors) to the shared logger.
    _onLogStreamSubscription = onLog.listen(
      (log) => AudioLogger.log('$log\nSource: $_source'),
      onError: (Object e, [StackTrace? stackTrace]) => AudioLogger.error(
        AudioPlayerException(this, cause: e),
        stackTrace,
      ),
    );
    // On completion, update state and (in release mode) drop the source.
    _onPlayerCompleteStreamSubscription = onPlayerComplete.listen(
      (_) async {
        state = PlayerState.completed;
        if (releaseMode == ReleaseMode.release) {
          _source = null;
        }
        await _positionUpdater?.stopAndUpdate();
      },
      onError: (Object _, [StackTrace? __]) {
        /* Errors are already handled via log stream */
      },
    );
    _create();
    positionUpdater = FramePositionUpdater(
      getPosition: getCurrentPosition,
    );
  }

  // Creates the native player and wires its event stream; completes (or
  // fails) [creatingCompleter], which gates every other operation.
  Future<void> _create() async {
    try {
      await global.ensureInitialized();
      await _platform.create(playerId);
      // Assign the event stream, now that the platform registered this player.
      _eventStreamSubscription = _platform.getEventStream(playerId).listen(
        _eventStreamController.add,
        onError: _eventStreamController.addError,
      );
      creatingCompleter.complete();
    } on Exception catch (e, stackTrace) {
      creatingCompleter.completeError(e, stackTrace);
    }
  }

  /// Play an audio [source].
  ///
  /// To reduce preparation latency, instead consider calling [setSource]
  /// beforehand and then [resume] separately.
  Future<void> play(
    Source source, {
    double? volume,
    double? balance,
    AudioContext? ctx,
    Duration? position,
    PlayerMode? mode,
  }) async {
    desiredState = PlayerState.playing;
    if (mode != null) {
      await setPlayerMode(mode);
    }
    if (volume != null) {
      await setVolume(volume);
    }
    if (balance != null) {
      await setBalance(balance);
    }
    if (ctx != null) {
      await setAudioContext(ctx);
    }
    await setSource(source);
    if (position != null) {
      await seek(position);
    }
    await _resume();
  }

  /// Sets the platform-specific audio context for this player.
  Future<void> setAudioContext(AudioContext ctx) async {
    await creatingCompleter.future;
    return _platform.setAudioContext(playerId, ctx);
  }

  /// Updates the player [mode] and forwards it to the platform.
  Future<void> setPlayerMode(PlayerMode mode) async {
    _mode = mode;
    await creatingCompleter.future;
    return _platform.setPlayerMode(playerId, mode);
  }

  /// Pauses the audio that is currently playing.
  ///
  /// If you call [resume] later, the audio will resume from the point that it
  /// has been paused.
  Future<void> pause() async {
    desiredState = PlayerState.paused;
    await creatingCompleter.future;
    // Re-check: another action may have changed the desired state while we
    // were waiting for the native player to be created.
    if (desiredState == PlayerState.paused) {
      await _platform.pause(playerId);
      state = PlayerState.paused;
      await _positionUpdater?.stopAndUpdate();
    }
  }

  /// Stops the audio that is currently playing.
  ///
  /// The position is going to be reset and you will no longer be able to resume
  /// from the last point.
  Future<void> stop() async {
    desiredState = PlayerState.stopped;
    await creatingCompleter.future;
    // Re-check the desired state after the async gap (see [pause]).
    if (desiredState == PlayerState.stopped) {
      await _platform.stop(playerId);
      state = PlayerState.stopped;
      await _positionUpdater?.stopAndUpdate();
    }
  }

  /// Resumes the audio that has been paused or stopped.
  Future<void> resume() async {
    desiredState = PlayerState.playing;
    await _resume();
  }

  /// Resume without setting the desired state.
  Future<void> _resume() async {
    await creatingCompleter.future;
    if (desiredState == PlayerState.playing) {
      await _platform.resume(playerId);
      state = PlayerState.playing;
      _positionUpdater?.start();
    }
  }

  /// Releases the resources associated with this media player.
  ///
  /// The resources are going to be fetched or buffered again as soon as you
  /// call [resume] or change the source.
  Future<void> release() async {
    await stop();
    await _platform.release(playerId);
    // Stop state already set in stop()
    _source = null;
  }

  /// Moves the cursor to the desired position.
  Future<void> seek(Duration position) async {
    await creatingCompleter.future;
    final futureSeekComplete =
        onSeekComplete.first.timeout(AudioPlayer.seekingTimeout);
    final futureSeek = _platform.seek(playerId, position);
    // Wait simultaneously to ensure all errors are propagated through the same
    // future.
    await Future.wait([futureSeek, futureSeekComplete]);
    await _positionUpdater?.update();
  }

  /// Sets the stereo balance.
  ///
  /// -1 - The left channel is at full volume; the right channel is silent.
  /// 1 - The right channel is at full volume; the left channel is silent.
  /// 0 - Both channels are at the same volume.
  Future<void> setBalance(double balance) async {
    _balance = balance;
    await creatingCompleter.future;
    return _platform.setBalance(playerId, balance);
  }

  /// Sets the volume (amplitude).
  ///
  /// 0 is mute and 1 is the max volume. The values between 0 and 1 are linearly
  /// interpolated.
  Future<void> setVolume(double volume) async {
    _volume = volume;
    await creatingCompleter.future;
    return _platform.setVolume(playerId, volume);
  }

  /// Sets the release mode.
  ///
  /// Check [ReleaseMode]'s doc to understand the difference between the modes.
  Future<void> setReleaseMode(ReleaseMode releaseMode) async {
    _releaseMode = releaseMode;
    await creatingCompleter.future;
    return _platform.setReleaseMode(playerId, releaseMode);
  }

  /// Sets the playback rate - call this after first calling play() or resume().
  ///
  /// iOS and macOS have limits between 0.5 and 2x
  /// Android SDK version should be 23 or higher
  Future<void> setPlaybackRate(double playbackRate) async {
    _playbackRate = playbackRate;
    await creatingCompleter.future;
    return _platform.setPlaybackRate(playerId, playbackRate);
  }

  /// Sets the audio source for this player.
  ///
  /// This will delegate to one of the specific methods below depending on
  /// the source type.
  Future<void> setSource(Source source) async {
    // Implementations of setOnPlayer also call `creatingCompleter.future`
    await source.setOnPlayer(this);
  }

  /// This method helps waiting for a source to be set until it's prepared.
  /// This can happen immediately after [setSource] has finished or it needs to
  /// wait for the [AudioEvent] [AudioEventType.prepared] to arrive.
  Future<void> _completePrepared(Future<void> Function() setSource) async {
    await creatingCompleter.future;
    final preparedFuture = _onPrepared
        .firstWhere((isPrepared) => isPrepared)
        .timeout(AudioPlayer.preparationTimeout);
    // Need to await the setting the source to propagate immediate errors.
    final setSourceFuture = setSource();
    // Wait simultaneously to ensure all errors are propagated through the same
    // future.
    await Future.wait([setSourceFuture, preparedFuture]);
    // Share position once after finished loading
    await _positionUpdater?.update();
  }

  /// Sets the URL to a remote link.
  ///
  /// The resources will start being fetched or buffered as soon as you call
  /// this method.
  Future<void> setSourceUrl(String url, {String? mimeType}) async {
    if (!kIsWeb &&
        defaultTargetPlatform != TargetPlatform.android &&
        url.startsWith('data:')) {
      // Convert data URI's to bytes (native support for web and android).
      final uriData = UriData.fromUri(Uri.parse(url));
      mimeType ??= url.substring(url.indexOf(':') + 1, url.indexOf(';'));
      await setSourceBytes(uriData.contentAsBytes(), mimeType: mimeType);
      return;
    }
    _source = UrlSource(url, mimeType: mimeType);
    // Encode remote url to avoid unexpected failures.
    await _completePrepared(
      () => _platform.setSourceUrl(
        playerId,
        UriCoder.encodeOnce(url),
        mimeType: mimeType,
        isLocal: false,
      ),
    );
  }

  /// Sets the URL to a file in the users device.
  ///
  /// The resources will start being fetched or buffered as soon as you call
  /// this method.
  Future<void> setSourceDeviceFile(String path, {String? mimeType}) async {
    _source = DeviceFileSource(path, mimeType: mimeType);
    await _completePrepared(
      () => _platform.setSourceUrl(
        playerId,
        path,
        isLocal: true,
        mimeType: mimeType,
      ),
    );
  }

  /// Sets the URL to an asset in your Flutter application.
  /// The global instance of AudioCache will be used by default.
  ///
  /// The resources will start being fetched or buffered as soon as you call
  /// this method.
  Future<void> setSourceAsset(String path, {String? mimeType}) async {
    _source = AssetSource(path, mimeType: mimeType);
    final cachePath = await audioCache.loadPath(path);
    await _completePrepared(
      () => _platform.setSourceUrl(
        playerId,
        cachePath,
        mimeType: mimeType,
        isLocal: true,
      ),
    );
  }

  /// Sets raw audio [bytes] as the source.
  ///
  /// On iOS, macOS and Linux the bytes are first written to a temp file as a
  /// workaround; other platforms receive the bytes directly.
  Future<void> setSourceBytes(Uint8List bytes, {String? mimeType}) async {
    if (!kIsWeb &&
        (defaultTargetPlatform == TargetPlatform.iOS ||
            defaultTargetPlatform == TargetPlatform.macOS ||
            defaultTargetPlatform == TargetPlatform.linux)) {
      // Convert to file as workaround
      final tempDir = (await getTemporaryDirectory()).path;
      // Derive a short, stable file name from the content hash so identical
      // byte payloads reuse the same temp file.
      final bytesHash = Object.hashAll(bytes)
          .toUnsigned(20)
          .toRadixString(16)
          .padLeft(5, '0');
      final file = File('$tempDir/$bytesHash');
      await file.writeAsBytes(bytes);
      await setSourceDeviceFile(file.path, mimeType: mimeType);
    } else {
      _source = BytesSource(bytes, mimeType: mimeType);
      await _completePrepared(
        () => _platform.setSourceBytes(playerId, bytes, mimeType: mimeType),
      );
    }
  }

  /// Set the PositionUpdater to control how often the position stream will be
  /// updated. You can use the [FramePositionUpdater], the
  /// [TimerPositionUpdater] or write your own implementation of the
  /// [PositionUpdater].
  set positionUpdater(PositionUpdater? positionUpdater) {
    _positionUpdater?.dispose(); // No need to wait for dispose
    _positionUpdater = positionUpdater;
  }

  /// Get audio duration after setting url.
  /// Use it in conjunction with setUrl.
  ///
  /// It will be available as soon as the audio duration is available
  /// (it might take a while to download or buffer it if file is not local).
  Future<Duration?> getDuration() async {
    await creatingCompleter.future;
    final milliseconds = await _platform.getDuration(playerId);
    if (milliseconds == null) {
      return null;
    }
    return Duration(milliseconds: milliseconds);
  }

  // Gets audio current playing position
  Future<Duration?> getCurrentPosition() async {
    await creatingCompleter.future;
    final milliseconds = await _platform.getCurrentPosition(playerId);
    if (milliseconds == null) {
      return null;
    }
    return Duration(milliseconds: milliseconds);
  }

  /// Closes all [StreamController]s.
  ///
  /// You must call this method when your [AudioPlayer] instance is not going to
  /// be used anymore. If you try to use it after this you will get errors.
  Future<void> dispose() async {
    // First stop and release all native resources.
    await release();
    state = desiredState = PlayerState.disposed;
    final futures = <Future>[
      if (_positionUpdater != null) _positionUpdater!.dispose(),
      if (!_playerStateController.isClosed) _playerStateController.close(),
      _onPlayerCompleteStreamSubscription.cancel(),
      _onLogStreamSubscription.cancel(),
      _eventStreamSubscription.cancel(),
      _eventStreamController.close(),
    ];
    _source = null;
    await Future.wait<dynamic>(futures);
    // Needs to be called after cancelling event stream subscription:
    await _platform.dispose(playerId);
  }
}

View File

@@ -0,0 +1,52 @@
import 'dart:async';
import 'package:audioplayers/src/audio_logger.dart';
import 'package:audioplayers_platform_interface/audioplayers_platform_interface.dart';
// Tracks which platform instance was last initialized, so a changed instance
// (presumably after a full/hot restart — see ensureInitialized) triggers
// re-initialization.
GlobalAudioplayersPlatformInterface? _lastGlobalAudioplayersPlatform;
/// Handle global audio scope like calls and events concerning all AudioPlayers.
class GlobalAudioScope {
  // Completer for the in-flight (or finished) platform initialization;
  // concurrent callers of [ensureInitialized] all await the same one.
  Completer<void>? _initCompleter;

  GlobalAudioplayersPlatformInterface get _platform =>
      GlobalAudioplayersPlatformInterface.instance;

  /// Stream of global events.
  late final Stream<GlobalAudioEvent> eventStream;

  /// Stream of global log events.
  Stream<String> get onLog => eventStream
      .where((event) => event.eventType == GlobalAudioEventType.log)
      .map((event) => event.logMessage!);

  GlobalAudioScope() {
    eventStream = _platform.getGlobalEventStream();
    // Forward global log events (and their errors) to the shared logger.
    onLog.listen(
      AudioLogger.log,
      onError: AudioLogger.error,
    );
  }

  /// Ensure the global platform is initialized.
  ///
  /// Re-runs init when the platform instance has changed; otherwise just
  /// awaits the previous initialization's completer.
  Future<void> ensureInitialized() async {
    if (_lastGlobalAudioplayersPlatform != _platform) {
      // This will clear all open players on the platform when a full restart is
      // performed.
      _lastGlobalAudioplayersPlatform = _platform;
      _initCompleter = Completer<void>();
      try {
        await _platform.init();
        _initCompleter?.complete();
      } on Exception catch (e, stackTrace) {
        _initCompleter?.completeError(e, stackTrace);
      }
    }
    await _initCompleter?.future;
  }

  /// Sets the audio context that applies globally to all players.
  Future<void> setAudioContext(AudioContext ctx) async {
    await ensureInitialized();
    await _platform.setGlobalAudioContext(ctx);
  }
}

View File

@@ -0,0 +1,92 @@
import 'dart:async';
import 'package:flutter/scheduler.dart';
/// Base class that polls a playback position and publishes it on a broadcast
/// stream. Subclasses decide *when* to poll by implementing [start] and
/// [stop].
abstract class PositionUpdater {
  /// You can use `player.getCurrentPosition` as the [getPosition] parameter.
  PositionUpdater({
    required this.getPosition,
  });

  /// Callback resolving the current position, or `null` when unavailable.
  final Future<Duration?> Function() getPosition;

  final _positionController = StreamController<Duration>.broadcast();

  /// Broadcast stream of position updates.
  Stream<Duration> get positionStream => _positionController.stream;

  /// Polls the position once and emits it; `null` results are skipped.
  Future<void> update() async {
    final current = await getPosition();
    if (current == null) {
      return;
    }
    _positionController.add(current);
  }

  /// Begins periodic position updates.
  void start();

  /// Halts periodic position updates.
  void stop();

  /// Halts updates, then emits one final position.
  Future<void> stopAndUpdate() async {
    stop();
    await update();
  }

  /// Halts updates and closes the position stream.
  Future<void> dispose() async {
    stop();
    await _positionController.close();
  }
}
/// A [PositionUpdater] driven by a periodic [Timer].
class TimerPositionUpdater extends PositionUpdater {
  Timer? _timer;

  /// How often the position stream is refreshed.
  final Duration interval;

  /// Position stream will be updated in the according [interval].
  TimerPositionUpdater({
    required super.getPosition,
    required this.interval,
  });

  @override
  void start() {
    // Replace any previously running timer before scheduling a new one.
    _timer?.cancel();
    _timer = Timer.periodic(interval, (_) async {
      await update();
    });
  }

  @override
  void stop() {
    _timer?.cancel();
    _timer = null;
  }
}
/// A [PositionUpdater] that refreshes the position on every rendered frame.
class FramePositionUpdater extends PositionUpdater {
  /// Id of the currently scheduled frame callback, or `null` when idle.
  int? _frameCallbackId;

  /// Whether updates should keep being scheduled.
  bool _isRunning = false;

  /// Position stream will be updated at every new frame.
  FramePositionUpdater({
    required super.getPosition,
  });

  void _tick(Duration? timestamp) {
    if (_isRunning) {
      // Fire-and-forget: the next frame must be scheduled immediately and
      // must not wait for the platform position query to complete.
      unawaited(update());
      _frameCallbackId =
          SchedulerBinding.instance.scheduleFrameCallback(_tick);
    }
  }

  @override
  void start() {
    _isRunning = true;
    _tick(null);
  }

  @override
  void stop() {
    _isRunning = false;
    final callbackId = _frameCallbackId;
    if (callbackId != null) {
      SchedulerBinding.instance.cancelFrameCallbackWithId(callbackId);
      // Clear the id so a repeated stop() (e.g. stop() followed by
      // dispose(), which calls stop() again) does not attempt to cancel an
      // already-cancelled callback id.
      _frameCallbackId = null;
    }
  }
}

View File

@@ -0,0 +1,100 @@
import 'dart:math';
import 'package:audioplayers/src/audioplayer.dart';
import 'package:flutter/foundation.dart';
/// A generic representation of a source from where audio can be pulled.
///
/// This can be a remote or local URL, an application asset, or the file bytes.
abstract class Source {
  /// Optional MIME type hint passed to the platform player; `null` lets the
  /// platform infer the type itself.
  String? get mimeType;

  /// Applies this source to [player], e.g. by forwarding to the matching
  /// `player.setSourceXxx` method.
  Future<void> setOnPlayer(AudioPlayer player);
}
/// Source representing a remote URL to be played from the Internet.
/// This can be an audio file to be downloaded or an audio stream.
class UrlSource extends Source {
  UrlSource(this.url, {this.mimeType});

  /// Remote URL (or stream address) to play from.
  final String url;

  @override
  final String? mimeType;

  @override
  Future<void> setOnPlayer(AudioPlayer player) =>
      player.setSourceUrl(url, mimeType: mimeType);

  @override
  String toString() {
    // Cap very long URLs (e.g. data URIs) to keep log output readable.
    final shownUrl = url.length > 500 ? url.substring(0, 500) : url;
    return 'UrlSource(url: $shownUrl, mimeType: $mimeType)';
  }
}
/// Source representing the absolute path of a file in the user's device.
class DeviceFileSource extends Source {
  DeviceFileSource(this.path, {this.mimeType});

  /// Absolute path of the local file to play.
  final String path;

  @override
  final String? mimeType;

  @override
  Future<void> setOnPlayer(AudioPlayer player) =>
      player.setSourceDeviceFile(path, mimeType: mimeType);

  @override
  String toString() => 'DeviceFileSource(path: $path, mimeType: $mimeType)';
}
/// Source representing the path of an application asset in your Flutter
/// "assets" folder.
/// Note that a prefix might be applied by your [AudioPlayer]'s audio cache
/// instance.
class AssetSource extends Source {
  AssetSource(this.path, {this.mimeType});

  /// Asset path, relative to the configured audio cache prefix.
  final String path;

  @override
  final String? mimeType;

  @override
  Future<void> setOnPlayer(AudioPlayer player) =>
      player.setSourceAsset(path, mimeType: mimeType);

  @override
  String toString() => 'AssetSource(path: $path, mimeType: $mimeType)';
}
/// Source containing the actual bytes of the media to be played.
class BytesSource extends Source {
  BytesSource(this.bytes, {this.mimeType});

  /// Raw media bytes handed to the platform player.
  final Uint8List bytes;

  @override
  final String? mimeType;

  @override
  Future<void> setOnPlayer(AudioPlayer player) =>
      player.setSourceBytes(bytes, mimeType: mimeType);

  @override
  String toString() {
    // Summarize the payload with a short hash (stable within one run)
    // instead of dumping the raw bytes into logs.
    final digest = Object.hashAll(bytes)
        .toUnsigned(20)
        .toRadixString(16)
        .padLeft(5, '0');
    return 'BytesSource(bytes: $digest, mimeType: $mimeType)';
  }
}

View File

@@ -0,0 +1,12 @@
/// Helper for percent-encoding URI strings without double-encoding them.
extension UriCoder on Uri {
  /// Encodes [uri], unless it already appears to be percent-encoded.
  static String encodeOnce(String uri) {
    try {
      // If decoding changes the string, it must already contain
      // percent-escapes, so return it untouched.
      if (Uri.decodeFull(uri) != uri) return uri;
    } on ArgumentError {
      // Malformed escape sequence: treat the input as un-encoded.
    }
    return Uri.encodeFull(uri);
  }
}

View File

@@ -0,0 +1,58 @@
name: audioplayers
# Resolved as part of a Dart pub workspace (shared resolution with siblings).
resolution: workspace
description: A Flutter plugin to play multiple audio files simultaneously
version: 6.6.0
homepage: https://github.com/bluefireteam/audioplayers
repository: https://github.com/bluefireteam/audioplayers/tree/master/packages/audioplayers
flutter:
  plugin:
    # Federated plugin: each platform delegates to its own implementation
    # package; `ohos` (OpenHarmony) is resolved via a local path below.
    platforms:
      android:
        default_package: audioplayers_android
      ios:
        default_package: audioplayers_darwin
      linux:
        default_package: audioplayers_linux
      macos:
        default_package: audioplayers_darwin
      ohos:
        default_package: audioplayers_ohos
      web:
        default_package: audioplayers_web
      windows:
        default_package: audioplayers_windows
dependencies:
  audioplayers_android: ^5.2.1
  audioplayers_darwin: ^6.4.0
  audioplayers_linux: ^4.2.1
  # Local path dependency: the OHOS implementation is not published to pub.dev.
  audioplayers_ohos:
    path: ../audioplayers_ohos/audioplayers_ohos
  audioplayers_platform_interface: ^7.1.1
  audioplayers_web: ^5.2.0
  audioplayers_windows: ^4.3.0
  file: '>=6.1.0 <8.0.0'
  flutter:
    sdk: flutter
  http: '>=0.13.1 <2.0.0'
  meta: ^1.7.0
  # Git override: path_provider comes from the OpenHarmony SIG fork, not pub.dev.
  path_provider:
    git:
      url: https://gitcode.com/openharmony-sig/flutter_packages.git
      path: packages/path_provider/path_provider
  synchronized: ^3.0.0
  uuid: '>=3.0.7 <5.0.0'
dev_dependencies:
  flame_lint: ^1.4.1
  flutter_test:
    sdk: flutter
environment:
  sdk: ^3.6.0
  flutter: '>=3.27.0'
topics:
  - audio
  - audio-player