How do I get the total duration of an audio file/URL with the audio_service package?



I'm using the just_audio and audio_service packages to build a playlist-based music player. The problem I'm facing is getting the total duration of each audio file/URL. I don't pass a duration to the MediaItem instances, so how can I obtain the duration without hard-coding it into each MediaItem?

I leave the duration null or uninitialized:

MediaItem(
  id: 'https://s3.amazonaws.com/scifri-episodes/scifri20181123-episode.mp3',
  album: "Science Friday",
  title: "A Salute To Head-Scratching Science",
  artist: "Science Friday and WNYC Studios",
  // duration: const Duration(milliseconds: 5739820),
  duration: null,
),

Full example:

import 'dart:async';
import 'package:audio_service/audio_service.dart';
import 'package:audio_service_example/common.dart';
import 'package:audio_session/audio_session.dart';
import 'package:flutter/foundation.dart';
import 'package:flutter/material.dart';
import 'package:just_audio/just_audio.dart';
import 'package:rxdart/rxdart.dart';
// You might want to provide this using dependency injection rather than a
// global variable.
late AudioPlayerHandler _audioHandler;
Future<void> main() async {
_audioHandler = await AudioService.init(
builder: () => AudioPlayerHandlerImpl(),
config: const AudioServiceConfig(
androidNotificationChannelId: 'com.ryanheise.myapp.channel.audio',
androidNotificationChannelName: 'Audio playback',
androidNotificationOngoing: true,
),
);
runApp(MyApp());
}
/// The app widget
class MyApp extends StatelessWidget {
@override
Widget build(BuildContext context) {
return MaterialApp(
debugShowCheckedModeBanner: false,
title: 'Audio Service Demo',
theme: ThemeData(primarySwatch: Colors.blue),
home: MainScreen3(),
);
}
}
/// The main screen.
class MainScreen3 extends StatelessWidget {
Stream<Duration> get _bufferedPositionStream => _audioHandler.playbackState
.map((state) => state.bufferedPosition)
.distinct();
Stream<Duration?> get _durationStream =>
_audioHandler.mediaItem.map((item) => item?.duration).distinct();
Stream<PositionData> get _positionDataStream =>
Rx.combineLatest3<Duration, Duration, Duration?, PositionData>(
AudioService.position,
_bufferedPositionStream,
_durationStream,
(position, bufferedPosition, duration) => PositionData(
position, bufferedPosition, duration ?? Duration.zero));
@override
Widget build(BuildContext context) {
return Scaffold(
body: SafeArea(
child: Column(
crossAxisAlignment: CrossAxisAlignment.center,
mainAxisAlignment: MainAxisAlignment.center,
children: [
// MediaItem display
Expanded(
child: StreamBuilder<MediaItem?>(
stream: _audioHandler.mediaItem,
builder: (context, snapshot) {
final mediaItem = snapshot.data;
if (mediaItem == null) return const SizedBox();
return Column(
crossAxisAlignment: CrossAxisAlignment.center,
children: [
if (mediaItem.artUri != null)
Expanded(
child: Padding(
padding: const EdgeInsets.all(8.0),
child: Center(
child: Image.network('${mediaItem.artUri!}'),
),
),
),
Text(mediaItem.album ?? '',
style: Theme.of(context).textTheme.headline6),
Text(mediaItem.title),
],
);
},
),
),
// Playback controls
ControlButtons(_audioHandler),
// A seek bar.
StreamBuilder<PositionData>(
stream: _positionDataStream,
builder: (context, snapshot) {
final positionData = snapshot.data ??
PositionData(Duration.zero, Duration.zero, Duration.zero);
return SeekBar(
duration: positionData.duration,
position: positionData.position,
onChangeEnd: (newPosition) {
_audioHandler.seek(newPosition);
},
);
},
),
const SizedBox(height: 8.0),
// Repeat/shuffle controls
Row(
children: [
StreamBuilder<AudioServiceRepeatMode>(
stream: _audioHandler.playbackState
.map((state) => state.repeatMode)
.distinct(),
builder: (context, snapshot) {
final repeatMode =
snapshot.data ?? AudioServiceRepeatMode.none;
const icons = [
Icon(Icons.repeat, color: Colors.grey),
Icon(Icons.repeat, color: Colors.orange),
Icon(Icons.repeat_one, color: Colors.orange),
];
const cycleModes = [
AudioServiceRepeatMode.none,
AudioServiceRepeatMode.all,
AudioServiceRepeatMode.one,
];
final index = cycleModes.indexOf(repeatMode);
return IconButton(
icon: icons[index],
onPressed: () {
_audioHandler.setRepeatMode(cycleModes[
(cycleModes.indexOf(repeatMode) + 1) %
cycleModes.length]);
},
);
},
),
Expanded(
child: Text(
"Playlist",
style: Theme.of(context).textTheme.headline6,
textAlign: TextAlign.center,
),
),
StreamBuilder<bool>(
stream: _audioHandler.playbackState
.map((state) =>
state.shuffleMode == AudioServiceShuffleMode.all)
.distinct(),
builder: (context, snapshot) {
final shuffleModeEnabled = snapshot.data ?? false;
return IconButton(
icon: shuffleModeEnabled
? const Icon(Icons.shuffle, color: Colors.orange)
: const Icon(Icons.shuffle, color: Colors.grey),
onPressed: () async {
final enable = !shuffleModeEnabled;
await _audioHandler.setShuffleMode(enable
? AudioServiceShuffleMode.all
: AudioServiceShuffleMode.none);
},
);
},
),
],
),
// Playlist
Container(
height: 240.0,
child: StreamBuilder<QueueState>(
stream: _audioHandler.queueState,
builder: (context, snapshot) {
final queueState = snapshot.data ?? QueueState.empty;
final queue = queueState.queue;
return ReorderableListView(
onReorder: (int oldIndex, int newIndex) {
if (oldIndex < newIndex) newIndex--;
_audioHandler.moveQueueItem(oldIndex, newIndex);
},
children: [
for (var i = 0; i < queue.length; i++)
Dismissible(
key: ValueKey(queue[i].id),
background: Container(
color: Colors.redAccent,
alignment: Alignment.centerRight,
child: const Padding(
padding: EdgeInsets.only(right: 8.0),
child: Icon(Icons.delete, color: Colors.white),
),
),
onDismissed: (dismissDirection) {
_audioHandler.removeQueueItemAt(i);
},
child: Material(
color: i == queueState.queueIndex
? Colors.grey.shade300
: null,
child: ListTile(
title: Text(queue[i].title),
onTap: () => _audioHandler.skipToQueueItem(i),
),
),
),
],
);
},
),
),
],
),
),
);
}
}
class ControlButtons extends StatelessWidget {
final AudioPlayerHandler audioHandler;
ControlButtons(this.audioHandler);
@override
Widget build(BuildContext context) {
return Row(
mainAxisSize: MainAxisSize.min,
children: [
IconButton(
icon: const Icon(Icons.volume_up),
onPressed: () {
showSliderDialog(
context: context,
title: "Adjust volume",
divisions: 10,
min: 0.0,
max: 1.0,
value: audioHandler.volume.value,
stream: audioHandler.volume,
onChanged: audioHandler.setVolume,
);
},
),
StreamBuilder<QueueState>(
stream: audioHandler.queueState,
builder: (context, snapshot) {
final queueState = snapshot.data ?? QueueState.empty;
return IconButton(
icon: const Icon(Icons.skip_previous),
onPressed:
queueState.hasPrevious ? audioHandler.skipToPrevious : null,
);
},
),
StreamBuilder<PlaybackState>(
stream: audioHandler.playbackState,
builder: (context, snapshot) {
final playbackState = snapshot.data;
final processingState = playbackState?.processingState;
final playing = playbackState?.playing;
if (processingState == AudioProcessingState.loading ||
processingState == AudioProcessingState.buffering) {
return Container(
margin: const EdgeInsets.all(8.0),
width: 64.0,
height: 64.0,
child: const CircularProgressIndicator(),
);
} else if (playing != true) {
return IconButton(
icon: const Icon(Icons.play_arrow),
iconSize: 64.0,
onPressed: audioHandler.play,
);
} else {
return IconButton(
icon: const Icon(Icons.pause),
iconSize: 64.0,
onPressed: audioHandler.pause,
);
}
},
),
StreamBuilder<QueueState>(
stream: audioHandler.queueState,
builder: (context, snapshot) {
final queueState = snapshot.data ?? QueueState.empty;
return IconButton(
icon: const Icon(Icons.skip_next),
onPressed: queueState.hasNext ? audioHandler.skipToNext : null,
);
},
),
StreamBuilder<double>(
stream: audioHandler.speed,
builder: (context, snapshot) => IconButton(
icon: Text("${snapshot.data?.toStringAsFixed(1)}x",
style: const TextStyle(fontWeight: FontWeight.bold)),
onPressed: () {
showSliderDialog(
context: context,
title: "Adjust speed",
divisions: 10,
min: 0.5,
max: 1.5,
value: audioHandler.speed.value,
stream: audioHandler.speed,
onChanged: audioHandler.setSpeed,
);
},
),
),
],
);
}
}
class QueueState {
static final QueueState empty =
const QueueState([], 0, [], AudioServiceRepeatMode.none);
final List<MediaItem> queue;
final int? queueIndex;
final List<int>? shuffleIndices;
final AudioServiceRepeatMode repeatMode;
const QueueState(
this.queue, this.queueIndex, this.shuffleIndices, this.repeatMode);
bool get hasPrevious =>
repeatMode != AudioServiceRepeatMode.none || (queueIndex ?? 0) > 0;
bool get hasNext =>
repeatMode != AudioServiceRepeatMode.none ||
(queueIndex ?? 0) + 1 < queue.length;
List<int> get indices =>
shuffleIndices ?? List.generate(queue.length, (i) => i);
}
/// An [AudioHandler] for playing a list of podcast episodes.
///
/// This class exposes the interface and not the implementation.
abstract class AudioPlayerHandler implements AudioHandler {
Stream<QueueState> get queueState;
Future<void> moveQueueItem(int currentIndex, int newIndex);
ValueStream<double> get volume;
Future<void> setVolume(double volume);
ValueStream<double> get speed;
}
/// The implementation of [AudioPlayerHandler].
///
/// This handler is backed by a just_audio player. The player's effective
/// sequence is mapped onto the handler's queue, and the player's state is
/// mapped onto the handler's state.
class AudioPlayerHandlerImpl extends BaseAudioHandler
with SeekHandler
implements AudioPlayerHandler {
// ignore: close_sinks
final BehaviorSubject<List<MediaItem>> _recentSubject =
BehaviorSubject.seeded(<MediaItem>[]);
final _mediaLibrary = MediaLibrary();
final _player = AudioPlayer();
final _playlist = ConcatenatingAudioSource(children: []);
@override
final BehaviorSubject<double> volume = BehaviorSubject.seeded(1.0);
@override
final BehaviorSubject<double> speed = BehaviorSubject.seeded(1.0);
final _mediaItemExpando = Expando<MediaItem>();
/// A stream of the current effective sequence from just_audio.
Stream<List<IndexedAudioSource>> get _effectiveSequence => Rx.combineLatest3<
List<IndexedAudioSource>?,
List<int>?,
bool,
List<IndexedAudioSource>?>(_player.sequenceStream,
_player.shuffleIndicesStream, _player.shuffleModeEnabledStream,
(sequence, shuffleIndices, shuffleModeEnabled) {
if (sequence == null) return [];
if (!shuffleModeEnabled) return sequence;
if (shuffleIndices == null) return null;
if (shuffleIndices.length != sequence.length) return null;
return shuffleIndices.map((i) => sequence[i]).toList();
}).whereType<List<IndexedAudioSource>>();
/// Computes the effective queue index taking shuffle mode into account.
int? getQueueIndex(
int? currentIndex, bool shuffleModeEnabled, List<int>? shuffleIndices) {
final effectiveIndices = _player.effectiveIndices ?? [];
final shuffleIndicesInv = List.filled(effectiveIndices.length, 0);
for (var i = 0; i < effectiveIndices.length; i++) {
shuffleIndicesInv[effectiveIndices[i]] = i;
}
return (shuffleModeEnabled &&
((currentIndex ?? 0) < shuffleIndicesInv.length))
? shuffleIndicesInv[currentIndex ?? 0]
: currentIndex;
}
/// A stream reporting the combined state of the current queue and the current
/// media item within that queue.
@override
Stream<QueueState> get queueState =>
Rx.combineLatest3<List<MediaItem>, PlaybackState, List<int>, QueueState>(
queue,
playbackState,
_player.shuffleIndicesStream.whereType<List<int>>(),
(queue, playbackState, shuffleIndices) => QueueState(
queue,
playbackState.queueIndex,
playbackState.shuffleMode == AudioServiceShuffleMode.all
? shuffleIndices
: null,
playbackState.repeatMode,
)).where((state) =>
state.shuffleIndices == null ||
state.queue.length == state.shuffleIndices!.length);
@override
Future<void> setShuffleMode(AudioServiceShuffleMode mode) async {
final enabled = mode == AudioServiceShuffleMode.all;
if (enabled) {
await _player.shuffle();
}
playbackState.add(playbackState.value.copyWith(shuffleMode: mode));
await _player.setShuffleModeEnabled(enabled);
}
@override
Future<void> setRepeatMode(AudioServiceRepeatMode repeatMode) async {
playbackState.add(playbackState.value.copyWith(repeatMode: repeatMode));
await _player.setLoopMode(LoopMode.values[repeatMode.index]);
}
@override
Future<void> setSpeed(double speed) async {
this.speed.add(speed);
await _player.setSpeed(speed);
}
@override
Future<void> setVolume(double volume) async {
this.volume.add(volume);
await _player.setVolume(volume);
}
AudioPlayerHandlerImpl() {
_init();
}
Future<void> _init() async {
final session = await AudioSession.instance;
await session.configure(const AudioSessionConfiguration.speech());
// Broadcast speed changes. Debounce so that we don't flood the notification
// with updates.
speed.debounceTime(const Duration(milliseconds: 250)).listen((speed) {
playbackState.add(playbackState.value.copyWith(speed: speed));
});
// Load and broadcast the initial queue
await updateQueue(_mediaLibrary.items[MediaLibrary.albumsRootId]!);
// For Android 11, record the most recent item so it can be resumed.
mediaItem
.whereType<MediaItem>()
.listen((item) => _recentSubject.add([item]));
// Broadcast media item changes.
Rx.combineLatest4<int?, List<MediaItem>, bool, List<int>?, MediaItem?>(
_player.currentIndexStream,
queue,
_player.shuffleModeEnabledStream,
_player.shuffleIndicesStream,
(index, queue, shuffleModeEnabled, shuffleIndices) {
final queueIndex =
getQueueIndex(index, shuffleModeEnabled, shuffleIndices);
return (queueIndex != null && queueIndex < queue.length)
? queue[queueIndex]
: null;
}).whereType<MediaItem>().distinct().listen(mediaItem.add);
// Propagate all events from the audio player to AudioService clients.
_player.playbackEventStream.listen(_broadcastState);
_player.shuffleModeEnabledStream
.listen((enabled) => _broadcastState(_player.playbackEvent));
// In this example, the service stops when reaching the end.
_player.processingStateStream.listen((state) {
if (state == ProcessingState.completed) {
stop();
_player.seek(Duration.zero, index: 0);
}
});
// Broadcast the current queue.
_effectiveSequence
.map((sequence) =>
sequence.map((source) => _mediaItemExpando[source]!).toList())
.pipe(queue);
// Load the playlist.
_playlist.addAll(queue.value.map(_itemToSource).toList());
await _player.setAudioSource(_playlist);
}
AudioSource _itemToSource(MediaItem mediaItem) {
final audioSource = AudioSource.uri(Uri.parse(mediaItem.id));
_mediaItemExpando[audioSource] = mediaItem;
return audioSource;
}
List<AudioSource> _itemsToSources(List<MediaItem> mediaItems) =>
mediaItems.map(_itemToSource).toList();
@override
Future<List<MediaItem>> getChildren(String parentMediaId,
[Map<String, dynamic>? options]) async {
switch (parentMediaId) {
case AudioService.recentRootId:
// When the user resumes a media session, tell the system what the most
// recently played item was.
return _recentSubject.value;
default:
// Allow client to browse the media library.
return _mediaLibrary.items[parentMediaId]!;
}
}
@override
ValueStream<Map<String, dynamic>> subscribeToChildren(String parentMediaId) {
switch (parentMediaId) {
case AudioService.recentRootId:
final stream = _recentSubject.map((_) => <String, dynamic>{});
return _recentSubject.hasValue
? stream.shareValueSeeded(<String, dynamic>{})
: stream.shareValue();
default:
return Stream.value(_mediaLibrary.items[parentMediaId])
.map((_) => <String, dynamic>{})
.shareValue();
}
}
@override
Future<void> addQueueItem(MediaItem mediaItem) async {
await _playlist.add(_itemToSource(mediaItem));
}
@override
Future<void> addQueueItems(List<MediaItem> mediaItems) async {
await _playlist.addAll(_itemsToSources(mediaItems));
}
@override
Future<void> insertQueueItem(int index, MediaItem mediaItem) async {
await _playlist.insert(index, _itemToSource(mediaItem));
}
@override
Future<void> updateQueue(List<MediaItem> newQueue) async {
await _playlist.clear();
await _playlist.addAll(_itemsToSources(newQueue));
}
@override
Future<void> updateMediaItem(MediaItem mediaItem) async {
final index = queue.value.indexWhere((item) => item.id == mediaItem.id);
_mediaItemExpando[_player.sequence![index]] = mediaItem;
}
@override
Future<void> removeQueueItem(MediaItem mediaItem) async {
final index = queue.value.indexOf(mediaItem);
await _playlist.removeAt(index);
}
@override
Future<void> moveQueueItem(int currentIndex, int newIndex) async {
await _playlist.move(currentIndex, newIndex);
}
@override
Future<void> skipToNext() => _player.seekToNext();
@override
Future<void> skipToPrevious() => _player.seekToPrevious();
@override
Future<void> skipToQueueItem(int index) async {
if (index < 0 || index >= _playlist.children.length) return;
// This jumps to the beginning of the queue item at [index].
_player.seek(Duration.zero,
index: _player.shuffleModeEnabled
? _player.shuffleIndices![index]
: index);
}
@override
Future<void> play() => _player.play();
@override
Future<void> pause() => _player.pause();
@override
Future<void> seek(Duration position) => _player.seek(position);
@override
Future<void> stop() async {
await _player.stop();
await playbackState.firstWhere(
(state) => state.processingState == AudioProcessingState.idle);
}
/// Broadcasts the current state to all clients.
void _broadcastState(PlaybackEvent event) {
final playing = _player.playing;
final queueIndex = getQueueIndex(
event.currentIndex, _player.shuffleModeEnabled, _player.shuffleIndices);
playbackState.add(playbackState.value.copyWith(
controls: [
MediaControl.skipToPrevious,
if (playing) MediaControl.pause else MediaControl.play,
MediaControl.stop,
MediaControl.skipToNext,
],
systemActions: const {
MediaAction.seek,
MediaAction.seekForward,
MediaAction.seekBackward,
},
androidCompactActionIndices: const [0, 1, 3],
processingState: const {
ProcessingState.idle: AudioProcessingState.idle,
ProcessingState.loading: AudioProcessingState.loading,
ProcessingState.buffering: AudioProcessingState.buffering,
ProcessingState.ready: AudioProcessingState.ready,
ProcessingState.completed: AudioProcessingState.completed,
}[_player.processingState]!,
playing: playing,
updatePosition: _player.position,
bufferedPosition: _player.bufferedPosition,
speed: _player.speed,
queueIndex: queueIndex,
));
}
}
/// Provides access to a library of media items. In your app, this could come
/// from a database or web service.
class MediaLibrary {
static const albumsRootId = 'albums';
final items = <String, List<MediaItem>>{
AudioService.browsableRootId: const [
MediaItem(
id: albumsRootId,
title: "Albums",
playable: false,
),
],
albumsRootId: [
MediaItem(
id: 'https://s3.amazonaws.com/scifri-episodes/scifri20181123-episode.mp3',
album: "Science Friday",
title: "A Salute To Head-Scratching Science",
artist: "Science Friday and WNYC Studios",
duration: const Duration(milliseconds: 5739820),
artUri: Uri.parse(
'https://media.wnyc.org/i/1400/1400/l/80/1/ScienceFriday_WNYCStudios_1400.jpg'),
),
MediaItem(
id: 'https://s3.amazonaws.com/scifri-segments/scifri201711241.mp3',
album: "Science Friday",
title: "From Cat Rheology To Operatic Incompetence",
artist: "Science Friday and WNYC Studios",
duration: const Duration(milliseconds: 2856950),
artUri: Uri.parse(
'https://media.wnyc.org/i/1400/1400/l/80/1/ScienceFriday_WNYCStudios_1400.jpg'),
),
MediaItem(
id: 'https://s3.amazonaws.com/scifri-segments/scifri202011274.mp3',
album: "Science Friday",
title: "Laugh Along At Home With The Ig Nobel Awards",
artist: "Science Friday and WNYC Studios",
duration: const Duration(milliseconds: 1791883),
artUri: Uri.parse(
'https://media.wnyc.org/i/1400/1400/l/80/1/ScienceFriday_WNYCStudios_1400.jpg'),
),
],
};
}

When an AudioSource is loaded into an AudioPlayer, the AudioPlayer reports its total duration through durationStream.

You can simply listen to that durationStream and copy the duration it emits into the MediaItem before you sink the item into your AudioHandler's mediaItem stream.
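As a minimal sketch of that behaviour on its own, using only the just_audio API (the helper name printDuration is made up for illustration), a standalone AudioPlayer exposes the duration both as the return value of setUrl and through durationStream:

import 'package:just_audio/just_audio.dart';

Future<void> printDuration() async {
  final player = AudioPlayer();
  // durationStream emits null until the source is loaded, then the real duration.
  player.durationStream.listen((duration) {
    print('durationStream emitted: $duration');
  });
  // setUrl loads the source and returns its duration (or null if unknown).
  final duration = await player.setUrl(
      'https://s3.amazonaws.com/scifri-episodes/scifri20181123-episode.mp3');
  print('Total duration: $duration');
  await player.dispose();
}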

In this example, change the part of the _init method (around line 475 of the original example source, the "// Broadcast media item changes" block) that sinks the MediaItem into the mediaItem stream, as shown below.

Before:

// Broadcast media item changes.
Rx.combineLatest4<int?, List<MediaItem>, bool, List<int>?, MediaItem?>(
_player.currentIndexStream,
queue,
_player.shuffleModeEnabledStream,
_player.shuffleIndicesStream,
(index, queue, shuffleModeEnabled, shuffleIndices) {
final queueIndex =
getQueueIndex(index, shuffleModeEnabled, shuffleIndices);
return (queueIndex != null && queueIndex < queue.length)
? queue[queueIndex]
: null;
}).whereType<MediaItem>().distinct().listen(mediaItem.add);

After:

// Broadcast media item changes.
Rx.combineLatest5<int?, List<MediaItem>, bool, List<int>?, Duration?,
MediaItem?>(
_player.currentIndexStream,
queue,
_player.shuffleModeEnabledStream,
_player.shuffleIndicesStream,
_player.durationStream, // <- add listening to durationStream here
(index, queue, shuffleModeEnabled, shuffleIndices, duration) {
final queueIndex =
getQueueIndex(index, shuffleModeEnabled, shuffleIndices);
return (queueIndex != null && queueIndex < queue.length)
? queue[queueIndex].copyWith(duration: duration) // <- sink mediaItem provided with duration
: null;
}).whereType<MediaItem>().distinct().listen(mediaItem.add);
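Note that this only fills in the duration of the item currently loaded by the player; items that have never been played still carry a null duration in the queue. If you want every duration up front (for example to show them in the playlist UI), one possible workaround, sketched below with a hypothetical withResolvedDuration helper, is to pre-load each URL in a temporary AudioPlayer and copy the returned duration into the MediaItem before adding it to the queue. This costs one extra load per item, so it may not be worth it for long playlists.

import 'package:audio_service/audio_service.dart';
import 'package:just_audio/just_audio.dart';

/// Hypothetical helper: load the item's URL into a temporary player and
/// return a copy of the MediaItem with the reported duration filled in.
Future<MediaItem> withResolvedDuration(MediaItem item) async {
  final tempPlayer = AudioPlayer();
  try {
    final duration = await tempPlayer.setUrl(item.id); // Duration? once loaded
    return item.copyWith(duration: duration);
  } finally {
    await tempPlayer.dispose();
  }
}

You could then map your MediaLibrary items through this helper before passing them to updateQueue.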
