I'm using a Samsung M32 device and I'm not getting the lock screen player controls,
and
I am using audio_service and just_audio to play .mp3 files on Android devices.
And Also
I am not seeing this issue on any device other than the Samsung one.
below is my code
AudioPlayerHandler? audioHandler;
class AudioPlayerHandler extends BaseAudioHandler with SeekHandler {
final _player = AudioPlayer();
AudioPlayerHandler(VideoAudioContent daf, Duration audioDuration) {
_player.playbackEventStream.map(_transformEvent).pipe(playbackState);
mediaItem.add(
MediaItem(
id: daf.audioUrl.toString(),
album: daf.subtitle,
title: daf.title,
artist: daf.subtitle,
artUri: Uri.parse(daf.audioThumbnail),
duration: audioDuration,
),
);
_player.setAudioSource(AudioSource.uri(Uri.parse(daf.audioUrl ?? "")));
}
@override
Future<void> setSpeed(double speed) {
return _player.setSpeed(speed);
}
@override
Future<void> play() => _player.play();
@override
Future<void> pause() => _player.pause();
@override
Future<void> seek(Duration position) => _player.seek(position);
@override
Future<void> stop() async {
_player.stop();
}
@override
Future<void> dispose() async {
customAction('dispose',null);
_player.dispose();
}
PlaybackState _transformEvent(PlaybackEvent event) {
return PlaybackState(
controls: [
MediaControl.rewind,
if (_player.playing) MediaControl.pause else MediaControl.play,
MediaControl.stop,
MediaControl.fastForward,
],
systemActions: const {
MediaAction.seek,
MediaAction.seekForward,
MediaAction.seekBackward,
},
androidCompactActionIndices: const [0, 1, 3],
processingState: const {
ProcessingState.idle: AudioProcessingState.idle,
ProcessingState.loading: AudioProcessingState.loading,
ProcessingState.buffering: AudioProcessingState.buffering,
ProcessingState.ready: AudioProcessingState.ready,
ProcessingState.completed: AudioProcessingState.completed,
}[_player.processingState]!,
playing: _player.playing,
updatePosition: _player.position,
bufferedPosition: _player.bufferedPosition,
speed: _player.speed,
queueIndex: event.currentIndex,
);
}
}
I have used common code from documentation
I created a handler that does everything in the same class, so all I have to do is access audioHandler for play/pause actions. It extends the BaseAudioHandler class provided by audio_service, so it handles the notification and service work on its own.
UPDATE:
Now I have found out that if I play audio for the first time it works perfectly, but after stopping playback and playing again it does not work.
It does not even work in the example given by the audio_service package on pub.dev.