If I launch the app first and then Android Auto, everything works correctly; even if I then close and reopen the app, it launches fine. If, on the other hand, I open the app on Android Auto first and then open the app on the smartphone, it freezes on the startup screen.
Has anyone had the same problem?
This is my class:
import 'dart:io';

import 'package:audio_service/audio_service.dart';
import 'package:audio_session/audio_session.dart';
import 'package:flutter_carplay/flutter_carplay.dart';
import 'package:just_audio/just_audio.dart';
import 'package:rxdart/rxdart.dart';

// audioPlayer, audioHandler, allMediaItems, getStations() and readSleepTime()
// are defined elsewhere in the app.
class MediaLibrary {
  static const albumsRootId = 'stations';

  final items = <String, List<MediaItem>>{
    AudioService.browsableRootId: allMediaItems,
    albumsRootId: allMediaItems,
  };
}
class AudioPlayerHandler extends BaseAudioHandler with QueueHandler {
  final BehaviorSubject<List<MediaItem>> _recentSubject =
      BehaviorSubject.seeded(<MediaItem>[]);
  final _mediaLibrary = MediaLibrary();
  final isIOS = Platform.isIOS;

  int? get index => audioPlayer.currentIndex;

  AudioPlayerHandler() {
    _init();
  }

  Future<void> _init() async {
    final session = await AudioSession.instance;
    await session.configure(const AudioSessionConfiguration(
      avAudioSessionCategory: AVAudioSessionCategory.playback,
      avAudioSessionMode: AVAudioSessionMode.defaultMode,
      avAudioSessionRouteSharingPolicy:
          AVAudioSessionRouteSharingPolicy.defaultPolicy,
      avAudioSessionSetActiveOptions:
          AVAudioSessionSetActiveOptions.notifyOthersOnDeactivation,
      androidAudioAttributes: AndroidAudioAttributes(
        contentType: AndroidAudioContentType.music,
        flags: AndroidAudioFlags.none,
        usage: AndroidAudioUsage.media,
      ),
      androidAudioFocusGainType: AndroidAudioFocusGainType.gain,
      androidWillPauseWhenDucked: true,
    ));
    await getStations();
    // await Future.delayed(const Duration(seconds: ));
    queue.add(_mediaLibrary.items[MediaLibrary.albumsRootId]!);
    mediaItem
        .whereType<MediaItem>()
        .listen((item) => _recentSubject.add([item]));
    audioPlayer.currentIndexStream.listen((index) {
      if (index != null) mediaItem.add(queue.value[index]);
    });
    audioPlayer.playbackEventStream.listen(_broadcastState);
    audioPlayer.processingStateStream.listen((state) {
      if (state == ProcessingState.completed) stop();
    });
    // await audioHandler.addQueueItems(queue.value);
    try {
      await Future.delayed(const Duration(seconds: 2));
      await audioPlayer.setAudioSource(ConcatenatingAudioSource(
        children: queue.value
            .map((item) => AudioSource.uri(Uri.parse(item.id)))
            .toList(),
      ));
    } catch (e) {
      print("Error: $e");
    }
  }
  @override
  Future<List<MediaItem>> getChildren(String parentMediaId,
      [Map<String, dynamic>? options]) async {
    switch (parentMediaId) {
      case AudioService.recentRootId:
        return _recentSubject.value;
      default:
        return _mediaLibrary.items[parentMediaId]!;
    }
  }

  @override
  ValueStream<Map<String, dynamic>> subscribeToChildren(String parentMediaId) {
    switch (parentMediaId) {
      case AudioService.recentRootId:
        final stream = _recentSubject.map((_) => <String, dynamic>{});
        return _recentSubject.hasValue
            ? stream.shareValueSeeded(<String, dynamic>{})
            : stream.shareValue();
      default:
        return Stream.value(_mediaLibrary.items[parentMediaId])
            .map((_) => <String, dynamic>{})
            .shareValue();
    }
  }
  @override
  Future<void> skipToQueueItem(int index) async {
    if (index < 0 || index >= queue.value.length) return;
    audioPlayer.seek(Duration.zero, index: index);
  }

  @override
  Future<void> skipToPrevious() async {
    if (audioPlayer.currentIndex == 0) {
      audioPlayer.seek(Duration.zero, index: queue.value.length - 1);
    } else {
      audioPlayer.seekToPrevious();
    }
  }

  @override
  Future<void> skipToNext() async {
    if (audioPlayer.currentIndex == queue.value.length - 1) {
      audioPlayer.seek(Duration.zero, index: 0);
    } else {
      audioPlayer.seekToNext();
    }
  }

  @override
  Future<void> updateMediaItem(MediaItem? mediaItem) {
    this.mediaItem.add(mediaItem);
    return Future<void>.value();
  }
  @override
  Future<void> playFromMediaId(String mediaId,
      [Map<String, dynamic>? extras]) async {
    var index = queue.value.indexWhere((element) => element.id == mediaId);
    skipToQueueItem(index);
    audioHandler.play();
  }

  @override
  Future<void> play() async {
    audioPlayer.play();
    // Rebuild the current MediaItem from ICY stream metadata when the
    // station has showMetadata enabled.
    audioPlayer.icyMetadataStream.listen((_) {}).onData((data) async {
      if (data != null &&
          data.info != null &&
          mediaItem.value!.extras!['showMetadata'] == 'true') {
        print('CURRENT SONG: ${data.info!.title}');
        audioHandler.updateMediaItem(
          MediaItem(
            id: mediaItem.value!.id,
            title: data.info!.title!.split('-').last.trim(),
            artist: data.info!.title!.split('-').first.trim(),
            artUri: mediaItem.value!.artUri,
            duration: null,
            extras: {
              "longDesc": mediaItem.value!.extras!['longDesc'],
              "social": mediaItem.value!.extras!['social'],
              "socialMessage": mediaItem.value!.extras!['socialMessage'],
              "socialURL": mediaItem.value!.extras!['socialURL'],
              "website": mediaItem.value!.extras!['website'],
              "showMetadata": mediaItem.value!.extras!['showMetadata'],
              "backgroundColor": mediaItem.value!.extras!['backgroundColor'],
            },
          ),
        );
      }
    });
    readSleepTime();
  }
  @override
  Future<void> pause() async {
    audioPlayer.stop();
    if (isIOS) {
      await FlutterCarplay.popToRoot();
    }
  }

  @override
  Future<void> stop() => audioPlayer.stop();

  void _broadcastState(PlaybackEvent event) {
    final playing = audioPlayer.playing;
    playbackState.add(
      playbackState.value.copyWith(
        controls: [
          MediaControl.skipToPrevious,
          if (playing) MediaControl.pause else MediaControl.play,
          MediaControl.stop,
          MediaControl.skipToNext,
        ],
        androidCompactActionIndices: const [0, 1, 3],
        processingState: const {
          ProcessingState.idle: AudioProcessingState.idle,
          ProcessingState.loading: AudioProcessingState.loading,
          ProcessingState.buffering: AudioProcessingState.buffering,
          ProcessingState.ready: AudioProcessingState.ready,
          ProcessingState.completed: AudioProcessingState.completed,
        }[audioPlayer.processingState]!,
        playing: playing,
        updatePosition: audioPlayer.position,
        bufferedPosition: audioPlayer.bufferedPosition,
        speed: audioPlayer.speed,
        queueIndex: event.currentIndex,
      ),
    );
  }
}
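
For context, the handler is registered at app startup with AudioService.init in the usual audio_service way, roughly like the sketch below (simplified; the notification channel values and the MyApp widget name are placeholders, not my real code):

// Sketch of the assumed startup wiring, not the actual project code.
late AudioPlayerHandler audioHandler;

Future<void> main() async {
  WidgetsFlutterBinding.ensureInitialized();
  // Create and register the handler once, before runApp.
  audioHandler = await AudioService.init(
    builder: () => AudioPlayerHandler(),
    config: const AudioServiceConfig(
      androidNotificationChannelId: 'com.example.app.channel.audio', // placeholder
      androidNotificationChannelName: 'Playback', // placeholder
    ),
  );
  runApp(const MyApp()); // MyApp is a placeholder widget name
}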