
I need to play audio to app users when the app receives a Firebase Cloud Messaging (FCM) notification in the background and terminated states. My FCM backgroundHandler is invoked in the background, foreground, and terminated states, and everything works on Android. I have tested every state on the iOS side, where it behaves erratically: it works somewhat better on devices running versions older than iOS 16, but playback is still unreliable.

I think the audio session is not being started, at the discretion of the iOS system. An iOS 15.5 device usually plays the sound when the app receives the notification, but only when developer mode is enabled. I cannot play sound on a device with developer mode disabled.
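To check this suspicion, session.setActive(true) returns a bool that is false when iOS refuses activation, so a sketch like the following (not in my current code) would make a silent denial visible:

import 'package:audio_session/audio_session.dart';
import 'package:flutter/foundation.dart';

/// Sketch: activate the session and log whether iOS actually granted it.
Future<bool> tryActivateSession() async {
  final session = await AudioSession.instance;
  await session.configure(AudioSessionConfiguration.music());
  final activated = await session.setActive(true);
  if (!activated) {
    debugPrint('Audio session activation was denied by the OS');
  }
  return activated;
}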

On the other hand, when the app is terminated the handler is never invoked, so I cannot play the announcement when the app receives the notification.
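If the app is launched by tapping the notification while terminated, FirebaseMessaging.instance.getInitialMessage() should return that message once at startup. A sketch of what I could add, assuming the launch message carries the same data payload and that the backgroundHandler shown below is safe to reuse from the main isolate:

import 'package:firebase_messaging/firebase_messaging.dart';

/// Sketch: handle the message that launched the app from a terminated state.
Future<void> handleLaunchMessage() async {
  final RemoteMessage? initial =
      await FirebaseMessaging.instance.getInitialMessage();
  if (initial != null && initial.data.containsKey('audio_path')) {
    // Assumption: backgroundHandler can be called from the main isolate here.
    await backgroundHandler(initial);
  }
}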

The actual question is: how can I play the announcement when the app receives the notification? The sound files are longer than 1 minute but no longer than 10 minutes.

My minimal code is below.

The FCM background handler, which should start the AudioService:

late AudioHandler _audioHandler;

@pragma('vm:entry-point')
Future<void> backgroundHandler(RemoteMessage message) async {
  WidgetsFlutterBinding.ensureInitialized();
  // The background handler runs in its own isolate, so Firebase must be
  // initialised here as well.
  await Firebase.initializeApp();

  final session = await AudioSession.instance;
  await session.configure(AudioSessionConfiguration.music());
  await session.setActive(true);

  _audioHandler = await AudioService.init(
    builder: () => AudioPlayerHandler(
        notMediaItem: MediaItem(
            id: DateTime.now().microsecondsSinceEpoch.toString(),
            extras: {
              "url": "${ApiConstant.FILES}${message.data["audio_path"]}"
            },
            title: message.data["title"] ?? "Belediye Duyurusu",
            album: message.data["type_name"] ?? "Duyuru",
            artist: message.data["hall_name"] ?? "Belediye",
            artUri: message.data["image_path"] == null
                ? null
                : Uri.tryParse(
                    "${ApiConstant.FILES}${message.data["image_path"]}"))),
    config: const AudioServiceConfig(
      androidNotificationChannelId: 'com.ryanheise.myapp.channel.audio',
      androidNotificationChannelName: 'Audio playback',
      androidNotificationOngoing: true,
    ),
  );

  await _audioHandler.play();
}
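To separate "the handler never fires on iOS" from "the handler fires but playback fails", a thin logging wrapper could be registered instead of the handler above (sketch, not in my current code):

import 'package:firebase_messaging/firebase_messaging.dart';
import 'package:flutter/foundation.dart';

/// Sketch: log before any audio work so handler invocation itself is visible.
@pragma('vm:entry-point')
Future<void> loggingBackgroundHandler(RemoteMessage message) async {
  debugPrint('FCM background handler fired: ${message.messageId}');
  if (!message.data.containsKey('audio_path')) {
    debugPrint('Payload has no audio_path, skipping playback');
    return;
  }
  await backgroundHandler(message);
}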

And the main function:

void main() async {
  WidgetsFlutterBinding.ensureInitialized();
  await Firebase.initializeApp();

  final session = await AudioSession.instance;
  await session.configure(AudioSessionConfiguration.music());
  await session.setActive(true);

  _audioHandler = await AudioService.init(
    builder: () => AudioPlayerHandler(
        notMediaItem: MediaItem(
      id: DateTime.now().microsecondsSinceEpoch.toString(),
      extras: {
        "url": "****url/mp3",
      },
      title: "*****",
      album: "******",
      artist: "*********",
    )),
    config: const AudioServiceConfig(
      androidNotificationChannelId: 'com.ryanheise.myapp.channel.audio',
      androidNotificationChannelName: 'Audio playback',
      androidNotificationOngoing: true,
    ),
  );

  await _audioHandler.play();

  FirebaseMessaging.onBackgroundMessage(backgroundHandler);
  DynamicLinkServices().handledynamiclink();

  FlutterError.onError = FirebaseCrashlytics.instance.recordFlutterError;
  PlatformDispatcher.instance.onError = (error, stack) {
    FirebaseCrashlytics.instance.recordError(error, stack, fatal: true);
    return true;
  };

  // runApp(...) follows here in the full app (omitted in this minimal example).
}
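Since onBackgroundMessage is not called while the app is in the foreground, foreground announcements would normally be routed through an onMessage listener. A sketch of how that could reuse the same playback path (assumption, not my exact code):

import 'package:firebase_messaging/firebase_messaging.dart';

/// Sketch: route foreground data messages through the same playback logic.
void listenForForegroundAnnouncements() {
  FirebaseMessaging.onMessage.listen((RemoteMessage message) async {
    if (message.data.containsKey('audio_path')) {
      await backgroundHandler(message);
    }
  });
}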

I used the sample audio_service handler:

/// An [AudioHandler] for playing a single item.
class AudioPlayerHandler extends BaseAudioHandler with SeekHandler {
  MediaItem? notMediaItem;

  final _player = AudioPlayer();

  /// Initialise our audio handler.
  AudioPlayerHandler({required this.notMediaItem}) {
    final _item = notMediaItem;

    // So that our clients (the Flutter UI and the system notification) know
    // what state to display, here we set up our audio handler to broadcast all
    // playback state changes as they happen via playbackState...
    _player.playbackEventStream.map(_transformEvent).pipe(playbackState);
    // ... and also the current media item via mediaItem.
    mediaItem.add(_item);

    // Load the player from the URL carried in the media item's extras.
    _player.setAudioSource(AudioSource.uri(Uri.parse(_item!.extras!["url"])));
  }

  // In this simple example, we handle only 4 actions: play, pause, seek and
  // stop. Any button press from the Flutter UI, notification, lock screen or
  // headset will be routed through to these 4 methods so that you can handle
  // your audio playback logic in one place.
  @override
  Future<void> onNotificationDeleted() async {
    await stop();
    _player.dispose();
    return super.onNotificationDeleted();
  }

  Future<void> dispose() => _player.dispose();
  @override
  Future<void> onTaskRemoved() async {
    // Stop playback before releasing the player when the task is swiped away.
    await stop();
    await _player.dispose();
  }

  @override
  Future<void> playFromUri(Uri uri, [Map<String, dynamic>? extras]) async {
    await setAudioSource(uri);
    return _player.play();
  }

  @override
  Future<void> play() {
    return _player.play();
  }

  @override
  Future<void> pause() => _player.pause();

  Future<void> setAudioSource(Uri path) async =>
      _player.setAudioSource(AudioSource.uri(path));

  @override
  Future<void> seek(Duration position) => _player.seek(position);

  @override
  Future<void> stop() => _player.stop();

  /// Transform a just_audio event into an audio_service state.
  ///
  /// This method is used from the constructor. Every event received from the
  /// just_audio player will be transformed into an audio_service state so that
  /// it can be broadcast to audio_service clients.
  PlaybackState _transformEvent(PlaybackEvent event) {
    return PlaybackState(
      controls: [
        MediaControl.rewind,
        if (_player.playing) MediaControl.pause else MediaControl.play,
        MediaControl.stop,
        MediaControl.fastForward,
      ],
      systemActions: const {
        MediaAction.seek,
        MediaAction.seekForward,
        MediaAction.seekBackward,
        MediaAction.playPause,
        MediaAction.stop,
        MediaAction.play,
        MediaAction.pause,
      },
      androidCompactActionIndices: const [0, 1, 3],
      processingState: const {
        ProcessingState.idle: AudioProcessingState.idle,
        ProcessingState.loading: AudioProcessingState.loading,
        ProcessingState.buffering: AudioProcessingState.buffering,
        ProcessingState.ready: AudioProcessingState.ready,
        ProcessingState.completed: AudioProcessingState.completed,
      }[_player.processingState]!,
      playing: _player.playing,
      updatePosition: _player.position,
      bufferedPosition: _player.bufferedPosition,
      speed: _player.speed,
      queueIndex: event.currentIndex,
    );
  }
}
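For testing the handler in isolation (sketch; the URL is a placeholder), it can be constructed directly with a hand-built MediaItem:

final testHandler = AudioPlayerHandler(
  notMediaItem: MediaItem(
    id: DateTime.now().microsecondsSinceEpoch.toString(),
    title: 'Test announcement',
    extras: {'url': 'https://example.com/announcement.mp3'}, // placeholder URL
  ),
);
await testHandler.play();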