This is what I tried, and I get the error below:
Error: MissingPluginException(No implementation found for method getTemporaryDirectory on channel plugins.flutter.io/path_provider) at Object.throw_ [as throw] (localhost:64887/dart_sdk.js:5063:11)
All I want to do is see whether it's possible to concatenate a video from a URL with a bunch of audio layers and play it all from one source, so I can use it later with AirPlay.
Is this possible using ffmpeg or any other tool?
I need help with finding the right packages and tools to do this.
Or should it all be server-side, running on some cloud functions?
What is the best approach here?
NB:
This is my first time with Flutter.
Is this possible with React Native?
import 'dart:io';
import 'dart:async';
import 'dart:developer';
import 'package:flutter/material.dart';
import 'package:flutter_ffmpeg/flutter_ffmpeg.dart';
import 'package:video_player/video_player.dart';
import 'package:path_provider/path_provider.dart';
/// Plays [videoUrl] immediately, then swaps in a locally muxed copy
/// that combines [videoUrl] with [audioUrl] once ffmpeg finishes.
class VideoPlayerScreen extends StatefulWidget {
  /// URL of the source video stream.
  final String videoUrl;

  /// URL of the audio track to mux into the video.
  final String audioUrl;

  // `key` added (optional, backward-compatible) per Flutter convention;
  // const constructor is possible because all fields are final.
  const VideoPlayerScreen({
    Key? key,
    required this.videoUrl,
    required this.audioUrl,
  }) : super(key: key);

  @override
  _VideoPlayerScreenState createState() => _VideoPlayerScreenState();
}
class _VideoPlayerScreenState extends State<VideoPlayerScreen> {
  // Starts by streaming widget.videoUrl directly; once the background
  // ffmpeg mux completes, swaps the controller to the combined local file.
  late VideoPlayerController _controller;
  late Future<void> _initializeVideoPlayerFuture;

  @override
  void initState() {
    super.initState();
    _controller = VideoPlayerController.network(widget.videoUrl);
    _initializeVideoPlayerFuture = _controller.initialize();
    _controller.setLooping(true);
    // Fire-and-forget: the UI plays the plain video while this runs.
    _combineVideoAndAudio();
  }

  @override
  void dispose() {
    _controller.dispose();
    super.dispose();
  }

  /// Creates (if needed) and returns a scratch directory for ffmpeg output.
  ///
  /// NOTE(review): getTemporaryDirectory() needs a path_provider platform
  /// implementation. It throws MissingPluginException on Flutter web (and
  /// after adding the plugin without a full stop/rebuild) — this matches
  /// the error reported above.
  Future<Directory> createTempDir() async {
    final tempDir = await getTemporaryDirectory();
    return Directory('${tempDir.path}/my_temp_dir').create(recursive: true);
  }

  /// Muxes the remote video and audio into one local mp4, then plays it.
  ///
  /// Copies the video stream and transcodes the audio to AAC. On ffmpeg
  /// failure the current (network) playback is left untouched.
  Future<void> _combineVideoAndAudio() async {
    log('combineVideoAndAudio');
    final tempDir = await createTempDir();
    final ffmpeg = FlutterFFmpeg();
    final outputPath = '${tempDir.path}/output.mp4';
    // Quote the URLs/path so spaces and query strings ('?', '&') don't
    // split or corrupt the ffmpeg argument list.
    final command =
        "-i '${widget.videoUrl}' -i '${widget.audioUrl}' "
        "-c:v copy -c:a aac -strict experimental '$outputPath'";
    log("Running the command");
    final rc = await ffmpeg.execute(command);
    // flutter_ffmpeg returns 0 on success; bail out instead of pointing
    // the player at an output file that was never written.
    if (rc != 0) {
      log('ffmpeg failed with return code $rc');
      return;
    }
    log("Command done!");
    // The widget may have been disposed while ffmpeg was running.
    if (!mounted) return;
    final oldController = _controller;
    setState(() {
      _controller = VideoPlayerController.file(File(outputPath));
      _initializeVideoPlayerFuture = _controller.initialize();
      _controller.setLooping(true);
    });
    // Release the network controller we just replaced to avoid leaking
    // its platform resources.
    oldController.dispose();
  }

  @override
  Widget build(BuildContext context) {
    return Scaffold(
      appBar: AppBar(
        title: Text('Video Player'),
      ),
      body: Center(
        // Show the player only once the active controller is initialized.
        child: FutureBuilder(
          future: _initializeVideoPlayerFuture,
          builder: (context, snapshot) {
            if (snapshot.connectionState == ConnectionState.done) {
              return AspectRatio(
                aspectRatio: _controller.value.aspectRatio,
                child: VideoPlayer(_controller),
              );
            } else {
              return CircularProgressIndicator();
            }
          },
        ),
      ),
      floatingActionButton: FloatingActionButton(
        onPressed: () {
          // setState here also refreshes the play/pause icon below.
          setState(() {
            if (_controller.value.isPlaying) {
              _controller.pause();
            } else {
              _controller.play();
            }
          });
        },
        child: Icon(
          _controller.value.isPlaying ? Icons.pause : Icons.play_arrow,
        ),
      ),
    );
  }
}