I have a problem playing a video in a web browser: when the file is loaded into a video component on the page, it does not play at all. The file is a screen recording captured on an Android device with MediaRecorder and MediaProjection. Here is the code showing how I initialize the MediaRecorder:
public class ScreenRecordService extends Service {

    private static final String TAG = ScreenRecordService.class.getSimpleName();
    private static final SparseIntArray ORIENTATIONS = new SparseIntArray();
    private static final int DISPLAY_WIDTH = 960;
    private static final int DISPLAY_HEIGHT = 540;

    private float mDensity;
    private int mRotation;
    private boolean mIsRecording;
    private MediaProjectionManager mProjectionManager;
    private MediaProjection mMediaProjection;
    private VirtualDisplay mVirtualDisplay;
    private MediaProjectionCallback mMediaProjectionCallback;
    private MediaRecorder mMediaRecorder;
    private String mFilePath;

    static {
        ORIENTATIONS.append(Surface.ROTATION_0, 90);
        ORIENTATIONS.append(Surface.ROTATION_90, 0);
        ORIENTATIONS.append(Surface.ROTATION_180, 270);
        ORIENTATIONS.append(Surface.ROTATION_270, 180);
    }

    private class MediaProjectionCallback extends MediaProjection.Callback {
        @Override
        public void onStop() {
            try {
                if (mIsRecording) {
                    mIsRecording = false;
                    mMediaRecorder.stop();
                    mMediaRecorder.reset();
                }
                mMediaProjection = null;
                stopScreenSharing();
                HermesEventBus.getDefault().post(new EventRecorder.Server(EventRecorder.SERVER_STOP_SUCCESS));
            } catch (Exception e) {
                e.printStackTrace();
                HermesEventBus.getDefault().post(new EventRecorder.Server(EventRecorder.SERVER_STOP_FAIL));
            }
        }
    }

    @Subscribe(threadMode = ThreadMode.MAIN)
    public void onStopCall(EventRecorder.Client clientEvent) {
        if (clientEvent.messageType == EventRecorder.CLIENT_STOP_RECORD) {
            stopRecording();
        }
    }

    @Nullable
    @Override
    public IBinder onBind(Intent intent) {
        return null;
    }

    @Override
    public void onCreate() {
        super.onCreate();
        HermesEventBus.getDefault().register(this);
        AppManager.getInstance().addService(this);
        mProjectionManager = (MediaProjectionManager) getSystemService(MEDIA_PROJECTION_SERVICE);
        mMediaProjectionCallback = new MediaProjectionCallback();
    }

    @Override
    public int onStartCommand(Intent intent, int flags, int startId) {
        if (mProjectionManager == null) {
            mProjectionManager = (MediaProjectionManager) getSystemService(MEDIA_PROJECTION_SERVICE);
        }
        if (intent != null) {
            mDensity = intent.getFloatExtra("density", 0f);
            mRotation = intent.getIntExtra("rotation", 0);
            mFilePath = intent.getStringExtra(Const.Intent.INFO);
            JLog.d(TAG, mFilePath);
            startRecording(intent);
        }
        return START_NOT_STICKY;
    }

    @Override
    public void onDestroy() {
        super.onDestroy();
        AppManager.getInstance().removeService(this);
    }

    private void startRecording(Intent intent) {
        try {
            if (!mIsRecording) {
                mMediaProjection = mProjectionManager.getMediaProjection(RESULT_OK, intent);
                mMediaProjection.registerCallback(mMediaProjectionCallback, null);
                initRecorder();
                mVirtualDisplay = createVirtualDisplay();
                mMediaRecorder.start();
                mIsRecording = true;
                HermesEventBus.getDefault().post(new EventRecorder.Server(EventRecorder.SERVER_START_SUCCESS));
            }
        } catch (Exception e) {
            e.printStackTrace();
            mIsRecording = false;
            HermesEventBus.getDefault().post(new EventRecorder.Server(EventRecorder.SERVER_START_FAIL));
        }
    }

    private void stopRecording() {
        try {
            if (mIsRecording) {
                mMediaRecorder.stop();
                mMediaRecorder.reset();
                stopScreenSharing();
                HermesEventBus.getDefault().post(new EventRecorder.Server(EventRecorder.SERVER_STOP_SUCCESS));
            }
        } catch (Exception e) {
            e.printStackTrace();
            mIsRecording = false;
            if (mMediaRecorder != null) {
                mMediaRecorder.reset();
            }
            stopScreenSharing();
            HermesEventBus.getDefault().post(new EventRecorder.Server(EventRecorder.SERVER_STOP_FAIL));
        }
    }

    private VirtualDisplay createVirtualDisplay() {
        return mMediaProjection.createVirtualDisplay(getString(R.string.video_record), DISPLAY_WIDTH, DISPLAY_HEIGHT, (int) mDensity,
                DisplayManager.VIRTUAL_DISPLAY_FLAG_AUTO_MIRROR, mMediaRecorder.getSurface(), null, null);
    }

    private void stopScreenSharing() {
        if (mVirtualDisplay == null) {
            return;
        }
        mVirtualDisplay.release();
        destroyMediaProjection();
        mIsRecording = false;
    }

    private void initRecorder() {
        int bitRateQuality = PrefsUtils.getInstance(this, Const.Pref.FILE_COMMON).getInt(Const.Pref.KEY_RECORD_BITRATE, Const.Setting.QUALITY_MID);
        int bitRate;
        if (bitRateQuality == Const.Setting.QUALITY_HIGH) {
            bitRate = 1536000;
        } else if (bitRateQuality == Const.Setting.QUALITY_MID) {
            bitRate = 1024 * 1024;
        } else {
            bitRate = 512000;
        }
        if (mMediaRecorder == null) {
            mMediaRecorder = new MediaRecorder();
        }
        try {
            mMediaRecorder.setVideoSource(MediaRecorder.VideoSource.SURFACE);
            mMediaRecorder.setOutputFormat(MediaRecorder.OutputFormat.MPEG_4); //THREE_GPP
            mMediaRecorder.setOutputFile(mFilePath);
            mMediaRecorder.setVideoSize(DISPLAY_WIDTH, DISPLAY_HEIGHT);
            mMediaRecorder.setVideoEncoder(MediaRecorder.VideoEncoder.H264);
            mMediaRecorder.setVideoFrameRate(8); // 30
            mMediaRecorder.setVideoEncodingBitRate(bitRate);
            int orientation = ORIENTATIONS.get(mRotation + 90);
            mMediaRecorder.setOrientationHint(orientation);
            mMediaRecorder.prepare();
            mMediaRecorder.setOnInfoListener((mr, what, extra) -> {
                if (what == MEDIA_RECORDER_INFO_MAX_FILESIZE_REACHED) {
                    stopRecording();
                }
            });
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    private void destroyMediaProjection() {
        if (mMediaProjection != null) {
            mMediaProjection.unregisterCallback(mMediaProjectionCallback);
            mMediaProjection.stop();
            mMediaProjection = null;
        }
        JLog.i(TAG, "MediaProjection Stopped");
    }
}
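For context, the service is started from an Activity after the user grants screen-capture permission; the permission result Intent is reused as the service Intent so that getMediaProjection(RESULT_OK, intent) works inside onStartCommand(). Below is a minimal sketch of that caller side (not the exact code from my project; REQUEST_SCREEN_CAPTURE and outputFilePath are placeholders):

// Sketch of the Activity side (placeholders: REQUEST_SCREEN_CAPTURE, outputFilePath).
private static final int REQUEST_SCREEN_CAPTURE = 1001;
private String outputFilePath; // absolute path of the .mp4 the service should write

private void requestScreenCapture() {
    MediaProjectionManager mpm =
            (MediaProjectionManager) getSystemService(MEDIA_PROJECTION_SERVICE);
    startActivityForResult(mpm.createScreenCaptureIntent(), REQUEST_SCREEN_CAPTURE);
}

@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
    super.onActivityResult(requestCode, resultCode, data);
    if (requestCode == REQUEST_SCREEN_CAPTURE && resultCode == RESULT_OK && data != null) {
        DisplayMetrics metrics = getResources().getDisplayMetrics();
        // Reuse the permission result Intent as the service Intent and attach the
        // extras that onStartCommand() reads ("density", "rotation", Const.Intent.INFO).
        data.setClass(this, ScreenRecordService.class);
        data.putExtra("density", (float) metrics.densityDpi);
        data.putExtra("rotation", getWindowManager().getDefaultDisplay().getRotation());
        data.putExtra(Const.Intent.INFO, outputFilePath);
        startService(data);
    }
}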
And here is the file I uploaded.
http://eachdoctorvideotest.oss-cn-shenzhen.aliyuncs.com/1103/videoRecord/input/ali_TVM1103vRecordIn20190212154904841.mp4
Just paste the URL into any browser and you will see that the file does not play (I use Chrome, which cannot play it, although Safari can). However, the same file plays fine in any third-party media player on a PC. So what exactly is the problem that prevents the file from being played in the browser?
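To give a bit more context on what the recorder actually produced, the track formats of the file can be dumped with MediaExtractor; this is only a quick diagnostic sketch, and the path below is a placeholder for wherever the recording is stored on the device:

// Diagnostic sketch: list the tracks and their formats of the recorded file.
// The path below is a placeholder.
MediaExtractor extractor = new MediaExtractor();
try {
    extractor.setDataSource("/sdcard/ali_TVM1103vRecordIn20190212154904841.mp4");
    for (int i = 0; i < extractor.getTrackCount(); i++) {
        MediaFormat format = extractor.getTrackFormat(i);
        Log.d("ScreenRecord", "track " + i + ": "
                + format.getString(MediaFormat.KEY_MIME) + " -> " + format);
    }
} catch (IOException e) {
    e.printStackTrace();
} finally {
    extractor.release();
}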
This video file was originally generated from two files (a video track and an audio track). I used mp4parser to combine these tracks; you can find the library here:
https://github.com/sannies/mp4parser
Here is the key code I used to combine them:
public boolean muxAacMp4(String mp4Path, String aacPath, String outPath) {
    boolean flag = false;
    try {
        AACTrackImpl aacTrack = new AACTrackImpl(new FileDataSourceImpl(aacPath));
        Movie videoMovie = MovieCreator.build(mp4Path);
        Track videoTracks = null;
        for (Track videoMovieTrack : videoMovie.getTracks()) {
            if ("vide".equals(videoMovieTrack.getHandler())) {
                videoTracks = videoMovieTrack;
            }
        }
        Movie resultMovie = new Movie();
        resultMovie.addTrack(videoTracks);
        resultMovie.addTrack(aacTrack);
        Container out = new DefaultMp4Builder().build(resultMovie);
        FileOutputStream fos = new FileOutputStream(new File(outPath));
        out.writeContainer(fos.getChannel());
        fos.close();
        flag = true;
        Log.e("update_tag", "merge finish");
    } catch (Exception e) {
        e.printStackTrace();
        flag = false;
    }
    return flag;
}
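For reference, it is called roughly like this once both the screen recording and the separately recorded audio exist (the paths here are illustrative, not the real ones from my project):

// Illustrative call: mp4Path is the video-only recording written by MediaRecorder,
// aacPath is the separately recorded audio, and outPath is the merged file
// that I then upload. The real paths in my project differ.
String mp4Path = "/sdcard/record/video_only.mp4";
String aacPath = "/sdcard/record/audio.aac";
String outPath = "/sdcard/record/merged.mp4";
boolean merged = muxAacMp4(mp4Path, aacPath, outPath);
Log.d("update_tag", "mux finished, success = " + merged);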