I am building a real-time facial landmark detection app using Android's ML Kit. Currently, I can capture video using the Android CameraX library. To enable real-time facial landmark detection on the captured video, I need to extract the video frames and pass them to the ML Kit facial landmark detection function, which will process the frames and detect the landmarks present in the video.
I have attached the code snippet below. Could someone please help me extract the video frames? If there is a better approach, please let me know.
Thank you.
btnVideo.setOnClickListener(v -> {
    // Ignore the click while a recording is already in progress.
    if (mCameraView.isRecording()) {
        return;
    }

    SimpleDateFormat mDateFormat = new SimpleDateFormat("yyyyMMddHHmmss", Locale.US);
    File file = new File(getBatchDirectoryName(), mDateFormat.format(new Date()) + ".mp4");
    mCameraView.setCaptureMode(CameraView.CaptureMode.VIDEO);

    // High-accuracy detector with all landmarks/contours/classifications enabled.
    // NOTE(review): building a new detector on every click leaks native resources;
    // ideally hoist `detector` to a field created once and call detector.close()
    // when the screen is destroyed.
    FaceDetectorOptions highAccuracyOpts =
            new FaceDetectorOptions.Builder()
                    .setPerformanceMode(FaceDetectorOptions.PERFORMANCE_MODE_ACCURATE)
                    .setLandmarkMode(FaceDetectorOptions.LANDMARK_MODE_ALL)
                    .setContourMode(FaceDetectorOptions.CONTOUR_MODE_ALL)
                    .setClassificationMode(FaceDetectorOptions.CLASSIFICATION_MODE_ALL)
                    .build();
    FaceDetector detector = FaceDetection.getClient(highAccuracyOpts);

    // FIX: the original code called InputImage.fromBitmap(bitmap, rotationDegree)
    // once at click time with an undefined `bitmap`, so no video frame was ever
    // analyzed. The CameraX way to get live frames is an ImageAnalysis use case:
    // each camera frame arrives as an ImageProxy, which ML Kit wraps directly via
    // InputImage.fromMediaImage(). Bind this use case to the same camera/lifecycle
    // as the video capture, e.g.
    //   ProcessCameraProvider.bindToLifecycle(owner, cameraSelector,
    //                                         preview, videoCapture, imageAnalysis);
    // NOTE(review): the legacy CameraView widget does not expose an analysis slot —
    // migrating to PreviewView + ProcessCameraProvider is required for this to run
    // alongside recording.
    ImageAnalysis imageAnalysis =
            new ImageAnalysis.Builder()
                    // Keep only the newest frame so detection never lags the camera.
                    .setBackpressureStrategy(ImageAnalysis.STRATEGY_KEEP_ONLY_LATEST)
                    .build();
    imageAnalysis.setAnalyzer(executor, imageProxy -> {
        @SuppressLint("UnsafeOptInUsageError")
        Image mediaImage = imageProxy.getImage();
        if (mediaImage == null) {
            imageProxy.close();
            return;
        }
        // Wrap the raw frame together with its rotation for ML Kit.
        InputImage image = InputImage.fromMediaImage(
                mediaImage, imageProxy.getImageInfo().getRotationDegrees());
        detector.process(image)
                .addOnSuccessListener(faces -> {
                    for (Face face : faces) {
                        Rect bounds = face.getBoundingBox();
                        float rotY = face.getHeadEulerAngleY(); // head rotated right: positive
                        float rotZ = face.getHeadEulerAngleZ(); // head tilted sideways

                        FaceLandmark leftEar = face.getLandmark(FaceLandmark.LEFT_EAR);
                        if (leftEar != null) {
                            PointF leftEarPos = leftEar.getPosition();
                        }

                        // FIX: getContour() may return null when a contour was not
                        // detected — the original dereferenced it unguarded.
                        FaceContour leftEye = face.getContour(FaceContour.LEFT_EYE);
                        if (leftEye != null) {
                            List<PointF> leftEyeContour = leftEye.getPoints();
                        }
                        FaceContour upperLipBottom = face.getContour(FaceContour.UPPER_LIP_BOTTOM);
                        if (upperLipBottom != null) {
                            List<PointF> upperLipBottomContour = upperLipBottom.getPoints();
                        }

                        // Classification results are null when classification failed.
                        if (face.getSmilingProbability() != null) {
                            float smileProb = face.getSmilingProbability();
                        }
                        if (face.getRightEyeOpenProbability() != null) {
                            float rightEyeOpenProb = face.getRightEyeOpenProbability();
                        }
                        if (face.getTrackingId() != null) {
                            int id = face.getTrackingId();
                        }
                    }
                })
                // FIX: failures were silently swallowed; at least log them.
                .addOnFailureListener(e -> Log.e("FaceDetection", "Frame analysis failed", e))
                // FIX: every ImageProxy must be closed (on success AND failure),
                // otherwise the camera stops delivering frames.
                .addOnCompleteListener(task -> imageProxy.close());
    });

    mCameraView.startRecording(file, executor, new VideoCapture.OnVideoSavedCallback() {
        @Override
        public void onVideoSaved(@NonNull OutputFileResults outputFileResults) {
            // Make the finished recording visible in the device gallery.
            galleryAddPic(file, 1);
        }

        @Override
        public void onError(int videoCaptureError, @NonNull String message, @Nullable Throwable cause) {
            // FIX: the error was previously discarded (commented-out log).
            Log.e("FaceDetection", "Video capture error " + videoCaptureError + ": " + message, cause);
            mCameraView.stopRecording();
        }
    });
});