- When CameraX's VideoCapture and ImageAnalysis use cases are bound together, the preview is rotated 90 degrees and the recorded video in the gallery reports a longer duration than its actual length. Why is this happening?
private fun bindUseCase() {
    // (Re)creates and binds the Preview, VideoCapture and ImageAnalysis use
    // cases to this lifecycle. Safe to call repeatedly (e.g. after a lens
    // switch): use cases bound by a previous call are unbound first.
    val provider = cameraProvider ?: return

    // Unbind use cases left over from a previous invocation; binding a use
    // case that is still bound makes bindToLifecycle() throw.
    analysisUseCase?.let { provider.unbind(it) }
    // BUG FIX: videoCapture was never unbound, so any second call to this
    // function would crash in bindToLifecycle() ("use case already bound").
    videoCapture?.let { provider.unbind(it) }
    imageProcessor?.stop()

    if (!PreferenceUtils.isCameraLiveViewportEnabled(this.context)) {
        return
    }
    previewUseCase?.let { provider.unbind(it) }

    previewUseCase = Preview.Builder().build().also {
        it.setSurfaceProvider(previewView!!.surfaceProvider)
    }

    // NOTE(review): Quality.HIGHEST can force a sensor configuration that is
    // incompatible with the concurrent ImageAnalysis stream on some devices —
    // a plausible cause of the rotated preview / wrong recording duration when
    // all three use cases are combined. Consider QualitySelector.from(
    // Quality.HIGHEST, FallbackStrategy.lowerQualityOrHigherThan(...)) — verify
    // on the affected device.
    val recorder = Recorder.Builder()
        .setQualitySelector(QualitySelector.from(Quality.HIGHEST))
        .build()
    videoCapture = VideoCapture.withOutput(recorder)

    imageProcessor =
        try {
            Log.i(TAG, "Using Face Detector Processor")
            val faceDetectorOptions = FaceDetectorOptions.Builder()
                .setClassificationMode(FaceDetectorOptions.CLASSIFICATION_MODE_ALL)
                .enableTracking()
                .build()
            FaceDetectorProcessor(this.context, faceDetectorOptions)
        } catch (e: Exception) {
            Log.e(TAG, "imageProcessor", e)
            return
        }

    analysisUseCase = ImageAnalysis.Builder().build()
    needUpdateGraphicOverlayImageSourceInfo = true
    analysisUseCase?.setAnalyzer(
        // processImageProxy runs the detection on its own thread underneath,
        // so the analyzer callback itself can stay on the main executor.
        ContextCompat.getMainExecutor(this.context),
        ImageAnalysis.Analyzer { imageProxy: ImageProxy ->
            if (needUpdateGraphicOverlayImageSourceInfo) {
                val isImageFlipped = lensFacing == CameraSelector.LENS_FACING_FRONT
                // Swap width/height for 90/270-degree frames so the overlay's
                // coordinate space matches the upright image.
                val rotationDegrees = imageProxy.imageInfo.rotationDegrees
                if (rotationDegrees == 0 || rotationDegrees == 180) {
                    graphicOverlay!!.setImageSourceInfo(imageProxy.width, imageProxy.height, isImageFlipped)
                } else {
                    graphicOverlay!!.setImageSourceInfo(imageProxy.height, imageProxy.width, isImageFlipped)
                }
                needUpdateGraphicOverlayImageSourceInfo = false
            }
            try {
                imageProcessor!!.processImageProxy(imageProxy, graphicOverlay)
            } catch (e: MlKitException) {
                Log.e(TAG, "Failed to process image. Error: " + e.localizedMessage)
            }
        }
    )
    Log.d(TAG, "$cameraProvider, $cameraSelector, $analysisUseCase, $imageProcessor,$previewUseCase")
    provider.bindToLifecycle(this, cameraSelector!!, analysisUseCase, videoCapture, previewUseCase)
}
If you remove `analysisUseCase` from the code above, both the preview orientation and the recording duration come out correctly.
2. I want to draw the face-detection results onto the recorded video frames. Currently the overlay is drawn only on the preview; how can I get the overlay recorded into the video as well?