0

I want to save image only if face is detected, currently it is saving even if face is not present. I want to capture only if the face detected, I am using android face API for face detection.

My code:here i want to save image only if there are faces that is in else block i need to put my code. according to my code if i start capture session with in the else block i cant access readerListener so how to do this.

      final ImageReader.OnImageAvailableListener readerListener = new ImageReader.OnImageAvailableListener() {

     @Override
                public void onImageAvailable(ImageReader reader) {
                    Image image = null;
                    try {
                        image = reader.acquireLatestImage();
                        ByteBuffer buffer = image.getPlanes()[0].getBuffer();
                        byte[] bytes = new byte[buffer.capacity()];
                        buffer.get(bytes);
                        save(bytes);
                        mBitmapToSave1 = BitmapFactory.decodeByteArray(bytes, 0, bytes.length);
                        mBitmapToSave = BitmapFactory.decodeByteArray(bytes, 0, bytes.length);
                        Bitmap scaled = Bitmap.createScaledBitmap(mBitmapToSave, width, height, true);
                        int w = scaled.getWidth();
                        int h = scaled.getHeight();
                        // Setting post rotate to 90
                        Matrix mtx = new Matrix();
                        mtx.postRotate(-180);
                        // Rotating Bitmap
                        mBitmapToSave = Bitmap.createBitmap(scaled, 0, 0, w, h, mtx, true);
                        // mBitmapToSave = Bitmap.createBitmap(width+rowPadding/pixelStride,height, Bitmap.Config.RGB_565);
                        // mBitmapToSave.copyPixelsToBuffer(buffer);

                        if (detector.isOperational() && mBitmapToSave != null) {
                            Frame frame = new Frame.Builder().setBitmap(mBitmapToSave).build();
                            SparseArray<Face> faces = detector.detect(frame);
                            for (index = 0; index < faces.size(); ++index) {
                                Face face = faces.valueAt(index);
                            }if (faces.size() == 0) {
                                MediaPlayer mediaPlayer = MediaPlayer.create(getApplicationContext(), R.raw.not);
                                mediaPlayer.start();
                                //Toast.makeText(AndroidCamera2API.this, "Face Not detected Adjust Camera Properly", Toast.LENGTH_SHORT).show();
                            } else {
                                Toast.makeText(AndroidCamera2API.this, "Face Found " + "\n", Toast.LENGTH_SHORT).show();
                            }

                            }
                        }catch(FileNotFoundException e){
                            e.printStackTrace();
                        } catch(IOException e){
                            e.printStackTrace();
                        } finally{
                            if (image != null) {
                                image.close();
                            }
                        }
                    }

                private void save(byte[] bytes) throws IOException {
                    OutputStream output = null;
                    try {
                        output = new FileOutputStream(file);
                        output.write(bytes);
                    } finally {
                        if (null != output) {
                            output.close();
                        }
                    }
                }
            };
            reader.setOnImageAvailableListener(readerListener, mBackgroundHandler);
            final CameraCaptureSession.CaptureCallback captureListener = new CameraCaptureSession.CaptureCallback() {
                @Override
                public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request, TotalCaptureResult result) {
                    super.onCaptureCompleted(session, request, result);
                    Toast.makeText(AndroidCamera2API.this, "Saved:" + file, Toast.LENGTH_SHORT).show();
                    uploadMultipart();
                    createCameraPreview();

                }
 }
            };
            cameraDevice.createCaptureSession(outputSurfaces, new CameraCaptureSession.StateCallback() {
                @Override
                public void onConfigured(CameraCaptureSession session) {
                    try {
                        session.capture(captureBuilder.build(), captureListener, mBackgroundHandler);
                    } catch (CameraAccessException e) {
                        e.printStackTrace();
                    }
                }

                @Override
                public void onConfigureFailed(CameraCaptureSession session) {
                }
            }, mBackgroundHandler);
            mBitmapToSave = null;
        } catch(CameraAccessException e){
                e.printStackTrace();
            }
        }
Abhi
  • 193
  • 1
  • 2
  • 11
  • 3
    We can help, but it's your job to work on this in first place. [Put some efforts](http://meta.stackoverflow.com/questions/261592) first, then ask with a clear explanation and [MCV example](http://stackoverflow.com/help/mcve) if applicable. – Milan Chheda Aug 09 '17 at 10:37
  • @MilanChheda i have updated my problem statement help me regarding. – Abhi Aug 10 '17 at 10:23

0 Answers0