
I'm trying to apply a DFT to an image captured by the camera. The main problem is that the function crashes at this line:

Core.add(magI, Mat.ones(padded.rows(), padded.cols(), CvType.CV_64FC1), magI2); // switch to logarithmic scale

and gives the following error:

A/libc: Fatal signal 11 (SIGSEGV), code 1, fault addr 0x16 in tid 17590 (e_androidstudio)

The DFT code itself is supposed to work, since I copied it from this solution: Convert OpenCv DFT example from C++ to Android
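
Stripped of the surrounding Activity code, the failing step boils down to the snippet below (a minimal sketch with the same sizes and type as in my log output further down; magI is just a stand-in for the magnitude matrix):

// The failing step in isolation: adding a full-size matrix of ones to the
// magnitude matrix (both are 2592x1944 CV_64FC1 here, roughly 40 MB of doubles each).
Mat magI  = new Mat(1944, 2592, CvType.CV_64FC1);   // stand-in for the magnitude matrix
Mat ones  = Mat.ones(1944, 2592, CvType.CV_64FC1);
Mat magI2 = new Mat(magI.size(), magI.type());
Core.add(magI, ones, magI2);                        // this is where the SIGSEGV hits in my app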

Here's my code:

package ch.hepia.lsn.opencv_native_androidstudio;

import android.app.Activity;
import android.net.Uri;
import android.os.Bundle;
import android.os.Environment;
import android.provider.MediaStore;
import android.util.Log;
import android.view.SurfaceView;
import android.view.WindowManager;
import org.opencv.android.BaseLoaderCallback;
import org.opencv.android.CameraBridgeViewBase;
import android.hardware.Camera;
import org.opencv.android.LoaderCallbackInterface;
import org.opencv.android.OpenCVLoader;
import org.opencv.android.Utils;
import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.Rect;
import org.opencv.core.Size;
import org.opencv.imgcodecs.Imgcodecs;
import org.opencv.imgproc.Imgproc;

import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import android.content.Intent;
import android.graphics.Bitmap;
import android.view.View;
import android.widget.Button;
import android.widget.ImageView;


public class MainActivity extends Activity /*implements CameraBridgeViewBase.CvCameraViewListener2*/ {
    private static final String TAG = "OCVSample::Activity";

    private CameraBridgeViewBase mOpenCvCameraView;

    private static final int CAMERA_REQUEST = 1888;
    private ImageView imageView;
    static final int REQUEST_IMAGE_CAPTURE = 1;
    private Bitmap mImageBitmap;
    private String mCurrentPhotoPath;

    private BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) {
        @Override
        public void onManagerConnected(int status) {
            switch (status) {
                case LoaderCallbackInterface.SUCCESS: {
                    Log.i(TAG, "OpenCV loaded successfully");
                    mOpenCvCameraView.enableView();
                }
                break;
                default: {
                    super.onManagerConnected(status);
                }
            }
        }
    };

    private File createImageFile() throws IOException {
        // Create an image file name
        String timeStamp = new SimpleDateFormat("yyyyMMdd_HHmmss").format(new Date());
        String imageFileName = "JPEG_" + timeStamp + "_";
        File storageDir = Environment.getExternalStoragePublicDirectory(
                Environment.DIRECTORY_PICTURES);
        File image = File.createTempFile(
                imageFileName,  // prefix
                ".jpg",         // suffix
                storageDir      // directory
        );

        // Save a file: path for use with ACTION_VIEW intents
        mCurrentPhotoPath = "file:" + image.getAbsolutePath();
        return image;
    }

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);

        // Load ndk built module, as specified
        // in moduleName in build.gradle
        System.loadLibrary("native");

        getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);

        setContentView(R.layout.activity_main);

        /*mOpenCvCameraView = (CameraBridgeViewBase) findViewById(R.id.main_surface);
        mOpenCvCameraView.setVisibility(SurfaceView.VISIBLE);
        mOpenCvCameraView.setCvCameraViewListener(this);*/


        //take picture part
        this.imageView = (ImageView)this.findViewById(R.id.imageView1);
        Button photoButton = (Button) this.findViewById(R.id.button1);
        photoButton.setOnClickListener(new View.OnClickListener() {

            @Override
            public void onClick(View v) {
                Intent cameraIntent = new Intent(MediaStore.ACTION_IMAGE_CAPTURE);
                if (cameraIntent.resolveActivity(getPackageManager()) != null) {
                    // Create the File where the photo should go
                    File photoFile = null;
                    try {
                        photoFile = createImageFile();
                    } catch (IOException ex) {
                        // Error occurred while creating the File
                        Log.i(TAG, "IOException");
                    }
                    // Continue only if the File was successfully created
                    if (photoFile != null) {
                        cameraIntent.putExtra(MediaStore.EXTRA_OUTPUT, Uri.fromFile(photoFile));
                        startActivityForResult(cameraIntent, REQUEST_IMAGE_CAPTURE);
                    }
                }
            }
        });
    }

    protected void onActivityResult(int requestCode, int resultCode, Intent data) {
        if (requestCode == REQUEST_IMAGE_CAPTURE && resultCode == RESULT_OK) {
            try {
                mImageBitmap = MediaStore.Images.Media.getBitmap(this.getContentResolver(), Uri.parse(mCurrentPhotoPath));
                Mat imageMat = new Mat (mImageBitmap.getWidth(), mImageBitmap.getHeight(), CvType.CV_8UC1);
                Utils.bitmapToMat(mImageBitmap, imageMat); //bitmap to mat
                Imgproc.cvtColor(imageMat, imageMat, Imgproc.COLOR_BGR2GRAY); //togray
                getDFT(imageMat); //dft
                int frameSize = imageMat.width()*imageMat.height();
                int[] rgba = new int[frameSize];
                //dft(imageMat.width(),imageMat.height(),imageMat, rgba);//dft2
                Utils.matToBitmap(getDFT(imageMat),mImageBitmap);//mat2bitmap
                Log.w("myApp", "After getDFT");
                imageView.setImageBitmap(mImageBitmap);
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    }
 /*
    @Override
    public void onPause() {
        super.onPause();
        disableCamera();
    }

    @Override
    public void onResume() {
        super.onResume();
        if (!OpenCVLoader.initDebug()) {
            Log.d(TAG, "Internal OpenCV library not found. Using OpenCV Manager for initialization");
            OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_3_0_0, this, mLoaderCallback);
        } else {
            Log.d(TAG, "OpenCV library found inside package. Using it!");
            mLoaderCallback.onManagerConnected(LoaderCallbackInterface.SUCCESS);
        }
    }

    public void onDestroy() {
        super.onDestroy();
        disableCamera();
    }

    public void disableCamera() {
        if (mOpenCvCameraView != null)
            mOpenCvCameraView.disableView();
    }

    public void onCameraViewStarted(int width, int height) {
    }

    public void onCameraViewStopped() {
    }*/

    private Mat getDFT(Mat singleChannel) {
        //Log.w("myApp", "Entering DFT function");
        singleChannel.convertTo(singleChannel, CvType.CV_64FC1);

        int m = Core.getOptimalDFTSize(singleChannel.rows());
        int n = Core.getOptimalDFTSize(singleChannel.cols());
        //Log.w("myApp", "After Optimal Size");

        // expand the input image to the optimal size: add zero values on the border
        // (Imgproc.copyMakeBorder(image1, padded, 0, m - image1.rows(), 0, n ...)
        Mat padded = new Mat(new Size(n, m), CvType.CV_64FC1);
        // Log.w("myApp", "After padded");

        Core.copyMakeBorder(singleChannel, padded, 0, m - singleChannel.rows(), 0,
                n - singleChannel.cols(), Core.BORDER_CONSTANT);
        //Log.w("myApp", "After CopyMakeBorder");

        List<Mat> planes = new ArrayList<Mat>();
        planes.add(padded);
        planes.add(Mat.zeros(padded.rows(), padded.cols(), CvType.CV_64FC1));
        //Log.w("myApp", "After Planes");

        Mat complexI = Mat.zeros(padded.rows(), padded.cols(), CvType.CV_64FC2);

        Mat complexI2 = Mat.zeros(padded.rows(), padded.cols(), CvType.CV_64FC2);

        Core.merge(planes, complexI); // add to the expanded image another plane with zeros

        Core.dft(complexI, complexI2); // this way the result may fit in the source matrix
        //Log.w("myApp", " After DFT");

        // compute the magnitude and switch to logarithmic scale
        // => log(1 + sqrt(Re(DFT(I))^2 + Im(DFT(I))^2))
        Core.split(complexI2, planes); // planes[0] = Re(DFT(I)), planes[1] = Im(DFT(I))

        Mat mag = new Mat(planes.get(0).size(), planes.get(0).type());
        Log.w("myApp", " mag Size: "+mag.width()+" "+mag.height()); //ok

        Core.magnitude(planes.get(0), planes.get(1), mag); // planes[0] = magnitude
        Log.w("myApp", " mag Size after Magnitude: " + mag.width() + " " + mag.height()); //ok
        //Log.w("myApp", " After Core Split");

        Mat magI = mag;
        //mag.release();
        //singleChannel.release();
        //Log.w("myApp", " After Release");

        Mat magI2 = new Mat(magI.size(), magI.type());
        Mat magI3 = new Mat(magI.size(), magI.type());
        Mat magI4 = new Mat(magI.size(), magI.type());
        Mat magI5 = new Mat(magI.size(), magI.type());

        //Log.w("myApp", " After magIn allocate");
        Log.w("myApp", " Padded Size: "+padded.width() +" "+padded.height());
        Log.w("myApp", " magI Size: "+magI.width()+" "+magI.height());
        Log.w("myApp", " magI2 Size: "+magI2.width()+" "+magI2.height());
        Log.w("myApp", "Mat one Size: " + Mat.ones(padded.rows(), padded.cols(), CvType.CV_64FC1).width() + " " + Mat.ones(padded.rows(), padded.cols(), CvType.CV_64FC1).height());
        Core.add(magI, Mat.ones(padded.rows(), padded.cols(), CvType.CV_64FC1), magI2); // switch to logarithmic scale
        Log.w("myApp", " After Core.add"); //breaks here

        padded.release();
        Core.log(magI2, magI3);

        Log.w("myApp", " After 1");

        Mat crop = new Mat(magI3, new Rect(0, 0, magI3.cols() & -2,
                magI3.rows() & -2));

        magI4 = crop.clone();
        crop.release();

        Log.w("myApp", " After 2");

        // rearrange the quadrants of Fourier image so that the origin is at the
        // image center
        int cx = magI4.cols() / 2;
        int cy = magI4.rows() / 2;

        Rect q0Rect = new Rect(0, 0, cx, cy);
        Rect q1Rect = new Rect(cx, 0, cx, cy);
        Rect q2Rect = new Rect(0, cy, cx, cy);
        Rect q3Rect = new Rect(cx, cy, cx, cy);

        Mat q0 = new Mat(magI4, q0Rect); // Top-Left - Create a ROI per quadrant
        Mat q1 = new Mat(magI4, q1Rect); // Top-Right
        Mat q2 = new Mat(magI4, q2Rect); // Bottom-Left
        Mat q3 = new Mat(magI4, q3Rect); // Bottom-Right

        Log.w("myApp", " After 4");

        Mat tmp = new Mat(); // swap quadrants (Top-Left with Bottom-Right)
        q0.copyTo(tmp);
        q3.copyTo(q0);
        tmp.copyTo(q3);

        q1.copyTo(tmp); // swap quadrant (Top-Right with Bottom-Left)
        q2.copyTo(q1);
        tmp.copyTo(q2);

        Log.w("myApp", " After 5");

        Core.normalize(magI4, magI5, 0, 255, Core.NORM_MINMAX);

        Mat realResult = new Mat(magI5.size(), CvType.CV_8UC1);

        magI5.convertTo(realResult, CvType.CV_8UC1);

        return realResult;
    }

    /*public Mat onCameraFrame(CameraBridgeViewBase.CvCameraViewFrame inputFrame) {
        Mat matGray = inputFrame.gray();
        Log.w("myApp", "before dft");
        //dft(matGray.getNativeObjAddr());
        //getDFT(inputFrame.gray());
        Log.w("myApp", "after dft");
        return matGray;
    }*/

    //public native void salt(long matAddrGray, int nbrElem);
    public native Mat dft(int width, int height, Mat yuv, int bgra[]);


}

Note: both matrices I'm adding have the same size (I've checked it):

07-11 15:27:10.521 17590-17590/ch.hepia.lsn.opencv_native_androidstudio W/myApp: Padded Size: 2592 1944

07-11 15:27:10.521 17590-17590/ch.hepia.lsn.opencv_native_androidstudio W/myApp: magI Size: 2592 1944

07-11 15:27:10.521 17590-17590/ch.hepia.lsn.opencv_native_androidstudio W/myApp: magI2 Size: 2592 1944

07-11 15:27:10.661 17590-17590/ch.hepia.lsn.opencv_native_androidstudio W/myApp: Mat one Size: 2592 1944

07-11 15:27:10.741 17590-17590/ch.hepia.lsn.opencv_native_androidstudio
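
The check above only compares sizes; Core.add also expects both operands to have the same type and channel count (for the overload without an explicit dtype), so a fuller check would look something like this (a sketch reusing magI and padded from getDFT):

// Sketch: log size, type and channel count of both Core.add operands;
// both should report CV_64FC1 with 1 channel and empty = false.
Mat ones = Mat.ones(padded.rows(), padded.cols(), CvType.CV_64FC1);
Log.w("myApp", "magI: " + magI.size() + " type=" + CvType.typeToString(magI.type())
        + " channels=" + magI.channels() + " empty=" + magI.empty());
Log.w("myApp", "ones: " + ones.size() + " type=" + CvType.typeToString(ones.type())
        + " channels=" + ones.channels() + " empty=" + ones.empty());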

  • any help with this issue? – Usman Rana Oct 24 '16 at 06:48
  • Hi Usman, the function for adding two matrices in OpenCV is bugged. Try using add with weights. Cheers – Pedro Gabriel Lancelloti Pinto Oct 24 '16 at 12:55
  • Thanks Pedro, Core.multiply, which is mentioned in their sample code, is also giving a fatal error. Is it also an OpenCV bug? See this one: http://stackoverflow.com/questions/40215019/in-android-opencv-color-blob-detector-is-crashing-by-giving-fatal-error?noredirect=1#comment67697858_40215019 – Usman Rana Oct 24 '16 at 13:14
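
For reference, the "add with weights" route suggested in the comment above could be sketched like this with Core.addWeighted (variable names mirror those in getDFT; untested in this exact setup):

// "+1 then log" written with Core.addWeighted instead of Core.add + Mat.ones:
// magI2 = 1.0*magI + 0.0*magI + 1.0, i.e. magI + 1, without a second full-size matrix.
Core.addWeighted(magI, 1.0, magI, 0.0, 1.0, magI2);
Core.log(magI2, magI3); // magI3 = log(1 + magnitude)

// The Scalar overload of Core.add expresses the same step:
// Core.add(magI, new Scalar(1.0), magI2);   // needs: import org.opencv.core.Scalar;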

0 Answers