当我尝试在Android-NDK上使用getPerspectiveTransform对图像做透视变换时,断言(assertion)失败

当我尝试在Android-NDK上使用getPerspectiveTransform对图像做透视变换时,断言(assertion)失败,第1张

概述:我试图在Android上通过NDK使用OpenCV检测一个正方形物体,并把它裁剪成正对的2D图。我已经检测到了正方形的四个顶点,但调用getPerspectiveTransform(src, dst)时出现此错误:OpenCV Error: Assertion failed (src.checkVector(2, CV_32F) == 4 && dst.checkVector(2, CV_32F) == 4)

我试图在Android上通过NDK使用OpenCV检测一个正方形物体,并把它裁剪成正对的2D图。我已经检测到了正方形的四个顶点,但调用getPerspectiveTransform(src, dst)时出现此错误消息:

OpenCV Error: Assertion failed (src.checkVector(2, CV_32F) == 4 && dst.checkVector(2, CV_32F) == 4) in cv::Mat cv::getPerspectiveTransform(cv::InputArray, cv::InputArray), file /home/reports/ci/slave/50-SDK/opencv/modules/imgproc/src/imgwarp.cpp, line 3607

这是我在Android中的Activity:

package org.opencv.samples.tutorial1;import java.io.ByteArrayOutputStream;import org.opencv.androID.BaseLoaderCallback;import org.opencv.androID.CameraBrIDgeVIEwBase.CvCameraviewFrame;import org.opencv.androID.LoaderCallbackInterface;import org.opencv.androID.OpenCVLoader;import org.opencv.androID.Utils;import org.opencv.core.CvException;import org.opencv.core.CvType;import org.opencv.core.Mat;import org.opencv.core.Size;import org.opencv.imgproc.imgproc;import org.opencv.androID.CameraBrIDgeVIEwBase;import org.opencv.androID.CameraBrIDgeVIEwBase.CvCameraviewListener2;import androID.app.Activity;import androID.content.Intent;import androID.graphics.Bitmap;import androID.os.Bundle;import androID.util.Log;import androID.vIEw.Menu;import androID.vIEw.MenuItem;import androID.vIEw.SurfaceVIEw;import androID.vIEw.VIEw;import androID.vIEw.WindowManager;import androID.vIEw.VIEw.OnClickListener;import androID.Widget.Toast;public class Tutorial1Activity extends Activity implements        CvCameraviewListener2 {    private static final String TAG = "OCVSample::Activity";    private Mat mRgba;    private Mat mGrayMat;    private Mat imageTaken;    private CameraBrIDgeVIEwBase mOpenCvCameraview;    private boolean mIsJavaCamera = true;    private MenuItem mItemSwitchCamera = null;    private BaseLoaderCallback mloaderCallback = new BaseLoaderCallback(this) {        @OverrIDe        public voID onManagerConnected(int status) {            switch (status) {            case LoaderCallbackInterface.SUCCESS: {                Log.i(TAG, "OpenCV loaded successfully");                System.loadlibrary("native_sample");                mOpenCvCameraview.enableVIEw();            }                break;            default: {                super.onManagerConnected(status);            }                break;            }        }    };    public Tutorial1Activity() {        Log.i(TAG, "Instantiated new " + this.getClass());    }    /** Called when the activity is first created. 
*/    @OverrIDe    public voID onCreate(Bundle savedInstanceState) {        Log.i(TAG, "called onCreate");        super.onCreate(savedInstanceState);        getwindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);        setContentVIEw(R.layout.tutorial1_surface_vIEw);        mOpenCvCameraview = (CameraBrIDgeVIEwBase) findVIEwByID(R.ID.tutorial1_activity_native_surface_vIEw);        mOpenCvCameraview.setVisibility(SurfaceVIEw.VISIBLE);        mOpenCvCameraview.setCvCameraviewListener(this);    }    @OverrIDe    public voID onPause() {        super.onPause();        if (mOpenCvCameraview != null)            mOpenCvCameraview.disableVIEw();    }    @OverrIDe    public voID onResume() {        super.onResume();        OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_3, this,                mloaderCallback);    }    public voID onDestroy() {        super.onDestroy();        if (mOpenCvCameraview != null)            mOpenCvCameraview.disableVIEw();    }    @OverrIDe    public boolean onCreateOptionsMenu(Menu menu) {        Log.i(TAG, "called onCreateOptionsMenu");        // mItemSwitchCamera = menu.add("Toggle Native/Java camera");        return true;    }    @OverrIDe    public boolean onoptionsItemSelected(MenuItem item) {        String toastMesage = new String();        Log.i(TAG, "called onoptionsItemSelected; selected item: " + item);        if (item == mItemSwitchCamera) {            mOpenCvCameraview.setVisibility(SurfaceVIEw.GONE);            mIsJavaCamera = !mIsJavaCamera;            if (mIsJavaCamera) {                mOpenCvCameraview = (CameraBrIDgeVIEwBase) findVIEwByID(R.ID.tutorial1_activity_java_surface_vIEw);                toastMesage = "Java Camera";            } else {                mOpenCvCameraview = (CameraBrIDgeVIEwBase) findVIEwByID(R.ID.tutorial1_activity_native_surface_vIEw);                toastMesage = "Native Camera";            }            mOpenCvCameraview.setVisibility(SurfaceVIEw.VISIBLE);            
mOpenCvCameraview.setCvCameraviewListener(this);            mOpenCvCameraview.enableVIEw();            mOpenCvCameraview.setonClickListener(new OnClickListener() {                @OverrIDe                public voID onClick(VIEw v) {                    takePicture();                }            });            Toast toast = Toast.makeText(this, toastMesage, Toast.LENGTH_LONG);            toast.show();        }        return true;    }    public voID takePicture() {        if (imageTaken != null) {            Bitmap resultBitmap = null;            try {                // imgproc.cvtcolor(imageTaken, imageTaken,                // imgproc.color_BGR2GRAY);                // imgproc.cvtcolor(imageTaken, imageTaken,                // imgproc.color_GRAY2RGBA, 4);                /*                 * Mat test =                 * imgproc.getPerspectivetransform(ImageSrc,ImageDst);                 * imgproc.warpPerspective(ImageSrc, ImageDst, test,                 * ImageDst.size());                 */                resultBitmap = Bitmap.createBitmap(imageTaken.cols(),                        imageTaken.rows(), Bitmap.Config.ARGB_8888);                //                Utils.matToBitmap(imageTaken, resultBitmap);                byte[] sendData = codec(resultBitmap,                        Bitmap.CompressFormat.JPEG, 50);                Intent i = new Intent(getApplicationContext(),                        ShowImageActivity.class);                i.putExtra("data", sendData);                startActivity(i);            } catch (CvException e) {                // Todo: handle exception                e.printstacktrace();            }        }    }    private byte[] codec(Bitmap src, Bitmap.CompressFormat format, int quality) {        ByteArrayOutputStream os = new ByteArrayOutputStream();        src.compress(format, quality, os);        byte[] array = os.toByteArray();        System.out.println(array.length);//      return BitmapFactory.decodeByteArray(array, 0, array.length);     
   return array;    }    public voID onCameraviewStarted(int wIDth, int height) {        mRgba = new Mat();        mGrayMat = new Mat();        imageTaken = new Mat();    }    public voID onCameraviewStopped() {        mRgba.release();        mGrayMat.release();        imageTaken.release();    }    public Mat onCameraFrame(CvCameraviewFrame inputFrame) {        /*long start = System.currentTimeMillis();        Size originalSize = inputFrame.rgba().size();        imgproc.resize(inputFrame.rgba(), mRgba, new Size(800, 480));        *///      FindSquares(inputFrame.rgba().getNativeObjAddr(), 1);        // imageTaken = inputFrame.clone();//      System.out.println(inputFrame.rgba().type());        findSquare(inputFrame.rgba().getNativeObjAddr(), imageTaken.getNativeObjAddr(),  1);        // if (mDraw == 1) {        /*imgproc.resize(mRgba, inputFrame.rgba(), originalSize);        // }        long end = System.currentTimeMillis();        Log.d("Frame time", "" + (end - start) + " ms");*/        return inputFrame.rgba();    }    public native voID FindFeatures(long matAddrGr, long matAddrRgba);    public native int FindSquares(long matAddrRgba, int draw);    public native voID findSquare(long matAddrRgba, long matAddrDescriptor, int draw);}

这是我的JNI代码:

/*
 * Detects the most square-like quadrilateral in the RGBA frame at addrRgba,
 * draws its outline into the frame, and writes a fronto-parallel 300x220
 * warp of it into *addrDescriptor. Returns 1.
 *
 * NOTE(review): the Java-side declaration (findSquare in Tutorial1Activity)
 * passes only three arguments and expects void — it must be updated to match
 * this five-argument, jint-returning signature (and this symbol binds to
 * com.gconsent.opencv.MainActivity, not Tutorial1Activity).
 */
JNIEXPORT jint JNICALL Java_com_gconsent_opencv_MainActivity_findSquare(JNIEnv*,
        jobject, jlong addrRgba, jlong addrDescriptor, jlong addrSrc,
        jlong addrDst, jint draw) {
    Mat& image = *(Mat*) addrRgba;
    Mat& imageCropped = *(Mat*) addrDescriptor;
    Mat& imageSrc = *(Mat*) addrSrc;
    Mat& imageDst = *(Mat*) addrDst;

    Mat newSrc = image.clone();      // untouched copy: warped later, after the
                                     // outline is drawn into `image`
    imageCropped = image.clone();
    Mat testImage = image.clone();

    // Blurring first makes the edge detection far less noisy.
    Mat blurred(testImage);
    medianBlur(testImage, blurred, 9);

    Mat gray0(blurred.size(), CV_8U), gray;
    vector<vector<Point> > contours;

    // Look for squares in every color plane of the image.
    for (int c = 0; c < 3; c++) {
        int ch[] = { c, 0 };
        mixChannels(&blurred, 1, &gray0, 1, ch, 1);

        // Try several threshold levels per plane.
        const int threshold_level = 2;
        for (int l = 0; l < threshold_level; l++) {   // BUG FIX: was "L++" (wrong variable)
            if (l == 0) {
                // Canny instead of a zero threshold level: it catches squares
                // with gradient shading; dilation closes small holes between
                // edge segments.
                Canny(gray0, gray, 10, 20, 3);
                dilate(gray, gray, Mat(), Point(-1, -1));
            } else {
                gray = gray0 >= (l + 1) * 255 / threshold_level;
            }

            findContours(gray, contours, CV_RETR_LIST, CV_CHAIN_APPROX_SIMPLE);

            vector<Point> approx;
            for (size_t i = 0; i < contours.size(); i++) {
                // Approximate each contour with accuracy proportional to its
                // perimeter; 4 vertices + convex + large area ~= a square.
                approxPolyDP(Mat(contours[i]), approx,
                        arcLength(Mat(contours[i]), true) * 0.02, true);

                // fabs(): the signed area depends on contour orientation.
                if (approx.size() == 4
                        && fabs(contourArea(Mat(approx))) > 1000
                        && isContourConvex(Mat(approx))) {
                    // Reject quads with any corner far from 90 degrees.
                    double maxCosine = 0;
                    for (int j = 2; j < 5; j++) {
                        double cosine = fabs(angle(approx[j % 4],
                                approx[j - 2], approx[j - 1]));
                        maxCosine = MAX(maxCosine, cosine);
                    }

                    if (maxCosine < 0.3) {
                        line(image, approx[0], approx[1], Scalar(0, 255, 0, 255), 2, 4, 0);
                        line(image, approx[1], approx[2], Scalar(0, 255, 0, 255), 2, 4, 0);
                        line(image, approx[2], approx[3], Scalar(0, 255, 0, 255), 2, 4, 0);
                        line(image, approx[3], approx[0], Scalar(0, 255, 0, 255), 2, 4, 0);

                        // getPerspectiveTransform asserts that BOTH point sets
                        // are exactly 4 entries of CV_32F 2-channel data.
                        // vector<Point2f> gives CV_32FC2; each vector must hold
                        // exactly four points.
                        vector<Point2f> src(4);
                        src[0] = approx[0];
                        src[1] = approx[1];
                        src[2] = approx[2];
                        src[3] = approx[3];

                        cv::Mat quad = cv::Mat::zeros(300, 220, CV_8U);

                        // BUG FIX (the assertion failure): the original did
                        // vector<Point2f> quad_pts(4) AND push_back'ed four
                        // more points, yielding 8 entries and failing
                        // checkVector(2, CV_32F) == 4.
                        vector<Point2f> quad_pts;
                        quad_pts.push_back(Point2f(0, 0));
                        quad_pts.push_back(Point2f((float) quad.cols, 0));
                        quad_pts.push_back(Point2f((float) quad.cols, (float) quad.rows));
                        quad_pts.push_back(Point2f(0, (float) quad.rows));

                        // Deep-copy (copyData=true): these output Mats must not
                        // alias stack vectors that die when this function returns.
                        imageSrc = Mat(src, true);
                        imageDst = Mat(quad_pts, true);

                        Mat transmtx = getPerspectiveTransform(src, quad_pts);
                        // BUG FIX: warp the *image*, not the point vector.
                        warpPerspective(newSrc, quad, transmtx, quad.size());
                        imageCropped = quad.clone();
                    }
                }
            }
        }
    }
    return 1;
}

解决方法:

将输入的cv::Mat类型更改为CV_32FC2(即由Point2f构成的点集,且src与dst各恰好包含4个点)。
另外,可以参考原文中引用的相关链接。

总结

以上是内存溢出为你收集整理的当我尝试在Android-NDK上使用getPerspectiveTransform转换透视图图像时,断言失败全部内容,希望文章能够帮你解决当我尝试在Android-NDK上使用getPerspectiveTransform转换透视图图像时,断言失败所遇到的程序开发问题。

如果觉得内存溢出网站内容还不错,欢迎将内存溢出网站推荐给程序员好友。

欢迎分享,转载请注明来源:内存溢出

原文地址: https://outofmemory.cn/web/1086955.html

(0)
打赏 微信扫一扫 微信扫一扫 支付宝扫一扫 支付宝扫一扫
上一篇 2022-05-27
下一篇 2022-05-27

发表评论

登录后才能评论

评论列表(0条)

保存