Real-time object recognition on Android using the FAST detector


Overview

The goal of this work is to extract, in real time, the key points of a known image from a video scene. processFrame does not work because of the matching step; what I want is to be able to display the corresponding key points as circles on the image in real time.

class Sample1View extends SampleViewBase {
    public static final int VIEW_MODE_RGBA   = 0;
    public static final int VIEW_MODE_BLUE   = 1;
    public static final int VIEW_MODE_YELLOW = 2;
    public static final int VIEW_MODE_DE     = 3;

    private Mat mYuv;
    private Mat mRgba;
    private Mat mGraySubmat;
    private Mat mResult;
    private Mat mIntermediateMat;
    private Bitmap mBitmap;
    private int mViewMode;
    private Mat mColor;
    private Mat mHsv;
    TimingLogger timings;

    // Reference image and its features, computed once in the constructor.
    private Mat img1;
    private Mat descriptors;
    private MatOfKeyPoint keypoints;
    private FeatureDetector detector;
    private DescriptorExtractor descriptor;
    private DescriptorMatcher matcher;

    private static final String TAG = "Sample::View";

    public Sample1View(Context context) {
        super(context);
        mViewMode = VIEW_MODE_RGBA;
        try {
            img1 = Utils.loadResource(getContext(), R.drawable.wings);
        } catch (IOException e) {
            // TODO Auto-generated catch block
            Log.w("Activity::LoadResource", "Unable to load resource R.drawable.wings");
            e.printStackTrace();
        }
        descriptors = new Mat();
        keypoints = new MatOfKeyPoint();
        // FAST key points + ORB descriptors on the reference image,
        // to be matched against each camera frame with a brute-force Hamming matcher.
        detector = FeatureDetector.create(FeatureDetector.FAST);
        detector.detect(img1, keypoints);
        descriptor = DescriptorExtractor.create(DescriptorExtractor.ORB);
        descriptor.compute(img1, keypoints, descriptors);
        matcher = DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE_HAMMING);
    }

    @Override
    protected void onPreviewStarted(int previewWidth, int previewHeight) {
        Log.i(TAG, "preview started");
        synchronized (this) {
            mYuv = new Mat(getFrameHeight() + getFrameHeight() / 2, getFrameWidth(), CvType.CV_8UC1);
            mGraySubmat = mYuv.submat(0, getFrameHeight(), 0, getFrameWidth());
            mRgba = new Mat();
            mIntermediateMat = new Mat();
            mBitmap = Bitmap.createBitmap(previewWidth, previewHeight, Bitmap.Config.ARGB_8888);
            mHsv = new Mat();
            mColor = new Mat();
            mResult = new Mat();
        }
    }

    @Override
    protected void onPreviewStopped() {
        Log.i(TAG, "preview stopped");
        if (mBitmap != null) {
            mBitmap.recycle();
        }
        synchronized (this) {
            // Explicitly deallocate Mats
            if (mYuv != null)
                mYuv.release();
            if (mRgba != null)
                mRgba.release();
            if (mGraySubmat != null)
                mGraySubmat.release();
            if (mIntermediateMat != null)
                mIntermediateMat.release();
            mYuv = null;
            mRgba = null;
            mGraySubmat = null;
            mIntermediateMat = null;
            if (mResult != null)
                mResult.release();
            if (mHsv != null)
                mHsv.release();
            if (mColor != null)
                mColor.release();
            mColor = null;
            mResult = null;
            mHsv = null;
        }
    }

// cvt_YUVtoRGBtoHSV:

    @Override
    protected Bitmap processFrame(byte[] data) {
        mYuv.put(0, 0, data);
        final int viewMode = mViewMode;

        switch (viewMode) {
        case VIEW_MODE_DE:
            colorDetection.cvt_YUVtoRGBtoHSV(mYuv, mGraySubmat);

            MatOfKeyPoint mKeyPoints = new MatOfKeyPoint();
            MatOfDMatch matches = new MatOfDMatch();

            // Detect and describe the current frame, then match it against the reference image.
            detector.detect(mGraySubmat, mKeyPoints);
            descriptor.compute(mGraySubmat, mKeyPoints, mIntermediateMat);
            matcher.match(mIntermediateMat, descriptors, matches);

            mIntermediateMat2.create(resultSize, CvType.CV_8UC1);
            Features2d.drawMatches(mGraySubmat, mKeyPoints, mGraySubmat, mKeyPoints, matches, mIntermediateMat2);
            Imgproc.resize(mIntermediateMat2, mIntermediateMat2, mRgba.size());
            Imgproc.cvtColor(mIntermediateMat2, mRgba, Imgproc.COLOR_RGBA2BGRA, 4);
            break;
        }

        Bitmap bmp = mBitmap;
        try {
            Utils.matToBitmap(mRgba, bmp);
        } catch (Exception e) {
            Log.e("org.opencv.samples.*", "Utils.matToBitmap() throws an exception: " + e.getMessage());
            bmp.recycle();
            bmp = null;
        }
        return bmp;
    }

    public void setViewMode(int viewMode) {
        mViewMode = viewMode;
    }
}

In the log:

CvException [org.opencv.core.CvException: /home/reports/ci/slave/50-SDK/opencv/modules/features2d/src/draw.cpp:207: error: (-215) i1 >= 0 && i1 < static_cast<int>(keypoints1.size()) in function void cv::drawMatches(const cv::Mat&, const std::vector<KeyPoint>&, const cv::Mat&, const std::vector<KeyPoint>&, const std::vector<DMatch>&, cv::Mat&, const Scalar&, const Scalar&, const std::vector<char>&, int)]

Solution

This question is a bit old, but here is an answer.

The first four arguments of your drawMatches call are incorrect: the first two and the last two are the same.

You can change the first four arguments of drawMatches from (mGraySubmat, …) to

(mGraySubmat, img1, …)

or

(img1, …)

Use whichever one works for you.
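
For the code in the question, where matcher.match(mIntermediateMat, descriptors, matches) uses the live frame as the query side and the reference image img1 as the train side, a minimal sketch of the corrected call could look like the following. The keypoint arguments shown are the ones implied by that match order, not something spelled out in the original answer:

    Features2d.drawMatches(
            mGraySubmat, mKeyPoints,   // query: the live camera frame and its key points
            img1,        keypoints,    // train: the reference image and its key points
            matches,
            mIntermediateMat2);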

For other readers who run into the same problem: try swapping the first pair of arguments with the second pair.
For example, change

(image1, keypointsImage1, image2, keypointsImage2, …)

to

(image2, keypointsImage2, image1, keypointsImage1, …)
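
The reason the order matters is that each DMatch carries two indices: queryIdx into the key points of the first descriptor set passed to DescriptorMatcher.match(), and trainIdx into the second. drawMatches expects the images and key points in that same query-then-train order; otherwise the indices point into the wrong (possibly smaller) keypoint list and trip the assertion seen in the log. A small sketch of the correspondence, using hypothetical names (sceneImg, refImg, etc. are placeholders, not identifiers from the original post):

    // match(queryDescriptors, trainDescriptors, matches):
    //   DMatch.queryIdx indexes sceneKeyPoints, DMatch.trainIdx indexes refKeyPoints
    matcher.match(sceneDescriptors, refDescriptors, matches);

    // drawMatches must receive the images and key points in the same order,
    // or i1 = queryIdx can exceed keypoints1.size() and fail the assertion.
    Features2d.drawMatches(sceneImg, sceneKeyPoints,   // query side
                           refImg,   refKeyPoints,     // train side
                           matches, outputImg);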

For me, that fixed the error.
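
As a side note on the original goal of drawing the matched key points as circles directly on the live frame (rather than the side-by-side canvas that drawMatches produces), a rough, untested sketch along these lines might work. It reuses the fields from the question plus java.util.List and the usual org.opencv.core imports, and the 2.4-era Java API (Core.circle; in OpenCV 3.x and later the same drawing function lives in Imgproc):

    // Draw a circle on the RGBA preview for every scene key point that found a match.
    List<DMatch> matchList = matches.toList();
    List<KeyPoint> sceneKeyPoints = mKeyPoints.toList();
    for (DMatch m : matchList) {
        // queryIdx indexes the query side of matcher.match(), i.e. mKeyPoints here.
        KeyPoint kp = sceneKeyPoints.get(m.queryIdx);
        Core.circle(mRgba, kp.pt, 5, new Scalar(0, 255, 0, 255), 2);
    }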


