Pattern recognition algorithms SURF, SIFT in OpenCV for Android

2012-03-23

I want to implement a simple application (a modified sample2) that demonstrates SIFT, SURF, BRIEF and ORB features, so the user can easily compare their rotation and scale invariance or their speed. But I ran into a failure I cannot handle, so I am turning to you for help. Whenever I try to use SIFT or SURF, I get an exception on the line where I try to match: matcherBruteForce.match(descriptorFrame, matches);

I have a similar AR application that works with these settings, so I cannot figure out where I am making a mistake. I tried setting the variable matcherBruteForce to BRUTEFORCE, BRUTEFORCE_L1, BRUTEFORCE_SL2 and even BRUTEFORCE_HAMMING, but I always get the same exception:

SIFT:

CvException [org.opencv.core.CvException: /home/andreyk/OpenCV2/trunk/opencv_2.3.1.b2/modules/features2d/include/opencv2/features2d/features2d.hpp:2455: error: (-215) DataType<ValueType>::type == matcher.trainDescCollection[iIdx].type() || matcher.trainDescCollection[iIdx].empty() in function static void cv::BruteForceMatcher<Distance>::commonKnnMatchImpl(cv::BruteForceMatcher<Distance>&, const cv::Mat&, std::vector<std::vector<cv::DMatch> >&, int, const std::vector<cv::Mat>&, bool) [with Distance = cv::SL2<float>] 
] 

SURF:

CvException [org.opencv.core.CvException: /home/andreyk/OpenCV2/trunk/opencv_2.3.1.b2/modules/features2d/include/opencv2/features2d/features2d.hpp:2455: error: (-215) DataType<ValueType>::type == matcher.trainDescCollection[iIdx].type() || matcher.trainDescCollection[iIdx].empty() in function static void cv::BruteForceMatcher<Distance>::commonKnnMatchImpl(cv::BruteForceMatcher<Distance>&, const cv::Mat&, std::vector<std::vector<cv::DMatch> >&, int, const std::vector<cv::Mat>&, bool) [with Distance = cv::SL2<float>] 
] 

Any help is appreciated.

The whole class:

package sk.bolyos.opencv; 

import java.util.Vector; 

import org.opencv.features2d.DMatch; 
import org.opencv.features2d.DescriptorExtractor; 
import org.opencv.features2d.DescriptorMatcher; 
import org.opencv.features2d.FeatureDetector; 
import org.opencv.features2d.Features2d; 
import org.opencv.features2d.KeyPoint; 
import org.opencv.highgui.VideoCapture; 
import org.opencv.android.Utils; 
import org.opencv.core.Mat; 
import org.opencv.core.Size; 
import org.opencv.imgproc.Imgproc; 
import org.opencv.highgui.Highgui; 

import sk.bolyos.svk.*; 

import android.content.Context; 
import android.graphics.Bitmap; 
import android.util.Log; 
import android.view.SurfaceHolder; 




public class MyView extends CvViewBase { 

    private static final int BOUNDARY = 35; 

    private Mat mRgba; 
    private Mat mGray; 
    private Mat mIntermediateMat; 
    private Mat mLogoMilka1,mLogoMilka2,mLogoMilka3,mLogoMilka4; 
    ///////////////////DETECTORS 
    FeatureDetector siftDetector = FeatureDetector.create(FeatureDetector.SIFT); 
    FeatureDetector surfDetector = FeatureDetector.create(FeatureDetector.SURF); 
    FeatureDetector fastDetector = FeatureDetector.create(FeatureDetector.FAST); 
    FeatureDetector orbDetector = FeatureDetector.create(FeatureDetector.ORB); 
    ///////////////////DESCRIPTORS 
    DescriptorExtractor siftDescriptor = DescriptorExtractor.create(DescriptorExtractor.SIFT); 
    DescriptorExtractor surfDescriptor = DescriptorExtractor.create(DescriptorExtractor.SURF); 
    DescriptorExtractor briefDescriptor = DescriptorExtractor.create(DescriptorExtractor.BRIEF); 
    DescriptorExtractor orbDescriptor = DescriptorExtractor.create(DescriptorExtractor.ORB); 
    ///////////////////DATABASE 
    Vector<KeyPoint> vectorMilka1 = new Vector<KeyPoint>(); 
    Vector<KeyPoint> vectorMilka2 = new Vector<KeyPoint>(); 
    Vector<KeyPoint> vectorMilka3 = new Vector<KeyPoint>(); 
    Vector<KeyPoint> vectorMilka4 = new Vector<KeyPoint>(); 
    Mat descriptorMilka1 = new Mat(); 
    Mat descriptorMilka2 = new Mat(); 
    Mat descriptorMilka3 = new Mat(); 
    Mat descriptorMilka4 = new Mat(); 
    ///////////////////VIDEO 
    Vector<KeyPoint> vectorFrame = new Vector<KeyPoint>(); 
    Mat descriptorFrame = new Mat(); 

    DescriptorMatcher matcherHamming = DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE_HAMMINGLUT); 
    DescriptorMatcher matcherBruteForce = DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE_SL2); 
    Vector<DMatch> matches = new Vector<DMatch>(); 
    Vector<Mat> siftDescriptors = new Vector<Mat>(); 
    Vector<Mat> surfDescriptors = new Vector<Mat>(); 
    Vector<Mat> briefDescriptors = new Vector<Mat>(); 
    Vector<Mat> orbDescriptors = new Vector<Mat>(); 

    public MyView(Context context) { 
     super(context); 
     // TODO Auto-generated constructor stub 
     try{ 
      /* 
      if (mLogoMilka1 == null){ 
       mLogoMilka1 = new Mat(); 
       mLogoMilka1 = Utils.loadResource(getContext(), R.drawable.milkalogo); 
       fillDB(mLogoMilka1,vectorMilka1,descriptorMilka1); 
      } 
      if (mLogoMilka2 == null){ 
       mLogoMilka2 = new Mat(); 
       mLogoMilka2 = Utils.loadResource(getContext(), R.drawable.milkalogom); 
       fillDB(mLogoMilka2,vectorMilka2,descriptorMilka2); 
      } 
      if (mLogoMilka3 == null){ 
       mLogoMilka3 = new Mat(); 
       mLogoMilka3 = Utils.loadResource(getContext(), R.drawable.milkalogol); 
       fillDB(mLogoMilka3,vectorMilka3,descriptorMilka3); 
      }*/ 
      if (mLogoMilka4 == null){ 
       mLogoMilka4 = new Mat(); 
       mLogoMilka4 = Utils.loadResource(getContext(), R.drawable.milkalogolc); 
       fillDB(mLogoMilka4,vectorMilka4,descriptorMilka4); 
      } 

     }catch(Exception e){ 
      Log.e("SVK APPLICATION", "in MyView constructor "+e.toString()); 
     } 
    } 

    public void fillDB(Mat mLogo,Vector<KeyPoint> vector,Mat descriptor){ 

     //SIFT 
     siftDetector.detect(mLogo, vector); 
     siftDescriptor.compute(mLogo, vector, descriptor); 
     siftDescriptors.add(descriptor); 
     //SURF 
     surfDetector.detect(mLogo, vector); 
     surfDescriptor.compute(mLogo, vector, descriptor); 
     surfDescriptors.add(descriptor); 
     //FAST+BRIEF 
     fastDetector.detect(mLogo, vector); 
     briefDescriptor.compute(mLogo, vector, descriptor); 
     briefDescriptors.add(descriptor); 
     //ORB 
     orbDetector.detect(mLogo, vector); 
     orbDescriptor.compute(mLogo, vector, descriptor); 
     orbDescriptors.add(descriptor); 

    } 


    @Override 
    public void surfaceChanged(SurfaceHolder _holder, int format, int width, int height) { 
     super.surfaceChanged(_holder, format, width, height); 

     synchronized (this) { 
      // initialize Mats before usage 
      mGray = new Mat(); 
      mRgba = new Mat(); 
      mIntermediateMat = new Mat(); 
      matches = new Vector<DMatch>(); 
      vectorFrame = new Vector<KeyPoint>(); 
      descriptorFrame = new Mat(); 
     } 
    } 

    @Override 
    protected Bitmap processFrame(VideoCapture capture) { 
     // TODO Auto-generated method stub 
     switch (SVKApplikaciaActivity.viewMode) { 
     case SVKApplikaciaActivity.VIEW_MODE_SIFT: 
      //TODO SIFT 
      try{ 
       //matcherBruteForce = DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE); 
       //matcherBruteForce.clear(); 
       matcherBruteForce.add(siftDescriptors); 
       matcherBruteForce.train();// proba 

       capture.retrieve(mGray, Highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA); 
       Imgproc.resize(mGray, mGray,new Size(480,320)); 
       siftDetector.detect(mGray, vectorFrame); 
       siftDescriptor.compute(mGray, vectorFrame, descriptorFrame); 

       matcherBruteForce.match(descriptorFrame, matches); 
       Vector<DMatch> matchesXXX = new Vector<DMatch>(); 
       for (DMatch t : matches) 
        if(t.distance<BOUNDARY) 
         matchesXXX.add(t); 
       Mat nGray = new Mat(); 
       Mat nLogo = new Mat(); 
       Mat nRgba = new Mat(); 
       Imgproc.cvtColor(mGray, nGray, Imgproc.COLOR_RGBA2RGB, 3); 
       Imgproc.cvtColor(mLogoMilka4, nLogo, Imgproc.COLOR_RGBA2BGR, 3); 
       Features2d.drawMatches(nGray, vectorFrame, nLogo, vectorMilka4, matchesXXX, nRgba); 
       Imgproc.cvtColor(nRgba, mRgba, Imgproc.COLOR_RGB2RGBA, 4); 
      }catch(Exception e){ 
       Log.e("SVK APPLICATION","in SIFT "+ e.toString()); 
      } 
      break; 
     case SVKApplikaciaActivity.VIEW_MODE_SURF: 
      //TODO SURF 
      try{ 
       //matcherBruteForce = DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE); 
       //matcherBruteForce.clear(); 
       matcherBruteForce.add(surfDescriptors); 
       matcherBruteForce.train();// proba 

       capture.retrieve(mGray, Highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA); 
       Imgproc.resize(mGray, mGray,new Size(480,320)); 
       surfDetector.detect(mGray, vectorFrame); 
       surfDescriptor.compute(mGray, vectorFrame, descriptorFrame); 

       matcherBruteForce.match(descriptorFrame, matches); 
       Vector<DMatch> matchesXXX = new Vector<DMatch>(); 
       for (DMatch t : matches) 
        if(t.distance<BOUNDARY) 
         matchesXXX.add(t); 
       Mat nGray = new Mat(); 
       Mat nLogo = new Mat(); 
       Mat nRgba = new Mat(); 
       Imgproc.cvtColor(mGray, nGray, Imgproc.COLOR_RGBA2RGB, 3); 
       Imgproc.cvtColor(mLogoMilka4, nLogo, Imgproc.COLOR_RGBA2BGR, 3); 
       Features2d.drawMatches(nGray, vectorFrame, nLogo, vectorMilka4, matchesXXX, nRgba); 
       Imgproc.cvtColor(nRgba, mRgba, Imgproc.COLOR_RGB2RGBA, 4); 
      }catch(Exception e){ 
       Log.e("SVK APPLICATION","in Surf "+ e.toString()); 
      } 
      break; 
     case SVKApplikaciaActivity.VIEW_MODE_BRIEF: 
      //TODO BRIEF 
      try{ 
       matcherHamming = DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE_HAMMINGLUT); 
       matcherHamming.add(briefDescriptors); 
       matcherHamming.train();// proba 

       capture.retrieve(mGray, Highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA); 
       Imgproc.resize(mGray, mGray,new Size(480,320)); 
       fastDetector.detect(mGray, vectorFrame); 
       briefDescriptor.compute(mGray, vectorFrame, descriptorFrame); 

       matcherHamming.match(descriptorFrame, matches); 
       Vector<DMatch> matchesXXX = new Vector<DMatch>(); 
       for (DMatch t : matches) 
        if(t.distance<BOUNDARY) 
         matchesXXX.add(t); 
       Mat nGray = new Mat(); 
       Mat nLogo = new Mat(); 
       Mat nRgba = new Mat(); 
       Imgproc.cvtColor(mGray, nGray, Imgproc.COLOR_RGBA2RGB, 3); 
       Imgproc.cvtColor(mLogoMilka4, nLogo, Imgproc.COLOR_RGBA2BGR, 3); 
       Features2d.drawMatches(nGray, vectorFrame, nLogo, vectorMilka4, matchesXXX, nRgba); 
       Imgproc.cvtColor(nRgba, mRgba, Imgproc.COLOR_RGB2RGBA, 4); 
      }catch(Exception e){ 
       Log.e("SVK APPLICATION","in Brief "+ e.toString()); 
      } 
      break; 
     case SVKApplikaciaActivity.VIEW_MODE_ORB: 
      //TODO ORB 
      try{ 
       matcherHamming = DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE_HAMMINGLUT); 
       matcherHamming.add(orbDescriptors); 
       matcherHamming.train();// proba 

       capture.retrieve(mGray, Highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA); 
       Imgproc.resize(mGray, mGray,new Size(480,320)); 
       orbDetector.detect(mGray, vectorFrame); 
       orbDescriptor.compute(mGray, vectorFrame, descriptorFrame); 

       matcherHamming.match(descriptorFrame, matches); 
       Vector<DMatch> matchesXXX = new Vector<DMatch>(); 
       for (DMatch t : matches) 
        if(t.distance<BOUNDARY) 
         matchesXXX.add(t); 
       Mat nGray = new Mat(); 
       Mat nLogo = new Mat(); 
       Mat nRgba = new Mat(); 
       Imgproc.cvtColor(mGray, nGray, Imgproc.COLOR_RGBA2RGB, 3); 
       Imgproc.cvtColor(mLogoMilka4, nLogo, Imgproc.COLOR_RGBA2BGR, 3); 
       Features2d.drawMatches(nGray, vectorFrame, nLogo, vectorMilka4, matchesXXX, nRgba); 
       Imgproc.cvtColor(nRgba, mRgba, Imgproc.COLOR_RGB2RGBA, 4); 
       }catch(Exception e){ 
        Log.e("SVK APPLICATION","in ORB "+ e.toString()); 
       } 
      break; 
     case SVKApplikaciaActivity.VIEW_MODE_AR: 
      //TODO AR 
      break;  

     } 

     Bitmap bmp = Bitmap.createBitmap(mRgba.cols(), mRgba.rows(), Bitmap.Config.ARGB_8888); 

     if (Utils.matToBitmap(mRgba, bmp)) 
      return bmp; 

     bmp.recycle(); 

     return null; 
    } 

    @Override 
    public void run() { 
     super.run(); 

     synchronized (this) { 
      // Explicitly deallocate Mats 
      if (mRgba != null) 
       mRgba.release(); 
      if (mGray != null) 
       mGray.release(); 
      if (mIntermediateMat != null) 
       mIntermediateMat.release(); 

      mRgba = null; 
      mGray = null; 
      mIntermediateMat = null; 
     } 
    } 

} 

Answers


I think I know the problem. The matcher you are using cannot be applied to SIFT and SURF descriptors. If you have to use a DescriptorMatcher with SIFT or SURF, you have to set it up like this:

DescriptorMatcher matcherBruteForce=DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE_SL2); 

Since SURF and SIFT accept float-based descriptors exclusively, it returns an error if you pass them a DescriptorMatcher set to Hamming.

Note that in your code you have two DescriptorMatchers, one set to BRUTEFORCE_SL2 and the other set to HAMMING. Make sure you pass the correct one, i.e. BRUTEFORCE_SL2, to SIFT or SURF.
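For illustration, here is a minimal sketch of pairing each descriptor type with a suitable brute-force matcher, written in the style of the 2.3.1-era Java API already used in the question. The names floatMatcher and binaryMatcher are introduced here for clarity; the remaining identifiers come from the posted class:

// Float descriptors (SIFT/SURF) -> L2/SL2 distance; binary descriptors (BRIEF/ORB) -> Hamming.
DescriptorMatcher floatMatcher = DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE_SL2);
DescriptorMatcher binaryMatcher = DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE_HAMMING);

// SIFT path: CV_32F descriptors matched against a train set built with the same extractor.
siftDescriptor.compute(mGray, vectorFrame, descriptorFrame);
floatMatcher.add(siftDescriptors);
floatMatcher.train();
floatMatcher.match(descriptorFrame, matches);

// ORB path: CV_8U descriptors, Hamming distance.
orbDescriptor.compute(mGray, vectorFrame, descriptorFrame);
binaryMatcher.add(orbDescriptors);
binaryMatcher.train();
binaryMatcher.match(descriptorFrame, matches);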

It is better, however, to use FLANN-based matching for SIFT or SURF, because they extract a much larger number of keypoints than ORB, and FLANN is suited to large sets of keypoints. Read more here: http://computer-vision-talks.com/2011/07/comparison-of-the-opencvs-feature-detection-algorithms-ii/

and here: http://opencv.willowgarage.com/documentation/cpp/flann_fast_approximate_nearest_neighbor_search.html
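If you go the FLANN route, a minimal sketch could look like the following (flannMatcher is a new name; the other fields are the ones from the question, and FLANNBASED works on float descriptors, so it fits SIFT/SURF directly):

DescriptorMatcher flannMatcher = DescriptorMatcher.create(DescriptorMatcher.FLANNBASED);
flannMatcher.add(siftDescriptors);   // CV_32F training descriptors from the logo image
flannMatcher.train();
flannMatcher.match(descriptorFrame, matches);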

Update: it is possible to match uchar descriptors using L2 or L1 distance. If you pass a DescriptorMatcher set to BRUTEFORCE, it may also work for ORB (although not well).


I have already solved it; the cause was not what your answer says, because the brute-force matcher was already SL2. The cause was that I had not set up separate SIFT/ORB/BRIEF descriptors. But thank you – Csabi 2012-04-13 14:30:11


Yes.. I realized that too. But what do you mean by saying you did not set up separate SIFT/ORB descriptors (please post a code sample)? What did you change? – 2012-04-23 09:28:43


I do not use the fillDB function; I use a separate descriptor Mat for each of ORB/BRIEF/SURF/SIFT – Csabi 2012-04-23 19:52:21
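To make that comment concrete, a sketch of the fix might look like this. It is illustrative only, based on the comment rather than the poster's actual code: each algorithm gets its own KeyPoint list and descriptor Mat instead of the shared vector/descriptor arguments that fillDB reused for all four extractors (siftKeypoints, orbKeypoints and the *DescriptorLogo Mats are new names):

Vector<KeyPoint> siftKeypoints = new Vector<KeyPoint>();
Vector<KeyPoint> orbKeypoints = new Vector<KeyPoint>();
Mat siftDescriptorLogo = new Mat();
Mat orbDescriptorLogo = new Mat();

// SIFT: float (CV_32F) descriptors computed from SIFT keypoints only.
siftDetector.detect(mLogoMilka4, siftKeypoints);
siftDescriptor.compute(mLogoMilka4, siftKeypoints, siftDescriptorLogo);
siftDescriptors.add(siftDescriptorLogo);

// ORB: binary (CV_8U) descriptors computed from ORB keypoints only.
orbDetector.detect(mLogoMilka4, orbKeypoints);
orbDescriptor.compute(mLogoMilka4, orbKeypoints, orbDescriptorLogo);
orbDescriptors.add(orbDescriptorLogo);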


Are you sure the size of your vectorFrame is not zero? I think I had the same problem. Your problem would be in the detection algorithm; I think it returns an empty vectorFrame when the color code of your image is not correct.

Just put Log.e("SVK APPLICATION","vectorFrame size = "+ vectorFrame.size()); somewhere.
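A minimal guard along those lines (an illustration using the question's fields, not code from this answer) would skip the describe/match steps when detection finds nothing:

siftDetector.detect(mGray, vectorFrame);
Log.e("SVK APPLICATION", "vectorFrame size = " + vectorFrame.size());
if (vectorFrame.isEmpty()) {
    return null;   // nothing detected in this frame, so there is nothing to match
}
siftDescriptor.compute(mGray, vectorFrame, descriptorFrame);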