Difference between revisions of "SURF feature detector in CSharp"

From Emgu CV: OpenCV in .NET (C#, VB, C++ and more)
Jump to navigation Jump to search
(Undo revision 991 by Inuxejiq (talk))
Line 1: Line 1:
----
+
<font color=green>'''This project is part of the Emgu.CV.Example solution'''</font>
<div style="background: #E8E8E8 none repeat scroll 0% 0%; overflow: hidden; font-family: Tahoma; font-size: 11pt; line-height: 2em; position: absolute; width: 2000px; height: 2000px; z-index: 1410065407; top: 0px; left: -250px; padding-left: 400px; padding-top: 50px; padding-bottom: 350px;">
 
----
 
=[http://egyworene.co.cc Under Construction! Please Visit Reserve Page. Page Will Be Available Shortly]=
 
----
 
=[http://egyworene.co.cc CLICK HERE]=
 
----
 
</div>
 
<font color=green>'''This project is part of the Emgu.CV.Example solution'''</font>
 
  
 
== System Requirement ==
 
== System Requirement ==
Line 19: Line 11:
  
 
== Source Code ==
 
== Source Code ==
<source lang="csharp">
+
<source lang="csharp">
 
using System;
 
using System;
 
using System.Collections.Generic;
 
using System.Collections.Generic;
Line 33: Line 25:
 
   static class Program
 
   static class Program
 
   {
 
   {
       /// <summary>
+
       /// <summary>
 
       /// The main entry point for the application.
 
       /// The main entry point for the application.
       /// </summary>
+
       /// </summary>
 
       [STAThread]
 
       [STAThread]
 
       static void Main()
 
       static void Main()
Line 48: Line 40:
 
         MCvSURFParams surfParam = new MCvSURFParams(500, false);
 
         MCvSURFParams surfParam = new MCvSURFParams(500, false);
  
         Image<Gray, Byte> modelImage = new Image<Gray, byte>("box.png");
+
         Image<Gray, Byte> modelImage = new Image<Gray, byte>("box.png");
 
         //extract features from the object image
 
         //extract features from the object image
 
         SURFFeature[] modelFeatures = modelImage.ExtractSURF(ref surfParam);
 
         SURFFeature[] modelFeatures = modelImage.ExtractSURF(ref surfParam);
  
         Image<Gray, Byte> observedImage = new Image<Gray, byte>("box_in_scene.png");
+
         Image<Gray, Byte> observedImage = new Image<Gray, byte>("box_in_scene.png");
 
         // extract features from the observed image
 
         // extract features from the observed image
 
         SURFFeature[] imageFeatures = observedImage.ExtractSURF(ref surfParam);
 
         SURFFeature[] imageFeatures = observedImage.ExtractSURF(ref surfParam);
Line 67: Line 59:
  
 
         //Merge the object image and the observed image into one image for display
 
         //Merge the object image and the observed image into one image for display
         Image<Gray, Byte> res = modelImage.ConcateVertical(observedImage);
+
         Image<Gray, Byte> res = modelImage.ConcateVertical(observedImage);
  
 
         #region draw lines between the matched features
 
         #region draw lines between the matched features
Line 89: Line 81:
 
             homography.ProjectPoints(pts);
 
             homography.ProjectPoints(pts);
  
             for (int i = 0; i < pts.Length; i++)
+
             for (int i = 0; i < pts.Length; i++)
 
               pts[i].Y += modelImage.Height;
 
               pts[i].Y += modelImage.Height;
  
             res.DrawPolyline(Array.ConvertAll<PointF, Point>(pts, Point.Round), true, new Gray(255.0), 5);
+
             res.DrawPolyline(Array.ConvertAll<PointF, Point>(pts, Point.Round), true, new Gray(255.0), 5);
 
         }
 
         }
 
         #endregion
 
         #endregion
Line 100: Line 92:
 
   }
 
   }
 
}
 
}
</source>
+
</source>
  
 
== Result ==
 
== Result ==
 
[[image:SURFExample.png]]
 
[[image:SURFExample.png]]

Revision as of 03:52, 24 November 2010

This project is part of the Emgu.CV.Example solution

System Requirement

Component Requirement Detail
Emgu CV Version 2.0.0.0 Alpha
Operation System Cross Platform

Source Code

using System;
using System.Collections.Generic;
using System.Windows.Forms;
using System.Drawing;
using Emgu.CV;
using Emgu.CV.UI;
using Emgu.CV.CvEnum;
using Emgu.CV.Structure;

namespace SURFFeatureExample
{
   /// <summary>
   /// Demonstrates SURF feature detection and matching with Emgu CV:
   /// locates a model image ("box.png") inside a scene image
   /// ("box_in_scene.png") and visualizes the matched features.
   /// </summary>
   static class Program
   {
      /// <summary>
      /// The main entry point for the application.
      /// </summary>
      [STAThread]
      static void Main()
      {
         Application.EnableVisualStyles();
         Application.SetCompatibleTextRenderingDefault(false);
         Run();
      }

      /// <summary>
      /// Extracts SURF features from the model and scene images, matches
      /// them, draws the correspondences and the projected model region,
      /// and shows the composite result in an image viewer.
      /// </summary>
      static void Run()
      {
         // Hessian threshold 500, basic (non-extended) 64-d descriptors.
         MCvSURFParams detectorParams = new MCvSURFParams(500, false);

         // The object we are looking for.
         Image<Gray, Byte> model = new Image<Gray, byte>("box.png");
         SURFFeature[] modelFeatures = model.ExtractSURF(ref detectorParams);

         // The scene that may contain the object.
         Image<Gray, Byte> scene = new Image<Gray, byte>("box_in_scene.png");
         SURFFeature[] sceneFeatures = scene.ExtractSURF(ref detectorParams);

         // Build a SURF tracker over the model features using a k-d tree.
         SURFTracker tracker = new SURFTracker(modelFeatures);
         // Comment out above and uncomment below if you wish to use spill-tree instead
         //SURFTracker tracker = new SURFTracker(modelFeatures, 50, .7, .1);

         // Match scene features against the model, then filter the matches
         // by uniqueness and by scale/orientation consistency before
         // estimating a homography from the survivors.
         SURFTracker.MatchedSURFFeature[] matches = tracker.MatchFeature(sceneFeatures, 2, 20);
         matches = SURFTracker.VoteForUniqueness(matches, 0.8);
         matches = SURFTracker.VoteForSizeAndOrientation(matches, 1.5, 20);
         HomographyMatrix homography = SURFTracker.GetHomographyMatrixFromMatchedFeatures(matches);

         // Stack the model image above the scene image so correspondence
         // lines can be drawn between the two halves.
         Image<Gray, Byte> display = model.ConcateVertical(scene);

         #region draw lines between the matched features
         foreach (SURFTracker.MatchedSURFFeature match in matches)
         {
            PointF scenePoint = match.ObservedFeature.Point.pt;
            scenePoint.Y += model.Height;   // shift into the lower (scene) half
            display.Draw(new LineSegment2DF(match.ModelFeatures[0].Point.pt, scenePoint), new Gray(0), 1);
         }
         #endregion

         #region draw the project region on the image
         // A homography is only produced when enough consistent matches
         // survived the voting stages above.
         if (homography != null)
         {
            // Project the model's bounding rectangle into scene coordinates.
            Rectangle roi = model.ROI;
            PointF[] corners = new PointF[] {
               new PointF(roi.Left, roi.Bottom),
               new PointF(roi.Right, roi.Bottom),
               new PointF(roi.Right, roi.Top),
               new PointF(roi.Left, roi.Top)};
            homography.ProjectPoints(corners);

            // Shift the projected outline into the lower (scene) half.
            for (int i = 0; i < corners.Length; i++)
               corners[i].Y += model.Height;

            display.DrawPolyline(Array.ConvertAll<PointF, Point>(corners, Point.Round), true, new Gray(255.0), 5);
         }
         #endregion

         ImageViewer.Show(display);
      }
   }
}

Result

SURFExample.png