The SURF Feature Method

Date: 2022-12-17 17:03:35

The Emgu CV (C#) code below matches SURF features between a model image and an observed image: FindMatch detects keypoints, matches descriptors, filters the matches, and estimates a homography; Draw renders the matched keypoints and the projected outline of the model.
// Required namespaces (Emgu CV 3.x; SURF lives in the XFeatures2D package):
using System;
using System.Diagnostics;
using System.Drawing;
using Emgu.CV;
using Emgu.CV.CvEnum;
using Emgu.CV.Features2D;
using Emgu.CV.Structure;
using Emgu.CV.Util;
using Emgu.CV.XFeatures2D;

public static void FindMatch(Mat modelImage, Mat observedImage, out long matchTime, out VectorOfKeyPoint modelKeyPoints, out VectorOfKeyPoint observedKeyPoints, VectorOfVectorOfDMatch matches, out Mat mask, out Mat homography)
{
    int k = 2;                        // nearest neighbours per query descriptor
    double uniquenessThreshold = 0.8; // ratio-test threshold
    double hessianThresh = 300;       // SURF Hessian threshold
    Stopwatch watch;
    homography = null;
    modelKeyPoints = new VectorOfKeyPoint();
    observedKeyPoints = new VectorOfKeyPoint();

    using (UMat uModelImage = modelImage.ToUMat(AccessType.Read))
    using (UMat uObservedImage = observedImage.ToUMat(AccessType.Read))
    {
        SURF surfCPU = new SURF(hessianThresh);

        // Extract features from the model image
        UMat modelDescriptors = new UMat();
        surfCPU.DetectAndCompute(uModelImage, null, modelKeyPoints, modelDescriptors, false);

        watch = Stopwatch.StartNew();

        // Extract features from the observed image
        UMat observedDescriptors = new UMat();
        surfCPU.DetectAndCompute(uObservedImage, null, observedKeyPoints, observedDescriptors, false);

        // Brute-force matching with L2 distance, k nearest neighbours per descriptor
        BFMatcher matcher = new BFMatcher(DistanceType.L2);
        matcher.Add(modelDescriptors);
        matcher.KnnMatch(observedDescriptors, matches, k, null);

        // One mask row per match; 255 = keep, 0 = reject
        mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
        mask.SetTo(new MCvScalar(255));
        Features2DToolbox.VoteForUniqueness(matches, uniquenessThreshold, mask);

        int nonZeroCount = CvInvoke.CountNonZero(mask);
        if (nonZeroCount >= 4) // at least 4 correspondences are needed for a homography
        {
            nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints,
                matches, mask, 1.5, 20);
            if (nonZeroCount >= 4)
                homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints,
                    observedKeyPoints, matches, mask, 2);
        }

        watch.Stop();
    }
    matchTime = watch.ElapsedMilliseconds;
}
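For reference, Features2DToolbox.VoteForUniqueness performs Lowe's ratio test on the k-nearest-neighbour matches: a match survives only if its best distance is clearly smaller than its second-best distance. Below is a minimal sketch of that check written against the Emgu CV match types; it illustrates the idea and is not the library's actual implementation.

// Sketch of the ratio test behind VoteForUniqueness: keep match i only when
// bestDistance < uniquenessThreshold * secondBestDistance (0.8 in the code above).
static byte[] RatioTest(VectorOfVectorOfDMatch matches, double uniquenessThreshold)
{
    byte[] keep = new byte[matches.Size];
    for (int i = 0; i < matches.Size; i++)
    {
        using (VectorOfDMatch neighbours = matches[i]) // the k = 2 candidates for query i
        {
            MDMatch[] m = neighbours.ToArray();
            if (m.Length >= 2 && m[0].Distance < uniquenessThreshold * m[1].Distance)
                keep[i] = 255; // unique enough: keep this match
        }
    }
    return keep;
}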
/// <summary>
/// Draw the model image and observed image, the matched features and homography projection.
/// </summary>
/// <param name="modelImage">The model image</param>
/// <param name="observedImage">The observed image</param>
/// <param name="matchTime">The output total time for computing the homography matrix.</param>
/// <returns>The model image and observed image, the matched features and homography projection.</returns>
public static Mat Draw(Mat modelImage, Mat observedImage, out long matchTime)
{
    Mat homography;
    VectorOfKeyPoint modelKeyPoints;
    VectorOfKeyPoint observedKeyPoints;
    using (VectorOfVectorOfDMatch matches = new VectorOfVectorOfDMatch())
    {
        Mat mask;
        FindMatch(modelImage, observedImage, out matchTime, out modelKeyPoints, out observedKeyPoints, matches,
            out mask, out homography);

        // Draw the matched keypoints
        Mat result = new Mat();
        Features2DToolbox.DrawMatches(modelImage, modelKeyPoints, observedImage, observedKeyPoints,
            matches, result, new MCvScalar(255, 255, 255), new MCvScalar(255, 255, 255), mask);

        #region draw the projected region on the image

        if (homography != null)
        {
            // Draw a rectangle along the projected model
            Rectangle rect = new Rectangle(Point.Empty, modelImage.Size);
            PointF[] pts = new PointF[]
            {
                new PointF(rect.Left, rect.Bottom),
                new PointF(rect.Right, rect.Bottom),
                new PointF(rect.Right, rect.Top),
                new PointF(rect.Left, rect.Top)
            };
            pts = CvInvoke.PerspectiveTransform(pts, homography);

            Point[] points = Array.ConvertAll<PointF, Point>(pts, Point.Round);
            using (VectorOfPoint vp = new VectorOfPoint(points))
            {
                CvInvoke.Polylines(result, vp, true, new MCvScalar(255, 0, 0, 255), 5);
            }
        }

        #endregion

        return result;
    }
}
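Putting it together, here is a minimal usage sketch. The file names box.png and box_in_scene.png are placeholders, and it assumes an Emgu CV version whose image-loading enum is ImreadModes (older 3.x releases used a differently named enum).

// Hypothetical entry point; replace the file names with real images.
public static void Main()
{
    using (Mat modelImage = CvInvoke.Imread("box.png", ImreadModes.Grayscale))
    using (Mat observedImage = CvInvoke.Imread("box_in_scene.png", ImreadModes.Grayscale))
    {
        long matchTime;
        using (Mat result = Draw(modelImage, observedImage, out matchTime))
        {
            CvInvoke.Imshow(string.Format("Matched in {0} ms", matchTime), result);
            CvInvoke.WaitKey(0);
        }
    }
}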