using OpenCVForUnity.Calib3dModule;
using OpenCVForUnity.CoreModule;
using OpenCVForUnity.Features2dModule;
using OpenCVForUnity.ImgcodecsModule;
using OpenCVForUnity.ImgprocModule;
using System;
using System.Collections.Generic;
using System.IO;
using System.Threading.Tasks;
using UguiToolkit.Editor;
using UnityEngine;

public static class ImageUtils
{
    // Converts an absolute image path into a project-relative path using forward slashes.
    public static string FormatImgFilePath(string imgFilePath)
    {
        string projectPath = Directory.GetParent(Application.dataPath).FullName;
        return Path.GetRelativePath(projectPath, imgFilePath).Replace("\\", "/");
    }

    // Loads every readable PNG in the folder and records its project-relative path.
    public static void LoadPngImagesFromFolder(string folderPath, List<Mat> images, List<string> imagePaths)
    {
        foreach (string file in Directory.GetFiles(folderPath, "*.png"))
        {
            Mat img = Imgcodecs.imread(file);
            if (!img.empty())
            {
                images.Add(img);
                imagePaths.Add(FormatImgFilePath(file));
            }
        }
    }

    // Compares the target image against every loaded image and reports each rotation/scale
    // result through the callbacks, marshalled onto the Unity main thread.
    public static async Task ProcessFolderAsync(List<Mat> images, string targetFilePath, RotationScaleDetector detector,
        double distanceDifference, Action<int, (double, (double, double))> callback,
        Action endCallback)
    {
        Debug.Log("ProcessFolderAsync: " + targetFilePath);
        Mat targetImage = Imgcodecs.imread(targetFilePath);
        for (int index = 0; index < images.Count; index++)
        {
            var img = images[index];
            var result = await detector.GetRotationScaleAsync(targetImage, img, distanceDifference);
            int _index = index; // copy the loop variable so the closure captures this iteration's index
            UnityMainThreadDispatcher.Instance().Enqueue(() =>
            {
                if (result.Item1.HasValue && result.Item2.HasValue)
                {
                    double rotationAngleDegrees = result.Item1.Value;
                    var scale = result.Item2.Value;
                    double scaleX = scale.Item1;
                    double scaleY = scale.Item2;

                    callback(_index, (rotationAngleDegrees, (scaleX, scaleY)));
                    Debug.Log($"Target Image -> Image {_index}");
                    Debug.Log($"Rotation Angle: {rotationAngleDegrees} degrees");
                    Debug.Log($"Scale X: {scaleX}");
                    Debug.Log($"Scale Y: {scaleY}");
                }
            });
        }

        UnityMainThreadDispatcher.Instance().Enqueue(() =>
        {
            endCallback();
        });
    }
}

// Estimates rotation and scale between two images via SIFT features, ratio-test matching,
// and a RANSAC-fitted partial affine (similarity) transform.
// Note: internal buffers are reused between calls, so do not call it concurrently.
public class RotationScaleDetector
{
    private SIFT sift;
    private BFMatcher bf;
    private Mat gray;
    private Mat descriptors1;
    private Mat descriptors2;
    private MatOfKeyPoint keypoints;
    private Mat inliers;
    private List<MatOfDMatch> knnMatches;
    private List<DMatch> goodMatches;
    private List<Point> srcPts;
    private List<Point> dstPts;

    public RotationScaleDetector()
    {
        sift = SIFT.create();
        bf = BFMatcher.create();
        gray = new Mat();
        descriptors1 = new Mat();
        descriptors2 = new Mat();
        keypoints = new MatOfKeyPoint();
        inliers = new Mat();
        knnMatches = new List<MatOfDMatch>();
        goodMatches = new List<DMatch>();
        srcPts = new List<Point>();
        dstPts = new List<Point>();
    }

    // Converts the image to grayscale, then detects SIFT keypoints and fills the given
    // descriptor Mat. The shared keypoints buffer is reused, so the result is copied out.
    private KeyPoint[] Sift(Mat image, Mat descriptors)
    {
        Imgproc.cvtColor(image, gray, Imgproc.COLOR_BGR2GRAY);
        sift.detectAndCompute(gray, new Mat(), keypoints, descriptors);
        return keypoints.toArray();
    }

    // Estimates the rotation (degrees) and per-axis scale between img0 and img1.
    // distanceDifference is the Lowe ratio-test threshold applied to the kNN matches.
    // Returns (null, null) when there are too few features/matches or no model is found.
    public async Task<(double?, (double, double)?)> GetRotationScaleAsync(Mat img0, Mat img1, double distanceDifference)
    {
        // Feature detection, matching and RANSAC estimation are CPU-heavy, so run them on a
        // worker thread; callers marshal results back to the Unity main thread as needed.
        return await Task.Run<(double?, (double, double)?)>(() =>
        {
            KeyPoint[] kp1 = Sift(img0, descriptors1);
            KeyPoint[] kp2 = Sift(img1, descriptors2);

            if (kp1.Length == 0 || kp2.Length == 0)
            {
                return (null, null);
            }

            knnMatches.Clear();
            goodMatches.Clear();
            bf.knnMatch(descriptors1, descriptors2, knnMatches, 2);

            // Lowe's ratio test: keep a match only if it is clearly better than the runner-up.
            foreach (MatOfDMatch matofDMatch in knnMatches)
            {
                DMatch[] matches = matofDMatch.toArray();
                if (matches.Length >= 2 && matches[0].distance < distanceDifference * matches[1].distance)
                {
                    goodMatches.Add(matches[0]);
                }
            }

            if (goodMatches.Count < 3)
            {
                return (null, null);
            }

            srcPts.Clear();
            dstPts.Clear();
            foreach (DMatch match in goodMatches)
            {
                srcPts.Add(kp1[match.queryIdx].pt);
                dstPts.Add(kp2[match.trainIdx].pt);
            }

            MatOfPoint2f srcMatOfPoint2f = new MatOfPoint2f(srcPts.ToArray());
            MatOfPoint2f dstMatOfPoint2f = new MatOfPoint2f(dstPts.ToArray());

            // Robustly fit a similarity transform (rotation + uniform scale + translation).
            Mat M = Calib3d.estimateAffinePartial2D(srcMatOfPoint2f, dstMatOfPoint2f, inliers, Calib3d.RANSAC, 5);
            if (M.empty())
            {
                return (null, null);
            }

            // The left 2x2 block of M is a scaled rotation matrix: the angle comes from atan2
            // and the scale from the row norms (both rows give the same value for a similarity).
            Mat R = M.colRange(0, 2);
            double theta = Math.Atan2(R.get(1, 0)[0], R.get(0, 0)[0]);
            double rotationAngleDegrees = theta * 180.0 / Math.PI;

            double scaleX = Core.norm(R.row(0));
            double scaleY = Core.norm(R.row(1));

            return (rotationAngleDegrees, (scaleX, scaleY));
        });
    }
}
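
// --- Usage sketch (illustrative only) ---------------------------------------------------
// A minimal example of how these utilities might be wired together. The folder path, the
// target image path, the 0.75 ratio-test threshold, and the RotationScaleExample class name
// are hypothetical and not part of the original code; the sketch assumes a
// UnityMainThreadDispatcher instance is available in the current context.
public static class RotationScaleExample
{
    public static async Task RunAsync()
    {
        var images = new List<Mat>();
        var imagePaths = new List<string>();

        // Load every PNG from a (hypothetical) folder inside the project.
        ImageUtils.LoadPngImagesFromFolder(Application.dataPath + "/Textures", images, imagePaths);

        var detector = new RotationScaleDetector();

        // 0.75 is a commonly used Lowe ratio-test threshold; tune as needed.
        await ImageUtils.ProcessFolderAsync(
            images,
            Application.dataPath + "/Textures/target.png",
            detector,
            0.75,
            (index, result) =>
            {
                Debug.Log($"{imagePaths[index]}: rotation {result.Item1:F2} deg, " +
                          $"scale ({result.Item2.Item1:F3}, {result.Item2.Item2:F3})");
            },
            () => Debug.Log("Rotation/scale estimation finished."));
    }
}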