#if UNITY_EDITOR
using System;
using System.Collections.Generic;
using System.IO;
using System.Security.Cryptography;
using System.Text;
using System.Threading.Tasks;
using OpenCVForUnity.Calib3dModule;
using OpenCVForUnity.CoreModule;
using OpenCVForUnity.Features2dModule;
using OpenCVForUnity.ImgcodecsModule;
using OpenCVForUnity.ImgprocModule;
using OpenCVForUnity.UnityUtils;
using UguiToolkit.Editor;
using UnityEngine;
/// <summary>
/// Editor-only image utilities built on OpenCVForUnity: project-relative path
/// formatting, PNG folder loading, and parallel rotation/scale comparison of a
/// target image against a list of images.
/// </summary>
public static class ImageUtils
{
    /// <summary>Enables/disables OpenCVForUnity debug mode (forwards to <c>Utils.setDebugMode</c>).</summary>
    public static void SetDebugMode(bool debug)
    {
        Utils.setDebugMode(debug);
    }

    /// <summary>
    /// Converts an absolute file path into a path relative to the Unity project
    /// root (the parent directory of Assets/), normalized to forward slashes.
    /// </summary>
    public static string FormatImgFilePath(string imgFilePath)
    {
        string projectPath = Directory.GetParent(Application.dataPath).FullName;
        return Path.GetRelativePath(projectPath, imgFilePath).Replace("\\", "/");
    }

    /// <summary>
    /// Loads every readable "*.png" file in <paramref name="folderPath"/>.
    /// Successfully decoded images are appended to <paramref name="images"/>
    /// (callers own these Mats and must release them) and their project-relative
    /// paths to <paramref name="imagePaths"/>; unreadable files are skipped.
    /// </summary>
    public static void LoadPngImagesFromFolder(string folderPath, List<Mat> images, List<string> imagePaths)
    {
        foreach (string file in Directory.GetFiles(folderPath, "*.png"))
        {
            Mat img = Imgcodecs.imread(file);
            if (!img.empty())
            {
                images.Add(img);
                imagePaths.Add(FormatImgFilePath(file));
            }
            else
            {
                // BUGFIX: the empty Mat header from a failed imread was
                // previously dropped without releasing its native object.
                img.Dispose();
            }
        }
    }

    // Caches reused across calls to avoid reallocation.
    // NOTE(review): these statics make overlapping ProcessFolderAsync calls
    // unsafe (tasks.Clear() would race) — confirm callers serialize invocations.
    static List<Task<(double?, (double, double)?)>> tasks;
    static ObjectPool<RotationScaleDetector> detectorPool;

    /// <summary>
    /// Compares the image at <paramref name="targetFilePath"/> against every Mat
    /// in <paramref name="images"/> in parallel. For each image whose rotation
    /// could be estimated, <paramref name="callback"/> is invoked on the Unity
    /// main thread with (index, (rotationDegrees, (scaleX, scaleY),
    /// matchedBySimilarityOnly)). <paramref name="endCallback"/> is queued on the
    /// main thread after all per-image callbacks have been enqueued.
    /// </summary>
    /// <param name="distanceDifference">Lowe ratio-test threshold forwarded to each detector.</param>
    public static async Task ProcessFolderAsync(List<Mat> images, string targetFilePath,
        double distanceDifference, Action<int, (double, (double, double), bool)> callback,
        Action endCallback)
    {
        if (tasks == null) tasks = new (images.Count);
        if (detectorPool == null) detectorPool = new (images.Count);

        Mat targetImage = Imgcodecs.imread(targetFilePath);

        List<RotationScaleDetector> detectors = new(images.Count);
        tasks.Clear();
        foreach (var img in images)
        {
            RotationScaleDetector detector = detectorPool.GetObject();
            tasks.Add(detector.GetRotationScaleAsync(targetImage, img, distanceDifference));
            detectors.Add(detector);
        }

        var resultsTask = await Task.WhenAll(tasks.ToArray());
        foreach (var detector in detectors) detectorPool.ReturnObject(detector);
        // BUGFIX: the target image's native buffer was never released. Every
        // worker task has completed at this point (WhenAll), so it is safe.
        targetImage.Dispose();

        for (int index = 0; index < resultsTask.Length; index++)
        {
            int _index = index; // stable copy captured by the closure below
            var result = resultsTask[index];
            UnityMainThreadDispatcher.Instance().Enqueue(() =>
            {
                // Item1 == null means the detector could not produce an estimate.
                if (result.Item1.HasValue)
                {
                    double rotationAngleDegrees = result.Item1.Value;
                    double scaleX = 0;
                    double scaleY = 0;

                    if (result.Item2.HasValue)
                    {
                        var scale = result.Item2.Value;
                        scaleX = scale.Item1;
                        scaleY = scale.Item2;

                        callback(_index, (rotationAngleDegrees, (scaleX, scaleY), false));
                    }
                    else
                    { // no scale available: images matched by hash/similarity only
                        callback(_index, (rotationAngleDegrees, (scaleX, scaleY), true));
                    }

                    Debug.Log($"Target Image -> Image {_index}");
                    Debug.Log($"Rotation Angle: {rotationAngleDegrees} degrees");
                    Debug.Log($"Scale X: {scaleX}");
                    Debug.Log($"Scale Y: {scaleY}");
                    Debug.Log($"SimilarityCalc : {result.Item2 == null}");
                }
            });
        }

        UnityMainThreadDispatcher.Instance().Enqueue(() =>
        {
            endCallback();
        });
    }
}
/// <summary>
/// Estimates the rotation angle and per-axis scale mapping one image onto
/// another via SIFT feature matching and a RANSAC partial-affine fit. Cheap
/// MD5 and perceptual-hash checks short-circuit the expensive SIFT path for
/// identical or near-identical images. Instances reuse their OpenCV scratch
/// buffers, so a single instance is NOT safe for concurrent use — use one
/// instance per in-flight task (as the pooling caller does).
/// </summary>
public class RotationScaleDetector
{
    private SIFT sift;
    private BFMatcher bf;
    private Mat gray;                  // scratch: grayscale conversion target
    private Mat descriptors1;
    private Mat descriptors2;
    private MatOfKeyPoint keypoints;   // scratch: overwritten by each Sift() call
    private Mat inliers;
    private Mat emptyMask;             // reusable empty mask for detectAndCompute
    private List<MatOfDMatch> knnMatches;
    private List<DMatch> goodMatches;
    private List<Point> srcPts;
    private List<Point> dstPts;
    private Mat resizedImage;          // scratch: 32x32 pHash input
    private Mat dctImage;              // scratch: DCT output
    StringBuilder sb;                  // scratch: MD5 hex formatting

    public RotationScaleDetector()
    {
        sift = SIFT.create();
        bf = BFMatcher.create();
        gray = new Mat();
        descriptors1 = new Mat();
        descriptors2 = new Mat();
        keypoints = new MatOfKeyPoint();
        inliers = new Mat();
        emptyMask = new Mat();
        knnMatches = new List<MatOfDMatch>();
        goodMatches = new List<DMatch>();
        srcPts = new List<Point>();
        dstPts = new List<Point>();
        resizedImage = new Mat();
        dctImage = new Mat();
        sb = new StringBuilder();
    }

    /// <summary>
    /// Detects SIFT keypoints on the grayscale version of <paramref name="image"/>,
    /// writing descriptors into <paramref name="descriptors"/> and returning the
    /// keypoint array. Overwrites the shared <c>gray</c>/<c>keypoints</c> buffers.
    /// </summary>
    private KeyPoint[] Sift(Mat image, Mat descriptors)
    {
        Imgproc.cvtColor(image, gray, Imgproc.COLOR_BGR2GRAY);
        // BUGFIX: a fresh "new Mat()" mask was allocated (and leaked) on every
        // call; reuse a single cached empty mask instead.
        sift.detectAndCompute(gray, emptyMask, keypoints, descriptors);
        return keypoints.toArray();
    }

    #region MD5
    /// <summary>
    /// Computes the MD5 hash of the raw pixel buffer of <paramref name="image"/>
    /// and returns it as a lowercase hex string. Used purely as a fast
    /// exact-duplicate check, not for any security purpose.
    /// </summary>
    public string CalculateMD5(Mat image)
    {
        // Copy the Mat's pixel data into a managed byte array.
        byte[] imageBytes = new byte[image.total() * image.elemSize()];
        image.get(0, 0, imageBytes);

        using (MD5 md5 = MD5.Create())
        {
            byte[] hashBytes = md5.ComputeHash(imageBytes);

            // Format the digest as lowercase hexadecimal.
            sb.Clear();
            foreach (byte b in hashBytes)
            {
                sb.Append(b.ToString("x2"));
            }

            return sb.ToString();
        }
    }
    #endregion

    #region Phash
    /// <summary>
    /// Computes a 64-bit perceptual hash: resize to 32x32 grayscale, take the
    /// DCT, then threshold the low-frequency top-left 8x8 coefficients against
    /// their median. Returns an 8x8 CV_8U Mat of 0/1 values (empty Mat on
    /// failure); the caller owns the returned Mat and must release it.
    /// </summary>
    private Mat CalculatePhash(Mat image)
    {
        Imgproc.cvtColor(image, gray, Imgproc.COLOR_BGR2GRAY);
        // Downscale to 32x32 and convert to 32-bit float for the DCT.
        Imgproc.resize(gray, resizedImage, new Size(32, 32), 0, 0, Imgproc.INTER_AREA);
        resizedImage.convertTo(resizedImage, CvType.CV_32F);
        Core.dct(resizedImage, dctImage);
        // Sanity check before taking the 8x8 submatrix.
        if (dctImage.rows() < 8 || dctImage.cols() < 8)
        {
            Debug.LogError("DCT matrix is too small!");
            return new Mat();
        }
        // Keep only the low-frequency top-left 8x8 block of DCT coefficients.
        Mat dctLowFreq = dctImage.submat(new OpenCVForUnity.CoreModule.Rect(0, 0, 8, 8));
        float[] dctArray = new float[64];
        dctLowFreq.get(0, 0, dctArray);
        float medianValue = GetMedian(dctArray);
        // Build the hash: 1 where a coefficient exceeds the median, else 0.
        Mat phash = new Mat(dctLowFreq.size(), CvType.CV_8U);
        for (int i = 0; i < dctLowFreq.rows(); i++)
        {
            for (int j = 0; j < dctLowFreq.cols(); j++)
            {
                phash.put(i, j, dctLowFreq.get(i, j)[0] > medianValue ? 1 : 0);
            }
        }
        // BUGFIX: the submat header was leaked on every call.
        dctLowFreq.Dispose();
        return phash;
    }

    /// <summary>Returns the median of <paramref name="array"/> without mutating it.</summary>
    private float GetMedian(float[] array)
    {
        // BUGFIX: Array.Sort previously reordered the caller's array in place;
        // sort a copy so the caller keeps its original element order.
        float[] sorted = (float[])array.Clone();
        System.Array.Sort(sorted);
        int middle = sorted.Length / 2;
        if (sorted.Length % 2 == 0)
        {
            return (sorted[middle - 1] + sorted[middle]) / 2.0f;
        }
        else
        {
            return sorted[middle];
        }
    }

    /// <summary>
    /// Treats a hash with fewer than 13 of 64 bits set as degenerate
    /// (e.g. near-uniform image) and therefore unusable for comparison.
    /// </summary>
    private bool IsPhashValid(Mat phash)
    {
        return Core.countNonZero(phash) >= 13;
    }

    /// <summary>
    /// Counts differing cells between two equal-sized 0/1 hash Mats.
    /// Returns -1 (with an error log) when the sizes do not match.
    /// </summary>
    private int CalculateHammingDistance(Mat phash1, Mat phash2)
    {
        if (phash1.rows() != phash2.rows() || phash1.cols() != phash2.cols())
        {
            Debug.LogError("pHash sizes do not match!");
            return -1;
        }

        int hammingDistance = 0;
        for (int i = 0; i < phash1.rows(); i++)
        {
            for (int j = 0; j < phash1.cols(); j++)
            {
                if (phash1.get(i, j)[0] != phash2.get(i, j)[0])
                {
                    hammingDistance++;
                }
            }
        }

        return hammingDistance;
    }

    #endregion

    /// <summary>
    /// Estimates (rotationDegrees, (scaleX, scaleY)) mapping
    /// <paramref name="img0"/> onto <paramref name="img1"/> on a thread-pool
    /// thread. Returns (0, null) when the images are byte-identical (MD5) or
    /// perceptually near-identical (pHash Hamming distance &lt; 10), and
    /// (null, null) when no reliable estimate could be made or an exception
    /// occurred. The instance's scratch buffers are used throughout, so do not
    /// reuse this instance until the returned task completes.
    /// </summary>
    /// <param name="distanceDifference">Lowe ratio-test factor for filtering 2-NN matches.</param>
    public async Task<(double?, (double, double)?)> GetRotationScaleAsync(Mat img0, Mat img1, double distanceDifference)
    {
        return await Task.Run<(double?, (double, double)?)>(() => {
            try
            {
                // Fast path 1: exact duplicates (identical pixel bytes).
                string md5Hash0 = CalculateMD5(img0);
                string md5Hash1 = CalculateMD5(img1);
                if (md5Hash0 == md5Hash1)
                {
                    return (0, null);
                }

                // Fast path 2: perceptually near-identical images.
                Mat phash1 = CalculatePhash(img0);
                Mat phash2 = CalculatePhash(img1);
                bool perceptualMatch = IsPhashValid(phash1) && IsPhashValid(phash2)
                    && CalculateHammingDistance(phash1, phash2) < 10;
                // BUGFIX: both hash Mats were leaked on every call.
                phash1.Dispose();
                phash2.Dispose();
                if (perceptualMatch)
                {
                    return (0, null);
                }

                KeyPoint[] kp1 = Sift(img0, descriptors1);
                KeyPoint[] kp2 = Sift(img1, descriptors2);

                if (kp1.Length == 0 || kp2.Length == 0)
                {
                    return (null, null);
                }

                // BUGFIX: Clear() dropped the previous call's MatOfDMatch
                // objects without releasing their native memory.
                foreach (MatOfDMatch stale in knnMatches) stale.Dispose();
                knnMatches.Clear();
                goodMatches.Clear();
                bf.knnMatch(descriptors1, descriptors2, knnMatches, 2);

                // Lowe's ratio test: keep a match only when clearly better
                // than the second-best candidate.
                foreach (MatOfDMatch matofDMatch in knnMatches)
                {
                    DMatch[] matches = matofDMatch.toArray();
                    if (matches[0].distance < distanceDifference * matches[1].distance)
                    {
                        goodMatches.Add(matches[0]);
                    }
                }

                if (goodMatches.Count < 3)
                {
                    return (null, null);
                }

                srcPts.Clear();
                dstPts.Clear();
                foreach (DMatch match in goodMatches)
                {
                    srcPts.Add(kp1[match.queryIdx].pt);
                    dstPts.Add(kp2[match.trainIdx].pt);
                }

                MatOfPoint2f srcMatOfPoint2f = new MatOfPoint2f(srcPts.ToArray());
                MatOfPoint2f dstMatOfPoint2f = new MatOfPoint2f(dstPts.ToArray());

                // RANSAC fit of a similarity transform (rotation, uniform
                // scale, translation) with a 5px reprojection threshold.
                Mat M = Calib3d.estimateAffinePartial2D(srcMatOfPoint2f, dstMatOfPoint2f, inliers, Calib3d.RANSAC, 5);
                // BUGFIX: both point Mats were leaked on every call.
                srcMatOfPoint2f.Dispose();
                dstMatOfPoint2f.Dispose();
                if (M.empty())
                {
                    M.Dispose();
                    return (null, null);
                }

                // Decompose the 2x2 linear part: angle from atan2 of the first
                // column, per-axis scale from the row norms.
                Mat R = M.colRange(0, 2);
                double theta = Math.Atan2(R.get(1, 0)[0], R.get(0, 0)[0]);
                double rotationAngleDegrees = theta * 180.0 / Math.PI;

                Mat row0 = R.row(0);
                Mat row1 = R.row(1);
                double scaleX = Core.norm(row0);
                double scaleY = Core.norm(row1);
                // BUGFIX: the affine matrix and its row/column headers were
                // leaked on every successful call.
                row0.Dispose();
                row1.Dispose();
                R.Dispose();
                M.Dispose();

                return (rotationAngleDegrees, (scaleX, scaleY));
            }
            catch (Exception e)
            {
                Debug.LogException(e);

                return (null, null);
            }
        });
    }
}
#endif