#if UNITY_EDITOR
using OpenCVForUnity.CoreModule;
using OpenCVForUnity.Features2dModule;
using OpenCVForUnity.ImgprocModule;
using OpenCVForUnity.Calib3dModule;
using OpenCVForUnity.UnityUtils;
using System;
using System.IO;
using System.Collections.Generic;
using UnityEngine;
using OpenCVForUnity.ImgcodecsModule;
using System.Threading.Tasks;
using UguiToolkit.Editor;
using System.Security.Cryptography;
using System.Text;

public static class ImageUtils
{
    public static void SetDebugMode(bool debug)
    {
        Utils.setDebugMode(debug);
    }

    public static string FormatImgFilePath(string imgFilePath)
    {
        string projectPath = Directory.GetParent(Application.dataPath).FullName;
        return Path.GetRelativePath(projectPath, imgFilePath).Replace("\\", "/");
    }

    public static void LoadPngImagesFromFolder(string folderPath, List<Mat> images, List<string> imagePaths)
    {
        foreach (string file in Directory.GetFiles(folderPath, "*.png"))
        {
            Mat img = Imgcodecs.imread(file);
            if (!img.empty())
            {
                images.Add(img);
                imagePaths.Add(FormatImgFilePath(file));
            }
        }
    }

    static List<Task<(double?, (double, double)?)>> tasks;
    static ObjectPool<RotationScaleDetector> detectorPool;

    public static async Task ProcessFolderAsync(List<Mat> images, string targetFilePath, double distanceDifference,
        Action<int, (double, (double, double), bool)> callback, Action endCallback)
    {
        if (tasks == null)
            tasks = new(images.Count);
        if (detectorPool == null)
            detectorPool = new(images.Count);

        Mat targetImage = Imgcodecs.imread(targetFilePath);
        List<RotationScaleDetector> detectors = new(images.Count);
        tasks.Clear();

        // Each image gets its own pooled detector so the tasks can run in parallel
        // without sharing mutable state.
        foreach (var img in images)
        {
            RotationScaleDetector detector = detectorPool.GetObject();
            tasks.Add(detector.GetRotationScaleAsync(targetImage, img, distanceDifference));
            detectors.Add(detector);
        }

        var results = await Task.WhenAll(tasks.ToArray());

        foreach (var detector in detectors)
            detectorPool.ReturnObject(detector);

        for (int index = 0; index < results.Length; index++)
        {
            int _index = index; // capture a stable copy for the closure below
            var result = results[index];
            UnityMainThreadDispatcher.Instance().Enqueue(() =>
            {
                if (result.Item1.HasValue)
                {
                    double rotationAngleDegrees = result.Item1.Value;
                    double scaleX = 0;
                    double scaleY = 0;
                    if (result.Item2.HasValue)
                    {
                        var scale = result.Item2.Value;
                        scaleX = scale.Item1;
                        scaleY = scale.Item2;
                        callback(_index, (rotationAngleDegrees, (scaleX, scaleY), false));
                    }
                    else
                    {
                        // SimilarityCalc
                        callback(_index, (rotationAngleDegrees, (scaleX, scaleY), true));
                    }
                    Debug.Log($"Target Image -> Image {_index}");
                    Debug.Log($"Rotation Angle: {rotationAngleDegrees} degrees");
                    Debug.Log($"Scale X: {scaleX}");
                    Debug.Log($"Scale Y: {scaleY}");
                    Debug.Log($"SimilarityCalc : {result.Item2 == null}");
                }
            });
        }

        UnityMainThreadDispatcher.Instance().Enqueue(() => { endCallback(); });
    }
}

public class RotationScaleDetector
{
    private SIFT sift;
    private BFMatcher bf;
    private Mat gray;
    private Mat descriptors1;
    private Mat descriptors2;
    private MatOfKeyPoint keypoints;
    private Mat inliers;
    private List<MatOfDMatch> knnMatches;
    private List<DMatch> goodMatches;
    private List<Point> srcPts;
    private List<Point> dstPts;
    private Mat resizedImage;
    private Mat dctImage;
    StringBuilder sb;

    public RotationScaleDetector()
    {
        sift = SIFT.create();
        bf = BFMatcher.create();
        gray = new Mat();
        descriptors1 = new Mat();
        descriptors2 = new Mat();
        keypoints = new MatOfKeyPoint();
        inliers = new Mat();
        knnMatches = new List<MatOfDMatch>();
        goodMatches = new List<DMatch>();
        srcPts = new List<Point>();
        dstPts = new List<Point>();
        resizedImage = new Mat();
        dctImage = new Mat();
        sb = new StringBuilder();
    }

    private KeyPoint[] Sift(Mat image, Mat descriptors)
    {
        Imgproc.cvtColor(image, gray, Imgproc.COLOR_BGR2GRAY);
        sift.detectAndCompute(gray, new Mat(), keypoints, descriptors);
        return keypoints.toArray();
    }
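
    // GetRotationScaleAsync (below) runs three increasingly expensive stages:
    //   1. MD5 over the raw pixel bytes: catches byte-identical images.
    //   2. 64-bit pHash (32x32 DCT, top-left 8x8 block, median threshold):
    //      catches near-duplicates with a Hamming distance below 10.
    //   3. SIFT matching + RANSAC partial-affine fit: recovers rotation and scale.
    // Stages 1 and 2 return (0, null), which ProcessFolderAsync reports as a
    // similarity match rather than a feature-based estimate.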

    #region MD5
    public string CalculateMD5(Mat image)
    {
        // Convert the Mat to a byte array
        byte[] imageBytes = new byte[image.total() * image.elemSize()];
        image.get(0, 0, imageBytes);

        // Create an MD5 hash algorithm instance
        using (MD5 md5 = MD5.Create())
        {
            // Compute the hash
            byte[] hashBytes = md5.ComputeHash(imageBytes);

            // Convert the hash bytes to a hexadecimal string
            sb.Clear();
            foreach (byte b in hashBytes)
            {
                sb.Append(b.ToString("x2"));
            }
            return sb.ToString();
        }
    }
    #endregion

    #region Phash
    private Mat CalculatePhash(Mat image)
    {
        Imgproc.cvtColor(image, gray, Imgproc.COLOR_BGR2GRAY);

        // Resize to 32x32 and convert to 32-bit float
        Imgproc.resize(gray, resizedImage, new Size(32, 32), 0, 0, Imgproc.INTER_AREA);
        resizedImage.convertTo(resizedImage, CvType.CV_32F);

        // Apply the discrete cosine transform (DCT)
        Core.dct(resizedImage, dctImage);

        // Check the matrix size
        if (dctImage.rows() < 8 || dctImage.cols() < 8)
        {
            Debug.LogError("DCT matrix is too small!");
            return new Mat();
        }

        // Take the top-left 8x8 block of DCT coefficients (the low frequencies)
        Mat dctLowFreq = dctImage.submat(new OpenCVForUnity.CoreModule.Rect(0, 0, 8, 8));

        // Copy the DCT coefficients into an array
        float[] dctArray = new float[64];
        dctLowFreq.get(0, 0, dctArray);

        // Compute the median
        float medianValue = GetMedian(dctArray);

        // Build the pHash: each bit is 1 if its coefficient is above the median
        Mat phash = new Mat(dctLowFreq.size(), CvType.CV_8U);
        for (int i = 0; i < dctLowFreq.rows(); i++)
        {
            for (int j = 0; j < dctLowFreq.cols(); j++)
            {
                phash.put(i, j, dctLowFreq.get(i, j)[0] > medianValue ? 1 : 0);
            }
        }
        return phash;
    }

    private float GetMedian(float[] array)
    {
        // Sort the array
        System.Array.Sort(array);

        // Take the middle element (or the mean of the two middle elements)
        int middle = array.Length / 2;
        if (array.Length % 2 == 0)
        {
            return (array[middle - 1] + array[middle]) / 2.0f;
        }
        else
        {
            return array[middle];
        }
    }

    private bool IsPhashValid(Mat phash)
    {
        // Require at least 13 set bits; near-flat images produce degenerate hashes
        return Core.countNonZero(phash) >= 13;
    }

    private int CalculateHammingDistance(Mat phash1, Mat phash2)
    {
        if (phash1.rows() != phash2.rows() || phash1.cols() != phash2.cols())
        {
            Debug.LogError("pHash sizes do not match!");
            return -1;
        }

        int hammingDistance = 0;
        for (int i = 0; i < phash1.rows(); i++)
        {
            for (int j = 0; j < phash1.cols(); j++)
            {
                if (phash1.get(i, j)[0] != phash2.get(i, j)[0])
                {
                    hammingDistance++;
                }
            }
        }
        return hammingDistance;
    }
    #endregion

    public async Task<(double?, (double, double)?)> GetRotationScaleAsync(Mat img0, Mat img1, double distanceDifference)
    {
        return await Task.Run<(double?, (double, double)?)>(() =>
        {
            try
            {
                // Stage 1: byte-identical images need no further work
                string md5Hash0 = CalculateMD5(img0);
                string md5Hash1 = CalculateMD5(img1);
                if (md5Hash0 == md5Hash1)
                {
                    return (0, null);
                }

                // Stage 2: near-duplicates detected by perceptual hash
                Mat phash1 = CalculatePhash(img0);
                Mat phash2 = CalculatePhash(img1);
                if (IsPhashValid(phash1) && IsPhashValid(phash2) && CalculateHammingDistance(phash1, phash2) < 10)
                {
                    return (0, null);
                }

                // Stage 3: SIFT feature matching
                KeyPoint[] kp1 = Sift(img0, descriptors1);
                KeyPoint[] kp2 = Sift(img1, descriptors2);
                if (kp1.Length == 0 || kp2.Length == 0)
                {
                    return (null, null);
                }

                knnMatches.Clear();
                goodMatches.Clear();
                bf.knnMatch(descriptors1, descriptors2, knnMatches, 2);

                // Lowe's ratio test: keep a match only if its best distance is clearly
                // smaller than the second-best. The length guard protects against
                // descriptors with fewer than two neighbors.
                foreach (MatOfDMatch matofDMatch in knnMatches)
                {
                    DMatch[] matches = matofDMatch.toArray();
                    if (matches.Length >= 2 && matches[0].distance < distanceDifference * matches[1].distance)
                    {
                        goodMatches.Add(matches[0]);
                    }
                }
                if (goodMatches.Count < 3)
                {
                    return (null, null);
                }

                srcPts.Clear();
                dstPts.Clear();
                foreach (DMatch match in goodMatches)
                {
                    srcPts.Add(kp1[match.queryIdx].pt);
                    dstPts.Add(kp2[match.trainIdx].pt);
                }

                MatOfPoint2f srcMatOfPoint2f = new MatOfPoint2f(srcPts.ToArray());
                MatOfPoint2f dstMatOfPoint2f = new MatOfPoint2f(dstPts.ToArray());
                Mat M = Calib3d.estimateAffinePartial2D(srcMatOfPoint2f, dstMatOfPoint2f, inliers, Calib3d.RANSAC, 5);
                if (M.empty())
                {
                    return (null, null);
                }
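
                // estimateAffinePartial2D fits a 2x3 similarity transform
                //   M = [ s*cos(theta)  -s*sin(theta)  tx ]
                //       [ s*sin(theta)   s*cos(theta)  ty ]
                // so the rotation is atan2(M[1,0], M[0,0]) and the scale is the
                // Euclidean norm of a row of the left 2x2 block (for a pure
                // similarity both rows give the same value; scaleX and scaleY
                // are computed separately below for symmetry).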
                Mat R = M.colRange(0, 2);
                double theta = Math.Atan2(R.get(1, 0)[0], R.get(0, 0)[0]);
                double rotationAngleDegrees = theta * 180.0 / Math.PI;
                double scaleX = Core.norm(R.row(0));
                double scaleY = Core.norm(R.row(1));
                return (rotationAngleDegrees, (scaleX, scaleY));
            }
            catch (Exception e)
            {
                Debug.LogException(e);
                return (null, null);
            }
        });
    }
}
#endif
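
#if UNITY_EDITOR
// A minimal usage sketch, not part of the utility above: it loads every PNG in a
// folder and compares each against a target image. The menu path, folder, target
// file name, and the 0.75 ratio threshold are assumptions made for illustration.
// Per-image callbacks are dispatched to the main thread via UnityMainThreadDispatcher.
public static class ImageUtilsExample
{
    [UnityEditor.MenuItem("Tools/Detect Rotation And Scale")] // hypothetical menu entry
    private static async void Run()
    {
        var images = new System.Collections.Generic.List<OpenCVForUnity.CoreModule.Mat>();
        var imagePaths = new System.Collections.Generic.List<string>();

        // Hypothetical folder of PNGs; any project-relative directory works.
        ImageUtils.LoadPngImagesFromFolder("Assets/Textures", images, imagePaths);

        await ImageUtils.ProcessFolderAsync(
            images,
            "Assets/Textures/target.png", // hypothetical target image
            0.75,                         // Lowe ratio-test threshold
            (index, result) => UnityEngine.Debug.Log(
                $"{imagePaths[index]}: angle={result.Item1} deg, " +
                $"scale={result.Item2}, similarityMatch={result.Item3}"),
            () => UnityEngine.Debug.Log("All comparisons finished."));
    }
}
#endif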