feat: migrate part of the OpenCv code

huiyadanli 2023-09-02 13:19:44 +08:00
parent e2ed073725
commit 6e0f9e3515
9 changed files with 882 additions and 1 deletion

View File

@@ -0,0 +1,27 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
namespace Vision.Recognition.Helper
{
public class Log
{
public static void LogInformation(string? message, params object?[] args)
{
VisionContext.Instance().Log?.LogInformation(message, args);
}
public static void LogWarning(string? message, params object?[] args)
{
VisionContext.Instance().Log?.LogWarning(message, args);
}
public static void LogError(string? message, params object?[] args)
{
VisionContext.Instance().Log?.LogError(message, args);
}
}
}
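A minimal call sketch for the wrapper above, assuming an ILogger has already been assigned to VisionContext.Instance().Log (see the MaskWindow change further down):
Log.LogInformation("template matched with score {Score}", 0.87);
Log.LogWarning("no target found after {Count} attempts", 3);
Log.LogError("capture failed: {Reason}", "window minimized");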

View File

@@ -0,0 +1,61 @@
using OpenCvSharp;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Vision.Recognition.Helper.OpenCv
{
public class ArithmeticHelper
{
/// <summary>
/// Horizontal projection
/// </summary>
/// <param name="gray">binarized single-channel image</param>
/// <returns>white-pixel count for each row</returns>
public static int[] HorizontalProjection(Mat gray)
{
var projection = new int[gray.Height];
// Compute the projection value for each row
for (var y = 0; y < gray.Height; ++y)
{
// Walk every pixel in this row and count the white (255) ones
for (var x = 0; x < gray.Width; ++x)
{
var s = gray.Get<byte>(y, x);
if (s == 255)
{
projection[y]++;
}
}
}
return projection;
}
/// <summary>
/// Vertical projection
/// </summary>
/// <param name="gray">binarized single-channel image</param>
/// <returns>white-pixel count for each column</returns>
public static int[] VerticalProjection(Mat gray)
{
var projection = new int[gray.Width];
// Compute the projection value for each column
for (var x = 0; x < gray.Width; ++x)
{
for (var y = 0; y < gray.Height; ++y)
{
var s = gray.Get<byte>(y, x);
if (s == 255)
{
projection[x]++;
}
}
}
return projection;
}
}
}
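A usage sketch for the projections, assuming src is a BGR Mat and using an illustrative threshold of 200:
using var gray = new Mat();
Cv2.CvtColor(src, gray, ColorConversionCodes.BGR2GRAY);        // to single-channel grayscale
Cv2.Threshold(gray, gray, 200, 255, ThresholdTypes.Binary);    // keep only bright pixels as 255
int[] rowCounts = ArithmeticHelper.HorizontalProjection(gray); // white pixels per row
int[] colCounts = ArithmeticHelper.VerticalProjection(gray);   // white pixels per column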

View File

@@ -0,0 +1,496 @@
using OpenCvSharp;
using System;
using System.Collections.Generic;
using System.Drawing.Imaging;
using System.Drawing;
using System.Linq;
using System.Runtime.InteropServices;
using System.Text;
using System.Threading.Tasks;
namespace Vision.Recognition.Helper.OpenCv
{
/// <summary>
/// static class which provides conversion between System.Drawing.Bitmap and Mat
/// copy of https://github.com/shimat/opencvsharp/blob/main/src/OpenCvSharp.Extensions/BitmapConverter.cs
/// </summary>
public static class BitmapConverter
{
#region ToMat
/// <summary>
/// Converts System.Drawing.Bitmap to Mat
/// </summary>
/// <param name="src">System.Drawing.Bitmap object to be converted</param>
/// <returns>A Mat object which is converted from System.Drawing.Bitmap</returns>
public static Mat ToMat(this Bitmap src)
{
if (!RuntimeInformation.IsOSPlatform(OSPlatform.Windows))
throw new NotSupportedException("Non-Windows OS are not supported");
if (src is null)
throw new ArgumentNullException(nameof(src));
int w = src.Width;
int h = src.Height;
int channels;
switch (src.PixelFormat)
{
case PixelFormat.Format24bppRgb:
case PixelFormat.Format32bppRgb:
channels = 3; break;
case PixelFormat.Format32bppArgb:
case PixelFormat.Format32bppPArgb:
channels = 4; break;
case PixelFormat.Format8bppIndexed:
case PixelFormat.Format1bppIndexed:
channels = 1; break;
default:
throw new NotImplementedException();
}
Mat dst = new Mat(h, w, MatType.CV_8UC(channels));
ToMat(src, dst);
return dst;
}
/// <summary>
/// Converts System.Drawing.Bitmap to Mat
/// </summary>
/// <param name="src">System.Drawing.Bitmap object to be converted</param>
/// <param name="dst">A Mat object which is converted from System.Drawing.Bitmap</param>
public static unsafe void ToMat(this Bitmap src, Mat dst)
{
if (!RuntimeInformation.IsOSPlatform(OSPlatform.Windows))
throw new NotSupportedException("Non-Windows OS are not supported");
if (src is null)
throw new ArgumentNullException(nameof(src));
if (dst is null)
throw new ArgumentNullException(nameof(dst));
if (dst.IsDisposed)
throw new ArgumentException("The specified dst is disposed.", nameof(dst));
if (dst.Depth() != MatType.CV_8U)
throw new NotSupportedException("Mat depth != CV_8U");
if (dst.Dims != 2)
throw new NotSupportedException("Mat dims != 2");
if (src.Width != dst.Width || src.Height != dst.Height)
throw new ArgumentException("src.Size != dst.Size");
int w = src.Width;
int h = src.Height;
Rectangle rect = new Rectangle(0, 0, w, h);
BitmapData? bd = null;
try
{
bd = src.LockBits(rect, ImageLockMode.ReadOnly, src.PixelFormat);
switch (src.PixelFormat)
{
case PixelFormat.Format1bppIndexed:
Format1bppIndexed();
break;
case PixelFormat.Format8bppIndexed:
Format8bppIndexed();
break;
case PixelFormat.Format24bppRgb:
Format24bppRgb();
break;
case PixelFormat.Format32bppRgb:
case PixelFormat.Format32bppArgb:
case PixelFormat.Format32bppPArgb:
Format32bppRgb();
break;
}
}
finally
{
if (bd is not null)
src.UnlockBits(bd);
}
// ReSharper disable once InconsistentNaming
void Format1bppIndexed()
{
if (dst.Channels() != 1)
throw new ArgumentException("Invalid nChannels");
if (dst.IsSubmatrix())
throw new NotImplementedException("submatrix not supported");
if (bd is null)
throw new NotSupportedException("BitmapData is null (Format1bppIndexed)");
byte* srcPtr = (byte*)bd.Scan0.ToPointer();
byte* dstPtr = dst.DataPointer;
int srcStep = bd.Stride;
uint dstStep = (uint)dst.Step();
int x = 0;
for (int y = 0; y < h; y++)
{
// Each row is always padded up to a 4-byte boundary,
// so walk every byte of this row
for (int bytePos = 0; bytePos < srcStep; bytePos++)
{
if (x < w)
{
// Pull the 8 individual bits out of the byte at the current position
byte b = srcPtr[bytePos];
for (int i = 0; i < 8; i++)
{
if (x >= w)
{
break;
}
// IplImage stores 8 bits per pixel
dstPtr[dstStep * y + x] = ((b & 0x80) == 0x80) ? (byte)255 : (byte)0;
b <<= 1;
x++;
}
}
}
// Move on to the next row
x = 0;
srcPtr += srcStep;
}
}
// ReSharper disable once InconsistentNaming
void Format8bppIndexed()
{
static void Ch1(Mat dst, int height, int srcStep, uint dstStep, IntPtr srcData, byte[] palette)
{
if (dstStep == srcStep && !dst.IsSubmatrix() && dst.IsContinuous())
{
// Read Bitmap pixel data to managed array
long length = dst.DataEnd.ToInt64() - dst.Data.ToInt64();
if (length > int.MaxValue)
throw new NotSupportedException("Too big dst Mat");
var buffer = new byte[length];
Marshal.Copy(srcData, buffer, 0, buffer.Length);
// Apply conversion by palette
buffer = buffer.Select(b => palette[b]).ToArray();
// Write to dst Mat
Marshal.Copy(buffer, 0, dst.Data, buffer.Length);
}
else
{
// Copy line bytes from src to dst for each line
byte* sp = (byte*)srcData;
byte* dp = (byte*)dst.Data;
var buffer = new byte[srcStep];
for (int y = 0; y < height; y++)
{
// Read Bitmap pixel data to managed array
Marshal.Copy(new IntPtr(sp), buffer, 0, buffer.Length);
// Apply conversion by palette
buffer = buffer.Select(b => palette[b]).ToArray();
// Write to dst Mat
Marshal.Copy(buffer, 0, new IntPtr(dp), buffer.Length);
sp += srcStep;
dp += dstStep;
}
}
}
int srcStep = bd.Stride;
uint dstStep = (uint)dst.Step();
int channels = dst.Channels();
if (channels == 1)
{
var palette = new byte[256];
var paletteLength = Math.Min(256, src.Palette.Entries.Length);
for (int i = 0; i < paletteLength; i++)
{
// TODO src.Palette.Flags & 2 == 2
// https://docs.microsoft.com/ja-jp/dotnet/api/system.drawing.imaging.colorpalette.flags?view=netframework-4.8
palette[i] = src.Palette.Entries[i].R;
}
Ch1(dst, h, srcStep, dstStep, bd.Scan0, palette);
}
else if (channels == 3)
{
// Palette
var paletteR = new byte[256];
var paletteG = new byte[256];
var paletteB = new byte[256];
var paletteLength = Math.Min(256, src.Palette.Entries.Length);
for (int i = 0; i < paletteLength; i++)
{
var c = src.Palette.Entries[i];
paletteR[i] = c.R;
paletteG[i] = c.G;
paletteB[i] = c.B;
}
using var dstR = new Mat(h, w, MatType.CV_8UC1);
using var dstG = new Mat(h, w, MatType.CV_8UC1);
using var dstB = new Mat(h, w, MatType.CV_8UC1);
Ch1(dstR, h, srcStep, (uint)dstR.Step(), bd.Scan0, paletteR);
Ch1(dstG, h, srcStep, (uint)dstG.Step(), bd.Scan0, paletteG);
Ch1(dstB, h, srcStep, (uint)dstB.Step(), bd.Scan0, paletteB);
Cv2.Merge(new[] { dstB, dstG, dstR }, dst);
}
else
{
throw new ArgumentException($"Invalid channels of dst Mat ({channels})");
}
}
// ReSharper disable once InconsistentNaming
void Format24bppRgb()
{
if (dst.Channels() != 3)
throw new ArgumentException("Invalid nChannels");
if (dst.Depth() != MatType.CV_8U && dst.Depth() != MatType.CV_8S)
throw new ArgumentException("Invalid depth of dst Mat");
int srcStep = bd.Stride;
long dstStep = dst.Step();
if (dstStep == srcStep && !dst.IsSubmatrix() && dst.IsContinuous())
{
IntPtr dstData = dst.Data;
long bytesToCopy = dst.DataEnd.ToInt64() - dstData.ToInt64();
Buffer.MemoryCopy(bd.Scan0.ToPointer(), dstData.ToPointer(), bytesToCopy, bytesToCopy);
}
else
{
// Copy line bytes from src to dst for each line
byte* sp = (byte*)bd.Scan0;
byte* dp = (byte*)dst.Data;
for (int y = 0; y < h; y++)
{
Buffer.MemoryCopy(sp, dp, dstStep, dstStep);
sp += srcStep;
dp += dstStep;
}
}
}
// ReSharper disable once InconsistentNaming
void Format32bppRgb()
{
int srcStep = bd.Stride;
long dstStep = dst.Step();
switch (dst.Channels())
{
case 4:
if (!dst.IsSubmatrix() && dst.IsContinuous())
{
IntPtr dstData = dst.Data;
long bytesToCopy = dst.DataEnd.ToInt64() - dstData.ToInt64();
Buffer.MemoryCopy(bd.Scan0.ToPointer(), dstData.ToPointer(), bytesToCopy, bytesToCopy);
}
else
{
byte* sp = (byte*)bd.Scan0;
byte* dp = (byte*)dst.Data;
for (int y = 0; y < h; y++)
{
Buffer.MemoryCopy(sp, dp, dstStep, dstStep);
sp += srcStep;
dp += dstStep;
}
}
break;
case 3:
byte* srcPtr = (byte*)bd.Scan0.ToPointer();
byte* dstPtr = (byte*)dst.Data.ToPointer();
for (int y = 0; y < h; y++)
{
for (int x = 0; x < w; x++)
{
dstPtr[y * dstStep + x * 3 + 0] = srcPtr[y * srcStep + x * 4 + 0];
dstPtr[y * dstStep + x * 3 + 1] = srcPtr[y * srcStep + x * 4 + 1];
dstPtr[y * dstStep + x * 3 + 2] = srcPtr[y * srcStep + x * 4 + 2];
}
}
break;
default:
throw new ArgumentException("Invalid nChannels");
}
}
}
#endregion
#region ToBitmap
/// <summary>
/// Converts Mat to System.Drawing.Bitmap
/// </summary>
/// <param name="src">Mat</param>
/// <returns></returns>
public static Bitmap ToBitmap(this Mat src)
{
if (!RuntimeInformation.IsOSPlatform(OSPlatform.Windows))
throw new NotSupportedException("Non-Windows OS are not supported");
if (src is null)
throw new ArgumentNullException(nameof(src));
PixelFormat pf;
switch (src.Channels())
{
case 1:
pf = PixelFormat.Format8bppIndexed; break;
case 3:
pf = PixelFormat.Format24bppRgb; break;
case 4:
pf = PixelFormat.Format32bppArgb; break;
default:
throw new ArgumentException("Number of channels must be 1, 3 or 4.", nameof(src));
}
return ToBitmap(src, pf);
}
/// <summary>
/// Converts Mat to System.Drawing.Bitmap
/// </summary>
/// <param name="src">Mat</param>
/// <param name="pf">Pixel Depth</param>
/// <returns></returns>
public static Bitmap ToBitmap(this Mat src, PixelFormat pf)
{
if (!RuntimeInformation.IsOSPlatform(OSPlatform.Windows))
throw new NotSupportedException("Non-Windows OS are not supported");
if (src is null)
throw new ArgumentNullException(nameof(src));
src.ThrowIfDisposed();
Bitmap bitmap = new Bitmap(src.Width, src.Height, pf);
ToBitmap(src, bitmap);
return bitmap;
}
/// <summary>
/// Converts Mat to System.Drawing.Bitmap
/// </summary>
/// <param name="src">Mat</param>
/// <param name="dst">Mat</param>
/// <remarks>Author: shimat, Gummo (ROI support)</remarks>
public static unsafe void ToBitmap(this Mat src, Bitmap dst)
{
if (!RuntimeInformation.IsOSPlatform(OSPlatform.Windows))
throw new NotSupportedException("Non-Windows OS are not supported");
if (src is null)
throw new ArgumentNullException(nameof(src));
if (dst is null)
throw new ArgumentNullException(nameof(dst));
if (src.IsDisposed)
throw new ArgumentException("The image is disposed.", nameof(src));
if (src.Depth() != MatType.CV_8U)
throw new ArgumentException("Depth of the image must be CV_8U");
//if (src.IsSubmatrix())
// throw new ArgumentException("Submatrix is not supported");
if (src.Width != dst.Width || src.Height != dst.Height)
throw new ArgumentException("src.Size != dst.Size");
PixelFormat pf = dst.PixelFormat;
// For a single-plane (8bpp) image, build a grayscale palette
if (pf == PixelFormat.Format8bppIndexed)
{
ColorPalette plt = dst.Palette;
for (int x = 0; x < 256; x++)
{
plt.Entries[x] = Color.FromArgb(x, x, x);
}
dst.Palette = plt;
}
int w = src.Width;
int h = src.Height;
Rectangle rect = new Rectangle(0, 0, w, h);
BitmapData? bd = null;
bool submat = src.IsSubmatrix();
bool continuous = src.IsContinuous();
try
{
bd = dst.LockBits(rect, ImageLockMode.WriteOnly, pf);
IntPtr srcData = src.Data;
byte* pSrc = (byte*)(srcData.ToPointer());
byte* pDst = (byte*)(bd.Scan0.ToPointer());
int ch = src.Channels();
int srcStep = (int)src.Step();
int dstStep = ((src.Width * ch) + 3) / 4 * 4; // round up to a multiple of 4
int stride = bd.Stride;
switch (pf)
{
case PixelFormat.Format1bppIndexed:
{
if (submat)
throw new NotImplementedException("submatrix not supported");
// BitmapData rows are padded to 4 bytes while IplImage rows are 1 byte per pixel,
// so copy the bits over by hand
//int offset = stride - (w / 8);
int x = 0;
byte b = 0;
for (int y = 0; y < h; y++)
{
for (int bytePos = 0; bytePos < stride; bytePos++)
{
if (x < w)
{
for (int i = 0; i < 8; i++)
{
var mask = (byte)(0x80 >> i);
if (x < w && pSrc[srcStep * y + x] == 0)
b &= (byte)(mask ^ 0xff);
else
b |= mask;
x++;
}
pDst[bytePos] = b;
}
}
x = 0;
pDst += stride;
}
break;
}
case PixelFormat.Format8bppIndexed:
case PixelFormat.Format24bppRgb:
case PixelFormat.Format32bppArgb:
if (srcStep == dstStep && !submat && continuous)
{
long bytesToCopy = src.DataEnd.ToInt64() - src.Data.ToInt64();
Buffer.MemoryCopy(pSrc, pDst, bytesToCopy, bytesToCopy);
}
else
{
for (int y = 0; y < h; y++)
{
long offsetSrc = (y * srcStep);
long offsetDst = (y * dstStep);
long bytesToCopy = w * ch;
// copy the image one row at a time
Buffer.MemoryCopy(pSrc + offsetSrc, pDst + offsetDst, bytesToCopy, bytesToCopy);
}
}
break;
default:
throw new NotImplementedException();
}
}
finally
{
if (bd is not null)
dst.UnlockBits(bd);
}
}
#endregion
}
}
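A round-trip sketch for the converter (Windows only; the file path is purely illustrative):
using var bmp = new Bitmap(@"C:\temp\screenshot.png"); // hypothetical input image
using Mat mat = bmp.ToMat();        // Bitmap -> Mat, channel count derived from the PixelFormat
using Bitmap copy = mat.ToBitmap(); // Mat -> Bitmap, PixelFormat derived from the channel count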

View File

@@ -0,0 +1,191 @@
using OpenCvSharp;
using System;
using System.Collections.Generic;
using System.Drawing;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
using Point = OpenCvSharp.Point;
namespace Vision.Recognition.Helper.OpenCv
{
public class MatchTemplateHelper
{
public static double WidthScale = 1;
public static double HeightScale = 1;
public static Point FindSingleTarget(Bitmap imgSrc, Bitmap imgSub, double threshold = 0.8)
{
Mat? srcMat = null;
Mat? dstMat = null;
try
{
srcMat = imgSrc.ToMat();
dstMat = imgSub.ToMat();
return FindSingleTarget(srcMat, dstMat, threshold);
}
catch (Exception ex)
{
Log.LogError(ex.ToString());
return new Point();
}
finally
{
srcMat?.Dispose();
dstMat?.Dispose();
}
}
public static Point FindSingleTarget(Mat srcMat, Mat dstMat, double threshold = 0.8)
{
Point p = new Point();
Mat? resultMat = null;
try
{
dstMat = ResizeHelper.Resize(dstMat, WidthScale);
// Use a fresh Mat for the match scores instead of reusing srcMat as the output buffer
resultMat = new Mat();
Cv2.MatchTemplate(srcMat, dstMat, resultMat, TemplateMatchModes.CCoeffNormed);
double minValue, maxValue;
Point location, point;
Cv2.MinMaxLoc(resultMat, out minValue, out maxValue, out location, out point);
if (maxValue >= threshold)
{
p = new Point(point.X + dstMat.Width / 2, point.Y + dstMat.Height / 2);
if (VisionContext.Instance().Drawable)
{
VisionContext.Instance().DrawRectList
.Add(new Rect(point.X, point.Y, dstMat.Width, dstMat.Height));
VisionContext.Instance().DrawTextList
.Add(new Tuple<Point, string>(new Point(point.X, point.Y - 10), maxValue.ToString("0.00")));
//var imgTar = srcMat.Clone();
//Cv2.Rectangle(imgTar, point,
// new OpenCvSharp.Point(point.X + dstMat.Width, point.Y + dstMat.Height),
// Scalar.Red, 2);
//Cv2.PutText(imgTar, maxValue.ToString("0.00"), new OpenCvSharp.Point(point.X, point.Y - 10),
// HersheyFonts.HersheySimplex, 0.5, Scalar.Red);
//Cv2.ImShow("识别窗口", imgTar);
}
}
return p;
}
catch (Exception ex)
{
Log.LogError(ex.ToString());
return p;
}
finally
{
resultMat?.Dispose();
}
}
public static List<Point> FindMultiTarget(Mat srcMat, Mat dstMat, string title, out Mat resMat,
double threshold = 0.8, int findTargetCount = 8)
{
List<Point> pointList = new List<Point>();
resMat = srcMat.Clone();
try
{
dstMat = ResizeHelper.Resize(dstMat, WidthScale);
Mat matchResult = new Mat();
Cv2.MatchTemplate(srcMat, dstMat, matchResult, TemplateMatchModes.CCoeffNormed);
double minValue = 0;
double maxValue = 0;
Point minLoc = new();
// Locate the positions of the top matches one by one
Mat mask = new Mat(matchResult.Height, matchResult.Width, MatType.CV_8UC1, Scalar.White);
Mat maskSub = new Mat(matchResult.Height, matchResult.Width, MatType.CV_8UC1, Scalar.Black);
var point = new OpenCvSharp.Point(0, 0);
for (int i = 0; i < findTargetCount; i++)
{
Cv2.MinMaxLoc(matchResult, out minValue, out maxValue, out minLoc, out point, mask);
Rect maskRect = new Rect(point.X - dstMat.Width / 2, point.Y - dstMat.Height / 2, dstMat.Width,
dstMat.Height);
maskSub.Rectangle(maskRect, Scalar.White, -1);
mask -= maskSub;
if (maxValue >= threshold)
{
pointList.Add(new Point(point.X + dstMat.Width / 2, point.Y + dstMat.Height / 2));
if (VisionContext.Instance().Drawable)
{
VisionContext.Instance().DrawRectList
.Add(new Rect(point.X, point.Y, dstMat.Width, dstMat.Height));
VisionContext.Instance().DrawTextList
.Add(new Tuple<Point, string>(new Point(point.X, point.Y - 10), maxValue.ToString("0.00")));
}
//if (IsDebug)
//{
// VisionContext.Instance().Log
// ?.LogInformation(title + " " + maxValue.ToString("0.000") + " " + point);
// Cv2.Rectangle(resMat, point,
// new OpenCvSharp.Point(point.X + dstMat.Width, point.Y + dstMat.Height),
// Scalar.Red, 2);
// Cv2.PutText(resMat, title + " " + maxValue.ToString("0.00"),
// new OpenCvSharp.Point(point.X, point.Y - 10),
// HersheyFonts.HersheySimplex, 0.5, Scalar.Red);
//}
}
else
{
break;
}
}
return pointList;
}
catch (Exception ex)
{
Log.LogError(ex.ToString());
return pointList;
}
finally
{
srcMat?.Dispose();
dstMat?.Dispose();
}
}
public static Dictionary<string, List<Point>> FindMultiPicFromOneImage(Bitmap imgSrc,
Dictionary<string, Bitmap> imgSubDictionary, double threshold = 0.8)
{
Dictionary<string, List<Point>> dictionary = new Dictionary<string, List<Point>>();
Mat srcMat = imgSrc.ToMat();
Mat resMat;
foreach (KeyValuePair<string, Bitmap> kvp in imgSubDictionary)
{
dictionary.Add(kvp.Key, FindMultiTarget(srcMat, kvp.Value.ToMat(), kvp.Key, out resMat, threshold));
srcMat = resMat.Clone();
}
return dictionary;
}
public static Dictionary<string, List<Point>> FindMultiPicFromOneImage(Mat srcMat,
Dictionary<string, Bitmap> imgSubDictionary, double threshold = 0.8)
{
Dictionary<string, List<Point>> dictionary = new Dictionary<string, List<Point>>();
Mat resMat;
foreach (KeyValuePair<string, Bitmap> kvp in imgSubDictionary)
{
dictionary.Add(kvp.Key, FindMultiTarget(srcMat, kvp.Value.ToMat(), kvp.Key, out resMat, threshold));
srcMat = resMat.Clone();
}
return dictionary;
}
}
}
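A hedged call sketch for template matching (the image files and the 0.85 threshold are illustrative):
using var screen = new Bitmap(@"screen.png"); // hypothetical capture
using var icon = new Bitmap(@"icon.png");     // hypothetical template
var hit = MatchTemplateHelper.FindSingleTarget(screen, icon, 0.85);
if (hit.X != 0 || hit.Y != 0)
{
    // hit is the centre of the best match whose score reached the threshold
    Log.LogInformation("icon found at {X},{Y}", hit.X, hit.Y);
}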

View File

@@ -0,0 +1,12 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Vision.Recognition.Helper.OpenCv
{
class OpenCvCommonHelper
{
}
}

View File

@@ -0,0 +1,39 @@
using OpenCvSharp;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Vision.Recognition.Helper.OpenCv
{
public class ResizeHelper
{
/// <summary>
/// Proportional (aspect-preserving) resize
/// </summary>
/// <param name="src">source image</param>
/// <param name="scale">scale factor applied to width and height</param>
/// <returns>the resized image, or src unchanged when the scale is ~1</returns>
public static Mat Resize(Mat src, double scale)
{
if (Math.Abs(scale - 1) < 0.00001)
{
return src;
}
return Resize(src, scale, scale);
}
public static Mat Resize(Mat src, double widthScale, double heightScale)
{
if (Math.Abs(widthScale - 1) < 0.00001 && Math.Abs(heightScale - 1) < 0.00001)
{
return src;
}
var dst = new Mat();
Cv2.Resize(src, dst, new Size(src.Width * widthScale, src.Height * heightScale));
return dst;
}
}
}
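Sketch of how the scale factors are meant to be used together with MatchTemplateHelper, assuming templates were captured at 2560×1440 while the live capture is 1920×1080 (both resolutions and the template Mat are assumptions):
MatchTemplateHelper.WidthScale = 1920.0 / 2560.0;
MatchTemplateHelper.HeightScale = 1080.0 / 1440.0;
// Returns the original Mat unchanged when the scale is ~1, otherwise a new resized Mat
Mat scaled = ResizeHelper.Resize(template, MatchTemplateHelper.WidthScale);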

View File

@@ -12,6 +12,7 @@ using System.Windows.Input;
using System.Windows.Media;
using System.Windows.Media.Imaging;
using System.Windows.Shapes;
+using Vision.Recognition;
namespace BetterGenshinImpact
{
@@ -30,7 +31,8 @@ namespace BetterGenshinImpact
public static MaskWindow Instance(ILogger<MaskWindow>? logger = null)
{
_maskWindow ??= new MaskWindow();
-_maskWindow.Logger = logger;
+_maskWindow.Logger ??= logger;
+VisionContext.Instance().Log ??= logger;
return _maskWindow;
}

View File

@@ -5,11 +5,13 @@
<TargetFramework>net7.0-windows</TargetFramework>
<Nullable>enable</Nullable>
<UseWPF>true</UseWPF>
<AllowUnsafeBlocks>true</AllowUnsafeBlocks>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Microsoft.Extensions.Logging" Version="7.0.0" />
<PackageReference Include="OpenCvSharp4.Windows" Version="4.8.0.20230708" />
<PackageReference Include="System.Drawing.Common" Version="7.0.0" />
</ItemGroup>
<ItemGroup>

View File

@@ -0,0 +1,51 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using BetterGenshinImpact;
using Microsoft.Extensions.Logging;
using OpenCvSharp;
namespace Vision.Recognition
{
/// <summary>
/// Vision recognition context
/// </summary>
public class VisionContext
{
private static VisionContext? _uniqueInstance;
private static readonly object Locker = new();
private VisionContext()
{
}
public static VisionContext Instance()
{
if (_uniqueInstance == null)
{
lock (Locker)
{
_uniqueInstance ??= new VisionContext();
}
}
return _uniqueInstance;
}
public ILogger? Log { get; set; }
public bool Drawable { get; set; }
/// <summary>
/// Rectangles to draw on the mask (overlay) window
/// </summary>
public List<Rect> DrawRectList { get; set; } = new();
/// <summary>
/// Text labels to draw on the mask (overlay) window
/// </summary>
public List<Tuple<Point, string>> DrawTextList { get; set; } = new();
}
}
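A wiring sketch for the context, assuming the host application already has an ILoggerFactory (loggerFactory below is an assumption, not part of this commit):
VisionContext.Instance().Log = loggerFactory.CreateLogger("Vision.Recognition");
VisionContext.Instance().Drawable = true; // let the helpers queue overlay drawing
VisionContext.Instance().DrawRectList.Add(new Rect(100, 200, 48, 48));
VisionContext.Instance().DrawTextList.Add(new Tuple<Point, string>(new Point(100, 190), "0.95"));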