Added Class Library FaceRecognitionDotNet
This commit is contained in:
parent
f642c5669a
commit
2ebec0b7a9
@ -160,7 +160,7 @@ public class Compare
|
||||
ticks = LogDelta(ticks, nameof(Property.Models.Stateless.A_Property.GetGroupCollection));
|
||||
}
|
||||
PropertyLogic propertyLogic = GetPropertyLogic();
|
||||
if (_IsEnvironment.Development && propertyConfiguration.PopulatePropertyId.Value && !propertyLogic.IndicesFromOld.Any())
|
||||
if (_IsEnvironment.Development && propertyConfiguration.PopulatePropertyId.Value && !propertyLogic.KeyValuePairs.Any())
|
||||
throw new Exception("Copy keyValuePairs-####.json file");
|
||||
List<PropertyHolder[]> propertyHolderCollections = Property.Models.Stateless.A_Property.Get(propertyConfiguration, reverse, modelName, predictorModelName, propertyLogic);
|
||||
if (!isSilent)
|
||||
|
@ -87,14 +87,14 @@
|
||||
"Pattern": "[^ABCDEFGHIJKLMNOPQRSTUVWXYZbcdfghjklmnpqrstvwxyz0-9]",
|
||||
"PopulatePropertyId": true,
|
||||
"PropertiesChangedForProperty": false,
|
||||
"RootDirectory": "C:/Tmp/Phares/Compare/Images 2022-07-27 - 20220727 - III",
|
||||
"RootDirectory": "C:/Tmp/Phares/Compare/Images 2022-07-27 - f642c5669a1d89d598a2efd70da9dc7129d02c15 - III",
|
||||
"WriteBitmapDataBytes": false,
|
||||
"IgnoreExtensions": [
|
||||
".gif",
|
||||
".GIF"
|
||||
],
|
||||
"PropertyContentCollectionFiles": [
|
||||
"/Images 2022-07-27 - 20220727 - III - Results/A) Property/2022-07-27/[()]/637869381676042455.json",
|
||||
"/Images 2022-07-27 - f642c5669a1d89d598a2efd70da9dc7129d02c15 - III - Results/A) Property/2022-07-27/[()]/637869381676042455.json",
|
||||
"/Not-Copy-Copy/Images 2019-06-08 - 34a9240ac28b52da97428d7725153a80a757ee6b - Not-Copy-Copy - Results/A) Property/2022-07-27/[()]/637869733124119330.json",
|
||||
"/Not-Copy-Copy/Images 2018-12-25 - 34a9240ac28b52da97428d7725153a80a757ee6b - Not-Copy-Copy - Results/A) Property/2022-07-27/[()]/637869734240700328.json",
|
||||
"/Not-Copy-Copy/Images 2018-05-12 - b01d4763d8853b6d6057a3870b2723449726da75 - Not-Copy-Copy - Results/A) Property/2022-07-27/[()]/637869734970730630.json",
|
||||
|
@ -94,7 +94,7 @@
|
||||
".GIF"
|
||||
],
|
||||
"PropertyContentCollectionFiles": [
|
||||
"/Images 2022-07-27 - 20220727 - III - Results/A) Property/2022-07-27/[()]/637869381676042455.json",
|
||||
"/Images 2022-07-27 - f642c5669a1d89d598a2efd70da9dc7129d02c15 - III - Results/A) Property/2022-07-27/[()]/637869381676042455.json",
|
||||
"/Not-Copy-Copy/Images 2019-06-08 - 34a9240ac28b52da97428d7725153a80a757ee6b - Not-Copy-Copy - Results/A) Property/2022-07-27/[()]/637869733124119330.json",
|
||||
"/Not-Copy-Copy/Images 2018-12-25 - 34a9240ac28b52da97428d7725153a80a757ee6b - Not-Copy-Copy - Results/A) Property/2022-07-27/[()]/637869734240700328.json",
|
||||
"/Not-Copy-Copy/Images 2018-05-12 - b01d4763d8853b6d6057a3870b2723449726da75 - Not-Copy-Copy - Results/A) Property/2022-07-27/[()]/637869734970730630.json",
|
||||
|
@ -63,7 +63,7 @@
|
||||
"Pattern": "[^ABCDEFGHIJKLMNOPQRSTUVWXYZbcdfghjklmnpqrstvwxyz0-9]",
|
||||
"PopulatePropertyId": true,
|
||||
"PropertiesChangedForProperty": false,
|
||||
"RootDirectory": "C:/Tmp/Phares/Compare/Images 2022-07-27 - 20220727 - III",
|
||||
"RootDirectory": "C:/Tmp/Phares/Compare/Images 2022-07-27 - f642c5669a1d89d598a2efd70da9dc7129d02c15 - III",
|
||||
"WriteBitmapDataBytes": false,
|
||||
"IgnoreExtensions": [
|
||||
".gif",
|
||||
|
128
FaceRecognitionDotNet/DisposableObject.cs
Normal file
128
FaceRecognitionDotNet/DisposableObject.cs
Normal file
@ -0,0 +1,128 @@
|
||||
namespace View_by_Distance.FaceRecognitionDotNet;
|
||||
|
||||
/// <summary>
|
||||
/// Represents a class which has managed or unmanaged resources.
|
||||
/// </summary>
|
||||
public abstract class DisposableObject : IDisposable
|
||||
{
|
||||
|
||||
#region Properties
|
||||
|
||||
/// <summary>
|
||||
/// Gets a value indicating whether this instance has been disposed.
|
||||
/// </summary>
|
||||
/// <returns>true if this instance has been disposed; otherwise, false.</returns>
|
||||
public bool IsDisposed
|
||||
{
|
||||
get;
|
||||
private set;
|
||||
/* Unmerged change from project 'FaceRecognitionotNet(netstandard2.0)'
|
||||
Before:
|
||||
/// If this object is disposed, then <see cref="System.ObjectDisposedException"/> is thrown.
|
||||
After:
|
||||
/// If this object is disposed, then <see cref="ObjectDisposedException"/> is thrown.
|
||||
*/
|
||||
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Methods
|
||||
|
||||
/// <summary>
|
||||
/// If this object is disposed, then <see cref="ObjectDisposedException"/> is thrown.
|
||||
/// </summary>
|
||||
public void ThrowIfDisposed()
|
||||
{
|
||||
if (IsDisposed)
|
||||
throw new ObjectDisposedException(GetType().FullName);
|
||||
}
|
||||
|
||||
internal void ThrowIfDisposed(string objectName)
|
||||
{
|
||||
if (IsDisposed)
|
||||
throw new ObjectDisposedException(objectName);
|
||||
}
|
||||
|
||||
#region Overrides
|
||||
|
||||
/// <summary>
|
||||
/// Releases all managed resources.
|
||||
/// </summary>
|
||||
protected virtual void DisposeManaged()
|
||||
{
|
||||
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Releases all unmanaged resources.
|
||||
/// </summary>
|
||||
protected virtual void DisposeUnmanaged()
|
||||
{
|
||||
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#endregion
|
||||
|
||||
#region IDisposable Members
|
||||
|
||||
/// <summary>
|
||||
/// Releases all resources used by this <see cref="DisposableObject"/>.
|
||||
/// </summary>
|
||||
public void Dispose()
|
||||
{
|
||||
GC.SuppressFinalize(this);
|
||||
/* Unmerged change from project 'FaceRecognitionotNet(netstandard2.0)'
|
||||
Before:
|
||||
Dispose(true);
|
||||
After:
|
||||
Dispose(true);
|
||||
*/
|
||||
|
||||
Dispose(true);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Releases all resources used by this <see cref="DisposableObject"/>.
|
||||
/// </summary>
|
||||
/// <param name="disposing">Indicate value whether <see cref="IDisposable.Dispose"/> method was called.</param>
|
||||
private void Dispose(bool disposing)
|
||||
{
|
||||
if (IsDisposed)
|
||||
{
|
||||
return;
|
||||
/* Unmerged change from project 'FaceRecognitionotNet(netstandard2.0)'
|
||||
Before:
|
||||
IsDisposed = true;
|
||||
After:
|
||||
IsDisposed = true;
|
||||
*/
|
||||
|
||||
}
|
||||
|
||||
IsDisposed = true;
|
||||
|
||||
if (disposing)
|
||||
/* Unmerged change from project 'FaceRecognitionotNet(netstandard2.0)'
|
||||
Before:
|
||||
DisposeManaged();
|
||||
After:
|
||||
DisposeManaged();
|
||||
*/
|
||||
|
||||
DisposeManaged();
|
||||
/* Unmerged change from project 'FaceRecognitionotNet(netstandard2.0)'
|
||||
Before:
|
||||
DisposeUnmanaged();
|
||||
After:
|
||||
DisposeUnmanaged();
|
||||
*/
|
||||
|
||||
DisposeUnmanaged();
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
}
|
111
FaceRecognitionDotNet/Dlib/Python/CnnFaceDetectionModelV1.cs
Normal file
111
FaceRecognitionDotNet/Dlib/Python/CnnFaceDetectionModelV1.cs
Normal file
@ -0,0 +1,111 @@
|
||||
using DlibDotNet;
|
||||
using DlibDotNet.Dnn;
|
||||
using View_by_Distance.Shared.Models.Stateless;
|
||||
|
||||
namespace View_by_Distance.FaceRecognitionDotNet.Dlib.Python;
|
||||
|
||||
internal sealed class CnnFaceDetectionModelV1
|
||||
{
|
||||
|
||||
#region Methods
|
||||
|
||||
public static IEnumerable<MModRect> Detect(LossMmod net, Image image, int upsampleNumTimes)
|
||||
{
|
||||
using PyramidDown? pyr = new(2);
|
||||
List<MModRect>? rects = new();
|
||||
|
||||
// Copy the data into dlib based objects
|
||||
using Matrix<RgbPixel>? matrix = new();
|
||||
Mode type = image.Mode;
|
||||
switch (type)
|
||||
{
|
||||
case Mode.Greyscale:
|
||||
case Mode.Rgb:
|
||||
DlibDotNet.Dlib.AssignImage(image.Matrix, matrix);
|
||||
break;
|
||||
default:
|
||||
throw new NotSupportedException("Unsupported image type, must be 8bit gray or RGB image.");
|
||||
}
|
||||
|
||||
// Upsampling the image will allow us to detect smaller faces but will cause the
|
||||
// program to use more RAM and run longer.
|
||||
int levels = upsampleNumTimes;
|
||||
while (levels > 0)
|
||||
{
|
||||
levels--;
|
||||
DlibDotNet.Dlib.PyramidUp<PyramidDown>(matrix, 2);
|
||||
}
|
||||
|
||||
OutputLabels<IEnumerable<MModRect>>? dets = net.Operator(matrix);
|
||||
|
||||
// Scale the detection locations back to the original image size
|
||||
// if the image was upscaled.
|
||||
foreach (MModRect? d in dets.First())
|
||||
{
|
||||
DRectangle drect = pyr.RectDown(new DRectangle(d.Rect), (uint)upsampleNumTimes);
|
||||
d.Rect = new Rectangle((int)drect.Left, (int)drect.Top, (int)drect.Right, (int)drect.Bottom);
|
||||
rects.Add(d);
|
||||
}
|
||||
|
||||
return rects;
|
||||
}
|
||||
|
||||
public static IEnumerable<IEnumerable<MModRect>> DetectMulti(LossMmod net, IEnumerable<Image> images, int upsampleNumTimes, int batchSize = 128)
|
||||
{
|
||||
List<Matrix<RgbPixel>>? destImages = new();
|
||||
List<IEnumerable<MModRect>>? allRects = new();
|
||||
|
||||
try
|
||||
{
|
||||
using PyramidDown? pyr = new(2);
|
||||
// Copy the data into dlib based objects
|
||||
foreach (Image? image in images)
|
||||
{
|
||||
Matrix<RgbPixel>? matrix = new();
|
||||
Mode type = image.Mode;
|
||||
switch (type)
|
||||
{
|
||||
case Mode.Greyscale:
|
||||
case Mode.Rgb:
|
||||
DlibDotNet.Dlib.AssignImage(image.Matrix, matrix);
|
||||
break;
|
||||
default:
|
||||
throw new NotSupportedException("Unsupported image type, must be 8bit gray or RGB image.");
|
||||
}
|
||||
|
||||
for (int i = 0; i < upsampleNumTimes; i++)
|
||||
DlibDotNet.Dlib.PyramidUp(matrix);
|
||||
|
||||
destImages.Add(matrix);
|
||||
}
|
||||
|
||||
for (int i = 1; i < destImages.Count; i++)
|
||||
if (destImages[i - 1].Columns != destImages[i].Columns || destImages[i - 1].Rows != destImages[i].Rows)
|
||||
throw new ArgumentException("Images in list must all have the same dimensions.");
|
||||
|
||||
OutputLabels<IEnumerable<MModRect>>? dets = net.Operator(destImages, (ulong)batchSize);
|
||||
foreach (IEnumerable<MModRect>? det in dets)
|
||||
{
|
||||
List<MModRect>? rects = new();
|
||||
foreach (MModRect? d in det)
|
||||
{
|
||||
DRectangle drect = pyr.RectDown(new DRectangle(d.Rect), (uint)upsampleNumTimes);
|
||||
d.Rect = new Rectangle((int)drect.Left, (int)drect.Top, (int)drect.Right, (int)drect.Bottom);
|
||||
rects.Add(d);
|
||||
}
|
||||
|
||||
allRects.Add(rects);
|
||||
}
|
||||
}
|
||||
finally
|
||||
{
|
||||
foreach (Matrix<RgbPixel>? matrix in destImages)
|
||||
matrix.Dispose();
|
||||
}
|
||||
|
||||
return allRects;
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
}
|
129
FaceRecognitionDotNet/Dlib/Python/FaceRecognitionModelV1.cs
Normal file
129
FaceRecognitionDotNet/Dlib/Python/FaceRecognitionModelV1.cs
Normal file
@ -0,0 +1,129 @@
|
||||
using DlibDotNet;
|
||||
using DlibDotNet.Dnn;
|
||||
|
||||
namespace View_by_Distance.FaceRecognitionDotNet.Dlib.Python;
|
||||
|
||||
internal sealed class FaceRecognitionModelV1
|
||||
{
|
||||
|
||||
#region Methods
|
||||
|
||||
public static Matrix<double> ComputeFaceDescriptor(LossMetric net, Image img, FullObjectDetection face, int numJitters)
|
||||
{
|
||||
FullObjectDetection[]? faces = new[] { face };
|
||||
return ComputeFaceDescriptors(net, img, faces, numJitters).First();
|
||||
}
|
||||
|
||||
public static IEnumerable<Matrix<double>> ComputeFaceDescriptors(LossMetric net, Image img, IEnumerable<FullObjectDetection> faces, int numJitters)
|
||||
{
|
||||
Image[]? batchImage = new[] { img };
|
||||
IEnumerable<FullObjectDetection>[]? batchFaces = new[] { faces };
|
||||
return BatchComputeFaceDescriptors(net, batchImage, batchFaces, numJitters).First();
|
||||
}
|
||||
|
||||
public static IEnumerable<IEnumerable<Matrix<double>>> BatchComputeFaceDescriptors(LossMetric net,
|
||||
IList<Image> batchImages,
|
||||
IList<IEnumerable<FullObjectDetection>> batchFaces,
|
||||
int numJitters)
|
||||
{
|
||||
if (batchImages.Count != batchFaces.Count)
|
||||
throw new ArgumentException("The array of images and the array of array of locations must be of the same size");
|
||||
|
||||
foreach (IEnumerable<FullObjectDetection>? faces in batchFaces)
|
||||
foreach (FullObjectDetection? f in faces)
|
||||
{
|
||||
if (f.Parts is not 68 and not 5)
|
||||
throw new ArgumentException("The full_object_detection must use the iBUG 300W 68 point face landmark style or dlib's 5 point style.");
|
||||
}
|
||||
|
||||
List<Array<Matrix<RgbPixel>>>? faceChipsArray = new(batchImages.Count);
|
||||
List<Matrix<RgbPixel>>? faceChips = new();
|
||||
for (int i = 0; i < batchImages.Count; ++i)
|
||||
{
|
||||
IEnumerable<FullObjectDetection>? faces = batchFaces[i];
|
||||
Image? img = batchImages[i];
|
||||
|
||||
List<ChipDetails>? dets = new(faces.Count());
|
||||
foreach (FullObjectDetection? f in faces)
|
||||
dets.Add(DlibDotNet.Dlib.GetFaceChipDetails(f, 150, 0.25));
|
||||
|
||||
Array<Matrix<RgbPixel>>? thisImageFaceChips = DlibDotNet.Dlib.ExtractImageChips<RgbPixel>(img.Matrix, dets);
|
||||
foreach (Matrix<RgbPixel>? chip in thisImageFaceChips)
|
||||
faceChips.Add(chip);
|
||||
faceChipsArray.Add(thisImageFaceChips);
|
||||
|
||||
foreach (ChipDetails? det in dets)
|
||||
det.Dispose();
|
||||
}
|
||||
|
||||
List<List<Matrix<double>>>? faceDescriptors = new();
|
||||
for (int i = 0, count = batchImages.Count; i < count; i++)
|
||||
faceDescriptors.Add(new List<Matrix<double>>());
|
||||
|
||||
if (numJitters <= 1)
|
||||
{
|
||||
// extract descriptors and convert from float vectors to double vectors
|
||||
OutputLabels<Matrix<float>>? descriptors = net.Operator(faceChips, 16);
|
||||
int index = 0;
|
||||
Matrix<float>[]? list = descriptors.Select(matrix => matrix).ToArray();
|
||||
for (int i = 0; i < batchFaces.Count; ++i)
|
||||
for (int j = 0; j < batchFaces[i].Count(); ++j)
|
||||
faceDescriptors[i].Add(DlibDotNet.Dlib.MatrixCast<double>(list[index++]));
|
||||
|
||||
if (index != list.Length)
|
||||
throw new ApplicationException();
|
||||
}
|
||||
else
|
||||
{
|
||||
// extract descriptors and convert from float vectors to double vectors
|
||||
int index = 0;
|
||||
for (int i = 0; i < batchFaces.Count; ++i)
|
||||
for (int j = 0; j < batchFaces[i].Count(); ++j)
|
||||
{
|
||||
Matrix<RgbPixel>[]? tmp = JitterImage(faceChips[index++], numJitters).ToArray();
|
||||
using (OutputLabels<Matrix<float>>? tmp2 = net.Operator(tmp, 16))
|
||||
using (MatrixOp? mat = DlibDotNet.Dlib.Mat(tmp2))
|
||||
{
|
||||
Matrix<double>? r = DlibDotNet.Dlib.Mean<double>(mat);
|
||||
faceDescriptors[i].Add(r);
|
||||
}
|
||||
|
||||
foreach (Matrix<RgbPixel>? matrix in tmp)
|
||||
matrix.Dispose();
|
||||
}
|
||||
|
||||
if (index != faceChips.Count)
|
||||
throw new ApplicationException();
|
||||
}
|
||||
|
||||
if (faceChipsArray.Any())
|
||||
{
|
||||
foreach (Array<Matrix<RgbPixel>>? array in faceChipsArray)
|
||||
{
|
||||
foreach (Matrix<RgbPixel>? faceChip in array)
|
||||
faceChip.Dispose();
|
||||
array.Dispose();
|
||||
}
|
||||
}
|
||||
|
||||
return faceDescriptors;
|
||||
}
|
||||
|
||||
#region Helpers
|
||||
|
||||
private static readonly Rand _Rand = new();
|
||||
|
||||
private static IEnumerable<Matrix<RgbPixel>> JitterImage(Matrix<RgbPixel> img, int numJitters)
|
||||
{
|
||||
List<Matrix<RgbPixel>>? crops = new();
|
||||
for (int i = 0; i < numJitters; ++i)
|
||||
crops.Add(DlibDotNet.Dlib.JitterImage(img, _Rand));
|
||||
|
||||
return crops;
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#endregion
|
||||
|
||||
}
|
169
FaceRecognitionDotNet/Dlib/Python/SimpleObjectDetector.cs
Normal file
169
FaceRecognitionDotNet/Dlib/Python/SimpleObjectDetector.cs
Normal file
@ -0,0 +1,169 @@
|
||||
using DlibDotNet;
|
||||
using View_by_Distance.Shared.Models.Stateless;
|
||||
|
||||
namespace View_by_Distance.FaceRecognitionDotNet.Dlib.Python;
|
||||
|
||||
internal sealed class SimpleObjectDetector
|
||||
{
|
||||
|
||||
#region Methods
|
||||
|
||||
public static IEnumerable<Rectangle> RunDetectorWithUpscale1(FrontalFaceDetector detector,
|
||||
Image img,
|
||||
uint upsamplingAmount,
|
||||
double adjustThreshold,
|
||||
List<double> detectionConfidences,
|
||||
List<ulong> weightIndices)
|
||||
{
|
||||
List<Rectangle>? rectangles = new();
|
||||
|
||||
if (img.Mode == Mode.Greyscale)
|
||||
{
|
||||
Matrix<byte>? greyscaleMatrix = img.Matrix as Matrix<byte>;
|
||||
if (upsamplingAmount == 0)
|
||||
{
|
||||
detector.Operator(greyscaleMatrix, out IEnumerable<RectDetection>? rectDetections, adjustThreshold);
|
||||
|
||||
RectDetection[]? dets = rectDetections.ToArray();
|
||||
SplitRectDetections(dets, rectangles, detectionConfidences, weightIndices);
|
||||
|
||||
foreach (RectDetection? rectDetection in dets)
|
||||
rectDetection.Dispose();
|
||||
}
|
||||
else
|
||||
{
|
||||
using PyramidDown? pyr = new(2);
|
||||
Matrix<byte>? temp = null;
|
||||
|
||||
try
|
||||
{
|
||||
DlibDotNet.Dlib.PyramidUp(greyscaleMatrix, pyr, out temp);
|
||||
|
||||
uint levels = upsamplingAmount - 1;
|
||||
while (levels > 0)
|
||||
{
|
||||
levels--;
|
||||
DlibDotNet.Dlib.PyramidUp(temp);
|
||||
}
|
||||
|
||||
detector.Operator(temp, out IEnumerable<RectDetection>? rectDetections, adjustThreshold);
|
||||
|
||||
RectDetection[]? dets = rectDetections.ToArray();
|
||||
foreach (RectDetection? t in dets)
|
||||
t.Rect = pyr.RectDown(t.Rect, upsamplingAmount);
|
||||
|
||||
SplitRectDetections(dets, rectangles, detectionConfidences, weightIndices);
|
||||
|
||||
foreach (RectDetection? rectDetection in dets)
|
||||
rectDetection.Dispose();
|
||||
}
|
||||
finally
|
||||
{
|
||||
temp?.Dispose();
|
||||
}
|
||||
}
|
||||
|
||||
return rectangles;
|
||||
}
|
||||
else
|
||||
{
|
||||
Matrix<RgbPixel>? rgbMatrix = img.Matrix as Matrix<RgbPixel>;
|
||||
if (upsamplingAmount == 0)
|
||||
{
|
||||
detector.Operator(rgbMatrix, out IEnumerable<RectDetection>? rectDetections, adjustThreshold);
|
||||
|
||||
RectDetection[]? dets = rectDetections.ToArray();
|
||||
SplitRectDetections(dets, rectangles, detectionConfidences, weightIndices);
|
||||
|
||||
foreach (RectDetection? rectDetection in dets)
|
||||
rectDetection.Dispose();
|
||||
}
|
||||
else
|
||||
{
|
||||
using PyramidDown? pyr = new(2);
|
||||
Matrix<RgbPixel>? temp = null;
|
||||
|
||||
try
|
||||
{
|
||||
DlibDotNet.Dlib.PyramidUp(rgbMatrix, pyr, out temp);
|
||||
|
||||
uint levels = upsamplingAmount - 1;
|
||||
while (levels > 0)
|
||||
{
|
||||
levels--;
|
||||
DlibDotNet.Dlib.PyramidUp(temp);
|
||||
}
|
||||
|
||||
detector.Operator(temp, out IEnumerable<RectDetection>? rectDetections, adjustThreshold);
|
||||
|
||||
RectDetection[]? dets = rectDetections.ToArray();
|
||||
foreach (RectDetection? t in dets)
|
||||
t.Rect = pyr.RectDown(t.Rect, upsamplingAmount);
|
||||
|
||||
SplitRectDetections(dets, rectangles, detectionConfidences, weightIndices);
|
||||
|
||||
foreach (RectDetection? rectDetection in dets)
|
||||
rectDetection.Dispose();
|
||||
}
|
||||
finally
|
||||
{
|
||||
temp?.Dispose();
|
||||
}
|
||||
}
|
||||
|
||||
return rectangles;
|
||||
}
|
||||
}
|
||||
|
||||
public static IEnumerable<Tuple<Rectangle, double>> RunDetectorWithUpscale2(FrontalFaceDetector detector,
|
||||
Image image,
|
||||
uint upsamplingAmount)
|
||||
{
|
||||
if (detector == null)
|
||||
throw new ArgumentNullException(nameof(detector));
|
||||
if (image == null)
|
||||
throw new ArgumentNullException(nameof(image));
|
||||
|
||||
detector.ThrowIfDisposed();
|
||||
image.ThrowIfDisposed();
|
||||
|
||||
List<double>? detectionConfidences = new();
|
||||
List<ulong>? weightIndices = new();
|
||||
const double adjustThreshold = 0.0;
|
||||
|
||||
Rectangle[]? rects = RunDetectorWithUpscale1(detector,
|
||||
image,
|
||||
upsamplingAmount,
|
||||
adjustThreshold,
|
||||
detectionConfidences,
|
||||
weightIndices).ToArray();
|
||||
|
||||
int index = 0;
|
||||
foreach (Rectangle rect in rects)
|
||||
yield return new Tuple<Rectangle, double>(rect, detectionConfidences[index++]);
|
||||
}
|
||||
|
||||
#region Helpers
|
||||
|
||||
private static void SplitRectDetections(RectDetection[] rectDetections,
|
||||
List<Rectangle> rectangles,
|
||||
List<double> detectionConfidences,
|
||||
List<ulong> weightIndices)
|
||||
{
|
||||
rectangles.Clear();
|
||||
detectionConfidences.Clear();
|
||||
weightIndices.Clear();
|
||||
|
||||
foreach (RectDetection? rectDetection in rectDetections)
|
||||
{
|
||||
rectangles.Add(rectDetection.Rect);
|
||||
detectionConfidences.Add(rectDetection.DetectionConfidence);
|
||||
weightIndices.Add(rectDetection.WeightIndex);
|
||||
}
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#endregion
|
||||
|
||||
}
|
26
FaceRecognitionDotNet/Extensions/FaceDetector.cs
Normal file
26
FaceRecognitionDotNet/Extensions/FaceDetector.cs
Normal file
@ -0,0 +1,26 @@
|
||||
using DlibDotNet;
|
||||
using View_by_Distance.Shared.Models;
|
||||
|
||||
namespace View_by_Distance.FaceRecognitionDotNet.Extensions;
|
||||
|
||||
/// <summary>
|
||||
/// An abstract base class that provides functionality to detect face locations from image.
|
||||
/// </summary>
|
||||
public abstract class FaceDetector : DisposableObject
|
||||
{
|
||||
|
||||
#region Methods
|
||||
|
||||
internal IEnumerable<Location> Detect(Image image, int numberOfTimesToUpsample) => RawDetect(image.Matrix, numberOfTimesToUpsample);
|
||||
|
||||
/// <summary>
|
||||
/// Returns an enumerable collection of face location correspond to all faces in specified image.
|
||||
/// </summary>
|
||||
/// <param name="matrix">The matrix contains a face.</param>
|
||||
/// <param name="numberOfTimesToUpsample">The number of times to up-sample the image when finding faces.</param>
|
||||
/// <returns>An enumerable collection of face location correspond to all faces.</returns>
|
||||
protected abstract IEnumerable<Location> RawDetect(MatrixBase matrix, int numberOfTimesToUpsample);
|
||||
|
||||
#endregion
|
||||
|
||||
}
|
36
FaceRecognitionDotNet/Extensions/FaceLandmarkDetector.cs
Normal file
36
FaceRecognitionDotNet/Extensions/FaceLandmarkDetector.cs
Normal file
@ -0,0 +1,36 @@
|
||||
using DlibDotNet;
|
||||
using View_by_Distance.Shared.Models;
|
||||
using View_by_Distance.Shared.Models.Stateless;
|
||||
|
||||
namespace View_by_Distance.FaceRecognitionDotNet.Extensions;
|
||||
|
||||
/// <summary>
|
||||
/// An abstract base class that provides functionality to detect face parts locations from face image.
|
||||
/// </summary>
|
||||
public abstract class FaceLandmarkDetector : DisposableObject
|
||||
{
|
||||
|
||||
#region Methods
|
||||
|
||||
internal FullObjectDetection Detect(Image image, Location location) => RawDetect(image.Matrix, location);
|
||||
|
||||
internal IEnumerable<Dictionary<FacePart, IEnumerable<FacePoint>>> GetLandmarks(IEnumerable<FacePoint[]> landmarkTuples) => RawGetLandmarks(landmarkTuples);
|
||||
|
||||
/// <summary>
|
||||
/// Returns an object contains information of face parts corresponds to specified location in specified image.
|
||||
/// </summary>
|
||||
/// <param name="matrix">The matrix contains a face.</param>
|
||||
/// <param name="location">The location rectangle for a face.</param>
|
||||
/// <returns>An object contains information of face parts.</returns>
|
||||
protected abstract FullObjectDetection RawDetect(MatrixBase matrix, Location location);
|
||||
|
||||
/// <summary>
|
||||
/// Returns an enumerable collection of dictionary of face parts locations (eyes, nose, etc).
|
||||
/// </summary>
|
||||
/// <param name="landmarkTuples">The enumerable collection of face parts location.</param>
|
||||
/// <returns>An enumerable collection of dictionary of face parts locations (eyes, nose, etc).</returns>
|
||||
protected abstract IEnumerable<Dictionary<FacePart, IEnumerable<FacePoint>>> RawGetLandmarks(IEnumerable<FacePoint[]> landmarkTuples);
|
||||
|
||||
#endregion
|
||||
|
||||
}
|
105
FaceRecognitionDotNet/FaceEncoding.cs
Normal file
105
FaceRecognitionDotNet/FaceEncoding.cs
Normal file
@ -0,0 +1,105 @@
|
||||
using DlibDotNet;
|
||||
using System.Runtime.Serialization;
|
||||
|
||||
namespace View_by_Distance.FaceRecognitionDotNet;
|
||||
|
||||
/// <summary>
|
||||
/// Represents a feature data of face. This class cannot be inherited.
|
||||
/// </summary>
|
||||
[Serializable]
|
||||
public sealed class FaceEncoding : DisposableObject, ISerializable
|
||||
{
|
||||
|
||||
#region Fields
|
||||
|
||||
[NonSerialized]
|
||||
private readonly Matrix<double> _Encoding;
|
||||
|
||||
#endregion
|
||||
|
||||
#region Constructors
|
||||
|
||||
internal FaceEncoding(Matrix<double> encoding) => _Encoding = encoding;
|
||||
|
||||
private FaceEncoding(SerializationInfo info, StreamingContext context)
|
||||
{
|
||||
if (info == null)
|
||||
throw new ArgumentNullException(nameof(info));
|
||||
|
||||
double[]? array = info.GetValue(nameof(_Encoding), typeof(double[])) as double[];
|
||||
int? row = (int?)info.GetValue(nameof(_Encoding.Rows), typeof(int));
|
||||
int? column = (int?)info.GetValue(nameof(_Encoding.Columns), typeof(int));
|
||||
if (row is null)
|
||||
throw new Exception($"{nameof(row)} is null");
|
||||
if (column is null)
|
||||
throw new Exception($"{nameof(column)} is null");
|
||||
_Encoding = new Matrix<double>(array, row.Value, column.Value);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Properties
|
||||
|
||||
internal Matrix<double> Encoding => _Encoding;
|
||||
|
||||
/// <summary>
|
||||
/// Gets the size of feature data.
|
||||
/// </summary>
|
||||
/// <exception cref="ObjectDisposedException">This object is disposed.</exception>
|
||||
public int Size
|
||||
{
|
||||
get
|
||||
{
|
||||
ThrowIfDisposed();
|
||||
return _Encoding.Size;
|
||||
}
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Methods
|
||||
|
||||
/// <summary>
|
||||
/// Gets a feature data of face as raw format.
|
||||
/// </summary>
|
||||
/// <returns>A <see cref="double"/> array that represents a feature data.</returns>
|
||||
/// <remarks><see cref="FaceEncoding"/> class supports serialization. This method is for interoperability between FaceRecognitionotNet and dlib.</remarks>
|
||||
/// <exception cref="ObjectDisposedException">This object is disposed.</exception>
|
||||
public double[] GetRawEncoding()
|
||||
{
|
||||
ThrowIfDisposed();
|
||||
return _Encoding.ToArray();
|
||||
}
|
||||
|
||||
#region Overrides
|
||||
|
||||
/// <summary>
|
||||
/// Releases all unmanaged resources.
|
||||
/// </summary>
|
||||
protected override void DisposeUnmanaged()
|
||||
{
|
||||
base.DisposeUnmanaged();
|
||||
_Encoding?.Dispose();
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#endregion
|
||||
|
||||
#region ISerializable Members
|
||||
|
||||
/// <summary>
|
||||
/// Populates a <see cref="SerializationInfo"/> with the data needed to serialize the target object.
|
||||
/// </summary>
|
||||
/// <param name="info">The <see cref="SerializationInfo"/> to populate with data.</param>
|
||||
/// <param name="context">The destination (see <see cref="StreamingContext"/>) for this serialization.</param>
|
||||
public void GetObjectData(SerializationInfo info, StreamingContext context)
|
||||
{
|
||||
info.AddValue(nameof(_Encoding), _Encoding.ToArray());
|
||||
info.AddValue(nameof(_Encoding.Rows), _Encoding.Rows);
|
||||
info.AddValue(nameof(_Encoding.Columns), _Encoding.Columns);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
}
|
871
FaceRecognitionDotNet/FaceRecognition.cs
Normal file
871
FaceRecognitionDotNet/FaceRecognition.cs
Normal file
@ -0,0 +1,871 @@
|
||||
using DlibDotNet;
|
||||
using DlibDotNet.Dnn;
|
||||
using System.Drawing;
|
||||
using System.Drawing.Imaging;
|
||||
using System.Runtime.InteropServices;
|
||||
using System.Text;
|
||||
using View_by_Distance.FaceRecognitionDotNet.Dlib.Python;
|
||||
using View_by_Distance.FaceRecognitionDotNet.Extensions;
|
||||
using View_by_Distance.Shared.Models;
|
||||
using View_by_Distance.Shared.Models.Stateless;
|
||||
|
||||
namespace View_by_Distance.FaceRecognitionDotNet;
|
||||
|
||||
/// <summary>
|
||||
/// Provides the method to find and recognize face methods. This class cannot be inherited.
|
||||
/// </summary>
|
||||
public sealed class FaceRecognition : DisposableObject
|
||||
{
|
||||
|
||||
#region Fields
|
||||
|
||||
private readonly ShapePredictor _PosePredictor68Point;
|
||||
|
||||
private readonly ShapePredictor _PosePredictor5Point;
|
||||
|
||||
private readonly LossMmod _CnnFaceDetector;
|
||||
|
||||
private readonly LossMetric _FaceEncoder;
|
||||
|
||||
private readonly FrontalFaceDetector _FaceDetector;
|
||||
|
||||
#endregion
|
||||
|
||||
#region Constructors
|
||||
|
||||
/// <summary>
|
||||
/// Initializes a new instance of the <see cref="FaceRecognition"/> class with the directory path that stores model files.
|
||||
/// </summary>
|
||||
/// <param name="directory">The directory path that stores model files.</param>
|
||||
/// <exception cref="FileNotFoundException">The model file is not found.</exception>
|
||||
/// <exception cref="DirectoryNotFoundException">The specified directory path is not found.</exception>
|
||||
private FaceRecognition(string directory)
|
||||
{
|
||||
if (!Directory.Exists(directory))
|
||||
throw new DirectoryNotFoundException(directory);
|
||||
|
||||
string? predictor68PointModel = Path.Combine(directory, FaceRecognitionModels.GetPosePredictorModelLocation());
|
||||
if (!File.Exists(predictor68PointModel))
|
||||
throw new FileNotFoundException(predictor68PointModel);
|
||||
|
||||
string? predictor5PointModel = Path.Combine(directory, FaceRecognitionModels.GetPosePredictorFivePointModelLocation());
|
||||
if (!File.Exists(predictor5PointModel))
|
||||
throw new FileNotFoundException(predictor5PointModel);
|
||||
|
||||
string? cnnFaceDetectionModel = Path.Combine(directory, FaceRecognitionModels.GetCnnFaceDetectorModelLocation());
|
||||
if (!File.Exists(cnnFaceDetectionModel))
|
||||
throw new FileNotFoundException(cnnFaceDetectionModel);
|
||||
|
||||
string? faceRecognitionModel = Path.Combine(directory, FaceRecognitionModels.GetFaceRecognitionModelLocation());
|
||||
if (!File.Exists(faceRecognitionModel))
|
||||
throw new FileNotFoundException(faceRecognitionModel);
|
||||
|
||||
_FaceDetector?.Dispose();
|
||||
_FaceDetector = DlibDotNet.Dlib.GetFrontalFaceDetector();
|
||||
|
||||
_PosePredictor68Point?.Dispose();
|
||||
_PosePredictor68Point = ShapePredictor.Deserialize(predictor68PointModel);
|
||||
|
||||
_PosePredictor5Point?.Dispose();
|
||||
_PosePredictor5Point = ShapePredictor.Deserialize(predictor5PointModel);
|
||||
|
||||
_CnnFaceDetector?.Dispose();
|
||||
_CnnFaceDetector = LossMmod.Deserialize(cnnFaceDetectionModel);
|
||||
|
||||
_FaceEncoder?.Dispose();
|
||||
_FaceEncoder = LossMetric.Deserialize(faceRecognitionModel);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Initializes a new instance of the <see cref="FaceRecognition"/> class with the instance that contains model binary datum.
|
||||
/// </summary>
|
||||
/// <param name="parameter">The instance that contains model binary datum.</param>
|
||||
/// <exception cref="ArgumentNullException"><paramref name="parameter"/> is null.</exception>
|
||||
/// <exception cref="NullReferenceException">The model data is null.</exception>
|
||||
private FaceRecognition(ModelParameter parameter)
|
||||
{
|
||||
if (parameter == null)
|
||||
throw new ArgumentNullException(nameof(parameter));
|
||||
|
||||
if (parameter.PosePredictor5FaceLandmarksModel == null)
|
||||
throw new NullReferenceException(nameof(parameter.PosePredictor5FaceLandmarksModel));
|
||||
|
||||
if (parameter.PosePredictor68FaceLandmarksModel == null)
|
||||
throw new NullReferenceException(nameof(parameter.PosePredictor68FaceLandmarksModel));
|
||||
|
||||
if (parameter.CnnFaceDetectorModel == null)
|
||||
throw new NullReferenceException(nameof(parameter.CnnFaceDetectorModel));
|
||||
|
||||
if (parameter.FaceRecognitionModel == null)
|
||||
throw new NullReferenceException(nameof(parameter.FaceRecognitionModel));
|
||||
|
||||
_FaceDetector?.Dispose();
|
||||
_FaceDetector = DlibDotNet.Dlib.GetFrontalFaceDetector();
|
||||
|
||||
_PosePredictor68Point?.Dispose();
|
||||
_PosePredictor68Point = ShapePredictor.Deserialize(parameter.PosePredictor68FaceLandmarksModel);
|
||||
|
||||
_PosePredictor5Point?.Dispose();
|
||||
_PosePredictor5Point = ShapePredictor.Deserialize(parameter.PosePredictor5FaceLandmarksModel);
|
||||
|
||||
_CnnFaceDetector?.Dispose();
|
||||
_CnnFaceDetector = LossMmod.Deserialize(parameter.CnnFaceDetectorModel);
|
||||
|
||||
_FaceEncoder?.Dispose();
|
||||
_FaceEncoder = LossMetric.Deserialize(parameter.FaceRecognitionModel);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Properties
|
||||
|
||||
/// <summary>
|
||||
/// Gets or sets the custom face detector that user defined.
|
||||
/// </summary>
|
||||
public FaceDetector? CustomFaceDetector { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Gets or sets the custom face landmark detector that user defined.
|
||||
/// </summary>
|
||||
public FaceLandmarkDetector? CustomFaceLandmarkDetector { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Gets or sets the character encoding to convert <see cref="System.String"/> to array of <see cref="byte"/> for internal library.
|
||||
/// </summary>
|
||||
public static Encoding InternalEncoding
|
||||
{
|
||||
get => DlibDotNet.Dlib.Encoding;
|
||||
set => DlibDotNet.Dlib.Encoding = value ?? Encoding.UTF8;
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Methods
|
||||
|
||||
/// <summary>
|
||||
/// Returns an enumerable collection of array of bounding boxes of human faces in a image using the cnn face detector.
|
||||
/// </summary>
|
||||
/// <param name="images">An enumerable collection of images.</param>
|
||||
/// <param name="numberOfTimesToUpsample">The number of image looking for faces. Higher numbers find smaller faces.</param>
|
||||
/// <param name="batchSize">The number of images to include in each GPU processing batch.</param>
|
||||
/// <returns>An enumerable collection of array of found face locations.</returns>
|
||||
/// <exception cref="ArgumentNullException"><paramref name="images"/> is null.</exception>
|
||||
public IEnumerable<Location[]> BatchFaceLocations(IEnumerable<Image> images, int numberOfTimesToUpsample = 1, int batchSize = 128)
|
||||
{
|
||||
if (images == null)
|
||||
throw new ArgumentNullException(nameof(images));
|
||||
|
||||
List<Location[]>? results = new();
|
||||
|
||||
Image[]? imagesArray = images.ToArray();
|
||||
if (!imagesArray.Any())
|
||||
return results;
|
||||
|
||||
IEnumerable<MModRect>[]? rawDetectionsBatched = RawFaceLocationsBatched(imagesArray, numberOfTimesToUpsample, batchSize).ToArray();
|
||||
|
||||
Image? image = imagesArray[0];
|
||||
for (int index = 0; index < rawDetectionsBatched.Length; index++)
|
||||
{
|
||||
MModRect[]? faces = rawDetectionsBatched[index].ToArray();
|
||||
Location[]? locations = faces.Select(rect => new Location(TrimBound(rect.Rect, image.Width, image.Height), rect.DetectionConfidence)).ToArray();
|
||||
foreach (MModRect? face in faces)
|
||||
face.Dispose();
|
||||
results.Add(locations);
|
||||
}
|
||||
|
||||
return results;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Compare a known face encoding against a candidate encoding to see if they match.
|
||||
/// </summary>
|
||||
/// <param name="knownFaceEncoding">A known face encodings.</param>
|
||||
/// <param name="faceEncodingToCheck">A single face encoding to compare against a known face encoding.</param>
|
||||
/// <param name="tolerance">The distance between faces to consider it a match. Lower is more strict. The default value is 0.6.</param>
|
||||
/// <returns>A True/False value indicating which known a face encoding matches the face encoding to check.</returns>
|
||||
/// <exception cref="ArgumentNullException"><paramref name="knownFaceEncoding"/> or <paramref name="faceEncodingToCheck"/> is null.</exception>
|
||||
/// <exception cref="ObjectDisposedException"><paramref name="knownFaceEncoding"/> or <paramref name="faceEncodingToCheck"/>.</exception>
|
||||
public static bool CompareFace(FaceEncoding knownFaceEncoding, FaceEncoding faceEncodingToCheck, double tolerance = 0.6d)
|
||||
{
|
||||
if (knownFaceEncoding == null)
|
||||
throw new ArgumentNullException(nameof(knownFaceEncoding));
|
||||
if (faceEncodingToCheck == null)
|
||||
throw new ArgumentNullException(nameof(faceEncodingToCheck));
|
||||
|
||||
knownFaceEncoding.ThrowIfDisposed();
|
||||
faceEncodingToCheck.ThrowIfDisposed();
|
||||
|
||||
return FaceDistance(knownFaceEncoding, faceEncodingToCheck) <= tolerance;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Compare an enumerable collection of face encodings against a candidate encoding to see if they match.
|
||||
/// </summary>
|
||||
/// <param name="knownFaceEncodings">An enumerable collection of known face encodings.</param>
|
||||
/// <param name="faceEncodingToCheck">A single face encoding to compare against the enumerable collection.</param>
|
||||
/// <param name="tolerance">The distance between faces to consider it a match. Lower is more strict. The default value is 0.6.</param>
|
||||
/// <returns>An enumerable collection of True/False values indicating which known face encodings match the face encoding to check.</returns>
|
||||
/// <exception cref="ArgumentNullException"><paramref name="knownFaceEncodings"/> or <paramref name="faceEncodingToCheck"/> is null.</exception>
|
||||
/// <exception cref="ObjectDisposedException"><paramref name="faceEncodingToCheck"/> is disposed. Or <paramref name="knownFaceEncodings"/> contains disposed object.</exception>
|
||||
public static IEnumerable<bool> CompareFaces(IEnumerable<FaceEncoding> knownFaceEncodings, FaceEncoding faceEncodingToCheck, double tolerance = 0.6d)
|
||||
{
|
||||
if (knownFaceEncodings == null)
|
||||
throw new ArgumentNullException(nameof(knownFaceEncodings));
|
||||
if (faceEncodingToCheck == null)
|
||||
throw new ArgumentNullException(nameof(faceEncodingToCheck));
|
||||
|
||||
faceEncodingToCheck.ThrowIfDisposed();
|
||||
|
||||
FaceEncoding[]? array = knownFaceEncodings.ToArray();
|
||||
if (array.Any(encoding => encoding.IsDisposed))
|
||||
throw new ObjectDisposedException($"{nameof(knownFaceEncodings)} contains disposed object.");
|
||||
|
||||
List<bool>? results = new();
|
||||
if (array.Length == 0)
|
||||
return results;
|
||||
|
||||
foreach (FaceEncoding? faceEncoding in array)
|
||||
results.Add(FaceDistance(faceEncoding, faceEncodingToCheck) <= tolerance);
|
||||
|
||||
return results;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Create a new instance of the <see cref="FaceRecognition"/> class.
|
||||
/// </summary>
|
||||
/// <param name="directory">The directory path that stores model files.</param>
|
||||
/// <exception cref="FileNotFoundException">The model file is not found.</exception>
|
||||
/// <exception cref="DirectoryNotFoundException">The specified directory path is not found.</exception>
|
||||
public static FaceRecognition Create(string directory) => new(directory);
|
||||
|
||||
/// <summary>
|
||||
/// Create a new instance of the <see cref="FaceRecognition"/> class.
|
||||
/// </summary>
|
||||
/// <param name="parameter">The instance that contains model binary datum.</param>
|
||||
/// <exception cref="ArgumentNullException"><paramref name="parameter"/> is null.</exception>
|
||||
/// <exception cref="NullReferenceException">The model data is null.</exception>
|
||||
public static FaceRecognition Create(ModelParameter parameter) => new(parameter);
|
||||
|
||||
/// <summary>
|
||||
/// Crop a specified image with enumerable collection of face locations.
|
||||
/// </summary>
|
||||
/// <param name="image">The image contains a face.</param>
|
||||
/// <param name="locations">The enumerable collection of location rectangle for faces.</param>
|
||||
/// <returns></returns>
|
||||
/// <exception cref="ArgumentNullException"><paramref name="image"/> or <paramref name="locations"/> is null.</exception>
|
||||
/// <exception cref="ObjectDisposedException"><paramref name="image"/> is disposed.</exception>
|
||||
public static IEnumerable<Image> CropFaces(Image image, IEnumerable<Location> locations)
|
||||
{
|
||||
if (image == null)
|
||||
throw new ArgumentNullException(nameof(image));
|
||||
if (locations == null)
|
||||
throw new ArgumentNullException(nameof(locations));
|
||||
|
||||
image.ThrowIfDisposed();
|
||||
|
||||
List<Image>? results = new();
|
||||
foreach (Location? location in locations)
|
||||
{
|
||||
DlibDotNet.Rectangle rect = new(location.Left, location.Top, location.Right, location.Bottom);
|
||||
DPoint[]? dPoint = new[]
|
||||
{
|
||||
new DPoint(rect.Left, rect.Top),
|
||||
new DPoint(rect.Right, rect.Top),
|
||||
new DPoint(rect.Left, rect.Bottom),
|
||||
new DPoint(rect.Right, rect.Bottom),
|
||||
};
|
||||
|
||||
int width = (int)rect.Width;
|
||||
int height = (int)rect.Height;
|
||||
|
||||
switch (image.Mode)
|
||||
{
|
||||
case Mode.Rgb:
|
||||
Matrix<RgbPixel>? rgb = image.Matrix as Matrix<RgbPixel>;
|
||||
results.Add(new Image(DlibDotNet.Dlib.ExtractImage4Points(rgb, dPoint, width, height),
|
||||
Mode.Rgb));
|
||||
break;
|
||||
case Mode.Greyscale:
|
||||
Matrix<byte>? gray = image.Matrix as Matrix<byte>;
|
||||
results.Add(new Image(DlibDotNet.Dlib.ExtractImage4Points(gray, dPoint, width, height),
|
||||
Mode.Greyscale));
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
return results;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Compare a face encoding to a known face encoding and get a euclidean distance for comparison face.
|
||||
/// </summary>
|
||||
/// <param name="faceEncoding">The face encoding to compare.</param>
|
||||
/// <param name="faceToCompare">The face encoding to compare against.</param>
|
||||
/// <returns>The euclidean distance for comparison face. If 0, faces are completely equal.</returns>
|
||||
/// <exception cref="ArgumentNullException"><paramref name="faceEncoding"/> or <paramref name="faceToCompare"/> is null.</exception>
|
||||
/// <exception cref="ObjectDisposedException"><paramref name="faceEncoding"/> or <paramref name="faceToCompare"/> is disposed.</exception>
|
||||
public static double FaceDistance(FaceEncoding faceEncoding, FaceEncoding faceToCompare)
|
||||
{
|
||||
if (faceEncoding == null)
|
||||
throw new ArgumentNullException(nameof(faceEncoding));
|
||||
if (faceToCompare == null)
|
||||
throw new ArgumentNullException(nameof(faceToCompare));
|
||||
|
||||
faceEncoding.ThrowIfDisposed();
|
||||
faceToCompare.ThrowIfDisposed();
|
||||
|
||||
if (faceEncoding.Encoding.Size == 0)
|
||||
return 0;
|
||||
|
||||
using Matrix<double>? diff = faceEncoding.Encoding - faceToCompare.Encoding;
|
||||
return DlibDotNet.Dlib.Length(diff);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Compare an enumerable collection of face encoding to a known face encoding and get an enumerable collection of euclidean distance for comparison face.
|
||||
/// </summary>
|
||||
/// <param name="faceEncodings">The enumerable collection of face encoding to compare.</param>
|
||||
/// <param name="faceToCompare">The face encoding to compare against.</param>
|
||||
/// <returns>The enumerable collection of euclidean distance for comparison face. If 0, faces are completely equal.</returns>
|
||||
/// <exception cref="ArgumentNullException"><paramref name="faceEncodings"/> or <paramref name="faceToCompare"/> is null.</exception>
|
||||
/// <exception cref="ObjectDisposedException"><paramref name="faceToCompare"/> is disposed. Or <paramref name="faceEncodings"/> contains disposed object.</exception>
|
||||
public static IEnumerable<double> FaceDistances(IEnumerable<FaceEncoding> faceEncodings, FaceEncoding faceToCompare)
|
||||
{
|
||||
if (faceEncodings == null)
|
||||
throw new ArgumentNullException(nameof(faceEncodings));
|
||||
if (faceToCompare == null)
|
||||
throw new ArgumentNullException(nameof(faceToCompare));
|
||||
|
||||
faceToCompare.ThrowIfDisposed();
|
||||
|
||||
FaceEncoding[]? array = faceEncodings.ToArray();
|
||||
if (array.Any(encoding => encoding.IsDisposed))
|
||||
throw new ObjectDisposedException($"{nameof(faceEncodings)} contains disposed object.");
|
||||
|
||||
List<double>? results = new();
|
||||
if (array.Length == 0)
|
||||
return results;
|
||||
|
||||
foreach (FaceEncoding? faceEncoding in array)
|
||||
using (Matrix<double>? diff = faceEncoding.Encoding - faceToCompare.Encoding)
|
||||
results.Add(DlibDotNet.Dlib.Length(diff));
|
||||
|
||||
return results;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Returns an enumerable collection of face feature data corresponds to all faces in specified image.
|
||||
/// </summary>
|
||||
/// <param name="image">The image contains faces. The image can contain multiple faces.</param>
|
||||
/// <param name="knownFaceLocation">The enumerable collection of location rectangle for faces. If specified null, method will find face locations.</param>
|
||||
/// <param name="numJitters">The number of times to re-sample the face when calculating encoding.</param>
|
||||
/// <param name="predictorModel">The dimension of vector which be returned from detector.</param>
|
||||
/// <param name="model">The model of face detector to detect in image. If <paramref name="knownFaceLocation"/> is not null, this value is ignored.</param>
|
||||
/// <returns>An enumerable collection of face feature data corresponds to all faces in specified image.</returns>
|
||||
/// <exception cref="ArgumentNullException"><paramref name="image"/> is null.</exception>
|
||||
/// <exception cref="InvalidOperationException"><paramref name="knownFaceLocation"/> contains no elements.</exception>
|
||||
/// <exception cref="ObjectDisposedException"><paramref name="image"/> or this object or custom face landmark detector is disposed.</exception>
|
||||
/// <exception cref="NotSupportedException"><see cref="PredictorModel.Custom"/> is not supported.</exception>
|
||||
public IEnumerable<FaceEncoding> FaceEncodings(Image image,
|
||||
IEnumerable<Location>? knownFaceLocation = null,
|
||||
int numJitters = 1,
|
||||
PredictorModel predictorModel = PredictorModel.Small,
|
||||
Model model = Model.Hog)
|
||||
{
|
||||
if (image == null)
|
||||
throw new ArgumentNullException(nameof(image));
|
||||
if (predictorModel == PredictorModel.Custom)
|
||||
throw new NotSupportedException("FaceRecognition.PredictorModel.Custom is not supported.");
|
||||
|
||||
if (knownFaceLocation != null && !knownFaceLocation.Any())
|
||||
throw new InvalidOperationException($"{nameof(knownFaceLocation)} contains no elements.");
|
||||
|
||||
image.ThrowIfDisposed();
|
||||
ThrowIfDisposed();
|
||||
|
||||
IEnumerable<FullObjectDetection>? rawLandmarks = RawFaceLandmarks(image, knownFaceLocation, predictorModel, model);
|
||||
|
||||
List<FaceEncoding>? results = new();
|
||||
foreach (FullObjectDetection? landmark in rawLandmarks)
|
||||
{
|
||||
FaceEncoding? ret = new(FaceRecognitionModelV1.ComputeFaceDescriptor(_FaceEncoder, image, landmark, numJitters));
|
||||
landmark.Dispose();
|
||||
results.Add(ret);
|
||||
}
|
||||
|
||||
return results;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Returns an enumerable collection of dictionary of face parts locations (eyes, nose, etc) for each face in the image.
|
||||
/// </summary>
|
||||
/// <param name="faceImage">The image contains faces. The image can contain multiple faces.</param>
|
||||
/// <param name="faceLocations">The enumerable collection of location rectangle for faces. If specified null, method will find face locations.</param>
|
||||
/// <param name="predictorModel">The dimension of vector which be returned from detector.</param>
|
||||
/// <param name="model">The model of face detector to detect in image. If <paramref name="faceLocations"/> is not null, this value is ignored.</param>
|
||||
/// <returns>An enumerable collection of dictionary of face parts locations (eyes, nose, etc).</returns>
|
||||
/// <exception cref="ArgumentNullException"><paramref name="faceImage"/> is null.</exception>
|
||||
/// <exception cref="InvalidOperationException"><paramref name="faceLocations"/> contains no elements.</exception>
|
||||
/// <exception cref="ObjectDisposedException"><paramref name="faceImage"/> or this object or custom face landmark detector is disposed.</exception>
|
||||
/// <exception cref="NotSupportedException">The custom face landmark detector is not ready.</exception>
|
||||
public IEnumerable<IDictionary<FacePart, IEnumerable<FacePoint>>> FaceLandmark(Image faceImage,
|
||||
IEnumerable<Location>? faceLocations = null,
|
||||
PredictorModel predictorModel = PredictorModel.Large,
|
||||
Model model = Model.Hog)
|
||||
{
|
||||
if (faceImage == null)
|
||||
throw new ArgumentNullException(nameof(faceImage));
|
||||
|
||||
if (faceLocations != null && !faceLocations.Any())
|
||||
throw new InvalidOperationException($"{nameof(faceLocations)} contains no elements.");
|
||||
|
||||
faceImage.ThrowIfDisposed();
|
||||
ThrowIfDisposed();
|
||||
|
||||
if (predictorModel == PredictorModel.Custom)
|
||||
{
|
||||
if (CustomFaceLandmarkDetector == null)
|
||||
throw new NotSupportedException("The custom face landmark detector is not ready.");
|
||||
|
||||
if (CustomFaceLandmarkDetector.IsDisposed)
|
||||
throw new ObjectDisposedException($"{nameof(CustomFaceLandmarkDetector)}", "The custom face landmark detector is disposed.");
|
||||
}
|
||||
|
||||
FullObjectDetection[]? landmarks = RawFaceLandmarks(faceImage, faceLocations, predictorModel, model).ToArray();
|
||||
IEnumerable<FacePoint[]>? landmarkTuples = landmarks.Select(landmark => Enumerable.Range(0, (int)landmark.Parts)
|
||||
.Select(index => new FacePoint(index, landmark.GetPart((uint)index).X, landmark.GetPart((uint)index).Y)).ToArray());
|
||||
|
||||
List<Dictionary<FacePart, IEnumerable<FacePoint>>>? results = new();
|
||||
|
||||
try
|
||||
{
|
||||
|
||||
// For a definition of each point index, see https://cdn-images-1.medium.com/max/1600/1*AbEg31EgkbXSQehuNJBlWg.png
|
||||
switch (predictorModel)
|
||||
{
|
||||
case PredictorModel.Large:
|
||||
results.AddRange(landmarkTuples.Select(landmarkTuple => new Dictionary<FacePart, IEnumerable<FacePoint>>
|
||||
{
|
||||
{ FacePart.Chin, Enumerable.Range(0,17).Select(i => landmarkTuple[i]).ToArray() },
|
||||
{ FacePart.LeftEyebrow, Enumerable.Range(17,5).Select(i => landmarkTuple[i]).ToArray() },
|
||||
{ FacePart.RightEyebrow, Enumerable.Range(22,5).Select(i => landmarkTuple[i]).ToArray() },
|
||||
{ FacePart.NoseBridge, Enumerable.Range(27,5).Select(i => landmarkTuple[i]).ToArray() },
|
||||
{ FacePart.NoseTip, Enumerable.Range(31,5).Select(i => landmarkTuple[i]).ToArray() },
|
||||
{ FacePart.LeftEye, Enumerable.Range(36,6).Select(i => landmarkTuple[i]).ToArray() },
|
||||
{ FacePart.RightEye, Enumerable.Range(42,6).Select(i => landmarkTuple[i]).ToArray() },
|
||||
{ FacePart.TopLip, Enumerable.Range(48,7).Select(i => landmarkTuple[i])
|
||||
.Concat( new [] { landmarkTuple[64] })
|
||||
.Concat( new [] { landmarkTuple[63] })
|
||||
.Concat( new [] { landmarkTuple[62] })
|
||||
.Concat( new [] { landmarkTuple[61] })
|
||||
.Concat( new [] { landmarkTuple[60] }) },
|
||||
{ FacePart.BottomLip, Enumerable.Range(54,6).Select(i => landmarkTuple[i])
|
||||
.Concat( new [] { landmarkTuple[48] })
|
||||
.Concat( new [] { landmarkTuple[60] })
|
||||
.Concat( new [] { landmarkTuple[67] })
|
||||
.Concat( new [] { landmarkTuple[66] })
|
||||
.Concat( new [] { landmarkTuple[65] })
|
||||
.Concat( new [] { landmarkTuple[64] }) }
|
||||
}));
|
||||
break;
|
||||
case PredictorModel.Small:
|
||||
results.AddRange(landmarkTuples.Select(landmarkTuple => new Dictionary<FacePart, IEnumerable<FacePoint>>
|
||||
{
|
||||
{ FacePart.NoseTip, Enumerable.Range(4,1).Select(i => landmarkTuple[i]).ToArray() },
|
||||
{ FacePart.LeftEye, Enumerable.Range(2,2).Select(i => landmarkTuple[i]).ToArray() },
|
||||
{ FacePart.RightEye, Enumerable.Range(0,2).Select(i => landmarkTuple[i]).ToArray() }
|
||||
}));
|
||||
break;
|
||||
case PredictorModel.Custom:
|
||||
if (CustomFaceLandmarkDetector is null)
|
||||
throw new Exception($"{nameof(CustomFaceLandmarkDetector)} is null");
|
||||
results.AddRange(CustomFaceLandmarkDetector.GetLandmarks(landmarkTuples));
|
||||
break;
|
||||
default:
|
||||
throw new ArgumentOutOfRangeException(nameof(predictorModel), predictorModel, null);
|
||||
}
|
||||
}
|
||||
finally
|
||||
{
|
||||
foreach (FullObjectDetection? landmark in landmarks)
|
||||
landmark.Dispose();
|
||||
}
|
||||
|
||||
return results.ToArray();
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Returns an enumerable collection of face location correspond to all faces in specified image.
|
||||
/// </summary>
|
||||
/// <param name="image">The image contains faces. The image can contain multiple faces.</param>
|
||||
/// <param name="numberOfTimesToUpsample">The number of times to up-sample the image when finding faces.</param>
|
||||
/// <param name="model">The model of face detector to detect in image.</param>
|
||||
/// <returns>An enumerable collection of face location correspond to all faces in specified image.</returns>
|
||||
/// <exception cref="ArgumentNullException"><paramref name="image"/> is null.</exception>
|
||||
/// <exception cref="ObjectDisposedException"><paramref name="image"/> or this object is disposed.</exception>
|
||||
public IEnumerable<Location> FaceLocations(Image image, int numberOfTimesToUpsample = 1, Model model = Model.Hog)
|
||||
{
|
||||
if (image == null)
|
||||
throw new ArgumentNullException(nameof(image));
|
||||
|
||||
image.ThrowIfDisposed();
|
||||
ThrowIfDisposed();
|
||||
|
||||
List<Location>? results = new();
|
||||
foreach (MModRect? face in RawFaceLocations(image, numberOfTimesToUpsample, model))
|
||||
{
|
||||
Location? ret = TrimBound(face.Rect, image.Width, image.Height);
|
||||
double confidence = face.DetectionConfidence;
|
||||
face.Dispose();
|
||||
results.Add(new Location(ret, confidence));
|
||||
}
|
||||
|
||||
return results;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Creates an <see cref="FaceEncoding"/> from the <see cref="double"/> array.
|
||||
/// </summary>
|
||||
/// <param name="encoding">The <see cref="double"/> array contains face encoding data.</param>
|
||||
/// <returns>The <see cref="FaceEncoding"/> this method creates.</returns>
|
||||
/// <exception cref="ArgumentNullException"><paramref name="encoding"/> is null.</exception>
|
||||
/// <exception cref="ArgumentOutOfRangeException"><paramref name="encoding"/> must be 128.</exception>
|
||||
public static FaceEncoding LoadFaceEncoding(double[] encoding)
|
||||
{
|
||||
if (encoding == null)
|
||||
throw new ArgumentNullException(nameof(encoding));
|
||||
if (encoding.Length != 128)
|
||||
{
|
||||
string message = $"{nameof(encoding)}.{nameof(encoding.Length)} must be 128.";
|
||||
throw new ArgumentOutOfRangeException(message);
|
||||
}
|
||||
#pragma warning disable
|
||||
Matrix<double>? matrix = Matrix<double>.CreateTemplateParameterizeMatrix(0, 1);
|
||||
#pragma warning restore
|
||||
matrix.SetSize(128);
|
||||
matrix.Assign(encoding);
|
||||
return new FaceEncoding(matrix);
|
||||
}
|
||||
|
||||
#pragma warning disable CA1416
|
||||
|
||||
/// <summary>
|
||||
/// Creates an <see cref="Image"/> from the specified existing bitmap image.
|
||||
/// </summary>
|
||||
/// <param name="bitmap">The <see cref="Bitmap"/> from which to create the new <see cref="Image"/>.</param>
|
||||
/// <returns>The <see cref="Image"/> this method creates.</returns>
|
||||
/// <exception cref="ArgumentNullException"><paramref name="bitmap"/> is null.</exception>
|
||||
/// <exception cref="ArgumentOutOfRangeException">The specified <see cref="PixelFormat"/> is not supported.</exception>
|
||||
public static Image? LoadImage(Bitmap bitmap)
|
||||
{
|
||||
int width = bitmap.Width;
|
||||
int height = bitmap.Height;
|
||||
System.Drawing.Rectangle rect = new(0, 0, width, height);
|
||||
PixelFormat format = bitmap.PixelFormat;
|
||||
|
||||
Mode mode;
|
||||
int srcChannel;
|
||||
int dstChannel;
|
||||
switch (format)
|
||||
{
|
||||
case PixelFormat.Format8bppIndexed:
|
||||
mode = Mode.Greyscale;
|
||||
srcChannel = 1;
|
||||
dstChannel = 1;
|
||||
break;
|
||||
case PixelFormat.Format24bppRgb:
|
||||
mode = Mode.Rgb;
|
||||
srcChannel = 3;
|
||||
dstChannel = 3;
|
||||
break;
|
||||
case PixelFormat.Format32bppRgb:
|
||||
case PixelFormat.Format32bppArgb:
|
||||
mode = Mode.Rgb;
|
||||
srcChannel = 4;
|
||||
dstChannel = 3;
|
||||
break;
|
||||
default:
|
||||
throw new ArgumentOutOfRangeException($"{nameof(bitmap)}", $"The specified {nameof(PixelFormat)} is not supported.");
|
||||
}
|
||||
|
||||
BitmapData? data = null;
|
||||
|
||||
try
|
||||
{
|
||||
data = bitmap.LockBits(rect, ImageLockMode.ReadOnly, format);
|
||||
|
||||
unsafe
|
||||
{
|
||||
byte[]? array = new byte[width * height * dstChannel];
|
||||
fixed (byte* pArray = &array[0])
|
||||
{
|
||||
byte* dst = pArray;
|
||||
|
||||
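// Copy the bitmap into a tightly packed buffer: 8bpp indexed rows map straight to greyscale bytes, while 24/32bpp rows are converted from GDI+ BGR(A) byte order to the RGB order dlib expects (any alpha byte is dropped).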
switch (srcChannel)
|
||||
{
|
||||
case 1:
|
||||
{
|
||||
IntPtr src = data.Scan0;
|
||||
int stride = data.Stride;
|
||||
|
||||
for (int h = 0; h < height; h++)
|
||||
Marshal.Copy(IntPtr.Add(src, h * stride), array, h * width, width * dstChannel);
|
||||
}
|
||||
break;
|
||||
case 3:
|
||||
case 4:
|
||||
{
|
||||
byte* src = (byte*)data.Scan0;
|
||||
int stride = data.Stride;
|
||||
|
||||
for (int h = 0; h < height; h++)
|
||||
{
|
||||
int srcOffset = h * stride;
|
||||
int dstOffset = h * width * dstChannel;
|
||||
|
||||
for (int w = 0; w < width; w++)
|
||||
{
|
||||
// BGR order to RGB order
|
||||
dst[dstOffset + w * dstChannel + 0] = src[srcOffset + w * srcChannel + 2];
|
||||
dst[dstOffset + w * dstChannel + 1] = src[srcOffset + w * srcChannel + 1];
|
||||
dst[dstOffset + w * dstChannel + 2] = src[srcOffset + w * srcChannel + 0];
|
||||
}
|
||||
}
|
||||
}
|
||||
break;
|
||||
}
|
||||
|
||||
IntPtr ptr = (IntPtr)pArray;
|
||||
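// The temporary buffer has no row padding, so the matrix stride is simply width * channels.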
switch (mode)
|
||||
{
|
||||
case Mode.Rgb:
|
||||
return new Image(new Matrix<RgbPixel>(ptr, height, width, width * 3), Mode.Rgb);
|
||||
case Mode.Greyscale:
|
||||
return new Image(new Matrix<byte>(ptr, height, width, width), Mode.Greyscale);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
finally
|
||||
{
|
||||
if (data != null)
|
||||
bitmap.UnlockBits(data);
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
#pragma warning restore CA1416
|
||||
|
||||
/// <summary>
|
||||
/// Creates an <see cref="Image"/> from the <see cref="byte"/> array.
|
||||
/// </summary>
|
||||
/// <param name="array">The <see cref="byte"/> array contains image data.</param>
|
||||
/// <param name="row">The number of rows in a image data.</param>
|
||||
/// <param name="column">The number of columns in a image data.</param>
|
||||
/// <param name="stride">The stride width in bytes.</param>
|
||||
/// <param name="mode">A image color mode.</param>
|
||||
/// <returns>The <see cref="Image"/> this method creates.</returns>
|
||||
/// <exception cref="ArgumentNullException"><paramref name="array"/> is null.</exception>
|
||||
/// <exception cref="ArgumentOutOfRangeException"><paramref name="row"/> is less than 0.</exception>
|
||||
/// <exception cref="ArgumentOutOfRangeException"><paramref name="column"/> is less than 0.</exception>
|
||||
/// <exception cref="ArgumentOutOfRangeException"><paramref name="stride"/> is less than 0.</exception>
|
||||
/// <exception cref="ArgumentOutOfRangeException"><paramref name="stride"/> is less than <paramref name="column"/>.</exception>
|
||||
/// <exception cref="ArgumentOutOfRangeException"><paramref name="row"/> x <paramref name="stride"/> is less than <see cref="Array.Length"/>.</exception>
|
||||
public static Image? LoadImage(byte[] array, int row, int column, int stride, Mode mode)
|
||||
{
|
||||
if (array == null)
|
||||
throw new ArgumentNullException(nameof(array));
|
||||
if (row < 0)
|
||||
throw new ArgumentOutOfRangeException($"{nameof(row)}", $"{nameof(row)} is less than 0.");
|
||||
if (column < 0)
|
||||
throw new ArgumentOutOfRangeException($"{nameof(column)}", $"{nameof(column)} is less than 0.");
|
||||
if (stride < 0)
|
||||
throw new ArgumentOutOfRangeException($"{nameof(stride)}", $"{nameof(stride)} is less than 0.");
|
||||
if (stride < column)
|
||||
throw new ArgumentOutOfRangeException($"{nameof(stride)}", $"{nameof(stride)} is less than {nameof(column)}.");
|
||||
int min = row * stride;
if (array.Length < min)
throw new ArgumentOutOfRangeException(nameof(array), $"{nameof(array)}.{nameof(Array.Length)} is less than {nameof(row)} x {nameof(stride)}.");
|
||||
|
||||
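// Interpret the caller-supplied buffer as row-major image data with the given stride and wrap it in a matrix of the requested color mode.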
unsafe
|
||||
{
|
||||
fixed (byte* p = &array[0])
|
||||
{
|
||||
IntPtr ptr = (IntPtr)p;
|
||||
switch (mode)
|
||||
{
|
||||
case Mode.Rgb:
|
||||
return new Image(new Matrix<RgbPixel>(ptr, row, column, stride), Mode.Rgb);
|
||||
case Mode.Greyscale:
|
||||
return new Image(new Matrix<byte>(ptr, row, column, stride), Mode.Greyscale);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Creates an <see cref="Image"/> from the unmanaged memory pointer indicates <see cref="byte"/> array image data.
|
||||
/// </summary>
|
||||
/// <param name="array">The unmanaged memory pointer indicates <see cref="byte"/> array image data.</param>
|
||||
/// <param name="row">The number of rows in a image data.</param>
|
||||
/// <param name="column">The number of columns in a image data.</param>
|
||||
/// <param name="stride">The stride width in bytes.</param>
|
||||
/// <param name="mode">A image color mode.</param>
|
||||
/// <returns>The <see cref="Image"/> this method creates.</returns>
|
||||
/// <exception cref="ArgumentException"><paramref name="array"/> is <see cref="IntPtr.Zero"/>.</exception>
|
||||
/// <exception cref="ArgumentOutOfRangeException"><paramref name="row"/> is less than 0.</exception>
|
||||
/// <exception cref="ArgumentOutOfRangeException"><paramref name="column"/> is less than 0.</exception>
|
||||
/// <exception cref="ArgumentOutOfRangeException"><paramref name="stride"/> is less than 0.</exception>
|
||||
/// <exception cref="ArgumentOutOfRangeException"><paramref name="stride"/> is less than <paramref name="column"/>.</exception>
|
||||
public static Image? LoadImage(IntPtr array, int row, int column, int stride, Mode mode)
|
||||
{
|
||||
if (array == IntPtr.Zero)
|
||||
throw new ArgumentException($"{nameof(array)} is {nameof(IntPtr)}.{nameof(IntPtr.Zero)}", nameof(array));
|
||||
if (row < 0)
|
||||
throw new ArgumentOutOfRangeException($"{nameof(row)}", $"{nameof(row)} is less than 0.");
|
||||
if (column < 0)
|
||||
throw new ArgumentOutOfRangeException($"{nameof(column)}", $"{nameof(column)} is less than 0.");
|
||||
if (stride < 0)
|
||||
throw new ArgumentOutOfRangeException($"{nameof(stride)}", $"{nameof(stride)} is less than 0.");
|
||||
if (stride < column)
|
||||
throw new ArgumentOutOfRangeException($"{nameof(stride)}", $"{nameof(stride)} is less than {nameof(column)}.");
|
||||
|
||||
return mode switch
|
||||
{
|
||||
Mode.Rgb => new Image(new Matrix<RgbPixel>(array, row, column, stride), mode),
|
||||
Mode.Greyscale => new Image(new Matrix<byte>(array, row, column, stride), mode),
|
||||
_ => null,
|
||||
};
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Creates an <see cref="Image"/> from the specified path.
|
||||
/// </summary>
|
||||
/// <param name="file">A string that contains the path of the file from which to create the <see cref="Image"/>.</param>
|
||||
/// <param name="mode">A image color mode.</param>
|
||||
/// <returns>The <see cref="Image"/> this method creates.</returns>
|
||||
/// <exception cref="FileNotFoundException">The specified path does not exist.</exception>
|
||||
public static Image? LoadImageFile(string file, Mode mode = Mode.Rgb)
|
||||
{
|
||||
if (!File.Exists(file))
|
||||
throw new FileNotFoundException(file);
|
||||
|
||||
return mode switch
|
||||
{
|
||||
Mode.Rgb => new Image(DlibDotNet.Dlib.LoadImageAsMatrix<RgbPixel>(file), mode),
|
||||
Mode.Greyscale => new Image(DlibDotNet.Dlib.LoadImageAsMatrix<byte>(file), mode),
|
||||
_ => null,
|
||||
};
|
||||
}
|
||||
|
||||
#region Helpers
|
||||
|
||||
private IEnumerable<FullObjectDetection> RawFaceLandmarks(Image faceImage,
|
||||
IEnumerable<Location>? faceLocations = null,
|
||||
PredictorModel predictorModel = PredictorModel.Large,
|
||||
Model model = Model.Hog)
|
||||
{
|
||||
IEnumerable<Location> rects;
|
||||
|
||||
if (faceLocations == null)
|
||||
{
|
||||
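// No locations were supplied, so run the face detector first and convert its rectangles into Location instances.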
List<Location>? list = new();
|
||||
IEnumerable<MModRect>? tmp = RawFaceLocations(faceImage, 1, model);
|
||||
foreach (MModRect? mModRect in tmp)
|
||||
{
|
||||
list.Add(new Location(mModRect.DetectionConfidence, mModRect.Rect.Bottom, mModRect.Rect.Left, mModRect.Rect.Right, mModRect.Rect.Top));
|
||||
mModRect.Dispose();
|
||||
}
|
||||
|
||||
rects = list;
|
||||
}
|
||||
else
|
||||
{
|
||||
rects = faceLocations;
|
||||
}
|
||||
|
||||
List<FullObjectDetection>? results = new();
|
||||
if (predictorModel == PredictorModel.Custom)
|
||||
{
|
||||
if (CustomFaceLandmarkDetector is null)
|
||||
throw new Exception($"{nameof(CustomFaceLandmarkDetector)} is null");
|
||||
foreach (Location? rect in rects)
|
||||
{
|
||||
FullObjectDetection? ret = CustomFaceLandmarkDetector.Detect(faceImage, rect);
|
||||
results.Add(ret);
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
ShapePredictor? posePredictor = _PosePredictor68Point;
|
||||
switch (predictorModel)
|
||||
{
|
||||
case PredictorModel.Small:
|
||||
posePredictor = _PosePredictor5Point;
|
||||
break;
|
||||
}
|
||||
|
||||
foreach (Location? rect in rects)
|
||||
{
|
||||
FullObjectDetection? ret = posePredictor.Detect(faceImage.Matrix, new DlibDotNet.Rectangle(rect.Left, rect.Top, rect.Right, rect.Bottom));
|
||||
results.Add(ret);
|
||||
}
|
||||
}
|
||||
|
||||
return results;
|
||||
}
|
||||
|
||||
private IEnumerable<MModRect> RawFaceLocations(Image faceImage, int numberOfTimesToUpsample = 1, Model model = Model.Hog)
|
||||
{
|
||||
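// Model.Custom delegates to the injected CustomFaceDetector, Model.Cnn uses the CNN detector, and the default path runs the HOG-based _FaceDetector with upscaling.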
switch (model)
|
||||
{
|
||||
case Model.Custom:
|
||||
if (CustomFaceDetector == null)
|
||||
throw new NotSupportedException("The custom face detector is not ready.");
|
||||
return CustomFaceDetector.Detect(faceImage, numberOfTimesToUpsample).Select(rect => new MModRect
|
||||
{
|
||||
Rect = new DlibDotNet.Rectangle(rect.Left, rect.Top, rect.Right, rect.Bottom),
|
||||
DetectionConfidence = rect.Confidence
|
||||
});
|
||||
case Model.Cnn:
|
||||
return CnnFaceDetectionModelV1.Detect(_CnnFaceDetector, faceImage, numberOfTimesToUpsample);
|
||||
default:
|
||||
IEnumerable<Tuple<DlibDotNet.Rectangle, double>>? locations = SimpleObjectDetector.RunDetectorWithUpscale2(_FaceDetector, faceImage, (uint)numberOfTimesToUpsample);
|
||||
return locations.Select(tuple => new MModRect { Rect = tuple.Item1, DetectionConfidence = tuple.Item2 });
|
||||
}
|
||||
}
|
||||
|
||||
private IEnumerable<IEnumerable<MModRect>> RawFaceLocationsBatched(IEnumerable<Image> faceImages, int numberOfTimesToUpsample = 1, int batchSize = 128) => CnnFaceDetectionModelV1.DetectMulti(_CnnFaceDetector, faceImages, numberOfTimesToUpsample, batchSize);
|
||||
|
||||
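// Clamp a detected rectangle so it never extends past the image edges.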
private static Location TrimBound(DlibDotNet.Rectangle location, int width, int height) => new(Math.Max(location.Left, 0), Math.Max(location.Top, 0), Math.Min(location.Right, width), Math.Min(location.Bottom, height));
|
||||
|
||||
#endregion
|
||||
|
||||
#endregion
|
||||
|
||||
#region Methods
|
||||
|
||||
#region Overrides
|
||||
|
||||
/// <summary>
|
||||
/// Releases all unmanaged resources.
|
||||
/// </summary>
|
||||
protected override void DisposeUnmanaged()
|
||||
{
|
||||
base.DisposeUnmanaged();
|
||||
|
||||
_PosePredictor68Point?.Dispose();
|
||||
_PosePredictor5Point?.Dispose();
|
||||
_CnnFaceDetector?.Dispose();
|
||||
_FaceEncoder?.Dispose();
|
||||
_FaceDetector?.Dispose();
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#endregion
|
||||
|
||||
}
|
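A minimal usage sketch for the class above, assuming a FaceRecognition.Create(modelDirectory) factory as in the upstream FaceRecognitionDotNet package; the factory name and both paths below are placeholders, not confirmed by this commit:

using View_by_Distance.FaceRecognitionDotNet;

// Hypothetical model directory containing the .dat files listed in FaceRecognitionModels.cs.
using FaceRecognition faceRecognition = FaceRecognition.Create("C:/Tmp/Phares/Models");
using Image? image = FaceRecognition.LoadImageFile("C:/Tmp/Phares/example.jpg", Mode.Rgb);
if (image is not null)
{
    // Detect faces with the default HOG model and report the clamped bounding boxes.
    foreach (Location location in faceRecognition.FaceLocations(image, numberOfTimesToUpsample: 1, Model.Hog))
        Console.WriteLine($"Face at {location.Left},{location.Top} - {location.Right},{location.Bottom}");
}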
52
FaceRecognitionDotNet/FaceRecognitionDotNet.csproj
Normal file
52
FaceRecognitionDotNet/FaceRecognitionDotNet.csproj
Normal file
@ -0,0 +1,52 @@
|
||||
<Project Sdk="Microsoft.NET.Sdk">
|
||||
<PropertyGroup>
|
||||
<ImplicitUsings>enable</ImplicitUsings>
|
||||
<LangVersion>10.0</LangVersion>
|
||||
<Nullable>enable</Nullable>
|
||||
<OutputType>library</OutputType>
|
||||
<RuntimeIdentifier>win-x64</RuntimeIdentifier>
|
||||
<TargetFramework>net6.0</TargetFramework>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup>
|
||||
<PackageId>Phares.View.by.Distance.FaceRecognitionDotNet</PackageId>
|
||||
<GeneratePackageOnBuild>false</GeneratePackageOnBuild>
|
||||
<Version>5.0.402.104</Version>
|
||||
<Authors>Mike Phares</Authors>
|
||||
<Company>Phares</Company>
|
||||
<IncludeSymbols>true</IncludeSymbols>
|
||||
<SymbolPackageFormat>snupkg</SymbolPackageFormat>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup>
|
||||
<AllowUnsafeBlocks>true</AllowUnsafeBlocks>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup>
|
||||
<IsWindows Condition="'$([System.Runtime.InteropServices.RuntimeInformation]::IsOSPlatform($([System.Runtime.InteropServices.OSPlatform]::Windows)))' == 'true'">true</IsWindows>
|
||||
<IsOSX Condition="'$([System.Runtime.InteropServices.RuntimeInformation]::IsOSPlatform($([System.Runtime.InteropServices.OSPlatform]::OSX)))' == 'true'">true</IsOSX>
|
||||
<IsLinux Condition="'$([System.Runtime.InteropServices.RuntimeInformation]::IsOSPlatform($([System.Runtime.InteropServices.OSPlatform]::Linux)))' == 'true'">true</IsLinux>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(IsWindows)'=='true'">
|
||||
<DefineConstants>Windows</DefineConstants>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(IsOSX)'=='true'">
|
||||
<DefineConstants>OSX</DefineConstants>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(IsLinux)'=='true'">
|
||||
<DefineConstants>Linux</DefineConstants>
|
||||
</PropertyGroup>
|
||||
<ItemGroup Condition="'$(RuntimeIdentifier)' == 'browser-wasm'">
|
||||
<SupportedPlatform Include="browser" />
|
||||
</ItemGroup>
|
||||
<ItemGroup>
|
||||
<PackageReference Include="DlibDotNet" Version="19.21.0.20220724" />
|
||||
<!--PackageReference Include="configuration.MKL" Version="19.21.0.20210302" /-->
|
||||
<!--PackageReference Include="DlibDotNet-WithCUDA" Version="19.17.0.20190429" /-->
|
||||
<!--PackageReference Include="configuration.CUDA92" Version="19.21.0.20210302" /-->
|
||||
<!--PackageReference Include="configuration.CUDA102" Version="19.21.0.20210302" /-->
|
||||
<!--PackageReference Include="configuration.CUDA110" Version="19.21.0.20210302" /-->
|
||||
<!--PackageReference Include="configuration.CUDA111" Version="19.21.0.20210302" /-->
|
||||
<!--PackageReference Include="configuration.CUDA112" Version="19.21.0.20210302" /-->
|
||||
</ItemGroup>
|
||||
<ItemGroup>
|
||||
<ProjectReference Include="..\Shared\View-by-Distance.Shared.csproj" />
|
||||
</ItemGroup>
|
||||
</Project>
|
22
FaceRecognitionDotNet/FaceRecognitionModels.cs
Normal file
22
FaceRecognitionDotNet/FaceRecognitionModels.cs
Normal file
@ -0,0 +1,22 @@
|
||||
namespace View_by_Distance.FaceRecognitionDotNet;
|
||||
|
||||
internal sealed class FaceRecognitionModels
|
||||
{
|
||||
|
||||
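// File names of the pre-trained dlib model data files consumed elsewhere in this library.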
public static string GetPosePredictorModelLocation() => "shape_predictor_68_face_landmarks.dat";
|
||||
|
||||
public static string GetPosePredictorFivePointModelLocation() => "shape_predictor_5_face_landmarks.dat";
|
||||
|
||||
public static string GetFaceRecognitionModelLocation() => "dlib_face_recognition_resnet_model_v1.dat";
|
||||
|
||||
public static string GetCnnFaceDetectorModelLocation() => "mmod_human_face_detector.dat";
|
||||
|
||||
public static string GetPosePredictor194PointModelLocation() => "helen-dataset.dat";
|
||||
|
||||
public static string GetAgeNetworkModelLocation() => "adience-age-network.dat";
|
||||
|
||||
public static string GetGenderNetworkModelLocation() => "utkface-gender-network.dat";
|
||||
|
||||
public static string GetEmotionNetworkModelLocation() => "corrective-reannotation-of-fer-ck-kdef-emotion-network_test_best.dat";
|
||||
|
||||
}
|
127
FaceRecognitionDotNet/Image.cs
Normal file
127
FaceRecognitionDotNet/Image.cs
Normal file
@ -0,0 +1,127 @@
|
||||
using DlibDotNet;
|
||||
using DlibDotNet.Extensions;
|
||||
using System.Drawing;
|
||||
using View_by_Distance.Shared.Models.Stateless;
|
||||
|
||||
namespace View_by_Distance.FaceRecognitionDotNet;
|
||||
|
||||
/// <summary>
|
||||
/// Represents image data. This class cannot be inherited.
|
||||
/// </summary>
|
||||
public sealed class Image : DisposableObject
|
||||
{
|
||||
|
||||
#region Fields
|
||||
|
||||
#endregion
|
||||
|
||||
#region Constructors
|
||||
|
||||
internal Image(MatrixBase matrix, Mode mode)
|
||||
{
|
||||
Matrix = matrix;
|
||||
Mode = mode;
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Properties
|
||||
|
||||
/// <summary>
|
||||
/// Gets the height of the image.
|
||||
/// </summary>
|
||||
/// <exception cref="ObjectDisposedException">This object is disposed.</exception>
|
||||
public int Height
|
||||
{
|
||||
get
|
||||
{
|
||||
ThrowIfDisposed();
|
||||
return Matrix.Rows;
|
||||
}
|
||||
}
|
||||
|
||||
internal MatrixBase Matrix { get; private set; }
|
||||
|
||||
internal Mode Mode { get; }
|
||||
|
||||
/// <summary>
|
||||
/// Gets the width of the image.
|
||||
/// </summary>
|
||||
/// <exception cref="ObjectDisposedException">This object is disposed.</exception>
|
||||
public int Width
|
||||
{
|
||||
get
|
||||
{
|
||||
ThrowIfDisposed();
|
||||
return Matrix.Columns;
|
||||
}
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Methods
|
||||
|
||||
/// <summary>
|
||||
/// Saves this <see cref="Image"/> to the specified file.
|
||||
/// </summary>
|
||||
/// <param name="filename">A string that contains the name of the file to which to save this <see cref="Image"/>.</param>
|
||||
/// <param name="format">The <see cref="ImageFormat"/> for this <see cref="Image"/>.</param>
|
||||
/// <exception cref="ArgumentNullException"><paramref name="filename"/> is null.</exception>
|
||||
/// <exception cref="ObjectDisposedException">This object is disposed.</exception>
|
||||
public void Save(string filename, ImageFormat format)
|
||||
{
|
||||
if (filename == null)
|
||||
throw new ArgumentNullException(nameof(filename));
|
||||
|
||||
ThrowIfDisposed();
|
||||
|
||||
string? directory = Path.GetDirectoryName(filename);
|
||||
if (!Directory.Exists(directory) && !string.IsNullOrWhiteSpace(directory))
|
||||
_ = Directory.CreateDirectory(directory);
|
||||
|
||||
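// Only Bmp, Jpeg, and Png are handled; any other ImageFormat value falls through without writing a file.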
switch (format)
|
||||
{
|
||||
case ImageFormat.Bmp:
|
||||
DlibDotNet.Dlib.SaveBmp(Matrix, filename);
|
||||
break;
|
||||
case ImageFormat.Jpeg:
|
||||
DlibDotNet.Dlib.SaveJpeg(Matrix, filename);
|
||||
break;
|
||||
case ImageFormat.Png:
|
||||
DlibDotNet.Dlib.SavePng(Matrix, filename);
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Converts this <see cref="Image"/> to a GDI+ <see cref="Bitmap"/>.
|
||||
/// </summary>
|
||||
/// <returns>A <see cref="Bitmap"/> that represents the converted <see cref="Image"/>.</returns>
|
||||
/// <exception cref="ObjectDisposedException">This object is disposed.</exception>
|
||||
/// <exception cref="NotSupportedException">A Greyscale image is not supported.</exception>
|
||||
public Bitmap ToBitmap()
|
||||
{
|
||||
ThrowIfDisposed();
|
||||
|
||||
if (Mode == Mode.Greyscale)
|
||||
throw new NotSupportedException();
|
||||
|
||||
return ((Matrix<RgbPixel>)Matrix).ToBitmap();
|
||||
}
|
||||
|
||||
#region Overrides
|
||||
|
||||
/// <summary>
|
||||
/// Releases all unmanaged resources.
|
||||
/// </summary>
|
||||
protected override void DisposeUnmanaged()
|
||||
{
|
||||
base.DisposeUnmanaged();
|
||||
Matrix?.Dispose();
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#endregion
|
||||
|
||||
}
|
49
FaceRecognitionDotNet/ModelParameter.cs
Normal file
49
FaceRecognitionDotNet/ModelParameter.cs
Normal file
@ -0,0 +1,49 @@
|
||||
namespace View_by_Distance.FaceRecognitionDotNet;
|
||||
|
||||
/// <summary>
|
||||
/// Describes the model binary data. This class cannot be inherited.
|
||||
/// </summary>
|
||||
public sealed class ModelParameter
|
||||
{
|
||||
|
||||
#region Properties
|
||||
|
||||
/// <summary>
|
||||
/// Gets or sets the binary data of model for 68 points face landmarks.
|
||||
/// </summary>
|
||||
public byte[]? PosePredictor68FaceLandmarksModel
|
||||
{
|
||||
get;
|
||||
set;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Gets or sets the binary data of model for 5 points face landmarks.
|
||||
/// </summary>
|
||||
public byte[]? PosePredictor5FaceLandmarksModel
|
||||
{
|
||||
get;
|
||||
set;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Gets or sets the binary data of model for face encoding.
|
||||
/// </summary>
|
||||
public byte[]? FaceRecognitionModel
|
||||
{
|
||||
get;
|
||||
set;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Gets or sets the binary data of model for face detector by using CNN.
|
||||
/// </summary>
|
||||
public byte[]? CnnFaceDetectorModel
|
||||
{
|
||||
get;
|
||||
set;
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
}
|
114
FaceRecognitionDotNet/Point.cs
Normal file
114
FaceRecognitionDotNet/Point.cs
Normal file
@ -0,0 +1,114 @@
|
||||
namespace View_by_Distance.FaceRecognitionDotNet;
|
||||
|
||||
/// <summary>
|
||||
/// Represents an ordered pair of integer x- and y-coordinates that defines a point in a two-dimensional plane.
|
||||
/// </summary>
|
||||
public struct Point : IEquatable<Point>
|
||||
{
|
||||
|
||||
#region Constructors
|
||||
|
||||
/// <summary>
|
||||
/// Initializes a new instance of the <see cref="Point"/> structure with the specified coordinates.
|
||||
/// </summary>
|
||||
/// <param name="x">The horizontal position of the point.</param>
|
||||
/// <param name="y">The vertical position of the point.</param>
|
||||
public Point(int x, int y)
|
||||
{
|
||||
X = x;
|
||||
Y = y;
|
||||
}
|
||||
|
||||
internal Point(DlibDotNet.Point point)
|
||||
{
|
||||
X = point.X;
|
||||
Y = point.Y;
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Properties
|
||||
|
||||
/// <summary>
|
||||
/// Gets the x-coordinate of this <see cref="Point"/>.
|
||||
/// </summary>
|
||||
public int X
|
||||
{
|
||||
get;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Gets the y-coordinate of this <see cref="Point"/>.
|
||||
/// </summary>
|
||||
public int Y
|
||||
{
|
||||
get;
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Methods
|
||||
|
||||
/// <summary>
|
||||
/// Compares two <see cref="Point"/> structures for equality.
|
||||
/// </summary>
|
||||
/// <param name="other">The point to compare to this instance.</param>
|
||||
/// <returns><code>true</code> if both <see cref="Point"/> structures contain the same <see cref="X"/> and <see cref="Y"/> values; otherwise, <code>false</code>.</returns>
|
||||
public bool Equals(Point other)
|
||||
{
|
||||
return X == other.X &&
|
||||
Y == other.Y;
|
||||
}
|
||||
|
||||
#region overrides
|
||||
|
||||
/// <summary>
|
||||
/// Determines whether the specified <see cref="Object"/> is a <see cref="Point"/> and whether it contains the same coordinates as this <see cref="Point"/>.
|
||||
/// </summary>
|
||||
/// <param name="obj">The <see cref="Object"/> to compare.</param>
|
||||
/// <returns><code>true</code> if <paramref name="obj"/> is a <see cref="Point"/> and contains the same <see cref="X"/> and <see cref="Y"/> values as this <see cref="Point"/>; otherwise, <code>false</code>.</returns>
|
||||
public override bool Equals(object? obj) => obj is Point point && Equals(point);
|
||||
|
||||
/// <summary>
|
||||
/// Returns the hash code for this <see cref="Point"/>.
|
||||
/// </summary>
|
||||
/// <returns>The hash code for this <see cref="Point"/> structure.</returns>
|
||||
public override int GetHashCode()
|
||||
{
|
||||
int hashCode = 1861411795;
|
||||
hashCode = hashCode * -1521134295 + X.GetHashCode();
|
||||
hashCode = hashCode * -1521134295 + Y.GetHashCode();
|
||||
return hashCode;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Compares two <see cref="Point"/> structures for equality.
|
||||
/// </summary>
|
||||
/// <param name="point1">The first <see cref="Point"/> structure to compare.</param>
|
||||
/// <param name="point2">The second <see cref="Point"/> structure to compare.</param>
|
||||
/// <returns><code>true</code> if both the <see cref="X"/> and <see cref="Y"/> coordinates of <paramref name="point1"/> and <paramref name="point2"/> are equal; otherwise, <code>false</code>.</returns>
|
||||
public static bool operator ==(Point point1, Point point2) => point1.Equals(point2);
|
||||
|
||||
/// <summary>
|
||||
/// Compares two <see cref="Point"/> structures for inequality.
|
||||
/// </summary>
|
||||
/// <param name="point1">The first <see cref="Point"/> structure to compare.</param>
|
||||
/// <param name="point2">The second <see cref="Point"/> structure to compare.</param>
|
||||
/// <returns><code>true</code> if <paramref name="point1"/> and <paramref name="point2"/> have different <see cref="X"/> or <see cref="Y"/> coordinates; <code>false</code> if <paramref name="point1"/> and <paramref name="point2"/> have the same <see cref="X"/> and <see cref="Y"/> coordinates.</returns>
|
||||
|
||||
|
||||
public static bool operator !=(Point point1, Point point2) => !(point1 == point2);
|
||||
|
||||
#endregion
|
||||
|
||||
#endregion
|
||||
|
||||
}
|
@ -1,15 +1,16 @@
|
||||
using FaceRecognitionDotNet;
|
||||
using Microsoft.Extensions.Configuration;
|
||||
using Microsoft.Extensions.Configuration;
|
||||
using Phares.Shared;
|
||||
using ShellProgressBar;
|
||||
using System.Drawing.Imaging;
|
||||
using System.Text.Json;
|
||||
using View_by_Distance.FaceRecognitionDotNet;
|
||||
using View_by_Distance.Instance.Models;
|
||||
using View_by_Distance.Metadata.Models;
|
||||
using View_by_Distance.Property.Models;
|
||||
using View_by_Distance.Resize.Models;
|
||||
using View_by_Distance.Shared.Models;
|
||||
using View_by_Distance.Shared.Models.Methods;
|
||||
using View_by_Distance.Shared.Models.Stateless;
|
||||
|
||||
namespace View_by_Distance.Instance;
|
||||
|
||||
@ -548,7 +549,7 @@ public class DlibDotNet
|
||||
}
|
||||
}
|
||||
|
||||
private void FullDoWork(Property.Models.Configuration configuration, string[] juliePhares, Model model, PredictorModel predictorModel, string argZero, Person[] people, PropertyLogic propertyLogic, List<PropertyHolder[]> propertyHolderCollections)
|
||||
private void FullDoWork(Property.Models.Configuration configuration, string[] juliePhares, Model model, PredictorModel predictorModel, string argZero, Dictionary<string, List<Person>> peopleCollection, PropertyLogic propertyLogic, List<PropertyHolder[]> propertyHolderCollections)
|
||||
{
|
||||
if (_Log is null)
|
||||
throw new Exception($"{nameof(_Log)} is null!");
|
||||
@ -673,8 +674,10 @@ public class DlibDotNet
|
||||
_Exceptions.Add(sourceDirectory);
|
||||
if (exceptionCount == 0 && _ArgZeroIsConfigurationRootDirectory)
|
||||
WriteGroup(configuration, propertyLogic, propertyCollection, metadataCollection, faceCollections, resizeKeyValuePairs, sourceDirectory, filteredPropertyHolderCollection);
|
||||
if (exceptionCount == 0 && _Configuration.LoadOrCreateThenSaveImageFacesResults.Value && _Configuration.SaveShortcuts.HasValue && _Configuration.SaveShortcuts.Value)
|
||||
_Faces.SaveShortcuts(configuration, juliePhares, model, predictorModel, people, propertyLogic, outputResolution, filteredPropertyHolderCollection, propertyCollection, faceCollections);
|
||||
if (exceptionCount == 0)
|
||||
propertyLogic.AddToPropertyLogicAllCollection(filteredPropertyHolderCollection);
|
||||
if (exceptionCount == 0 && _Configuration.LoadOrCreateThenSaveImageFacesResults.Value && _Configuration.SaveShortcuts.HasValue && _Configuration.SaveShortcuts.Value && propertyLogic.NamedFaceInfoDeterministicHashCodeIndices.Any())
|
||||
_Faces.SaveShortcuts(configuration, juliePhares, model, predictorModel, propertyLogic, peopleCollection, outputResolution, filteredPropertyHolderCollection, propertyCollection, faceCollections);
|
||||
if (exceptionCount == 0 && _Configuration.LoadOrCreateThenSaveDistanceResults.HasValue && _Configuration.LoadOrCreateThenSaveDistanceResults.Value)
|
||||
_Distance.LoadOrCreateThenSaveDistanceResults(configuration, model, predictorModel, sourceDirectory, outputResolution, sourceDirectoryChanges, filteredPropertyHolderCollection, faceCollections);
|
||||
if (_Resize.AngleBracketCollection.Any())
|
||||
@ -696,6 +699,7 @@ public class DlibDotNet
|
||||
}
|
||||
if (_ArgZeroIsConfigurationRootDirectory && outputResolution == _Configuration.OutputResolutions[0])
|
||||
{
|
||||
propertyLogic.SaveAllCollection();
|
||||
if (!_Configuration.LoadOrCreateThenSaveImageFacesResults.Value && !_Configuration.LoadOrCreateThenSaveDirectoryDistanceResults.Value && !_Configuration.LoadOrCreateThenSaveDistanceResults.Value)
|
||||
break;
|
||||
if (_Exceptions.Count == 0)
|
||||
@ -729,8 +733,9 @@ public class DlibDotNet
|
||||
private void Search(Property.Models.Configuration configuration, string[] juliePhares, bool reverse, Model model, PredictorModel predictorModel, string argZero, Person[] people)
|
||||
{
|
||||
PropertyLogic propertyLogic = GetPropertyLogic();
|
||||
Dictionary<string, List<Person>> peopleCollection = A2_People.Convert(people);
|
||||
List<PropertyHolder[]> propertyHolderCollections = Property.Models.Stateless.A_Property.Get(configuration, reverse, model.ToString(), predictorModel.ToString(), propertyLogic);
|
||||
FullDoWork(configuration, juliePhares, model, predictorModel, argZero, people, propertyLogic, propertyHolderCollections);
|
||||
FullDoWork(configuration, juliePhares, model, predictorModel, argZero, peopleCollection, propertyLogic, propertyHolderCollections);
|
||||
}
|
||||
|
||||
internal void RenameQueue(Property.Models.Configuration configuration, Model model, PredictorModel predictorModel) => _Rename.RenameQueue(configuration, model, predictorModel);
|
||||
|
@ -52,31 +52,12 @@
|
||||
<PackageReference Include="ShellProgressBar" Version="5.1.0" />
|
||||
<PackageReference Include="WindowsShortcutFactory" Version="1.0.1" />
|
||||
</ItemGroup>
|
||||
<ItemGroup>
|
||||
<PackageReference Include="DlibDotNet" Version="19.21.0.20210302" />
|
||||
<!--PackageReference Include="configuration.MKL" Version="19.21.0.20210302" /-->
|
||||
<!--PackageReference Include="DlibDotNet-WithCUDA" Version="19.17.0.20190429" /-->
|
||||
<!--PackageReference Include="configuration.CUDA92" Version="19.21.0.20210302" /-->
|
||||
<!--PackageReference Include="configuration.CUDA102" Version="19.21.0.20210302" /-->
|
||||
<!--PackageReference Include="configuration.CUDA110" Version="19.21.0.20210302" /-->
|
||||
<!--PackageReference Include="configuration.CUDA111" Version="19.21.0.20210302" /-->
|
||||
<!--PackageReference Include="configuration.CUDA112" Version="19.21.0.20210302" /-->
|
||||
</ItemGroup>
|
||||
<ItemGroup>
|
||||
<PackageReference Include="FaceRecognitionDotNet" Version="1.3.0.4" />
|
||||
<!--PackageReference Include="FaceRecognitionDotNet.MKL" Version="1.3.0.4" /-->
|
||||
<!--PackageReference Include="FaceRecognitionDotNet-WithCUDA" Version="1.2.3.10" /-->
|
||||
<!--PackageReference Include="FaceRecognitionDotNet.CUDA92" Version="1.3.0.4" /-->
|
||||
<!--PackageReference Include="FaceRecognitionDotNet.CUDA102" Version="1.3.0.4" /-->
|
||||
<!--PackageReference Include="FaceRecognitionDotNet.CUDA110" Version="1.3.0.4" /-->
|
||||
<!--PackageReference Include="FaceRecognitionDotNet.CUDA111" Version="1.3.0.4" /-->
|
||||
<!--PackageReference Include="FaceRecognitionDotNet.CUDA112" Version="1.3.0.4" /-->
|
||||
</ItemGroup>
|
||||
<ItemGroup>
|
||||
<ProjectReference Include="..\Shared\View-by-Distance.Shared.csproj" />
|
||||
<ProjectReference Include="..\Property\Property.csproj" />
|
||||
<ProjectReference Include="..\Metadata\Metadata.csproj" />
|
||||
<ProjectReference Include="..\Resize\Resize.csproj" />
|
||||
<ProjectReference Include="..\FaceRecognitionDotNet\FaceRecognitionDotNet.csproj" />
|
||||
<ProjectReference Include="..\Property-Compare\Property-Compare.csproj" />
|
||||
</ItemGroup>
|
||||
<ItemGroup>
|
||||
|
@ -76,4 +76,18 @@ internal class A2_People
|
||||
return results.ToArray();
|
||||
}
|
||||
|
||||
internal static Dictionary<string, List<Person>> Convert(Person[] people)
|
||||
{
|
||||
Dictionary<string, List<Person>> results = new();
|
||||
string personKey;
|
||||
foreach (Person person in people)
|
||||
{
|
||||
personKey = Shared.Models.Stateless.Methods.IPersonBirthday.GetFormatted(person.Birthday);
|
||||
if (!results.ContainsKey(personKey))
|
||||
results.Add(personKey, new List<Person>());
|
||||
results[personKey].Add(person);
|
||||
}
|
||||
return results;
|
||||
}
|
||||
|
||||
}
|
@ -1,9 +1,9 @@
|
||||
using FaceRecognitionDotNet;
|
||||
using System.Drawing;
|
||||
using System.Text.Json;
|
||||
using View_by_Distance.Metadata.Models;
|
||||
using View_by_Distance.Property.Models;
|
||||
using View_by_Distance.Resize.Models;
|
||||
using View_by_Distance.Shared.Models.Stateless;
|
||||
|
||||
namespace View_by_Distance.Instance.Models;
|
||||
|
||||
@ -33,7 +33,7 @@ internal class D2_FaceLandmarks
|
||||
|
||||
#pragma warning disable CA1416
|
||||
|
||||
private static Bitmap RotateBitmap(System.Drawing.Image image, float angle)
|
||||
private static Bitmap RotateBitmap(Image image, float angle)
|
||||
{
|
||||
Bitmap result;
|
||||
Bitmap bitmap = new(image);
|
||||
@ -63,7 +63,7 @@ internal class D2_FaceLandmarks
|
||||
rotatedImageFileFullName = imageFiles[i][1];
|
||||
try
|
||||
{
|
||||
using (System.Drawing.Image image = System.Drawing.Image.FromFile(resizedFileInfo.FullName))
|
||||
using (Image image = Image.FromFile(resizedFileInfo.FullName))
|
||||
{
|
||||
using Graphics graphic = Graphics.FromImage(image);
|
||||
if (face.FaceLandmarks is null || !face.FaceLandmarks.Any())
|
||||
@ -92,7 +92,7 @@ internal class D2_FaceLandmarks
|
||||
}
|
||||
if (face.α.HasValue)
|
||||
{
|
||||
using System.Drawing.Image image = System.Drawing.Image.FromFile(resizedFileInfo.FullName);
|
||||
using Image image = Image.FromFile(resizedFileInfo.FullName);
|
||||
rotated = RotateBitmap(image, (float)face.α.Value);
|
||||
if (rotated is not null)
|
||||
{
|
||||
|
@ -1,14 +1,15 @@
|
||||
using FaceRecognitionDotNet;
|
||||
using System.Drawing;
|
||||
using System.Drawing.Drawing2D;
|
||||
using System.Text.Json;
|
||||
using System.Text.Json.Serialization;
|
||||
using System.Text.RegularExpressions;
|
||||
using View_by_Distance.FaceRecognitionDotNet;
|
||||
using View_by_Distance.Metadata.Models;
|
||||
using View_by_Distance.Property.Models;
|
||||
using View_by_Distance.Resize.Models;
|
||||
using View_by_Distance.Shared.Models;
|
||||
using View_by_Distance.Shared.Models.Methods;
|
||||
using View_by_Distance.Shared.Models.Stateless;
|
||||
using WindowsShortcutFactory;
|
||||
|
||||
namespace View_by_Distance.Instance.Models;
|
||||
@ -32,8 +33,8 @@ public class D_Face : Shared.Models.Properties.IFace, IFace
|
||||
protected double? _Α;
|
||||
protected DateTime _DateTime;
|
||||
protected Shared.Models.FaceEncoding _FaceEncoding;
|
||||
protected Dictionary<string, Shared.Models.FacePoint[]> _FaceLandmarks;
|
||||
protected Shared.Models.Location _Location;
|
||||
protected Dictionary<string, FacePoint[]> _FaceLandmarks;
|
||||
protected Location _Location;
|
||||
protected int? _LocationIndex;
|
||||
protected OutputResolution _OutputResolution;
|
||||
protected bool _Populated;
|
||||
@ -41,9 +42,9 @@ public class D_Face : Shared.Models.Properties.IFace, IFace
|
||||
public double? α => _Α;
|
||||
public DateTime DateTime => _DateTime;
|
||||
public Shared.Models.FaceEncoding FaceEncoding => _FaceEncoding;
|
||||
public Dictionary<string, Shared.Models.FacePoint[]> FaceLandmarks => _FaceLandmarks;
|
||||
public Dictionary<string, FacePoint[]> FaceLandmarks => _FaceLandmarks;
|
||||
public OutputResolution OutputResolution => _OutputResolution;
|
||||
public Shared.Models.Location Location => _Location;
|
||||
public Location Location => _Location;
|
||||
public int? LocationIndex => _LocationIndex;
|
||||
public bool Populated => _Populated;
|
||||
public string RelativePath => _RelativePath;
|
||||
@ -51,7 +52,7 @@ public class D_Face : Shared.Models.Properties.IFace, IFace
|
||||
#nullable disable
|
||||
|
||||
[JsonConstructor]
|
||||
public D_Face(double? α, DateTime dateTime, Shared.Models.FaceEncoding faceEncoding, Dictionary<string, Shared.Models.FacePoint[]> faceLandmarks, Shared.Models.Location location, int? locationIndex, OutputResolution outputResolution, bool populated, string relativePath)
|
||||
public D_Face(double? α, DateTime dateTime, Shared.Models.FaceEncoding faceEncoding, Dictionary<string, FacePoint[]> faceLandmarks, Location location, int? locationIndex, OutputResolution outputResolution, bool populated, string relativePath)
|
||||
{
|
||||
_Α = α;
|
||||
_DateTime = dateTime;
|
||||
@ -76,7 +77,7 @@ public class D_Face : Shared.Models.Properties.IFace, IFace
|
||||
_WriteIndentedJsonSerializerOptions = new JsonSerializerOptions { WriteIndented = true };
|
||||
}
|
||||
|
||||
private D_Face(Shared.Models.Location location)
|
||||
private D_Face(Location location)
|
||||
{
|
||||
_Α = α;
|
||||
_DateTime = DateTime.MinValue;
|
||||
@ -102,12 +103,12 @@ public class D_Face : Shared.Models.Properties.IFace, IFace
|
||||
_RelativePath = string.Empty;
|
||||
}
|
||||
|
||||
private D_Face(A_Property property, int outputResolutionWidth, int outputResolutionHeight, int outputResolutionOrientation, string relativePath, int? i, Shared.Models.Location location)
|
||||
private D_Face(A_Property property, int outputResolutionWidth, int outputResolutionHeight, int outputResolutionOrientation, string relativePath, int? i, Location location)
|
||||
{
|
||||
DateTime?[] dateTimes;
|
||||
dateTimes = new DateTime?[] { property.CreationTime, property.LastWriteTime, property.DateTime, property.DateTimeDigitized, property.DateTimeOriginal, property.GPSDateStamp };
|
||||
_DateTime = (from l in dateTimes where l.HasValue select l.Value).Min();
|
||||
_FaceLandmarks = new Dictionary<string, Shared.Models.FacePoint[]>();
|
||||
_FaceLandmarks = new Dictionary<string, FacePoint[]>();
|
||||
_OutputResolution = new(outputResolutionHeight, outputResolutionOrientation, outputResolutionWidth);
|
||||
_Location = location;
|
||||
_LocationIndex = i;
|
||||
@ -223,15 +224,15 @@ public class D_Face : Shared.Models.Properties.IFace, IFace
|
||||
int width;
|
||||
int height;
|
||||
Graphics graphics;
|
||||
Rectangle rectangle;
|
||||
Location location;
|
||||
Bitmap preRotated;
|
||||
Shared.Models.Location location;
|
||||
Rectangle rectangle;
|
||||
using Bitmap source = new(resizedFileInfo.FullName);
|
||||
for (int i = 0; i < faceCollection.Count; i++)
|
||||
{
|
||||
if (!faceCollection[i].Populated || faceCollection[i]?.Location is null)
|
||||
continue;
|
||||
location = new Shared.Models.Location(faceCollection[i].Location.Confidence,
|
||||
location = new Location(faceCollection[i].Location.Confidence,
|
||||
faceCollection[i].Location.Bottom,
|
||||
faceCollection[i].Location.Left,
|
||||
faceCollection[i].Location.Right,
|
||||
@ -255,7 +256,7 @@ public class D_Face : Shared.Models.Properties.IFace, IFace
|
||||
throw new Exception();
|
||||
if (_Configuration.NumJitters is null)
|
||||
throw new Exception();
|
||||
FaceRecognitionDotNet.Location[] locations;
|
||||
Location[] locations;
|
||||
const int numberOfTimesToUpSample = 1;
|
||||
FaceRecognitionDotNet.Image? unknownImage = null;
|
||||
if (resizedFileInfo.Exists)
|
||||
@ -284,18 +285,18 @@ public class D_Face : Shared.Models.Properties.IFace, IFace
|
||||
int rightEyeY;
|
||||
Bitmap rotated;
|
||||
string faceFile;
|
||||
Location location;
|
||||
Bitmap preRotated;
|
||||
Graphics graphics;
|
||||
D_Face? face = null;
|
||||
Rectangle rectangle;
|
||||
double[] rawEncoding;
|
||||
Shared.Models.Location location;
|
||||
FaceRecognitionDotNet.Image knownImage;
|
||||
FaceRecognitionDotNet.Image? knownImage;
|
||||
FaceRecognitionDotNet.Image? rotatedImage;
|
||||
Shared.Models.FaceEncoding faceEncoding;
|
||||
FaceRecognitionDotNet.Image rotatedImage;
|
||||
FaceRecognitionDotNet.FaceEncoding[] faceEncodings;
|
||||
IEnumerable<FaceRecognitionDotNet.FacePoint> facePoints;
|
||||
IDictionary<FacePart, IEnumerable<FaceRecognitionDotNet.FacePoint>>[] faceLandmarks;
|
||||
IEnumerable<FacePoint> facePoints;
|
||||
IDictionary<FacePart, IEnumerable<FacePoint>>[] faceLandmarks;
|
||||
using Bitmap source = unknownImage.ToBitmap();
|
||||
padding = (int)((source.Width + source.Height) / 2 * .01);
|
||||
for (int i = 0; i < locations.Length; i++)
|
||||
@ -319,27 +320,31 @@ public class D_Face : Shared.Models.Properties.IFace, IFace
|
||||
// source.Save(Path.Combine(_Configuration.RootDirectory, "source.jpg"));
|
||||
// preRotated.Save(Path.Combine(_Configuration.RootDirectory, $"{p} - preRotated.jpg"));
|
||||
using (knownImage = FaceRecognition.LoadImage(preRotated))
|
||||
faceLandmarks = faceRecognition.FaceLandmark(knownImage, faceLocations: null, _PredictorModel, _Model).ToArray();
|
||||
if (knownImage is null)
|
||||
throw new Exception($"{nameof(knownImage)} is null");
|
||||
faceLandmarks = faceRecognition.FaceLandmark(knownImage, faceLocations: null, _PredictorModel, _Model).ToArray();
|
||||
if (faceLandmarks.Length == 0 && p < _Configuration.PaddingLoops.Value)
|
||||
continue;
|
||||
else if (faceLandmarks.Length != 1)
|
||||
continue;
|
||||
foreach (KeyValuePair<FacePart, IEnumerable<FaceRecognitionDotNet.FacePoint>> keyValuePair in faceLandmarks[0])
|
||||
face.FaceLandmarks.Add(keyValuePair.Key.ToString(), (from l in keyValuePair.Value select new Shared.Models.FacePoint(l.Index, l.Point.X, l.Point.Y)).ToArray());
|
||||
foreach (KeyValuePair<FacePart, IEnumerable<FacePoint>> keyValuePair in faceLandmarks[0])
|
||||
face.FaceLandmarks.Add(keyValuePair.Key.ToString(), keyValuePair.Value.ToArray());
|
||||
if (!faceLandmarks[0].ContainsKey(FacePart.LeftEye) || !faceLandmarks[0].ContainsKey(FacePart.RightEye))
|
||||
continue;
|
||||
facePoints = faceLandmarks[0][FacePart.LeftEye];
|
||||
leftEyeX = (int)(from l in facePoints select l.Point.X).Average();
|
||||
leftEyeY = (int)(from l in facePoints select l.Point.Y).Average();
|
||||
leftEyeX = (int)(from l in facePoints select l.X).Average();
|
||||
leftEyeY = (int)(from l in facePoints select l.Y).Average();
|
||||
facePoints = faceLandmarks[0][FacePart.RightEye];
|
||||
rightEyeX = (int)(from l in facePoints select l.Point.X).Average();
|
||||
rightEyeY = (int)(from l in facePoints select l.Point.Y).Average();
|
||||
rightEyeX = (int)(from l in facePoints select l.X).Average();
|
||||
rightEyeY = (int)(from l in facePoints select l.Y).Average();
|
||||
α = Shared.Models.Stateless.Methods.IFace.Getα(rightEyeX, leftEyeX, rightEyeY, leftEyeY);
|
||||
using (rotated = RotateBitmap(preRotated, (float)α.Value))
|
||||
{
|
||||
// rotated.Save(Path.Combine(_Configuration.RootDirectory, $"{p} - rotated.jpg"));
|
||||
using (rotatedImage = FaceRecognition.LoadImage(rotated))
|
||||
faceEncodings = faceRecognition.FaceEncodings(rotatedImage, knownFaceLocation: null, _Configuration.NumJitters.Value, _PredictorModel, _Model).ToArray();
|
||||
if (rotatedImage is null)
|
||||
throw new Exception($"{nameof(rotatedImage)} is null");
|
||||
faceEncodings = faceRecognition.FaceEncodings(rotatedImage, knownFaceLocation: null, _Configuration.NumJitters.Value, _PredictorModel, _Model).ToArray();
|
||||
if (faceEncodings.Length == 0 && p < _Configuration.PaddingLoops.Value)
|
||||
continue;
|
||||
else if (faceEncodings.Length != 1)
|
||||
@ -495,29 +500,26 @@ public class D_Face : Shared.Models.Properties.IFace, IFace
|
||||
SaveFaces(faceCollection, resizedFileInfo, imageFiles);
|
||||
}
|
||||
|
||||
internal void SaveShortcuts(Property.Models.Configuration configuration, string[] juliePhares, Model model, PredictorModel predictorModel, Person[] people, PropertyLogic propertyLogic, string outputResolution, PropertyHolder[] filteredPropertyHolderCollection, List<A_Property> propertyCollection, List<List<D_Face>> faceCollections)
|
||||
internal void SaveShortcuts(Property.Models.Configuration configuration, string[] juliePhares, Model model, PredictorModel predictorModel, PropertyLogic propertyLogic, Dictionary<string, List<Person>> peopleCollection, string outputResolution, PropertyHolder[] filteredPropertyHolderCollection, List<A_Property> propertyCollection, List<List<D_Face>> faceCollections)
|
||||
{
|
||||
int oldIndex;
|
||||
string[] keys;
|
||||
string fileName;
|
||||
string fullName;
|
||||
string personKey;
|
||||
string directory;
|
||||
bool? isWrongYear;
|
||||
FileInfo fileInfo;
|
||||
TimeSpan timeSpan;
|
||||
DateTime? birthDate;
|
||||
string copyDirectory;
|
||||
string? relativePath;
|
||||
string isWrongYearFlag;
|
||||
string subDirectoryName;
|
||||
DateTime minimumDateTime;
|
||||
List<D_Face> faceCollection;
|
||||
PropertyHolder propertyHolder;
|
||||
WindowsShortcut windowsShortcut;
|
||||
const string pattern = @"[\\,\/,\:,\*,\?,\"",\<,\>,\|]";
|
||||
Dictionary<string, List<Person>> peopleCollection = new();
|
||||
foreach (Person person in people)
|
||||
{
|
||||
personKey = Shared.Models.Stateless.Methods.IPersonBirthday.GetFormatted(person.Birthday);
|
||||
if (!peopleCollection.ContainsKey(personKey))
|
||||
peopleCollection.Add(personKey, new List<Person>());
|
||||
peopleCollection[personKey].Add(person);
|
||||
}
|
||||
string dFacesContentDirectory = Path.Combine(Property.Models.Stateless.IResult.GetResultsFullGroupDirectory(configuration, model.ToString(), predictorModel.ToString(), nameof(D_Face), outputResolution, includeResizeGroup: true, includeModel: true, includePredictorModel: true), "(_)");
|
||||
for (int i = 0; i < filteredPropertyHolderCollection.Length; i++)
|
||||
{
|
||||
@ -532,37 +534,42 @@ public class D_Face : Shared.Models.Properties.IFace, IFace
|
||||
continue;
|
||||
if (propertyHolder.Property?.Id is null || propertyHolder.MinimumDateTime is null || propertyHolder.ResizedFileInfo is null)
|
||||
continue;
|
||||
if (propertyHolder.Property.Indices.Length < 2)
|
||||
directory = Path.Combine(dFacesContentDirectory, $"New{relativePath[2..]}");
|
||||
if (!propertyLogic.NamedFaceInfoDeterministicHashCodeIndices.ContainsKey(propertyHolder.Property.Id.Value))
|
||||
directory = Path.Combine(dFacesContentDirectory, $"Unnamed{relativePath[2..]}");
|
||||
else
|
||||
{
|
||||
oldIndex = propertyHolder.Property.Indices[1];
|
||||
if (!propertyLogic.NamedFaceInfo.ContainsKey(oldIndex))
|
||||
directory = Path.Combine(dFacesContentDirectory, $"Unnamed{relativePath[2..]}");
|
||||
faceCollection = faceCollections[i];
|
||||
keys = propertyLogic.NamedFaceInfoDeterministicHashCodeIndices[propertyHolder.Property.Id.Value];
|
||||
minimumDateTime = Property.Models.Stateless.A_Property.GetMinimumDateTime(propertyHolder.Property);
|
||||
(isWrongYear, _) = propertyHolder.Property.IsWrongYear(propertyHolder.ImageFileInfo.FullName, minimumDateTime);
|
||||
isWrongYearFlag = isWrongYear is null ? "#" : isWrongYear.Value ? "~" : "=";
|
||||
subDirectoryName = $"{isWrongYearFlag}{minimumDateTime:yyyy}";
|
||||
if (!faceCollection.Any())
|
||||
directory = Path.Combine(dFacesContentDirectory, $"None{relativePath[2..]}", subDirectoryName);
|
||||
else if (keys.Length != 1)
|
||||
directory = Path.Combine(dFacesContentDirectory, $"Not Supported{relativePath[2..]}", subDirectoryName);
|
||||
else if (faceCollection.Count != 1)
|
||||
directory = Path.Combine(dFacesContentDirectory, $"Many{relativePath[2..]}", subDirectoryName);
|
||||
else
|
||||
{
|
||||
faceCollection = faceCollections[i];
|
||||
keys = propertyLogic.NamedFaceInfo[oldIndex];
|
||||
if (!faceCollection.Any())
|
||||
directory = Path.Combine(dFacesContentDirectory, $"None{relativePath[2..]}");
|
||||
else if (keys.Length != 1)
|
||||
directory = Path.Combine(dFacesContentDirectory, $"Not Supported{relativePath[2..]}");
|
||||
else if (faceCollection.Count == 1)
|
||||
personKey = keys[0];
|
||||
if (isWrongYear is not null && !isWrongYear.Value && personKey[..2] is "19" or "20")
|
||||
{
|
||||
personKey = keys[0];
|
||||
if (juliePhares.Contains(personKey))
|
||||
copyDirectory = Path.Combine(dFacesContentDirectory, "Named Images");
|
||||
directory = Path.Combine(dFacesContentDirectory, "Named Shortcuts", personKey);
|
||||
birthDate = Shared.Models.Stateless.Methods.IPersonBirthday.Get(personKey);
|
||||
if (birthDate.HasValue)
|
||||
{
|
||||
if (minimumDateTime < birthDate.Value)
|
||||
subDirectoryName = "!---";
|
||||
else
|
||||
{
|
||||
timeSpan = new(minimumDateTime.Ticks - birthDate.Value.Ticks);
|
||||
subDirectoryName = $"^{Math.Floor(timeSpan.TotalDays / 365):000}";
|
||||
}
|
||||
}
|
||||
}
|
||||
else if ((from l in faceCollection where HasLeftAndRight(l.FaceLandmarks) select true).Count() == 1)
|
||||
{
|
||||
personKey = keys[0];
|
||||
if (juliePhares.Contains(personKey))
|
||||
copyDirectory = Path.Combine(dFacesContentDirectory, "Named Images^");
|
||||
directory = Path.Combine(dFacesContentDirectory, "Named Shortcuts", $"{personKey}^");
|
||||
}
|
||||
else
|
||||
directory = Path.Combine(dFacesContentDirectory, $"Many{relativePath[2..]}");
|
||||
directory = Path.Combine(dFacesContentDirectory, "Named Shortcuts", personKey, subDirectoryName);
|
||||
if (juliePhares.Contains(personKey))
|
||||
copyDirectory = Path.Combine(dFacesContentDirectory, "Named Images", personKey, subDirectoryName);
|
||||
}
|
||||
}
|
||||
if (!Directory.Exists(directory))
|
||||
@ -603,7 +610,7 @@ public class D_Face : Shared.Models.Properties.IFace, IFace
|
||||
|
||||
Face[] Shared.Models.Stateless.Methods.IFace.TestStatic_GetFaces(string jsonFileFullName) => throw new NotImplementedException();
|
||||
|
||||
private static bool HasLeftAndRight(Dictionary<string, Shared.Models.FacePoint[]> faceLandmarks)
|
||||
private static bool HasLeftAndRight(Dictionary<string, FacePoint[]> faceLandmarks)
|
||||
{
|
||||
bool result = true;
|
||||
if (!faceLandmarks.ContainsKey(FacePart.LeftEye.ToString()))
|
||||
|
@ -1,10 +1,10 @@
|
||||
using FaceRecognitionDotNet;
|
||||
using System.Text.Json;
|
||||
using View_by_Distance.Instance.Models.Stateless;
|
||||
using View_by_Distance.Metadata.Models;
|
||||
using View_by_Distance.Resize.Models;
|
||||
using View_by_Distance.Shared.Models;
|
||||
using View_by_Distance.Shared.Models.Methods;
|
||||
using View_by_Distance.Shared.Models.Stateless;
|
||||
|
||||
namespace View_by_Distance.Instance.Models;
|
||||
|
||||
|
@ -1,8 +1,8 @@
|
||||
using FaceRecognitionDotNet;
|
||||
using System.Text.Json;
|
||||
using View_by_Distance.Metadata.Models;
|
||||
using View_by_Distance.Property.Models;
|
||||
using View_by_Distance.Resize.Models;
|
||||
using View_by_Distance.Shared.Models.Stateless;
|
||||
|
||||
namespace View_by_Distance.Instance.Models;
|
||||
|
||||
|
@ -1,9 +1,10 @@
|
||||
using FaceRecognitionDotNet;
|
||||
using System.Text;
|
||||
using System.Text.Json;
|
||||
using View_by_Distance.FaceRecognitionDotNet;
|
||||
using View_by_Distance.Metadata.Models;
|
||||
using View_by_Distance.Property.Models;
|
||||
using View_by_Distance.Resize.Models;
|
||||
using View_by_Distance.Shared.Models.Stateless;
|
||||
|
||||
namespace View_by_Distance.Instance.Models;
|
||||
|
||||
@ -203,10 +204,10 @@ internal class E_Distance
|
||||
bool check = false;
|
||||
string parentCheck;
|
||||
FileInfo[] fileInfoCollection;
|
||||
System.IO.DirectoryInfo directoryInfo;
|
||||
System.IO.DirectoryInfo tvsDirectoryInfo;
|
||||
string fileNameWithoutExtension;
|
||||
List<string[]> directories = new();
|
||||
System.IO.DirectoryInfo directoryInfo;
|
||||
System.IO.DirectoryInfo tvsDirectoryInfo;
|
||||
string[] changesFrom = new string[] { nameof(A_Property), nameof(B_Metadata), nameof(C_Resize), nameof(D_Face) };
|
||||
List<DateTime> dateTimes = (from l in sourceDirectoryChanges where changesFrom.Contains(l.Item1) select l.Item2).ToList();
|
||||
List<string> directoryInfoCollection = Property.Models.Stateless.IResult.GetDirectoryInfoCollection(configuration,
|
||||
@ -289,8 +290,8 @@ internal class E_Distance
|
||||
private static List<(string, List<Shared.Models.Face>, List<FaceEncoding>)> GetMatches(List<(string, List<KeyValuePair<string, Shared.Models.Face[]>>)> files)
|
||||
{
|
||||
List<(string, List<Shared.Models.Face>, List<FaceEncoding>)> results = new();
|
||||
List<Shared.Models.Face> faces;
|
||||
FaceEncoding faceEncoding;
|
||||
List<Shared.Models.Face> faces;
|
||||
List<FaceEncoding> faceEncodings;
|
||||
foreach ((string, List<KeyValuePair<string, Shared.Models.Face[]>>) file in files)
|
||||
{
|
||||
|
@ -1,8 +1,8 @@
|
||||
using FaceRecognitionDotNet;
|
||||
using System.Text.Json;
|
||||
using System.Text.Json.Serialization;
|
||||
using View_by_Distance.Property.Models;
|
||||
using View_by_Distance.Shared.Models.Methods;
|
||||
using View_by_Distance.Shared.Models.Stateless;
|
||||
|
||||
namespace View_by_Distance.Instance.Models;
|
||||
|
||||
|
@ -10,7 +10,6 @@ namespace View_by_Distance.Instance;
|
||||
|
||||
public class Program
|
||||
{
|
||||
|
||||
public static void Secondary(List<string> args)
|
||||
{
|
||||
LoggerConfiguration loggerConfiguration = new();
|
||||
|
@ -86,7 +86,7 @@
|
||||
"PropertiesChangedForResize": false,
|
||||
"Reverse": false,
|
||||
"xRootDirectory": "C:/Tmp/phares/Pictures",
|
||||
"RootDirectory": "F:/Tmp/Phares/Compare/Images 2022-07-27 - 20220727 - III",
|
||||
"RootDirectory": "C:/Tmp/Phares/Compare/Images 2022-07-27 - f642c5669a1d89d598a2efd70da9dc7129d02c15 - III",
|
||||
"SaveFullYearOfRandomFiles": true,
|
||||
"SaveResizedSubFiles": true,
|
||||
"SaveShortcuts": true,
|
||||
@ -159,7 +159,8 @@
|
||||
"176 x 176",
|
||||
"256 x 256",
|
||||
"353 x 353",
|
||||
"1024 x 768"
|
||||
"1024 x 768",
|
||||
"1920 x 1080"
|
||||
],
|
||||
"OutputResolutions": [
|
||||
"1920 x 1080"
|
||||
|
@ -100,7 +100,8 @@
|
||||
"176 x 176",
|
||||
"256 x 256",
|
||||
"353 x 353",
|
||||
"1024 x 768"
|
||||
"1024 x 768",
|
||||
"1920 x 1080"
|
||||
],
|
||||
"PropertyContentCollectionFiles": [],
|
||||
"SaveFaceLandmarkForOutputResolutions": [
|
||||
|
@ -95,7 +95,7 @@ public class A_Property : Shared.Models.Properties.IProperty, IProperty
|
||||
string year;
|
||||
string directoryName;
|
||||
string[] directorySegments;
|
||||
string? check = Path.GetPathRoot(filteredSourceDirectoryFile);
|
||||
string? check = Path.GetFullPath(filteredSourceDirectoryFile);
|
||||
string? pathRoot = Path.GetPathRoot(filteredSourceDirectoryFile);
|
||||
if (string.IsNullOrEmpty(pathRoot))
|
||||
throw new Exception();
|
||||
|
@@ -14,16 +14,18 @@ namespace View_by_Distance.Property.Models;
public class PropertyLogic
{

protected readonly List<(int, string[])> _AllCollection;
protected readonly List<string> _ExceptionsDirectories;
protected readonly Dictionary<int, int[]> _KeyValuePairs;
protected readonly Dictionary<int, int[]> _IndicesFromNew;
protected readonly Dictionary<int, int[]> _IndicesFromOld;
protected readonly Dictionary<int, string[]> _NamedFaceInfo;
protected readonly Dictionary<int, string[]> _SixCharacterNamedFaceInfo;
protected readonly Dictionary<int, string[]> _NamedFaceInfoDeterministicHashCodeIndices;

public List<string> AngleBracketCollection { get; }
public Dictionary<int, int[]> KeyValuePairs => _KeyValuePairs;
public Dictionary<int, int[]> IndicesFromNew => _IndicesFromNew;
public Dictionary<int, int[]> IndicesFromOld => _IndicesFromOld;
public Dictionary<int, string[]> NamedFaceInfo => _NamedFaceInfo;
public List<string> ExceptionsDirectories => _ExceptionsDirectories;
public Dictionary<int, string[]> NamedFaceInfoDeterministicHashCodeIndices => _NamedFaceInfoDeterministicHashCodeIndices;

private readonly Serilog.ILogger? _Log;
private readonly string[] _VerifyToSeason;
@@ -34,12 +36,14 @@ public class PropertyLogic

public PropertyLogic(int maxDegreeOfParallelism, Configuration configuration)
{
_AllCollection = new();
_Configuration = configuration;
_ExceptionsDirectories = new();
_ASCIIEncoding = new ASCIIEncoding();
AngleBracketCollection = new List<string>();
_Log = Serilog.Log.ForContext<A_Property>();
_MaxDegreeOfParallelism = maxDegreeOfParallelism;
Dictionary<int, string[]>? namedFaceInfoDeterministicHashCodeIndices;
_WriteIndentedJsonSerializerOptions = new JsonSerializerOptions { WriteIndented = true };
if (configuration.VerifyToSeason is null || !configuration.VerifyToSeason.Any())
throw new Exception();
@@ -47,32 +51,47 @@ public class PropertyLogic
string json;
string[] files;
string fullPath;
Dictionary<int, int[]>? indicesFromOld;
Dictionary<int, string[]>? namedFaceInfo;
Dictionary<int, int[]>? keyValuePairs;
List<KeyValuePair<int, int[]>>? collection;
Dictionary<int, int[]> indicesFromNew = new();
Dictionary<int, string[]>? sixCharacterNamedFaceInfo;
string? rootDirectoryParent = Path.GetDirectoryName(configuration.RootDirectory);
if (string.IsNullOrEmpty(rootDirectoryParent))
throw new Exception($"{nameof(rootDirectoryParent)} is null!");
files = Directory.GetFiles(rootDirectoryParent, "*Named*.json", SearchOption.TopDirectoryOnly);
files = Directory.GetFiles(rootDirectoryParent, "*DeterministicHashCode*.json", SearchOption.TopDirectoryOnly);
if (files.Length != 1)
namedFaceInfo = new();
namedFaceInfoDeterministicHashCodeIndices = new();
else
{
json = File.ReadAllText(files[0]);
namedFaceInfo = JsonSerializer.Deserialize<Dictionary<int, string[]>>(json);
if (namedFaceInfo is null)
throw new Exception($"{nameof(namedFaceInfo)} is null!");
namedFaceInfoDeterministicHashCodeIndices = JsonSerializer.Deserialize<Dictionary<int, string[]>>(json);
if (namedFaceInfoDeterministicHashCodeIndices is null)
throw new Exception($"{nameof(namedFaceInfoDeterministicHashCodeIndices)} is null!");
}
if (namedFaceInfoDeterministicHashCodeIndices.Any())
sixCharacterNamedFaceInfo = new();
else
{
files = Directory.GetFiles(rootDirectoryParent, "*SixCharacter*.json", SearchOption.TopDirectoryOnly);
if (files.Length != 1)
sixCharacterNamedFaceInfo = new();
else
{
json = File.ReadAllText(files[0]);
sixCharacterNamedFaceInfo = JsonSerializer.Deserialize<Dictionary<int, string[]>>(json);
if (sixCharacterNamedFaceInfo is null)
throw new Exception($"{nameof(sixCharacterNamedFaceInfo)} is null!");
}
}
files = Directory.GetFiles(rootDirectoryParent, "*keyValuePairs*.json", SearchOption.TopDirectoryOnly);
if (files.Length != 1)
indicesFromOld = new();
keyValuePairs = new();
else
{
json = File.ReadAllText(files[0]);
indicesFromOld = JsonSerializer.Deserialize<Dictionary<int, int[]>>(json);
if (indicesFromOld is null)
throw new Exception($"{nameof(indicesFromOld)} is null!");
keyValuePairs = JsonSerializer.Deserialize<Dictionary<int, int[]>>(json);
if (keyValuePairs is null)
throw new Exception($"{nameof(keyValuePairs)} is null!");
}
foreach (string propertyContentCollectionFile in configuration.PropertyContentCollectionFiles)
{
@@ -92,9 +111,10 @@ public class PropertyLogic
indicesFromNew.Add(keyValuePair.Key, keyValuePair.Value);
}
}
_NamedFaceInfo = namedFaceInfo;
_KeyValuePairs = keyValuePairs;
_IndicesFromNew = indicesFromNew;
_IndicesFromOld = indicesFromOld;
_SixCharacterNamedFaceInfo = sixCharacterNamedFaceInfo;
_NamedFaceInfoDeterministicHashCodeIndices = namedFaceInfoDeterministicHashCodeIndices;
}

public override string ToString()
@@ -180,6 +200,8 @@ public class PropertyLogic
}
else if (!isIgnoreExtension && isValidImageFormatExtension)
{
if (!_IndicesFromNew.Any() && !_KeyValuePairs.Any())
throw new Exception("In order to keep six character indices at least one need to have an item!");
try
{
using Image image = Image.FromFile(filteredSourceDirectoryFileInfo.FullName);
@@ -216,10 +238,10 @@ public class PropertyLogic
encodingHash = Stateless.A_Property.GetDeterministicHashCode(encoding);
if (_MaxDegreeOfParallelism < 2)
ticks = LogDelta(ticks, nameof(Stateless.A_Property.GetDeterministicHashCode));
if (!_IndicesFromOld.ContainsKey(encodingHash))
if (!_KeyValuePairs.ContainsKey(encodingHash))
indices.Add(encodingHash);
else
indices.AddRange(_IndicesFromOld[encodingHash]);
indices.AddRange(_KeyValuePairs[encodingHash]);
}
}
width = image.Width;
@@ -635,7 +657,7 @@ public class PropertyLogic
File.Move(propertyHolder.ImageFileInfo.FullName, filteredSourceDirectoryFileExtensionLowered);
if (propertyHolder.Changed is null || propertyHolder.Changed.Value || propertyHolder.Property is null)
{
property = GetPropertyOfPrivate(angleBracket, propertyHolder, firstPass, filteredSourceDirectoryFileTuples, parseExceptions, isIgnoreExtension, isValidImageFormatExtension, isValidMetadataExtensions, extensionLowered,fileNameWithoutExtension);
property = GetPropertyOfPrivate(angleBracket, propertyHolder, firstPass, filteredSourceDirectoryFileTuples, parseExceptions, isIgnoreExtension, isValidImageFormatExtension, isValidMetadataExtensions, extensionLowered, fileNameWithoutExtension);
lock (propertyHolder)
propertyHolder.Update(property);
}
@@ -799,4 +821,53 @@ public class PropertyLogic
return results.OrderBy(l => l.Ticks).ToArray();
}

public void AddToPropertyLogicAllCollection(PropertyHolder[] filteredPropertyHolderCollection)
{
if (_SixCharacterNamedFaceInfo.Any())
{
string[] keys;
PropertyHolder propertyHolder;
for (int i = 0; i < filteredPropertyHolderCollection.Length; i++)
{
propertyHolder = filteredPropertyHolderCollection[i];
if (propertyHolder.Property?.Id is null)
continue;
foreach (int sixCharacterIndex in propertyHolder.Property.Indices)
{
if (!_SixCharacterNamedFaceInfo.ContainsKey(sixCharacterIndex))
continue;
keys = _SixCharacterNamedFaceInfo[sixCharacterIndex];
_AllCollection.Add(new(propertyHolder.Property.Id.Value, keys));
}
}
}
}

public void SaveAllCollection()
{
if (_AllCollection.Any())
{
string[] keys;
string? rootDirectoryParent = Path.GetDirectoryName(_Configuration.RootDirectory);
if (string.IsNullOrEmpty(rootDirectoryParent))
throw new Exception($"{nameof(rootDirectoryParent)} is null!");
Dictionary<int, string[]> namedFaceInfoDeterministicHashCodeIndices = new();
List<(int, string[])> allCollection = _AllCollection.OrderBy(l => l.Item1).ToList();
foreach ((int deterministicHashCode, string[] values) in allCollection)
{
if (namedFaceInfoDeterministicHashCodeIndices.ContainsKey(deterministicHashCode))
{
keys = namedFaceInfoDeterministicHashCodeIndices[deterministicHashCode];
if (JsonSerializer.Serialize(values) == JsonSerializer.Serialize(keys))
continue;
throw new Exception();
}
namedFaceInfoDeterministicHashCodeIndices.Add(deterministicHashCode, values);
}
string json = JsonSerializer.Serialize(namedFaceInfoDeterministicHashCodeIndices, new JsonSerializerOptions { WriteIndented = true });
string checkFile = Path.Combine(rootDirectoryParent, "NamedFaceInfoDeterministicHashCodeIndices.json");
_ = IPath.WriteAllText(checkFile, json, compareBeforeWrite: true);
}
}

}

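For orientation, a standalone sketch (not part of the commit; the file name and payload are illustrative) of the JSON shape the constructor above reads: System.Text.Json on .NET 5 and later round-trips Dictionary<int, string[]> directly, converting the int keys to and from JSON property names.

using System;
using System.Collections.Generic;
using System.IO;
using System.Text.Json;

internal static class KeyValuePairsJsonSketch
{
    internal static void Demonstrate()
    {
        // Illustrative payload shaped like the *DeterministicHashCode*.json files the constructor looks for.
        Dictionary<int, string[]> namedFaceInfo = new() { { 123456, new[] { "ExamplePersonKey" } } };
        string json = JsonSerializer.Serialize(namedFaceInfo, new JsonSerializerOptions { WriteIndented = true });
        string file = Path.Combine(Path.GetTempPath(), "Example-DeterministicHashCode.json");
        File.WriteAllText(file, json);
        // Same deserialize call the constructor uses; int keys are round-tripped from their string form.
        Dictionary<int, string[]>? roundTripped = JsonSerializer.Deserialize<Dictionary<int, string[]>>(File.ReadAllText(file));
        Console.WriteLine(roundTripped?[123456][0]);
    }
}
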
@@ -69,12 +69,12 @@ public class C_Resize
public static (ImageCodecInfo imageCodecInfo, EncoderParameters encoderParameters) GetTuple(string outputExtension, int outputQuality)
{
(ImageCodecInfo imageCodecInfo, EncoderParameters encoderParameters) result;
ImageFormat imageFormat = outputExtension switch
System.Drawing.Imaging.ImageFormat imageFormat = outputExtension switch
{
".gif" => ImageFormat.Gif,
".jpg" => ImageFormat.Jpeg,
".png" => ImageFormat.Png,
".tiff" => ImageFormat.Tiff,
".gif" => System.Drawing.Imaging.ImageFormat.Gif,
".jpg" => System.Drawing.Imaging.ImageFormat.Jpeg,
".png" => System.Drawing.Imaging.ImageFormat.Png,
".tiff" => System.Drawing.Imaging.ImageFormat.Tiff,
_ => throw new Exception(),
};
ImageCodecInfo imageCodecInfo = (from l in ImageCodecInfo.GetImageEncoders() where l.FormatID == imageFormat.Guid select l).First();

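A usage sketch for the method above (not part of the commit; the paths and quality value are made up, and C_Resize is assumed to live in View_by_Distance.Resize.Models as the test usings suggest): the returned tuple is exactly what System.Drawing's Image.Save overload expects.

using System.Drawing;
using System.Drawing.Imaging;
using View_by_Distance.Resize.Models;

internal static class ResizeUsageSketch
{
    internal static void SaveAsJpeg()
    {
        // ".jpg" selects the JPEG encoder via the switch shown above; 95 is an arbitrary quality value.
        (ImageCodecInfo imageCodecInfo, EncoderParameters encoderParameters) = C_Resize.GetTuple(".jpg", 95);
        using Bitmap bitmap = new(@"C:\Tmp\input.png");
        bitmap.Save(@"C:\Tmp\output.jpg", imageCodecInfo, encoderParameters);
    }
}
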
@@ -1,3 +1,4 @@
using System.Drawing;
using System.Text.Json;
using System.Text.Json.Serialization;
using View_by_Distance.Shared.Models.Methods;
@@ -14,18 +15,47 @@ public class FacePoint : Properties.IFacePoint, IFacePoint
public int X => _X;
public int Y => _Y;

private readonly Point _Point;

[JsonConstructor]
public FacePoint(int index, int x, int y)
{
_Index = index;
_X = x;
_Y = y;
_Point = new(x, y);
}

public FacePoint(Point point, int index) :
this(index, point.X, point.Y)
{ }

public override bool Equals(object? obj) => obj is FacePoint point && Equals(point);

public override string ToString()
{
string result = JsonSerializer.Serialize(this, new JsonSerializerOptions() { WriteIndented = true });
return result;
}

public override int GetHashCode()
{
int hashCode = 1861411795;
hashCode = hashCode * -1521134295 + _Point.GetHashCode();
hashCode = hashCode * -1521134295 + _Index.GetHashCode();
return hashCode;
}

public bool Equals(FacePoint? facePoint)
{
return facePoint is not null
&& _X == facePoint.X
&& _Y == facePoint.Y
&& _Index == facePoint.Index;
}

public static bool operator ==(FacePoint point1, FacePoint point2) => point1.Equals(point2);

public static bool operator !=(FacePoint point1, FacePoint point2) => !(point1 == point2);

}

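A small sketch of the value-equality members above (not part of the commit; FacePoint is assumed to sit in View_by_Distance.Shared.Models alongside Location): two instances with the same index and coordinates compare equal and hash alike.

using System;
using View_by_Distance.Shared.Models;

internal static class FacePointEqualitySketch
{
    internal static void Demonstrate()
    {
        FacePoint a = new(index: 0, x: 10, y: 20);
        FacePoint b = new(index: 0, x: 10, y: 20);
        Console.WriteLine(a.Equals(b));                        // True: compares Index, X and Y
        Console.WriteLine(a == b);                             // True: == forwards to Equals
        Console.WriteLine(a.GetHashCode() == b.GetHashCode()); // True: hash built from Point and Index
    }
}
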
@@ -1,10 +1,11 @@
using System.Drawing;
using System.Text.Json;
using System.Text.Json.Serialization;
using View_by_Distance.Shared.Models.Methods;

namespace View_by_Distance.Shared.Models;

public class Location : Properties.ILocation, ILocation
public class Location : Properties.ILocation, ILocation, IEquatable<Location>
{

protected double _Confidence;
@@ -28,10 +29,47 @@ public class Location : Properties.ILocation, ILocation
_Top = top;
}

public Location(int left, int top, int right, int bottom) :
this(-1.0d, bottom, left, right, top)
{ }

public Location(Rectangle rectangle, double confidence) :
this(confidence, rectangle.Bottom, rectangle.Left, rectangle.Right, rectangle.Top)
{ }

public Location(Location location, double confidence) :
this(confidence, location.Bottom, location.Left, location.Right, location.Top)
{ }

public override bool Equals(object? obj) => Equals(obj as Location);

public override string ToString()
{
string result = JsonSerializer.Serialize(this, new JsonSerializerOptions() { WriteIndented = true });
return result;
}

public override int GetHashCode()
{
int hashCode = -773114317;
hashCode = hashCode * -1521134295 + _Bottom.GetHashCode();
hashCode = hashCode * -1521134295 + _Left.GetHashCode();
hashCode = hashCode * -1521134295 + _Right.GetHashCode();
hashCode = hashCode * -1521134295 + _Top.GetHashCode();
return hashCode;
}

public bool Equals(Location? location)
{
return location is not null
&& _Bottom == location.Bottom
&& _Left == location.Left
&& _Right == location.Right
&& _Top == location.Top;
}

public static bool operator ==(Location location1, Location location2) => EqualityComparer<Location>.Default.Equals(location1, location2);

public static bool operator !=(Location location1, Location location2) => !(location1 == location2);

}

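A usage sketch for the new constructors (not part of the commit; the rectangle values are made up): a System.Drawing face rectangle and the equivalent edge coordinates produce Locations that compare equal, since equality looks only at Bottom, Left, Right and Top.

using System;
using System.Drawing;
using View_by_Distance.Shared.Models;

internal static class LocationSketch
{
    internal static void Demonstrate()
    {
        Rectangle rectangle = new(x: 40, y: 30, width: 100, height: 120);
        Location fromRectangle = new(rectangle, confidence: 0.9d);
        // Rectangle(40, 30, 100, 120) spans Left 40, Top 30, Right 140, Bottom 150.
        Location fromEdges = new(left: 40, top: 30, right: 140, bottom: 150);
        Console.WriteLine(fromRectangle.Equals(fromEdges)); // True
    }
}
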
59
Shared/Models/Stateless/FacePart.cs
Normal file
@@ -0,0 +1,59 @@
namespace View_by_Distance.Shared.Models.Stateless;

/// <summary>
/// Specifies the part of the face.
/// </summary>
public enum FacePart
{

/// <summary>
/// Specifies the chin.
/// </summary>
Chin,

/// <summary>
/// Specifies the left eyebrow.
/// </summary>
LeftEyebrow,

/// <summary>
/// Specifies the right eyebrow.
/// </summary>
RightEyebrow,

/// <summary>
/// Specifies the nose bridge.
/// </summary>
NoseBridge,

/// <summary>
/// Specifies the nose tip.
/// </summary>
NoseTip,

/// <summary>
/// Specifies the left eye.
/// </summary>
LeftEye,

/// <summary>
/// Specifies the right eye.
/// </summary>
RightEye,

/// <summary>
/// Specifies the top lip.
/// </summary>
TopLip,

/// <summary>
/// Specifies the bottom lip.
/// </summary>
BottomLip,

/// <summary>
/// Specifies the nose.
/// </summary>
Nose,

}

24
Shared/Models/Stateless/ImageFormat.cs
Normal file
@@ -0,0 +1,24 @@
namespace View_by_Distance.Shared.Models.Stateless;

/// <summary>
/// Specifies the file format of the image.
/// </summary>
public enum ImageFormat
{

/// <summary>
/// Specifies the bitmap (BMP) image format.
/// </summary>
Bmp,

/// <summary>
/// Specifies the Joint Photographic Experts Group (JPEG) image format.
/// </summary>
Jpeg,

/// <summary>
/// Specifies the W3C Portable Network Graphics (PNG) image format.
/// </summary>
Png,

}

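Because this enum reuses the ImageFormat name, it collides with System.Drawing.Imaging.ImageFormat (the reason C_Resize above switched to fully qualified names). A mapping sketch (illustrative, not part of the commit) keeps the conversion explicit:

using View_by_Distance.Shared.Models.Stateless;

internal static class ImageFormatSketch
{
    // Maps the project's Stateless.ImageFormat onto the System.Drawing codec formats,
    // mirroring the extension switch in C_Resize.GetTuple.
    internal static System.Drawing.Imaging.ImageFormat ToDrawingFormat(ImageFormat imageFormat) =>
        imageFormat switch
        {
            ImageFormat.Bmp => System.Drawing.Imaging.ImageFormat.Bmp,
            ImageFormat.Jpeg => System.Drawing.Imaging.ImageFormat.Jpeg,
            ImageFormat.Png => System.Drawing.Imaging.ImageFormat.Png,
            _ => throw new System.ArgumentOutOfRangeException(nameof(imageFormat)),
        };
}
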
@@ -18,6 +18,9 @@ public interface IPersonBirthday
string TestStatic_GetFormatted(Models.PersonBirthday personBirthday) => PersonBirthday.GetFormatted(personBirthday);
static string GetFormatted(Models.PersonBirthday personBirthday) => PersonBirthday.GetFormatted(personBirthday);

DateTime? TestStatic_Get(string personKey) => PersonBirthday.Get(personKey);
static DateTime? Get(string personKey) => PersonBirthday.Get(personKey);

string TestStatic_GetFileName(Models.PersonBirthday personBirthday) => PersonBirthday.GetFileName(personBirthday);
static string GetFileName(Models.PersonBirthday personBirthday) => PersonBirthday.GetFileName(personBirthday);

@@ -1,3 +1,5 @@
using System.Globalization;

namespace View_by_Distance.Shared.Models.Stateless.Methods;

internal abstract class PersonBirthday
@@ -13,5 +15,5 @@ internal abstract class PersonBirthday
internal static string GetFileName(Models.PersonBirthday personBirthday) => $"{personBirthday.Value.ToString(GetFormat())}.json";
internal static bool DoesBirthDateExits(Properties.IStorage storage, Models.PersonBirthday personBirthday) => File.Exists(GetFileFullName(storage, personBirthday));
internal static string GetFileFullName(Properties.IStorage storage, Models.PersonBirthday personBirthday) => Path.Combine(storage.PeopleRootDirectory, "{}", GetFileName(personBirthday));

internal static DateTime? Get(string personKey) => DateTime.TryParseExact(personKey, GetFormat(), CultureInfo.InvariantCulture, DateTimeStyles.None, out DateTime dateTime) ? dateTime : null;
}

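A round-trip sketch of the new Get helper (not part of the commit; GetFormat() is not shown in this hunk, so the format string below is purely an assumption made to keep the sketch self-contained):

using System;
using System.Globalization;

internal static class PersonKeySketch
{
    internal static void Demonstrate()
    {
        const string assumedFormat = "yyyy-MM-dd"; // stand-in for PersonBirthday.GetFormat(), which is not shown here
        string personKey = new DateTime(1990, 1, 31).ToString(assumedFormat, CultureInfo.InvariantCulture);
        // Mirrors Get(personKey): parse back to DateTime?, null when the key does not match the format.
        DateTime? parsed = DateTime.TryParseExact(personKey, assumedFormat, CultureInfo.InvariantCulture, DateTimeStyles.None, out DateTime dateTime) ? dateTime : null;
        Console.WriteLine(parsed);
    }
}
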
19
Shared/Models/Stateless/Mode.cs
Normal file
@@ -0,0 +1,19 @@
namespace View_by_Distance.Shared.Models.Stateless;

/// <summary>
/// Specifies the image mode.
/// </summary>
public enum Mode
{

/// <summary>
/// Specifies the RGB (8-bit Red, Green and Blue, 3 channels) image mode.
/// </summary>
Rgb,

/// <summary>
/// Specifies the greyscale image mode.
/// </summary>
Greyscale

}

24
Shared/Models/Stateless/Model.cs
Normal file
@@ -0,0 +1,24 @@
namespace View_by_Distance.Shared.Models.Stateless;

/// <summary>
/// Specifies the model of the face detector.
/// </summary>
public enum Model
{

/// <summary>
/// Specifies a HOG (Histograms of Oriented Gradients) based face detector.
/// </summary>
Hog,

/// <summary>
/// Specifies a CNN (Convolutional Neural Network) based face detector.
/// </summary>
Cnn,

/// <summary>
/// Specifies a custom face detector.
/// </summary>
Custom

}

24
Shared/Models/Stateless/PredictorModel.cs
Normal file
@@ -0,0 +1,24 @@
namespace View_by_Distance.Shared.Models.Stateless;

/// <summary>
/// Specifies the dimension of the vector returned by the detector.
/// </summary>
public enum PredictorModel
{

/// <summary>
/// Specifies the large-scale detector, which returns 68 points to represent the face.
/// </summary>
Large,

/// <summary>
/// Specifies the small-scale detector, which returns 5 points to represent the face.
/// </summary>
Small,

/// <summary>
/// Specifies a custom detector.
/// </summary>
Custom

}

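A tiny sketch tied to the doc comments above (illustrative only, not part of the commit): mapping each predictor model to the number of landmark points it yields, with Custom treated as having no fixed count.

using View_by_Distance.Shared.Models.Stateless;

internal static class PredictorModelSketch
{
    // Point counts come from the enum's doc comments: 68 for Large, 5 for Small.
    internal static int GetLandmarkPointCount(PredictorModel predictorModel) =>
        predictorModel switch
        {
            PredictorModel.Large => 68,
            PredictorModel.Small => 5,
            _ => throw new System.NotSupportedException($"{predictorModel} has no fixed point count."),
        };
}
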
@@ -1,11 +1,11 @@
using Microsoft.Extensions.Configuration;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using Serilog;
using System.Reflection;
using System.Diagnostics;
using View_by_Distance.Tests.Models;
using View_by_Distance.Shared.Models.Stateless.Methods;
using Phares.Shared;
using Serilog;
using System.Diagnostics;
using System.Reflection;
using View_by_Distance.Shared.Models.Stateless.Methods;
using View_by_Distance.Tests.Models;

namespace View_by_Distance.Tests;

@@ -1,14 +1,14 @@
using Microsoft.Extensions.Configuration;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using Serilog;
using System.Reflection;
using System.Diagnostics;
using View_by_Distance.Tests.Models;
using View_by_Distance.Shared.Models.Stateless.Methods;
using Phares.Shared;
using View_by_Distance.Resize.Models;
using Serilog;
using System.Diagnostics;
using System.Drawing.Imaging;
using System.Reflection;
using View_by_Distance.Metadata.Models;
using View_by_Distance.Resize.Models;
using View_by_Distance.Shared.Models.Stateless.Methods;
using View_by_Distance.Tests.Models;

namespace View_by_Distance.Tests;

@@ -86,7 +86,7 @@
"PropertiesChangedForResize": false,
"Reverse": false,
"xRootDirectory": "C:/Tmp/phares/Pictures",
"RootDirectory": "F:/Tmp/Phares/Compare/Images 2022-07-27 - 20220727 - III",
"RootDirectory": "F:/Tmp/Phares/Compare/Images 2022-07-27 - f642c5669a1d89d598a2efd70da9dc7129d02c15 - III",
"SaveFullYearOfRandomFiles": true,
"SaveResizedSubFiles": true,
"SaveShortcuts": true,
@@ -159,7 +159,8 @@
"176 x 176",
"256 x 256",
"353 x 353",
"1024 x 768"
"1024 x 768",
"1920 x 1080"
],
"OutputResolutions": [
"1920 x 1080"

@@ -25,6 +25,8 @@ Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Instance", "Instance\Instan
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Tests", "Tests\Tests.csproj", "{B4FB6B43-36EC-404D-B934-5C695C6E32CC}"
EndProject
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "FaceRecognitionDotNet", "FaceRecognitionDotNet\FaceRecognitionDotNet.csproj", "{FAD03DA9-E8B1-4BBE-B8D0-2ADD2F2BC758}"
EndProject
Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution
Debug|Any CPU = Debug|Any CPU
@@ -78,5 +80,9 @@ Global
{B4FB6B43-36EC-404D-B934-5C695C6E32CC}.Debug|Any CPU.Build.0 = Debug|Any CPU
{B4FB6B43-36EC-404D-B934-5C695C6E32CC}.Release|Any CPU.ActiveCfg = Release|Any CPU
{B4FB6B43-36EC-404D-B934-5C695C6E32CC}.Release|Any CPU.Build.0 = Release|Any CPU
{FAD03DA9-E8B1-4BBE-B8D0-2ADD2F2BC758}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{FAD03DA9-E8B1-4BBE-B8D0-2ADD2F2BC758}.Debug|Any CPU.Build.0 = Debug|Any CPU
{FAD03DA9-E8B1-4BBE-B8D0-2ADD2F2BC758}.Release|Any CPU.ActiveCfg = Release|Any CPU
{FAD03DA9-E8B1-4BBE-B8D0-2ADD2F2BC758}.Release|Any CPU.Build.0 = Release|Any CPU
EndGlobalSection
EndGlobal