Removed Methods Namespace

This commit is contained in:
2024-12-29 10:46:13 -07:00
parent 0215e838e7
commit 1f8c4569ee
62 changed files with 755 additions and 171 deletions

View File

@ -0,0 +1,97 @@
namespace View_by_Distance.FaceRecognitionDotNet.Models;
/// <summary>
/// Represents a class which has managed or unmanaged resources.
/// </summary>
/// <summary>
/// Base class for types that own managed or unmanaged resources and need
/// a guarded, idempotent dispose pattern.
/// </summary>
public abstract class DisposableObject : IDisposable
{
    /// <summary>
    /// Gets a value indicating whether this instance has been disposed.
    /// </summary>
    /// <returns>true if this instance has been disposed; otherwise, false.</returns>
    public bool IsDisposed { get; private set; }

    /// <summary>
    /// Throws <see cref="ObjectDisposedException"/> when this instance has already been disposed.
    /// </summary>
    public void ThrowIfDisposed() =>
        ObjectDisposedException.ThrowIf(IsDisposed, this);

    /// <summary>
    /// Throws <see cref="ObjectDisposedException"/> naming <paramref name="objectName"/>
    /// when this instance has already been disposed.
    /// </summary>
    internal void ThrowIfDisposed(string objectName)
    {
#pragma warning disable CA1513
        if (IsDisposed)
            throw new ObjectDisposedException(objectName);
#pragma warning restore CA1513
    }

    /// <summary>
    /// Releases all managed resources. Derived classes override as needed; the base is a no-op.
    /// </summary>
    protected virtual void DisposeManaged()
    {
    }

    /// <summary>
    /// Releases all unmanaged resources. Derived classes override as needed; the base is a no-op.
    /// </summary>
    protected virtual void DisposeUnmanaged()
    {
    }

    /// <summary>
    /// Releases all resources used by this <see cref="DisposableObject"/>.
    /// Safe to call multiple times; only the first call has an effect.
    /// </summary>
    public void Dispose()
    {
        GC.SuppressFinalize(this);
        Dispose(true);
    }

    /// <summary>
    /// Core dispose implementation.
    /// </summary>
    /// <param name="disposing">true when invoked via <see cref="IDisposable.Dispose"/> (managed resources may be touched).</param>
    private void Dispose(bool disposing)
    {
        if (IsDisposed)
            return;
        // Mark disposed before releasing so re-entrant calls short-circuit.
        IsDisposed = true;
        if (disposing)
            DisposeManaged();
        DisposeUnmanaged();
    }
}

View File

@ -0,0 +1,105 @@
using DlibDotNet;
using System.Runtime.Serialization;
namespace View_by_Distance.FaceRecognitionDotNet.Models;
/// <summary>
/// Represents a feature data of face. This class cannot be inherited.
/// </summary>
[Serializable]
/// <summary>
/// Represents a feature data of face. This class cannot be inherited.
/// </summary>
[Serializable]
public sealed class FaceEncoding : DisposableObject, ISerializable
{

    #region Fields

    // Backing dlib matrix; excluded from serialization and persisted manually
    // via GetObjectData as a raw double[] plus row/column counts.
    [NonSerialized]
    private readonly Matrix<double> _Encoding;

    #endregion

    #region Constructors

    internal FaceEncoding(Matrix<double> encoding) => _Encoding = encoding;

    /// <summary>
    /// Deserialization constructor; rebuilds the matrix from the raw array and dimensions
    /// written by <see cref="GetObjectData"/>.
    /// </summary>
    private FaceEncoding(SerializationInfo info, StreamingContext context)
    {
        if (info == null)
            throw new NullReferenceException(nameof(info));
        double[]? array = info.GetValue(nameof(_Encoding), typeof(double[])) as double[];
        int? row = (int?)info.GetValue(nameof(_Encoding.Rows), typeof(int));
        int? column = (int?)info.GetValue(nameof(_Encoding.Columns), typeof(int));
        // Fix: the original validated row/column but passed a possibly-null array
        // straight into the Matrix constructor.
        if (array is null)
            throw new NullReferenceException(nameof(array));
        if (row is null)
            throw new NullReferenceException(nameof(row));
        if (column is null)
            throw new NullReferenceException(nameof(column));
        _Encoding = new Matrix<double>(array, row.Value, column.Value);
    }

    #endregion

    #region Properties

    internal Matrix<double> Encoding => _Encoding;

    /// <summary>
    /// Gets the size of feature data.
    /// </summary>
    /// <exception cref="ObjectDisposedException">This object is disposed.</exception>
    public int Size
    {
        get
        {
            ThrowIfDisposed();
            return _Encoding.Size;
        }
    }

    #endregion

    #region Methods

    /// <summary>
    /// Gets a feature data of face as raw format.
    /// </summary>
    /// <returns>A <see cref="double"/> array that represents a feature data.</returns>
    /// <remarks><see cref="FaceEncoding"/> class supports serialization. This method is for interoperability between FaceRecognitionDotNet and dlib.</remarks>
    /// <exception cref="ObjectDisposedException">This object is disposed.</exception>
    public double[] GetRawEncoding()
    {
        ThrowIfDisposed();
        return _Encoding.ToArray();
    }

    #region Overrides

    /// <summary>
    /// Releases all unmanaged resources (the underlying dlib matrix).
    /// </summary>
    protected override void DisposeUnmanaged()
    {
        base.DisposeUnmanaged();
        _Encoding?.Dispose();
    }

    #endregion

    #endregion

    #region ISerializable Members

    /// <summary>
    /// Populates a <see cref="SerializationInfo"/> with the data needed to serialize the target object.
    /// Stores the raw values plus Rows/Columns so the matrix can be reconstructed.
    /// </summary>
    /// <param name="info">The <see cref="SerializationInfo"/> to populate with data.</param>
    /// <param name="context">The destination (see <see cref="StreamingContext"/>) for this serialization.</param>
    public void GetObjectData(SerializationInfo info, StreamingContext context)
    {
        info.AddValue(nameof(_Encoding), _Encoding.ToArray());
        info.AddValue(nameof(_Encoding.Rows), _Encoding.Rows);
        info.AddValue(nameof(_Encoding.Columns), _Encoding.Columns);
    }

    #endregion

}

View File

@ -0,0 +1,496 @@
using DlibDotNet;
using DlibDotNet.Dnn;
using System.Collections.ObjectModel;
using System.Drawing;
using System.Drawing.Imaging;
using System.Runtime.InteropServices;
using View_by_Distance.FaceRecognitionDotNet.Dlib.Python;
using View_by_Distance.FaceRecognitionDotNet.Extensions;
using View_by_Distance.Shared.Models;
using View_by_Distance.Shared.Models.Stateless;
namespace View_by_Distance.FaceRecognitionDotNet.Models;
public class FaceRecognition : DisposableObject
{
public FaceDetector? CustomFaceDetector { get; set; }
public FaceLandmarkDetector? CustomFaceLandmarkDetector { get; set; }
private readonly Model _Model;
private readonly int _NumberOfJitters;
private readonly LossMetric _FaceEncoder;
private readonly LossMmod _CnnFaceDetector;
private readonly int _NumberOfTimesToUpsample;
private readonly PredictorModel _PredictorModel;
private readonly FrontalFaceDetector _FaceDetector;
private readonly ShapePredictor _PosePredictor5Point;
private readonly ShapePredictor _PosePredictor68Point;
private record Record(Location Location, List<FaceEncoding?> FaceEncodings, List<List<FacePartAndFacePointArray>> FaceParts);
/// <summary>
/// Initializes a new <see cref="FaceRecognition"/> from the supplied dlib model binaries.
/// </summary>
/// <param name="numberOfJitters">Number of jitters used when computing face encodings.</param>
/// <param name="numberOfTimesToUpsample">Times to upsample an image while searching for faces.</param>
/// <param name="model">Face-detection model to use (Cnn, Hog, or Custom).</param>
/// <param name="modelParameter">Binary contents of the four required dlib model files.</param>
/// <param name="predictorModel">Landmark predictor variant (Large = 68-point, Small = 5-point).</param>
public FaceRecognition(int numberOfJitters, int numberOfTimesToUpsample, Model model, ModelParameter modelParameter, PredictorModel predictorModel)
{
    if (modelParameter is null)
        throw new NullReferenceException(nameof(modelParameter));
    if (modelParameter.PosePredictor5FaceLandmarksModel is null)
        throw new NullReferenceException(nameof(modelParameter.PosePredictor5FaceLandmarksModel));
    if (modelParameter.PosePredictor68FaceLandmarksModel is null)
        throw new NullReferenceException(nameof(modelParameter.PosePredictor68FaceLandmarksModel));
    if (modelParameter.CnnFaceDetectorModel is null)
        throw new NullReferenceException(nameof(modelParameter.CnnFaceDetectorModel));
    if (modelParameter.FaceRecognitionModel is null)
        throw new NullReferenceException(nameof(modelParameter.FaceRecognitionModel));
    _Model = model;
    _PredictorModel = predictorModel;
    _NumberOfJitters = numberOfJitters;
    _NumberOfTimesToUpsample = numberOfTimesToUpsample;
    // Note: the original called "_Field?.Dispose()" immediately before each
    // assignment below; those readonly fields are necessarily still null inside
    // the constructor, so the calls were dead no-ops and have been removed.
    _FaceDetector = DlibDotNet.Dlib.GetFrontalFaceDetector();
    _PosePredictor68Point = ShapePredictor.Deserialize(modelParameter.PosePredictor68FaceLandmarksModel);
    _PosePredictor5Point = ShapePredictor.Deserialize(modelParameter.PosePredictor5FaceLandmarksModel);
    _CnnFaceDetector = LossMmod.Deserialize(modelParameter.CnnFaceDetectorModel);
    _FaceEncoder = LossMetric.Deserialize(modelParameter.FaceRecognitionModel);
}
/// <summary>
/// Computes the euclidean distance between two face encodings
/// (smaller distance means more similar faces).
/// </summary>
/// <param name="faceEncoding">The reference encoding.</param>
/// <param name="faceToCompare">The encoding to compare against.</param>
/// <returns>The euclidean length of the element-wise difference; 0 when the reference encoding is empty.</returns>
public static double FaceDistance(FaceEncoding faceEncoding, FaceEncoding faceToCompare)
{
    if (faceEncoding is null)
        throw new NullReferenceException(nameof(faceEncoding));
    if (faceToCompare is null)
        throw new NullReferenceException(nameof(faceToCompare));
    faceEncoding.ThrowIfDisposed();
    faceToCompare.ThrowIfDisposed();
    // An empty reference encoding has nothing to measure.
    if (faceEncoding.Encoding.Size == 0)
        return 0;
    using Matrix<double> difference = faceEncoding.Encoding - faceToCompare.Encoding;
    return DlibDotNet.Dlib.Length(difference);
}
// Concatenates two landmark sequences into a single array (first sequence first).
private static FacePoint[] Join(IEnumerable<FacePoint> facePoints1, IEnumerable<FacePoint> facePoints2) =>
    [.. facePoints1, .. facePoints2];
/// <summary>
/// Splits the raw landmark points of one detection into named face parts,
/// using the fixed point layout of the active predictor model.
/// Returns an empty list when the point count does not match the model.
/// </summary>
private List<FacePartAndFacePointArray> GetFaceParts(FullObjectDetection fullObjectDetection)
{
    List<FacePartAndFacePointArray> results = [];
    // Materialize every landmark as (index, x, y).
    FacePoint[] facePoints = Enumerable.Range(0, (int)fullObjectDetection.Parts)
        .Select(index => new FacePoint(index, fullObjectDetection.GetPart((uint)index).X, fullObjectDetection.GetPart((uint)index).Y))
        .ToArray();
    switch (_PredictorModel)
    {
        case PredictorModel.Custom:
            throw new NotImplementedException();
        case PredictorModel.Large:
            // 68-point layout: the Skip/Take offsets below are the standard
            // 68-landmark index ranges for each facial feature.
            if (facePoints.Length == 68)
            {
                results.Add(new FacePartAndFacePointArray(FacePart.Chin, facePoints.Skip(0).Take(17).ToArray()));
                results.Add(new FacePartAndFacePointArray(FacePart.LeftEyebrow, facePoints.Skip(17).Take(5).ToArray()));
                results.Add(new FacePartAndFacePointArray(FacePart.RightEyebrow, facePoints.Skip(22).Take(5).ToArray()));
                results.Add(new FacePartAndFacePointArray(FacePart.NoseBridge, facePoints.Skip(27).Take(5).ToArray()));
                results.Add(new FacePartAndFacePointArray(FacePart.NoseTip, facePoints.Skip(31).Take(5).ToArray()));
                results.Add(new FacePartAndFacePointArray(FacePart.LeftEye, facePoints.Skip(36).Take(6).ToArray()));
                results.Add(new FacePartAndFacePointArray(FacePart.RightEye, facePoints.Skip(42).Take(6).ToArray()));
                // The lips share boundary points, hence the two joined ranges each.
                results.Add(new FacePartAndFacePointArray(FacePart.TopLip, Join(facePoints.Skip(48).Take(7), facePoints.Skip(60).Take(5))));
                results.Add(new FacePartAndFacePointArray(FacePart.BottomLip, Join(facePoints.Skip(55).Take(5), facePoints.Skip(65).Take(3))));
            }
            break;
        case PredictorModel.Small:
            // 5-point layout: two points per eye plus one nose-tip point.
            if (facePoints.Length == 5)
            {
                results.Add(new FacePartAndFacePointArray(FacePart.RightEye, facePoints.Skip(0).Take(2).ToArray()));
                results.Add(new FacePartAndFacePointArray(FacePart.LeftEye, facePoints.Skip(2).Take(2).ToArray()));
                results.Add(new FacePartAndFacePointArray(FacePart.NoseTip, facePoints.Skip(4).Take(1).ToArray()));
            }
            break;
        default:
            break;
    }
    return results;
}
/// <summary>
/// Runs the configured face detector over <paramref name="image"/> and returns
/// one <see cref="MModRect"/> (rectangle + confidence) per candidate face.
/// Caller owns the returned rects and must dispose them.
/// </summary>
private MModRect[] GetMModRects(Image image)
{
    switch (_Model)
    {
        case Model.Cnn:
            // CNN detector already yields MModRect instances.
            return CnnFaceDetectionModelV1.Detect(_CnnFaceDetector, image, _NumberOfTimesToUpsample).ToArray();
        case Model.Hog:
            // HOG detector yields (rectangle, confidence) tuples; wrap them.
            IEnumerable<Tuple<DlibDotNet.Rectangle, double>>? locations = SimpleObjectDetector.RunDetectorWithUpscale2(_FaceDetector, image, (uint)_NumberOfTimesToUpsample);
            return locations.Select(l => new MModRect { Rect = l.Item1, DetectionConfidence = l.Item2 }).ToArray();
        case Model.Custom:
            if (CustomFaceDetector is null)
                throw new NotSupportedException("The custom face detector is not ready.");
            return CustomFaceDetector.Detect(image, _NumberOfTimesToUpsample).Select(rect => new MModRect
            {
                Rect = new DlibDotNet.Rectangle(rect.Left, rect.Top, rect.Right, rect.Bottom),
                DetectionConfidence = rect.Confidence
            }).ToArray();
        default:
            // Unreachable for known Model values.
            throw new Exception();
    }
}
/// <summary>
/// Detects faces in <paramref name="image"/> and returns their trimmed bounding locations.
/// </summary>
/// <param name="image">Image to scan; must not be disposed.</param>
/// <returns>One <see cref="Location"/> per detected face (empty list when none found).</returns>
public List<Location> FaceLocations(Image image)
{
    if (image is null)
        throw new NullReferenceException(nameof(image));
    image.ThrowIfDisposed();
    ThrowIfDisposed();
    List<Location> results = [];
    System.Drawing.Rectangle rectangle;
    // Keep the concrete array (as GetLocations does) so the detection count is
    // a cheap Length read instead of re-evaluating Count() on every iteration.
    MModRect[] mModRects = GetMModRects(image);
    foreach (MModRect? mModRect in mModRects)
    {
        rectangle = new(mModRect.Rect.Left, mModRect.Rect.Top, (int)mModRect.Rect.Width, (int)mModRect.Rect.Height);
        // Clamp the raw detector rectangle to the image bounds.
        Location location = ILocation.TrimBound(mModRect.DetectionConfidence, rectangle, image.Width, image.Height, mModRects.Length);
        mModRect.Dispose();
        results.Add(location);
    }
    return results;
}
/// <summary>
/// Runs the landmark predictor on each face location and returns one
/// <see cref="FullObjectDetection"/> per location, in the same order.
/// Caller owns (and must dispose) the returned detections.
/// </summary>
private List<FullObjectDetection> GetFullObjectDetections(Image image, List<Location> locations)
{
    List<FullObjectDetection> results = [];
    if (_PredictorModel == PredictorModel.Custom)
    {
        if (CustomFaceLandmarkDetector is null)
            throw new NullReferenceException(nameof(CustomFaceLandmarkDetector));
        foreach (Location location in locations)
        {
            FullObjectDetection fullObjectDetection = CustomFaceLandmarkDetector.Detect(image, location);
            results.Add(fullObjectDetection);
        }
    }
    else
    {
        // Pick the built-in 68-point or 5-point shape predictor.
        ShapePredictor posePredictor = _PredictorModel switch
        {
            PredictorModel.Large => _PosePredictor68Point,
            PredictorModel.Small => _PosePredictor5Point,
            PredictorModel.Custom => throw new NotImplementedException(),
            _ => throw new Exception()
        };
        foreach (Location location in locations)
        {
            DlibDotNet.Rectangle rectangle = new(location.Left, location.Top, location.Right, location.Bottom);
            FullObjectDetection fullObjectDetection = posePredictor.Detect(image.Matrix, rectangle);
            results.Add(fullObjectDetection);
        }
    }
    return results;
}
/// <summary>
/// Detects faces and returns their bounds trimmed to the image, disposing
/// each intermediate detector rectangle as it is consumed.
/// </summary>
private List<Location> GetLocations(Image image)
{
    List<Location> results = [];
    MModRect[] mModRects = GetMModRects(image);
    // An empty detection array simply falls through to an empty result list.
    foreach (MModRect? mModRect in mModRects)
    {
        System.Drawing.Rectangle rectangle = new(mModRect.Rect.Left, mModRect.Rect.Top, (int)mModRect.Rect.Width, (int)mModRect.Rect.Height);
        Location location = ILocation.TrimBound(mModRect.DetectionConfidence, rectangle, image.Width, image.Height, mModRects.Length);
        mModRect.Dispose();
        results.Add(location);
    }
    return results;
}
/// <summary>
/// Detects faces (or uses the caller-supplied <paramref name="locations"/>) and returns
/// one <see cref="FaceRecognitionGroup"/> per face, optionally with an encoding and a
/// face-part landmark map.
/// </summary>
/// <param name="image">Image to process; must not be disposed.</param>
/// <param name="locations">Known face locations; when empty, detection runs and the list is filled in place.</param>
/// <param name="includeFaceEncoding">When false, each group's encoding is null.</param>
/// <param name="includeFaceParts">When false, each group's landmark map is null.</param>
public List<FaceRecognitionGroup> GetCollection(Image image, List<Location> locations, bool includeFaceEncoding, bool includeFaceParts)
{
    List<FaceRecognitionGroup> results = [];
    if (image is null)
        throw new NullReferenceException(nameof(image));
    image.ThrowIfDisposed();
    ThrowIfDisposed();
    if (_PredictorModel == PredictorModel.Custom)
        throw new NotSupportedException("FaceRecognition.PredictorModel.Custom is not supported.");
    // NOTE(review): mutates the caller's list when it arrives empty.
    if (locations.Count == 0)
        locations.AddRange(GetLocations(image));
    List<FullObjectDetection> fullObjectDetections = GetFullObjectDetections(image, locations);
    // Sanity: one landmark detection per location is required below.
    if (fullObjectDetections.Count != locations.Count)
        throw new Exception();
    Record record;
    List<Record> records = [];
    foreach (Location location in locations)
    {
        record = new(location, [], []);
        records.Add(record);
    }
    if (locations.Count != records.Count)
        throw new Exception();
    if (!includeFaceEncoding)
    {
        // Placeholder so every record still carries exactly one encoding slot.
        for (int i = 0; i < records.Count; i++)
            records[i].FaceEncodings.Add(null);
    }
    else
    {
        Matrix<double> doubles;
        FaceEncoding faceEncoding;
        for (int i = 0; i < records.Count; i++)
        {
            doubles = FaceRecognitionModelV1.ComputeFaceDescriptor(_FaceEncoder, image, fullObjectDetections[i], _NumberOfJitters);
            faceEncoding = new(doubles);
            records[i].FaceEncodings.Add(faceEncoding);
        }
    }
    if (!includeFaceParts)
    {
        // Placeholder so every record still carries exactly one face-parts slot.
        for (int i = 0; i < records.Count; i++)
            records[i].FaceParts.Add([]);
    }
    else
    {
        List<FacePartAndFacePointArray> faceParts;
        for (int i = 0; i < records.Count; i++)
        {
            faceParts = GetFaceParts(fullObjectDetections[i]);
            records[i].FaceParts.Add(faceParts);
        }
    }
    // Landmark detections are no longer needed once encodings/parts are extracted.
    foreach (FullObjectDetection fullObjectDetection in fullObjectDetections)
        fullObjectDetection.Dispose();
    const int indexZero = 0;
    FaceRecognitionGroup faceRecognitionGroupB;
    Dictionary<FacePart, FacePoint[]> keyValuePairs;
    foreach (Record r in records)
    {
        // Each record was populated with exactly one entry per list above;
        // anything else indicates an inconsistent record and is skipped.
        if (r.FaceEncodings.Count != 1 || r.FaceParts.Count != 1)
            continue;
        if (r.FaceParts[indexZero].Count == 0)
            faceRecognitionGroupB = new(r.Location, r.FaceEncodings[indexZero], null);
        else
        {
            keyValuePairs = [];
            foreach (FacePartAndFacePointArray facePartAndFacePointArray in r.FaceParts[indexZero])
                keyValuePairs.Add(facePartAndFacePointArray.FacePart, facePartAndFacePointArray.FacePoints);
            faceRecognitionGroupB = new(r.Location, r.FaceEncodings[indexZero], keyValuePairs);
        }
        results.Add(faceRecognitionGroupB);
    }
    return results;
}
/// <summary>
/// Wraps a raw 128-element descriptor (dlib ResNet face encoding length) in a <see cref="FaceEncoding"/>.
/// </summary>
/// <param name="encoding">The raw feature values; must contain exactly 128 elements.</param>
/// <exception cref="ArgumentOutOfRangeException"><paramref name="encoding"/> is not 128 elements long.</exception>
public static FaceEncoding LoadFaceEncoding(double[] encoding)
{
    if (encoding is null)
        throw new NullReferenceException(nameof(encoding));
    if (encoding.Length != 128)
    {
        string message = $"{nameof(encoding)}.{nameof(encoding.Length)} must be 128.";
        // Fix: ArgumentOutOfRangeException(string) treats its sole argument as
        // the *parameter name*, so the message was lost; pass both explicitly.
        throw new ArgumentOutOfRangeException(nameof(encoding), message);
    }
#pragma warning disable
    Matrix<double>? matrix = Matrix<double>.CreateTemplateParameterizeMatrix(0, 1);
#pragma warning restore
    matrix.SetSize(128);
    matrix.Assign(encoding);
    return new FaceEncoding(matrix);
}
/// <summary>
/// Wraps a raw 512-element descriptor in a <see cref="FaceEncoding"/>.
/// </summary>
/// <param name="encoding">The raw feature values; must contain exactly 512 elements.</param>
/// <exception cref="ArgumentOutOfRangeException"><paramref name="encoding"/> is not 512 elements long.</exception>
public static FaceEncoding LoadBFaceEncoding(double[] encoding)
{
    if (encoding is null)
        throw new NullReferenceException(nameof(encoding));
    if (encoding.Length != 512)
    {
        string message = $"{nameof(encoding)}.{nameof(encoding.Length)} must be 512.";
        // Fix: ArgumentOutOfRangeException(string) treats its sole argument as
        // the *parameter name*, so the message was lost; pass both explicitly.
        throw new ArgumentOutOfRangeException(nameof(encoding), message);
    }
#pragma warning disable
    Matrix<double>? matrix = Matrix<double>.CreateTemplateParameterizeMatrix(0, 1);
#pragma warning restore
    matrix.SetSize(512);
    matrix.Assign(encoding);
    return new FaceEncoding(matrix);
}
/// <summary>
/// Loads an image file into a dlib matrix wrapped as an <see cref="Image"/>.
/// </summary>
/// <param name="file">Path to the image file.</param>
/// <param name="mode">Pixel interpretation: RGB (default) or greyscale.</param>
/// <exception cref="FileNotFoundException">The file does not exist.</exception>
public static Image LoadImageFile(string file, Mode mode = Mode.Rgb)
{
    if (!File.Exists(file))
        throw new FileNotFoundException(file);
    switch (mode)
    {
        case Mode.Rgb:
            return new Image(DlibDotNet.Dlib.LoadImageAsMatrix<RgbPixel>(file), mode);
        case Mode.Greyscale:
            return new Image(DlibDotNet.Dlib.LoadImageAsMatrix<byte>(file), mode);
        default:
            throw new NotImplementedException();
    }
}
#pragma warning disable CA1416
/// <summary>
/// Converts a GDI+ <see cref="Bitmap"/> into an <see cref="Image"/>, translating
/// BGR(A) pixel order into the RGB (or greyscale) layout dlib expects.
/// Returns null only if the pixel copy falls through without producing a matrix.
/// </summary>
/// <exception cref="ArgumentOutOfRangeException">The bitmap's <see cref="PixelFormat"/> is not 8bpp indexed, 24bpp RGB, or 32bpp (A)RGB.</exception>
public static Image? LoadImage(Bitmap bitmap)
{
    Mode mode;
    int dstChannel;
    int srcChannel;
    int width = bitmap.Width;
    int height = bitmap.Height;
    PixelFormat format = bitmap.PixelFormat;
    System.Drawing.Rectangle rect = new(0, 0, width, height);
#pragma warning disable IDE0010
    switch (format)
    {
        case PixelFormat.Format8bppIndexed:
            // Greyscale: one byte in, one byte out.
            mode = Mode.Greyscale;
            srcChannel = 1;
            dstChannel = 1;
            break;
        case PixelFormat.Format24bppRgb:
            mode = Mode.Rgb;
            srcChannel = 3;
            dstChannel = 3;
            break;
        case PixelFormat.Format32bppRgb:
        case PixelFormat.Format32bppArgb:
            // 32bpp source: the alpha/padding byte is dropped (4 in, 3 out).
            mode = Mode.Rgb;
            srcChannel = 4;
            dstChannel = 3;
            break;
        default:
            throw new ArgumentOutOfRangeException($"{nameof(bitmap)}", $"The specified {nameof(PixelFormat)} is not supported.");
    }
#pragma warning restore IDE0010
    BitmapData? data = null;
    try
    {
        data = bitmap.LockBits(rect, ImageLockMode.ReadOnly, format);
        unsafe
        {
            // Tightly-packed destination buffer (no per-row stride padding).
            byte[]? array = new byte[width * height * dstChannel];
            fixed (byte* pArray = &array[0])
            {
                byte* dst = pArray;
                switch (srcChannel)
                {
                    case 1:
                    {
                        // Greyscale: copy each row verbatim, skipping stride padding.
                        IntPtr src = data.Scan0;
                        int stride = data.Stride;
                        for (int h = 0; h < height; h++)
                            Marshal.Copy(IntPtr.Add(src, h * stride), array, h * width, width * dstChannel);
                    }
                    break;
                    case 3:
                    case 4:
                    {
                        byte* src = (byte*)data.Scan0;
                        int stride = data.Stride;
                        for (int h = 0; h < height; h++)
                        {
                            int srcOffset = h * stride;
                            int dstOffset = h * width * dstChannel;
                            for (int w = 0; w < width; w++)
                            {
                                // BGR order to RGB order
                                dst[dstOffset + (w * dstChannel) + 0] = src[srcOffset + (w * srcChannel) + 2];
                                dst[dstOffset + (w * dstChannel) + 1] = src[srcOffset + (w * srcChannel) + 1];
                                dst[dstOffset + (w * dstChannel) + 2] = src[srcOffset + (w * srcChannel) + 0];
                            }
                        }
                    }
                    break;
                    default:
                        break;
                }
                // The Matrix constructors copy from this pointer while it is pinned.
                IntPtr ptr = (IntPtr)pArray;
                switch (mode)
                {
                    case Mode.Rgb:
                        return new Image(new Matrix<RgbPixel>(ptr, height, width, width * 3), Mode.Rgb);
                    case Mode.Greyscale:
                        return new Image(new Matrix<byte>(ptr, height, width, width), Mode.Greyscale);
                    default:
                        break;
                }
            }
        }
    }
    finally
    {
        // Always release the bitmap bits, even when a conversion branch throws.
        if (data != null)
            bitmap.UnlockBits(data);
    }
    return null;
}
/// <summary>
/// Measures the encoding distance from <paramref name="locationContainer"/> to every
/// container in <paramref name="locationContainers"/> and returns new containers
/// sorted by that distance (scaled to an integer by <paramref name="permyriad"/>).
/// </summary>
/// <param name="permyriad">Scale factor applied to each raw distance before truncating to an int.</param>
public static ReadOnlyCollection<LocationContainer> GetLocationContainers(int permyriad, ReadOnlyCollection<LocationContainer> locationContainers, LocationContainer locationContainer)
{
    List<LocationContainer> results = [];
    int lengthPermyriad;
    if (locationContainers.Count != 0)
    {
        double length;
        LocationContainer result;
        if (locationContainer.Encoding is not FaceEncoding faceEncodingToCompare)
            throw new NullReferenceException(nameof(locationContainer));
        faceEncodingToCompare.ThrowIfDisposed();
        foreach (LocationContainer l in locationContainers)
        {
#pragma warning disable CA1513
            if (l.Encoding is not FaceEncoding faceEncoding || faceEncoding.IsDisposed)
                throw new ObjectDisposedException($"{nameof(l)} contains disposed object.");
#pragma warning restore CA1513
            // Euclidean distance between the two encodings.
            using (Matrix<double> diff = faceEncoding.Encoding - faceEncodingToCompare.Encoding)
                length = DlibDotNet.Dlib.Length(diff);
            lengthPermyriad = (int)(length * permyriad);
            result = LocationContainer.Get(locationContainer, l, lengthPermyriad, keepExifDirectory: false, keepEncoding: false);
            results.Add(result);
        }
    }
    // Closest matches first.
    LocationContainer[] array = results.OrderBy(l => l.LengthPermyriad).ToArray();
    return array.AsReadOnly();
}
/// <summary>
/// Computes the encoding distance from <paramref name="faceDistanceToCompare"/> to every
/// entry in <paramref name="faceDistances"/>, returning a new <see cref="FaceDistance"/>
/// per entry (input order preserved; empty input yields an empty list).
/// </summary>
public static List<FaceDistance> FaceDistances(ReadOnlyCollection<FaceDistance> faceDistances, FaceDistance faceDistanceToCompare)
{
    List<FaceDistance> results = [];
    if (faceDistances.Count != 0)
    {
        double length;
        FaceDistance result;
        if (faceDistanceToCompare.Encoding is not FaceEncoding faceEncodingToCompare)
            throw new NullReferenceException(nameof(faceDistanceToCompare));
        faceEncodingToCompare.ThrowIfDisposed();
        foreach (FaceDistance faceDistance in faceDistances)
        {
#pragma warning disable CA1513
            if (faceDistance.Encoding is not FaceEncoding faceEncoding || faceEncoding.IsDisposed)
                throw new ObjectDisposedException($"{nameof(faceDistances)} contains disposed object.");
#pragma warning restore CA1513
            // Euclidean distance between the two encodings.
            using (Matrix<double> diff = faceEncoding.Encoding - faceEncodingToCompare.Encoding)
                length = DlibDotNet.Dlib.Length(diff);
            result = new(faceDistance, length);
            results.Add(result);
        }
    }
    return results;
}
#pragma warning restore CA1416
/// <summary>
/// Releases the native dlib detectors, predictors, and encoder owned by this instance.
/// </summary>
protected override void DisposeUnmanaged()
{
    base.DisposeUnmanaged();
    _PosePredictor68Point?.Dispose();
    _PosePredictor5Point?.Dispose();
    _CnnFaceDetector?.Dispose();
    _FaceEncoder?.Dispose();
    _FaceDetector?.Dispose();
}
}

View File

@ -0,0 +1,6 @@
using View_by_Distance.Shared.Models;
using View_by_Distance.Shared.Models.Stateless;
namespace View_by_Distance.FaceRecognitionDotNet.Models;
/// <summary>
/// One detected face: its location, its encoding (null when encodings were not requested),
/// and its face-part landmark map (null when face parts were not requested or not found).
/// </summary>
public record FaceRecognitionGroup(Location Location, FaceEncoding? FaceEncoding, Dictionary<FacePart, FacePoint[]>? KeyValuePairs);

View File

@ -0,0 +1,22 @@
namespace View_by_Distance.FaceRecognitionDotNet.Models;
/// <summary>
/// Well-known file names for the dlib model data used by this library.
/// Static holder type (CA1052): the original "sealed class" exposed only static
/// members, so it is now "static" to prevent pointless instantiation.
/// </summary>
internal static class FaceRecognitionModels
{
    public static string GetPosePredictorModelLocation() => "shape_predictor_68_face_landmarks.dat";
    public static string GetPosePredictorFivePointModelLocation() => "shape_predictor_5_face_landmarks.dat";
    public static string GetFaceRecognitionModelLocation() => "dlib_face_recognition_resnet_model_v1.dat";
    public static string GetCnnFaceDetectorModelLocation() => "mmod_human_face_detector.dat";
    public static string GetPosePredictor194PointModelLocation() => "helen-dataset.dat";
    public static string GetAgeNetworkModelLocation() => "adience-age-network.dat";
    public static string GetGenderNetworkModelLocation() => "utkface-gender-network.dat";
    public static string GetEmotionNetworkModelLocation() => "corrective-reannotation-of-fer-ck-kdef-emotion-network_test_best.dat";
}

View File

@ -0,0 +1,129 @@
using DlibDotNet;
using DlibDotNet.Extensions;
using System.Drawing;
using View_by_Distance.Shared.Models.Stateless;
namespace View_by_Distance.FaceRecognitionDotNet.Models;
/// <summary>
/// Represents a image data. This class cannot be inherited.
/// </summary>
/// <summary>
/// Represents a image data. This class cannot be inherited.
/// Wraps a dlib matrix plus the pixel <see cref="Mode"/> it was loaded with.
/// </summary>
public sealed class Image : DisposableObject
{

    #region Fields

    #endregion

    #region Constructors

    // Takes ownership of the matrix; it is disposed with this Image.
    internal Image(MatrixBase matrix, Mode mode)
    {
        Matrix = matrix;
        Mode = mode;
    }

    #endregion

    #region Properties

    /// <summary>
    /// Gets the height of the image.
    /// </summary>
    /// <exception cref="ObjectDisposedException">This object is disposed.</exception>
    public int Height
    {
        get
        {
            ThrowIfDisposed();
            return Matrix.Rows;
        }
    }

    // Underlying dlib matrix (pixel storage).
    internal MatrixBase Matrix { get; private set; }

    // Pixel interpretation the matrix was created with (Rgb or Greyscale).
    internal Mode Mode { get; }

    /// <summary>
    /// Gets the width of the image.
    /// </summary>
    /// <exception cref="ObjectDisposedException">This object is disposed.</exception>
    public int Width
    {
        get
        {
            ThrowIfDisposed();
            return Matrix.Columns;
        }
    }

    #endregion

    #region Methods

    /// <summary>
    /// Saves this <see cref="Image"/> to the specified file, creating the target
    /// directory when necessary. Unknown formats are silently ignored.
    /// </summary>
    /// <param name="fileName">A string that contains the name of the file to which to save this <see cref="Image"/>.</param>
    /// <param name="format">The <see cref="ImageFormat"/> for this <see cref="Image"/>.</param>
    /// <exception cref="NullReferenceException"><paramref name="fileName"/> is null.</exception>
    /// <exception cref="ObjectDisposedException">This object is disposed.</exception>
    public void Save(string fileName, ImageFormat format)
    {
        if (fileName == null)
            throw new NullReferenceException(nameof(fileName));
        ThrowIfDisposed();
        string? directory = Path.GetDirectoryName(fileName);
        if (!Directory.Exists(directory) && !string.IsNullOrWhiteSpace(directory))
            _ = Directory.CreateDirectory(directory);
        switch (format)
        {
            case ImageFormat.Bmp:
                DlibDotNet.Dlib.SaveBmp(Matrix, fileName);
                break;
            case ImageFormat.Jpeg:
                DlibDotNet.Dlib.SaveJpeg(Matrix, fileName);
                break;
            case ImageFormat.Png:
                DlibDotNet.Dlib.SavePng(Matrix, fileName);
                break;
            default:
                break;
        }
    }

    /// <summary>
    /// Converts this <see cref="Image"/> to a GDI+ <see cref="Bitmap"/>.
    /// </summary>
    /// <returns>A <see cref="Bitmap"/> that represents the converted <see cref="Image"/>.</returns>
    /// <exception cref="ObjectDisposedException">This object is disposed.</exception>
    /// <exception cref="NotSupportedException">A Greyscale image is not supported.</exception>
    public Bitmap ToBitmap()
    {
        ThrowIfDisposed();
        if (Mode == Mode.Greyscale)
            throw new NotSupportedException();
        // Safe cast: Rgb-mode images are always backed by a Matrix<RgbPixel>.
        return ((Matrix<RgbPixel>)Matrix).ToBitmap();
    }

    #region Overrides

    /// <summary>
    /// Releases all unmanaged resources (the underlying dlib matrix).
    /// </summary>
    protected override void DisposeUnmanaged()
    {
        base.DisposeUnmanaged();
        Matrix?.Dispose();
    }

    #endregion

    #endregion

}

View File

@ -0,0 +1,49 @@
namespace View_by_Distance.FaceRecognitionDotNet.Models;
/// <summary>
/// Describes the model binary datum. This class cannot be inherited.
/// </summary>
/// <summary>
/// Describes the model binary datum. This class cannot be inherited.
/// Each property holds the raw bytes of one dlib model file; all default to null.
/// </summary>
public sealed class ModelParameter
{
    /// <summary>
    /// Gets or sets the binary data of model for 68 points face landmarks.
    /// </summary>
    public byte[]? PosePredictor68FaceLandmarksModel { get; set; }

    /// <summary>
    /// Gets or sets the binary data of model for 5 points face landmarks.
    /// </summary>
    public byte[]? PosePredictor5FaceLandmarksModel { get; set; }

    /// <summary>
    /// Gets or sets the binary data of model for face encoding.
    /// </summary>
    public byte[]? FaceRecognitionModel { get; set; }

    /// <summary>
    /// Gets or sets the binary data of model for face detector by using CNN.
    /// </summary>
    public byte[]? CnnFaceDetectorModel { get; set; }
}

View File

@ -0,0 +1,107 @@
namespace View_by_Distance.FaceRecognitionDotNet.Models;
/// <summary>
/// Represents an ordered pair of integer x- and y-coordinates that defines a point in a two-dimensional plane.
/// </summary>
/// <summary>
/// Represents an ordered pair of integer x- and y-coordinates that defines a point in a two-dimensional plane.
/// </summary>
public readonly struct Point : IEquatable<Point>
{
    /// <summary>
    /// Gets the x-coordinate of this <see cref="Point"/>.
    /// </summary>
    public int X { get; }

    /// <summary>
    /// Gets the y-coordinate of this <see cref="Point"/>.
    /// </summary>
    public int Y { get; }

    /// <summary>
    /// Initializes a new instance of the <see cref="Point"/> structure with the specified coordinates.
    /// </summary>
    /// <param name="x">The horizontal position of the point.</param>
    /// <param name="y">The vertical position of the point.</param>
    public Point(int x, int y)
    {
        X = x;
        Y = y;
    }

    // Converts from the dlib point type.
    internal Point(DlibDotNet.Point point)
        : this(point.X, point.Y)
    {
    }

    /// <summary>
    /// Compares two <see cref="Point"/> structures for equality.
    /// </summary>
    /// <param name="other">The point to compare to this instance.</param>
    /// <returns><code>true</code> if both points share the same <see cref="X"/> and <see cref="Y"/>; otherwise, <code>false</code>.</returns>
    public bool Equals(Point other) => X == other.X && Y == other.Y;

    /// <summary>
    /// Determines whether <paramref name="obj"/> is a <see cref="Point"/> with the same coordinates as this one.
    /// </summary>
    /// <param name="obj">The <see cref="object"/> to compare.</param>
    /// <returns><code>true</code> when <paramref name="obj"/> is an equal <see cref="Point"/>; otherwise, <code>false</code>.</returns>
    public override bool Equals(object? obj) => obj is Point point && Equals(point);

    /// <summary>
    /// Returns the hash code for this <see cref="Point"/>.
    /// </summary>
    /// <returns>The hash code for this <see cref="Point"/> structure.</returns>
#pragma warning disable IDE0070
    public override int GetHashCode()
#pragma warning restore IDE0070
    {
        // Same seed and multiplier as the original implementation so hash
        // values are unchanged for existing consumers.
        int hash = 1861411795;
        hash = hash * -1521134295 + X.GetHashCode();
        hash = hash * -1521134295 + Y.GetHashCode();
        return hash;
    }

    /// <summary>
    /// Compares two <see cref="Point"/> structures for equality.
    /// </summary>
    /// <param name="point1">The first <see cref="Point"/> structure to compare.</param>
    /// <param name="point2">The second <see cref="Point"/> structure to compare.</param>
    /// <returns><code>true</code> when both coordinates match; otherwise, <code>false</code>.</returns>
    public static bool operator ==(Point point1, Point point2) => point1.Equals(point2);

    /// <summary>
    /// Compares two <see cref="Point"/> structures for inequality.
    /// </summary>
    /// <param name="point1">The first <see cref="Point"/> structure to compare.</param>
    /// <param name="point2">The second <see cref="Point"/> structure to compare.</param>
    /// <returns><code>true</code> when the points differ in <see cref="X"/> or <see cref="Y"/>; otherwise, <code>false</code>.</returns>
    public static bool operator !=(Point point1, Point point2) => !(point1 == point2);
}