From 2ebec0b7a96c450f2198e7386c46f673f288e429 Mon Sep 17 00:00:00 2001 From: Mike Phares Date: Sat, 30 Jul 2022 16:43:23 -0700 Subject: [PATCH] Added Class Library FaceRecognitionDotNet --- Compare/Compare.cs | 2 +- Compare/appsettings.Development.json | 4 +- Compare/appsettings.json | 2 +- Date-Group/appsettings.Development.json | 2 +- FaceRecognitionDotNet/DisposableObject.cs | 128 +++ .../Dlib/Python/CnnFaceDetectionModelV1.cs | 111 +++ .../Dlib/Python/FaceRecognitionModelV1.cs | 129 +++ .../Dlib/Python/SimpleObjectDetector.cs | 169 ++++ .../Extensions/FaceDetector.cs | 26 + .../Extensions/FaceLandmarkDetector.cs | 36 + FaceRecognitionDotNet/FaceEncoding.cs | 105 +++ FaceRecognitionDotNet/FaceRecognition.cs | 871 ++++++++++++++++++ .../FaceRecognitionDotNet.csproj | 52 ++ .../FaceRecognitionModels.cs | 22 + FaceRecognitionDotNet/Image.cs | 127 +++ FaceRecognitionDotNet/ModelParameter.cs | 49 + FaceRecognitionDotNet/Point.cs | 114 +++ Instance/DlibDotNet.cs | 17 +- Instance/Instance.csproj | 21 +- Instance/Models/_A2_People.cs | 14 + Instance/Models/_D2_FaceLandmark.cs | 8 +- Instance/Models/_D_Face.cs | 131 +-- Instance/Models/_E2_Navigate.cs | 2 +- Instance/Models/_E3_Rename.cs | 2 +- Instance/Models/_E_Distance.cs | 9 +- Instance/Models/_G_Index.cs | 2 +- Instance/Program.cs | 1 - Instance/appsettings.Development.json | 5 +- Instance/appsettings.Staging.json | 3 +- Property/Models/A_Property.cs | 2 +- Property/Models/PropertyLogic.cs | 111 ++- Resize/Models/_C_Resize.cs | 10 +- Shared/Models/FacePoint.cs | 30 + Shared/Models/Location.cs | 40 +- Shared/Models/Stateless/FacePart.cs | 59 ++ Shared/Models/Stateless/ImageFormat.cs | 24 + .../Stateless/Methods/IPersonBirthday.cs | 3 + .../Stateless/Methods/PersonBirthday.cs | 4 +- Shared/Models/Stateless/Mode.cs | 19 + Shared/Models/Stateless/Model.cs | 24 + Shared/Models/Stateless/PredictorModel.cs | 24 + Tests/UnitTestExample.cs | 10 +- Tests/UnitTestResize.cs | 12 +- Tests/appsettings.Development.json | 5 +- View-by-Distance-MKLink-Console.sln | 6 + 45 files changed, 2398 insertions(+), 149 deletions(-) create mode 100644 FaceRecognitionDotNet/DisposableObject.cs create mode 100644 FaceRecognitionDotNet/Dlib/Python/CnnFaceDetectionModelV1.cs create mode 100644 FaceRecognitionDotNet/Dlib/Python/FaceRecognitionModelV1.cs create mode 100644 FaceRecognitionDotNet/Dlib/Python/SimpleObjectDetector.cs create mode 100644 FaceRecognitionDotNet/Extensions/FaceDetector.cs create mode 100644 FaceRecognitionDotNet/Extensions/FaceLandmarkDetector.cs create mode 100644 FaceRecognitionDotNet/FaceEncoding.cs create mode 100644 FaceRecognitionDotNet/FaceRecognition.cs create mode 100644 FaceRecognitionDotNet/FaceRecognitionDotNet.csproj create mode 100644 FaceRecognitionDotNet/FaceRecognitionModels.cs create mode 100644 FaceRecognitionDotNet/Image.cs create mode 100644 FaceRecognitionDotNet/ModelParameter.cs create mode 100644 FaceRecognitionDotNet/Point.cs create mode 100644 Shared/Models/Stateless/FacePart.cs create mode 100644 Shared/Models/Stateless/ImageFormat.cs create mode 100644 Shared/Models/Stateless/Mode.cs create mode 100644 Shared/Models/Stateless/Model.cs create mode 100644 Shared/Models/Stateless/PredictorModel.cs diff --git a/Compare/Compare.cs b/Compare/Compare.cs index 34197b1..1aa8192 100644 --- a/Compare/Compare.cs +++ b/Compare/Compare.cs @@ -160,7 +160,7 @@ public class Compare ticks = LogDelta(ticks, nameof(Property.Models.Stateless.A_Property.GetGroupCollection)); } PropertyLogic propertyLogic = GetPropertyLogic(); - if 
(_IsEnvironment.Development && propertyConfiguration.PopulatePropertyId.Value && !propertyLogic.IndicesFromOld.Any()) + if (_IsEnvironment.Development && propertyConfiguration.PopulatePropertyId.Value && !propertyLogic.KeyValuePairs.Any()) throw new Exception("Copy keyValuePairs-####.json file"); List propertyHolderCollections = Property.Models.Stateless.A_Property.Get(propertyConfiguration, reverse, modelName, predictorModelName, propertyLogic); if (!isSilent) diff --git a/Compare/appsettings.Development.json b/Compare/appsettings.Development.json index 3dfd636..ec8990f 100644 --- a/Compare/appsettings.Development.json +++ b/Compare/appsettings.Development.json @@ -87,14 +87,14 @@ "Pattern": "[^ABCDEFGHIJKLMNOPQRSTUVWXYZbcdfghjklmnpqrstvwxyz0-9]", "PopulatePropertyId": true, "PropertiesChangedForProperty": false, - "RootDirectory": "C:/Tmp/Phares/Compare/Images 2022-07-27 - 20220727 - III", + "RootDirectory": "C:/Tmp/Phares/Compare/Images 2022-07-27 - f642c5669a1d89d598a2efd70da9dc7129d02c15 - III", "WriteBitmapDataBytes": false, "IgnoreExtensions": [ ".gif", ".GIF" ], "PropertyContentCollectionFiles": [ - "/Images 2022-07-27 - 20220727 - III - Results/A) Property/2022-07-27/[()]/637869381676042455.json", + "/Images 2022-07-27 - f642c5669a1d89d598a2efd70da9dc7129d02c15 - III - Results/A) Property/2022-07-27/[()]/637869381676042455.json", "/Not-Copy-Copy/Images 2019-06-08 - 34a9240ac28b52da97428d7725153a80a757ee6b - Not-Copy-Copy - Results/A) Property/2022-07-27/[()]/637869733124119330.json", "/Not-Copy-Copy/Images 2018-12-25 - 34a9240ac28b52da97428d7725153a80a757ee6b - Not-Copy-Copy - Results/A) Property/2022-07-27/[()]/637869734240700328.json", "/Not-Copy-Copy/Images 2018-05-12 - b01d4763d8853b6d6057a3870b2723449726da75 - Not-Copy-Copy - Results/A) Property/2022-07-27/[()]/637869734970730630.json", diff --git a/Compare/appsettings.json b/Compare/appsettings.json index d2d9bb8..99122cc 100644 --- a/Compare/appsettings.json +++ b/Compare/appsettings.json @@ -94,7 +94,7 @@ ".GIF" ], "PropertyContentCollectionFiles": [ - "/Images 2022-07-27 - 20220727 - III - Results/A) Property/2022-07-27/[()]/637869381676042455.json", + "/Images 2022-07-27 - f642c5669a1d89d598a2efd70da9dc7129d02c15 - III - Results/A) Property/2022-07-27/[()]/637869381676042455.json", "/Not-Copy-Copy/Images 2019-06-08 - 34a9240ac28b52da97428d7725153a80a757ee6b - Not-Copy-Copy - Results/A) Property/2022-07-27/[()]/637869733124119330.json", "/Not-Copy-Copy/Images 2018-12-25 - 34a9240ac28b52da97428d7725153a80a757ee6b - Not-Copy-Copy - Results/A) Property/2022-07-27/[()]/637869734240700328.json", "/Not-Copy-Copy/Images 2018-05-12 - b01d4763d8853b6d6057a3870b2723449726da75 - Not-Copy-Copy - Results/A) Property/2022-07-27/[()]/637869734970730630.json", diff --git a/Date-Group/appsettings.Development.json b/Date-Group/appsettings.Development.json index aacc305..38ebe5f 100644 --- a/Date-Group/appsettings.Development.json +++ b/Date-Group/appsettings.Development.json @@ -63,7 +63,7 @@ "Pattern": "[^ABCDEFGHIJKLMNOPQRSTUVWXYZbcdfghjklmnpqrstvwxyz0-9]", "PopulatePropertyId": true, "PropertiesChangedForProperty": false, - "RootDirectory": "C:/Tmp/Phares/Compare/Images 2022-07-27 - 20220727 - III", + "RootDirectory": "C:/Tmp/Phares/Compare/Images 2022-07-27 - f642c5669a1d89d598a2efd70da9dc7129d02c15 - III", "WriteBitmapDataBytes": false, "IgnoreExtensions": [ ".gif", diff --git a/FaceRecognitionDotNet/DisposableObject.cs b/FaceRecognitionDotNet/DisposableObject.cs new file mode 100644 index 0000000..1c9ed95 --- /dev/null +++ 
b/FaceRecognitionDotNet/DisposableObject.cs @@ -0,0 +1,128 @@ +namespace View_by_Distance.FaceRecognitionDotNet; + +/// +/// Represents a class which has managed or unmanaged resources. +/// +public abstract class DisposableObject : IDisposable +{ + + #region Properties + + /// + /// Gets a value indicating whether this instance has been disposed. + /// + /// true if this instance has been disposed; otherwise, false. + public bool IsDisposed + { + get; + private set; + /* Unmerged change from project 'FaceRecognitionotNet(netstandard2.0)' + Before: + /// If this object is disposed, then is thrown. + After: + /// If this object is disposed, then is thrown. + */ + + } + + #endregion + + #region Methods + + /// + /// If this object is disposed, then is thrown. + /// + public void ThrowIfDisposed() + { + if (IsDisposed) + throw new ObjectDisposedException(GetType().FullName); + } + + internal void ThrowIfDisposed(string objectName) + { + if (IsDisposed) + throw new ObjectDisposedException(objectName); + } + + #region Overrides + + /// + /// Releases all managed resources. + /// + protected virtual void DisposeManaged() + { + + } + + /// + /// Releases all unmanaged resources. + /// + protected virtual void DisposeUnmanaged() + { + + } + + #endregion + + #endregion + + #region IDisposable Members + + /// + /// Releases all resources used by this . + /// + public void Dispose() + { + GC.SuppressFinalize(this); + /* Unmerged change from project 'FaceRecognitionotNet(netstandard2.0)' + Before: + Dispose(true); + After: + Dispose(true); + */ + + Dispose(true); + } + + /// + /// Releases all resources used by this . + /// + /// Indicate value whether method was called. + private void Dispose(bool disposing) + { + if (IsDisposed) + { + return; + /* Unmerged change from project 'FaceRecognitionotNet(netstandard2.0)' + Before: + IsDisposed = true; + After: + IsDisposed = true; + */ + + } + + IsDisposed = true; + + if (disposing) + /* Unmerged change from project 'FaceRecognitionotNet(netstandard2.0)' + Before: + DisposeManaged(); + After: + DisposeManaged(); + */ + + DisposeManaged(); + /* Unmerged change from project 'FaceRecognitionotNet(netstandard2.0)' + Before: + DisposeUnmanaged(); + After: + DisposeUnmanaged(); + */ + + DisposeUnmanaged(); + } + + #endregion + +} \ No newline at end of file diff --git a/FaceRecognitionDotNet/Dlib/Python/CnnFaceDetectionModelV1.cs b/FaceRecognitionDotNet/Dlib/Python/CnnFaceDetectionModelV1.cs new file mode 100644 index 0000000..c4636a2 --- /dev/null +++ b/FaceRecognitionDotNet/Dlib/Python/CnnFaceDetectionModelV1.cs @@ -0,0 +1,111 @@ +using DlibDotNet; +using DlibDotNet.Dnn; +using View_by_Distance.Shared.Models.Stateless; + +namespace View_by_Distance.FaceRecognitionDotNet.Dlib.Python; + +internal sealed class CnnFaceDetectionModelV1 +{ + + #region Methods + + public static IEnumerable Detect(LossMmod net, Image image, int upsampleNumTimes) + { + using PyramidDown? pyr = new(2); + List? rects = new(); + + // Copy the data into dlib based objects + using Matrix? matrix = new(); + Mode type = image.Mode; + switch (type) + { + case Mode.Greyscale: + case Mode.Rgb: + DlibDotNet.Dlib.AssignImage(image.Matrix, matrix); + break; + default: + throw new NotSupportedException("Unsupported image type, must be 8bit gray or RGB image."); + } + + // Upsampling the image will allow us to detect smaller faces but will cause the + // program to use more RAM and run longer. 
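// For example, upsampleNumTimes == 2 doubles the image twice, so a 640x480 input is scanned at 2560x1920 (~16x the pixels), finding smaller faces at a roughly proportional memory and time cost.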
+ int levels = upsampleNumTimes; + while (levels > 0) + { + levels--; + DlibDotNet.Dlib.PyramidUp(matrix, 2); + } + + OutputLabels>? dets = net.Operator(matrix); + + // Scale the detection locations back to the original image size + // if the image was upscaled. + foreach (MModRect? d in dets.First()) + { + DRectangle drect = pyr.RectDown(new DRectangle(d.Rect), (uint)upsampleNumTimes); + d.Rect = new Rectangle((int)drect.Left, (int)drect.Top, (int)drect.Right, (int)drect.Bottom); + rects.Add(d); + } + + return rects; + } + + public static IEnumerable> DetectMulti(LossMmod net, IEnumerable images, int upsampleNumTimes, int batchSize = 128) + { + List>? destImages = new(); + List>? allRects = new(); + + try + { + using PyramidDown? pyr = new(2); + // Copy the data into dlib based objects + foreach (Image? image in images) + { + Matrix? matrix = new(); + Mode type = image.Mode; + switch (type) + { + case Mode.Greyscale: + case Mode.Rgb: + DlibDotNet.Dlib.AssignImage(image.Matrix, matrix); + break; + default: + throw new NotSupportedException("Unsupported image type, must be 8bit gray or RGB image."); + } + + for (int i = 0; i < upsampleNumTimes; i++) + DlibDotNet.Dlib.PyramidUp(matrix); + + destImages.Add(matrix); + } + + for (int i = 1; i < destImages.Count; i++) + if (destImages[i - 1].Columns != destImages[i].Columns || destImages[i - 1].Rows != destImages[i].Rows) + throw new ArgumentException("Images in list must all have the same dimensions."); + + OutputLabels>? dets = net.Operator(destImages, (ulong)batchSize); + foreach (IEnumerable? det in dets) + { + List? rects = new(); + foreach (MModRect? d in det) + { + DRectangle drect = pyr.RectDown(new DRectangle(d.Rect), (uint)upsampleNumTimes); + d.Rect = new Rectangle((int)drect.Left, (int)drect.Top, (int)drect.Right, (int)drect.Bottom); + rects.Add(d); + } + + allRects.Add(rects); + } + } + finally + { + foreach (Matrix? matrix in destImages) + matrix.Dispose(); + } + + return allRects; + } + + #endregion + +} \ No newline at end of file diff --git a/FaceRecognitionDotNet/Dlib/Python/FaceRecognitionModelV1.cs b/FaceRecognitionDotNet/Dlib/Python/FaceRecognitionModelV1.cs new file mode 100644 index 0000000..f9140d2 --- /dev/null +++ b/FaceRecognitionDotNet/Dlib/Python/FaceRecognitionModelV1.cs @@ -0,0 +1,129 @@ +using DlibDotNet; +using DlibDotNet.Dnn; + +namespace View_by_Distance.FaceRecognitionDotNet.Dlib.Python; + +internal sealed class FaceRecognitionModelV1 +{ + + #region Methods + + public static Matrix ComputeFaceDescriptor(LossMetric net, Image img, FullObjectDetection face, int numJitters) + { + FullObjectDetection[]? faces = new[] { face }; + return ComputeFaceDescriptors(net, img, faces, numJitters).First(); + } + + public static IEnumerable> ComputeFaceDescriptors(LossMetric net, Image img, IEnumerable faces, int numJitters) + { + Image[]? batchImage = new[] { img }; + IEnumerable[]? batchFaces = new[] { faces }; + return BatchComputeFaceDescriptors(net, batchImage, batchFaces, numJitters).First(); + } + + public static IEnumerable>> BatchComputeFaceDescriptors(LossMetric net, + IList batchImages, + IList> batchFaces, + int numJitters) + { + if (batchImages.Count != batchFaces.Count) + throw new ArgumentException("The array of images and the array of array of locations must be of the same size"); + + foreach (IEnumerable? faces in batchFaces) + foreach (FullObjectDetection? 
f in faces) + { + if (f.Parts is not 68 and not 5) + throw new ArgumentException("The full_object_detection must use the iBUG 300W 68 point face landmark style or dlib's 5 point style."); + } + + List>>? faceChipsArray = new(batchImages.Count); + List>? faceChips = new(); + for (int i = 0; i < batchImages.Count; ++i) + { + IEnumerable? faces = batchFaces[i]; + Image? img = batchImages[i]; + + List? dets = new(faces.Count()); + foreach (FullObjectDetection? f in faces) + dets.Add(DlibDotNet.Dlib.GetFaceChipDetails(f, 150, 0.25)); + + Array>? thisImageFaceChips = DlibDotNet.Dlib.ExtractImageChips(img.Matrix, dets); + foreach (Matrix? chip in thisImageFaceChips) + faceChips.Add(chip); + faceChipsArray.Add(thisImageFaceChips); + + foreach (ChipDetails? det in dets) + det.Dispose(); + } + + List>>? faceDescriptors = new(); + for (int i = 0, count = batchImages.Count; i < count; i++) + faceDescriptors.Add(new List>()); + + if (numJitters <= 1) + { + // extract descriptors and convert from float vectors to double vectors + OutputLabels>? descriptors = net.Operator(faceChips, 16); + int index = 0; + Matrix[]? list = descriptors.Select(matrix => matrix).ToArray(); + for (int i = 0; i < batchFaces.Count; ++i) + for (int j = 0; j < batchFaces[i].Count(); ++j) + faceDescriptors[i].Add(DlibDotNet.Dlib.MatrixCast(list[index++])); + + if (index != list.Length) + throw new ApplicationException(); + } + else + { + // extract descriptors and convert from float vectors to double vectors + int index = 0; + for (int i = 0; i < batchFaces.Count; ++i) + for (int j = 0; j < batchFaces[i].Count(); ++j) + { + Matrix[]? tmp = JitterImage(faceChips[index++], numJitters).ToArray(); + using (OutputLabels>? tmp2 = net.Operator(tmp, 16)) + using (MatrixOp? mat = DlibDotNet.Dlib.Mat(tmp2)) + { + Matrix? r = DlibDotNet.Dlib.Mean(mat); + faceDescriptors[i].Add(r); + } + + foreach (Matrix? matrix in tmp) + matrix.Dispose(); + } + + if (index != faceChips.Count) + throw new ApplicationException(); + } + + if (faceChipsArray.Any()) + { + foreach (Array>? array in faceChipsArray) + { + foreach (Matrix? faceChip in array) + faceChip.Dispose(); + array.Dispose(); + } + } + + return faceDescriptors; + } + + #region Helpers + + private static readonly Rand _Rand = new(); + + private static IEnumerable> JitterImage(Matrix img, int numJitters) + { + List>? crops = new(); + for (int i = 0; i < numJitters; ++i) + crops.Add(DlibDotNet.Dlib.JitterImage(img, _Rand)); + + return crops; + } + + #endregion + + #endregion + +} \ No newline at end of file diff --git a/FaceRecognitionDotNet/Dlib/Python/SimpleObjectDetector.cs b/FaceRecognitionDotNet/Dlib/Python/SimpleObjectDetector.cs new file mode 100644 index 0000000..57f36d2 --- /dev/null +++ b/FaceRecognitionDotNet/Dlib/Python/SimpleObjectDetector.cs @@ -0,0 +1,169 @@ +using DlibDotNet; +using View_by_Distance.Shared.Models.Stateless; + +namespace View_by_Distance.FaceRecognitionDotNet.Dlib.Python; + +internal sealed class SimpleObjectDetector +{ + + #region Methods + + public static IEnumerable RunDetectorWithUpscale1(FrontalFaceDetector detector, + Image img, + uint upsamplingAmount, + double adjustThreshold, + List detectionConfidences, + List weightIndices) + { + List? rectangles = new(); + + if (img.Mode == Mode.Greyscale) + { + Matrix? greyscaleMatrix = img.Matrix as Matrix; + if (upsamplingAmount == 0) + { + detector.Operator(greyscaleMatrix, out IEnumerable? rectDetections, adjustThreshold); + + RectDetection[]? 
dets = rectDetections.ToArray(); + SplitRectDetections(dets, rectangles, detectionConfidences, weightIndices); + + foreach (RectDetection? rectDetection in dets) + rectDetection.Dispose(); + } + else + { + using PyramidDown? pyr = new(2); + Matrix? temp = null; + + try + { + DlibDotNet.Dlib.PyramidUp(greyscaleMatrix, pyr, out temp); + + uint levels = upsamplingAmount - 1; + while (levels > 0) + { + levels--; + DlibDotNet.Dlib.PyramidUp(temp); + } + + detector.Operator(temp, out IEnumerable? rectDetections, adjustThreshold); + + RectDetection[]? dets = rectDetections.ToArray(); + foreach (RectDetection? t in dets) + t.Rect = pyr.RectDown(t.Rect, upsamplingAmount); + + SplitRectDetections(dets, rectangles, detectionConfidences, weightIndices); + + foreach (RectDetection? rectDetection in dets) + rectDetection.Dispose(); + } + finally + { + temp?.Dispose(); + } + } + + return rectangles; + } + else + { + Matrix? rgbMatrix = img.Matrix as Matrix; + if (upsamplingAmount == 0) + { + detector.Operator(rgbMatrix, out IEnumerable? rectDetections, adjustThreshold); + + RectDetection[]? dets = rectDetections.ToArray(); + SplitRectDetections(dets, rectangles, detectionConfidences, weightIndices); + + foreach (RectDetection? rectDetection in dets) + rectDetection.Dispose(); + } + else + { + using PyramidDown? pyr = new(2); + Matrix? temp = null; + + try + { + DlibDotNet.Dlib.PyramidUp(rgbMatrix, pyr, out temp); + + uint levels = upsamplingAmount - 1; + while (levels > 0) + { + levels--; + DlibDotNet.Dlib.PyramidUp(temp); + } + + detector.Operator(temp, out IEnumerable? rectDetections, adjustThreshold); + + RectDetection[]? dets = rectDetections.ToArray(); + foreach (RectDetection? t in dets) + t.Rect = pyr.RectDown(t.Rect, upsamplingAmount); + + SplitRectDetections(dets, rectangles, detectionConfidences, weightIndices); + + foreach (RectDetection? rectDetection in dets) + rectDetection.Dispose(); + } + finally + { + temp?.Dispose(); + } + } + + return rectangles; + } + } + + public static IEnumerable> RunDetectorWithUpscale2(FrontalFaceDetector detector, + Image image, + uint upsamplingAmount) + { + if (detector == null) + throw new ArgumentNullException(nameof(detector)); + if (image == null) + throw new ArgumentNullException(nameof(image)); + + detector.ThrowIfDisposed(); + image.ThrowIfDisposed(); + + List? detectionConfidences = new(); + List? weightIndices = new(); + const double adjustThreshold = 0.0; + + Rectangle[]? rects = RunDetectorWithUpscale1(detector, + image, + upsamplingAmount, + adjustThreshold, + detectionConfidences, + weightIndices).ToArray(); + + int index = 0; + foreach (Rectangle rect in rects) + yield return new Tuple(rect, detectionConfidences[index++]); + } + + #region Helpers + + private static void SplitRectDetections(RectDetection[] rectDetections, + List rectangles, + List detectionConfidences, + List weightIndices) + { + rectangles.Clear(); + detectionConfidences.Clear(); + weightIndices.Clear(); + + foreach (RectDetection? 
rectDetection in rectDetections) + { + rectangles.Add(rectDetection.Rect); + detectionConfidences.Add(rectDetection.DetectionConfidence); + weightIndices.Add(rectDetection.WeightIndex); + } + } + + #endregion + + #endregion + +} \ No newline at end of file diff --git a/FaceRecognitionDotNet/Extensions/FaceDetector.cs b/FaceRecognitionDotNet/Extensions/FaceDetector.cs new file mode 100644 index 0000000..d04b035 --- /dev/null +++ b/FaceRecognitionDotNet/Extensions/FaceDetector.cs @@ -0,0 +1,26 @@ +using DlibDotNet; +using View_by_Distance.Shared.Models; + +namespace View_by_Distance.FaceRecognitionDotNet.Extensions; + +/// +/// An abstract base class that provides functionality to detect face locations from image. +/// +public abstract class FaceDetector : DisposableObject +{ + + #region Methods + + internal IEnumerable Detect(Image image, int numberOfTimesToUpsample) => RawDetect(image.Matrix, numberOfTimesToUpsample); + + /// + /// Returns an enumerable collection of face location correspond to all faces in specified image. + /// + /// The matrix contains a face. + /// The number of times to up-sample the image when finding faces. + /// An enumerable collection of face location correspond to all faces. + protected abstract IEnumerable RawDetect(MatrixBase matrix, int numberOfTimesToUpsample); + + #endregion + +} \ No newline at end of file diff --git a/FaceRecognitionDotNet/Extensions/FaceLandmarkDetector.cs b/FaceRecognitionDotNet/Extensions/FaceLandmarkDetector.cs new file mode 100644 index 0000000..a222887 --- /dev/null +++ b/FaceRecognitionDotNet/Extensions/FaceLandmarkDetector.cs @@ -0,0 +1,36 @@ +using DlibDotNet; +using View_by_Distance.Shared.Models; +using View_by_Distance.Shared.Models.Stateless; + +namespace View_by_Distance.FaceRecognitionDotNet.Extensions; + +/// +/// An abstract base class that provides functionality to detect face parts locations from face image. +/// +public abstract class FaceLandmarkDetector : DisposableObject +{ + + #region Methods + + internal FullObjectDetection Detect(Image image, Location location) => RawDetect(image.Matrix, location); + + internal IEnumerable>> GetLandmarks(IEnumerable landmarkTuples) => RawGetLandmarks(landmarkTuples); + + /// + /// Returns an object contains information of face parts corresponds to specified location in specified image. + /// + /// The matrix contains a face. + /// The location rectangle for a face. + /// An object contains information of face parts. + protected abstract FullObjectDetection RawDetect(MatrixBase matrix, Location location); + + /// + /// Returns an enumerable collection of dictionary of face parts locations (eyes, nose, etc). + /// + /// The enumerable collection of face parts location. + /// An enumerable collection of dictionary of face parts locations (eyes, nose, etc). + protected abstract IEnumerable>> RawGetLandmarks(IEnumerable landmarkTuples); + + #endregion + +} \ No newline at end of file diff --git a/FaceRecognitionDotNet/FaceEncoding.cs b/FaceRecognitionDotNet/FaceEncoding.cs new file mode 100644 index 0000000..cfaf1ab --- /dev/null +++ b/FaceRecognitionDotNet/FaceEncoding.cs @@ -0,0 +1,105 @@ +using DlibDotNet; +using System.Runtime.Serialization; + +namespace View_by_Distance.FaceRecognitionDotNet; + +/// +/// Represents a feature data of face. This class cannot be inherited. 
+/// +[Serializable] +public sealed class FaceEncoding : DisposableObject, ISerializable +{ + + #region Fields + + [NonSerialized] + private readonly Matrix _Encoding; + + #endregion + + #region Constructors + + internal FaceEncoding(Matrix encoding) => _Encoding = encoding; + + private FaceEncoding(SerializationInfo info, StreamingContext context) + { + if (info == null) + throw new ArgumentNullException(nameof(info)); + + double[]? array = info.GetValue(nameof(_Encoding), typeof(double[])) as double[]; + int? row = (int?)info.GetValue(nameof(_Encoding.Rows), typeof(int)); + int? column = (int?)info.GetValue(nameof(_Encoding.Columns), typeof(int)); + if (row is null) + throw new Exception($"{nameof(row)} is null"); + if (column is null) + throw new Exception($"{nameof(column)} is null"); + _Encoding = new Matrix(array, row.Value, column.Value); + } + + #endregion + + #region Properties + + internal Matrix Encoding => _Encoding; + + /// + /// Gets the size of feature data. + /// + /// This object is disposed. + public int Size + { + get + { + ThrowIfDisposed(); + return _Encoding.Size; + } + } + + #endregion + + #region Methods + + /// + /// Gets a feature data of face as raw format. + /// + /// A array that represents a feature data. + /// class supports serialization. This method is for interoperability between FaceRecognitionotNet and dlib. + /// This object is disposed. + public double[] GetRawEncoding() + { + ThrowIfDisposed(); + return _Encoding.ToArray(); + } + + #region Overrides + + /// + /// Releases all unmanaged resources. + /// + protected override void DisposeUnmanaged() + { + base.DisposeUnmanaged(); + _Encoding?.Dispose(); + } + + #endregion + + #endregion + + #region ISerializable Members + + /// + /// Populates a with the data needed to serialize the target object. + /// + /// The to populate with data. + /// The destination (see ) for this serialization. + public void GetObjectData(SerializationInfo info, StreamingContext context) + { + info.AddValue(nameof(_Encoding), _Encoding.ToArray()); + info.AddValue(nameof(_Encoding.Rows), _Encoding.Rows); + info.AddValue(nameof(_Encoding.Columns), _Encoding.Columns); + } + + #endregion + +} \ No newline at end of file diff --git a/FaceRecognitionDotNet/FaceRecognition.cs b/FaceRecognitionDotNet/FaceRecognition.cs new file mode 100644 index 0000000..1e6a806 --- /dev/null +++ b/FaceRecognitionDotNet/FaceRecognition.cs @@ -0,0 +1,871 @@ +using DlibDotNet; +using DlibDotNet.Dnn; +using System.Drawing; +using System.Drawing.Imaging; +using System.Runtime.InteropServices; +using System.Text; +using View_by_Distance.FaceRecognitionDotNet.Dlib.Python; +using View_by_Distance.FaceRecognitionDotNet.Extensions; +using View_by_Distance.Shared.Models; +using View_by_Distance.Shared.Models.Stateless; + +namespace View_by_Distance.FaceRecognitionDotNet; + +/// +/// Provides the method to find and recognize face methods. This class cannot be inherited. +/// +public sealed class FaceRecognition : DisposableObject +{ + + #region Fields + + private readonly ShapePredictor _PosePredictor68Point; + + private readonly ShapePredictor _PosePredictor5Point; + + private readonly LossMmod _CnnFaceDetector; + + private readonly LossMetric _FaceEncoder; + + private readonly FrontalFaceDetector _FaceDetector; + + #endregion + + #region Constructors + + /// + /// Initializes a new instance of the class with the directory path that stores model files. + /// + /// The directory path that stores model files. + /// The model file is not found. 
+ /// The specified directory path is not found. + private FaceRecognition(string directory) + { + if (!Directory.Exists(directory)) + throw new DirectoryNotFoundException(directory); + + string? predictor68PointModel = Path.Combine(directory, FaceRecognitionModels.GetPosePredictorModelLocation()); + if (!File.Exists(predictor68PointModel)) + throw new FileNotFoundException(predictor68PointModel); + + string? predictor5PointModel = Path.Combine(directory, FaceRecognitionModels.GetPosePredictorFivePointModelLocation()); + if (!File.Exists(predictor5PointModel)) + throw new FileNotFoundException(predictor5PointModel); + + string? cnnFaceDetectionModel = Path.Combine(directory, FaceRecognitionModels.GetCnnFaceDetectorModelLocation()); + if (!File.Exists(cnnFaceDetectionModel)) + throw new FileNotFoundException(cnnFaceDetectionModel); + + string? faceRecognitionModel = Path.Combine(directory, FaceRecognitionModels.GetFaceRecognitionModelLocation()); + if (!File.Exists(faceRecognitionModel)) + throw new FileNotFoundException(faceRecognitionModel); + + _FaceDetector?.Dispose(); + _FaceDetector = DlibDotNet.Dlib.GetFrontalFaceDetector(); + + _PosePredictor68Point?.Dispose(); + _PosePredictor68Point = ShapePredictor.Deserialize(predictor68PointModel); + + _PosePredictor5Point?.Dispose(); + _PosePredictor5Point = ShapePredictor.Deserialize(predictor5PointModel); + + _CnnFaceDetector?.Dispose(); + _CnnFaceDetector = LossMmod.Deserialize(cnnFaceDetectionModel); + + _FaceEncoder?.Dispose(); + _FaceEncoder = LossMetric.Deserialize(faceRecognitionModel); + } + + /// + /// Initializes a new instance of the class with the instance that contains model binary datum. + /// + /// The instance that contains model binary datum. + /// is null. + /// The model data is null. + private FaceRecognition(ModelParameter parameter) + { + if (parameter == null) + throw new ArgumentNullException(nameof(parameter)); + + if (parameter.PosePredictor5FaceLandmarksModel == null) + throw new NullReferenceException(nameof(parameter.PosePredictor5FaceLandmarksModel)); + + if (parameter.PosePredictor68FaceLandmarksModel == null) + throw new NullReferenceException(nameof(parameter.PosePredictor68FaceLandmarksModel)); + + if (parameter.CnnFaceDetectorModel == null) + throw new NullReferenceException(nameof(parameter.CnnFaceDetectorModel)); + + if (parameter.FaceRecognitionModel == null) + throw new NullReferenceException(nameof(parameter.FaceRecognitionModel)); + + _FaceDetector?.Dispose(); + _FaceDetector = DlibDotNet.Dlib.GetFrontalFaceDetector(); + + _PosePredictor68Point?.Dispose(); + _PosePredictor68Point = ShapePredictor.Deserialize(parameter.PosePredictor68FaceLandmarksModel); + + _PosePredictor5Point?.Dispose(); + _PosePredictor5Point = ShapePredictor.Deserialize(parameter.PosePredictor5FaceLandmarksModel); + + _CnnFaceDetector?.Dispose(); + _CnnFaceDetector = LossMmod.Deserialize(parameter.CnnFaceDetectorModel); + + _FaceEncoder?.Dispose(); + _FaceEncoder = LossMetric.Deserialize(parameter.FaceRecognitionModel); + } + + #endregion + + #region Properties + + /// + /// Gets or sets the custom face detector that user defined. + /// + public FaceDetector? CustomFaceDetector { get; set; } + + /// + /// Gets or sets the custom face landmark detector that user defined. + /// + public FaceLandmarkDetector? CustomFaceLandmarkDetector { get; set; } + + /// + /// Gets or sets the character encoding to convert to array of for internal library. 
+ /// + public static Encoding InternalEncoding + { + get => DlibDotNet.Dlib.Encoding; + set => DlibDotNet.Dlib.Encoding = value ?? Encoding.UTF8; + } + + #endregion + + #region Methods + + /// + /// Returns an enumerable collection of array of bounding boxes of human faces in a image using the cnn face detector. + /// + /// An enumerable collection of images. + /// The number of image looking for faces. Higher numbers find smaller faces. + /// The number of images to include in each GPU processing batch. + /// An enumerable collection of array of found face locations. + /// is null. + public IEnumerable BatchFaceLocations(IEnumerable images, int numberOfTimesToUpsample = 1, int batchSize = 128) + { + if (images == null) + throw new ArgumentNullException(nameof(images)); + + List? results = new(); + + Image[]? imagesArray = images.ToArray(); + if (!imagesArray.Any()) + return results; + + IEnumerable[]? rawDetectionsBatched = RawFaceLocationsBatched(imagesArray, numberOfTimesToUpsample, batchSize).ToArray(); + + Image? image = imagesArray[0]; + for (int index = 0; index < rawDetectionsBatched.Length; index++) + { + MModRect[]? faces = rawDetectionsBatched[index].ToArray(); + Location[]? locations = faces.Select(rect => new Location(TrimBound(rect.Rect, image.Width, image.Height), rect.DetectionConfidence)).ToArray(); + foreach (MModRect? face in faces) + face.Dispose(); + results.Add(locations); + } + + return results; + } + + /// + /// Compare a known face encoding against a candidate encoding to see if they match. + /// + /// A known face encodings. + /// A single face encoding to compare against a known face encoding. + /// The distance between faces to consider it a match. Lower is more strict. The default value is 0.6. + /// A True/False value indicating which known a face encoding matches the face encoding to check. + /// or is null. + /// or . + public static bool CompareFace(FaceEncoding knownFaceEncoding, FaceEncoding faceEncodingToCheck, double tolerance = 0.6d) + { + if (knownFaceEncoding == null) + throw new ArgumentNullException(nameof(knownFaceEncoding)); + if (faceEncodingToCheck == null) + throw new ArgumentNullException(nameof(faceEncodingToCheck)); + + knownFaceEncoding.ThrowIfDisposed(); + faceEncodingToCheck.ThrowIfDisposed(); + + return FaceDistance(knownFaceEncoding, faceEncodingToCheck) <= tolerance; + } + + /// + /// Compare an enumerable collection of face encodings against a candidate encoding to see if they match. + /// + /// An enumerable collection of known face encodings. + /// A single face encoding to compare against the enumerable collection. + /// The distance between faces to consider it a match. Lower is more strict. The default value is 0.6. + /// An enumerable collection of True/False values indicating which known face encodings match the face encoding to check. + /// or is null. + /// is disposed. Or contains disposed object. + public static IEnumerable CompareFaces(IEnumerable knownFaceEncodings, FaceEncoding faceEncodingToCheck, double tolerance = 0.6d) + { + if (knownFaceEncodings == null) + throw new ArgumentNullException(nameof(knownFaceEncodings)); + if (faceEncodingToCheck == null) + throw new ArgumentNullException(nameof(faceEncodingToCheck)); + + faceEncodingToCheck.ThrowIfDisposed(); + + FaceEncoding[]? array = knownFaceEncodings.ToArray(); + if (array.Any(encoding => encoding.IsDisposed)) + throw new ObjectDisposedException($"{nameof(knownFaceEncodings)} contains disposed object."); + + List? 
results = new(); + if (array.Length == 0) + return results; + + foreach (FaceEncoding? faceEncoding in array) + results.Add(FaceDistance(faceEncoding, faceEncodingToCheck) <= tolerance); + + return results; + } + + /// + /// Create a new instance of the class. + /// + /// The directory path that stores model files. + /// The model file is not found. + /// The specified directory path is not found. + public static FaceRecognition Create(string directory) => new(directory); + + /// + /// Create a new instance of the class. + /// + /// The instance that contains model binary datum. + /// is null. + /// The model data is null. + public static FaceRecognition Create(ModelParameter parameter) => new(parameter); + + /// + /// Crop a specified image with enumerable collection of face locations. + /// + /// The image contains a face. + /// The enumerable collection of location rectangle for faces. + /// + /// or is null. + /// is disposed. + public static IEnumerable CropFaces(Image image, IEnumerable locations) + { + if (image == null) + throw new ArgumentNullException(nameof(image)); + if (locations == null) + throw new ArgumentNullException(nameof(locations)); + + image.ThrowIfDisposed(); + + List? results = new(); + foreach (Location? location in locations) + { + DlibDotNet.Rectangle rect = new(location.Left, location.Top, location.Right, location.Bottom); + DPoint[]? dPoint = new[] + { + new DPoint(rect.Left, rect.Top), + new DPoint(rect.Right, rect.Top), + new DPoint(rect.Left, rect.Bottom), + new DPoint(rect.Right, rect.Bottom), + }; + + int width = (int)rect.Width; + int height = (int)rect.Height; + + switch (image.Mode) + { + case Mode.Rgb: + Matrix? rgb = image.Matrix as Matrix; + results.Add(new Image(DlibDotNet.Dlib.ExtractImage4Points(rgb, dPoint, width, height), + Mode.Rgb)); + break; + case Mode.Greyscale: + Matrix? gray = image.Matrix as Matrix; + results.Add(new Image(DlibDotNet.Dlib.ExtractImage4Points(gray, dPoint, width, height), + Mode.Greyscale)); + break; + } + } + + return results; + } + + /// + /// Compare a face encoding to a known face encoding and get a euclidean distance for comparison face. + /// + /// The face encoding to compare. + /// The face encoding to compare against. + /// The euclidean distance for comparison face. If 0, faces are completely equal. + /// or is null. + /// or is disposed. + public static double FaceDistance(FaceEncoding faceEncoding, FaceEncoding faceToCompare) + { + if (faceEncoding == null) + throw new ArgumentNullException(nameof(faceEncoding)); + if (faceToCompare == null) + throw new ArgumentNullException(nameof(faceToCompare)); + + faceEncoding.ThrowIfDisposed(); + faceToCompare.ThrowIfDisposed(); + + if (faceEncoding.Encoding.Size == 0) + return 0; + + using Matrix? diff = faceEncoding.Encoding - faceToCompare.Encoding; + return DlibDotNet.Dlib.Length(diff); + } + + /// + /// Compare an enumerable collection of face encoding to a known face encoding and get an enumerable collection of euclidean distance for comparison face. + /// + /// The enumerable collection of face encoding to compare. + /// The face encoding to compare against. + /// The enumerable collection of euclidean distance for comparison face. If 0, faces are completely equal. + /// or is null. + /// is disposed. Or contains disposed object. 
+ public static IEnumerable FaceDistances(IEnumerable faceEncodings, FaceEncoding faceToCompare) + { + if (faceEncodings == null) + throw new ArgumentNullException(nameof(faceEncodings)); + if (faceToCompare == null) + throw new ArgumentNullException(nameof(faceToCompare)); + + faceToCompare.ThrowIfDisposed(); + + FaceEncoding[]? array = faceEncodings.ToArray(); + if (array.Any(encoding => encoding.IsDisposed)) + throw new ObjectDisposedException($"{nameof(faceEncodings)} contains disposed object."); + + List? results = new(); + if (array.Length == 0) + return results; + + foreach (FaceEncoding? faceEncoding in array) + using (Matrix? diff = faceEncoding.Encoding - faceToCompare.Encoding) + results.Add(DlibDotNet.Dlib.Length(diff)); + + return results; + } + + /// + /// Returns an enumerable collection of face feature data corresponds to all faces in specified image. + /// + /// The image contains faces. The image can contain multiple faces. + /// The enumerable collection of location rectangle for faces. If specified null, method will find face locations. + /// The number of times to re-sample the face when calculating encoding. + /// The dimension of vector which be returned from detector. + /// The model of face detector to detect in image. If is not null, this value is ignored. + /// An enumerable collection of face feature data corresponds to all faces in specified image. + /// is null. + /// contains no elements. + /// or this object or custom face landmark detector is disposed. + /// is not supported. + public IEnumerable FaceEncodings(Image image, + IEnumerable? knownFaceLocation = null, + int numJitters = 1, + PredictorModel predictorModel = PredictorModel.Small, + Model model = Model.Hog) + { + if (image == null) + throw new ArgumentNullException(nameof(image)); + if (predictorModel == PredictorModel.Custom) + throw new NotSupportedException("FaceRecognition.PredictorModel.Custom is not supported."); + + if (knownFaceLocation != null && !knownFaceLocation.Any()) + throw new InvalidOperationException($"{nameof(knownFaceLocation)} contains no elements."); + + image.ThrowIfDisposed(); + ThrowIfDisposed(); + + IEnumerable? rawLandmarks = RawFaceLandmarks(image, knownFaceLocation, predictorModel, model); + + List? results = new(); + foreach (FullObjectDetection? landmark in rawLandmarks) + { + FaceEncoding? ret = new(FaceRecognitionModelV1.ComputeFaceDescriptor(_FaceEncoder, image, landmark, numJitters)); + landmark.Dispose(); + results.Add(ret); + } + + return results; + } + + /// + /// Returns an enumerable collection of dictionary of face parts locations (eyes, nose, etc) for each face in the image. + /// + /// The image contains faces. The image can contain multiple faces. + /// The enumerable collection of location rectangle for faces. If specified null, method will find face locations. + /// The dimension of vector which be returned from detector. + /// The model of face detector to detect in image. If is not null, this value is ignored. + /// An enumerable collection of dictionary of face parts locations (eyes, nose, etc). + /// is null. + /// contains no elements. + /// or this object or custom face landmark detector is disposed. + /// The custom face landmark detector is not ready. + public IEnumerable>> FaceLandmark(Image faceImage, + IEnumerable? 
faceLocations = null, + PredictorModel predictorModel = PredictorModel.Large, + Model model = Model.Hog) + { + if (faceImage == null) + throw new ArgumentNullException(nameof(faceImage)); + + if (faceLocations != null && !faceLocations.Any()) + throw new InvalidOperationException($"{nameof(faceLocations)} contains no elements."); + + faceImage.ThrowIfDisposed(); + ThrowIfDisposed(); + + if (predictorModel == PredictorModel.Custom) + { + if (CustomFaceLandmarkDetector == null) + throw new NotSupportedException("The custom face landmark detector is not ready."); + + if (CustomFaceLandmarkDetector.IsDisposed) + throw new ObjectDisposedException($"{nameof(CustomFaceLandmarkDetector)}", "The custom face landmark detector is disposed."); + } + + FullObjectDetection[]? landmarks = RawFaceLandmarks(faceImage, faceLocations, predictorModel, model).ToArray(); + IEnumerable? landmarkTuples = landmarks.Select(landmark => Enumerable.Range(0, (int)landmark.Parts) + .Select(index => new FacePoint(index, landmark.GetPart((uint)index).X, landmark.GetPart((uint)index).Y)).ToArray()); + + List>>? results = new(); + + try + { + + // For a definition of each point index, see https://cdn-images-1.medium.com/max/1600/1*AbEg31EgkbXSQehuNJBlWg.png + switch (predictorModel) + { + case PredictorModel.Large: + results.AddRange(landmarkTuples.Select(landmarkTuple => new Dictionary> + { + { FacePart.Chin, Enumerable.Range(0,17).Select(i => landmarkTuple[i]).ToArray() }, + { FacePart.LeftEyebrow, Enumerable.Range(17,5).Select(i => landmarkTuple[i]).ToArray() }, + { FacePart.RightEyebrow, Enumerable.Range(22,5).Select(i => landmarkTuple[i]).ToArray() }, + { FacePart.NoseBridge, Enumerable.Range(27,5).Select(i => landmarkTuple[i]).ToArray() }, + { FacePart.NoseTip, Enumerable.Range(31,5).Select(i => landmarkTuple[i]).ToArray() }, + { FacePart.LeftEye, Enumerable.Range(36,6).Select(i => landmarkTuple[i]).ToArray() }, + { FacePart.RightEye, Enumerable.Range(42,6).Select(i => landmarkTuple[i]).ToArray() }, + { FacePart.TopLip, Enumerable.Range(48,7).Select(i => landmarkTuple[i]) + .Concat( new [] { landmarkTuple[64] }) + .Concat( new [] { landmarkTuple[63] }) + .Concat( new [] { landmarkTuple[62] }) + .Concat( new [] { landmarkTuple[61] }) + .Concat( new [] { landmarkTuple[60] }) }, + { FacePart.BottomLip, Enumerable.Range(54,6).Select(i => landmarkTuple[i]) + .Concat( new [] { landmarkTuple[48] }) + .Concat( new [] { landmarkTuple[60] }) + .Concat( new [] { landmarkTuple[67] }) + .Concat( new [] { landmarkTuple[66] }) + .Concat( new [] { landmarkTuple[65] }) + .Concat( new [] { landmarkTuple[64] }) } + })); + break; + case PredictorModel.Small: + results.AddRange(landmarkTuples.Select(landmarkTuple => new Dictionary> + { + { FacePart.NoseTip, Enumerable.Range(4,1).Select(i => landmarkTuple[i]).ToArray() }, + { FacePart.LeftEye, Enumerable.Range(2,2).Select(i => landmarkTuple[i]).ToArray() }, + { FacePart.RightEye, Enumerable.Range(0,2).Select(i => landmarkTuple[i]).ToArray() } + })); + break; + case PredictorModel.Custom: + if (CustomFaceLandmarkDetector is null) + throw new Exception($"{nameof(CustomFaceLandmarkDetector)} is null"); + results.AddRange(CustomFaceLandmarkDetector.GetLandmarks(landmarkTuples)); + break; + default: + throw new ArgumentOutOfRangeException(nameof(predictorModel), predictorModel, null); + } + } + finally + { + foreach (FullObjectDetection? 
landmark in landmarks) + landmark.Dispose(); + } + + return results.ToArray(); + } + + /// + /// Returns an enumerable collection of face location correspond to all faces in specified image. + /// + /// The image contains faces. The image can contain multiple faces. + /// The number of times to up-sample the image when finding faces. + /// The model of face detector to detect in image. + /// An enumerable collection of face location correspond to all faces in specified image. + /// is null. + /// or this object is disposed. + public IEnumerable FaceLocations(Image image, int numberOfTimesToUpsample = 1, Model model = Model.Hog) + { + if (image == null) + throw new ArgumentNullException(nameof(image)); + + image.ThrowIfDisposed(); + ThrowIfDisposed(); + + List? results = new(); + foreach (MModRect? face in RawFaceLocations(image, numberOfTimesToUpsample, model)) + { + Location? ret = TrimBound(face.Rect, image.Width, image.Height); + double confidence = face.DetectionConfidence; + face.Dispose(); + results.Add(new Location(ret, confidence)); + } + + return results; + } + + /// + /// Creates an from the array. + /// + /// The array contains face encoding data. + /// The this method creates. + /// is null. + /// must be 128. + public static FaceEncoding LoadFaceEncoding(double[] encoding) + { + if (encoding == null) + throw new ArgumentNullException(nameof(encoding)); + if (encoding.Length != 128) + { + string message = $"{nameof(encoding)}.{nameof(encoding.Length)} must be 128."; + throw new ArgumentOutOfRangeException(message); + } +#pragma warning disable + Matrix? matrix = Matrix.CreateTemplateParameterizeMatrix(0, 1); +#pragma warning restore + matrix.SetSize(128); + matrix.Assign(encoding); + return new FaceEncoding(matrix); + } + +#pragma warning disable CA1416 + + /// + /// Creates an from the specified existing bitmap image. + /// + /// The from which to create the new . + /// The this method creates. + /// is null. + /// The specified is not supported. + public static Image? LoadImage(Bitmap bitmap) + { + int width = bitmap.Width; + int height = bitmap.Height; + System.Drawing.Rectangle rect = new(0, 0, width, height); + PixelFormat format = bitmap.PixelFormat; + + Mode mode; + int srcChannel; + int dstChannel; + switch (format) + { + case PixelFormat.Format8bppIndexed: + mode = Mode.Greyscale; + srcChannel = 1; + dstChannel = 1; + break; + case PixelFormat.Format24bppRgb: + mode = Mode.Rgb; + srcChannel = 3; + dstChannel = 3; + break; + case PixelFormat.Format32bppRgb: + case PixelFormat.Format32bppArgb: + mode = Mode.Rgb; + srcChannel = 4; + dstChannel = 3; + break; + default: + throw new ArgumentOutOfRangeException($"{nameof(bitmap)}", $"The specified {nameof(PixelFormat)} is not supported."); + } + + BitmapData? data = null; + + try + { + data = bitmap.LockBits(rect, ImageLockMode.ReadOnly, format); + + unsafe + { + byte[]? 
array = new byte[width * height * dstChannel]; + fixed (byte* pArray = &array[0]) + { + byte* dst = pArray; + + switch (srcChannel) + { + case 1: + { + IntPtr src = data.Scan0; + int stride = data.Stride; + + for (int h = 0; h < height; h++) + Marshal.Copy(IntPtr.Add(src, h * stride), array, h * width, width * dstChannel); + } + break; + case 3: + case 4: + { + byte* src = (byte*)data.Scan0; + int stride = data.Stride; + + for (int h = 0; h < height; h++) + { + int srcOffset = h * stride; + int dstOffset = h * width * dstChannel; + + for (int w = 0; w < width; w++) + { + // BGR order to RGB order + dst[dstOffset + w * dstChannel + 0] = src[srcOffset + w * srcChannel + 2]; + dst[dstOffset + w * dstChannel + 1] = src[srcOffset + w * srcChannel + 1]; + dst[dstOffset + w * dstChannel + 2] = src[srcOffset + w * srcChannel + 0]; + } + } + } + break; + } + + IntPtr ptr = (IntPtr)pArray; + switch (mode) + { + case Mode.Rgb: + return new Image(new Matrix(ptr, height, width, width * 3), Mode.Rgb); + case Mode.Greyscale: + return new Image(new Matrix(ptr, height, width, width), Mode.Greyscale); + } + } + } + } + finally + { + if (data != null) + bitmap.UnlockBits(data); + } + + return null; + } + +#pragma warning restore CA1416 + + /// + /// Creates an from the array. + /// + /// The array contains image data. + /// The number of rows in a image data. + /// The number of columns in a image data. + /// The stride width in bytes. + /// A image color mode. + /// The this method creates. + /// is null. + /// is less than 0. + /// is less than 0. + /// is less than 0. + /// is less than . + /// x is less than . + public static Image? LoadImage(byte[] array, int row, int column, int stride, Mode mode) + { + if (array == null) + throw new ArgumentNullException(nameof(array)); + if (row < 0) + throw new ArgumentOutOfRangeException($"{nameof(row)}", $"{nameof(row)} is less than 0."); + if (column < 0) + throw new ArgumentOutOfRangeException($"{nameof(column)}", $"{nameof(column)} is less than 0."); + if (stride < 0) + throw new ArgumentOutOfRangeException($"{nameof(stride)}", $"{nameof(stride)} is less than 0."); + if (stride < column) + throw new ArgumentOutOfRangeException($"{nameof(stride)}", $"{nameof(stride)} is less than {nameof(column)}."); + int min = row * stride; + if (!(array.Length >= min)) + throw new ArgumentOutOfRangeException("", $"{nameof(row)} x {nameof(stride)} is less than {nameof(Array)}.{nameof(Array.Length)}."); + + unsafe + { + fixed (byte* p = &array[0]) + { + IntPtr ptr = (IntPtr)p; + switch (mode) + { + case Mode.Rgb: + return new Image(new Matrix(ptr, row, column, stride), Mode.Rgb); + case Mode.Greyscale: + return new Image(new Matrix(ptr, row, column, stride), Mode.Greyscale); + } + } + } + + return null; + } + + /// + /// Creates an from the unmanaged memory pointer indicates array image data. + /// + /// The unmanaged memory pointer indicates array image data. + /// The number of rows in a image data. + /// The number of columns in a image data. + /// The stride width in bytes. + /// A image color mode. + /// The this method creates. + /// is . + /// is less than 0. + /// is less than 0. + /// is less than 0. + /// is less than . + public static Image? 
LoadImage(IntPtr array, int row, int column, int stride, Mode mode) + { + if (array == IntPtr.Zero) + throw new ArgumentException($"{nameof(array)} is {nameof(IntPtr)}.{nameof(IntPtr.Zero)}", nameof(array)); + if (row < 0) + throw new ArgumentOutOfRangeException($"{nameof(row)}", $"{nameof(row)} is less than 0."); + if (column < 0) + throw new ArgumentOutOfRangeException($"{nameof(column)}", $"{nameof(column)} is less than 0."); + if (stride < 0) + throw new ArgumentOutOfRangeException($"{nameof(stride)}", $"{nameof(stride)} is less than 0."); + if (stride < column) + throw new ArgumentOutOfRangeException($"{nameof(stride)}", $"{nameof(stride)} is less than {nameof(column)}."); + + return mode switch + { + Mode.Rgb => new Image(new Matrix(array, row, column, stride), mode), + Mode.Greyscale => new Image(new Matrix(array, row, column, stride), mode), + _ => null, + }; + } + + /// + /// Creates an from the specified path. + /// + /// A string that contains the path of the file from which to create the . + /// A image color mode. + /// The this method creates. + /// The specified path does not exist. + public static Image? LoadImageFile(string file, Mode mode = Mode.Rgb) + { + if (!File.Exists(file)) + throw new FileNotFoundException(file); + + return mode switch + { + Mode.Rgb => new Image(DlibDotNet.Dlib.LoadImageAsMatrix(file), mode), + Mode.Greyscale => new Image(DlibDotNet.Dlib.LoadImageAsMatrix(file), mode), + _ => null, + }; + } + + #region Helpers + + private IEnumerable RawFaceLandmarks(Image faceImage, + IEnumerable? faceLocations = null, + PredictorModel predictorModel = PredictorModel.Large, + Model model = Model.Hog) + { + IEnumerable rects; + + if (faceLocations == null) + { + List? list = new(); + IEnumerable? tmp = RawFaceLocations(faceImage, 1, model); + foreach (MModRect? mModRect in tmp) + { + list.Add(new Location(mModRect.DetectionConfidence, mModRect.Rect.Bottom, mModRect.Rect.Left, mModRect.Rect.Right, mModRect.Rect.Top)); + mModRect.Dispose(); + } + + rects = list; + } + else + { + rects = faceLocations; + } + + List? results = new(); + if (predictorModel == PredictorModel.Custom) + { + if (CustomFaceLandmarkDetector is null) + throw new Exception($"{nameof(CustomFaceLandmarkDetector)} is null"); + foreach (Location? rect in rects) + { + FullObjectDetection? ret = CustomFaceLandmarkDetector.Detect(faceImage, rect); + results.Add(ret); + } + } + else + { + ShapePredictor? posePredictor = _PosePredictor68Point; + switch (predictorModel) + { + case PredictorModel.Small: + posePredictor = _PosePredictor5Point; + break; + } + + foreach (Location? rect in rects) + { + FullObjectDetection? ret = posePredictor.Detect(faceImage.Matrix, new DlibDotNet.Rectangle(rect.Left, rect.Top, rect.Right, rect.Bottom)); + results.Add(ret); + } + } + + return results; + } + + private IEnumerable RawFaceLocations(Image faceImage, int numberOfTimesToUpsample = 1, Model model = Model.Hog) + { + switch (model) + { + case Model.Custom: + if (CustomFaceDetector == null) + throw new NotSupportedException("The custom face detector is not ready."); + return CustomFaceDetector.Detect(faceImage, numberOfTimesToUpsample).Select(rect => new MModRect + { + Rect = new DlibDotNet.Rectangle(rect.Left, rect.Top, rect.Right, rect.Bottom), + DetectionConfidence = rect.Confidence + }); + case Model.Cnn: + return CnnFaceDetectionModelV1.Detect(_CnnFaceDetector, faceImage, numberOfTimesToUpsample); + default: + IEnumerable>? 
locations = SimpleObjectDetector.RunDetectorWithUpscale2(_FaceDetector, faceImage, (uint)numberOfTimesToUpsample); + return locations.Select(tuple => new MModRect { Rect = tuple.Item1, DetectionConfidence = tuple.Item2 }); + } + } + + private IEnumerable> RawFaceLocationsBatched(IEnumerable faceImages, int numberOfTimesToUpsample = 1, int batchSize = 128) => CnnFaceDetectionModelV1.DetectMulti(_CnnFaceDetector, faceImages, numberOfTimesToUpsample, batchSize); + + private static Location TrimBound(DlibDotNet.Rectangle location, int width, int height) => new(Math.Max(location.Left, 0), Math.Max(location.Top, 0), Math.Min(location.Right, width), Math.Min(location.Bottom, height)); + + #endregion + + #endregion + + #region Methods + + #region Overrides + + /// + /// Releases all unmanaged resources. + /// + protected override void DisposeUnmanaged() + { + base.DisposeUnmanaged(); + + _PosePredictor68Point?.Dispose(); + _PosePredictor5Point?.Dispose(); + _CnnFaceDetector?.Dispose(); + _FaceEncoder?.Dispose(); + _FaceDetector?.Dispose(); + } + + #endregion + + #endregion + +} \ No newline at end of file diff --git a/FaceRecognitionDotNet/FaceRecognitionDotNet.csproj b/FaceRecognitionDotNet/FaceRecognitionDotNet.csproj new file mode 100644 index 0000000..56da923 --- /dev/null +++ b/FaceRecognitionDotNet/FaceRecognitionDotNet.csproj @@ -0,0 +1,52 @@ + + + enable + 10.0 + enable + library + win-x64 + net6.0 + + + Phares.View.by.Distance.FaceRecognitionDotNet + false + 5.0.402.104 + Mike Phares + Phares + true + snupkg + + + true + + + true + true + true + + + Windows + + + OSX + + + Linux + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/FaceRecognitionDotNet/FaceRecognitionModels.cs b/FaceRecognitionDotNet/FaceRecognitionModels.cs new file mode 100644 index 0000000..91d1bf8 --- /dev/null +++ b/FaceRecognitionDotNet/FaceRecognitionModels.cs @@ -0,0 +1,22 @@ +namespace View_by_Distance.FaceRecognitionDotNet; + +internal sealed class FaceRecognitionModels +{ + + public static string GetPosePredictorModelLocation() => "shape_predictor_68_face_landmarks.dat"; + + public static string GetPosePredictorFivePointModelLocation() => "shape_predictor_5_face_landmarks.dat"; + + public static string GetFaceRecognitionModelLocation() => "dlib_face_recognition_resnet_model_v1.dat"; + + public static string GetCnnFaceDetectorModelLocation() => "mmod_human_face_detector.dat"; + + public static string GetPosePredictor194PointModelLocation() => "helen-dataset.dat"; + + public static string GetAgeNetworkModelLocation() => "adience-age-network.dat"; + + public static string GetGenderNetworkModelLocation() => "utkface-gender-network.dat"; + + public static string GetEmotionNetworkModelLocation() => "corrective-reannotation-of-fer-ck-kdef-emotion-network_test_best.dat"; + +} \ No newline at end of file diff --git a/FaceRecognitionDotNet/Image.cs b/FaceRecognitionDotNet/Image.cs new file mode 100644 index 0000000..ff99c72 --- /dev/null +++ b/FaceRecognitionDotNet/Image.cs @@ -0,0 +1,127 @@ +using DlibDotNet; +using DlibDotNet.Extensions; +using System.Drawing; +using View_by_Distance.Shared.Models.Stateless; + +namespace View_by_Distance.FaceRecognitionDotNet; + +/// +/// Represents a image data. This class cannot be inherited. 
+/// +public sealed class Image : DisposableObject +{ + + #region Fields + + #endregion + + #region Constructors + + internal Image(MatrixBase matrix, Mode mode) + { + Matrix = matrix; + Mode = mode; + } + + #endregion + + #region Properties + + /// + /// Gets the height of the image. + /// + /// This object is disposed. + public int Height + { + get + { + ThrowIfDisposed(); + return Matrix.Rows; + } + } + + internal MatrixBase Matrix { get; private set; } + + internal Mode Mode { get; } + + /// + /// Gets the width of the image. + /// + /// This object is disposed. + public int Width + { + get + { + ThrowIfDisposed(); + return Matrix.Columns; + } + } + + #endregion + + #region Methods + + /// + /// Saves this to the specified file. + /// + /// A string that contains the name of the file to which to save this . + /// The for this . + /// is null. + /// This object is disposed. + public void Save(string filename, ImageFormat format) + { + if (filename == null) + throw new ArgumentNullException(nameof(filename)); + + ThrowIfDisposed(); + + string? directory = Path.GetDirectoryName(filename); + if (!Directory.Exists(directory) && !string.IsNullOrWhiteSpace(directory)) + _ = Directory.CreateDirectory(directory); + + switch (format) + { + case ImageFormat.Bmp: + DlibDotNet.Dlib.SaveBmp(Matrix, filename); + break; + case ImageFormat.Jpeg: + DlibDotNet.Dlib.SaveJpeg(Matrix, filename); + break; + case ImageFormat.Png: + DlibDotNet.Dlib.SavePng(Matrix, filename); + break; + } + } + + /// + /// Converts this to a GDI+ . + /// + /// A that represents the converted . + /// This object is disposed. + /// A Greyscale image is not supported. + public Bitmap ToBitmap() + { + ThrowIfDisposed(); + + if (Mode == Mode.Greyscale) + throw new NotSupportedException(); + + return ((Matrix)Matrix).ToBitmap(); + } + + #region Overrides + + /// + /// Releases all unmanaged resources. + /// + protected override void DisposeUnmanaged() + { + base.DisposeUnmanaged(); + Matrix?.Dispose(); + } + + #endregion + + #endregion + +} \ No newline at end of file diff --git a/FaceRecognitionDotNet/ModelParameter.cs b/FaceRecognitionDotNet/ModelParameter.cs new file mode 100644 index 0000000..7443b8e --- /dev/null +++ b/FaceRecognitionDotNet/ModelParameter.cs @@ -0,0 +1,49 @@ +namespace View_by_Distance.FaceRecognitionDotNet; + +/// +/// Describes the model binary datum. This class cannot be inherited. +/// +public sealed class ModelParameter +{ + + #region Properties + + /// + /// Gets or sets the binary data of model for 68 points face landmarks. + /// + public byte[]? PosePredictor68FaceLandmarksModel + { + get; + set; + } + + /// + /// Gets or sets the binary data of model for 5 points face landmarks. + /// + public byte[]? PosePredictor5FaceLandmarksModel + { + get; + set; + } + + /// + /// Gets or sets the binary data of model for face encoding. + /// + public byte[]? FaceRecognitionModel + { + get; + set; + } + + /// + /// Gets or sets the binary data of model for face detector by using CNN. + /// + public byte[]? CnnFaceDetectorModel + { + get; + set; + } + + #endregion + +} \ No newline at end of file diff --git a/FaceRecognitionDotNet/Point.cs b/FaceRecognitionDotNet/Point.cs new file mode 100644 index 0000000..7f29bc2 --- /dev/null +++ b/FaceRecognitionDotNet/Point.cs @@ -0,0 +1,114 @@ +namespace View_by_Distance.FaceRecognitionDotNet; + +/// +/// Represents an ordered pair of integer x- and y-coordinates that defines a point in a two-dimensional plane. 
+/// +public struct Point : IEquatable +{ + + #region Constructors + + /// + /// Initializes a new instance of the structure with the specified coordinates. + /// + /// The horizontal position of the point. + /// The vertical position of the point. + public Point(int x, int y) + { + X = x; + Y = y; + } + + internal Point(DlibDotNet.Point point) + { + X = point.X; + Y = point.Y; + } + + #endregion + + #region Properties + + /// + /// Gets the x-coordinate of this . + /// + public int X + { + get; + } + + /// + /// Gets the y-coordinate of this . + /// + public int Y + { + get; + } + + #endregion + + #region Methods + + /// + /// Compares two structures for equality. + /// + /// The point to compare to this instance. + /// true if both structures contain the same and values; otherwise, false. + public bool Equals(Point other) + { + return X == other.X && + Y == other.Y; + } + + #region overrides + + /// + /// Determines whether the specified is a and whether it contains the same coordinates as this . + /// + /// The to compare. + /// true if is a and contains the same and values as this ; otherwise, false. + public override bool Equals(object? obj) => obj is Point point && Equals(point); + + /// + /// Returns the hash code for this . + /// + /// The hash code for this structure. + public override int GetHashCode() + { + int hashCode = 1861411795; + hashCode = hashCode * -1521134295 + X.GetHashCode(); + hashCode = hashCode * -1521134295 + Y.GetHashCode(); + return hashCode; + } + + /// + /// Compares two structures for equality. + /// + /// The first structure to compare. + /// The second structure to compare. + /// true if both the and coordinates of and are equal; otherwise, false. + public static bool operator ==(Point point1, Point point2) => point1.Equals(point2); + + /// + /// Compares two structures for inequality. + /// + /// The first structure to compare. + /// The second structure to compare. + /// true if and have different or coordinates; false if and have the same and coordinates. 
+ + /* Unmerged change from project 'FaceRecognition(netstandard2.0)' + Before: + public static bool operator !=(Point point1, Point point2) + { + return !(point1 == point2); + } + After: + public static bool operator !=(Point point1, Point point2) => !(point1 == point2); + */ + public static bool operator !=(Point point1, Point point2) => !(point1 == point2); + + #endregion + + #endregion + +} \ No newline at end of file diff --git a/Instance/DlibDotNet.cs b/Instance/DlibDotNet.cs index 1ae0570..affd173 100644 --- a/Instance/DlibDotNet.cs +++ b/Instance/DlibDotNet.cs @@ -1,15 +1,16 @@ -using FaceRecognitionDotNet; -using Microsoft.Extensions.Configuration; +using Microsoft.Extensions.Configuration; using Phares.Shared; using ShellProgressBar; using System.Drawing.Imaging; using System.Text.Json; +using View_by_Distance.FaceRecognitionDotNet; using View_by_Distance.Instance.Models; using View_by_Distance.Metadata.Models; using View_by_Distance.Property.Models; using View_by_Distance.Resize.Models; using View_by_Distance.Shared.Models; using View_by_Distance.Shared.Models.Methods; +using View_by_Distance.Shared.Models.Stateless; namespace View_by_Distance.Instance; @@ -548,7 +549,7 @@ public class DlibDotNet } } - private void FullDoWork(Property.Models.Configuration configuration, string[] juliePhares, Model model, PredictorModel predictorModel, string argZero, Person[] people, PropertyLogic propertyLogic, List propertyHolderCollections) + private void FullDoWork(Property.Models.Configuration configuration, string[] juliePhares, Model model, PredictorModel predictorModel, string argZero, Dictionary> peopleCollection, PropertyLogic propertyLogic, List propertyHolderCollections) { if (_Log is null) throw new Exception($"{nameof(_Log)} is null!"); @@ -673,8 +674,10 @@ public class DlibDotNet _Exceptions.Add(sourceDirectory); if (exceptionCount == 0 && _ArgZeroIsConfigurationRootDirectory) WriteGroup(configuration, propertyLogic, propertyCollection, metadataCollection, faceCollections, resizeKeyValuePairs, sourceDirectory, filteredPropertyHolderCollection); - if (exceptionCount == 0 && _Configuration.LoadOrCreateThenSaveImageFacesResults.Value && _Configuration.SaveShortcuts.HasValue && _Configuration.SaveShortcuts.Value) - _Faces.SaveShortcuts(configuration, juliePhares, model, predictorModel, people, propertyLogic, outputResolution, filteredPropertyHolderCollection, propertyCollection, faceCollections); + if (exceptionCount == 0) + propertyLogic.AddToPropertyLogicAllCollection(filteredPropertyHolderCollection); + if (exceptionCount == 0 && _Configuration.LoadOrCreateThenSaveImageFacesResults.Value && _Configuration.SaveShortcuts.HasValue && _Configuration.SaveShortcuts.Value && propertyLogic.NamedFaceInfoDeterministicHashCodeIndices.Any()) + _Faces.SaveShortcuts(configuration, juliePhares, model, predictorModel, propertyLogic, peopleCollection, outputResolution, filteredPropertyHolderCollection, propertyCollection, faceCollections); if (exceptionCount == 0 && _Configuration.LoadOrCreateThenSaveDistanceResults.HasValue && _Configuration.LoadOrCreateThenSaveDistanceResults.Value) _Distance.LoadOrCreateThenSaveDistanceResults(configuration, model, predictorModel, sourceDirectory, outputResolution, sourceDirectoryChanges, filteredPropertyHolderCollection, faceCollections); if (_Resize.AngleBracketCollection.Any()) @@ -696,6 +699,7 @@ public class DlibDotNet } if (_ArgZeroIsConfigurationRootDirectory && outputResolution == _Configuration.OutputResolutions[0]) { + 
propertyLogic.SaveAllCollection(); if (!_Configuration.LoadOrCreateThenSaveImageFacesResults.Value && !_Configuration.LoadOrCreateThenSaveDirectoryDistanceResults.Value && !_Configuration.LoadOrCreateThenSaveDistanceResults.Value) break; if (_Exceptions.Count == 0) @@ -729,8 +733,9 @@ public class DlibDotNet private void Search(Property.Models.Configuration configuration, string[] juliePhares, bool reverse, Model model, PredictorModel predictorModel, string argZero, Person[] people) { PropertyLogic propertyLogic = GetPropertyLogic(); + Dictionary> peopleCollection = A2_People.Convert(people); List propertyHolderCollections = Property.Models.Stateless.A_Property.Get(configuration, reverse, model.ToString(), predictorModel.ToString(), propertyLogic); - FullDoWork(configuration, juliePhares, model, predictorModel, argZero, people, propertyLogic, propertyHolderCollections); + FullDoWork(configuration, juliePhares, model, predictorModel, argZero, peopleCollection, propertyLogic, propertyHolderCollections); } internal void RenameQueue(Property.Models.Configuration configuration, Model model, PredictorModel predictorModel) => _Rename.RenameQueue(configuration, model, predictorModel); diff --git a/Instance/Instance.csproj b/Instance/Instance.csproj index 7376eee..074a1a9 100644 --- a/Instance/Instance.csproj +++ b/Instance/Instance.csproj @@ -52,31 +52,12 @@ - - - - - - - - - - - - - - - - - - - - + diff --git a/Instance/Models/_A2_People.cs b/Instance/Models/_A2_People.cs index fbcb6c0..c1083bc 100644 --- a/Instance/Models/_A2_People.cs +++ b/Instance/Models/_A2_People.cs @@ -76,4 +76,18 @@ internal class A2_People return results.ToArray(); } + internal static Dictionary> Convert(Person[] people) + { + Dictionary> results = new(); + string personKey; + foreach (Person person in people) + { + personKey = Shared.Models.Stateless.Methods.IPersonBirthday.GetFormatted(person.Birthday); + if (!results.ContainsKey(personKey)) + results.Add(personKey, new List()); + results[personKey].Add(person); + } + return results; + } + } \ No newline at end of file diff --git a/Instance/Models/_D2_FaceLandmark.cs b/Instance/Models/_D2_FaceLandmark.cs index 6a329b7..95d560d 100644 --- a/Instance/Models/_D2_FaceLandmark.cs +++ b/Instance/Models/_D2_FaceLandmark.cs @@ -1,9 +1,9 @@ -using FaceRecognitionDotNet; using System.Drawing; using System.Text.Json; using View_by_Distance.Metadata.Models; using View_by_Distance.Property.Models; using View_by_Distance.Resize.Models; +using View_by_Distance.Shared.Models.Stateless; namespace View_by_Distance.Instance.Models; @@ -33,7 +33,7 @@ internal class D2_FaceLandmarks #pragma warning disable CA1416 - private static Bitmap RotateBitmap(System.Drawing.Image image, float angle) + private static Bitmap RotateBitmap(Image image, float angle) { Bitmap result; Bitmap bitmap = new(image); @@ -63,7 +63,7 @@ internal class D2_FaceLandmarks rotatedImageFileFullName = imageFiles[i][1]; try { - using (System.Drawing.Image image = System.Drawing.Image.FromFile(resizedFileInfo.FullName)) + using (Image image = Image.FromFile(resizedFileInfo.FullName)) { using Graphics graphic = Graphics.FromImage(image); if (face.FaceLandmarks is null || !face.FaceLandmarks.Any()) @@ -92,7 +92,7 @@ internal class D2_FaceLandmarks } if (face.α.HasValue) { - using System.Drawing.Image image = System.Drawing.Image.FromFile(resizedFileInfo.FullName); + using Image image = Image.FromFile(resizedFileInfo.FullName); rotated = RotateBitmap(image, (float)face.α.Value); if (rotated is not null) { diff --git 
a/Instance/Models/_D_Face.cs b/Instance/Models/_D_Face.cs index 56d6457..60c7b31 100644 --- a/Instance/Models/_D_Face.cs +++ b/Instance/Models/_D_Face.cs @@ -1,14 +1,15 @@ -using FaceRecognitionDotNet; using System.Drawing; using System.Drawing.Drawing2D; using System.Text.Json; using System.Text.Json.Serialization; using System.Text.RegularExpressions; +using View_by_Distance.FaceRecognitionDotNet; using View_by_Distance.Metadata.Models; using View_by_Distance.Property.Models; using View_by_Distance.Resize.Models; using View_by_Distance.Shared.Models; using View_by_Distance.Shared.Models.Methods; +using View_by_Distance.Shared.Models.Stateless; using WindowsShortcutFactory; namespace View_by_Distance.Instance.Models; @@ -32,8 +33,8 @@ public class D_Face : Shared.Models.Properties.IFace, IFace protected double? _Α; protected DateTime _DateTime; protected Shared.Models.FaceEncoding _FaceEncoding; - protected Dictionary _FaceLandmarks; - protected Shared.Models.Location _Location; + protected Dictionary _FaceLandmarks; + protected Location _Location; protected int? _LocationIndex; protected OutputResolution _OutputResolution; protected bool _Populated; @@ -41,9 +42,9 @@ public class D_Face : Shared.Models.Properties.IFace, IFace public double? α => _Α; public DateTime DateTime => _DateTime; public Shared.Models.FaceEncoding FaceEncoding => _FaceEncoding; - public Dictionary FaceLandmarks => _FaceLandmarks; + public Dictionary FaceLandmarks => _FaceLandmarks; public OutputResolution OutputResolution => _OutputResolution; - public Shared.Models.Location Location => _Location; + public Location Location => _Location; public int? LocationIndex => _LocationIndex; public bool Populated => _Populated; public string RelativePath => _RelativePath; @@ -51,7 +52,7 @@ public class D_Face : Shared.Models.Properties.IFace, IFace #nullable disable [JsonConstructor] - public D_Face(double? α, DateTime dateTime, Shared.Models.FaceEncoding faceEncoding, Dictionary faceLandmarks, Shared.Models.Location location, int? locationIndex, OutputResolution outputResolution, bool populated, string relativePath) + public D_Face(double? α, DateTime dateTime, Shared.Models.FaceEncoding faceEncoding, Dictionary faceLandmarks, Location location, int? locationIndex, OutputResolution outputResolution, bool populated, string relativePath) { _Α = α; _DateTime = dateTime; @@ -76,7 +77,7 @@ public class D_Face : Shared.Models.Properties.IFace, IFace _WriteIndentedJsonSerializerOptions = new JsonSerializerOptions { WriteIndented = true }; } - private D_Face(Shared.Models.Location location) + private D_Face(Location location) { _Α = α; _DateTime = DateTime.MinValue; @@ -102,12 +103,12 @@ public class D_Face : Shared.Models.Properties.IFace, IFace _RelativePath = string.Empty; } - private D_Face(A_Property property, int outputResolutionWidth, int outputResolutionHeight, int outputResolutionOrientation, string relativePath, int? i, Shared.Models.Location location) + private D_Face(A_Property property, int outputResolutionWidth, int outputResolutionHeight, int outputResolutionOrientation, string relativePath, int? 
i, Location location) { DateTime?[] dateTimes; dateTimes = new DateTime?[] { property.CreationTime, property.LastWriteTime, property.DateTime, property.DateTimeDigitized, property.DateTimeOriginal, property.GPSDateStamp }; _DateTime = (from l in dateTimes where l.HasValue select l.Value).Min(); - _FaceLandmarks = new Dictionary(); + _FaceLandmarks = new Dictionary(); _OutputResolution = new(outputResolutionHeight, outputResolutionOrientation, outputResolutionWidth); _Location = location; _LocationIndex = i; @@ -223,15 +224,15 @@ public class D_Face : Shared.Models.Properties.IFace, IFace int width; int height; Graphics graphics; - Rectangle rectangle; + Location location; Bitmap preRotated; - Shared.Models.Location location; + Rectangle rectangle; using Bitmap source = new(resizedFileInfo.FullName); for (int i = 0; i < faceCollection.Count; i++) { if (!faceCollection[i].Populated || faceCollection[i]?.Location is null) continue; - location = new Shared.Models.Location(faceCollection[i].Location.Confidence, + location = new Location(faceCollection[i].Location.Confidence, faceCollection[i].Location.Bottom, faceCollection[i].Location.Left, faceCollection[i].Location.Right, @@ -255,7 +256,7 @@ public class D_Face : Shared.Models.Properties.IFace, IFace throw new Exception(); if (_Configuration.NumJitters is null) throw new Exception(); - FaceRecognitionDotNet.Location[] locations; + Location[] locations; const int numberOfTimesToUpSample = 1; FaceRecognitionDotNet.Image? unknownImage = null; if (resizedFileInfo.Exists) @@ -284,18 +285,18 @@ public class D_Face : Shared.Models.Properties.IFace, IFace int rightEyeY; Bitmap rotated; string faceFile; + Location location; Bitmap preRotated; Graphics graphics; D_Face? face = null; Rectangle rectangle; double[] rawEncoding; - Shared.Models.Location location; - FaceRecognitionDotNet.Image knownImage; + FaceRecognitionDotNet.Image? knownImage; + FaceRecognitionDotNet.Image? 
rotatedImage; Shared.Models.FaceEncoding faceEncoding; - FaceRecognitionDotNet.Image rotatedImage; FaceRecognitionDotNet.FaceEncoding[] faceEncodings; - IEnumerable facePoints; - IDictionary>[] faceLandmarks; + IEnumerable facePoints; + IDictionary>[] faceLandmarks; using Bitmap source = unknownImage.ToBitmap(); padding = (int)((source.Width + source.Height) / 2 * .01); for (int i = 0; i < locations.Length; i++) @@ -319,27 +320,31 @@ public class D_Face : Shared.Models.Properties.IFace, IFace // source.Save(Path.Combine(_Configuration.RootDirectory, "source.jpg")); // preRotated.Save(Path.Combine(_Configuration.RootDirectory, $"{p} - preRotated.jpg")); using (knownImage = FaceRecognition.LoadImage(preRotated)) - faceLandmarks = faceRecognition.FaceLandmark(knownImage, faceLocations: null, _PredictorModel, _Model).ToArray(); + if (knownImage is null) + throw new Exception($"{nameof(knownImage)} is null"); + faceLandmarks = faceRecognition.FaceLandmark(knownImage, faceLocations: null, _PredictorModel, _Model).ToArray(); if (faceLandmarks.Length == 0 && p < _Configuration.PaddingLoops.Value) continue; else if (faceLandmarks.Length != 1) continue; - foreach (KeyValuePair> keyValuePair in faceLandmarks[0]) - face.FaceLandmarks.Add(keyValuePair.Key.ToString(), (from l in keyValuePair.Value select new Shared.Models.FacePoint(l.Index, l.Point.X, l.Point.Y)).ToArray()); + foreach (KeyValuePair> keyValuePair in faceLandmarks[0]) + face.FaceLandmarks.Add(keyValuePair.Key.ToString(), keyValuePair.Value.ToArray()); if (!faceLandmarks[0].ContainsKey(FacePart.LeftEye) || !faceLandmarks[0].ContainsKey(FacePart.RightEye)) continue; facePoints = faceLandmarks[0][FacePart.LeftEye]; - leftEyeX = (int)(from l in facePoints select l.Point.X).Average(); - leftEyeY = (int)(from l in facePoints select l.Point.Y).Average(); + leftEyeX = (int)(from l in facePoints select l.X).Average(); + leftEyeY = (int)(from l in facePoints select l.Y).Average(); facePoints = faceLandmarks[0][FacePart.RightEye]; - rightEyeX = (int)(from l in facePoints select l.Point.X).Average(); - rightEyeY = (int)(from l in facePoints select l.Point.Y).Average(); + rightEyeX = (int)(from l in facePoints select l.X).Average(); + rightEyeY = (int)(from l in facePoints select l.Y).Average(); α = Shared.Models.Stateless.Methods.IFace.Getα(rightEyeX, leftEyeX, rightEyeY, leftEyeY); using (rotated = RotateBitmap(preRotated, (float)α.Value)) { // rotated.Save(Path.Combine(_Configuration.RootDirectory, $"{p} - rotated.jpg")); using (rotatedImage = FaceRecognition.LoadImage(rotated)) - faceEncodings = faceRecognition.FaceEncodings(rotatedImage, knownFaceLocation: null, _Configuration.NumJitters.Value, _PredictorModel, _Model).ToArray(); + if (rotatedImage is null) + throw new Exception($"{nameof(rotatedImage)} is null"); + faceEncodings = faceRecognition.FaceEncodings(rotatedImage, knownFaceLocation: null, _Configuration.NumJitters.Value, _PredictorModel, _Model).ToArray(); if (faceEncodings.Length == 0 && p < _Configuration.PaddingLoops.Value) continue; else if (faceEncodings.Length != 1) @@ -495,29 +500,26 @@ public class D_Face : Shared.Models.Properties.IFace, IFace SaveFaces(faceCollection, resizedFileInfo, imageFiles); } - internal void SaveShortcuts(Property.Models.Configuration configuration, string[] juliePhares, Model model, PredictorModel predictorModel, Person[] people, PropertyLogic propertyLogic, string outputResolution, PropertyHolder[] filteredPropertyHolderCollection, List propertyCollection, List> faceCollections) + internal void 
SaveShortcuts(Property.Models.Configuration configuration, string[] juliePhares, Model model, PredictorModel predictorModel, PropertyLogic propertyLogic, Dictionary> peopleCollection, string outputResolution, PropertyHolder[] filteredPropertyHolderCollection, List propertyCollection, List> faceCollections) { - int oldIndex; string[] keys; string fileName; string fullName; string personKey; string directory; + bool? isWrongYear; FileInfo fileInfo; + TimeSpan timeSpan; + DateTime? birthDate; string copyDirectory; string? relativePath; + string isWrongYearFlag; + string subDirectoryName; + DateTime minimumDateTime; List faceCollection; PropertyHolder propertyHolder; WindowsShortcut windowsShortcut; const string pattern = @"[\\,\/,\:,\*,\?,\"",\<,\>,\|]"; - Dictionary> peopleCollection = new(); - foreach (Person person in people) - { - personKey = Shared.Models.Stateless.Methods.IPersonBirthday.GetFormatted(person.Birthday); - if (!peopleCollection.ContainsKey(personKey)) - peopleCollection.Add(personKey, new List()); - peopleCollection[personKey].Add(person); - } string dFacesContentDirectory = Path.Combine(Property.Models.Stateless.IResult.GetResultsFullGroupDirectory(configuration, model.ToString(), predictorModel.ToString(), nameof(D_Face), outputResolution, includeResizeGroup: true, includeModel: true, includePredictorModel: true), "(_)"); for (int i = 0; i < filteredPropertyHolderCollection.Length; i++) { @@ -532,37 +534,42 @@ public class D_Face : Shared.Models.Properties.IFace, IFace continue; if (propertyHolder.Property?.Id is null || propertyHolder.MinimumDateTime is null || propertyHolder.ResizedFileInfo is null) continue; - if (propertyHolder.Property.Indices.Length < 2) - directory = Path.Combine(dFacesContentDirectory, $"New{relativePath[2..]}"); + if (!propertyLogic.NamedFaceInfoDeterministicHashCodeIndices.ContainsKey(propertyHolder.Property.Id.Value)) + directory = Path.Combine(dFacesContentDirectory, $"Unnamed{relativePath[2..]}"); else { - oldIndex = propertyHolder.Property.Indices[1]; - if (!propertyLogic.NamedFaceInfo.ContainsKey(oldIndex)) - directory = Path.Combine(dFacesContentDirectory, $"Unnamed{relativePath[2..]}"); + faceCollection = faceCollections[i]; + keys = propertyLogic.NamedFaceInfoDeterministicHashCodeIndices[propertyHolder.Property.Id.Value]; + minimumDateTime = Property.Models.Stateless.A_Property.GetMinimumDateTime(propertyHolder.Property); + (isWrongYear, _) = propertyHolder.Property.IsWrongYear(propertyHolder.ImageFileInfo.FullName, minimumDateTime); + isWrongYearFlag = isWrongYear is null ? "#" : isWrongYear.Value ? 
"~" : "="; + subDirectoryName = $"{isWrongYearFlag}{minimumDateTime:yyyy}"; + if (!faceCollection.Any()) + directory = Path.Combine(dFacesContentDirectory, $"None{relativePath[2..]}", subDirectoryName); + else if (keys.Length != 1) + directory = Path.Combine(dFacesContentDirectory, $"Not Supported{relativePath[2..]}", subDirectoryName); + else if (faceCollection.Count != 1) + directory = Path.Combine(dFacesContentDirectory, $"Many{relativePath[2..]}", subDirectoryName); else { - faceCollection = faceCollections[i]; - keys = propertyLogic.NamedFaceInfo[oldIndex]; - if (!faceCollection.Any()) - directory = Path.Combine(dFacesContentDirectory, $"None{relativePath[2..]}"); - else if (keys.Length != 1) - directory = Path.Combine(dFacesContentDirectory, $"Not Supported{relativePath[2..]}"); - else if (faceCollection.Count == 1) + personKey = keys[0]; + if (isWrongYear is not null && !isWrongYear.Value && personKey[..2] is "19" or "20") { - personKey = keys[0]; - if (juliePhares.Contains(personKey)) - copyDirectory = Path.Combine(dFacesContentDirectory, "Named Images"); - directory = Path.Combine(dFacesContentDirectory, "Named Shortcuts", personKey); + birthDate = Shared.Models.Stateless.Methods.IPersonBirthday.Get(personKey); + if (birthDate.HasValue) + { + if (minimumDateTime < birthDate.Value) + subDirectoryName = "!---"; + else + { + timeSpan = new(minimumDateTime.Ticks - birthDate.Value.Ticks); + subDirectoryName = $"^{Math.Floor(timeSpan.TotalDays / 365):000}"; + } + } } - else if ((from l in faceCollection where HasLeftAndRight(l.FaceLandmarks) select true).Count() == 1) - { - personKey = keys[0]; - if (juliePhares.Contains(personKey)) - copyDirectory = Path.Combine(dFacesContentDirectory, "Named Images^"); - directory = Path.Combine(dFacesContentDirectory, "Named Shortcuts", $"{personKey}^"); - } - else - directory = Path.Combine(dFacesContentDirectory, $"Many{relativePath[2..]}"); + directory = Path.Combine(dFacesContentDirectory, "Named Shortcuts", personKey, subDirectoryName); + if (juliePhares.Contains(personKey)) + copyDirectory = Path.Combine(dFacesContentDirectory, "Named Images", personKey, subDirectoryName); } } if (!Directory.Exists(directory)) @@ -603,7 +610,7 @@ public class D_Face : Shared.Models.Properties.IFace, IFace Face[] Shared.Models.Stateless.Methods.IFace.TestStatic_GetFaces(string jsonFileFullName) => throw new NotImplementedException(); - private static bool HasLeftAndRight(Dictionary faceLandmarks) + private static bool HasLeftAndRight(Dictionary faceLandmarks) { bool result = true; if (!faceLandmarks.ContainsKey(FacePart.LeftEye.ToString())) diff --git a/Instance/Models/_E2_Navigate.cs b/Instance/Models/_E2_Navigate.cs index 16b7a8a..b63792f 100644 --- a/Instance/Models/_E2_Navigate.cs +++ b/Instance/Models/_E2_Navigate.cs @@ -1,10 +1,10 @@ -using FaceRecognitionDotNet; using System.Text.Json; using View_by_Distance.Instance.Models.Stateless; using View_by_Distance.Metadata.Models; using View_by_Distance.Resize.Models; using View_by_Distance.Shared.Models; using View_by_Distance.Shared.Models.Methods; +using View_by_Distance.Shared.Models.Stateless; namespace View_by_Distance.Instance.Models; diff --git a/Instance/Models/_E3_Rename.cs b/Instance/Models/_E3_Rename.cs index 05a4a4f..a8f5bd7 100644 --- a/Instance/Models/_E3_Rename.cs +++ b/Instance/Models/_E3_Rename.cs @@ -1,8 +1,8 @@ -using FaceRecognitionDotNet; using System.Text.Json; using View_by_Distance.Metadata.Models; using View_by_Distance.Property.Models; using View_by_Distance.Resize.Models; +using 
View_by_Distance.Shared.Models.Stateless; namespace View_by_Distance.Instance.Models; diff --git a/Instance/Models/_E_Distance.cs b/Instance/Models/_E_Distance.cs index 9072277..9a25094 100644 --- a/Instance/Models/_E_Distance.cs +++ b/Instance/Models/_E_Distance.cs @@ -1,9 +1,10 @@ -using FaceRecognitionDotNet; using System.Text; using System.Text.Json; +using View_by_Distance.FaceRecognitionDotNet; using View_by_Distance.Metadata.Models; using View_by_Distance.Property.Models; using View_by_Distance.Resize.Models; +using View_by_Distance.Shared.Models.Stateless; namespace View_by_Distance.Instance.Models; @@ -203,10 +204,10 @@ internal class E_Distance bool check = false; string parentCheck; FileInfo[] fileInfoCollection; - System.IO.DirectoryInfo directoryInfo; - System.IO.DirectoryInfo tvsDirectoryInfo; string fileNameWithoutExtension; List directories = new(); + System.IO.DirectoryInfo directoryInfo; + System.IO.DirectoryInfo tvsDirectoryInfo; string[] changesFrom = new string[] { nameof(A_Property), nameof(B_Metadata), nameof(C_Resize), nameof(D_Face) }; List dateTimes = (from l in sourceDirectoryChanges where changesFrom.Contains(l.Item1) select l.Item2).ToList(); List directoryInfoCollection = Property.Models.Stateless.IResult.GetDirectoryInfoCollection(configuration, @@ -289,8 +290,8 @@ internal class E_Distance private static List<(string, List, List)> GetMatches(List<(string, List>)> files) { List<(string, List, List)> results = new(); - List faces; FaceEncoding faceEncoding; + List faces; List faceEncodings; foreach ((string, List>) file in files) { diff --git a/Instance/Models/_G_Index.cs b/Instance/Models/_G_Index.cs index c1a0ea4..4e699f2 100644 --- a/Instance/Models/_G_Index.cs +++ b/Instance/Models/_G_Index.cs @@ -1,8 +1,8 @@ -using FaceRecognitionDotNet; using System.Text.Json; using System.Text.Json.Serialization; using View_by_Distance.Property.Models; using View_by_Distance.Shared.Models.Methods; +using View_by_Distance.Shared.Models.Stateless; namespace View_by_Distance.Instance.Models; diff --git a/Instance/Program.cs b/Instance/Program.cs index c485fd3..43be07c 100644 --- a/Instance/Program.cs +++ b/Instance/Program.cs @@ -10,7 +10,6 @@ namespace View_by_Distance.Instance; public class Program { - public static void Secondary(List args) { LoggerConfiguration loggerConfiguration = new(); diff --git a/Instance/appsettings.Development.json b/Instance/appsettings.Development.json index 93ea56d..14a5b3f 100644 --- a/Instance/appsettings.Development.json +++ b/Instance/appsettings.Development.json @@ -86,7 +86,7 @@ "PropertiesChangedForResize": false, "Reverse": false, "xRootDirectory": "C:/Tmp/phares/Pictures", - "RootDirectory": "F:/Tmp/Phares/Compare/Images 2022-07-27 - 20220727 - III", + "RootDirectory": "C:/Tmp/Phares/Compare/Images 2022-07-27 - f642c5669a1d89d598a2efd70da9dc7129d02c15 - III", "SaveFullYearOfRandomFiles": true, "SaveResizedSubFiles": true, "SaveShortcuts": true, @@ -159,7 +159,8 @@ "176 x 176", "256 x 256", "353 x 353", - "1024 x 768" + "1024 x 768", + "1920 x 1080" ], "OutputResolutions": [ "1920 x 1080" diff --git a/Instance/appsettings.Staging.json b/Instance/appsettings.Staging.json index 6f2a89f..5e5852d 100644 --- a/Instance/appsettings.Staging.json +++ b/Instance/appsettings.Staging.json @@ -100,7 +100,8 @@ "176 x 176", "256 x 256", "353 x 353", - "1024 x 768" + "1024 x 768", + "1920 x 1080" ], "PropertyContentCollectionFiles": [], "SaveFaceLandmarkForOutputResolutions": [ diff --git a/Property/Models/A_Property.cs 
b/Property/Models/A_Property.cs index 4f410d9..ea8640b 100644 --- a/Property/Models/A_Property.cs +++ b/Property/Models/A_Property.cs @@ -95,7 +95,7 @@ public class A_Property : Shared.Models.Properties.IProperty, IProperty string year; string directoryName; string[] directorySegments; - string? check = Path.GetPathRoot(filteredSourceDirectoryFile); + string? check = Path.GetFullPath(filteredSourceDirectoryFile); string? pathRoot = Path.GetPathRoot(filteredSourceDirectoryFile); if (string.IsNullOrEmpty(pathRoot)) throw new Exception(); diff --git a/Property/Models/PropertyLogic.cs b/Property/Models/PropertyLogic.cs index 2ebba0c..836ceec 100644 --- a/Property/Models/PropertyLogic.cs +++ b/Property/Models/PropertyLogic.cs @@ -14,16 +14,18 @@ namespace View_by_Distance.Property.Models; public class PropertyLogic { + protected readonly List<(int, string[])> _AllCollection; protected readonly List _ExceptionsDirectories; + protected readonly Dictionary _KeyValuePairs; protected readonly Dictionary _IndicesFromNew; - protected readonly Dictionary _IndicesFromOld; - protected readonly Dictionary _NamedFaceInfo; + protected readonly Dictionary _SixCharacterNamedFaceInfo; + protected readonly Dictionary _NamedFaceInfoDeterministicHashCodeIndices; public List AngleBracketCollection { get; } + public Dictionary KeyValuePairs => _KeyValuePairs; public Dictionary IndicesFromNew => _IndicesFromNew; - public Dictionary IndicesFromOld => _IndicesFromOld; - public Dictionary NamedFaceInfo => _NamedFaceInfo; public List ExceptionsDirectories => _ExceptionsDirectories; + public Dictionary NamedFaceInfoDeterministicHashCodeIndices => _NamedFaceInfoDeterministicHashCodeIndices; private readonly Serilog.ILogger? _Log; private readonly string[] _VerifyToSeason; @@ -34,12 +36,14 @@ public class PropertyLogic public PropertyLogic(int maxDegreeOfParallelism, Configuration configuration) { + _AllCollection = new(); _Configuration = configuration; _ExceptionsDirectories = new(); _ASCIIEncoding = new ASCIIEncoding(); AngleBracketCollection = new List(); _Log = Serilog.Log.ForContext(); _MaxDegreeOfParallelism = maxDegreeOfParallelism; + Dictionary? namedFaceInfoDeterministicHashCodeIndices; _WriteIndentedJsonSerializerOptions = new JsonSerializerOptions { WriteIndented = true }; if (configuration.VerifyToSeason is null || !configuration.VerifyToSeason.Any()) throw new Exception(); @@ -47,32 +51,47 @@ public class PropertyLogic string json; string[] files; string fullPath; - Dictionary? indicesFromOld; - Dictionary? namedFaceInfo; + Dictionary? keyValuePairs; List>? collection; Dictionary indicesFromNew = new(); + Dictionary? sixCharacterNamedFaceInfo; string? 
rootDirectoryParent = Path.GetDirectoryName(configuration.RootDirectory); if (string.IsNullOrEmpty(rootDirectoryParent)) throw new Exception($"{nameof(rootDirectoryParent)} is null!"); - files = Directory.GetFiles(rootDirectoryParent, "*Named*.json", SearchOption.TopDirectoryOnly); + files = Directory.GetFiles(rootDirectoryParent, "*DeterministicHashCode*.json", SearchOption.TopDirectoryOnly); if (files.Length != 1) - namedFaceInfo = new(); + namedFaceInfoDeterministicHashCodeIndices = new(); else { json = File.ReadAllText(files[0]); - namedFaceInfo = JsonSerializer.Deserialize>(json); - if (namedFaceInfo is null) - throw new Exception($"{nameof(namedFaceInfo)} is null!"); + namedFaceInfoDeterministicHashCodeIndices = JsonSerializer.Deserialize>(json); + if (namedFaceInfoDeterministicHashCodeIndices is null) + throw new Exception($"{nameof(namedFaceInfoDeterministicHashCodeIndices)} is null!"); + } + if (namedFaceInfoDeterministicHashCodeIndices.Any()) + sixCharacterNamedFaceInfo = new(); + else + { + files = Directory.GetFiles(rootDirectoryParent, "*SixCharacter*.json", SearchOption.TopDirectoryOnly); + if (files.Length != 1) + sixCharacterNamedFaceInfo = new(); + else + { + json = File.ReadAllText(files[0]); + sixCharacterNamedFaceInfo = JsonSerializer.Deserialize>(json); + if (sixCharacterNamedFaceInfo is null) + throw new Exception($"{nameof(sixCharacterNamedFaceInfo)} is null!"); + } } files = Directory.GetFiles(rootDirectoryParent, "*keyValuePairs*.json", SearchOption.TopDirectoryOnly); if (files.Length != 1) - indicesFromOld = new(); + keyValuePairs = new(); else { json = File.ReadAllText(files[0]); - indicesFromOld = JsonSerializer.Deserialize>(json); - if (indicesFromOld is null) - throw new Exception($"{nameof(indicesFromOld)} is null!"); + keyValuePairs = JsonSerializer.Deserialize>(json); + if (keyValuePairs is null) + throw new Exception($"{nameof(keyValuePairs)} is null!"); } foreach (string propertyContentCollectionFile in configuration.PropertyContentCollectionFiles) { @@ -92,9 +111,10 @@ public class PropertyLogic indicesFromNew.Add(keyValuePair.Key, keyValuePair.Value); } } - _NamedFaceInfo = namedFaceInfo; + _KeyValuePairs = keyValuePairs; _IndicesFromNew = indicesFromNew; - _IndicesFromOld = indicesFromOld; + _SixCharacterNamedFaceInfo = sixCharacterNamedFaceInfo; + _NamedFaceInfoDeterministicHashCodeIndices = namedFaceInfoDeterministicHashCodeIndices; } public override string ToString() @@ -180,6 +200,8 @@ public class PropertyLogic } else if (!isIgnoreExtension && isValidImageFormatExtension) { + if (!_IndicesFromNew.Any() && !_KeyValuePairs.Any()) + throw new Exception("In order to keep six character indices at least one need to have an item!"); try { using Image image = Image.FromFile(filteredSourceDirectoryFileInfo.FullName); @@ -216,10 +238,10 @@ public class PropertyLogic encodingHash = Stateless.A_Property.GetDeterministicHashCode(encoding); if (_MaxDegreeOfParallelism < 2) ticks = LogDelta(ticks, nameof(Stateless.A_Property.GetDeterministicHashCode)); - if (!_IndicesFromOld.ContainsKey(encodingHash)) + if (!_KeyValuePairs.ContainsKey(encodingHash)) indices.Add(encodingHash); else - indices.AddRange(_IndicesFromOld[encodingHash]); + indices.AddRange(_KeyValuePairs[encodingHash]); } } width = image.Width; @@ -635,7 +657,7 @@ public class PropertyLogic File.Move(propertyHolder.ImageFileInfo.FullName, filteredSourceDirectoryFileExtensionLowered); if (propertyHolder.Changed is null || propertyHolder.Changed.Value || propertyHolder.Property is null) { - property = 
GetPropertyOfPrivate(angleBracket, propertyHolder, firstPass, filteredSourceDirectoryFileTuples, parseExceptions, isIgnoreExtension, isValidImageFormatExtension, isValidMetadataExtensions, extensionLowered,fileNameWithoutExtension); + property = GetPropertyOfPrivate(angleBracket, propertyHolder, firstPass, filteredSourceDirectoryFileTuples, parseExceptions, isIgnoreExtension, isValidImageFormatExtension, isValidMetadataExtensions, extensionLowered, fileNameWithoutExtension); lock (propertyHolder) propertyHolder.Update(property); } @@ -799,4 +821,53 @@ public class PropertyLogic return results.OrderBy(l => l.Ticks).ToArray(); } + public void AddToPropertyLogicAllCollection(PropertyHolder[] filteredPropertyHolderCollection) + { + if (_SixCharacterNamedFaceInfo.Any()) + { + string[] keys; + PropertyHolder propertyHolder; + for (int i = 0; i < filteredPropertyHolderCollection.Length; i++) + { + propertyHolder = filteredPropertyHolderCollection[i]; + if (propertyHolder.Property?.Id is null) + continue; + foreach (int sixCharacterIndex in propertyHolder.Property.Indices) + { + if (!_SixCharacterNamedFaceInfo.ContainsKey(sixCharacterIndex)) + continue; + keys = _SixCharacterNamedFaceInfo[sixCharacterIndex]; + _AllCollection.Add(new(propertyHolder.Property.Id.Value, keys)); + } + } + } + } + + public void SaveAllCollection() + { + if (_AllCollection.Any()) + { + string[] keys; + string? rootDirectoryParent = Path.GetDirectoryName(_Configuration.RootDirectory); + if (string.IsNullOrEmpty(rootDirectoryParent)) + throw new Exception($"{nameof(rootDirectoryParent)} is null!"); + Dictionary namedFaceInfoDeterministicHashCodeIndices = new(); + List<(int, string[])> allCollection = _AllCollection.OrderBy(l => l.Item1).ToList(); + foreach ((int deterministicHashCode, string[] values) in allCollection) + { + if (namedFaceInfoDeterministicHashCodeIndices.ContainsKey(deterministicHashCode)) + { + keys = namedFaceInfoDeterministicHashCodeIndices[deterministicHashCode]; + if (JsonSerializer.Serialize(values) == JsonSerializer.Serialize(keys)) + continue; + throw new Exception(); + } + namedFaceInfoDeterministicHashCodeIndices.Add(deterministicHashCode, values); + } + string json = JsonSerializer.Serialize(namedFaceInfoDeterministicHashCodeIndices, new JsonSerializerOptions { WriteIndented = true }); + string checkFile = Path.Combine(rootDirectoryParent, "NamedFaceInfoDeterministicHashCodeIndices.json"); + _ = IPath.WriteAllText(checkFile, json, compareBeforeWrite: true); + } + } + } \ No newline at end of file diff --git a/Resize/Models/_C_Resize.cs b/Resize/Models/_C_Resize.cs index 272b28f..cd000e3 100644 --- a/Resize/Models/_C_Resize.cs +++ b/Resize/Models/_C_Resize.cs @@ -69,12 +69,12 @@ public class C_Resize public static (ImageCodecInfo imageCodecInfo, EncoderParameters encoderParameters) GetTuple(string outputExtension, int outputQuality) { (ImageCodecInfo imageCodecInfo, EncoderParameters encoderParameters) result; - ImageFormat imageFormat = outputExtension switch + System.Drawing.Imaging.ImageFormat imageFormat = outputExtension switch { - ".gif" => ImageFormat.Gif, - ".jpg" => ImageFormat.Jpeg, - ".png" => ImageFormat.Png, - ".tiff" => ImageFormat.Tiff, + ".gif" => System.Drawing.Imaging.ImageFormat.Gif, + ".jpg" => System.Drawing.Imaging.ImageFormat.Jpeg, + ".png" => System.Drawing.Imaging.ImageFormat.Png, + ".tiff" => System.Drawing.Imaging.ImageFormat.Tiff, _ => throw new Exception(), }; ImageCodecInfo imageCodecInfo = (from l in ImageCodecInfo.GetImageEncoders() where l.FormatID == 
imageFormat.Guid select l).First(); diff --git a/Shared/Models/FacePoint.cs b/Shared/Models/FacePoint.cs index d7a7381..bfd5f2e 100644 --- a/Shared/Models/FacePoint.cs +++ b/Shared/Models/FacePoint.cs @@ -1,3 +1,4 @@ +using System.Drawing; using System.Text.Json; using System.Text.Json.Serialization; using View_by_Distance.Shared.Models.Methods; @@ -14,18 +15,47 @@ public class FacePoint : Properties.IFacePoint, IFacePoint public int X => _X; public int Y => _Y; + private readonly Point _Point; + [JsonConstructor] public FacePoint(int index, int x, int y) { _Index = index; _X = x; _Y = y; + _Point = new(x, y); } + public FacePoint(Point point, int index) : + this(index, point.X, point.Y) + { } + + public override bool Equals(object? obj) => obj is FacePoint point && Equals(point); + public override string ToString() { string result = JsonSerializer.Serialize(this, new JsonSerializerOptions() { WriteIndented = true }); return result; } + public override int GetHashCode() + { + int hashCode = 1861411795; + hashCode = hashCode * -1521134295 + _Point.GetHashCode(); + hashCode = hashCode * -1521134295 + _Index.GetHashCode(); + return hashCode; + } + + public bool Equals(FacePoint? facePoint) + { + return facePoint is not null + && _X == facePoint.X + && _Y == facePoint.Y + && _Index == facePoint.Index; + } + + public static bool operator ==(FacePoint point1, FacePoint point2) => point1.Equals(point2); + + public static bool operator !=(FacePoint point1, FacePoint point2) => !(point1 == point2); + } \ No newline at end of file diff --git a/Shared/Models/Location.cs b/Shared/Models/Location.cs index 12e4c1b..c3b3dcb 100644 --- a/Shared/Models/Location.cs +++ b/Shared/Models/Location.cs @@ -1,10 +1,11 @@ +using System.Drawing; using System.Text.Json; using System.Text.Json.Serialization; using View_by_Distance.Shared.Models.Methods; namespace View_by_Distance.Shared.Models; -public class Location : Properties.ILocation, ILocation +public class Location : Properties.ILocation, ILocation, IEquatable { protected double _Confidence; @@ -28,10 +29,47 @@ public class Location : Properties.ILocation, ILocation _Top = top; } + public Location(int left, int top, int right, int bottom) : + this(-1.0d, bottom, left, right, top) + { } + + public Location(Rectangle rectangle, double confidence) : + this(-1.0d, rectangle.Bottom, rectangle.Left, rectangle.Right, rectangle.Top) + { } + + public Location(Location location, double confidence) : + this(-1.0d, location.Bottom, location.Left, location.Right, location.Top) + { } + + public override bool Equals(object? obj) => Equals(obj as Location); + public override string ToString() { string result = JsonSerializer.Serialize(this, new JsonSerializerOptions() { WriteIndented = true }); return result; } + public override int GetHashCode() + { + int hashCode = -773114317; + hashCode = hashCode * -1521134295 + _Bottom.GetHashCode(); + hashCode = hashCode * -1521134295 + _Left.GetHashCode(); + hashCode = hashCode * -1521134295 + _Right.GetHashCode(); + hashCode = hashCode * -1521134295 + _Top.GetHashCode(); + return hashCode; + } + + public bool Equals(Location? 
location) + { + return location is not null + && _Bottom == location.Bottom + && _Left == location.Left + && _Right == location.Right + && _Top == location.Top; + } + + public static bool operator ==(Location location1, Location location2) => EqualityComparer.Default.Equals(location1, location2); + + public static bool operator !=(Location location1, Location location2) => !(location1 == location2); + } \ No newline at end of file diff --git a/Shared/Models/Stateless/FacePart.cs b/Shared/Models/Stateless/FacePart.cs new file mode 100644 index 0000000..30931b1 --- /dev/null +++ b/Shared/Models/Stateless/FacePart.cs @@ -0,0 +1,59 @@ +namespace View_by_Distance.Shared.Models.Stateless; + +/// +/// Specifies the part of face. +/// +public enum FacePart +{ + + /// + /// Specifies the chin. + /// + Chin, + + /// + /// Specifies the left eyebrow. + /// + LeftEyebrow, + + /// + /// Specifies the right eyebrow. + /// + RightEyebrow, + + /// + /// Specifies the nose bridge. + /// + NoseBridge, + + /// + /// Specifies the nose tip. + /// + NoseTip, + + /// + /// Specifies the left eye. + /// + LeftEye, + + /// + /// Specifies the right eye. + /// + RightEye, + + /// + /// Specifies the top lip. + /// + TopLip, + + /// + /// Specifies the bottom lip. + /// + BottomLip, + + /// + /// Specifies the nose. + /// + Nose, + +} \ No newline at end of file diff --git a/Shared/Models/Stateless/ImageFormat.cs b/Shared/Models/Stateless/ImageFormat.cs new file mode 100644 index 0000000..b1024a7 --- /dev/null +++ b/Shared/Models/Stateless/ImageFormat.cs @@ -0,0 +1,24 @@ +namespace View_by_Distance.Shared.Models.Stateless; + +/// +/// Specifies the file format of the image. +/// +public enum ImageFormat +{ + + /// + /// Specifies that the bitmap (BMP) image format. + /// + Bmp, + + /// + /// Specifies that the Joint Photographic Experts Group (JPEG) image format. + /// + Jpeg, + + /// + /// Specifies that the W3C Portable Network Graphics (PNG) image format. + /// + Png, + +} \ No newline at end of file diff --git a/Shared/Models/Stateless/Methods/IPersonBirthday.cs b/Shared/Models/Stateless/Methods/IPersonBirthday.cs index 202c227..234f7eb 100644 --- a/Shared/Models/Stateless/Methods/IPersonBirthday.cs +++ b/Shared/Models/Stateless/Methods/IPersonBirthday.cs @@ -18,6 +18,9 @@ public interface IPersonBirthday string TestStatic_GetFormatted(Models.PersonBirthday personBirthday) => PersonBirthday.GetFormatted(personBirthday); static string GetFormatted(Models.PersonBirthday personBirthday) => PersonBirthday.GetFormatted(personBirthday); + DateTime? TestStatic_Get(string personKey) => PersonBirthday.Get(personKey); + static DateTime? 
Get(string personKey) => PersonBirthday.Get(personKey); + string TestStatic_GetFileName(Models.PersonBirthday personBirthday) => PersonBirthday.GetFileName(personBirthday); static string GetFileName(Models.PersonBirthday personBirthday) => PersonBirthday.GetFileName(personBirthday); diff --git a/Shared/Models/Stateless/Methods/PersonBirthday.cs b/Shared/Models/Stateless/Methods/PersonBirthday.cs index b5b4983..4404bae 100644 --- a/Shared/Models/Stateless/Methods/PersonBirthday.cs +++ b/Shared/Models/Stateless/Methods/PersonBirthday.cs @@ -1,3 +1,5 @@ +using System.Globalization; + namespace View_by_Distance.Shared.Models.Stateless.Methods; internal abstract class PersonBirthday @@ -13,5 +15,5 @@ internal abstract class PersonBirthday internal static string GetFileName(Models.PersonBirthday personBirthday) => $"{personBirthday.Value.ToString(GetFormat())}.json"; internal static bool DoesBirthDateExits(Properties.IStorage storage, Models.PersonBirthday personBirthday) => File.Exists(GetFileFullName(storage, personBirthday)); internal static string GetFileFullName(Properties.IStorage storage, Models.PersonBirthday personBirthday) => Path.Combine(storage.PeopleRootDirectory, "{}", GetFileName(personBirthday)); - + internal static DateTime? Get(string personKey) => DateTime.TryParseExact(personKey, GetFormat(), CultureInfo.InvariantCulture, DateTimeStyles.None, out DateTime dateTime) ? dateTime : null; } \ No newline at end of file diff --git a/Shared/Models/Stateless/Mode.cs b/Shared/Models/Stateless/Mode.cs new file mode 100644 index 0000000..bf9f996 --- /dev/null +++ b/Shared/Models/Stateless/Mode.cs @@ -0,0 +1,19 @@ +namespace View_by_Distance.Shared.Models.Stateless; + +/// +/// Specifies the image mode. +/// +public enum Mode +{ + + /// + /// Specifies that the rgb (8-bit Red, Green and Blue, 3 channels) image mode. + /// + Rgb, + + /// + /// Specifies that the greyscale image mode. + /// + Greyscale + +} \ No newline at end of file diff --git a/Shared/Models/Stateless/Model.cs b/Shared/Models/Stateless/Model.cs new file mode 100644 index 0000000..96214dc --- /dev/null +++ b/Shared/Models/Stateless/Model.cs @@ -0,0 +1,24 @@ +namespace View_by_Distance.Shared.Models.Stateless; + +/// +/// Specifies the model of face detector. +/// +public enum Model +{ + + /// + /// Specifies that the model is HOG (Histograms of Oriented Gradients) based face detector. + /// + Hog, + + /// + /// Specifies that the model is CNN (Convolutional Neural Network) based face detector. + /// + Cnn, + + /// + /// Specifies that the custom face detector. + /// + Custom + +} \ No newline at end of file diff --git a/Shared/Models/Stateless/PredictorModel.cs b/Shared/Models/Stateless/PredictorModel.cs new file mode 100644 index 0000000..8770e80 --- /dev/null +++ b/Shared/Models/Stateless/PredictorModel.cs @@ -0,0 +1,24 @@ +namespace View_by_Distance.Shared.Models.Stateless; + +/// +/// Specifies the dimension of vector which be returned from detector. +/// +public enum PredictorModel +{ + + /// + /// Specifies that the large scale detector. The detector returns 68 points for represent face. + /// + Large, + + /// + /// Specifies that the small scale detector. The detector returns 5 points for represent face. + /// + Small, + + /// + /// Specifies that the custom detector. 
+ /// + Custom + +} \ No newline at end of file diff --git a/Tests/UnitTestExample.cs b/Tests/UnitTestExample.cs index a79924e..ccc67b2 100644 --- a/Tests/UnitTestExample.cs +++ b/Tests/UnitTestExample.cs @@ -1,11 +1,11 @@ using Microsoft.Extensions.Configuration; using Microsoft.VisualStudio.TestTools.UnitTesting; -using Serilog; -using System.Reflection; -using System.Diagnostics; -using View_by_Distance.Tests.Models; -using View_by_Distance.Shared.Models.Stateless.Methods; using Phares.Shared; +using Serilog; +using System.Diagnostics; +using System.Reflection; +using View_by_Distance.Shared.Models.Stateless.Methods; +using View_by_Distance.Tests.Models; namespace View_by_Distance.Tests; diff --git a/Tests/UnitTestResize.cs b/Tests/UnitTestResize.cs index d0d3bd3..c61df4d 100644 --- a/Tests/UnitTestResize.cs +++ b/Tests/UnitTestResize.cs @@ -1,14 +1,14 @@ using Microsoft.Extensions.Configuration; using Microsoft.VisualStudio.TestTools.UnitTesting; -using Serilog; -using System.Reflection; -using System.Diagnostics; -using View_by_Distance.Tests.Models; -using View_by_Distance.Shared.Models.Stateless.Methods; using Phares.Shared; -using View_by_Distance.Resize.Models; +using Serilog; +using System.Diagnostics; using System.Drawing.Imaging; +using System.Reflection; using View_by_Distance.Metadata.Models; +using View_by_Distance.Resize.Models; +using View_by_Distance.Shared.Models.Stateless.Methods; +using View_by_Distance.Tests.Models; namespace View_by_Distance.Tests; diff --git a/Tests/appsettings.Development.json b/Tests/appsettings.Development.json index 156bcd1..59afabe 100644 --- a/Tests/appsettings.Development.json +++ b/Tests/appsettings.Development.json @@ -86,7 +86,7 @@ "PropertiesChangedForResize": false, "Reverse": false, "xRootDirectory": "C:/Tmp/phares/Pictures", - "RootDirectory": "F:/Tmp/Phares/Compare/Images 2022-07-27 - 20220727 - III", + "RootDirectory": "F:/Tmp/Phares/Compare/Images 2022-07-27 - f642c5669a1d89d598a2efd70da9dc7129d02c15 - III", "SaveFullYearOfRandomFiles": true, "SaveResizedSubFiles": true, "SaveShortcuts": true, @@ -159,7 +159,8 @@ "176 x 176", "256 x 256", "353 x 353", - "1024 x 768" + "1024 x 768", + "1920 x 1080" ], "OutputResolutions": [ "1920 x 1080" diff --git a/View-by-Distance-MKLink-Console.sln b/View-by-Distance-MKLink-Console.sln index fcbf8a2..3dfaebb 100644 --- a/View-by-Distance-MKLink-Console.sln +++ b/View-by-Distance-MKLink-Console.sln @@ -25,6 +25,8 @@ Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Instance", "Instance\Instan EndProject Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Tests", "Tests\Tests.csproj", "{B4FB6B43-36EC-404D-B934-5C695C6E32CC}" EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "FaceRecognitionDotNet", "FaceRecognitionDotNet\FaceRecognitionDotNet.csproj", "{FAD03DA9-E8B1-4BBE-B8D0-2ADD2F2BC758}" +EndProject Global GlobalSection(SolutionConfigurationPlatforms) = preSolution Debug|Any CPU = Debug|Any CPU @@ -78,5 +80,9 @@ Global {B4FB6B43-36EC-404D-B934-5C695C6E32CC}.Debug|Any CPU.Build.0 = Debug|Any CPU {B4FB6B43-36EC-404D-B934-5C695C6E32CC}.Release|Any CPU.ActiveCfg = Release|Any CPU {B4FB6B43-36EC-404D-B934-5C695C6E32CC}.Release|Any CPU.Build.0 = Release|Any CPU + {FAD03DA9-E8B1-4BBE-B8D0-2ADD2F2BC758}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {FAD03DA9-E8B1-4BBE-B8D0-2ADD2F2BC758}.Debug|Any CPU.Build.0 = Debug|Any CPU + {FAD03DA9-E8B1-4BBE-B8D0-2ADD2F2BC758}.Release|Any CPU.ActiveCfg = Release|Any CPU + {FAD03DA9-E8B1-4BBE-B8D0-2ADD2F2BC758}.Release|Any CPU.Build.0 = Release|Any CPU 
	EndGlobalSection
EndGlobal
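
For anyone reviewing the new class library end to end, the sketch below shows one way the wrapper introduced by this patch could be exercised. It is a minimal sketch, not part of the patch: the model directory and image paths are placeholders, and FaceRecognition.Create(string) and FaceRecognition.FaceDistance are assumed to carry over from the upstream FaceRecognitionDotNet API (only LoadImage, LoadImageFile, FaceLandmark, and FaceEncodings are visible in this diff). The FaceEncodings call shape matches the call sites in Instance/Models/_D_Face.cs.

using System;
using System.Linq;
using View_by_Distance.FaceRecognitionDotNet;
using View_by_Distance.Shared.Models.Stateless;

internal static class FaceRecognitionSmokeTest
{
    internal static void Run()
    {
        // Placeholder directory holding the .dat files named in FaceRecognitionModels.cs (assumption).
        const string modelDirectory = @"C:\Tmp\Phares\Models";

        // Assumption: the vendored wrapper keeps the upstream FaceRecognition.Create(string) factory.
        using FaceRecognition faceRecognition = FaceRecognition.Create(modelDirectory);

        // LoadImageFile is added by this patch; Mode.Rgb is its default color mode.
        using Image? known = FaceRecognition.LoadImageFile(@"C:\Tmp\known.jpg", Mode.Rgb);
        using Image? unknown = FaceRecognition.LoadImageFile(@"C:\Tmp\unknown.jpg");
        if (known is null || unknown is null)
            return;

        // Same call shape as _D_Face.cs: known face location, jitters, predictor model, detector model.
        FaceEncoding[] knownEncodings = faceRecognition.FaceEncodings(known, knownFaceLocation: null, 1, PredictorModel.Large, Model.Hog).ToArray();
        FaceEncoding[] unknownEncodings = faceRecognition.FaceEncodings(unknown, knownFaceLocation: null, 1, PredictorModel.Large, Model.Hog).ToArray();
        if (knownEncodings.Length == 0 || unknownEncodings.Length == 0)
            return;

        // Assumption: FaceDistance is retained from the upstream API; smaller values mean a closer match.
        double distance = FaceRecognition.FaceDistance(knownEncodings[0], unknownEncodings[0]);
        Console.WriteLine($"Distance: {distance:0.000}");
    }
}

One design note grounded in the patch itself: the Mode, Model, PredictorModel, FacePart, and ImageFormat enums were placed in Shared/Models/Stateless rather than in the new FaceRecognitionDotNet project, which lets consumers such as _D_Face.cs and _E_Distance.cs reference them without taking a direct dependency on the dlib-backed assembly.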