using FaceRecognitionDotNet;
using System.Drawing;
using System.Drawing.Drawing2D;
using System.Text.Json;
using System.Text.Json.Serialization;
using View_by_Distance.Metadata.Models;
using View_by_Distance.Property.Models;
using View_by_Distance.Resize.Models;
using View_by_Distance.Shared.Models;
using View_by_Distance.Shared.Models.Methods;
namespace View_by_Distance.Instance.Models;
/// <summary>
/// List of D_Face
/// </summary>
public class D_Face : Shared.Models.Properties.IFace, IFace
{
internal List<string> AngleBracketCollection { get; }
private readonly Model _Model;
private readonly string _ArgZero;
private readonly Serilog.ILogger? _Log;
private readonly ModelParameter _ModelParameter;
private readonly PredictorModel _PredictorModel;
private readonly Configuration _Configuration;
private readonly JsonSerializerOptions _WriteIndentedJsonSerializerOptions;
protected double? _Α;
protected DateTime _DateTime;
protected Shared.Models.FaceEncoding _FaceEncoding;
protected Dictionary<string, Shared.Models.FacePoint[]> _FaceLandmarks;
protected Shared.Models.Location _Location;
protected int? _LocationIndex;
protected OutputResolution _OutputResolution;
protected bool _Populated;
protected string _RelativePath;
public double? α => _Α;
public DateTime DateTime => _DateTime;
public Shared.Models.FaceEncoding FaceEncoding => _FaceEncoding;
public Dictionary<string, Shared.Models.FacePoint[]> FaceLandmarks => _FaceLandmarks;
public OutputResolution OutputResolution => _OutputResolution;
public Shared.Models.Location Location => _Location;
public int? LocationIndex => _LocationIndex;
public bool Populated => _Populated;
public string RelativePath => _RelativePath;
#nullable disable
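// Deserialization constructor: System.Text.Json uses this when reading the per-image face JSON cache back in GetFaces below.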
[JsonConstructor]
public D_Face(double? α, DateTime dateTime, Shared.Models.FaceEncoding faceEncoding, Dictionary<string, Shared.Models.FacePoint[]> faceLandmarks, Shared.Models.Location location, int? locationIndex, OutputResolution outputResolution, bool populated, string relativePath)
{
_Α = α;
_DateTime = dateTime;
_FaceEncoding = faceEncoding;
_FaceLandmarks = faceLandmarks;
_Location = location;
_LocationIndex = locationIndex;
_OutputResolution = outputResolution;
_Populated = populated;
_RelativePath = relativePath;
}
internal D_Face(Configuration configuration, string argZero, Model model, ModelParameter modelParameter, PredictorModel predictorModel)
{
_Model = model;
_ArgZero = argZero;
_Configuration = configuration;
_ModelParameter = modelParameter;
_PredictorModel = predictorModel;
AngleBracketCollection = new List<string>();
_Log = Serilog.Log.ForContext<D_Face>();
_WriteIndentedJsonSerializerOptions = new JsonSerializerOptions { WriteIndented = true };
}
private D_Face(Shared.Models.Location location)
{
_Α = null;
_DateTime = DateTime.MinValue;
_FaceEncoding = null;
_FaceLandmarks = null;
_OutputResolution = null;
_Location = location;
_LocationIndex = null;
_Populated = false;
_RelativePath = string.Empty;
}
private D_Face()
{
_Α = null;
_DateTime = DateTime.MinValue;
_FaceEncoding = null;
_FaceLandmarks = null;
_OutputResolution = null;
_Location = null;
_LocationIndex = null;
_Populated = false;
_RelativePath = string.Empty;
}
private D_Face(A_Property property, int outputResolutionWidth, int outputResolutionHeight, int outputResolutionOrientation, string relativePath, int? i, Shared.Models.Location location)
{
DateTime?[] dateTimes;
dateTimes = new DateTime?[] { property.CreationTime, property.LastWriteTime, property.DateTime, property.DateTimeDigitized, property.DateTimeOriginal, property.GPSDateStamp };
_DateTime = (from l in dateTimes where l.HasValue select l.Value).Min();
_FaceLandmarks = new Dictionary<string, Shared.Models.FacePoint[]>();
_OutputResolution = new(outputResolutionHeight, outputResolutionOrientation, outputResolutionWidth);
_Location = location;
_LocationIndex = i;
_Populated = false;
_RelativePath = relativePath;
}
private D_Face(int outputResolutionWidth, int outputResolutionHeight, int outputResolutionOrientation, Shared.Models.Properties.IFace face)
{
_Α = face.α;
_DateTime = face.DateTime;
_FaceEncoding = face.FaceEncoding;
_FaceLandmarks = face.FaceLandmarks;
_OutputResolution = new(outputResolutionHeight, outputResolutionOrientation, outputResolutionWidth);
_Location = face.Location;
_LocationIndex = face.LocationIndex;
_Populated = face.Populated;
_RelativePath = face.RelativePath;
}
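// Computes the axis-aligned bounding box of a point set; used by RotateBitmap below to size the canvas for the rotated image.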
private static void GetPointBounds(PointF[] points, out float xmin, out float xmax, out float ymin, out float ymax)
{
xmin = points[0].X;
xmax = xmin;
ymin = points[0].Y;
ymax = ymin;
foreach (PointF point in points)
{
if (xmin > point.X)
xmin = point.X;
if (xmax < point.X)
xmax = point.X;
if (ymin > point.Y)
ymin = point.Y;
if (ymax < point.Y)
ymax = point.Y;
}
}
public override string ToString()
{
string result = JsonSerializer.Serialize(this, new JsonSerializerOptions() { WriteIndented = true });
return result;
}
#pragma warning disable CA1416
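// Rotates a bitmap about its center onto a canvas large enough to hold the rotated corners.
// System.Drawing is Windows-only, hence the CA1416 suppression and the platform #if guards below.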
internal static Bitmap RotateBitmap(Bitmap bitmap, float angle)
{
Bitmap result;
#if Linux
throw new Exception("Built on Linux!");
#elif OSX
throw new Exception("Built on macOS!");
#elif Windows
// Make a Matrix to represent rotation
// by this angle.
Matrix rotate_at_origin = new();
rotate_at_origin.Rotate(angle);
// Rotate the image's corners to see how big
// it will be after rotation.
PointF[] points =
{
new PointF(0, 0),
new PointF(bitmap.Width, 0),
new PointF(bitmap.Width, bitmap.Height),
new PointF(0, bitmap.Height),
};
rotate_at_origin.TransformPoints(points);
float xmin, xmax, ymin, ymax;
GetPointBounds(points, out xmin, out xmax, out ymin, out ymax);
// Make a bitmap to hold the rotated result.
int wid = (int)Math.Round(xmax - xmin);
int hgt = (int)Math.Round(ymax - ymin);
result = new Bitmap(wid, hgt);
// Create the real rotation transformation.
Matrix rotate_at_center = new();
rotate_at_center.RotateAt(angle,
new PointF(wid / 2f, hgt / 2f));
// Draw the image onto the new bitmap rotated.
using (Graphics gr = Graphics.FromImage(result))
{
// Use smooth image interpolation.
gr.InterpolationMode = InterpolationMode.High;
// Clear with the color in the image's upper left corner.
gr.Clear(bitmap.GetPixel(0, 0));
// For debugging. (It's easier to see the background.)
// gr.Clear(Color.LightBlue);
// Set up the transformation to rotate.
gr.Transform = rotate_at_center;
// Draw the image centered on the bitmap.
int x = (wid - bitmap.Width) / 2;
int y = (hgt - bitmap.Height) / 2;
gr.DrawImage(bitmap, x, y);
}
#endif
// Return the result bitmap.
return result;
}
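// Crops each populated face's Location rectangle out of the resized source image and saves it as a PNG
// at the matching index in imageFiles; faces that are unpopulated or have no location are skipped.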
private static void SaveFaces(List<D_Face> faceCollection, FileInfo resizedFileInfo, List<string> imageFiles)
{
int width;
int height;
Graphics graphics;
Rectangle rectangle;
Bitmap preRotated;
Shared.Models.Location location;
using Bitmap source = new(resizedFileInfo.FullName);
for (int i = 0; i < faceCollection.Count; i++)
{
if (faceCollection[i]?.Location is null || !faceCollection[i].Populated)
continue;
location = new Shared.Models.Location(faceCollection[i].Location.Confidence,
faceCollection[i].Location.Bottom,
faceCollection[i].Location.Left,
faceCollection[i].Location.Right,
faceCollection[i].Location.Top);
width = location.Right - location.Left;
height = location.Bottom - location.Top;
rectangle = new Rectangle(location.Left, location.Top, width, height);
using (preRotated = new(width, height))
{
using (graphics = Graphics.FromImage(preRotated))
graphics.DrawImage(source, new Rectangle(0, 0, width, height), rectangle, GraphicsUnit.Pixel);
preRotated.Save(imageFiles[i], System.Drawing.Imaging.ImageFormat.Png);
}
}
}
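// Detection pipeline for a single resized image: detect face locations, then for each location grow the crop
// by one padding step per pass (up to PaddingLoops), extract landmarks, rotate the crop by the eye-line angle α,
// and compute the encoding with NumJitters jitters; a location that never yields exactly one landmark set and
// one encoding is still recorded, as an unpopulated D_Face.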
private List<D_Face> GetFaces(FileInfo resizedFileInfo, string relativePath, string fileNameWithoutExtension, A_Property property, int outputResolutionWidth, int outputResolutionHeight, int outputResolutionOrientation, string facesDirectory)
{
List<D_Face> results = new();
if (_Configuration.PaddingLoops is null)
throw new Exception();
if (_Configuration.NumJitters is null)
throw new Exception();
FaceRecognitionDotNet.Location[] locations;
FaceRecognitionDotNet.Image unknownImage = null;
if (resizedFileInfo.Exists)
{
try
{ unknownImage = FaceRecognition.LoadImageFile(resizedFileInfo.FullName); }
catch (Exception) { }
}
if (unknownImage is null)
results.Add(new D_Face(property, outputResolutionWidth, outputResolutionHeight, outputResolutionOrientation, relativePath, i: null, location: null));
else
{
FaceRecognition faceRecognition = FaceRecognition.Create(_ModelParameter);
locations = faceRecognition.FaceLocations(unknownImage, numberOfTimesToUpsample: 1, _Model).ToArray();
if (!locations.Any())
results.Add(new D_Face(property, outputResolutionWidth, outputResolutionHeight, outputResolutionOrientation, relativePath, i: null, location: null));
else
{
double? α;
int width;
int height;
int padding;
int leftEyeX;
int leftEyeY;
int rightEyeX;
int rightEyeY;
string faceFile;
Graphics graphics;
D_Face face = null;
Rectangle rectangle;
double[] rawEncoding;
Bitmap rotated;
Bitmap preRotated;
FaceRecognitionDotNet.Image knownImage;
FaceRecognitionDotNet.Image rotatedImage;
Shared.Models.Location location;
FaceRecognitionDotNet.FaceEncoding[] faceEncodings;
IEnumerable<FacePoint> facePoints;
Shared.Models.FaceEncoding faceEncoding;
IDictionary<FacePart, IEnumerable<FacePoint>>[] faceLandmarks;
using Bitmap source = unknownImage.ToBitmap();
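// Padding per pass is roughly 1% of the average of the source width and height.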
padding = (int)((source.Width + source.Height) / 2 * .01);
for (int i = 0; i < locations.Length; i++)
{
for (int p = 0; p <= _Configuration.PaddingLoops.Value; p++)
{
//Location(double confidence, int bottom, int left, int right, int top)
location = new(locations[i].Confidence,
locations[i].Bottom + (padding * p),
locations[i].Left - (padding * p),
locations[i].Right + (padding * p),
locations[i].Top - (padding * p));
face = new D_Face(property, outputResolutionWidth, outputResolutionHeight, outputResolutionOrientation, relativePath, i, location);
width = location.Right - location.Left;
height = location.Bottom - location.Top;
rectangle = new Rectangle(location.Left, location.Top, width, height);
using (preRotated = new Bitmap(width, height))
{
using (graphics = Graphics.FromImage(preRotated))
graphics.DrawImage(source, new Rectangle(0, 0, width, height), rectangle, GraphicsUnit.Pixel);
// source.Save(Path.Combine(_Configuration.RootDirectory, "source.jpg"));
// preRotated.Save(Path.Combine(_Configuration.RootDirectory, $"{p} - preRotated.jpg"));
using (knownImage = FaceRecognition.LoadImage(preRotated))
faceLandmarks = faceRecognition.FaceLandmark(knownImage, faceLocations: null, _PredictorModel, _Model).ToArray();
if (faceLandmarks.Length == 0 && p < _Configuration.PaddingLoops.Value)
continue;
else if (faceLandmarks.Length != 1)
continue;
foreach (KeyValuePair<FacePart, IEnumerable<FacePoint>> keyValuePair in faceLandmarks[0])
face.FaceLandmarks.Add(keyValuePair.Key.ToString(), (from l in keyValuePair.Value select new Shared.Models.FacePoint(l.Index, l.Point.X, l.Point.Y)).ToArray());
if (!faceLandmarks[0].ContainsKey(FacePart.LeftEye) || !faceLandmarks[0].ContainsKey(FacePart.RightEye))
continue;
facePoints = faceLandmarks[0][FacePart.LeftEye];
leftEyeX = (int)(from l in facePoints select l.Point.X).Average();
leftEyeY = (int)(from l in facePoints select l.Point.Y).Average();
facePoints = faceLandmarks[0][FacePart.RightEye];
rightEyeX = (int)(from l in facePoints select l.Point.X).Average();
rightEyeY = (int)(from l in facePoints select l.Point.Y).Average();
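// α appears to be the angle of the line through the eye centers; the crop is rotated by it so the eyes sit level before encoding.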
α = Shared.Models.Stateless.Methods.IFace.Getα(rightEyeX, leftEyeX, rightEyeY, leftEyeY);
using (rotated = RotateBitmap(preRotated, (float)α.Value))
{
// rotated.Save(Path.Combine(_Configuration.RootDirectory, $"{p} - rotated.jpg"));
using (rotatedImage = FaceRecognition.LoadImage(rotated))
faceEncodings = faceRecognition.FaceEncodings(rotatedImage, knownFaceLocation: null, _Configuration.NumJitters.Value, _PredictorModel, _Model).ToArray();
if (faceEncodings.Length == 0 && p < _Configuration.PaddingLoops.Value)
continue;
else if (faceEncodings.Length != 1)
continue;
rawEncoding = faceEncodings[0].GetRawEncoding();
faceEncoding = new(rawEncoding, faceEncodings[0].Size);
face.Update(α, faceEncoding, populated: true);
}
faceFile = Path.Combine(facesDirectory, $"{i} - {fileNameWithoutExtension}.png");
preRotated.Save(faceFile, System.Drawing.Imaging.ImageFormat.Png);
results.Add(face);
}
if (face.Populated)
break;
}
if (face is null || !face.Populated)
{
location = new(locations[i].Confidence,
locations[i].Bottom,
locations[i].Left,
locations[i].Right,
locations[i].Top);
face = new D_Face(property, outputResolutionWidth, outputResolutionHeight, outputResolutionOrientation, relativePath, i, location);
results.Add(face);
}
}
}
unknownImage.Dispose();
faceRecognition.Dispose();
}
if (!results.Any())
throw new Exception();
return results;
}
#pragma warning restore CA1416
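// Records the computed angle and encoding and flags the face as populated.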
private void Update(double? α, Shared.Models.FaceEncoding faceEncoding, bool populated)
{
_Α = α;
_FaceEncoding = faceEncoding;
_Populated = populated;
}
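// Cache-aware wrapper: when PropertiesChangedForFaces is false and the per-image JSON exists and is newer than
// the upstream A_Property/B_Metadata/C_Resize outputs, the faces are deserialized from that JSON; otherwise
// detection runs again and the JSON is rewritten (compareBeforeWrite: true suggests the file is only touched
// when its content actually changes).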
internal List<D_Face> GetFaces(Property.Models.Configuration configuration, string outputResolution, List<Tuple<string, DateTime>> subFileTuples, List<string> parseExceptions, string relativePath, string fileNameWithoutExtension, A_Property property, FileInfo resizedFileInfo, int outputResolutionWidth, int outputResolutionHeight, int outputResolutionOrientation)
{
List<D_Face> results;
if (_Configuration.PropertiesChangedForFaces is null)
throw new Exception();
string json;
D_Face face;
bool checkForOutputResolutionChange = false;
string[] changesFrom = new string[] { nameof(A_Property), nameof(B_Metadata), nameof(C_Resize) };
string facesDirectory = Path.Combine(AngleBracketCollection[0].Replace("<>", "()"), fileNameWithoutExtension);
List<DateTime> dateTimes = (from l in subFileTuples where changesFrom.Contains(l.Item1) select l.Item2).ToList();
FileInfo fileInfo = new(Path.Combine(AngleBracketCollection[0].Replace("<>", "[]"), $"{fileNameWithoutExtension}.json"));
if (!fileInfo.Exists)
{
if (fileInfo.Directory?.Parent is null)
throw new Exception();
string parentCheck = Path.Combine(fileInfo.Directory.Parent.FullName, fileInfo.Name);
if (File.Exists(parentCheck))
File.Delete(parentCheck);
}
if (!Directory.Exists(facesDirectory))
_ = Directory.CreateDirectory(facesDirectory);
if (_Configuration.PropertiesChangedForFaces.Value)
results = null;
else if (!fileInfo.Exists)
results = null;
else if (dateTimes.Any() && dateTimes.Max() > fileInfo.LastWriteTime)
results = null;
else
{
json = Shared.Models.Stateless.Methods.IFace.GetJson(fileInfo.FullName);
try
{
results = JsonSerializer.Deserialize<List<D_Face>>(json);
for (int i = 0; i < results.Count; i++)
{
face = results[i];
if (face.OutputResolution is not null)
continue;
if (!checkForOutputResolutionChange)
checkForOutputResolutionChange = true;
results[i] = new(outputResolutionWidth, outputResolutionHeight, outputResolutionOrientation, face);
}
subFileTuples.Add(new Tuple<string, DateTime>(nameof(D_Face), fileInfo.LastWriteTime));
}
catch (Exception)
{
results = null;
parseExceptions.Add(nameof(D_Face));
}
}
if (results is not null && checkForOutputResolutionChange)
{
json = JsonSerializer.Serialize(results, _WriteIndentedJsonSerializerOptions);
if (Property.Models.Stateless.IPath.WriteAllText(fileInfo.FullName, json, compareBeforeWrite: true))
File.SetLastWriteTime(fileInfo.FullName, fileInfo.CreationTime);
}
else if (results is null)
{
results = GetFaces(resizedFileInfo, relativePath, fileNameWithoutExtension, property, outputResolutionWidth, outputResolutionHeight, outputResolutionOrientation, facesDirectory);
json = JsonSerializer.Serialize(results, _WriteIndentedJsonSerializerOptions);
if (Property.Models.Stateless.IPath.WriteAllText(fileInfo.FullName, json, compareBeforeWrite: true))
subFileTuples.Add(new Tuple<string, DateTime>(nameof(D_Face), DateTime.Now));
}
return results;
}
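// Decides whether the face crop PNGs need to be (re)written (OverrideForFaceImages set, a crop file missing,
// or an upstream output newer than an existing crop) and, if so, delegates to the static SaveFaces above.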
internal void SaveFaces(Property.Models.Configuration configuration, List<Tuple<string, DateTime>> subFileTuples, List<string> parseExceptions, string relativePath, string fileNameWithoutExtension, FileInfo resizedFileInfo, List<D_Face> faceCollection)
{
if (_Configuration.OverrideForFaceImages is null)
throw new Exception();
FileInfo fileInfo;
bool check = false;
string parentCheck;
List<string> imageFiles = new();
string[] changesFrom = new string[] { nameof(A_Property), nameof(B_Metadata), nameof(C_Resize) };
string facesDirectory = Path.Combine(AngleBracketCollection[0].Replace("<>", "()"), fileNameWithoutExtension);
List<DateTime> dateTimes = (from l in subFileTuples where changesFrom.Contains(l.Item1) select l.Item2).ToList();
bool facesDirectoryExisted = Directory.Exists(facesDirectory);
if (!facesDirectoryExisted)
_ = Directory.CreateDirectory(facesDirectory);
for (int i = 0; i < faceCollection.Count; i++)
{
if (faceCollection[i]?.Location is null || !faceCollection[i].Populated)
{
imageFiles.Add(string.Empty);
continue;
}
fileInfo = new FileInfo(Path.Combine(facesDirectory, $"{i} - {fileNameWithoutExtension}.png"));
if (!fileInfo.Exists)
{
if (fileInfo.Directory?.Parent is null)
throw new Exception();
parentCheck = Path.Combine(fileInfo.Directory.Parent.FullName, fileInfo.Name);
if (File.Exists(parentCheck))
File.Delete(parentCheck);
}
imageFiles.Add(fileInfo.FullName);
if (_Configuration.OverrideForFaceImages.Value)
check = true;
else if (!fileInfo.Exists)
check = true;
else if (dateTimes.Any() && dateTimes.Max() > fileInfo.LastWriteTime)
check = true;
}
if (check)
SaveFaces(faceCollection, resizedFileInfo, imageFiles);
}
double Shared.Models.Stateless.Methods.IFace.TestStatic_Getα(int x1, int x2, int y1, int y2) => throw new NotImplementedException();
string Shared.Models.Stateless.Methods.IFace.TestStatic_GetJson(string jsonFileFullName) => throw new NotImplementedException();
Face Shared.Models.Stateless.Methods.IFace.TestStatic_GetFace(string jsonFileFullName) => throw new NotImplementedException();
Face[] Shared.Models.Stateless.Methods.IFace.TestStatic_GetFaces(string jsonFileFullName) => throw new NotImplementedException();
}