Refactor Description and ProcessData classes to improve JSON handling; add GetDefaultJsonElement method and streamline GetExtractResult logic

2025-11-17 13:14:42 -07:00
parent 9810cfd43c
commit 9423feaaf5
3 changed files with 35 additions and 422 deletions


@@ -408,7 +408,7 @@ public class Description : IDescription, Shared.Properties.IDescription
         return result;
     }
 
-    private Description GetDefault(IFileRead fileRead, Logistics logistics)
+    private static Description GetDefault(IFileRead fileRead, Logistics logistics)
     {
         Description result = new()
         {
@@ -516,6 +516,15 @@
         return result;
     }
 
+    internal static JsonElement GetDefaultJsonElement(IFileRead fileRead, Logistics logistics)
+    {
+        JsonElement result;
+        Description description = GetDefault(fileRead, logistics);
+        string json = JsonSerializer.Serialize(description, DescriptionSourceGenerationContext.Default.Description);
+        result = JsonSerializer.Deserialize<JsonElement>(json);
+        return result;
+    }
+
     internal static string GetDateFormat() => "MM/dd/yyyy hh:mm:ss tt";
 }
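The added helper obtains a JsonElement by serializing a default Description through the source-generated context and re-parsing the JSON. A minimal sketch of that round-trip pattern, assuming .NET 6+ System.Text.Json source generation and using a hypothetical Payload record and context in place of the repository's Description types:

using System.Text.Json;
using System.Text.Json.Serialization;

// Hypothetical stand-in for Description; only illustrates the round-trip.
internal record Payload(string JobID, string MesEntity, string Date);

// Source-generated serializer context, analogous to DescriptionSourceGenerationContext.
[JsonSerializable(typeof(Payload))]
internal partial class PayloadSourceGenerationContext : JsonSerializerContext
{
}

internal static class JsonElementExample
{
    internal static JsonElement GetDefaultJsonElement()
    {
        Payload payload = new("job-1", "entity-1", "01/01/2025 12:00:00 AM");
        // Serialize with the generated metadata, then re-parse as an untyped JsonElement.
        string json = JsonSerializer.Serialize(payload, PayloadSourceGenerationContext.Default.Payload);
        return JsonSerializer.Deserialize<JsonElement>(json);
    }
}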


@@ -109,39 +109,38 @@ public class FileRead : Shared.FileRead, IFileRead
     private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
     {
-        Tuple<string, Test[], JsonElement[], List<FileInfo>> results = new(string.Empty, Array.Empty<Test>(), Array.Empty<JsonElement>(), new List<FileInfo>());
+        Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
+        Test[] tests = Array.Empty<Test>();
+        List<JsonElement> jsonElements = new();
+        List<FileInfo> fileInfoCollection = new();
         _TickOffset ??= 0; // new FileInfo(reportFullPath).LastWriteTime.Ticks - dateTime.Ticks;
         _Logistics = new Logistics(this, _TickOffset.Value, reportFullPath, useSplitForMID: true);
         SetFileParameterLotIDToLogisticsMID();
+        fileInfoCollection.Add(_Logistics.FileInfo);
         if (_Logistics.FileInfo.Length < _MinFileLength)
-            results.Item4.Add(_Logistics.FileInfo);
+            results = new(string.Empty, tests, jsonElements.ToArray(), fileInfoCollection);
         else
         {
-            ReadOnlyCollection<string> lines = Convert.PDF(_Logistics, _GhostPCLFileName, _PDFTextStripperFileName, results.Item4);
-            Run? run = Run.Get(_Logistics, results.Item4, lines);
+            ReadOnlyCollection<string> lines = Convert.PDF(_Logistics, _GhostPCLFileName, _PDFTextStripperFileName, fileInfoCollection);
+            Run? run = Run.Get(_Logistics, fileInfoCollection, lines);
             if (run is null)
-                throw new Exception(string.Concat("A) No Data - ", dateTime.Ticks));
-            IProcessData iProcessData = new ProcessData(this, _Logistics, results.Item4, lines);
-            if (iProcessData is not ProcessData processData)
-                results = new(string.Concat("B) No Data - ", dateTime.Ticks), Array.Empty<Test>(), Array.Empty<JsonElement>(), results.Item4);
+                results = new(string.Concat("A) No Data - ", dateTime.Ticks), tests, jsonElements.ToArray(), fileInfoCollection);
            else
            {
                string mid;
-                if (!string.IsNullOrEmpty(processData.Lot) && string.IsNullOrEmpty(processData.Reactor) && string.IsNullOrEmpty(processData.RDS) && string.IsNullOrEmpty(processData.PSN))
-                    mid = processData.Lot;
-                else if (!string.IsNullOrEmpty(processData.Employee) && string.IsNullOrEmpty(processData.Reactor) && string.IsNullOrEmpty(processData.RDS) && string.IsNullOrEmpty(processData.PSN))
-                    mid = processData.Employee;
+                Descriptor descriptor = ProcessData.GetDescriptor(run.Header.Lot);
+                if (!string.IsNullOrEmpty(descriptor.Lot) && string.IsNullOrEmpty(descriptor.Reactor) && string.IsNullOrEmpty(descriptor.RDS) && string.IsNullOrEmpty(descriptor.PSN))
+                    mid = descriptor.Lot;
+                else if (!string.IsNullOrEmpty(descriptor.Employee) && string.IsNullOrEmpty(descriptor.Reactor) && string.IsNullOrEmpty(descriptor.RDS) && string.IsNullOrEmpty(descriptor.PSN))
+                    mid = descriptor.Employee;
                else
-                {
-                    mid = string.Concat(processData.Reactor, "-", processData.RDS, "-", processData.PSN);
-                    mid = Regex.Replace(mid, @"[\\,\/,\:,\*,\?,\"",\<,\>,\|]", "_").Split('\r')[0].Split('\n')[0];
-                }
+                    mid = string.Concat(descriptor.Reactor, "-", descriptor.RDS, "-", descriptor.PSN);
+                mid = Regex.Replace(mid, @"[\\,\/,\:,\*,\?,\"",\<,\>,\|]", "_").Split('\r')[0].Split('\n')[0];
                 SetFileParameterLotID(mid);
-                _Logistics.Update(mid, processData.Reactor);
-                if (iProcessData.Details.Count > 0)
-                    results = iProcessData.GetResults(this, _Logistics, results.Item4);
-                else
-                    results = new(string.Concat("C) No Data - ", dateTime.Ticks), Array.Empty<Test>(), Array.Empty<JsonElement>(), results.Item4);
+                _Logistics.Update(mid, descriptor.Reactor);
+                JsonElement jsonElement = Description.GetDefaultJsonElement(this, _Logistics);
+                jsonElements.Add(jsonElement);
+                results = new(_Logistics.Logistics1[0], tests, jsonElements.ToArray(), fileInfoCollection);
            }
        }
        return results;
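In the reworked method the MID is now derived from ProcessData.GetDescriptor rather than from a fully parsed ProcessData instance. A self-contained sketch of the selection and sanitization logic shown above, with an illustrative record and sample values (the record here is a hypothetical stand-in, not the repository's Descriptor type):

using System.Text.RegularExpressions;

internal static class MidExample
{
    // Illustrative stand-in for the descriptor fields referenced in the diff.
    internal sealed record Descriptor(string Lot, string Employee, string Reactor, string RDS, string PSN);

    internal static string GetMid(Descriptor descriptor)
    {
        string mid;
        // Prefer the lot when no reactor/RDS/PSN were parsed, then the employee, then the composite id.
        if (!string.IsNullOrEmpty(descriptor.Lot) && string.IsNullOrEmpty(descriptor.Reactor) && string.IsNullOrEmpty(descriptor.RDS) && string.IsNullOrEmpty(descriptor.PSN))
            mid = descriptor.Lot;
        else if (!string.IsNullOrEmpty(descriptor.Employee) && string.IsNullOrEmpty(descriptor.Reactor) && string.IsNullOrEmpty(descriptor.RDS) && string.IsNullOrEmpty(descriptor.PSN))
            mid = descriptor.Employee;
        else
            mid = string.Concat(descriptor.Reactor, "-", descriptor.RDS, "-", descriptor.PSN);
        // Replace filesystem-unfriendly characters (the character class also matches commas) and keep only the first line.
        return Regex.Replace(mid, @"[\\,\/,\:,\*,\?,\"",\<,\>,\|]", "_").Split('\r')[0].Split('\n')[0];
    }
}

// Example: GetMid(new MidExample.Descriptor("", "", "61", "123456", "PSN-01?")) returns "61-123456-PSN-01_".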


@@ -1,14 +1,9 @@
 using Adaptation.Shared;
 using Adaptation.Shared.Methods;
-using log4net;
 using System;
 using System.Collections.Generic;
-using System.Collections.ObjectModel;
-using System.Data;
 using System.IO;
-using System.Linq;
 using System.Text.Json;
-using System.Text.Json.Serialization;
 using System.Text.RegularExpressions;
 
 namespace Adaptation.FileHandlers.pcl;
@@ -16,12 +11,6 @@ namespace Adaptation.FileHandlers.pcl;
 internal class ProcessData : IProcessData
 {
-    private int _I;
-    private string _Data;
-    private readonly ILog _Log;
-    private readonly List<object> _Details;
-
     public string JobID { get; set; }
     public string MesEntity { get; set; }
     public string Area { get; set; }
@@ -85,114 +74,16 @@ internal class ProcessData : IProcessData
     public string NineMean { get; set; }
     public string NineResRangePercent { get; set; }
 
-    List<object> Shared.Properties.IProcessData.Details => _Details;
+    List<object> Shared.Properties.IProcessData.Details { get; }
 
-    public ProcessData(IFileRead fileRead, Logistics logistics, List<FileInfo> fileInfoCollection, ReadOnlyCollection<string> lines)
-    {
-        _Details = new List<object>();
-        _I = 0;
-        _Data = string.Empty;
-        JobID = logistics.JobID;
-        Date = GetDateTime(logistics);
-        MesEntity = logistics.MesEntity;
-        _Log = LogManager.GetLogger(typeof(ProcessData));
-        Parse(fileRead, logistics, fileInfoCollection, lines);
-    }
-
     private static DateTime GetDateTime(Logistics logistics) =>
         logistics.DateTimeFromSequence;
 
-    string IProcessData.GetCurrentReactor(IFileRead fileRead, Logistics logistics, Dictionary<string, string> reactors) => throw new Exception(string.Concat("See ", nameof(Parse)));
+    string IProcessData.GetCurrentReactor(IFileRead fileRead, Logistics logistics, Dictionary<string, string> reactors) =>
+        throw new Exception(string.Concat("See ", nameof(IProcessData)));
 
-    Tuple<string, Test[], JsonElement[], List<FileInfo>> IProcessData.GetResults(IFileRead fileRead, Logistics logistics, List<FileInfo> fileInfoCollection)
-    {
-        Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
-        List<Test> tests = new();
-        foreach (object item in _Details)
-            tests.Add(Test.HgCV);
-        List<IDescription> descriptions = fileRead.GetDescriptions(fileRead, tests, this);
-        if (tests.Count != descriptions.Count)
-            throw new Exception();
-        for (int i = 0; i < tests.Count; i++)
-        {
-            if (descriptions[i] is not Description description)
-                throw new Exception();
-            if (description.Test != (int)tests[i])
-                throw new Exception();
-        }
-        FileInfo fileInfo = new($"{logistics.ReportFullPath}.descriptions.json");
-        List<Description> fileReadDescriptions = (from l in descriptions select (Description)l).ToList();
-        string json = JsonSerializer.Serialize(fileReadDescriptions, fileReadDescriptions.GetType());
-        File.WriteAllText(fileInfo.FullName, json);
-        File.SetLastWriteTime(fileInfo.FullName, logistics.DateTimeFromSequence);
-        fileInfoCollection.Add(fileInfo);
-        JsonElement[] jsonElements = JsonSerializer.Deserialize<JsonElement[]>(json) ?? throw new Exception();
-        results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(logistics.Logistics1[0], tests.ToArray(), jsonElements, fileInfoCollection);
-        return results;
-    }
+    Tuple<string, Test[], JsonElement[], List<FileInfo>> IProcessData.GetResults(IFileRead fileRead, Logistics logistics, List<FileInfo> fileInfoCollection) =>
+        throw new NotImplementedException();
-    private void ScanPast(string text)
-    {
-        int num = _Data.IndexOf(text, _I);
-        if (num > -1)
-        {
-            _I = num + text.Length;
-        }
-        else
-        {
-            _I = _Data.Length;
-        }
-    }
-
-    private string GetBefore(string text)
-    {
-        int num = _Data.IndexOf(text, _I);
-        string text2;
-        if (num > -1)
-        {
-            text2 = _Data.Substring(_I, num - _I);
-            _I = num + text.Length;
-            return text2.Trim();
-        }
-        text2 = _Data.Substring(_I);
-        _I = _Data.Length;
-        return text2.Trim();
-    }
-
-    private static bool IsNullOrWhiteSpace(string text)
-    {
-        for (int i = 0; i < text.Length; i++)
-        {
-            if (!char.IsWhiteSpace(text[i]))
-            {
-                return false;
-            }
-        }
-        return true;
-    }
-
-    private bool IsBlankLine()
-    {
-        int num = _Data.IndexOf("\n", _I);
-        return IsNullOrWhiteSpace((num > -1) ? _Data.Substring(_I, num - _I) : _Data.Substring(_I));
-    }
-
-    private string GetToEOL() => GetBefore("\n");
-
-    private string GetToken()
-    {
-        while (_I < _Data.Length && IsNullOrWhiteSpace(_Data.Substring(_I, 1)))
-        {
-            _I++;
-        }
-        int j;
-        for (j = _I; j < _Data.Length && !IsNullOrWhiteSpace(_Data.Substring(j, 1)); j++)
-        {
-        }
-        string text = _Data.Substring(_I, j - _I);
-        _I = j;
-        return text.Trim();
-    }
-
     private static (string, string) GetReactorAndRDS(string defaultReactor, string defaultRDS, string text, string formattedText, string[] segments)
     {
@@ -323,294 +214,8 @@ internal class ProcessData : IProcessData
         return result;
     }
 
-    private void Set(Logistics logistics, string headerText)
-    {
-        string lot;
-        string rds;
-        string psn;
-        string zone;
-        string layer;
-        string reactor;
-        string employee;
-        ScanPast("Lot :");
-        if (headerText.Contains("Ramp Rate :"))
-            lot = GetBefore("Ramp Rate :");
-        else if (headerText.Contains("Forward Rate :"))
-            lot = GetBefore("Forward Rate :");
-        else if (headerText.Contains("Conduct Type:"))
-            lot = GetBefore("Conduct Type:");
-        else
-            lot = string.Empty;
-        Descriptor descriptor = GetDescriptor(lot);
-        lot = descriptor.Lot;
-        psn = descriptor.PSN;
-        rds = descriptor.RDS;
-        zone = descriptor.Zone;
-        layer = descriptor.Layer;
-        reactor = descriptor.Reactor;
-        employee = descriptor.Employee;
-        Lot = lot;
-        PSN = psn;
-        RDS = rds;
-        Zone = zone;
-        Layer = layer;
-        Reactor = reactor;
-        Employee = employee;
-        UniqueId = string.Format("{0}_{1}_{2}", logistics.JobID, lot, Path.GetFileNameWithoutExtension(logistics.ReportFullPath));
-    }
-    private List<Detail> GetDetails()
-    {
-        List<Detail> results = new();
-        Detail detail;
-        string token = GetToken();
-        while (!string.IsNullOrEmpty(token))
-        {
-            detail = new()
-            { NAvg = token };
-            _ = GetToEOL();
-            detail.Nsl = GetToken();
-            _ = GetToEOL();
-            detail.Vd = GetToken();
-            _ = GetToEOL();
-            detail.FlatZ = GetToken();
-            _ = GetToEOL();
-            detail.RhoAvg = GetToken();
-            _ = GetToEOL();
-            detail.Rhosl = GetToken();
-            _ = GetToEOL();
-            detail.Phase = GetToken();
-            _ = GetToEOL();
-            detail.Grade = GetToken();
-            detail.UniqueId = string.Concat("_Point-", results.Count + 1);
-            results.Add(detail);
-            _ = GetToken();
-            _ = GetToken();
-            _ = GetToken();
-            _ = GetToken();
-            token = GetToken();
-            //if (token.Contains("LincPDF") || token.Contains("MULTIPLE"))
-            if (token.Contains("MULTIPLE"))
-            {
-                //ScanPast("Flat Z: Grade : % Flat Z: Grade : % Flat Z: Grade : %");
-                //ScanPast("Flat Z: Grade : % Flat Z: Grade : % Flat Z: Grade : %");
-                ScanPast("Flat Z: Grade : % Flat Z: Grade : % Flat Z: Grade : %");
-                ScanPast("Flat Z: Grade : % Flat Z: Grade : % Flat Z: Grade : %");
-                token = GetToken();
-            }
-        }
-        return results;
-    }
-    private static double Variance(List<double> samples)
-    {
-        if (samples.Count <= 1)
-            return double.NaN;
-        double variance = 0;
-        double t = samples[0];
-        for (int i = 1; i < samples.Count; i++)
-        {
-            t += samples[i];
-            double diff = ((i + 1) * samples[i]) - t;
-            variance += diff * diff / ((i + 1.0) * i);
-        }
-        return variance / (samples.Count - 1);
-    }
-
-    private static double StandardDeviation(List<double> samples) =>
-        Math.Sqrt(Variance(samples));
-    private void PopulateCalculated(List<Detail> details)
-    {
-        if (details.Count != 9)
-        {
-            NineMean = string.Empty;
-            Nine4mmEdgeMean = string.Empty;
-            Nine10mmEdgeMean = string.Empty;
-            NineEdgeMeanDelta = string.Empty;
-            NineResRangePercent = string.Empty;
-            NineCriticalPointsStdDev = string.Empty;
-            NineCriticalPointsAverage = string.Empty;
-            NineCriticalPointsPhaseAngleAverage = string.Empty;
-        }
-        else
-        {
-            int j;
-            double phase;
-            double rhoAvg;
-            double phaseValue;
-            double rhoAvgValue;
-            List<double> allRhoPoints = new();
-            List<double> edge4mmRhoPoints = new();
-            List<double> edge10mmRhoPoints = new();
-            List<double> criticalRhoPoints = new();
-            List<double> criticalPhasePoints = new();
-            for (int i = 0; i < details.Count; i++)
-            {
-                j = i + 1;
-                if (!double.TryParse(details[i].RhoAvg, out rhoAvg))
-                    rhoAvgValue = 0;
-                else
-                    rhoAvgValue = rhoAvg;
-                if (!double.TryParse(details[i].Phase, out phase))
-                    phaseValue = 0;
-                else
-                    phaseValue = phase;
-                allRhoPoints.Add(rhoAvgValue);
-                if (j is 3 or 4 or 7 or 8)
-                    edge4mmRhoPoints.Add(rhoAvgValue);
-                else if (j == 1)
-                {
-                    criticalRhoPoints.Add(rhoAvgValue);
-                    criticalPhasePoints.Add(phaseValue);
-                }
-                else if (j is 2 or 5 or 6 or 9)
-                {
-                    criticalRhoPoints.Add(rhoAvgValue);
-                    edge10mmRhoPoints.Add(rhoAvgValue);
-                    criticalPhasePoints.Add(phaseValue);
-                }
-                else
-                    throw new NotSupportedException();
-            }
-            double nineMean = allRhoPoints.Average();
-            double nine4mmEdgeSum = edge4mmRhoPoints.Sum();
-            double nine10mmEdgeSum = edge10mmRhoPoints.Sum();
-            double nine4mmEdgeMean = edge4mmRhoPoints.Average();
-            double nine10mmEdgeMean = edge10mmRhoPoints.Average();
-            double nineCriticalPointsAverage = criticalRhoPoints.Average();
-            double nineCriticalPointsStdDev = StandardDeviation(criticalRhoPoints);
-            double nineCriticalPointsPhaseAngleAverage = criticalPhasePoints.Average();
-            double nineEdgeMeanDelta = (nine4mmEdgeSum - nine10mmEdgeSum) / nine10mmEdgeSum * 100;
-            double nineResRangePercent = (criticalRhoPoints.Max() - criticalRhoPoints.Min()) / nineCriticalPointsAverage * 100;
-            NineMean = nineMean.ToString("0.0000000");
-            Nine4mmEdgeMean = nine4mmEdgeMean.ToString("0.0000000");
-            Nine10mmEdgeMean = nine10mmEdgeMean.ToString("0.0000000");
-            NineEdgeMeanDelta = nineEdgeMeanDelta.ToString("0.0000000");
-            NineResRangePercent = nineResRangePercent.ToString("0.0000000");
-            NineCriticalPointsStdDev = nineCriticalPointsStdDev.ToString("0.0000000");
-            NineCriticalPointsAverage = nineCriticalPointsAverage.ToString("0.0000000");
-            NineCriticalPointsPhaseAngleAverage = nineCriticalPointsPhaseAngleAverage.ToString("0.0000000");
-        }
-    }
 #nullable enable
-    private void Parse(IFileRead fileRead, Logistics logistics, List<FileInfo> fileInfoCollection, ReadOnlyCollection<string> lines)
-    {
-        if (fileRead is null)
-            throw new ArgumentNullException(nameof(fileRead));
-        string headerText = string.Join(Environment.NewLine, lines);
-        if (headerText.Contains("G A T E V O L T A G E"))
-            throw new Exception("Ignore: GATEVOLTAGE runs are not parsed.");
-        if (!string.IsNullOrEmpty(headerText))
-        {
-            headerText = headerText.Replace("box", "");
-            headerText = headerText.Replace("bar", "");
-            headerText = headerText.Replace("horiz", "");
-            headerText = headerText.Replace("center", "");
-            headerText = headerText.Replace("upper", "");
-            headerText = headerText.Replace("lower", "");
-            headerText = headerText.Replace("right", "");
-            headerText = headerText.Replace("left", "");
-            headerText = headerText.Replace("thin", "");
-            headerText = headerText.Replace("vertical", "");
-            headerText = headerText.Replace("line", "");
-            headerText = headerText.Replace("middle", "");
-            headerText = headerText.Replace("side", "");
-            headerText = headerText.Replace("top", "");
-            headerText = headerText.Replace("corner", "");
-            headerText = headerText.Replace("bottom", "");
-            headerText = headerText.Replace("ruleunder", "_");
-            headerText = headerText.Replace("@", "");
-            headerText = headerText.Replace("*", "");
-            _I = 0;
-            _Data = headerText;
-            _Log.Debug($"****MERCURY-DATA [002]= {headerText}");
-            ScanPast("Operator:");
-            _ = GetBefore("Start Voltage:");
-            StartVoltage = GetBefore("V");
-            ScanPast("Wafer :");
-            Wafer = GetBefore("S Voltage :");
-            StopVoltage = GetBefore("V");
-            Set(logistics, headerText);
-            RampRate = GetBefore("mV/sec");
-            ScanPast("Plan :");
-            Plan = GetBefore("G limit :");
-            //GLimit = GetBefore("S ");
-            GLimit = GetBefore("S");
-            ScanPast("Setup File:");
-            //SetupFile = GetBefore("O O");
-            SetupFile = GetBefore("O O");
-            ScanPast("Wafer size :");
-            WaferSize = GetBefore("mm");
-            ScanPast("Folder :");
-            //Folder = GetBefore("N N");
-            Folder = GetBefore("N N");
-            ScanPast("Ccomp : ");
-            Ccomp = GetBefore("pF");
-            ScanPast("Pattern :");
-            //Pattern = GetBefore("C C");
-            Pattern = GetBefore("C C");
-            ScanPast("Area:");
-            Area = GetBefore("cm2");
ScanPast("Cond Type :");
CondType = GetBefore("Rho Method:");
//RhoMethod = GetBefore("N N");
RhoMethod = GetBefore("N N");
ScanPast("Model :");
//Model = GetBefore("T T");
Model = GetBefore("T T");
ScanPast("Navg :");
NAvgMean = GetToken();
NAvgStdDev = GetToken();
NAvgRadialGradient = GetToken();
ScanPast("Nsl :");
NslMean = GetToken();
NslStdDev = GetToken();
NslRadialGradient = GetToken();
ScanPast("Vd :");
VdMean = GetToken();
VdStdDev = GetToken();
VdRadialGradient = GetToken();
ScanPast("Flat Z:");
FlatZMean = GetToken();
FlatZStdDev = GetToken();
FlatZRadialGradient = GetToken();
ScanPast("Rhoavg:");
RhoAvgMean = GetToken();
RhoAvgStdDev = GetToken();
RhoAvgRadialGradient = GetToken();
ScanPast("Rhosl :");
RhoslMean = GetToken();
RhoslStdDev = GetToken();
RhoslRadialGradient = GetToken();
ScanPast("Phase :");
PhaseMean = GetToken();
PhaseStdDev = GetToken();
PhaseRadialGradient = GetToken();
ScanPast("Grade :");
GradeMean = GetToken();
GradeStdDev = GetToken();
GradeRadialGradient = GetToken();
ScanPast("Rs :");
RsMean = GetToken();
RsStdDev = GetToken();
RsRadialGradient = GetToken();
//ScanPast("Flat Z: Grade : % Flat Z: Grade : % Flat Z: Grade : %");
ScanPast("Flat Z: Grade : % Flat Z: Grade : % Flat Z: Grade : %");
List<Detail> details = GetDetails();
PopulateCalculated(details);
_Details.AddRange(details);
}
foreach (Detail detail in _Details.Cast<Detail>())
{
detail.HeaderUniqueId = UniqueId;
detail.UniqueId = string.Concat(detail, detail.UniqueId);
}
fileInfoCollection.Add(logistics.FileInfo);
}
     internal static List<Description> GetDescriptions(JsonElement[] jsonElements)
     {
         List<Description> results = new();