Ready to test
This commit is contained in:
499
Adaptation/FileHandlers/pcl/Description.cs
Normal file
499
Adaptation/FileHandlers/pcl/Description.cs
Normal file
@ -0,0 +1,499 @@
|
||||
using Adaptation.Shared;
|
||||
using Adaptation.Shared.Methods;
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using System.Text.Json;
|
||||
|
||||
namespace Adaptation.FileHandlers.pcl;
|
||||
|
||||
/// <summary>
/// Row-level description for Tencor pcl runs: one instance per wafer <see cref="Detail"/>,
/// serialized to JSON by the shared framework.
/// NOTE(review): property declaration order is significant — <c>GetNames</c> and
/// <c>GetDefault</c> enumerate the serialized JSON object, so reordering members
/// changes the emitted column order. Do not reorder without confirming consumers.
/// </summary>
public class Description : IDescription, Shared.Properties.IDescription
{

    // Test/collection bookkeeping supplied by the framework.
    public int Test { get; set; }
    public int Count { get; set; }
    public int Index { get; set; }
    // Framework / logistics identification.
    public string EventName { get; set; }
    public string NullData { get; set; }
    public string JobID { get; set; }
    public string Sequence { get; set; }
    public string MesEntity { get; set; }
    public string ReportFullPath { get; set; }
    public string ProcessJobID { get; set; }
    public string MID { get; set; }
    // Header fields parsed once per run (see GetHeaderNames).
    public string Date { get; set; }
    public string Employee { get; set; }
    public string Lot { get; set; }
    public string PSN { get; set; }
    public string Reactor { get; set; }
    public string Recipe { get; set; }
    // Detail (per-wafer) fields (see GetDetailNames).
    public string Comments { get; set; }
    public string Diameter { get; set; }
    public string Exclusion { get; set; }
    public string Gain { get; set; }
    public string HeaderUniqueId { get; set; }
    public string Laser { get; set; }
    public string ParseErrorText { get; set; }
    public string RDS { get; set; }
    public string Slot { get; set; }
    public string UniqueId { get; set; }
    // Measurement parameters (see GetParameterNames). Avg/Max/Min/StdDev variants
    // come from the run-level ProcessData; the bare names come from the Detail.
    public string AreaCount { get; set; }
    public string AreaCountAvg { get; set; }
    public string AreaCountMax { get; set; }
    public string AreaCountMin { get; set; }
    public string AreaCountStdDev { get; set; }
    public string AreaTotal { get; set; }
    public string AreaTotalAvg { get; set; }
    public string AreaTotalMax { get; set; }
    public string AreaTotalMin { get; set; }
    public string AreaTotalStdDev { get; set; }
    public string Bin1 { get; set; }
    public string Bin2 { get; set; }
    public string Bin3 { get; set; }
    public string Bin4 { get; set; }
    public string Bin5 { get; set; }
    public string Bin6 { get; set; }
    public string Bin7 { get; set; }
    public string Bin8 { get; set; }
    public string HazeAverage { get; set; }
    public string HazeAverageAvg { get; set; }
    public string HazeAverageMax { get; set; }
    public string HazeAverageMin { get; set; }
    public string HazeAverageStdDev { get; set; }
    public string HazePeak { get; set; }
    public string HazeRegion { get; set; }
    public string HazeRegionAvg { get; set; }
    public string HazeRegionMax { get; set; }
    public string HazeRegionMin { get; set; }
    public string HazeRegionStdDev { get; set; }
    public string HazeRng { get; set; }
    public string LPDCM2 { get; set; }
    public string LPDCM2Avg { get; set; }
    public string LPDCM2Max { get; set; }
    public string LPDCM2Min { get; set; }
    public string LPDCM2StdDev { get; set; }
    public string LPDCount { get; set; }
    public string LPDCountAvg { get; set; }
    public string LPDCountMax { get; set; }
    public string LPDCountMin { get; set; }
    public string LPDCountStdDev { get; set; }
    public string Mean { get; set; }
    public string ScratchCount { get; set; }
    public string ScratchCountAvg { get; set; }
    public string ScratchCountMax { get; set; }
    public string ScratchCountMin { get; set; }
    public string ScratchCountStdDev { get; set; }
    public string ScratchTotal { get; set; }
    public string ScratchTotalAvg { get; set; }
    public string ScratchTotalMax { get; set; }
    public string ScratchTotalMin { get; set; }
    public string ScratchTotalStdDev { get; set; }
    public string Sort { get; set; }
    public string StdDev { get; set; }
    public string SumOfDefects { get; set; }
    public string SumOfDefectsAvg { get; set; }
    public string SumOfDefectsMax { get; set; }
    public string SumOfDefectsMin { get; set; }
    public string SumOfDefectsStdDev { get; set; }
    public string Thresh { get; set; }
    public string Thruput { get; set; }
    // Opaque payloads only populated via GetDisplayNames defaults.
    public object Data { get; set; }
    public object Parameters { get; set; }

    // Fixed event text reported for every successfully parsed file.
    string IDescription.GetEventDescription() => "File Has been read and parsed";

    /// <summary>
    /// Returns every serialized property name, in declaration order, by
    /// round-tripping a default instance through System.Text.Json.
    /// </summary>
    List<string> IDescription.GetNames(IFileRead fileRead, Logistics logistics)
    {
        List<string> results = new();
        IDescription description = GetDefault(fileRead, logistics);
        string json = JsonSerializer.Serialize(description, description.GetType());
        object @object = JsonSerializer.Deserialize<object>(json);
        if (@object is not JsonElement jsonElement)
            throw new Exception();
        foreach (JsonProperty jsonProperty in jsonElement.EnumerateObject())
            results.Add(jsonProperty.Name);
        return results;
    }

    // Names of per-wafer (detail) columns.
    List<string> IDescription.GetDetailNames()
    {
        List<string> results = new()
        {
            nameof(Comments),
            nameof(Diameter),
            nameof(Exclusion),
            nameof(Gain),
            nameof(HeaderUniqueId),
            nameof(Laser),
            nameof(ParseErrorText),
            nameof(RDS),
            nameof(Slot),
            nameof(UniqueId)
        };
        return results;
    }

    // Names of run-level header columns.
    List<string> IDescription.GetHeaderNames()
    {
        List<string> results = new()
        {
            nameof(Date),
            nameof(Employee),
            nameof(Lot),
            nameof(PSN),
            nameof(Reactor),
            nameof(Recipe)
        };
        return results;
    }

    // Interface adapter over the private static factory below.
    IDescription IDescription.GetDisplayNames()
    {
        Description result = GetDisplayNames();
        return result;
    }

    // Names of all measurement-parameter columns.
    List<string> IDescription.GetParameterNames()
    {
        List<string> results = new()
        {
            nameof(AreaCount),
            nameof(AreaCountAvg),
            nameof(AreaCountMax),
            nameof(AreaCountMin),
            nameof(AreaCountStdDev),
            nameof(AreaTotal),
            nameof(AreaTotalAvg),
            nameof(AreaTotalMax),
            nameof(AreaTotalMin),
            nameof(AreaTotalStdDev),
            nameof(Bin1),
            nameof(Bin2),
            nameof(Bin3),
            nameof(Bin4),
            nameof(Bin5),
            nameof(Bin6),
            nameof(Bin7),
            nameof(Bin8),
            nameof(HazeAverage),
            nameof(HazeAverageAvg),
            nameof(HazeAverageMax),
            nameof(HazeAverageMin),
            nameof(HazeAverageStdDev),
            nameof(HazePeak),
            nameof(HazeRegion),
            nameof(HazeRegionAvg),
            nameof(HazeRegionMax),
            nameof(HazeRegionMin),
            nameof(HazeRegionStdDev),
            nameof(HazeRng),
            nameof(LPDCM2),
            nameof(LPDCM2Avg),
            nameof(LPDCM2Max),
            nameof(LPDCM2Min),
            nameof(LPDCM2StdDev),
            nameof(LPDCount),
            nameof(LPDCountAvg),
            nameof(LPDCountMax),
            nameof(LPDCountMin),
            nameof(LPDCountStdDev),
            nameof(Mean),
            nameof(ScratchCount),
            nameof(ScratchCountAvg),
            nameof(ScratchCountMax),
            nameof(ScratchCountMin),
            nameof(ScratchCountStdDev),
            nameof(ScratchTotal),
            nameof(ScratchTotalAvg),
            nameof(ScratchTotalMax),
            nameof(ScratchTotalMin),
            nameof(ScratchTotalStdDev),
            nameof(Sort),
            nameof(StdDev),
            nameof(SumOfDefects),
            nameof(SumOfDefectsAvg),
            nameof(SumOfDefectsMax),
            nameof(SumOfDefectsMin),
            nameof(SumOfDefectsStdDev),
            nameof(Thresh),
            nameof(Thruput)
        };
        return results;
    }

    /// <summary>
    /// Serializes a default instance and returns its JSON properties in
    /// declaration order (name/value pairs for the framework's defaults).
    /// </summary>
    JsonProperty[] IDescription.GetDefault(IFileRead fileRead, Logistics logistics)
    {
        JsonProperty[] results;
        IDescription description = GetDefault(fileRead, logistics);
        string json = JsonSerializer.Serialize(description, description.GetType());
        object @object = JsonSerializer.Deserialize<object>(json);
        results = ((JsonElement)@object).EnumerateObject().ToArray();
        return results;
    }

    // This handler defines no paired parameters.
    List<string> IDescription.GetPairedParameterNames()
    {
        List<string> results = new();
        return results;
    }

    // This handler ignores no parameters for any test.
    List<string> IDescription.GetIgnoreParameterNames(Test test)
    {
        List<string> results = new();
        return results;
    }

    // Interface adapter over the private factory below.
    IDescription IDescription.GetDefaultDescription(IFileRead fileRead, Logistics logistics)
    {
        Description result = GetDefault(fileRead, logistics);
        return result;
    }

    /// <summary>
    /// Maps each serialized property name to its display value (stringified
    /// JSON value) from the display-name instance.
    /// </summary>
    Dictionary<string, string> IDescription.GetDisplayNamesJsonElement(IFileRead fileRead)
    {
        Dictionary<string, string> results = new();
        IDescription description = GetDisplayNames();
        string json = JsonSerializer.Serialize(description, description.GetType());
        JsonElement jsonElement = JsonSerializer.Deserialize<JsonElement>(json);
        foreach (JsonProperty jsonProperty in jsonElement.EnumerateObject())
        {
            if (!results.ContainsKey(jsonProperty.Name))
                results.Add(jsonProperty.Name, string.Empty);
            if (jsonProperty.Value is JsonElement jsonPropertyValue)
                results[jsonProperty.Name] = jsonPropertyValue.ToString();
        }
        return results;
    }

    /// <summary>
    /// Builds one Description per parsed Detail; falls back to a single default
    /// instance when there is no usable process data.
    /// </summary>
    List<IDescription> IDescription.GetDescriptions(IFileRead fileRead, Logistics logistics, List<Test> tests, IProcessData iProcessData)
    {
        List<IDescription> results = new();
        if (iProcessData is null || !iProcessData.Details.Any() || iProcessData is not ProcessData processData)
            results.Add(GetDefault(fileRead, logistics));
        else
        {
            string nullData;
            Description description;
            object configDataNullData = fileRead.NullData;
            if (configDataNullData is null)
                nullData = string.Empty;
            else
                nullData = configDataNullData.ToString();
            for (int i = 0; i < iProcessData.Details.Count; i++)
            {
                // Skip any detail entry that is not the expected concrete type.
                if (iProcessData.Details[i] is not Detail detail)
                    continue;
                description = new Description
                {
                    Test = (int)tests[i],
                    Count = tests.Count,
                    Index = i,
                    //
                    EventName = fileRead.EventName,
                    NullData = nullData,
                    JobID = fileRead.CellInstanceName,
                    Sequence = logistics.Sequence.ToString(),
                    MesEntity = logistics.MesEntity,
                    ReportFullPath = logistics.ReportFullPath,
                    ProcessJobID = logistics.ProcessJobID,
                    MID = logistics.MID,
                    //
                    Date = processData.Date,
                    // NOTE(review): Employee is populated from processData.PSN — ProcessData
                    // declares no Employee property, so this may be deliberate, but confirm
                    // it is not a copy/paste slip before relying on the Employee column.
                    Employee = processData.PSN,
                    Lot = processData.Lot,
                    PSN = processData.PSN,
                    Reactor = processData.Reactor,
                    Recipe = processData.Recipe,
                    //
                    Comments = detail.Comments,
                    Diameter = detail.Diameter,
                    Exclusion = detail.Exclusion,
                    Gain = detail.Gain,
                    // NOTE(review): HeaderUniqueId copies detail.UniqueId although Detail
                    // also declares a HeaderUniqueId property — confirm which is intended.
                    HeaderUniqueId = detail.UniqueId,
                    Laser = detail.Laser,
                    ParseErrorText = processData.ParseErrorText,
                    RDS = processData.RDS,
                    Slot = detail.Slot,
                    UniqueId = detail.UniqueId,
                    //
                    AreaCount = detail.AreaCount,
                    AreaCountAvg = processData.AreaCountAvg,
                    AreaCountMax = processData.AreaCountMax,
                    AreaCountMin = processData.AreaCountMin,
                    AreaCountStdDev = processData.AreaCountStdDev,
                    AreaTotal = detail.AreaTotal,
                    AreaTotalAvg = processData.AreaTotalAvg,
                    AreaTotalMax = processData.AreaTotalMax,
                    AreaTotalMin = processData.AreaTotalMin,
                    AreaTotalStdDev = processData.AreaTotalStdDev,
                    Bin1 = detail.Bin1,
                    Bin2 = detail.Bin2,
                    Bin3 = detail.Bin3,
                    Bin4 = detail.Bin4,
                    Bin5 = detail.Bin5,
                    Bin6 = detail.Bin6,
                    Bin7 = detail.Bin7,
                    Bin8 = detail.Bin8,
                    HazeAverage = detail.HazeAverage,
                    HazeAverageAvg = processData.HazeAverageAvg,
                    HazeAverageMax = processData.HazeAverageMax,
                    HazeAverageMin = processData.HazeAverageMin,
                    HazeAverageStdDev = processData.HazeAverageStdDev,
                    HazePeak = detail.HazePeak,
                    HazeRegion = detail.HazeRegion,
                    HazeRegionAvg = processData.HazeRegionAvg,
                    HazeRegionMax = processData.HazeRegionMax,
                    HazeRegionMin = processData.HazeRegionMin,
                    HazeRegionStdDev = processData.HazeRegionStdDev,
                    HazeRng = detail.HazeRng,
                    LPDCM2 = detail.LPDCM2,
                    LPDCM2Avg = processData.LPDCM2Avg,
                    LPDCM2Max = processData.LPDCM2Max,
                    LPDCM2Min = processData.LPDCM2Min,
                    LPDCM2StdDev = processData.LPDCM2StdDev,
                    LPDCount = detail.LPDCount,
                    LPDCountAvg = processData.LPDCountAvg,
                    LPDCountMax = processData.LPDCountMax,
                    LPDCountMin = processData.LPDCountMin,
                    LPDCountStdDev = processData.LPDCountStdDev,
                    Mean = detail.Mean,
                    ScratchCount = detail.ScratchCount,
                    ScratchCountAvg = processData.ScratchCountAvg,
                    ScratchCountMax = processData.ScratchCountMax,
                    ScratchCountMin = processData.ScratchCountMin,
                    ScratchCountStdDev = processData.ScratchCountStdDev,
                    ScratchTotal = detail.ScratchTotal,
                    ScratchTotalAvg = processData.ScratchTotalAvg,
                    ScratchTotalMax = processData.ScratchTotalMax,
                    ScratchTotalMin = processData.ScratchTotalMin,
                    ScratchTotalStdDev = processData.ScratchTotalStdDev,
                    Sort = detail.Sort,
                    StdDev = detail.StdDev,
                    SumOfDefects = detail.SumOfDefects,
                    SumOfDefectsAvg = processData.SumOfDefectsAvg,
                    SumOfDefectsMax = processData.SumOfDefectsMax,
                    SumOfDefectsMin = processData.SumOfDefectsMin,
                    SumOfDefectsStdDev = processData.SumOfDefectsStdDev,
                    Thresh = detail.Thresh,
                    Thruput = detail.Thruput
                };
                results.Add(description);
            }
        }
        return results;
    }

    // Display-name instance: all properties left at their defaults.
    private static Description GetDisplayNames()
    {
        Description result = new();
        return result;
    }

    /// <summary>
    /// Default instance: bookkeeping from fileRead/logistics, every data
    /// property set to its own name via nameof (acts as a column-name template).
    /// </summary>
    private Description GetDefault(IFileRead fileRead, Logistics logistics)
    {
        Description result = new()
        {
            Test = -1,
            Count = 0,
            Index = -1,
            //
            EventName = fileRead.EventName,
            NullData = fileRead.NullData,
            JobID = fileRead.CellInstanceName,
            Sequence = logistics.Sequence.ToString(),
            // NOTE(review): here MesEntity comes from fileRead, but in
            // GetDescriptions it comes from logistics — confirm both are intended.
            MesEntity = fileRead.MesEntity,
            ReportFullPath = logistics.ReportFullPath,
            ProcessJobID = logistics.ProcessJobID,
            MID = logistics.MID,
            //
            Date = nameof(Date),
            Employee = nameof(Employee),
            Lot = nameof(Lot),
            PSN = nameof(PSN),
            Reactor = nameof(Reactor),
            Recipe = nameof(Recipe),
            //
            Comments = nameof(Comments),
            Diameter = nameof(Diameter),
            Exclusion = nameof(Exclusion),
            Gain = nameof(Gain),
            HeaderUniqueId = nameof(HeaderUniqueId),
            Laser = nameof(Laser),
            ParseErrorText = nameof(ParseErrorText),
            RDS = nameof(RDS),
            Slot = nameof(Slot),
            UniqueId = nameof(UniqueId),
            //
            AreaCount = nameof(AreaCount),
            AreaCountAvg = nameof(AreaCountAvg),
            AreaCountMax = nameof(AreaCountMax),
            AreaCountMin = nameof(AreaCountMin),
            AreaCountStdDev = nameof(AreaCountStdDev),
            AreaTotal = nameof(AreaTotal),
            AreaTotalAvg = nameof(AreaTotalAvg),
            AreaTotalMax = nameof(AreaTotalMax),
            AreaTotalMin = nameof(AreaTotalMin),
            AreaTotalStdDev = nameof(AreaTotalStdDev),
            Bin1 = nameof(Bin1),
            Bin2 = nameof(Bin2),
            Bin3 = nameof(Bin3),
            Bin4 = nameof(Bin4),
            Bin5 = nameof(Bin5),
            Bin6 = nameof(Bin6),
            Bin7 = nameof(Bin7),
            Bin8 = nameof(Bin8),
            HazeAverage = nameof(HazeAverage),
            HazeAverageAvg = nameof(HazeAverageAvg),
            HazeAverageMax = nameof(HazeAverageMax),
            HazeAverageMin = nameof(HazeAverageMin),
            HazeAverageStdDev = nameof(HazeAverageStdDev),
            HazePeak = nameof(HazePeak),
            HazeRegion = nameof(HazeRegion),
            HazeRegionAvg = nameof(HazeRegionAvg),
            HazeRegionMax = nameof(HazeRegionMax),
            HazeRegionMin = nameof(HazeRegionMin),
            HazeRegionStdDev = nameof(HazeRegionStdDev),
            HazeRng = nameof(HazeRng),
            LPDCM2 = nameof(LPDCM2),
            LPDCM2Avg = nameof(LPDCM2Avg),
            LPDCM2Max = nameof(LPDCM2Max),
            LPDCM2Min = nameof(LPDCM2Min),
            LPDCM2StdDev = nameof(LPDCM2StdDev),
            LPDCount = nameof(LPDCount),
            LPDCountAvg = nameof(LPDCountAvg),
            LPDCountMax = nameof(LPDCountMax),
            LPDCountMin = nameof(LPDCountMin),
            LPDCountStdDev = nameof(LPDCountStdDev),
            Mean = nameof(Mean),
            ScratchCount = nameof(ScratchCount),
            ScratchCountAvg = nameof(ScratchCountAvg),
            ScratchCountMax = nameof(ScratchCountMax),
            ScratchCountMin = nameof(ScratchCountMin),
            ScratchCountStdDev = nameof(ScratchCountStdDev),
            ScratchTotal = nameof(ScratchTotal),
            ScratchTotalAvg = nameof(ScratchTotalAvg),
            ScratchTotalMax = nameof(ScratchTotalMax),
            ScratchTotalMin = nameof(ScratchTotalMin),
            ScratchTotalStdDev = nameof(ScratchTotalStdDev),
            Sort = nameof(Sort),
            StdDev = nameof(StdDev),
            SumOfDefects = nameof(SumOfDefects),
            SumOfDefectsAvg = nameof(SumOfDefectsAvg),
            SumOfDefectsMax = nameof(SumOfDefectsMax),
            SumOfDefectsMin = nameof(SumOfDefectsMin),
            SumOfDefectsStdDev = nameof(SumOfDefectsStdDev),
            Thresh = nameof(Thresh),
            Thruput = nameof(Thruput),
            //
            Data = nameof(Data),
            Parameters = nameof(Parameters)
        };
        return result;
    }

}
|
45
Adaptation/FileHandlers/pcl/Detail.cs
Normal file
45
Adaptation/FileHandlers/pcl/Detail.cs
Normal file
@ -0,0 +1,45 @@
|
||||
namespace Adaptation.FileHandlers.pcl;
|
||||
|
||||
/// <summary>
/// Per-wafer measurement record parsed from a Tencor pcl report; one instance
/// per wafer/slot. All measurement values are kept as raw strings exactly as
/// parsed. Serialized by the framework, so property names are part of the
/// data contract — do not rename.
/// </summary>
public class Detail
{

    public long Id { get; set; }
    public string AreaCount { get; set; }
    public string AreaTotal { get; set; }
    public string Bin1 { get; set; }
    public string Bin2 { get; set; }
    public string Bin3 { get; set; }
    public string Bin4 { get; set; }
    public string Bin5 { get; set; }
    public string Bin6 { get; set; }
    public string Bin7 { get; set; }
    public string Bin8 { get; set; }
    public string Comments { get; set; }
    public string Date { get; set; }
    public string Diameter { get; set; }
    public string Exclusion { get; set; }
    public string Gain { get; set; }
    public string HazeAverage { get; set; }
    public string HazePeak { get; set; }
    public string HazeRegion { get; set; }
    public string HazeRng { get; set; }
    public string HeaderUniqueId { get; set; }
    public string LPDCM2 { get; set; }
    public string LPDCount { get; set; }
    public string Laser { get; set; }
    public string Mean { get; set; }
    public string Recipe { get; set; }
    public string ScratchCount { get; set; }
    public string ScratchTotal { get; set; }
    public string Slot { get; set; }
    public string Sort { get; set; }
    public string StdDev { get; set; }
    public string SumOfDefects { get; set; }
    public string Thresh { get; set; }
    public string Thruput { get; set; }
    public string Title { get; set; }
    public string UniqueId { get; set; }
    // Raw captured text for this wafer's section of the report.
    public string Data { get; set; }
    // NOTE(review): lowercase name violates .NET conventions (should be "I"),
    // but renaming would change the serialized contract — left as-is.
    public int i { get; set; }

}
|
131
Adaptation/FileHandlers/pcl/FileRead.cs
Normal file
131
Adaptation/FileHandlers/pcl/FileRead.cs
Normal file
@ -0,0 +1,131 @@
|
||||
using Adaptation.Eaf.Management.ConfigurationData.CellAutomation;
|
||||
using Adaptation.Ifx.Eaf.EquipmentConnector.File.Configuration;
|
||||
using Adaptation.Shared;
|
||||
using Adaptation.Shared.Methods;
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Reflection;
|
||||
using System.Text.Json;
|
||||
using System.Text.RegularExpressions;
|
||||
|
||||
namespace Adaptation.FileHandlers.pcl;
|
||||
|
||||
/// <summary>
/// File handler for Tencor pcl reports. Converts the PCL print stream to text
/// via the external GhostPCL executable (gpcl6win64.exe) and extracts run and
/// per-wafer results through <see cref="ProcessData"/>.
/// </summary>
public class FileRead : Shared.FileRead, IFileRead
{

    // Full path to the GhostPCL converter resolved next to the entry assembly.
    private readonly string _GhostPCLFileName;

    public FileRead(ISMTP smtp, Dictionary<string, string> fileParameter, string cellInstanceName, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList<ModelObjectParameterDefinition> modelObjectParameters, string equipmentDictionaryName, Dictionary<string, List<long>> dummyRuns, bool useCyclicalForDescription, bool isEAFHosted) :
        base(new Description(), true, smtp, fileParameter, cellInstanceName, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, useCyclicalForDescription, isEAFHosted)
    {
        _MinFileLength = 10;
        _NullData = string.Empty;
        _Logistics = new Logistics(this);
        if (_FileParameter is null)
            throw new Exception(cellInstanceConnectionName);
        if (_ModelObjectParameterDefinitions is null)
            throw new Exception(cellInstanceConnectionName);
        // This handler is not a duplicator; refuse to run as one.
        if (_IsDuplicator)
            throw new Exception(cellInstanceConnectionName);
        // NOTE(review): Assembly.GetEntryAssembly() can return null in some
        // hosting scenarios — confirm the EAF host always provides one.
        _GhostPCLFileName = string.Concat(Path.GetDirectoryName(Assembly.GetEntryAssembly().Location), @"\gpcl6win64.exe");
        // Only enforce the converter's presence when actually hosted.
        if (_IsEAFHosted && !File.Exists(_GhostPCLFileName))
            throw new Exception("Ghost PCL FileName doesn't Exist!");
    }

    // Delegations to the shared base implementation.
    void IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, Exception exception) => Move(extractResults, exception);

    void IFileRead.WaitForThread() => WaitForThread(thread: null, threadExceptions: null);

    string IFileRead.GetEventDescription()
    {
        string result = _Description.GetEventDescription();
        return result;
    }

    List<string> IFileRead.GetHeaderNames()
    {
        List<string> results = _Description.GetHeaderNames();
        return results;
    }

    string[] IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, string to, string from, string resolvedFileLocation, Exception exception)
    {
        string[] results = Move(extractResults, to, from, resolvedFileLocation, exception);
        return results;
    }

    JsonProperty[] IFileRead.GetDefault()
    {
        JsonProperty[] results = _Description.GetDefault(this, _Logistics);
        return results;
    }

    Dictionary<string, string> IFileRead.GetDisplayNamesJsonElement()
    {
        Dictionary<string, string> results = _Description.GetDisplayNamesJsonElement(this);
        return results;
    }

    List<IDescription> IFileRead.GetDescriptions(IFileRead fileRead, List<Test> tests, IProcessData processData)
    {
        List<IDescription> results = _Description.GetDescriptions(fileRead, _Logistics, tests, processData);
        return results;
    }

    /// <summary>
    /// Framework entry point: extracts results for one report file, normalizes
    /// a null payload to an empty one, writes PDSF when hosted, and records
    /// the extraction duration.
    /// </summary>
    Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.GetExtractResult(string reportFullPath, string eventName)
    {
        Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
        if (string.IsNullOrEmpty(eventName))
            throw new Exception();
        _ReportFullPath = reportFullPath;
        DateTime dateTime = DateTime.Now;
        results = GetExtractResult(reportFullPath, dateTime);
        if (results.Item3 is null)
            results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(results.Item1, Array.Empty<Test>(), JsonSerializer.Deserialize<JsonElement[]>("[]"), results.Item4);
        if (results.Item3.Length > 0 && _IsEAFHosted)
            WritePDSF(this, results.Item3);
        UpdateLastTicksDuration(DateTime.Now.Ticks - dateTime.Ticks);
        return results;
    }

    // Re-runs extraction using previously captured header names/display values.
    Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.ReExtract()
    {
        Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
        List<string> headerNames = _Description.GetHeaderNames();
        Dictionary<string, string> keyValuePairs = _Description.GetDisplayNamesJsonElement(this);
        results = ReExtract(this, headerNames, keyValuePairs);
        return results;
    }

    // Duplicator-only operations: this handler refuses them by design.
    void IFileRead.CheckTests(Test[] tests, bool extra) => throw new Exception(string.Concat("Not ", nameof(_IsDuplicator)));

    void IFileRead.Callback(object state) => throw new Exception(string.Concat("Not ", nameof(_IsDuplicator)));

    /// <summary>
    /// Parses the report, derives the MID as Reactor-RDS-PSN (with filename-unsafe
    /// characters replaced), and returns the framework result tuple.
    /// </summary>
    private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
    {
        Tuple<string, Test[], JsonElement[], List<FileInfo>> results = new(string.Empty, null, null, new List<FileInfo>());
        _Logistics = new Logistics(this, reportFullPath, useSplitForMID: true);
        SetFileParameterLotIDToLogisticsMID();
        // NOTE(review): this compares the PATH STRING length (characters) to
        // _MinFileLength, not the file size on disk; sibling handlers typically
        // use new FileInfo(reportFullPath).Length — confirm which is intended.
        if (reportFullPath.Length < _MinFileLength)
            results.Item4.Add(new FileInfo(reportFullPath));
        else
        {
            IProcessData iProcessData = new ProcessData(this, _Logistics, results.Item4, _GhostPCLFileName);
            if (iProcessData is ProcessData processData)
            {
                string mid = string.Concat(processData.Reactor, "-", processData.RDS, "-", processData.PSN);
                // Strip characters invalid in file names and truncate at the first newline.
                mid = Regex.Replace(mid, @"[\\,\/,\:,\*,\?,\"",\<,\>,\|]", "_").Split('\r')[0].Split('\n')[0];
                _Logistics.MID = mid;
                SetFileParameterLotID(mid);
                _Logistics.ProcessJobID = processData.Reactor;
            }
            if (!iProcessData.Details.Any())
                throw new Exception(string.Concat("No Data - ", dateTime.Ticks));
            results = iProcessData.GetResults(this, _Logistics, results.Item4);
        }
        return results;
    }

}
|
672
Adaptation/FileHandlers/pcl/ProcessData.cs
Normal file
672
Adaptation/FileHandlers/pcl/ProcessData.cs
Normal file
@ -0,0 +1,672 @@
|
||||
using Adaptation.Shared;
|
||||
using Adaptation.Shared.Methods;
|
||||
using log4net;
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Data;
|
||||
using System.Diagnostics;
|
||||
using System.Globalization;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Text;
|
||||
using System.Text.Json;
|
||||
using System.Text.RegularExpressions;
|
||||
|
||||
namespace Adaptation.FileHandlers.pcl;
|
||||
|
||||
public class ProcessData : IProcessData
|
||||
{
|
||||
|
||||
    // Parse cursor into _Data (character index used by ScanPast/GetBefore).
    private int _I;
    // Full text of the converted report being parsed.
    private string _Data;

    private readonly ILog _Log;
    // One Detail per wafer; exposed via IProcessData.Details.
    private readonly List<object> _Details;

    // Run-level values parsed from the report header and Lot Summary page.
    public string JobID { get; set; }
    public string MesEntity { get; set; }
    public string AreaCountAvg { get; set; }
    public string AreaCountMax { get; set; }
    public string AreaCountMin { get; set; }
    public string AreaCountStdDev { get; set; }
    public string AreaTotalAvg { get; set; }
    public string AreaTotalMax { get; set; }
    public string AreaTotalMin { get; set; }
    public string AreaTotalStdDev { get; set; }
    public string Date { get; set; }
    public string HazeAverageAvg { get; set; }
    public string HazeAverageMax { get; set; }
    public string HazeAverageMin { get; set; }
    public string HazeAverageStdDev { get; set; }
    public string HazeRegionAvg { get; set; }
    public string HazeRegionMax { get; set; }
    public string HazeRegionMin { get; set; }
    public string HazeRegionStdDev { get; set; }
    public string LPDCM2Avg { get; set; }
    public string LPDCM2Max { get; set; }
    public string LPDCM2Min { get; set; }
    public string LPDCM2StdDev { get; set; }
    public string LPDCountAvg { get; set; }
    public string LPDCountMax { get; set; }
    public string LPDCountMin { get; set; }
    public string LPDCountStdDev { get; set; }
    public string Lot { get; set; }
    public string ParseErrorText { get; set; }
    public string PSN { get; set; }
    public string RDS { get; set; }
    public string Reactor { get; set; }
    public string Recipe { get; set; }
    public string ScratchCountAvg { get; set; }
    public string ScratchCountMax { get; set; }
    public string ScratchCountMin { get; set; }
    public string ScratchCountStdDev { get; set; }
    public string ScratchTotalAvg { get; set; }
    public string ScratchTotalMax { get; set; }
    public string ScratchTotalMin { get; set; }
    public string ScratchTotalStdDev { get; set; }
    public string SumOfDefectsAvg { get; set; }
    public string SumOfDefectsMax { get; set; }
    public string SumOfDefectsMin { get; set; }
    public string SumOfDefectsStdDev { get; set; }
    public string UniqueId { get; set; }

    // Per-wafer details collected during Parse.
    List<object> Shared.Properties.IProcessData.Details => _Details;
|
||||
|
||||
    /// <summary>
    /// Initializes run state and immediately parses the report.
    /// Clears <paramref name="fileInfoCollection"/> first; Parse repopulates it
    /// with any files produced during conversion.
    /// </summary>
    /// <param name="ghostPCLFileName">Path to the GhostPCL executable used to convert the PCL report.</param>
    public ProcessData(IFileRead fileRead, Logistics logistics, List<FileInfo> fileInfoCollection, string ghostPCLFileName)
    {
        fileInfoCollection.Clear();
        _Details = new List<object>();
        _I = 0;
        _Data = string.Empty;
        JobID = logistics.JobID;
        MesEntity = logistics.MesEntity;
        _Log = LogManager.GetLogger(typeof(ProcessData));
        Parse(fileRead, logistics, fileInfoCollection, ghostPCLFileName);
    }
|
||||
|
||||
    // Not supported here: the reactor is derived during Parse instead.
    string IProcessData.GetCurrentReactor(IFileRead fileRead, Logistics logistics, Dictionary<string, string> reactors) => throw new Exception(string.Concat("See ", nameof(Parse)));
|
||||
|
||||
    /// <summary>
    /// Converts the parsed details into the framework result tuple: one
    /// Test.Tencor entry per detail, matching Description rows serialized to
    /// JsonElement[]. Throws when counts or Test values do not line up.
    /// </summary>
    Tuple<string, Test[], JsonElement[], List<FileInfo>> IProcessData.GetResults(IFileRead fileRead, Logistics logistics, List<FileInfo> fileInfoCollection)
    {
        Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
        List<Test> tests = new();
        // One Tencor test per detail (the item itself is not used).
        foreach (object item in _Details)
            tests.Add(Test.Tencor);
        List<IDescription> descriptions = fileRead.GetDescriptions(fileRead, tests, this);
        if (tests.Count != descriptions.Count)
            throw new Exception();
        for (int i = 0; i < tests.Count; i++)
        {
            if (descriptions[i] is not Description description)
                throw new Exception();
            // Sanity-check each description's Test against the expected enum value.
            if (description.Test != (int)tests[i])
                throw new Exception();
        }
        List<Description> fileReadDescriptions = (from l in descriptions select (Description)l).ToList();
        string json = JsonSerializer.Serialize(fileReadDescriptions, fileReadDescriptions.GetType());
        JsonElement[] jsonElements = JsonSerializer.Deserialize<JsonElement[]>(json);
        results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(logistics.Logistics1[0], tests.ToArray(), jsonElements, fileInfoCollection);
        return results;
    }
|
||||
|
||||
    /// <summary>
    /// Test and fix a data line from the Lot Summary page if there are two values that are merged.
    /// The page is fixed-width (9 columns); when a split line comes up short,
    /// over-long cells are assumed to be two merged values and are split using
    /// the expected column widths, working from the rightmost column inward.
    /// </summary>
    /// <param name="toEol">data line from Lot Summary; replaced in place with the repaired array</param>
    private void FixToEolArray(ref string[] toEol)
    {
        const int MAX_COLUMNS = 9;
        // Expected character width of each of the nine Lot Summary columns.
        int[] mColumnWidths = new int[MAX_COLUMNS] { 8, 6, 6, 6, 6, 7, 7, 5, 7 };
        // is it short at least one data point
        if (toEol.Length < MAX_COLUMNS)
        {
            // NOTE: passing an array to ILog.Debug(object) logs the array's
            // ToString() (its type name), not its contents.
            _Log.Debug($"****FixToEolArray - Starting array:");
            _Log.Debug(toEol);
            _Log.Debug($"****FixToEolArray - Column widths:");
            _Log.Debug(mColumnWidths);
            string leftVal, rightVal;

            // size up and assign a working list
            List<string> toEolList = new(toEol);
            if (string.IsNullOrEmpty(toEolList[toEolList.Count - 1]))
                toEolList.RemoveAt(toEolList.Count - 1); // removes a null element at end
            _Log.Debug($"****FixToEolArray - New toEolList:");
            _Log.Debug(toEolList);
            // Left-pad the list with empty cells so it has MAX_COLUMNS entries.
            for (int i = toEolList.Count; i < MAX_COLUMNS; i++)
                toEolList.Insert(0, ""); // insert to top of list
            _Log.Debug(toEolList);

            // start at the end
            for (int i = MAX_COLUMNS - 1; i >= 0; i--)
            {
                // test for a bad value - does it have too many characters
                _Log.Debug($"****FixToEolArray - toEolList[i].Length: {toEolList[i].Length}, mColumnWidths[i]: {mColumnWidths[i]}");
                if (toEolList[i].Length > mColumnWidths[i])
                {
                    // split it up into its two parts: the excess characters on
                    // the left belong to the previous column.
                    leftVal = toEolList[i].Substring(0, toEolList[i].Length - mColumnWidths[i]);
                    rightVal = toEolList[i].Substring(leftVal.Length);
                    _Log.Debug($"****FixToEolArray - Split leftVal: {leftVal}");
                    _Log.Debug($"****FixToEolArray - Split rightVal: {rightVal}");

                    // insert new value
                    toEolList[i] = rightVal;
                    toEolList.Insert(i, leftVal);
                    // Drop the padding cell displaced off the front, keeping the count at MAX_COLUMNS.
                    if (string.IsNullOrEmpty(toEolList[0]))
                        toEolList.RemoveAt(0); // removes a null element at end
                    _Log.Debug($"****FixToEolArray - Fixed toEolList:");
                    _Log.Debug(toEolList);
                }
            }
            toEol = toEolList.ToArray();
            _Log.Debug($"****FixToEolArray - Ending array:");
            _Log.Debug(toEol);
        }
    }
|
||||
|
||||
// Advances the cursor (_I) to just past the next occurrence of <paramref name="text"/>;
// when the marker is absent, the cursor jumps to the end of the buffer.
private void ScanPast(string text)
{
    int foundAt = _Data.IndexOf(text, _I);
    _I = foundAt > -1 ? foundAt + text.Length : _Data.Length;
}
// Returns the trimmed text between the cursor and the next occurrence of
// <paramref name="text"/>, advancing the cursor past the marker. When the
// marker is absent, returns the trimmed remainder and parks the cursor at the end.
private string GetBefore(string text)
{
    int foundAt = _Data.IndexOf(text, _I);
    if (foundAt < 0)
    {
        string remainder = _Data.Substring(_I);
        _I = _Data.Length;
        return remainder.Trim();
    }
    string captured = _Data.Substring(_I, foundAt - _I);
    _I = foundAt + text.Length;
    return captured.Trim();
}
// Same contract as GetBefore(string) but with trimming optional: trim == true
// delegates to the trimming overload; trim == false returns the raw slice.
private string GetBefore(string text, bool trim)
{
    if (trim)
        return GetBefore(text);
    int foundAt = _Data.IndexOf(text, _I);
    if (foundAt < 0)
    {
        string remainder = _Data.Substring(_I);
        _I = _Data.Length;
        return remainder;
    }
    string captured = _Data.Substring(_I, foundAt - _I);
    _I = foundAt + text.Length;
    return captured;
}
// True when every character in <paramref name="text"/> is whitespace (an empty
// string counts as whitespace-only). Unlike string.IsNullOrWhiteSpace, a null
// argument throws, matching the original behavior.
private static bool IsNullOrWhiteSpace(string text)
{
    foreach (char c in text)
    {
        if (!char.IsWhiteSpace(c))
            return false;
    }
    return true;
}
// True when the text from the cursor to the next newline (or end of buffer)
// contains only whitespace. The cursor is NOT moved.
private bool IsBlankLine()
{
    int newlineAt = _Data.IndexOf("\n", _I);
    string line = newlineAt > -1 ? _Data.Substring(_I, newlineAt - _I) : _Data.Substring(_I);
    return IsNullOrWhiteSpace(line);
}
// Reads (and trims) the rest of the current line, advancing the cursor past the newline.
private string GetToEOL()
{
    return GetBefore("\n");
}
// Reads the rest of the current line, optionally trimming it; the cursor ends past the newline.
private string GetToEOL(bool trim) => trim ? GetToEOL() : GetBefore("\n", false);
// Returns the trimmed text between the cursor and the next occurrence of
// <paramref name="text"/> WITHOUT advancing the cursor. As in the original,
// a missing marker makes IndexOf return -1 and Substring throw.
private string GetToText(string text)
{
    int foundAt = _Data.IndexOf(text, _I);
    return _Data.Substring(_I, foundAt - _I).Trim();
}
/// <summary>
/// Skips leading whitespace, then reads and returns the next run of
/// non-whitespace characters, leaving the cursor just after the token.
/// </summary>
/// <returns>the next whitespace-delimited token; empty when the buffer is exhausted</returns>
private string GetToken()
{
    // Skip leading whitespace. char.IsWhiteSpace on the indexed char is equivalent to
    // the old IsNullOrWhiteSpace(Substring(_I, 1)) but avoids a string alloc per character.
    while (_I < _Data.Length && char.IsWhiteSpace(_Data[_I]))
        ++_I;
    // Consume the non-whitespace run.
    int j = _I;
    while (j < _Data.Length && !char.IsWhiteSpace(_Data[j]))
        ++j;
    string str = _Data.Substring(_I, j - _I);
    _I = j;
    // Trim kept for parity with the original (a no-op here since the run has no whitespace).
    return str.Trim();
}
// Returns the next line without consuming it: reads to end-of-line, then restores the cursor.
private string PeekNextLine()
{
    int savedPosition = _I;
    string line = GetToEOL();
    _I = savedPosition;
    return line;
}
/// <summary>
/// Parses the Lot Summary page (the last split page of the report): header fields
/// (Date, Recipe, Lot), the wafer slot list (lines flagged with '*'), and the four
/// 9-column statistics rows (Min / Max / Average / Std Dev), storing everything in
/// this instance's properties and the <paramref name="slots"/> dictionary.
/// </summary>
/// <param name="fileRead">unused here beyond a null probe; kept for signature symmetry with Parse</param>
/// <param name="logistics">supplies JobID and ReportFullPath for the header UniqueId</param>
/// <param name="headerFileName">key of the lot-summary page in <paramref name="pages"/></param>
/// <param name="pages">page-file-name -> extracted page text</param>
/// <param name="slots">out-style: one "*NN" key is added per wafer slot found</param>
/// <exception cref="Exception">when <paramref name="headerFileName"/> has no page text</exception>
private void ParseLotSummary(IFileRead fileRead, ILogistics logistics, string headerFileName, Dictionary<string, string> pages, Dictionary<string, List<Detail>> slots)
{
    // Intentional no-op null probe (keeps the parameter referenced).
    if (fileRead is null)
    { }
    _I = 0;
    //string headerText;
    //string altHeaderFileName = Path.ChangeExtension(headerFileName, ".txt");
    //if (File.Exists(altHeaderFileName))
    //    headerText = File.ReadAllText(altHeaderFileName);
    //else
    //{
    //    //Pdfbox, IKVM.AWT.WinForms
    //    org.apache.pdfbox.pdmodel.PDDocument pdfDocument = org.apache.pdfbox.pdmodel.PDDocument.load(headerFileName);
    //    org.apache.pdfbox.util.PDFTextStripper stripper = new org.apache.pdfbox.util.PDFTextStripper();
    //    headerText = stripper.getText(pdfDocument);
    //    pdfDocument.close();
    //    File.AppendAllText(altHeaderFileName, headerText);
    //}
    //result.Id = h;
    //result.Title = h;
    //result.Zone = h;
    //result.PSN = h;
    //result.Layer = h;
    ParseErrorText = string.Empty;
    if (!pages.ContainsKey(headerFileName))
        throw new Exception();
    // Point the shared cursor parser (_Data/_I) at the lot-summary page text.
    _I = 0;
    _Data = pages[headerFileName];
    ScanPast("Date:");
    Date = GetToEOL();
    ScanPast("Recipe ID:");
    Recipe = GetBefore("LotID:");
    Recipe = Recipe.Replace(";", "");
    // The lot id is terminated by a bracketed marker whose exact form varies by report
    // ("[]", "[7]", or a bare "[") — try the most specific first.
    if (_Data.Contains("[]"))
        Lot = GetBefore("[]");
    else if (_Data.Contains("[7]"))
        Lot = GetBefore("[7]");
    else
        Lot = GetBefore("[");

    // Remove illegal characters \/:*?"<>| found in the Lot (it is later used in file names),
    // and keep only the first line if the capture spilled past a line break.
    Lot = Regex.Replace(Lot, @"[\\,\/,\:,\*,\?,\"",\<,\>,\|]", "_").Split('\r')[0].Split('\n')[0];

    // determine number of wafers and their slot numbers — each slot line begins with '*'
    _Log.Debug(_Data.Substring(_I));
    string slot;
    string toEOL;
    int slotCount = _Data.Substring(_I).Split('*').Length - 1;
    _Log.Debug($"****HeaderFile - Slot Count: {slotCount}.");
    for (int i = 0; i < slotCount; i++)
    {
        ScanPast("*");
        toEOL = GetToEOL(false);
        // Slot key is "*" + the two characters after it, e.g. "*01".
        slot = string.Concat("*", toEOL.Substring(0, 2));
        if (!slots.ContainsKey(slot))
            slots.Add(slot, new List<Detail>());
    }
    _Log.Debug($"****HeaderFile - Slots:");
    _Log.Debug(slots);

    // Four statistics rows follow, each with the same 9 columns; FixToEolArray repairs
    // tokens that merged because adjacent columns ran together.
    ScanPast("Min:");

    string[] toEol1 = GetToEOL(false).Trim().Split(' ');
    _Log.Debug($"****HeaderFile - toEol1 Count: {toEol1.Length}.");
    FixToEolArray(ref toEol1);
    LPDCountMin = toEol1[0].Trim();
    LPDCM2Min = toEol1[1].Trim();
    AreaCountMin = toEol1[2].Trim();
    AreaTotalMin = toEol1[3].Trim();
    ScratchCountMin = toEol1[4].Trim();
    ScratchTotalMin = toEol1[5].Trim();
    SumOfDefectsMin = toEol1[6].Trim();
    HazeRegionMin = toEol1[7].Trim();
    HazeAverageMin = toEol1[8].Trim();
    ScanPast("Max:");

    string[] toEol2 = GetToEOL(false).Trim().Split(' ');
    _Log.Debug($"****HeaderFile - toEol2 Count: {toEol2.Length}.");
    FixToEolArray(ref toEol2);
    LPDCountMax = toEol2[0].Trim();
    LPDCM2Max = toEol2[1].Trim();
    AreaCountMax = toEol2[2].Trim();
    AreaTotalMax = toEol2[3].Trim();
    ScratchCountMax = toEol2[4].Trim();
    ScratchTotalMax = toEol2[5].Trim();
    SumOfDefectsMax = toEol2[6].Trim();
    HazeRegionMax = toEol2[7].Trim();
    HazeAverageMax = toEol2[8].Trim();
    ScanPast("Average:");

    string[] toEol3 = GetToEOL(false).Trim().Split(' ');
    _Log.Debug($"****HeaderFile - toEol3 Count: {toEol3.Length}.");
    FixToEolArray(ref toEol3);
    LPDCountAvg = toEol3[0].Trim();
    LPDCM2Avg = toEol3[1].Trim();
    AreaCountAvg = toEol3[2].Trim();
    AreaTotalAvg = toEol3[3].Trim();
    ScratchCountAvg = toEol3[4].Trim();
    ScratchTotalAvg = toEol3[5].Trim();
    SumOfDefectsAvg = toEol3[6].Trim();
    HazeRegionAvg = toEol3[7].Trim();
    HazeAverageAvg = toEol3[8].Trim();
    ScanPast("Std Dev:");

    string[] toEol4 = GetToEOL(false).Trim().Split(' ');
    _Log.Debug($"****HeaderFile - toEol4 Count: {toEol4.Length}.");
    FixToEolArray(ref toEol4);
    LPDCountStdDev = toEol4[0].Trim();
    LPDCM2StdDev = toEol4[1].Trim();
    AreaCountStdDev = toEol4[2].Trim();
    AreaTotalStdDev = toEol4[3].Trim();
    ScratchCountStdDev = toEol4[4].Trim();
    ScratchTotalStdDev = toEol4[5].Trim();
    SumOfDefectsStdDev = toEol4[6].Trim();
    HazeRegionStdDev = toEol4[7].Trim();
    HazeAverageStdDev = toEol4[8].Trim();

    // Lot is expected to be "Reactor-RDS-PSN"; missing segments are simply left unset.
    string[] segments = Lot.Split('-');
    if (segments.Length > 0)
        Reactor = segments[0];
    if (segments.Length > 1)
        RDS = segments[1];
    if (segments.Length > 2)
        PSN = segments[2];
    // Example of header.UniqueId is TENCOR1_33-289217-4693_201901300556533336
    UniqueId = string.Format("{0}_{1}_{2}", logistics.JobID, Lot, Path.GetFileNameWithoutExtension(logistics.ReportFullPath));
}
/// <summary>
/// Parses one wafer-summary page into a <see cref="Detail"/> by scanning the page
/// text label-by-label with the shared cursor parser (_Data/_I). The label order is
/// fixed by the report layout; each ScanPast/GetToEOL pair consumes one field.
/// Must be called after ParseLotSummary, which sets <see cref="UniqueId"/>.
/// </summary>
/// <param name="waferFileName">key of the wafer page in <paramref name="pages"/></param>
/// <param name="pages">page-file-name -> extracted page text</param>
/// <returns>the populated Detail for this wafer</returns>
/// <exception cref="Exception">when <paramref name="waferFileName"/> has no page text</exception>
private Detail ParseWaferSummary(string waferFileName, Dictionary<string, string> pages)
{
    Detail result = new() { Data = "*Data*", i = -1, };
    _I = 0;
    //string waferText;
    //string altWaferFileName = Path.ChangeExtension(waferFileName, ".txt");
    //if (File.Exists(altWaferFileName))
    //    waferText = File.ReadAllText(altWaferFileName);
    //else
    //{
    //    //Pdfbox, IKVM.AWT.WinForms
    //    org.apache.pdfbox.pdmodel.PDDocument pdfDocument = org.apache.pdfbox.pdmodel.PDDocument.load(waferFileName);
    //    org.apache.pdfbox.util.PDFTextStripper dataStripper = new org.apache.pdfbox.util.PDFTextStripper();
    //    waferText = dataStripper.getText(pdfDocument);
    //    pdfDocument.close();
    //    File.AppendAllText(altWaferFileName, waferText);
    //}
    List<string> stringList = new();
    result.HeaderUniqueId = UniqueId;
    result.Id = 0;
    result.Title = null;
    if (!pages.ContainsKey(waferFileName))
        throw new Exception();
    _I = 0;
    _Data = pages[waferFileName];
    ScanPast("Date:");
    result.Date = GetToEOL();
    ScanPast("ID#");
    result.Slot = GetToEOL();
    // Overlong slot ids are truncated to 5 chars plus a marker; the raw '*' prefix is kept
    // because the slots dictionary built in ParseLotSummary is keyed with it.
    if (result.Slot.Length > 5)
        result.Slot = string.Concat(result.Slot.Substring(0, 5), "... - ***");
    //result.Slot = result.Slot.Replace("*", "");
    ScanPast("Comments:");
    result.Comments = GetToEOL();
    ScanPast("Sort:");
    result.Sort = GetToEOL();
    ScanPast("LPD Count:");
    result.LPDCount = GetToEOL();
    ScanPast("LPD / cm2:");
    result.LPDCM2 = GetToEOL();
    // Variable-length "Bin N:" list; the loop exits on the first non-Bin label,
    // whose value is then read below as Mean (the ':' has already been consumed).
    while (GetBefore(":").Contains("Bin"))
        stringList.Add(GetToEOL());
    if (stringList.Count >= 1)
        result.Bin1 = stringList[0];
    if (stringList.Count >= 2)
        result.Bin2 = stringList[1];
    if (stringList.Count >= 3)
        result.Bin3 = stringList[2];
    if (stringList.Count >= 4)
        result.Bin4 = stringList[3];
    if (stringList.Count >= 5)
        result.Bin5 = stringList[4];
    if (stringList.Count >= 6)
        result.Bin6 = stringList[5];
    if (stringList.Count >= 7)
        result.Bin7 = stringList[6];
    if (stringList.Count >= 8)
        result.Bin8 = stringList[7];
    result.Mean = GetToEOL();
    ScanPast("Std Dev:");
    result.StdDev = GetToEOL();
    ScanPast("Area Count:");
    result.AreaCount = GetToEOL();
    ScanPast("Area Total:");
    result.AreaTotal = GetToEOL();
    ScanPast("Scratch Count:");
    result.ScratchCount = GetToEOL();
    ScanPast("Scratch Total:");
    result.ScratchTotal = GetToEOL();
    ScanPast("Sum of All Defects:");
    result.SumOfDefects = GetToEOL();
    ScanPast("Haze Region:");
    result.HazeRegion = GetToEOL();
    ScanPast("Haze Average:");
    result.HazeAverage = GetToEOL();
    ScanPast("Haze Peak:");
    result.HazePeak = GetToEOL();
    // The next three labels share one line; GetBefore splits the line at each label.
    ScanPast("Laser:");
    result.Laser = GetBefore("Gain:");
    result.Gain = GetBefore("Diameter:");
    result.Diameter = GetToEOL();
    ScanPast("Thresh:");
    result.Thresh = GetBefore("Exclusion:");
    result.Exclusion = GetToEOL();
    ScanPast("Haze Rng:");
    result.HazeRng = GetBefore("Thruput:");
    result.Thruput = GetToEOL();
    ScanPast("Recipe ID:");
    result.Recipe = GetToEOL();
    // Wafer UniqueId = header UniqueId + normalized slot number ('*' and leading zeros stripped).
    result.UniqueId = string.Format("{0}_{1}", UniqueId, result.Slot.Replace("*", string.Empty).TrimStart('0'));
    return result;
}
/// <summary>
/// Convert the raw data file to parsable file format - in this case from PCL to PDF.
/// Runs the GhostPCL executable; the output .pdf sits next to the source file and is
/// reused without reconversion when it already exists.
/// </summary>
/// <param name="ghostPCLFileName">full path to the GhostPCL executable</param>
/// <param name="logistics">supplies ReportFullPath, the PCL source file to be converted to PDF</param>
/// <returns>full path of the converted (or pre-existing) PDF file</returns>
/// <exception cref="Exception">when conversion times out or produces no PDF</exception>
private static string ConvertSourceFileToPdf(string ghostPCLFileName, Logistics logistics)
{
    string result = Path.ChangeExtension(logistics.ReportFullPath, ".pdf");
    if (!File.Exists(result))
    {
        string arguments = string.Concat("-dSAFER -dBATCH -dNOPAUSE -sOutputFile=\"", result, "\" -sDEVICE=pdfwrite \"", logistics.ReportFullPath, "\"");
        // Dispose the Process handle (the original leaked it), and fail loudly on a
        // timeout instead of silently discarding WaitForExit's result — a still-running
        // converter could otherwise leave a half-written PDF that passes the File.Exists check.
        using (Process process = Process.Start(ghostPCLFileName, arguments))
        {
            if (!process.WaitForExit(30000))
                throw new Exception("GhostPCL didn't finish converting the PCL file to PDF within 30 seconds");
        }
        if (!File.Exists(result))
            throw new Exception("PDF file wasn't created");
    }
    return result;
}
/// <summary>
/// Top-level parse for one PCL report: converts it to PDF, splits the PDF into pages
/// with PDFBox (odd pages are wafer images, even are wafer summaries, the last page is
/// the Lot Summary — per the commented PdfSharp notes), extracts/caches each page's
/// text, parses the lot summary and every wafer summary, renames pages to
/// slot-based "_data"/"_image" names, records missing slots, and collects every file
/// produced into <paramref name="fileInfoCollection"/>.
/// </summary>
/// <param name="fileRead">passed through to ParseLotSummary</param>
/// <param name="logistics">supplies ReportFullPath (the source PCL) and JobID</param>
/// <param name="fileInfoCollection">out-style: receives every generated/source file</param>
/// <param name="ghostPCLFileName">GhostPCL executable used for the PCL→PDF conversion</param>
private void Parse(IFileRead fileRead, Logistics logistics, List<FileInfo> fileInfoCollection, string ghostPCLFileName)
{
    object item;
    string pageText;
    string pagePDFFile;
    string pageTextFile;
    List<string> sourceFiles = new();
    List<string> missingSlots = new();
    List<Detail> dataFiles = new();
    Dictionary<string, string> pages = new();
    string sourcePath = Path.GetDirectoryName(logistics.ReportFullPath);
    Dictionary<string, List<Detail>> slots = new();
    string sourceFileNamePdf = ConvertSourceFileToPdf(ghostPCLFileName, logistics);
    sourceFiles.Add(sourceFileNamePdf);
    string sourceFileNameNoExt = Path.GetFileNameWithoutExtension(logistics.ReportFullPath);
    ////PdfSharp open pdf
    //using (PdfSharp.Pdf.PdfDocument sourceDocument = PdfSharp.Pdf.IO.PdfReader.Open(sourceFileNamePdf, PdfSharp.Pdf.IO.PdfDocumentOpenMode.Import))
    //{
    //    for (int idxPage = 0; idxPage < sourceDocument.PageCount; idxPage++)
    //    {
    //        // split the pdf into separate pages. Odd pages are wafer image, even are wafer summary. Last page is Lot Summary.
    //        _Log.Debug($"****ParseData - Splitting page: {idxPage}, sourceDocument: {sourceDocument.FullPath}, sourcePathFileNoExt: {sourcePathFileNoExt}");
    //        //SplitPage(sourceDocument, sourcePathFileNoExt, idxPage);
    //        pageNum = idxPage + 1;
    //        pageFile = string.Format("{0}_{1}.pdf", sourcePathFileNoExt, pageNum);
    //        _Log.Debug($"****SplitPage - Page {pageNum} Source file: {sourceDocument.FullPath}");
    //        _Log.Debug($"****SplitPage - Page {pageNum} Output file: {pageFile}");
    //        //PdfSharp Create new document
    //        PdfSharp.Pdf.PdfDocument outputDocument = new PdfSharp.Pdf.PdfDocument { Version = sourceDocument.Version };
    //        outputDocument.Info.Title = string.Format("Page {0} of {1}", pageNum, sourceDocument.Info.Title);
    //        outputDocument.Info.Creator = sourceDocument.Info.Creator;
    //        outputDocument.AddPage(sourceDocument.Pages[idxPage]);
    //        outputDocument.Pages[0].CropBox = new PdfSharp.Pdf.PdfRectangle(new PdfSharp.Drawing.XRect(0, 100, 700, 700));
    //        outputDocument.Save(pageFile);
    //    }
    //    sourceDocumentPageCount = sourceDocument.PageCount;
    //    sourceDocument.Close();
    //}
    // PDFBox via IKVM java interop: split the converted PDF into one document per page.
    java.io.File file = new(sourceFileNamePdf);
    org.apache.pdfbox.util.Splitter splitter = new();
    org.apache.pdfbox.pdmodel.PDDocument pdDocument = org.apache.pdfbox.pdmodel.PDDocument.load(file);
    java.util.List list = splitter.split(pdDocument);
    java.util.ListIterator iterator = list.listIterator();
    org.apache.pdfbox.util.PDFTextStripper dataStripper = new();
    // 1-based page counter; the iterator's hasNext ends the loop, short.MaxValue is just a guard.
    for (short i = 1; i < short.MaxValue; i++)
    {
        if (!iterator.hasNext())
            break;
        item = iterator.next();
        pagePDFFile = string.Concat(sourcePath, @"\", sourceFileNameNoExt, "_", i, ".pdf");
        pageTextFile = Path.ChangeExtension(pagePDFFile, ".txt");
        if (File.Exists(pageTextFile))
        {
            // Cached text already on disk — reuse it and just close the split document.
            pageText = File.ReadAllText(pageTextFile);
            sourceFiles.Add(pageTextFile);
            if (item is not org.apache.pdfbox.pdmodel.PDDocument pd)
                continue;
            pd.close();
        }
        else if (File.Exists(pagePDFFile))
        {
            // Page PDF exists from a previous run — strip its text fresh.
            org.apache.pdfbox.pdmodel.PDDocument document = org.apache.pdfbox.pdmodel.PDDocument.load(pagePDFFile);
            pageText = dataStripper.getText(document);
            document.close();
            sourceFiles.Add(pagePDFFile);
            if (item is not org.apache.pdfbox.pdmodel.PDDocument pd)
                continue;
            pd.close();
        }
        else
        {
            // First run: save the split page PDF and cache its stripped text alongside it.
            if (item is not org.apache.pdfbox.pdmodel.PDDocument pd)
                continue;
            pageText = dataStripper.getText(pd);
            pd.save(pagePDFFile);
            sourceFiles.Add(pagePDFFile);
            pd.close();
            File.WriteAllText(pageTextFile, pageText);
            sourceFiles.Add(pageTextFile);
        }
        pages.Add(pagePDFFile, pageText);
    }
    pdDocument.close();
    // parse lot summary — by convention the LAST split page (index == pages.Count).
    _Log.Debug($"****ParseData - Parsing lot summary");
    List<Tuple<string, string>> pageMapping = new();
    string headerFileName = string.Concat(sourcePath, @"\", sourceFileNameNoExt, "_", pages.Count, ".pdf");
    ParseLotSummary(fileRead, logistics, headerFileName, pages, slots);
    // Map each remaining page to its slot-based target name (empty string = no mapping:
    // blank page, wrong recipe, or unknown slot — those go to missingSlots).
    foreach (KeyValuePair<string, string> keyValuePair in pages)
    {
        if (keyValuePair.Key == headerFileName)
            continue;
        if (string.IsNullOrEmpty(keyValuePair.Value.Trim()))
        {
            pageMapping.Add(new Tuple<string, string>(keyValuePair.Key, string.Empty));
            continue;
        }
        if (!pages.ContainsKey(keyValuePair.Key))
            throw new Exception();
        Detail dataFile = ParseWaferSummary(keyValuePair.Key, pages);
        if (string.IsNullOrEmpty(dataFile.Recipe) || dataFile.Recipe != Recipe)
        {
            missingSlots.Add(keyValuePair.Key);
            pageMapping.Add(new Tuple<string, string>(keyValuePair.Key, string.Empty));
            continue;
        }
        if (!slots.ContainsKey(dataFile.Slot))
        {
            missingSlots.Add(keyValuePair.Key);
            pageMapping.Add(new Tuple<string, string>(keyValuePair.Key, string.Empty));
            continue;
        }
        pageMapping.Add(new Tuple<string, string>(keyValuePair.Key, string.Concat(sourcePath, @"\", sourceFileNameNoExt, "_", dataFile.Slot.Replace('*', 's'), "_data.pdf")));
        slots[dataFile.Slot].Add(dataFile);
    }
    // Rename the lot-summary page itself to "<name>_data.pdf".
    string checkFileName = string.Concat(sourcePath, @"\", sourceFileNameNoExt, "_data.pdf");
    if (!File.Exists(checkFileName))
    {
        File.Move(headerFileName, checkFileName);
        _ = sourceFiles.Remove(headerFileName);
        sourceFiles.Add(checkFileName);
    }
    checkFileName = string.Empty;
    // Walk the mapping backwards: a mapped (summary) page is renamed to its slot name, and
    // the unmapped page immediately BEFORE it is assumed to be the matching wafer image
    // ("_data.pdf" -> "_image.pdf") — NOTE(review): relies on the image/summary page pairing.
    for (int i = pageMapping.Count - 1; i > -1; i--)
    {
        if (!string.IsNullOrEmpty(pageMapping[i].Item2))
        {
            checkFileName = pageMapping[i].Item2;
            if (!File.Exists(checkFileName))
            {
                File.Move(pageMapping[i].Item1, checkFileName);
                _ = sourceFiles.Remove(pageMapping[i].Item1);
                sourceFiles.Add(checkFileName);
            }
        }
        else if (!string.IsNullOrEmpty(checkFileName))
        {
            //if (i == 0 || !string.IsNullOrEmpty(pageMapping[i - 1].Item2))
            //{
            checkFileName = checkFileName.Replace("_data.pdf", "_image.pdf");
            if (!File.Exists(checkFileName))
            {
                File.Move(pageMapping[i].Item1, checkFileName);
                _ = sourceFiles.Remove(pageMapping[i].Item1);
                sourceFiles.Add(checkFileName);
            }
            //}
            checkFileName = string.Empty;
        }
    }
    // Collect parsed details; slots that never received a Detail are reported as missing.
    foreach (KeyValuePair<string, List<Detail>> keyValuePair in slots)
    {
        if (!keyValuePair.Value.Any() || keyValuePair.Value[0] is null)
            missingSlots.Add(string.Concat("Slot ", keyValuePair.Key, ") is missing."));
        else
        {
            foreach (Detail data in keyValuePair.Value)
                dataFiles.Add(data);
        }
    }
    if (missingSlots.Any())
    {
        string missingSlotsFile = string.Concat(sourcePath, @"\", sourceFileNameNoExt, "_MissingSlots.txt");
        File.WriteAllLines(missingSlotsFile, missingSlots);
        sourceFiles.Add(missingSlotsFile);
    }
    // The first Parse validates the header date; it is then deliberately overwritten with
    // the current time because the tool's clock is untrusted (see note below).
    Date = DateTime.Parse(Date).ToString();
    //Equipment data is wrong!!!
    Date = DateTime.Now.ToString();
    //Equipment data is wrong!!!
    //for (int i = 0; i < dataFiles.Count; i++)
    //    dataFiles[i].Date = DateTime.Parse(dataFiles[i].Date).ToString();
    foreach (string sourceFile in sourceFiles)
        fileInfoCollection.Add(new FileInfo(sourceFile));
    fileInfoCollection.Add(new FileInfo(logistics.ReportFullPath));
}
}
|
Reference in New Issue
Block a user