Mike Phares c507cd4028 Removed Infineon.Mesa.gpcl6 and added Includes
Bug in yml
dotnet tool
PackageReference arrangement
RDS Blank
Change int dotnet test
nuget ^
[spcepiworld].[dbo].[evnt_inf]
Assembly Version
WS Result bug fix and Nuget bump, PSN, Reactor and Extra RDS rule
OpenInsightApi and testRunTitle
editorconfig bugs
Fix Type
serializerValue
RDS oversight
PropertyNameCaseInsensitive
Save check for file already present
NoWaitDirectory
MoveArchive allow empty directory and continueOnError for clean files
CreatePointerFile and more on NoWaitDirectory
2023-03-20 14:19:21 -07:00

775 lines
30 KiB
C#

using Adaptation.Shared;
using Adaptation.Shared.Methods;
using log4net;
using System;
using System.Collections.Generic;
using System.Data;
using System.Diagnostics;
using System.IO;
using System.Linq;
using System.Text.Json;
using System.Text.Json.Serialization;
using System.Text.RegularExpressions;
namespace Adaptation.FileHandlers.pcl;
/// <summary>
/// Extracts a lot summary and per-wafer details from a Tencor PCL report:
/// the PCL file is converted to PDF (GhostPCL), split into per-page text (pdfbox),
/// and the pages are parsed with a hand-rolled text scanner.
/// </summary>
public class ProcessData : IProcessData
{
// Scanner state: _I is the cursor into _Data (text of the page currently being parsed).
private int _I;
private string _Data;
private readonly ILog _Log;
// Per-wafer Detail records collected by Parse; exposed through IProcessData.Details.
private readonly List<object> _Details;
public string JobID { get; set; }
public string MesEntity { get; set; }
// Lot-summary statistics, kept as the raw strings read from the report's
// Min: / Max: / Average: / Std Dev: rows (see ParseLotSummary).
public string AreaCountAvg { get; set; }
public string AreaCountMax { get; set; }
public string AreaCountMin { get; set; }
public string AreaCountStdDev { get; set; }
public string AreaTotalAvg { get; set; }
public string AreaTotalMax { get; set; }
public string AreaTotalMin { get; set; }
public string AreaTotalStdDev { get; set; }
public DateTime Date { get; set; }
public string Employee { get; set; }
public string HazeAverageAvg { get; set; }
public string HazeAverageMax { get; set; }
public string HazeAverageMin { get; set; }
public string HazeAverageStdDev { get; set; }
public string HazeRegionAvg { get; set; }
public string HazeRegionMax { get; set; }
public string HazeRegionMin { get; set; }
public string HazeRegionStdDev { get; set; }
public string LPDCM2Avg { get; set; }
public string LPDCM2Max { get; set; }
public string LPDCM2Min { get; set; }
public string LPDCM2StdDev { get; set; }
public string LPDCountAvg { get; set; }
public string LPDCountMax { get; set; }
public string LPDCountMin { get; set; }
public string LPDCountStdDev { get; set; }
// Logistics/identity fields decoded from the header page (see Set / GetDescriptor).
public string Lot { get; set; }
public string PSN { get; set; }
public string ParseErrorText { get; set; }
public string RDS { get; set; }
public string Reactor { get; set; }
public string Recipe { get; set; }
public string ScratchCountAvg { get; set; }
public string ScratchCountMax { get; set; }
public string ScratchCountMin { get; set; }
public string ScratchCountStdDev { get; set; }
public string ScratchTotalAvg { get; set; }
public string ScratchTotalMax { get; set; }
public string ScratchTotalMin { get; set; }
public string ScratchTotalStdDev { get; set; }
public string SumOfDefectsAvg { get; set; }
public string SumOfDefectsMax { get; set; }
public string SumOfDefectsMin { get; set; }
public string SumOfDefectsStdDev { get; set; }
public string UniqueId { get; set; }
List<object> Shared.Properties.IProcessData.Details => _Details;
/// <summary>
/// Captures job identity from <paramref name="logistics"/>, resets the scanner state,
/// and immediately parses the report file (so construction does all the work).
/// </summary>
/// <param name="ghostPCLFileName">path to the GhostPCL executable used to convert PCL to PDF</param>
public ProcessData(IFileRead fileRead, Logistics logistics, List<FileInfo> fileInfoCollection, string ghostPCLFileName)
{
// Caller's collection is repopulated from scratch by Parse.
fileInfoCollection.Clear();
_Details = new List<object>();
_I = 0;
_Data = string.Empty;
JobID = logistics.JobID;
MesEntity = logistics.MesEntity;
_Log = LogManager.GetLogger(typeof(ProcessData));
Parse(fileRead, logistics, fileInfoCollection, ghostPCLFileName);
}
// Not supported by this handler; the reactor is derived during Parse/GetDescriptor instead.
string IProcessData.GetCurrentReactor(IFileRead fileRead, Logistics logistics, Dictionary<string, string> reactors) => throw new Exception(string.Concat("See ", nameof(Parse)));
/// <summary>
/// Builds the handler results: one <see cref="Test.Tencor"/> entry per parsed detail,
/// the matching descriptions round-tripped through JSON, and the collected files.
/// </summary>
/// <exception cref="Exception">Thrown when description count or test codes do not line up.</exception>
Tuple<string, Test[], JsonElement[], List<FileInfo>> IProcessData.GetResults(IFileRead fileRead, Logistics logistics, List<FileInfo> fileInfoCollection)
{
    // One Tencor test per detail; the detail objects themselves are not inspected here.
    List<Test> tests = Enumerable.Repeat(Test.Tencor, _Details.Count).ToList();
    List<IDescription> descriptions = fileRead.GetDescriptions(fileRead, tests, this);
    if (tests.Count != descriptions.Count)
        throw new Exception();
    // Sanity check: every description must be a Description whose Test code matches.
    for (int i = 0; i < tests.Count; i++)
    {
        if (descriptions[i] is not Description description)
            throw new Exception();
        if (description.Test != (int)tests[i])
            throw new Exception();
    }
    List<Description> fileReadDescriptions = descriptions.Cast<Description>().ToList();
    string json = JsonSerializer.Serialize(fileReadDescriptions, fileReadDescriptions.GetType());
    JsonElement[] jsonElements = JsonSerializer.Deserialize<JsonElement[]>(json);
    return new Tuple<string, Test[], JsonElement[], List<FileInfo>>(logistics.Logistics1[0], tests.ToArray(), jsonElements, fileInfoCollection);
}
/// <summary>
/// Test and fix a data line from the Lot Summary page if there are two values that are merged
/// (a value wider than its column ran into its neighbor when the line was split on spaces).
/// </summary>
/// <param name="toEol">data line from Lot Summary, already split on spaces; repaired in place</param>
private void FixToEolArray(ref string[] toEol)
{
const int MAX_COLUMNS = 9;
// expected character width of each of the nine statistics columns, left to right
int[] mColumnWidths = new int[MAX_COLUMNS] { 8, 6, 6, 6, 6, 7, 7, 5, 7 };
// is it short at least one data point
if (toEol.Length < MAX_COLUMNS)
{
_Log.Debug($"****FixToEolArray - Starting array:");
_Log.Debug(toEol);
_Log.Debug($"****FixToEolArray - Column widths:");
_Log.Debug(mColumnWidths);
string leftVal, rightVal;
// size up and assign a working list
List<string> toEolList = new(toEol);
if (string.IsNullOrEmpty(toEolList[toEolList.Count - 1]))
toEolList.RemoveAt(toEolList.Count - 1); // removes an empty element at the end
_Log.Debug($"****FixToEolArray - New toEolList:");
_Log.Debug(toEolList);
// pad the front so the list is MAX_COLUMNS long before width checking
for (int i = toEolList.Count; i < MAX_COLUMNS; i++)
toEolList.Insert(0, ""); // insert to top of list
_Log.Debug(toEolList);
// start at the end
for (int i = MAX_COLUMNS - 1; i >= 0; i--)
{
// test for a bad value - does it have too many characters
_Log.Debug($"****FixToEolArray - toEolList[i].Length: {toEolList[i].Length}, mColumnWidths[i]: {mColumnWidths[i]}");
if (toEolList[i].Length > mColumnWidths[i])
{
// split it up into its two parts: the overflow (left) and the column's own value (right)
leftVal = toEolList[i].Substring(0, toEolList[i].Length - mColumnWidths[i]);
rightVal = toEolList[i].Substring(leftVal.Length);
_Log.Debug($"****FixToEolArray - Split leftVal: {leftVal}");
_Log.Debug($"****FixToEolArray - Split rightVal: {rightVal}");
// insert new value
toEolList[i] = rightVal;
toEolList.Insert(i, leftVal);
if (string.IsNullOrEmpty(toEolList[0]))
toEolList.RemoveAt(0); // drop a front padding element so the count stays at MAX_COLUMNS
_Log.Debug($"****FixToEolArray - Fixed toEolList:");
_Log.Debug(toEolList);
}
}
toEol = toEolList.ToArray();
_Log.Debug($"****FixToEolArray - Ending array:");
_Log.Debug(toEol);
}
}
// Advances the cursor just past the next occurrence of <text>;
// parks it at end-of-data when the marker is absent.
private void ScanPast(string text)
{
    int found = _Data.IndexOf(text, _I);
    _I = found > -1 ? found + text.Length : _Data.Length;
}
// Returns the trimmed text between the cursor and the next occurrence of <text>,
// advancing the cursor past the marker; when the marker is absent, returns the
// trimmed remainder and parks the cursor at end-of-data.
private string GetBefore(string text)
{
    int found = _Data.IndexOf(text, _I);
    if (found < 0)
    {
        string remainder = _Data.Substring(_I);
        _I = _Data.Length;
        return remainder.Trim();
    }
    string captured = _Data.Substring(_I, found - _I);
    _I = found + text.Length;
    return captured.Trim();
}
// Same as GetBefore(string) but with the trailing Trim() made optional.
private string GetBefore(string text, bool trim)
{
    if (trim)
        return GetBefore(text);
    int found = _Data.IndexOf(text, _I);
    if (found < 0)
    {
        string remainder = _Data.Substring(_I);
        _I = _Data.Length;
        return remainder;
    }
    string captured = _Data.Substring(_I, found - _I);
    _I = found + text.Length;
    return captured;
}
// True when every character of <text> is whitespace (vacuously true for "").
// NOTE(review): despite the name, a null argument throws — callers always pass a string.
private static bool IsNullOrWhiteSpace(string text)
{
    foreach (char c in text)
    {
        if (!char.IsWhiteSpace(c))
            return false;
    }
    return true;
}
// True when the rest of the current line (cursor to next "\n", or to end-of-data)
// contains only whitespace. Does not move the cursor.
private bool IsBlankLine()
{
    int eol = _Data.IndexOf("\n", _I);
    string line = eol > -1 ? _Data.Substring(_I, eol - _I) : _Data.Substring(_I);
    return IsNullOrWhiteSpace(line);
}
// Consumes and returns the rest of the current line (trimmed by default).
private string GetToEOL() => GetBefore("\n");
private string GetToEOL(bool trim) => trim ? GetToEOL() : GetBefore("\n", false);
// Returns the trimmed text between the cursor and the next occurrence of <text> without moving the cursor.
// NOTE(review): if <text> is absent, IndexOf returns -1 and Substring throws ArgumentOutOfRangeException — confirm callers guarantee presence.
private string GetToText(string text) => _Data.Substring(_I, _Data.IndexOf(text, _I) - _I).Trim();
/// <summary>
/// Skips leading whitespace, then consumes and returns the next run of
/// non-whitespace characters; returns "" at end-of-data.
/// </summary>
private string GetToken()
{
    // Index characters directly instead of allocating one-char substrings
    // (IsNullOrWhiteSpace on a single-char string == char.IsWhiteSpace).
    while (_I < _Data.Length && char.IsWhiteSpace(_Data[_I]))
        ++_I;
    int end = _I;
    while (end < _Data.Length && !char.IsWhiteSpace(_Data[end]))
        ++end;
    string token = _Data.Substring(_I, end - _I);
    _I = end;
    return token.Trim();
}
// Reads the rest of the current line without consuming it (cursor is restored).
private string PeekNextLine()
{
    int saved = _I;
    string line = GetToEOL();
    _I = saved;
    return line;
}
// Decodes reactor and RDS from the '-'-separated lot segments.
// segments[0] is the reactor unless the lot text is unusable; segments[1] is the RDS
// when it parses as an int >= 99. A "reactor" longer than 3 chars is really an RDS.
private static (string, string) GetReactorAndRDS(string defaultReactor, string defaultRDS, string text, string formattedText, string[] segments, bool hasRDS)
{
    bool reactorUnusable = string.IsNullOrEmpty(text)
        || segments.Length == 0
        || string.IsNullOrEmpty(formattedText)
        || (segments.Length > 1 && !hasRDS);
    string reactor = reactorUnusable ? defaultReactor : segments[0];
    bool rdsIsNumeric = segments.Length > 1 && int.TryParse(segments[1], out int rdsValue) && rdsValue >= 99;
    string rds = rdsIsNumeric ? segments[1] : defaultRDS;
    if (reactor.Length > 3)
    {
        // Too long for a reactor number; treat it as the RDS instead.
        rds = reactor;
        reactor = defaultReactor;
    }
    return (reactor, rds);
}
// Decodes layer and PSN from segments[2], which has the shape "PSN[.layer]".
// A single leading zero is stripped from the layer ("05" -> "5").
private static (string, string) GetLayerAndPSN(string defaultLayer, string defaultPSN, string[] segments, bool hasRDS)
{
    if (segments.Length <= 2 || (segments.Length > 1 && !hasRDS))
        return (defaultLayer, defaultPSN);
    string[] parts = segments[2].Split('.');
    string psn = parts[0];
    if (parts.Length <= 1)
        return (defaultLayer, psn);
    string layer = parts[1];
    if (layer.Length > 1 && layer[0] == '0')
        layer = layer.Substring(1);
    return (layer, psn);
}
// Zone is segments[3] when present (single leading zero stripped), otherwise "".
private static string GetZone(string[] segments)
{
    if (segments.Length < 4)
        return string.Empty;
    string zone = segments[3];
    return zone.Length > 1 && zone[0] == '0' ? zone.Substring(1) : zone;
}
/// <summary>
/// Splits free-form lot text into employee / layer / lot / PSN / RDS / reactor / zone.
/// Three shapes are recognized: a 2-3 letter employee initial, a "NN.N[.N]"
/// reactor.layer[.zone] form, and the general "reactor-rds-psn.layer-zone" form.
/// </summary>
public static Descriptor GetDescriptor(string text)
{
    Descriptor result;
    string lot;
    string psn;
    string rds;
    string zone;
    string layer;
    string reactor;
    string employee;
    string defaultPSN = string.Empty;
    string defaultRDS = string.Empty;
    string defaultZone = string.Empty;
    string defaultLayer = string.Empty;
    string defaultReactor = string.Empty;
    string defaultEmployee = string.Empty;
    // BUG FIX: "[a-zA-z]" also matched '[', '\', ']', '^', '_' and '`'
    // (ASCII range Z..a); employee initials must be letters only.
    if (string.IsNullOrEmpty(text) || (text.Length is 2 or 3 && Regex.IsMatch(text, "^[a-zA-Z]{2,3}")))
    {
        // Empty text or a 2-3 letter employee initial.
        lot = text;
        employee = text;
        psn = defaultPSN;
        rds = defaultRDS;
        zone = defaultZone;
        layer = defaultLayer;
        reactor = defaultReactor;
    }
    else if (Regex.IsMatch(text, @"^[0-9]{2}[.][0-9]{1}[.]?[0-9]{0,1}"))
    {
        // "NN.N[.N]" => reactor.layer[.zone]
        string[] segments = text.Split('.');
        lot = text;
        psn = defaultPSN;
        rds = defaultRDS;
        layer = segments[1];
        reactor = segments[0];
        employee = defaultEmployee;
        if (segments.Length <= 2)
            zone = defaultZone;
        else
            zone = segments[2];
    }
    else
    {
        // Remove illegal characters \/:*?"<>| found in the Lot.
        // NOTE(review): the class below also matches ',' (the separators are inside the class) — confirm intended.
        lot = Regex.Replace(text, @"[\\,\/,\:,\*,\?,\"",\<,\>,\|]", "_").Split('\r')[0].Split('\n')[0];
        // Strip a leading "1T"/"1t" prefix.
        if (lot.Length > 2 && lot[0] == '1' && (lot[1] == 'T' || lot[1] == 't'))
            lot = lot.Substring(2);
        string[] segments = lot.Split('-');
        bool hasRDS = Regex.IsMatch(lot, "[-]?[0-9]{5,}[-]?");
        (reactor, rds) = GetReactorAndRDS(defaultReactor, defaultRDS, text, lot, segments, hasRDS);
        (layer, psn) = GetLayerAndPSN(defaultLayer, defaultPSN, segments, hasRDS);
        zone = GetZone(segments);
        employee = defaultEmployee;
    }
    result = new(employee, layer, lot, psn, rds, reactor, zone);
    return result;
}
/// <summary>
/// Reads the recipe and lot from the header page (cursor already positioned by the caller),
/// decodes the lot via GetDescriptor, and stores the results in the instance properties,
/// including the UniqueId ("jobId_lot_fileName").
/// </summary>
private void Set(ILogistics logistics)
{
string lot;
string rds;
string psn;
string recipe;
string reactor;
string employee;
// Scan order matters: the header reads "Recipe ID: ... LotID: ... [".
ScanPast("Recipe ID:");
recipe = GetBefore("LotID:");
recipe = recipe.Replace(";", "");
// Lot ends at the bracketed suffix; "[]" and "[7]" variants are checked first.
if (_Data.Contains("[]"))
lot = GetBefore("[]");
else if (_Data.Contains("[7]"))
lot = GetBefore("[7]");
else
lot = GetBefore("[");
Descriptor descriptor = GetDescriptor(lot);
lot = descriptor.Lot;
psn = descriptor.PSN;
rds = descriptor.RDS;
reactor = descriptor.Reactor;
employee = descriptor.Employee;
Lot = lot;
PSN = psn;
RDS = rds;
Recipe = recipe;
Reactor = reactor;
Employee = employee;
UniqueId = string.Format("{0}_{1}_{2}", logistics.JobID, lot, Path.GetFileNameWithoutExtension(logistics.ReportFullPath));
}
/// <summary>
/// Parses the lot-summary page: date, logistics fields (via Set), the set of wafer
/// slots (marked with '*'), and the Min/Max/Average/Std Dev statistics rows.
/// </summary>
/// <param name="slots">receives one empty Detail list per slot key (e.g. "*01"); lists are populated later by Parse</param>
/// <exception cref="Exception">Thrown when <paramref name="headerFileName"/> is not in <paramref name="pages"/>.</exception>
private void ParseLotSummary(IFileRead fileRead, ILogistics logistics, string headerFileName, Dictionary<string, string> pages, Dictionary<string, List<Detail>> slots)
{
// NOTE(review): empty guard only references the fileRead parameter — confirm intent.
if (fileRead is null)
{ }
_I = 0;
ParseErrorText = string.Empty;
if (!pages.ContainsKey(headerFileName))
throw new Exception();
_I = 0;
_Data = pages[headerFileName];
ScanPast("Date:");
string dateTime = GetToEOL();
Date = DateTime.Parse(dateTime);
Set(logistics);
// determine number of wafers and their slot numbers
_Log.Debug(_Data.Substring(_I));
string slot;
string toEOL;
int slotCount = _Data.Substring(_I).Split('*').Length - 1;
_Log.Debug($"****HeaderFile - Slot Count: {slotCount}.");
for (int i = 0; i < slotCount; i++)
{
ScanPast("*");
toEOL = GetToEOL(false);
// slot key is '*' plus the two characters after it, e.g. "*01"
slot = string.Concat("*", toEOL.Substring(0, 2));
if (!slots.ContainsKey(slot))
slots.Add(slot, new List<Detail>());
}
_Log.Debug($"****HeaderFile - Slots:");
_Log.Debug(slots);
// Each statistics row has nine columns; FixToEolArray repairs columns that ran together.
ScanPast("Min:");
string[] toEol1 = GetToEOL(false).Trim().Split(' ');
_Log.Debug($"****HeaderFile - toEol1 Count: {toEol1.Length}.");
FixToEolArray(ref toEol1);
LPDCountMin = toEol1[0].Trim();
LPDCM2Min = toEol1[1].Trim();
AreaCountMin = toEol1[2].Trim();
AreaTotalMin = toEol1[3].Trim();
ScratchCountMin = toEol1[4].Trim();
ScratchTotalMin = toEol1[5].Trim();
SumOfDefectsMin = toEol1[6].Trim();
HazeRegionMin = toEol1[7].Trim();
HazeAverageMin = toEol1[8].Trim();
ScanPast("Max:");
string[] toEol2 = GetToEOL(false).Trim().Split(' ');
_Log.Debug($"****HeaderFile - toEol2 Count: {toEol2.Length}.");
FixToEolArray(ref toEol2);
LPDCountMax = toEol2[0].Trim();
LPDCM2Max = toEol2[1].Trim();
AreaCountMax = toEol2[2].Trim();
AreaTotalMax = toEol2[3].Trim();
ScratchCountMax = toEol2[4].Trim();
ScratchTotalMax = toEol2[5].Trim();
SumOfDefectsMax = toEol2[6].Trim();
HazeRegionMax = toEol2[7].Trim();
HazeAverageMax = toEol2[8].Trim();
ScanPast("Average:");
string[] toEol3 = GetToEOL(false).Trim().Split(' ');
_Log.Debug($"****HeaderFile - toEol3 Count: {toEol3.Length}.");
FixToEolArray(ref toEol3);
LPDCountAvg = toEol3[0].Trim();
LPDCM2Avg = toEol3[1].Trim();
AreaCountAvg = toEol3[2].Trim();
AreaTotalAvg = toEol3[3].Trim();
ScratchCountAvg = toEol3[4].Trim();
ScratchTotalAvg = toEol3[5].Trim();
SumOfDefectsAvg = toEol3[6].Trim();
HazeRegionAvg = toEol3[7].Trim();
HazeAverageAvg = toEol3[8].Trim();
ScanPast("Std Dev:");
string[] toEol4 = GetToEOL(false).Trim().Split(' ');
_Log.Debug($"****HeaderFile - toEol4 Count: {toEol4.Length}.");
FixToEolArray(ref toEol4);
LPDCountStdDev = toEol4[0].Trim();
LPDCM2StdDev = toEol4[1].Trim();
AreaCountStdDev = toEol4[2].Trim();
AreaTotalStdDev = toEol4[3].Trim();
ScratchCountStdDev = toEol4[4].Trim();
ScratchTotalStdDev = toEol4[5].Trim();
SumOfDefectsStdDev = toEol4[6].Trim();
HazeRegionStdDev = toEol4[7].Trim();
HazeAverageStdDev = toEol4[8].Trim();
}
/// <summary>
/// Parses one wafer-summary page into a Detail record by scanning the labeled
/// fields in their fixed order. The page text comes from <paramref name="pages"/>.
/// </summary>
/// <exception cref="Exception">Thrown when <paramref name="waferFileName"/> is not in <paramref name="pages"/>.</exception>
private Detail ParseWaferSummary(string waferFileName, Dictionary<string, string> pages)
{
Detail result = new() { Data = "*Data*", i = -1, };
_I = 0;
List<string> stringList = new();
result.HeaderUniqueId = UniqueId;
result.Id = 0;
result.Title = null;
if (!pages.ContainsKey(waferFileName))
throw new Exception();
_I = 0;
_Data = pages[waferFileName];
ScanPast("Date:");
result.Date = GetToEOL();
ScanPast("ID#");
result.Slot = GetToEOL();
// Truncate over-long slot labels.
if (result.Slot.Length > 5)
result.Slot = string.Concat(result.Slot.Substring(0, 5), "... - ***");
//result.Slot = result.Slot.Replace("*", "");
ScanPast("Comments:");
result.Comments = GetToEOL();
ScanPast("Sort:");
result.Sort = GetToEOL();
ScanPast("LPD Count:");
result.LPDCount = GetToEOL();
ScanPast("LPD / cm2:");
result.LPDCM2 = GetToEOL();
// Collect the variable number of "Bin N:" lines (up to 8).
while (GetBefore(":").Contains("Bin"))
stringList.Add(GetToEOL());
if (stringList.Count >= 1)
result.Bin1 = stringList[0];
if (stringList.Count >= 2)
result.Bin2 = stringList[1];
if (stringList.Count >= 3)
result.Bin3 = stringList[2];
if (stringList.Count >= 4)
result.Bin4 = stringList[3];
if (stringList.Count >= 5)
result.Bin5 = stringList[4];
if (stringList.Count >= 6)
result.Bin6 = stringList[5];
if (stringList.Count >= 7)
result.Bin7 = stringList[6];
if (stringList.Count >= 8)
result.Bin8 = stringList[7];
// The loop above already consumed the next label (e.g. "Mean:"), so read its value directly.
result.Mean = GetToEOL();
ScanPast("Std Dev:");
result.StdDev = GetToEOL();
ScanPast("Area Count:");
result.AreaCount = GetToEOL();
ScanPast("Area Total:");
result.AreaTotal = GetToEOL();
ScanPast("Scratch Count:");
result.ScratchCount = GetToEOL();
ScanPast("Scratch Total:");
result.ScratchTotal = GetToEOL();
ScanPast("Sum of All Defects:");
result.SumOfDefects = GetToEOL();
ScanPast("Haze Region:");
result.HazeRegion = GetToEOL();
ScanPast("Haze Average:");
result.HazeAverage = GetToEOL();
ScanPast("Haze Peak:");
result.HazePeak = GetToEOL();
// Several values share a line, so GetBefore is used to split them.
ScanPast("Laser:");
result.Laser = GetBefore("Gain:");
result.Gain = GetBefore("Diameter:");
result.Diameter = GetToEOL();
ScanPast("Thresh:");
result.Thresh = GetBefore("Exclusion:");
result.Exclusion = GetToEOL();
ScanPast("Haze Rng:");
result.HazeRng = GetBefore("Thruput:");
result.Thruput = GetToEOL();
ScanPast("Recipe ID:");
result.Recipe = GetToEOL();
// UniqueId suffix is the slot number with '*' and leading zeros removed.
result.UniqueId = string.Format("{0}_{1}", UniqueId, result.Slot.Replace("*", string.Empty).TrimStart('0'));
return result;
}
/// <summary>
/// Convert the raw data file to parsable file format - in this case from PCL to PDF.
/// Skipped when the PDF already exists next to the source file.
/// </summary>
/// <param name="ghostPCLFileName">path to the GhostPCL executable that performs the conversion</param>
/// <param name="logistics">logistics record whose ReportFullPath is the PCL source file</param>
/// <returns>full path of the PDF (source path with a .pdf extension)</returns>
/// <exception cref="Exception">Thrown when the PDF was not created.</exception>
private static string ConvertSourceFileToPdf(string ghostPCLFileName, Logistics logistics)
{
    string result = Path.ChangeExtension(logistics.ReportFullPath, ".pdf");
    if (!File.Exists(result))
    {
        string arguments = string.Concat("-dSAFER -dBATCH -dNOPAUSE -sOutputFile=\"", result, "\" -sDEVICE=pdfwrite \"", logistics.ReportFullPath, "\"");
        // Process is IDisposable; dispose it even if the 30s wait times out
        // (the File.Exists check below still decides success).
        using (Process process = Process.Start(ghostPCLFileName, arguments))
        {
            _ = process.WaitForExit(30000);
        }
        if (!File.Exists(result))
            throw new Exception("PDF file wasn't created");
    }
    return result;
}
/// <summary>
/// Orchestrates the whole extraction: converts the PCL report to PDF, obtains
/// per-page text (from existing *_N.txt files or by splitting the PDF with pdfbox),
/// parses the lot summary and each wafer page, renames the page files to
/// *_data.pdf / *_sNN_data.pdf / *_image.pdf, records missing slots, and fills
/// <paramref name="fileInfoCollection"/> with every file produced or consumed.
/// </summary>
private void Parse(IFileRead fileRead, Logistics logistics, List<FileInfo> fileInfoCollection, string ghostPCLFileName)
{
object item;
string pageText;
string pagePDFFile;
string pageTextFile;
List<string> sourceFiles = new();
List<string> missingSlots = new();
Dictionary<string, string> pages = new();
Dictionary<string, List<Detail>> slots = new();
string sourcePath = Path.GetDirectoryName(logistics.ReportFullPath);
string sourceFileNamePdf = ConvertSourceFileToPdf(ghostPCLFileName, logistics);
sourceFiles.Add(sourceFileNamePdf);
string sourceFileNameWithoutExtension = Path.GetFileNameWithoutExtension(logistics.ReportFullPath);
// Prefer page text extracted on a previous run (<name>_N.txt next to <name>_N.pdf).
string[] txtFiles = Directory.GetFiles(sourcePath, $"{sourceFileNameWithoutExtension}_*.txt", SearchOption.TopDirectoryOnly);
if (txtFiles.Any())
{
foreach (string txtFile in txtFiles)
{
sourceFiles.Add(txtFile);
pageText = File.ReadAllText(txtFile);
pagePDFFile = Path.ChangeExtension(txtFile, ".pdf");
if (!File.Exists(pagePDFFile))
continue;
pages.Add(pagePDFFile, pageText);
}
}
// Otherwise split the PDF into pages with pdfbox (IKVM Java interop) and strip the text.
if (!pages.Any())
{
java.io.File file = new(sourceFileNamePdf);
org.apache.pdfbox.util.Splitter splitter = new();
org.apache.pdfbox.pdmodel.PDDocument pdDocument = org.apache.pdfbox.pdmodel.PDDocument.load(file);
java.util.List list = splitter.split(pdDocument);
java.util.ListIterator iterator = list.listIterator();
org.apache.pdfbox.util.PDFTextStripper dataStripper = new();
for (short i = 1; i < short.MaxValue; i++)
{
if (!iterator.hasNext())
break;
item = iterator.next();
pagePDFFile = string.Concat(sourcePath, @"\", sourceFileNameWithoutExtension, "_", i, ".pdf");
pageTextFile = Path.ChangeExtension(pagePDFFile, ".txt");
if (File.Exists(pageTextFile))
{
// Text already extracted; reuse it and close the split page document.
pageText = File.ReadAllText(pageTextFile);
sourceFiles.Add(pageTextFile);
if (item is not org.apache.pdfbox.pdmodel.PDDocument pd)
continue;
pd.close();
}
else if (File.Exists(pagePDFFile))
{
// Page PDF exists from a previous run; strip its text.
org.apache.pdfbox.pdmodel.PDDocument document = org.apache.pdfbox.pdmodel.PDDocument.load(pagePDFFile);
pageText = dataStripper.getText(document);
document.close();
sourceFiles.Add(pagePDFFile);
if (item is not org.apache.pdfbox.pdmodel.PDDocument pd)
continue;
pd.close();
}
else
{
// First run: save the split page, strip its text, and persist the text file.
if (item is not org.apache.pdfbox.pdmodel.PDDocument pd)
continue;
pageText = dataStripper.getText(pd);
pd.save(pagePDFFile);
sourceFiles.Add(pagePDFFile);
pd.close();
File.WriteAllText(pageTextFile, pageText);
sourceFiles.Add(pageTextFile);
}
pages.Add(pagePDFFile, pageText);
}
pdDocument.close();
}
// parse lot summary — the lot-summary page is the last page (<name>_<pageCount>.pdf)
_Log.Debug($"****ParseData - Parsing lot summary");
List<Tuple<string, string>> pageMapping = new();
string headerFileName = string.Concat(sourcePath, @"\", sourceFileNameWithoutExtension, "_", pages.Count, ".pdf");
ParseLotSummary(fileRead, logistics, headerFileName, pages, slots);
// Parse every wafer page; pages that are blank, have a mismatched recipe, or an
// unknown slot are mapped to an empty target name and reported as missing.
foreach (KeyValuePair<string, string> keyValuePair in pages)
{
if (keyValuePair.Key == headerFileName)
continue;
if (string.IsNullOrEmpty(keyValuePair.Value.Trim()))
{
pageMapping.Add(new Tuple<string, string>(keyValuePair.Key, string.Empty));
continue;
}
// NOTE(review): defensive check — always true while iterating pages itself.
if (!pages.ContainsKey(keyValuePair.Key))
throw new Exception();
Detail dataFile = ParseWaferSummary(keyValuePair.Key, pages);
if (string.IsNullOrEmpty(dataFile.Recipe) || dataFile.Recipe != Recipe)
{
missingSlots.Add(keyValuePair.Key);
pageMapping.Add(new Tuple<string, string>(keyValuePair.Key, string.Empty));
continue;
}
if (!slots.ContainsKey(dataFile.Slot))
{
missingSlots.Add(keyValuePair.Key);
pageMapping.Add(new Tuple<string, string>(keyValuePair.Key, string.Empty));
continue;
}
pageMapping.Add(new Tuple<string, string>(keyValuePair.Key, string.Concat(sourcePath, @"\", sourceFileNameWithoutExtension, "_", dataFile.Slot.Replace('*', 's'), "_data.pdf")));
slots[dataFile.Slot].Add(dataFile);
}
// Rename the header page to <name>_data.pdf (skip if the target already exists).
string checkFileName = string.Concat(sourcePath, @"\", sourceFileNameWithoutExtension, "_data.pdf");
if (!File.Exists(checkFileName))
{
File.Move(headerFileName, checkFileName);
_ = sourceFiles.Remove(headerFileName);
sourceFiles.Add(checkFileName);
}
checkFileName = string.Empty;
// Walk the page mapping backwards: each data page is renamed to its slot target;
// the unmapped page that precedes it is treated as that wafer's image page.
for (int i = pageMapping.Count - 1; i > -1; i--)
{
if (!string.IsNullOrEmpty(pageMapping[i].Item2))
{
checkFileName = pageMapping[i].Item2;
if (!File.Exists(checkFileName))
{
File.Move(pageMapping[i].Item1, checkFileName);
_ = sourceFiles.Remove(pageMapping[i].Item1);
sourceFiles.Add(checkFileName);
}
}
else if (!string.IsNullOrEmpty(checkFileName))
{
//if (i == 0 || !string.IsNullOrEmpty(pageMapping[i - 1].Item2))
//{
checkFileName = checkFileName.Replace("_data.pdf", "_image.pdf");
if (!File.Exists(checkFileName))
{
File.Move(pageMapping[i].Item1, checkFileName);
_ = sourceFiles.Remove(pageMapping[i].Item1);
sourceFiles.Add(checkFileName);
}
//}
checkFileName = string.Empty;
}
}
// Copy parsed details into _Details; slots that never got a detail are reported missing.
foreach (KeyValuePair<string, List<Detail>> keyValuePair in slots)
{
if (!keyValuePair.Value.Any() || keyValuePair.Value[0] is null)
missingSlots.Add(string.Concat("Slot ", keyValuePair.Key, ") is missing."));
else
{
foreach (Detail data in keyValuePair.Value)
_Details.Add(data);
}
}
if (missingSlots.Any())
{
string missingSlotsFile = string.Concat(sourcePath, @"\", sourceFileNameWithoutExtension, "_MissingSlots.txt");
File.WriteAllLines(missingSlotsFile, missingSlots);
sourceFiles.Add(missingSlotsFile);
}
//for (int i = 0; i < dataFiles.Count; i++)
// dataFiles[i].Date = DateTime.Parse(dataFiles[i].Date).ToString();
foreach (string sourceFile in sourceFiles)
fileInfoCollection.Add(new FileInfo(sourceFile));
fileInfoCollection.Add(logistics.FileInfo);
}
#nullable enable
/// <summary>
/// Deserializes each JSON object element back into a <see cref="Description"/>,
/// skipping nulls; throws when an element is not a JSON object.
/// </summary>
internal static List<Description> GetDescriptions(JsonElement[] jsonElements)
{
    // Numbers were serialized as strings, so allow reading them back from strings.
    JsonSerializerOptions options = new() { NumberHandling = JsonNumberHandling.AllowReadingFromString | JsonNumberHandling.WriteAsString };
    List<Description> results = new();
    foreach (JsonElement jsonElement in jsonElements)
    {
        if (jsonElement.ValueKind != JsonValueKind.Object)
            throw new Exception();
        Description? description = JsonSerializer.Deserialize<Description>(jsonElement.ToString(), options);
        if (description is not null)
            results.Add(description);
    }
    return results;
}
}