Add Transmission Control Protocol file handling and update PCL serialization
- Introduced FileRead and Record classes to handle file reading over the Transmission Control Protocol connection.
- Added JSON serialization attributes to Description, Detail, and related classes for more reliable data handling.
- Implemented methods for reading and processing files, including network stream management.
- Updated unit tests to cover the new functionality.
- Added new PDSF file handling classes and integrated them into the project structure.
- Refactored existing code to use source generation for JSON serialization, improving performance and maintainability.
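For context on the last point: the handlers move from reflection-based JsonSerializer calls to compile-time source-generated contexts (the DescriptionSourceGenerationContext and related types in the diff below). A minimal sketch of that pattern, assuming .NET 6+ System.Text.Json; the type and context names here are illustrative placeholders, not the ones in this repository:

using System.Text.Json;
using System.Text.Json.Serialization;

namespace SourceGenerationSketch;

#nullable enable

// Placeholder DTO standing in for a handler type such as pcl.Description.
public class SampleDescription
{
    [JsonPropertyName("Reactor")] public string Reactor { get; set; } = string.Empty;
    [JsonPropertyName("Recipe")] public string Recipe { get; set; } = string.Empty;
}

// The source generator emits the (de)serialization code for this context at compile time,
// so no reflection is needed at runtime.
[JsonSourceGenerationOptions(WriteIndented = true, NumberHandling = JsonNumberHandling.AllowReadingFromString | JsonNumberHandling.WriteAsString)]
[JsonSerializable(typeof(SampleDescription))]
internal partial class SampleDescriptionSourceGenerationContext : JsonSerializerContext
{
}

internal static class Usage
{
    // Deserialize through the generated type info instead of the reflection-based overload.
    internal static SampleDescription? Parse(string json) =>
        JsonSerializer.Deserialize(json, SampleDescriptionSourceGenerationContext.Default.SampleDescription);
}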
@@ -21,9 +21,11 @@ public class CellInstanceConnectionName
|
||||
nameof(OpenInsight) => new OpenInsight.FileRead(smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null),
|
||||
nameof(OpenInsightMetrologyViewer) => new OpenInsightMetrologyViewer.FileRead(smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null),
|
||||
nameof(OpenInsightMetrologyViewerAttachments) => new OpenInsightMetrologyViewerAttachments.FileRead(smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null),
|
||||
nameof(pdsf) => new pdsf.FileRead(smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null),
|
||||
nameof(pcl) => new pcl.FileRead(smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null),
|
||||
nameof(Processed) => new Processed.FileRead(smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null),
|
||||
nameof(SPaCe) => new SPaCe.FileRead(smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null),
|
||||
nameof(TransmissionControlProtocol) => new TransmissionControlProtocol.FileRead(smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null),
|
||||
_ => throw new Exception($"\"{cellInstanceConnectionName}\" not mapped")
|
||||
};
|
||||
return result;
|
||||
|
@@ -206,10 +206,11 @@ public class FileRead : Shared.FileRead, IFileRead
|
||||
private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
|
||||
{
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
|
||||
ProcessDataStandardFormat processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath);
|
||||
string[] lines = File.ReadAllLines(reportFullPath);
|
||||
ProcessDataStandardFormat processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath, lines);
|
||||
_Logistics = new Logistics(reportFullPath, processDataStandardFormat);
|
||||
SetFileParameterLotIDToLogisticsMID();
|
||||
JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(processDataStandardFormat);
|
||||
JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(reportFullPath, lines, processDataStandardFormat);
|
||||
List<pcl.Description> descriptions = pcl.ProcessData.GetDescriptions(jsonElements);
|
||||
Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
|
||||
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics), tests, jsonElements, new List<FileInfo>());
|
||||
|
@@ -110,7 +110,7 @@ public class FileRead : Shared.FileRead, IFileRead
|
||||
return results;
|
||||
}
|
||||
|
||||
private void SaveOpenInsightFile(string reportFullPath, DateTime dateTime, ProcessDataStandardFormat processDataStandardFormat, List<pcl.Description> descriptions, Test[] tests)
|
||||
private void SaveOpenInsightFile(string reportFullPath, DateTime dateTime, List<pcl.Description> descriptions, Test[] tests)
|
||||
{
|
||||
string duplicateFile;
|
||||
bool isDummyRun = false;
|
||||
@@ -143,7 +143,7 @@ public class FileRead : Shared.FileRead, IFileRead
|
||||
else
|
||||
duplicateFile = Path.Combine(duplicateDirectory, $"{$"Viewer {subgroupId}".TrimEnd()} {fileName.Replace("Viewer", string.Empty)}");
|
||||
string weekOfYear = _Calendar.GetWeekOfYear(_Logistics.DateTimeFromSequence, CalendarWeekRule.FirstDay, DayOfWeek.Sunday).ToString("00");
|
||||
FromIQS.Save(_OpenInsightApiECDirectory, _Logistics, reportFullPath, processDataStandardFormat, descriptions.First(), subgroupId, weekOfYear);
|
||||
FromIQS.Save(_OpenInsightApiECDirectory, _Logistics, reportFullPath, descriptions.First(), subgroupId, weekOfYear);
|
||||
}
|
||||
if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
|
||||
{
|
||||
@@ -155,14 +155,15 @@ public class FileRead : Shared.FileRead, IFileRead
|
||||
private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
|
||||
{
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
|
||||
ProcessDataStandardFormat processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath);
|
||||
string[] lines = File.ReadAllLines(reportFullPath);
|
||||
ProcessDataStandardFormat processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath, lines);
|
||||
_Logistics = new Logistics(reportFullPath, processDataStandardFormat);
|
||||
SetFileParameterLotIDToLogisticsMID();
|
||||
JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(processDataStandardFormat);
|
||||
JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(reportFullPath, lines, processDataStandardFormat);
|
||||
List<pcl.Description> descriptions = pcl.ProcessData.GetDescriptions(jsonElements);
|
||||
Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
|
||||
if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
|
||||
SaveOpenInsightFile(reportFullPath, dateTime, processDataStandardFormat, descriptions, tests);
|
||||
SaveOpenInsightFile(reportFullPath, dateTime, descriptions, tests);
|
||||
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics), tests, jsonElements, new List<FileInfo>());
|
||||
return results;
|
||||
}
|
||||
|
@@ -325,74 +325,18 @@ public class FromIQS
|
||||
return new(result, count, commandText);
|
||||
}
|
||||
|
||||
private static string GetJson(Logistics logistics, ProcessDataStandardFormat processDataStandardFormat, pcl.Description description)
|
||||
internal static void Save(string openInsightApiECDirectory, Logistics logistics, string reportFullPath, pcl.Description description, long? subGroupId, string weekOfYear)
|
||||
{
|
||||
string result;
|
||||
StringBuilder stringBuilder = new();
|
||||
var @object = new
|
||||
{
|
||||
description.MesEntity,
|
||||
description.Employee,
|
||||
// description.Layer,
|
||||
description.PSN,
|
||||
description.RDS,
|
||||
description.Reactor,
|
||||
description.Recipe,
|
||||
// description.Zone,
|
||||
logistics.DateTimeFromSequence.Ticks
|
||||
};
|
||||
string[] pair;
|
||||
string safeValue;
|
||||
string[] segments;
|
||||
string serializerValue;
|
||||
foreach (string line in processDataStandardFormat.Logistics)
|
||||
{
|
||||
segments = line.Split('\t');
|
||||
if (segments.Length < 2)
|
||||
continue;
|
||||
segments = segments[1].Split(';');
|
||||
_ = stringBuilder.Append('{');
|
||||
foreach (string segment in segments)
|
||||
{
|
||||
pair = segment.Split('=');
|
||||
if (pair.Length != 2 || pair[0].Length < 3)
|
||||
continue;
|
||||
serializerValue = JsonSerializer.Serialize(pair[1]);
|
||||
safeValue = serializerValue.Substring(1, serializerValue.Length - 2);
|
||||
_ = stringBuilder.Append('"').Append(pair[0].Substring(2)).Append('"').Append(':').Append('"').Append(safeValue).Append('"').Append(',');
|
||||
}
|
||||
if (stringBuilder.Length > 0)
|
||||
_ = stringBuilder.Remove(stringBuilder.Length - 1, 1);
|
||||
_ = stringBuilder.Append('}').Append(',');
|
||||
}
|
||||
if (stringBuilder.Length > 0)
|
||||
_ = stringBuilder.Remove(stringBuilder.Length - 1, 1);
|
||||
_ = stringBuilder.Append(']').Append('}');
|
||||
_ = stringBuilder.Insert(0, ",\"Logistics\":[");
|
||||
string json = JsonSerializer.Serialize(@object);
|
||||
_ = stringBuilder.Insert(0, json.Substring(0, json.Length - 1));
|
||||
JsonElement? jsonElement = JsonSerializer.Deserialize<JsonElement>(stringBuilder.ToString());
|
||||
result = jsonElement is null ? "{}" : JsonSerializer.Serialize(jsonElement, new JsonSerializerOptions { WriteIndented = true });
|
||||
return result;
|
||||
}
|
||||
|
||||
internal static void Save(string openInsightApiECDirectory, Logistics logistics, string reportFullPath, ProcessDataStandardFormat processDataStandardFormat, pcl.Description description, long? subGroupId, string weekOfYear)
|
||||
{
|
||||
string checkFile;
|
||||
string fileName = Path.GetFileName(reportFullPath);
|
||||
string json = GetJson(logistics, processDataStandardFormat, description);
|
||||
string? ecPathRoot = Path.GetPathRoot(openInsightApiECDirectory);
|
||||
bool ecExists = ecPathRoot is not null && Directory.Exists(ecPathRoot);
|
||||
string weekYear = $"{logistics.DateTimeFromSequence:yyyy}_Week_{weekOfYear}";
|
||||
string ecDirectory = Path.Combine(openInsightApiECDirectory, weekYear, $"-{description.PSN}", $"-{description.Reactor}", $"-{description.RDS}", $"-{subGroupId}");
|
||||
if (ecExists && !Directory.Exists(ecDirectory))
|
||||
_ = Directory.CreateDirectory(ecDirectory);
|
||||
checkFile = Path.Combine(ecDirectory, fileName);
|
||||
string checkFile = Path.Combine(ecDirectory, fileName);
|
||||
if (ecExists && !File.Exists(checkFile))
|
||||
File.Copy(reportFullPath, checkFile);
|
||||
checkFile = Path.Combine(ecDirectory, $"{logistics.DateTimeFromSequence.Ticks}.json");
|
||||
if (ecExists && !File.Exists(checkFile))
|
||||
File.WriteAllText(checkFile, json);
|
||||
}
|
||||
|
||||
}
|
@@ -110,10 +110,10 @@ public class FileRead : Shared.FileRead, IFileRead
|
||||
return results;
|
||||
}
|
||||
|
||||
private void SendData(string reportFullPath, DateTime dateTime, List<pcl.Description> descriptions)
|
||||
private void SendData(string reportFullPath, DateTime dateTime, JsonElement[] jsonElements, List<pcl.Description> descriptions)
|
||||
{
|
||||
string checkDirectory;
|
||||
WSRequest wsRequest = new(this, _Logistics, descriptions);
|
||||
WSRequest wsRequest = new(this, _Logistics, jsonElements, descriptions);
|
||||
int weekOfYear = _Calendar.GetWeekOfYear(dateTime, CalendarWeekRule.FirstDay, DayOfWeek.Sunday);
|
||||
string directory = Path.Combine(_OpenInsightMetrologyViewerFileShare, dateTime.Year.ToString(), $"WW{weekOfYear:00}");
|
||||
checkDirectory = Path.Combine(directory, _Logistics.Sequence.ToString());
|
||||
@@ -139,14 +139,15 @@ public class FileRead : Shared.FileRead, IFileRead
|
||||
private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
|
||||
{
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
|
||||
ProcessDataStandardFormat processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath);
|
||||
string[] lines = File.ReadAllLines(reportFullPath);
|
||||
ProcessDataStandardFormat processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath, lines);
|
||||
_Logistics = new Logistics(reportFullPath, processDataStandardFormat);
|
||||
SetFileParameterLotIDToLogisticsMID();
|
||||
JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(processDataStandardFormat);
|
||||
JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(reportFullPath, lines, processDataStandardFormat);
|
||||
List<pcl.Description> descriptions = pcl.ProcessData.GetDescriptions(jsonElements);
|
||||
Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
|
||||
if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
|
||||
SendData(reportFullPath, dateTime, descriptions);
|
||||
SendData(reportFullPath, dateTime, jsonElements, descriptions);
|
||||
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics), tests, jsonElements, new List<FileInfo>());
|
||||
return results;
|
||||
}
|
||||
|
@@ -5,6 +5,7 @@ using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Text;
|
||||
using System.Text.Json;
|
||||
|
||||
namespace Adaptation.FileHandlers.OpenInsightMetrologyViewer;
|
||||
|
||||
@@ -70,7 +71,7 @@ public class WSRequest
|
||||
[Obsolete("For json")] public WSRequest() { }
|
||||
|
||||
#pragma warning disable IDE0060
|
||||
internal WSRequest(IFileRead fileRead, Logistics logistics, List<pcl.Description> descriptions, string processDataStandardFormat = null)
|
||||
internal WSRequest(IFileRead fileRead, Logistics logistics, JsonElement[] jsonElements, List<pcl.Description> descriptions, string processDataStandardFormat = null)
|
||||
#pragma warning restore IDE0060
|
||||
{
|
||||
i = -1;
|
||||
@@ -290,14 +291,14 @@ public class WSRequest
|
||||
pdDocument.close();
|
||||
}
|
||||
|
||||
internal static long GetHeaderId(IFileRead fileRead, Logistics logistics, string openInsightMetrologyViewerAPI, string openInsightMetrologyViewerFileShare, int weekOfYear, WS.Results results, List<pcl.Description> descriptions)
|
||||
internal static long GetHeaderId(IFileRead fileRead, Logistics logistics, string openInsightMetrologyViewerAPI, string openInsightMetrologyViewerFileShare, int weekOfYear, WS.Results results, JsonElement[] jsonElements, List<pcl.Description> descriptions)
|
||||
{
|
||||
long result;
|
||||
if (results is not null && results.HeaderId is not null)
|
||||
result = results.HeaderId.Value;
|
||||
else
|
||||
{
|
||||
WSRequest wsRequest = new(fileRead, logistics, descriptions);
|
||||
WSRequest wsRequest = new(fileRead, logistics, jsonElements, descriptions);
|
||||
string directory = Path.Combine(openInsightMetrologyViewerFileShare, logistics.DateTimeFromSequence.Year.ToString(), $"WW{weekOfYear:00}");
|
||||
(_, WS.Results wsResults) = WS.SendData(openInsightMetrologyViewerAPI, logistics.Sequence, directory, wsRequest);
|
||||
if (wsResults.Success is null || !wsResults.Success.Value)
|
||||
|
@@ -135,7 +135,7 @@ public class FileRead : Shared.FileRead, IFileRead
|
||||
return result;
|
||||
}
|
||||
|
||||
private void PostOpenInsightMetrologyViewerAttachments(List<pcl.Description> descriptions)
|
||||
private void PostOpenInsightMetrologyViewerAttachments(JsonElement[] jsonElements, List<pcl.Description> descriptions)
|
||||
{
|
||||
Shared.Metrology.WS.Results? results;
|
||||
string jobIdDirectory = Path.Combine(Path.GetDirectoryName(_FileConnectorConfiguration.AlternateTargetFolder) ?? throw new Exception(), _Logistics.JobID);
|
||||
@@ -151,7 +151,7 @@ public class FileRead : Shared.FileRead, IFileRead
|
||||
results = wsResults[0];
|
||||
}
|
||||
int weekOfYear = _Calendar.GetWeekOfYear(_Logistics.DateTimeFromSequence, CalendarWeekRule.FirstDay, DayOfWeek.Sunday);
|
||||
long headerId = !_IsEAFHosted ? -1 : OpenInsightMetrologyViewer.WSRequest.GetHeaderId(this, _Logistics, _OpenInsightMetrologyViewerAPI, _OpenInsightMetrologyViewerFileShare, weekOfYear, results, descriptions);
|
||||
long headerId = !_IsEAFHosted ? -1 : OpenInsightMetrologyViewer.WSRequest.GetHeaderId(this, _Logistics, _OpenInsightMetrologyViewerAPI, _OpenInsightMetrologyViewerFileShare, weekOfYear, results, jsonElements, descriptions);
|
||||
string? headerIdDirectory = GetHeaderIdDirectory(headerId);
|
||||
if (string.IsNullOrEmpty(headerIdDirectory))
|
||||
throw new Exception($"Didn't find header id directory <{headerId}>");
|
||||
@@ -163,14 +163,15 @@ public class FileRead : Shared.FileRead, IFileRead
|
||||
if (dateTime == DateTime.MinValue)
|
||||
throw new ArgumentNullException(nameof(dateTime));
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
|
||||
ProcessDataStandardFormat processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath);
|
||||
string[] lines = File.ReadAllLines(reportFullPath);
|
||||
ProcessDataStandardFormat processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath, lines);
|
||||
_Logistics = new Logistics(reportFullPath, processDataStandardFormat);
|
||||
SetFileParameterLotIDToLogisticsMID();
|
||||
JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(processDataStandardFormat);
|
||||
JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(reportFullPath, lines, processDataStandardFormat);
|
||||
List<pcl.Description> descriptions = pcl.ProcessData.GetDescriptions(jsonElements);
|
||||
Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
|
||||
if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
|
||||
PostOpenInsightMetrologyViewerAttachments(descriptions);
|
||||
PostOpenInsightMetrologyViewerAttachments(jsonElements, descriptions);
|
||||
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics), tests, jsonElements, new List<FileInfo>());
|
||||
return results;
|
||||
}
|
||||
|
@@ -109,7 +109,7 @@ public class FileRead : Shared.FileRead, IFileRead
|
||||
}
|
||||
|
||||
#pragma warning disable IDE0060
|
||||
private void DirectoryMove(string reportFullPath, DateTime dateTime, List<pcl.Description> descriptions)
|
||||
private void DirectoryMove(string reportFullPath, DateTime dateTime, JsonElement[] jsonElements, List<pcl.Description> descriptions)
|
||||
#pragma warning restore IDE0060
|
||||
{
|
||||
FileInfo fileInfo = new(reportFullPath);
|
||||
@@ -122,7 +122,7 @@ public class FileRead : Shared.FileRead, IFileRead
|
||||
throw new Exception("Didn't find directory by logistics sequence");
|
||||
if (fileInfo.Exists && fileInfo.LastWriteTime < fileInfo.CreationTime)
|
||||
File.SetLastWriteTime(reportFullPath, fileInfo.CreationTime);
|
||||
OpenInsightMetrologyViewer.WSRequest wsRequest = new(this, _Logistics, descriptions);
|
||||
OpenInsightMetrologyViewer.WSRequest wsRequest = new(this, _Logistics, jsonElements, descriptions);
|
||||
JsonSerializerOptions jsonSerializerOptions = new() { WriteIndented = true };
|
||||
string json = JsonSerializer.Serialize(wsRequest, wsRequest.GetType(), jsonSerializerOptions);
|
||||
string directoryName = $"{Path.GetFileName(matchDirectories[0]).Split(new string[] { logisticsSequence }, StringSplitOptions.None)[0]}{_Logistics.DateTimeFromSequence:yyyy-MM-dd_hh;mm_tt_}{DateTime.Now.Ticks - _Logistics.Sequence}";
|
||||
@@ -166,23 +166,24 @@ public class FileRead : Shared.FileRead, IFileRead
|
||||
private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
|
||||
{
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
|
||||
ProcessDataStandardFormat processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath);
|
||||
string[] lines = File.ReadAllLines(reportFullPath);
|
||||
ProcessDataStandardFormat processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath, lines);
|
||||
_Logistics = new Logistics(reportFullPath, processDataStandardFormat);
|
||||
SetFileParameterLotIDToLogisticsMID();
|
||||
JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(processDataStandardFormat);
|
||||
JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(reportFullPath, lines, processDataStandardFormat);
|
||||
List<pcl.Description> descriptions = pcl.ProcessData.GetDescriptions(jsonElements);
|
||||
Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
|
||||
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics), tests, jsonElements, new List<FileInfo>());
|
||||
if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
|
||||
DirectoryMove(reportFullPath, dateTime, descriptions);
|
||||
DirectoryMove(reportFullPath, dateTime, jsonElements, descriptions);
|
||||
else if (!_IsEAFHosted)
|
||||
{
|
||||
OpenInsightMetrologyViewer.WSRequest wsRequest = new(this, _Logistics, descriptions);
|
||||
OpenInsightMetrologyViewer.WSRequest wsRequest = new(this, _Logistics, jsonElements, descriptions);
|
||||
JsonSerializerOptions jsonSerializerOptions = new() { WriteIndented = true };
|
||||
string json = JsonSerializer.Serialize(wsRequest, wsRequest.GetType(), jsonSerializerOptions);
|
||||
string check = JsonSerializer.Serialize(wsRequest, wsRequest.GetType(), jsonSerializerOptions);
|
||||
string jsonFileName = Path.ChangeExtension(reportFullPath, ".json");
|
||||
string historicalText = File.ReadAllText(jsonFileName);
|
||||
if (json != historicalText)
|
||||
if (check != historicalText)
|
||||
throw new Exception("File doesn't match historical!");
|
||||
}
|
||||
return results;
|
||||
|
Adaptation/FileHandlers/TransmissionControlProtocol/FileRead.cs (new file, 250 lines)
@@ -0,0 +1,250 @@
|
||||
using Adaptation.Eaf.Management.ConfigurationData.CellAutomation;
|
||||
using Adaptation.Ifx.Eaf.EquipmentConnector.File.Configuration;
|
||||
using Adaptation.Shared;
|
||||
using Adaptation.Shared.Duplicator;
|
||||
using Adaptation.Shared.Methods;
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Diagnostics;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Net.Sockets;
|
||||
using System.Reflection;
|
||||
using System.Text;
|
||||
using System.Text.Json;
|
||||
using System.Threading;
|
||||
|
||||
namespace Adaptation.FileHandlers.TransmissionControlProtocol;
|
||||
|
||||
#nullable enable
|
||||
|
||||
public class FileRead : Shared.FileRead, IFileRead
|
||||
{
|
||||
|
||||
private readonly int _Port;
|
||||
private readonly Timer _Timer;
|
||||
private static Record? _Record;
|
||||
private static long _LastWrite;
|
||||
private readonly string _IPAddress;
|
||||
private readonly string _RawDirectory;
|
||||
private readonly int _DelimiterSeconds;
|
||||
private readonly string[] _DelimiterPatterns;
|
||||
private static readonly object _Lock = new();
|
||||
|
||||
public FileRead(ISMTP smtp, Dictionary<string, string> fileParameter, string cellInstanceName, int? connectionCount, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList<ModelObjectParameterDefinition> modelObjectParameters, string equipmentDictionaryName, Dictionary<string, List<long>> dummyRuns, Dictionary<long, List<Shared.Metrology.WS.Results>> staticRuns, bool useCyclicalForDescription, bool isEAFHosted) :
|
||||
base(new Description(), false, smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null)
|
||||
{
|
||||
_MinFileLength = 10;
|
||||
_NullData = string.Empty;
|
||||
_Logistics = new(this);
|
||||
if (_FileParameter is null)
|
||||
throw new Exception(cellInstanceConnectionName);
|
||||
if (_ModelObjectParameterDefinitions is null)
|
||||
throw new Exception(cellInstanceConnectionName);
|
||||
if (_IsDuplicator)
|
||||
throw new Exception(cellInstanceConnectionName);
|
||||
string sourceFileLocation = fileConnectorConfiguration.SourceFileLocation.TrimEnd(Path.DirectorySeparatorChar, Path.AltDirectorySeparatorChar);
|
||||
string? ipAddress = Path.GetDirectoryName(sourceFileLocation);
|
||||
string port = Path.GetFileName(sourceFileLocation);
|
||||
_Timer = new Timer(Callback, null, Timeout.Infinite, Timeout.Infinite);
|
||||
_Port = int.Parse(port, System.Globalization.CultureInfo.InvariantCulture);
|
||||
_DelimiterPatterns = fileConnectorConfiguration.SourceFileFilter.Split('*');
|
||||
_IPAddress = Path.GetFileName(ipAddress) ?? throw new Exception(sourceFileLocation);
|
||||
_RawDirectory = Path.GetDirectoryName(ipAddress) ?? throw new Exception(sourceFileLocation);
|
||||
DateTime fileAgeThresholdTimeOnly = GetFileAgeThresholdTimeOnly(_FileConnectorConfiguration.FileAgeThreshold);
|
||||
_DelimiterSeconds = fileAgeThresholdTimeOnly.Second;
|
||||
if (Debugger.IsAttached || fileConnectorConfiguration.PreProcessingMode == FileConnectorConfiguration.PreProcessingModeEnum.Process || _FileConnectorConfiguration.FileScanningIntervalInSeconds is null)
|
||||
Callback(null);
|
||||
else
|
||||
{
|
||||
TimeSpan timeSpan = new(DateTime.Now.AddSeconds(_FileConnectorConfiguration.FileScanningIntervalInSeconds.Value).Ticks - DateTime.Now.Ticks);
|
||||
_ = _Timer.Change((long)timeSpan.TotalMilliseconds, Timeout.Infinite);
|
||||
}
|
||||
}
|
||||
|
||||
void IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, Exception exception) => Move(extractResults);
|
||||
|
||||
void IFileRead.WaitForThread() => WaitForThread(thread: null, threadExceptions: null);
|
||||
|
||||
string IFileRead.GetEventDescription()
|
||||
{
|
||||
string result = _Description.GetEventDescription();
|
||||
return result;
|
||||
}
|
||||
|
||||
List<string> IFileRead.GetHeaderNames()
|
||||
{
|
||||
List<string> results = _Description.GetHeaderNames();
|
||||
return results;
|
||||
}
|
||||
|
||||
string[] IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, string to, string from, string resolvedFileLocation, Exception exception)
|
||||
{
|
||||
string[] results = Move(extractResults, to, from, resolvedFileLocation, exception);
|
||||
return results;
|
||||
}
|
||||
|
||||
JsonProperty[] IFileRead.GetDefault()
|
||||
{
|
||||
JsonProperty[] results = _Description.GetDefault(this, _Logistics);
|
||||
return results;
|
||||
}
|
||||
|
||||
Dictionary<string, string> IFileRead.GetDisplayNamesJsonElement()
|
||||
{
|
||||
Dictionary<string, string> results = _Description.GetDisplayNamesJsonElement(this);
|
||||
return results;
|
||||
}
|
||||
|
||||
List<IDescription> IFileRead.GetDescriptions(IFileRead fileRead, List<Test> tests, IProcessData processData)
|
||||
{
|
||||
List<IDescription> results = _Description.GetDescriptions(fileRead, _Logistics, tests, processData);
|
||||
return results;
|
||||
}
|
||||
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.GetExtractResult(string reportFullPath, string eventName) => throw new Exception(string.Concat("See ", nameof(Callback)));
|
||||
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.ReExtract() => throw new Exception(string.Concat("See ", nameof(Callback)));
|
||||
|
||||
private static DateTime GetFileAgeThresholdTimeOnly(string fileAgeThreshold)
|
||||
{
|
||||
DateTime result = DateTime.MinValue;
|
||||
string[] segments = fileAgeThreshold.Split(':');
|
||||
for (int i = 0; i < segments.Length; i++)
|
||||
{
|
||||
result = i switch
|
||||
{
|
||||
0 => result.AddDays(double.Parse(segments[i])),
|
||||
1 => result.AddHours(double.Parse(segments[i])),
|
||||
2 => result.AddMinutes(double.Parse(segments[i])),
|
||||
3 => result.AddSeconds(double.Parse(segments[i])),
|
||||
_ => throw new Exception(),
|
||||
};
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
private static void ReadFiles(log4net.ILog log, FileConnectorConfiguration fileConnectorConfiguration, string ipAddress, string rawDirectory)
|
||||
{
|
||||
List<byte> bytes = new();
|
||||
string[] files = Directory.GetFiles(rawDirectory, $"{ipAddress}-*.raw", SearchOption.TopDirectoryOnly);
|
||||
log.Info($"Read {files.Length} files");
|
||||
foreach (string file in files)
|
||||
{
|
||||
foreach (byte @byte in File.ReadAllBytes(file))
|
||||
bytes.Add(@byte);
|
||||
}
|
||||
if (bytes.Count > 0)
|
||||
{
|
||||
string bytesFile = Path.Combine(fileConnectorConfiguration.TargetFileLocation, $"{ipAddress}-{DateTime.Now.Ticks}{fileConnectorConfiguration.TargetFileName}");
|
||||
File.WriteAllBytes(bytesFile, bytes.ToArray());
|
||||
foreach (string file in files)
|
||||
File.Delete(file);
|
||||
}
|
||||
}
|
||||
|
||||
private static void CreateClient(log4net.ILog log, string ipAddress, int port)
|
||||
{
|
||||
log.Debug(ipAddress);
|
||||
TcpClient tcpClient = new(ipAddress, port);
|
||||
NetworkStream networkStream = tcpClient.GetStream();
|
||||
Type baseType = typeof(NetworkStream);
|
||||
PropertyInfo? propertyInfo = baseType.GetProperty("Socket", BindingFlags.Instance | BindingFlags.NonPublic);
|
||||
_Record = new(binaryReader: new(networkStream), binaryWriter: new(networkStream), networkStream: networkStream, propertyInfo: propertyInfo, readTimes: new());
|
||||
}
|
||||
|
||||
private static byte[] GetBytes(NetworkStream networkStream)
|
||||
{
|
||||
List<byte> results = new();
|
||||
byte[] bytes = new byte[1024];
|
||||
do
|
||||
{
|
||||
int count = networkStream.Read(bytes, 0, bytes.Length);
|
||||
if (count > 0)
|
||||
results.AddRange(bytes.Take(count));
|
||||
}
|
||||
while (networkStream.DataAvailable);
|
||||
return results.ToArray();
|
||||
}
|
||||
|
||||
private void Callback()
|
||||
{
|
||||
if (_Record?.NetworkStream is null || _Record.PropertyInfo is null || _Record.PropertyInfo.GetValue(_Record.NetworkStream) is not Socket socket || !socket.Connected)
|
||||
CreateClient(_Log, _IPAddress, _Port);
|
||||
if (_Record is not null)
|
||||
{
|
||||
TimeSpan timeSpan = new(DateTime.Now.Ticks - _LastWrite);
|
||||
if (_LastWrite == 0 || timeSpan.TotalMinutes > 1)
|
||||
{
|
||||
try
|
||||
{
|
||||
_Record.NetworkStream.WriteByte(Convert.ToByte('\0'));
|
||||
_LastWrite = DateTime.Now.Ticks;
|
||||
}
|
||||
catch (Exception)
|
||||
{ }
|
||||
}
|
||||
}
|
||||
if (_Record?.NetworkStream is not null && _Record.NetworkStream.CanRead && _Record.NetworkStream.DataAvailable)
|
||||
{
|
||||
byte[] bytes = GetBytes(_Record.NetworkStream);
|
||||
_Log.Info($"Read {bytes.Length} bytes");
|
||||
if (bytes.Length > 0)
|
||||
{
|
||||
string path = Path.Combine(_RawDirectory, $"{_IPAddress}-{DateTime.Now.Ticks}.raw");
|
||||
File.WriteAllBytes(path, bytes);
|
||||
string content = Encoding.ASCII.GetString(bytes);
|
||||
_Log.Debug($"Content {content}");
|
||||
foreach (string delimiterPattern in _DelimiterPatterns)
|
||||
{
|
||||
if (content.Contains(delimiterPattern))
|
||||
_Record.ReadTimes.Add(DateTime.Now.Ticks);
|
||||
}
|
||||
if (_Record.ReadTimes.Count > 0)
|
||||
_Record.ReadTimes.Add(DateTime.Now.Ticks);
|
||||
}
|
||||
}
|
||||
if (_Record is not null && _Record.ReadTimes.Count > 0 && _DelimiterSeconds > 0)
|
||||
{
|
||||
TimeSpan? timeSpan = new(DateTime.Now.Ticks - _Record.ReadTimes.Last());
|
||||
if (timeSpan.Value.TotalSeconds > _DelimiterSeconds)
|
||||
{
|
||||
ReadFiles(_Log, _FileConnectorConfiguration, _IPAddress, _RawDirectory);
|
||||
_Record.ReadTimes.Clear();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private void Callback(object? state)
|
||||
{
|
||||
try
|
||||
{
|
||||
lock (_Lock)
|
||||
Callback();
|
||||
}
|
||||
catch (Exception exception)
|
||||
{
|
||||
string subject = string.Concat("Exception:", _CellInstanceConnectionName);
|
||||
string body = string.Concat(exception.Message, Environment.NewLine, Environment.NewLine, exception.StackTrace);
|
||||
try
|
||||
{ _SMTP.SendHighPriorityEmailMessage(subject, body); }
|
||||
catch (Exception) { }
|
||||
}
|
||||
try
|
||||
{
|
||||
if (_FileConnectorConfiguration.FileScanningIntervalInSeconds is null)
|
||||
throw new Exception(_CellInstanceConnectionName);
|
||||
TimeSpan timeSpan = new(DateTime.Now.AddSeconds(_FileConnectorConfiguration.FileScanningIntervalInSeconds.Value).Ticks - DateTime.Now.Ticks);
|
||||
_ = _Timer.Change((long)timeSpan.TotalMilliseconds, Timeout.Infinite);
|
||||
}
|
||||
catch (Exception exception)
|
||||
{
|
||||
string subject = string.Concat("Exception:", _CellInstanceConnectionName);
|
||||
string body = string.Concat(exception.Message, Environment.NewLine, Environment.NewLine, exception.StackTrace);
|
||||
try
|
||||
{ _SMTP.SendHighPriorityEmailMessage(subject, body); }
|
||||
catch (Exception) { }
|
||||
}
|
||||
}
|
||||
|
||||
}
|
@@ -0,0 +1,28 @@
using System.Collections.Generic;
using System.IO;
using System.Net.Sockets;
using System.Reflection;

namespace Adaptation.FileHandlers.TransmissionControlProtocol;

#nullable enable

internal class Record
{

    public BinaryReader BinaryReader { get; }
    public BinaryWriter BinaryWriter { get; }
    public NetworkStream NetworkStream { get; }
    public PropertyInfo? PropertyInfo { get; }
    public List<long> ReadTimes { get; }

    public Record(BinaryReader binaryReader, BinaryWriter binaryWriter, NetworkStream networkStream, PropertyInfo? propertyInfo, List<long> readTimes)
    {
        BinaryReader = binaryReader;
        BinaryWriter = binaryWriter;
        NetworkStream = networkStream;
        PropertyInfo = propertyInfo;
        ReadTimes = readTimes;
    }

}
@@ -4,103 +4,104 @@ using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using System.Text.Json;
|
||||
using System.Text.Json.Serialization;
|
||||
|
||||
namespace Adaptation.FileHandlers.pcl;
|
||||
|
||||
public class Description : IDescription, Shared.Properties.IDescription
|
||||
{
|
||||
|
||||
public int Test { get; set; }
|
||||
public int Count { get; set; }
|
||||
public int Index { get; set; }
|
||||
[JsonPropertyName("EventId")] public int Test { get; set; }
|
||||
[JsonPropertyName("Count")] public int Count { get; set; }
|
||||
[JsonPropertyName("Index")] public int Index { get; set; }
|
||||
//
|
||||
public string EventName { get; set; }
|
||||
public string NullData { get; set; }
|
||||
public string JobID { get; set; }
|
||||
public string Sequence { get; set; }
|
||||
public string MesEntity { get; set; }
|
||||
[JsonPropertyName("MesEntity")] public string MesEntity { get; set; }
|
||||
public string ReportFullPath { get; set; }
|
||||
public string ProcessJobID { get; set; }
|
||||
public string MID { get; set; }
|
||||
//
|
||||
public string Date { get; set; }
|
||||
public string Employee { get; set; }
|
||||
[JsonPropertyName("Date")] public string Date { get; set; }
|
||||
[JsonPropertyName("Employee")] public string Employee { get; set; }
|
||||
public string Lot { get; set; }
|
||||
public string PSN { get; set; }
|
||||
public string Reactor { get; set; }
|
||||
public string Recipe { get; set; }
|
||||
[JsonPropertyName("PSN")] public string PSN { get; set; }
|
||||
[JsonPropertyName("Reactor")] public string Reactor { get; set; }
|
||||
[JsonPropertyName("Recipe")] public string Recipe { get; set; }
|
||||
//
|
||||
public string Comments { get; set; }
|
||||
public string Diameter { get; set; }
|
||||
public string Exclusion { get; set; }
|
||||
public string Gain { get; set; }
|
||||
[JsonPropertyName("Comments")] public string Comments { get; set; }
|
||||
[JsonPropertyName("Diameter")] public string Diameter { get; set; }
|
||||
[JsonPropertyName("Exclusion")] public string Exclusion { get; set; }
|
||||
[JsonPropertyName("Gain")] public string Gain { get; set; }
|
||||
public string HeaderUniqueId { get; set; }
|
||||
public string Laser { get; set; }
|
||||
public string ParseErrorText { get; set; }
|
||||
public string RDS { get; set; }
|
||||
public string Slot { get; set; }
|
||||
[JsonPropertyName("Laser")] public string Laser { get; set; }
|
||||
[JsonPropertyName("ParseErrorText")] public string ParseErrorText { get; set; }
|
||||
[JsonPropertyName("RDS")] public string RDS { get; set; }
|
||||
[JsonPropertyName("WaferId")] public string Slot { get; set; }
|
||||
public string UniqueId { get; set; }
|
||||
//
|
||||
public string AreaCount { get; set; }
|
||||
public string AreaCountAvg { get; set; }
|
||||
public string AreaCountMax { get; set; }
|
||||
public string AreaCountMin { get; set; }
|
||||
public string AreaCountStdDev { get; set; }
|
||||
public string AreaTotal { get; set; }
|
||||
public string AreaTotalAvg { get; set; }
|
||||
public string AreaTotalMax { get; set; }
|
||||
public string AreaTotalMin { get; set; }
|
||||
public string AreaTotalStdDev { get; set; }
|
||||
public string Bin1 { get; set; }
|
||||
public string Bin2 { get; set; }
|
||||
public string Bin3 { get; set; }
|
||||
public string Bin4 { get; set; }
|
||||
public string Bin5 { get; set; }
|
||||
public string Bin6 { get; set; }
|
||||
public string Bin7 { get; set; }
|
||||
public string Bin8 { get; set; }
|
||||
public string HazeAverage { get; set; }
|
||||
public string HazeAverageAvg { get; set; }
|
||||
public string HazeAverageMax { get; set; }
|
||||
public string HazeAverageMin { get; set; }
|
||||
public string HazeAverageStdDev { get; set; }
|
||||
public string HazePeak { get; set; }
|
||||
public string HazeRegion { get; set; }
|
||||
public string HazeRegionAvg { get; set; }
|
||||
public string HazeRegionMax { get; set; }
|
||||
public string HazeRegionMin { get; set; }
|
||||
public string HazeRegionStdDev { get; set; }
|
||||
public string HazeRng { get; set; }
|
||||
public string LPDCM2 { get; set; }
|
||||
public string LPDCM2Avg { get; set; }
|
||||
public string LPDCM2Max { get; set; }
|
||||
public string LPDCM2Min { get; set; }
|
||||
public string LPDCM2StdDev { get; set; }
|
||||
public string LPDCount { get; set; }
|
||||
public string LPDCountAvg { get; set; }
|
||||
public string LPDCountMax { get; set; }
|
||||
public string LPDCountMin { get; set; }
|
||||
public string LPDCountStdDev { get; set; }
|
||||
public string Mean { get; set; }
|
||||
public string ScratchCount { get; set; }
|
||||
public string ScratchCountAvg { get; set; }
|
||||
public string ScratchCountMax { get; set; }
|
||||
public string ScratchCountMin { get; set; }
|
||||
public string ScratchCountStdDev { get; set; }
|
||||
public string ScratchTotal { get; set; }
|
||||
public string ScratchTotalAvg { get; set; }
|
||||
public string ScratchTotalMax { get; set; }
|
||||
public string ScratchTotalMin { get; set; }
|
||||
public string ScratchTotalStdDev { get; set; }
|
||||
public string Sort { get; set; }
|
||||
public string StdDev { get; set; }
|
||||
public string SumOfDefects { get; set; }
|
||||
public string SumOfDefectsAvg { get; set; }
|
||||
public string SumOfDefectsMax { get; set; }
|
||||
public string SumOfDefectsMin { get; set; }
|
||||
public string SumOfDefectsStdDev { get; set; }
|
||||
public string Thresh { get; set; }
|
||||
public string Thruput { get; set; }
|
||||
[JsonPropertyName("AreaCount")] public string AreaCount { get; set; }
|
||||
[JsonPropertyName("AreaCountAvg")] public string AreaCountAvg { get; set; }
|
||||
[JsonPropertyName("AreaCountMax")] public string AreaCountMax { get; set; }
|
||||
[JsonPropertyName("AreaCountMin")] public string AreaCountMin { get; set; }
|
||||
[JsonPropertyName("AreaCountStdDev")] public string AreaCountStdDev { get; set; }
|
||||
[JsonPropertyName("WaferAreaTotal")] public string AreaTotal { get; set; }
|
||||
[JsonPropertyName("AreaTotalAvg")] public string AreaTotalAvg { get; set; }
|
||||
[JsonPropertyName("AreaTotalMax")] public string AreaTotalMax { get; set; }
|
||||
[JsonPropertyName("AreaTotalMin")] public string AreaTotalMin { get; set; }
|
||||
[JsonPropertyName("AreaTotalStdDev")] public string AreaTotalStdDev { get; set; }
|
||||
[JsonPropertyName("Bin1")] public string Bin1 { get; set; }
|
||||
[JsonPropertyName("Bin2")] public string Bin2 { get; set; }
|
||||
[JsonPropertyName("Bin3")] public string Bin3 { get; set; }
|
||||
[JsonPropertyName("Bin4")] public string Bin4 { get; set; }
|
||||
[JsonPropertyName("Bin5")] public string Bin5 { get; set; }
|
||||
[JsonPropertyName("Bin6")] public string Bin6 { get; set; }
|
||||
[JsonPropertyName("Bin7")] public string Bin7 { get; set; }
|
||||
[JsonPropertyName("Bin8")] public string Bin8 { get; set; }
|
||||
[JsonPropertyName("WaferHazeAverage")] public string HazeAverage { get; set; }
|
||||
[JsonPropertyName("HazeAverageAvg")] public string HazeAverageAvg { get; set; }
|
||||
[JsonPropertyName("HazeAverageMax")] public string HazeAverageMax { get; set; }
|
||||
[JsonPropertyName("HazeAverageMin")] public string HazeAverageMin { get; set; }
|
||||
[JsonPropertyName("HazeAverageStdDev")] public string HazeAverageStdDev { get; set; }
|
||||
[JsonPropertyName("HazePeak")] public string HazePeak { get; set; }
|
||||
[JsonPropertyName("WaferHazeRegion")] public string HazeRegion { get; set; }
|
||||
[JsonPropertyName("HazeRegionAvg")] public string HazeRegionAvg { get; set; }
|
||||
[JsonPropertyName("HazeRegionMax")] public string HazeRegionMax { get; set; }
|
||||
[JsonPropertyName("HazeRegionMin")] public string HazeRegionMin { get; set; }
|
||||
[JsonPropertyName("HazeRegionStdDev")] public string HazeRegionStdDev { get; set; }
|
||||
[JsonPropertyName("HazeRng")] public string HazeRng { get; set; }
|
||||
[JsonPropertyName("LPDCM2")] public string LPDCM2 { get; set; }
|
||||
[JsonPropertyName("LPDCM2Avg")] public string LPDCM2Avg { get; set; }
|
||||
[JsonPropertyName("LPDCM2Max")] public string LPDCM2Max { get; set; }
|
||||
[JsonPropertyName("LPDCM2Min")] public string LPDCM2Min { get; set; }
|
||||
[JsonPropertyName("LPDCM2StdDev")] public string LPDCM2StdDev { get; set; }
|
||||
[JsonPropertyName("LPDCount")] public string LPDCount { get; set; }
|
||||
[JsonPropertyName("LPDCountAvg")] public string LPDCountAvg { get; set; }
|
||||
[JsonPropertyName("LPDCountMax")] public string LPDCountMax { get; set; }
|
||||
[JsonPropertyName("LPDCountMin")] public string LPDCountMin { get; set; }
|
||||
[JsonPropertyName("LPDCountStdDev")] public string LPDCountStdDev { get; set; }
|
||||
[JsonPropertyName("Mean")] public string Mean { get; set; }
|
||||
[JsonPropertyName("ScratchCount")] public string ScratchCount { get; set; }
|
||||
[JsonPropertyName("ScratchCountAvg")] public string ScratchCountAvg { get; set; }
|
||||
[JsonPropertyName("ScratchCountMax")] public string ScratchCountMax { get; set; }
|
||||
[JsonPropertyName("ScratchCountMin")] public string ScratchCountMin { get; set; }
|
||||
[JsonPropertyName("ScratchCountStdDev")] public string ScratchCountStdDev { get; set; }
|
||||
[JsonPropertyName("WaferScratchTotal")] public string ScratchTotal { get; set; }
|
||||
[JsonPropertyName("ScratchTotalAvg")] public string ScratchTotalAvg { get; set; }
|
||||
[JsonPropertyName("ScratchTotalMax")] public string ScratchTotalMax { get; set; }
|
||||
[JsonPropertyName("ScratchTotalMin")] public string ScratchTotalMin { get; set; }
|
||||
[JsonPropertyName("ScratchTotalStdDev")] public string ScratchTotalStdDev { get; set; }
|
||||
[JsonPropertyName("Sort")] public string Sort { get; set; }
|
||||
[JsonPropertyName("StdDev")] public string StdDev { get; set; }
|
||||
[JsonPropertyName("SumOfDefects")] public string SumOfDefects { get; set; }
|
||||
[JsonPropertyName("SumOfDefectsAvg")] public string SumOfDefectsAvg { get; set; }
|
||||
[JsonPropertyName("SumOfDefectsMax")] public string SumOfDefectsMax { get; set; }
|
||||
[JsonPropertyName("SumOfDefectsMin")] public string SumOfDefectsMin { get; set; }
|
||||
[JsonPropertyName("SumOfDefectsStdDev")] public string SumOfDefectsStdDev { get; set; }
|
||||
[JsonPropertyName("Thresh")] public string Thresh { get; set; }
|
||||
[JsonPropertyName("Thruput")] public string Thruput { get; set; }
|
||||
|
||||
string IDescription.GetEventDescription() => "File Has been read and parsed";
|
||||
|
||||
@@ -492,4 +493,16 @@ public class Description : IDescription, Shared.Properties.IDescription
|
||||
|
||||
internal static string GetDateFormat() => "MM/dd/yyyy hh:mm:ss tt";
|
||||
|
||||
}
|
||||
|
||||
[JsonSourceGenerationOptions(WriteIndented = true, NumberHandling = JsonNumberHandling.AllowReadingFromString | JsonNumberHandling.WriteAsString)]
|
||||
[JsonSerializable(typeof(Description))]
|
||||
internal partial class DescriptionSourceGenerationContext : JsonSerializerContext
|
||||
{
|
||||
}
|
||||
|
||||
[JsonSourceGenerationOptions(WriteIndented = true, NumberHandling = JsonNumberHandling.AllowReadingFromString | JsonNumberHandling.WriteAsString)]
|
||||
[JsonSerializable(typeof(Description[]))]
|
||||
internal partial class DescriptionArraySourceGenerationContext : JsonSerializerContext
|
||||
{
|
||||
}
|
@@ -1,46 +1,60 @@
|
||||
namespace Adaptation.FileHandlers.pcl;
|
||||
using System.Text.Json.Serialization;
|
||||
|
||||
namespace Adaptation.FileHandlers.pcl;
|
||||
|
||||
public class Detail
|
||||
{
|
||||
|
||||
public long Id { get; set; }
|
||||
public string AreaCount { get; set; }
|
||||
public string AreaTotal { get; set; }
|
||||
public string Bin1 { get; set; }
|
||||
public string Bin2 { get; set; }
|
||||
public string Bin3 { get; set; }
|
||||
public string Bin4 { get; set; }
|
||||
public string Bin5 { get; set; }
|
||||
public string Bin6 { get; set; }
|
||||
public string Bin7 { get; set; }
|
||||
public string Bin8 { get; set; }
|
||||
public string Comments { get; set; }
|
||||
public string Date { get; set; }
|
||||
public string Diameter { get; set; }
|
||||
public string Exclusion { get; set; }
|
||||
public string Gain { get; set; }
|
||||
public string HazeAverage { get; set; }
|
||||
public string HazePeak { get; set; }
|
||||
public string HazeRegion { get; set; }
|
||||
public string HazeRng { get; set; }
|
||||
[JsonPropertyName("AreaCount")] public string AreaCount { get; set; }
|
||||
[JsonPropertyName("WaferAreaTotal")] public string AreaTotal { get; set; } // different name
|
||||
[JsonPropertyName("Bin1")] public string Bin1 { get; set; }
|
||||
[JsonPropertyName("Bin2")] public string Bin2 { get; set; }
|
||||
[JsonPropertyName("Bin3")] public string Bin3 { get; set; }
|
||||
[JsonPropertyName("Bin4")] public string Bin4 { get; set; }
|
||||
[JsonPropertyName("Bin5")] public string Bin5 { get; set; }
|
||||
[JsonPropertyName("Bin6")] public string Bin6 { get; set; }
|
||||
[JsonPropertyName("Bin7")] public string Bin7 { get; set; }
|
||||
[JsonPropertyName("Bin8")] public string Bin8 { get; set; }
|
||||
[JsonPropertyName("Comments")] public string Comments { get; set; }
|
||||
[JsonPropertyName("Date")] public string Date { get; set; }
|
||||
[JsonPropertyName("Diameter")] public string Diameter { get; set; }
|
||||
[JsonPropertyName("Exclusion")] public string Exclusion { get; set; }
|
||||
[JsonPropertyName("Gain")] public string Gain { get; set; }
|
||||
[JsonPropertyName("WaferHazeAverage")] public string HazeAverage { get; set; } // different name
|
||||
[JsonPropertyName("HazePeak")] public string HazePeak { get; set; }
|
||||
[JsonPropertyName("WaferHazeRegion")] public string HazeRegion { get; set; } // different name
|
||||
[JsonPropertyName("HazeRng")] public string HazeRng { get; set; }
|
||||
public string HeaderUniqueId { get; set; }
|
||||
public string LPDCM2 { get; set; }
|
||||
public string LPDCount { get; set; }
|
||||
public string Laser { get; set; }
|
||||
public string Mean { get; set; }
|
||||
public string Recipe { get; set; }
|
||||
public string ScratchCount { get; set; }
|
||||
public string ScratchTotal { get; set; }
|
||||
public string Slot { get; set; }
|
||||
public string Sort { get; set; }
|
||||
public string StdDev { get; set; }
|
||||
public string SumOfDefects { get; set; }
|
||||
public string Thresh { get; set; }
|
||||
public string Thruput { get; set; }
|
||||
[JsonPropertyName("LPDCM2")] public string LPDCM2 { get; set; }
|
||||
[JsonPropertyName("LPDCount")] public string LPDCount { get; set; }
|
||||
[JsonPropertyName("Laser")] public string Laser { get; set; }
|
||||
[JsonPropertyName("Mean")] public string Mean { get; set; }
|
||||
[JsonPropertyName("Recipe")] public string Recipe { get; set; }
|
||||
[JsonPropertyName("ScratchCount")] public string ScratchCount { get; set; }
|
||||
[JsonPropertyName("WaferScratchTotal")] public string ScratchTotal { get; set; } // different name
|
||||
[JsonPropertyName("WaferId")] public string Slot { get; set; } // different name
|
||||
[JsonPropertyName("Sort")] public string Sort { get; set; }
|
||||
[JsonPropertyName("StdDev")] public string StdDev { get; set; }
|
||||
[JsonPropertyName("SumOfDefects")] public string SumOfDefects { get; set; }
|
||||
[JsonPropertyName("Thresh")] public string Thresh { get; set; }
|
||||
[JsonPropertyName("Thruput")] public string Thruput { get; set; }
|
||||
public string Title { get; set; }
|
||||
public string UniqueId { get; set; }
|
||||
public string Data { get; set; }
|
||||
#pragma warning disable IDE1006
|
||||
public int i { get; set; }
|
||||
|
||||
}
|
||||
|
||||
[JsonSourceGenerationOptions(WriteIndented = true, NumberHandling = JsonNumberHandling.AllowReadingFromString | JsonNumberHandling.WriteAsString)]
|
||||
[JsonSerializable(typeof(Detail))]
|
||||
internal partial class DetailSourceGenerationContext : JsonSerializerContext
|
||||
{
|
||||
}
|
||||
|
||||
[JsonSourceGenerationOptions(WriteIndented = true, NumberHandling = JsonNumberHandling.AllowReadingFromString | JsonNumberHandling.WriteAsString)]
|
||||
[JsonSerializable(typeof(Detail[]))]
|
||||
internal partial class DetailArraySourceGenerationContext : JsonSerializerContext
|
||||
{
|
||||
}
|
@@ -317,6 +317,6 @@ public class Header
|
||||
|
||||
[JsonSourceGenerationOptions(WriteIndented = true)]
|
||||
[JsonSerializable(typeof(Header))]
|
||||
internal partial class HeaderSourceGenerationContext : JsonSerializerContext
|
||||
internal partial class PCLHeaderSourceGenerationContext : JsonSerializerContext
|
||||
{
|
||||
}
|
@@ -703,12 +703,11 @@ public class ProcessData : IProcessData
|
||||
{
|
||||
List<Description> results = new();
|
||||
Description? description;
|
||||
JsonSerializerOptions jsonSerializerOptions = new() { NumberHandling = JsonNumberHandling.AllowReadingFromString | JsonNumberHandling.WriteAsString };
|
||||
foreach (JsonElement jsonElement in jsonElements)
|
||||
{
|
||||
if (jsonElement.ValueKind != JsonValueKind.Object)
|
||||
throw new Exception();
|
||||
description = JsonSerializer.Deserialize<Description>(jsonElement.ToString(), jsonSerializerOptions);
|
||||
description = JsonSerializer.Deserialize(jsonElement.ToString(), DescriptionSourceGenerationContext.Default.Description);
|
||||
if (description is null)
|
||||
continue;
|
||||
results.Add(description);
|
||||
|
@@ -188,6 +188,6 @@ internal class Row
|
||||
|
||||
[JsonSourceGenerationOptions(WriteIndented = true)]
|
||||
[JsonSerializable(typeof(Row))]
|
||||
internal partial class RowSourceGenerationContext : JsonSerializerContext
|
||||
internal partial class PCLRowSourceGenerationContext : JsonSerializerContext
|
||||
{
|
||||
}
|
@@ -49,7 +49,7 @@ internal class Run
|
||||
private static void WriteJson(Logistics logistics, List<FileInfo> fileInfoCollection, Run result)
|
||||
{
|
||||
FileInfo fileInfo = new($"{logistics.ReportFullPath}.run.json");
|
||||
string json = JsonSerializer.Serialize(result, RunSourceGenerationContext.Default.Run);
|
||||
string json = JsonSerializer.Serialize(result, PCLRunSourceGenerationContext.Default.Run);
|
||||
File.WriteAllText(fileInfo.FullName, json);
|
||||
File.SetLastWriteTime(fileInfo.FullName, logistics.DateTimeFromSequence);
|
||||
fileInfoCollection.Add(fileInfo);
|
||||
@@ -141,6 +141,6 @@ internal class Run
|
||||
|
||||
[JsonSourceGenerationOptions(WriteIndented = true)]
|
||||
[JsonSerializable(typeof(Run))]
|
||||
internal partial class RunSourceGenerationContext : JsonSerializerContext
|
||||
internal partial class PCLRunSourceGenerationContext : JsonSerializerContext
|
||||
{
|
||||
}
|
@@ -235,6 +235,6 @@ public class Wafer
|
||||
|
||||
[JsonSourceGenerationOptions(WriteIndented = true)]
|
||||
[JsonSerializable(typeof(Wafer))]
|
||||
internal partial class WaferSourceGenerationContext : JsonSerializerContext
|
||||
internal partial class PCLWaferSourceGenerationContext : JsonSerializerContext
|
||||
{
|
||||
}
|
@@ -38,6 +38,6 @@ public class WaferSummary
|
||||
|
||||
[JsonSourceGenerationOptions(WriteIndented = true)]
|
||||
[JsonSerializable(typeof(WaferSummary))]
|
||||
internal partial class WaferSummarySourceGenerationContext : JsonSerializerContext
|
||||
internal partial class PCLWaferSummarySourceGenerationContext : JsonSerializerContext
|
||||
{
|
||||
}
|
Adaptation/FileHandlers/pdsf/Constant.cs (new file, 13 lines)
@@ -0,0 +1,13 @@
namespace Adaptation.FileHandlers.pdsf;

internal class Constant
{

    public string Id { get; } = "ID#";
    public string Max { get; } = "Max:";
    public string Min { get; } = "Min:";
    public string Date { get; } = "Date:";
    public string StdDev { get; } = "Std Dev:";
    public string Average { get; } = "Average:";

}
Adaptation/FileHandlers/pdsf/Convert.cs (new file, 150 lines)
@@ -0,0 +1,150 @@
|
||||
using Adaptation.Shared;
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Collections.ObjectModel;
|
||||
using System.Diagnostics;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
|
||||
namespace Adaptation.FileHandlers.pdsf;
|
||||
|
||||
internal class Convert
|
||||
{
|
||||
|
||||
/// <summary>
|
||||
/// Convert the raw data file to parsable file format - in this case from PCL to PDF
|
||||
/// </summary>
|
||||
/// <param name="sourceFile">source file to be converted to PDF</param>
|
||||
/// <returns></returns>
|
||||
private static string ConvertSourceFileToPdf(string ghostPCLFileName, Logistics logistics)
|
||||
{
|
||||
string result = Path.ChangeExtension(logistics.ReportFullPath, ".pdf");
|
||||
if (!File.Exists(result))
|
||||
{
|
||||
//string arguments = string.Concat("-i \"", sourceFile, "\" -o \"", result, "\"");
|
||||
string arguments = string.Concat("-dSAFER -dBATCH -dNOPAUSE -sOutputFile=\"", result, "\" -sDEVICE=pdfwrite \"", logistics.ReportFullPath, "\"");
|
||||
//Process process = Process.Start(configData.LincPDFCFileName, arguments);
|
||||
Process process = Process.Start(ghostPCLFileName, arguments);
|
||||
_ = process.WaitForExit(30000);
|
||||
if (!File.Exists(result))
|
||||
throw new Exception("PDF file wasn't created");
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
private static Dictionary<string, string> PortableDocumentFormatSplit(string pdfTextStripperFileName, string sourcePath, string sourceFileNamePdf)
|
||||
{
|
||||
Dictionary<string, string> results = new();
|
||||
ProcessStartInfo processStartInfo = new(pdfTextStripperFileName, $"s \"{sourceFileNamePdf}\"")
|
||||
{
|
||||
UseShellExecute = false,
|
||||
RedirectStandardError = true,
|
||||
RedirectStandardOutput = true,
|
||||
};
|
||||
Process process = Process.Start(processStartInfo);
|
||||
_ = process.WaitForExit(30000);
|
||||
string text;
|
||||
string checkFile;
|
||||
string[] pdfFiles = Directory.GetFiles(sourcePath, "*.pdf", SearchOption.TopDirectoryOnly);
|
||||
string[] textFiles = Directory.GetFiles(sourcePath, "*.txt", SearchOption.TopDirectoryOnly);
|
||||
foreach (string pdfFile in pdfFiles)
|
||||
{
|
||||
if (pdfFile == sourceFileNamePdf)
|
||||
continue;
|
||||
checkFile = Path.ChangeExtension(pdfFile, ".txt");
|
||||
if (!textFiles.Contains(checkFile))
|
||||
continue;
|
||||
text = File.ReadAllText(checkFile);
|
||||
results.Add(pdfFile, text);
|
||||
}
|
||||
return results;
|
||||
}
|
||||
|
||||
internal static ReadOnlyDictionary<string, string> PDF(Logistics logistics, string ghostPCLFileName, string pdfTextStripperFileName, List<FileInfo> fileInfoCollection)
|
||||
{
|
||||
Dictionary<string, string> results = new();
|
||||
object item;
|
||||
string pageText;
|
||||
string pagePDFFile;
|
||||
string pageTextFile;
|
||||
List<string> sourceFiles = new();
|
||||
string sourceFileNamePdf = ConvertSourceFileToPdf(ghostPCLFileName, logistics);
|
||||
sourceFiles.Add(sourceFileNamePdf);
|
||||
string sourcePath = Path.GetDirectoryName(logistics.ReportFullPath) ?? throw new Exception();
|
||||
string sourceFileNameWithoutExtension = Path.GetFileNameWithoutExtension(logistics.ReportFullPath);
|
||||
string[] txtFiles = Directory.GetFiles(sourcePath, $"{sourceFileNameWithoutExtension}_*.txt", SearchOption.TopDirectoryOnly);
|
||||
if (txtFiles.Length != 0)
|
||||
{
|
||||
txtFiles = (from l in txtFiles orderby l.Length, l select l).ToArray();
|
||||
foreach (string txtFile in txtFiles)
|
||||
{
|
||||
sourceFiles.Add(txtFile);
|
||||
pageText = File.ReadAllText(txtFile);
|
||||
pagePDFFile = Path.ChangeExtension(txtFile, ".pdf");
|
||||
if (!File.Exists(pagePDFFile))
|
||||
continue;
|
||||
results.Add(pagePDFFile, pageText);
|
||||
}
|
||||
}
|
||||
if (results.Count == 0)
|
||||
{
|
||||
try
|
||||
{
|
||||
java.io.File file = new(sourceFileNamePdf);
|
||||
org.apache.pdfbox.util.Splitter splitter = new();
|
||||
org.apache.pdfbox.pdmodel.PDDocument pdDocument = org.apache.pdfbox.pdmodel.PDDocument.load(file);
|
||||
java.util.List list = splitter.split(pdDocument);
|
||||
java.util.ListIterator iterator = list.listIterator();
|
||||
org.apache.pdfbox.util.PDFTextStripper dataStripper = new();
|
||||
for (short i = 1; i < short.MaxValue; i++)
|
||||
{
|
||||
if (!iterator.hasNext())
|
||||
break;
|
||||
item = iterator.next();
|
||||
pagePDFFile = string.Concat(sourcePath, @"\", sourceFileNameWithoutExtension, "_", i, ".pdf");
|
||||
pageTextFile = Path.ChangeExtension(pagePDFFile, ".txt");
|
||||
if (File.Exists(pageTextFile))
|
||||
{
|
||||
pageText = File.ReadAllText(pageTextFile);
|
||||
sourceFiles.Add(pageTextFile);
|
||||
if (item is not org.apache.pdfbox.pdmodel.PDDocument pd)
|
||||
continue;
|
||||
pd.close();
|
||||
}
|
||||
else if (File.Exists(pagePDFFile))
|
||||
{
|
||||
org.apache.pdfbox.pdmodel.PDDocument document = org.apache.pdfbox.pdmodel.PDDocument.load(pagePDFFile);
|
||||
pageText = dataStripper.getText(document);
|
||||
document.close();
|
||||
sourceFiles.Add(pagePDFFile);
|
||||
if (item is not org.apache.pdfbox.pdmodel.PDDocument pd)
|
||||
continue;
|
||||
pd.close();
|
||||
}
|
||||
else
|
||||
{
|
||||
if (item is not org.apache.pdfbox.pdmodel.PDDocument pd)
|
||||
continue;
|
||||
pageText = dataStripper.getText(pd);
|
||||
pd.save(pagePDFFile);
|
||||
sourceFiles.Add(pagePDFFile);
|
||||
pd.close();
|
||||
File.WriteAllText(pageTextFile, pageText);
|
||||
sourceFiles.Add(pageTextFile);
|
||||
}
|
||||
results.Add(pagePDFFile, pageText);
|
||||
}
|
||||
pdDocument.close();
|
||||
}
|
||||
catch (MissingMethodException)
|
||||
{
|
||||
if (results.Count == 0)
|
||||
results = PortableDocumentFormatSplit(pdfTextStripperFileName, sourcePath, sourceFileNamePdf);
|
||||
}
|
||||
}
|
||||
foreach (string sourceFile in sourceFiles)
|
||||
fileInfoCollection.Add(new FileInfo(sourceFile));
|
||||
return new(results);
|
||||
}
|
||||
|
||||
}
|
136
Adaptation/FileHandlers/pdsf/FileRead.cs
Normal file
@ -0,0 +1,136 @@
|
||||
using Adaptation.Eaf.Management.ConfigurationData.CellAutomation;
|
||||
using Adaptation.Ifx.Eaf.EquipmentConnector.File.Configuration;
|
||||
using Adaptation.Shared;
|
||||
using Adaptation.Shared.Duplicator;
|
||||
using Adaptation.Shared.Methods;
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Collections.ObjectModel;
|
||||
using System.IO;
|
||||
using System.Text.Json;
|
||||
|
||||
namespace Adaptation.FileHandlers.pdsf;
|
||||
|
||||
public class FileRead : Shared.FileRead, IFileRead
|
||||
{
|
||||
|
||||
private readonly string _GhostPCLFileName;
|
||||
private readonly string _PDFTextStripperFileName;
|
||||
|
||||
public FileRead(ISMTP smtp, Dictionary<string, string> fileParameter, string cellInstanceName, int? connectionCount, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList<ModelObjectParameterDefinition> modelObjectParameters, string equipmentDictionaryName, Dictionary<string, List<long>> dummyRuns, Dictionary<long, List<Shared.Metrology.WS.Results>> staticRuns, bool useCyclicalForDescription, bool isEAFHosted) :
|
||||
base(new Description(), false, smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null)
|
||||
{
|
||||
_MinFileLength = 15;
|
||||
_NullData = string.Empty;
|
||||
_Logistics = new(this);
|
||||
if (_FileParameter is null)
|
||||
throw new Exception(cellInstanceConnectionName);
|
||||
if (_ModelObjectParameterDefinitions is null)
|
||||
throw new Exception(cellInstanceConnectionName);
|
||||
if (_IsDuplicator)
|
||||
throw new Exception(cellInstanceConnectionName);
|
||||
_GhostPCLFileName = Path.Combine(AppContext.BaseDirectory, "gpcl6win64.exe");
|
||||
if (!File.Exists(_GhostPCLFileName))
|
||||
throw new Exception("Ghost PCL FileName doesn't Exist!");
|
||||
_PDFTextStripperFileName = Path.Combine(AppContext.BaseDirectory, "PDF-Text-Stripper.exe");
|
||||
if (!File.Exists(_PDFTextStripperFileName))
|
||||
throw new Exception("PDF-Text-Stripper FileName doesn't Exist!");
|
||||
if (_IsEAFHosted)
|
||||
NestExistingFiles(_FileConnectorConfiguration);
|
||||
}
|
||||
|
||||
void IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, Exception exception) => Move(extractResults);
|
||||
|
||||
void IFileRead.WaitForThread() => WaitForThread(thread: null, threadExceptions: null);
|
||||
|
||||
string IFileRead.GetEventDescription()
|
||||
{
|
||||
string result = _Description.GetEventDescription();
|
||||
return result;
|
||||
}
|
||||
|
||||
List<string> IFileRead.GetHeaderNames()
|
||||
{
|
||||
List<string> results = _Description.GetHeaderNames();
|
||||
return results;
|
||||
}
|
||||
|
||||
string[] IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, string to, string from, string resolvedFileLocation, Exception exception)
|
||||
{
|
||||
string[] results = Move(extractResults, to, from, resolvedFileLocation, exception);
|
||||
return results;
|
||||
}
|
||||
|
||||
JsonProperty[] IFileRead.GetDefault()
|
||||
{
|
||||
JsonProperty[] results = _Description.GetDefault(this, _Logistics);
|
||||
return results;
|
||||
}
|
||||
|
||||
Dictionary<string, string> IFileRead.GetDisplayNamesJsonElement()
|
||||
{
|
||||
Dictionary<string, string> results = _Description.GetDisplayNamesJsonElement(this);
|
||||
return results;
|
||||
}
|
||||
|
||||
List<IDescription> IFileRead.GetDescriptions(IFileRead fileRead, List<Test> tests, IProcessData processData)
|
||||
{
|
||||
List<IDescription> results = _Description.GetDescriptions(fileRead, _Logistics, tests, processData);
|
||||
return results;
|
||||
}
|
||||
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.GetExtractResult(string reportFullPath, string eventName)
|
||||
{
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
|
||||
if (string.IsNullOrEmpty(eventName))
|
||||
throw new Exception();
|
||||
_ReportFullPath = reportFullPath;
|
||||
DateTime dateTime = DateTime.Now;
|
||||
results = GetExtractResult(reportFullPath, dateTime);
|
||||
if (results.Item3 is null)
|
||||
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(results.Item1, Array.Empty<Test>(), JsonSerializer.Deserialize<JsonElement[]>("[]"), results.Item4);
|
||||
if (results.Item3.Length > 0 && _IsEAFHosted)
|
||||
WritePDSF(this, results.Item3);
|
||||
UpdateLastTicksDuration(DateTime.Now.Ticks - dateTime.Ticks);
|
||||
return results;
|
||||
}
|
||||
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.ReExtract()
|
||||
{
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
|
||||
List<string> headerNames = _Description.GetHeaderNames();
|
||||
Dictionary<string, string> keyValuePairs = _Description.GetDisplayNamesJsonElement(this);
|
||||
results = ReExtract(this, headerNames, keyValuePairs);
|
||||
return results;
|
||||
}
|
||||
|
||||
#nullable enable
|
||||
|
||||
private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
|
||||
{
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
|
||||
string result;
|
||||
JsonElement[] jsonElements;
|
||||
Test[] tests = Array.Empty<Test>();
|
||||
List<FileInfo> fileInfoCollection = new();
|
||||
ProcessDataStandardFormat processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath);
|
||||
_Logistics = new Logistics(reportFullPath, processDataStandardFormat);
|
||||
SetFileParameterLotIDToLogisticsMID();
|
||||
ReadOnlyDictionary<string, string> pages = Convert.PDF(_Logistics, _GhostPCLFileName, _PDFTextStripperFileName, fileInfoCollection);
|
||||
Run? run = Run.Get(_Logistics, fileInfoCollection, pages);
|
||||
if (run is null)
|
||||
{
|
||||
jsonElements = Array.Empty<JsonElement>();
|
||||
result = string.Concat("A) No Data - ", dateTime.Ticks);
|
||||
results = new(result, tests, jsonElements, fileInfoCollection);
|
||||
}
|
||||
else
|
||||
{
|
||||
result = string.Join(Environment.NewLine, _Logistics.Logistics1);
|
||||
jsonElements = _IsEAFHosted ? Array.Empty<JsonElement>() : ProcessDataStandardFormat.GetArray(processDataStandardFormat);
|
||||
results = new(result, tests, jsonElements, fileInfoCollection);
|
||||
}
|
||||
return results;
|
||||
}
|
||||
|
||||
}
|
322
Adaptation/FileHandlers/pdsf/Header.cs
Normal file
@ -0,0 +1,322 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Collections.ObjectModel;
|
||||
using System.Text.Json.Serialization;
|
||||
|
||||
namespace Adaptation.FileHandlers.pdsf;
|
||||
|
||||
#nullable enable
|
||||
|
||||
public class Header
|
||||
{
|
||||
|
||||
public Header(string date,
|
||||
string recipe,
|
||||
string id,
|
||||
ReadOnlyCollection<WaferSummary> waferSummary,
|
||||
string lPDCountMin,
|
||||
string lPDCM2Min,
|
||||
string areaCountMin,
|
||||
string areaTotalMin,
|
||||
string scratchCountMin,
|
||||
string scratchTotalMin,
|
||||
string sumOfDefectsMin,
|
||||
string hazeRegionMin,
|
||||
string hazeAverageMin,
|
||||
string lPDCountMax,
|
||||
string lPDCM2Max,
|
||||
string areaCountMax,
|
||||
string areaTotalMax,
|
||||
string scratchCountMax,
|
||||
string scratchTotalMax,
|
||||
string sumOfDefectsMax,
|
||||
string hazeRegionMax,
|
||||
string hazeAverageMax,
|
||||
string lPDCountAvg,
|
||||
string lPDCM2Avg,
|
||||
string areaCountAvg,
|
||||
string areaTotalAvg,
|
||||
string scratchCountAvg,
|
||||
string scratchTotalAvg,
|
||||
string sumOfDefectsAvg,
|
||||
string hazeRegionAvg,
|
||||
string hazeAverageAvg,
|
||||
string lPDCountStdDev,
|
||||
string lPDCM2StdDev,
|
||||
string areaCountStdDev,
|
||||
string areaTotalStdDev,
|
||||
string scratchCountStdDev,
|
||||
string scratchTotalStdDev,
|
||||
string sumOfDefectsStdDev,
|
||||
string hazeRegionStdDev,
|
||||
string hazeAverageStdDev)
|
||||
{
|
||||
Date = date;
|
||||
Recipe = recipe;
|
||||
Id = id;
|
||||
WaferSummary = waferSummary;
|
||||
LPDCountMin = lPDCountMin;
|
||||
LPDCM2Min = lPDCM2Min;
|
||||
AreaCountMin = areaCountMin;
|
||||
AreaTotalMin = areaTotalMin;
|
||||
ScratchCountMin = scratchCountMin;
|
||||
ScratchTotalMin = scratchTotalMin;
|
||||
SumOfDefectsMin = sumOfDefectsMin;
|
||||
HazeRegionMin = hazeRegionMin;
|
||||
HazeAverageMin = hazeAverageMin;
|
||||
LPDCountMax = lPDCountMax;
|
||||
LPDCM2Max = lPDCM2Max;
|
||||
AreaCountMax = areaCountMax;
|
||||
AreaTotalMax = areaTotalMax;
|
||||
ScratchCountMax = scratchCountMax;
|
||||
ScratchTotalMax = scratchTotalMax;
|
||||
SumOfDefectsMax = sumOfDefectsMax;
|
||||
HazeRegionMax = hazeRegionMax;
|
||||
HazeAverageMax = hazeAverageMax;
|
||||
LPDCountAvg = lPDCountAvg;
|
||||
LPDCM2Avg = lPDCM2Avg;
|
||||
AreaCountAvg = areaCountAvg;
|
||||
AreaTotalAvg = areaTotalAvg;
|
||||
ScratchCountAvg = scratchCountAvg;
|
||||
ScratchTotalAvg = scratchTotalAvg;
|
||||
SumOfDefectsAvg = sumOfDefectsAvg;
|
||||
HazeRegionAvg = hazeRegionAvg;
|
||||
HazeAverageAvg = hazeAverageAvg;
|
||||
LPDCountStdDev = lPDCountStdDev;
|
||||
LPDCM2StdDev = lPDCM2StdDev;
|
||||
AreaCountStdDev = areaCountStdDev;
|
||||
AreaTotalStdDev = areaTotalStdDev;
|
||||
ScratchCountStdDev = scratchCountStdDev;
|
||||
ScratchTotalStdDev = scratchTotalStdDev;
|
||||
SumOfDefectsStdDev = sumOfDefectsStdDev;
|
||||
HazeRegionStdDev = hazeRegionStdDev;
|
||||
HazeAverageStdDev = hazeAverageStdDev;
|
||||
}
|
||||
|
||||
public string Date { get; }
|
||||
public string Recipe { get; }
|
||||
public string Id { get; }
|
||||
public ReadOnlyCollection<WaferSummary> WaferSummary { get; }
|
||||
public string LPDCountMin { get; }
|
||||
public string LPDCM2Min { get; }
|
||||
public string AreaCountMin { get; }
|
||||
public string AreaTotalMin { get; }
|
||||
public string ScratchCountMin { get; }
|
||||
public string ScratchTotalMin { get; }
|
||||
public string SumOfDefectsMin { get; }
|
||||
public string HazeRegionMin { get; }
|
||||
public string HazeAverageMin { get; }
|
||||
public string LPDCountMax { get; }
|
||||
public string LPDCM2Max { get; }
|
||||
public string AreaCountMax { get; }
|
||||
public string AreaTotalMax { get; }
|
||||
public string ScratchCountMax { get; }
|
||||
public string ScratchTotalMax { get; }
|
||||
public string SumOfDefectsMax { get; }
|
||||
public string HazeRegionMax { get; }
|
||||
public string HazeAverageMax { get; }
|
||||
public string LPDCountAvg { get; }
|
||||
public string LPDCM2Avg { get; }
|
||||
public string AreaCountAvg { get; }
|
||||
public string AreaTotalAvg { get; }
|
||||
public string ScratchCountAvg { get; }
|
||||
public string ScratchTotalAvg { get; }
|
||||
public string SumOfDefectsAvg { get; }
|
||||
public string HazeRegionAvg { get; }
|
||||
public string HazeAverageAvg { get; }
|
||||
public string LPDCountStdDev { get; }
|
||||
public string LPDCM2StdDev { get; }
|
||||
public string AreaCountStdDev { get; }
|
||||
public string AreaTotalStdDev { get; }
|
||||
public string ScratchCountStdDev { get; }
|
||||
public string ScratchTotalStdDev { get; }
|
||||
public string SumOfDefectsStdDev { get; }
|
||||
public string HazeRegionStdDev { get; }
|
||||
public string HazeAverageStdDev { get; }
|
||||
|
||||
private static ReadOnlyCollection<string> FixToEolArray(string[] toEol)
|
||||
{
|
||||
List<string> results = new();
|
||||
const int MAX_COLUMNS = 9;
|
||||
if (toEol.Length >= MAX_COLUMNS)
|
||||
results.AddRange(toEol);
|
||||
else
|
||||
{
|
||||
string leftVal, rightVal;
|
||||
List<string> toEolList = new(toEol);
|
||||
int[] mColumnWidths = new int[MAX_COLUMNS] { 8, 6, 6, 6, 6, 7, 7, 5, 7 };
|
||||
if (string.IsNullOrEmpty(toEolList[toEolList.Count - 1]))
|
||||
toEolList.RemoveAt(toEolList.Count - 1);
|
||||
for (int i = toEolList.Count; i < MAX_COLUMNS; i++)
|
||||
toEolList.Insert(0, "");
|
||||
for (int i = MAX_COLUMNS - 1; i >= 0; i--)
|
||||
{
|
||||
if (toEolList[i].Length > mColumnWidths[i])
|
||||
{
|
||||
leftVal = toEolList[i].Substring(0, toEolList[i].Length - mColumnWidths[i]);
|
||||
rightVal = toEolList[i].Substring(leftVal.Length);
|
||||
toEolList[i] = rightVal;
|
||||
toEolList.Insert(i, leftVal);
|
||||
if (string.IsNullOrEmpty(toEolList[0]))
|
||||
toEolList.RemoveAt(0);
|
||||
}
|
||||
}
|
||||
results.AddRange(toEolList);
|
||||
}
|
||||
return results.AsReadOnly();
|
||||
}
|
||||
|
||||
internal static void ScanPast(string text, int[] i, string search)
|
||||
{
|
||||
int num = text.IndexOf(search, i[0]);
|
||||
if (num > -1)
|
||||
i[0] = num + search.Length;
|
||||
else
|
||||
i[0] = text.Length;
|
||||
}
|
||||
|
||||
internal static string GetBefore(string text, int[] i, string search)
|
||||
{
|
||||
int num = text.IndexOf(search, i[0]);
|
||||
if (num > -1)
|
||||
{
|
||||
string str = text.Substring(i[0], num - i[0]);
|
||||
i[0] = num + search.Length;
|
||||
return str.Trim();
|
||||
}
|
||||
string str1 = text.Substring(i[0]);
|
||||
i[0] = text.Length;
|
||||
return str1.Trim();
|
||||
}
|
||||
|
||||
private static string GetBefore(string text, int[] i, string search, bool trim)
|
||||
{
|
||||
if (trim)
|
||||
return GetBefore(text, i, search);
|
||||
int num = text.IndexOf(search, i[0]);
|
||||
if (num > -1)
|
||||
{
|
||||
string str = text.Substring(i[0], num - i[0]);
|
||||
i[0] = num + search.Length;
|
||||
return str;
|
||||
}
|
||||
string str1 = text.Substring(i[0]);
|
||||
i[0] = text.Length;
|
||||
return str1;
|
||||
}
|
||||
|
||||
internal static string GetToEOL(string text, int[] i) =>
|
||||
GetBefore(text, i, "\n");
|
||||
|
||||
private static string GetToEOL(string text, int[] i, bool trim)
|
||||
{
|
||||
if (trim)
|
||||
return GetToEOL(text, i);
|
||||
return GetBefore(text, i, "\n", false);
|
||||
}
|
||||
|
||||
internal static Header Get(ReadOnlyDictionary<string, string> pages, Constant constant, string headerFileName)
|
||||
{
|
||||
Header? result;
|
||||
string id;
|
||||
string? text;
|
||||
string[] segmentsB;
|
||||
string[] segmentsC;
|
||||
int[] i = new int[] { 0 };
|
||||
WaferSummary waferSummary;
|
||||
List<WaferSummary> collection = new();
|
||||
if (!pages.TryGetValue(headerFileName, out text))
|
||||
throw new Exception();
|
||||
ScanPast(text, i, constant.Date);
|
||||
string date = GetToEOL(text, i);
|
||||
ScanPast(text, i, "Recipe ID:");
|
||||
string recipe = GetBefore(text, i, "LotID:");
|
||||
recipe = recipe.Replace(";", "");
|
||||
if (text.Contains("[]"))
|
||||
id = GetBefore(text, i, "[]");
|
||||
else if (text.Contains("[7]"))
|
||||
id = GetBefore(text, i, "[7]");
|
||||
else
|
||||
id = GetBefore(text, i, "[");
|
||||
ScanPast(text, i, "*");
|
||||
string[] segments = text.Substring(i[0]).Split('*');
|
||||
string[] split = new string[] { Environment.NewLine };
|
||||
foreach (string segment in segments)
|
||||
{
|
||||
segmentsB = segment.Split(split, StringSplitOptions.None);
|
||||
segmentsC = segmentsB[0].Split(' ');
|
||||
waferSummary = new(id: segmentsC.Length < 1 ? string.Empty : segmentsC[0].Trim(),
|
||||
lPDCount: segmentsC.Length < 2 ? string.Empty : segmentsC[1].Trim(),
|
||||
lPDCM2: segmentsC.Length < 3 ? string.Empty : segmentsC[2].Trim(),
|
||||
areaCount: segmentsC.Length < 4 ? string.Empty : segmentsC[3].Trim(),
|
||||
areaTotal: segmentsC.Length < 5 ? string.Empty : segmentsC[4].Trim(),
|
||||
scratchCount: segmentsC.Length < 6 ? string.Empty : segmentsC[5].Trim(),
|
||||
scratchTotal: segmentsC.Length < 7 ? string.Empty : segmentsC[6].Trim(),
|
||||
sumOfDefects: segmentsC.Length < 8 ? string.Empty : segmentsC[7].Trim(),
|
||||
hazeRegion: segmentsC.Length < 9 ? string.Empty : segmentsC[8].Trim(),
|
||||
hazeAverage: segmentsC.Length < 10 ? string.Empty : segmentsC[9].Trim(),
|
||||
grade: segmentsC.Length < 11 ? string.Empty : segmentsC[10].Trim());
|
||||
collection.Add(waferSummary);
|
||||
}
|
||||
ScanPast(text, i, constant.Min);
|
||||
string[] preToEol1 = GetToEOL(text, i, false).Trim().Split(' ');
|
||||
ReadOnlyCollection<string> toEol1 = FixToEolArray(preToEol1);
|
||||
ScanPast(text, i, constant.Max);
|
||||
string[] preToEol2 = GetToEOL(text, i, false).Trim().Split(' ');
|
||||
ReadOnlyCollection<string> toEol2 = FixToEolArray(preToEol2);
|
||||
ScanPast(text, i, constant.Average);
|
||||
string[] preToEol3 = GetToEOL(text, i, false).Trim().Split(' ');
|
||||
ReadOnlyCollection<string> toEol3 = FixToEolArray(preToEol3);
|
||||
ScanPast(text, i, constant.StdDev);
|
||||
string[] preToEol4 = GetToEOL(text, i, false).Trim().Split(' ');
|
||||
ReadOnlyCollection<string> toEol4 = FixToEolArray(preToEol4);
|
||||
result = new(date: date,
|
||||
recipe: recipe,
|
||||
id: id,
|
||||
waferSummary: collection.AsReadOnly(),
|
||||
lPDCountMin: toEol1[0].Trim(),
|
||||
lPDCM2Min: toEol1[1].Trim(),
|
||||
areaCountMin: toEol1[2].Trim(),
|
||||
areaTotalMin: toEol1[3].Trim(),
|
||||
scratchCountMin: toEol1[4].Trim(),
|
||||
scratchTotalMin: toEol1[5].Trim(),
|
||||
sumOfDefectsMin: toEol1[6].Trim(),
|
||||
hazeRegionMin: toEol1[7].Trim(),
|
||||
hazeAverageMin: toEol1[8].Trim(),
|
||||
lPDCountMax: toEol2[0].Trim(),
|
||||
lPDCM2Max: toEol2[1].Trim(),
|
||||
areaCountMax: toEol2[2].Trim(),
|
||||
areaTotalMax: toEol2[3].Trim(),
|
||||
scratchCountMax: toEol2[4].Trim(),
|
||||
scratchTotalMax: toEol2[5].Trim(),
|
||||
sumOfDefectsMax: toEol2[6].Trim(),
|
||||
hazeRegionMax: toEol2[7].Trim(),
|
||||
hazeAverageMax: toEol2[8].Trim(),
|
||||
lPDCountAvg: toEol3[0].Trim(),
|
||||
lPDCM2Avg: toEol3[1].Trim(),
|
||||
areaCountAvg: toEol3[2].Trim(),
|
||||
areaTotalAvg: toEol3[3].Trim(),
|
||||
scratchCountAvg: toEol3[4].Trim(),
|
||||
scratchTotalAvg: toEol3[5].Trim(),
|
||||
sumOfDefectsAvg: toEol3[6].Trim(),
|
||||
hazeRegionAvg: toEol3[7].Trim(),
|
||||
hazeAverageAvg: toEol3[8].Trim(),
|
||||
lPDCountStdDev: toEol4[0].Trim(),
|
||||
lPDCM2StdDev: toEol4[1].Trim(),
|
||||
areaCountStdDev: toEol4[2].Trim(),
|
||||
areaTotalStdDev: toEol4[3].Trim(),
|
||||
scratchCountStdDev: toEol4[4].Trim(),
|
||||
scratchTotalStdDev: toEol4[5].Trim(),
|
||||
sumOfDefectsStdDev: toEol4[6].Trim(),
|
||||
hazeRegionStdDev: toEol4[7].Trim(),
|
||||
hazeAverageStdDev: toEol4[8].Trim());
|
||||
return result;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
[JsonSourceGenerationOptions(WriteIndented = true)]
|
||||
[JsonSerializable(typeof(Header))]
|
||||
internal partial class HeaderSourceGenerationContext : JsonSerializerContext
|
||||
{
|
||||
}
|
193
Adaptation/FileHandlers/pdsf/Row.cs
Normal file
@ -0,0 +1,193 @@
|
||||
using System.Text.Json.Serialization;
|
||||
|
||||
namespace Adaptation.FileHandlers.pdsf;
|
||||
|
||||
#nullable enable
|
||||
|
||||
internal class Row
|
||||
{
|
||||
|
||||
public Row(Run run, int i)
|
||||
{
|
||||
Index = i;
|
||||
//
|
||||
Date = run.Header.Date;
|
||||
Recipe = run.Header.Recipe;
|
||||
Id = run.Header.Id;
|
||||
//
|
||||
WaferId = run.Header.WaferSummary[i].Id;
|
||||
LPDCount = run.Header.WaferSummary[i].LPDCount;
|
||||
LPDCM2 = run.Header.WaferSummary[i].LPDCM2;
|
||||
AreaCount = run.Header.WaferSummary[i].AreaCount;
|
||||
AreaTotal = run.Header.WaferSummary[i].AreaTotal;
|
||||
ScratchCount = run.Header.WaferSummary[i].ScratchCount;
|
||||
ScratchTotal = run.Header.WaferSummary[i].ScratchTotal;
|
||||
SumOfDefects = run.Header.WaferSummary[i].SumOfDefects;
|
||||
HazeRegion = run.Header.WaferSummary[i].HazeRegion;
|
||||
HazeAverage = run.Header.WaferSummary[i].HazeAverage;
|
||||
Grade = run.Header.WaferSummary[i].Grade;
|
||||
//
|
||||
LPDCountMin = run.Header.LPDCountMin;
|
||||
LPDCM2Min = run.Header.LPDCM2Min;
|
||||
AreaCountMin = run.Header.AreaCountMin;
|
||||
AreaTotalMin = run.Header.AreaTotalMin;
|
||||
ScratchCountMin = run.Header.ScratchCountMin;
|
||||
ScratchTotalMin = run.Header.ScratchTotalMin;
|
||||
SumOfDefectsMin = run.Header.SumOfDefectsMin;
|
||||
HazeRegionMin = run.Header.HazeRegionMin;
|
||||
HazeAverageMin = run.Header.HazeAverageMin;
|
||||
LPDCountMax = run.Header.LPDCountMax;
|
||||
LPDCM2Max = run.Header.LPDCM2Max;
|
||||
AreaCountMax = run.Header.AreaCountMax;
|
||||
AreaTotalMax = run.Header.AreaTotalMax;
|
||||
ScratchCountMax = run.Header.ScratchCountMax;
|
||||
ScratchTotalMax = run.Header.ScratchTotalMax;
|
||||
SumOfDefectsMax = run.Header.SumOfDefectsMax;
|
||||
HazeRegionMax = run.Header.HazeRegionMax;
|
||||
HazeAverageMax = run.Header.HazeAverageMax;
|
||||
LPDCountAvg = run.Header.LPDCountAvg;
|
||||
LPDCM2Avg = run.Header.LPDCM2Avg;
|
||||
AreaCountAvg = run.Header.AreaCountAvg;
|
||||
AreaTotalAvg = run.Header.AreaTotalAvg;
|
||||
ScratchCountAvg = run.Header.ScratchCountAvg;
|
||||
ScratchTotalAvg = run.Header.ScratchTotalAvg;
|
||||
SumOfDefectsAvg = run.Header.SumOfDefectsAvg;
|
||||
HazeRegionAvg = run.Header.HazeRegionAvg;
|
||||
HazeAverageAvg = run.Header.HazeAverageAvg;
|
||||
LPDCountStdDev = run.Header.LPDCountStdDev;
|
||||
LPDCM2StdDev = run.Header.LPDCM2StdDev;
|
||||
AreaCountStdDev = run.Header.AreaCountStdDev;
|
||||
AreaTotalStdDev = run.Header.AreaTotalStdDev;
|
||||
ScratchCountStdDev = run.Header.ScratchCountStdDev;
|
||||
ScratchTotalStdDev = run.Header.ScratchTotalStdDev;
|
||||
SumOfDefectsStdDev = run.Header.SumOfDefectsStdDev;
|
||||
HazeRegionStdDev = run.Header.HazeRegionStdDev;
|
||||
HazeAverageStdDev = run.Header.HazeAverageStdDev;
|
||||
//
|
||||
WaferDate = run.Wafers[i].Date;
|
||||
Comments = run.Wafers[i].Comments;
|
||||
Sort = run.Wafers[i].Sort;
|
||||
WaferLPDCount = run.Wafers[i].LPDCount;
|
||||
WaferLPDCM2 = run.Wafers[i].LPDCM2;
|
||||
Bin1 = run.Wafers[i].Bin1;
|
||||
Bin2 = run.Wafers[i].Bin2;
|
||||
Bin3 = run.Wafers[i].Bin3;
|
||||
Bin4 = run.Wafers[i].Bin4;
|
||||
Bin5 = run.Wafers[i].Bin5;
|
||||
Bin6 = run.Wafers[i].Bin6;
|
||||
Bin7 = run.Wafers[i].Bin7;
|
||||
Bin8 = run.Wafers[i].Bin8;
|
||||
Mean = run.Wafers[i].Mean;
|
||||
StdDev = run.Wafers[i].StdDev;
|
||||
WaferAreaCount = run.Wafers[i].AreaCount;
|
||||
WaferAreaTotal = run.Wafers[i].AreaTotal;
|
||||
WaferScratchCount = run.Wafers[i].ScratchCount;
|
||||
WaferScratchTotal = run.Wafers[i].ScratchTotal;
|
||||
WaferSumOfDefects = run.Wafers[i].SumOfDefects;
|
||||
WaferHazeRegion = run.Wafers[i].HazeRegion;
|
||||
WaferHazeAverage = run.Wafers[i].HazeAverage;
|
||||
HazePeak = run.Wafers[i].HazePeak;
|
||||
Laser = run.Wafers[i].Laser;
|
||||
Gain = run.Wafers[i].Gain;
|
||||
Diameter = run.Wafers[i].Diameter;
|
||||
Thresh = run.Wafers[i].Thresh;
|
||||
Exclusion = run.Wafers[i].Exclusion;
|
||||
HazeRng = run.Wafers[i].HazeRng;
|
||||
Thruput = run.Wafers[i].Thruput;
|
||||
WaferRecipe = run.Wafers[i].Recipe;
|
||||
}
|
||||
|
||||
public int Index { get; }
|
||||
//
|
||||
public string Date { get; }
|
||||
public string Recipe { get; }
|
||||
public string Id { get; }
|
||||
//
|
||||
public string WaferId { get; }
|
||||
public string LPDCount { get; }
|
||||
public string LPDCM2 { get; }
|
||||
public string AreaCount { get; }
|
||||
public string AreaTotal { get; }
|
||||
public string ScratchCount { get; }
|
||||
public string ScratchTotal { get; }
|
||||
public string SumOfDefects { get; }
|
||||
public string HazeRegion { get; }
|
||||
public string HazeAverage { get; }
|
||||
public string Grade { get; }
|
||||
//
|
||||
public string LPDCountMin { get; }
|
||||
public string LPDCM2Min { get; }
|
||||
public string AreaCountMin { get; }
|
||||
public string AreaTotalMin { get; }
|
||||
public string ScratchCountMin { get; }
|
||||
public string ScratchTotalMin { get; }
|
||||
public string SumOfDefectsMin { get; }
|
||||
public string HazeRegionMin { get; }
|
||||
public string HazeAverageMin { get; }
|
||||
public string LPDCountMax { get; }
|
||||
public string LPDCM2Max { get; }
|
||||
public string AreaCountMax { get; }
|
||||
public string AreaTotalMax { get; }
|
||||
public string ScratchCountMax { get; }
|
||||
public string ScratchTotalMax { get; }
|
||||
public string SumOfDefectsMax { get; }
|
||||
public string HazeRegionMax { get; }
|
||||
public string HazeAverageMax { get; }
|
||||
public string LPDCountAvg { get; }
|
||||
public string LPDCM2Avg { get; }
|
||||
public string AreaCountAvg { get; }
|
||||
public string AreaTotalAvg { get; }
|
||||
public string ScratchCountAvg { get; }
|
||||
public string ScratchTotalAvg { get; }
|
||||
public string SumOfDefectsAvg { get; }
|
||||
public string HazeRegionAvg { get; }
|
||||
public string HazeAverageAvg { get; }
|
||||
public string LPDCountStdDev { get; }
|
||||
public string LPDCM2StdDev { get; }
|
||||
public string AreaCountStdDev { get; }
|
||||
public string AreaTotalStdDev { get; }
|
||||
public string ScratchCountStdDev { get; }
|
||||
public string ScratchTotalStdDev { get; }
|
||||
public string SumOfDefectsStdDev { get; }
|
||||
public string HazeRegionStdDev { get; }
|
||||
public string HazeAverageStdDev { get; }
|
||||
//
|
||||
public string WaferDate { get; }
|
||||
public string Comments { get; }
|
||||
public string Sort { get; }
|
||||
public string WaferLPDCount { get; }
|
||||
public string WaferLPDCM2 { get; }
|
||||
public string Bin1 { get; }
|
||||
public string Bin2 { get; }
|
||||
public string Bin3 { get; }
|
||||
public string Bin4 { get; }
|
||||
public string Bin5 { get; }
|
||||
public string Bin6 { get; }
|
||||
public string Bin7 { get; }
|
||||
public string Bin8 { get; }
|
||||
public string Mean { get; }
|
||||
public string StdDev { get; }
|
||||
public string WaferAreaCount { get; }
|
||||
public string WaferAreaTotal { get; }
|
||||
public string WaferScratchCount { get; }
|
||||
public string WaferScratchTotal { get; }
|
||||
public string WaferSumOfDefects { get; }
|
||||
public string WaferHazeRegion { get; }
|
||||
public string WaferHazeAverage { get; }
|
||||
public string HazePeak { get; }
|
||||
public string Laser { get; }
|
||||
public string Gain { get; }
|
||||
public string Diameter { get; }
|
||||
public string Thresh { get; }
|
||||
public string Exclusion { get; }
|
||||
public string HazeRng { get; }
|
||||
public string Thruput { get; }
|
||||
public string WaferRecipe { get; }
|
||||
|
||||
}
|
||||
|
||||
[JsonSourceGenerationOptions(WriteIndented = true)]
|
||||
[JsonSerializable(typeof(Row))]
|
||||
internal partial class RowSourceGenerationContext : JsonSerializerContext
|
||||
{
|
||||
}
|
146
Adaptation/FileHandlers/pdsf/Run.cs
Normal file
@ -0,0 +1,146 @@
|
||||
using Adaptation.Shared;
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Collections.ObjectModel;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Text;
|
||||
using System.Text.Json;
|
||||
using System.Text.Json.Serialization;
|
||||
|
||||
namespace Adaptation.FileHandlers.pdsf;
|
||||
|
||||
#nullable enable
|
||||
|
||||
internal class Run
|
||||
{
|
||||
|
||||
public Header Header { get; }
|
||||
public ReadOnlyCollection<Wafer> Wafers { get; }
|
||||
|
||||
public Run(Header header, ReadOnlyCollection<Wafer> wafers)
|
||||
{
|
||||
Header = header;
|
||||
Wafers = wafers;
|
||||
}
|
||||
|
||||
private static ReadOnlyCollection<Wafer> GetLastWaferForEachSlot(ReadOnlyDictionary<string, string> pages, Constant constant, string headerFileName, Header header)
|
||||
{
|
||||
List<Wafer> results = new();
|
||||
string id;
|
||||
Wafer wafer;
|
||||
ReadOnlyCollection<Wafer>? wafers;
|
||||
ReadOnlyDictionary<string, ReadOnlyCollection<Wafer>> keyValuePairs = Wafer.Get(pages, constant, headerFileName);
|
||||
ReadOnlyCollection<string> waferIds = GetWaferIds(header);
|
||||
for (int i = 0; i < waferIds.Count; i++)
|
||||
{
|
||||
id = waferIds[i];
|
||||
if (!keyValuePairs.TryGetValue(id, out wafers) || wafers.Count == 0)
|
||||
wafer = Wafer.Get(id);
|
||||
else
|
||||
wafer = (from l in wafers where l.Recipe == header.Recipe select l).Last();
|
||||
if (wafer is null)
|
||||
break;
|
||||
results.Add(wafer);
|
||||
}
|
||||
return results.AsReadOnly();
|
||||
}
|
||||
|
||||
private static void WriteJson(Logistics logistics, List<FileInfo> fileInfoCollection, Run result)
|
||||
{
|
||||
FileInfo fileInfo = new($"{logistics.ReportFullPath}.run.json");
|
||||
string json = JsonSerializer.Serialize(result, RunSourceGenerationContext.Default.Run);
|
||||
File.WriteAllText(fileInfo.FullName, json);
|
||||
File.SetLastWriteTime(fileInfo.FullName, logistics.DateTimeFromSequence);
|
||||
fileInfoCollection.Add(fileInfo);
|
||||
}
|
||||
|
||||
private static ReadOnlyCollection<string> GetLines(Logistics logistics, JsonElement[]? jsonElements)
|
||||
{
|
||||
List<string> results = new();
|
||||
int columns = 0;
|
||||
StringBuilder stringBuilder = new();
|
||||
results.Add($"\"Count\",{jsonElements?.Length}");
|
||||
results.Add($"\"{nameof(logistics.Sequence)}\",\"{logistics.Sequence}\"");
|
||||
results.Add($"\"{nameof(logistics.MesEntity)}\",\"{logistics.MesEntity}\"");
|
||||
string dateTimeFromSequence = logistics.DateTimeFromSequence.ToString("MM/dd/yyyy hh:mm:ss tt");
|
||||
for (int i = 0; i < jsonElements?.Length;)
|
||||
{
|
||||
_ = stringBuilder.Append('"').Append(nameof(logistics.DateTimeFromSequence)).Append('"').Append(',');
|
||||
foreach (JsonProperty jsonProperty in jsonElements[0].EnumerateObject())
|
||||
{
|
||||
columns += 1;
|
||||
_ = stringBuilder.Append('"').Append(jsonProperty.Name).Append('"').Append(',');
|
||||
}
|
||||
break;
|
||||
}
|
||||
if (jsonElements?.Length != 0)
|
||||
_ = stringBuilder.Remove(stringBuilder.Length - 1, 1);
|
||||
results.Add(stringBuilder.ToString());
|
||||
for (int i = 0; i < jsonElements?.Length; i++)
|
||||
{
|
||||
_ = stringBuilder.Clear();
|
||||
_ = stringBuilder.Append('"').Append(dateTimeFromSequence).Append('"').Append(',');
|
||||
foreach (JsonProperty jsonProperty in jsonElements[i].EnumerateObject())
|
||||
{
|
||||
if (jsonProperty.Value.ValueKind == JsonValueKind.Object)
|
||||
_ = stringBuilder.Append(',');
|
||||
else if (jsonProperty.Value.ValueKind != JsonValueKind.String)
|
||||
_ = stringBuilder.Append(jsonProperty.Value).Append(',');
|
||||
else
|
||||
_ = stringBuilder.Append('"').Append(jsonProperty.Value).Append('"').Append(',');
|
||||
}
|
||||
_ = stringBuilder.Remove(stringBuilder.Length - 1, 1);
|
||||
results.Add(stringBuilder.ToString());
|
||||
}
|
||||
return results.AsReadOnly();
|
||||
}
|
||||
|
||||
private static void WriteCommaSeparatedValues(Logistics logistics, Run run)
|
||||
{
|
||||
List<Row> results = new();
|
||||
Row row;
|
||||
for (int i = 0; i < run.Wafers.Count; i++)
|
||||
{
|
||||
row = new(run, i);
|
||||
results.Add(row);
|
||||
}
|
||||
string json = JsonSerializer.Serialize(results);
|
||||
JsonElement[]? jsonElements = JsonSerializer.Deserialize<JsonElement[]>(json);
|
||||
ReadOnlyCollection<string> lines = GetLines(logistics, jsonElements);
|
||||
File.WriteAllText($"{logistics.ReportFullPath}.csv", string.Join(Environment.NewLine, lines));
|
||||
}
|
||||
|
||||
private static ReadOnlyCollection<string> GetWaferIds(Header header)
|
||||
{
|
||||
List<string> results = new();
|
||||
foreach (WaferSummary waferSummary in header.WaferSummary)
|
||||
results.Add(waferSummary.Id);
|
||||
return results.AsReadOnly();
|
||||
}
|
||||
|
||||
internal static Run? Get(Logistics logistics, List<FileInfo> fileInfoCollection, ReadOnlyDictionary<string, string> pages)
|
||||
{
|
||||
Run? result;
|
||||
Constant constant = new();
|
||||
string headerFileName = pages.ElementAt(pages.Count - 1).Key;
|
||||
Header? header = Header.Get(pages, constant, headerFileName);
|
||||
if (header is null)
|
||||
result = null;
|
||||
else
|
||||
{
|
||||
ReadOnlyCollection<Wafer> wafers = GetLastWaferForEachSlot(pages, constant, headerFileName, header);
|
||||
result = new(header, wafers);
|
||||
WriteJson(logistics, fileInfoCollection, result);
|
||||
WriteCommaSeparatedValues(logistics, result);
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
[JsonSourceGenerationOptions(WriteIndented = true)]
|
||||
[JsonSerializable(typeof(Run))]
|
||||
internal partial class RunSourceGenerationContext : JsonSerializerContext
|
||||
{
|
||||
}
|
240
Adaptation/FileHandlers/pdsf/Wafer.cs
Normal file
@ -0,0 +1,240 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Collections.ObjectModel;
|
||||
using System.Linq;
|
||||
using System.Text.Json.Serialization;
|
||||
|
||||
namespace Adaptation.FileHandlers.pdsf;
|
||||
|
||||
#nullable enable
|
||||
|
||||
public class Wafer
|
||||
{
|
||||
|
||||
public Wafer(string date, string id, string comments, string sort, string lPDCount, string lPDCM2, string bin1, string bin2, string bin3, string bin4, string bin5, string bin6, string bin7, string bin8, string mean, string stdDev, string areaCount, string areaTotal, string scratchCount, string scratchTotal, string sumOfDefects, string hazeRegion, string hazeAverage, string hazePeak, string laser, string gain, string diameter, string thresh, string exclusion, string hazeRng, string thruput, string recipe)
|
||||
{
|
||||
Date = date;
|
||||
Id = id;
|
||||
Comments = comments;
|
||||
Sort = sort;
|
||||
LPDCount = lPDCount;
|
||||
LPDCM2 = lPDCM2;
|
||||
Bin1 = bin1;
|
||||
Bin2 = bin2;
|
||||
Bin3 = bin3;
|
||||
Bin4 = bin4;
|
||||
Bin5 = bin5;
|
||||
Bin6 = bin6;
|
||||
Bin7 = bin7;
|
||||
Bin8 = bin8;
|
||||
Mean = mean;
|
||||
StdDev = stdDev;
|
||||
AreaCount = areaCount;
|
||||
AreaTotal = areaTotal;
|
||||
ScratchCount = scratchCount;
|
||||
ScratchTotal = scratchTotal;
|
||||
SumOfDefects = sumOfDefects;
|
||||
HazeRegion = hazeRegion;
|
||||
HazeAverage = hazeAverage;
|
||||
HazePeak = hazePeak;
|
||||
Laser = laser;
|
||||
Gain = gain;
|
||||
Diameter = diameter;
|
||||
Thresh = thresh;
|
||||
Exclusion = exclusion;
|
||||
HazeRng = hazeRng;
|
||||
Thruput = thruput;
|
||||
Recipe = recipe;
|
||||
}
|
||||
|
||||
internal static Wafer Get(string id) =>
|
||||
new(date: string.Empty,
|
||||
id: id,
|
||||
comments: string.Empty,
|
||||
sort: string.Empty,
|
||||
lPDCount: string.Empty,
|
||||
lPDCM2: string.Empty,
|
||||
bin1: string.Empty,
|
||||
bin2: string.Empty,
|
||||
bin3: string.Empty,
|
||||
bin4: string.Empty,
|
||||
bin5: string.Empty,
|
||||
bin6: string.Empty,
|
||||
bin7: string.Empty,
|
||||
bin8: string.Empty,
|
||||
mean: string.Empty,
|
||||
stdDev: string.Empty,
|
||||
areaCount: string.Empty,
|
||||
areaTotal: string.Empty,
|
||||
scratchCount: string.Empty,
|
||||
scratchTotal: string.Empty,
|
||||
sumOfDefects: string.Empty,
|
||||
hazeRegion: string.Empty,
|
||||
hazeAverage: string.Empty,
|
||||
hazePeak: string.Empty,
|
||||
laser: string.Empty,
|
||||
gain: string.Empty,
|
||||
diameter: string.Empty,
|
||||
thresh: string.Empty,
|
||||
exclusion: string.Empty,
|
||||
hazeRng: string.Empty,
|
||||
thruput: string.Empty,
|
||||
recipe: string.Empty);
|
||||
|
||||
public string Date { get; }
|
||||
public string Id { get; }
|
||||
public string Comments { get; }
|
||||
public string Sort { get; }
|
||||
public string LPDCount { get; }
|
||||
public string LPDCM2 { get; }
|
||||
public string Bin1 { get; }
|
||||
public string Bin2 { get; }
|
||||
public string Bin3 { get; }
|
||||
public string Bin4 { get; }
|
||||
public string Bin5 { get; }
|
||||
public string Bin6 { get; }
|
||||
public string Bin7 { get; }
|
||||
public string Bin8 { get; }
|
||||
public string Mean { get; }
|
||||
public string StdDev { get; }
|
||||
public string AreaCount { get; }
|
||||
public string AreaTotal { get; }
|
||||
public string ScratchCount { get; }
|
||||
public string ScratchTotal { get; }
|
||||
public string SumOfDefects { get; }
|
||||
public string HazeRegion { get; }
|
||||
public string HazeAverage { get; }
|
||||
public string HazePeak { get; }
|
||||
public string Laser { get; }
|
||||
public string Gain { get; }
|
||||
public string Diameter { get; }
|
||||
public string Thresh { get; }
|
||||
public string Exclusion { get; }
|
||||
public string HazeRng { get; }
|
||||
public string Thruput { get; }
|
||||
public string Recipe { get; }
|
||||
|
||||
internal static ReadOnlyDictionary<string, ReadOnlyCollection<Wafer>> Get(ReadOnlyDictionary<string, string> pages, Constant constant, string headerFileName)
|
||||
{
|
||||
Dictionary<string, ReadOnlyCollection<Wafer>> results = new();
|
||||
Wafer wafer;
|
||||
string? text;
|
||||
List<string> stringList;
|
||||
int[] i = new int[] { 0 };
|
||||
Dictionary<string, List<Wafer>> keyValuePairs = new();
|
||||
foreach (KeyValuePair<string, string> keyValuePair in pages)
|
||||
{
|
||||
if (keyValuePair.Key == headerFileName)
|
||||
continue;
|
||||
if (!pages.ContainsKey(keyValuePair.Key))
|
||||
throw new Exception();
|
||||
i[0] = 0;
|
||||
stringList = new();
|
||||
if (!pages.TryGetValue(keyValuePair.Key, out text))
|
||||
throw new Exception();
|
||||
if (string.IsNullOrEmpty(text) || !text.Contains(constant.Id))
|
||||
continue;
|
||||
Header.ScanPast(text, i, constant.Date);
|
||||
string date = Header.GetToEOL(text, i);
|
||||
Header.ScanPast(text, i, constant.Id);
|
||||
string id = Header.GetToEOL(text, i);
|
||||
if (id.Length > 5)
|
||||
id = string.Concat(id.Substring(0, 5), "... - ***");
|
||||
id = id.Replace("*", "");
|
||||
Header.ScanPast(text, i, "Comments:");
|
||||
string comments = Header.GetToEOL(text, i);
|
||||
Header.ScanPast(text, i, "Sort:");
|
||||
string sort = Header.GetToEOL(text, i);
|
||||
Header.ScanPast(text, i, "LPD Count:");
|
||||
string lPDCount = Header.GetToEOL(text, i);
|
||||
Header.ScanPast(text, i, "LPD / cm2:");
|
||||
string lPDCM2 = Header.GetToEOL(text, i);
|
||||
while (Header.GetBefore(text, i, ":").Contains("Bin"))
|
||||
stringList.Add(Header.GetToEOL(text, i));
|
||||
string bin1 = stringList.Count >= 1 ? stringList[0] : string.Empty;
|
||||
string bin2 = stringList.Count >= 2 ? stringList[1] : string.Empty;
|
||||
string bin3 = stringList.Count >= 3 ? stringList[2] : string.Empty;
|
||||
string bin4 = stringList.Count >= 4 ? stringList[3] : string.Empty;
|
||||
string bin5 = stringList.Count >= 5 ? stringList[4] : string.Empty;
|
||||
string bin6 = stringList.Count >= 6 ? stringList[5] : string.Empty;
|
||||
string bin7 = stringList.Count >= 7 ? stringList[6] : string.Empty;
|
||||
string bin8 = stringList.Count >= 8 ? stringList[7] : string.Empty;
|
||||
string mean = Header.GetToEOL(text, i);
|
||||
Header.ScanPast(text, i, "Std Dev:");
|
||||
string stdDev = Header.GetToEOL(text, i);
|
||||
Header.ScanPast(text, i, "Area Count:");
|
||||
string areaCount = Header.GetToEOL(text, i);
|
||||
Header.ScanPast(text, i, "Area Total:");
|
||||
string areaTotal = Header.GetToEOL(text, i);
|
||||
Header.ScanPast(text, i, "Scratch Count:");
|
||||
string scratchCount = Header.GetToEOL(text, i);
|
||||
Header.ScanPast(text, i, "Scratch Total:");
|
||||
string scratchTotal = Header.GetToEOL(text, i);
|
||||
Header.ScanPast(text, i, "Sum of All Defects:");
|
||||
string sumOfDefects = Header.GetToEOL(text, i);
|
||||
Header.ScanPast(text, i, "Haze Region:");
|
||||
string hazeRegion = Header.GetToEOL(text, i);
|
||||
Header.ScanPast(text, i, "Haze Average:");
|
||||
string hazeAverage = Header.GetToEOL(text, i);
|
||||
Header.ScanPast(text, i, "Haze Peak:");
|
||||
string hazePeak = Header.GetToEOL(text, i);
|
||||
Header.ScanPast(text, i, "Laser:");
|
||||
string laser = Header.GetBefore(text, i, "Gain:");
|
||||
string gain = Header.GetBefore(text, i, "Diameter:");
|
||||
string diameter = Header.GetToEOL(text, i);
|
||||
Header.ScanPast(text, i, "Thresh:");
|
||||
string thresh = Header.GetBefore(text, i, "Exclusion:");
|
||||
string exclusion = Header.GetToEOL(text, i);
|
||||
Header.ScanPast(text, i, "Haze Rng:");
|
||||
string hazeRng = Header.GetBefore(text, i, "Thruput:");
|
||||
string thruput = Header.GetToEOL(text, i);
|
||||
Header.ScanPast(text, i, "Recipe ID:");
|
||||
string recipe = Header.GetToEOL(text, i);
|
||||
wafer = new(date: date,
|
||||
id: id,
|
||||
comments: comments,
|
||||
sort: sort,
|
||||
lPDCount: lPDCount,
|
||||
lPDCM2: lPDCM2,
|
||||
bin1: bin1,
|
||||
bin2: bin2,
|
||||
bin3: bin3,
|
||||
bin4: bin4,
|
||||
bin5: bin5,
|
||||
bin6: bin6,
|
||||
bin7: bin7,
|
||||
bin8: bin8,
|
||||
mean: mean,
|
||||
stdDev: stdDev,
|
||||
areaCount: areaCount,
|
||||
areaTotal: areaTotal,
|
||||
scratchCount: scratchCount,
|
||||
scratchTotal: scratchTotal,
|
||||
sumOfDefects: sumOfDefects,
|
||||
hazeRegion: hazeRegion,
|
||||
hazeAverage: hazeAverage,
|
||||
hazePeak: hazePeak,
|
||||
laser: laser,
|
||||
gain: gain,
|
||||
diameter: diameter,
|
||||
thresh: thresh,
|
||||
exclusion: exclusion,
|
||||
hazeRng: hazeRng,
|
||||
thruput: thruput,
|
||||
recipe: recipe);
|
||||
if (!keyValuePairs.ContainsKey(id))
|
||||
keyValuePairs.Add(id, new List<Wafer>());
|
||||
keyValuePairs[id].Add(wafer);
|
||||
}
|
||||
foreach (KeyValuePair<string, List<Wafer>> keyValuePair in keyValuePairs)
|
||||
results.Add(keyValuePair.Key, keyValuePair.Value.AsReadOnly());
|
||||
return new(results);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
[JsonSourceGenerationOptions(WriteIndented = true)]
|
||||
[JsonSerializable(typeof(Wafer))]
|
||||
internal partial class WaferSourceGenerationContext : JsonSerializerContext
|
||||
{
|
||||
}
|
43
Adaptation/FileHandlers/pdsf/WaferSummary.cs
Normal file
@ -0,0 +1,43 @@
|
||||
using System.Text.Json.Serialization;
|
||||
|
||||
namespace Adaptation.FileHandlers.pdsf;
|
||||
|
||||
#nullable enable
|
||||
|
||||
public class WaferSummary
|
||||
{
|
||||
|
||||
public WaferSummary(string id, string lPDCount, string lPDCM2, string areaCount, string areaTotal, string scratchCount, string scratchTotal, string sumOfDefects, string hazeRegion, string hazeAverage, string grade)
|
||||
{
|
||||
Id = id;
|
||||
LPDCount = lPDCount;
|
||||
LPDCM2 = lPDCM2;
|
||||
AreaCount = areaCount;
|
||||
AreaTotal = areaTotal;
|
||||
ScratchCount = scratchCount;
|
||||
ScratchTotal = scratchTotal;
|
||||
SumOfDefects = sumOfDefects;
|
||||
HazeRegion = hazeRegion;
|
||||
HazeAverage = hazeAverage;
|
||||
Grade = grade;
|
||||
}
|
||||
|
||||
public string Id { get; }
|
||||
public string LPDCount { get; }
|
||||
public string LPDCM2 { get; }
|
||||
public string AreaCount { get; }
|
||||
public string AreaTotal { get; }
|
||||
public string ScratchCount { get; }
|
||||
public string ScratchTotal { get; }
|
||||
public string SumOfDefects { get; }
|
||||
public string HazeRegion { get; }
|
||||
public string HazeAverage { get; }
|
||||
public string Grade { get; }
|
||||
|
||||
}
|
||||
|
||||
[JsonSourceGenerationOptions(WriteIndented = true)]
|
||||
[JsonSerializable(typeof(WaferSummary))]
|
||||
internal partial class WaferSummarySourceGenerationContext : JsonSerializerContext
|
||||
{
|
||||
}
|
@ -3,6 +3,7 @@ using System;
using System.Collections.Generic;
using System.Linq;
using System.Text.Json;
using System.Text.Json.Serialization;

namespace Adaptation.Shared.Duplicator;

@ -178,4 +179,16 @@ public class Description : IDescription, Properties.IDescription
|
||||
|
||||
internal static string GetDateFormat() => "MM/dd/yyyy hh:mm:ss tt";
|
||||
|
||||
}
|
||||
|
||||
[JsonSourceGenerationOptions(WriteIndented = true, NumberHandling = JsonNumberHandling.AllowReadingFromString | JsonNumberHandling.WriteAsString)]
|
||||
[JsonSerializable(typeof(Description))]
|
||||
internal partial class SharedDescriptionSourceGenerationContext : JsonSerializerContext
|
||||
{
|
||||
}
|
||||
|
||||
[JsonSourceGenerationOptions(WriteIndented = true, NumberHandling = JsonNumberHandling.AllowReadingFromString | JsonNumberHandling.WriteAsString)]
|
||||
[JsonSerializable(typeof(Description[]))]
|
||||
internal partial class SharedDescriptionArraySourceGenerationContext : JsonSerializerContext
|
||||
{
|
||||
}
|
@ -9,7 +9,6 @@ using System.IO;
|
||||
using System.Linq;
|
||||
using System.Text;
|
||||
using System.Text.Json;
|
||||
using System.Text.Json.Serialization;
|
||||
using System.Threading;
|
||||
|
||||
namespace Adaptation.Shared;
|
||||
@ -447,12 +446,13 @@ public class FileRead : Properties.IFileRead
|
||||
{
|
||||
List<Properties.IDescription> results = new();
|
||||
Duplicator.Description description;
|
||||
JsonSerializerOptions jsonSerializerOptions = new() { NumberHandling = JsonNumberHandling.AllowReadingFromString | JsonNumberHandling.WriteAsString };
|
||||
foreach (JsonElement jsonElement in jsonElements)
|
||||
{
|
||||
if (jsonElement.ValueKind != JsonValueKind.Object)
|
||||
throw new Exception();
|
||||
description = JsonSerializer.Deserialize<Duplicator.Description>(jsonElement.ToString(), jsonSerializerOptions);
|
||||
description = JsonSerializer.Deserialize(jsonElement.ToString(), Duplicator.SharedDescriptionSourceGenerationContext.Default.Description);
|
||||
if (description is null)
|
||||
continue;
|
||||
results.Add(description);
|
||||
}
|
||||
return results;
|
||||
|
@ -654,6 +654,17 @@ internal class ProcessDataStandardFormat
|
||||
return results;
|
||||
}
|
||||
|
||||
internal static JsonElement[] GetArray(string reportFullPath, string[] lines, ProcessDataStandardFormat processDataStandardFormat)
|
||||
{
|
||||
JsonElement[] results;
|
||||
string? json = GetRecordsJson(reportFullPath, lines);
|
||||
if (string.IsNullOrEmpty(json))
|
||||
results = GetArray(processDataStandardFormat);
|
||||
else
|
||||
results = JsonSerializer.Deserialize(json, JsonElementCollectionSourceGenerationContext.Default.JsonElementArray) ?? throw new Exception();
|
||||
return results;
|
||||
}
|
||||
|
||||
internal static string GetPDSFText(IFileRead fileRead, Logistics logistics, JsonElement[] jsonElements, string logisticsText)
|
||||
{
|
||||
string result;
|
||||
@ -956,6 +967,26 @@ internal class ProcessDataStandardFormat
|
||||
return result;
|
||||
}
|
||||
|
||||
private static string? GetRecordsJson(string reportFullPath, string[] lines)
|
||||
{
|
||||
string? result;
|
||||
bool foundRecords = false;
|
||||
List<string> results = new();
|
||||
lines ??= File.ReadAllLines(reportFullPath);
|
||||
foreach (string line in lines)
|
||||
{
|
||||
if (line.StartsWith("\"Records\""))
|
||||
foundRecords = true;
|
||||
if (!foundRecords)
|
||||
continue;
|
||||
if (line == "],")
|
||||
break;
|
||||
results.Add(line);
|
||||
}
|
||||
result = results.Count == 0 ? null : $"{string.Join(Environment.NewLine, results.Skip(1))}{Environment.NewLine}]";
|
||||
return result;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
[JsonSourceGenerationOptions(WriteIndented = true)]
|
||||
|
@ -61,5 +61,16 @@ public class TENCOR2 : EAFLoggingUnitTesting
|
||||
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Exit"));
|
||||
}
|
||||
|
||||
[Ignore]
|
||||
[TestMethod]
|
||||
public void Production__v2_60_0__TENCOR2__pdsf()
|
||||
{
|
||||
string check = "*EQP_*.pdsf";
|
||||
MethodBase methodBase = new StackFrame().GetMethod();
|
||||
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Getting configuration"));
|
||||
_ = AdaptationTesting.GetWriteConfigurationGetFileRead(methodBase, check, EAFLoggingUnitTesting.AdaptationTesting);
|
||||
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Exit"));
|
||||
}
|
||||
|
||||
}
|
||||
#endif
|
@ -61,5 +61,16 @@ public class TENCOR3 : EAFLoggingUnitTesting
|
||||
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Exit"));
|
||||
}
|
||||
|
||||
[Ignore]
|
||||
[TestMethod]
|
||||
public void Production__v2_60_0__TENCOR3__TransmissionControlProtocol()
|
||||
{
|
||||
string check = "Statistics";
|
||||
MethodBase methodBase = new StackFrame().GetMethod();
|
||||
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Getting configuration"));
|
||||
_ = AdaptationTesting.GetWriteConfigurationGetFileRead(methodBase, check, EAFLoggingUnitTesting.AdaptationTesting);
|
||||
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Exit"));
|
||||
}
|
||||
|
||||
}
|
||||
#endif
|
@ -1,4 +1,5 @@
|
||||
#if true
|
||||
using Adaptation._Tests.Shared;
|
||||
using Adaptation.Shared;
|
||||
using Adaptation.Shared.Methods;
|
||||
using Microsoft.VisualStudio.TestTools.UnitTesting;
|
||||
@ -50,7 +51,7 @@ public class TENCOR2
|
||||
string[] variables = _TENCOR2.AdaptationTesting.GetVariables(methodBase, check, validatePDSF);
|
||||
IFileRead fileRead = _TENCOR2.AdaptationTesting.Get(methodBase, sourceFileLocation: variables[2], sourceFileFilter: variables[3], useCyclicalForDescription: false);
|
||||
Logistics logistics = new(fileRead);
|
||||
_ = Shared.AdaptationTesting.ReExtractCompareUpdatePassDirectory(variables, fileRead, logistics, validatePDSF);
|
||||
_ = AdaptationTesting.ReExtractCompareUpdatePassDirectory(variables, fileRead, logistics, validatePDSF);
|
||||
NonThrowTryCatch();
|
||||
}
|
||||
|
||||
@ -67,7 +68,22 @@ public class TENCOR2
|
||||
string[] variables = _TENCOR2.AdaptationTesting.GetVariables(methodBase, check, validatePDSF);
|
||||
IFileRead fileRead = _TENCOR2.AdaptationTesting.Get(methodBase, sourceFileLocation: variables[2], sourceFileFilter: variables[3], useCyclicalForDescription: false);
|
||||
Logistics logistics = new(fileRead);
|
||||
_ = Shared.AdaptationTesting.ReExtractCompareUpdatePassDirectory(variables, fileRead, logistics, validatePDSF);
|
||||
_ = AdaptationTesting.ReExtractCompareUpdatePassDirectory(variables, fileRead, logistics, validatePDSF);
|
||||
NonThrowTryCatch();
|
||||
}
|
||||
|
||||
[Ignore]
|
||||
[TestMethod]
|
||||
public void Production__v2_60_0__TENCOR2__pdsf__Normal()
|
||||
{
|
||||
bool validatePDSF = false;
|
||||
string check = "*EQP_*.pdsf";
|
||||
_TENCOR2.Production__v2_60_0__TENCOR2__pdsf();
|
||||
MethodBase methodBase = new StackFrame().GetMethod();
|
||||
string[] variables = _TENCOR2.AdaptationTesting.GetVariables(methodBase, check, validatePDSF);
|
||||
IFileRead fileRead = _TENCOR2.AdaptationTesting.Get(methodBase, sourceFileLocation: variables[2], sourceFileFilter: variables[3], useCyclicalForDescription: false);
|
||||
Logistics logistics = new(fileRead);
|
||||
_ = AdaptationTesting.ReExtractCompareUpdatePassDirectory(variables, fileRead, logistics, validatePDSF);
|
||||
NonThrowTryCatch();
|
||||
}
|
||||
|
||||
|
@ -54,5 +54,20 @@ public class TENCOR3
|
||||
NonThrowTryCatch();
|
||||
}
|
||||
|
||||
[Ignore]
|
||||
[TestMethod]
|
||||
public void Production__v2_60_0__TENCOR3__TransmissionControlProtocol638930712297063335__Normal()
|
||||
{
|
||||
bool validatePDSF = false;
|
||||
string check = "Statistics";
|
||||
MethodBase methodBase = new StackFrame().GetMethod();
|
||||
_TENCOR3.Production__v2_60_0__TENCOR3__TransmissionControlProtocol();
|
||||
string[] variables = _TENCOR3.AdaptationTesting.GetVariables(methodBase, check, validatePDSF);
|
||||
IFileRead fileRead = _TENCOR3.AdaptationTesting.Get(methodBase, sourceFileLocation: variables[2], sourceFileFilter: variables[3], useCyclicalForDescription: false);
|
||||
Logistics logistics = new(fileRead);
|
||||
_ = Shared.AdaptationTesting.ReExtractCompareUpdatePassDirectory(variables, fileRead, logistics, validatePDSF);
|
||||
NonThrowTryCatch();
|
||||
}
|
||||
|
||||
}
|
||||
#endif
|