Refactor QS408M and PDSF File Handlers

- Updated Detail class to include JsonSourceGenerationOptions and JsonSerializable attributes for source generation.
- Modified ProcessData class to use the source generation context for Description deserialization (see the sketch after this list).
- Renamed RowSourceGenerationContext to QS408MRowSourceGenerationContext for clarity.
- Updated Run class to use QS408MRunSourceGenerationContext for serialization.
- Enhanced Description class with source generation attributes.
- Refactored FileRead class to use source generation context for Description deserialization.
- Added new methods in ProcessDataStandardFormat for JSON element array extraction.
- Introduced new PDSF file handler classes: Body, Constant, FileRead, Footer, Header, Row, Run, and Site.
- Implemented logic for parsing and handling PDSF data structures.
- Added unit tests for PDSF processing to ensure functionality.
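For context, the QS408M and Duplicator changes above all follow the standard System.Text.Json source-generation pattern: annotate the DTO properties with [JsonPropertyName], declare a partial JsonSerializerContext per type, and route deserialization through the generated metadata instead of reflection-based JsonSerializerOptions. A minimal sketch of that pattern, with the Description shape abbreviated to two properties (the full mappings appear in the QS408M Description diff below):

using System.Text.Json;
using System.Text.Json.Serialization;

namespace Adaptation.FileHandlers.QS408M;

// Abbreviated for illustration; the real Description carries the full set of [JsonPropertyName] mappings.
public class Description
{
    [JsonPropertyName("EventId")] public int Test { get; set; }
    [JsonPropertyName("Operator")] public string Employee { get; set; }
}

// The generator emits serialization metadata for Description at compile time.
[JsonSourceGenerationOptions(WriteIndented = true, NumberHandling = JsonNumberHandling.AllowReadingFromString | JsonNumberHandling.WriteAsString)]
[JsonSerializable(typeof(Description))]
internal partial class DescriptionSourceGenerationContext : JsonSerializerContext
{
}

// ProcessData.GetDescriptions then deserializes each JsonElement through the generated context:
// Description? description = JsonSerializer.Deserialize(jsonElement.ToString(), DescriptionSourceGenerationContext.Default.Description);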
2025-09-15 11:37:05 -07:00
parent 515df7ec37
commit d07755c3a0
29 changed files with 869 additions and 204 deletions

View File

@@ -1,43 +1,10 @@
 {
     "configurations": [
-        {
-            "mode": "debug",
-            "name": "Go launch file",
-            "program": "${file}",
-            "request": "launch",
-            "type": "go"
-        },
-        {
-            "name": "node Launch Current Opened File",
-            "program": "${file}",
-            "request": "launch",
-            "type": "node"
-        },
-        {
-            "cwd": "${workspaceFolder}",
-            "internalConsoleOptions": "neverOpen",
-            "name": "Debug File",
-            "program": "${file}",
-            "request": "launch",
-            "stopOnEntry": false,
-            "type": "bun",
-            "watchMode": false
-        },
-        {
-            "cwd": "${workspaceFolder}",
-            "internalConsoleOptions": "neverOpen",
-            "name": "Run File",
-            "noDebug": true,
-            "program": "${file}",
-            "request": "launch",
-            "type": "bun",
-            "watchMode": false
-        },
         {
             "name": ".NET Core Attach",
-            "processId": 32760,
+            "type": "coreclr",
             "request": "attach",
-            "type": "coreclr"
+            "processId": 2688
         }
     ]
 }

View File

@@ -21,6 +21,7 @@ public class CellInstanceConnectionName
             nameof(OpenInsight) => new OpenInsight.FileRead(smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null),
             nameof(OpenInsightMetrologyViewer) => new OpenInsightMetrologyViewer.FileRead(smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null),
             nameof(OpenInsightMetrologyViewerAttachments) => new OpenInsightMetrologyViewerAttachments.FileRead(smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null),
+            nameof(pdsf) => new pdsf.FileRead(smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null),
             nameof(Processed) => new Processed.FileRead(smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null),
             nameof(QS408M) => new QS408M.FileRead(smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null),
             nameof(SPaCe) => new SPaCe.FileRead(smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null),

View File

@@ -157,7 +157,7 @@ public class FileRead : Shared.FileRead, IFileRead
         return results.ToString();
     }
-    private void SaveOpenInsightFile(string reportFullPath, DateTime dateTime, ProcessDataStandardFormat processDataStandardFormat, List<QS408M.Description> descriptions, Test[] tests)
+    private void SaveOpenInsightFile(string reportFullPath, DateTime dateTime, List<QS408M.Description> descriptions, Test[] tests)
     {
         string duplicateFile;
         bool isDummyRun = false;
@@ -201,7 +201,7 @@ public class FileRead : Shared.FileRead, IFileRead
            else
                duplicateFile = Path.Combine(duplicateDirectory, $"{$"Viewer {subgroupId}".TrimEnd()} {fileName.Replace("Viewer", string.Empty)}");
            string weekOfYear = _Calendar.GetWeekOfYear(_Logistics.DateTimeFromSequence, CalendarWeekRule.FirstDay, DayOfWeek.Sunday).ToString("00");
-           FromIQS.Save(_OpenInsightApiECDirectory, _Logistics, reportFullPath, processDataStandardFormat, descriptions.First(), subgroupId, weekOfYear);
+           FromIQS.Save(_OpenInsightApiECDirectory, _Logistics, reportFullPath, descriptions.First(), subgroupId, weekOfYear);
         }
         if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
         {
@@ -214,15 +214,15 @@ public class FileRead : Shared.FileRead, IFileRead
     private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
     {
         Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
-        ProcessDataStandardFormat processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath);
+        string[] lines = File.ReadAllLines(reportFullPath);
+        ProcessDataStandardFormat processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath, lines);
         _Logistics = new Logistics(reportFullPath, processDataStandardFormat);
         SetFileParameterLotIDToLogisticsMID();
-        JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(processDataStandardFormat);
-        // List<Shared.Properties.IDescription> descriptions = GetDuplicatorDescriptions(jsonElements);
+        JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(reportFullPath, lines, processDataStandardFormat);
         List<QS408M.Description> descriptions = QS408M.ProcessData.GetDescriptions(jsonElements);
         Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
         if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
-            SaveOpenInsightFile(reportFullPath, dateTime, processDataStandardFormat, descriptions, tests);
+            SaveOpenInsightFile(reportFullPath, dateTime, descriptions, tests);
         results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics[0]), tests, jsonElements, new List<FileInfo>());
         return results;
     }

View File

@@ -325,74 +325,18 @@ public class FromIQS
         return new(result, count, commandText);
     }
-    private static string GetJson(Logistics logistics, ProcessDataStandardFormat processDataStandardFormat, QS408M.Description description)
-    {
-        string result;
-        StringBuilder stringBuilder = new();
-        var @object = new
-        {
-            description.MesEntity,
-            description.Employee,
-            description.Layer,
-            description.PSN,
-            description.RDS,
-            description.Reactor,
-            description.Recipe,
-            description.Zone,
-            logistics.DateTimeFromSequence.Ticks
-        };
-        string[] pair;
-        string safeValue;
-        string[] segments;
-        string serializerValue;
-        foreach (string line in processDataStandardFormat.Logistics)
-        {
-            segments = line.Split('\t');
-            if (segments.Length < 2)
-                continue;
-            segments = segments[1].Split(';');
-            _ = stringBuilder.Append('{');
-            foreach (string segment in segments)
-            {
-                pair = segment.Split('=');
-                if (pair.Length != 2 || pair[0].Length < 3)
-                    continue;
-                serializerValue = JsonSerializer.Serialize(pair[1]);
-                safeValue = serializerValue.Substring(1, serializerValue.Length - 2);
-                _ = stringBuilder.Append('"').Append(pair[0].Substring(2)).Append('"').Append(':').Append('"').Append(safeValue).Append('"').Append(',');
-            }
-            if (stringBuilder.Length > 0)
-                _ = stringBuilder.Remove(stringBuilder.Length - 1, 1);
-            _ = stringBuilder.Append('}').Append(',');
-        }
-        if (stringBuilder.Length > 0)
-            _ = stringBuilder.Remove(stringBuilder.Length - 1, 1);
-        _ = stringBuilder.Append(']').Append('}');
-        _ = stringBuilder.Insert(0, ",\"Logistics\":[");
-        string json = JsonSerializer.Serialize(@object);
-        _ = stringBuilder.Insert(0, json.Substring(0, json.Length - 1));
-        JsonElement? jsonElement = JsonSerializer.Deserialize<JsonElement>(stringBuilder.ToString());
-        result = jsonElement is null ? "{}" : JsonSerializer.Serialize(jsonElement, new JsonSerializerOptions { WriteIndented = true });
-        return result;
-    }
-    internal static void Save(string openInsightApiECDirectory, Logistics logistics, string reportFullPath, ProcessDataStandardFormat processDataStandardFormat, QS408M.Description description, long? subGroupId, string weekOfYear)
+    internal static void Save(string openInsightApiECDirectory, Logistics logistics, string reportFullPath, QS408M.Description description, long? subGroupId, string weekOfYear)
     {
-        string checkFile;
         string fileName = Path.GetFileName(reportFullPath);
-        string json = GetJson(logistics, processDataStandardFormat, description);
         string? ecPathRoot = Path.GetPathRoot(openInsightApiECDirectory);
         bool ecExists = ecPathRoot is not null && Directory.Exists(ecPathRoot);
         string weekYear = $"{logistics.DateTimeFromSequence:yyyy}_Week_{weekOfYear}";
         string ecDirectory = Path.Combine(openInsightApiECDirectory, weekYear, $"-{description.PSN}", $"-{description.Reactor}", $"-{description.RDS}", $"-{subGroupId}");
         if (ecExists && !Directory.Exists(ecDirectory))
             _ = Directory.CreateDirectory(ecDirectory);
-        checkFile = Path.Combine(ecDirectory, fileName);
+        string checkFile = Path.Combine(ecDirectory, fileName);
         if (ecExists && !File.Exists(checkFile))
             File.Copy(reportFullPath, checkFile);
-        checkFile = Path.Combine(ecDirectory, $"{logistics.DateTimeFromSequence.Ticks}.json");
-        if (ecExists && !File.Exists(checkFile))
-            File.WriteAllText(checkFile, json);
     }
     private static string GetCommandText(string[] iqsCopyValues)

View File

@@ -110,10 +110,10 @@ public class FileRead : Shared.FileRead, IFileRead
         return results;
     }
-    private void SendData(string reportFullPath, DateTime dateTime, List<QS408M.Description> descriptions)
+    private void SendData(string reportFullPath, DateTime dateTime, JsonElement[] jsonElements, List<QS408M.Description> descriptions)
     {
         string checkDirectory;
-        WSRequest wsRequest = new(this, _Logistics, descriptions);
+        WSRequest wsRequest = new(this, _Logistics, jsonElements, descriptions);
         int weekOfYear = _Calendar.GetWeekOfYear(dateTime, CalendarWeekRule.FirstDay, DayOfWeek.Sunday);
         string directory = Path.Combine(_OpenInsightMetrologyViewerFileShare, dateTime.Year.ToString(), $"WW{weekOfYear:00}");
         checkDirectory = Path.Combine(directory, _Logistics.Sequence.ToString());
@@ -139,14 +139,15 @@ public class FileRead : Shared.FileRead, IFileRead
     private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
     {
         Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
-        ProcessDataStandardFormat processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath);
+        string[] lines = File.ReadAllLines(reportFullPath);
+        ProcessDataStandardFormat processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath, lines);
         _Logistics = new Logistics(reportFullPath, processDataStandardFormat);
         SetFileParameterLotIDToLogisticsMID();
-        JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(processDataStandardFormat);
+        JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(reportFullPath, lines, processDataStandardFormat);
         List<QS408M.Description> descriptions = QS408M.ProcessData.GetDescriptions(jsonElements);
         Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
         if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
-            SendData(reportFullPath, dateTime, descriptions);
+            SendData(reportFullPath, dateTime, jsonElements, descriptions);
         results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics), tests, jsonElements, new List<FileInfo>());
         return results;
     }

View File

@@ -4,6 +4,7 @@ using Adaptation.Shared.Properties;
 using System;
 using System.Collections.Generic;
 using System.IO;
+using System.Text.Json;
 namespace Adaptation.FileHandlers.OpenInsightMetrologyViewer;
@@ -37,7 +38,7 @@ public class WSRequest
     [Obsolete("For json")] public WSRequest() { }
 #pragma warning disable IDE0060
-    internal WSRequest(IFileRead fileRead, Logistics logistics, List<QS408M.Description> descriptions, string processDataStandardFormat = null)
+    internal WSRequest(IFileRead fileRead, Logistics logistics, JsonElement[] jsonElements, List<QS408M.Description> descriptions, string processDataStandardFormat = null)
 #pragma warning restore IDE0060
     {
         Id = -1;
@@ -48,10 +49,10 @@ public class WSRequest
         Details = new List<QS408M.Detail>();
         //Header
         {
-            Batch = x.Lot;
+            Batch = x.Lot; // different name
             Cassette = x.Cassette;
             Date = x.Date;
-            Op = x.Employee;
+            Op = x.Employee; // different name
             Layer = x.Layer;
             MeanThickness = x.MeanThickness;
             PSN = x.PSN;
@@ -87,14 +88,14 @@ public class WSRequest
         }
     }
-    internal static long GetHeaderId(IFileRead fileRead, Logistics logistics, string openInsightMetrologyViewerAPI, string openInsightMetrologyViewerFileShare, int weekOfYear, WS.Results results, List<QS408M.Description> descriptions)
+    internal static long GetHeaderId(IFileRead fileRead, Logistics logistics, string openInsightMetrologyViewerAPI, string openInsightMetrologyViewerFileShare, int weekOfYear, WS.Results results, JsonElement[] jsonElements, List<QS408M.Description> descriptions)
     {
         long result;
         if (results is not null && results.HeaderId is not null)
            result = results.HeaderId.Value;
         else
         {
-            WSRequest wsRequest = new(fileRead, logistics, descriptions);
+            WSRequest wsRequest = new(fileRead, logistics, jsonElements, descriptions);
             string directory = Path.Combine(openInsightMetrologyViewerFileShare, logistics.DateTimeFromSequence.Year.ToString(), $"WW{weekOfYear:00}");
             (_, WS.Results wsResults) = WS.SendData(openInsightMetrologyViewerAPI, logistics.Sequence, directory, wsRequest);
             if (wsResults.Success is null || !wsResults.Success.Value)

View File

@@ -135,7 +135,7 @@ public class FileRead : Shared.FileRead, IFileRead
         return result;
     }
-    private void PostOpenInsightMetrologyViewerAttachments(List<QS408M.Description> descriptions)
+    private void PostOpenInsightMetrologyViewerAttachments(JsonElement[] jsonElements, List<QS408M.Description> descriptions)
     {
         Shared.Metrology.WS.Results? results;
         string jobIdDirectory = Path.Combine(Path.GetDirectoryName(_FileConnectorConfiguration.AlternateTargetFolder) ?? throw new Exception(), _Logistics.JobID);
@@ -151,7 +151,7 @@ public class FileRead : Shared.FileRead, IFileRead
             results = wsResults[0];
         }
         int weekOfYear = _Calendar.GetWeekOfYear(_Logistics.DateTimeFromSequence, CalendarWeekRule.FirstDay, DayOfWeek.Sunday);
-        long headerId = !_IsEAFHosted ? -1 : OpenInsightMetrologyViewer.WSRequest.GetHeaderId(this, _Logistics, _OpenInsightMetrologyViewerAPI, _OpenInsightMetrologyViewerFileShare, weekOfYear, results, descriptions);
+        long headerId = !_IsEAFHosted ? -1 : OpenInsightMetrologyViewer.WSRequest.GetHeaderId(this, _Logistics, _OpenInsightMetrologyViewerAPI, _OpenInsightMetrologyViewerFileShare, weekOfYear, results, jsonElements, descriptions);
         string? headerIdDirectory = GetHeaderIdDirectory(headerId);
         if (string.IsNullOrEmpty(headerIdDirectory))
             throw new Exception($"Didn't find header id directory <{headerId}>");
@@ -163,14 +163,15 @@ public class FileRead : Shared.FileRead, IFileRead
         if (dateTime == DateTime.MinValue)
             throw new ArgumentNullException(nameof(dateTime));
         Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
-        ProcessDataStandardFormat processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath);
+        string[] lines = File.ReadAllLines(reportFullPath);
+        ProcessDataStandardFormat processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath, lines);
         _Logistics = new Logistics(reportFullPath, processDataStandardFormat);
         SetFileParameterLotIDToLogisticsMID();
-        JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(processDataStandardFormat);
+        JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(reportFullPath, lines, processDataStandardFormat);
         List<QS408M.Description> descriptions = QS408M.ProcessData.GetDescriptions(jsonElements);
         Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
         if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
-            PostOpenInsightMetrologyViewerAttachments(descriptions);
+            PostOpenInsightMetrologyViewerAttachments(jsonElements, descriptions);
         results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics), tests, jsonElements, new List<FileInfo>());
         return results;
     }

View File

@@ -108,7 +108,7 @@ public class FileRead : Shared.FileRead, IFileRead
         return results;
     }
-    private void DirectoryMove(string reportFullPath, DateTime dateTime, List<QS408M.Description> descriptions)
+    private void DirectoryMove(string reportFullPath, DateTime dateTime, JsonElement[] jsonElements, List<QS408M.Description> descriptions)
     {
         if (dateTime == DateTime.MinValue)
             throw new ArgumentNullException(nameof(dateTime));
@@ -122,7 +122,7 @@ public class FileRead : Shared.FileRead, IFileRead
             throw new Exception("Didn't find directory by logistics sequence");
         if (fileInfo.Exists && fileInfo.LastWriteTime < fileInfo.CreationTime)
             File.SetLastWriteTime(reportFullPath, fileInfo.CreationTime);
-        OpenInsightMetrologyViewer.WSRequest wsRequest = new(this, _Logistics, descriptions);
+        OpenInsightMetrologyViewer.WSRequest wsRequest = new(this, _Logistics, jsonElements, descriptions);
         JsonSerializerOptions jsonSerializerOptions = new() { WriteIndented = true };
         string json = JsonSerializer.Serialize(wsRequest, wsRequest.GetType(), jsonSerializerOptions);
         string directoryName = $"{Path.GetFileName(matchDirectories[0]).Split(new string[] { logisticsSequence }, StringSplitOptions.None)[0]}{_Logistics.DateTimeFromSequence:yyyy-MM-dd_hh;mm_tt_}{DateTime.Now.Ticks - _Logistics.Sequence}";
@@ -166,23 +166,24 @@ public class FileRead : Shared.FileRead, IFileRead
     private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
     {
         Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
-        ProcessDataStandardFormat processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath);
+        string[] lines = File.ReadAllLines(reportFullPath);
+        ProcessDataStandardFormat processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath, lines);
         _Logistics = new Logistics(reportFullPath, processDataStandardFormat);
         SetFileParameterLotIDToLogisticsMID();
-        JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(processDataStandardFormat);
+        JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(reportFullPath, lines, processDataStandardFormat);
         List<QS408M.Description> descriptions = QS408M.ProcessData.GetDescriptions(jsonElements);
         Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
         results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics), tests, jsonElements, new List<FileInfo>());
         if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
-            DirectoryMove(reportFullPath, dateTime, descriptions);
+            DirectoryMove(reportFullPath, dateTime, jsonElements, descriptions);
         else if (!_IsEAFHosted)
         {
-            OpenInsightMetrologyViewer.WSRequest wsRequest = new(this, _Logistics, descriptions);
+            OpenInsightMetrologyViewer.WSRequest wsRequest = new(this, _Logistics, jsonElements, descriptions);
             JsonSerializerOptions jsonSerializerOptions = new() { WriteIndented = true };
-            string json = JsonSerializer.Serialize(wsRequest, wsRequest.GetType(), jsonSerializerOptions);
+            string check = JsonSerializer.Serialize(wsRequest, wsRequest.GetType(), jsonSerializerOptions);
             string jsonFileName = Path.ChangeExtension(reportFullPath, ".json");
             string historicalText = File.ReadAllText(jsonFileName);
-            if (json != historicalText)
+            if (check != historicalText)
                 throw new Exception("File doesn't match historical!");
         }
         return results;

View File

@@ -4,72 +4,73 @@ using System;
 using System.Collections.Generic;
 using System.Linq;
 using System.Text.Json;
+using System.Text.Json.Serialization;
 namespace Adaptation.FileHandlers.QS408M;
 public class Description : IDescription, Shared.Properties.IDescription
 {
-    public int Test { get; set; }
-    public int Count { get; set; }
-    public int Index { get; set; }
+    [JsonPropertyName("EventId")] public int Test { get; set; }
+    [JsonPropertyName("Count")] public int Count { get; set; }
+    [JsonPropertyName("Index")] public int Index { get; set; }
     //
     public string EventName { get; set; }
     public string NullData { get; set; }
     public string JobID { get; set; }
     public string Sequence { get; set; }
-    public string MesEntity { get; set; }
+    [JsonPropertyName("MesEntity")] public string MesEntity { get; set; }
     public string ReportFullPath { get; set; }
     public string ProcessJobID { get; set; }
     public string MID { get; set; }
     //
-    public string Date { get; set; }
-    public string Employee { get; set; }
+    [JsonPropertyName("DateTime")] public string Date { get; set; }
+    [JsonPropertyName("Operator")] public string Employee { get; set; }
     public string Lot { get; set; }
-    public string PSN { get; set; }
-    public string Reactor { get; set; }
-    public string Recipe { get; set; }
+    [JsonPropertyName("PSN")] public string PSN { get; set; }
+    [JsonPropertyName("Reactor")] public string Reactor { get; set; }
+    [JsonPropertyName("Recipe")] public string Recipe { get; set; }
     //
-    public string Cassette { get; set; }
+    [JsonPropertyName("Cassette")] public string Cassette { get; set; }
     public string HeaderUniqueId { get; set; }
-    public string Layer { get; set; }
-    public string PassFail { get; set; }
-    public string Position { get; set; }
-    public string RDS { get; set; }
-    public string Title { get; set; }
+    [JsonPropertyName("Layer")] public string Layer { get; set; }
+    [JsonPropertyName("PassFail")] public string PassFail { get; set; }
+    [JsonPropertyName("Position")] public string Position { get; set; }
+    [JsonPropertyName("RDS")] public string RDS { get; set; }
+    [JsonPropertyName("Title")] public string Title { get; set; }
     public string UniqueId { get; set; }
-    public string Wafer { get; set; }
-    public string Zone { get; set; }
+    [JsonPropertyName("Wafer")] public string Wafer { get; set; }
+    [JsonPropertyName("Zone")] public string Zone { get; set; }
     //
-    public string MeanThickness { get; set; }
-    public string RVThickness { get; set; }
-    public string StdDev { get; set; }
-    public string Thickness { get; set; }
+    [JsonPropertyName("WaferMeanThickness")] public string MeanThickness { get; set; }
+    [JsonPropertyName("RadialVariationThickness")] public string RVThickness { get; set; }
+    [JsonPropertyName("StdDev")] public string StdDev { get; set; }
+    [JsonPropertyName("Thickness")] public string Thickness { get; set; }
     //
-    public string Slot { get; set; }
-    public string ThicknessFourteen3mmEdgeMean { get; set; }
-    public string ThicknessFourteen3mmEdgePercent { get; set; }
-    public string ThicknessFourteen5mmEdgeMean { get; set; }
-    public string ThicknessFourteen5mmEdgePercent { get; set; }
-    public string ThicknessFourteenCenterMean { get; set; }
-    public string ThicknessFourteenCriticalPointsAverage { get; set; }
-    public string ThicknessFourteenCriticalPointsStdDev { get; set; }
-    public string ThicknessFourteenMeanFrom { get; set; }
+    [JsonPropertyName("Slot")] public string Slot { get; set; }
+    [JsonPropertyName("Thickness 14 3mm Edge Mean")] public string ThicknessFourteen3mmEdgeMean { get; set; }
+    [JsonPropertyName("Thickness 14 3mm Edge % from R/2")] public string ThicknessFourteen3mmEdgePercent { get; set; }
+    [JsonPropertyName("Thickness 14 5mm Edge Mean")] public string ThicknessFourteen5mmEdgeMean { get; set; }
+    [JsonPropertyName("Thickness 14 5mm Edge % from R/2")] public string ThicknessFourteen5mmEdgePercent { get; set; }
+    [JsonPropertyName("Thickness 14 Center Mean")] public string ThicknessFourteenCenterMean { get; set; }
+    [JsonPropertyName("Thickness 14 Average")] public string ThicknessFourteenCriticalPointsAverage { get; set; }
+    [JsonPropertyName("Thickness 14 Std Dev")] public string ThicknessFourteenCriticalPointsStdDev { get; set; }
+    [JsonPropertyName("Thickness 14 R 2/Mean")] public string ThicknessFourteenMeanFrom { get; set; }
     //
-    public string Thickness01 { get; set; }
-    public string Thickness02 { get; set; }
-    public string Thickness03 { get; set; }
-    public string Thickness04 { get; set; }
-    public string Thickness05 { get; set; }
-    public string Thickness06 { get; set; }
-    public string Thickness07 { get; set; }
-    public string Thickness08 { get; set; }
-    public string Thickness09 { get; set; }
-    public string Thickness10 { get; set; }
-    public string Thickness11 { get; set; }
-    public string Thickness12 { get; set; }
-    public string Thickness13 { get; set; }
-    public string Thickness14 { get; set; }
+    [JsonPropertyName("Thickness01")] public string Thickness01 { get; set; }
+    [JsonPropertyName("Thickness02")] public string Thickness02 { get; set; }
+    [JsonPropertyName("Thickness03")] public string Thickness03 { get; set; }
+    [JsonPropertyName("Thickness04")] public string Thickness04 { get; set; }
+    [JsonPropertyName("Thickness05")] public string Thickness05 { get; set; }
+    [JsonPropertyName("Thickness06")] public string Thickness06 { get; set; }
+    [JsonPropertyName("Thickness07")] public string Thickness07 { get; set; }
+    [JsonPropertyName("Thickness08")] public string Thickness08 { get; set; }
+    [JsonPropertyName("Thickness09")] public string Thickness09 { get; set; }
+    [JsonPropertyName("Thickness10")] public string Thickness10 { get; set; }
+    [JsonPropertyName("Thickness11")] public string Thickness11 { get; set; }
+    [JsonPropertyName("Thickness12")] public string Thickness12 { get; set; }
+    [JsonPropertyName("Thickness13")] public string Thickness13 { get; set; }
+    [JsonPropertyName("Thickness14")] public string Thickness14 { get; set; }
     string IDescription.GetEventDescription() => "File Has been read and parsed";
@@ -351,4 +352,16 @@ public class Description : IDescription, Shared.Properties.IDescription
     internal static string GetDateFormat() => "MM/dd/yyyy hh:mm:ss tt";
+}
+[JsonSourceGenerationOptions(WriteIndented = true, NumberHandling = JsonNumberHandling.AllowReadingFromString | JsonNumberHandling.WriteAsString)]
+[JsonSerializable(typeof(Description))]
+internal partial class DescriptionSourceGenerationContext : JsonSerializerContext
+{
+}
+[JsonSourceGenerationOptions(WriteIndented = true, NumberHandling = JsonNumberHandling.AllowReadingFromString | JsonNumberHandling.WriteAsString)]
+[JsonSerializable(typeof(Description[]))]
+internal partial class DescriptionArraySourceGenerationContext : JsonSerializerContext
+{
 }

View File

@@ -1,13 +1,27 @@
+using System.Text.Json.Serialization;
 namespace Adaptation.FileHandlers.QS408M;
 public class Detail
 {
     public string HeaderUniqueId { get; set; }
-    public string Position { get; set; }
-    public string Thickness { get; set; }
+    [JsonPropertyName("Position")] public string Position { get; set; }
+    [JsonPropertyName("Thickness")] public string Thickness { get; set; }
     public string UniqueId { get; set; }
     public override string ToString() => string.Concat(Position, ";", Thickness, ";");
+}
+[JsonSourceGenerationOptions(WriteIndented = true, NumberHandling = JsonNumberHandling.AllowReadingFromString | JsonNumberHandling.WriteAsString)]
+[JsonSerializable(typeof(Detail))]
+internal partial class DetailSourceGenerationContext : JsonSerializerContext
+{
+}
+[JsonSourceGenerationOptions(WriteIndented = true, NumberHandling = JsonNumberHandling.AllowReadingFromString | JsonNumberHandling.WriteAsString)]
+[JsonSerializable(typeof(Detail[]))]
+internal partial class DetailArraySourceGenerationContext : JsonSerializerContext
+{
 }

View File

@@ -304,12 +304,11 @@ public partial class ProcessData : IProcessData
     {
         List<Description> results = new();
        Description? description;
-        JsonSerializerOptions jsonSerializerOptions = new() { NumberHandling = JsonNumberHandling.AllowReadingFromString | JsonNumberHandling.WriteAsString };
         foreach (JsonElement jsonElement in jsonElements)
         {
             if (jsonElement.ValueKind != JsonValueKind.Object)
                 throw new Exception();
-            description = JsonSerializer.Deserialize<Description>(jsonElement.ToString(), jsonSerializerOptions);
+            description = JsonSerializer.Deserialize(jsonElement.ToString(), DescriptionSourceGenerationContext.Default.Description);
             if (description is null)
                 continue;
             results.Add(description);

View File

@@ -58,6 +58,6 @@ internal class Row
 [JsonSourceGenerationOptions(WriteIndented = true)]
 [JsonSerializable(typeof(Row))]
-internal partial class RowSourceGenerationContext : JsonSerializerContext
+internal partial class QS408MRowSourceGenerationContext : JsonSerializerContext
 {
 }

View File

@@ -71,7 +71,7 @@ internal class Run
     private static void WriteJson(Logistics logistics, List<FileInfo> fileInfoCollection, Run result)
     {
         FileInfo fileInfo = new($"{logistics.ReportFullPath}.run.json");
-        string json = JsonSerializer.Serialize(result, RunSourceGenerationContext.Default.Run);
+        string json = JsonSerializer.Serialize(result, QS408MRunSourceGenerationContext.Default.Run);
         File.WriteAllText(fileInfo.FullName, json);
         File.SetLastWriteTime(fileInfo.FullName, logistics.DateTimeFromSequence);
         fileInfoCollection.Add(fileInfo);
@@ -190,6 +190,6 @@ internal class Run
 [JsonSourceGenerationOptions(WriteIndented = true)]
 [JsonSerializable(typeof(Run))]
-internal partial class RunSourceGenerationContext : JsonSerializerContext
+internal partial class QS408MRunSourceGenerationContext : JsonSerializerContext
 {
 }

View File

@@ -0,0 +1,80 @@
namespace Adaptation.FileHandlers.pdsf;
#nullable enable
public class Body
{
public Body(string waferMeanThickness, string stdDev, string passFail)
{
WaferMeanThickness = waferMeanThickness;
StdDev = stdDev;
PassFail = passFail;
}
public string WaferMeanThickness { get; }
public string StdDev { get; }
public string PassFail { get; }
private static bool IsNullOrWhiteSpace(string text)
{
bool flag;
int num = 0;
while (true)
{
if (num >= text.Length)
{
flag = true;
break;
}
else if (char.IsWhiteSpace(text[num]))
{
num++;
}
else
{
flag = false;
break;
}
}
return flag;
}
internal static string GetToken(string text, int[] i)
{
while (true)
{
if (i[0] >= text.Length || !IsNullOrWhiteSpace(text.Substring(i[0], 1)))
{
break;
}
i[0]++;
}
int num = i[0];
while (true)
{
if (num >= text.Length || IsNullOrWhiteSpace(text.Substring(num, 1)))
{
break;
}
num++;
}
string str = text.Substring(i[0], num - i[0]);
i[0] = num;
return str.Trim();
}
internal static Body? Get(Constant constant, string text, int[] i)
{
Body? result;
i[0] = Run.ScanPast(text, i, constant.MeanThickness);
string meanThickness = Run.GetBefore(text, i, constant.StandardDeviation);
string stdDev = GetToken(text, i);
string passFail = Run.GetToEOL(text, i);
result = new(meanThickness,
stdDev,
passFail);
return result;
}
}

View File

@@ -0,0 +1,21 @@
namespace Adaptation.FileHandlers.pdsf;
internal class Constant
{
public string Period { get; } = ".";
public string Slot { get; } = "Slot:";
public string Semicolon { get; } = ";";
public string Batch { get; } = "batch:";
public string TwoSpaces { get; } = " ";
public string Wafer { get; } = "wafer:";
public string BioRad { get; } = "Bio-Rad";
public string Recipe { get; } = "Recipe:";
public string Cassette { get; } = "cassette:";
public string Operator { get; } = "operator:";
public string Thickness { get; } = "thickness";
public string MeanThickness { get; } = "mean thickness =";
public string StandardDeviation { get; } = ", std. dev =";
public string WaferFieldIsMissing { get; } = "Wafer field is missing.";
}

View File

@@ -0,0 +1,130 @@
using Adaptation.Eaf.Management.ConfigurationData.CellAutomation;
using Adaptation.Ifx.Eaf.EquipmentConnector.File.Configuration;
using Adaptation.Shared;
using Adaptation.Shared.Duplicator;
using Adaptation.Shared.Methods;
using System;
using System.Collections.Generic;
using System.IO;
using System.Text.Json;
namespace Adaptation.FileHandlers.pdsf;
public class FileRead : Shared.FileRead, IFileRead
{
private readonly Header[] _LastHeader;
public FileRead(ISMTP smtp, Dictionary<string, string> fileParameter, string cellInstanceName, int? connectionCount, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList<ModelObjectParameterDefinition> modelObjectParameters, string equipmentDictionaryName, Dictionary<string, List<long>> dummyRuns, Dictionary<long, List<Shared.Metrology.WS.Results>> staticRuns, bool useCyclicalForDescription, bool isEAFHosted) :
base(new Description(), false, smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null)
{
_MinFileLength = 10;
_NullData = string.Empty;
_Logistics = new(this);
if (_FileParameter is null)
throw new Exception(cellInstanceConnectionName);
if (_ModelObjectParameterDefinitions is null)
throw new Exception(cellInstanceConnectionName);
if (_IsDuplicator)
throw new Exception(cellInstanceConnectionName);
_LastHeader = new Header[] { Header.Get() };
if (_IsEAFHosted)
NestExistingFiles(_FileConnectorConfiguration);
}
void IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, Exception exception) => Move(extractResults);
void IFileRead.WaitForThread() => WaitForThread(thread: null, threadExceptions: null);
string IFileRead.GetEventDescription()
{
string result = _Description.GetEventDescription();
return result;
}
List<string> IFileRead.GetHeaderNames()
{
List<string> results = _Description.GetHeaderNames();
return results;
}
string[] IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, string to, string from, string resolvedFileLocation, Exception exception)
{
string[] results = Move(extractResults, to, from, resolvedFileLocation, exception);
return results;
}
JsonProperty[] IFileRead.GetDefault()
{
JsonProperty[] results = _Description.GetDefault(this, _Logistics);
return results;
}
Dictionary<string, string> IFileRead.GetDisplayNamesJsonElement()
{
Dictionary<string, string> results = _Description.GetDisplayNamesJsonElement(this);
return results;
}
List<IDescription> IFileRead.GetDescriptions(IFileRead fileRead, List<Test> tests, IProcessData processData)
{
List<IDescription> results = _Description.GetDescriptions(fileRead, _Logistics, tests, processData);
return results;
}
Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.GetExtractResult(string reportFullPath, string eventName)
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
if (string.IsNullOrEmpty(eventName))
throw new Exception();
_ReportFullPath = reportFullPath;
DateTime dateTime = DateTime.Now;
results = GetExtractResult(reportFullPath, dateTime);
if (results.Item3 is null)
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(results.Item1, Array.Empty<Test>(), Array.Empty<JsonElement>(), results.Item4);
if (results.Item3.Length > 0 && _IsEAFHosted)
WritePDSF(this, results.Item3);
UpdateLastTicksDuration(DateTime.Now.Ticks - dateTime.Ticks);
return results;
}
Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.ReExtract()
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
List<string> headerNames = _Description.GetHeaderNames();
Dictionary<string, string> keyValuePairs = _Description.GetDisplayNamesJsonElement(this);
results = ReExtract(this, headerNames, keyValuePairs);
return results;
}
#nullable enable
private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
string result;
JsonElement[] jsonElements;
Test[] tests = Array.Empty<Test>();
List<FileInfo> fileInfoCollection = new();
ProcessDataStandardFormat processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath);
_Logistics = new Logistics(reportFullPath, processDataStandardFormat);
SetFileParameterLotIDToLogisticsMID();
Run? run = Run.Get(_Logistics, processDataStandardFormat, fileInfoCollection, lastHeader: _LastHeader[0]);
if (run is null)
{
_LastHeader[0] = Header.Get();
jsonElements = Array.Empty<JsonElement>();
result = string.Concat("A) No Data - ", dateTime.Ticks);
results = new(result, tests, jsonElements, fileInfoCollection);
}
else
{
_LastHeader[0] = run.Header;
result = string.Join(Environment.NewLine, _Logistics.Logistics1);
jsonElements = _IsEAFHosted ? Array.Empty<JsonElement>() : ProcessDataStandardFormat.GetArray(processDataStandardFormat);
results = new(result, tests, jsonElements, fileInfoCollection);
}
return results;
}
}

View File

@@ -0,0 +1,36 @@
namespace Adaptation.FileHandlers.pdsf;
#nullable enable
public class Footer
{
public Footer(string line, string radialVariationThickness, string slot)
{
Line = line;
RadialVariationThickness = radialVariationThickness;
Slot = slot;
}
public string Line { get; }
public string RadialVariationThickness { get; }
public string Slot { get; }
internal static Footer? Get(Constant constant, string text, int[] i)
{
Footer? result;
_ = Run.GetToEOL(text, i);
_ = Run.GetToEOL(text, i);
string line = Run.GetToEOL(text, i);
i[0] = Run.ScanPast(text, i, constant.Thickness);
string radialVariationThickness = Run.GetToEOL(text, i);
_ = Run.GetToEOL(text, i);
i[0] = Run.ScanPast(text, i, constant.Slot);
string slot = Run.GetBefore(text, i, constant.Semicolon);
result = new(line,
radialVariationThickness,
slot);
return result;
}
}

View File

@@ -0,0 +1,106 @@
using System;
using System.Collections.ObjectModel;
namespace Adaptation.FileHandlers.pdsf;
#nullable enable
public class Header
{
public Header(string title, string recipe, string dateTime, string @operator, string batch, string cassette, bool usedLast, string wafer)
{
Title = title;
Recipe = recipe;
DateTime = dateTime;
Operator = @operator;
Batch = batch;
Cassette = cassette;
UsedLast = usedLast;
Wafer = wafer;
}
public string Title { get; }
public string Recipe { get; }
public string DateTime { get; }
public string Operator { get; }
public string Batch { get; }
public string Cassette { get; }
public bool UsedLast { get; }
public string Wafer { get; }
internal static Header Get() =>
new(string.Empty,
string.Empty,
string.Empty,
string.Empty,
string.Empty,
string.Empty,
false,
string.Empty);
internal static Header? Get(Constant constant, Header lastHeader, string text, int[] i)
{
Header? result;
// occasionally there are multiple blocks of details, get the last one as earlier ones may be aborted runs.
int index = text.LastIndexOf(constant.BioRad);
if (index > -1)
text = text.Substring(index);
if (string.IsNullOrEmpty(text))
result = null;
else
{
bool usedLast;
string recipe;
string title = Run.GetBefore(text, i, constant.Recipe);
string recipeAndDateTime = Run.GetToEOL(text, i);
if (recipeAndDateTime.Length < constant.TwoSpaces.Length)
recipe = recipeAndDateTime.Trim();
else if (!recipeAndDateTime.Contains(constant.TwoSpaces))
recipe = recipeAndDateTime.Substring(0, 25).Trim();
else
recipe = recipeAndDateTime.Split(new string[] { constant.TwoSpaces }, StringSplitOptions.None)[0].Trim();
string dateTime = recipeAndDateTime.Substring(recipe.Length).Trim();
if (dateTime.EndsWith(constant.Period))
dateTime = dateTime.Remove(dateTime.Length - 1, 1);
i[0] = Run.ScanPast(text, i, constant.Operator);
string @operator = Run.GetBefore(text, i, constant.Batch);
string batch = Run.GetToEOL(text, i);
i[0] = Run.ScanPast(text, i, constant.Cassette);
if (!text.Contains(constant.Cassette))
title = string.Empty;
string cassette = Run.GetBefore(text, i, constant.Wafer);
if (string.IsNullOrEmpty(batch))
{
i[0] = 0;
i[0] = Run.ScanPast(text, i, constant.Wafer);
}
string wafer = Run.GetToEOL(text, i);
_ = Run.GetToEOL(text, i);
_ = Run.GetToEOL(text, i);
if (string.IsNullOrEmpty(wafer))
throw new Exception(constant.WaferFieldIsMissing);
if (!string.IsNullOrEmpty(title))
usedLast = false;
else
{
title = lastHeader.Title;
recipe = lastHeader.Recipe;
@operator = lastHeader.Operator;
batch = lastHeader.Batch;
cassette = lastHeader.Cassette;
usedLast = true;
}
result = new(title: title,
recipe: recipe,
dateTime: dateTime,
@operator: @operator,
batch: batch,
cassette: cassette,
usedLast: usedLast,
wafer: wafer);
}
return result;
}
}

View File

@@ -0,0 +1,63 @@
using System.Text.Json.Serialization;
namespace Adaptation.FileHandlers.pdsf;
#nullable enable
internal class Row
{
public Row(Run run, int i)
{
Index = i;
//
Title = run.Header.Title;
Recipe = run.Header.Recipe;
DateTime = run.Header.DateTime;
Operator = run.Header.Operator;
Batch = run.Header.Batch;
Cassette = run.Header.Cassette;
UsedLast = run.Header.UsedLast;
Wafer = run.Header.Wafer;
//
Position = run.Sites[i].Position;
Thickness = run.Sites[i].Thickness;
//
WaferMeanThickness = run.Body.WaferMeanThickness;
StdDev = run.Body.StdDev;
PassFail = run.Body.PassFail;
//
Line = run.Footer.Line;
RadialVariationThickness = run.Footer.RadialVariationThickness;
Slot = run.Footer.Slot;
}
public int Index { get; }
//
public string Title { get; }
public string Recipe { get; }
public string DateTime { get; }
public string Operator { get; }
public string Batch { get; }
public string Cassette { get; }
public bool UsedLast { get; }
public string Wafer { get; }
//
public string Position { get; }
public string Thickness { get; }
//
public string WaferMeanThickness { get; }
public string StdDev { get; }
public string PassFail { get; }
//
public string Line { get; }
public string RadialVariationThickness { get; }
public string Slot { get; }
}
[JsonSourceGenerationOptions(WriteIndented = true)]
[JsonSerializable(typeof(Row))]
internal partial class RowSourceGenerationContext : JsonSerializerContext
{
}

View File

@@ -0,0 +1,190 @@
using Adaptation.Shared;
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.IO;
using System.Linq;
using System.Text;
using System.Text.Json;
using System.Text.Json.Serialization;
namespace Adaptation.FileHandlers.pdsf;
#nullable enable
internal class Run
{
public Header Header { get; }
public ReadOnlyCollection<Site> Sites { get; }
public Body Body { get; }
public Footer Footer { get; }
public Run(Header header, ReadOnlyCollection<Site> sites, Body body, Footer footer)
{
Header = header;
Sites = sites;
Body = body;
Footer = footer;
}
internal static string GetBefore(string text, int[] i, string search)
{
string str;
string str1;
int num = text.IndexOf(search, i[0]);
if (num <= -1)
{
str = text.Substring(i[0]);
i[0] = text.Length;
str1 = str.Trim();
}
else
{
str = text.Substring(i[0], num - i[0]);
i[0] = num + search.Length;
str1 = str.Trim();
}
return str1;
}
internal static string GetToEOL(string text, int[] i)
{
string result;
if (text.IndexOf("\n", i[0]) > -1)
result = GetBefore(text, i, "\n");
else
result = GetBefore(text, i, Environment.NewLine);
return result;
}
internal static int ScanPast(string text, int[] i, string search)
{
int result;
int num = text.IndexOf(search, i[0]);
if (num <= -1)
result = text.Length;
else
result = num + search.Length;
return result;
}
private static void WriteJson(Logistics logistics, List<FileInfo> fileInfoCollection, Run result)
{
FileInfo fileInfo = new($"{logistics.ReportFullPath}.run.json");
string json = JsonSerializer.Serialize(result, RunSourceGenerationContext.Default.Run);
File.WriteAllText(fileInfo.FullName, json);
File.SetLastWriteTime(fileInfo.FullName, logistics.DateTimeFromSequence);
fileInfoCollection.Add(fileInfo);
}
private static ReadOnlyCollection<string> GetLines(Logistics logistics, JsonElement[]? jsonElements)
{
List<string> results = new();
int columns = 0;
StringBuilder stringBuilder = new();
results.Add($"\"Count\",{jsonElements?.Length}");
results.Add($"\"{nameof(logistics.Sequence)}\",\"{logistics.Sequence}\"");
results.Add($"\"{nameof(logistics.MesEntity)}\",\"{logistics.MesEntity}\"");
string dateTimeFromSequence = logistics.DateTimeFromSequence.ToString("MM/dd/yyyy hh:mm:ss tt");
for (int i = 0; i < jsonElements?.Length;)
{
_ = stringBuilder.Append('"').Append(nameof(logistics.DateTimeFromSequence)).Append('"').Append(',');
foreach (JsonProperty jsonProperty in jsonElements[0].EnumerateObject())
{
columns += 1;
_ = stringBuilder.Append('"').Append(jsonProperty.Name).Append('"').Append(',');
}
break;
}
if (jsonElements?.Length != 0)
_ = stringBuilder.Remove(stringBuilder.Length - 1, 1);
results.Add(stringBuilder.ToString());
for (int i = 0; i < jsonElements?.Length; i++)
{
_ = stringBuilder.Clear();
_ = stringBuilder.Append('"').Append(dateTimeFromSequence).Append('"').Append(',');
foreach (JsonProperty jsonProperty in jsonElements[i].EnumerateObject())
{
if (jsonProperty.Value.ValueKind == JsonValueKind.Object)
_ = stringBuilder.Append(',');
else if (jsonProperty.Value.ValueKind != JsonValueKind.String)
_ = stringBuilder.Append(jsonProperty.Value).Append(',');
else
_ = stringBuilder.Append('"').Append(jsonProperty.Value).Append('"').Append(',');
}
_ = stringBuilder.Remove(stringBuilder.Length - 1, 1);
results.Add(stringBuilder.ToString());
}
return results.AsReadOnly();
}
private static void WriteCommaSeparatedValues(Logistics logistics, Run run)
{
List<Row> results = new();
Row row;
for (int i = 0; i < run.Sites.Count; i++)
{
row = new(run, i);
results.Add(row);
}
string json = JsonSerializer.Serialize(results);
JsonElement[]? jsonElements = JsonSerializer.Deserialize<JsonElement[]>(json);
ReadOnlyCollection<string> lines = GetLines(logistics, jsonElements);
File.WriteAllText($"{logistics.ReportFullPath}.csv", string.Join(Environment.NewLine, lines));
}
private static string GetText(ProcessDataStandardFormat processDataStandardFormat)
{
string result;
List<string> results = new();
int skipColumns = processDataStandardFormat.Columns.Count - 1;
foreach (string line in processDataStandardFormat.Body)
results.Add(string.Join("\t", line.Split('\t').Skip(skipColumns)));
result = string.Join(Environment.NewLine, results);
return result;
}
internal static Run? Get(Logistics logistics, ProcessDataStandardFormat processDataStandardFormat, List<FileInfo> fileInfoCollection, Header lastHeader)
{
Run? result;
Constant constant = new();
int[] i = new int[] { 0 };
string text = GetText(processDataStandardFormat);
Header? header = Header.Get(constant, lastHeader, text, i);
if (header is null)
result = null;
else
{
ReadOnlyCollection<Site> sites = Site.Get(text, i);
if (sites.Count == 0)
result = null;
else
{
Body? body = Body.Get(constant, text, i);
if (body is null)
result = null;
else
{
Footer? footer = Footer.Get(constant, text, i);
if (footer is null)
result = null;
else
{
result = new(header, sites, body, footer);
WriteJson(logistics, fileInfoCollection, result);
WriteCommaSeparatedValues(logistics, result);
}
}
}
}
return result;
}
}
[JsonSourceGenerationOptions(WriteIndented = true)]
[JsonSerializable(typeof(Run))]
internal partial class RunSourceGenerationContext : JsonSerializerContext
{
}
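
Editor's note: the scanning helpers above thread a single-element int[] through the text as a by-reference cursor. The snippet below is an illustrative sketch only; the sample text, labels, and wrapper type are made up.

using System;

namespace Adaptation.FileHandlers.pdsf;

// Hypothetical illustration (not part of the commit) of the cursor pattern:
// ScanPast returns the index just past a label (the caller stores it back into
// the cursor), while GetBefore and GetToEOL return the trimmed text up to the
// next delimiter and advance the cursor past that delimiter themselves.
internal static class RunScanSketch
{
    internal static void Demonstrate()
    {
        string text = "Recipe: EPI-9PT\nThickness 123.4\n"; // made-up sample text
        int[] i = new int[] { 0 };
        i[0] = Run.ScanPast(text, i, "Recipe:");     // cursor now sits just after "Recipe:"
        string recipe = Run.GetToEOL(text, i);       // "EPI-9PT"
        string label = Run.GetBefore(text, i, " ");  // "Thickness"
        string value = Run.GetToEOL(text, i);        // "123.4"
        Console.WriteLine($"{recipe} {label} {value}");
    }
}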

View File

@ -0,0 +1,36 @@
using System.Collections.Generic;
using System.Collections.ObjectModel;
namespace Adaptation.FileHandlers.pdsf;
public class Site
{
public Site(string position, string thickness)
{
Position = position;
Thickness = thickness;
}
public string Position { get; }
public string Thickness { get; }
internal static ReadOnlyCollection<Site> Get(string text, int[] i)
{
List<Site> results = new();
Site site;
string thickness;
string position = Body.GetToken(text, i);
while (true)
{
if (string.IsNullOrEmpty(position) || !char.IsDigit(position[0]))
break;
thickness = Body.GetToken(text, i);
site = new(position, thickness);
results.Add(site);
position = Body.GetToken(text, i);
}
return results.AsReadOnly();
}
}
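
Editor's note: Site.Get pairs position/thickness tokens until it reads a token that does not start with a digit. The walk-through below is a sketch; it assumes Body.GetToken yields whitespace-delimited tokens, and the sample site block is invented.

using System;
using System.Collections.ObjectModel;

namespace Adaptation.FileHandlers.pdsf;

// Hypothetical walk-through (not part of the commit), assuming Body.GetToken
// returns whitespace-delimited tokens from the shared text/cursor pair.
internal static class SiteParseSketch
{
    internal static void Demonstrate()
    {
        int[] i = new int[] { 0 };
        string text = "1 4.591 2 4.626 Average 4.608"; // invented site block
        ReadOnlyCollection<Site> sites = Site.Get(text, i);
        // sites[0] => Position "1", Thickness "4.591"
        // sites[1] => Position "2", Thickness "4.626"
        // The loop stops once "Average" is read, because it does not start with a digit.
        Console.WriteLine(sites.Count); // 2 under the stated assumption
    }
}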

View File

@ -3,6 +3,7 @@ using System;
using System.Collections.Generic;
using System.Linq;
using System.Text.Json;
using System.Text.Json.Serialization;
namespace Adaptation.Shared.Duplicator;
@ -178,4 +179,16 @@ public class Description : IDescription, Properties.IDescription
internal static string GetDateFormat() => "MM/dd/yyyy hh:mm:ss tt";
}
[JsonSourceGenerationOptions(WriteIndented = true, NumberHandling = JsonNumberHandling.AllowReadingFromString | JsonNumberHandling.WriteAsString)]
[JsonSerializable(typeof(Description))]
internal partial class SharedDescriptionSourceGenerationContext : JsonSerializerContext
{
}
[JsonSourceGenerationOptions(WriteIndented = true, NumberHandling = JsonNumberHandling.AllowReadingFromString | JsonNumberHandling.WriteAsString)]
[JsonSerializable(typeof(Description[]))]
internal partial class SharedDescriptionArraySourceGenerationContext : JsonSerializerContext
{
}
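
Editor's note: a minimal usage sketch for the two contexts added above. The wrapper type and method are hypothetical; the property names follow the generator's default naming for typeof(Description) and typeof(Description[]).

using System;
using System.Text.Json;

namespace Adaptation.Shared.Duplicator;

// Hypothetical sketch (not part of the commit): both contexts carry the same
// NumberHandling the runtime JsonSerializerOptions used to provide, so numbers
// may arrive as JSON strings and are written back out as strings.
internal static class SharedDescriptionSketch
{
    internal static string RoundTrip(string json)
    {
        Description description = JsonSerializer.Deserialize(json, SharedDescriptionSourceGenerationContext.Default.Description)
            ?? throw new Exception();
        return JsonSerializer.Serialize(new[] { description }, SharedDescriptionArraySourceGenerationContext.Default.DescriptionArray);
    }
}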

View File

@ -9,7 +9,6 @@ using System.IO;
using System.Linq;
using System.Text;
using System.Text.Json;
using System.Text.Json.Serialization;
using System.Threading;
namespace Adaptation.Shared;
@ -447,12 +446,13 @@ public class FileRead : Properties.IFileRead
{
List<Properties.IDescription> results = new();
Duplicator.Description description;
JsonSerializerOptions jsonSerializerOptions = new() { NumberHandling = JsonNumberHandling.AllowReadingFromString | JsonNumberHandling.WriteAsString };
foreach (JsonElement jsonElement in jsonElements)
{
if (jsonElement.ValueKind != JsonValueKind.Object)
throw new Exception();
description = JsonSerializer.Deserialize<Duplicator.Description>(jsonElement.ToString(), jsonSerializerOptions);
description = JsonSerializer.Deserialize(jsonElement.ToString(), Duplicator.SharedDescriptionSourceGenerationContext.Default.Description);
if (description is null)
continue;
results.Add(description);
}
return results;

View File

@ -654,6 +654,17 @@ internal class ProcessDataStandardFormat
return results;
}
internal static JsonElement[] GetArray(string reportFullPath, string[] lines, ProcessDataStandardFormat processDataStandardFormat)
{
JsonElement[] results;
string? json = GetRecordsJson(reportFullPath, lines);
if (string.IsNullOrEmpty(json))
results = GetArray(processDataStandardFormat);
else
results = JsonSerializer.Deserialize(json, JsonElementCollectionSourceGenerationContext.Default.JsonElementArray) ?? throw new Exception();
return results;
}
internal static string GetPDSFText(IFileRead fileRead, Logistics logistics, JsonElement[] jsonElements, string logisticsText)
{
string result;
@ -956,6 +967,26 @@ internal class ProcessDataStandardFormat
return result;
}
private static string? GetRecordsJson(string reportFullPath, string[] lines)
{
string? result;
bool foundRecords = false;
List<string> results = new();
lines ??= File.ReadAllLines(reportFullPath);
foreach (string line in lines)
{
if (line.StartsWith("\"Records\""))
foundRecords = true;
if (!foundRecords)
continue;
if (line == "],")
break;
results.Add(line);
}
result = results.Count == 0 ? null : $"{string.Join(Environment.NewLine, results.Skip(1))}{Environment.NewLine}]";
return result;
}
}
[JsonSourceGenerationOptions(WriteIndented = true)]
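
Editor's note: a sketch of the intended call pattern for the new GetArray overload. The wrapper type is hypothetical, and the pre-built ProcessDataStandardFormat instance is assumed to come from the existing parsing path.

using System.IO;
using System.Text.Json;

namespace Adaptation.Shared;

// Hypothetical helper (not part of the commit): GetArray prefers the raw
// "Records" JSON block embedded in the report when GetRecordsJson finds one,
// and otherwise falls back to rebuilding JsonElements from the parsed columns.
internal static class ProcessDataStandardFormatSketch
{
    internal static JsonElement[] GetRecords(string reportFullPath, ProcessDataStandardFormat processDataStandardFormat)
    {
        string[] lines = File.ReadAllLines(reportFullPath);
        return ProcessDataStandardFormat.GetArray(reportFullPath, lines, processDataStandardFormat);
    }
}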

View File

@ -48,9 +48,7 @@ public class BIORAD2 : EAFLoggingUnitTesting
EAFLoggingUnitTesting?.Dispose();
}
#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Production__v2_60_0__BIORAD2__QS408M()
{
@ -61,5 +59,18 @@ public class BIORAD2 : EAFLoggingUnitTesting
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Exit"));
}
#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Production__v2_60_0__BIORAD2__pdsf()
{
string check = "*EQP_*.pdsf";
MethodBase methodBase = new StackFrame().GetMethod();
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Getting configuration"));
_ = AdaptationTesting.GetWriteConfigurationGetFileRead(methodBase, check, EAFLoggingUnitTesting.AdaptationTesting);
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Exit"));
}
}
#endif
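
Editor's note: in the hunks above and in the test classes that follow, removing the #if DEBUG/#endif guard leaves [Ignore] applied in every build, while the guarded form skips the test only in Debug builds. A small MSTest illustration of the two patterns:

using Microsoft.VisualStudio.TestTools.UnitTesting;

// Illustration only (not project code): the two [Ignore] patterns touched above.
[TestClass]
public class IgnorePatternSketch
{
    [Ignore]   // unconditional: MSTest skips this test in every configuration
    [TestMethod]
    public void AlwaysSkipped() { }

#if DEBUG
    [Ignore]   // conditional: skipped in Debug builds, typically runs in Release (e.g. a CI run)
#endif
    [TestMethod]
    public void SkippedOnlyInDebug() { }
}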

View File

@ -48,9 +48,7 @@ public class BIORAD3 : EAFLoggingUnitTesting
EAFLoggingUnitTesting?.Dispose();
}
#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Production__v2_60_0__BIORAD3__QS408M()
{

View File

@ -1,4 +1,5 @@
#if true
using Adaptation._Tests.Shared;
using Adaptation.Shared;
using Adaptation.Shared.Methods;
using Microsoft.VisualStudio.TestTools.UnitTesting;
@ -31,15 +32,11 @@ public class BIORAD2
catch (Exception) { }
}
#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Production__v2_60_0__BIORAD2__QS408M() => _BIORAD2.Production__v2_60_0__BIORAD2__QS408M();
#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Production__v2_60_0__BIORAD2__QS408M638185231662401081__NinePoint()
{
@ -55,13 +52,11 @@ public class BIORAD2
Assert.AreEqual(logistics.DateTimeFromSequence, dateTime);
dateTime = FileHandlers.QS408M.ProcessData.GetDateTime(logistics, tickOffset: 0, dateTimeText: "Tue Nov 10 12:03:56 1970");
Assert.AreEqual(logistics.DateTimeFromSequence, dateTime);
_ = Shared.AdaptationTesting.ReExtractCompareUpdatePassDirectory(variables, fileRead, logistics, validatePDSF);
_ = AdaptationTesting.ReExtractCompareUpdatePassDirectory(variables, fileRead, logistics, validatePDSF);
NonThrowTryCatch();
}
#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Production__v2_60_0__BIORAD2__QS408M638185291035612698__FourteenPoint()
{
@ -77,13 +72,11 @@ public class BIORAD2
Assert.AreEqual(logistics.DateTimeFromSequence, dateTime);
dateTime = FileHandlers.QS408M.ProcessData.GetDateTime(logistics, tickOffset: 0, dateTimeText: "Tue Nov 10 12:03:56 1970");
Assert.AreEqual(logistics.DateTimeFromSequence, dateTime);
_ = Shared.AdaptationTesting.ReExtractCompareUpdatePassDirectory(variables, fileRead, logistics, validatePDSF);
_ = AdaptationTesting.ReExtractCompareUpdatePassDirectory(variables, fileRead, logistics, validatePDSF);
NonThrowTryCatch();
}
#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Production__v2_60_0__BIORAD2__QS408M638206292859940029__EpiPro()
{
@ -99,13 +92,11 @@ public class BIORAD2
Assert.AreEqual(logistics.DateTimeFromSequence, dateTime);
dateTime = FileHandlers.QS408M.ProcessData.GetDateTime(logistics, tickOffset: 0, dateTimeText: "Tue Nov 10 12:03:56 1970");
Assert.AreEqual(logistics.DateTimeFromSequence, dateTime);
_ = Shared.AdaptationTesting.ReExtractCompareUpdatePassDirectory(variables, fileRead, logistics, validatePDSF);
_ = AdaptationTesting.ReExtractCompareUpdatePassDirectory(variables, fileRead, logistics, validatePDSF);
NonThrowTryCatch();
}
#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Production__v2_60_0__BIORAD2__QS408M638211310710952565__WMO()
{
@ -121,13 +112,11 @@ public class BIORAD2
Assert.AreEqual(logistics.DateTimeFromSequence, dateTime);
dateTime = FileHandlers.QS408M.ProcessData.GetDateTime(logistics, tickOffset: 0, dateTimeText: "Tue Nov 10 12:03:56 1970");
Assert.AreEqual(logistics.DateTimeFromSequence, dateTime);
_ = Shared.AdaptationTesting.ReExtractCompareUpdatePassDirectory(variables, fileRead, logistics, validatePDSF);
_ = AdaptationTesting.ReExtractCompareUpdatePassDirectory(variables, fileRead, logistics, validatePDSF);
NonThrowTryCatch();
}
#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Production__v2_60_0__BIORAD2__QS408M638635290101315251__ADO126448()
{
@ -143,7 +132,24 @@ public class BIORAD2
Assert.AreEqual(logistics.DateTimeFromSequence, dateTime);
dateTime = FileHandlers.QS408M.ProcessData.GetDateTime(logistics, tickOffset: 0, dateTimeText: "Tue Nov 10 12:03:56 1970");
Assert.AreEqual(logistics.DateTimeFromSequence, dateTime);
_ = Shared.AdaptationTesting.ReExtractCompareUpdatePassDirectory(variables, fileRead, logistics, validatePDSF);
_ = AdaptationTesting.ReExtractCompareUpdatePassDirectory(variables, fileRead, logistics, validatePDSF);
NonThrowTryCatch();
}
#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Production__v2_60_0__BIORAD2__pdsf638925042012952259__Normal()
{
bool validatePDSF = false;
string check = "*EQP_*.pdsf";
_BIORAD2.Production__v2_60_0__BIORAD2__pdsf();
MethodBase methodBase = new StackFrame().GetMethod();
string[] variables = _BIORAD2.AdaptationTesting.GetVariables(methodBase, check, validatePDSF);
IFileRead fileRead = _BIORAD2.AdaptationTesting.Get(methodBase, sourceFileLocation: variables[2], sourceFileFilter: variables[3], useCyclicalForDescription: false);
Logistics logistics = new(fileRead);
_ = AdaptationTesting.ReExtractCompareUpdatePassDirectory(variables, fileRead, logistics, validatePDSF);
NonThrowTryCatch();
}

View File

@ -31,15 +31,11 @@ public class BIORAD3
catch (Exception) { }
}
#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Production__v2_60_0__BIORAD3__QS408M() => _BIORAD3.Production__v2_60_0__BIORAD3__QS408M();
#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Production__v2_60_0__BIORAD3__QS408M637406016892454000__ReactorAndRDS()
{
@ -59,9 +55,7 @@ public class BIORAD3
NonThrowTryCatch();
}
#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Production__v2_60_0__BIORAD3__QS408M638227775101723135__Error()
{

View File

@ -115,6 +115,14 @@
<Compile Include="Adaptation\FileHandlers\OpenInsightMetrologyViewer\FileRead.cs" /> <Compile Include="Adaptation\FileHandlers\OpenInsightMetrologyViewer\FileRead.cs" />
<Compile Include="Adaptation\FileHandlers\OpenInsightMetrologyViewer\WSRequest.cs" /> <Compile Include="Adaptation\FileHandlers\OpenInsightMetrologyViewer\WSRequest.cs" />
<Compile Include="Adaptation\FileHandlers\OpenInsightMetrologyViewerAttachments\FileRead.cs" /> <Compile Include="Adaptation\FileHandlers\OpenInsightMetrologyViewerAttachments\FileRead.cs" />
<Compile Include="Adaptation\FileHandlers\pdsf\Body.cs" />
<Compile Include="Adaptation\FileHandlers\pdsf\Constant.cs" />
<Compile Include="Adaptation\FileHandlers\pdsf\FileRead.cs" />
<Compile Include="Adaptation\FileHandlers\pdsf\Footer.cs" />
<Compile Include="Adaptation\FileHandlers\pdsf\Header.cs" />
<Compile Include="Adaptation\FileHandlers\pdsf\Row.cs" />
<Compile Include="Adaptation\FileHandlers\pdsf\Run.cs" />
<Compile Include="Adaptation\FileHandlers\pdsf\Site.cs" />
<Compile Include="Adaptation\FileHandlers\Processed\FileRead.cs" /> <Compile Include="Adaptation\FileHandlers\Processed\FileRead.cs" />
<Compile Include="Adaptation\FileHandlers\QS408M\Body.cs" /> <Compile Include="Adaptation\FileHandlers\QS408M\Body.cs" />
<Compile Include="Adaptation\FileHandlers\QS408M\Description.cs" /> <Compile Include="Adaptation\FileHandlers\QS408M\Description.cs" />