Refactor and clean up test cases for BIORAD4 and BIORAD5; update file handling logic and add new tests for descriptor parsing
- Removed unused test methods and consolidated checks in BIORAD4 and BIORAD5 test classes.
- Updated file name patterns in test methods to be more specific.
- Enhanced error handling in AdaptationTesting class to avoid creating directories with "10." in the path.
- Added new static tests for descriptor parsing and date formatting (a sketch of one such test follows below).
- Updated project file to include new txt file handlers and remove obsolete Stratus file handlers.
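A minimal sketch of one such static test, for orientation only: the test framework (MSTest), the class and method names, and the sample input are assumptions; `GetDescriptor` and `GetDateFormat` are the APIs exercised by this commit, and the expected values follow the parsing rules of the removed Stratus handler, which the txt handler is assumed to mirror.

```csharp
using System;
using System.Globalization;
using Microsoft.VisualStudio.TestTools.UnitTesting;

[TestClass]
public class TxtStaticSketchTests // hypothetical class name
{
    [TestMethod]
    public void GetDescriptor_ReactorRdsPsn_SplitsSegments()
    {
        // Expected values follow the reactor-RDS-PSN split rules from the removed Stratus parser;
        // the txt parser is assumed to behave the same for this shape of input.
        var descriptor = Adaptation.FileHandlers.txt.ProcessData.GetDescriptor("75-123456-4567");
        Assert.AreEqual("75", descriptor.Reactor);
        Assert.AreEqual("123456", descriptor.RDS);
    }

    [TestMethod]
    public void GetDateFormat_RoundTripsFormattedDate()
    {
        string dateFormat = Adaptation.FileHandlers.txt.Description.GetDateFormat();
        string formatted = new DateTime(2024, 1, 2, 3, 4, 5).ToString(dateFormat, CultureInfo.InvariantCulture);
        Assert.IsTrue(DateTime.TryParseExact(formatted, dateFormat, CultureInfo.InvariantCulture, DateTimeStyles.None, out _));
    }
}
```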
@@ -24,7 +24,6 @@ public class CellInstanceConnectionName
nameof(OpenInsightMetrologyViewerAttachments) => new OpenInsightMetrologyViewerAttachments.FileRead(smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null),
nameof(Processed) => new Processed.FileRead(smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null),
nameof(SPaCe) => new SPaCe.FileRead(smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null),
nameof(Stratus) => new Stratus.FileRead(smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null),
nameof(txt) => new txt.FileRead(smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null),
_ => throw new Exception($"\"{cellInstanceConnectionName}\" not mapped")
};
@@ -97,9 +97,9 @@ public class FileRead : Shared.FileRead, IFileRead

private void CallbackInProcessCleared(string sourceArchiveFile, string traceDummyFile, string targetFileLocation, string monARessource, string inProcessDirectory, long sequence, bool warning)
{
const string site = "sjc";
const string site = "els";
string stateName = string.Concat("Dummy_", _EventName);
const string monInURL = "http://moninhttp.sjc.infineon.com/input/text";
const string monInURL = $"http://moninhttp.{site}.infineon.com/input/text";
#pragma warning disable CS0436 // The type 'MonIn' in 'L:\Workspaces\33_CS29_79_72\MIT_EAF_Adaptations\Trunk\MET08THFTIRSTRATUS\06_SourceCode\MET08THFTIRSTRATUS\Adaptation\Infineon\Monitoring\MonA\MonIn.cs' conflicts with the imported type 'MonIn' in 'Infineon.Monitoring.MonA, Version=2.0.0.0, Culture=neutral, PublicKeyToken=null'. Using the type defined in 'L:\Workspaces\33_CS29_79_72\MIT_EAF_Adaptations\Trunk\MET08THFTIRSTRATUS\06_SourceCode\MET08THFTIRSTRATUS\Adaptation\Infineon\Monitoring\MonA\MonIn.cs'.
MonIn monIn = MonIn.GetInstance(monInURL);
@@ -113,7 +113,7 @@ public class FileRead : Shared.FileRead, IFileRead
return results;
}

private void SaveOpenInsightFile(string reportFullPath, DateTime dateTime, List<Stratus.Description> descriptions, Test[] tests)
private void SaveOpenInsightFile(string reportFullPath, DateTime dateTime, List<txt.Description> descriptions, Test[] tests)
{
string duplicateFile;
bool isDummyRun = false;
@@ -163,7 +163,7 @@ public class FileRead : Shared.FileRead, IFileRead
_Logistics = new Logistics(reportFullPath, processDataStandardFormat);
SetFileParameterLotIDToLogisticsMID();
JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(reportFullPath, lines, processDataStandardFormat);
List<Stratus.Description> descriptions = Stratus.ProcessData.GetDescriptions(jsonElements);
List<txt.Description> descriptions = txt.ProcessData.GetDescriptions(jsonElements);
Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
SaveOpenInsightFile(reportFullPath, dateTime, descriptions, tests);
@@ -128,7 +128,7 @@ public class FromIQS
return string.Join(Environment.NewLine, results);
} // cSpell:restore

private static string GetCommandText(Logistics logistics, Stratus.Description description, string dateTime, long? subGroupId)
private static string GetCommandText(Logistics logistics, txt.Description description, string dateTime, long? subGroupId)
{ // cSpell:disable
List<string> results = new();
results.Add(" select iq.ev_count, iq.cl_count, iq.sl_count, iq.se_sgrp, iq.se_sgtm, iq.se_tsno, iq.td_test, iq.pr_name, iq.jd_name, iq.pl_name, iq.pd_name, iq.td_name, iq.se_val ");
@@ -251,14 +251,14 @@ public class FromIQS
}
}

internal static (long?, int?, string) GetCommandText(string connectionString, Logistics logistics, Stratus.Description description, long breakAfter, long preWait)
internal static (long?, int?, string) GetCommandText(string connectionString, Logistics logistics, txt.Description description, long breakAfter, long preWait)
{
DateTime dateTime;
int? count = null;
string commandText;
long? result = null;
StringBuilder stringBuilder;
string dateFormat = Stratus.Description.GetDateFormat();
string dateFormat = txt.Description.GetDateFormat();
if (DateTime.TryParseExact(description.Date, dateFormat, CultureInfo.InvariantCulture, DateTimeStyles.None, out DateTime dateTimeParsed))
dateTime = dateTimeParsed;
else if (DateTime.TryParse(description.Date, CultureInfo.InvariantCulture, DateTimeStyles.None, out dateTimeParsed))
@@ -325,7 +325,7 @@ public class FromIQS
return new(result, count, commandText);
}

internal static void Save(string openInsightApiECDirectory, Logistics logistics, string reportFullPath, Stratus.Description description, long? subGroupId, string weekOfYear)
internal static void Save(string openInsightApiECDirectory, Logistics logistics, string reportFullPath, txt.Description description, long? subGroupId, string weekOfYear)
{
string fileName = Path.GetFileName(reportFullPath);
string? ecPathRoot = Path.GetPathRoot(openInsightApiECDirectory);
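The second GetCommandText overload above resolves the sample date by trying the handler's exact format first and only then falling back to a general invariant-culture parse. A self-contained sketch of that fallback, assuming a placeholder format string in place of txt.Description.GetDateFormat():

```csharp
using System;
using System.Globalization;

internal static class DateFallbackSketch
{
    // "MM/dd/yyyy hh:mm:ss tt" is only a stand-in for whatever txt.Description.GetDateFormat() returns.
    private const string DateFormat = "MM/dd/yyyy hh:mm:ss tt";

    // Mirrors the TryParseExact-then-TryParse order used in GetCommandText above.
    internal static DateTime Parse(string text, DateTime fallback)
    {
        if (DateTime.TryParseExact(text, DateFormat, CultureInfo.InvariantCulture, DateTimeStyles.None, out DateTime exact))
            return exact;      // preferred: the handler's exact format
        if (DateTime.TryParse(text, CultureInfo.InvariantCulture, DateTimeStyles.None, out DateTime loose))
            return loose;      // fallback: any invariant-culture representation
        return fallback;       // last resort, e.g. the logistics sequence timestamp (assumption)
    }
}
```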
@@ -110,7 +110,7 @@ public class FileRead : Shared.FileRead, IFileRead
return results;
}

private void SendData(string reportFullPath, DateTime dateTime, JsonElement[] jsonElements, List<Stratus.Description> descriptions)
private void SendData(string reportFullPath, DateTime dateTime, JsonElement[] jsonElements, List<txt.Description> descriptions)
{
string checkDirectory;
WSRequest wsRequest = new(this, _Logistics, jsonElements, descriptions);
@@ -144,7 +144,7 @@ public class FileRead : Shared.FileRead, IFileRead
_Logistics = new Logistics(reportFullPath, processDataStandardFormat);
SetFileParameterLotIDToLogisticsMID();
JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(reportFullPath, lines, processDataStandardFormat);
List<Stratus.Description> descriptions = Stratus.ProcessData.GetDescriptions(jsonElements);
List<txt.Description> descriptions = txt.ProcessData.GetDescriptions(jsonElements);
Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
SendData(reportFullPath, dateTime, jsonElements, descriptions);
@@ -29,20 +29,20 @@ public class WSRequest
public string StdDev { get; set; }
public string Title { get; set; }
public string UniqueId { get; set; }
public List<Stratus.Detail> Details { get; protected set; }
public List<txt.Detail> Details { get; protected set; }

[Obsolete("For json")] public WSRequest() { }

#pragma warning disable IDE0060
internal WSRequest(IFileRead fileRead, Logistics logistics, JsonElement[] jsonElements, List<Stratus.Description> descriptions, string processDataStandardFormat = null)
internal WSRequest(IFileRead fileRead, Logistics logistics, JsonElement[] jsonElements, List<txt.Description> descriptions, string processDataStandardFormat = null)
#pragma warning restore IDE0060
{
Id = -1;
FilePath = string.Empty;
CellName = logistics.MesEntity;
if (descriptions[0] is not Stratus.Description x)
if (descriptions[0] is not txt.Description x)
throw new Exception();
Details = new List<Stratus.Detail>();
Details = new List<txt.Detail>();
//Header
{
AttemptCounter = x.AttemptCounter;
@@ -61,10 +61,10 @@ public class WSRequest
UniqueId = x.UniqueId;
}
string[] segments;
Stratus.Detail detail;
foreach (Stratus.Description description in descriptions)
txt.Detail detail;
foreach (txt.Description description in descriptions)
{
detail = new Stratus.Detail
detail = new txt.Detail
{
HeaderUniqueId = description.HeaderUniqueId,
Mean = description.Mean,
@@ -79,7 +79,7 @@ public class WSRequest
};
segments = description.Slot.Split(',');
foreach (string segment in segments)
detail.Points.Add(new Stratus.Point { HeaderUniqueId = description.HeaderUniqueId, UniqueId = description.UniqueId, Position = segment });
detail.Points.Add(new txt.Point { HeaderUniqueId = description.HeaderUniqueId, UniqueId = description.UniqueId, Position = segment });
segments = description.Thickness.Split(',');
if (detail.Points.Count != segments.Length)
throw new Exception();
@@ -96,7 +96,7 @@ public class WSRequest
}
}

internal static long GetHeaderId(IFileRead fileRead, Logistics logistics, string openInsightMetrologyViewerAPI, string openInsightMetrologyViewerFileShare, int weekOfYear, WS.Results results, JsonElement[] jsonElements, List<Stratus.Description> descriptions)
internal static long GetHeaderId(IFileRead fileRead, Logistics logistics, string openInsightMetrologyViewerAPI, string openInsightMetrologyViewerFileShare, int weekOfYear, WS.Results results, JsonElement[] jsonElements, List<txt.Description> descriptions)
{
long result;
if (results is not null && results.HeaderId is not null)
@@ -114,11 +114,11 @@ public class WSRequest
}

#pragma warning disable IDE0060
internal static void PostOpenInsightMetrologyViewerAttachments(IFileRead fileRead, Logistics logistics, string openInsightMetrologyViewerAPI, string originalDataBioRad, List<Stratus.Description> descriptions, string matchDirectory, WS.Results results, string headerIdDirectory)
internal static void PostOpenInsightMetrologyViewerAttachments(IFileRead fileRead, Logistics logistics, string openInsightMetrologyViewerAPI, List<txt.Description> descriptions, string matchDirectory, WS.Results results, string headerIdDirectory)
#pragma warning restore IDE0060
{
string dataPDFFile = Path.Combine(matchDirectory, $"{results.HeaderId}.pdf");
string[] txtFiles = Directory.GetFiles(matchDirectory, string.Concat(originalDataBioRad, "*.txt"), SearchOption.TopDirectoryOnly);
string[] txtFiles = Directory.GetFiles(matchDirectory, "*.txt", SearchOption.TopDirectoryOnly);
if (txtFiles.Length != 1)
throw new Exception($"Invalid source file count for <{results.HeaderId}>!");
string[] lines = File.ReadAllLines(txtFiles[0]);
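The attachment upload above no longer filters on the `OriginalDataBioRad_` prefix; it now requires the match directory to contain exactly one `.txt` file. A small sketch isolating that lookup (the directory argument is hypothetical and used only for illustration):

```csharp
using System;
using System.IO;

internal static class AttachmentLookupSketch
{
    // Mirrors the stricter lookup in PostOpenInsightMetrologyViewerAttachments: any single
    // .txt file in the match directory is accepted, regardless of its name prefix.
    internal static string GetSingleTxtFile(string matchDirectory)
    {
        string[] txtFiles = Directory.GetFiles(matchDirectory, "*.txt", SearchOption.TopDirectoryOnly);
        if (txtFiles.Length != 1)
            throw new Exception($"Invalid source file count in <{matchDirectory}>!");
        return txtFiles[0];
    }
}
```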
@@ -15,7 +15,6 @@ namespace Adaptation.FileHandlers.OpenInsightMetrologyViewerAttachments;
public class FileRead : Shared.FileRead, IFileRead
{

private readonly string _OriginalDataBioRad;
private readonly string _OpenInsightMetrologyViewerAPI;
private readonly string _OpenInsightMetrologyViewerFileShare;

@@ -31,7 +30,6 @@ public class FileRead : Shared.FileRead, IFileRead
throw new Exception(cellInstanceConnectionName);
if (!_IsDuplicator)
throw new Exception(cellInstanceConnectionName);
_OriginalDataBioRad = "OriginalDataBioRad_";
_OpenInsightMetrologyViewerAPI = GetPropertyValue(cellInstanceConnectionName, modelObjectParameters, "OpenInsight.MetrologyViewerAPI");
_OpenInsightMetrologyViewerFileShare = GetPropertyValue(cellInstanceConnectionName, modelObjectParameters, "OpenInsight.MetrologyViewerFileShare");
}
@@ -137,7 +135,7 @@ public class FileRead : Shared.FileRead, IFileRead
return result;
}

private void PostOpenInsightMetrologyViewerAttachments(JsonElement[] jsonElements, List<Stratus.Description> descriptions)
private void PostOpenInsightMetrologyViewerAttachments(JsonElement[] jsonElements, List<txt.Description> descriptions)
{
Shared.Metrology.WS.Results? results;
string jobIdDirectory = Path.Combine(Path.GetDirectoryName(_FileConnectorConfiguration.AlternateTargetFolder) ?? throw new Exception(), _Logistics.JobID);
@@ -157,7 +155,7 @@ public class FileRead : Shared.FileRead, IFileRead
string? headerIdDirectory = GetHeaderIdDirectory(headerId);
if (string.IsNullOrEmpty(headerIdDirectory))
throw new Exception($"Didn't find header id directory <{headerId}>");
OpenInsightMetrologyViewer.WSRequest.PostOpenInsightMetrologyViewerAttachments(this, _Logistics, _OpenInsightMetrologyViewerAPI, _OriginalDataBioRad, descriptions, matchDirectories[0], results, headerIdDirectory);
OpenInsightMetrologyViewer.WSRequest.PostOpenInsightMetrologyViewerAttachments(this, _Logistics, _OpenInsightMetrologyViewerAPI, descriptions, matchDirectories[0], results, headerIdDirectory);
}

private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
@@ -170,7 +168,7 @@ public class FileRead : Shared.FileRead, IFileRead
_Logistics = new Logistics(reportFullPath, processDataStandardFormat);
SetFileParameterLotIDToLogisticsMID();
JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(reportFullPath, lines, processDataStandardFormat);
List<Stratus.Description> descriptions = Stratus.ProcessData.GetDescriptions(jsonElements);
List<txt.Description> descriptions = txt.ProcessData.GetDescriptions(jsonElements);
Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
PostOpenInsightMetrologyViewerAttachments(jsonElements, descriptions);
@@ -108,7 +108,7 @@ public class FileRead : Shared.FileRead, IFileRead
return results;
}

private void DirectoryMove(string reportFullPath, DateTime dateTime, JsonElement[] jsonElements, List<Stratus.Description> descriptions)
private void DirectoryMove(string reportFullPath, DateTime dateTime, JsonElement[] jsonElements, List<txt.Description> descriptions)
{
if (dateTime == DateTime.MinValue)
throw new ArgumentNullException(nameof(dateTime));
@@ -171,7 +171,7 @@ public class FileRead : Shared.FileRead, IFileRead
_Logistics = new Logistics(reportFullPath, processDataStandardFormat);
SetFileParameterLotIDToLogisticsMID();
JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(reportFullPath, lines, processDataStandardFormat);
List<Stratus.Description> descriptions = Stratus.ProcessData.GetDescriptions(jsonElements);
List<txt.Description> descriptions = txt.ProcessData.GetDescriptions(jsonElements);
Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics), tests, jsonElements, new List<FileInfo>());
if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
@@ -1,26 +0,0 @@
namespace Adaptation.FileHandlers.Stratus;

internal class Constant
{

public string Mean { get; } = "Mean";
public string Slot { get; } = "Slot";
public string STDD { get; } = "STDD";
public string Batch { get; } = "Batch";
public string Wafer { get; } = "Wafer";
public string OneHypen { get; } = "1 - ";
public string Recipe { get; } = "Recipe";
public string Source { get; } = "Source:";
public string Started { get; } = "started";
public string Cassette { get; } = "Cassette";
public string Finished { get; } = "finished.";
public string Reference { get; } = "Reference";
public string StartedAt { get; } = "started at";
public string Thickness { get; } = "Thickness,";
public string Destination { get; } = "Destination:";
public string IsPut { get; } = "is put to the slot";
public string WaferParentheses { get; } = "Wafer (";
public string IsTaken { get; } = "is taken from the slot";
public string ProcessFailed { get; } = "- Process failed -";

}
@ -1,147 +0,0 @@
|
||||
using Adaptation.Eaf.Management.ConfigurationData.CellAutomation;
|
||||
using Adaptation.Ifx.Eaf.EquipmentConnector.File.Configuration;
|
||||
using Adaptation.Shared;
|
||||
using Adaptation.Shared.Methods;
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Text.Json;
|
||||
using System.Text.RegularExpressions;
|
||||
|
||||
namespace Adaptation.FileHandlers.Stratus;
|
||||
|
||||
public class FileRead : Shared.FileRead, IFileRead
|
||||
{
|
||||
|
||||
private long? _TickOffset;
|
||||
private readonly string _OriginalDataBioRad;
|
||||
|
||||
public FileRead(ISMTP smtp, Dictionary<string, string> fileParameter, string cellInstanceName, int? connectionCount, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList<ModelObjectParameterDefinition> modelObjectParameters, string equipmentDictionaryName, Dictionary<string, List<long>> dummyRuns, Dictionary<long, List<Shared.Metrology.WS.Results>> staticRuns, bool useCyclicalForDescription, bool isEAFHosted) :
|
||||
base(new Description(), true, smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null)
|
||||
{
|
||||
_MinFileLength = 10;
|
||||
_NullData = string.Empty;
|
||||
_Logistics = new(this);
|
||||
if (_FileParameter is null)
|
||||
throw new Exception(cellInstanceConnectionName);
|
||||
if (_ModelObjectParameterDefinitions is null)
|
||||
throw new Exception(cellInstanceConnectionName);
|
||||
if (_IsDuplicator)
|
||||
throw new Exception(cellInstanceConnectionName);
|
||||
_OriginalDataBioRad = "OriginalDataBioRad_";
|
||||
if (_IsEAFHosted)
|
||||
NestExistingFiles(_FileConnectorConfiguration);
|
||||
}
|
||||
|
||||
void IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, Exception exception) => Move(extractResults);
|
||||
|
||||
void IFileRead.WaitForThread() => WaitForThread(thread: null, threadExceptions: null);
|
||||
|
||||
string IFileRead.GetEventDescription()
|
||||
{
|
||||
string result = _Description.GetEventDescription();
|
||||
return result;
|
||||
}
|
||||
|
||||
List<string> IFileRead.GetHeaderNames()
|
||||
{
|
||||
List<string> results = _Description.GetHeaderNames();
|
||||
return results;
|
||||
}
|
||||
|
||||
string[] IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, string to, string from, string resolvedFileLocation, Exception exception)
|
||||
{
|
||||
string[] results = Move(extractResults, to, from, resolvedFileLocation, exception);
|
||||
return results;
|
||||
}
|
||||
|
||||
JsonProperty[] IFileRead.GetDefault()
|
||||
{
|
||||
JsonProperty[] results = _Description.GetDefault(this, _Logistics);
|
||||
return results;
|
||||
}
|
||||
|
||||
Dictionary<string, string> IFileRead.GetDisplayNamesJsonElement()
|
||||
{
|
||||
Dictionary<string, string> results = _Description.GetDisplayNamesJsonElement(this);
|
||||
return results;
|
||||
}
|
||||
|
||||
List<IDescription> IFileRead.GetDescriptions(IFileRead fileRead, List<Test> tests, IProcessData processData)
|
||||
{
|
||||
List<IDescription> results = _Description.GetDescriptions(fileRead, _Logistics, tests, processData);
|
||||
return results;
|
||||
}
|
||||
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.GetExtractResult(string reportFullPath, string eventName)
|
||||
{
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
|
||||
if (string.IsNullOrEmpty(eventName))
|
||||
throw new Exception();
|
||||
_ReportFullPath = reportFullPath;
|
||||
DateTime dateTime = DateTime.Now;
|
||||
results = GetExtractResult(reportFullPath, dateTime);
|
||||
if (results.Item3 is null)
|
||||
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(results.Item1, Array.Empty<Test>(), JsonSerializer.Deserialize<JsonElement[]>("[]"), results.Item4);
|
||||
if (results.Item3.Length > 0 && _IsEAFHosted)
|
||||
WritePDSF(this, results.Item3);
|
||||
UpdateLastTicksDuration(DateTime.Now.Ticks - dateTime.Ticks);
|
||||
return results;
|
||||
}
|
||||
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.ReExtract()
|
||||
{
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
|
||||
List<string> headerNames = _Description.GetHeaderNames();
|
||||
Dictionary<string, string> keyValuePairs = _Description.GetDisplayNamesJsonElement(this);
|
||||
results = ReExtract(this, headerNames, keyValuePairs);
|
||||
return results;
|
||||
}
|
||||
|
||||
#nullable enable
|
||||
|
||||
private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
|
||||
{
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> results = new(string.Empty, Array.Empty<Test>(), Array.Empty<JsonElement>(), new List<FileInfo>());
|
||||
_TickOffset ??= 0; // new FileInfo(reportFullPath).LastWriteTime.Ticks - dateTime.Ticks;
|
||||
_Logistics = new Logistics(this, _TickOffset.Value, reportFullPath, useSplitForMID: true);
|
||||
SetFileParameterLotIDToLogisticsMID();
|
||||
if (_Logistics.FileInfo.Length < _MinFileLength)
|
||||
results.Item4.Add(_Logistics.FileInfo);
|
||||
else
|
||||
{
|
||||
Run? run = Run.Get(_Logistics);
|
||||
if (run is null)
|
||||
throw new Exception(string.Concat("A) No Data - ", dateTime.Ticks));
|
||||
IProcessData iProcessData = new ProcessData(this, _Logistics, results.Item4, _OriginalDataBioRad, run, dataText: string.Empty);
|
||||
if (iProcessData is not ProcessData processData)
|
||||
results = new(string.Concat("B) No Data - ", dateTime.Ticks), Array.Empty<Test>(), Array.Empty<JsonElement>(), results.Item4);
|
||||
else
|
||||
{
|
||||
string mid;
|
||||
if (!string.IsNullOrEmpty(processData.Cassette) && string.IsNullOrEmpty(processData.Reactor) && string.IsNullOrEmpty(processData.RDS) && string.IsNullOrEmpty(processData.PSN))
|
||||
mid = processData.Cassette;
|
||||
else if (!string.IsNullOrEmpty(processData.Employee) && string.IsNullOrEmpty(processData.Reactor) && string.IsNullOrEmpty(processData.RDS) && string.IsNullOrEmpty(processData.PSN))
|
||||
mid = processData.Employee;
|
||||
else
|
||||
{
|
||||
mid = string.Concat(processData.Reactor, "-", processData.RDS, "-", processData.PSN);
|
||||
mid = Regex.Replace(mid, @"[\\,\/,\:,\*,\?,\"",\<,\>,\|]", "_").Split('\r')[0].Split('\n')[0];
|
||||
}
|
||||
SetFileParameterLotID(mid);
|
||||
_Logistics.Update(mid, processData.Reactor);
|
||||
if (iProcessData.Details.Count > 0 && iProcessData.Details[0] is Detail detail && string.IsNullOrEmpty(detail.PassFail))
|
||||
results.Item4.Add(_Logistics.FileInfo);
|
||||
else
|
||||
{
|
||||
if (iProcessData.Details.Count > 0)
|
||||
results = iProcessData.GetResults(this, _Logistics, results.Item4);
|
||||
else
|
||||
results = new(string.Concat("C) No Data - ", dateTime.Ticks), Array.Empty<Test>(), Array.Empty<JsonElement>(), results.Item4);
|
||||
}
|
||||
}
|
||||
}
|
||||
return results;
|
||||
}
|
||||
|
||||
}
|
||||
@ -1,48 +0,0 @@
|
||||
using System.Collections.ObjectModel;
|
||||
|
||||
namespace Adaptation.FileHandlers.Stratus;
|
||||
|
||||
#nullable enable
|
||||
|
||||
public class Grade
|
||||
{
|
||||
|
||||
public Grade(string meanThickness, string stdDev)
|
||||
{
|
||||
MeanThickness = meanThickness;
|
||||
StdDev = stdDev;
|
||||
}
|
||||
|
||||
public string MeanThickness { get; }
|
||||
public string StdDev { get; }
|
||||
|
||||
internal static Grade? Get(Constant constant, ReadOnlyCollection<string> groups)
|
||||
{
|
||||
Grade? result;
|
||||
string? mean = null;
|
||||
string? stdDev = null;
|
||||
int[] j = new int[] { 0 };
|
||||
foreach (string groupText in groups)
|
||||
{
|
||||
if (!groupText.Contains(constant.Finished))
|
||||
continue;
|
||||
mean = string.Empty;
|
||||
stdDev = string.Empty;
|
||||
Header.ScanPast(groupText, j, constant.Mean);
|
||||
mean = Wafer.GetToken(groupText, j);
|
||||
if (mean.EndsWith(","))
|
||||
mean = mean.Remove(mean.Length - 1, 1);
|
||||
Header.ScanPast(groupText, j, constant.STDD);
|
||||
stdDev = Wafer.GetToken(groupText, j);
|
||||
if (stdDev.EndsWith(","))
|
||||
stdDev = stdDev.Remove(stdDev.Length - 1, 1);
|
||||
}
|
||||
if (mean is null || stdDev is null)
|
||||
result = null;
|
||||
else
|
||||
result = new(meanThickness: mean,
|
||||
stdDev: stdDev);
|
||||
return result;
|
||||
}
|
||||
|
||||
}
|
||||
@ -1,96 +0,0 @@
|
||||
using System;
|
||||
|
||||
namespace Adaptation.FileHandlers.Stratus;
|
||||
|
||||
#nullable enable
|
||||
|
||||
public class Header
|
||||
{
|
||||
|
||||
public Header(string batch, string cassette, string dateTime)
|
||||
{
|
||||
Batch = batch;
|
||||
Cassette = cassette;
|
||||
DateTime = dateTime;
|
||||
}
|
||||
|
||||
public string Batch { get; }
|
||||
public string Cassette { get; }
|
||||
public string DateTime { get; }
|
||||
|
||||
internal static string GetBefore(string text, int[] i, string search)
|
||||
{
|
||||
string str;
|
||||
string str1;
|
||||
int num = text.IndexOf(search, i[0]);
|
||||
if (num <= -1)
|
||||
{
|
||||
str = text.Substring(i[0]);
|
||||
i[0] = text.Length;
|
||||
str1 = str.Trim();
|
||||
}
|
||||
else
|
||||
{
|
||||
str = text.Substring(i[0], num - i[0]);
|
||||
i[0] = num + search.Length;
|
||||
str1 = str.Trim();
|
||||
}
|
||||
return str1;
|
||||
}
|
||||
|
||||
internal static string GetToEOL(string text, int[] i)
|
||||
{
|
||||
string result;
|
||||
if (text.IndexOf("\n", i[0]) > -1)
|
||||
result = GetBefore(text, i, "\n");
|
||||
else
|
||||
result = GetBefore(text, i, Environment.NewLine);
|
||||
return result;
|
||||
}
|
||||
|
||||
private static string GetToText(string text, int[] i, string search) =>
|
||||
text.Substring(i[0], text.IndexOf(search, i[0]) - i[0]).Trim();
|
||||
|
||||
internal static void ScanPast(string text, int[] i, string search)
|
||||
{
|
||||
int num = text.IndexOf(search, i[0]);
|
||||
if (num <= -1)
|
||||
i[0] = text.Length;
|
||||
else
|
||||
i[0] = num + search.Length;
|
||||
}
|
||||
|
||||
internal static Header Get(string text, Constant constant, int[] i)
|
||||
{
|
||||
Header? result;
|
||||
string batch;
|
||||
if (!text.Contains(constant.Batch) || !text.Contains(constant.Started))
|
||||
batch = string.Empty;
|
||||
else
|
||||
{
|
||||
for (int z = 0; z < int.MaxValue; z++)
|
||||
{
|
||||
ScanPast(text, i, constant.Batch);
|
||||
if (!text.Substring(i[0]).Contains(constant.Batch))
|
||||
break;
|
||||
}
|
||||
batch = GetToText(text, i, constant.Started);
|
||||
ScanPast(text, i, constant.StartedAt);
|
||||
}
|
||||
ScanPast(text, i, constant.Cassette);
|
||||
string cassette;
|
||||
if (!text.Substring(i[0]).Contains(constant.Started))
|
||||
cassette = string.Empty;
|
||||
else
|
||||
cassette = GetToText(text, i, constant.Started);
|
||||
ScanPast(text, i, constant.StartedAt);
|
||||
string dateTime = GetToEOL(text, i);
|
||||
if (dateTime.EndsWith("."))
|
||||
dateTime = dateTime.Remove(dateTime.Length - 1, 1);
|
||||
result = new(batch: batch,
|
||||
cassette: cassette,
|
||||
dateTime: dateTime);
|
||||
return result;
|
||||
}
|
||||
|
||||
}
|
||||
@ -1,676 +0,0 @@
|
||||
using Adaptation.Shared;
|
||||
using Adaptation.Shared.Methods;
|
||||
using log4net;
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Data;
|
||||
using System.Globalization;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Text;
|
||||
using System.Text.Json;
|
||||
using System.Text.Json.Serialization;
|
||||
using System.Text.RegularExpressions;
|
||||
|
||||
namespace Adaptation.FileHandlers.Stratus;
|
||||
|
||||
public partial class ProcessData : IProcessData
|
||||
{
|
||||
|
||||
private readonly List<object> _Details;
|
||||
|
||||
public string JobID { get; set; }
|
||||
public string MesEntity { get; set; }
|
||||
public string Batch { get; set; }
|
||||
public string Cassette { get; set; }
|
||||
public DateTime Date { get; set; }
|
||||
public string FilePath { get; set; }
|
||||
public string Layer { get; set; }
|
||||
public string MeanThickness { get; set; }
|
||||
public string Employee { get; set; }
|
||||
public string PSN { get; set; }
|
||||
public string RDS { get; set; }
|
||||
public string Reactor { get; set; }
|
||||
public string StdDev { get; set; }
|
||||
public string Title { get; set; }
|
||||
public string UniqueId { get; set; }
|
||||
public string Zone { get; set; }
|
||||
//
|
||||
public string ThicknessSlotOne { get; set; }
|
||||
public string ThicknessSlotTwentyFive { get; set; }
|
||||
public string DeltaThicknessSlotsOneAndTwentyFive { get; set; }
|
||||
public string PercentDeltaThicknessSlotsOneAndTwentyFive { get; set; }
|
||||
|
||||
List<object> Shared.Properties.IProcessData.Details => _Details;
|
||||
|
||||
private int _I;
|
||||
private string _Data;
|
||||
private readonly ILog _Log;
|
||||
|
||||
#nullable enable
|
||||
|
||||
internal ProcessData(IFileRead fileRead, Logistics logistics, List<FileInfo> fileInfoCollection, string originalDataBioRad, Run? run, string dataText)
|
||||
{
|
||||
JobID = logistics.JobID;
|
||||
if (!string.IsNullOrEmpty(dataText))
|
||||
fileInfoCollection.Clear();
|
||||
_Details = new List<object>();
|
||||
MesEntity = logistics.MesEntity;
|
||||
_Log = LogManager.GetLogger(typeof(ProcessData));
|
||||
Parse(fileRead, logistics, fileInfoCollection, originalDataBioRad, run, dataText);
|
||||
}
|
||||
|
||||
string IProcessData.GetCurrentReactor(IFileRead fileRead, Logistics logistics, Dictionary<string, string> reactors) => throw new Exception(string.Concat("See ", nameof(Parse)));
|
||||
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> IProcessData.GetResults(IFileRead fileRead, Logistics logistics, List<FileInfo> fileInfoCollection)
|
||||
{
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
|
||||
List<Test> tests = new();
|
||||
foreach (object item in _Details)
|
||||
tests.Add(Test.BioRadStratus);
|
||||
List<IDescription> descriptions = fileRead.GetDescriptions(fileRead, tests, this);
|
||||
if (tests.Count != descriptions.Count)
|
||||
throw new Exception();
|
||||
for (int i = 0; i < tests.Count; i++)
|
||||
{
|
||||
if (descriptions[i] is not Description description)
|
||||
throw new Exception();
|
||||
if (description.Test != (int)tests[i])
|
||||
throw new Exception();
|
||||
}
|
||||
FileInfo fileInfo = new($"{logistics.ReportFullPath}.descriptions.json");
|
||||
List<Description> fileReadDescriptions = (from l in descriptions select (Description)l).ToList();
|
||||
string json = JsonSerializer.Serialize(fileReadDescriptions, fileReadDescriptions.GetType());
|
||||
File.WriteAllText(fileInfo.FullName, json);
|
||||
File.SetLastWriteTime(fileInfo.FullName, logistics.DateTimeFromSequence);
|
||||
fileInfoCollection.Add(fileInfo);
|
||||
JsonElement[] jsonElements = JsonSerializer.Deserialize<JsonElement[]>(json) ?? throw new Exception();
|
||||
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(logistics.Logistics1[0], tests.ToArray(), jsonElements, fileInfoCollection);
|
||||
return results;
|
||||
}
|
||||
|
||||
private string GetBefore(string text)
|
||||
{
|
||||
string str;
|
||||
string str1;
|
||||
int num = _Data.IndexOf(text, _I);
|
||||
if (num <= -1)
|
||||
{
|
||||
str = _Data.Substring(_I);
|
||||
_I = _Data.Length;
|
||||
str1 = str.Trim();
|
||||
}
|
||||
else
|
||||
{
|
||||
str = _Data.Substring(_I, num - _I);
|
||||
_I = num + text.Length;
|
||||
str1 = str.Trim();
|
||||
}
|
||||
return str1;
|
||||
}
|
||||
|
||||
private string GetBefore(string text, bool trim)
|
||||
{
|
||||
string str;
|
||||
string before;
|
||||
if (!trim)
|
||||
{
|
||||
int num = _Data.IndexOf(text, _I);
|
||||
if (num <= -1)
|
||||
{
|
||||
str = _Data.Substring(_I);
|
||||
_I = _Data.Length;
|
||||
before = str;
|
||||
}
|
||||
else
|
||||
{
|
||||
str = _Data.Substring(_I, num - _I);
|
||||
_I = num + text.Length;
|
||||
before = str;
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
before = GetBefore(text);
|
||||
}
|
||||
return before;
|
||||
}
|
||||
|
||||
private string GetToEOL()
|
||||
{
|
||||
string result;
|
||||
if (_Data.IndexOf("\n", _I) > -1)
|
||||
result = GetBefore("\n");
|
||||
else
|
||||
result = GetBefore(Environment.NewLine);
|
||||
return result;
|
||||
}
|
||||
|
||||
private string GetToEOL(bool trim)
|
||||
{
|
||||
string str;
|
||||
if (_Data.IndexOf("\n", _I) > -1)
|
||||
str = !trim ? GetBefore("\n", false) : GetToEOL();
|
||||
else
|
||||
str = !trim ? GetBefore(Environment.NewLine, false) : GetToEOL();
|
||||
return str;
|
||||
}
|
||||
|
||||
private string GetToken()
|
||||
{
|
||||
while (true)
|
||||
{
|
||||
if (_I >= _Data.Length || !IsNullOrWhiteSpace(_Data.Substring(_I, 1)))
|
||||
{
|
||||
break;
|
||||
}
|
||||
_I++;
|
||||
}
|
||||
int num = _I;
|
||||
while (true)
|
||||
{
|
||||
if (num >= _Data.Length || IsNullOrWhiteSpace(_Data.Substring(num, 1)))
|
||||
{
|
||||
break;
|
||||
}
|
||||
num++;
|
||||
}
|
||||
string str = _Data.Substring(_I, num - _I);
|
||||
_I = num;
|
||||
return str.Trim();
|
||||
}
|
||||
|
||||
private string GetToText(string text)
|
||||
{
|
||||
string str = _Data.Substring(_I, _Data.IndexOf(text, _I) - _I).Trim();
|
||||
return str;
|
||||
}
|
||||
|
||||
private bool IsBlankLine()
|
||||
{
|
||||
int num = _Data.IndexOf("\n", _I);
|
||||
return IsNullOrWhiteSpace(num > -1 ? _Data.Substring(_I, num - _I) : _Data.Substring(_I));
|
||||
}
|
||||
|
||||
private static bool IsNullOrWhiteSpace(string text)
|
||||
{
|
||||
bool flag;
|
||||
int num = 0;
|
||||
while (true)
|
||||
{
|
||||
if (num >= text.Length)
|
||||
{
|
||||
flag = true;
|
||||
break;
|
||||
}
|
||||
else if (char.IsWhiteSpace(text[num]))
|
||||
{
|
||||
num++;
|
||||
}
|
||||
else
|
||||
{
|
||||
flag = false;
|
||||
break;
|
||||
}
|
||||
}
|
||||
return flag;
|
||||
}
|
||||
|
||||
private string PeekNextLine()
|
||||
{
|
||||
int num = _I;
|
||||
string toEOL = GetToEOL();
|
||||
_I = num;
|
||||
return toEOL;
|
||||
}
|
||||
|
||||
private void ScanPast(string text)
|
||||
{
|
||||
int num = _Data.IndexOf(text, _I);
|
||||
if (num <= -1)
|
||||
{
|
||||
_I = _Data.Length;
|
||||
}
|
||||
else
|
||||
{
|
||||
_I = num + text.Length;
|
||||
}
|
||||
}
|
||||
|
||||
internal static DateTime GetDateTime(Logistics logistics, string dateTimeText)
|
||||
{
|
||||
DateTime result;
|
||||
string inputDateFormat = "MM/dd/yy HH:mm";
|
||||
if (dateTimeText.Length != inputDateFormat.Length)
|
||||
result = logistics.DateTimeFromSequence;
|
||||
else
|
||||
{
|
||||
if (!DateTime.TryParseExact(dateTimeText, inputDateFormat, CultureInfo.InvariantCulture, DateTimeStyles.None, out DateTime dateTimeParsed))
|
||||
result = logistics.DateTimeFromSequence;
|
||||
else
|
||||
{
|
||||
if (dateTimeParsed < logistics.DateTimeFromSequence.AddDays(1) && dateTimeParsed > logistics.DateTimeFromSequence.AddDays(-1))
|
||||
result = dateTimeParsed;
|
||||
else
|
||||
result = logistics.DateTimeFromSequence;
|
||||
}
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
private static (string, string) GetReactorAndRDS(string defaultReactor, string defaultRDS, string text, string formattedText, string[] segments)
|
||||
{
|
||||
string rds;
|
||||
string reactor;
|
||||
if (string.IsNullOrEmpty(text) || segments.Length == 0 || string.IsNullOrEmpty(formattedText))
|
||||
reactor = defaultReactor;
|
||||
else
|
||||
reactor = segments[0];
|
||||
if (segments.Length <= 1 || !int.TryParse(segments[1], out int rdsValue) || rdsValue < 99)
|
||||
rds = defaultRDS;
|
||||
else
|
||||
rds = segments[1];
|
||||
if (reactor.Length > 3)
|
||||
{
|
||||
rds = reactor;
|
||||
reactor = defaultReactor;
|
||||
}
|
||||
return new(reactor, rds);
|
||||
}
|
||||
|
||||
private static (string, string) GetLayerAndPSN(string defaultLayer, string defaultPSN, string[] segments)
|
||||
{
|
||||
string psn;
|
||||
string layer;
|
||||
if (segments.Length <= 2)
|
||||
{
|
||||
psn = defaultPSN;
|
||||
layer = defaultLayer;
|
||||
}
|
||||
else
|
||||
{
|
||||
string[] segmentsB = segments[2].Split('.');
|
||||
psn = segmentsB[0];
|
||||
if (segmentsB.Length <= 1)
|
||||
layer = defaultLayer;
|
||||
else
|
||||
{
|
||||
layer = segmentsB[1];
|
||||
if (layer.Length > 1 && layer[0] == '0')
|
||||
layer = layer.Substring(1);
|
||||
}
|
||||
}
|
||||
return (layer, psn);
|
||||
}
|
||||
|
||||
private static string GetZone(string[] segments)
|
||||
{
|
||||
string result;
|
||||
if (segments.Length <= 3)
|
||||
result = string.Empty;
|
||||
else
|
||||
{
|
||||
result = segments[3];
|
||||
if (result.Length > 1 && result[0] == '0')
|
||||
result = result.Substring(1);
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
public static Descriptor GetDescriptor(string text)
|
||||
{
|
||||
Descriptor result;
|
||||
string psn;
|
||||
string rds;
|
||||
string zone;
|
||||
string layer;
|
||||
string reactor;
|
||||
string cassette;
|
||||
string employee;
|
||||
string defaultPSN = string.Empty;
|
||||
string defaultRDS = string.Empty;
|
||||
string defaultZone = string.Empty;
|
||||
string defaultLayer = string.Empty;
|
||||
string defaultReactor = string.Empty;
|
||||
string defaultEmployee = string.Empty;
|
||||
if (Regex.IsMatch(text, @"^[a-zA-z][0-9]{2,4}$"))
|
||||
{
|
||||
cassette = text.ToUpper();
|
||||
psn = defaultPSN;
|
||||
rds = defaultRDS;
|
||||
zone = defaultZone;
|
||||
layer = defaultLayer;
|
||||
reactor = defaultReactor;
|
||||
employee = defaultEmployee;
|
||||
}
|
||||
else if (string.IsNullOrEmpty(text) || (text.Length is 2 or 3 && Regex.IsMatch(text, "^[a-zA-z]{2,3}")))
|
||||
{
|
||||
cassette = text;
|
||||
psn = defaultPSN;
|
||||
rds = defaultRDS;
|
||||
zone = defaultZone;
|
||||
employee = cassette;
|
||||
layer = defaultLayer;
|
||||
reactor = defaultReactor;
|
||||
}
|
||||
else if (Regex.IsMatch(text, @"^[0-9]{2}[.][0-9]{1}[.]?[0-9]{0,1}"))
|
||||
{
|
||||
string[] segments = text.Split('.');
|
||||
cassette = text;
|
||||
psn = defaultPSN;
|
||||
rds = defaultRDS;
|
||||
layer = segments[1];
|
||||
reactor = segments[0];
|
||||
employee = defaultEmployee;
|
||||
if (segments.Length <= 2)
|
||||
zone = defaultZone;
|
||||
else
|
||||
zone = segments[2];
|
||||
}
|
||||
else
|
||||
{
|
||||
string[] segments;
|
||||
// Remove illegal characters \/:*?"<>| found in the Cassette.
|
||||
cassette = Regex.Replace(text, @"[\\,\/,\:,\*,\?,\"",\<,\>,\|]", "_").Split('\r')[0].Split('\n')[0];
|
||||
if (cassette.Length > 2 && cassette[0] == '1' && (cassette[1] == 'T' || cassette[1] == 't'))
|
||||
cassette = cassette.Substring(2);
|
||||
if (cassette.Contains('-'))
|
||||
segments = cassette.Split(new char[] { '-' });
|
||||
else if (!cassette.Contains('\u005F'))
|
||||
segments = cassette.Split(new char[] { ' ' });
|
||||
else if (cassette.Contains('.'))
|
||||
segments = cassette.Split(new char[] { '.' });
|
||||
else
|
||||
segments = cassette.Split(new char[] { '\u005F' });
|
||||
// bool hasRDS = Regex.IsMatch(cassette, "[-]?([QP][0-9]{4,}|[0-9]{5,})[-]?");
|
||||
(reactor, rds) = GetReactorAndRDS(defaultReactor, defaultRDS, text, cassette, segments);
|
||||
(layer, psn) = GetLayerAndPSN(defaultLayer, defaultPSN, segments);
|
||||
zone = GetZone(segments);
|
||||
if (segments.Length <= 3 || segments[3].Length <= 1)
|
||||
employee = defaultEmployee;
|
||||
else
|
||||
employee = segments[3];
|
||||
}
|
||||
result = new(cassette, employee, layer, psn, rds, reactor, zone);
|
||||
return result;
|
||||
}
|
||||
|
||||
private void Set(Logistics logistics, Run? run)
|
||||
{
|
||||
string psn;
|
||||
string rds;
|
||||
string text;
|
||||
string zone;
|
||||
string batch;
|
||||
string layer;
|
||||
string title;
|
||||
DateTime date;
|
||||
string reactor;
|
||||
string cassette;
|
||||
string employee;
|
||||
const string batchKey = "Batch";
|
||||
const string startedKey = "started";
|
||||
const string cassetteKey = "Cassette";
|
||||
const string startedAtKey = "started at";
|
||||
if (!_Data.Contains(batchKey) || !_Data.Contains(startedKey))
|
||||
batch = string.Empty;
|
||||
else
|
||||
{
|
||||
for (int z = 0; z < int.MaxValue; z++)
|
||||
{
|
||||
ScanPast(batchKey);
|
||||
if (!_Data.Substring(_I).Contains(batchKey))
|
||||
break;
|
||||
}
|
||||
batch = GetToText(startedKey);
|
||||
ScanPast(startedAtKey);
|
||||
}
|
||||
if (run is not null)
|
||||
{ }
|
||||
ScanPast(cassetteKey);
|
||||
if (!_Data.Substring(_I).Contains(startedKey))
|
||||
text = string.Empty;
|
||||
else
|
||||
text = GetToText(startedKey);
|
||||
ScanPast(startedAtKey);
|
||||
string dateTimeText = GetToEOL();
|
||||
if (dateTimeText.EndsWith("."))
|
||||
dateTimeText = dateTimeText.Remove(dateTimeText.Length - 1, 1);
|
||||
date = GetDateTime(logistics, dateTimeText);
|
||||
Descriptor descriptor = GetDescriptor(text);
|
||||
cassette = descriptor.Cassette;
|
||||
psn = descriptor.PSN;
|
||||
rds = descriptor.RDS;
|
||||
zone = descriptor.Zone;
|
||||
layer = descriptor.Layer;
|
||||
reactor = descriptor.Reactor;
|
||||
employee = descriptor.Employee;
|
||||
title = !string.IsNullOrEmpty(batch) ? batch : cassette;
|
||||
PSN = psn;
|
||||
RDS = rds;
|
||||
Date = date;
|
||||
Zone = zone;
|
||||
Batch = batch;
|
||||
Layer = layer;
|
||||
Title = title;
|
||||
Reactor = reactor;
|
||||
Cassette = cassette;
|
||||
Employee = employee;
|
||||
UniqueId = string.Concat("StratusBioRad_", reactor, "_", rds, "_", psn, "_", logistics.DateTimeFromSequence.ToString("yyyyMMddHHmmssffff"));
|
||||
}
|
||||
|
||||
private void Parse(IFileRead fileRead, Logistics logistics, List<FileInfo> fileInfoCollection, string originalDataBioRad, Run? run, string dataText)
|
||||
{
|
||||
if (fileRead is null)
|
||||
throw new ArgumentNullException(nameof(fileRead));
|
||||
_I = 0;
|
||||
_Data = string.Empty;
|
||||
List<Detail> details = new();
|
||||
if (string.IsNullOrEmpty(dataText))
|
||||
dataText = File.ReadAllText(logistics.ReportFullPath);
|
||||
_Log.Debug($"****ParseData - Source file contents:");
|
||||
_Log.Debug(dataText);
|
||||
List<string> moveFiles = new();
|
||||
string fileNameWithoutExtension = Path.GetFileNameWithoutExtension(logistics.ReportFullPath);
|
||||
string directoryName = Path.GetDirectoryName(logistics.ReportFullPath) ?? throw new Exception();
|
||||
moveFiles.AddRange(Directory.GetFiles(directoryName, string.Concat(originalDataBioRad, "*", logistics.Sequence, "*"), SearchOption.TopDirectoryOnly));
|
||||
moveFiles.AddRange(Directory.GetFiles(directoryName, string.Concat(originalDataBioRad, "*", fileNameWithoutExtension.Split('_').Last(), "*"), SearchOption.TopDirectoryOnly));
|
||||
foreach (string moveFile in moveFiles.Distinct())
|
||||
fileInfoCollection.Add(new FileInfo(moveFile));
|
||||
if (!string.IsNullOrEmpty(dataText))
|
||||
{
|
||||
int i;
|
||||
int num;
|
||||
int num2;
|
||||
Point point;
|
||||
int num1 = 0;
|
||||
Detail detail;
|
||||
string recipe;
|
||||
string nextLine;
|
||||
_I = 0;
|
||||
_Data = dataText;
|
||||
Set(logistics, run);
|
||||
nextLine = PeekNextLine();
|
||||
string cassette = "Cassette";
|
||||
if (nextLine.Contains("Wafer"))
|
||||
{
|
||||
_Log.Debug("****ProcessData Contains Wafer");
|
||||
while (!PeekNextLine().Contains(cassette))
|
||||
{
|
||||
num2 = num1;
|
||||
num1 = num2 + 1;
|
||||
if (num2 > 25)
|
||||
break;
|
||||
else
|
||||
{
|
||||
_Log.Debug("****ProcessData new stratusBioRadWaferDetail");
|
||||
detail = new Detail();
|
||||
ScanPast("Wafer");
|
||||
detail.Wafer = GetToEOL();
|
||||
if (detail.Wafer.EndsWith("."))
|
||||
{
|
||||
_Log.Debug("****ProcessData Removing Wafer");
|
||||
detail.Wafer = detail.Wafer.Remove(detail.Wafer.Length - 1, 1);
|
||||
}
|
||||
ScanPast("Slot");
|
||||
detail.Slot = GetToEOL();
|
||||
ScanPast("Recipe");
|
||||
recipe = GetToEOL();
|
||||
if (recipe.EndsWith("."))
|
||||
{
|
||||
_Log.Debug("****ProcessData Removing Recipe");
|
||||
recipe = recipe.Remove(recipe.Length - 1, 1);
|
||||
}
|
||||
detail.Recipe = recipe;
|
||||
_ = GetToEOL();
|
||||
nextLine = PeekNextLine();
|
||||
if (nextLine.Contains("Thickness"))
|
||||
{
|
||||
ScanPast("1 - ");
|
||||
num = Convert.ToInt32(GetToken());
|
||||
_Log.Debug(string.Concat("****ProcessData Thickness =", num));
|
||||
detail.Points = new();
|
||||
for (i = 0; i < num; i++)
|
||||
{
|
||||
point = new() { Thickness = GetToken() };
|
||||
if (point.Thickness == "Thickness,")
|
||||
{
|
||||
ScanPast("um");
|
||||
continue;
|
||||
}
|
||||
detail.Points.Add(point);
|
||||
point.Position = Convert.ToString(detail.Points.Count);
|
||||
}
|
||||
}
|
||||
_ = GetToEOL();
|
||||
nextLine = PeekNextLine();
|
||||
if (nextLine.Contains("Thickness"))
|
||||
{
|
||||
ScanPast("11 - ");
|
||||
num = Convert.ToInt32(GetToken());
|
||||
for (i = detail.Points.Count; i < num; i++)
|
||||
{
|
||||
point = new() { Thickness = GetToken() };
|
||||
detail.Points.Add(point);
|
||||
point.Position = Convert.ToString(detail.Points.Count);
|
||||
}
|
||||
}
|
||||
ScanPast("Slot");
|
||||
_ = GetToken();
|
||||
detail.PassFail = GetToken();
|
||||
if (detail.PassFail.EndsWith("."))
|
||||
{
|
||||
_Log.Debug("****ProcessData Removing PassFail");
|
||||
detail.PassFail = detail.PassFail.Remove(detail.PassFail.Length - 1, 1);
|
||||
}
|
||||
ScanPast("Mean");
|
||||
detail.Mean = GetToken();
|
||||
if (detail.Mean.EndsWith(","))
|
||||
{
|
||||
_Log.Debug("****ProcessData Removing Mean");
|
||||
detail.Mean = detail.Mean.Remove(detail.Mean.Length - 1, 1);
|
||||
}
|
||||
ScanPast("STDD");
|
||||
detail.StdDev = GetToEOL();
|
||||
if (detail.StdDev.EndsWith("."))
|
||||
{
|
||||
_Log.Debug("****ProcessData Removing stdDev");
|
||||
detail.StdDev = detail.StdDev.Remove(detail.StdDev.Length - 1, 1);
|
||||
}
|
||||
detail.UniqueId = string.Concat("_Wafer-", detail.Wafer, "_Slot-", detail.Slot);
|
||||
details.Add(detail);
|
||||
nextLine = PeekNextLine();
|
||||
if (nextLine.Contains(cassette))
|
||||
{
|
||||
_ = GetToEOL();
|
||||
nextLine = PeekNextLine();
|
||||
}
|
||||
if (nextLine.Contains(cassette))
|
||||
{
|
||||
_ = GetToEOL();
|
||||
nextLine = PeekNextLine();
|
||||
}
|
||||
if (nextLine.Contains("Process failed"))
|
||||
_ = GetToEOL();
|
||||
}
|
||||
}
|
||||
ScanPast("Mean");
|
||||
MeanThickness = GetToken();
|
||||
if (MeanThickness.EndsWith(","))
|
||||
{
|
||||
_Log.Debug("****ProcessData Removing MeanThickness");
|
||||
MeanThickness = MeanThickness.Remove(MeanThickness.Length - 1, 1);
|
||||
}
|
||||
ScanPast("STDD");
|
||||
StdDev = GetToken();
|
||||
if (StdDev.EndsWith(","))
|
||||
{
|
||||
_Log.Debug("****ProcessData Removing thi.StdDev");
|
||||
StdDev = StdDev.Remove(StdDev.Length - 1, 1);
|
||||
}
|
||||
}
|
||||
if (dataText.Contains("------------- Process failed -------------"))
|
||||
details.Add(new());
|
||||
}
|
||||
StringBuilder stringBuilder = new();
|
||||
foreach (Detail detail in details)
|
||||
{
|
||||
detail.HeaderUniqueId = UniqueId;
|
||||
detail.UniqueId = string.Concat(UniqueId, detail.UniqueId);
|
||||
detail.Points ??= new List<Point>();
|
||||
foreach (Point bioRadDetail in detail.Points)
|
||||
{
|
||||
bioRadDetail.HeaderUniqueId = detail.HeaderUniqueId;
|
||||
bioRadDetail.UniqueId = detail.UniqueId;
|
||||
}
|
||||
_ = stringBuilder.Clear();
|
||||
foreach (Point point in detail.Points)
|
||||
_ = stringBuilder.Append(point.Thickness).Append(',');
|
||||
if (stringBuilder.Length > 0)
|
||||
_ = stringBuilder.Remove(stringBuilder.Length - 1, 1);
|
||||
detail.Thickness = stringBuilder.ToString();
|
||||
_ = stringBuilder.Clear();
|
||||
}
|
||||
if (details.Count != 2
|
||||
|| details[0].Slot != "1"
|
||||
|| details[1].Slot != "25"
|
||||
|| string.IsNullOrEmpty(details[0].Thickness)
|
||||
|| string.IsNullOrEmpty(details[1].Thickness)
|
||||
|| !decimal.TryParse(details[0].Thickness, out decimal thick01)
|
||||
|| !decimal.TryParse(details[1].Thickness, out decimal thick25)
|
||||
|| (thick01 == 0 && thick25 == 0))
|
||||
{
|
||||
ThicknessSlotOne = string.Empty;
|
||||
ThicknessSlotTwentyFive = string.Empty;
|
||||
DeltaThicknessSlotsOneAndTwentyFive = string.Empty;
|
||||
PercentDeltaThicknessSlotsOneAndTwentyFive = string.Empty;
|
||||
}
|
||||
else
|
||||
{
|
||||
ThicknessSlotOne = details[0].Thickness;
|
||||
ThicknessSlotTwentyFive = details[1].Thickness;
|
||||
DeltaThicknessSlotsOneAndTwentyFive = (thick01 - thick25).ToString();
|
||||
// https://www.calculatorsoup.com/calculators/algebra/percent-difference-calculator.php
|
||||
PercentDeltaThicknessSlotsOneAndTwentyFive = $"{Math.Abs(thick01 - thick25) / ((thick01 + thick25) / 2) * 100:0.000}";
|
||||
}
|
||||
fileInfoCollection.Add(logistics.FileInfo);
|
||||
_Details.AddRange(details);
|
||||
}
|
||||
|
||||
internal static List<Description> GetDescriptions(JsonElement[] jsonElements)
|
||||
{
|
||||
List<Description> results = new();
|
||||
Description? description;
|
||||
foreach (JsonElement jsonElement in jsonElements)
|
||||
{
|
||||
if (jsonElement.ValueKind != JsonValueKind.Object)
|
||||
throw new Exception();
|
||||
description = JsonSerializer.Deserialize(jsonElement.ToString(), DescriptionSourceGenerationContext.Default.Description);
|
||||
if (description is null)
|
||||
continue;
|
||||
results.Add(description);
|
||||
}
|
||||
return results;
|
||||
}
|
||||
|
||||
}
|
||||
@ -1,59 +0,0 @@
|
||||
using System.Text.Json.Serialization;
|
||||
|
||||
namespace Adaptation.FileHandlers.Stratus;
|
||||
|
||||
#nullable enable
|
||||
|
||||
internal class Row
|
||||
{
|
||||
|
||||
public Row(Run run, int index, int i, int j)
|
||||
{
|
||||
Index = index;
|
||||
//
|
||||
Batch = run.Header.Batch;
|
||||
Cassette = run.Header.Cassette;
|
||||
DateTime = run.Header.DateTime;
|
||||
//
|
||||
Destination = run.Wafers[i].Destination;
|
||||
Mean = run.Wafers[i].Mean;
|
||||
PassFail = run.Wafers[i].PassFail;
|
||||
Recipe = run.Wafers[i].Recipe;
|
||||
Reference = run.Wafers[i].Reference;
|
||||
Site = run.Wafers[i].Sites[j];
|
||||
Slot = run.Wafers[i].Slot;
|
||||
Source = run.Wafers[i].Source;
|
||||
StdDev = run.Wafers[i].StdDev;
|
||||
Text = run.Wafers[i].Text;
|
||||
//
|
||||
GradeMean = run.Grade.MeanThickness;
|
||||
GradeStdDev = run.Grade.StdDev;
|
||||
}
|
||||
|
||||
public int Index { get; }
|
||||
//
|
||||
public string Batch { get; }
|
||||
public string Cassette { get; }
|
||||
public string DateTime { get; }
|
||||
//
|
||||
public string Destination { get; }
|
||||
public string Mean { get; }
|
||||
public string PassFail { get; }
|
||||
public string Recipe { get; }
|
||||
public string Reference { get; }
|
||||
public string Site { get; }
|
||||
public string Slot { get; }
|
||||
public string Source { get; }
|
||||
public string StdDev { get; }
|
||||
public string Text { get; }
|
||||
//
|
||||
public string GradeMean { get; }
|
||||
public string GradeStdDev { get; }
|
||||
|
||||
}
|
||||
|
||||
[JsonSourceGenerationOptions(WriteIndented = true)]
|
||||
[JsonSerializable(typeof(Row))]
|
||||
internal partial class StratusRowSourceGenerationContext : JsonSerializerContext
|
||||
{
|
||||
}
|
||||
@ -1,66 +0,0 @@
|
||||
using Adaptation.Shared;
|
||||
using System.Collections.ObjectModel;
|
||||
using System.IO;
|
||||
using System.Text.Json.Serialization;
|
||||
|
||||
namespace Adaptation.FileHandlers.Stratus;
|
||||
|
||||
#nullable enable
|
||||
|
||||
internal class Run
|
||||
{
|
||||
|
||||
public Run(Header header, ReadOnlyCollection<Wafer> wafers, Grade grade)
|
||||
{
|
||||
Header = header;
|
||||
Wafers = wafers;
|
||||
Grade = grade;
|
||||
}
|
||||
|
||||
public Header Header { get; }
|
||||
public ReadOnlyCollection<Wafer> Wafers { get; }
|
||||
public Grade Grade { get; }
|
||||
|
||||
internal static Run? Get(Logistics logistics)
|
||||
{
|
||||
Run? result;
|
||||
Constant constant = new();
|
||||
int[] i = new int[] { 0 };
|
||||
string text = File.ReadAllText(logistics.ReportFullPath);
|
||||
Header? header = Header.Get(text, constant, i);
|
||||
if (header is null)
|
||||
result = null;
|
||||
else
|
||||
{
|
||||
ReadOnlyCollection<string> groups = Wafer.GetGroups(text, constant, i);
|
||||
if (groups.Count == 0)
|
||||
result = null;
|
||||
else
|
||||
{
|
||||
Grade? grade = Grade.Get(constant, groups);
|
||||
if (grade is null)
|
||||
result = null;
|
||||
else
|
||||
{
|
||||
ReadOnlyCollection<Wafer> wafers = Wafer.Get(constant, groups);
|
||||
if (wafers.Count == 0 && grade.MeanThickness != "0")
|
||||
result = null;
|
||||
else
|
||||
{
|
||||
result = new(header, wafers, grade);
|
||||
// WriteJson(logistics, fileInfoCollection, result);
|
||||
// WriteCommaSeparatedValues(logistics, result);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
[JsonSourceGenerationOptions(WriteIndented = true)]
|
||||
[JsonSerializable(typeof(Run))]
|
||||
internal partial class StratusRunSourceGenerationContext : JsonSerializerContext
|
||||
{
|
||||
}
|
||||
@ -1,234 +0,0 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Collections.ObjectModel;
|
||||
|
||||
namespace Adaptation.FileHandlers.Stratus;
|
||||
|
||||
#nullable enable
|
||||
|
||||
public class Wafer
|
||||
{
|
||||
|
||||
public Wafer(string destination, string mean, string passFail, string recipe, string reference, ReadOnlyCollection<string> sites, string slot, string source, string stdDev, string waferText)
|
||||
{
|
||||
Destination = destination;
|
||||
Mean = mean;
|
||||
PassFail = passFail;
|
||||
Recipe = recipe;
|
||||
Reference = reference;
|
||||
Sites = sites;
|
||||
Slot = slot;
|
||||
Source = source;
|
||||
StdDev = stdDev;
|
||||
Text = waferText;
|
||||
}
|
||||
|
||||
public string Destination { get; }
|
||||
public string Mean { get; }
|
||||
public string PassFail { get; }
|
||||
public string Recipe { get; }
|
||||
public string Reference { get; }
|
||||
public ReadOnlyCollection<string> Sites { get; }
|
||||
public string Slot { get; }
|
||||
public string Source { get; }
|
||||
public string StdDev { get; }
|
||||
public string Text { get; }
|
||||
|
||||
internal static string GetToken(string text, int[] i)
|
||||
{
|
||||
while (true)
|
||||
{
|
||||
if (i[0] >= text.Length || !IsNullOrWhiteSpace(text.Substring(i[0], 1)))
|
||||
break;
|
||||
i[0]++;
|
||||
}
|
||||
int num = i[0];
|
||||
while (true)
|
||||
{
|
||||
if (num >= text.Length || IsNullOrWhiteSpace(text.Substring(num, 1)))
|
||||
break;
|
||||
num++;
|
||||
}
|
||||
string str = text.Substring(i[0], num - i[0]);
|
||||
i[0] = num;
|
||||
return str.Trim();
|
||||
}
|
||||
|
||||
internal static bool IsNullOrWhiteSpace(string search)
|
||||
{
|
||||
bool flag;
|
||||
int num = 0;
|
||||
while (true)
|
||||
{
|
||||
if (num >= search.Length)
|
||||
{
|
||||
flag = true;
|
||||
break;
|
||||
}
|
||||
else if (char.IsWhiteSpace(search[num]))
|
||||
num++;
|
||||
else
|
||||
{
|
||||
flag = false;
|
||||
break;
|
||||
}
|
||||
}
|
||||
return flag;
|
||||
}
|
||||
|
||||
internal static string PeekNextLine(string text, int[] i)
|
||||
{
|
||||
int num = i[0];
|
||||
string toEOL = Header.GetToEOL(text, i);
|
||||
i[0] = num;
|
||||
return toEOL;
|
||||
}
|
||||
|
||||
internal static ReadOnlyCollection<string> GetGroups(string text, Constant constant, int[] i)
|
||||
{
|
||||
List<string> results = new();
|
||||
string[] lines = text.Substring(i[0]).Split(new string[] { Environment.NewLine }, StringSplitOptions.None);
|
||||
if (lines.Length > 0)
|
||||
{
|
||||
List<string> group = new();
|
||||
foreach (string line in lines)
|
||||
{
|
||||
if (string.IsNullOrEmpty(line.Trim()))
|
||||
continue;
|
||||
group.Add(line);
|
||||
if (line.StartsWith(constant.Destination)
|
||||
|| line.Contains(constant.ProcessFailed)
|
||||
|| line.StartsWith(constant.WaferParentheses) && line.Contains(constant.IsPut))
|
||||
{
|
||||
results.Add(string.Join(Environment.NewLine, group));
|
||||
group.Clear();
|
||||
}
|
||||
}
|
||||
results.Add(string.Join(Environment.NewLine, group));
|
||||
}
|
||||
return results.AsReadOnly();
|
||||
}
|
||||
|
||||
internal static ReadOnlyCollection<Wafer> Get(Constant constant, ReadOnlyCollection<string> groups)
|
||||
{
|
||||
List<Wafer> results = new();
|
||||
string mean;
|
||||
string slot;
|
||||
Wafer wafer;
|
||||
string recipe;
|
||||
string source;
|
||||
string stdDev;
|
||||
string nextLine;
|
||||
string passFail;
|
||||
string reference;
|
||||
string thickness;
|
||||
string waferText;
|
||||
string destination;
|
||||
List<string> sites;
|
||||
int[] j = new int[] { 0 };
|
||||
foreach (string groupText in groups)
|
||||
{
|
||||
j[0] = 0;
|
||||
sites = new();
|
||||
if (groupText.Contains(constant.ProcessFailed))
|
||||
{
|
||||
mean = string.Empty;
|
||||
slot = string.Empty;
|
||||
recipe = string.Empty;
|
||||
source = string.Empty;
|
||||
stdDev = string.Empty;
|
||||
passFail = string.Empty;
|
||||
reference = string.Empty;
|
||||
waferText = string.Empty;
|
||||
destination = string.Empty;
|
||||
}
|
||||
else if (groupText.Contains(constant.Reference))
|
||||
{
|
||||
mean = string.Empty;
|
||||
slot = string.Empty;
|
||||
recipe = string.Empty;
|
||||
stdDev = string.Empty;
|
||||
passFail = string.Empty;
|
||||
waferText = string.Empty;
|
||||
Header.ScanPast(groupText, j, constant.Reference);
|
||||
reference = Header.GetToEOL(groupText, j);
|
||||
Header.ScanPast(groupText, j, constant.Source);
|
||||
source = Header.GetToEOL(groupText, j).Trim();
|
||||
Header.ScanPast(groupText, j, constant.Destination);
|
||||
destination = Header.GetToEOL(groupText, j).Trim();
|
||||
}
|
||||
else
|
||||
{
|
||||
if (!groupText.Contains(constant.Wafer))
|
||||
continue;
|
||||
Header.ScanPast(groupText, j, constant.Wafer);
|
||||
waferText = Header.GetToEOL(groupText, j);
|
||||
if (waferText.EndsWith("."))
|
||||
waferText = waferText.Remove(waferText.Length - 1, 1);
|
||||
Header.ScanPast(groupText, j, constant.Slot);
|
||||
slot = Header.GetToEOL(groupText, j);
|
||||
Header.ScanPast(groupText, j, constant.Recipe);
|
||||
recipe = Header.GetToEOL(groupText, j);
|
||||
if (recipe.EndsWith("."))
|
||||
recipe = recipe.Remove(recipe.Length - 1, 1);
|
||||
Header.ScanPast(groupText, j, constant.Thickness);
|
||||
_ = GetToken(groupText, j);
|
||||
nextLine = PeekNextLine(groupText, j);
|
||||
if (nextLine.Contains(constant.OneHypen))
|
||||
{
|
||||
Header.ScanPast(groupText, j, constant.OneHypen);
|
||||
_ = GetToken(groupText, j);
|
||||
}
|
||||
for (int k = 0; k < 100; k++)
|
||||
{
|
||||
nextLine = PeekNextLine(groupText, j);
|
||||
if (nextLine.Contains("Slot"))
|
||||
break;
|
||||
if (string.IsNullOrEmpty(nextLine))
|
||||
{
|
||||
_ = Header.GetToEOL(groupText, j);
|
||||
continue;
|
||||
}
|
||||
thickness = GetToken(groupText, j);
|
||||
if (thickness == constant.Thickness)
|
||||
{
|
||||
_ = GetToken(groupText, j);
|
||||
continue;
|
||||
}
|
||||
sites.Add(thickness);
|
||||
}
|
||||
Header.ScanPast(groupText, j, constant.Slot);
|
||||
_ = GetToken(groupText, j);
|
||||
passFail = GetToken(groupText, j);
|
||||
if (passFail.EndsWith("."))
|
||||
passFail = passFail.Remove(passFail.Length - 1, 1);
|
||||
Header.ScanPast(groupText, j, constant.Mean);
|
||||
mean = GetToken(groupText, j);
|
||||
if (mean.EndsWith(","))
|
||||
mean = mean.Remove(mean.Length - 1, 1);
|
||||
Header.ScanPast(groupText, j, constant.STDD);
|
||||
stdDev = Header.GetToEOL(groupText, j);
|
||||
if (stdDev.EndsWith("."))
|
||||
stdDev = stdDev.Remove(stdDev.Length - 1, 1);
|
||||
reference = string.Empty;
|
||||
Header.ScanPast(groupText, j, constant.Source);
|
||||
source = Header.GetToEOL(groupText, j).Trim();
|
||||
Header.ScanPast(groupText, j, constant.Destination);
|
||||
destination = Header.GetToEOL(groupText, j).Trim();
|
||||
}
|
||||
wafer = new(destination: destination,
|
||||
mean: mean,
|
||||
passFail: passFail,
|
||||
recipe: recipe,
|
||||
reference: reference,
|
||||
sites: sites.AsReadOnly(),
|
||||
slot: slot,
|
||||
source: source,
|
||||
stdDev: stdDev,
|
||||
waferText: waferText);
|
||||
results.Add(wafer);
|
||||
}
|
||||
return results.AsReadOnly();
|
||||
}
|
||||
|
||||
}
|
||||
@ -11,6 +11,7 @@ internal class Constant
public string OneHypen { get; } = "1 - ";
public string Recipe { get; } = "Recipe";
public string Source { get; } = "Source:";
public string Elapsed { get; } = "Elapsed";
public string Started { get; } = "started";
public string Cassette { get; } = "Cassette";
public string Finished { get; } = "finished.";

@ -6,7 +6,7 @@ using System.Linq;
using System.Text.Json;
using System.Text.Json.Serialization;

namespace Adaptation.FileHandlers.Stratus;
namespace Adaptation.FileHandlers.txt;

public class Description : IDescription, Shared.Properties.IDescription
{
@ -199,37 +199,37 @@ public class Description : IDescription, Shared.Properties.IDescription
ProcessJobID = logistics.ProcessJobID,
MID = logistics.MID,
//
Date = processData.Date.ToString(GetDateFormat()),
Employee = processData.Employee,
Lot = processData.Batch,
PSN = processData.PSN,
Reactor = processData.Reactor,
// Date = processData.Date.ToString(GetDateFormat()),
// Employee = processData.Employee,
// Lot = processData.Batch,
// PSN = processData.PSN,
// Reactor = processData.Reactor,
Recipe = detail.Recipe,
IndexOf = nameof(IndexOf),
AttemptCounter = nameof(AttemptCounter),
//
Cassette = processData.Cassette,
GradeStdDev = processData.StdDev,
// Cassette = processData.Cassette,
// GradeStdDev = processData.StdDev,
HeaderUniqueId = detail.HeaderUniqueId,
Layer = processData.Layer,
MeanThickness = processData.MeanThickness,
// Layer = processData.Layer,
// MeanThickness = processData.MeanThickness,
PassFail = detail.PassFail,
RDS = processData.RDS,
// RDS = processData.RDS,
Slot = detail.Slot,
Title = processData.Title,
// Title = processData.Title,
UniqueId = detail.UniqueId,
Wafer = detail.Wafer,
Zone = processData.Zone,
// Zone = processData.Zone,
//
Mean = detail.Mean,
StdDev = detail.StdDev,
Thickness = detail.Thickness,
//
ThicknessSlotOne = processData.ThicknessSlotOne,
ThicknessSlotTwentyFive = processData.ThicknessSlotTwentyFive,
// ThicknessSlotOne = processData.ThicknessSlotOne,
// ThicknessSlotTwentyFive = processData.ThicknessSlotTwentyFive,
//
DeltaThicknessSlotsOneAndTwentyFive = processData.DeltaThicknessSlotsOneAndTwentyFive,
PercentDeltaThicknessSlotsOneAndTwentyFive = processData.PercentDeltaThicknessSlotsOneAndTwentyFive,
// DeltaThicknessSlotsOneAndTwentyFive = processData.DeltaThicknessSlotsOneAndTwentyFive,
// PercentDeltaThicknessSlotsOneAndTwentyFive = processData.PercentDeltaThicknessSlotsOneAndTwentyFive,
};
results.Add(description);
}
@ -243,7 +243,7 @@ public class Description : IDescription, Shared.Properties.IDescription
return result;
}

private Description GetDefault(IFileRead fileRead, Logistics logistics)
private static Description GetDefault(IFileRead fileRead, Logistics logistics)
{
Description result = new()
{
@ -294,6 +294,15 @@ public class Description : IDescription, Shared.Properties.IDescription
return result;
}

internal static JsonElement GetDefaultJsonElement(IFileRead fileRead, Logistics logistics)
{
JsonElement result;
Description description = GetDefault(fileRead, logistics);
string json = JsonSerializer.Serialize(description, DescriptionSourceGenerationContext.Default.Description);
result = JsonSerializer.Deserialize<JsonElement>(json);
return result;
}

internal static string GetDateFormat() => "MM/dd/yyyy hh:mm:ss tt";

}
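GetDefaultJsonElement above simply round-trips the default Description through System.Text.Json so the extract result can carry a JsonElement. A minimal standalone sketch of the same round trip, using a stand-in record rather than the adaptation's Description and its source-generated DescriptionSourceGenerationContext (both assumed here):

using System;
using System.Text.Json;

// Stand-in for the adaptation's Description type; the real type and its
// source-generated context live in Adaptation.FileHandlers.txt.
internal sealed record DescriptionSketch(string MID, string Date, string Recipe);

internal static class DescriptionRoundTripSketch
{
    private static void Main()
    {
        // Serialize the default description, then re-parse it as a JsonElement,
        // mirroring what GetDefaultJsonElement does with the generated context.
        DescriptionSketch description = new(
            MID: "sample",
            Date: DateTime.Now.ToString("MM/dd/yyyy hh:mm:ss tt"),
            Recipe: string.Empty);
        string json = JsonSerializer.Serialize(description, new JsonSerializerOptions { WriteIndented = true });
        JsonElement jsonElement = JsonSerializer.Deserialize<JsonElement>(json);
        Console.WriteLine(jsonElement.GetProperty("MID").GetString());
    }
}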
@ -1,4 +1,6 @@
namespace Adaptation.FileHandlers.Stratus;
using System.Text.Json.Serialization;

namespace Adaptation.FileHandlers.txt;

public class Descriptor
{
@ -22,4 +24,10 @@ public class Descriptor
Zone = zone;
}

}

[JsonSourceGenerationOptions(WriteIndented = true)]
[JsonSerializable(typeof(Descriptor))]
internal partial class DescriptorSourceGenerationContext : JsonSerializerContext
{
}
@ -1,7 +1,7 @@
using System.Collections.Generic;
using System.Text.Json.Serialization;

namespace Adaptation.FileHandlers.Stratus;
namespace Adaptation.FileHandlers.txt;

public class Detail
{
@ -7,7 +7,10 @@ using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.IO;
using System.Linq;
using System.Reflection.Metadata;
using System.Text.Json;
using System.Text.RegularExpressions;

namespace Adaptation.FileHandlers.txt;

@ -15,7 +18,6 @@ public class FileRead : Shared.FileRead, IFileRead
{

private long? _TickOffset;
private readonly string _OriginalDataBioRad;

public FileRead(ISMTP smtp, Dictionary<string, string> fileParameter, string cellInstanceName, int? connectionCount, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList<ModelObjectParameterDefinition> modelObjectParameters, string equipmentDictionaryName, Dictionary<string, List<long>> dummyRuns, Dictionary<long, List<Shared.Metrology.WS.Results>> staticRuns, bool useCyclicalForDescription, bool isEAFHosted) :
base(new Description(), false, smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null)
@ -29,7 +31,6 @@ public class FileRead : Shared.FileRead, IFileRead
throw new Exception(cellInstanceConnectionName);
if (_IsDuplicator)
throw new Exception(cellInstanceConnectionName);
_OriginalDataBioRad = "OriginalDataBioRad_";
if (_IsEAFHosted)
NestExistingFiles(_FileConnectorConfiguration);
}
@ -103,71 +104,45 @@ public class FileRead : Shared.FileRead, IFileRead

private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
{
if (dateTime == DateTime.MinValue)
throw new ArgumentNullException(nameof(dateTime));
Tuple<string, Test[], JsonElement[], List<FileInfo>> results = new(string.Empty, Array.Empty<Test>(), Array.Empty<JsonElement>(), new List<FileInfo>());
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
Test[] tests = Array.Empty<Test>();
List<JsonElement> jsonElements = new();
List<FileInfo> fileInfoCollection = new();
_TickOffset ??= 0; // new FileInfo(reportFullPath).LastWriteTime.Ticks - dateTime.Ticks;
_Logistics = new Logistics(this, _TickOffset.Value, reportFullPath, useSplitForMID: true);
fileInfoCollection.Add(_Logistics.FileInfo);
SetFileParameterLotID(_Logistics.MID);
FileInfo fileInfo = new(reportFullPath);
if (fileInfo.Length < _MinFileLength)
results.Item4.Add(fileInfo);
if (_Logistics.FileInfo.Length < _MinFileLength)
results = new(string.Empty, tests, jsonElements.ToArray(), fileInfoCollection);
else
{
try
{
ReadOnlyCollection<Run>? runs = Run.Get(_TickOffset.Value, _Logistics, results.Item4);
// if (run is null)
// throw new Exception(string.Concat("A) No Data - ", dateTime.Ticks));
if (runs.Count == 0)
{ }
}
catch (Exception)
{ }
bool isBioRad;
string dataText;
string cassetteID;
string fileNameTemp;
string tupleFileName;
DateTime cassetteDateTime;
string directoryName = Path.GetDirectoryName(reportFullPath) ?? throw new Exception();
string sequenceDirectoryName = Path.Combine(directoryName, _Logistics.Sequence.ToString());
string originalDataBioRad = Path.Combine(directoryName, $"{_OriginalDataBioRad}{_Logistics.Sequence}.txt");
IProcessData iProcessData = new ProcessData(this, _Logistics, _TickOffset.Value, results.Item4, _OriginalDataBioRad);
if (_IsEAFHosted && iProcessData.Details.Count != 0)
{
if (!Directory.Exists(sequenceDirectoryName))
_ = Directory.CreateDirectory(sequenceDirectoryName);
File.Move(reportFullPath, originalDataBioRad);
_Log.Debug(string.Concat("****Extract() - Renamed [", reportFullPath, "] to [", originalDataBioRad, "]"));
}
for (int i = 0; i < iProcessData.Details.Count; i++)
{
if (iProcessData.Details[i] is not Tuple<string, bool, DateTime, string> tuple)
continue;
isBioRad = tuple.Item2;
dataText = tuple.Item4;
cassetteID = tuple.Item1;
cassetteDateTime = tuple.Item3;
if (isBioRad)
tupleFileName = string.Concat("DetailDataBioRad_", cassetteID, "_", cassetteDateTime.Ticks, ".txt");
else
tupleFileName = string.Concat("CassetteDataBioRad_", cassetteID, "_", cassetteDateTime.Ticks, ".txt");
fileNameTemp = Path.Combine(sequenceDirectoryName, tupleFileName);
if (!_IsEAFHosted)
continue;
File.WriteAllText(fileNameTemp, dataText);
File.SetLastWriteTime(fileNameTemp, cassetteDateTime);
if (_Logistics.Sequence != cassetteDateTime.Ticks && File.Exists(originalDataBioRad))
File.Copy(originalDataBioRad, Path.Combine(directoryName, $"{_OriginalDataBioRad}{cassetteDateTime.Ticks}.txt"));
File.Move(fileNameTemp, Path.Combine(directoryName, tupleFileName));
}
if (!_IsEAFHosted)
results = iProcessData.GetResults(this, _Logistics, results.Item4);
Constant constant = new();
string[] allLines = File.ReadAllLines(_Logistics.ReportFullPath);
bool isValid = allLines.Any(l => l.Contains(constant.Finished) || l.Contains(constant.ProcessFailed));
if (!isValid)
results = new(string.Empty, tests, jsonElements.ToArray(), fileInfoCollection);
else
{
if (Directory.Exists(sequenceDirectoryName))
Directory.Delete(sequenceDirectoryName);
Run? run = Run.Get(_TickOffset.Value, _Logistics, fileInfoCollection, constant, allLines);
if (run is null)
results = new(string.Concat("A) No Data - ", dateTime.Ticks), tests, jsonElements.ToArray(), fileInfoCollection);
else
{
string mid;
Descriptor descriptor = ProcessData.GetDescriptor(run.Header.Cassette);
if (!string.IsNullOrEmpty(descriptor.Cassette) && string.IsNullOrEmpty(descriptor.Reactor) && string.IsNullOrEmpty(descriptor.RDS) && string.IsNullOrEmpty(descriptor.PSN))
mid = descriptor.Cassette;
else if (!string.IsNullOrEmpty(descriptor.Employee) && string.IsNullOrEmpty(descriptor.Reactor) && string.IsNullOrEmpty(descriptor.RDS) && string.IsNullOrEmpty(descriptor.PSN))
mid = descriptor.Employee;
else
mid = string.Concat(descriptor.Reactor, "-", descriptor.RDS, "-", descriptor.PSN);
mid = Regex.Replace(mid, @"[\\,\/,\:,\*,\?,\"",\<,\>,\|]", "_").Split('\r')[0].Split('\n')[0];
SetFileParameterLotID(mid);
_Logistics.Update(mid, descriptor.Reactor);
JsonElement jsonElement = Description.GetDefaultJsonElement(this, _Logistics);
jsonElements.Add(jsonElement);
results = new(_Logistics.Logistics1[0], tests, jsonElements.ToArray(), fileInfoCollection);
}
}
}
return results;

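The mid built here from the descriptor is scrubbed with the same character-class regex before it becomes the lot ID and is pushed into Logistics. A small sketch of that sanitization step in isolation (sample input only):

using System;
using System.Text.RegularExpressions;

internal static class MidSanitizerSketch
{
    private static void Main()
    {
        // Replace characters that are illegal in file names and keep only the first line,
        // the same regex and splits the handler applies before SetFileParameterLotID.
        string mid = "47-123456-1234?\r\nextra";
        string sanitized = Regex.Replace(mid, @"[\\,\/,\:,\*,\?,\"",\<,\>,\|]", "_").Split('\r')[0].Split('\n')[0];
        Console.WriteLine(sanitized); // 47-123456-1234_
    }
}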
@ -48,8 +48,16 @@ public class Header
return result;
}

private static string GetToText(string text, int[] i, string search) =>
text.Substring(i[0], text.IndexOf(search, i[0]) - i[0]).Trim();
private static string? GetToText(string text, int[] i, string search)
{
string? result;
string check = text.Substring(i[0]);
if (!check.Contains(search))
result = null;
else
result = text.Substring(i[0], text.IndexOf(search, i[0]) - i[0]).Trim();
return result;
}

internal static void ScanPast(string text, int[] i, string search)
{
@ -60,12 +68,13 @@ public class Header
i[0] = num + search.Length;
}

internal static Header Get(string text, Constant constant, int[] i)
internal static Header? Get(string text, Constant constant, int[] i)
{
Header? result;
string batch;
string? batch;
string? cassette;
if (!text.Contains(constant.Batch) || !text.Contains(constant.Started))
batch = string.Empty;
batch = null;
else
{
for (int z = 0; z < int.MaxValue; z++)
@ -78,18 +87,20 @@ public class Header
ScanPast(text, i, constant.StartedAt);
}
ScanPast(text, i, constant.Cassette);
string cassette;
if (!text.Substring(i[0]).Contains(constant.Started))
cassette = string.Empty;
cassette = null;
else
cassette = GetToText(text, i, constant.Started);
ScanPast(text, i, constant.StartedAt);
string dateTime = GetToEOL(text, i);
if (dateTime.EndsWith("."))
dateTime = dateTime.Remove(dateTime.Length - 1, 1);
result = new(batch: batch,
cassette: cassette,
dateTime: dateTime);
if (batch is null || cassette is null)
result = null;
else
result = new(batch: batch,
cassette: cassette,
dateTime: dateTime);
return result;
}


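Header parsing threads its position through a one-element int[] so helpers such as ScanPast and the now-nullable GetToText can advance a shared cursor over the raw text. A simplified standalone sketch of that pattern (not the adaptation's exact helpers; the sample header line is illustrative):

#nullable enable
using System;

internal static class HeaderCursorSketch
{
    // Advance the cursor just past the first occurrence of search, as ScanPast does.
    private static void ScanPast(string text, int[] i, string search)
    {
        int index = text.IndexOf(search, i[0]);
        i[0] = index < 0 ? text.Length : index + search.Length;
    }

    // Return the text up to search, or null when search is absent (the new nullable contract).
    private static string? GetToText(string text, int[] i, string search)
    {
        string check = text.Substring(i[0]);
        return !check.Contains(search)
            ? null
            : text.Substring(i[0], text.IndexOf(search, i[0]) - i[0]).Trim();
    }

    private static void Main()
    {
        int[] i = new int[] { 0 };
        const string text = "Cassette 47-123456-1234 started at 01/02/24 13:05.";
        ScanPast(text, i, "Cassette");
        string? cassette = GetToText(text, i, "started");
        Console.WriteLine(cassette); // 47-123456-1234
    }
}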
@ -1,4 +1,4 @@
namespace Adaptation.FileHandlers.Stratus;
namespace Adaptation.FileHandlers.txt;

public class Point
{
@ -1,12 +1,9 @@
|
||||
using Adaptation.Shared;
|
||||
using Adaptation.Shared.Duplicator;
|
||||
using Adaptation.Shared.Methods;
|
||||
using log4net;
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Globalization;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Text;
|
||||
using System.Text.Json;
|
||||
using System.Text.RegularExpressions;
|
||||
|
||||
@ -15,544 +12,189 @@ namespace Adaptation.FileHandlers.txt;
|
||||
public partial class ProcessData : IProcessData
|
||||
{
|
||||
|
||||
private readonly ILog _Log;
|
||||
private readonly List<object> _Details;
|
||||
|
||||
public string JobID { get; set; }
|
||||
public string MesEntity { get; set; }
|
||||
|
||||
List<object> Shared.Properties.IProcessData.Details => _Details;
|
||||
List<object> Shared.Properties.IProcessData.Details { get; }
|
||||
|
||||
internal ProcessData(IFileRead fileRead, Logistics logistics, long tickOffset, List<FileInfo> fileInfoCollection, string originalDataBioRad)
|
||||
string IProcessData.GetCurrentReactor(IFileRead fileRead, Logistics logistics, Dictionary<string, string> reactors) =>
|
||||
throw new Exception(string.Concat("See ", nameof(ProcessData)));
|
||||
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> IProcessData.GetResults(IFileRead fileRead, Logistics logistics, List<FileInfo> fileInfoCollection) =>
|
||||
throw new NotImplementedException();
|
||||
|
||||
internal static DateTime GetDateTime(Logistics logistics, string dateTimeText)
|
||||
{
|
||||
JobID = logistics.JobID;
|
||||
_Details = new List<object>();
|
||||
MesEntity = logistics.MesEntity;
|
||||
_Log = LogManager.GetLogger(typeof(ProcessData));
|
||||
List<Tuple<string, bool, DateTime, string>> tuples = Parse(fileRead, logistics, tickOffset, fileInfoCollection, originalDataBioRad);
|
||||
_Details.AddRange(tuples);
|
||||
DateTime result;
|
||||
string inputDateFormat = "MM/dd/yy HH:mm";
|
||||
if (dateTimeText.Length != inputDateFormat.Length)
|
||||
result = logistics.DateTimeFromSequence;
|
||||
else
|
||||
{
|
||||
if (!DateTime.TryParseExact(dateTimeText, inputDateFormat, CultureInfo.InvariantCulture, DateTimeStyles.None, out DateTime dateTimeParsed))
|
||||
result = logistics.DateTimeFromSequence;
|
||||
else
|
||||
{
|
||||
if (dateTimeParsed < logistics.DateTimeFromSequence.AddDays(1) && dateTimeParsed > logistics.DateTimeFromSequence.AddDays(-1))
|
||||
result = dateTimeParsed;
|
||||
else
|
||||
result = logistics.DateTimeFromSequence;
|
||||
}
|
||||
}
|
||||
return result;
|
||||
}
|
||||
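GetDateTime above only trusts the parsed timestamp when it is close to the sequence-derived time; otherwise it falls back. A minimal re-expression of that guard, taking the sequence timestamp directly instead of a Logistics instance (the format strings come from the code above):

using System;
using System.Globalization;

internal static class RunDateTimeSketch
{
    // Accept the parsed value only when it is within a day of the sequence-derived
    // timestamp; otherwise fall back, mirroring the branching in GetDateTime.
    private static DateTime GetDateTime(DateTime dateTimeFromSequence, string dateTimeText)
    {
        const string inputDateFormat = "MM/dd/yy HH:mm";
        if (dateTimeText.Length != inputDateFormat.Length)
            return dateTimeFromSequence;
        if (!DateTime.TryParseExact(dateTimeText, inputDateFormat, CultureInfo.InvariantCulture, DateTimeStyles.None, out DateTime parsed))
            return dateTimeFromSequence;
        return parsed < dateTimeFromSequence.AddDays(1) && parsed > dateTimeFromSequence.AddDays(-1)
            ? parsed
            : dateTimeFromSequence;
    }

    private static void Main()
    {
        DateTime fromSequence = new(2024, 1, 2, 13, 0, 0);
        Console.WriteLine(GetDateTime(fromSequence, "01/02/24 13:05")); // parsed value is used
        Console.WriteLine(GetDateTime(fromSequence, "01/09/24 13:05")); // outside the window, falls back
    }
}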
|
||||
string IProcessData.GetCurrentReactor(IFileRead fileRead, Logistics logistics, Dictionary<string, string> reactors) => throw new Exception(string.Concat("See ", nameof(Parse)));
|
||||
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> IProcessData.GetResults(IFileRead fileRead, Logistics logistics, List<FileInfo> fileInfoCollection)
|
||||
private static (string, string) GetReactorAndRDS(string defaultReactor, string defaultRDS, string text, string formattedText, string[] segments)
|
||||
{
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
|
||||
List<Test> tests = new();
|
||||
foreach (object item in _Details)
|
||||
tests.Add(Test.BioRadStratus);
|
||||
List<IDescription> descriptions = fileRead.GetDescriptions(fileRead, tests, this);
|
||||
if (tests.Count != descriptions.Count)
|
||||
throw new Exception();
|
||||
for (int i = 0; i < tests.Count; i++)
|
||||
string rds;
|
||||
string reactor;
|
||||
if (string.IsNullOrEmpty(text) || segments.Length == 0 || string.IsNullOrEmpty(formattedText))
|
||||
reactor = defaultReactor;
|
||||
else
|
||||
reactor = segments[0];
|
||||
if (segments.Length <= 1 || !int.TryParse(segments[1], out int rdsValue) || rdsValue < 99)
|
||||
rds = defaultRDS;
|
||||
else
|
||||
rds = segments[1];
|
||||
if (reactor.Length > 3)
|
||||
{
|
||||
if (descriptions[i] is not Description description)
|
||||
throw new Exception();
|
||||
if (description.Test != (int)tests[i])
|
||||
throw new Exception();
|
||||
rds = reactor;
|
||||
reactor = defaultReactor;
|
||||
}
|
||||
FileInfo fileInfo = new($"{logistics.ReportFullPath}.descriptions.json");
|
||||
List<Description> fileReadDescriptions = (from l in descriptions select (Description)l).ToList();
|
||||
string json = JsonSerializer.Serialize(fileReadDescriptions, fileReadDescriptions.GetType());
|
||||
File.WriteAllText(fileInfo.FullName, json);
|
||||
File.SetLastWriteTime(fileInfo.FullName, logistics.DateTimeFromSequence);
|
||||
fileInfoCollection.Add(fileInfo);
|
||||
JsonElement[] jsonElements = JsonSerializer.Deserialize<JsonElement[]>(json) ?? throw new Exception();
|
||||
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(logistics.Logistics1[0], tests.ToArray(), jsonElements, fileInfoCollection);
|
||||
return results;
|
||||
return new(reactor, rds);
|
||||
}
|
||||
|
||||
private static (string, string) GetLayerAndPSN(string defaultLayer, string defaultPSN, string[] segments)
|
||||
{
|
||||
string psn;
|
||||
string layer;
|
||||
if (segments.Length <= 2)
|
||||
{
|
||||
psn = defaultPSN;
|
||||
layer = defaultLayer;
|
||||
}
|
||||
else
|
||||
{
|
||||
string[] segmentsB = segments[2].Split('.');
|
||||
psn = segmentsB[0];
|
||||
if (segmentsB.Length <= 1)
|
||||
layer = defaultLayer;
|
||||
else
|
||||
{
|
||||
layer = segmentsB[1];
|
||||
if (layer.Length > 1 && layer[0] == '0')
|
||||
layer = layer.Substring(1);
|
||||
}
|
||||
}
|
||||
return (layer, psn);
|
||||
}
|
||||
|
||||
private static string GetZone(string[] segments)
|
||||
{
|
||||
string result;
|
||||
if (segments.Length <= 3)
|
||||
result = string.Empty;
|
||||
else
|
||||
{
|
||||
result = segments[3];
|
||||
if (result.Length > 1 && result[0] == '0')
|
||||
result = result.Substring(1);
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
public static Descriptor GetDescriptor(string text)
|
||||
{
|
||||
Descriptor result;
|
||||
string psn;
|
||||
string rds;
|
||||
string zone;
|
||||
string layer;
|
||||
string reactor;
|
||||
string cassette;
|
||||
string employee;
|
||||
string defaultPSN = string.Empty;
|
||||
string defaultRDS = string.Empty;
|
||||
string defaultZone = string.Empty;
|
||||
string defaultLayer = string.Empty;
|
||||
string defaultReactor = string.Empty;
|
||||
string defaultEmployee = string.Empty;
|
||||
if (Regex.IsMatch(text, @"^[a-zA-z][0-9]{2,4}$"))
|
||||
{
|
||||
cassette = text.ToUpper();
|
||||
psn = defaultPSN;
|
||||
rds = defaultRDS;
|
||||
zone = defaultZone;
|
||||
layer = defaultLayer;
|
||||
reactor = defaultReactor;
|
||||
employee = defaultEmployee;
|
||||
}
|
||||
else if (string.IsNullOrEmpty(text) || (text.Length is 2 or 3 && Regex.IsMatch(text, "^[a-zA-z]{2,3}")))
|
||||
{
|
||||
cassette = text;
|
||||
psn = defaultPSN;
|
||||
rds = defaultRDS;
|
||||
zone = defaultZone;
|
||||
employee = cassette;
|
||||
layer = defaultLayer;
|
||||
reactor = defaultReactor;
|
||||
}
|
||||
else if (Regex.IsMatch(text, @"^[0-9]{2}[.][0-9]{1}[.]?[0-9]{0,1}"))
|
||||
{
|
||||
string[] segments = text.Split('.');
|
||||
cassette = text;
|
||||
psn = defaultPSN;
|
||||
rds = defaultRDS;
|
||||
layer = segments[1];
|
||||
reactor = segments[0];
|
||||
employee = defaultEmployee;
|
||||
if (segments.Length <= 2)
|
||||
zone = defaultZone;
|
||||
else
|
||||
zone = segments[2];
|
||||
}
|
||||
else
|
||||
{
|
||||
string[] segments;
|
||||
// Remove illegal characters \/:*?"<>| found in the Cassette.
|
||||
cassette = Regex.Replace(text, @"[\\,\/,\:,\*,\?,\"",\<,\>,\|]", "_").Split('\r')[0].Split('\n')[0];
|
||||
if (cassette.Length > 2 && cassette[0] == '1' && (cassette[1] == 'T' || cassette[1] == 't'))
|
||||
cassette = cassette.Substring(2);
|
||||
if (cassette.Contains("-"))
|
||||
segments = cassette.Split(new char[] { '-' });
|
||||
else if (!cassette.Contains("\u005F"))
|
||||
segments = cassette.Split(new char[] { ' ' });
|
||||
else if (cassette.Contains("."))
|
||||
segments = cassette.Split(new char[] { '.' });
|
||||
else
|
||||
segments = cassette.Split(new char[] { '\u005F' });
|
||||
// bool hasRDS = Regex.IsMatch(cassette, "[-]?([QP][0-9]{4,}|[0-9]{5,})[-]?");
|
||||
(reactor, rds) = GetReactorAndRDS(defaultReactor, defaultRDS, text, cassette, segments);
|
||||
(layer, psn) = GetLayerAndPSN(defaultLayer, defaultPSN, segments);
|
||||
zone = GetZone(segments);
|
||||
if (segments.Length <= 3 || segments[3].Length <= 1)
|
||||
employee = defaultEmployee;
|
||||
else
|
||||
employee = segments[3];
|
||||
}
|
||||
result = new(cassette, employee, layer, psn, rds, reactor, zone);
|
||||
return result;
|
||||
}
|
||||
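GetDescriptor is the parser that the new static descriptor-parsing tests in this commit exercise. A sketch of one such check, assuming MSTest and an illustrative test class name; the expected values follow the hyphen-split branch above:

using Adaptation.FileHandlers.txt;
using Microsoft.VisualStudio.TestTools.UnitTesting;

[TestClass]
public class DescriptorParsingSketch
{
    [TestMethod]
    public void GetDescriptor_ReactorRdsPsn()
    {
        // "47-123456-1234.2" splits on '-' into reactor, RDS and PSN.layer segments.
        Descriptor descriptor = ProcessData.GetDescriptor("47-123456-1234.2");
        Assert.AreEqual("47-123456-1234.2", descriptor.Cassette);
        Assert.AreEqual("47", descriptor.Reactor);
        Assert.AreEqual("123456", descriptor.RDS);
        Assert.AreEqual("1234", descriptor.PSN);
    }
}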
|
||||
#nullable enable
|
||||
|
||||
private List<Tuple<string, bool, DateTime, string>> Parse(IFileRead fileRead, Logistics logistics, long tickOffset, List<FileInfo> fileInfoCollection, string originalDataBioRad)
|
||||
internal static List<Description> GetDescriptions(JsonElement[] jsonElements)
|
||||
{
|
||||
List<Tuple<string, bool, DateTime, string>> results = new();
|
||||
string[] reportFullPathlines = File.ReadAllLines(logistics.ReportFullPath);
|
||||
// ***********************************************************************************
|
||||
// * Step #2 - Verify completeness of each cassette scan in the raw data source file *
|
||||
// ***********************************************************************************
|
||||
bool? cassetteScanCompleted = null;
|
||||
// Scrub the source file to verify that for each cassette, present in the file, there is a complete
|
||||
// data set (i.e., that is there is a start and finished statement).
|
||||
//
|
||||
// Scenario #1 - Normal
|
||||
// For every cassette "started" there must be a matching cassette "finished".
|
||||
// Scenario #2 - Only Cassette "finished" (with or witout additional cassette complete data sets)
|
||||
// Incomplete data file. File will be process and generate error for the incomplete portion.
|
||||
// Scenario #3 - Only Cassette "Started"
|
||||
// Bail out of the solution. Source data file not ready to be processed.
|
||||
foreach (string line in reportFullPathlines)
|
||||
List<Description> results = new();
|
||||
Description? description;
|
||||
foreach (JsonElement jsonElement in jsonElements)
|
||||
{
|
||||
if (line is null)
|
||||
break;
|
||||
if (line.Contains("Cassette") && line.Contains("started") && (cassetteScanCompleted is null || cassetteScanCompleted.Value))
|
||||
{
|
||||
cassetteScanCompleted = false;
|
||||
_Log.Debug("****Extract() - CassetteScanCompleted = FALSE");
|
||||
}
|
||||
else if (line.Contains("Cassette") && line.Contains("finished") && (cassetteScanCompleted is null || !cassetteScanCompleted.Value))
|
||||
{
|
||||
cassetteScanCompleted = true;
|
||||
_Log.Debug("****Extract() - CassetteScanCompleted = TRUE");
|
||||
}
|
||||
}
|
||||
Dictionary<string, List<string>> cassetteIDAndDataSets;
|
||||
if (string.IsNullOrEmpty(logistics.ReportFullPath))
|
||||
cassetteIDAndDataSets = new();
|
||||
else if (cassetteScanCompleted is null || !cassetteScanCompleted.Value)
|
||||
{
|
||||
cassetteIDAndDataSets = new();
|
||||
// Raw source file has an incomplete data set or it only contains a "Process failed" and should not be
|
||||
// processed /split yet. Simply get out of this routine until enough data has been appended to the file.
|
||||
_Log.Debug($"****Extract() - Raw source file has an incomplete data set and should not be processed yet.");
|
||||
}
|
||||
else
|
||||
cassetteIDAndDataSets = GetCassetteIDAndDataSets(reportFullPathlines);
|
||||
if (cassetteIDAndDataSets.Count != 0)
|
||||
{
|
||||
int wafer;
|
||||
string user;
|
||||
string runID;
|
||||
bool isBioRad;
|
||||
string recipe;
|
||||
int count = -1;
|
||||
int stringIndex;
|
||||
string dataText;
|
||||
string dataType;
|
||||
string[] segments;
|
||||
string cassetteID;
|
||||
string recipeName;
|
||||
IProcessData iProcessData;
|
||||
DateTime cassetteDateTime;
|
||||
string recipeSearch = "Recipe";
|
||||
string toolType = string.Empty;
|
||||
StringBuilder contents = new();
|
||||
Stratus.ProcessData processData;
|
||||
foreach (KeyValuePair<string, List<string>> keyValuePair in cassetteIDAndDataSets)
|
||||
{
|
||||
isBioRad = false;
|
||||
dataType = string.Empty;
|
||||
cassetteID = keyValuePair.Key;
|
||||
for (int i = 0; i < keyValuePair.Value.Count; i++)
|
||||
{
|
||||
dataText = keyValuePair.Value[i];
|
||||
// Finished capturing the complete cassette scan data information. Release the cassette file.
|
||||
if (dataText.Contains("Cassette") &&
|
||||
dataText.Contains("Wafer") &&
|
||||
dataText.Contains("Slot") &&
|
||||
dataText.Contains("Recipe") &&
|
||||
dataText.Contains("Points") &&
|
||||
dataText.Contains("Thickness") &&
|
||||
dataText.Contains("Mean") &&
|
||||
dataText.Contains("Source:") &&
|
||||
dataText.Contains("Destination:"))
|
||||
{
|
||||
// Extract the recipe name
|
||||
runID = string.Empty;
|
||||
recipeName = string.Empty;
|
||||
stringIndex = dataText.IndexOf(recipeSearch);
|
||||
recipeName = dataText.Substring(stringIndex + recipeSearch.Length);
|
||||
_Log.Debug($"****Extract(FDR): recipeName = {recipeName}");
|
||||
#pragma warning disable CA2249
|
||||
if (!string.IsNullOrEmpty(recipeName) && (recipeName.IndexOf("center", StringComparison.CurrentCultureIgnoreCase) >= 0))
|
||||
#pragma warning restore CA2249
|
||||
{
|
||||
/***************************************/
|
||||
/* STRATUS Measurement = FQA Thickness */
|
||||
/***************************************/
|
||||
// Recipes that contains the substring "Center" are STRATUS centerpoint recipes. They are used for Inspection and FQA measurements.
|
||||
// measurement. The data from these scans should be uploaded to the Metrology Viewer database as STRATUS and uploaded to the
|
||||
// OpenInsight [FQA Thickness - Post Epi - QA Metrology / Thk/RHO Value for each slotID] automatically.
|
||||
isBioRad = false;
|
||||
toolType = "STRATUS";
|
||||
dataType = "FQA Thickness";
|
||||
}
|
||||
#pragma warning disable CA2249
|
||||
else if (!string.IsNullOrEmpty(recipeName) && (recipeName.IndexOf("prod_", StringComparison.CurrentCultureIgnoreCase) >= 0))
|
||||
#pragma warning restore CA2249
|
||||
{
|
||||
/******************************************/
|
||||
/* BIORAD Measurement = Product Thickness */
|
||||
/******************************************/
|
||||
// Recipes that contains the substring "Center" are STRATUS centerpoint recipes. They are used for Inspection and FQA measurements.
|
||||
// measurement. The data from these scans should be uploaded to the Metrology Viewer database as STRATUS and uploaded to the
|
||||
// OpenInsight [FQA Thickness - Post Epi - QA Metrology / Thk/RHO Value for each slotID] automatically.
|
||||
isBioRad = true;
|
||||
toolType = "BIORAD";
|
||||
dataType = "Product Thickness";
|
||||
}
|
||||
else if (!string.IsNullOrEmpty(recipeName) &&
|
||||
#pragma warning disable CA2249
|
||||
((recipeName.IndexOf("T-Low", StringComparison.CurrentCultureIgnoreCase) >= 0) ||
|
||||
(recipeName.IndexOf("T_Low", StringComparison.CurrentCultureIgnoreCase) >= 0) ||
|
||||
(recipeName.IndexOf("T-Mid", StringComparison.CurrentCultureIgnoreCase) >= 0) ||
|
||||
(recipeName.IndexOf("T_Mid", StringComparison.CurrentCultureIgnoreCase) >= 0) ||
|
||||
(recipeName.IndexOf("T-High", StringComparison.CurrentCultureIgnoreCase) >= 0) ||
|
||||
(recipeName.IndexOf("T_High", StringComparison.CurrentCultureIgnoreCase) >= 0)))
|
||||
#pragma warning restore CA2249
|
||||
{
|
||||
/*************************************/
|
||||
/* BIORAD Measurement = No Uploading */
|
||||
/*************************************/
|
||||
// Recipes that contains the substring "T-Low, T_Low, T-Mid, T_Mid and T-High, T_High" are BIORAD verification recipe. The information
|
||||
// should be uploaded to the Metrology Viewer database as BIORAD. No OpenInsight.
|
||||
isBioRad = true;
|
||||
toolType = "BIORAD";
|
||||
dataType = "Verification";
|
||||
}
|
||||
else
|
||||
{
|
||||
// Count the number of wafers (ref. "Source: Slot") in the cassette
|
||||
int waferCount = Regex.Matches(dataText, "Source: Slot").Count;
|
||||
if (waferCount == 1)
|
||||
{
|
||||
// Metrology Thickness. Upload to OpenInsight same as BR2 and BR3
|
||||
isBioRad = true;
|
||||
toolType = "BIORAD";
|
||||
dataType = "Metrology Thickness";
|
||||
}
|
||||
else if (waferCount > 1)
|
||||
{
|
||||
// Inspection Measurement. Do not upload to OpenInsight.
|
||||
isBioRad = true;
|
||||
toolType = "BIORAD";
|
||||
dataType = "Inspection";
|
||||
}
|
||||
}
|
||||
}
|
||||
_Log.Debug($"****Extract(FDR): ToolType = {toolType}");
|
||||
_Log.Debug($"****Extract(FDR): DataType = {dataType}");
|
||||
if (!isBioRad)
|
||||
{
|
||||
cassetteDateTime = logistics.DateTimeFromSequence.AddTicks(i * -1);
|
||||
results.Add(new Tuple<string, bool, DateTime, string>(cassetteID, isBioRad, cassetteDateTime, dataText));
|
||||
}
|
||||
else
|
||||
{
|
||||
Stratus.Run? complete = null;
|
||||
processData = new Stratus.ProcessData(fileRead, logistics, fileInfoCollection, originalDataBioRad, complete, dataText: dataText);
|
||||
iProcessData = processData;
|
||||
if (iProcessData.Details.Count == 0)
|
||||
_Log.Warn("No Details!");
|
||||
else
|
||||
{
|
||||
foreach (object item in iProcessData.Details)
|
||||
{
|
||||
if (item is not Stratus.Detail detail)
|
||||
throw new Exception();
|
||||
count += 1;
|
||||
_ = contents.Clear();
|
||||
cassetteDateTime = logistics.DateTimeFromSequence.AddTicks(count * -1);
|
||||
user = processData.Employee?.ToString() ?? "";
|
||||
recipe = detail.Recipe?.ToString() ?? "";
|
||||
if (isBioRad)
|
||||
recipe = recipe.Split(' ').First();
|
||||
_ = contents.Append("Bio-Rad ").Append("QS400MEPI".PadRight(17)).Append("Recipe: ").Append(recipe.PadRight(25)).AppendLine(processData.Date.ToString(Stratus.Description.GetDateFormat()));
|
||||
_ = contents.Append("operator: ").Append(user.PadRight(22)).Append("batch: BIORAD #").AppendLine(logistics.JobID.Substring(6, 1));
|
||||
_ = contents.Append("cassette: ").Append("".PadRight(22)).Append("wafer: ").AppendLine(processData.Cassette);
|
||||
_ = contents.AppendLine("--------------------------------------------------------------------------------");
|
||||
_ = contents.AppendLine(" position thickness position thickness position thickness");
|
||||
segments = detail.Thickness.Split(',');
|
||||
for (int j = 0; j < segments.Length; j++)
|
||||
{
|
||||
wafer = j + 1;
|
||||
_ = contents.Append(wafer.ToString().PadLeft(11));
|
||||
if ((wafer % 3) > 0)
|
||||
_ = contents.Append(segments[j].PadLeft(10));
|
||||
else
|
||||
_ = contents.AppendLine(segments[j].PadLeft(10));
|
||||
}
|
||||
if ((segments.Length % 3) > 0)
|
||||
_ = contents.AppendLine();
|
||||
_ = contents.Append(" wafer mean thickness = ").Append(detail.Mean).Append(", std. dev = ").Append(detail.StdDev).Append(' ').AppendLine(detail.PassFail);
|
||||
_ = contents.AppendLine("================================================================================");
|
||||
_ = contents.AppendLine("");
|
||||
_ = contents.AppendLine("Radial variation (computation B) PASS:");
|
||||
_ = contents.AppendLine("");
|
||||
_ = contents.AppendLine(" thickness 0.0000");
|
||||
_ = contents.AppendLine("");
|
||||
_ = contents.Append(" Slot:").Append(detail.Slot).AppendLine(";");
|
||||
results.Add(new Tuple<string, bool, DateTime, string>(cassetteID, isBioRad, cassetteDateTime, contents.ToString()));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
// **********************************************
|
||||
// * Step #3 - Protect the raw data source file *
|
||||
// **********************************************
|
||||
// The multi-cassette raw source file is ready to be split. Each cassette scan set has
// been determined to be complete (i.e., has the started & finished statements). At this point
|
||||
// it is important to rename the multi-cassette raw data source file, located in the RawData
|
||||
// folder, to a different name so that the tool does not attempt to update the file while being
|
||||
// processed by the EAF cell instance.
|
||||
// Get the last date/time the DataBioRad.txt file was updated
|
||||
DateTime afterCheck = new(File.GetLastWriteTime(logistics.ReportFullPath).Ticks + tickOffset);
|
||||
// Ensure that the DataBioRad.txt file has not been updated since the FileReader began the healthcheck
|
||||
// If the date/time values are different between the "Before" and "After" checks then let it go. The
|
||||
// tool is still busy trying to update the file. The FileReader will try to catch the data on the
|
||||
// next update.
|
||||
if (logistics.DateTimeFromSequence != afterCheck)
|
||||
{
|
||||
results.Clear();
|
||||
_Log.Debug($"****Extract() - DataBioRad.txt file is getting updated fast");
|
||||
_Log.Debug($"****Extract() - DataBioRadDateTime_AfterCheck = {afterCheck.Ticks}");
|
||||
_Log.Debug($"****Extract() - DataBioRadDateTime_BeforeCheck = {logistics.Sequence}");
|
||||
}
|
||||
return results;
|
||||
}
|
||||
|
||||
private static Dictionary<string, List<string>> GetCassetteIDAndDataSets(string[] reportFullPathlines)
|
||||
{
|
||||
Dictionary<string, List<string>> results = new();
|
||||
string line;
|
||||
string[] segments;
|
||||
int cassetteEndIndex;
|
||||
int thicknessCounter;
|
||||
string thicknessHead;
|
||||
string thicknessInfo;
|
||||
string thicknessTail;
|
||||
int cassetteStartIndex;
|
||||
StringBuilder lines = new();
|
||||
string slotID = string.Empty;
|
||||
string cassetteID = string.Empty;
|
||||
string batchHeader = string.Empty;
|
||||
bool finishedReadingThicknessInfo;
|
||||
bool slotInformationCaptured = false;
|
||||
bool pointsInformationCaptured = false;
|
||||
bool sourceInformationCaptured = false;
|
||||
bool waferWaferInformationCaptured = false;
|
||||
bool destinationInformationCaptured = false;
|
||||
List<Tuple<string, int, int>> cassetteStartAndEnds = new();
|
||||
for (int i = 0; i < reportFullPathlines.Length; i++)
|
||||
{
|
||||
line = reportFullPathlines[i].Trim();
|
||||
if (string.IsNullOrEmpty(line))
|
||||
if (jsonElement.ValueKind != JsonValueKind.Object)
|
||||
throw new Exception();
|
||||
description = JsonSerializer.Deserialize(jsonElement.ToString(), DescriptionSourceGenerationContext.Default.Description);
|
||||
if (description is null)
|
||||
continue;
|
||||
if (line.StartsWith("Batch") && line.Contains("started"))
|
||||
batchHeader = line;
|
||||
if (i + 1 == reportFullPathlines.Length)
|
||||
continue;
|
||||
if (line.StartsWith("Cassette") && line.Contains("started"))
|
||||
{
|
||||
for (int j = i + 1; j < reportFullPathlines.Length; j++)
|
||||
{
|
||||
if (j + 1 == reportFullPathlines.Length)
|
||||
cassetteStartAndEnds.Add(new Tuple<string, int, int>(batchHeader, i, j));
|
||||
else
|
||||
{
|
||||
line = reportFullPathlines[j].Trim();
|
||||
if (line.StartsWith("Cassette") && line.Contains("started"))
|
||||
{
|
||||
cassetteStartAndEnds.Add(new Tuple<string, int, int>(batchHeader, i, j - 1));
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
foreach (Tuple<string, int, int> tuple in cassetteStartAndEnds)
|
||||
{
|
||||
_ = lines.Clear();
|
||||
batchHeader = tuple.Item1;
|
||||
cassetteEndIndex = tuple.Item3;
|
||||
cassetteStartIndex = tuple.Item2;
|
||||
for (int l = cassetteStartIndex; l <= cassetteEndIndex; l++)
|
||||
{
|
||||
line = reportFullPathlines[l].Trim();
|
||||
if (string.IsNullOrEmpty(line))
|
||||
continue;
|
||||
if (l == cassetteStartIndex)
|
||||
{
|
||||
// Save the previously saved "Batch Header"
|
||||
_ = lines.AppendLine(batchHeader);
|
||||
// Save the first line of the cassette scan information
|
||||
_ = lines.AppendLine(line);
|
||||
// Each new cassette initialize the WaferWafer information flag
|
||||
waferWaferInformationCaptured = false;
|
||||
slotInformationCaptured = false;
|
||||
if (line.Length > 9)
|
||||
{
|
||||
// Detected a new cassette data scan. Extract the cassette ID.
|
||||
// Example: "Cassette 47-241330-4238 started."
|
||||
segments = line.Substring(9).Split(new string[] { "started" }, StringSplitOptions.RemoveEmptyEntries);
|
||||
if (segments.Length != 0)
|
||||
// Remove illegal characters \/:*?"<>| found in the Cassette.
|
||||
cassetteID = Regex.Replace(segments[0].Trim(), @"[\\,\/,\:,\*,\?,\"",\<,\>,\|]", "_").Split('\r')[0].Split('\n')[0];
|
||||
}
|
||||
}
|
||||
// Continue reading and saving the cassette scan information, into the cassette
|
||||
// scan output file, until the end of the cassette scan "Finished" statement has
|
||||
// been detected.
|
||||
// Maintain standard format between various BioRad tools. The "Points" and "Thickness"
// values between various BioRad tools might be spread over multiple lines. The following
|
||||
// is simply to regroup the "Points" and "Thickness" information on the same line accordingly.
|
||||
if (line.StartsWith("Wafer Wafer"))
|
||||
{
|
||||
_ = lines.AppendLine(line);
|
||||
slotInformationCaptured = false;
|
||||
waferWaferInformationCaptured = true;
|
||||
}
|
||||
else if (line.StartsWith("Slot"))
|
||||
{
|
||||
slotID = string.Empty;
|
||||
segments = line.Split(' ');
|
||||
if (segments.Length > 1)
|
||||
slotID = segments[1];
|
||||
// There are cases where the WaferWafer information is missing. Create a
|
||||
// WaferWafer entry based off the slot number.
|
||||
if (!waferWaferInformationCaptured)
|
||||
{
|
||||
waferWaferInformationCaptured = true;
|
||||
_ = lines.AppendLine("Wafer Wafer " + slotID + ".");
|
||||
}
|
||||
_ = lines.AppendLine(line);
|
||||
slotInformationCaptured = true;
|
||||
}
|
||||
else if (line.StartsWith("Recipe"))
|
||||
{
|
||||
_ = lines.AppendLine(line);
|
||||
pointsInformationCaptured = false;
|
||||
}
|
||||
else if (line.StartsWith("Points"))
|
||||
{
|
||||
_ = lines.AppendLine(line);
|
||||
pointsInformationCaptured = true;
|
||||
}
|
||||
else if (line.Contains("Thickness"))
|
||||
{
|
||||
// Before addressing the "Thickness" section, ensure that the "Points" section
|
||||
// has been found. Otherwise, we need to write out a default value.
|
||||
if (!pointsInformationCaptured)
|
||||
{
|
||||
// No "Points" information has been capture. Default to "Points : 0 0"
|
||||
_ = lines.AppendLine("Points : 0 0");
|
||||
pointsInformationCaptured = true;
|
||||
}
|
||||
// The "Thickness" output section comes out differently between various Stratus tools. In some
|
||||
// cases, the thickness values are either empty (no values), on the same line or on different lines.
|
||||
// Below are examples of how the data needs to be formatted after being parsed:
|
||||
// Thickness, um 1 - 1 0
|
||||
// Thickness, um 1 - 1 13.630
|
||||
// Thickness, um 1 - 9 1.197 1.231 1.248 1.235 1.199 1.202 1.236 1.242 1.212
|
||||
thicknessCounter = 0;
|
||||
thicknessHead = line;
|
||||
thicknessInfo = "";
|
||||
thicknessTail = "";
|
||||
finishedReadingThicknessInfo = false;
|
||||
for (int t = l + 1; t <= cassetteEndIndex; t++)
|
||||
{
|
||||
l = t;
|
||||
line = reportFullPathlines[l].Trim();
|
||||
if (string.IsNullOrEmpty(line))
|
||||
continue;
|
||||
if (!line.StartsWith("Slot"))
|
||||
{
|
||||
thicknessCounter++;
|
||||
thicknessTail = string.Concat(thicknessTail, " ", line);
|
||||
}
|
||||
else
|
||||
{
|
||||
finishedReadingThicknessInfo = true;
|
||||
if (thicknessCounter != 0)
|
||||
thicknessInfo = string.Concat(" 1 - ", thicknessCounter);
|
||||
else
|
||||
{
|
||||
// Two possible formatting scenarios at this point. Either the data was already
|
||||
// formatted properly on one line. Or the Thickness value was missing, in which
|
||||
// case we need to default the thickness value to zero (0).
|
||||
segments = thicknessHead.Split(' ');
|
||||
if (segments.Length > 2)
|
||||
{
|
||||
// The "Thickness" raw data if formatted as a normal single line format and
|
||||
// already include the Header + Info + Tail
|
||||
}
|
||||
else
|
||||
{
|
||||
// The "Thickness raw data has no values. Formatting the output with zero.
|
||||
thicknessInfo = " 1 - 1";
|
||||
thicknessTail = " 0";
|
||||
}
|
||||
}
|
||||
_ = lines.AppendLine(string.Concat(thicknessHead, thicknessInfo, thicknessTail));
|
||||
// The "Slot" keyword is the tag that determines the end of the Thickness section. The "Slot"
|
||||
// information has already been ready. Simply write it back.
|
||||
_ = lines.AppendLine(line);
|
||||
}
|
||||
if (finishedReadingThicknessInfo)
|
||||
break;
|
||||
}
|
||||
}
|
||||
else if (line.StartsWith("Mean"))
|
||||
{
|
||||
_ = lines.AppendLine(line);
|
||||
sourceInformationCaptured = false;
|
||||
destinationInformationCaptured = false;
|
||||
}
|
||||
else if (line.StartsWith("Source:") && slotInformationCaptured)
|
||||
{
|
||||
_ = lines.AppendLine(line);
|
||||
sourceInformationCaptured = true;
|
||||
}
|
||||
else if (line.StartsWith("Destination:") && slotInformationCaptured)
|
||||
{
|
||||
if (!sourceInformationCaptured)
|
||||
{
|
||||
sourceInformationCaptured = true;
|
||||
_ = lines.AppendLine(string.Concat("Source: Slot ", slotID, ", Cassette"));
|
||||
}
|
||||
_ = lines.AppendLine(line);
|
||||
destinationInformationCaptured = true;
|
||||
// Each time a cassette slot section has been completed, we must reinitialize
|
||||
// the "Wafer Wafer" information flag in case there are multiple slots in the
|
||||
// same cassette
|
||||
slotInformationCaptured = false;
|
||||
waferWaferInformationCaptured = false;
|
||||
}
|
||||
else if (line.StartsWith("Cassette") && line.Contains("finished."))
|
||||
{
|
||||
// Reach the end of the cassette data set information
|
||||
if (!sourceInformationCaptured)
|
||||
{
|
||||
sourceInformationCaptured = true;
|
||||
_ = lines.AppendLine(string.Concat("Source: Slot ", slotID, ", Cassette"));
|
||||
}
|
||||
if (!destinationInformationCaptured)
|
||||
{
|
||||
destinationInformationCaptured = true;
|
||||
_ = lines.AppendLine(string.Concat("Destination: Slot ", slotID, ", Cassette"));
|
||||
// Each time a cassette slot section has been completed, we must reinitialize
|
||||
// the "Wafer Wafer" information flag in case there are multiple slots in the
|
||||
// same cassette
|
||||
slotInformationCaptured = false;
|
||||
waferWaferInformationCaptured = false;
|
||||
}
|
||||
// Write the end of cassette statement to the output file
|
||||
_ = lines.AppendLine(line);
|
||||
// Read the Mean-Average line information, post the cassette "Finished" statement
|
||||
for (int a = l + 1; a <= cassetteEndIndex; a++)
|
||||
{
|
||||
l = a;
|
||||
line = reportFullPathlines[l].Trim();
|
||||
if (string.IsNullOrEmpty(line))
|
||||
continue;
|
||||
// There are many blank lines in the source file. Search for the first
|
||||
// occurrence of the string "Mean".
|
||||
if (line.StartsWith("Mean"))
|
||||
{
|
||||
_ = lines.AppendLine(line);
|
||||
break;
|
||||
}
|
||||
// The mean Average information is missing. We are done reading the cassette information.
|
||||
if (line.StartsWith("Batch"))
|
||||
break;
|
||||
}
|
||||
if (!results.ContainsKey(cassetteID))
|
||||
results.Add(cassetteID, new List<string>());
|
||||
results[cassetteID].Add(lines.ToString());
|
||||
}
|
||||
}
|
||||
results.Add(description);
|
||||
}
|
||||
return results;
|
||||
}
|
||||
|
||||
@ -7,6 +7,7 @@ using System.Linq;
using System.Text;
using System.Text.Json;
using System.Text.Json.Serialization;
using System.Threading;

namespace Adaptation.FileHandlers.txt;

@ -15,24 +16,26 @@ namespace Adaptation.FileHandlers.txt;
internal class Run
{

public Run(Header header, ReadOnlyCollection<Wafer> wafers, Grade grade)
public Run(string text, Header header, ReadOnlyCollection<Wafer> wafers, Grade grade)
{
Text = text;
Header = header;
Wafers = wafers;
Grade = grade;
}

public string Text { get; }
public Header Header { get; }
public ReadOnlyCollection<Wafer> Wafers { get; }
public Grade Grade { get; }

private static void WriteJson(Logistics logistics, List<FileInfo> _, int r, Run result)
private static void WriteJson(Logistics logistics, List<FileInfo> fileInfoCollection, string extension, Run result)
{
FileInfo fileInfo = new($"{logistics.ReportFullPath}-{r}.run.json");
FileInfo fileInfo = new($"{logistics.ReportFullPath}.{logistics.Sequence}{extension}.run.json");
string json = JsonSerializer.Serialize(result, RunSourceGenerationContext.Default.Run);
File.WriteAllText(fileInfo.FullName, json);
File.SetLastWriteTime(fileInfo.FullName, logistics.DateTimeFromSequence);
// Can't add until old parse is removed // fileInfoCollection.Add(fileInfo);
fileInfoCollection.Add(fileInfo);
}

private static ReadOnlyCollection<string> GetLines(Logistics logistics, JsonElement[]? jsonElements)
@ -76,11 +79,12 @@ internal class Run
return results.AsReadOnly();
}

private static void WriteCommaSeparatedValues(Logistics logistics, int r, Run run)
private static void WriteCommaSeparatedValues(Logistics logistics, string extension, Run run)
{
List<Row> results = new();
Row row;
int index = 0;
string path = $"{logistics.ReportFullPath}.{logistics.Sequence}{extension}.csv";
for (int w = 0; w < run.Wafers.Count; w++)
{
for (int s = 0; s < run.Wafers[w].Sites.Count; s++)
@ -93,87 +97,112 @@ internal class Run
string json = JsonSerializer.Serialize(results);
JsonElement[]? jsonElements = JsonSerializer.Deserialize<JsonElement[]>(json);
ReadOnlyCollection<string> lines = GetLines(logistics, jsonElements);
File.WriteAllText($"{logistics.ReportFullPath}-{r}.csv", string.Join(Environment.NewLine, lines));
File.WriteAllText(path, string.Join(Environment.NewLine, lines));
}

private static ReadOnlyCollection<string> GetRuns(Constant constant, string text)
private static int? GetFirstRun(Constant constant, string[] allLines)
{
List<string> results = new();
string check;
List<string> collection = new();
List<string> lines = text.Split(new string[] { Environment.NewLine }, StringSplitOptions.None).ToList();
lines.Add($"{constant.Batch}{constant.Started}");
foreach (string line in lines)
int? result = null;
string line;
string lineB;
for (int i = 0; i < allLines.Length; i++)
{
if (line.StartsWith(constant.Batch) && line.Contains(constant.Started))
line = allLines[i];
if (line.Contains(constant.Finished))
{
check = string.Join(Environment.NewLine, collection);
if (check.Contains(constant.Finished))
results.Add(check);
collection.Clear();
for (int j = i + 1; j < allLines.Length; j++)
{
lineB = allLines[j];
if (!lineB.StartsWith(constant.Batch) || !lineB.Contains(constant.Started))
continue;
result = j;
break;
}
result ??= allLines.Length;
break;
}
collection.Add(line);
}
return results.AsReadOnly();
return result;
}

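GetFirstRun returns the index of the line that starts the next batch after the first finished run (or allLines.Length when nothing follows), and the caller uses that index to split the completed run from the remainder. A sketch of that split with a hand-picked index and illustrative batch/cassette lines (the literal "started"/"finished." markers come from Constant):

using System;
using System.Linq;

internal static class FirstRunSplitSketch
{
    private static void Main()
    {
        string[] allLines =
        {
            "Batch 1 started.",
            "Cassette 47-123456-1234 started at 01/02/24 13:05.",
            "Cassette 47-123456-1234 finished.",
            "Batch 2 started.",
            "Cassette 48-654321-4321 started at 01/02/24 14:10.",
        };
        // GetFirstRun would return 3 here: the index of the next "Batch ... started" line.
        int lines = 3;
        string text = string.Join(Environment.NewLine, allLines.Take(lines));
        string remainderText = string.Join(Environment.NewLine, allLines.Skip(lines));
        Console.WriteLine(text);          // first completed run, handed to GetRun/WriteFile
        Console.WriteLine("---");
        Console.WriteLine(remainderText); // written back to the report file for the next pass
    }
}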
private static ReadOnlyCollection<Run> GetRuns(Logistics logistics)
private static Run? GetRun(Constant constant, string text)
{
List<Run> results = new();
int[] i;
Constant constant = new();
string allText = File.ReadAllText(logistics.ReportFullPath);
string[] segments = allText.Split(new string[] { constant.Finished }, StringSplitOptions.None);
if (segments.Length > 1)
Run? result;
int[] i = new int[] { 0 };
Header? header = Header.Get(text, constant, i);
if (header is null)
result = null;
else
{
Run run;
ReadOnlyCollection<string> runs = GetRuns(constant, allText);
foreach (string text in runs)
ReadOnlyCollection<string> groups = Wafer.GetGroups(text, constant, i);
if (groups.Count == 0)
result = null;
else
{
i = new int[] { 0 };
Header? header = Header.Get(text, constant, i);
if (header is null)
continue;
Grade? grade = Grade.Get(constant, groups);
if (grade is null)
result = null;
else
{
ReadOnlyCollection<string> groups = Wafer.GetGroups(text, constant, i);
if (groups.Count == 0)
continue;
ReadOnlyCollection<Wafer> wafers = Wafer.Get(constant, groups);
if (wafers.Count == 0 && grade.MeanThickness != "0")
result = null;
else
{
Grade? grade = Grade.Get(constant, groups);
if (grade is null)
continue;
else
{
ReadOnlyCollection<Wafer> wafers = Wafer.Get(constant, groups);
if (wafers.Count == 0 && grade.MeanThickness != "0")
continue;
else
{
run = new(header, wafers, grade);
results.Add(run);
}
}
}
result = new(text, header, wafers, grade);
}
}
}
return results.AsReadOnly();
return result;
}

internal static ReadOnlyCollection<Run> Get(long tickOffset, Logistics logistics, List<FileInfo> fileInfoCollection)
private static void WriteFile(Logistics logistics, List<FileInfo> fileInfoCollection, string extension, int lines, string[] allLines, string text)
{
ReadOnlyCollection<Run> results = GetRuns(logistics);
DateTime afterCheck = new(File.GetLastWriteTime(logistics.ReportFullPath).Ticks + tickOffset);
if (logistics.DateTimeFromSequence != afterCheck)
results = new(new List<Run>());
for (int i = 0; i < results.Count; i++)
string remainderText = string.Join(Environment.NewLine, allLines.Skip(lines));
FileInfo fileInfo = new($"{logistics.ReportFullPath}.{logistics.Sequence}{extension}");
File.WriteAllText(fileInfo.FullName, text);
File.SetLastWriteTime(fileInfo.FullName, logistics.DateTimeFromSequence);
fileInfoCollection.Add(fileInfo);
if (string.IsNullOrEmpty(remainderText))
File.Delete(logistics.ReportFullPath);
else
{
WriteJson(logistics, fileInfoCollection, i, results[i]);
WriteCommaSeparatedValues(logistics, i, results[i]);
Thread.Sleep(100);
File.WriteAllText(logistics.ReportFullPath, remainderText);
}
return results;
}

private static void WriteFailed(Logistics logistics, List<FileInfo> fileInfoCollection, string text)
{
FileInfo fileInfo = new($"{logistics.ReportFullPath}.{logistics.Sequence}.err");
File.WriteAllText(fileInfo.FullName, text);
fileInfoCollection.Add(fileInfo);
}

internal static Run? Get(long tickOffset, Logistics logistics, List<FileInfo> fileInfoCollection, Constant constant, string[] allLines)
{
Run? result;
int? lines = GetFirstRun(constant, allLines);
string extension = Path.GetExtension(logistics.ReportFullPath);
string? text = lines is null ? null : string.Join(Environment.NewLine, allLines.Take(lines.Value));
result = string.IsNullOrEmpty(text) ? null : GetRun(constant, text);
DateTime? afterCheck = lines is null ? null : new(File.GetLastWriteTime(logistics.ReportFullPath).Ticks + tickOffset);
if (lines is null || string.IsNullOrEmpty(text) || afterCheck is null || logistics.DateTimeFromSequence != afterCheck)
result = null;
else
{
WriteFile(logistics, fileInfoCollection, extension, lines.Value, allLines, text);
if (result is null)
{
result = null;
||||
WriteFailed(logistics, fileInfoCollection, text);
|
||||
}
|
||||
else
|
||||
{
|
||||
WriteJson(logistics, fileInfoCollection, extension, result);
|
||||
WriteCommaSeparatedValues(logistics, extension, result);
|
||||
}
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
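Editor's note: the reshaped Run.Get consumes one completed run per extract pass. GetFirstRun locates the line index where the next run's header begins after the current run's "finished" marker, GetRun parses only that first chunk, and WriteFile writes it to a sequence-stamped side file while the remainder is written back over the source file for the next pass. Below is a minimal, self-contained sketch of that splitting idea only; the marker strings, sample lines, and class name are illustrative assumptions, not the adaptation's actual Constant values or types.

using System;
using System.Linq;

internal static class FirstRunSplitSketch
{
    // Returns the index of the line that starts the next run, or null when no run has finished yet.
    private static int? GetFirstRun(string[] allLines, string batch, string started, string finished)
    {
        for (int i = 0; i < allLines.Length; i++)
        {
            if (!allLines[i].Contains(finished))
                continue;
            for (int j = i + 1; j < allLines.Length; j++)
            {
                if (allLines[j].StartsWith(batch) && allLines[j].Contains(started))
                    return j;
            }
            return allLines.Length; // a finished run with no follow-up header: take everything
        }
        return null;
    }

    internal static void Main()
    {
        string[] allLines =
        {
            "Batch A started",
            "...data...",
            "Batch A finished",
            "Batch B started",
            "...more data...",
        };
        int? lines = GetFirstRun(allLines, batch: "Batch", started: "started", finished: "finished");
        if (lines is null)
            return; // nothing complete yet; leave the source file untouched
        string firstRun = string.Join(Environment.NewLine, allLines.Take(lines.Value));
        string remainder = string.Join(Environment.NewLine, allLines.Skip(lines.Value));
        Console.WriteLine(firstRun);   // in the real handler this chunk goes to "<report>.<sequence><extension>"
        Console.WriteLine(remainder);  // and this is written back over the source file for the next pass
    }
}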
@ -178,7 +178,6 @@ public class Description : IDescription, Properties.IDescription
}

internal static string GetDateFormat() => "MM/dd/yyyy hh:mm:ss tt";

}

[JsonSourceGenerationOptions(WriteIndented = true, NumberHandling = JsonNumberHandling.AllowReadingFromString | JsonNumberHandling.WriteAsString)]

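Editor's note: GetDateFormat pins the descriptor timestamp format to a 12-hour clock with an AM/PM designator. A small standalone round-trip check (the sample timestamp is made up, and the class name is illustrative) shows how a parser would consume that format string:

using System;
using System.Globalization;

internal static class DateFormatSketch
{
    private const string DateFormat = "MM/dd/yyyy hh:mm:ss tt"; // mirrors GetDateFormat()

    internal static void Main()
    {
        string sample = "11/24/2021 08:39:00 AM"; // illustrative value only
        bool ok = DateTime.TryParseExact(sample, DateFormat, CultureInfo.InvariantCulture, DateTimeStyles.None, out DateTime parsed);
        // Prints "True: 11/24/2021 08:39:00 AM" when the sample matches the format exactly.
        Console.WriteLine($"{ok}: {parsed.ToString(DateFormat, CultureInfo.InvariantCulture)}");
    }
}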
@ -54,40 +54,12 @@ public class BIORAD4 : EAFLoggingUnitTesting
[TestMethod]
public void Production__v2_61_1__BIORAD4__txt()
{
string check = "*DataBioRad.txt";
string check = "DataBioRad.txt";
MethodBase methodBase = new StackFrame().GetMethod();
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Getting configuration"));
_ = AdaptationTesting.GetWriteConfigurationGetFileRead(methodBase, check, EAFLoggingUnitTesting.AdaptationTesting);
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Exit"));
}

#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Production__v2_61_1__BIORAD4__Stratus()
{
string check = "CassetteDataBioRad_*.txt";
MethodBase methodBase = new StackFrame().GetMethod();
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Getting configuration"));
_ = AdaptationTesting.GetWriteConfigurationGetFileRead(methodBase, check, EAFLoggingUnitTesting.AdaptationTesting);
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Exit"));
}

#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Production__v2_61_1__BIORAD4__QS408M()
{
string check = "DetailDataBioRad_*.txt";
MethodBase methodBase = new StackFrame().GetMethod();
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Getting configuration"));
string[] fileNameAndJson = EAFLoggingUnitTesting.AdaptationTesting.GetConfiguration(methodBase);
Assert.IsTrue(fileNameAndJson[1].Contains(check));
File.WriteAllText(fileNameAndJson[0], fileNameAndJson[1]);
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Exit"));
}

}
#endif
@ -54,40 +54,12 @@ public class BIORAD5 : EAFLoggingUnitTesting
[TestMethod]
public void Production__v2_61_1__BIORAD5__txt()
{
string check = "*DataBioRad.txt";
string check = "DataBioRad.txt";
MethodBase methodBase = new StackFrame().GetMethod();
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Getting configuration"));
_ = AdaptationTesting.GetWriteConfigurationGetFileRead(methodBase, check, EAFLoggingUnitTesting.AdaptationTesting);
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Exit"));
}

#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Production__v2_61_1__BIORAD5__Stratus()
{
string check = "CassetteDataBioRad_*.txt";
MethodBase methodBase = new StackFrame().GetMethod();
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Getting configuration"));
_ = AdaptationTesting.GetWriteConfigurationGetFileRead(methodBase, check, EAFLoggingUnitTesting.AdaptationTesting);
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Exit"));
}

#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Production__v2_61_1__BIORAD5__QS408M()
{
string check = "DetailDataBioRad_*.txt";
MethodBase methodBase = new StackFrame().GetMethod();
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Getting configuration"));
string[] fileNameAndJson = EAFLoggingUnitTesting.AdaptationTesting.GetConfiguration(methodBase);
Assert.IsTrue(fileNameAndJson[1].Contains(check));
File.WriteAllText(fileNameAndJson[0], fileNameAndJson[1]);
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Exit"));
}

}
#endif
@ -37,7 +37,7 @@ public class BIORAD4
public void Production__v2_61_1__BIORAD4__txt638763379187800166__Partial()
{
bool validatePDSF = false;
string check = "*DataBioRad.txt";
string check = "DataBioRad.txt";
_BIORAD4.Production__v2_61_1__BIORAD4__txt();
MethodBase methodBase = new StackFrame().GetMethod();
string[] variables = _BIORAD4.AdaptationTesting.GetVariables(methodBase, check, validatePDSF);
@ -46,14 +46,14 @@ public class BIORAD4
Shared.AdaptationTesting.UpdatePassDirectory(variables[2]);
}

#if DEBUG
#if (!DEBUG)
[Ignore]
#endif
[TestMethod]
public void Production__v2_61_1__BIORAD4__txt637730081979221342__Normal()
{
bool validatePDSF = false;
string check = "*DataBioRad.txt";
string check = "DataBioRad.txt";
_BIORAD4.Production__v2_61_1__BIORAD4__txt();
MethodBase methodBase = new StackFrame().GetMethod();
string[] variables = _BIORAD4.AdaptationTesting.GetVariables(methodBase, check, validatePDSF);
@ -69,7 +69,7 @@ public class BIORAD4
public void Production__v2_61_1__BIORAD4__txt637818036815840307__ProcessFailed()
{
bool validatePDSF = false;
string check = "*DataBioRad.txt";
string check = "DataBioRad.txt";
_BIORAD4.Production__v2_61_1__BIORAD4__txt();
MethodBase methodBase = new StackFrame().GetMethod();
string[] variables = _BIORAD4.AdaptationTesting.GetVariables(methodBase, check, validatePDSF);
@ -85,7 +85,7 @@ public class BIORAD4
public void Production__v2_61_1__BIORAD4__txt637746296480404920__Failure()
{
bool validatePDSF = false;
string check = "*DataBioRad.txt";
string check = "DataBioRad.txt";
_BIORAD4.Production__v2_61_1__BIORAD4__txt();
MethodBase methodBase = new StackFrame().GetMethod();
string[] variables = _BIORAD4.AdaptationTesting.GetVariables(methodBase, check, validatePDSF);
@ -101,7 +101,7 @@ public class BIORAD4
public void Production__v2_61_1__BIORAD4__txt638187028378748930__THigh()
{
bool validatePDSF = false;
string check = "*DataBioRad.txt";
string check = "DataBioRad.txt";
_BIORAD4.Production__v2_61_1__BIORAD4__txt();
MethodBase methodBase = new StackFrame().GetMethod();
string[] variables = _BIORAD4.AdaptationTesting.GetVariables(methodBase, check, validatePDSF);
@ -110,150 +110,5 @@ public class BIORAD4
Shared.AdaptationTesting.UpdatePassDirectory(variables[2]);
}

#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Production__v2_61_1__BIORAD4__Stratus() => _BIORAD4.Production__v2_61_1__BIORAD4__Stratus();

#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Production__v2_61_1__BIORAD4__Stratus637730081979221342__RDS()
{
bool validatePDSF = false;
string check = "CassetteDataBioRad_*.txt";
_BIORAD4.Production__v2_61_1__BIORAD4__Stratus();
MethodBase methodBase = new StackFrame().GetMethod();
string[] variables = _BIORAD4.AdaptationTesting.GetVariables(methodBase, check, validatePDSF);
IFileRead fileRead = _BIORAD4.AdaptationTesting.Get(methodBase, sourceFileLocation: variables[2], sourceFileFilter: variables[3], useCyclicalForDescription: false);
Logistics logistics = new(fileRead);
_ = Shared.AdaptationTesting.ReExtractCompareUpdatePassDirectory(variables, fileRead, logistics, validatePDSF);
}

#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Production__v2_61_1__BIORAD4__Stratus637730081979221342__1TRDS()
{
bool validatePDSF = false;
string check = "CassetteDataBioRad_*.txt";
_BIORAD4.Production__v2_61_1__BIORAD4__Stratus();
MethodBase methodBase = new StackFrame().GetMethod();
string[] variables = _BIORAD4.AdaptationTesting.GetVariables(methodBase, check, validatePDSF);
IFileRead fileRead = _BIORAD4.AdaptationTesting.Get(methodBase, sourceFileLocation: variables[2], sourceFileFilter: variables[3], useCyclicalForDescription: false);
Logistics logistics = new(fileRead);
_ = Shared.AdaptationTesting.ReExtractCompareUpdatePassDirectory(variables, fileRead, logistics, validatePDSF);
}

#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Production__v2_61_1__BIORAD4__Stratus637733400573863329__ReactorAndRDS()
{
DateTime dateTime;
bool validatePDSF = false;
string check = "CassetteDataBioRad_*.txt";
_BIORAD4.Production__v2_61_1__BIORAD4__Stratus();
MethodBase methodBase = new StackFrame().GetMethod();
string[] variables = _BIORAD4.AdaptationTesting.GetVariables(methodBase, check, validatePDSF);
IFileRead fileRead = _BIORAD4.AdaptationTesting.Get(methodBase, sourceFileLocation: variables[2], sourceFileFilter: variables[3], useCyclicalForDescription: false);
Logistics logistics = new(fileRead);
_ = Shared.AdaptationTesting.ReExtractCompareUpdatePassDirectory(variables, fileRead, logistics, validatePDSF);
dateTime = FileHandlers.Stratus.ProcessData.GetDateTime(logistics, string.Empty);
Assert.AreEqual(logistics.DateTimeFromSequence, dateTime);
dateTime = FileHandlers.Stratus.ProcessData.GetDateTime(logistics, "11/24/21 08:39");
Assert.AreEqual(logistics.DateTimeFromSequence, dateTime);
}

#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Production__v2_61_1__BIORAD4__Stratus637818036815840307__ProcessFailed()
{
bool validatePDSF = false;
string check = "CassetteDataBioRad_*.txt";
_BIORAD4.Production__v2_61_1__BIORAD4__Stratus();
MethodBase methodBase = new StackFrame().GetMethod();
string[] variables = _BIORAD4.AdaptationTesting.GetVariables(methodBase, check, validatePDSF);
IFileRead fileRead = _BIORAD4.AdaptationTesting.Get(methodBase, sourceFileLocation: variables[2], sourceFileFilter: variables[3], useCyclicalForDescription: false);
_ = fileRead.ReExtract();
Shared.AdaptationTesting.UpdatePassDirectory(variables[2]);
}

#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Production__v2_61_1__BIORAD4__QS408M() => _BIORAD4.Production__v2_61_1__BIORAD4__QS408M();

#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Production__v2_61_1__BIORAD4__Stratus638010209430211312__MissingRecipe()
{
bool validatePDSF = false;
string check = "CassetteDataBioRad_*.txt";
_BIORAD4.Production__v2_61_1__BIORAD4__Stratus();
MethodBase methodBase = new StackFrame().GetMethod();
string[] variables = _BIORAD4.AdaptationTesting.GetVariables(methodBase, check, validatePDSF);
IFileRead fileRead = _BIORAD4.AdaptationTesting.Get(methodBase, sourceFileLocation: variables[2], sourceFileFilter: variables[3], useCyclicalForDescription: false);
_ = fileRead.ReExtract();
Shared.AdaptationTesting.UpdatePassDirectory(variables[2]);
}

#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Production__v2_61_1__BIORAD4__Stratus638613924531133783__14()
{
bool validatePDSF = false;
string check = "CassetteDataBioRad_*.txt";
_BIORAD4.Production__v2_61_1__BIORAD4__Stratus();
MethodBase methodBase = new StackFrame().GetMethod();
string[] variables = _BIORAD4.AdaptationTesting.GetVariables(methodBase, check, validatePDSF);
IFileRead fileRead = _BIORAD4.AdaptationTesting.Get(methodBase, sourceFileLocation: variables[2], sourceFileFilter: variables[3], useCyclicalForDescription: false);
_ = fileRead.ReExtract();
Shared.AdaptationTesting.UpdatePassDirectory(variables[2]);
}

#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Production__v2_61_1__BIORAD4__Stratus638675062726795419__T()
{
bool validatePDSF = false;
string check = "CassetteDataBioRad_*.txt";
_BIORAD4.Production__v2_61_1__BIORAD4__Stratus();
MethodBase methodBase = new StackFrame().GetMethod();
string[] variables = _BIORAD4.AdaptationTesting.GetVariables(methodBase, check, validatePDSF);
IFileRead fileRead = _BIORAD4.AdaptationTesting.Get(methodBase, sourceFileLocation: variables[2], sourceFileFilter: variables[3], useCyclicalForDescription: false);
_ = fileRead.ReExtract();
Shared.AdaptationTesting.UpdatePassDirectory(variables[2]);
}

#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Production__v2_61_1__BIORAD4__Stratus638960114557838333__Zero()
{
bool validatePDSF = false;
string check = "CassetteDataBioRad_*.txt";
_BIORAD4.Production__v2_61_1__BIORAD4__Stratus();
MethodBase methodBase = new StackFrame().GetMethod();
string[] variables = _BIORAD4.AdaptationTesting.GetVariables(methodBase, check, validatePDSF);
IFileRead fileRead = _BIORAD4.AdaptationTesting.Get(methodBase, sourceFileLocation: variables[2], sourceFileFilter: variables[3], useCyclicalForDescription: false);
_ = fileRead.ReExtract();
Shared.AdaptationTesting.UpdatePassDirectory(variables[2]);
}

}
#endif
@ -44,7 +44,7 @@ public class BIORAD5
public void Production__v2_61_1__BIORAD5__txt638221788953480284__MorePoints()
{
bool validatePDSF = false;
string check = "*DataBioRad.txt";
string check = "DataBioRad.txt";
_BIORAD5.Production__v2_61_1__BIORAD5__txt();
MethodBase methodBase = new StackFrame().GetMethod();
string[] variables = _BIORAD5.AdaptationTesting.GetVariables(methodBase, check, validatePDSF);
@ -54,61 +54,5 @@ public class BIORAD5
NonThrowTryCatch();
}

#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Production__v2_61_1__BIORAD5__Stratus() => _BIORAD5.Production__v2_61_1__BIORAD5__Stratus();

#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Production__v2_61_1__BIORAD5__Stratus637738592809956919__ReactorAndRDS()
{
DateTime dateTime;
bool validatePDSF = false;
string check = "CassetteDataBioRad_*.txt";
_BIORAD5.Production__v2_61_1__BIORAD5__Stratus();
MethodBase methodBase = new StackFrame().GetMethod();
string[] variables = _BIORAD5.AdaptationTesting.GetVariables(methodBase, check, validatePDSF);
IFileRead fileRead = _BIORAD5.AdaptationTesting.Get(methodBase, sourceFileLocation: variables[2], sourceFileFilter: variables[3], useCyclicalForDescription: false);
Logistics logistics = new(fileRead);
_ = Shared.AdaptationTesting.ReExtractCompareUpdatePassDirectory(variables, fileRead, logistics, validatePDSF);
dateTime = FileHandlers.Stratus.ProcessData.GetDateTime(logistics, string.Empty);
Assert.AreEqual(logistics.DateTimeFromSequence, dateTime);
dateTime = FileHandlers.Stratus.ProcessData.GetDateTime(logistics, "11/24/21 08:39");
Assert.AreEqual(logistics.DateTimeFromSequence, dateTime);
NonThrowTryCatch();
}

#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Production__v2_61_1__BIORAD5__Stratus637805172599370243__Why()
{
DateTime dateTime;
bool validatePDSF = false;
string check = "CassetteDataBioRad_*.txt";
_BIORAD5.Production__v2_61_1__BIORAD5__Stratus();
MethodBase methodBase = new StackFrame().GetMethod();
string[] variables = _BIORAD5.AdaptationTesting.GetVariables(methodBase, check, validatePDSF);
IFileRead fileRead = _BIORAD5.AdaptationTesting.Get(methodBase, sourceFileLocation: variables[2], sourceFileFilter: variables[3], useCyclicalForDescription: false);
Logistics logistics = new(fileRead);
_ = Shared.AdaptationTesting.ReExtractCompareUpdatePassDirectory(variables, fileRead, logistics, validatePDSF);
dateTime = FileHandlers.Stratus.ProcessData.GetDateTime(logistics, string.Empty);
Assert.AreEqual(logistics.DateTimeFromSequence, dateTime);
dateTime = FileHandlers.Stratus.ProcessData.GetDateTime(logistics, "11/24/21 08:39");
Assert.AreEqual(logistics.DateTimeFromSequence, dateTime);
NonThrowTryCatch();
}

#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Production__v2_61_1__BIORAD5__QS408M() => _BIORAD5.Production__v2_61_1__BIORAD5__QS408M();

}
#endif
@ -1001,22 +1001,22 @@ public class AdaptationTesting : ISMTP
{
try
{
if (!string.IsNullOrEmpty(fileConnectorConfigurationTuple.Item2.ErrorTargetFileLocation))
if (!string.IsNullOrEmpty(fileConnectorConfigurationTuple.Item2.ErrorTargetFileLocation) && !fileConnectorConfigurationTuple.Item2.ErrorTargetFileLocation.Contains("10."))
{
if (!Directory.Exists(fileConnectorConfigurationTuple.Item2.ErrorTargetFileLocation))
_ = Directory.CreateDirectory(fileConnectorConfigurationTuple.Item2.ErrorTargetFileLocation);
}
if (!string.IsNullOrEmpty(fileConnectorConfigurationTuple.Item2.SourceFileLocation))
if (!string.IsNullOrEmpty(fileConnectorConfigurationTuple.Item2.SourceFileLocation) && !fileConnectorConfigurationTuple.Item2.SourceFileLocation.Contains("10."))
{
if (!Directory.Exists(fileConnectorConfigurationTuple.Item2.SourceFileLocation))
_ = Directory.CreateDirectory(fileConnectorConfigurationTuple.Item2.SourceFileLocation);
}
if (!string.IsNullOrEmpty(fileConnectorConfigurationTuple.Item2.TargetFileLocation))
if (!string.IsNullOrEmpty(fileConnectorConfigurationTuple.Item2.TargetFileLocation) && !fileConnectorConfigurationTuple.Item2.TargetFileLocation.Contains("10."))
{
if (!Directory.Exists(fileConnectorConfigurationTuple.Item2.TargetFileLocation))
_ = Directory.CreateDirectory(fileConnectorConfigurationTuple.Item2.TargetFileLocation);
}
if (!string.IsNullOrEmpty(fileConnectorConfigurationTuple.Item2.AlternateTargetFolder))
if (!string.IsNullOrEmpty(fileConnectorConfigurationTuple.Item2.AlternateTargetFolder) && !fileConnectorConfigurationTuple.Item2.AlternateTargetFolder.Contains("10."))
{
if (!Directory.Exists(fileConnectorConfigurationTuple.Item2.AlternateTargetFolder.Split('|')[0]))
_ = Directory.CreateDirectory(fileConnectorConfigurationTuple.Item2.AlternateTargetFolder.Split('|')[0]);

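Editor's note: the same guard is repeated for each configured location: skip directory creation whenever the path contains "10.", presumably so the tests never create folders on IP-addressed network shares. A hedged sketch of factoring that guard into one helper; the helper name is invented and this is not code from the commit:

using System.IO;

internal static class DirectoryGuardSketch
{
    // Hypothetical helper: create the directory only for non-empty paths that do not contain "10.".
    internal static void EnsureLocalDirectory(string? location)
    {
        if (string.IsNullOrEmpty(location) || location.Contains("10."))
            return;
        string path = location.Split('|')[0]; // AlternateTargetFolder may carry a '|'-separated list
        if (!Directory.Exists(path))
            _ = Directory.CreateDirectory(path);
    }
}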
@ -6,50 +6,65 @@

function getValue(tool, patternSize, recipe, pattern, json) {
let result;
if (tool == undefined || tool.length === 0 || patternSize == undefined || patternSize.length === 0 || recipe == undefined || recipe.length === 0 || pattern == undefined || pattern.length === 0 || json == undefined || json.length === 0)
result = 'A) Invalid input!';
let firstSegment = recipe.split(' ')[0].toUpperCase();
if (firstSegment === 'T-LOW')
result = '1';
else if (firstSegment === 'T-MID')
result = '1';
else if (firstSegment === 'T-HIGH')
result = '1';
else if (firstSegment === 'T_LOW')
result = '1';
else if (firstSegment === 'T_MID')
result = '1';
else if (firstSegment === 'T_HIGH')
result = '1';
else {
let parsed;
try {
parsed = JSON.parse(json);
} catch (error) {
parsed = null;
}
if (parsed == null)
result = 'B) Invalid input!';
else if (parsed.rds == undefined || parsed.rds.prodSpec == undefined || parsed.rds.prodSpec.recipesAndPatterns == undefined)
result = 'C) No Spec!';
if (tool == undefined || tool.length === 0 || patternSize == undefined || patternSize.length === 0 || recipe == undefined || recipe.length === 0 || pattern == undefined || pattern.length === 0 || json == undefined || json.length === 0)
result = 'A) Invalid input!';
else {
let toolMatches = [];
for (let index = 0; index < parsed.rds.prodSpec.recipesAndPatterns.length; index++) {
if (parsed.rds.prodSpec.recipesAndPatterns[index].tool === tool) {
toolMatches.push(parsed.rds.prodSpec.recipesAndPatterns[index]);
}
let parsed;
try {
parsed = JSON.parse(json);
} catch (error) {
parsed = null;
}
if (toolMatches == null || toolMatches.length === 0)
result = 'Tool [' + tool + '] not found in OI API results!';
if (parsed == null)
result = 'B) Invalid input!';
else if (parsed.rds == undefined || parsed.rds.prodSpec == undefined || parsed.rds.prodSpec.recipesAndPatterns == undefined)
result = 'C) No Spec!';
else {
let debug = '';
let matches = 0;
for (let index = 0; index < toolMatches.length; index++) {
debug += 'patternSize: ' + toolMatches[index].patternSize +
'; recipe: ' + toolMatches[index].recipe +
'; pattern: ' + toolMatches[index].pattern + ';~';
if (toolMatches[index].recipe.localeCompare(recipe, ['en-US'], { sensitivity: 'base' }) === 0) {
matches++;
let toolMatches = [];
for (let index = 0; index < parsed.rds.prodSpec.recipesAndPatterns.length; index++) {
if (parsed.rds.prodSpec.recipesAndPatterns[index].tool === tool) {
toolMatches.push(parsed.rds.prodSpec.recipesAndPatterns[index]);
}
}
if (matches > 0)
result = '1';
else
result = 'Value not matched~Run~patternSize: ' + patternSize + '; recipe: ' + recipe + '; pattern: ' + pattern + ';~API~' + debug;
if (toolMatches == null || toolMatches.length === 0)
result = 'Tool [' + tool + '] not found in OI API results!';
else {
let debug = '';
let matches = 0;
for (let index = 0; index < toolMatches.length; index++) {
debug += 'patternSize: ' + toolMatches[index].patternSize +
';~recipe: ' + toolMatches[index].recipe +
';~pattern: ' + toolMatches[index].pattern + ';~';
if (toolMatches[index].recipe.toLowerCase() == recipe.toLowerCase()) {
matches++;
}
}
if (matches > 0)
result = '1';
else
result = 'Value not matched~Run~patternSize: ' + patternSize + ';~recipe: ' + recipe + ';~pattern: ' + pattern + ';~API~' + debug;
}
}
}
}
return result;
}

getValue('FTIR', 1, '6in_Centerpoint', 'pattern', '{"rds":{"prodSpec":{"recipesAndPatterns":[{"recipe":"6in_Centerpoint","pattern":"Centerpoint","patternSize":1,"tool":"FTIR"}]}}}');
getValue('FTIR', 1, '6in_Centerpoint', 'pattern', '{"rds":{"prodSpec":{"recipesAndPatterns":[{"recipe":"6in_Centerpoint","pattern":"pattern","patternSize":1,"tool":"FTIR"}]}}}');

let json;
let tool;
@ -61,7 +76,7 @@ tool = 'FTIR'
patternSize = 1;
recipe = '6in_Centerpoint';
pattern = 'pattern';
json = '{"rds":{"prodSpec":{"recipesAndPatterns":[{"recipe":"6in_Centerpoint","pattern":"Centerpoint","patternSize":1,"tool":"FTIR"}]}}}';
json = '{"rds":{"prodSpec":{"recipesAndPatterns":[{"recipe":"6in_Centerpoint","pattern":"pattern","patternSize":1,"tool":"FTIR"}]}}}';
const testA = getValue(tool, patternSize, recipe, pattern, json);
if (testA !== '1')
throw 'Test A failed: ' + testA;
@ -112,4 +127,4 @@ if (testM !== 'C) No Spec!')
json = '{"rds":{"prodSpec":{"recipesAndPatterns":[]}}}';
const testN = getValue(tool, patternSize, recipe, pattern, json);
if (testN !== 'Tool [FTIR] not found in OI API results!')
throw 'Test E failed: ' + testN;
throw 'Test E failed: ' + testN;
@ -1,5 +1,5 @@
using Adaptation._Tests.Shared;
using Adaptation.FileHandlers.Stratus;
using Adaptation.FileHandlers.txt;
using Microsoft.Extensions.Logging;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using System;
@ -12,27 +12,29 @@ using System.Text;

namespace Adaptation._Tests.Static;

#pragma warning disable IDE1006

[TestClass]
public class Stratus : LoggingUnitTesting, IDisposable
public class txt : LoggingUnitTesting, IDisposable
{

#pragma warning disable CA2254
#pragma warning disable IDE0060

internal static Stratus LoggingUnitTesting { get; private set; }
internal static txt LoggingUnitTesting { get; private set; }

public Stratus() : base(testContext: null, declaringType: null)
public txt() : base(testContext: null, declaringType: null)
{
if (LoggingUnitTesting is null)
throw new Exception();
}

public Stratus(TestContext testContext) : base(testContext, new StackFrame().GetMethod().DeclaringType)
public txt(TestContext testContext) : base(testContext, new StackFrame().GetMethod().DeclaringType)
{
}

[ClassInitialize]
public static void ClassInitialize(TestContext testContext) => LoggingUnitTesting ??= new Stratus(testContext);
public static void ClassInitialize(TestContext testContext) => LoggingUnitTesting ??= new txt(testContext);

[ClassCleanup()]
public static void ClassCleanup()
@ -119,22 +119,14 @@
<Compile Include="Adaptation\FileHandlers\OpenInsightMetrologyViewerAttachments\FileRead.cs" />
<Compile Include="Adaptation\FileHandlers\Processed\FileRead.cs" />
<Compile Include="Adaptation\FileHandlers\SPaCe\FileRead.cs" />
<Compile Include="Adaptation\FileHandlers\Stratus\Constant.cs" />
<Compile Include="Adaptation\FileHandlers\Stratus\Description.cs" />
<Compile Include="Adaptation\FileHandlers\Stratus\Descriptor.cs" />
<Compile Include="Adaptation\FileHandlers\Stratus\Detail.cs" />
<Compile Include="Adaptation\FileHandlers\Stratus\FileRead.cs" />
<Compile Include="Adaptation\FileHandlers\Stratus\Grade.cs" />
<Compile Include="Adaptation\FileHandlers\Stratus\Header.cs" />
<Compile Include="Adaptation\FileHandlers\Stratus\Point.cs" />
<Compile Include="Adaptation\FileHandlers\Stratus\ProcessData.cs" />
<Compile Include="Adaptation\FileHandlers\Stratus\Row.cs" />
<Compile Include="Adaptation\FileHandlers\Stratus\Run.cs" />
<Compile Include="Adaptation\FileHandlers\Stratus\Wafer.cs" />
<Compile Include="Adaptation\FileHandlers\txt\Constant.cs" />
<Compile Include="Adaptation\FileHandlers\txt\Description.cs" />
<Compile Include="Adaptation\FileHandlers\txt\Descriptor.cs" />
<Compile Include="Adaptation\FileHandlers\txt\Detail.cs" />
<Compile Include="Adaptation\FileHandlers\txt\FileRead.cs" />
<Compile Include="Adaptation\FileHandlers\txt\Grade.cs" />
<Compile Include="Adaptation\FileHandlers\txt\Header.cs" />
<Compile Include="Adaptation\FileHandlers\txt\Point.cs" />
<Compile Include="Adaptation\FileHandlers\txt\ProcessData.cs" />
<Compile Include="Adaptation\FileHandlers\txt\Row.cs" />
<Compile Include="Adaptation\FileHandlers\txt\Run.cs" />