Compare commits

8 Commits

SHA1 Message Date
89d98f8b83 Update FileRead class to use hardcoded column names and indices for process data standard format mapping 2025-12-11 09:06:43 -07:00
d2687c288d Refactor FileRead and WSRequest classes to use unique sequence and unique ID generation methods for improved data handling 2025-12-10 11:42:19 -07:00
ce86094262 Update project configuration and dependencies; change target framework to net10.0 and adjust package versions 2025-11-17 13:39:19 -07:00
5f20c326a3 Refactor BIORAD and MET08THFTIR tests to use pdsf file handler
- Updated BIORAD2, BIORAD3, and MET08THFTIR test classes to replace references from QS408M to pdsf for date time processing.
- Modified project file to remove QS408M file handlers and include pdsf file handlers.
- Added new classes for pdsf file handling: Description, Descriptor, Detail, and ProcessData.
- Implemented methods in ProcessData for parsing and handling data specific to pdsf format.
2025-11-13 08:45:26 -07:00
f2d84c86a1 Update directory creation logic to exclude paths containing "10." 2025-11-10 18:21:50 -07:00
c9184b7e92 Infineon.EAF.Runtime v2.61.1 2025-10-21 09:29:36 -07:00
78c34c1d26 Enhance getValue function to handle specific recipe cases and improve input validation 2025-10-21 09:29:29 -07:00
11f393730d Add IndexOf and AttemptCounter properties to WSRequest and Description classes; implement getValue function in recipes-and-patterns.js 2025-10-13 17:46:57 -07:00
46 changed files with 1320 additions and 1173 deletions

View File

@ -107,6 +107,7 @@ dotnet_diagnostic.CA1864.severity = none # CA1864: To avoid double lookup, call
dotnet_diagnostic.CA1866.severity = none # CA1866: Use 'string.EndsWith(char)' instead of 'string.EndsWith(string)' when you have a string with a single char
dotnet_diagnostic.CA1869.severity = none # CA1869: Avoid creating a new 'JsonSerializerOptions' instance for every serialization operation. Cache and reuse instances instead.
dotnet_diagnostic.CA2254.severity = none # CA2254: The logging message template should not vary between calls to 'LoggerExtensions.LogInformation(ILogger, string?, params object?[])'
dotnet_diagnostic.CS0618.severity = none # 'member' is obsolete: 'description'
dotnet_diagnostic.IDE0001.severity = warning # IDE0001: Simplify name
dotnet_diagnostic.IDE0002.severity = warning # Simplify (member access) - System.Version.Equals("1", "2"); Version.Equals("1", "2");
dotnet_diagnostic.IDE0004.severity = warning # IDE0004: Cast is redundant.
@ -123,8 +124,9 @@ dotnet_diagnostic.IDE0300.severity = none # IDE0300: Collection initialization c
dotnet_diagnostic.IDE0301.severity = none #IDE0301: Collection initialization can be simplified
dotnet_diagnostic.IDE0305.severity = none # IDE0305: Collection initialization can be simplified
dotnet_diagnostic.MSTEST0015.severity = none # MSTEST0015: Test method {method} should not be ignored
dotnet_diagnostic.MSTEST0037.severity = error # MSTEST0037: Use proper 'Assert' methods
dotnet_diagnostic.MSTEST0037.severity = none # MSTEST0037: Use proper 'Assert' methods
dotnet_diagnostic.SYSLIB1045.severity = none # SYSLIB1045: diagnostics for regex source generation
dotnet_diagnostic.MSTEST0048.severity = none # MSTEST0048: Test method '{method}' should be attributed with 'DataTestMethod' when using 'DataRow' attributes
dotnet_naming_rule.abstract_method_should_be_pascal_case.severity = warning
dotnet_naming_rule.abstract_method_should_be_pascal_case.style = pascal_case
dotnet_naming_rule.abstract_method_should_be_pascal_case.symbols = abstract_method

View File

@ -1,10 +1,43 @@
{
"configurations": [
{
"mode": "debug",
"name": "Go launch file",
"program": "${file}",
"request": "launch",
"type": "go"
},
{
"name": "node Launch Current Opened File",
"program": "${file}",
"request": "launch",
"type": "node"
},
{
"cwd": "${workspaceFolder}",
"internalConsoleOptions": "neverOpen",
"name": "Debug File",
"program": "${file}",
"request": "launch",
"stopOnEntry": false,
"type": "bun",
"watchMode": false
},
{
"cwd": "${workspaceFolder}",
"internalConsoleOptions": "neverOpen",
"name": "Run File",
"noDebug": true,
"program": "${file}",
"request": "launch",
"type": "bun",
"watchMode": false
},
{
"name": ".NET Core Attach",
"type": "coreclr",
"processId": 32760,
"request": "attach",
"processId": 2688
"type": "coreclr"
}
]
}
}

View File

@ -216,7 +216,7 @@
{
"label": "File-Folder-Helper AOT s X Day-Helper-2025-03-20",
"type": "shell",
"command": "L:/DevOps/Mesa_FI/File-Folder-Helper/bin/Release/net8.0/win-x64/publish/File-Folder-Helper.exe",
"command": "L:/DevOps/Mesa_FI/File-Folder-Helper/bin/Release/net10.0/win-x64/publish/File-Folder-Helper.exe",
"args": [
"s",
"X",

View File

@ -23,7 +23,6 @@ public class CellInstanceConnectionName
nameof(OpenInsightMetrologyViewerAttachments) => new OpenInsightMetrologyViewerAttachments.FileRead(smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null),
nameof(pdsf) => new pdsf.FileRead(smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null),
nameof(Processed) => new Processed.FileRead(smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null),
nameof(QS408M) => new QS408M.FileRead(smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null),
nameof(SPaCe) => new SPaCe.FileRead(smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null),
_ => throw new Exception($"\"{cellInstanceConnectionName}\" not mapped")
};

View File

@ -97,9 +97,9 @@ public class FileRead : Shared.FileRead, IFileRead
private void CallbackInProcessCleared(string sourceArchiveFile, string traceDummyFile, string targetFileLocation, string monARessource, string inProcessDirectory, long sequence, bool warning)
{
const string site = "sjc";
const string site = "els";
string stateName = string.Concat("Dummy_", _EventName);
const string monInURL = "http://moninhttp.sjc.infineon.com/input/text";
const string monInURL = $"http://moninhttp.{site}.infineon.com/input/text";
MonIn monIn = MonIn.GetInstance(monInURL);
try
{

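A note on the monInURL change above: since C# 10 an interpolated string may itself be declared const as long as every placeholder is a constant string, so deriving the URL from the site constant keeps the two in sync. A minimal sketch, reusing the identifiers from the hunk above and otherwise illustrative only:

// Legal in C# 10+: a const interpolated string whose holes are all const strings.
const string site = "els";
const string monInURL = $"http://moninhttp.{site}.infineon.com/input/text";
// Changing `site` in one place now updates every value derived from it.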
View File

@ -85,9 +85,9 @@ public class FileRead : Shared.FileRead, IFileRead
throw new Exception(cellInstanceConnectionName);
if (!_IsDuplicator)
throw new Exception(cellInstanceConnectionName);
string processDataStandardFormatMappingOldColumnNames = GetPropertyValue(cellInstanceConnectionName, modelObjectParameters, "Process.Data.Standard.Format.Mapping.Old.Column.Names");
string processDataStandardFormatMappingNewColumnNames = GetPropertyValue(cellInstanceConnectionName, modelObjectParameters, "Process.Data.Standard.Format.Mapping.New.Column.Names");
string processDataStandardFormatMappingColumnIndices = GetPropertyValue(cellInstanceConnectionName, modelObjectParameters, "Process.Data.Standard.Format.Mapping.Column.Indices");
string processDataStandardFormatMappingOldColumnNames = "Time,A_LOGISTICS,B_LOGISTICS,Test,Count,Index,MesEntity,Date,Employee,Lot,PSN,Reactor,Recipe,Cassette,HeaderUniqueId,Layer,PassFail,Position,RDS,Title,UniqueId,Wafer,Zone,MeanThickness,RVThickness,StdDev,Thickness,Slot,ThicknessFourteen3mmEdgeMean,ThicknessFourteen3mmEdgePercent,ThicknessFourteen5mmEdgeMean,ThicknessFourteen5mmEdgePercent,ThicknessFourteenCenterMean,ThicknessFourteenCriticalPointsAverage,ThicknessFourteenCriticalPointsStdDev,ThicknessFourteenMeanFrom,Thickness01,Thickness02,Thickness03,Thickness04,Thickness05,Thickness06,Thickness07,Thickness08,Thickness09,Thickness10,Thickness11,Thickness12,Thickness13,Thickness14,EventId,IndexOf,AttemptCounter,ReactorMode,ExportControl,CastingVerification"; // GetPropertyValue(cellInstanceConnectionName, modelObjectParameters, "Process.Data.Standard.Format.Mapping.Old.Column.Names");
string processDataStandardFormatMappingNewColumnNames = "Time,A_LOGISTICS,B_LOGISTICS,Count,Sequence,MesEntity,Index,Title,Recipe,DateTime,Operator,Batch,Cassette,UsedLast,Wafer,Position,Thickness,WaferMeanThickness,StdDev,PassFail,Line,RadialVariationThickness,Slot,RDS,PSN,Reactor,Layer,Zone,Employee,InferredLot,Thickness 14 3mm Edge Mean,Thickness 14 3mm Edge % from R/2,Thickness 14 5mm Edge Mean,Thickness 14 5mm Edge % from R/2,Thickness 14 Center Mean,Thickness 14 Average,Thickness 14 Std Dev,Thickness 14 R 2/Mean,Thickness01,Thickness02,Thickness03,Thickness04,Thickness05,Thickness06,Thickness07,Thickness08,Thickness09,Thickness10,Thickness11,Thickness12,Thickness13,Thickness14,EventId,IndexOf,AttemptCounter,ReactorMode,ExportControl,CastingVerification"; // GetPropertyValue(cellInstanceConnectionName, modelObjectParameters, "Process.Data.Standard.Format.Mapping.New.Column.Names");
string processDataStandardFormatMappingColumnIndices = "0,1,2,52,3,6,5,9,10,14,24,25,8,12,-1,26,19,15,23,7,-1,14,27,17,21,18,16,22,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54,55,56,57"; // GetPropertyValue(cellInstanceConnectionName, modelObjectParameters, "Process.Data.Standard.Format.Mapping.Column.Indices");
_ProcessDataStandardFormatMapping = ProcessDataStandardFormatMapping.Get(processDataStandardFormatMappingOldColumnNames,
processDataStandardFormatMappingNewColumnNames,
processDataStandardFormatMappingColumnIndices);
@ -267,11 +267,12 @@ public class FileRead : Shared.FileRead, IFileRead
List<Post> results = new();
Post post;
long preWait;
long uniqueSequence = Logistics.GetUniqueSequence(_Logistics);
foreach (PreWith preWith in preWithCollection)
{
if (!_IsEAFHosted)
continue;
if (!_StaticRuns.TryGetValue(_Logistics.Sequence, out List<Shared.Metrology.WS.Results>? wsResults))
if (!_StaticRuns.TryGetValue(uniqueSequence, out List<Shared.Metrology.WS.Results>? wsResults))
wsResults = null;
if (processDataStandardFormat.InputPDSF is null)
File.Move(preWith.MatchingFile, preWith.CheckFile);

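The three hardcoded strings above replace the former Process.Data.Standard.Format.Mapping.* model-object parameters: the old column names, the new column names, and a list of source-column indices (-1 meaning the column has no counterpart in the old layout). A minimal sketch of how such a triple might be materialized, assuming a simple record type; the repository's actual ProcessDataStandardFormatMapping.Get may differ:

using System;

// Illustrative only - not the repository's ProcessDataStandardFormatMapping.
public sealed record ColumnMapping(string[] OldColumnNames, string[] NewColumnNames, int[] ColumnIndices)
{
    public static ColumnMapping Get(string oldColumnNames, string newColumnNames, string columnIndices)
    {
        string[] oldNames = oldColumnNames.Split(',');
        string[] newNames = newColumnNames.Split(',');
        // -1 marks a column with no counterpart in the old layout.
        int[] indices = Array.ConvertAll(columnIndices.Split(','), int.Parse);
        // Assumption: the indices pair one-for-one with the new column names.
        if (indices.Length != newNames.Length)
            throw new ArgumentException("Expected one source index per new column name.");
        return new ColumnMapping(oldNames, newNames, indices);
    }
}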
View File

@ -113,11 +113,11 @@ public class FileRead : Shared.FileRead, IFileRead
return results;
}
private static string GetLines(Logistics logistics, List<QS408M.Description> descriptions, bool isStratusQual)
private static string GetLines(Logistics logistics, List<pdsf.Description> descriptions, bool isStratusQual)
{
StringBuilder results = new();
char del = '\t';
QS408M.Description x = descriptions[0];
pdsf.Description x = descriptions[0];
if (isStratusQual)
{
_ = results.Append("Stratus_").Append(logistics.MID).Append('_').Append(logistics.DateTimeFromSequence.ToString("yyyyMMddhhmmssfff")).Append(del).
@ -157,7 +157,7 @@ public class FileRead : Shared.FileRead, IFileRead
return results.ToString();
}
private void SaveOpenInsightFile(string reportFullPath, DateTime dateTime, List<QS408M.Description> descriptions, Test[] tests)
private void SaveOpenInsightFile(string reportFullPath, DateTime dateTime, List<pdsf.Description> descriptions, Test[] tests)
{
string duplicateFile;
bool isDummyRun = false;
@ -175,6 +175,7 @@ public class FileRead : Shared.FileRead, IFileRead
{
long? subgroupId;
string fileName = Path.GetFileName(reportFullPath);
long uniqueSequence = Logistics.GetUniqueSequence(_Logistics);
long breakAfter = dateTime.AddSeconds(_BreakAfterSeconds).Ticks;
long preWait = _FileConnectorConfiguration?.FileHandleWaitTime is null ? dateTime.AddMilliseconds(1234).Ticks : dateTime.AddMilliseconds(_FileConnectorConfiguration.FileHandleWaitTime.Value).Ticks;
if (string.IsNullOrEmpty(descriptions[0].Reactor) || string.IsNullOrEmpty(descriptions[0].PSN))
@ -189,7 +190,7 @@ public class FileRead : Shared.FileRead, IFileRead
else
collection.Add(new(new ScopeInfo(tests[0], $"{subgroupId.Value} {_OpenInsightFilePattern}"), lines));
}
if (_StaticRuns.TryGetValue(_Logistics.Sequence, out List<WS.Results> wsResults))
if (_StaticRuns.TryGetValue(uniqueSequence, out List<WS.Results> wsResults))
{
if (wsResults is null || wsResults.Count != 1)
throw new NullReferenceException($"{nameof(wsResults)} {wsResults?.Count} != 1 {_Logistics.Sequence}!");
@ -219,7 +220,7 @@ public class FileRead : Shared.FileRead, IFileRead
_Logistics = new Logistics(reportFullPath, processDataStandardFormat);
SetFileParameterLotIDToLogisticsMID();
JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(reportFullPath, lines, processDataStandardFormat);
List<QS408M.Description> descriptions = QS408M.ProcessData.GetDescriptions(jsonElements);
List<pdsf.Description> descriptions = pdsf.ProcessData.GetDescriptions(jsonElements);
Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
SaveOpenInsightFile(reportFullPath, dateTime, descriptions, tests);

View File

@ -128,7 +128,7 @@ public class FromIQS
return string.Join(Environment.NewLine, results);
} // cSpell:restore
private static string GetCommandText(Logistics logistics, QS408M.Description description, string dateTime, long? subGroupId)
private static string GetCommandText(Logistics logistics, pdsf.Description description, string dateTime, long? subGroupId)
{ // cSpell:disable
List<string> results = new();
results.Add(" select iq.ev_count, iq.cl_count, iq.sl_count, iq.se_sgrp, iq.se_sgtm, iq.se_tsno, iq.td_test, iq.pr_name, iq.jd_name, iq.pl_name, iq.pd_name, iq.td_name, iq.se_val ");
@ -251,14 +251,14 @@ public class FromIQS
}
}
internal static (long?, int?, string) GetCommandText(string connectionString, Logistics logistics, QS408M.Description description, long breakAfter, long preWait)
internal static (long?, int?, string) GetCommandText(string connectionString, Logistics logistics, pdsf.Description description, long breakAfter, long preWait)
{
DateTime dateTime;
int? count = null;
string commandText;
long? result = null;
StringBuilder stringBuilder;
string dateFormat = QS408M.Description.GetDateFormat();
string dateFormat = pdsf.Description.GetDateFormat();
if (DateTime.TryParseExact(description.Date, dateFormat, CultureInfo.InvariantCulture, DateTimeStyles.None, out DateTime dateTimeParsed))
dateTime = dateTimeParsed;
else if (DateTime.TryParse(description.Date, CultureInfo.InvariantCulture, DateTimeStyles.None, out dateTimeParsed))
@ -325,7 +325,7 @@ public class FromIQS
return new(result, count, commandText);
}
internal static void Save(string openInsightApiECDirectory, Logistics logistics, string reportFullPath, QS408M.Description description, long? subGroupId, string weekOfYear)
internal static void Save(string openInsightApiECDirectory, Logistics logistics, string reportFullPath, pdsf.Description description, long? subGroupId, string weekOfYear)
{
string fileName = Path.GetFileName(reportFullPath);
string? ecPathRoot = Path.GetPathRoot(openInsightApiECDirectory);

View File

@ -110,9 +110,10 @@ public class FileRead : Shared.FileRead, IFileRead
return results;
}
private void SendData(string reportFullPath, DateTime dateTime, JsonElement[] jsonElements, List<QS408M.Description> descriptions)
private void SendData(string reportFullPath, DateTime dateTime, JsonElement[] jsonElements, List<pdsf.Description> descriptions)
{
string checkDirectory;
long uniqueSequence = Logistics.GetUniqueSequence(_Logistics);
WSRequest wsRequest = new(this, _Logistics, jsonElements, descriptions);
int weekOfYear = _Calendar.GetWeekOfYear(dateTime, CalendarWeekRule.FirstDay, DayOfWeek.Sunday);
string directory = Path.Combine(_OpenInsightMetrologyViewerFileShare, dateTime.Year.ToString(), $"WW{weekOfYear:00}");
@ -126,9 +127,9 @@ public class FileRead : Shared.FileRead, IFileRead
_Log.Debug(wsResults.HeaderId);
lock (_StaticRuns)
{
if (!_StaticRuns.ContainsKey(_Logistics.Sequence))
_StaticRuns.Add(_Logistics.Sequence, new());
_StaticRuns[_Logistics.Sequence].Add(wsResults);
if (!_StaticRuns.ContainsKey(uniqueSequence))
_StaticRuns.Add(uniqueSequence, new());
_StaticRuns[uniqueSequence].Add(wsResults);
}
checkDirectory = Path.Combine(directory, $"-{wsResults.HeaderId}");
if (!Directory.Exists(checkDirectory))
@ -144,7 +145,7 @@ public class FileRead : Shared.FileRead, IFileRead
_Logistics = new Logistics(reportFullPath, processDataStandardFormat);
SetFileParameterLotIDToLogisticsMID();
JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(reportFullPath, lines, processDataStandardFormat);
List<QS408M.Description> descriptions = QS408M.ProcessData.GetDescriptions(jsonElements);
List<pdsf.Description> descriptions = pdsf.ProcessData.GetDescriptions(jsonElements);
Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
SendData(reportFullPath, dateTime, jsonElements, descriptions);

View File

@ -15,10 +15,12 @@ public class WSRequest
//
public int Id { get; set; }
public string Batch { get; set; }
public string AttemptCounter { get; set; }
public string Cassette { get; set; }
public string CellName { get; set; }
public string Date { get; set; }
public string FilePath { get; set; }
public string IndexOf { get; set; }
public string Layer { get; set; }
public string MeanThickness { get; set; }
public string Op { get; set; }
@ -33,44 +35,46 @@ public class WSRequest
public string UniqueId { get; set; }
public string Wafer { get; set; }
public string Zone { get; set; }
public List<QS408M.Detail> Details { get; protected set; }
public List<pdsf.Detail> Details { get; protected set; }
[Obsolete("For json")] public WSRequest() { }
#pragma warning disable IDE0060
internal WSRequest(IFileRead fileRead, Logistics logistics, JsonElement[] jsonElements, List<QS408M.Description> descriptions, string processDataStandardFormat = null)
internal WSRequest(IFileRead fileRead, Logistics logistics, JsonElement[] jsonElements, List<pdsf.Description> descriptions, string processDataStandardFormat = null)
#pragma warning restore IDE0060
{
Id = -1;
FilePath = string.Empty;
CellName = logistics.MesEntity;
if (descriptions[0] is not QS408M.Description x)
if (descriptions[0] is not pdsf.Description x)
throw new Exception();
Details = new List<QS408M.Detail>();
Details = new List<pdsf.Detail>();
//Header
{
AttemptCounter = x.AttemptCounter;
Batch = x.Lot; // different name
Cassette = x.Cassette;
Date = x.Date;
Op = x.Employee; // different name
IndexOf = x.IndexOf;
Layer = x.Layer;
MeanThickness = x.MeanThickness;
PSN = x.PSN;
Op = x.Employee; // different name
PassFail = x.PassFail;
PSN = x.PSN;
RDS = x.RDS;
RVThickness = x.RVThickness;
Reactor = x.Reactor;
Recipe = x.Recipe;
RVThickness = x.RVThickness;
StdDev = x.StdDev;
Title = x.Title;
UniqueId = x.UniqueId;
Wafer = x.Wafer;
Zone = x.Zone;
}
QS408M.Detail detail;
foreach (QS408M.Description description in descriptions)
pdsf.Detail detail;
foreach (pdsf.Description description in descriptions)
{
detail = new QS408M.Detail
detail = new pdsf.Detail
{
HeaderUniqueId = description.HeaderUniqueId,
Position = description.Position,
@ -80,15 +84,16 @@ public class WSRequest
Details.Add(detail);
}
Date ??= logistics.DateTimeFromSequence.ToString();
UniqueId = $"{logistics.JobID}_{logistics.MID}_{logistics.DateTimeFromSequence:yyyyMMddHHmmssffff}";
string uniqueId = Logistics.GetUniqueId(logistics);
UniqueId = uniqueId;
for (int i = 0; i < Details.Count; i++)
{
Details[i].HeaderUniqueId = UniqueId;
Details[i].UniqueId = $"{logistics.JobID}_{logistics.MID}_{logistics.DateTimeFromSequence:yyyyMMddHHmmssffff}_Item-{i + 1}";
Details[i].UniqueId = $"{uniqueId}_Item-{i + 1}";
}
}
internal static long GetHeaderId(IFileRead fileRead, Logistics logistics, string openInsightMetrologyViewerAPI, string openInsightMetrologyViewerFileShare, int weekOfYear, WS.Results results, JsonElement[] jsonElements, List<QS408M.Description> descriptions)
internal static long GetHeaderId(IFileRead fileRead, Logistics logistics, string openInsightMetrologyViewerAPI, string openInsightMetrologyViewerFileShare, int weekOfYear, WS.Results results, JsonElement[] jsonElements, List<pdsf.Description> descriptions)
{
long result;
if (results is not null && results.HeaderId is not null)
@ -106,7 +111,7 @@ public class WSRequest
}
#pragma warning disable IDE0060
internal static void PostOpenInsightMetrologyViewerAttachments(IFileRead fileRead, Logistics logistics, string openInsightMetrologyViewerAPI, List<QS408M.Description> descriptions, string matchDirectory, WS.Results results, string headerIdDirectory)
internal static void PostOpenInsightMetrologyViewerAttachments(IFileRead fileRead, Logistics logistics, string openInsightMetrologyViewerAPI, List<pdsf.Description> descriptions, string matchDirectory, WS.Results results, string headerIdDirectory)
#pragma warning restore IDE0060
{
}

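Across these handlers the change replaces direct use of _Logistics.Sequence as the _StaticRuns key, and the inline "{JobID}_{MID}_{timestamp}" concatenation, with Logistics.GetUniqueSequence and Logistics.GetUniqueId. Those helpers are not part of this compare; the shape below is inferred from the call sites and is an assumption, not the shared Logistics implementation:

// Assumption: inferred from the call sites in this diff, not the shared Logistics source.
internal static string GetUniqueId(Logistics logistics) =>
    // Centralizes the composite previously built inline at each call site.
    $"{logistics.JobID}_{logistics.MID}_{logistics.DateTimeFromSequence:yyyyMMddHHmmssffff}";

internal static long GetUniqueSequence(Logistics logistics) =>
    // The real helper presumably disambiguates repeated runs of the same file
    // (e.g. by folding in an attempt counter); shown here simply as the base sequence.
    logistics.Sequence;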
View File

@ -135,14 +135,15 @@ public class FileRead : Shared.FileRead, IFileRead
return result;
}
private void PostOpenInsightMetrologyViewerAttachments(JsonElement[] jsonElements, List<QS408M.Description> descriptions)
private void PostOpenInsightMetrologyViewerAttachments(JsonElement[] jsonElements, List<pdsf.Description> descriptions)
{
Shared.Metrology.WS.Results? results;
string jobIdDirectory = Path.Combine(Path.GetDirectoryName(_FileConnectorConfiguration.AlternateTargetFolder) ?? throw new Exception(), _Logistics.JobID);
if (!Directory.Exists(jobIdDirectory))
_ = Directory.CreateDirectory(jobIdDirectory);
long uniqueSequence = Logistics.GetUniqueSequence(_Logistics);
string[] matchDirectories = GetInProcessDirectory(jobIdDirectory);
if (!_StaticRuns.TryGetValue(_Logistics.Sequence, out List<Shared.Metrology.WS.Results>? wsResults))
if (!_StaticRuns.TryGetValue(uniqueSequence, out List<Shared.Metrology.WS.Results>? wsResults))
results = null;
else
{
@ -168,7 +169,7 @@ public class FileRead : Shared.FileRead, IFileRead
_Logistics = new Logistics(reportFullPath, processDataStandardFormat);
SetFileParameterLotIDToLogisticsMID();
JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(reportFullPath, lines, processDataStandardFormat);
List<QS408M.Description> descriptions = QS408M.ProcessData.GetDescriptions(jsonElements);
List<pdsf.Description> descriptions = pdsf.ProcessData.GetDescriptions(jsonElements);
Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
PostOpenInsightMetrologyViewerAttachments(jsonElements, descriptions);

View File

@ -108,12 +108,13 @@ public class FileRead : Shared.FileRead, IFileRead
return results;
}
private void DirectoryMove(string reportFullPath, DateTime dateTime, JsonElement[] jsonElements, List<QS408M.Description> descriptions)
private void DirectoryMove(string reportFullPath, DateTime dateTime, JsonElement[] jsonElements, List<pdsf.Description> descriptions)
{
if (dateTime == DateTime.MinValue)
throw new ArgumentNullException(nameof(dateTime));
FileInfo fileInfo = new(reportFullPath);
string logisticsSequence = _Logistics.Sequence.ToString();
long uniqueSequence = Logistics.GetUniqueSequence(_Logistics);
string jobIdDirectory = Path.Combine(_JobIdParentDirectory, _Logistics.JobID);
if (!Directory.Exists(jobIdDirectory))
_ = Directory.CreateDirectory(jobIdDirectory);
@ -136,7 +137,7 @@ public class FileRead : Shared.FileRead, IFileRead
File.Copy(reportFullPath, Path.Combine(sequenceDirectory, Path.GetFileName(reportFullPath)), overwrite: true);
File.WriteAllText(jsonFileName, json);
lock (_StaticRuns)
_ = _StaticRuns.Remove(_Logistics.Sequence);
_ = _StaticRuns.Remove(uniqueSequence);
}
private static void MoveMatchingFile(string jobIdDirectory, string matchDirectory)
@ -171,7 +172,7 @@ public class FileRead : Shared.FileRead, IFileRead
_Logistics = new Logistics(reportFullPath, processDataStandardFormat);
SetFileParameterLotIDToLogisticsMID();
JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(reportFullPath, lines, processDataStandardFormat);
List<QS408M.Description> descriptions = QS408M.ProcessData.GetDescriptions(jsonElements);
List<pdsf.Description> descriptions = pdsf.ProcessData.GetDescriptions(jsonElements);
Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics), tests, jsonElements, new List<FileInfo>());
if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)

View File

@ -1,80 +0,0 @@
namespace Adaptation.FileHandlers.QS408M;
#nullable enable
public class Body
{
public Body(string waferMeanThickness, string stdDev, string passFail)
{
WaferMeanThickness = waferMeanThickness;
StdDev = stdDev;
PassFail = passFail;
}
public string WaferMeanThickness { get; }
public string StdDev { get; }
public string PassFail { get; }
private static bool IsNullOrWhiteSpace(string text)
{
bool flag;
int num = 0;
while (true)
{
if (num >= text.Length)
{
flag = true;
break;
}
else if (char.IsWhiteSpace(text[num]))
{
num++;
}
else
{
flag = false;
break;
}
}
return flag;
}
internal static string GetToken(string text, int[] i)
{
while (true)
{
if (i[0] >= text.Length || !IsNullOrWhiteSpace(text.Substring(i[0], 1)))
{
break;
}
i[0]++;
}
int num = i[0];
while (true)
{
if (num >= text.Length || IsNullOrWhiteSpace(text.Substring(num, 1)))
{
break;
}
num++;
}
string str = text.Substring(i[0], num - i[0]);
i[0] = num;
return str.Trim();
}
internal static Body? Get(string text, int[] i)
{
Body? result;
i[0] = Run.ScanPast(text, i, "mean thickness =");
string meanThickness = Run.GetBefore(text, i, ", std. dev =");
string stdDev = GetToken(text, i);
string passFail = Run.GetToEOL(text, i);
result = new(meanThickness,
stdDev,
passFail);
return result;
}
}

View File

@ -1,145 +0,0 @@
using Adaptation.Eaf.Management.ConfigurationData.CellAutomation;
using Adaptation.Ifx.Eaf.EquipmentConnector.File.Configuration;
using Adaptation.Shared;
using Adaptation.Shared.Methods;
using System;
using System.Collections.Generic;
using System.IO;
using System.Text.Json;
using System.Text.RegularExpressions;
namespace Adaptation.FileHandlers.QS408M;
public class FileRead : Shared.FileRead, IFileRead
{
private long? _TickOffset;
private readonly Header[] _LastHeader;
private readonly string _OriginalDataBioRad;
public FileRead(ISMTP smtp, Dictionary<string, string> fileParameter, string cellInstanceName, int? connectionCount, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList<ModelObjectParameterDefinition> modelObjectParameters, string equipmentDictionaryName, Dictionary<string, List<long>> dummyRuns, Dictionary<long, List<Shared.Metrology.WS.Results>> staticRuns, bool useCyclicalForDescription, bool isEAFHosted) :
base(new Description(), true, smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null)
{
_MinFileLength = 10;
_NullData = string.Empty;
_Logistics = new(this);
if (_FileParameter is null)
throw new Exception(cellInstanceConnectionName);
if (_ModelObjectParameterDefinitions is null)
throw new Exception(cellInstanceConnectionName);
if (_IsDuplicator)
throw new Exception(cellInstanceConnectionName);
_OriginalDataBioRad = "OriginalDataBioRad_";
_LastHeader = new Header[] { Header.Get() };
if (_IsEAFHosted)
NestExistingFiles(_FileConnectorConfiguration);
}
void IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, Exception exception) => Move(extractResults);
void IFileRead.WaitForThread() => WaitForThread(thread: null, threadExceptions: null);
string IFileRead.GetEventDescription()
{
string result = _Description.GetEventDescription();
return result;
}
List<string> IFileRead.GetHeaderNames()
{
List<string> results = _Description.GetHeaderNames();
return results;
}
string[] IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, string to, string from, string resolvedFileLocation, Exception exception)
{
string[] results = Move(extractResults, to, from, resolvedFileLocation, exception);
return results;
}
JsonProperty[] IFileRead.GetDefault()
{
JsonProperty[] results = _Description.GetDefault(this, _Logistics);
return results;
}
Dictionary<string, string> IFileRead.GetDisplayNamesJsonElement()
{
Dictionary<string, string> results = _Description.GetDisplayNamesJsonElement(this);
return results;
}
List<IDescription> IFileRead.GetDescriptions(IFileRead fileRead, List<Test> tests, IProcessData processData)
{
List<IDescription> results = _Description.GetDescriptions(fileRead, _Logistics, tests, processData);
return results;
}
Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.GetExtractResult(string reportFullPath, string eventName)
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
if (string.IsNullOrEmpty(eventName))
throw new Exception();
_ReportFullPath = reportFullPath;
DateTime dateTime = DateTime.Now;
results = GetExtractResult(reportFullPath, dateTime);
if (results.Item3 is null)
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(results.Item1, Array.Empty<Test>(), Array.Empty<JsonElement>(), results.Item4);
if (results.Item3.Length > 0 && _IsEAFHosted)
WritePDSF(this, results.Item3);
UpdateLastTicksDuration(DateTime.Now.Ticks - dateTime.Ticks);
return results;
}
Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.ReExtract()
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
List<string> headerNames = _Description.GetHeaderNames();
Dictionary<string, string> keyValuePairs = _Description.GetDisplayNamesJsonElement(this);
results = ReExtract(this, headerNames, keyValuePairs);
return results;
}
#nullable enable
private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results = new(string.Empty, Array.Empty<Test>(), Array.Empty<JsonElement>(), new List<FileInfo>());
_TickOffset ??= 0; // new FileInfo(reportFullPath).LastWriteTime.Ticks - dateTime.Ticks;
_Logistics = new Logistics(this, _TickOffset.Value, reportFullPath, useSplitForMID: true);
SetFileParameterLotIDToLogisticsMID();
if (_Logistics.FileInfo.Length < _MinFileLength)
results.Item4.Add(_Logistics.FileInfo);
else
{
Run? run = Run.Get(_Logistics, results.Item4, lastHeader: _LastHeader);
IProcessData iProcessData = new ProcessData(this, _Logistics, results.Item4, _OriginalDataBioRad, _TickOffset.Value, run);
if (run is null)
throw new Exception(string.Concat("A) No Data - ", dateTime.Ticks));
if (iProcessData is not ProcessData processData)
results = new(string.Concat("B) No Data - ", dateTime.Ticks), Array.Empty<Test>(), Array.Empty<JsonElement>(), results.Item4);
else
{
string mid;
if (!string.IsNullOrEmpty(processData.Wafer) && string.IsNullOrEmpty(processData.Reactor) && string.IsNullOrEmpty(processData.RDS) && string.IsNullOrEmpty(processData.PSN))
mid = processData.Wafer;
else if (!string.IsNullOrEmpty(processData.Employee) && string.IsNullOrEmpty(processData.Reactor) && string.IsNullOrEmpty(processData.RDS) && string.IsNullOrEmpty(processData.PSN))
mid = processData.Employee;
else
{
mid = string.Concat(processData.Reactor, "-", processData.RDS, "-", processData.PSN);
mid = Regex.Replace(mid, @"[\\,\/,\:,\*,\?,\"",\<,\>,\|]", "_").Split('\r')[0].Split('\n')[0];
}
SetFileParameterLotID(mid);
_Logistics.Update(mid, processData.Reactor);
if (iProcessData.Details.Count > 0)
results = iProcessData.GetResults(this, _Logistics, results.Item4);
else
results = new(string.Concat("C) No Data - ", dateTime.Ticks), Array.Empty<Test>(), Array.Empty<JsonElement>(), results.Item4);
_LastHeader[0] = run.Header;
}
}
return results;
}
}

View File

@ -1,36 +0,0 @@
namespace Adaptation.FileHandlers.QS408M;
#nullable enable
public class Footer
{
public Footer(string line, string radialVariationThickness, string slot)
{
Line = line;
RadialVariationThickness = radialVariationThickness;
Slot = slot;
}
public string Line { get; }
public string RadialVariationThickness { get; }
public string Slot { get; }
internal static Footer? Get(string text, int[] i)
{
Footer? result;
_ = Run.GetToEOL(text, i);
_ = Run.GetToEOL(text, i);
string line = Run.GetToEOL(text, i);
i[0] = Run.ScanPast(text, i, "thickness");
string radialVariationThickness = Run.GetToEOL(text, i);
_ = Run.GetToEOL(text, i);
i[0] = Run.ScanPast(text, i, "Slot:");
string slot = Run.GetBefore(text, i, ";");
result = new(line,
radialVariationThickness,
slot);
return result;
}
}

View File

@ -1,100 +0,0 @@
using System;
namespace Adaptation.FileHandlers.QS408M;
#nullable enable
public class Header
{
public Header(string title, string recipe, string dateTime, string @operator, string batch, string cassette, bool usedLast, string wafer)
{
Title = title;
Recipe = recipe;
DateTime = dateTime;
Operator = @operator;
Batch = batch;
Cassette = cassette;
UsedLast = usedLast;
Wafer = wafer;
}
public string Title { get; }
public string Recipe { get; }
public string DateTime { get; }
public string Operator { get; }
public string Batch { get; }
public string Cassette { get; }
public bool UsedLast { get; }
public string Wafer { get; }
internal static Header Get() =>
new(string.Empty,
string.Empty,
string.Empty,
string.Empty,
string.Empty,
string.Empty,
false,
string.Empty);
internal static Header? Get(Header[] lastHeader, string text, int[] i)
{
Header? result;
// occasionally there are multiple blocks of details, get the last one as earlier ones may be aborted runs.
int index = text.LastIndexOf("Bio-Rad");
if (index > -1)
text = text.Substring(index);
if (string.IsNullOrEmpty(text))
result = null;
else
{
bool usedLast;
const string twoSpaces = " ";
string title = Run.GetBefore(text, i, "Recipe:");
string recipeAndDateTime = Run.GetToEOL(text, i);
string recipe = recipeAndDateTime.Length < twoSpaces.Length ? recipeAndDateTime.Trim() : !recipeAndDateTime.Contains(twoSpaces) ? recipeAndDateTime.Substring(0, 25).Trim() : recipeAndDateTime.Split(new string[] { twoSpaces }, StringSplitOptions.None)[0].Trim();
string dateTime = recipeAndDateTime.Substring(recipe.Length).Trim();
if (dateTime.EndsWith("."))
dateTime = dateTime.Remove(dateTime.Length - 1, 1);
i[0] = Run.ScanPast(text, i, "operator:");
string @operator = Run.GetBefore(text, i, "batch:");
string batch = Run.GetToEOL(text, i);
i[0] = Run.ScanPast(text, i, "cassette:");
if (!text.Contains("cassette:"))
title = string.Empty;
string cassette = Run.GetBefore(text, i, "wafer:");
if (string.IsNullOrEmpty(batch))
{
i[0] = 0;
i[0] = Run.ScanPast(text, i, "wafer:");
}
string wafer = Run.GetToEOL(text, i);
_ = Run.GetToEOL(text, i);
_ = Run.GetToEOL(text, i);
if (string.IsNullOrEmpty(wafer))
throw new Exception("Wafer field is missing.");
if (!string.IsNullOrEmpty(title))
usedLast = false;
else
{
title = lastHeader[0].Title;
recipe = lastHeader[0].Recipe;
@operator = lastHeader[0].Operator;
batch = lastHeader[0].Batch;
cassette = lastHeader[0].Cassette;
usedLast = true;
}
result = new(title: title,
recipe: recipe,
dateTime: dateTime,
@operator: @operator,
batch: batch,
cassette: cassette,
usedLast: usedLast,
wafer: wafer);
}
return result;
}
}

View File

@ -1,368 +0,0 @@
using Adaptation.Shared;
using Adaptation.Shared.Methods;
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Data;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Text.Json;
using System.Text.Json.Serialization;
using System.Text.RegularExpressions;
namespace Adaptation.FileHandlers.QS408M;
public partial class ProcessData : IProcessData
{
private readonly List<object> _Details;
public string JobID { get; set; }
public string MesEntity { get; set; }
public string Batch { get; set; }
public string Cassette { get; set; }
public DateTime Date { get; set; }
public string Employee { get; set; }
public string Layer { get; set; }
public string MeanThickness { get; set; }
public string PSN { get; set; }
public string PassFail { get; set; }
public string RDS { get; set; }
public string RVThickness { get; set; }
public string Reactor { get; set; }
public string Recipe { get; set; }
public string StdDev { get; set; }
public string Title { get; set; }
public string UniqueId { get; set; }
public string Wafer { get; set; }
public string Zone { get; set; }
//
public string Slot { get; set; }
//
public string ThicknessFourteen3mmEdgeMean { get; set; }
public string ThicknessFourteen3mmEdgePercent { get; set; }
public string ThicknessFourteen5mmEdgeMean { get; set; }
public string ThicknessFourteen5mmEdgePercent { get; set; }
public string ThicknessFourteenCenterMean { get; set; }
public string ThicknessFourteenCriticalPointsAverage { get; set; }
public string ThicknessFourteenCriticalPointsStdDev { get; set; }
public string ThicknessFourteenMeanFrom { get; set; }
List<object> Shared.Properties.IProcessData.Details => _Details;
public ProcessData()
{
}
#nullable enable
internal ProcessData(IFileRead fileRead, Logistics logistics, List<FileInfo> fileInfoCollection, string originalDataBioRad, long tickOffset, Run? run)
{
if (fileRead is null)
throw new ArgumentNullException(nameof(fileRead));
JobID = logistics.JobID;
_Details = new List<object>();
List<string> moveFiles = new();
MesEntity = logistics.MesEntity;
string fileNameWithoutExtension = Path.GetFileNameWithoutExtension(logistics.ReportFullPath);
string directoryName = Path.GetDirectoryName(logistics.ReportFullPath) ?? throw new Exception();
moveFiles.AddRange(Directory.GetFiles(directoryName, string.Concat(originalDataBioRad, "*", logistics.Sequence, "*"), SearchOption.TopDirectoryOnly));
moveFiles.AddRange(Directory.GetFiles(directoryName, string.Concat(originalDataBioRad, "*", fileNameWithoutExtension.Split('_').Last(), "*"), SearchOption.TopDirectoryOnly));
foreach (string moveFile in moveFiles.Distinct())
fileInfoCollection.Add(new FileInfo(moveFile));
fileInfoCollection.Add(logistics.FileInfo);
if (run is not null)
SetValues(logistics, tickOffset, run);
}
string IProcessData.GetCurrentReactor(IFileRead fileRead, Logistics logistics, Dictionary<string, string> reactors) => throw new Exception(string.Concat("See ", nameof(ProcessData)));
Tuple<string, Test[], JsonElement[], List<FileInfo>> IProcessData.GetResults(IFileRead fileRead, Logistics logistics, List<FileInfo> fileInfoCollection)
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
List<Test> tests = new();
foreach (object item in _Details)
tests.Add(Test.BioRadQS408M);
List<IDescription> descriptions = fileRead.GetDescriptions(fileRead, tests, this);
if (tests.Count != descriptions.Count)
throw new Exception();
for (int i = 0; i < tests.Count; i++)
{
if (descriptions[i] is not Description description)
throw new Exception();
if (description.Test != (int)tests[i])
throw new Exception();
}
List<Description> fileReadDescriptions = (from l in descriptions select (Description)l).ToList();
string json = JsonSerializer.Serialize(fileReadDescriptions, fileReadDescriptions.GetType());
JsonElement[] jsonElements = JsonSerializer.Deserialize<JsonElement[]>(json) ?? throw new Exception();
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(logistics.Logistics1[0], tests.ToArray(), jsonElements, fileInfoCollection);
return results;
}
internal static DateTime GetDateTime(Logistics logistics, long tickOffset, string dateTimeText)
{
DateTime result;
string inputDateFormat = "ddd mmm dd HH:mm:ss yyyy";
if (dateTimeText.Length != inputDateFormat.Length)
result = logistics.DateTimeFromSequence;
else
{
if (!DateTime.TryParseExact(dateTimeText, inputDateFormat, CultureInfo.InvariantCulture, DateTimeStyles.None, out DateTime dateTimeParsed))
result = logistics.DateTimeFromSequence;
else
{
if (dateTimeParsed < logistics.DateTimeFromSequence.AddDays(1) && dateTimeParsed > logistics.DateTimeFromSequence.AddDays(-1))
result = new(dateTimeParsed.Ticks + tickOffset);
else
result = logistics.DateTimeFromSequence;
}
}
return result;
}
private static (string, string) GetReactorAndRDS(string defaultReactor, string defaultRDS, string text, string formattedText, string[] segments)
{
string rds;
string reactor;
if (string.IsNullOrEmpty(text) || segments.Length == 0 || string.IsNullOrEmpty(formattedText))
reactor = defaultReactor;
else
reactor = segments[0];
if (segments.Length <= 1 || !int.TryParse(segments[1], out int rdsValue) || rdsValue < 99)
rds = defaultRDS;
else
rds = segments[1];
if (reactor.Length > 3)
{
rds = reactor;
reactor = defaultReactor;
}
return new(reactor, rds);
}
private static (string, string) GetLayerAndPSN(string defaultLayer, string defaultPSN, string[] segments)
{
string psn;
string layer;
if (segments.Length <= 2)
{
psn = defaultPSN;
layer = defaultLayer;
}
else
{
string[] segmentsB = segments[2].Split('.');
psn = segmentsB[0];
if (segmentsB.Length <= 1)
layer = defaultLayer;
else
{
layer = segmentsB[1];
if (layer.Length > 1 && layer[0] == '0')
layer = layer.Substring(1);
}
}
return (layer, psn);
}
private static string GetZone(string[] segments)
{
string result;
if (segments.Length <= 3)
result = string.Empty;
else
{
result = segments[3];
if (result.Length > 1 && result[0] == '0')
result = result.Substring(1);
}
return result;
}
public static Descriptor GetDescriptor(string text)
{
Descriptor result;
string psn;
string rds;
string zone;
string layer;
string wafer;
string reactor;
string employee;
string defaultPSN = string.Empty;
string defaultRDS = string.Empty;
string defaultZone = string.Empty;
string defaultLayer = string.Empty;
string defaultReactor = string.Empty;
string defaultEmployee = string.Empty;
if (Regex.IsMatch(text, @"^[a-zA-z][0-9]{2,4}$"))
{
wafer = text.ToUpper();
psn = defaultPSN;
rds = defaultRDS;
zone = defaultZone;
layer = defaultLayer;
reactor = defaultReactor;
employee = defaultEmployee;
}
else if (string.IsNullOrEmpty(text) || (text.Length is 2 or 3 && Regex.IsMatch(text, "^[a-zA-z]{2,3}")))
{
wafer = text;
employee = text;
psn = defaultPSN;
rds = defaultRDS;
zone = defaultZone;
layer = defaultLayer;
reactor = defaultReactor;
}
else if (Regex.IsMatch(text, @"^[0-9]{2}[.][0-9]{1}[.]?[0-9]{0,1}"))
{
string[] segments = text.Split('.');
wafer = text;
psn = defaultPSN;
rds = defaultRDS;
layer = segments[1];
reactor = segments[0];
employee = defaultEmployee;
if (segments.Length <= 2)
zone = defaultZone;
else
zone = segments[2];
}
else
{
// Remove illegal characters \/:*?"<>| found in the Batch
wafer = Regex.Replace(text, @"[\\,\/,\:,\*,\?,\"",\<,\>,\|]", "_").Split('\r')[0].Split('\n')[0];
if (wafer.Length > 2 && wafer[0] == '1' && (wafer[1] == 'T' || wafer[1] == 't'))
wafer = wafer.Substring(2);
string[] segments = wafer.Split('-');
// bool hasRDS = Regex.IsMatch(wafer, "[-]?([QP][0-9]{4,}|[0-9]{5,})[-]?");
(reactor, rds) = GetReactorAndRDS(defaultReactor, defaultRDS, text, wafer, segments);
(layer, psn) = GetLayerAndPSN(defaultLayer, defaultPSN, segments);
zone = GetZone(segments);
if (segments.Length <= 4)
employee = defaultEmployee;
else
employee = segments[4];
}
result = new(employee, layer, psn, rds, reactor, wafer, zone);
return result;
}
private void SetValues(Logistics logistics, long tickOffset, Run run)
{
int slot = 0;
Detail detail;
int counter = 1;
List<Detail> details = new();
DateTime dateTime = GetDateTime(logistics, tickOffset, run.Header.DateTime);
bool isWaferSlot = !string.IsNullOrEmpty(run.Header.Wafer) && run.Header.Wafer.Length is 1 or 2 && int.TryParse(run.Header.Wafer, out slot) && slot < 27;
string batch = !isWaferSlot ? logistics.JobID : Regex.Replace(run.Header.Batch, @"[\\,\/,\:,\*,\?,\"",\<,\>,\|]", "_").Split('\r')[0].Split('\n')[0];
Descriptor descriptor = isWaferSlot ? GetDescriptor(run.Header.Batch) : GetDescriptor(run.Header.Wafer);
string wafer = isWaferSlot ? slot.ToString("00") : descriptor.Wafer;
string uniqueId = string.Concat(run.Header.Title, '_', wafer, '_', logistics.DateTimeFromSequence.ToString("yyyyMMddHHmmssffff"), '_', logistics.TotalSecondsSinceLastWriteTimeFromSequence);
Batch = batch;
Wafer = wafer;
Date = dateTime;
UniqueId = uniqueId;
PSN = descriptor.PSN;
RDS = descriptor.RDS;
Zone = descriptor.Zone;
JobID = logistics.JobID;
Layer = descriptor.Layer;
Reactor = descriptor.Reactor;
StdDev = run.Body.StdDev;
Title = run.Header.Title;
Recipe = run.Header.Recipe;
PassFail = run.Body.PassFail;
Cassette = run.Header.Cassette;
RVThickness = run.Footer.RadialVariationThickness;
Slot = string.IsNullOrEmpty(run.Footer.Slot) ? slot.ToString("00") : run.Footer.Slot;
Employee = string.IsNullOrEmpty(run.Header.Operator) ? Employee : run.Header.Operator;
MeanThickness = string.IsNullOrEmpty(run.Body.WaferMeanThickness) && run.Sites.Count == 1 ? run.Sites[0].Thickness : run.Body.WaferMeanThickness;
foreach (Site site in run.Sites)
{
detail = new()
{
Position = site.Position,
HeaderUniqueId = uniqueId,
Thickness = site.Thickness,
UniqueId = string.Concat(uniqueId, "_Point-", counter)
};
details.Add(detail);
counter++;
}
PopulateCalculated(details);
_Details.AddRange(details);
}
#nullable enable
internal static List<Description> GetDescriptions(JsonElement[] jsonElements)
{
List<Description> results = new();
Description? description;
foreach (JsonElement jsonElement in jsonElements)
{
if (jsonElement.ValueKind != JsonValueKind.Object)
throw new Exception();
description = JsonSerializer.Deserialize(jsonElement.ToString(), DescriptionSourceGenerationContext.Default.Description);
if (description is null)
continue;
results.Add(description);
}
return results;
}
private static double StandardDeviation(IEnumerable<double> values)
{
double avg = values.Average();
return Math.Sqrt(values.Average(v => Math.Pow(v - avg, 2)));
}
private void PopulateCalculated(List<Detail> details)
{
if (details.Count != 14)
{
ThicknessFourteenMeanFrom = string.Empty;
ThicknessFourteenCenterMean = string.Empty;
ThicknessFourteen3mmEdgeMean = string.Empty;
ThicknessFourteen5mmEdgeMean = string.Empty;
ThicknessFourteen3mmEdgePercent = string.Empty;
ThicknessFourteen5mmEdgePercent = string.Empty;
ThicknessFourteenCriticalPointsStdDev = string.Empty;
ThicknessFourteenCriticalPointsAverage = string.Empty;
}
else
{
List<double> thicknessPoints = new();
foreach (Detail bioRadDetail in details)
{
if (!double.TryParse(bioRadDetail.Thickness, out double thickness))
thicknessPoints.Add(0);
else
thicknessPoints.Add(thickness);
}
ReadOnlyCollection<double> thicknessTenPoints = new(thicknessPoints.Take(10).ToArray());
double thicknessFourteenCriticalPointsAverage = thicknessTenPoints.Average(); // 15
double thicknessFourteenCriticalPointsStdDev = StandardDeviation(thicknessTenPoints); // 16
double thicknessFourteenCenterMean = thicknessPoints[4]; // 17
double thicknessFourteenMeanFrom = new double[] { thicknessPoints[1], thicknessPoints[2], thicknessPoints[6], thicknessPoints[7] }.Average(); // 18
double thicknessFourteen5mmEdgeMean = new double[] { thicknessPoints[0], thicknessPoints[9] }.Average(); // 19
double thicknessFourteen3mmEdgeMean = new double[] { thicknessPoints[10], thicknessPoints[11], thicknessPoints[12], thicknessPoints[13] }.Average(); // 20
double thicknessFourteen5mmEdgePercent = (thicknessFourteen5mmEdgeMean - thicknessFourteenMeanFrom) / thicknessFourteenMeanFrom * 100; // 21
double thicknessFourteen3mmEdgePercent = (thicknessFourteen3mmEdgeMean - thicknessFourteenMeanFrom) / thicknessFourteenMeanFrom * 100; // 22
ThicknessFourteenCriticalPointsAverage = thicknessFourteenCriticalPointsAverage.ToString("0.0000000"); // 15
ThicknessFourteenCriticalPointsStdDev = thicknessFourteenCriticalPointsStdDev.ToString("0.0000000"); // 16
ThicknessFourteenCenterMean = thicknessFourteenCenterMean.ToString("0.0000000"); // 17
ThicknessFourteenMeanFrom = thicknessFourteenMeanFrom.ToString("0.0000000"); // 18
ThicknessFourteen5mmEdgeMean = thicknessFourteen5mmEdgeMean.ToString("0.0000000"); // 19
ThicknessFourteen3mmEdgeMean = thicknessFourteen3mmEdgeMean.ToString("0.0000000"); // 20
ThicknessFourteen5mmEdgePercent = thicknessFourteen5mmEdgePercent.ToString("0.0000000"); // 21
ThicknessFourteen3mmEdgePercent = thicknessFourteen3mmEdgePercent.ToString("0.0000000"); // 22
}
}
}

View File

@ -1,63 +0,0 @@
using System.Text.Json.Serialization;
namespace Adaptation.FileHandlers.QS408M;
#nullable enable
internal class Row
{
public Row(Run run, int i)
{
Index = i;
//
Title = run.Header.Title;
Recipe = run.Header.Recipe;
DateTime = run.Header.DateTime;
Operator = run.Header.Operator;
Batch = run.Header.Batch;
Cassette = run.Header.Cassette;
UsedLast = run.Header.UsedLast;
Wafer = run.Header.Wafer;
//
Position = run.Sites[i].Position;
Thickness = run.Sites[i].Thickness;
//
WaferMeanThickness = run.Body.WaferMeanThickness;
StdDev = run.Body.StdDev;
PassFail = run.Body.PassFail;
//
Line = run.Footer.Line;
RadialVariationThickness = run.Footer.RadialVariationThickness;
Slot = run.Footer.Slot;
}
public int Index { get; }
//
public string Title { get; }
public string Recipe { get; }
public string DateTime { get; }
public string Operator { get; }
public string Batch { get; }
public string Cassette { get; }
public bool UsedLast { get; }
public string Wafer { get; }
//
public string Position { get; }
public string Thickness { get; }
//
public string WaferMeanThickness { get; }
public string StdDev { get; }
public string PassFail { get; }
//
public string Line { get; }
public string RadialVariationThickness { get; }
public string Slot { get; }
}
[JsonSourceGenerationOptions(WriteIndented = true)]
[JsonSerializable(typeof(Row))]
internal partial class QS408MRowSourceGenerationContext : JsonSerializerContext
{
}

View File

@ -1,195 +0,0 @@
using Adaptation.Shared;
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.IO;
using System.Text;
using System.Text.Json;
using System.Text.Json.Serialization;
namespace Adaptation.FileHandlers.QS408M;
#nullable enable
internal class Run
{
public Run(Header header, ReadOnlyCollection<Site> sites, Body body, Footer footer)
{
Header = header;
Sites = sites;
Body = body;
Footer = footer;
}
public Header Header { get; }
public ReadOnlyCollection<Site> Sites { get; }
public Body Body { get; }
public Footer Footer { get; }
internal static string GetBefore(string text, int[] i, string search)
{
string str;
string str1;
int num = text.IndexOf(search, i[0]);
if (num <= -1)
{
str = text.Substring(i[0]);
i[0] = text.Length;
str1 = str.Trim();
}
else
{
str = text.Substring(i[0], num - i[0]);
i[0] = num + search.Length;
str1 = str.Trim();
}
return str1;
}
internal static string GetToEOL(string text, int[] i)
{
string result;
if (text.IndexOf("\n", i[0]) > -1)
result = GetBefore(text, i, "\n");
else
result = GetBefore(text, i, Environment.NewLine);
return result;
}
internal static int ScanPast(string text, int[] i, string search)
{
int result;
int num = text.IndexOf(search, i[0]);
if (num <= -1)
result = text.Length;
else
result = num + search.Length;
return result;
}
private static void WriteJson(Logistics logistics, List<FileInfo> fileInfoCollection, Run result)
{
FileInfo fileInfo = new($"{logistics.ReportFullPath}.run.json");
string json = JsonSerializer.Serialize(result, QS408MRunSourceGenerationContext.Default.Run);
File.WriteAllText(fileInfo.FullName, json);
File.SetLastWriteTime(fileInfo.FullName, logistics.DateTimeFromSequence);
fileInfoCollection.Add(fileInfo);
}
private static ReadOnlyCollection<string> GetLines(Logistics logistics, JsonElement[]? jsonElements)
{
List<string> results = new();
int columns = 0;
StringBuilder stringBuilder = new();
results.Add($"\"Count\",{jsonElements?.Length}");
results.Add($"\"{nameof(logistics.Sequence)}\",\"{logistics.Sequence}\"");
results.Add($"\"{nameof(logistics.MesEntity)}\",\"{logistics.MesEntity}\"");
string dateTimeFromSequence = logistics.DateTimeFromSequence.ToString("MM/dd/yyyy hh:mm:ss tt");
for (int i = 0; i < jsonElements?.Length;)
{
_ = stringBuilder.Append('"').Append(nameof(logistics.DateTimeFromSequence)).Append('"').Append(',');
foreach (JsonProperty jsonProperty in jsonElements[0].EnumerateObject())
{
columns += 1;
_ = stringBuilder.Append('"').Append(jsonProperty.Name).Append('"').Append(',');
}
break;
}
if (jsonElements?.Length != 0)
_ = stringBuilder.Remove(stringBuilder.Length - 1, 1);
results.Add(stringBuilder.ToString());
for (int i = 0; i < jsonElements?.Length; i++)
{
_ = stringBuilder.Clear();
_ = stringBuilder.Append('"').Append(dateTimeFromSequence).Append('"').Append(',');
foreach (JsonProperty jsonProperty in jsonElements[i].EnumerateObject())
{
if (jsonProperty.Value.ValueKind == JsonValueKind.Object)
_ = stringBuilder.Append(',');
else if (jsonProperty.Value.ValueKind != JsonValueKind.String)
_ = stringBuilder.Append(jsonProperty.Value).Append(',');
else
_ = stringBuilder.Append('"').Append(jsonProperty.Value).Append('"').Append(',');
}
_ = stringBuilder.Remove(stringBuilder.Length - 1, 1);
results.Add(stringBuilder.ToString());
}
return results.AsReadOnly();
}
private static void WriteCommaSeparatedValues(Logistics logistics, Run run)
{
List<Row> results = new();
Row row;
for (int i = 0; i < run.Sites.Count; i++)
{
row = new(run, i);
results.Add(row);
}
string json = JsonSerializer.Serialize(results);
JsonElement[]? jsonElements = JsonSerializer.Deserialize<JsonElement[]>(json);
ReadOnlyCollection<string> lines = GetLines(logistics, jsonElements);
File.WriteAllText($"{logistics.ReportFullPath}.csv", string.Join(Environment.NewLine, lines));
}
internal static Run? Get(Logistics logistics, List<FileInfo> fileInfoCollection, Header[] lastHeader)
{
Run? result;
int[] i = new int[] { 0 };
string text = File.ReadAllText(logistics.ReportFullPath);
Header? header = Header.Get(lastHeader, text, i);
if (header is null)
result = null;
else
{
ReadOnlyCollection<Site> sites = Site.Get(text, i);
if (sites.Count == 0)
result = null;
else
{
Body? body = Body.Get(text, i);
if (body is null)
result = null;
else
{
Footer? footer = Footer.Get(text, i);
if (footer is null)
result = null;
else
{
result = new(header, sites, body, footer);
if (logistics.JobID is not "BIORAD4" and not "BIORAD5")
{
WriteJson(logistics, fileInfoCollection, result);
WriteCommaSeparatedValues(logistics, result);
}
}
}
}
}
return result;
}
}
// Bio-Rad QS400MEPI Recipe: EP_8IN9PT Thu Apr 30 11:29:10 1970
// operator: J batch: BIORAD#2
// cassette: wafer: 52-589368-4445
// --------------------------------------------------------------------------------
// position thickness position thickness position thickness
// 1 45.735 2 46.536 3 46.742
// 4 46.015 5 46.648 6 45.366
// 7 46.263 8 46.512 9 46.373
// wafer mean thickness = 46.2433, std. dev = 0.4564 PASS
// ================================================================================
// Radial variation (computation B) PASS:
// thickness -2.7474
[JsonSourceGenerationOptions(WriteIndented = true)]
[JsonSerializable(typeof(Run))]
internal partial class QS408MRunSourceGenerationContext : JsonSerializerContext
{
}

View File

@ -1,36 +0,0 @@
using System.Collections.Generic;
using System.Collections.ObjectModel;
namespace Adaptation.FileHandlers.QS408M;
public class Site
{
public Site(string position, string thickness)
{
Position = position;
Thickness = thickness;
}
public string Position { get; }
public string Thickness { get; }
internal static ReadOnlyCollection<Site> Get(string text, int[] i)
{
List<Site> results = new();
Site site;
string thickness;
string position = Body.GetToken(text, i);
while (true)
{
if (string.IsNullOrEmpty(position) || !char.IsDigit(position[0]))
break;
thickness = Body.GetToken(text, i);
site = new(position, thickness);
results.Add(site);
position = Body.GetToken(text, i);
}
return results.AsReadOnly();
}
}

View File

@ -6,7 +6,7 @@ using System.Linq;
using System.Text.Json;
using System.Text.Json.Serialization;
namespace Adaptation.FileHandlers.QS408M;
namespace Adaptation.FileHandlers.pdsf;
public class Description : IDescription, Shared.Properties.IDescription
{
@ -30,6 +30,8 @@ public class Description : IDescription, Shared.Properties.IDescription
[JsonPropertyName("PSN")] public string PSN { get; set; }
[JsonPropertyName("Reactor")] public string Reactor { get; set; }
[JsonPropertyName("Recipe")] public string Recipe { get; set; }
[JsonPropertyName("IndexOf")] public string IndexOf { get; set; }
[JsonPropertyName("AttemptCounter")] public string AttemptCounter { get; set; }
//
[JsonPropertyName("Cassette")] public string Cassette { get; set; }
public string HeaderUniqueId { get; set; }
@ -229,6 +231,8 @@ public class Description : IDescription, Shared.Properties.IDescription
PSN = processData.PSN,
Reactor = processData.Reactor,
Recipe = processData.Recipe,
IndexOf = nameof(IndexOf),
AttemptCounter = nameof(AttemptCounter),
//
Cassette = processData.Cassette,
HeaderUniqueId = detail.HeaderUniqueId,
@ -282,7 +286,7 @@ public class Description : IDescription, Shared.Properties.IDescription
return result;
}
private Description GetDefault(IFileRead fileRead, Logistics logistics)
private static Description GetDefault(IFileRead fileRead, Logistics logistics)
{
Description result = new()
{
@ -305,6 +309,8 @@ public class Description : IDescription, Shared.Properties.IDescription
PSN = nameof(PSN),
Reactor = nameof(Reactor),
Recipe = nameof(Recipe),
IndexOf = nameof(IndexOf),
AttemptCounter = nameof(AttemptCounter),
//
Cassette = nameof(Cassette),
HeaderUniqueId = nameof(HeaderUniqueId),
@ -350,6 +356,15 @@ public class Description : IDescription, Shared.Properties.IDescription
return result;
}
internal static JsonElement GetDefaultJsonElement(IFileRead fileRead, Logistics logistics)
{
JsonElement result;
Description description = GetDefault(fileRead, logistics);
string json = JsonSerializer.Serialize(description, DescriptionSourceGenerationContext.Default.Description);
result = JsonSerializer.Deserialize<JsonElement>(json);
return result;
}
internal static string GetDateFormat() => "MM/dd/yyyy hh:mm:ss tt";
}

View File

@ -1,4 +1,4 @@
namespace Adaptation.FileHandlers.QS408M;
namespace Adaptation.FileHandlers.pdsf;
public class Descriptor
{

View File

@ -1,6 +1,6 @@
using System.Text.Json.Serialization;
namespace Adaptation.FileHandlers.QS408M;
namespace Adaptation.FileHandlers.pdsf;
public class Detail
{

View File

@ -7,6 +7,7 @@ using System;
using System.Collections.Generic;
using System.IO;
using System.Text.Json;
using System.Text.RegularExpressions;
namespace Adaptation.FileHandlers.pdsf;
@ -102,27 +103,43 @@ public class FileRead : Shared.FileRead, IFileRead
private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
string result;
JsonElement[] jsonElements;
Test[] tests = Array.Empty<Test>();
List<JsonElement> jsonElements = new();
List<FileInfo> fileInfoCollection = new();
ProcessDataStandardFormat processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath);
_Logistics = new Logistics(reportFullPath, processDataStandardFormat);
fileInfoCollection.Add(_Logistics.FileInfo);
SetFileParameterLotIDToLogisticsMID();
Run? run = Run.Get(_Logistics, processDataStandardFormat, fileInfoCollection, lastHeader: _LastHeader[0]);
if (run is null)
{
_LastHeader[0] = Header.Get();
jsonElements = Array.Empty<JsonElement>();
result = string.Concat("A) No Data - ", dateTime.Ticks);
results = new(result, tests, jsonElements, fileInfoCollection);
}
if (_Logistics.FileInfo.Length < _MinFileLength)
results = new(string.Empty, tests, jsonElements.ToArray(), fileInfoCollection);
else
{
_LastHeader[0] = run.Header;
result = string.Join(Environment.NewLine, _Logistics.Logistics1);
jsonElements = _IsEAFHosted ? Array.Empty<JsonElement>() : ProcessDataStandardFormat.GetArray(processDataStandardFormat);
results = new(result, tests, jsonElements, fileInfoCollection);
Run? run = Run.Get(_Logistics, processDataStandardFormat, fileInfoCollection, lastHeader: _LastHeader[0]);
if (run is null)
{
_LastHeader[0] = Header.Get();
results = new(string.Concat("A) No Data - ", dateTime.Ticks), tests, jsonElements.ToArray(), fileInfoCollection);
}
else
{
_LastHeader[0] = run.Header;
string mid;
bool isWaferSlot = !string.IsNullOrEmpty(run.Header.Wafer) && run.Header.Wafer.Length is 1 or 2 && int.TryParse(run.Header.Wafer, out int slot) && slot < 27;
Descriptor descriptor = isWaferSlot ? ProcessData.GetDescriptor(run.Header.Batch) : ProcessData.GetDescriptor(run.Header.Wafer);
if (!string.IsNullOrEmpty(descriptor.Wafer) && string.IsNullOrEmpty(descriptor.Reactor) && string.IsNullOrEmpty(descriptor.RDS) && string.IsNullOrEmpty(descriptor.PSN))
mid = descriptor.Wafer;
else if (!string.IsNullOrEmpty(descriptor.Employee) && string.IsNullOrEmpty(descriptor.Reactor) && string.IsNullOrEmpty(descriptor.RDS) && string.IsNullOrEmpty(descriptor.PSN))
mid = descriptor.Employee;
else
mid = string.Concat(descriptor.Reactor, "-", descriptor.RDS, "-", descriptor.PSN);
mid = Regex.Replace(mid, @"[\\,\/,\:,\*,\?,\"",\<,\>,\|]", "_").Split('\r')[0].Split('\n')[0];
SetFileParameterLotID(mid);
_Logistics.Update(mid, descriptor.Reactor);
JsonElement jsonElement = Description.GetDefaultJsonElement(this, _Logistics);
jsonElements.Add(jsonElement);
results = new(_Logistics.Logistics1[0], tests, jsonElements.ToArray(), fileInfoCollection);
_LastHeader[0] = run.Header;
}
}
return results;
}
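Sketch only (assumed values, not part of the commit): the MID precedence used in GetExtractResult above, shown in isolation; a wafer-only or employee-only descriptor is used as-is, otherwise the Reactor-RDS-PSN triple is joined and filesystem-illegal characters are replaced with underscores.
using System.Text.RegularExpressions;
internal static class MidSketch
{
    // Mirrors the precedence above; the values in the example call are hypothetical.
    internal static string GetMid(string wafer, string employee, string reactor, string rds, string psn)
    {
        string mid;
        if (!string.IsNullOrEmpty(wafer) && string.IsNullOrEmpty(reactor) && string.IsNullOrEmpty(rds) && string.IsNullOrEmpty(psn))
            mid = wafer;                                        // wafer-only descriptor
        else if (!string.IsNullOrEmpty(employee) && string.IsNullOrEmpty(reactor) && string.IsNullOrEmpty(rds) && string.IsNullOrEmpty(psn))
            mid = employee;                                     // employee-only descriptor
        else
            mid = string.Concat(reactor, "-", rds, "-", psn);   // full Reactor-RDS-PSN triple
        return Regex.Replace(mid, @"[\\,\/,\:,\*,\?,\"",\<,\>,\|]", "_").Split('\r')[0].Split('\n')[0];
    }
}
// MidSketch.GetMid("42-123456-4445.2", string.Empty, "42", "123456", "4445") returns "42-123456-4445"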

View File

@ -0,0 +1,228 @@
using Adaptation.Shared;
using Adaptation.Shared.Methods;
using System;
using System.Collections.Generic;
using System.Globalization;
using System.IO;
using System.Text.Json;
using System.Text.RegularExpressions;
namespace Adaptation.FileHandlers.pdsf;
public partial class ProcessData : IProcessData
{
public string JobID { get; set; }
public string MesEntity { get; set; }
public string Batch { get; set; }
public string Cassette { get; set; }
public DateTime Date { get; set; }
public string Employee { get; set; }
public string Layer { get; set; }
public string MeanThickness { get; set; }
public string PSN { get; set; }
public string PassFail { get; set; }
public string RDS { get; set; }
public string RVThickness { get; set; }
public string Reactor { get; set; }
public string Recipe { get; set; }
public string StdDev { get; set; }
public string Title { get; set; }
public string UniqueId { get; set; }
public string Wafer { get; set; }
public string Zone { get; set; }
//
public string Slot { get; set; }
//
public string ThicknessFourteen3mmEdgeMean { get; set; }
public string ThicknessFourteen3mmEdgePercent { get; set; }
public string ThicknessFourteen5mmEdgeMean { get; set; }
public string ThicknessFourteen5mmEdgePercent { get; set; }
public string ThicknessFourteenCenterMean { get; set; }
public string ThicknessFourteenCriticalPointsAverage { get; set; }
public string ThicknessFourteenCriticalPointsStdDev { get; set; }
public string ThicknessFourteenMeanFrom { get; set; }
List<object> Shared.Properties.IProcessData.Details { get; }
public ProcessData()
{
}
#nullable enable
string IProcessData.GetCurrentReactor(IFileRead fileRead, Logistics logistics, Dictionary<string, string> reactors) =>
throw new Exception(string.Concat("See ", nameof(ProcessData)));
Tuple<string, Test[], JsonElement[], List<FileInfo>> IProcessData.GetResults(IFileRead fileRead, Logistics logistics, List<FileInfo> fileInfoCollection) =>
throw new NotImplementedException();
internal static DateTime GetDateTime(Logistics logistics, long tickOffset, string dateTimeText)
{
DateTime result;
string inputDateFormat = "ddd MMM dd HH:mm:ss yyyy";
if (dateTimeText.Length != inputDateFormat.Length)
result = logistics.DateTimeFromSequence;
else
{
if (!DateTime.TryParseExact(dateTimeText, inputDateFormat, CultureInfo.InvariantCulture, DateTimeStyles.None, out DateTime dateTimeParsed))
result = logistics.DateTimeFromSequence;
else
{
if (dateTimeParsed < logistics.DateTimeFromSequence.AddDays(1) && dateTimeParsed > logistics.DateTimeFromSequence.AddDays(-1))
result = new(dateTimeParsed.Ticks + tickOffset);
else
result = logistics.DateTimeFromSequence;
}
}
return result;
}
private static (string, string) GetReactorAndRDS(string defaultReactor, string defaultRDS, string text, string formattedText, string[] segments)
{
string rds;
string reactor;
if (string.IsNullOrEmpty(text) || segments.Length == 0 || string.IsNullOrEmpty(formattedText))
reactor = defaultReactor;
else
reactor = segments[0];
if (segments.Length <= 1 || !int.TryParse(segments[1], out int rdsValue) || rdsValue < 99)
rds = defaultRDS;
else
rds = segments[1];
if (reactor.Length > 3)
{
rds = reactor;
reactor = defaultReactor;
}
return new(reactor, rds);
}
private static (string, string) GetLayerAndPSN(string defaultLayer, string defaultPSN, string[] segments)
{
string psn;
string layer;
if (segments.Length <= 2)
{
psn = defaultPSN;
layer = defaultLayer;
}
else
{
string[] segmentsB = segments[2].Split('.');
psn = segmentsB[0];
if (segmentsB.Length <= 1)
layer = defaultLayer;
else
{
layer = segmentsB[1];
if (layer.Length > 1 && layer[0] == '0')
layer = layer.Substring(1);
}
}
return (layer, psn);
}
private static string GetZone(string[] segments)
{
string result;
if (segments.Length <= 3)
result = string.Empty;
else
{
result = segments[3];
if (result.Length > 1 && result[0] == '0')
result = result.Substring(1);
}
return result;
}
public static Descriptor GetDescriptor(string text)
{
Descriptor result;
string psn;
string rds;
string zone;
string layer;
string wafer;
string reactor;
string employee;
string defaultPSN = string.Empty;
string defaultRDS = string.Empty;
string defaultZone = string.Empty;
string defaultLayer = string.Empty;
string defaultReactor = string.Empty;
string defaultEmployee = string.Empty;
if (Regex.IsMatch(text, @"^[a-zA-Z][0-9]{2,4}$"))
{
wafer = text.ToUpper();
psn = defaultPSN;
rds = defaultRDS;
zone = defaultZone;
layer = defaultLayer;
reactor = defaultReactor;
employee = defaultEmployee;
}
else if (string.IsNullOrEmpty(text) || (text.Length is 2 or 3 && Regex.IsMatch(text, "^[a-zA-Z]{2,3}")))
{
wafer = text;
employee = text;
psn = defaultPSN;
rds = defaultRDS;
zone = defaultZone;
layer = defaultLayer;
reactor = defaultReactor;
}
else if (Regex.IsMatch(text, @"^[0-9]{2}[.][0-9]{1}[.]?[0-9]{0,1}"))
{
string[] segments = text.Split('.');
wafer = text;
psn = defaultPSN;
rds = defaultRDS;
layer = segments[1];
reactor = segments[0];
employee = defaultEmployee;
if (segments.Length <= 2)
zone = defaultZone;
else
zone = segments[2];
}
else
{
// Remove illegal characters \/:*?"<>| found in the Batch
wafer = Regex.Replace(text, @"[\\,\/,\:,\*,\?,\"",\<,\>,\|]", "_").Split('\r')[0].Split('\n')[0];
if (wafer.Length > 2 && wafer[0] == '1' && (wafer[1] == 'T' || wafer[1] == 't'))
wafer = wafer.Substring(2);
string[] segments = wafer.Split('-');
// bool hasRDS = Regex.IsMatch(wafer, "[-]?([QP][0-9]{4,}|[0-9]{5,})[-]?");
(reactor, rds) = GetReactorAndRDS(defaultReactor, defaultRDS, text, wafer, segments);
(layer, psn) = GetLayerAndPSN(defaultLayer, defaultPSN, segments);
zone = GetZone(segments);
if (segments.Length <= 4)
employee = defaultEmployee;
else
employee = segments[4];
}
result = new(employee, layer, psn, rds, reactor, wafer, zone);
return result;
}
#nullable enable
internal static List<Description> GetDescriptions(JsonElement[] jsonElements)
{
List<Description> results = new();
Description? description;
foreach (JsonElement jsonElement in jsonElements)
{
if (jsonElement.ValueKind != JsonValueKind.Object)
throw new Exception();
description = JsonSerializer.Deserialize(jsonElement.ToString(), DescriptionSourceGenerationContext.Default.Description);
if (description is null)
continue;
results.Add(description);
}
return results;
}
}
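Sketch only: how the GetDescriptor branches above classify a few representative inputs (all input strings and expected values are hypothetical examples).
using Adaptation.FileHandlers.pdsf;
internal static class DescriptorSketch
{
    internal static void Examples()
    {
        // One letter followed by digits: treated as a wafer id and upper-cased.
        Descriptor byWafer = ProcessData.GetDescriptor("b1234");              // Wafer "B1234", everything else empty
        // Two or three letters: stored as both Wafer and Employee.
        Descriptor byInitials = ProcessData.GetDescriptor("JD");              // Wafer "JD", Employee "JD"
        // "reactor.layer.zone" shorthand.
        Descriptor byShorthand = ProcessData.GetDescriptor("42.1.2");         // Wafer "42.1.2", Reactor "42", Layer "1", Zone "2"
        // Full batch text: leading "1T" is stripped, then reactor-RDS-PSN.layer is split apart.
        Descriptor byBatch = ProcessData.GetDescriptor("1T42-123456-4445.2"); // Reactor "42", RDS "123456", PSN "4445", Layer "2"
    }
}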

View File

@ -226,9 +226,9 @@ public class MonIn : IMonIn, IDisposable
{
StringBuilder stringBuilder = new();
if (string.IsNullOrEmpty(subresource))
_ = stringBuilder.AppendFormat(_CultureInfo, "> {0} {1} \"{2}\" \"{3}\" {4} \n{5}", site.Trim(), timeStamp.HasValue ? GetDateTimeNowAsPosix(timeStamp.Value) : (object)"now", resource.Trim(), stateName.Trim(), state.Trim(), description.Trim());
_ = stringBuilder.AppendFormat(_CultureInfo, "> {0} {1} \"{2}\" \"{3}\" {4} \n{5}", site.Trim(), timeStamp.HasValue ? GetDateTimeNowAsPosix(timeStamp.Value) : "now", resource.Trim(), stateName.Trim(), state.Trim(), description.Trim());
else
_ = stringBuilder.AppendFormat(_CultureInfo, "> {0} {1} \"{2}\" \"{3}\" \"{4}\" {5} \n{6}", site.Trim(), timeStamp.HasValue ? GetDateTimeNowAsPosix(timeStamp.Value) : (object)"now", resource.Trim(), subresource.Trim(), stateName.Trim(), state.Trim(), description.Trim());
_ = stringBuilder.AppendFormat(_CultureInfo, "> {0} {1} \"{2}\" \"{3}\" \"{4}\" {5} \n{6}", site.Trim(), timeStamp.HasValue ? GetDateTimeNowAsPosix(timeStamp.Value) : "now", resource.Trim(), subresource.Trim(), stateName.Trim(), state.Trim(), description.Trim());
return stringBuilder.ToString();
}
@ -247,14 +247,14 @@ public class MonIn : IMonIn, IDisposable
if (string.IsNullOrEmpty(subresource))
{
if (unit.Equals(string.Empty) && !interval.HasValue)
_ = stringBuilder.AppendFormat(_CultureInfo, "> {0} {1} \"{2}\" \"{3}\" {4} \n{5}", site.Trim(), timeStamp.HasValue ? GetDateTimeNowAsPosix(timeStamp.Value) : (object)"now", resource.Trim(), performanceName.Trim(), value, description.Trim());
_ = stringBuilder.AppendFormat(_CultureInfo, "> {0} {1} \"{2}\" \"{3}\" {4} \n{5}", site.Trim(), timeStamp.HasValue ? GetDateTimeNowAsPosix(timeStamp.Value) : "now", resource.Trim(), performanceName.Trim(), value, description.Trim());
else
_ = stringBuilder.AppendFormat(_CultureInfo, "> {0} {1} \"{2}\" \"{3}\" {4} {5} {{interval={6}, unit={7}}}\n", site.Trim(), timeStamp.HasValue ? GetDateTimeNowAsPosix(timeStamp.Value) : (object)"now", resource.Trim(), performanceName.Trim(), value, description.Trim(), interval.HasValue ? interval.Value.ToString() : (object)string.Empty, unit.Trim());
_ = stringBuilder.AppendFormat(_CultureInfo, "> {0} {1} \"{2}\" \"{3}\" {4} {5} {{interval={6}, unit={7}}}\n", site.Trim(), timeStamp.HasValue ? GetDateTimeNowAsPosix(timeStamp.Value) : "now", resource.Trim(), performanceName.Trim(), value, description.Trim(), interval.HasValue ? interval.Value.ToString() : string.Empty, unit.Trim());
}
else if (unit.Equals(string.Empty) && !interval.HasValue)
_ = stringBuilder.AppendFormat(_CultureInfo, "> {0} {1} \"{2}\" \"{3}\" \"{4}\" {5} \n{6}", site.Trim(), timeStamp.HasValue ? GetDateTimeNowAsPosix(timeStamp.Value) : (object)"now", resource.Trim(), subresource.Trim(), performanceName.Trim(), value, description.Trim());
_ = stringBuilder.AppendFormat(_CultureInfo, "> {0} {1} \"{2}\" \"{3}\" \"{4}\" {5} \n{6}", site.Trim(), timeStamp.HasValue ? GetDateTimeNowAsPosix(timeStamp.Value) : "now", resource.Trim(), subresource.Trim(), performanceName.Trim(), value, description.Trim());
else
_ = stringBuilder.AppendFormat(_CultureInfo, "> {0} {1} \"{2}\" \"{3}\" \"{4}\" {5} {6} {{interval={7}, unit={8}}}\n", site.Trim(), timeStamp.HasValue ? GetDateTimeNowAsPosix(timeStamp.Value) : (object)"now", resource.Trim(), subresource.Trim(), performanceName.Trim(), value, description.Trim(), interval.HasValue ? interval.Value.ToString() : (object)string.Empty, unit.Trim());
_ = stringBuilder.AppendFormat(_CultureInfo, "> {0} {1} \"{2}\" \"{3}\" \"{4}\" {5} {6} {{interval={7}, unit={8}}}\n", site.Trim(), timeStamp.HasValue ? GetDateTimeNowAsPosix(timeStamp.Value) : "now", resource.Trim(), subresource.Trim(), performanceName.Trim(), value, description.Trim(), interval.HasValue ? interval.Value.ToString() : string.Empty, unit.Trim());
return stringBuilder.ToString();
}

View File

@ -10,7 +10,7 @@
<IsPackable>false</IsPackable>
<Nullable>disable</Nullable>
<RuntimeIdentifier>win-x64</RuntimeIdentifier>
<TargetFramework>net8.0</TargetFramework>
<TargetFramework>net10.0</TargetFramework>
</PropertyGroup>
<PropertyGroup>
<VSTestLogger>trx</VSTestLogger>
@ -35,8 +35,8 @@
<RuntimeHostConfigurationOption Include="AssemblyName" Value="MET08THFTIRQS408M" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="coverlet.collector" Version="6.0.3" />
<PackageReference Include="FFMpegCore" Version="5.1.0" />
<PackageReference Include="coverlet.collector" Version="6.0.4" />
<PackageReference Include="FFMpegCore" Version="5.4.0" />
<PackageReference Include="IKVM.AWT.WinForms" Version="7.2.4630.5"><NoWarn>NU1701</NoWarn></PackageReference>
<PackageReference Include="IKVM.OpenJDK.Core" Version="7.2.4630.5"><NoWarn>NU1701</NoWarn></PackageReference>
<PackageReference Include="IKVM.OpenJDK.Media" Version="7.2.4630.5"><NoWarn>NU1701</NoWarn></PackageReference>
@ -44,29 +44,28 @@
<PackageReference Include="IKVM.OpenJDK.Util" Version="7.2.4630.5"><NoWarn>NU1701</NoWarn></PackageReference>
<PackageReference Include="IKVM.OpenJDK.XML.API" Version="7.2.4630.5"><NoWarn>NU1701</NoWarn></PackageReference>
<PackageReference Include="IKVM.Runtime" Version="7.2.4630.5"><NoWarn>NU1701</NoWarn></PackageReference>
<PackageReference Include="Instances" Version="3.0.1" />
<PackageReference Include="log4net" Version="3.0.3"></PackageReference>
<PackageReference Include="Microsoft.Extensions.Configuration.Binder" Version="9.0.0" />
<PackageReference Include="Microsoft.Extensions.Configuration.CommandLine" Version="9.0.0" />
<PackageReference Include="Microsoft.Extensions.Configuration.EnvironmentVariables" Version="9.0.0" />
<PackageReference Include="Microsoft.Extensions.Configuration.FileExtensions" Version="9.0.0" />
<PackageReference Include="Microsoft.Extensions.Configuration.json" Version="9.0.0" />
<PackageReference Include="Microsoft.Extensions.Configuration" Version="9.0.0" />
<PackageReference Include="Microsoft.Extensions.DependencyInjection" Version="9.0.0" />
<PackageReference Include="Microsoft.Extensions.Logging.Console" Version="9.0.0" />
<PackageReference Include="Microsoft.Extensions.Logging.Debug" Version="9.0.0" />
<PackageReference Include="Microsoft.Extensions.Logging" Version="9.0.0" />
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.12.0" />
<PackageReference Include="Microsoft.Win32.SystemEvents" Version="9.0.0" />
<PackageReference Include="Instances" Version="3.0.2" />
<PackageReference Include="log4net" Version="3.2.0"></PackageReference>
<PackageReference Include="Microsoft.Extensions.Configuration.Binder" Version="10.0.0" />
<PackageReference Include="Microsoft.Extensions.Configuration.CommandLine" Version="10.0.0" />
<PackageReference Include="Microsoft.Extensions.Configuration.EnvironmentVariables" Version="10.0.0" />
<PackageReference Include="Microsoft.Extensions.Configuration.FileExtensions" Version="10.0.0" />
<PackageReference Include="Microsoft.Extensions.Configuration.json" Version="10.0.0" />
<PackageReference Include="Microsoft.Extensions.Configuration" Version="10.0.0" />
<PackageReference Include="Microsoft.Extensions.DependencyInjection" Version="10.0.0" />
<PackageReference Include="Microsoft.Extensions.Logging.Console" Version="10.0.0" />
<PackageReference Include="Microsoft.Extensions.Logging.Debug" Version="10.0.0" />
<PackageReference Include="Microsoft.Extensions.Logging" Version="10.0.0" />
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="18.0.1" />
<PackageReference Include="Microsoft.Win32.SystemEvents" Version="10.0.0" />
<PackageReference Include="MSTest.TestAdapter" Version="3.7.0" />
<PackageReference Include="MSTest.TestFramework" Version="3.7.0" />
<PackageReference Include="Pdfbox" Version="1.1.1"><NoWarn>NU1701</NoWarn></PackageReference>
<PackageReference Include="RoboSharp" Version="1.6.0" />
<PackageReference Include="System.Configuration.ConfigurationManager" Version="9.0.0" />
<PackageReference Include="System.Data.OleDb" Version="9.0.0" />
<PackageReference Include="System.Data.SqlClient" Version="4.8.6" />
<PackageReference Include="System.Drawing.Common" Version="9.0.0" />
<PackageReference Include="System.Text.Json" Version="9.0.0" />
<PackageReference Include="System.Configuration.ConfigurationManager" Version="10.0.0" />
<PackageReference Include="System.Data.OleDb" Version="10.0.0" />
<PackageReference Include="System.Data.SqlClient" Version="4.9.0" />
<PackageReference Include="System.Drawing.Common" Version="10.0.0" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="Infineon.Mesa.PDF.Text.Stripper" Version="4.8.0.2"><NoWarn>NU1701</NoWarn></PackageReference>

View File

@ -199,4 +199,31 @@ public class Logistics : ILogistics
_ProcessJobID = processJobID;
}
private static int GetCountFromFileName(Logistics logistics)
{
int result;
string[] segments = logistics.FileInfo.Name.Split('.');
string[] segmentsB = segments[0].Split('_');
string countFromFileName = segmentsB.Length < 3 ? "0" : segmentsB[2];
if (!int.TryParse(countFromFileName, out result))
result = 0;
return result;
}
internal static long GetUniqueSequence(Logistics logistics)
{
long result;
int countFromFileName = GetCountFromFileName(logistics);
result = (logistics.Sequence * 10) + countFromFileName;
return result;
}
internal static string GetUniqueId(Logistics logistics)
{
string result;
int countFromFileName = GetCountFromFileName(logistics);
result = $"{logistics.JobID}_{logistics.MID}_{logistics.DateTimeFromSequence:yyyyMMddHHmmssffff}_{countFromFileName}";
return result;
}
}
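Sketch only: how the new helpers derive the per-file counter from the file name (the file name and sequence value below are hypothetical).
internal static class UniqueSequenceSketch
{
    // Mirrors GetCountFromFileName/GetUniqueSequence above: the counter is the third
    // underscore-separated token before the extension, defaulting to 0.
    internal static long Compose(string fileName, long sequence)
    {
        string[] segments = fileName.Split('.');
        string[] segmentsB = segments[0].Split('_');
        string countFromFileName = segmentsB.Length < 3 ? "0" : segmentsB[2];
        if (!int.TryParse(countFromFileName, out int count))
            count = 0;
        return (sequence * 10) + count;
    }
}
// UniqueSequenceSketch.Compose("BIORAD2_ABC_3.pdsf", 1234567890) returns 12345678903,
// and GetUniqueId appends the same counter: "{JobID}_{MID}_{yyyyMMddHHmmssffff}_3".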

View File

@ -187,7 +187,7 @@ internal class ProcessDataStandardFormat
break;
}
}
string? linesOne = lines.Length > 0 && body.Count == 0 && columns.Count == 0 ? lines[1] : null;
string? linesOne = lines.Length > 1 && body.Count == 0 && columns.Count == 0 ? lines[1] : null;
logistics = GetLogistics(footer, linesOne: linesOne);
if (logistics.Count == 0)
sequence = null;
@ -235,7 +235,7 @@ internal class ProcessDataStandardFormat
const int columnsLine = 6;
FileInfo fileInfo = new(reportFullPath);
ProcessDataStandardFormat processDataStandardFormat = GetProcessDataStandardFormat(fileInfo.LastWriteTime, columnsLine, fileInfo.FullName, lines: null);
JsonElement[]? jsonElements = processDataStandardFormatMapping.OldColumnNames.Count != processDataStandardFormatMapping.ColumnIndices.Count ? null : GetFullArray(processDataStandardFormat);
JsonElement[]? jsonElements = processDataStandardFormatMapping.OldColumnNames.Count == 0 ? null : GetFullArray(processDataStandardFormat);
JsonProperty[]? jsonProperties = jsonElements is null || jsonElements.Length == 0 ? null : jsonElements[0].EnumerateObject().ToArray();
if (jsonElements is null || jsonProperties is null || jsonProperties.Length != processDataStandardFormatMapping.NewColumnNames.Count)
result = processDataStandardFormat;
@ -665,7 +665,7 @@ internal class ProcessDataStandardFormat
return results;
}
internal static string GetPDSFText(IFileRead fileRead, Logistics logistics, JsonElement[] jsonElements, string logisticsText)
internal static string GetPDSFText(IFileRead fileRead, Logistics logistics, JsonElement[] jsonElements, string? logisticsText)
{
string result;
if (jsonElements.Length == 0)
@ -850,33 +850,6 @@ internal class ProcessDataStandardFormat
return string.Concat(((int)searchFor).ToString().PadLeft(2, '0'), separator, searchFor.ToString().Replace("In", string.Concat(separator, "In")).Replace("Ex", string.Concat(separator, "Ex")));
}
private static int? TryGetPropertyIndex(JsonProperty[] jsonProperties, string propertyName)
{
int? result = null;
for (int i = 0; i < jsonProperties.Length; i++)
{
if (jsonProperties[i].Name != propertyName)
continue;
result = i;
break;
}
if (result is null)
{
for (int i = 0; i < jsonProperties.Length; i++)
{
if (jsonProperties[i].Name[0] != propertyName[0])
continue;
if (jsonProperties[i].Name.Length != propertyName.Length)
continue;
if (jsonProperties[i].Name != propertyName)
continue;
result = i;
break;
}
}
return result;
}
internal static string GetXml(ProcessDataStandardFormat processDataStandardFormat)
{
string result;

View File

@ -1,4 +1,4 @@
#if true
#if v2_60_0
using Adaptation._Tests.Shared;
using Microsoft.Extensions.Logging;
using Microsoft.VisualStudio.TestTools.UnitTesting;

View File

@ -1,4 +1,4 @@
#if true
#if v2_60_0
using Adaptation._Tests.Shared;
using Microsoft.Extensions.Logging;
using Microsoft.VisualStudio.TestTools.UnitTesting;

View File

@ -0,0 +1,76 @@
#if true
using Adaptation._Tests.Shared;
using Microsoft.Extensions.Logging;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using System;
using System.Diagnostics;
using System.IO;
using System.Reflection;
namespace Adaptation._Tests.CreateSelfDescription.Production.v2_61_1;
[TestClass]
public class BIORAD2 : EAFLoggingUnitTesting
{
#pragma warning disable CA2254
#pragma warning disable IDE0060
internal static string DummyRoot { get; private set; }
internal static BIORAD2 EAFLoggingUnitTesting { get; private set; }
static BIORAD2() => DummyRoot = @"\\mesfs.infineon.com\EC_Characterization_Si\Dummy";
public BIORAD2() : base(DummyRoot, testContext: null, declaringType: null, skipEquipmentDictionary: false)
{
if (EAFLoggingUnitTesting is null)
throw new Exception();
}
public BIORAD2(TestContext testContext) : base(DummyRoot, testContext, new StackFrame().GetMethod().DeclaringType, skipEquipmentDictionary: false)
{
}
[ClassInitialize]
public static void ClassInitialize(TestContext testContext)
{
EAFLoggingUnitTesting ??= new BIORAD2(testContext);
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(testContext.TestName, " - ClassInitialize"));
string[] fileNameAndText = EAFLoggingUnitTesting.AdaptationTesting.GetCSharpText(testContext.TestName);
File.WriteAllText(fileNameAndText[0], fileNameAndText[1]);
File.WriteAllText(fileNameAndText[2], fileNameAndText[3]);
}
[ClassCleanup()]
public static void ClassCleanup()
{
EAFLoggingUnitTesting?.Logger?.LogInformation("Cleanup");
EAFLoggingUnitTesting?.Dispose();
}
[Ignore]
[TestMethod]
public void Production__v2_61_1__BIORAD2__QS408M()
{
string check = "*.txt";
MethodBase methodBase = new StackFrame().GetMethod();
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Getting configuration"));
_ = AdaptationTesting.GetWriteConfigurationGetFileRead(methodBase, check, EAFLoggingUnitTesting.AdaptationTesting);
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Exit"));
}
#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Production__v2_61_1__BIORAD2__pdsf()
{
string check = "*EQP_*.pdsf";
MethodBase methodBase = new StackFrame().GetMethod();
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Getting configuration"));
_ = AdaptationTesting.GetWriteConfigurationGetFileRead(methodBase, check, EAFLoggingUnitTesting.AdaptationTesting);
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Exit"));
}
}
#endif

View File

@ -0,0 +1,63 @@
#if true
using Adaptation._Tests.Shared;
using Microsoft.Extensions.Logging;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using System;
using System.Diagnostics;
using System.IO;
using System.Reflection;
namespace Adaptation._Tests.CreateSelfDescription.Production.v2_61_1;
[TestClass]
public class BIORAD3 : EAFLoggingUnitTesting
{
#pragma warning disable CA2254
#pragma warning disable IDE0060
internal static string DummyRoot { get; private set; }
internal static BIORAD3 EAFLoggingUnitTesting { get; private set; }
static BIORAD3() => DummyRoot = @"\\mesfs.infineon.com\EC_Characterization_Si\Dummy";
public BIORAD3() : base(DummyRoot, testContext: null, declaringType: null, skipEquipmentDictionary: false)
{
if (EAFLoggingUnitTesting is null)
throw new Exception();
}
public BIORAD3(TestContext testContext) : base(DummyRoot, testContext, new StackFrame().GetMethod().DeclaringType, skipEquipmentDictionary: false)
{
}
[ClassInitialize]
public static void ClassInitialize(TestContext testContext)
{
EAFLoggingUnitTesting ??= new BIORAD3(testContext);
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(testContext.TestName, " - ClassInitialize"));
string[] fileNameAndText = EAFLoggingUnitTesting.AdaptationTesting.GetCSharpText(testContext.TestName);
File.WriteAllText(fileNameAndText[0], fileNameAndText[1]);
File.WriteAllText(fileNameAndText[2], fileNameAndText[3]);
}
[ClassCleanup()]
public static void ClassCleanup()
{
EAFLoggingUnitTesting?.Logger?.LogInformation("Cleanup");
EAFLoggingUnitTesting?.Dispose();
}
[Ignore]
[TestMethod]
public void Production__v2_61_1__BIORAD3__QS408M()
{
string check = "*.txt";
MethodBase methodBase = new StackFrame().GetMethod();
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Getting configuration"));
_ = AdaptationTesting.GetWriteConfigurationGetFileRead(methodBase, check, EAFLoggingUnitTesting.AdaptationTesting);
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Exit"));
}
}
#endif

View File

@ -0,0 +1,182 @@
#if true
using Adaptation._Tests.Shared;
using Microsoft.Extensions.Logging;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using System;
using System.Diagnostics;
using System.IO;
using System.Reflection;
namespace Adaptation._Tests.CreateSelfDescription.Production.v2_61_1;
[TestClass]
public class MET08THFTIRQS408M : EAFLoggingUnitTesting
{
#pragma warning disable CA2254
#pragma warning disable IDE0060
internal static string DummyRoot { get; private set; }
internal static MET08THFTIRQS408M EAFLoggingUnitTesting { get; private set; }
static MET08THFTIRQS408M() => DummyRoot = @"\\mesfs.infineon.com\EC_Characterization_Si\Dummy";
public MET08THFTIRQS408M() : base(DummyRoot, testContext: null, declaringType: null, skipEquipmentDictionary: false)
{
if (EAFLoggingUnitTesting is null)
throw new Exception();
}
public MET08THFTIRQS408M(TestContext testContext) : base(DummyRoot, testContext, new StackFrame().GetMethod().DeclaringType, skipEquipmentDictionary: false)
{
}
[ClassInitialize]
public static void ClassInitialize(TestContext testContext)
{
EAFLoggingUnitTesting ??= new MET08THFTIRQS408M(testContext);
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(testContext.TestName, " - ClassInitialize"));
string[] fileNameAndText = EAFLoggingUnitTesting.AdaptationTesting.GetCSharpText(testContext.TestName);
File.WriteAllText(fileNameAndText[0], fileNameAndText[1]);
File.WriteAllText(fileNameAndText[2], fileNameAndText[3]);
}
[ClassCleanup()]
public static void ClassCleanup()
{
EAFLoggingUnitTesting?.Logger?.LogInformation("Cleanup");
EAFLoggingUnitTesting?.Dispose();
}
#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Production__v2_61_1__MET08THFTIRQS408M__MoveMatchingFiles()
{
string check = "*.pdsf";
MethodBase methodBase = new StackFrame().GetMethod();
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Getting configuration"));
_ = AdaptationTesting.GetWriteConfigurationGetFileRead(methodBase, check, EAFLoggingUnitTesting.AdaptationTesting);
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Exit"));
}
#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Production__v2_61_1__MET08THFTIRQS408M__OpenInsightMetrologyViewer()
{
string check = "*.pdsf";
MethodBase methodBase = new StackFrame().GetMethod();
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Getting configuration"));
_ = AdaptationTesting.GetWriteConfigurationGetFileRead(methodBase, check, EAFLoggingUnitTesting.AdaptationTesting);
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Exit"));
}
#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Production__v2_61_1__MET08THFTIRQS408M__IQSSi()
{
string check = "*.pdsf";
MethodBase methodBase = new StackFrame().GetMethod();
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Getting configuration"));
_ = AdaptationTesting.GetWriteConfigurationGetFileRead(methodBase, check, EAFLoggingUnitTesting.AdaptationTesting);
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Exit"));
}
#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Production__v2_61_1__MET08THFTIRQS408M__OpenInsight()
{
string check = "*.pdsf";
MethodBase methodBase = new StackFrame().GetMethod();
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Getting configuration"));
_ = AdaptationTesting.GetWriteConfigurationGetFileRead(methodBase, check, EAFLoggingUnitTesting.AdaptationTesting);
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Exit"));
}
#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Production__v2_61_1__MET08THFTIRQS408M__OpenInsightMetrologyViewerAttachments()
{
string check = "*.pdsf";
MethodBase methodBase = new StackFrame().GetMethod();
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Getting configuration"));
_ = AdaptationTesting.GetWriteConfigurationGetFileRead(methodBase, check, EAFLoggingUnitTesting.AdaptationTesting);
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Exit"));
}
#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Production__v2_61_1__MET08THFTIRQS408M__APC()
{
string check = "*.pdsf";
MethodBase methodBase = new StackFrame().GetMethod();
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Getting configuration"));
_ = AdaptationTesting.GetWriteConfigurationGetFileRead(methodBase, check, EAFLoggingUnitTesting.AdaptationTesting);
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Exit"));
}
#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Production__v2_61_1__MET08THFTIRQS408M__SPaCe()
{
string check = "*.pdsf";
MethodBase methodBase = new StackFrame().GetMethod();
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Getting configuration"));
_ = AdaptationTesting.GetWriteConfigurationGetFileRead(methodBase, check, EAFLoggingUnitTesting.AdaptationTesting);
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Exit"));
}
#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Production__v2_61_1__MET08THFTIRQS408M__Processed()
{
string check = "*.pdsf";
MethodBase methodBase = new StackFrame().GetMethod();
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Getting configuration"));
_ = AdaptationTesting.GetWriteConfigurationGetFileRead(methodBase, check, EAFLoggingUnitTesting.AdaptationTesting);
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Exit"));
}
#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Production__v2_61_1__MET08THFTIRQS408M__Archive()
{
string check = "*.pdsf";
MethodBase methodBase = new StackFrame().GetMethod();
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Getting configuration"));
_ = AdaptationTesting.GetWriteConfigurationGetFileRead(methodBase, check, EAFLoggingUnitTesting.AdaptationTesting);
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Exit"));
}
#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Production__v2_61_1__MET08THFTIRQS408M__Dummy()
{
string check = "637406068146286000.zip";
MethodBase methodBase = new StackFrame().GetMethod();
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Getting configuration"));
_ = AdaptationTesting.GetWriteConfigurationGetFileRead(methodBase, check, EAFLoggingUnitTesting.AdaptationTesting);
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Exit"));
}
}
#endif

View File

@ -1,4 +1,4 @@
#if true
#if v2_60_0
using Adaptation._Tests.Shared;
using Adaptation.Shared;
using Adaptation.Shared.Methods;

View File

@ -1,4 +1,4 @@
#if true
#if v2_60_0
using Adaptation.Shared;
using Adaptation.Shared.Methods;
using Microsoft.VisualStudio.TestTools.UnitTesting;

View File

@ -1,4 +1,4 @@
#if true
#if v2_60_0
using Adaptation.Shared;
using Adaptation.Shared.Methods;
using Microsoft.VisualStudio.TestTools.UnitTesting;

View File

@ -0,0 +1,157 @@
#if true
using Adaptation._Tests.Shared;
using Adaptation.Shared;
using Adaptation.Shared.Methods;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using System;
using System.Diagnostics;
using System.Reflection;
namespace Adaptation._Tests.Extract.Production.v2_61_1;
[TestClass]
public class BIORAD2
{
#pragma warning disable CA2254
#pragma warning disable IDE0060
private static CreateSelfDescription.Production.v2_61_1.BIORAD2 _BIORAD2;
[ClassInitialize]
public static void ClassInitialize(TestContext testContext)
{
CreateSelfDescription.Production.v2_61_1.BIORAD2.ClassInitialize(testContext);
_BIORAD2 = CreateSelfDescription.Production.v2_61_1.BIORAD2.EAFLoggingUnitTesting;
}
private static void NonThrowTryCatch()
{
try
{ throw new Exception(); }
catch (Exception) { }
}
[Ignore]
[TestMethod]
public void Production__v2_61_1__BIORAD2__QS408M() => _BIORAD2.Production__v2_61_1__BIORAD2__QS408M();
[Ignore]
[TestMethod]
public void Production__v2_61_1__BIORAD2__QS408M638185231662401081__NinePoint()
{
DateTime dateTime;
string check = "*.txt";
bool validatePDSF = false;
_BIORAD2.Production__v2_61_1__BIORAD2__QS408M();
MethodBase methodBase = new StackFrame().GetMethod();
string[] variables = _BIORAD2.AdaptationTesting.GetVariables(methodBase, check, validatePDSF);
IFileRead fileRead = _BIORAD2.AdaptationTesting.Get(methodBase, sourceFileLocation: variables[2], sourceFileFilter: variables[3], useCyclicalForDescription: false);
Logistics logistics = new(fileRead);
dateTime = FileHandlers.pdsf.ProcessData.GetDateTime(logistics, tickOffset: 0, dateTimeText: string.Empty);
Assert.AreEqual(logistics.DateTimeFromSequence, dateTime);
dateTime = FileHandlers.pdsf.ProcessData.GetDateTime(logistics, tickOffset: 0, dateTimeText: "Tue Nov 10 12:03:56 1970");
Assert.AreEqual(logistics.DateTimeFromSequence, dateTime);
_ = AdaptationTesting.ReExtractCompareUpdatePassDirectory(variables, fileRead, logistics, validatePDSF);
NonThrowTryCatch();
}
[Ignore]
[TestMethod]
public void Production__v2_61_1__BIORAD2__QS408M638185291035612698__FourteenPoint()
{
DateTime dateTime;
string check = "*.txt";
bool validatePDSF = false;
_BIORAD2.Production__v2_61_1__BIORAD2__QS408M();
MethodBase methodBase = new StackFrame().GetMethod();
string[] variables = _BIORAD2.AdaptationTesting.GetVariables(methodBase, check, validatePDSF);
IFileRead fileRead = _BIORAD2.AdaptationTesting.Get(methodBase, sourceFileLocation: variables[2], sourceFileFilter: variables[3], useCyclicalForDescription: false);
Logistics logistics = new(fileRead);
dateTime = FileHandlers.pdsf.ProcessData.GetDateTime(logistics, tickOffset: 0, dateTimeText: string.Empty);
Assert.AreEqual(logistics.DateTimeFromSequence, dateTime);
dateTime = FileHandlers.pdsf.ProcessData.GetDateTime(logistics, tickOffset: 0, dateTimeText: "Tue Nov 10 12:03:56 1970");
Assert.AreEqual(logistics.DateTimeFromSequence, dateTime);
_ = AdaptationTesting.ReExtractCompareUpdatePassDirectory(variables, fileRead, logistics, validatePDSF);
NonThrowTryCatch();
}
[Ignore]
[TestMethod]
public void Production__v2_61_1__BIORAD2__QS408M638206292859940029__EpiPro()
{
DateTime dateTime;
string check = "*.txt";
bool validatePDSF = false;
_BIORAD2.Production__v2_61_1__BIORAD2__QS408M();
MethodBase methodBase = new StackFrame().GetMethod();
string[] variables = _BIORAD2.AdaptationTesting.GetVariables(methodBase, check, validatePDSF);
IFileRead fileRead = _BIORAD2.AdaptationTesting.Get(methodBase, sourceFileLocation: variables[2], sourceFileFilter: variables[3], useCyclicalForDescription: false);
Logistics logistics = new(fileRead);
dateTime = FileHandlers.pdsf.ProcessData.GetDateTime(logistics, tickOffset: 0, dateTimeText: string.Empty);
Assert.AreEqual(logistics.DateTimeFromSequence, dateTime);
dateTime = FileHandlers.pdsf.ProcessData.GetDateTime(logistics, tickOffset: 0, dateTimeText: "Tue Nov 10 12:03:56 1970");
Assert.AreEqual(logistics.DateTimeFromSequence, dateTime);
_ = AdaptationTesting.ReExtractCompareUpdatePassDirectory(variables, fileRead, logistics, validatePDSF);
NonThrowTryCatch();
}
[Ignore]
[TestMethod]
public void Production__v2_61_1__BIORAD2__QS408M638211310710952565__WMO()
{
DateTime dateTime;
string check = "*.txt";
bool validatePDSF = false;
_BIORAD2.Production__v2_61_1__BIORAD2__QS408M();
MethodBase methodBase = new StackFrame().GetMethod();
string[] variables = _BIORAD2.AdaptationTesting.GetVariables(methodBase, check, validatePDSF);
IFileRead fileRead = _BIORAD2.AdaptationTesting.Get(methodBase, sourceFileLocation: variables[2], sourceFileFilter: variables[3], useCyclicalForDescription: false);
Logistics logistics = new(fileRead);
dateTime = FileHandlers.pdsf.ProcessData.GetDateTime(logistics, tickOffset: 0, dateTimeText: string.Empty);
Assert.AreEqual(logistics.DateTimeFromSequence, dateTime);
dateTime = FileHandlers.pdsf.ProcessData.GetDateTime(logistics, tickOffset: 0, dateTimeText: "Tue Nov 10 12:03:56 1970");
Assert.AreEqual(logistics.DateTimeFromSequence, dateTime);
_ = AdaptationTesting.ReExtractCompareUpdatePassDirectory(variables, fileRead, logistics, validatePDSF);
NonThrowTryCatch();
}
[Ignore]
[TestMethod]
public void Production__v2_61_1__BIORAD2__QS408M638635290101315251__ADO126448()
{
DateTime dateTime;
string check = "*.txt";
bool validatePDSF = false;
_BIORAD2.Production__v2_61_1__BIORAD2__QS408M();
MethodBase methodBase = new StackFrame().GetMethod();
string[] variables = _BIORAD2.AdaptationTesting.GetVariables(methodBase, check, validatePDSF);
IFileRead fileRead = _BIORAD2.AdaptationTesting.Get(methodBase, sourceFileLocation: variables[2], sourceFileFilter: variables[3], useCyclicalForDescription: false);
Logistics logistics = new(fileRead);
dateTime = FileHandlers.pdsf.ProcessData.GetDateTime(logistics, tickOffset: 0, dateTimeText: string.Empty);
Assert.AreEqual(logistics.DateTimeFromSequence, dateTime);
dateTime = FileHandlers.pdsf.ProcessData.GetDateTime(logistics, tickOffset: 0, dateTimeText: "Tue Nov 10 12:03:56 1970");
Assert.AreEqual(logistics.DateTimeFromSequence, dateTime);
_ = AdaptationTesting.ReExtractCompareUpdatePassDirectory(variables, fileRead, logistics, validatePDSF);
NonThrowTryCatch();
}
#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Production__v2_61_1__BIORAD2__pdsf638925042012952259__Normal()
{
bool validatePDSF = false;
string check = "*EQP_*.pdsf";
_BIORAD2.Production__v2_61_1__BIORAD2__pdsf();
MethodBase methodBase = new StackFrame().GetMethod();
string[] variables = _BIORAD2.AdaptationTesting.GetVariables(methodBase, check, validatePDSF);
IFileRead fileRead = _BIORAD2.AdaptationTesting.Get(methodBase, sourceFileLocation: variables[2], sourceFileFilter: variables[3], useCyclicalForDescription: false);
Logistics logistics = new(fileRead);
_ = AdaptationTesting.ReExtractCompareUpdatePassDirectory(variables, fileRead, logistics, validatePDSF);
NonThrowTryCatch();
}
}
#endif

View File

@ -0,0 +1,79 @@
#if true
using Adaptation.Shared;
using Adaptation.Shared.Methods;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using System;
using System.Diagnostics;
using System.Reflection;
namespace Adaptation._Tests.Extract.Production.v2_61_1;
[TestClass]
public class BIORAD3
{
#pragma warning disable CA2254
#pragma warning disable IDE0060
private static CreateSelfDescription.Production.v2_61_1.BIORAD3 _BIORAD3;
[ClassInitialize]
public static void ClassInitialize(TestContext testContext)
{
CreateSelfDescription.Production.v2_61_1.BIORAD3.ClassInitialize(testContext);
_BIORAD3 = CreateSelfDescription.Production.v2_61_1.BIORAD3.EAFLoggingUnitTesting;
}
private static void NonThrowTryCatch()
{
try
{ throw new Exception(); }
catch (Exception) { }
}
[Ignore]
[TestMethod]
public void Production__v2_61_1__BIORAD3__QS408M() => _BIORAD3.Production__v2_61_1__BIORAD3__QS408M();
[Ignore]
[TestMethod]
public void Production__v2_61_1__BIORAD3__QS408M637406016892454000__ReactorAndRDS()
{
DateTime dateTime;
string check = "*.txt";
bool validatePDSF = false;
_BIORAD3.Production__v2_61_1__BIORAD3__QS408M();
MethodBase methodBase = new StackFrame().GetMethod();
string[] variables = _BIORAD3.AdaptationTesting.GetVariables(methodBase, check);
IFileRead fileRead = _BIORAD3.AdaptationTesting.Get(methodBase, sourceFileLocation: variables[2], sourceFileFilter: variables[3], useCyclicalForDescription: false);
Logistics logistics = new(fileRead);
dateTime = FileHandlers.pdsf.ProcessData.GetDateTime(logistics, tickOffset: 0, dateTimeText: string.Empty);
Assert.AreEqual(logistics.DateTimeFromSequence, dateTime);
dateTime = FileHandlers.pdsf.ProcessData.GetDateTime(logistics, tickOffset: 0, dateTimeText: "Tue Nov 10 12:03:56 1970");
Assert.AreEqual(logistics.DateTimeFromSequence, dateTime);
_ = Shared.AdaptationTesting.ReExtractCompareUpdatePassDirectory(variables, fileRead, logistics, validatePDSF);
NonThrowTryCatch();
}
[Ignore]
[TestMethod]
public void Production__v2_61_1__BIORAD3__QS408M638227775101723135__Error()
{
DateTime dateTime;
string check = "*.txt";
bool validatePDSF = false;
_BIORAD3.Production__v2_61_1__BIORAD3__QS408M();
MethodBase methodBase = new StackFrame().GetMethod();
string[] variables = _BIORAD3.AdaptationTesting.GetVariables(methodBase, check, validatePDSF);
IFileRead fileRead = _BIORAD3.AdaptationTesting.Get(methodBase, sourceFileLocation: variables[2], sourceFileFilter: variables[3], useCyclicalForDescription: false);
Logistics logistics = new(fileRead);
dateTime = FileHandlers.pdsf.ProcessData.GetDateTime(logistics, tickOffset: 0, dateTimeText: string.Empty);
Assert.AreEqual(logistics.DateTimeFromSequence, dateTime);
dateTime = FileHandlers.pdsf.ProcessData.GetDateTime(logistics, tickOffset: 0, dateTimeText: "Tue Nov 10 12:03:56 1970");
Assert.AreEqual(logistics.DateTimeFromSequence, dateTime);
_ = Shared.AdaptationTesting.ReExtractCompareUpdatePassDirectory(variables, fileRead, logistics, validatePDSF);
NonThrowTryCatch();
}
}
#endif

View File

@ -0,0 +1,179 @@
#if true
using Adaptation.Shared;
using Adaptation.Shared.Methods;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using System;
using System.Diagnostics;
using System.Reflection;
namespace Adaptation._Tests.Extract.Production.v2_61_1;
[TestClass]
public class MET08THFTIRQS408M
{
#pragma warning disable CA2254
#pragma warning disable IDE0060
private static CreateSelfDescription.Production.v2_61_1.MET08THFTIRQS408M _MET08THFTIRQS408M;
[ClassInitialize]
public static void ClassInitialize(TestContext testContext)
{
CreateSelfDescription.Production.v2_61_1.MET08THFTIRQS408M.ClassInitialize(testContext);
_MET08THFTIRQS408M = CreateSelfDescription.Production.v2_61_1.MET08THFTIRQS408M.EAFLoggingUnitTesting;
}
private static void NonThrowTryCatch()
{
try
{ throw new Exception(); }
catch (Exception) { }
}
#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Production__v2_61_1__MET08THFTIRQS408M__MoveMatchingFiles() => _MET08THFTIRQS408M.Production__v2_61_1__MET08THFTIRQS408M__MoveMatchingFiles();
[Ignore]
[TestMethod]
public void Production__v2_61_1__MET08THFTIRQS408M__MoveMatchingFiles638402505394171507__Normal()
{
DateTime dateTime;
string check = "*.pdsf";
MethodBase methodBase = new StackFrame().GetMethod();
_MET08THFTIRQS408M.Production__v2_61_1__MET08THFTIRQS408M__MoveMatchingFiles();
string test = System.Text.RegularExpressions.Regex.Replace("Thickness 14 5mm Edge % from R/2", @"[^a-zA-Z0-9]", "_").Split('\r')[0].Split('\n')[0];
Assert.AreEqual("Thickness_14_5mm_Edge___from_R_2", test);
string[] variables = _MET08THFTIRQS408M.AdaptationTesting.GetVariables(methodBase, check, validatePDSF: false);
IFileRead fileRead = _MET08THFTIRQS408M.AdaptationTesting.Get(methodBase, sourceFileLocation: variables[2], sourceFileFilter: variables[3], useCyclicalForDescription: false);
Logistics logistics = new(fileRead);
dateTime = FileHandlers.pdsf.ProcessData.GetDateTime(logistics, tickOffset: 0, dateTimeText: string.Empty);
Assert.AreEqual(logistics.DateTimeFromSequence, dateTime);
dateTime = FileHandlers.pdsf.ProcessData.GetDateTime(logistics, tickOffset: 0, dateTimeText: "Tue Nov 10 12:03:56 1970");
Assert.AreEqual(logistics.DateTimeFromSequence, dateTime);
_ = Shared.AdaptationTesting.ReExtractCompareUpdatePassDirectory(variables, fileRead, logistics);
NonThrowTryCatch();
}
#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Production__v2_61_1__MET08THFTIRQS408M__OpenInsightMetrologyViewer() => _MET08THFTIRQS408M.Production__v2_61_1__MET08THFTIRQS408M__OpenInsightMetrologyViewer();
#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Production__v2_61_1__MET08THFTIRQS408M__IQSSi() => _MET08THFTIRQS408M.Production__v2_61_1__MET08THFTIRQS408M__IQSSi();
#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Production__v2_61_1__MET08THFTIRQS408M__IQSSi638402505394171507__IqsSql()
{
DateTime dateTime;
string check = "*.pdsf";
MethodBase methodBase = new StackFrame().GetMethod();
_MET08THFTIRQS408M.Production__v2_61_1__MET08THFTIRQS408M__IQSSi();
string[] variables = _MET08THFTIRQS408M.AdaptationTesting.GetVariables(methodBase, check, validatePDSF: false);
IFileRead fileRead = _MET08THFTIRQS408M.AdaptationTesting.Get(methodBase, sourceFileLocation: variables[2], sourceFileFilter: variables[3], useCyclicalForDescription: false);
Logistics logistics = new(fileRead);
dateTime = FileHandlers.pdsf.ProcessData.GetDateTime(logistics, tickOffset: 0, dateTimeText: string.Empty);
Assert.AreEqual(logistics.DateTimeFromSequence, dateTime);
dateTime = FileHandlers.pdsf.ProcessData.GetDateTime(logistics, tickOffset: 0, dateTimeText: "Tue Nov 10 12:03:56 1970");
Assert.AreEqual(logistics.DateTimeFromSequence, dateTime);
_ = Shared.AdaptationTesting.ReExtractCompareUpdatePassDirectory(variables, fileRead, logistics);
NonThrowTryCatch();
}
#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Production__v2_61_1__MET08THFTIRQS408M__OpenInsight() => _MET08THFTIRQS408M.Production__v2_61_1__MET08THFTIRQS408M__OpenInsight();
#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Production__v2_61_1__MET08THFTIRQS408M__OpenInsight638042558563679143__IqsSql()
{
DateTime dateTime;
string check = "*.pdsf";
MethodBase methodBase = new StackFrame().GetMethod();
_MET08THFTIRQS408M.Production__v2_61_1__MET08THFTIRQS408M__OpenInsight();
string[] variables = _MET08THFTIRQS408M.AdaptationTesting.GetVariables(methodBase, check, validatePDSF: false);
IFileRead fileRead = _MET08THFTIRQS408M.AdaptationTesting.Get(methodBase, sourceFileLocation: variables[2], sourceFileFilter: variables[3], useCyclicalForDescription: false);
Logistics logistics = new(fileRead);
dateTime = FileHandlers.pdsf.ProcessData.GetDateTime(logistics, tickOffset: 0, dateTimeText: string.Empty);
Assert.AreEqual(logistics.DateTimeFromSequence, dateTime);
dateTime = FileHandlers.pdsf.ProcessData.GetDateTime(logistics, tickOffset: 0, dateTimeText: "Tue Nov 10 12:03:56 1970");
Assert.AreEqual(logistics.DateTimeFromSequence, dateTime);
_ = Shared.AdaptationTesting.ReExtractCompareUpdatePassDirectory(variables, fileRead, logistics);
NonThrowTryCatch();
}
#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Production__v2_61_1__MET08THFTIRQS408M__OpenInsight638662107339513927__Long()
{
DateTime dateTime;
string check = "*.pdsf";
MethodBase methodBase = new StackFrame().GetMethod();
_MET08THFTIRQS408M.Production__v2_61_1__MET08THFTIRQS408M__OpenInsight();
string[] variables = _MET08THFTIRQS408M.AdaptationTesting.GetVariables(methodBase, check, validatePDSF: false);
IFileRead fileRead = _MET08THFTIRQS408M.AdaptationTesting.Get(methodBase, sourceFileLocation: variables[2], sourceFileFilter: variables[3], useCyclicalForDescription: false);
Logistics logistics = new(fileRead);
dateTime = FileHandlers.pdsf.ProcessData.GetDateTime(logistics, tickOffset: 0, dateTimeText: string.Empty);
Assert.AreEqual(logistics.DateTimeFromSequence, dateTime);
dateTime = FileHandlers.pdsf.ProcessData.GetDateTime(logistics, tickOffset: 0, dateTimeText: "Tue Nov 10 12:03:56 1970");
Assert.AreEqual(logistics.DateTimeFromSequence, dateTime);
_ = Shared.AdaptationTesting.ReExtractCompareUpdatePassDirectory(variables, fileRead, logistics);
NonThrowTryCatch();
}
#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Production__v2_61_1__MET08THFTIRQS408M__OpenInsightMetrologyViewerAttachments() => _MET08THFTIRQS408M.Production__v2_61_1__MET08THFTIRQS408M__OpenInsightMetrologyViewerAttachments();
#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Production__v2_61_1__MET08THFTIRQS408M__APC() => _MET08THFTIRQS408M.Production__v2_61_1__MET08THFTIRQS408M__APC();
#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Production__v2_61_1__MET08THFTIRQS408M__SPaCe() => _MET08THFTIRQS408M.Production__v2_61_1__MET08THFTIRQS408M__SPaCe();
#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Production__v2_61_1__MET08THFTIRQS408M__Processed() => _MET08THFTIRQS408M.Production__v2_61_1__MET08THFTIRQS408M__Processed();
#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Production__v2_61_1__MET08THFTIRQS408M__Archive() => _MET08THFTIRQS408M.Production__v2_61_1__MET08THFTIRQS408M__Archive();
#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Production__v2_61_1__MET08THFTIRQS408M__Dummy() => _MET08THFTIRQS408M.Production__v2_61_1__MET08THFTIRQS408M__Dummy();
}
#endif

View File

@ -193,7 +193,12 @@ public class AdaptationTesting : ISMTP
segments = withActualCICN.Split(new string[] { ticks }, StringSplitOptions.None);
dummyDirectory = Path.Combine(dummyRoot, cellInstanceName, ticks, string.Join(null, segments));
if (!Directory.Exists(dummyDirectory))
{
_ = Directory.CreateDirectory(dummyDirectory);
try
{ Directory.SetLastWriteTime(Path.Combine(dummyRoot, cellInstanceName), DateTime.Now); }
catch { }
}
}
if (string.IsNullOrEmpty(ticks))
{
@ -996,22 +1001,22 @@ public class AdaptationTesting : ISMTP
{
try
{
if (!string.IsNullOrEmpty(fileConnectorConfigurationTuple.Item2.ErrorTargetFileLocation))
if (!string.IsNullOrEmpty(fileConnectorConfigurationTuple.Item2.ErrorTargetFileLocation) && !fileConnectorConfigurationTuple.Item2.ErrorTargetFileLocation.Contains("10."))
{
if (!Directory.Exists(fileConnectorConfigurationTuple.Item2.ErrorTargetFileLocation))
_ = Directory.CreateDirectory(fileConnectorConfigurationTuple.Item2.ErrorTargetFileLocation);
}
if (!string.IsNullOrEmpty(fileConnectorConfigurationTuple.Item2.SourceFileLocation))
if (!string.IsNullOrEmpty(fileConnectorConfigurationTuple.Item2.SourceFileLocation) && !fileConnectorConfigurationTuple.Item2.SourceFileLocation.Contains("10."))
{
if (!Directory.Exists(fileConnectorConfigurationTuple.Item2.SourceFileLocation))
_ = Directory.CreateDirectory(fileConnectorConfigurationTuple.Item2.SourceFileLocation);
}
if (!string.IsNullOrEmpty(fileConnectorConfigurationTuple.Item2.TargetFileLocation))
if (!string.IsNullOrEmpty(fileConnectorConfigurationTuple.Item2.TargetFileLocation) && !fileConnectorConfigurationTuple.Item2.TargetFileLocation.Contains("10."))
{
if (!Directory.Exists(fileConnectorConfigurationTuple.Item2.TargetFileLocation))
_ = Directory.CreateDirectory(fileConnectorConfigurationTuple.Item2.TargetFileLocation);
}
if (!string.IsNullOrEmpty(fileConnectorConfigurationTuple.Item2.AlternateTargetFolder))
if (!string.IsNullOrEmpty(fileConnectorConfigurationTuple.Item2.AlternateTargetFolder) && !fileConnectorConfigurationTuple.Item2.AlternateTargetFolder.Contains("10."))
{
if (!Directory.Exists(fileConnectorConfigurationTuple.Item2.AlternateTargetFolder.Split('|')[0]))
_ = Directory.CreateDirectory(fileConnectorConfigurationTuple.Item2.AlternateTargetFolder.Split('|')[0]);

View File

@ -64,7 +64,7 @@ public class MET08THFTIRQS408M : LoggingUnitTesting, IDisposable
StringBuilder results = new();
(string cellInstanceName, string cellInstanceVersionName)[] collection = new (string, string)[]
{
new("MET08THFTIRQS408M", "v2.60.0"),
new("MET08THFTIRQS408M", "v2.61.1"),
};
string production = "http://messa08ec.infineon.com:9003/CellInstanceServiceV2";
Shared.PasteSpecialXml.EAF.XML.API.CellInstance.CellInstanceVersion cellInstanceVersion;

View File

@ -1,5 +1,5 @@
using Adaptation._Tests.Shared;
using Adaptation.FileHandlers.QS408M;
using Adaptation.FileHandlers.pdsf;
using Microsoft.Extensions.Logging;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using System;
@ -203,8 +203,8 @@ public class QS408M : LoggingUnitTesting, IDisposable
StringBuilder results = new();
(string cellInstanceName, string cellInstanceVersionName)[] collection = new (string, string)[]
{
new("BIORAD2", "v2.60.0"),
new("BIORAD3", "v2.60.0"),
new("BIORAD2", "v2.61.1"),
new("BIORAD3", "v2.61.1"),
};
string production = "http://messa08ec.infineon.com:9003/CellInstanceServiceV2";
Shared.PasteSpecialXml.EAF.XML.API.CellInstance.CellInstanceVersion cellInstanceVersion;

View File

@ -0,0 +1,133 @@
// Recipe 1 = Matched
// recipes-and-patterns.js under IndexOf
// RecipesAndPatternsMatch
// ($('dcp.BIORAD2/csv/Index', 0) + 1) == $('dcp.BIORAD2/csv/Count', 0)
// getValue('FTIR', $('dcp.BIORAD2/csv/Count', 0), $('dcp.BIORAD2/csv/Recipe', ''), 'pattern', getContextData('2', 'cds.NULL_DATA', ''));
function getValue(tool, patternSize, recipe, pattern, json) {
let result;
// Special-case recipes that always count as matched; guard against a null/empty recipe before calling toUpperCase().
if (recipe != undefined && recipe.length !== 0 && ['T-LOW', 'T-MID', 'T-HIGH', 'T_LOW', 'T_MID', 'T_HIGH'].indexOf(recipe.toUpperCase()) !== -1)
result = '1';
else {
if (tool == undefined || tool.length === 0 || patternSize == undefined || patternSize.length === 0 || recipe == undefined || recipe.length === 0 || pattern == undefined || pattern.length === 0 || json == undefined || json.length === 0)
result = 'A) Invalid input!';
else {
let parsed;
try {
parsed = JSON.parse(json);
} catch (error) {
parsed = null;
}
if (parsed == null)
result = 'B) Invalid input!';
else if (parsed.rds == undefined || parsed.rds.prodSpec == undefined || parsed.rds.prodSpec.recipesAndPatterns == undefined)
result = 'C) No Spec!';
else {
let toolMatches = [];
for (let index = 0; index < parsed.rds.prodSpec.recipesAndPatterns.length; index++) {
if (parsed.rds.prodSpec.recipesAndPatterns[index].tool === tool) {
toolMatches.push(parsed.rds.prodSpec.recipesAndPatterns[index]);
}
}
if (toolMatches == null || toolMatches.length === 0)
result = 'Tool [' + tool + '] not found in OI API results!';
else {
let debug = '';
let matches = 0;
for (let index = 0; index < toolMatches.length; index++) {
debug += 'patternSize: ' + toolMatches[index].patternSize +
';~recipe: ' + toolMatches[index].recipe +
';~pattern: ' + toolMatches[index].pattern + ';~';
if (toolMatches[index].patternSize == patternSize &&
toolMatches[index].recipe.toLowerCase() == recipe.toLowerCase()) {
matches++;
}
}
if (matches > 0)
result = '1';
else
result = 'Value not matched~Run~patternSize: ' + patternSize + ';~recipe: ' + recipe + ';~pattern: ' + pattern + ';~API~' + debug;
}
}
}
}
return result;
}
getValue('FTIR', 5, 'Thin8inch', 'pattern', '{"rds":{"prodSpec":{"recipesAndPatterns":[{"recipe":"thin8inch","pattern":"6INCH6MM","patternSize":5,"tool":"FTIR"}]}}}');
let json;
let tool;
let recipe;
let pattern;
let patternSize;
tool = 'FTIR'
patternSize = 5;
recipe = 'IRC6in_6mm';
pattern = 'pattern';
json = '{"rds":{"prodSpec":{"recipesAndPatterns":[{"recipe":"IRC6in_6mm","pattern":"6INCH6MM","patternSize":5,"tool":"FTIR"}]}}}';
const testA = getValue(tool, patternSize, recipe, pattern, json);
if (testA !== '1')
throw 'Test A failed: ' + testA;
tool = null;
const testB = getValue(tool, patternSize, recipe, pattern, json);
if (testB !== 'A) Invalid input!')
throw 'Test B failed: ' + testB;
tool = '';
const testC = getValue(tool, patternSize, recipe, pattern, json);
if (testC !== 'A) Invalid input!')
throw 'Test C failed: ' + testC;
patternSize = null;
const testD = getValue(tool, patternSize, recipe, pattern, json);
if (testD !== 'A) Invalid input!')
throw 'Test D failed: ' + testD;
patternSize = '';
const testE = getValue(tool, patternSize, recipe, pattern, json);
if (testE !== 'A) Invalid input!')
throw 'Test E failed: ' + testE;
recipe = null;
const testF = getValue(tool, patternSize, recipe, pattern, json);
if (testF !== 'A) Invalid input!')
throw 'Test F failed: ' + testF;
recipe = '';
const testG = getValue(tool, patternSize, recipe, pattern, json);
if (testG !== 'A) Invalid input!')
throw 'Test G failed: ' + testG;
pattern = null;
const testH = getValue(tool, patternSize, recipe, pattern, json);
if (testH !== 'A) Invalid input!')
throw 'Test H failed: ' + testH;
pattern = '';
const testI = getValue(tool, patternSize, recipe, pattern, json);
if (testI !== 'A) Invalid input!')
throw 'Test I failed: ' + testI;
json = '';
const testK = getValue(tool, patternSize, recipe, pattern, json);
if (testK !== 'A) Invalid input!')
throw 'Test K failed: ' + testK;
json = 'invalid';
const testL = getValue(tool, patternSize, recipe, pattern, json);
if (testL !== 'B) Invalid input!')
throw 'Test L failed: ' + testL;
json = '{"rds":{}}';
const testM = getValue(tool, patternSize, recipe, pattern, json);
if (testM !== 'C) No Spec!')
throw 'Test M failed: ' + testM;
json = '{"rds":{"prodSpec":{"recipesAndPatterns":[]}}}';
const testN = getValue(tool, patternSize, recipe, pattern, json);
if (testN !== 'Tool [FTIR] not found in OI API results!')
throw 'Test N failed: ' + testN;
const testO = getValue('FTIR', 14, 'Thin8inch', 'pattern', '{"rds":{"prodSpec":{"recipesAndPatterns":[{"recipe":"thin8inch","pattern":"6INCH6MM","patternSize":5,"tool":"FTIR"}]}}}');
if (testO.indexOf('Value not matched~Run~patternSize: 14;~recipe: Thin8inch;~pattern: pattern;~API~patternSize: 5;~recipe: thin8inch;~pattern: 6INCH6MM;~') !== 0)
throw 'Test O failed: ' + testO;
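The $() and getContextData() helpers referenced in the header comments exist only inside the EAF expression engine. A minimal sketch for exercising the same RecipesAndPatternsMatch expression locally follows; the stub context keys, sample values, and returned JSON are illustrative assumptions, not part of this commit.
// Hypothetical local stubs (not part of this commit) so the expression from the header comments can run outside the EAF expression engine.
function $(key, fallback) {
const contextValues = { 'dcp.BIORAD2/csv/Index': 4, 'dcp.BIORAD2/csv/Count': 5, 'dcp.BIORAD2/csv/Recipe': 'IRC6in_6mm' };
return key in contextValues ? contextValues[key] : fallback;
}
function getContextData(level, name, fallback) {
// Stub ignores its arguments and returns a sample OI API payload.
return '{"rds":{"prodSpec":{"recipesAndPatterns":[{"recipe":"IRC6in_6mm","pattern":"6INCH6MM","patternSize":5,"tool":"FTIR"}]}}}';
}
const recipesAndPatternsMatch = ($('dcp.BIORAD2/csv/Index', 0) + 1) == $('dcp.BIORAD2/csv/Count', 0)
? getValue('FTIR', $('dcp.BIORAD2/csv/Count', 0), $('dcp.BIORAD2/csv/Recipe', ''), 'pattern', getContextData('2', 'cds.NULL_DATA', ''))
: '0';
if (recipesAndPatternsMatch !== '1')
throw 'RecipesAndPatternsMatch sketch failed: ' + recipesAndPatternsMatch;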

View File

@ -117,24 +117,17 @@
<Compile Include="Adaptation\FileHandlers\OpenInsightMetrologyViewerAttachments\FileRead.cs" />
<Compile Include="Adaptation\FileHandlers\pdsf\Body.cs" />
<Compile Include="Adaptation\FileHandlers\pdsf\Constant.cs" />
<Compile Include="Adaptation\FileHandlers\pdsf\Description.cs" />
<Compile Include="Adaptation\FileHandlers\pdsf\Descriptor.cs" />
<Compile Include="Adaptation\FileHandlers\pdsf\Detail.cs" />
<Compile Include="Adaptation\FileHandlers\pdsf\FileRead.cs" />
<Compile Include="Adaptation\FileHandlers\pdsf\Footer.cs" />
<Compile Include="Adaptation\FileHandlers\pdsf\Header.cs" />
<Compile Include="Adaptation\FileHandlers\pdsf\ProcessData.cs" />
<Compile Include="Adaptation\FileHandlers\pdsf\Row.cs" />
<Compile Include="Adaptation\FileHandlers\pdsf\Run.cs" />
<Compile Include="Adaptation\FileHandlers\pdsf\Site.cs" />
<Compile Include="Adaptation\FileHandlers\Processed\FileRead.cs" />
<Compile Include="Adaptation\FileHandlers\QS408M\Body.cs" />
<Compile Include="Adaptation\FileHandlers\QS408M\Description.cs" />
<Compile Include="Adaptation\FileHandlers\QS408M\Descriptor.cs" />
<Compile Include="Adaptation\FileHandlers\QS408M\Detail.cs" />
<Compile Include="Adaptation\FileHandlers\QS408M\FileRead.cs" />
<Compile Include="Adaptation\FileHandlers\QS408M\Footer.cs" />
<Compile Include="Adaptation\FileHandlers\QS408M\Header.cs" />
<Compile Include="Adaptation\FileHandlers\QS408M\ProcessData.cs" />
<Compile Include="Adaptation\FileHandlers\QS408M\Row.cs" />
<Compile Include="Adaptation\FileHandlers\QS408M\Run.cs" />
<Compile Include="Adaptation\FileHandlers\QS408M\Site.cs" />
<Compile Include="Adaptation\FileHandlers\SPaCe\FileRead.cs" />
<Compile Include="Adaptation\Ifx\Eaf\Common\Configuration\ConnectionSetting.cs" />
<Compile Include="Adaptation\Ifx\Eaf\EquipmentConnector\File\Component\File.cs" />
@ -184,10 +177,10 @@
</ItemGroup>
<ItemGroup>
<PackageReference Include="Infineon.EAF.Runtime">
<Version>2.60.0</Version>
<Version>2.61.1</Version>
</PackageReference>
<PackageReference Include="System.Text.Json">
<Version>8.0.5</Version>
<Version>8.0.3</Version>
</PackageReference>
</ItemGroup>
<ItemGroup />

View File

@ -32,5 +32,5 @@ using System.Runtime.InteropServices;
// You can specify all the values or you can default the Build and Revision Numbers
// by using the '*' as shown below:
// [assembly: AssemblyVersion("1.0.*")]
[assembly: AssemblyVersion("2.60.0.0")]
[assembly: AssemblyFileVersion("2.60.0.0")]
[assembly: AssemblyVersion("2.61.1.0")]
[assembly: AssemblyFileVersion("2.61.1.0")]