8 Commits

Author SHA1 Message Date
1dc1d1a789 Update FileRead class to use hardcoded column names and indices for process data standard format mapping 2025-12-11 09:04:43 -07:00
8c7da1e63c Refactor FileRead and WSRequest classes to use unique sequence and ID generation; enhance logistics handling across multiple file handlers 2025-12-10 11:23:07 -07:00
e6d93e7515 Update project dependencies and target framework to net10.0; adjust diagnostic severities in .editorconfig 2025-12-10 11:21:36 -07:00
d4e21c2c9a Refactor: Update site configuration and enhance file extraction logic in FileRead and ProcessData classes 2025-12-10 11:21:19 -07:00
c20799ba72 Refactor: Remove PDSF file handlers and related classes
- Deleted Header.cs, Row.cs, Run.cs, Wafer.cs, WaferSummary.cs from Adaptation.FileHandlers.pdsf.
- Updated project file to remove references to deleted PDSF files.
- Adjusted logic in AdaptationTesting.cs to ensure directory creation checks are more robust.
- Modified recipes-and-patterns.js to improve string comparison and debugging output.
2025-11-10 18:40:34 -07:00
1603bb8175 Infineon.EAF.Runtime v2.61.1 2025-10-14 13:25:42 -07:00
10a0662561 Enhance PCL and PDSF file handlers with new constants and refactor methods for improved readability and functionality 2025-10-14 08:00:17 -07:00
0a477c7ea1 Add IndexOf property to WSRequest and Description classes; implement getValue function in recipes-and-patterns.js 2025-10-13 17:10:26 -07:00
55 changed files with 1297 additions and 1921 deletions

View File

@ -107,6 +107,7 @@ dotnet_diagnostic.CA1864.severity = none # CA1864: To avoid double lookup, call
dotnet_diagnostic.CA1866.severity = none # CA1866: Use 'string.EndsWith(char)' instead of 'string.EndsWith(string)' when you have a string with a single char
dotnet_diagnostic.CA1869.severity = none # CA1869: Avoid creating a new 'JsonSerializerOptions' instance for every serialization operation. Cache and reuse instances instead.
dotnet_diagnostic.CA2254.severity = none # CA2254: The logging message template should not vary between calls to 'LoggerExtensions.LogInformation(ILogger, string?, params object?[])'
dotnet_diagnostic.CS0618.severity = none # 'member' is obsolete: 'description'
dotnet_diagnostic.IDE0001.severity = warning # IDE0001: Simplify name
dotnet_diagnostic.IDE0002.severity = warning # Simplify (member access) - System.Version.Equals("1", "2"); Version.Equals("1", "2");
dotnet_diagnostic.IDE0004.severity = warning # IDE0004: Cast is redundant.
@ -123,8 +124,9 @@ dotnet_diagnostic.IDE0300.severity = none # IDE0300: Collection initialization c
dotnet_diagnostic.IDE0301.severity = none #IDE0301: Collection initialization can be simplified
dotnet_diagnostic.IDE0305.severity = none # IDE0305: Collection initialization can be simplified
dotnet_diagnostic.MSTEST0015.severity = none # MSTEST0015: Test method {method} should not be ignored
dotnet_diagnostic.MSTEST0037.severity = error # MSTEST0037: Use proper 'Assert' methods
dotnet_diagnostic.MSTEST0037.severity = none # MSTEST0037: Use proper 'Assert' methods
dotnet_diagnostic.SYSLIB1045.severity = none # SYSLIB1045: diagnostics for regex source generation
dotnet_diagnostic.MSTEST0048.severity = none # MSTEST0048: Test method '{method}' should be attributed with 'DataTestMethod' when using 'DataRow' attributes
dotnet_naming_rule.abstract_method_should_be_pascal_case.severity = warning
dotnet_naming_rule.abstract_method_should_be_pascal_case.style = pascal_case
dotnet_naming_rule.abstract_method_should_be_pascal_case.symbols = abstract_method

View File

@ -216,7 +216,7 @@
{
"label": "File-Folder-Helper AOT s X Day-Helper-2025-03-20",
"type": "shell",
"command": "L:/DevOps/Mesa_FI/File-Folder-Helper/bin/Release/net8.0/win-x64/publish/File-Folder-Helper.exe",
"command": "L:/DevOps/Mesa_FI/File-Folder-Helper/bin/Release/net10.0/win-x64/publish/File-Folder-Helper.exe",
"args": [
"s",
"X",

View File

@ -21,7 +21,6 @@ public class CellInstanceConnectionName
nameof(OpenInsight) => new OpenInsight.FileRead(smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null),
nameof(OpenInsightMetrologyViewer) => new OpenInsightMetrologyViewer.FileRead(smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null),
nameof(OpenInsightMetrologyViewerAttachments) => new OpenInsightMetrologyViewerAttachments.FileRead(smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null),
nameof(pdsf) => new pdsf.FileRead(smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null),
nameof(pcl) => new pcl.FileRead(smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null),
nameof(Processed) => new Processed.FileRead(smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null),
nameof(SPaCe) => new SPaCe.FileRead(smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null),

View File

@ -97,9 +97,9 @@ public class FileRead : Shared.FileRead, IFileRead
private void CallbackInProcessCleared(string sourceArchiveFile, string traceDummyFile, string targetFileLocation, string monARessource, string inProcessDirectory, long sequence, bool warning)
{
const string site = "sjc";
const string site = "els";
string stateName = string.Concat("Dummy_", _EventName);
const string monInURL = "http://moninhttp.sjc.infineon.com/input/text";
const string monInURL = $"http://moninhttp.{site}.infineon.com/input/text";
MonIn monIn = MonIn.GetInstance(monInURL);
try
{
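Worth noting on the change above: monInURL stays a const even though it is now an interpolated string. That is only legal because the single placeholder (the const string site) is itself a compile-time constant, a C# 10+ feature (constant interpolated strings) consistent with the move to net10.0 earlier in this changeset. A minimal illustration of the pattern, using the values from this diff:

const string site = "els";
// Legal in C# 10+ because every interpolation hole is a compile-time constant string.
const string monInURL = $"http://moninhttp.{site}.infineon.com/input/text";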

View File

@ -113,7 +113,7 @@ public class FileRead : Shared.FileRead, IFileRead
StringBuilder result = new();
pcl.Description x = descriptions[0];
char del = '\t';
_ = result.Append(x.AreaCountAvg).Append(del). // 001 - AreaCountAvg
_ = result.Append(x.AreaCountAvg).Append(del). // 001 - AreaCountAvg
Append(x.AreaCountMax).Append(del). // 002 - AreaCountMax
Append(x.AreaCountMin).Append(del). // 003 - AreaCountMin
Append(x.AreaCountStdDev).Append(del). // 004 - AreaCountStdDev
@ -121,41 +121,41 @@ public class FileRead : Shared.FileRead, IFileRead
Append(x.AreaTotalMax).Append(del). // 006 - AreaTotalMax
Append(x.AreaTotalMin).Append(del). // 007 - AreaTotalMin
Append(x.AreaTotalStdDev).Append(del). // 008 - AreaTotalStdDev
Append(x.Date).Append(del). // 009 -
Append(x.Date).Append(del). // 009 -
Append(x.HazeAverageAvg).Append(del). // 010 - Haze Average
Append(x.HazeAverageMax).Append(del). // 011 -
Append(x.HazeAverageMin).Append(del). // 012 -
Append(x.HazeAverageStdDev).Append(del). // 013 -
Append(x.HazeRegionAvg).Append(del). // 014 -
Append(x.HazeRegionMax).Append(del). // 015 -
Append(x.HazeRegionMin).Append(del). // 016 -
Append(x.HazeRegionStdDev).Append(del). // 017 -
Append(x.Lot).Append(del). // 018 -
Append(x.LPDCM2Avg).Append(del). // 019 -
Append(x.LPDCM2Max).Append(del). // 020 -
Append(x.LPDCM2Min).Append(del). // 021 -
Append(x.LPDCM2StdDev).Append(del). // 022 -
Append(x.LPDCountAvg).Append(del). // 023 -
Append(x.LPDCountMax).Append(del). // 024 -
Append(x.LPDCM2Min).Append(del). // 025 -
Append(x.LPDCountStdDev).Append(del). // 026 -
Append(x.Employee).Append(del). // 027 -
Append(x.HazeAverageMax).Append(del). // 011 -
Append(x.HazeAverageMin).Append(del). // 012 -
Append(x.HazeAverageStdDev).Append(del). // 013 -
Append(x.HazeRegionAvg).Append(del). // 014 -
Append(x.HazeRegionMax).Append(del). // 015 -
Append(x.HazeRegionMin).Append(del). // 016 -
Append(x.HazeRegionStdDev).Append(del). // 017 -
Append(x.Lot).Append(del). // 018 -
Append(x.LPDCM2Avg).Append(del). // 019 -
Append(x.LPDCM2Max).Append(del). // 020 -
Append(x.LPDCM2Min).Append(del). // 021 -
Append(x.LPDCM2StdDev).Append(del). // 022 -
Append(x.LPDCountAvg).Append(del). // 023 -
Append(x.LPDCountMax).Append(del). // 024 -
Append(x.LPDCM2Min).Append(del). // 025 -
Append(x.LPDCountStdDev).Append(del). // 026 -
Append(x.Employee).Append(del). // 027 -
Append(x.RDS).Append(del). // 028 - Lot
Append(x.Reactor).Append(del). // 029 - Process
Append(x.Recipe.Replace(";", string.Empty)).Append(del). // 030 - Part
Append(x.ScratchCountAvg).Append(del). // 031 - Scratch Count
Append(x.ScratchCountMax).Append(del). // 032 -
Append(x.ScratchCountMin).Append(del). // 033 -
Append(x.ScratchTotalStdDev).Append(del). // 034 -
Append(x.ScratchCountMax).Append(del). // 032 -
Append(x.ScratchCountMin).Append(del). // 033 -
Append(x.ScratchTotalStdDev).Append(del). // 034 -
Append(x.ScratchTotalAvg).Append(del). // 035 - Scratch Length
Append(x.ScratchTotalMax).Append(del). // 036 -
Append(x.ScratchTotalMin).Append(del). // 037 -
Append(x.ScratchTotalStdDev).Append(del). // 038 -
Append(x.ScratchTotalMax).Append(del). // 036 -
Append(x.ScratchTotalMin).Append(del). // 037 -
Append(x.ScratchTotalStdDev).Append(del). // 038 -
Append(x.SumOfDefectsAvg).Append(del). // 039 - Average Sum of Defects
Append(x.SumOfDefectsMax).Append(del). // 040 - Max Sum of Defects
Append(x.SumOfDefectsMin).Append(del). // 041 - Min Sum of Defects
Append(x.SumOfDefectsStdDev).Append(del). // 042 - SumOfDefectsStdDev
Append(logistics.MesEntity).Append(del). // 043 -
Append(logistics.MesEntity).Append(del). // 043 -
AppendLine();
return result.ToString();
}

View File

@ -85,9 +85,9 @@ public class FileRead : Shared.FileRead, IFileRead
throw new Exception(cellInstanceConnectionName);
if (!_IsDuplicator)
throw new Exception(cellInstanceConnectionName);
string processDataStandardFormatMappingOldColumnNames = GetPropertyValue(cellInstanceConnectionName, modelObjectParameters, "Process.Data.Standard.Format.Mapping.Old.Column.Names");
string processDataStandardFormatMappingNewColumnNames = GetPropertyValue(cellInstanceConnectionName, modelObjectParameters, "Process.Data.Standard.Format.Mapping.New.Column.Names");
string processDataStandardFormatMappingColumnIndices = GetPropertyValue(cellInstanceConnectionName, modelObjectParameters, "Process.Data.Standard.Format.Mapping.Column.Indices");
string processDataStandardFormatMappingOldColumnNames = "Time,A_LOGISTICS,B_LOGISTICS,Test,Count,Index,MesEntity,Date,Employee,Lot,PSN,Reactor,Recipe,Comments,Diameter,Exclusion,Gain,HeaderUniqueId,Laser,ParseErrorText,RDS,Slot,UniqueId,AreaCount,AreaCountAvg,AreaCountMax,AreaCountMin,AreaCountStdDev,AreaTotal,AreaTotalAvg,AreaTotalMax,AreaTotalMin,AreaTotalStdDev,Bin1,Bin2,Bin3,Bin4,Bin5,Bin6,Bin7,Bin8,HazeAverage,HazeAverageAvg,HazeAverageMax,HazeAverageMin,HazeAverageStdDev,HazePeak,HazeRegion,HazeRegionAvg,HazeRegionMax,HazeRegionMin,HazeRegionStdDev,HazeRng,LPDCM2,LPDCM2Avg,LPDCM2Max,LPDCM2Min,LPDCM2StdDev,LPDCount,LPDCountAvg,LPDCountMax,LPDCountMin,LPDCountStdDev,Mean,ScratchCount,ScratchCountAvg,ScratchCountMax,ScratchCountMin,ScratchCountStdDev,ScratchTotal,ScratchTotalAvg,ScratchTotalMax,ScratchTotalMin,ScratchTotalStdDev,Sort,StdDev,SumOfDefects,SumOfDefectsAvg,SumOfDefectsMax,SumOfDefectsMin,SumOfDefectsStdDev,Thresh,Thruput,EventId,IndexOf,AttemptCounter,ReactorMode,ExportControl,CastingVerification"; // GetPropertyValue(cellInstanceConnectionName, modelObjectParameters, "Process.Data.Standard.Format.Mapping.Old.Column.Names");
string processDataStandardFormatMappingNewColumnNames = "Time,A_LOGISTICS,B_LOGISTICS,Count,Sequence,MesEntity,Index,Date,Recipe,Id,WaferId,LPDCount,LPDCM2,AreaCount,AreaTotal,ScratchCount,ScratchTotal,SumOfDefects,HazeRegion,HazeAverage,Grade,LPDCountMin,LPDCM2Min,AreaCountMin,AreaTotalMin,ScratchCountMin,ScratchTotalMin,SumOfDefectsMin,HazeRegionMin,HazeAverageMin,LPDCountMax,LPDCM2Max,AreaCountMax,AreaTotalMax,ScratchCountMax,ScratchTotalMax,SumOfDefectsMax,HazeRegionMax,HazeAverageMax,LPDCountAvg,LPDCM2Avg,AreaCountAvg,AreaTotalAvg,ScratchCountAvg,ScratchTotalAvg,SumOfDefectsAvg,HazeRegionAvg,HazeAverageAvg,LPDCountStdDev,LPDCM2StdDev,AreaCountStdDev,AreaTotalStdDev,ScratchCountStdDev,ScratchTotalStdDev,SumOfDefectsStdDev,HazeRegionStdDev,HazeAverageStdDev,WaferDate,Comments,Sort,WaferLPDCount,WaferLPDCM2,Bin1,Bin2,Bin3,Bin4,Bin5,Bin6,Bin7,Bin8,Mean,StdDev,WaferAreaCount,WaferAreaTotal,WaferScratchCount,WaferScratchTotal,WaferSumOfDefects,WaferHazeRegion,WaferHazeAverage,HazePeak,Laser,Gain,Diameter,Thresh,Exclusion,HazeRng,Thruput,WaferRecipe,RDS,PSN,Reactor,Layer,Zone,Employee,InferredLot,EventId,IndexOf,AttemptCounter,ReactorMode,ExportControl,CastingVerification"; // GetPropertyValue(cellInstanceConnectionName, modelObjectParameters, "Process.Data.Standard.Format.Mapping.New.Column.Names");
string processDataStandardFormatMappingColumnIndices = "0,1,2,95,3,6,5,7,93,9,89,90,8,58,82,84,81,-1,80,-1,88,10,-1,13,41,32,23,50,73,42,33,24,51,62,63,64,65,66,67,68,69,78,47,38,29,56,79,77,46,37,28,55,85,12,40,31,22,49,11,39,30,21,48,70,15,43,34,25,52,75,44,35,26,53,59,71,17,45,36,27,54,83,86,87,88,89,90,91,92"; // GetPropertyValue(cellInstanceConnectionName, modelObjectParameters, "Process.Data.Standard.Format.Mapping.Column.Indices");
_ProcessDataStandardFormatMapping = ProcessDataStandardFormatMapping.Get(processDataStandardFormatMappingOldColumnNames,
processDataStandardFormatMappingNewColumnNames,
processDataStandardFormatMappingColumnIndices);
@ -267,11 +267,12 @@ public class FileRead : Shared.FileRead, IFileRead
List<Post> results = new();
Post post;
long preWait;
long uniqueSequence = Logistics.GetUniqueSequence(_Logistics);
foreach (PreWith preWith in preWithCollection)
{
if (!_IsEAFHosted)
continue;
if (!_StaticRuns.TryGetValue(_Logistics.Sequence, out List<Shared.Metrology.WS.Results>? wsResults))
if (!_StaticRuns.TryGetValue(uniqueSequence, out List<Shared.Metrology.WS.Results>? wsResults))
wsResults = null;
if (processDataStandardFormat.InputPDSF is null)
File.Move(preWith.MatchingFile, preWith.CheckFile);
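The handlers in this changeset now key the shared _StaticRuns dictionary by Logistics.GetUniqueSequence(_Logistics) rather than the raw _Logistics.Sequence, so two runs that happen to share the same timestamp-derived sequence no longer overwrite each other's pending WS.Results. The helper's body is not part of this diff; the following is a purely hypothetical sketch of the keying pattern, with the discriminator assumed:

// Hypothetical sketch only: the real GetUniqueSequence implementation is not shown in this diff.
// The intent is a key that stays distinct even when two runs share the same Sequence value.
internal static long GetUniqueSequence(Logistics logistics) =>
    logistics.Sequence + logistics.MesEntity.GetHashCode(); // discriminator is an assumption

// Consumption then follows the pattern used by the file handlers in this changeset:
long uniqueSequence = Logistics.GetUniqueSequence(_Logistics);
if (!_StaticRuns.TryGetValue(uniqueSequence, out List<Shared.Metrology.WS.Results>? wsResults))
    wsResults = null;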

View File

@ -125,13 +125,14 @@ public class FileRead : Shared.FileRead, IFileRead
{
long? subgroupId;
string fileName = Path.GetFileName(reportFullPath);
long uniqueSequence = Logistics.GetUniqueSequence(_Logistics);
long breakAfter = dateTime.AddSeconds(_BreakAfterSeconds).Ticks;
long preWait = _FileConnectorConfiguration?.FileHandleWaitTime is null ? dateTime.AddMilliseconds(1234).Ticks : dateTime.AddMilliseconds(_FileConnectorConfiguration.FileHandleWaitTime.Value).Ticks;
if (string.IsNullOrEmpty(descriptions[0].Reactor) || string.IsNullOrEmpty(descriptions[0].PSN))
subgroupId = null;
else
(subgroupId, int? _, string _) = FromIQS.GetCommandText(_IqsConnectionString, _Logistics, descriptions[0], breakAfter, preWait);
if (_StaticRuns.TryGetValue(_Logistics.Sequence, out List<WS.Results> wsResults))
if (_StaticRuns.TryGetValue(uniqueSequence, out List<WS.Results> wsResults))
{
if (wsResults is null || wsResults.Count != 1)
throw new NullReferenceException($"{nameof(wsResults)} {wsResults?.Count} != 1 {_Logistics.Sequence}!");

View File

@ -113,6 +113,7 @@ public class FileRead : Shared.FileRead, IFileRead
private void SendData(string reportFullPath, DateTime dateTime, JsonElement[] jsonElements, List<pcl.Description> descriptions)
{
string checkDirectory;
long uniqueSequence = Logistics.GetUniqueSequence(_Logistics);
WSRequest wsRequest = new(this, _Logistics, jsonElements, descriptions);
int weekOfYear = _Calendar.GetWeekOfYear(dateTime, CalendarWeekRule.FirstDay, DayOfWeek.Sunday);
string directory = Path.Combine(_OpenInsightMetrologyViewerFileShare, dateTime.Year.ToString(), $"WW{weekOfYear:00}");
@ -126,9 +127,9 @@ public class FileRead : Shared.FileRead, IFileRead
_Log.Debug(wsResults.HeaderId);
lock (_StaticRuns)
{
if (!_StaticRuns.ContainsKey(_Logistics.Sequence))
_StaticRuns.Add(_Logistics.Sequence, new());
_StaticRuns[_Logistics.Sequence].Add(wsResults);
if (!_StaticRuns.ContainsKey(uniqueSequence))
_StaticRuns.Add(uniqueSequence, new());
_StaticRuns[uniqueSequence].Add(wsResults);
}
checkDirectory = Path.Combine(directory, $"-{wsResults.HeaderId}");
if (!Directory.Exists(checkDirectory))

View File

@ -21,6 +21,7 @@ public class WSRequest
public string AreaTotalMax { get; set; }
public string AreaTotalMin { get; set; }
public string AreaTotalStdDev { get; set; }
public string AttemptCounter { get; set; }
public string Date { get; set; }
public string HazeAverageAvg { get; set; }
public string HazeAverageMax { get; set; }
@ -30,6 +31,7 @@ public class WSRequest
public string HazeRegionMax { get; set; }
public string HazeRegionMin { get; set; }
public string HazeRegionStdDev { get; set; }
public string IndexOf { get; set; }
public string Layer { get; set; }
public string LotID { get; set; }
public string LPDCM2Avg { get; set; }
@ -93,6 +95,7 @@ public class WSRequest
AreaTotalMax = x.AreaTotalMax;
AreaTotalMin = x.AreaTotalMin;
AreaTotalStdDev = x.AreaTotalStdDev;
AttemptCounter = x.AttemptCounter;
Date = x.Date;
HazeAverageAvg = x.HazeAverageAvg;
HazeAverageMax = x.HazeAverageMax;
@ -102,6 +105,7 @@ public class WSRequest
HazeRegionMax = x.HazeRegionMax;
HazeRegionMin = x.HazeRegionMin;
HazeRegionStdDev = x.HazeRegionStdDev;
IndexOf = x.IndexOf;
LotID = x.Lot;
LPDCM2Avg = x.LPDCM2Avg;
LPDCM2Max = x.LPDCM2Max;
@ -178,6 +182,7 @@ public class WSRequest
Details.Add(detail);
}
Date = logistics.DateTimeFromSequence.ToString();
string uniqueId = Logistics.GetUniqueId(logistics);
if (UniqueId is null && Details.Count != 0)
UniqueId = Details[0].HeaderUniqueId;
for (int i = 0; i < Details.Count; i++)
@ -199,11 +204,11 @@ public class WSRequest
if (string.IsNullOrEmpty(Details[i].Bin8))
Details[i].Bin8 = null;
}
UniqueId = $"{logistics.JobID}_{logistics.MID}_{logistics.DateTimeFromSequence:yyyyMMddHHmmssffff}";
UniqueId = uniqueId;
for (int i = 0; i < Details.Count; i++)
{
Details[i].HeaderUniqueId = UniqueId;
Details[i].UniqueId = $"{logistics.JobID}_{logistics.MID}_{logistics.DateTimeFromSequence:yyyyMMddHHmmssffff}_Item-{i + 1}";
Details[i].UniqueId = $"{uniqueId}_Item-{i + 1}";
}
}
@ -314,6 +319,7 @@ public class WSRequest
{
string checkFileName;
pcl.Description description;
string uniqueId = Logistics.GetUniqueId(logistics);
string[] pclFiles = Directory.GetFiles(matchDirectory, "*.pcl", SearchOption.TopDirectoryOnly);
if (pclFiles.Length != 1)
throw new Exception($"Invalid source file count for <{results.HeaderId}>!");
@ -326,19 +332,19 @@ public class WSRequest
else
{
UpdateDataPDF(descriptions, checkFileName);
headerAttachments.Add(new WS.Attachment(results, headerIdDirectory, $"{logistics.JobID}_{logistics.MID}_{logistics.DateTimeFromSequence:yyyyMMddHHmmssffff}", "Data.pdf", checkFileName));
headerAttachments.Add(new WS.Attachment(results, headerIdDirectory, uniqueId, "Data.pdf", checkFileName));
}
for (int i = 0; i < descriptions.Count; i++)
{
description = descriptions[i];
checkFileName = Path.Combine(matchDirectory, $"{sourceFileNameNoExt}_s{description.Slot}_image.pdf");
if (File.Exists(checkFileName))
dataAttachments.Add(new WS.Attachment(results, headerIdDirectory, $"{logistics.JobID}_{logistics.MID}_{logistics.DateTimeFromSequence:yyyyMMddHHmmssffff}_Item-{i + 1}", "Image.pdf", checkFileName));
dataAttachments.Add(new WS.Attachment(results, headerIdDirectory, $"{uniqueId}_Item-{i + 1}", "Image.pdf", checkFileName));
else
{
checkFileName = Path.Combine(matchDirectory, $"{sourceFileNameNoExt}_s{description.Slot}_data.pdf");
if (File.Exists(checkFileName))
dataAttachments.Add(new WS.Attachment(results, headerIdDirectory, $"{logistics.JobID}_{logistics.MID}_{logistics.DateTimeFromSequence:yyyyMMddHHmmssffff}_Item-{i + 1}", "Image.pdf", checkFileName));
dataAttachments.Add(new WS.Attachment(results, headerIdDirectory, $"{uniqueId}_Item-{i + 1}", "Image.pdf", checkFileName));
}
}
if (dataAttachments.Count == 0 || dataAttachments.Count != descriptions.Count)
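Every identifier in this file now routes through Logistics.GetUniqueId(logistics) instead of repeating the inline $"{logistics.JobID}_{logistics.MID}_{logistics.DateTimeFromSequence:yyyyMMddHHmmssffff}" format. The helper itself is not shown in this diff, but its presumed shape follows directly from the strings it replaces:

// Presumed shape, inferred from the inline format strings removed in this diff.
internal static string GetUniqueId(Logistics logistics) =>
    $"{logistics.JobID}_{logistics.MID}_{logistics.DateTimeFromSequence:yyyyMMddHHmmssffff}";

// Per-wafer identifiers and attachment names then derive from the header value:
string uniqueId = Logistics.GetUniqueId(logistics);
string firstItemId = $"{uniqueId}_Item-1"; // matches the _Item-{i + 1} suffix used above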

View File

@ -141,8 +141,9 @@ public class FileRead : Shared.FileRead, IFileRead
string jobIdDirectory = Path.Combine(Path.GetDirectoryName(_FileConnectorConfiguration.AlternateTargetFolder) ?? throw new Exception(), _Logistics.JobID);
if (!Directory.Exists(jobIdDirectory))
_ = Directory.CreateDirectory(jobIdDirectory);
long uniqueSequence = Logistics.GetUniqueSequence(_Logistics);
string[] matchDirectories = GetInProcessDirectory(jobIdDirectory);
if (!_StaticRuns.TryGetValue(_Logistics.Sequence, out List<Shared.Metrology.WS.Results>? wsResults))
if (!_StaticRuns.TryGetValue(uniqueSequence, out List<Shared.Metrology.WS.Results>? wsResults))
results = null;
else
{

View File

@ -114,6 +114,7 @@ public class FileRead : Shared.FileRead, IFileRead
{
FileInfo fileInfo = new(reportFullPath);
string logisticsSequence = _Logistics.Sequence.ToString();
long uniqueSequence = Logistics.GetUniqueSequence(_Logistics);
string jobIdDirectory = Path.Combine(_JobIdParentDirectory, _Logistics.JobID);
if (!Directory.Exists(jobIdDirectory))
_ = Directory.CreateDirectory(jobIdDirectory);
@ -136,7 +137,7 @@ public class FileRead : Shared.FileRead, IFileRead
File.Copy(reportFullPath, Path.Combine(sequenceDirectory, Path.GetFileName(reportFullPath)), overwrite: true);
File.WriteAllText(jsonFileName, json);
lock (_StaticRuns)
_ = _StaticRuns.Remove(_Logistics.Sequence);
_ = _StaticRuns.Remove(uniqueSequence);
}
private static void MoveMatchingFile(string jobIdDirectory, string matchDirectory)

View File

@ -9,5 +9,7 @@ internal class Constant
public string Date { get; } = "Date:";
public string StdDev { get; } = "Std Dev:";
public string Average { get; } = "Average:";
public string Statistics { get; } = "Statistics:";
public string DatabaseId { get; } = "Database ID:";
}

View File

@ -42,7 +42,7 @@ internal class Convert
RedirectStandardOutput = true,
};
Process process = Process.Start(processStartInfo);
_ = process.WaitForExit(30000);
_ = process.WaitForExit(300000);
string text;
string checkFile;
string[] pdfFiles = Directory.GetFiles(sourcePath, "*.pdf", SearchOption.TopDirectoryOnly);

View File

@ -30,6 +30,8 @@ public class Description : IDescription, Shared.Properties.IDescription
[JsonPropertyName("PSN")] public string PSN { get; set; }
[JsonPropertyName("Reactor")] public string Reactor { get; set; }
[JsonPropertyName("Recipe")] public string Recipe { get; set; }
[JsonPropertyName("IndexOf")] public string IndexOf { get; set; }
[JsonPropertyName("AttemptCounter")] public string AttemptCounter { get; set; }
//
[JsonPropertyName("Comments")] public string Comments { get; set; }
[JsonPropertyName("Diameter")] public string Diameter { get; set; }
@ -307,6 +309,8 @@ public class Description : IDescription, Shared.Properties.IDescription
PSN = processData.PSN,
Reactor = processData.Reactor,
Recipe = processData.Recipe,
IndexOf = nameof(IndexOf),
AttemptCounter = nameof(AttemptCounter),
//
Comments = detail.Comments,
Diameter = detail.Diameter,
@ -392,7 +396,7 @@ public class Description : IDescription, Shared.Properties.IDescription
return result;
}
private Description GetDefault(IFileRead fileRead, Logistics logistics)
private static Description GetDefault(IFileRead fileRead, Logistics logistics)
{
Description result = new()
{
@ -415,6 +419,8 @@ public class Description : IDescription, Shared.Properties.IDescription
PSN = nameof(PSN),
Reactor = nameof(Reactor),
Recipe = nameof(Recipe),
IndexOf = nameof(IndexOf),
AttemptCounter = nameof(AttemptCounter),
//
Comments = nameof(Comments),
Diameter = nameof(Diameter),
@ -491,6 +497,15 @@ public class Description : IDescription, Shared.Properties.IDescription
return result;
}
internal static JsonElement GetDefaultJsonElement(IFileRead fileRead, Logistics logistics)
{
JsonElement result;
Description description = GetDefault(fileRead, logistics);
string json = JsonSerializer.Serialize(description, DescriptionSourceGenerationContext.Default.Description);
result = JsonSerializer.Deserialize<JsonElement>(json);
return result;
}
internal static string GetDateFormat() => "MM/dd/yyyy hh:mm:ss tt";
}
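GetDefaultJsonElement round-trips a placeholder Description through the source-generated serializer so callers receive a JsonElement rather than a typed instance. A short usage sketch mirroring the call site added to the pcl FileRead later in this changeset:

// Mirrors pcl FileRead.GetExtractResult in this changeset: the extraction path appears to emit
// a single default description element instead of building per-wafer descriptions here.
List<JsonElement> jsonElements = new();
JsonElement jsonElement = Description.GetDefaultJsonElement(this, _Logistics);
jsonElements.Add(jsonElement);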

View File

@ -109,39 +109,38 @@ public class FileRead : Shared.FileRead, IFileRead
private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results = new(string.Empty, Array.Empty<Test>(), Array.Empty<JsonElement>(), new List<FileInfo>());
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
Test[] tests = Array.Empty<Test>();
List<JsonElement> jsonElements = new();
List<FileInfo> fileInfoCollection = new();
_TickOffset ??= 0; // new FileInfo(reportFullPath).LastWriteTime.Ticks - dateTime.Ticks;
_Logistics = new Logistics(this, _TickOffset.Value, reportFullPath, useSplitForMID: true);
fileInfoCollection.Add(_Logistics.FileInfo);
SetFileParameterLotIDToLogisticsMID();
if (_Logistics.FileInfo.Length < _MinFileLength)
results.Item4.Add(_Logistics.FileInfo);
results = new(string.Empty, tests, jsonElements.ToArray(), fileInfoCollection);
else
{
ReadOnlyDictionary<string, string> pages = Convert.PDF(_Logistics, _GhostPCLFileName, _PDFTextStripperFileName, results.Item4);
Run? run = Run.Get(_Logistics, results.Item4, pages);
ReadOnlyDictionary<string, string> pages = Convert.PDF(_Logistics, _GhostPCLFileName, _PDFTextStripperFileName, fileInfoCollection);
Run? run = Run.Get(this, _Logistics, fileInfoCollection, pages);
if (run is null)
throw new Exception(string.Concat("A) No Data - ", dateTime.Ticks));
IProcessData iProcessData = new ProcessData(this, _Logistics, results.Item4, pages, run);
if (iProcessData is not ProcessData processData)
results = new(string.Concat("B) No Data - ", dateTime.Ticks), Array.Empty<Test>(), Array.Empty<JsonElement>(), results.Item4);
results = new(string.Concat("A) No Data - ", dateTime.Ticks), tests, jsonElements.ToArray(), fileInfoCollection);
else
{
string mid;
if (!string.IsNullOrEmpty(processData.Lot) && string.IsNullOrEmpty(processData.Reactor) && string.IsNullOrEmpty(processData.RDS) && string.IsNullOrEmpty(processData.PSN))
mid = processData.Lot;
else if (!string.IsNullOrEmpty(processData.Employee) && string.IsNullOrEmpty(processData.Reactor) && string.IsNullOrEmpty(processData.RDS) && string.IsNullOrEmpty(processData.PSN))
mid = processData.Employee;
Descriptor descriptor = ProcessData.GetDescriptor(run.Header.Id);
if (!string.IsNullOrEmpty(descriptor.Lot) && string.IsNullOrEmpty(descriptor.Reactor) && string.IsNullOrEmpty(descriptor.RDS) && string.IsNullOrEmpty(descriptor.PSN))
mid = descriptor.Lot;
else if (!string.IsNullOrEmpty(descriptor.Employee) && string.IsNullOrEmpty(descriptor.Reactor) && string.IsNullOrEmpty(descriptor.RDS) && string.IsNullOrEmpty(descriptor.PSN))
mid = descriptor.Employee;
else
{
mid = string.Concat(processData.Reactor, "-", processData.RDS, "-", processData.PSN);
mid = Regex.Replace(mid, @"[\\,\/,\:,\*,\?,\"",\<,\>,\|]", "_").Split('\r')[0].Split('\n')[0];
}
mid = string.Concat(descriptor.Reactor, "-", descriptor.RDS, "-", descriptor.PSN);
mid = Regex.Replace(mid, @"[\\,\/,\:,\*,\?,\"",\<,\>,\|]", "_").Split('\r')[0].Split('\n')[0];
SetFileParameterLotID(mid);
_Logistics.Update(mid, processData.Reactor);
if (iProcessData.Details.Count > 0)
results = iProcessData.GetResults(this, _Logistics, results.Item4);
else
results = new(string.Concat("LOGISTICS_1 - C) No Data - ", dateTime.Ticks), Array.Empty<Test>(), Array.Empty<JsonElement>(), results.Item4);
_Logistics.Update(mid, descriptor.Reactor);
JsonElement jsonElement = Description.GetDefaultJsonElement(this, _Logistics);
jsonElements.Add(jsonElement);
results = new(_Logistics.Logistics1[0], tests, jsonElements.ToArray(), fileInfoCollection);
}
}
return results;

View File

@ -1,6 +1,7 @@
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Linq;
using System.Text.Json.Serialization;
namespace Adaptation.FileHandlers.pcl;
@ -10,7 +11,8 @@ namespace Adaptation.FileHandlers.pcl;
public class Header
{
public Header(string date,
public Header(string fileName,
string date,
string recipe,
string id,
ReadOnlyCollection<WaferSummary> waferSummary,
@ -51,6 +53,7 @@ public class Header
string hazeRegionStdDev,
string hazeAverageStdDev)
{
FileName = fileName;
Date = date;
Recipe = recipe;
Id = id;
@ -93,6 +96,7 @@ public class Header
HazeAverageStdDev = hazeAverageStdDev;
}
public string FileName { get; }
public string Date { get; }
public string Recipe { get; }
public string Id { get; }
@ -215,17 +219,39 @@ public class Header
return GetBefore(text, i, "\n", false);
}
internal static Header Get(ReadOnlyDictionary<string, string> pages, Constant constant, string headerFileName)
private static KeyValuePair<string?, string?> GetHeader(ReadOnlyDictionary<string, string> pages, Constant constant)
{
KeyValuePair<string?, string?> result;
string? text;
string? headerFileName = null;
foreach (KeyValuePair<string, string> keyValuePair in pages)
{
if (!pages.TryGetValue(keyValuePair.Key, out text))
throw new Exception();
if (!text.Contains(constant.Statistics))
continue;
headerFileName = keyValuePair.Key;
}
headerFileName ??= pages.Count == 0 ? string.Empty : pages.ElementAt(pages.Count - 1).Key;
if (pages.Count == 0 || !pages.TryGetValue(headerFileName, out text))
text = null;
result = new(headerFileName, text);
return result;
}
internal static Header Get(ReadOnlyDictionary<string, string> pages, Constant constant)
{
Header? result;
string id;
string? text;
string[] segmentsB;
string[] segmentsC;
int[] i = new int[] { 0 };
WaferSummary waferSummary;
List<WaferSummary> collection = new();
if (!pages.TryGetValue(headerFileName, out text))
KeyValuePair<string?, string?> keyValuePair = GetHeader(pages, constant);
string? fileName = keyValuePair.Key;
string? text = keyValuePair.Value;
if (string.IsNullOrEmpty(fileName) || string.IsNullOrEmpty(text))
throw new Exception();
ScanPast(text, i, constant.Date);
string date = GetToEOL(text, i);
@ -270,7 +296,8 @@ public class Header
ScanPast(text, i, constant.StdDev);
string[] preToEol4 = GetToEOL(text, i, false).Trim().Split(' ');
ReadOnlyCollection<string> toEol4 = FixToEolArray(preToEol4);
result = new(date: date,
result = new(fileName: fileName,
date: date,
recipe: recipe,
id: id,
waferSummary: collection.AsReadOnly(),

View File

@ -1,6 +1,5 @@
using Adaptation.Shared;
using Adaptation.Shared.Methods;
using log4net;
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
@ -9,7 +8,6 @@ using System.Diagnostics;
using System.IO;
using System.Linq;
using System.Text.Json;
using System.Text.Json.Serialization;
using System.Text.RegularExpressions;
namespace Adaptation.FileHandlers.pcl;
@ -17,12 +15,6 @@ namespace Adaptation.FileHandlers.pcl;
public class ProcessData : IProcessData
{
private int _I;
private string _Data;
private readonly ILog _Log;
private readonly List<object> _Details;
public string JobID { get; set; }
public string MesEntity { get; set; }
public string AreaCountAvg { get; set; }
@ -71,194 +63,19 @@ public class ProcessData : IProcessData
public string SumOfDefectsStdDev { get; set; }
public string UniqueId { get; set; }
List<object> Shared.Properties.IProcessData.Details => _Details;
List<object> Shared.Properties.IProcessData.Details { get; }
internal ProcessData(IFileRead fileRead, Logistics logistics, List<FileInfo> fileInfoCollection, ReadOnlyDictionary<string, string> pages, Run run)
{
_Details = new List<object>();
_I = 0;
_Data = string.Empty;
JobID = logistics.JobID;
Date = GetDateTime(logistics);
MesEntity = logistics.MesEntity;
_Log = LogManager.GetLogger(typeof(ProcessData));
Parse(fileRead, logistics, fileInfoCollection, pages, run);
}
#nullable enable
string IProcessData.GetCurrentReactor(IFileRead fileRead, Logistics logistics, Dictionary<string, string> reactors) =>
throw new Exception(string.Concat("See ", nameof(ProcessData)));
Tuple<string, Test[], JsonElement[], List<FileInfo>> IProcessData.GetResults(IFileRead fileRead, Logistics logistics, List<FileInfo> fileInfoCollection) =>
throw new NotImplementedException();
private static DateTime GetDateTime(Logistics logistics) =>
logistics.DateTimeFromSequence;
string IProcessData.GetCurrentReactor(IFileRead fileRead, Logistics logistics, Dictionary<string, string> reactors) => throw new Exception(string.Concat("See ", nameof(Parse)));
Tuple<string, Test[], JsonElement[], List<FileInfo>> IProcessData.GetResults(IFileRead fileRead, Logistics logistics, List<FileInfo> fileInfoCollection)
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
List<Test> tests = new();
foreach (object item in _Details)
tests.Add(Test.Tencor);
List<IDescription> descriptions = fileRead.GetDescriptions(fileRead, tests, this);
if (tests.Count != descriptions.Count)
throw new Exception();
for (int i = 0; i < tests.Count; i++)
{
if (descriptions[i] is not Description description)
throw new Exception();
if (description.Test != (int)tests[i])
throw new Exception();
}
FileInfo fileInfo = new($"{logistics.ReportFullPath}.descriptions.json");
List<Description> fileReadDescriptions = (from l in descriptions select (Description)l).ToList();
string json = JsonSerializer.Serialize(fileReadDescriptions, fileReadDescriptions.GetType());
File.WriteAllText(fileInfo.FullName, json);
File.SetLastWriteTime(fileInfo.FullName, logistics.DateTimeFromSequence);
fileInfoCollection.Add(fileInfo);
JsonElement[] jsonElements = JsonSerializer.Deserialize<JsonElement[]>(json) ?? throw new Exception();
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(logistics.Logistics1[0], tests.ToArray(), jsonElements, fileInfoCollection);
return results;
}
/// <summary>
/// Test and fix a data line from the Lot Summary page if there are two values that are merged.
/// </summary>
/// <param name="toEol">data line from Lot Summary</param>
private void FixToEolArray(ref string[] toEol)
{
const int MAX_COLUMNS = 9;
int[] mColumnWidths = new int[MAX_COLUMNS] { 8, 6, 6, 6, 6, 7, 7, 5, 7 };
// is it short at least one data point
if (toEol.Length < MAX_COLUMNS)
{
_Log.Debug($"****FixToEolArray - Starting array:");
_Log.Debug(toEol);
_Log.Debug($"****FixToEolArray - Column widths:");
_Log.Debug(mColumnWidths);
string leftVal, rightVal;
// size up and assign a working list
List<string> toEolList = new(toEol);
if (string.IsNullOrEmpty(toEolList[toEolList.Count - 1]))
toEolList.RemoveAt(toEolList.Count - 1); // removes a null element at end
_Log.Debug($"****FixToEolArray - New toEolList:");
_Log.Debug(toEolList);
for (int i = toEolList.Count; i < MAX_COLUMNS; i++)
toEolList.Insert(0, ""); // insert to top of list
_Log.Debug(toEolList);
// start at the end
for (int i = MAX_COLUMNS - 1; i >= 0; i--)
{
// test for a bad value - does it have too many characters
_Log.Debug($"****FixToEolArray - toEolList[i].Length: {toEolList[i].Length}, mColumnWidths[i]: {mColumnWidths[i]}");
if (toEolList[i].Length > mColumnWidths[i])
{
// split it up into its two parts
leftVal = toEolList[i].Substring(0, toEolList[i].Length - mColumnWidths[i]);
rightVal = toEolList[i].Substring(leftVal.Length);
_Log.Debug($"****FixToEolArray - Split leftVal: {leftVal}");
_Log.Debug($"****FixToEolArray - Split rightVal: {rightVal}");
// insert new value
toEolList[i] = rightVal;
toEolList.Insert(i, leftVal);
if (string.IsNullOrEmpty(toEolList[0]))
toEolList.RemoveAt(0); // removes a null element at end
_Log.Debug($"****FixToEolArray - Fixed toEolList:");
_Log.Debug(toEolList);
}
}
toEol = toEolList.ToArray();
_Log.Debug($"****FixToEolArray - Ending array:");
_Log.Debug(toEol);
}
}
private void ScanPast(string text)
{
int num = _Data.IndexOf(text, _I);
if (num > -1)
_I = num + text.Length;
else
_I = _Data.Length;
}
private string GetBefore(string text)
{
int num = _Data.IndexOf(text, _I);
if (num > -1)
{
string str = _Data.Substring(_I, num - _I);
_I = num + text.Length;
return str.Trim();
}
string str1 = _Data.Substring(_I);
_I = _Data.Length;
return str1.Trim();
}
private string GetBefore(string text, bool trim)
{
if (trim)
return GetBefore(text);
int num = _Data.IndexOf(text, _I);
if (num > -1)
{
string str = _Data.Substring(_I, num - _I);
_I = num + text.Length;
return str;
}
string str1 = _Data.Substring(_I);
_I = _Data.Length;
return str1;
}
private static bool IsNullOrWhiteSpace(string text)
{
for (int index = 0; index < text.Length; ++index)
{
if (!char.IsWhiteSpace(text[index]))
return false;
}
return true;
}
private bool IsBlankLine()
{
int num = _Data.IndexOf("\n", _I);
return IsNullOrWhiteSpace(num > -1 ? _Data.Substring(_I, num - _I) : _Data.Substring(_I));
}
private string GetToEOL() => GetBefore("\n");
private string GetToEOL(bool trim)
{
if (trim)
return GetToEOL();
return GetBefore("\n", false);
}
private string GetToText(string text) =>
_Data.Substring(_I, _Data.IndexOf(text, _I) - _I).Trim();
private string GetToken()
{
while (_I < _Data.Length && IsNullOrWhiteSpace(_Data.Substring(_I, 1)))
++_I;
int j = _I;
while (j < _Data.Length && !IsNullOrWhiteSpace(_Data.Substring(j, 1)))
++j;
string str = _Data.Substring(_I, j - _I);
_I = j;
return str.Trim();
}
private string PeekNextLine()
{
int j = _I;
string result = GetToEOL();
_I = j;
return result;
}
private static (string, string) GetReactorAndRDS(string defaultReactor, string defaultRDS, string text, string formattedText, string[] segments)
{
string rds;
@ -385,320 +202,8 @@ public class ProcessData : IProcessData
return result;
}
private void Set(ILogistics logistics, Run run)
{
string lot;
string rds;
string psn;
string recipe;
string reactor;
string employee;
ScanPast("Recipe ID:");
_ = GetBefore("LotID:");
lot = run.Header.Id;
recipe = run.Header.Recipe;
Descriptor descriptor = GetDescriptor(lot);
lot = descriptor.Lot;
psn = descriptor.PSN;
rds = descriptor.RDS;
reactor = descriptor.Reactor;
employee = descriptor.Employee;
Lot = lot;
PSN = psn;
RDS = rds;
Recipe = recipe;
Reactor = reactor;
Employee = employee;
UniqueId = string.Format("{0}_{1}_{2}", logistics.JobID, lot, Path.GetFileNameWithoutExtension(logistics.ReportFullPath));
}
private void ParseLotSummary(ILogistics logistics, ReadOnlyDictionary<string, string> pages, Run run, string headerFileName, Dictionary<string, List<Detail>> slots)
{
_I = 0;
ParseErrorText = string.Empty;
if (!pages.TryGetValue(headerFileName, out string value))
throw new Exception();
_I = 0;
_Data = value;
ScanPast("Date:");
_ = GetToEOL();
Set(logistics, run);
// determine number of wafers and their slot numbers
_Log.Debug(_Data.Substring(_I));
string slot;
string toEOL;
int slotCount = _Data.Substring(_I).Split('*').Length - 1;
_Log.Debug($"****HeaderFile - Slot Count: {slotCount}.");
for (int i = 0; i < slotCount; i++)
{
ScanPast("*");
toEOL = GetToEOL(false);
slot = string.Concat("*", toEOL.Substring(0, 2));
if (!slots.ContainsKey(slot))
slots.Add(slot, new List<Detail>());
}
_Log.Debug($"****HeaderFile - Slots:");
_Log.Debug(slots);
ScanPast("Min:");
string[] toEol1 = GetToEOL(false).Trim().Split(' ');
_Log.Debug($"****HeaderFile - toEol1 Count: {toEol1.Length}.");
FixToEolArray(ref toEol1);
LPDCountMin = toEol1[0].Trim();
LPDCM2Min = toEol1[1].Trim();
AreaCountMin = toEol1[2].Trim();
AreaTotalMin = toEol1[3].Trim();
ScratchCountMin = toEol1[4].Trim();
ScratchTotalMin = toEol1[5].Trim();
SumOfDefectsMin = toEol1[6].Trim();
HazeRegionMin = toEol1[7].Trim();
HazeAverageMin = toEol1[8].Trim();
ScanPast("Max:");
string[] toEol2 = GetToEOL(false).Trim().Split(' ');
_Log.Debug($"****HeaderFile - toEol2 Count: {toEol2.Length}.");
FixToEolArray(ref toEol2);
LPDCountMax = toEol2[0].Trim();
LPDCM2Max = toEol2[1].Trim();
AreaCountMax = toEol2[2].Trim();
AreaTotalMax = toEol2[3].Trim();
ScratchCountMax = toEol2[4].Trim();
ScratchTotalMax = toEol2[5].Trim();
SumOfDefectsMax = toEol2[6].Trim();
HazeRegionMax = toEol2[7].Trim();
HazeAverageMax = toEol2[8].Trim();
ScanPast("Average:");
string[] toEol3 = GetToEOL(false).Trim().Split(' ');
_Log.Debug($"****HeaderFile - toEol3 Count: {toEol3.Length}.");
FixToEolArray(ref toEol3);
LPDCountAvg = toEol3[0].Trim();
LPDCM2Avg = toEol3[1].Trim();
AreaCountAvg = toEol3[2].Trim();
AreaTotalAvg = toEol3[3].Trim();
ScratchCountAvg = toEol3[4].Trim();
ScratchTotalAvg = toEol3[5].Trim();
SumOfDefectsAvg = toEol3[6].Trim();
HazeRegionAvg = toEol3[7].Trim();
HazeAverageAvg = toEol3[8].Trim();
ScanPast("Std Dev:");
string[] toEol4 = GetToEOL(false).Trim().Split(' ');
_Log.Debug($"****HeaderFile - toEol4 Count: {toEol4.Length}.");
FixToEolArray(ref toEol4);
LPDCountStdDev = toEol4[0].Trim();
LPDCM2StdDev = toEol4[1].Trim();
AreaCountStdDev = toEol4[2].Trim();
AreaTotalStdDev = toEol4[3].Trim();
ScratchCountStdDev = toEol4[4].Trim();
ScratchTotalStdDev = toEol4[5].Trim();
SumOfDefectsStdDev = toEol4[6].Trim();
HazeRegionStdDev = toEol4[7].Trim();
HazeAverageStdDev = toEol4[8].Trim();
}
private Detail ParseWaferSummary(string waferFileName, ReadOnlyDictionary<string, string> pages)
{
Detail result = new() { Data = "*Data*", i = -1, };
_I = 0;
List<string> stringList = new();
result.HeaderUniqueId = UniqueId;
result.Id = 0;
result.Title = null;
if (!pages.TryGetValue(waferFileName, out string value))
throw new Exception();
_I = 0;
_Data = value;
ScanPast("Date:");
result.Date = GetToEOL();
ScanPast("ID#");
result.Slot = GetToEOL();
if (result.Slot.Length > 5)
result.Slot = string.Concat(result.Slot.Substring(0, 5), "... - ***");
//result.Slot = result.Slot.Replace("*", "");
ScanPast("Comments:");
result.Comments = GetToEOL();
ScanPast("Sort:");
result.Sort = GetToEOL();
ScanPast("LPD Count:");
result.LPDCount = GetToEOL();
ScanPast("LPD / cm2:");
result.LPDCM2 = GetToEOL();
while (GetBefore(":").Contains("Bin"))
stringList.Add(GetToEOL());
if (stringList.Count >= 1)
result.Bin1 = stringList[0];
if (stringList.Count >= 2)
result.Bin2 = stringList[1];
if (stringList.Count >= 3)
result.Bin3 = stringList[2];
if (stringList.Count >= 4)
result.Bin4 = stringList[3];
if (stringList.Count >= 5)
result.Bin5 = stringList[4];
if (stringList.Count >= 6)
result.Bin6 = stringList[5];
if (stringList.Count >= 7)
result.Bin7 = stringList[6];
if (stringList.Count >= 8)
result.Bin8 = stringList[7];
result.Mean = GetToEOL();
ScanPast("Std Dev:");
result.StdDev = GetToEOL();
ScanPast("Area Count:");
result.AreaCount = GetToEOL();
ScanPast("Area Total:");
result.AreaTotal = GetToEOL();
ScanPast("Scratch Count:");
result.ScratchCount = GetToEOL();
ScanPast("Scratch Total:");
result.ScratchTotal = GetToEOL();
ScanPast("Sum of All Defects:");
result.SumOfDefects = GetToEOL();
ScanPast("Haze Region:");
result.HazeRegion = GetToEOL();
ScanPast("Haze Average:");
result.HazeAverage = GetToEOL();
ScanPast("Haze Peak:");
result.HazePeak = GetToEOL();
ScanPast("Laser:");
result.Laser = GetBefore("Gain:");
result.Gain = GetBefore("Diameter:");
result.Diameter = GetToEOL();
ScanPast("Thresh:");
result.Thresh = GetBefore("Exclusion:");
result.Exclusion = GetToEOL();
ScanPast("Haze Rng:");
result.HazeRng = GetBefore("Thruput:");
result.Thruput = GetToEOL();
ScanPast("Recipe ID:");
result.Recipe = GetToEOL();
result.UniqueId = string.Format("{0}_{1}", UniqueId, result.Slot.Replace("*", string.Empty).TrimStart('0'));
return result;
}
/// <summary>
/// Convert the raw data file to parsable file format - in this case from PCL to PDF
/// </summary>
/// <param name="sourceFile">source file to be converted to PDF</param>
/// <returns></returns>
private static string ConvertSourceFileToPdf(string ghostPCLFileName, Logistics logistics)
{
string result = Path.ChangeExtension(logistics.ReportFullPath, ".pdf");
if (!File.Exists(result))
{
//string arguments = string.Concat("-i \"", sourceFile, "\" -o \"", result, "\"");
string arguments = string.Concat("-dSAFER -dBATCH -dNOPAUSE -sOutputFile=\"", result, "\" -sDEVICE=pdfwrite \"", logistics.ReportFullPath, "\"");
//Process process = Process.Start(configData.LincPDFCFileName, arguments);
Process process = Process.Start(ghostPCLFileName, arguments);
_ = process.WaitForExit(30000);
if (!File.Exists(result))
throw new Exception("PDF file wasn't created");
}
return result;
}
#nullable enable
private void Parse(IFileRead fileRead, Logistics logistics, List<FileInfo> fileInfoCollection, ReadOnlyDictionary<string, string> pages, Run run)
{
if (fileRead is null)
throw new ArgumentNullException(nameof(fileRead));
List<string> sourceFiles = new();
List<string> missingSlots = new();
Dictionary<string, List<Detail>> slots = new();
List<Tuple<string, string>> pageMapping = new();
_Log.Debug($"****ParseData - Parsing lot summary");
string headerFileName = pages.ElementAt(pages.Count - 1).Key;
ParseLotSummary(logistics, pages, run, headerFileName, slots);
string sourcePath = Path.GetDirectoryName(logistics.ReportFullPath) ?? throw new Exception();
string sourceFileNameWithoutExtension = Path.GetFileNameWithoutExtension(logistics.ReportFullPath);
foreach (FileInfo fileInfo in fileInfoCollection)
sourceFiles.Add(fileInfo.FullName);
fileInfoCollection.Clear();
foreach (KeyValuePair<string, string> keyValuePair in pages)
{
if (keyValuePair.Key == headerFileName)
continue;
if (string.IsNullOrEmpty(keyValuePair.Value.Trim()))
{
pageMapping.Add(new Tuple<string, string>(keyValuePair.Key, string.Empty));
continue;
}
if (!pages.ContainsKey(keyValuePair.Key))
throw new Exception();
Detail dataFile = ParseWaferSummary(keyValuePair.Key, pages);
if (string.IsNullOrEmpty(dataFile.Recipe) || dataFile.Recipe != Recipe)
{
missingSlots.Add(keyValuePair.Key);
pageMapping.Add(new Tuple<string, string>(keyValuePair.Key, string.Empty));
continue;
}
if (!slots.ContainsKey(dataFile.Slot))
{
missingSlots.Add(keyValuePair.Key);
pageMapping.Add(new Tuple<string, string>(keyValuePair.Key, string.Empty));
continue;
}
pageMapping.Add(new Tuple<string, string>(keyValuePair.Key, string.Concat(sourcePath, @"\", sourceFileNameWithoutExtension, "_", dataFile.Slot.Replace('*', 's'), "_data.pdf")));
slots[dataFile.Slot].Add(dataFile);
}
string checkFileName = string.Concat(sourcePath, @"\", sourceFileNameWithoutExtension, "_data.pdf");
if (fileRead.IsEAFHosted && !File.Exists(checkFileName))
{
File.Move(headerFileName, checkFileName);
_ = sourceFiles.Remove(headerFileName);
sourceFiles.Add(checkFileName);
}
checkFileName = string.Empty;
for (int i = pageMapping.Count - 1; i > -1; i--)
{
if (!string.IsNullOrEmpty(pageMapping[i].Item2))
{
checkFileName = pageMapping[i].Item2;
if (fileRead.IsEAFHosted && !File.Exists(checkFileName))
{
File.Move(pageMapping[i].Item1, checkFileName);
_ = sourceFiles.Remove(pageMapping[i].Item1);
sourceFiles.Add(checkFileName);
}
}
else if (!string.IsNullOrEmpty(checkFileName))
{
checkFileName = checkFileName.Replace("_data.pdf", "_image.pdf");
if (fileRead.IsEAFHosted && !File.Exists(checkFileName))
{
File.Move(pageMapping[i].Item1, checkFileName);
_ = sourceFiles.Remove(pageMapping[i].Item1);
sourceFiles.Add(checkFileName);
}
checkFileName = string.Empty;
}
}
foreach (KeyValuePair<string, List<Detail>> keyValuePair in slots)
{
if (keyValuePair.Value.Count == 0 || keyValuePair.Value[0] is null)
missingSlots.Add(string.Concat("Slot ", keyValuePair.Key, ") is missing."));
else
{
foreach (Detail data in keyValuePair.Value)
_Details.Add(data);
}
}
if (missingSlots.Count != 0)
{
string missingSlotsFile = string.Concat(sourcePath, @"\", sourceFileNameWithoutExtension, "_MissingSlots.txt");
File.WriteAllLines(missingSlotsFile, missingSlots);
sourceFiles.Add(missingSlotsFile);
}
//for (int i = 0; i < dataFiles.Count; i++)
// dataFiles[i].Date = DateTime.Parse(dataFiles[i].Date).ToString();
foreach (string sourceFile in sourceFiles)
fileInfoCollection.Add(new FileInfo(sourceFile));
fileInfoCollection.Add(logistics.FileInfo);
}
internal static List<Description> GetDescriptions(JsonElement[] jsonElements)
{
List<Description> results = new();

View File

@ -1,4 +1,5 @@
using Adaptation.Shared;
using Adaptation.Shared.Methods;
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
@ -24,13 +25,13 @@ internal class Run
Wafers = wafers;
}
private static ReadOnlyCollection<Wafer> GetLastWaferForEachSlot(ReadOnlyDictionary<string, string> pages, Constant constant, string headerFileName, Header header)
private static ReadOnlyCollection<Wafer> GetLastWaferForEachSlot(ReadOnlyDictionary<string, string> pages, Constant constant, Header header)
{
List<Wafer> results = new();
string id;
Wafer wafer;
ReadOnlyCollection<Wafer>? wafers;
ReadOnlyDictionary<string, ReadOnlyCollection<Wafer>> keyValuePairs = Wafer.Get(pages, constant, headerFileName);
ReadOnlyDictionary<string, ReadOnlyCollection<Wafer>> keyValuePairs = Wafer.Get(pages, constant);
ReadOnlyCollection<string> waferIds = GetWaferIds(header);
for (int i = 0; i < waferIds.Count; i++)
{
@ -111,6 +112,99 @@ internal class Run
File.WriteAllText($"{logistics.ReportFullPath}.csv", string.Join(Environment.NewLine, lines));
}
private static void Rename(IFileRead fileRead, Logistics logistics, List<FileInfo> fileInfoCollection, ReadOnlyDictionary<string, string> pages, Run run)
{
string slotName;
string fileName;
string? pageText;
string checkFileName;
List<string> sourceFiles = new();
List<string> missingSlots = new();
Dictionary<string, List<Wafer>> slots = new();
List<Tuple<string, string>> pageMapping = new();
string sourcePath = Path.GetDirectoryName(logistics.ReportFullPath) ?? throw new Exception();
string sourceFileNameWithoutExtension = Path.GetFileNameWithoutExtension(logistics.ReportFullPath);
foreach (FileInfo fileInfo in fileInfoCollection)
sourceFiles.Add(fileInfo.FullName);
fileInfoCollection.Clear();
fileInfoCollection.Add(logistics.FileInfo);
checkFileName = string.Concat(sourcePath, @"\", sourceFileNameWithoutExtension, "_data.pdf");
if (fileRead.IsEAFHosted && !File.Exists(checkFileName))
{
File.Move(run.Header.FileName, checkFileName);
_ = sourceFiles.Remove(run.Header.FileName);
sourceFiles.Add(checkFileName);
}
foreach (Wafer wafer in run.Wafers)
{
if (!pages.TryGetValue(wafer.FileName, out pageText))
continue;
if (string.IsNullOrEmpty(pageText.Trim()))
{
pageMapping.Add(new Tuple<string, string>(wafer.FileName, string.Empty));
continue;
}
if (run.Header.AreaCountAvg is null)
{
if (string.IsNullOrEmpty(wafer.Recipe) || wafer.Recipe != run.Header.Recipe)
{
missingSlots.Add(string.Concat("Slot ", wafer.Id, ") is missing."));
pageMapping.Add(new Tuple<string, string>(wafer.FileName, string.Empty));
continue;
}
if (!slots.ContainsKey(wafer.Id))
{
missingSlots.Add(wafer.FileName);
pageMapping.Add(new Tuple<string, string>(wafer.FileName, string.Empty));
continue;
}
slotName = wafer.Id.Contains('*') ? wafer.Id.Replace('*', 's') : $"s{wafer.Id}";
fileName = string.Concat(sourcePath, @"\", sourceFileNameWithoutExtension, "_", slotName, "_data.pdf");
pageMapping.Add(new Tuple<string, string>(wafer.FileName, fileName));
slots[wafer.Id].Add(wafer);
}
}
checkFileName = string.Empty;
for (int i = pageMapping.Count - 1; i > -1; i--)
{
if (!string.IsNullOrEmpty(pageMapping[i].Item2))
{
checkFileName = pageMapping[i].Item2;
if (fileRead.IsEAFHosted && !File.Exists(checkFileName))
{
File.Move(pageMapping[i].Item1, checkFileName);
_ = sourceFiles.Remove(pageMapping[i].Item1);
sourceFiles.Add(checkFileName);
}
}
else if (!string.IsNullOrEmpty(checkFileName))
{
checkFileName = checkFileName.Replace("_data.pdf", "_image.pdf");
if (fileRead.IsEAFHosted && !File.Exists(checkFileName))
{
File.Move(pageMapping[i].Item1, checkFileName);
_ = sourceFiles.Remove(pageMapping[i].Item1);
sourceFiles.Add(checkFileName);
}
checkFileName = string.Empty;
}
}
foreach (KeyValuePair<string, List<Wafer>> keyValuePair in slots)
{
if (keyValuePair.Value.Count == 0 || keyValuePair.Value[0] is null)
missingSlots.Add(string.Concat("Slot ", keyValuePair.Key, ") is missing."));
}
if (missingSlots.Count != 0)
{
string missingSlotsFile = string.Concat(sourcePath, @"\", sourceFileNameWithoutExtension, "_MissingSlots.txt");
File.WriteAllLines(missingSlotsFile, missingSlots);
sourceFiles.Add(missingSlotsFile);
}
foreach (string sourceFile in sourceFiles)
fileInfoCollection.Add(new FileInfo(sourceFile));
fileInfoCollection.Add(logistics.FileInfo);
}
private static ReadOnlyCollection<string> GetWaferIds(Header header)
{
List<string> results = new();
@ -119,18 +213,18 @@ internal class Run
return results.AsReadOnly();
}
internal static Run? Get(Logistics logistics, List<FileInfo> fileInfoCollection, ReadOnlyDictionary<string, string> pages)
internal static Run? Get(IFileRead fileRead, Logistics logistics, List<FileInfo> fileInfoCollection, ReadOnlyDictionary<string, string> pages)
{
Run? result;
Constant constant = new();
string headerFileName = pages.ElementAt(pages.Count - 1).Key;
Header? header = Header.Get(pages, constant, headerFileName);
Header? header = Header.Get(pages, constant);
if (header is null)
result = null;
else
{
ReadOnlyCollection<Wafer> wafers = GetLastWaferForEachSlot(pages, constant, headerFileName, header);
ReadOnlyCollection<Wafer> wafers = GetLastWaferForEachSlot(pages, constant, header);
result = new(header, wafers);
Rename(fileRead, logistics, fileInfoCollection, pages, result);
WriteJson(logistics, fileInfoCollection, result);
WriteCommaSeparatedValues(logistics, result);
}

View File

@ -11,8 +11,9 @@ namespace Adaptation.FileHandlers.pcl;
public class Wafer
{
public Wafer(string date, string id, string comments, string sort, string lPDCount, string lPDCM2, string bin1, string bin2, string bin3, string bin4, string bin5, string bin6, string bin7, string bin8, string mean, string stdDev, string areaCount, string areaTotal, string scratchCount, string scratchTotal, string sumOfDefects, string hazeRegion, string hazeAverage, string hazePeak, string laser, string gain, string diameter, string thresh, string exclusion, string hazeRng, string thruput, string recipe)
public Wafer(string fileName, string date, string id, string comments, string sort, string lPDCount, string lPDCM2, string bin1, string bin2, string bin3, string bin4, string bin5, string bin6, string bin7, string bin8, string mean, string stdDev, string areaCount, string areaTotal, string scratchCount, string scratchTotal, string sumOfDefects, string hazeRegion, string hazeAverage, string hazePeak, string laser, string gain, string diameter, string thresh, string exclusion, string hazeRng, string thruput, string recipe)
{
FileName = fileName;
Date = date;
Id = id;
Comments = comments;
@ -48,7 +49,8 @@ public class Wafer
}
internal static Wafer Get(string id) =>
new(date: string.Empty,
new(fileName: string.Empty,
date: string.Empty,
id: id,
comments: string.Empty,
sort: string.Empty,
@ -81,6 +83,7 @@ public class Wafer
thruput: string.Empty,
recipe: string.Empty);
public string FileName { get; }
public string Date { get; }
public string Id { get; }
public string Comments { get; }
@ -114,7 +117,7 @@ public class Wafer
public string Thruput { get; }
public string Recipe { get; }
internal static ReadOnlyDictionary<string, ReadOnlyCollection<Wafer>> Get(ReadOnlyDictionary<string, string> pages, Constant constant, string headerFileName)
internal static ReadOnlyDictionary<string, ReadOnlyCollection<Wafer>> Get(ReadOnlyDictionary<string, string> pages, Constant constant)
{
Dictionary<string, ReadOnlyCollection<Wafer>> results = new();
Wafer wafer;
@ -124,15 +127,11 @@ public class Wafer
Dictionary<string, List<Wafer>> keyValuePairs = new();
foreach (KeyValuePair<string, string> keyValuePair in pages)
{
if (keyValuePair.Key == headerFileName)
continue;
if (!pages.ContainsKey(keyValuePair.Key))
throw new Exception();
i[0] = 0;
stringList = new();
if (!pages.TryGetValue(keyValuePair.Key, out text))
throw new Exception();
if (string.IsNullOrEmpty(text) || !text.Contains(constant.Id))
if (string.IsNullOrEmpty(text) || !text.Contains(constant.Id) || text.Contains(constant.Statistics) || text.Contains(constant.DatabaseId))
continue;
Header.ScanPast(text, i, constant.Date);
string date = Header.GetToEOL(text, i);
@ -190,7 +189,8 @@ public class Wafer
string thruput = Header.GetToEOL(text, i);
Header.ScanPast(text, i, "Recipe ID:");
string recipe = Header.GetToEOL(text, i);
wafer = new(date: date,
wafer = new(fileName: keyValuePair.Key,
date: date,
id: id,
comments: comments,
sort: sort,

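A minimal standalone sketch of the tightened page filter above. The marker strings for Constant.Id, Constant.Statistics and Constant.DatabaseId are assumptions for illustration only; this diff shows them referenced through Constant but not their values.

using System;
using System.Collections.Generic;

const string id = "ID#";                    // assumed value of Constant.Id
const string statistics = "Statistics";     // assumed value of Constant.Statistics
const string databaseId = "DatabaseId";     // assumed value of Constant.DatabaseId
Dictionary<string, string> pages = new()
{
    ["run_1.txt"] = "Date: ... ID# 123 ...",        // a wafer page
    ["run_2.txt"] = "Statistics ... ID# 123 ...",   // a summary page that is now skipped
};
foreach (KeyValuePair<string, string> page in pages)
{
    string text = page.Value;
    if (string.IsNullOrEmpty(text) || !text.Contains(id) || text.Contains(statistics) || text.Contains(databaseId))
        continue;                                    // only true wafer pages reach the parser
    Console.WriteLine($"parse {page.Key}");
}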
View File

@ -1,13 +0,0 @@
namespace Adaptation.FileHandlers.pdsf;
internal class Constant
{
public string Id { get; } = "ID#";
public string Max { get; } = "Max:";
public string Min { get; } = "Min:";
public string Date { get; } = "Date:";
public string StdDev { get; } = "Std Dev:";
public string Average { get; } = "Average:";
}

View File

@ -1,150 +0,0 @@
using Adaptation.Shared;
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Diagnostics;
using System.IO;
using System.Linq;
namespace Adaptation.FileHandlers.pdsf;
internal class Convert
{
/// <summary>
/// Convert the raw data file to parsable file format - in this case from PCL to PDF
/// </summary>
/// <param name="sourceFile">source file to be converted to PDF</param>
/// <returns></returns>
private static string ConvertSourceFileToPdf(string ghostPCLFileName, Logistics logistics)
{
string result = Path.ChangeExtension(logistics.ReportFullPath, ".pdf");
if (!File.Exists(result))
{
//string arguments = string.Concat("-i \"", sourceFile, "\" -o \"", result, "\"");
string arguments = string.Concat("-dSAFER -dBATCH -dNOPAUSE -sOutputFile=\"", result, "\" -sDEVICE=pdfwrite \"", logistics.ReportFullPath, "\"");
//Process process = Process.Start(configData.LincPDFCFileName, arguments);
Process process = Process.Start(ghostPCLFileName, arguments);
_ = process.WaitForExit(30000);
if (!File.Exists(result))
throw new Exception("PDF file wasn't created");
}
return result;
}
private static Dictionary<string, string> PortableDocumentFormatSplit(string pdfTextStripperFileName, string sourcePath, string sourceFileNamePdf)
{
Dictionary<string, string> results = new();
ProcessStartInfo processStartInfo = new(pdfTextStripperFileName, $"s \"{sourceFileNamePdf}\"")
{
UseShellExecute = false,
RedirectStandardError = true,
RedirectStandardOutput = true,
};
Process process = Process.Start(processStartInfo);
_ = process.WaitForExit(30000);
string text;
string checkFile;
string[] pdfFiles = Directory.GetFiles(sourcePath, "*.pdf", SearchOption.TopDirectoryOnly);
string[] textFiles = Directory.GetFiles(sourcePath, "*.txt", SearchOption.TopDirectoryOnly);
foreach (string pdfFile in pdfFiles)
{
if (pdfFile == sourceFileNamePdf)
continue;
checkFile = Path.ChangeExtension(pdfFile, ".txt");
if (!textFiles.Contains(checkFile))
continue;
text = File.ReadAllText(checkFile);
results.Add(pdfFile, text);
}
return results;
}
internal static ReadOnlyDictionary<string, string> PDF(Logistics logistics, string ghostPCLFileName, string pdfTextStripperFileName, List<FileInfo> fileInfoCollection)
{
Dictionary<string, string> results = new();
object item;
string pageText;
string pagePDFFile;
string pageTextFile;
List<string> sourceFiles = new();
string sourceFileNamePdf = ConvertSourceFileToPdf(ghostPCLFileName, logistics);
sourceFiles.Add(sourceFileNamePdf);
string sourcePath = Path.GetDirectoryName(logistics.ReportFullPath) ?? throw new Exception();
string sourceFileNameWithoutExtension = Path.GetFileNameWithoutExtension(logistics.ReportFullPath);
string[] txtFiles = Directory.GetFiles(sourcePath, $"{sourceFileNameWithoutExtension}_*.txt", SearchOption.TopDirectoryOnly);
if (txtFiles.Length != 0)
{
txtFiles = (from l in txtFiles orderby l.Length, l select l).ToArray();
foreach (string txtFile in txtFiles)
{
sourceFiles.Add(txtFile);
pageText = File.ReadAllText(txtFile);
pagePDFFile = Path.ChangeExtension(txtFile, ".pdf");
if (!File.Exists(pagePDFFile))
continue;
results.Add(pagePDFFile, pageText);
}
}
if (results.Count == 0)
{
try
{
java.io.File file = new(sourceFileNamePdf);
org.apache.pdfbox.util.Splitter splitter = new();
org.apache.pdfbox.pdmodel.PDDocument pdDocument = org.apache.pdfbox.pdmodel.PDDocument.load(file);
java.util.List list = splitter.split(pdDocument);
java.util.ListIterator iterator = list.listIterator();
org.apache.pdfbox.util.PDFTextStripper dataStripper = new();
for (short i = 1; i < short.MaxValue; i++)
{
if (!iterator.hasNext())
break;
item = iterator.next();
pagePDFFile = string.Concat(sourcePath, @"\", sourceFileNameWithoutExtension, "_", i, ".pdf");
pageTextFile = Path.ChangeExtension(pagePDFFile, ".txt");
if (File.Exists(pageTextFile))
{
pageText = File.ReadAllText(pageTextFile);
sourceFiles.Add(pageTextFile);
if (item is not org.apache.pdfbox.pdmodel.PDDocument pd)
continue;
pd.close();
}
else if (File.Exists(pagePDFFile))
{
org.apache.pdfbox.pdmodel.PDDocument document = org.apache.pdfbox.pdmodel.PDDocument.load(pagePDFFile);
pageText = dataStripper.getText(document);
document.close();
sourceFiles.Add(pagePDFFile);
if (item is not org.apache.pdfbox.pdmodel.PDDocument pd)
continue;
pd.close();
}
else
{
if (item is not org.apache.pdfbox.pdmodel.PDDocument pd)
continue;
pageText = dataStripper.getText(pd);
pd.save(pagePDFFile);
sourceFiles.Add(pagePDFFile);
pd.close();
File.WriteAllText(pageTextFile, pageText);
sourceFiles.Add(pageTextFile);
}
results.Add(pagePDFFile, pageText);
}
pdDocument.close();
}
catch (MissingMethodException)
{
if (results.Count == 0)
results = PortableDocumentFormatSplit(pdfTextStripperFileName, sourcePath, sourceFileNamePdf);
}
}
foreach (string sourceFile in sourceFiles)
fileInfoCollection.Add(new FileInfo(sourceFile));
return new(results);
}
}

View File

@ -1,136 +0,0 @@
using Adaptation.Eaf.Management.ConfigurationData.CellAutomation;
using Adaptation.Ifx.Eaf.EquipmentConnector.File.Configuration;
using Adaptation.Shared;
using Adaptation.Shared.Duplicator;
using Adaptation.Shared.Methods;
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.IO;
using System.Text.Json;
namespace Adaptation.FileHandlers.pdsf;
public class FileRead : Shared.FileRead, IFileRead
{
private readonly string _GhostPCLFileName;
private readonly string _PDFTextStripperFileName;
public FileRead(ISMTP smtp, Dictionary<string, string> fileParameter, string cellInstanceName, int? connectionCount, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList<ModelObjectParameterDefinition> modelObjectParameters, string equipmentDictionaryName, Dictionary<string, List<long>> dummyRuns, Dictionary<long, List<Shared.Metrology.WS.Results>> staticRuns, bool useCyclicalForDescription, bool isEAFHosted) :
base(new Description(), false, smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null)
{
_MinFileLength = 15;
_NullData = string.Empty;
_Logistics = new(this);
if (_FileParameter is null)
throw new Exception(cellInstanceConnectionName);
if (_ModelObjectParameterDefinitions is null)
throw new Exception(cellInstanceConnectionName);
if (_IsDuplicator)
throw new Exception(cellInstanceConnectionName);
_GhostPCLFileName = Path.Combine(AppContext.BaseDirectory, "gpcl6win64.exe");
if (!File.Exists(_GhostPCLFileName))
throw new Exception("Ghost PCL FileName doesn't Exist!");
_PDFTextStripperFileName = Path.Combine(AppContext.BaseDirectory, "PDF-Text-Stripper.exe");
if (!File.Exists(_PDFTextStripperFileName))
throw new Exception("PDF-Text-Stripper FileName doesn't Exist!");
if (_IsEAFHosted)
NestExistingFiles(_FileConnectorConfiguration);
}
void IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, Exception exception) => Move(extractResults);
void IFileRead.WaitForThread() => WaitForThread(thread: null, threadExceptions: null);
string IFileRead.GetEventDescription()
{
string result = _Description.GetEventDescription();
return result;
}
List<string> IFileRead.GetHeaderNames()
{
List<string> results = _Description.GetHeaderNames();
return results;
}
string[] IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, string to, string from, string resolvedFileLocation, Exception exception)
{
string[] results = Move(extractResults, to, from, resolvedFileLocation, exception);
return results;
}
JsonProperty[] IFileRead.GetDefault()
{
JsonProperty[] results = _Description.GetDefault(this, _Logistics);
return results;
}
Dictionary<string, string> IFileRead.GetDisplayNamesJsonElement()
{
Dictionary<string, string> results = _Description.GetDisplayNamesJsonElement(this);
return results;
}
List<IDescription> IFileRead.GetDescriptions(IFileRead fileRead, List<Test> tests, IProcessData processData)
{
List<IDescription> results = _Description.GetDescriptions(fileRead, _Logistics, tests, processData);
return results;
}
Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.GetExtractResult(string reportFullPath, string eventName)
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
if (string.IsNullOrEmpty(eventName))
throw new Exception();
_ReportFullPath = reportFullPath;
DateTime dateTime = DateTime.Now;
results = GetExtractResult(reportFullPath, dateTime);
if (results.Item3 is null)
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(results.Item1, Array.Empty<Test>(), JsonSerializer.Deserialize<JsonElement[]>("[]"), results.Item4);
if (results.Item3.Length > 0 && _IsEAFHosted)
WritePDSF(this, results.Item3);
UpdateLastTicksDuration(DateTime.Now.Ticks - dateTime.Ticks);
return results;
}
Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.ReExtract()
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
List<string> headerNames = _Description.GetHeaderNames();
Dictionary<string, string> keyValuePairs = _Description.GetDisplayNamesJsonElement(this);
results = ReExtract(this, headerNames, keyValuePairs);
return results;
}
#nullable enable
private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
string result;
JsonElement[] jsonElements;
Test[] tests = Array.Empty<Test>();
List<FileInfo> fileInfoCollection = new();
ProcessDataStandardFormat processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath);
_Logistics = new Logistics(reportFullPath, processDataStandardFormat);
SetFileParameterLotIDToLogisticsMID();
ReadOnlyDictionary<string, string> pages = Convert.PDF(_Logistics, _GhostPCLFileName, _PDFTextStripperFileName, fileInfoCollection);
Run? run = Run.Get(_Logistics, fileInfoCollection, pages);
if (run is null)
{
jsonElements = Array.Empty<JsonElement>();
result = string.Concat("A) No Data - ", dateTime.Ticks);
results = new(result, tests, jsonElements, fileInfoCollection);
}
else
{
result = string.Join(Environment.NewLine, _Logistics.Logistics1);
jsonElements = _IsEAFHosted ? Array.Empty<JsonElement>() : ProcessDataStandardFormat.GetArray(processDataStandardFormat);
results = new(result, tests, jsonElements, fileInfoCollection);
}
return results;
}
}

View File

@ -1,322 +0,0 @@
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Text.Json.Serialization;
namespace Adaptation.FileHandlers.pdsf;
#nullable enable
public class Header
{
public Header(string date,
string recipe,
string id,
ReadOnlyCollection<WaferSummary> waferSummary,
string lPDCountMin,
string lPDCM2Min,
string areaCountMin,
string areaTotalMin,
string scratchCountMin,
string scratchTotalMin,
string sumOfDefectsMin,
string hazeRegionMin,
string hazeAverageMin,
string lPDCountMax,
string lPDCM2Max,
string areaCountMax,
string areaTotalMax,
string scratchCountMax,
string scratchTotalMax,
string sumOfDefectsMax,
string hazeRegionMax,
string hazeAverageMax,
string lPDCountAvg,
string lPDCM2Avg,
string areaCountAvg,
string areaTotalAvg,
string scratchCountAvg,
string scratchTotalAvg,
string sumOfDefectsAvg,
string hazeRegionAvg,
string hazeAverageAvg,
string lPDCountStdDev,
string lPDCM2StdDev,
string areaCountStdDev,
string areaTotalStdDev,
string scratchCountStdDev,
string scratchTotalStdDev,
string sumOfDefectsStdDev,
string hazeRegionStdDev,
string hazeAverageStdDev)
{
Date = date;
Recipe = recipe;
Id = id;
WaferSummary = waferSummary;
LPDCountMin = lPDCountMin;
LPDCM2Min = lPDCM2Min;
AreaCountMin = areaCountMin;
AreaTotalMin = areaTotalMin;
ScratchCountMin = scratchCountMin;
ScratchTotalMin = scratchTotalMin;
SumOfDefectsMin = sumOfDefectsMin;
HazeRegionMin = hazeRegionMin;
HazeAverageMin = hazeAverageMin;
LPDCountMax = lPDCountMax;
LPDCM2Max = lPDCM2Max;
AreaCountMax = areaCountMax;
AreaTotalMax = areaTotalMax;
ScratchCountMax = scratchCountMax;
ScratchTotalMax = scratchTotalMax;
SumOfDefectsMax = sumOfDefectsMax;
HazeRegionMax = hazeRegionMax;
HazeAverageMax = hazeAverageMax;
LPDCountAvg = lPDCountAvg;
LPDCM2Avg = lPDCM2Avg;
AreaCountAvg = areaCountAvg;
AreaTotalAvg = areaTotalAvg;
ScratchCountAvg = scratchCountAvg;
ScratchTotalAvg = scratchTotalAvg;
SumOfDefectsAvg = sumOfDefectsAvg;
HazeRegionAvg = hazeRegionAvg;
HazeAverageAvg = hazeAverageAvg;
LPDCountStdDev = lPDCountStdDev;
LPDCM2StdDev = lPDCM2StdDev;
AreaCountStdDev = areaCountStdDev;
AreaTotalStdDev = areaTotalStdDev;
ScratchCountStdDev = scratchCountStdDev;
ScratchTotalStdDev = scratchTotalStdDev;
SumOfDefectsStdDev = sumOfDefectsStdDev;
HazeRegionStdDev = hazeRegionStdDev;
HazeAverageStdDev = hazeAverageStdDev;
}
public string Date { get; }
public string Recipe { get; }
public string Id { get; }
public ReadOnlyCollection<WaferSummary> WaferSummary { get; }
public string LPDCountMin { get; }
public string LPDCM2Min { get; }
public string AreaCountMin { get; }
public string AreaTotalMin { get; }
public string ScratchCountMin { get; }
public string ScratchTotalMin { get; }
public string SumOfDefectsMin { get; }
public string HazeRegionMin { get; }
public string HazeAverageMin { get; }
public string LPDCountMax { get; }
public string LPDCM2Max { get; }
public string AreaCountMax { get; }
public string AreaTotalMax { get; }
public string ScratchCountMax { get; }
public string ScratchTotalMax { get; }
public string SumOfDefectsMax { get; }
public string HazeRegionMax { get; }
public string HazeAverageMax { get; }
public string LPDCountAvg { get; }
public string LPDCM2Avg { get; }
public string AreaCountAvg { get; }
public string AreaTotalAvg { get; }
public string ScratchCountAvg { get; }
public string ScratchTotalAvg { get; }
public string SumOfDefectsAvg { get; }
public string HazeRegionAvg { get; }
public string HazeAverageAvg { get; }
public string LPDCountStdDev { get; }
public string LPDCM2StdDev { get; }
public string AreaCountStdDev { get; }
public string AreaTotalStdDev { get; }
public string ScratchCountStdDev { get; }
public string ScratchTotalStdDev { get; }
public string SumOfDefectsStdDev { get; }
public string HazeRegionStdDev { get; }
public string HazeAverageStdDev { get; }
private static ReadOnlyCollection<string> FixToEolArray(string[] toEol)
{
List<string> results = new();
const int MAX_COLUMNS = 9;
if (toEol.Length >= MAX_COLUMNS)
results.AddRange(toEol);
else
{
string leftVal, rightVal;
List<string> toEolList = new(toEol);
int[] mColumnWidths = new int[MAX_COLUMNS] { 8, 6, 6, 6, 6, 7, 7, 5, 7 };
if (string.IsNullOrEmpty(toEolList[toEolList.Count - 1]))
toEolList.RemoveAt(toEolList.Count - 1);
for (int i = toEolList.Count; i < MAX_COLUMNS; i++)
toEolList.Insert(0, "");
for (int i = MAX_COLUMNS - 1; i >= 0; i--)
{
if (toEolList[i].Length > mColumnWidths[i])
{
leftVal = toEolList[i].Substring(0, toEolList[i].Length - mColumnWidths[i]);
rightVal = toEolList[i].Substring(leftVal.Length);
toEolList[i] = rightVal;
toEolList.Insert(i, leftVal);
if (string.IsNullOrEmpty(toEolList[0]))
toEolList.RemoveAt(0);
}
}
results.AddRange(toEolList);
}
return results.AsReadOnly();
}
internal static void ScanPast(string text, int[] i, string search)
{
int num = text.IndexOf(search, i[0]);
if (num > -1)
i[0] = num + search.Length;
else
i[0] = text.Length;
}
internal static string GetBefore(string text, int[] i, string search)
{
int num = text.IndexOf(search, i[0]);
if (num > -1)
{
string str = text.Substring(i[0], num - i[0]);
i[0] = num + search.Length;
return str.Trim();
}
string str1 = text.Substring(i[0]);
i[0] = text.Length;
return str1.Trim();
}
private static string GetBefore(string text, int[] i, string search, bool trim)
{
if (trim)
return GetBefore(text, i, search);
int num = text.IndexOf(search, i[0]);
if (num > -1)
{
string str = text.Substring(i[0], num - i[0]);
i[0] = num + search.Length;
return str;
}
string str1 = text.Substring(i[0]);
i[0] = text.Length;
return str1;
}
internal static string GetToEOL(string text, int[] i) =>
GetBefore(text, i, "\n");
private static string GetToEOL(string text, int[] i, bool trim)
{
if (trim)
return GetToEOL(text, i);
return GetBefore(text, i, "\n", false);
}
internal static Header Get(ReadOnlyDictionary<string, string> pages, Constant constant, string headerFileName)
{
Header? result;
string id;
string? text;
string[] segmentsB;
string[] segmentsC;
int[] i = new int[] { 0 };
WaferSummary waferSummary;
List<WaferSummary> collection = new();
if (!pages.TryGetValue(headerFileName, out text))
throw new Exception();
ScanPast(text, i, constant.Date);
string date = GetToEOL(text, i);
ScanPast(text, i, "Recipe ID:");
string recipe = GetBefore(text, i, "LotID:");
recipe = recipe.Replace(";", "");
if (text.Contains("[]"))
id = GetBefore(text, i, "[]");
else if (text.Contains("[7]"))
id = GetBefore(text, i, "[7]");
else
id = GetBefore(text, i, "[");
ScanPast(text, i, "*");
string[] segments = text.Substring(i[0]).Split('*');
string[] split = new string[] { Environment.NewLine };
foreach (string segment in segments)
{
segmentsB = segment.Split(split, StringSplitOptions.None);
segmentsC = segmentsB[0].Split(' ');
waferSummary = new(id: segmentsC.Length < 1 ? string.Empty : segmentsC[0].Trim(),
lPDCount: segmentsC.Length < 2 ? string.Empty : segmentsC[1].Trim(),
lPDCM2: segmentsC.Length < 3 ? string.Empty : segmentsC[2].Trim(),
areaCount: segmentsC.Length < 4 ? string.Empty : segmentsC[3].Trim(),
areaTotal: segmentsC.Length < 5 ? string.Empty : segmentsC[4].Trim(),
scratchCount: segmentsC.Length < 6 ? string.Empty : segmentsC[5].Trim(),
scratchTotal: segmentsC.Length < 7 ? string.Empty : segmentsC[6].Trim(),
sumOfDefects: segmentsC.Length < 8 ? string.Empty : segmentsC[7].Trim(),
hazeRegion: segmentsC.Length < 9 ? string.Empty : segmentsC[8].Trim(),
hazeAverage: segmentsC.Length < 10 ? string.Empty : segmentsC[9].Trim(),
grade: segmentsC.Length < 11 ? string.Empty : segmentsC[10].Trim());
collection.Add(waferSummary);
}
ScanPast(text, i, constant.Min);
string[] preToEol1 = GetToEOL(text, i, false).Trim().Split(' ');
ReadOnlyCollection<string> toEol1 = FixToEolArray(preToEol1);
ScanPast(text, i, constant.Max);
string[] preToEol2 = GetToEOL(text, i, false).Trim().Split(' ');
ReadOnlyCollection<string> toEol2 = FixToEolArray(preToEol2);
ScanPast(text, i, constant.Average);
string[] preToEol3 = GetToEOL(text, i, false).Trim().Split(' ');
ReadOnlyCollection<string> toEol3 = FixToEolArray(preToEol3);
ScanPast(text, i, constant.StdDev);
string[] preToEol4 = GetToEOL(text, i, false).Trim().Split(' ');
ReadOnlyCollection<string> toEol4 = FixToEolArray(preToEol4);
result = new(date: date,
recipe: recipe,
id: id,
waferSummary: collection.AsReadOnly(),
lPDCountMin: toEol1[0].Trim(),
lPDCM2Min: toEol1[1].Trim(),
areaCountMin: toEol1[2].Trim(),
areaTotalMin: toEol1[3].Trim(),
scratchCountMin: toEol1[4].Trim(),
scratchTotalMin: toEol1[5].Trim(),
sumOfDefectsMin: toEol1[6].Trim(),
hazeRegionMin: toEol1[7].Trim(),
hazeAverageMin: toEol1[8].Trim(),
lPDCountMax: toEol2[0].Trim(),
lPDCM2Max: toEol2[1].Trim(),
areaCountMax: toEol2[2].Trim(),
areaTotalMax: toEol2[3].Trim(),
scratchCountMax: toEol2[4].Trim(),
scratchTotalMax: toEol2[5].Trim(),
sumOfDefectsMax: toEol2[6].Trim(),
hazeRegionMax: toEol2[7].Trim(),
hazeAverageMax: toEol2[8].Trim(),
lPDCountAvg: toEol3[0].Trim(),
lPDCM2Avg: toEol3[1].Trim(),
areaCountAvg: toEol3[2].Trim(),
areaTotalAvg: toEol3[3].Trim(),
scratchCountAvg: toEol3[4].Trim(),
scratchTotalAvg: toEol3[5].Trim(),
sumOfDefectsAvg: toEol3[6].Trim(),
hazeRegionAvg: toEol3[7].Trim(),
hazeAverageAvg: toEol3[8].Trim(),
lPDCountStdDev: toEol4[0].Trim(),
lPDCM2StdDev: toEol4[1].Trim(),
areaCountStdDev: toEol4[2].Trim(),
areaTotalStdDev: toEol4[3].Trim(),
scratchCountStdDev: toEol4[4].Trim(),
scratchTotalStdDev: toEol4[5].Trim(),
sumOfDefectsStdDev: toEol4[6].Trim(),
hazeRegionStdDev: toEol4[7].Trim(),
hazeAverageStdDev: toEol4[8].Trim());
return result;
}
}
[JsonSourceGenerationOptions(WriteIndented = true)]
[JsonSerializable(typeof(Header))]
internal partial class HeaderSourceGenerationContext : JsonSerializerContext
{
}

View File

@ -1,193 +0,0 @@
using System.Text.Json.Serialization;
namespace Adaptation.FileHandlers.pdsf;
#nullable enable
internal class Row
{
public Row(Run run, int i)
{
Index = i;
//
Date = run.Header.Date;
Recipe = run.Header.Recipe;
Id = run.Header.Id;
//
WaferId = run.Header.WaferSummary[i].Id;
LPDCount = run.Header.WaferSummary[i].LPDCount;
LPDCM2 = run.Header.WaferSummary[i].LPDCM2;
AreaCount = run.Header.WaferSummary[i].AreaCount;
AreaTotal = run.Header.WaferSummary[i].AreaTotal;
ScratchCount = run.Header.WaferSummary[i].ScratchCount;
ScratchTotal = run.Header.WaferSummary[i].ScratchTotal;
SumOfDefects = run.Header.WaferSummary[i].SumOfDefects;
HazeRegion = run.Header.WaferSummary[i].HazeRegion;
HazeAverage = run.Header.WaferSummary[i].HazeAverage;
Grade = run.Header.WaferSummary[i].Grade;
//
LPDCountMin = run.Header.LPDCountMin;
LPDCM2Min = run.Header.LPDCM2Min;
AreaCountMin = run.Header.AreaCountMin;
AreaTotalMin = run.Header.AreaTotalMin;
ScratchCountMin = run.Header.ScratchCountMin;
ScratchTotalMin = run.Header.ScratchTotalMin;
SumOfDefectsMin = run.Header.SumOfDefectsMin;
HazeRegionMin = run.Header.HazeRegionMin;
HazeAverageMin = run.Header.HazeAverageMin;
LPDCountMax = run.Header.LPDCountMax;
LPDCM2Max = run.Header.LPDCM2Max;
AreaCountMax = run.Header.AreaCountMax;
AreaTotalMax = run.Header.AreaTotalMax;
ScratchCountMax = run.Header.ScratchCountMax;
ScratchTotalMax = run.Header.ScratchTotalMax;
SumOfDefectsMax = run.Header.SumOfDefectsMax;
HazeRegionMax = run.Header.HazeRegionMax;
HazeAverageMax = run.Header.HazeAverageMax;
LPDCountAvg = run.Header.LPDCountAvg;
LPDCM2Avg = run.Header.LPDCM2Avg;
AreaCountAvg = run.Header.AreaCountAvg;
AreaTotalAvg = run.Header.AreaTotalAvg;
ScratchCountAvg = run.Header.ScratchCountAvg;
ScratchTotalAvg = run.Header.ScratchTotalAvg;
SumOfDefectsAvg = run.Header.SumOfDefectsAvg;
HazeRegionAvg = run.Header.HazeRegionAvg;
HazeAverageAvg = run.Header.HazeAverageAvg;
LPDCountStdDev = run.Header.LPDCountStdDev;
LPDCM2StdDev = run.Header.LPDCM2StdDev;
AreaCountStdDev = run.Header.AreaCountStdDev;
AreaTotalStdDev = run.Header.AreaTotalStdDev;
ScratchCountStdDev = run.Header.ScratchCountStdDev;
ScratchTotalStdDev = run.Header.ScratchTotalStdDev;
SumOfDefectsStdDev = run.Header.SumOfDefectsStdDev;
HazeRegionStdDev = run.Header.HazeRegionStdDev;
HazeAverageStdDev = run.Header.HazeAverageStdDev;
//
WaferDate = run.Wafers[i].Date;
Comments = run.Wafers[i].Comments;
Sort = run.Wafers[i].Sort;
WaferLPDCount = run.Wafers[i].LPDCount;
WaferLPDCM2 = run.Wafers[i].LPDCM2;
Bin1 = run.Wafers[i].Bin1;
Bin2 = run.Wafers[i].Bin2;
Bin3 = run.Wafers[i].Bin3;
Bin4 = run.Wafers[i].Bin4;
Bin5 = run.Wafers[i].Bin5;
Bin6 = run.Wafers[i].Bin6;
Bin7 = run.Wafers[i].Bin7;
Bin8 = run.Wafers[i].Bin8;
Mean = run.Wafers[i].Mean;
StdDev = run.Wafers[i].StdDev;
WaferAreaCount = run.Wafers[i].AreaCount;
WaferAreaTotal = run.Wafers[i].AreaTotal;
WaferScratchCount = run.Wafers[i].ScratchCount;
WaferScratchTotal = run.Wafers[i].ScratchTotal;
WaferSumOfDefects = run.Wafers[i].SumOfDefects;
WaferHazeRegion = run.Wafers[i].HazeRegion;
WaferHazeAverage = run.Wafers[i].HazeAverage;
HazePeak = run.Wafers[i].HazePeak;
Laser = run.Wafers[i].Laser;
Gain = run.Wafers[i].Gain;
Diameter = run.Wafers[i].Diameter;
Thresh = run.Wafers[i].Thresh;
Exclusion = run.Wafers[i].Exclusion;
HazeRng = run.Wafers[i].HazeRng;
Thruput = run.Wafers[i].Thruput;
WaferRecipe = run.Wafers[i].Recipe;
}
public int Index { get; }
//
public string Date { get; }
public string Recipe { get; }
public string Id { get; }
//
public string WaferId { get; }
public string LPDCount { get; }
public string LPDCM2 { get; }
public string AreaCount { get; }
public string AreaTotal { get; }
public string ScratchCount { get; }
public string ScratchTotal { get; }
public string SumOfDefects { get; }
public string HazeRegion { get; }
public string HazeAverage { get; }
public string Grade { get; }
//
public string LPDCountMin { get; }
public string LPDCM2Min { get; }
public string AreaCountMin { get; }
public string AreaTotalMin { get; }
public string ScratchCountMin { get; }
public string ScratchTotalMin { get; }
public string SumOfDefectsMin { get; }
public string HazeRegionMin { get; }
public string HazeAverageMin { get; }
public string LPDCountMax { get; }
public string LPDCM2Max { get; }
public string AreaCountMax { get; }
public string AreaTotalMax { get; }
public string ScratchCountMax { get; }
public string ScratchTotalMax { get; }
public string SumOfDefectsMax { get; }
public string HazeRegionMax { get; }
public string HazeAverageMax { get; }
public string LPDCountAvg { get; }
public string LPDCM2Avg { get; }
public string AreaCountAvg { get; }
public string AreaTotalAvg { get; }
public string ScratchCountAvg { get; }
public string ScratchTotalAvg { get; }
public string SumOfDefectsAvg { get; }
public string HazeRegionAvg { get; }
public string HazeAverageAvg { get; }
public string LPDCountStdDev { get; }
public string LPDCM2StdDev { get; }
public string AreaCountStdDev { get; }
public string AreaTotalStdDev { get; }
public string ScratchCountStdDev { get; }
public string ScratchTotalStdDev { get; }
public string SumOfDefectsStdDev { get; }
public string HazeRegionStdDev { get; }
public string HazeAverageStdDev { get; }
//
public string WaferDate { get; }
public string Comments { get; }
public string Sort { get; }
public string WaferLPDCount { get; }
public string WaferLPDCM2 { get; }
public string Bin1 { get; }
public string Bin2 { get; }
public string Bin3 { get; }
public string Bin4 { get; }
public string Bin5 { get; }
public string Bin6 { get; }
public string Bin7 { get; }
public string Bin8 { get; }
public string Mean { get; }
public string StdDev { get; }
public string WaferAreaCount { get; }
public string WaferAreaTotal { get; }
public string WaferScratchCount { get; }
public string WaferScratchTotal { get; }
public string WaferSumOfDefects { get; }
public string WaferHazeRegion { get; }
public string WaferHazeAverage { get; }
public string HazePeak { get; }
public string Laser { get; }
public string Gain { get; }
public string Diameter { get; }
public string Thresh { get; }
public string Exclusion { get; }
public string HazeRng { get; }
public string Thruput { get; }
public string WaferRecipe { get; }
}
[JsonSourceGenerationOptions(WriteIndented = true)]
[JsonSerializable(typeof(Row))]
internal partial class RowSourceGenerationContext : JsonSerializerContext
{
}

View File

@ -1,146 +0,0 @@
using Adaptation.Shared;
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.IO;
using System.Linq;
using System.Text;
using System.Text.Json;
using System.Text.Json.Serialization;
namespace Adaptation.FileHandlers.pdsf;
#nullable enable
internal class Run
{
public Header Header { get; }
public ReadOnlyCollection<Wafer> Wafers { get; }
public Run(Header header, ReadOnlyCollection<Wafer> wafers)
{
Header = header;
Wafers = wafers;
}
private static ReadOnlyCollection<Wafer> GetLastWaferForEachSlot(ReadOnlyDictionary<string, string> pages, Constant constant, string headerFileName, Header header)
{
List<Wafer> results = new();
string id;
Wafer wafer;
ReadOnlyCollection<Wafer>? wafers;
ReadOnlyDictionary<string, ReadOnlyCollection<Wafer>> keyValuePairs = Wafer.Get(pages, constant, headerFileName);
ReadOnlyCollection<string> waferIds = GetWaferIds(header);
for (int i = 0; i < waferIds.Count; i++)
{
id = waferIds[i];
if (!keyValuePairs.TryGetValue(id, out wafers) || wafers.Count == 0)
wafer = Wafer.Get(id);
else
wafer = (from l in wafers where l.Recipe == header.Recipe select l).Last();
if (wafer is null)
break;
results.Add(wafer);
}
return results.AsReadOnly();
}
private static void WriteJson(Logistics logistics, List<FileInfo> fileInfoCollection, Run result)
{
FileInfo fileInfo = new($"{logistics.ReportFullPath}.run.json");
string json = JsonSerializer.Serialize(result, RunSourceGenerationContext.Default.Run);
File.WriteAllText(fileInfo.FullName, json);
File.SetLastWriteTime(fileInfo.FullName, logistics.DateTimeFromSequence);
fileInfoCollection.Add(fileInfo);
}
private static ReadOnlyCollection<string> GetLines(Logistics logistics, JsonElement[]? jsonElements)
{
List<string> results = new();
int columns = 0;
StringBuilder stringBuilder = new();
results.Add($"\"Count\",{jsonElements?.Length}");
results.Add($"\"{nameof(logistics.Sequence)}\",\"{logistics.Sequence}\"");
results.Add($"\"{nameof(logistics.MesEntity)}\",\"{logistics.MesEntity}\"");
string dateTimeFromSequence = logistics.DateTimeFromSequence.ToString("MM/dd/yyyy hh:mm:ss tt");
for (int i = 0; i < jsonElements?.Length;)
{
_ = stringBuilder.Append('"').Append(nameof(logistics.DateTimeFromSequence)).Append('"').Append(',');
foreach (JsonProperty jsonProperty in jsonElements[0].EnumerateObject())
{
columns += 1;
_ = stringBuilder.Append('"').Append(jsonProperty.Name).Append('"').Append(',');
}
break;
}
if (jsonElements?.Length != 0)
_ = stringBuilder.Remove(stringBuilder.Length - 1, 1);
results.Add(stringBuilder.ToString());
for (int i = 0; i < jsonElements?.Length; i++)
{
_ = stringBuilder.Clear();
_ = stringBuilder.Append('"').Append(dateTimeFromSequence).Append('"').Append(',');
foreach (JsonProperty jsonProperty in jsonElements[i].EnumerateObject())
{
if (jsonProperty.Value.ValueKind == JsonValueKind.Object)
_ = stringBuilder.Append(',');
else if (jsonProperty.Value.ValueKind != JsonValueKind.String)
_ = stringBuilder.Append(jsonProperty.Value).Append(',');
else
_ = stringBuilder.Append('"').Append(jsonProperty.Value).Append('"').Append(',');
}
_ = stringBuilder.Remove(stringBuilder.Length - 1, 1);
results.Add(stringBuilder.ToString());
}
return results.AsReadOnly();
}
private static void WriteCommaSeparatedValues(Logistics logistics, Run run)
{
List<Row> results = new();
Row row;
for (int i = 0; i < run.Wafers.Count; i++)
{
row = new(run, i);
results.Add(row);
}
string json = JsonSerializer.Serialize(results);
JsonElement[]? jsonElements = JsonSerializer.Deserialize<JsonElement[]>(json);
ReadOnlyCollection<string> lines = GetLines(logistics, jsonElements);
File.WriteAllText($"{logistics.ReportFullPath}.csv", string.Join(Environment.NewLine, lines));
}
private static ReadOnlyCollection<string> GetWaferIds(Header header)
{
List<string> results = new();
foreach (WaferSummary waferSummary in header.WaferSummary)
results.Add(waferSummary.Id);
return results.AsReadOnly();
}
internal static Run? Get(Logistics logistics, List<FileInfo> fileInfoCollection, ReadOnlyDictionary<string, string> pages)
{
Run? result;
Constant constant = new();
string headerFileName = pages.ElementAt(pages.Count - 1).Key;
Header? header = Header.Get(pages, constant, headerFileName);
if (header is null)
result = null;
else
{
ReadOnlyCollection<Wafer> wafers = GetLastWaferForEachSlot(pages, constant, headerFileName, header);
result = new(header, wafers);
WriteJson(logistics, fileInfoCollection, result);
WriteCommaSeparatedValues(logistics, result);
}
return result;
}
}
[JsonSourceGenerationOptions(WriteIndented = true)]
[JsonSerializable(typeof(Run))]
internal partial class RunSourceGenerationContext : JsonSerializerContext
{
}

View File

@ -1,240 +0,0 @@
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Linq;
using System.Text.Json.Serialization;
namespace Adaptation.FileHandlers.pdsf;
#nullable enable
public class Wafer
{
public Wafer(string date, string id, string comments, string sort, string lPDCount, string lPDCM2, string bin1, string bin2, string bin3, string bin4, string bin5, string bin6, string bin7, string bin8, string mean, string stdDev, string areaCount, string areaTotal, string scratchCount, string scratchTotal, string sumOfDefects, string hazeRegion, string hazeAverage, string hazePeak, string laser, string gain, string diameter, string thresh, string exclusion, string hazeRng, string thruput, string recipe)
{
Date = date;
Id = id;
Comments = comments;
Sort = sort;
LPDCount = lPDCount;
LPDCM2 = lPDCM2;
Bin1 = bin1;
Bin2 = bin2;
Bin3 = bin3;
Bin4 = bin4;
Bin5 = bin5;
Bin6 = bin6;
Bin7 = bin7;
Bin8 = bin8;
Mean = mean;
StdDev = stdDev;
AreaCount = areaCount;
AreaTotal = areaTotal;
ScratchCount = scratchCount;
ScratchTotal = scratchTotal;
SumOfDefects = sumOfDefects;
HazeRegion = hazeRegion;
HazeAverage = hazeAverage;
HazePeak = hazePeak;
Laser = laser;
Gain = gain;
Diameter = diameter;
Thresh = thresh;
Exclusion = exclusion;
HazeRng = hazeRng;
Thruput = thruput;
Recipe = recipe;
}
internal static Wafer Get(string id) =>
new(date: string.Empty,
id: id,
comments: string.Empty,
sort: string.Empty,
lPDCount: string.Empty,
lPDCM2: string.Empty,
bin1: string.Empty,
bin2: string.Empty,
bin3: string.Empty,
bin4: string.Empty,
bin5: string.Empty,
bin6: string.Empty,
bin7: string.Empty,
bin8: string.Empty,
mean: string.Empty,
stdDev: string.Empty,
areaCount: string.Empty,
areaTotal: string.Empty,
scratchCount: string.Empty,
scratchTotal: string.Empty,
sumOfDefects: string.Empty,
hazeRegion: string.Empty,
hazeAverage: string.Empty,
hazePeak: string.Empty,
laser: string.Empty,
gain: string.Empty,
diameter: string.Empty,
thresh: string.Empty,
exclusion: string.Empty,
hazeRng: string.Empty,
thruput: string.Empty,
recipe: string.Empty);
public string Date { get; }
public string Id { get; }
public string Comments { get; }
public string Sort { get; }
public string LPDCount { get; }
public string LPDCM2 { get; }
public string Bin1 { get; }
public string Bin2 { get; }
public string Bin3 { get; }
public string Bin4 { get; }
public string Bin5 { get; }
public string Bin6 { get; }
public string Bin7 { get; }
public string Bin8 { get; }
public string Mean { get; }
public string StdDev { get; }
public string AreaCount { get; }
public string AreaTotal { get; }
public string ScratchCount { get; }
public string ScratchTotal { get; }
public string SumOfDefects { get; }
public string HazeRegion { get; }
public string HazeAverage { get; }
public string HazePeak { get; }
public string Laser { get; }
public string Gain { get; }
public string Diameter { get; }
public string Thresh { get; }
public string Exclusion { get; }
public string HazeRng { get; }
public string Thruput { get; }
public string Recipe { get; }
internal static ReadOnlyDictionary<string, ReadOnlyCollection<Wafer>> Get(ReadOnlyDictionary<string, string> pages, Constant constant, string headerFileName)
{
Dictionary<string, ReadOnlyCollection<Wafer>> results = new();
Wafer wafer;
string? text;
List<string> stringList;
int[] i = new int[] { 0 };
Dictionary<string, List<Wafer>> keyValuePairs = new();
foreach (KeyValuePair<string, string> keyValuePair in pages)
{
if (keyValuePair.Key == headerFileName)
continue;
if (!pages.ContainsKey(keyValuePair.Key))
throw new Exception();
i[0] = 0;
stringList = new();
if (!pages.TryGetValue(keyValuePair.Key, out text))
throw new Exception();
if (string.IsNullOrEmpty(text) || !text.Contains(constant.Id))
continue;
Header.ScanPast(text, i, constant.Date);
string date = Header.GetToEOL(text, i);
Header.ScanPast(text, i, constant.Id);
string id = Header.GetToEOL(text, i);
if (id.Length > 5)
id = string.Concat(id.Substring(0, 5), "... - ***");
id = id.Replace("*", "");
Header.ScanPast(text, i, "Comments:");
string comments = Header.GetToEOL(text, i);
Header.ScanPast(text, i, "Sort:");
string sort = Header.GetToEOL(text, i);
Header.ScanPast(text, i, "LPD Count:");
string lPDCount = Header.GetToEOL(text, i);
Header.ScanPast(text, i, "LPD / cm2:");
string lPDCM2 = Header.GetToEOL(text, i);
while (Header.GetBefore(text, i, ":").Contains("Bin"))
stringList.Add(Header.GetToEOL(text, i));
string bin1 = stringList.Count >= 1 ? stringList[0] : string.Empty;
string bin2 = stringList.Count >= 2 ? stringList[1] : string.Empty;
string bin3 = stringList.Count >= 3 ? stringList[2] : string.Empty;
string bin4 = stringList.Count >= 4 ? stringList[3] : string.Empty;
string bin5 = stringList.Count >= 5 ? stringList[4] : string.Empty;
string bin6 = stringList.Count >= 6 ? stringList[5] : string.Empty;
string bin7 = stringList.Count >= 7 ? stringList[6] : string.Empty;
string bin8 = stringList.Count >= 8 ? stringList[7] : string.Empty;
string mean = Header.GetToEOL(text, i);
Header.ScanPast(text, i, "Std Dev:");
string stdDev = Header.GetToEOL(text, i);
Header.ScanPast(text, i, "Area Count:");
string areaCount = Header.GetToEOL(text, i);
Header.ScanPast(text, i, "Area Total:");
string areaTotal = Header.GetToEOL(text, i);
Header.ScanPast(text, i, "Scratch Count:");
string scratchCount = Header.GetToEOL(text, i);
Header.ScanPast(text, i, "Scratch Total:");
string scratchTotal = Header.GetToEOL(text, i);
Header.ScanPast(text, i, "Sum of All Defects:");
string sumOfDefects = Header.GetToEOL(text, i);
Header.ScanPast(text, i, "Haze Region:");
string hazeRegion = Header.GetToEOL(text, i);
Header.ScanPast(text, i, "Haze Average:");
string hazeAverage = Header.GetToEOL(text, i);
Header.ScanPast(text, i, "Haze Peak:");
string hazePeak = Header.GetToEOL(text, i);
Header.ScanPast(text, i, "Laser:");
string laser = Header.GetBefore(text, i, "Gain:");
string gain = Header.GetBefore(text, i, "Diameter:");
string diameter = Header.GetToEOL(text, i);
Header.ScanPast(text, i, "Thresh:");
string thresh = Header.GetBefore(text, i, "Exclusion:");
string exclusion = Header.GetToEOL(text, i);
Header.ScanPast(text, i, "Haze Rng:");
string hazeRng = Header.GetBefore(text, i, "Thruput:");
string thruput = Header.GetToEOL(text, i);
Header.ScanPast(text, i, "Recipe ID:");
string recipe = Header.GetToEOL(text, i);
wafer = new(date: date,
id: id,
comments: comments,
sort: sort,
lPDCount: lPDCount,
lPDCM2: lPDCM2,
bin1: bin1,
bin2: bin2,
bin3: bin3,
bin4: bin4,
bin5: bin5,
bin6: bin6,
bin7: bin7,
bin8: bin8,
mean: mean,
stdDev: stdDev,
areaCount: areaCount,
areaTotal: areaTotal,
scratchCount: scratchCount,
scratchTotal: scratchTotal,
sumOfDefects: sumOfDefects,
hazeRegion: hazeRegion,
hazeAverage: hazeAverage,
hazePeak: hazePeak,
laser: laser,
gain: gain,
diameter: diameter,
thresh: thresh,
exclusion: exclusion,
hazeRng: hazeRng,
thruput: thruput,
recipe: recipe);
if (!keyValuePairs.ContainsKey(id))
keyValuePairs.Add(id, new List<Wafer>());
keyValuePairs[id].Add(wafer);
}
foreach (KeyValuePair<string, List<Wafer>> keyValuePair in keyValuePairs)
results.Add(keyValuePair.Key, keyValuePair.Value.AsReadOnly());
return new(results);
}
}
[JsonSourceGenerationOptions(WriteIndented = true)]
[JsonSerializable(typeof(Wafer))]
internal partial class WaferSourceGenerationContext : JsonSerializerContext
{
}

View File

@ -1,43 +0,0 @@
using System.Text.Json.Serialization;
namespace Adaptation.FileHandlers.pdsf;
#nullable enable
public class WaferSummary
{
public WaferSummary(string id, string lPDCount, string lPDCM2, string areaCount, string areaTotal, string scratchCount, string scratchTotal, string sumOfDefects, string hazeRegion, string hazeAverage, string grade)
{
Id = id;
LPDCount = lPDCount;
LPDCM2 = lPDCM2;
AreaCount = areaCount;
AreaTotal = areaTotal;
ScratchCount = scratchCount;
ScratchTotal = scratchTotal;
SumOfDefects = sumOfDefects;
HazeRegion = hazeRegion;
HazeAverage = hazeAverage;
Grade = grade;
}
public string Id { get; }
public string LPDCount { get; }
public string LPDCM2 { get; }
public string AreaCount { get; }
public string AreaTotal { get; }
public string ScratchCount { get; }
public string ScratchTotal { get; }
public string SumOfDefects { get; }
public string HazeRegion { get; }
public string HazeAverage { get; }
public string Grade { get; }
}
[JsonSourceGenerationOptions(WriteIndented = true)]
[JsonSerializable(typeof(WaferSummary))]
internal partial class WaferSummarySourceGenerationContext : JsonSerializerContext
{
}

View File

@ -226,9 +226,9 @@ public class MonIn : IMonIn, IDisposable
{
StringBuilder stringBuilder = new();
if (string.IsNullOrEmpty(subresource))
_ = stringBuilder.AppendFormat(_CultureInfo, "> {0} {1} \"{2}\" \"{3}\" {4} \n{5}", site.Trim(), timeStamp.HasValue ? GetDateTimeNowAsPosix(timeStamp.Value) : (object)"now", resource.Trim(), stateName.Trim(), state.Trim(), description.Trim());
_ = stringBuilder.AppendFormat(_CultureInfo, "> {0} {1} \"{2}\" \"{3}\" {4} \n{5}", site.Trim(), timeStamp.HasValue ? GetDateTimeNowAsPosix(timeStamp.Value) : "now", resource.Trim(), stateName.Trim(), state.Trim(), description.Trim());
else
_ = stringBuilder.AppendFormat(_CultureInfo, "> {0} {1} \"{2}\" \"{3}\" \"{4}\" {5} \n{6}", site.Trim(), timeStamp.HasValue ? GetDateTimeNowAsPosix(timeStamp.Value) : (object)"now", resource.Trim(), subresource.Trim(), stateName.Trim(), state.Trim(), description.Trim());
_ = stringBuilder.AppendFormat(_CultureInfo, "> {0} {1} \"{2}\" \"{3}\" \"{4}\" {5} \n{6}", site.Trim(), timeStamp.HasValue ? GetDateTimeNowAsPosix(timeStamp.Value) : "now", resource.Trim(), subresource.Trim(), stateName.Trim(), state.Trim(), description.Trim());
return stringBuilder.ToString();
}
@ -247,14 +247,14 @@ public class MonIn : IMonIn, IDisposable
if (string.IsNullOrEmpty(subresource))
{
if (unit.Equals(string.Empty) && !interval.HasValue)
_ = stringBuilder.AppendFormat(_CultureInfo, "> {0} {1} \"{2}\" \"{3}\" {4} \n{5}", site.Trim(), timeStamp.HasValue ? GetDateTimeNowAsPosix(timeStamp.Value) : (object)"now", resource.Trim(), performanceName.Trim(), value, description.Trim());
_ = stringBuilder.AppendFormat(_CultureInfo, "> {0} {1} \"{2}\" \"{3}\" {4} \n{5}", site.Trim(), timeStamp.HasValue ? GetDateTimeNowAsPosix(timeStamp.Value) : "now", resource.Trim(), performanceName.Trim(), value, description.Trim());
else
_ = stringBuilder.AppendFormat(_CultureInfo, "> {0} {1} \"{2}\" \"{3}\" {4} {5} {{interval={6}, unit={7}}}\n", site.Trim(), timeStamp.HasValue ? GetDateTimeNowAsPosix(timeStamp.Value) : (object)"now", resource.Trim(), performanceName.Trim(), value, description.Trim(), interval.HasValue ? interval.Value.ToString() : (object)string.Empty, unit.Trim());
_ = stringBuilder.AppendFormat(_CultureInfo, "> {0} {1} \"{2}\" \"{3}\" {4} {5} {{interval={6}, unit={7}}}\n", site.Trim(), timeStamp.HasValue ? GetDateTimeNowAsPosix(timeStamp.Value) : "now", resource.Trim(), performanceName.Trim(), value, description.Trim(), interval.HasValue ? interval.Value.ToString() : string.Empty, unit.Trim());
}
else if (unit.Equals(string.Empty) && !interval.HasValue)
_ = stringBuilder.AppendFormat(_CultureInfo, "> {0} {1} \"{2}\" \"{3}\" \"{4}\" {5} \n{6}", site.Trim(), timeStamp.HasValue ? GetDateTimeNowAsPosix(timeStamp.Value) : (object)"now", resource.Trim(), subresource.Trim(), performanceName.Trim(), value, description.Trim());
_ = stringBuilder.AppendFormat(_CultureInfo, "> {0} {1} \"{2}\" \"{3}\" \"{4}\" {5} \n{6}", site.Trim(), timeStamp.HasValue ? GetDateTimeNowAsPosix(timeStamp.Value) : "now", resource.Trim(), subresource.Trim(), performanceName.Trim(), value, description.Trim());
else
_ = stringBuilder.AppendFormat(_CultureInfo, "> {0} {1} \"{2}\" \"{3}\" \"{4}\" {5} {6} {{interval={7}, unit={8}}}\n", site.Trim(), timeStamp.HasValue ? GetDateTimeNowAsPosix(timeStamp.Value) : (object)"now", resource.Trim(), subresource.Trim(), performanceName.Trim(), value, description.Trim(), interval.HasValue ? interval.Value.ToString() : (object)string.Empty, unit.Trim());
_ = stringBuilder.AppendFormat(_CultureInfo, "> {0} {1} \"{2}\" \"{3}\" \"{4}\" {5} {6} {{interval={7}, unit={8}}}\n", site.Trim(), timeStamp.HasValue ? GetDateTimeNowAsPosix(timeStamp.Value) : "now", resource.Trim(), subresource.Trim(), performanceName.Trim(), value, description.Trim(), interval.HasValue ? interval.Value.ToString() : string.Empty, unit.Trim());
return stringBuilder.ToString();
}

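A small sketch of why the explicit (object) casts can be dropped: once both branches of the conditional are strings the expression already has a single type, and AppendFormat boxes the argument into its params object[] regardless. The posix value below is a hypothetical stand-in for GetDateTimeNowAsPosix.

using System;
using System.Text;

bool hasTimeStamp = false;
string posix = "1733900000";                         // hypothetical GetDateTimeNowAsPosix(...) result
StringBuilder stringBuilder = new();
_ = stringBuilder.AppendFormat("{0} {1}", hasTimeStamp ? posix : "now", "RESOURCE");
Console.WriteLine(stringBuilder.ToString());         // prints: now RESOURCE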
View File

@ -10,7 +10,7 @@
<IsPackable>false</IsPackable>
<Nullable>disable</Nullable>
<RuntimeIdentifier>win-x64</RuntimeIdentifier>
<TargetFramework>net8.0</TargetFramework>
<TargetFramework>net10.0</TargetFramework>
</PropertyGroup>
<PropertyGroup>
<VSTestLogger>trx</VSTestLogger>
@ -35,8 +35,8 @@
<RuntimeHostConfigurationOption Include="AssemblyName" Value="MET08DDUPSFS6420" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="coverlet.collector" Version="6.0.3" />
<PackageReference Include="FFMpegCore" Version="5.1.0" />
<PackageReference Include="coverlet.collector" Version="6.0.4" />
<PackageReference Include="FFMpegCore" Version="5.4.0" />
<PackageReference Include="IKVM.AWT.WinForms" Version="7.2.4630.5"><NoWarn>NU1701</NoWarn></PackageReference>
<PackageReference Include="IKVM.OpenJDK.Core" Version="7.2.4630.5"><NoWarn>NU1701</NoWarn></PackageReference>
<PackageReference Include="IKVM.OpenJDK.Media" Version="7.2.4630.5"><NoWarn>NU1701</NoWarn></PackageReference>
@ -44,29 +44,28 @@
<PackageReference Include="IKVM.OpenJDK.Util" Version="7.2.4630.5"><NoWarn>NU1701</NoWarn></PackageReference>
<PackageReference Include="IKVM.OpenJDK.XML.API" Version="7.2.4630.5"><NoWarn>NU1701</NoWarn></PackageReference>
<PackageReference Include="IKVM.Runtime" Version="7.2.4630.5"><NoWarn>NU1701</NoWarn></PackageReference>
<PackageReference Include="Instances" Version="3.0.1" />
<PackageReference Include="log4net" Version="3.0.3"></PackageReference>
<PackageReference Include="Microsoft.Extensions.Configuration.Binder" Version="9.0.0" />
<PackageReference Include="Microsoft.Extensions.Configuration.CommandLine" Version="9.0.0" />
<PackageReference Include="Microsoft.Extensions.Configuration.EnvironmentVariables" Version="9.0.0" />
<PackageReference Include="Microsoft.Extensions.Configuration.FileExtensions" Version="9.0.0" />
<PackageReference Include="Microsoft.Extensions.Configuration.json" Version="9.0.0" />
<PackageReference Include="Microsoft.Extensions.Configuration" Version="9.0.0" />
<PackageReference Include="Microsoft.Extensions.DependencyInjection" Version="9.0.0" />
<PackageReference Include="Microsoft.Extensions.Logging.Console" Version="9.0.0" />
<PackageReference Include="Microsoft.Extensions.Logging.Debug" Version="9.0.0" />
<PackageReference Include="Microsoft.Extensions.Logging" Version="9.0.0" />
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.12.0" />
<PackageReference Include="Microsoft.Win32.SystemEvents" Version="9.0.0" />
<PackageReference Include="Instances" Version="3.0.2" />
<PackageReference Include="log4net" Version="3.2.0"></PackageReference>
<PackageReference Include="Microsoft.Extensions.Configuration.Binder" Version="10.0.0" />
<PackageReference Include="Microsoft.Extensions.Configuration.CommandLine" Version="10.0.0" />
<PackageReference Include="Microsoft.Extensions.Configuration.EnvironmentVariables" Version="10.0.0" />
<PackageReference Include="Microsoft.Extensions.Configuration.FileExtensions" Version="10.0.0" />
<PackageReference Include="Microsoft.Extensions.Configuration.json" Version="10.0.0" />
<PackageReference Include="Microsoft.Extensions.Configuration" Version="10.0.0" />
<PackageReference Include="Microsoft.Extensions.DependencyInjection" Version="10.0.0" />
<PackageReference Include="Microsoft.Extensions.Logging.Console" Version="10.0.0" />
<PackageReference Include="Microsoft.Extensions.Logging.Debug" Version="10.0.0" />
<PackageReference Include="Microsoft.Extensions.Logging" Version="10.0.0" />
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="18.0.1" />
<PackageReference Include="Microsoft.Win32.SystemEvents" Version="10.0.0" />
<PackageReference Include="MSTest.TestAdapter" Version="3.7.0" />
<PackageReference Include="MSTest.TestFramework" Version="3.7.0" />
<PackageReference Include="Pdfbox" Version="1.1.1"><NoWarn>NU1701</NoWarn></PackageReference>
<PackageReference Include="RoboSharp" Version="1.6.0" />
<PackageReference Include="System.Configuration.ConfigurationManager" Version="9.0.0" />
<PackageReference Include="System.Data.OleDb" Version="9.0.0" />
<PackageReference Include="System.Data.SqlClient" Version="4.8.6" />
<PackageReference Include="System.Drawing.Common" Version="9.0.0" />
<PackageReference Include="System.Text.Json" Version="9.0.0" />
<PackageReference Include="System.Configuration.ConfigurationManager" Version="10.0.0" />
<PackageReference Include="System.Data.OleDb" Version="10.0.0" />
<PackageReference Include="System.Data.SqlClient" Version="4.9.0" />
<PackageReference Include="System.Drawing.Common" Version="10.0.0" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="Infineon.Mesa.PDF.Text.Stripper" Version="4.8.0.2"><NoWarn>NU1701</NoWarn></PackageReference>

View File

@ -199,4 +199,31 @@ public class Logistics : ILogistics
_ProcessJobID = processJobID;
}
private static int GetCountFromFileName(Logistics logistics)
{
int result;
string[] segments = logistics.FileInfo.Name.Split('.');
string[] segmentsB = segments[0].Split('_');
string countFromFileName = segmentsB.Length < 3 ? "0" : segmentsB[2];
if (!int.TryParse(countFromFileName, out result))
result = 0;
return result;
}
internal static long GetUniqueSequence(Logistics logistics)
{
long result;
int countFromFileName = GetCountFromFileName(logistics);
result = (logistics.Sequence * 10) + countFromFileName;
return result;
}
internal static string GetUniqueId(Logistics logistics)
{
string result;
int countFromFileName = GetCountFromFileName(logistics);
result = $"{logistics.JobID}_{logistics.MID}_{logistics.DateTimeFromSequence:yyyyMMddHHmmssffff}_{countFromFileName}";
return result;
}
}

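A standalone sketch of the count-from-file-name convention used by the new helpers above, assuming a hypothetical report name of the form Header_Mid_2.pdsf and a hypothetical tick-based sequence value.

using System;

static int GetCountFromFileName(string fileName)
{
    string[] segments = fileName.Split('.');               // "Header_Mid_2"
    string[] segmentsB = segments[0].Split('_');            // ["Header", "Mid", "2"]
    string countFromFileName = segmentsB.Length < 3 ? "0" : segmentsB[2];
    return int.TryParse(countFromFileName, out int result) ? result : 0;
}

long sequence = 638700000000000000;                          // hypothetical Logistics.Sequence (ticks)
int count = GetCountFromFileName("Header_Mid_2.pdsf");       // 2
long uniqueSequence = (sequence * 10) + count;               // same shape as GetUniqueSequence
Console.WriteLine(uniqueSequence);                           // distinct per split file of the same run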
View File

@ -187,7 +187,7 @@ internal class ProcessDataStandardFormat
break;
}
}
string? linesOne = lines.Length > 0 && body.Count == 0 && columns.Count == 0 ? lines[1] : null;
string? linesOne = lines.Length > 1 && body.Count == 0 && columns.Count == 0 ? lines[1] : null;
logistics = GetLogistics(footer, linesOne: linesOne);
if (logistics.Count == 0)
sequence = null;
@ -235,7 +235,7 @@ internal class ProcessDataStandardFormat
const int columnsLine = 6;
FileInfo fileInfo = new(reportFullPath);
ProcessDataStandardFormat processDataStandardFormat = GetProcessDataStandardFormat(fileInfo.LastWriteTime, columnsLine, fileInfo.FullName, lines: null);
JsonElement[]? jsonElements = processDataStandardFormatMapping.OldColumnNames.Count != processDataStandardFormatMapping.ColumnIndices.Count ? null : GetFullArray(processDataStandardFormat);
JsonElement[]? jsonElements = processDataStandardFormatMapping.OldColumnNames.Count == 0 ? null : GetFullArray(processDataStandardFormat);
JsonProperty[]? jsonProperties = jsonElements is null || jsonElements.Length == 0 ? null : jsonElements[0].EnumerateObject().ToArray();
if (jsonElements is null || jsonProperties is null || jsonProperties.Length != processDataStandardFormatMapping.NewColumnNames.Count)
result = processDataStandardFormat;
@ -665,7 +665,7 @@ internal class ProcessDataStandardFormat
return results;
}
internal static string GetPDSFText(IFileRead fileRead, Logistics logistics, JsonElement[] jsonElements, string logisticsText)
internal static string GetPDSFText(IFileRead fileRead, Logistics logistics, JsonElement[] jsonElements, string? logisticsText)
{
string result;
if (jsonElements.Length == 0)
@ -850,33 +850,6 @@ internal class ProcessDataStandardFormat
return string.Concat(((int)searchFor).ToString().PadLeft(2, '0'), separator, searchFor.ToString().Replace("In", string.Concat(separator, "In")).Replace("Ex", string.Concat(separator, "Ex")));
}
private static int? TryGetPropertyIndex(JsonProperty[] jsonProperties, string propertyName)
{
int? result = null;
for (int i = 0; i < jsonProperties.Length; i++)
{
if (jsonProperties[i].Name != propertyName)
continue;
result = i;
break;
}
if (result is null)
{
for (int i = 0; i < jsonProperties.Length; i++)
{
if (jsonProperties[i].Name[0] != propertyName[0])
continue;
if (jsonProperties[i].Name.Length != propertyName.Length)
continue;
if (jsonProperties[i].Name != propertyName)
continue;
result = i;
break;
}
}
return result;
}
internal static string GetXml(ProcessDataStandardFormat processDataStandardFormat)
{
string result;

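A tiny sketch of the bounds fix above: for a one-line file the old lines.Length > 0 guard still indexed past the end when reading lines[1], while lines.Length > 1 does not.

using System;

string[] lines = { "HEADER_TAG" };                           // hypothetical single-line file content
string? linesOne = lines.Length > 1 ? lines[1] : null;       // the old "> 0" guard would have thrown IndexOutOfRangeException here
Console.WriteLine(linesOne ?? "<null>");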
View File

@ -1,4 +1,4 @@
#if true
#if v2_60_0
using Adaptation._Tests.Shared;
using Microsoft.Extensions.Logging;
using Microsoft.VisualStudio.TestTools.UnitTesting;

View File

@ -1,4 +1,4 @@
#if true
#if v2_60_0
using Adaptation._Tests.Shared;
using Microsoft.Extensions.Logging;
using Microsoft.VisualStudio.TestTools.UnitTesting;

View File

@ -1,4 +1,4 @@
#if true
#if v2_60_0
using Adaptation._Tests.Shared;
using Microsoft.Extensions.Logging;
using Microsoft.VisualStudio.TestTools.UnitTesting;

View File

@ -1,4 +1,4 @@
#if true
#if v2_60_0
using Adaptation._Tests.Shared;
using Microsoft.Extensions.Logging;
using Microsoft.VisualStudio.TestTools.UnitTesting;

View File

@ -0,0 +1,182 @@
#if true
using Adaptation._Tests.Shared;
using Microsoft.Extensions.Logging;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using System;
using System.Diagnostics;
using System.IO;
using System.Reflection;
namespace Adaptation._Tests.CreateSelfDescription.Production.v2_61_1;
[TestClass]
public class MET08DDUPSFS6420 : EAFLoggingUnitTesting
{
#pragma warning disable CA2254
#pragma warning disable IDE0060
internal static string DummyRoot { get; private set; }
internal static MET08DDUPSFS6420 EAFLoggingUnitTesting { get; private set; }
static MET08DDUPSFS6420() => DummyRoot = @"\\mesfs.infineon.com\EC_Characterization_Si\Dummy";
public MET08DDUPSFS6420() : base(DummyRoot, testContext: null, declaringType: null, skipEquipmentDictionary: false)
{
if (EAFLoggingUnitTesting is null)
throw new Exception();
}
public MET08DDUPSFS6420(TestContext testContext) : base(DummyRoot, testContext, new StackFrame().GetMethod().DeclaringType, skipEquipmentDictionary: false)
{
}
[ClassInitialize]
public static void ClassInitialize(TestContext testContext)
{
EAFLoggingUnitTesting ??= new MET08DDUPSFS6420(testContext);
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(testContext.TestName, " - ClassInitialize"));
string[] fileNameAndText = EAFLoggingUnitTesting.AdaptationTesting.GetCSharpText(testContext.TestName);
File.WriteAllText(fileNameAndText[0], fileNameAndText[1]);
File.WriteAllText(fileNameAndText[2], fileNameAndText[3]);
}
[ClassCleanup()]
public static void ClassCleanup()
{
EAFLoggingUnitTesting?.Logger?.LogInformation("Cleanup");
EAFLoggingUnitTesting?.Dispose();
}
#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Production__v2_61_1__MET08DDUPSFS6420__MoveMatchingFiles()
{
string check = "*.pdsf";
MethodBase methodBase = new StackFrame().GetMethod();
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Getting configuration"));
_ = AdaptationTesting.GetWriteConfigurationGetFileRead(methodBase, check, EAFLoggingUnitTesting.AdaptationTesting);
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Exit"));
}
#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Production__v2_61_1__MET08DDUPSFS6420__OpenInsightMetrologyViewer()
{
string check = "*.pdsf";
MethodBase methodBase = new StackFrame().GetMethod();
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Getting configuration"));
_ = AdaptationTesting.GetWriteConfigurationGetFileRead(methodBase, check, EAFLoggingUnitTesting.AdaptationTesting);
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Exit"));
}
#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Production__v2_61_1__MET08DDUPSFS6420__IQSSi()
{
string check = "*.pdsf";
MethodBase methodBase = new StackFrame().GetMethod();
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Getting configuration"));
_ = AdaptationTesting.GetWriteConfigurationGetFileRead(methodBase, check, EAFLoggingUnitTesting.AdaptationTesting);
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Exit"));
}
#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Production__v2_61_1__MET08DDUPSFS6420__OpenInsight()
{
string check = "*.pdsf";
MethodBase methodBase = new StackFrame().GetMethod();
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Getting configuration"));
_ = AdaptationTesting.GetWriteConfigurationGetFileRead(methodBase, check, EAFLoggingUnitTesting.AdaptationTesting);
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Exit"));
}
#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Production__v2_61_1__MET08DDUPSFS6420__OpenInsightMetrologyViewerAttachments()
{
string check = "*.pdsf";
MethodBase methodBase = new StackFrame().GetMethod();
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Getting configuration"));
_ = AdaptationTesting.GetWriteConfigurationGetFileRead(methodBase, check, EAFLoggingUnitTesting.AdaptationTesting);
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Exit"));
}
#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Production__v2_61_1__MET08DDUPSFS6420__APC()
{
string check = "*.pdsf";
MethodBase methodBase = new StackFrame().GetMethod();
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Getting configuration"));
_ = AdaptationTesting.GetWriteConfigurationGetFileRead(methodBase, check, EAFLoggingUnitTesting.AdaptationTesting);
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Exit"));
}
#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Production__v2_61_1__MET08DDUPSFS6420__SPaCe()
{
string check = "*.pdsf";
MethodBase methodBase = new StackFrame().GetMethod();
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Getting configuration"));
_ = AdaptationTesting.GetWriteConfigurationGetFileRead(methodBase, check, EAFLoggingUnitTesting.AdaptationTesting);
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Exit"));
}
#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Production__v2_61_1__MET08DDUPSFS6420__Processed()
{
string check = "*.pdsf";
MethodBase methodBase = new StackFrame().GetMethod();
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Getting configuration"));
_ = AdaptationTesting.GetWriteConfigurationGetFileRead(methodBase, check, EAFLoggingUnitTesting.AdaptationTesting);
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Exit"));
}
#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Production__v2_61_1__MET08DDUPSFS6420__Archive()
{
string check = "*.pdsf";
MethodBase methodBase = new StackFrame().GetMethod();
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Getting configuration"));
_ = AdaptationTesting.GetWriteConfigurationGetFileRead(methodBase, check, EAFLoggingUnitTesting.AdaptationTesting);
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Exit"));
}
#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Production__v2_61_1__MET08DDUPSFS6420__Dummy()
{
string check = "637400762709163000.zip";
MethodBase methodBase = new StackFrame().GetMethod();
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Getting configuration"));
_ = AdaptationTesting.GetWriteConfigurationGetFileRead(methodBase, check, EAFLoggingUnitTesting.AdaptationTesting);
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Exit"));
}
}
#endif

View File

@ -0,0 +1,65 @@
#if true
using Adaptation._Tests.Shared;
using Microsoft.Extensions.Logging;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using System;
using System.Diagnostics;
using System.IO;
using System.Reflection;
namespace Adaptation._Tests.CreateSelfDescription.Production.v2_61_1;
[TestClass]
public class TENCOR1 : EAFLoggingUnitTesting
{
#pragma warning disable CA2254
#pragma warning disable IDE0060
internal static string DummyRoot { get; private set; }
internal static TENCOR1 EAFLoggingUnitTesting { get; private set; }
static TENCOR1() => DummyRoot = @"\\mesfs.infineon.com\EC_Characterization_Si\Dummy";
public TENCOR1() : base(DummyRoot, testContext: null, declaringType: null, skipEquipmentDictionary: false)
{
if (EAFLoggingUnitTesting is null)
throw new Exception();
}
public TENCOR1(TestContext testContext) : base(DummyRoot, testContext, new StackFrame().GetMethod().DeclaringType, skipEquipmentDictionary: false)
{
}
[ClassInitialize]
public static void ClassInitialize(TestContext testContext)
{
EAFLoggingUnitTesting ??= new TENCOR1(testContext);
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(testContext.TestName, " - ClassInitialize"));
string[] fileNameAndText = EAFLoggingUnitTesting.AdaptationTesting.GetCSharpText(testContext.TestName);
File.WriteAllText(fileNameAndText[0], fileNameAndText[1]);
File.WriteAllText(fileNameAndText[2], fileNameAndText[3]);
}
[ClassCleanup()]
public static void ClassCleanup()
{
EAFLoggingUnitTesting?.Logger?.LogInformation("Cleanup");
EAFLoggingUnitTesting?.Dispose();
}
#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Production__v2_61_1__TENCOR1__pcl()
{
string check = "*.pcl";
MethodBase methodBase = new StackFrame().GetMethod();
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Getting configuration"));
_ = AdaptationTesting.GetWriteConfigurationGetFileRead(methodBase, check, EAFLoggingUnitTesting.AdaptationTesting);
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Exit"));
}
}
#endif

View File

@ -0,0 +1,76 @@
#if true
using Adaptation._Tests.Shared;
using Microsoft.Extensions.Logging;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using System;
using System.Diagnostics;
using System.IO;
using System.Reflection;
namespace Adaptation._Tests.CreateSelfDescription.Production.v2_61_1;
[TestClass]
public class TENCOR2 : EAFLoggingUnitTesting
{
#pragma warning disable CA2254
#pragma warning disable IDE0060
internal static string DummyRoot { get; private set; }
internal static TENCOR2 EAFLoggingUnitTesting { get; private set; }
static TENCOR2() => DummyRoot = @"\\mesfs.infineon.com\EC_Characterization_Si\Dummy";
public TENCOR2() : base(DummyRoot, testContext: null, declaringType: null, skipEquipmentDictionary: false)
{
if (EAFLoggingUnitTesting is null)
throw new Exception();
}
public TENCOR2(TestContext testContext) : base(DummyRoot, testContext, new StackFrame().GetMethod().DeclaringType, skipEquipmentDictionary: false)
{
}
[ClassInitialize]
public static void ClassInitialize(TestContext testContext)
{
EAFLoggingUnitTesting ??= new TENCOR2(testContext);
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(testContext.TestName, " - ClassInitialize"));
string[] fileNameAndText = EAFLoggingUnitTesting.AdaptationTesting.GetCSharpText(testContext.TestName);
File.WriteAllText(fileNameAndText[0], fileNameAndText[1]);
File.WriteAllText(fileNameAndText[2], fileNameAndText[3]);
}
[ClassCleanup()]
public static void ClassCleanup()
{
EAFLoggingUnitTesting?.Logger?.LogInformation("Cleanup");
EAFLoggingUnitTesting?.Dispose();
}
#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Production__v2_61_1__TENCOR2__pcl()
{
string check = "*.pcl";
MethodBase methodBase = new StackFrame().GetMethod();
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Getting configuration"));
_ = AdaptationTesting.GetWriteConfigurationGetFileRead(methodBase, check, EAFLoggingUnitTesting.AdaptationTesting);
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Exit"));
}
[Ignore]
[TestMethod]
public void Production__v2_61_1__TENCOR2__pdsf()
{
string check = "*EQP_*.pdsf";
MethodBase methodBase = new StackFrame().GetMethod();
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Getting configuration"));
_ = AdaptationTesting.GetWriteConfigurationGetFileRead(methodBase, check, EAFLoggingUnitTesting.AdaptationTesting);
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Exit"));
}
}
#endif

View File

@ -0,0 +1,76 @@
#if true
using Adaptation._Tests.Shared;
using Microsoft.Extensions.Logging;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using System;
using System.Diagnostics;
using System.IO;
using System.Reflection;
namespace Adaptation._Tests.CreateSelfDescription.Production.v2_61_1;
[TestClass]
public class TENCOR3 : EAFLoggingUnitTesting
{
#pragma warning disable CA2254
#pragma warning disable IDE0060
internal static string DummyRoot { get; private set; }
internal static TENCOR3 EAFLoggingUnitTesting { get; private set; }
static TENCOR3() => DummyRoot = @"\\mesfs.infineon.com\EC_Characterization_Si\Dummy";
public TENCOR3() : base(DummyRoot, testContext: null, declaringType: null, skipEquipmentDictionary: false)
{
if (EAFLoggingUnitTesting is null)
throw new Exception();
}
public TENCOR3(TestContext testContext) : base(DummyRoot, testContext, new StackFrame().GetMethod().DeclaringType, skipEquipmentDictionary: false)
{
}
[ClassInitialize]
public static void ClassInitialize(TestContext testContext)
{
EAFLoggingUnitTesting ??= new TENCOR3(testContext);
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(testContext.TestName, " - ClassInitialize"));
string[] fileNameAndText = EAFLoggingUnitTesting.AdaptationTesting.GetCSharpText(testContext.TestName);
File.WriteAllText(fileNameAndText[0], fileNameAndText[1]);
File.WriteAllText(fileNameAndText[2], fileNameAndText[3]);
}
[ClassCleanup()]
public static void ClassCleanup()
{
EAFLoggingUnitTesting?.Logger?.LogInformation("Cleanup");
EAFLoggingUnitTesting?.Dispose();
}
#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Production__v2_61_1__TENCOR3__pcl()
{
string check = "*.pcl";
MethodBase methodBase = new StackFrame().GetMethod();
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Getting configuration"));
_ = AdaptationTesting.GetWriteConfigurationGetFileRead(methodBase, check, EAFLoggingUnitTesting.AdaptationTesting);
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Exit"));
}
[Ignore]
[TestMethod]
public void Production__v2_61_1__TENCOR3__TransmissionControlProtocol()
{
string check = "Statistics";
MethodBase methodBase = new StackFrame().GetMethod();
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Getting configuration"));
_ = AdaptationTesting.GetWriteConfigurationGetFileRead(methodBase, check, EAFLoggingUnitTesting.AdaptationTesting);
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Exit"));
}
}
#endif

View File

@ -1,4 +1,4 @@
#if true
#if v2_60_0
using Adaptation.Shared;
using Adaptation.Shared.Methods;
using Microsoft.VisualStudio.TestTools.UnitTesting;

View File

@ -1,4 +1,4 @@
#if true
#if v2_60_0
using Adaptation.Shared;
using Adaptation.Shared.Methods;
using Microsoft.VisualStudio.TestTools.UnitTesting;

View File

@ -1,4 +1,4 @@
#if true
#if v2_60_0
using Adaptation._Tests.Shared;
using Adaptation.Shared;
using Adaptation.Shared.Methods;

View File

@ -1,4 +1,4 @@
#if true
#if v2_60_0
using Adaptation.Shared;
using Adaptation.Shared.Methods;
using Microsoft.VisualStudio.TestTools.UnitTesting;

View File

@ -0,0 +1,159 @@
#if true
using Adaptation.Shared;
using Adaptation.Shared.Methods;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using System;
using System.Diagnostics;
using System.Reflection;
namespace Adaptation._Tests.Extract.Production.v2_61_1;
[TestClass]
public class MET08DDUPSFS6420
{
#pragma warning disable CA2254
#pragma warning disable IDE0060
private static CreateSelfDescription.Production.v2_61_1.MET08DDUPSFS6420 _MET08DDUPSFS6420;
[ClassInitialize]
public static void ClassInitialize(TestContext testContext)
{
CreateSelfDescription.Production.v2_61_1.MET08DDUPSFS6420.ClassInitialize(testContext);
_MET08DDUPSFS6420 = CreateSelfDescription.Production.v2_61_1.MET08DDUPSFS6420.EAFLoggingUnitTesting;
}
private static void NonThrowTryCatch()
{
try
{ throw new Exception(); }
catch (Exception) { }
}
#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Production__v2_61_1__MET08DDUPSFS6420__MoveMatchingFiles() => _MET08DDUPSFS6420.Production__v2_61_1__MET08DDUPSFS6420__MoveMatchingFiles();
[Ignore]
[TestMethod]
public void Production__v2_61_1__MET08DDUPSFS6420__MoveMatchingFiles638918057133464542__Normal()
{
string check = "*.pdsf";
bool validatePDSF = false;
MethodBase methodBase = new StackFrame().GetMethod();
_MET08DDUPSFS6420.Production__v2_61_1__MET08DDUPSFS6420__MoveMatchingFiles();
string[] variables = _MET08DDUPSFS6420.AdaptationTesting.GetVariables(methodBase, check, validatePDSF);
IFileRead fileRead = _MET08DDUPSFS6420.AdaptationTesting.Get(methodBase, sourceFileLocation: variables[2], sourceFileFilter: variables[3], useCyclicalForDescription: false);
Logistics logistics = new(fileRead);
_ = Shared.AdaptationTesting.ReExtractCompareUpdatePassDirectory(variables, fileRead, logistics, validatePDSF);
NonThrowTryCatch();
}
#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Production__v2_61_1__MET08DDUPSFS6420__OpenInsightMetrologyViewer() => _MET08DDUPSFS6420.Production__v2_61_1__MET08DDUPSFS6420__OpenInsightMetrologyViewer();
#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Production__v2_61_1__MET08DDUPSFS6420__OpenInsightMetrologyViewer638851139271252054__Normal()
{
string check = "*.pdsf";
bool validatePDSF = false;
MethodBase methodBase = new StackFrame().GetMethod();
_MET08DDUPSFS6420.Production__v2_61_1__MET08DDUPSFS6420__OpenInsightMetrologyViewer();
string[] variables = _MET08DDUPSFS6420.AdaptationTesting.GetVariables(methodBase, check, validatePDSF);
IFileRead fileRead = _MET08DDUPSFS6420.AdaptationTesting.Get(methodBase, sourceFileLocation: variables[2], sourceFileFilter: variables[3], useCyclicalForDescription: false);
Logistics logistics = new(fileRead);
_ = Shared.AdaptationTesting.ReExtractCompareUpdatePassDirectory(variables, fileRead, logistics, validatePDSF);
NonThrowTryCatch();
}
#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Production__v2_61_1__MET08DDUPSFS6420__IQSSi() => _MET08DDUPSFS6420.Production__v2_61_1__MET08DDUPSFS6420__IQSSi();
#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Production__v2_61_1__MET08DDUPSFS6420__OpenInsight() => _MET08DDUPSFS6420.Production__v2_61_1__MET08DDUPSFS6420__OpenInsight();
#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Production__v2_61_1__MET08DDUPSFS6420__OpenInsight638851304220990490__IqsSql()
{
string check = "*.pdsf";
MethodBase methodBase = new StackFrame().GetMethod();
_MET08DDUPSFS6420.Production__v2_61_1__MET08DDUPSFS6420__OpenInsight();
string[] variables = _MET08DDUPSFS6420.AdaptationTesting.GetVariables(methodBase, check, validatePDSF: false);
IFileRead fileRead = _MET08DDUPSFS6420.AdaptationTesting.Get(methodBase, sourceFileLocation: variables[2], sourceFileFilter: variables[3], useCyclicalForDescription: false);
Logistics logistics = new(fileRead);
_ = Shared.AdaptationTesting.ReExtractCompareUpdatePassDirectory(variables, fileRead, logistics);
NonThrowTryCatch();
}
#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Production__v2_61_1__MET08DDUPSFS6420__OpenInsightMetrologyViewerAttachments() => _MET08DDUPSFS6420.Production__v2_61_1__MET08DDUPSFS6420__OpenInsightMetrologyViewerAttachments();
#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Production__v2_61_1__MET08DDUPSFS6420__OpenInsightMetrologyViewerAttachments638851355286349752__HeaderId()
{
string check = "*.pdsf";
MethodBase methodBase = new StackFrame().GetMethod();
_MET08DDUPSFS6420.Production__v2_61_1__MET08DDUPSFS6420__OpenInsightMetrologyViewerAttachments();
string[] variables = _MET08DDUPSFS6420.AdaptationTesting.GetVariables(methodBase, check, validatePDSF: false);
IFileRead fileRead = _MET08DDUPSFS6420.AdaptationTesting.Get(methodBase, sourceFileLocation: variables[2], sourceFileFilter: variables[3], useCyclicalForDescription: false);
Logistics logistics = new(fileRead);
_ = Shared.AdaptationTesting.ReExtractCompareUpdatePassDirectory(variables, fileRead, logistics);
NonThrowTryCatch();
}
#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Production__v2_61_1__MET08DDUPSFS6420__APC() => _MET08DDUPSFS6420.Production__v2_61_1__MET08DDUPSFS6420__APC();
#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Production__v2_61_1__MET08DDUPSFS6420__SPaCe() => _MET08DDUPSFS6420.Production__v2_61_1__MET08DDUPSFS6420__SPaCe();
#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Production__v2_61_1__MET08DDUPSFS6420__Processed() => _MET08DDUPSFS6420.Production__v2_61_1__MET08DDUPSFS6420__Processed();
#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Production__v2_61_1__MET08DDUPSFS6420__Archive() => _MET08DDUPSFS6420.Production__v2_61_1__MET08DDUPSFS6420__Archive();
#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Production__v2_61_1__MET08DDUPSFS6420__Dummy() => _MET08DDUPSFS6420.Production__v2_61_1__MET08DDUPSFS6420__Dummy();
}
#endif

View File

@ -0,0 +1,73 @@
#if true
using Adaptation.Shared;
using Adaptation.Shared.Methods;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using System;
using System.Diagnostics;
using System.Reflection;
namespace Adaptation._Tests.Extract.Production.v2_61_1;
[TestClass]
public class TENCOR1
{
#pragma warning disable CA2254
#pragma warning disable IDE0060
private static CreateSelfDescription.Production.v2_61_1.TENCOR1 _TENCOR1;
[ClassInitialize]
public static void ClassInitialize(TestContext testContext)
{
CreateSelfDescription.Production.v2_61_1.TENCOR1.ClassInitialize(testContext);
_TENCOR1 = CreateSelfDescription.Production.v2_61_1.TENCOR1.EAFLoggingUnitTesting;
}
private static void NonThrowTryCatch()
{
try
{ throw new Exception(); }
catch (Exception) { }
}
#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Production__v2_61_1__TENCOR1__pcl() => _TENCOR1.Production__v2_61_1__TENCOR1__pcl();
#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Production__v2_61_1__TENCOR1__pcl638851335365053074__Normal()
{
string check = "*.pcl";
bool validatePDSF = false;
_TENCOR1.Production__v2_61_1__TENCOR1__pcl();
MethodBase methodBase = new StackFrame().GetMethod();
string[] variables = _TENCOR1.AdaptationTesting.GetVariables(methodBase, check, validatePDSF);
IFileRead fileRead = _TENCOR1.AdaptationTesting.Get(methodBase, sourceFileLocation: variables[2], sourceFileFilter: variables[3], useCyclicalForDescription: false);
Logistics logistics = new(fileRead);
_ = Shared.AdaptationTesting.ReExtractCompareUpdatePassDirectory(variables, fileRead, logistics, validatePDSF);
NonThrowTryCatch();
}
[Ignore]
[TestMethod]
public void Production__v2_61_1__TENCOR1__pcl638959627725124236__Extra()
{
string check = "*.pcl";
bool validatePDSF = false;
_TENCOR1.Production__v2_61_1__TENCOR1__pcl();
MethodBase methodBase = new StackFrame().GetMethod();
string[] variables = _TENCOR1.AdaptationTesting.GetVariables(methodBase, check, validatePDSF);
IFileRead fileRead = _TENCOR1.AdaptationTesting.Get(methodBase, sourceFileLocation: variables[2], sourceFileFilter: variables[3], useCyclicalForDescription: false);
Logistics logistics = new(fileRead);
_ = Shared.AdaptationTesting.ReExtractCompareUpdatePassDirectory(variables, fileRead, logistics, validatePDSF);
NonThrowTryCatch();
}
}
#endif

View File

@ -0,0 +1,87 @@
#if true
using Adaptation._Tests.Shared;
using Adaptation.Shared;
using Adaptation.Shared.Methods;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using System;
using System.Diagnostics;
using System.Reflection;
namespace Adaptation._Tests.Extract.Production.v2_61_1;
[TestClass]
public class TENCOR2
{
#pragma warning disable CA2254
#pragma warning disable IDE0060
private static CreateSelfDescription.Production.v2_61_1.TENCOR2 _TENCOR2;
[ClassInitialize]
public static void ClassInitialize(TestContext testContext)
{
CreateSelfDescription.Production.v2_61_1.TENCOR2.ClassInitialize(testContext);
_TENCOR2 = CreateSelfDescription.Production.v2_61_1.TENCOR2.EAFLoggingUnitTesting;
}
private static void NonThrowTryCatch()
{
try
{ throw new Exception(); }
catch (Exception) { }
}
#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Production__v2_61_1__TENCOR2__pcl() => _TENCOR2.Production__v2_61_1__TENCOR2__pcl();
[Ignore]
[TestMethod]
public void Production__v2_61_1__TENCOR2__pcl638851352261289484__Normal()
{
string check = "*.pcl";
bool validatePDSF = false;
_TENCOR2.Production__v2_61_1__TENCOR2__pcl();
MethodBase methodBase = new StackFrame().GetMethod();
string[] variables = _TENCOR2.AdaptationTesting.GetVariables(methodBase, check, validatePDSF);
IFileRead fileRead = _TENCOR2.AdaptationTesting.Get(methodBase, sourceFileLocation: variables[2], sourceFileFilter: variables[3], useCyclicalForDescription: false);
Logistics logistics = new(fileRead);
_ = AdaptationTesting.ReExtractCompareUpdatePassDirectory(variables, fileRead, logistics, validatePDSF);
NonThrowTryCatch();
}
[Ignore]
[TestMethod]
public void Production__v2_61_1__TENCOR2__pcl638860965797666706__TwoRuns()
{
string check = "*.pcl";
bool validatePDSF = false;
_TENCOR2.Production__v2_61_1__TENCOR2__pcl();
MethodBase methodBase = new StackFrame().GetMethod();
string[] variables = _TENCOR2.AdaptationTesting.GetVariables(methodBase, check, validatePDSF);
IFileRead fileRead = _TENCOR2.AdaptationTesting.Get(methodBase, sourceFileLocation: variables[2], sourceFileFilter: variables[3], useCyclicalForDescription: false);
Logistics logistics = new(fileRead);
_ = AdaptationTesting.ReExtractCompareUpdatePassDirectory(variables, fileRead, logistics, validatePDSF);
NonThrowTryCatch();
}
[Ignore]
[TestMethod]
public void Production__v2_61_1__TENCOR2__pdsf__Normal()
{
bool validatePDSF = false;
string check = "*EQP_*.pdsf";
_TENCOR2.Production__v2_61_1__TENCOR2__pdsf();
MethodBase methodBase = new StackFrame().GetMethod();
string[] variables = _TENCOR2.AdaptationTesting.GetVariables(methodBase, check, validatePDSF);
IFileRead fileRead = _TENCOR2.AdaptationTesting.Get(methodBase, sourceFileLocation: variables[2], sourceFileFilter: variables[3], useCyclicalForDescription: false);
Logistics logistics = new(fileRead);
_ = AdaptationTesting.ReExtractCompareUpdatePassDirectory(variables, fileRead, logistics, validatePDSF);
NonThrowTryCatch();
}
}
#endif

View File

@ -0,0 +1,73 @@
#if true
using Adaptation.Shared;
using Adaptation.Shared.Methods;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using System;
using System.Diagnostics;
using System.Reflection;
namespace Adaptation._Tests.Extract.Production.v2_61_1;
[TestClass]
public class TENCOR3
{
#pragma warning disable CA2254
#pragma warning disable IDE0060
private static CreateSelfDescription.Production.v2_61_1.TENCOR3 _TENCOR3;
[ClassInitialize]
public static void ClassInitialize(TestContext testContext)
{
CreateSelfDescription.Production.v2_61_1.TENCOR3.ClassInitialize(testContext);
_TENCOR3 = CreateSelfDescription.Production.v2_61_1.TENCOR3.EAFLoggingUnitTesting;
}
private static void NonThrowTryCatch()
{
try
{ throw new Exception(); }
catch (Exception) { }
}
#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Production__v2_61_1__TENCOR3__pcl() => _TENCOR3.Production__v2_61_1__TENCOR3__pcl();
#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Production__v2_61_1__TENCOR3__pcl638851336413561558__Normal()
{
string check = "*.pcl";
bool validatePDSF = false;
_TENCOR3.Production__v2_61_1__TENCOR3__pcl();
MethodBase methodBase = new StackFrame().GetMethod();
string[] variables = _TENCOR3.AdaptationTesting.GetVariables(methodBase, check, validatePDSF);
IFileRead fileRead = _TENCOR3.AdaptationTesting.Get(methodBase, sourceFileLocation: variables[2], sourceFileFilter: variables[3], useCyclicalForDescription: false);
Logistics logistics = new(fileRead);
_ = Shared.AdaptationTesting.ReExtractCompareUpdatePassDirectory(variables, fileRead, logistics, validatePDSF);
NonThrowTryCatch();
}
[Ignore]
[TestMethod]
public void Production__v2_61_1__TENCOR3__TransmissionControlProtocol638930712297063335__Normal()
{
bool validatePDSF = false;
string check = "Statistics";
MethodBase methodBase = new StackFrame().GetMethod();
_TENCOR3.Production__v2_61_1__TENCOR3__TransmissionControlProtocol();
string[] variables = _TENCOR3.AdaptationTesting.GetVariables(methodBase, check, validatePDSF);
IFileRead fileRead = _TENCOR3.AdaptationTesting.Get(methodBase, sourceFileLocation: variables[2], sourceFileFilter: variables[3], useCyclicalForDescription: false);
Logistics logistics = new(fileRead);
_ = Shared.AdaptationTesting.ReExtractCompareUpdatePassDirectory(variables, fileRead, logistics, validatePDSF);
NonThrowTryCatch();
}
}
#endif

View File

@ -193,7 +193,12 @@ public class AdaptationTesting : ISMTP
segments = withActualCICN.Split(new string[] { ticks }, StringSplitOptions.None);
dummyDirectory = Path.Combine(dummyRoot, cellInstanceName, ticks, string.Join(null, segments));
if (!Directory.Exists(dummyDirectory))
{
_ = Directory.CreateDirectory(dummyDirectory);
try
{ Directory.SetLastWriteTime(Path.Combine(dummyRoot, cellInstanceName), DateTime.Now); }
catch { }
}
}
if (string.IsNullOrEmpty(ticks))
{
@ -996,22 +1001,22 @@ public class AdaptationTesting : ISMTP
{
try
{
if (!string.IsNullOrEmpty(fileConnectorConfigurationTuple.Item2.ErrorTargetFileLocation))
if (!string.IsNullOrEmpty(fileConnectorConfigurationTuple.Item2.ErrorTargetFileLocation) && !fileConnectorConfigurationTuple.Item2.ErrorTargetFileLocation.Contains("10."))
{
if (!Directory.Exists(fileConnectorConfigurationTuple.Item2.ErrorTargetFileLocation))
_ = Directory.CreateDirectory(fileConnectorConfigurationTuple.Item2.ErrorTargetFileLocation);
}
if (!string.IsNullOrEmpty(fileConnectorConfigurationTuple.Item2.SourceFileLocation))
if (!string.IsNullOrEmpty(fileConnectorConfigurationTuple.Item2.SourceFileLocation) && !fileConnectorConfigurationTuple.Item2.SourceFileLocation.Contains("10."))
{
if (!Directory.Exists(fileConnectorConfigurationTuple.Item2.SourceFileLocation))
_ = Directory.CreateDirectory(fileConnectorConfigurationTuple.Item2.SourceFileLocation);
}
if (!string.IsNullOrEmpty(fileConnectorConfigurationTuple.Item2.TargetFileLocation))
if (!string.IsNullOrEmpty(fileConnectorConfigurationTuple.Item2.TargetFileLocation) && !fileConnectorConfigurationTuple.Item2.TargetFileLocation.Contains("10."))
{
if (!Directory.Exists(fileConnectorConfigurationTuple.Item2.TargetFileLocation))
_ = Directory.CreateDirectory(fileConnectorConfigurationTuple.Item2.TargetFileLocation);
}
if (!string.IsNullOrEmpty(fileConnectorConfigurationTuple.Item2.AlternateTargetFolder))
if (!string.IsNullOrEmpty(fileConnectorConfigurationTuple.Item2.AlternateTargetFolder) && !fileConnectorConfigurationTuple.Item2.AlternateTargetFolder.Contains("10."))
{
if (!Directory.Exists(fileConnectorConfigurationTuple.Item2.AlternateTargetFolder.Split('|')[0]))
_ = Directory.CreateDirectory(fileConnectorConfigurationTuple.Item2.AlternateTargetFolder.Split('|')[0]);
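
The change above skips automatic directory creation for any configured location whose path contains "10.", which in this test setup appears to mark IP-addressed network shares that should not be created from a developer machine. A minimal sketch of the same guard as a standalone helper (not part of the diff; the class and method names are hypothetical):

using System.IO;

internal static class LocationSetup
{
    // Hypothetical helper mirroring the guard above: empty locations and
    // IP-addressed ("10.") locations are left alone; everything else is created.
    internal static void EnsureLocationExists(string location)
    {
        if (string.IsNullOrEmpty(location) || location.Contains("10."))
            return;
        // AlternateTargetFolder values may be "path|filter" pairs; only the path part is created.
        string path = location.Split('|')[0];
        if (!Directory.Exists(path))
            _ = Directory.CreateDirectory(path);
    }
}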

View File

@ -64,7 +64,7 @@ public class MET08DDUPSFS6420 : LoggingUnitTesting, IDisposable
StringBuilder results = new();
(string cellInstanceName, string cellInstanceVersionName)[] collection = new (string, string)[]
{
new("MET08DDUPSFS6420", "v2.60.0"),
new("MET08DDUPSFS6420", "v2.61.1"),
};
string production = "http://messa08ec.infineon.com:9003/CellInstanceServiceV2";
Shared.PasteSpecialXml.EAF.XML.API.CellInstance.CellInstanceVersion cellInstanceVersion;

View File

@ -0,0 +1,36 @@
// getValue(getContextData('2', 'cds.NULL_DATA', ''));
function getValue(json) {
let result;
if (json == undefined || json.length === 0)
result = 'A) Invalid input!';
else {
let parsed;
try {
parsed = JSON.parse(json);
} catch (error) {
parsed = null;
}
if (parsed == null)
result = 'B) Invalid input!';
else {
let reactorType = parsed.rds == undefined ? '' : parsed.rds.reactorType == undefined ? '' : parsed.rds.reactorType;
if (parsed.rds == undefined)
result = '-';
else if (parsed.rds.loadLockSide == undefined)
result = '_ - ' + reactorType;
else if (parsed.rds.loadLockSide === 'L')
result = 'Left - ' + reactorType;
else if (parsed.rds.loadLockSide === 'R')
result = 'Right - ' + reactorType;
else
result = parsed.rds.loadLockSide + ' - ' + reactorType;
}
}
return result;
}
const json = '{"rds":{"prodSpec":{"recipesAndPatterns":[{"recipe":"6IN25_ROTR","pattern":"","patternSize":0,"tool":"TENCOR"}]}}}';
const testA = getValue(json);
if (testA !== '_ - ')
throw 'Test A failed: ' + testA;

View File

@ -203,9 +203,9 @@ public class PCL : LoggingUnitTesting, IDisposable
StringBuilder results = new();
(string cellInstanceName, string cellInstanceVersionName)[] collection = new (string, string)[]
{
new("TENCOR1", "v2.60.0"),
new("TENCOR2", "v2.60.0"),
new("TENCOR3", "v2.60.0"),
new("TENCOR1", "v2.61.1"),
new("TENCOR2", "v2.61.1"),
new("TENCOR3", "v2.61.1"),
new("TENCOR1-EQPT", "v2.12.3"),
new("TENCOR2-EQPT", "v2.12.3"),
new("TENCOR3-EQPT", "v2.12.3"),

View File

@ -0,0 +1,115 @@
// Recipe 1 = Matched
// recipes-and-patterns.js under IndexOf
// RecipesAndPatternsMatch
// ($('dcp.TENCOR1/csv/Index', 0) + 1) == $('dcp.TENCOR1/csv/Count', 0)
// getValue('TENCOR', $('dcp.TENCOR1/csv/Count', 0), $('dcp.TENCOR1/csv/Session', ''), 'pattern', getContextData('2', 'cds.NULL_DATA', ''));
function getValue(tool, patternSize, recipe, pattern, json) {
let result;
if (tool == undefined || tool.length === 0 || patternSize == undefined || patternSize.length === 0 || recipe == undefined || recipe.length === 0 || pattern == undefined || pattern.length === 0 || json == undefined || json.length === 0)
result = 'A) Invalid input!';
else {
let parsed;
try {
parsed = JSON.parse(json);
} catch (error) {
parsed = null;
}
if (parsed == null)
result = 'B) Invalid input!';
else if (parsed.rds == undefined || parsed.rds.prodSpec == undefined || parsed.rds.prodSpec.recipesAndPatterns == undefined)
result = 'C) No Spec!';
else {
let toolMatches = [];
for (let index = 0; index < parsed.rds.prodSpec.recipesAndPatterns.length; index++) {
if (parsed.rds.prodSpec.recipesAndPatterns[index].tool === tool) {
toolMatches.push(parsed.rds.prodSpec.recipesAndPatterns[index]);
}
}
if (toolMatches == null || toolMatches.length === 0)
result = 'Tool [' + tool + '] not found in OI API results!';
else {
let debug = '';
let matches = 0;
for (let index = 0; index < toolMatches.length; index++) {
debug += 'patternSize: ' + toolMatches[index].patternSize +
';~recipe: ' + toolMatches[index].recipe +
';~pattern: ' + toolMatches[index].pattern + ';~';
if (toolMatches[index].recipe.toLowerCase() == recipe.toLowerCase()) {
matches++;
}
}
if (matches > 0)
result = '1';
else
result = 'Value not matched~Run~patternSize: ' + patternSize + ';~recipe: ' + recipe + ';~pattern: ' + pattern + ';~API~' + debug;
}
}
}
return result;
}
getValue('TENCOR', 0, '6IN25_ROTR', 'pattern', '{"rds":{"prodSpec":{"recipesAndPatterns":[{"recipe":"6IN25_ROTR","pattern":"","patternSize":0,"tool":"TENCOR"}]}}}');
let json;
let tool;
let recipe;
let pattern;
let patternSize;
tool = 'TENCOR';
patternSize = 0;
recipe = '6IN25_ROTR';
pattern = 'pattern';
json = '{"rds":{"prodSpec":{"recipesAndPatterns":[{"recipe":"6IN25_ROTR","pattern":"","patternSize":0,"tool":"TENCOR"}]}}}';
const testA = getValue(tool, patternSize, recipe, pattern, json);
if (testA !== '1')
throw 'Test A failed: ' + testA;
tool = null;
const testB = getValue(tool, patternSize, recipe, pattern, json);
if (testB !== 'A) Invalid input!')
throw 'Test L failed: ' + testB;
tool = '';
const testC = getValue(tool, patternSize, recipe, pattern, json);
if (testC !== 'A) Invalid input!')
throw 'Test M failed: ' + testC;
patternSize = null;
const testD = getValue(tool, patternSize, recipe, pattern, json);
if (testD !== 'A) Invalid input!')
throw 'Test J failed: ' + testD;
patternSize = '';
const testE = getValue(tool, patternSize, recipe, pattern, json);
if (testE !== 'A) Invalid input!')
throw 'Test K failed: ' + testE;
recipe = null;
const testF = getValue(tool, patternSize, recipe, pattern, json);
if (testF !== 'A) Invalid input!')
throw 'Test F failed: ' + testF;
recipe = '';
const testG = getValue(tool, patternSize, recipe, pattern, json);
if (testG !== 'A) Invalid input!')
throw 'Test G failed: ' + testG;
pattern = null;
const testH = getValue(tool, patternSize, recipe, pattern, json);
if (testH !== 'A) Invalid input!')
throw 'Test H failed: ' + testH;
pattern = '';
const testI = getValue(tool, patternSize, recipe, pattern, json);
if (testI !== 'A) Invalid input!')
throw 'Test I failed: ' + testI;
json = '';
const testK = getValue(tool, patternSize, recipe, pattern, json);
if (testK !== 'A) Invalid input!')
throw 'Test B failed: ' + testK;
json = 'invalid';
const testL = getValue(tool, patternSize, recipe, pattern, json);
if (testL !== 'B) Invalid input!')
throw 'Test C failed: ' + testL;
json = '{"rds":{}}';
const testM = getValue(tool, patternSize, recipe, pattern, json);
if (testM !== 'C) No Spec!')
throw 'Test D failed: ' + testM;
json = '{"rds":{"prodSpec":{"recipesAndPatterns":[]}}}';
const testN = getValue(tool, patternSize, recipe, pattern, json);
if (testN !== 'Tool [TENCOR] not found in OI API results!')
throw 'Test E failed: ' + testN;

View File

@ -132,14 +132,6 @@
<Compile Include="Adaptation\FileHandlers\pcl\Run.cs" />
<Compile Include="Adaptation\FileHandlers\pcl\Wafer.cs" />
<Compile Include="Adaptation\FileHandlers\pcl\WaferSummary.cs" />
<Compile Include="Adaptation\FileHandlers\pdsf\Constant.cs" />
<Compile Include="Adaptation\FileHandlers\pdsf\Convert.cs" />
<Compile Include="Adaptation\FileHandlers\pdsf\FileRead.cs" />
<Compile Include="Adaptation\FileHandlers\pdsf\Header.cs" />
<Compile Include="Adaptation\FileHandlers\pdsf\Row.cs" />
<Compile Include="Adaptation\FileHandlers\pdsf\Run.cs" />
<Compile Include="Adaptation\FileHandlers\pdsf\Wafer.cs" />
<Compile Include="Adaptation\FileHandlers\pdsf\WaferSummary.cs" />
<Compile Include="Adaptation\FileHandlers\Processed\FileRead.cs" />
<Compile Include="Adaptation\FileHandlers\SPaCe\FileRead.cs" />
<Compile Include="Adaptation\FileHandlers\TransmissionControlProtocol\FileRead.cs" />
@ -195,13 +187,13 @@
<Version>7.2.4630.5</Version>
</PackageReference>
<PackageReference Include="Infineon.EAF.Runtime">
<Version>2.60.0</Version>
<Version>2.61.1</Version>
</PackageReference>
<PackageReference Include="Pdfbox">
<Version>1.1.1</Version>
</PackageReference>
<PackageReference Include="System.Text.Json">
<Version>8.0.5</Version>
<Version>8.0.3</Version>
</PackageReference>
</ItemGroup>
<Import Project="$(MSBuildToolsPath)\Microsoft.CSharp.targets" />

View File

@ -32,5 +32,5 @@ using System.Runtime.InteropServices;
// You can specify all the values or you can default the Build and Revision Numbers
// by using the '*' as shown below:
// [assembly: AssemblyVersion("1.0.*")]
[assembly: AssemblyVersion("2.60.0.0")]
[assembly: AssemblyFileVersion("2.60.0.0")]
[assembly: AssemblyVersion("2.61.1.0")]
[assembly: AssemblyFileVersion("2.61.1.0")]

View File

@ -199,4 +199,31 @@ public class Logistics : ILogistics
_ProcessJobID = processJobID;
}
private static int GetCountFromFileName(Logistics logistics)
{
int result;
string[] segments = logistics.FileInfo.Name.Split('.');
string[] segmentsB = segments[0].Split('_');
string countFromFileName = segmentsB.Length < 3 ? "0" : segmentsB[2];
if (!int.TryParse(countFromFileName, out result))
result = 0;
return result;
}
internal static long GetUniqueSequence(Logistics logistics)
{
long result;
int countFromFileName = GetCountFromFileName(logistics);
result = (logistics.Sequence * 10) + countFromFileName;
return result;
}
internal static string GetUniqueId(Logistics logistics)
{
string result;
int countFromFileName = GetCountFromFileName(logistics);
result = $"{logistics.JobID}_{logistics.MID}_{logistics.DateTimeFromSequence:yyyyMMddHHmmssffff}_{countFromFileName}";
return result;
}
}
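
For context, a standalone re-statement of the count/sequence derivation added above (a sketch only; UniqueSequenceSketch is a hypothetical name, and the file name and sequence value in the example are illustrative, not taken from the repository):

internal static class UniqueSequenceSketch
{
    // Same parsing as GetCountFromFileName above: the third underscore-separated
    // segment of the file name (before the extension) is treated as a count.
    internal static long GetUniqueSequence(string fileName, long sequence)
    {
        string[] segments = fileName.Split('.');
        string[] segmentsB = segments[0].Split('_');
        string countFromFileName = segmentsB.Length < 3 ? "0" : segmentsB[2];
        if (!int.TryParse(countFromFileName, out int count))
            count = 0;
        return (sequence * 10) + count;
    }
}

// Illustrative values only: "RUN_MID_2.pdsf" with sequence 638918057133464542
// yields 6389180571334645422; the trailing digit is the count parsed from the name,
// and GetUniqueId appends the same count after the formatted timestamp.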