Compare commits

..

9 Commits

SHA1 Message Date
1603bb8175 Infineon.EAF.Runtime v2.61.1 2025-10-14 13:25:42 -07:00
10a0662561 Enhance PCL and PDSF file handlers with new constants and refactor methods for improved readability and functionality 2025-10-14 08:00:17 -07:00
0a477c7ea1 Add IndexOf property to WSRequest and Description classes; implement getValue function in recipes-and-patterns.js 2025-10-13 17:10:26 -07:00
0dc57eb3d7 Add Transmission Control Protocol file handling and update PCL serialization
- Introduced FileRead and Record classes for handling file reading in the Transmission Control Protocol.
- Enhanced Description, Detail, and other related classes with JSON serialization attributes for improved data handling.
- Implemented methods for reading and processing files, including network stream management.
- Updated unit tests to cover new functionality and ensure robust testing.
- Added new PDSF file handling classes and integrated them into the project structure.
- Refactored existing code to utilize source generation for JSON serialization, improving performance and maintainability.
2025-09-15 09:59:54 -07:00
f717c6cf91 Refactored Run and Wafer classes for improved wafer retrieval logic 2025-09-02 10:19:53 -07:00
e011bf8e37 Switched to xml for InfinityQS export 2025-09-02 08:00:18 -07:00
f70690754f Preparation to switch to xml for InfinityQS export 2025-08-27 12:11:16 -07:00
3467fb63a0 Removed save-open-insight-file to use process-data-standard-format instead 2025-06-10 07:37:23 -07:00
0ef44389c6 Updated tests with new runs 2025-06-10 07:37:09 -07:00
64 changed files with 3461 additions and 742 deletions
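Commit 0dc57eb3d7 above notes that existing code was refactored to use source generation for JSON serialization. As a rough sketch of what that System.Text.Json pattern looks like on .NET 8 — the type and context names below are illustrative placeholders, not the repository's actual Description/Detail classes:

```csharp
using System.Text.Json;
using System.Text.Json.Serialization;

// Hypothetical payload type standing in for one of the handler description classes.
public record Description(string Lot, string Reactor, string Recipe);

// Registering types on a JsonSerializerContext lets the source generator emit
// serialization metadata at compile time instead of relying on runtime reflection.
[JsonSourceGenerationOptions(WriteIndented = true)]
[JsonSerializable(typeof(Description))]
[JsonSerializable(typeof(Description[]))]
internal partial class DescriptionSourceGenerationContext : JsonSerializerContext
{
}

public static class Example
{
    public static string Serialize(Description description) =>
        JsonSerializer.Serialize(description, DescriptionSourceGenerationContext.Default.Description);

    public static Description? Deserialize(string json) =>
        JsonSerializer.Deserialize(json, DescriptionSourceGenerationContext.Default.Description);
}
```

The repository presumably registers its own serializable types the same way; this only illustrates the mechanism the commit message refers to.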

View File

@@ -110,7 +110,7 @@ dotnet_diagnostic.CA2254.severity = none # CA2254: The logging message template
 dotnet_diagnostic.IDE0001.severity = warning # IDE0001: Simplify name
 dotnet_diagnostic.IDE0002.severity = warning # Simplify (member access) - System.Version.Equals("1", "2"); Version.Equals("1", "2");
 dotnet_diagnostic.IDE0004.severity = warning # IDE0004: Cast is redundant.
-dotnet_diagnostic.IDE0005.severity = warning # Using directive is unnecessary
+dotnet_diagnostic.IDE0005.severity = none # Using directive is unnecessary
 dotnet_diagnostic.IDE0028.severity = none # IDE0028: Collection initialization can be simplified
 dotnet_diagnostic.IDE0031.severity = warning # Use null propagation (IDE0031)
 dotnet_diagnostic.IDE0047.severity = warning # IDE0047: Parentheses can be removed
@@ -122,6 +122,7 @@ dotnet_diagnostic.IDE0290.severity = none # Use primary constructor [Distance]cs
 dotnet_diagnostic.IDE0300.severity = none # IDE0300: Collection initialization can be simplified
 dotnet_diagnostic.IDE0301.severity = none #IDE0301: Collection initialization can be simplified
 dotnet_diagnostic.IDE0305.severity = none # IDE0305: Collection initialization can be simplified
+dotnet_diagnostic.MSTEST0015.severity = none # MSTEST0015: Test method {method} should not be ignored
 dotnet_diagnostic.MSTEST0037.severity = error # MSTEST0037: Use proper 'Assert' methods
 dotnet_diagnostic.SYSLIB1045.severity = none # SYSLIB1045: diagnostics for regex source generation
 dotnet_naming_rule.abstract_method_should_be_pascal_case.severity = warning

View File

@@ -1,10 +1,43 @@
 {
     "configurations": [
+        {
+            "mode": "debug",
+            "name": "Go launch file",
+            "program": "${file}",
+            "request": "launch",
+            "type": "go"
+        },
+        {
+            "name": "node Launch Current Opened File",
+            "program": "${file}",
+            "request": "launch",
+            "type": "node"
+        },
+        {
+            "cwd": "${workspaceFolder}",
+            "internalConsoleOptions": "neverOpen",
+            "name": "Debug File",
+            "program": "${file}",
+            "request": "launch",
+            "stopOnEntry": false,
+            "type": "bun",
+            "watchMode": false
+        },
+        {
+            "cwd": "${workspaceFolder}",
+            "internalConsoleOptions": "neverOpen",
+            "name": "Run File",
+            "noDebug": true,
+            "program": "${file}",
+            "request": "launch",
+            "type": "bun",
+            "watchMode": false
+        },
         {
             "name": ".NET Core Attach",
-            "type": "coreclr",
+            "processId": 32760,
             "request": "attach",
-            "processId": 13036
+            "type": "coreclr"
         }
     ]
 }

View File

@@ -1,19 +1,134 @@
 {
     "version": "2.0.0",
+    "inputs": [
+        {
+            "default": "Development",
+            "description": "Which ASP Net Core Environment?",
+            "id": "ASPNETCORE_ENVIRONMENT",
+            "options": [
+                "Development",
+                "Production"
+            ],
+            "type": "pickString"
+        },
+        {
+            "default": "{AssemblyTitle}",
+            "description": "What Assembly Title?",
+            "id": "AssemblyTitle",
+            "type": "promptString"
+        },
+        {
+            "default": "{Build.BuildId}",
+            "description": "Which Build BuildId?",
+            "id": "Build.BuildId",
+            "type": "promptString"
+        },
+        {
+            "default": "{Build.Reason}",
+            "description": "Which Build Reason?",
+            "id": "Build.Reason",
+            "type": "promptString"
+        },
+        {
+            "default": "{Build.Repository.Id}",
+            "description": "Which Build Repository Id?",
+            "id": "Build.Repository.Id",
+            "type": "promptString"
+        },
+        {
+            "default": "{Build.Repository.Name}",
+            "description": "Which Build Repository Name?",
+            "id": "Build.Repository.Name",
+            "type": "promptString"
+        },
+        {
+            "default": "{Build.SourceVersion}",
+            "description": "Which Build Source Version?",
+            "id": "Build.SourceVersion",
+            "type": "promptString"
+        },
+        {
+            "default": "Debug",
+            "description": "Which Configuration?",
+            "id": "Configuration",
+            "options": [
+                "Debug",
+                "Release"
+            ],
+            "type": "pickString"
+        },
+        {
+            "default": "net8.0",
+            "description": "Which Core Version?",
+            "id": "CoreVersion",
+            "options": [
+                "net8.0"
+            ],
+            "type": "pickString"
+        },
+        {
+            "default": "C:/Program Files (x86)/Microsoft Visual Studio/2022/BuildTools/MSBuild/Current/Bin/MSBuild.exe",
+            "description": "Which MS Build?",
+            "id": "MSBuild",
+            "type": "promptString"
+        },
+        {
+            "default": "https://artifactory.intra.infineon.com/artifactory/api/nuget/ngt-fi-package-main-vir/",
+            "description": "Which Nuget Source?",
+            "id": "NugetSource",
+            "type": "promptString"
+        },
+        {
+            "default": "win-x64",
+            "description": "Which Runtime?",
+            "id": "Runtime",
+            "options": [
+                "win-x64",
+                "win-x32",
+                "linux-x64",
+                "linux-x32"
+            ],
+            "type": "pickString"
+        },
+        {
+            "default": "L:/",
+            "description": "Which System DefaultWorkingDirectory?",
+            "id": "System.DefaultWorkingDirectory",
+            "options": [
+                "L:/",
+                "D:/",
+                "C:/"
+            ],
+            "type": "pickString"
+        },
+        {
+            "default": "v4.8",
+            "description": "Which Core Target Framework Version?",
+            "id": "TargetFrameworkVersion",
+            "options": [
+                "v4.8"
+            ],
+            "type": "pickString"
+        },
+        {
+            "default": "{UserSecretsId}",
+            "description": "Which Core User Secrets Id?",
+            "id": "UserSecretsId",
+            "type": "promptString"
+        }
+    ],
     "tasks": [
         {
             "label": "Build",
             "command": "dotnet",
             "type": "process",
             "args": [
-                "build",
-                "/property:GenerateFullPaths=true",
-                "/consoleloggerparameters:NoSummary"
+                "build"
             ],
             "problemMatcher": "$msCompile"
         },
         {
-            "label": "Test-Debug",
+            "label": "Test Debug",
             "command": "dotnet",
             "type": "process",
             "args": [
@@ -24,7 +139,7 @@
             "problemMatcher": "$msCompile"
         },
         {
-            "label": "Test-Release",
+            "label": "Test Release",
             "command": "dotnet",
             "type": "process",
             "args": [
@@ -50,7 +165,7 @@
             "problemMatcher": "$msCompile"
         },
         {
-            "label": "Format-Whitespaces",
+            "label": "Format Whitespaces",
             "command": "dotnet",
             "type": "process",
             "args": [
@@ -87,13 +202,13 @@
             "problemMatcher": "$msCompile"
         },
         {
-            "label": "Project",
+            "label": "Code Project",
             "type": "shell",
             "command": "code ../MET08DDUPSFS6420.csproj",
             "problemMatcher": []
         },
         {
-            "label": "Readme",
+            "label": "Code Read Me",
             "type": "shell",
             "command": "code ../README.md",
             "problemMatcher": []
@@ -113,7 +228,7 @@
             "problemMatcher": []
         },
         {
-            "label": "Git Config",
+            "label": "Code Git Config",
             "type": "shell",
             "command": "code ../.git/config",
             "problemMatcher": []

View File

@@ -128,7 +128,7 @@ public class FileRead : Shared.FileRead, IFileRead
         Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
         if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
             FileCopy(reportFullPath, dateTime, descriptions);
-        results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics[0]), tests, jsonElements, new List<FileInfo>());
+        results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics), tests, jsonElements, new List<FileInfo>());
         return results;
     }

View File

@@ -153,7 +153,7 @@ public class FileRead : Shared.FileRead, IFileRead
         Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
         if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
             MoveArchive(reportFullPath, dateTime);
-        results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics[0]), tests, jsonElements, new List<FileInfo>());
+        results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics), tests, jsonElements, new List<FileInfo>());
         return results;
     }

View File

@@ -21,9 +21,11 @@ public class CellInstanceConnectionName
             nameof(OpenInsight) => new OpenInsight.FileRead(smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null),
             nameof(OpenInsightMetrologyViewer) => new OpenInsightMetrologyViewer.FileRead(smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null),
             nameof(OpenInsightMetrologyViewerAttachments) => new OpenInsightMetrologyViewerAttachments.FileRead(smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null),
+            nameof(pdsf) => new pdsf.FileRead(smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null),
             nameof(pcl) => new pcl.FileRead(smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null),
             nameof(Processed) => new Processed.FileRead(smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null),
             nameof(SPaCe) => new SPaCe.FileRead(smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null),
+            nameof(TransmissionControlProtocol) => new TransmissionControlProtocol.FileRead(smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null),
             _ => throw new Exception($"\"{cellInstanceConnectionName}\" not mapped")
         };
         return result;

View File

@@ -8,6 +8,7 @@ using System;
 using System.Collections.Generic;
 using System.IO;
 using System.Linq;
+using System.Text;
 using System.Text.Json;

 namespace Adaptation.FileHandlers.IQSSi;
@@ -107,11 +108,63 @@ public class FileRead : Shared.FileRead, IFileRead
         return results;
     }

+    private static string GetLines(Logistics logistics, List<pcl.Description> descriptions)
+    {
+        StringBuilder result = new();
+        pcl.Description x = descriptions[0];
+        char del = '\t';
+        _ = result.Append(x.AreaCountAvg).Append(del). // 001 - AreaCountAvg
+            Append(x.AreaCountMax).Append(del). // 002 - AreaCountMax
+            Append(x.AreaCountMin).Append(del). // 003 - AreaCountMin
+            Append(x.AreaCountStdDev).Append(del). // 004 - AreaCountStdDev
+            Append(x.AreaTotalAvg).Append(del). // 005 - AreaTotalAvg
+            Append(x.AreaTotalMax).Append(del). // 006 - AreaTotalMax
+            Append(x.AreaTotalMin).Append(del). // 007 - AreaTotalMin
+            Append(x.AreaTotalStdDev).Append(del). // 008 - AreaTotalStdDev
+            Append(x.Date).Append(del). // 009 -
+            Append(x.HazeAverageAvg).Append(del). // 010 - Haze Average
+            Append(x.HazeAverageMax).Append(del). // 011 -
+            Append(x.HazeAverageMin).Append(del). // 012 -
+            Append(x.HazeAverageStdDev).Append(del). // 013 -
+            Append(x.HazeRegionAvg).Append(del). // 014 -
+            Append(x.HazeRegionMax).Append(del). // 015 -
+            Append(x.HazeRegionMin).Append(del). // 016 -
+            Append(x.HazeRegionStdDev).Append(del). // 017 -
+            Append(x.Lot).Append(del). // 018 -
+            Append(x.LPDCM2Avg).Append(del). // 019 -
+            Append(x.LPDCM2Max).Append(del). // 020 -
+            Append(x.LPDCM2Min).Append(del). // 021 -
+            Append(x.LPDCM2StdDev).Append(del). // 022 -
+            Append(x.LPDCountAvg).Append(del). // 023 -
+            Append(x.LPDCountMax).Append(del). // 024 -
+            Append(x.LPDCM2Min).Append(del). // 025 -
+            Append(x.LPDCountStdDev).Append(del). // 026 -
+            Append(x.Employee).Append(del). // 027 -
+            Append(x.RDS).Append(del). // 028 - Lot
+            Append(x.Reactor).Append(del). // 029 - Process
+            Append(x.Recipe.Replace(";", string.Empty)).Append(del). // 030 - Part
+            Append(x.ScratchCountAvg).Append(del). // 031 - Scratch Count
+            Append(x.ScratchCountMax).Append(del). // 032 -
+            Append(x.ScratchCountMin).Append(del). // 033 -
+            Append(x.ScratchTotalStdDev).Append(del). // 034 -
+            Append(x.ScratchTotalAvg).Append(del). // 035 - Scratch Length
+            Append(x.ScratchTotalMax).Append(del). // 036 -
+            Append(x.ScratchTotalMin).Append(del). // 037 -
+            Append(x.ScratchTotalStdDev).Append(del). // 038 -
+            Append(x.SumOfDefectsAvg).Append(del). // 039 - Average Sum of Defects
+            Append(x.SumOfDefectsMax).Append(del). // 040 - Max Sum of Defects
+            Append(x.SumOfDefectsMin).Append(del). // 041 - Min Sum of Defects
+            Append(x.SumOfDefectsStdDev).Append(del). // 042 - SumOfDefectsStdDev
+            Append(logistics.MesEntity).Append(del). // 043 -
+            AppendLine();
+        return result.ToString();
+    }

     private void SaveIQSFile(string reportFullPath, DateTime dateTime, List<pcl.Description> descriptions, Test[] tests)
     {
         bool isDummyRun = false;
         List<(Shared.Properties.IScopeInfo, string)> collection = new();
-        string lines = OpenInsight.FileRead.GetLines(_Logistics, descriptions);
+        string lines = GetLines(_Logistics, descriptions);
         string successDirectory = _FileConnectorConfiguration.AlternateTargetFolder;
         string duplicateDirectory = Path.Combine(_FileConnectorConfiguration.SourceFileLocation, _CellInstanceName);
         if (!Directory.Exists(duplicateDirectory))
@@ -126,7 +179,7 @@ public class FileRead : Shared.FileRead, IFileRead
         WaitForFileConsumption(dateTime, descriptions, isDummyRun, successDirectory, duplicateDirectory, collection, duplicateFile);
     }

-    private void FileCopy<T>(string reportFullPath, DateTime dateTime, List<T> descriptions) where T : Shared.Properties.IDescription
+    private void WriteFile<T>(string reportFullPath, DateTime dateTime, List<T> descriptions) where T : Shared.Properties.IDescription
     {
         bool isDummyRun = false;
         string successDirectory = string.Empty;
@@ -134,37 +187,37 @@ public class FileRead : Shared.FileRead, IFileRead
         string duplicateDirectory = Path.Combine(_FileConnectorConfiguration.SourceFileLocation, _CellInstanceName);
         if (!Directory.Exists(duplicateDirectory))
             _ = Directory.CreateDirectory(duplicateDirectory);
-        string duplicateFile = Path.Combine(duplicateDirectory, Path.GetFileName(reportFullPath));
-        string lines = File.ReadAllText(reportFullPath);
-        lines = lines
-            .Replace("ppm\t", "\t")
-            .Replace("mm2\t", "\t")
-            .Replace("um\t", "\t")
-            .Replace("mm\t", "\t")
-            .Replace("nm\t", "\t")
-            .Replace("\t[ ", "\t")
-            .Replace("\t*0", "\t")
-            .Replace("\t*", "\t")
-            .Replace("%\t", "\t")
-            .Replace("]\t", "\t");
-        File.WriteAllText(duplicateFile, lines);
+        string duplicateFile = Path.Combine(duplicateDirectory, $"{Path.GetFileName(reportFullPath)}.xml");
+        string xml = ProcessDataStandardFormat.GetXml(reportFullPath)
+            .Replace("ppm</", "</")
+            .Replace("mm2</", "</")
+            .Replace("um</", "</")
+            .Replace("mm</", "</")
+            .Replace("nm</", "</")
+            .Replace(">[ ", ">")
+            .Replace(">*0", ">")
+            .Replace(">*", ">")
+            .Replace("%</", "</")
+            .Replace("]</", "</");
+        File.WriteAllText(duplicateFile, xml);
         WaitForFileConsumption(dateTime, descriptions, isDummyRun, successDirectory, duplicateDirectory, collection, duplicateFile);
     }

     private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
     {
         Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
-        ProcessDataStandardFormat processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath);
+        string[] lines = File.ReadAllLines(reportFullPath);
+        ProcessDataStandardFormat processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath, lines);
         _Logistics = new Logistics(reportFullPath, processDataStandardFormat);
         SetFileParameterLotIDToLogisticsMID();
-        JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(processDataStandardFormat);
+        JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(reportFullPath, lines, processDataStandardFormat);
         List<pcl.Description> descriptions = pcl.ProcessData.GetDescriptions(jsonElements);
         Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
-        results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics[0]), tests, jsonElements, new List<FileInfo>());
+        results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics), tests, jsonElements, new List<FileInfo>());
         if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
             SaveIQSFile(reportFullPath, dateTime, descriptions, tests);
         if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
-            FileCopy(reportFullPath, dateTime, descriptions);
+            WriteFile(reportFullPath, dateTime, descriptions);
         return results;
     }

View File

@@ -88,9 +88,9 @@ public class FileRead : Shared.FileRead, IFileRead
         string processDataStandardFormatMappingOldColumnNames = GetPropertyValue(cellInstanceConnectionName, modelObjectParameters, "Process.Data.Standard.Format.Mapping.Old.Column.Names");
         string processDataStandardFormatMappingNewColumnNames = GetPropertyValue(cellInstanceConnectionName, modelObjectParameters, "Process.Data.Standard.Format.Mapping.New.Column.Names");
         string processDataStandardFormatMappingColumnIndices = GetPropertyValue(cellInstanceConnectionName, modelObjectParameters, "Process.Data.Standard.Format.Mapping.Column.Indices");
-        _ProcessDataStandardFormatMapping = GetProcessDataStandardFormatMapping(processDataStandardFormatMappingOldColumnNames,
+        _ProcessDataStandardFormatMapping = ProcessDataStandardFormatMapping.Get(processDataStandardFormatMappingOldColumnNames,
                                                                                 processDataStandardFormatMappingNewColumnNames,
                                                                                 processDataStandardFormatMappingColumnIndices);
     }

     void IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, Exception exception)
@@ -169,46 +169,6 @@ public class FileRead : Shared.FileRead, IFileRead
         return results;
     }

-    private static ProcessDataStandardFormatMapping GetProcessDataStandardFormatMapping(string processDataStandardFormatMappingOldColumnNames, string processDataStandardFormatMappingNewColumnNames, string processDataStandardFormatMappingColumnIndices)
-    {
-        ProcessDataStandardFormatMapping result;
-        string[] segmentsB;
-        List<string> distinct = new();
-        Dictionary<string, string> keyValuePairs = new();
-        string args4 = "Time,HeaderUniqueId,UniqueId,Date";
-        string args5 = "";
-        string args6 = "";
-        string args7 = "Test|EventId,Lot|Id,Slot|WaferId,AreaTotal|WaferAreaTotal,HazeAverage|WaferHazeAverage,HazeRegion|WaferHazeRegion,ScratchTotal|WaferScratchTotal";
-        // string args8 = "Time,A_LOGISTICS,B_LOGISTICS,Test,Count,Index,MesEntity,Date,Employee,Lot,PSN,Reactor,Recipe,Comments,Diameter,Exclusion,Gain,HeaderUniqueId,Laser,ParseErrorText,RDS,Slot,UniqueId,AreaCount,AreaCountAvg,AreaCountMax,AreaCountMin,AreaCountStdDev,AreaTotal,AreaTotalAvg,AreaTotalMax,AreaTotalMin,AreaTotalStdDev,Bin1,Bin2,Bin3,Bin4,Bin5,Bin6,Bin7,Bin8,HazeAverage,HazeAverageAvg,HazeAverageMax,HazeAverageMin,HazeAverageStdDev,HazePeak,HazeRegion,HazeRegionAvg,HazeRegionMax,HazeRegionMin,HazeRegionStdDev,HazeRng,LPDCM2,LPDCM2Avg,LPDCM2Max,LPDCM2Min,LPDCM2StdDev,LPDCount,LPDCountAvg,LPDCountMax,LPDCountMin,LPDCountStdDev,Mean,ScratchCount,ScratchCountAvg,ScratchCountMax,ScratchCountMin,ScratchCountStdDev,ScratchTotal,ScratchTotalAvg,ScratchTotalMax,ScratchTotalMin,ScratchTotalStdDev,Sort,StdDev,SumOfDefects,SumOfDefectsAvg,SumOfDefectsMax,SumOfDefectsMin,SumOfDefectsStdDev,Thresh,Thruput";
-        // string args9 = "Time,A_LOGISTICS,B_LOGISTICS,Count,Sequence,MesEntity,Index,Date,Recipe,Id,WaferId,LPDCount,LPDCM2,AreaCount,AreaTotal,ScratchCount,ScratchTotal,SumOfDefects,HazeRegion,HazeAverage,Grade,LPDCountMin,LPDCM2Min,AreaCountMin,AreaTotalMin,ScratchCountMin,ScratchTotalMin,SumOfDefectsMin,HazeRegionMin,HazeAverageMin,LPDCountMax,LPDCM2Max,AreaCountMax,AreaTotalMax,ScratchCountMax,ScratchTotalMax,SumOfDefectsMax,HazeRegionMax,HazeAverageMax,LPDCountAvg,LPDCM2Avg,AreaCountAvg,AreaTotalAvg,ScratchCountAvg,ScratchTotalAvg,SumOfDefectsAvg,HazeRegionAvg,HazeAverageAvg,LPDCountStdDev,LPDCM2StdDev,AreaCountStdDev,AreaTotalStdDev,ScratchCountStdDev,ScratchTotalStdDev,SumOfDefectsStdDev,HazeRegionStdDev,HazeAverageStdDev,WaferDate,Comments,Sort,WaferLPDCount,WaferLPDCM2,Bin1,Bin2,Bin3,Bin4,Bin5,Bin6,Bin7,Bin8,Mean,StdDev,WaferAreaCount,WaferAreaTotal,WaferScratchCount,WaferScratchTotal,WaferSumOfDefects,WaferHazeRegion,WaferHazeAverage,HazePeak,Laser,Gain,Diameter,Thresh,Exclusion,HazeRng,Thruput,WaferRecipe,RDS,PSN,Reactor,Layer,Zone,Employee,InferredLot,EventId";
-        // string args10 = "0,1,2,95,3,6,5,7,93,9,89,90,8,58,82,84,81,-1,80,-1,88,10,-1,13,41,32,23,50,73,42,33,24,51,62,63,64,65,66,67,68,69,78,47,38,29,56,79,77,46,37,28,55,85,12,40,31,22,49,11,39,30,21,48,70,15,43,34,25,52,75,44,35,26,53,59,71,17,45,36,27,54,83,86";
-        string[] segments = args7.Split(',');
-        ReadOnlyCollection<string> ignoreColumns = new(args4.Split(','));
-        ReadOnlyCollection<string> backfillColumns = new(args5.Split(','));
-        ReadOnlyCollection<string> indexOnlyColumns = new(args6.Split(','));
-        ReadOnlyCollection<string> newColumnNames = new(processDataStandardFormatMappingNewColumnNames.Split(','));
-        ReadOnlyCollection<string> oldColumnNames = new(processDataStandardFormatMappingOldColumnNames.Split(','));
-        ReadOnlyCollection<int> columnIndices = new(processDataStandardFormatMappingColumnIndices.Split(',').Select(int.Parse).ToArray());
-        foreach (string segment in segments)
-        {
-            segmentsB = segment.Split('|');
-            if (segmentsB.Length != 2)
-                continue;
-            if (distinct.Contains(segmentsB[0]))
-                continue;
-            distinct.Add(segmentsB[0]);
-            keyValuePairs.Add(segmentsB[0], segmentsB[1]);
-        }
-        result = new(backfillColumns: backfillColumns,
-                     columnIndices: columnIndices,
-                     newColumnNames: newColumnNames,
-                     ignoreColumns: ignoreColumns,
-                     indexOnlyColumns: indexOnlyColumns,
-                     keyValuePairs: new(keyValuePairs),
-                     oldColumnNames: oldColumnNames);
-        return result;
-    }

     private static ReadOnlyCollection<PreWith> GetPreWithCollection(ReadOnlyCollection<Pre> preCollection)
     {
         List<PreWith> results = new();
@@ -313,8 +273,13 @@ public class FileRead : Shared.FileRead, IFileRead
                 continue;
             if (!_StaticRuns.TryGetValue(_Logistics.Sequence, out List<Shared.Metrology.WS.Results>? wsResults))
                 wsResults = null;
-            ProcessDataStandardFormat.Write(preWith.CheckFile, processDataStandardFormat, wsResults);
-            File.Delete(preWith.MatchingFile);
+            if (processDataStandardFormat.InputPDSF is null)
+                File.Move(preWith.MatchingFile, preWith.CheckFile);
+            else
+            {
+                ProcessDataStandardFormat.Write(preWith.CheckFile, processDataStandardFormat, wsResults);
+                File.Delete(preWith.MatchingFile);
+            }
             if (Directory.Exists(preWith.NoWaitDirectory))
             {
                 post = new(preWith.CheckFile, preWith.ErrFile);

View File

@@ -6,11 +6,9 @@ using Adaptation.Shared.Methods;
 using Adaptation.Shared.Metrology;
 using System;
 using System.Collections.Generic;
-using System.Collections.ObjectModel;
 using System.Globalization;
 using System.IO;
 using System.Linq;
-using System.Text;
 using System.Text.Json;

 namespace Adaptation.FileHandlers.OpenInsight;
@@ -18,16 +16,12 @@ namespace Adaptation.FileHandlers.OpenInsight;
 public class FileRead : Shared.FileRead, IFileRead
 {

-    private int _LastIndex;
     private readonly string _IqsConnectionString;
-    private readonly string _OpenInsightFilePattern;
     private readonly string _OpenInsightApiECDirectory;
-    private readonly ReadOnlyCollection<ModelObjectParameterDefinition> _IQSCopyCollection;

     public FileRead(ISMTP smtp, Dictionary<string, string> fileParameter, string cellInstanceName, int? connectionCount, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList<ModelObjectParameterDefinition> modelObjectParameters, string equipmentDictionaryName, Dictionary<string, List<long>> dummyRuns, Dictionary<long, List<WS.Results>> staticRuns, bool useCyclicalForDescription, bool isEAFHosted) :
         base(new Description(), false, smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null)
     {
-        _LastIndex = -1;
         _MinFileLength = 10;
         _Logistics = new(this);
         _NullData = string.Empty;
@@ -37,10 +31,8 @@ public class FileRead : Shared.FileRead, IFileRead
             throw new Exception(cellInstanceConnectionName);
         if (!_IsDuplicator)
             throw new Exception(cellInstanceConnectionName);
-        _IQSCopyCollection = new(GetProperties(cellInstanceConnectionName, modelObjectParameters, "IQS.Copy."));
         _IqsConnectionString = GetPropertyValue(cellInstanceConnectionName, modelObjectParameters, "IQS.ConnectionString");
         _OpenInsightApiECDirectory = GetPropertyValue(cellInstanceConnectionName, modelObjectParameters, "API.EC.Directory");
-        _OpenInsightFilePattern = GetPropertyValue(cellInstanceConnectionName, modelObjectParameters, "OpenInsight.FilePattern");
     }

     void IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, Exception exception)
@@ -118,201 +110,61 @@ public class FileRead : Shared.FileRead, IFileRead
         return results;
     }

-    internal static string GetLines(Logistics logistics, List<pcl.Description> descriptions)
-    {
-        StringBuilder result = new();
-        pcl.Description x = descriptions[0];
-        bool ganPPTST = x.Recipe.Contains("GAN_PPTST");
-        if (ganPPTST)
-        {
-            string slot;
-            string reactor;
-            const int eight = 8;
-            DateTime dateTime = DateTime.Parse(x.Date);
-            string lot = x.Lot.ToLower().Replace("69-", string.Empty).Replace("71-", string.Empty).Replace("-", string.Empty);
-            if (string.IsNullOrEmpty(x.Lot) || x.Lot.Length < 2)
-                reactor = "R";
-            else
-                reactor = string.Concat("R", x.Lot.Substring(0, 2));
-            _ = result.Append(nameof(x.Date)).Append(';').
-                Append("Part").Append(';').
-                Append(nameof(x.Reactor)).Append(';').
-                Append("Lot").Append(';').
-                Append(nameof(pcl.Detail.Slot)).Append(';').
-                Append(nameof(pcl.Detail.Bin1)).Append(';').
-                Append(nameof(pcl.Detail.Bin2)).Append(';').
-                Append(nameof(pcl.Detail.Bin3)).Append(';').
-                Append(nameof(pcl.Detail.Bin4)).Append(';').
-                Append(nameof(pcl.Detail.Bin5)).Append(';').
-                Append(nameof(pcl.Detail.Bin6)).Append(';').
-                Append("Bin9").
-                AppendLine();
-            foreach (pcl.Description description in descriptions)
-            {
-                slot = description.Slot.Replace("*", string.Empty);
-                _ = result.Append('!').Append(dateTime.ToString("MM/dd/yyyy HH:mm:ss")).Append(';').
-                    Append("Particle Adder;").
-                    Append(reactor).Append(';').
-                    Append(lot).Append(';').
-                    Append(slot).Append(';').
-                    Append(description.Bin1).Append(';').
-                    Append(description.Bin2).Append(';').
-                    Append(description.Bin3).Append(';').
-                    Append(description.Bin4).Append(';').
-                    Append(description.Bin5).Append(';').
-                    Append(description.Bin6).Append(';').
-                    Append(description.AreaCount).
-                    AppendLine();
-            }
-            if (descriptions.Count != eight)
-            {
-                string negativeTenThousand = "-10000";
-                for (int i = descriptions.Count; i < eight; i++)
-                {
-                    _ = result.Append('!').Append(dateTime.ToString("MM/dd/yyyy HH:mm:ss")).Append(';').
-                        Append("Particle Adder;").
-                        Append(reactor).Append(';').
-                        Append(lot).Append(';').
-                        Append(negativeTenThousand).Append(';').
-                        Append(negativeTenThousand).Append(';').
-                        Append(negativeTenThousand).Append(';').
-                        Append(negativeTenThousand).Append(';').
-                        Append(negativeTenThousand).Append(';').
-                        Append(negativeTenThousand).Append(';').
-                        Append(negativeTenThousand).Append(';').
-                        Append(negativeTenThousand).
-                        AppendLine();
-                }
-            }
-            if (result.ToString().Split('\n').Length != (eight + 2))
-                throw new Exception(string.Concat("Must have ", eight, " samples"));
-        }
-        else
-        {
-            char del = '\t';
-            _ = result.Append(x.AreaCountAvg).Append(del). // 001 - AreaCountAvg
-                Append(x.AreaCountMax).Append(del). // 002 - AreaCountMax
-                Append(x.AreaCountMin).Append(del). // 003 - AreaCountMin
-                Append(x.AreaCountStdDev).Append(del). // 004 - AreaCountStdDev
-                Append(x.AreaTotalAvg).Append(del). // 005 - AreaTotalAvg
-                Append(x.AreaTotalMax).Append(del). // 006 - AreaTotalMax
-                Append(x.AreaTotalMin).Append(del). // 007 - AreaTotalMin
-                Append(x.AreaTotalStdDev).Append(del). // 008 - AreaTotalStdDev
-                Append(x.Date).Append(del). // 009 -
-                Append(x.HazeAverageAvg).Append(del). // 010 - Haze Average
-                Append(x.HazeAverageMax).Append(del). // 011 -
-                Append(x.HazeAverageMin).Append(del). // 012 -
-                Append(x.HazeAverageStdDev).Append(del). // 013 -
-                Append(x.HazeRegionAvg).Append(del). // 014 -
-                Append(x.HazeRegionMax).Append(del). // 015 -
-                Append(x.HazeRegionMin).Append(del). // 016 -
-                Append(x.HazeRegionStdDev).Append(del). // 017 -
-                Append(x.Lot).Append(del). // 018 -
-                Append(x.LPDCM2Avg).Append(del). // 019 -
-                Append(x.LPDCM2Max).Append(del). // 020 -
-                Append(x.LPDCM2Min).Append(del). // 021 -
-                Append(x.LPDCM2StdDev).Append(del). // 022 -
-                Append(x.LPDCountAvg).Append(del). // 023 -
-                Append(x.LPDCountMax).Append(del). // 024 -
-                Append(x.LPDCM2Min).Append(del). // 025 -
-                Append(x.LPDCountStdDev).Append(del). // 026 -
-                Append(x.Employee).Append(del). // 027 -
-                Append(x.RDS).Append(del). // 028 - Lot
-                Append(x.Reactor).Append(del). // 029 - Process
-                Append(x.Recipe.Replace(";", string.Empty)).Append(del). // 030 - Part
-                Append(x.ScratchCountAvg).Append(del). // 031 - Scratch Count
-                Append(x.ScratchCountMax).Append(del). // 032 -
-                Append(x.ScratchCountMin).Append(del). // 033 -
-                Append(x.ScratchTotalStdDev).Append(del). // 034 -
-                Append(x.ScratchTotalAvg).Append(del). // 035 - Scratch Length
-                Append(x.ScratchTotalMax).Append(del). // 036 -
-                Append(x.ScratchTotalMin).Append(del). // 037 -
-                Append(x.ScratchTotalStdDev).Append(del). // 038 -
-                Append(x.SumOfDefectsAvg).Append(del). // 039 - Average Sum of Defects
-                Append(x.SumOfDefectsMax).Append(del). // 040 - Max Sum of Defects
-                Append(x.SumOfDefectsMin).Append(del). // 041 - Min Sum of Defects
-                Append(x.SumOfDefectsStdDev).Append(del). // 042 - SumOfDefectsStdDev
-                Append(logistics.MesEntity).Append(del). // 043 -
-                AppendLine();
-        }
-        return result.ToString();
-    }
-
-    private void SaveOpenInsightFile(string reportFullPath, DateTime dateTime, ProcessDataStandardFormat processDataStandardFormat, List<pcl.Description> descriptions, Test[] tests)
+    private void SaveOpenInsightFile(string reportFullPath, DateTime dateTime, List<pcl.Description> descriptions, Test[] tests)
     {
+        string duplicateFile;
         bool isDummyRun = false;
         List<(Shared.Properties.IScopeInfo, string)> collection = new();
         string duplicateDirectory = Path.Combine(_FileConnectorConfiguration.SourceFileLocation, _CellInstanceName);
         if (!Directory.Exists(duplicateDirectory))
             _ = Directory.CreateDirectory(duplicateDirectory);
         string successDirectory = _FileConnectorConfiguration.AlternateTargetFolder;
-        if (!Directory.Exists(Path.Combine(duplicateDirectory, "1")))
+        if (descriptions.Count == 0 || tests.Length == 0)
+            duplicateFile = Path.Combine(duplicateDirectory, Path.GetFileName(reportFullPath));
+        else
         {
-            string parentParent = GetParentParent(_FileConnectorConfiguration.SourceFileLocation);
-            if (parentParent.Contains(_CellInstanceName))
-                parentParent = Path.GetDirectoryName(parentParent);
-            duplicateDirectory = Path.Combine(parentParent, "Data");
-            if (!Directory.Exists(duplicateDirectory))
-                _ = Directory.CreateDirectory(duplicateDirectory);
-        }
-        string duplicateFile = Path.Combine(duplicateDirectory, Path.GetFileName(reportFullPath));
-        if (descriptions.Count != 0 && tests.Length != 0)
-        {
-            string lines = GetLines(_Logistics, descriptions);
-            if (!string.IsNullOrEmpty(lines))
-            {
-                _LastIndex += 1;
-                long? subgroupId;
-                if (_LastIndex >= _IQSCopyCollection.Count)
-                    _LastIndex = 0;
-                long breakAfter = dateTime.AddSeconds(_BreakAfterSeconds).Ticks;
-                ModelObjectParameterDefinition modelObjectParameterDefinition = _IQSCopyCollection[_LastIndex];
-                long preWait = _FileConnectorConfiguration?.FileHandleWaitTime is null ? dateTime.AddMilliseconds(1234).Ticks : dateTime.AddMilliseconds(_FileConnectorConfiguration.FileHandleWaitTime.Value).Ticks;
-                if (string.IsNullOrEmpty(descriptions[0].Reactor) || string.IsNullOrEmpty(descriptions[0].PSN))
-                    subgroupId = null;
-                else
-                    (subgroupId, int? _, string _) = FromIQS.GetCommandText(_IqsConnectionString, _Logistics, descriptions[0], breakAfter, preWait);
-                if (subgroupId is null)
-                    collection.Add(new(new ScopeInfo(tests[0], _OpenInsightFilePattern), lines));
-                else
-                    collection.Add(new(new ScopeInfo(tests[0], $"{subgroupId.Value} {_OpenInsightFilePattern}"), lines));
-                if (_StaticRuns.TryGetValue(_Logistics.Sequence, out List<WS.Results> wsResults))
-                {
-                    if (wsResults is null || wsResults.Count != 1)
-                        throw new NullReferenceException($"{nameof(wsResults)} {wsResults?.Count} != 1 {_Logistics.Sequence}!");
-                    lock (_StaticRuns)
-                        wsResults[0] = WS.Results.Get(wsResults[0], subgroupId);
-                }
-                string weekOfYear = _Calendar.GetWeekOfYear(_Logistics.DateTimeFromSequence, CalendarWeekRule.FirstDay, DayOfWeek.Sunday).ToString("00");
-                FromIQS.Save(_OpenInsightApiECDirectory, _Logistics, reportFullPath, processDataStandardFormat, descriptions.First(), lines, subgroupId, weekOfYear);
-                try
-                { FromIQS.SaveCopy(_FileConnectorConfiguration.SourceFileLocation, _IqsConnectionString, modelObjectParameterDefinition.Name, modelObjectParameterDefinition.Value.Split('|')); }
-                catch (Exception exception)
-                {
-                    string subject = string.Concat("Exception:", _CellInstanceConnectionName);
-                    string body = string.Concat(exception.Message, Environment.NewLine, Environment.NewLine, exception.StackTrace);
-                    try
-                    { _SMTP.SendHighPriorityEmailMessage(subject, body); }
-                    catch (Exception) { }
-                }
-            }
+            long? subgroupId;
+            string fileName = Path.GetFileName(reportFullPath);
+            long breakAfter = dateTime.AddSeconds(_BreakAfterSeconds).Ticks;
+            long preWait = _FileConnectorConfiguration?.FileHandleWaitTime is null ? dateTime.AddMilliseconds(1234).Ticks : dateTime.AddMilliseconds(_FileConnectorConfiguration.FileHandleWaitTime.Value).Ticks;
+            if (string.IsNullOrEmpty(descriptions[0].Reactor) || string.IsNullOrEmpty(descriptions[0].PSN))
+                subgroupId = null;
+            else
+                (subgroupId, int? _, string _) = FromIQS.GetCommandText(_IqsConnectionString, _Logistics, descriptions[0], breakAfter, preWait);
+            if (_StaticRuns.TryGetValue(_Logistics.Sequence, out List<WS.Results> wsResults))
+            {
+                if (wsResults is null || wsResults.Count != 1)
+                    throw new NullReferenceException($"{nameof(wsResults)} {wsResults?.Count} != 1 {_Logistics.Sequence}!");
+                lock (_StaticRuns)
+                    wsResults[0] = WS.Results.Get(wsResults[0], subgroupId);
+            }
+            if (!fileName.StartsWith("Viewer"))
+                duplicateFile = Path.Combine(duplicateDirectory, $"{subgroupId} {fileName}".TrimStart());
+            else
+                duplicateFile = Path.Combine(duplicateDirectory, $"{$"Viewer {subgroupId}".TrimEnd()} {fileName.Replace("Viewer", string.Empty)}");
+            string weekOfYear = _Calendar.GetWeekOfYear(_Logistics.DateTimeFromSequence, CalendarWeekRule.FirstDay, DayOfWeek.Sunday).ToString("00");
+            FromIQS.Save(_OpenInsightApiECDirectory, _Logistics, reportFullPath, descriptions.First(), subgroupId, weekOfYear);
         }
         if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
+        {
+            File.Copy(reportFullPath, duplicateFile, overwrite: true);
             WaitForFileConsumption(dateTime, descriptions, isDummyRun, successDirectory, duplicateDirectory, collection, duplicateFile);
+        }
     }

     private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
     {
         Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
-        ProcessDataStandardFormat processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath);
+        string[] lines = File.ReadAllLines(reportFullPath);
+        ProcessDataStandardFormat processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath, lines);
         _Logistics = new Logistics(reportFullPath, processDataStandardFormat);
         SetFileParameterLotIDToLogisticsMID();
-        JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(processDataStandardFormat);
+        JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(reportFullPath, lines, processDataStandardFormat);
         List<pcl.Description> descriptions = pcl.ProcessData.GetDescriptions(jsonElements);
         Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
         if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
-            SaveOpenInsightFile(reportFullPath, dateTime, processDataStandardFormat, descriptions, tests);
-        results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics[0]), tests, jsonElements, new List<FileInfo>());
+            SaveOpenInsightFile(reportFullPath, dateTime, descriptions, tests);
+        results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics), tests, jsonElements, new List<FileInfo>());
         return results;
     }

View File

@ -325,174 +325,18 @@ public class FromIQS
return new(result, count, commandText); return new(result, count, commandText);
} }
private static string GetJson(Logistics logistics, ProcessDataStandardFormat processDataStandardFormat, pcl.Description description) internal static void Save(string openInsightApiECDirectory, Logistics logistics, string reportFullPath, pcl.Description description, long? subGroupId, string weekOfYear)
{ {
string result;
StringBuilder stringBuilder = new();
var @object = new
{
description.MesEntity,
description.Employee,
// description.Layer,
description.PSN,
description.RDS,
description.Reactor,
description.Recipe,
// description.Zone,
logistics.DateTimeFromSequence.Ticks
};
string[] pair;
string safeValue;
string[] segments;
string serializerValue;
foreach (string line in processDataStandardFormat.Logistics)
{
segments = line.Split('\t');
if (segments.Length < 2)
continue;
segments = segments[1].Split(';');
_ = stringBuilder.Append('{');
foreach (string segment in segments)
{
pair = segment.Split('=');
if (pair.Length != 2 || pair[0].Length < 3)
continue;
serializerValue = JsonSerializer.Serialize(pair[1]);
safeValue = serializerValue.Substring(1, serializerValue.Length - 2);
_ = stringBuilder.Append('"').Append(pair[0].Substring(2)).Append('"').Append(':').Append('"').Append(safeValue).Append('"').Append(',');
}
if (stringBuilder.Length > 0)
_ = stringBuilder.Remove(stringBuilder.Length - 1, 1);
_ = stringBuilder.Append('}').Append(',');
}
if (stringBuilder.Length > 0)
_ = stringBuilder.Remove(stringBuilder.Length - 1, 1);
_ = stringBuilder.Append(']').Append('}');
_ = stringBuilder.Insert(0, ",\"Logistics\":[");
string json = JsonSerializer.Serialize(@object);
_ = stringBuilder.Insert(0, json.Substring(0, json.Length - 1));
JsonElement? jsonElement = JsonSerializer.Deserialize<JsonElement>(stringBuilder.ToString());
result = jsonElement is null ? "{}" : JsonSerializer.Serialize(jsonElement, new JsonSerializerOptions { WriteIndented = true });
return result;
}
internal static void Save(string openInsightApiECDirectory, Logistics logistics, string reportFullPath, ProcessDataStandardFormat processDataStandardFormat, pcl.Description description, string lines, long? subGroupId, string weekOfYear)
{
string checkFile;
string fileName = Path.GetFileName(reportFullPath); string fileName = Path.GetFileName(reportFullPath);
string json = GetJson(logistics, processDataStandardFormat, description);
string? ecPathRoot = Path.GetPathRoot(openInsightApiECDirectory); string? ecPathRoot = Path.GetPathRoot(openInsightApiECDirectory);
bool ecExists = ecPathRoot is not null && Directory.Exists(ecPathRoot); bool ecExists = ecPathRoot is not null && Directory.Exists(ecPathRoot);
string weekYear = $"{logistics.DateTimeFromSequence:yyyy}_Week_{weekOfYear}"; string weekYear = $"{logistics.DateTimeFromSequence:yyyy}_Week_{weekOfYear}";
string ecDirectory = Path.Combine(openInsightApiECDirectory, weekYear, $"-{description.PSN}", $"-{description.Reactor}", $"-{description.RDS}", $"-{subGroupId}"); string ecDirectory = Path.Combine(openInsightApiECDirectory, weekYear, $"-{description.PSN}", $"-{description.Reactor}", $"-{description.RDS}", $"-{subGroupId}");
if (ecExists && !Directory.Exists(ecDirectory)) if (ecExists && !Directory.Exists(ecDirectory))
_ = Directory.CreateDirectory(ecDirectory); _ = Directory.CreateDirectory(ecDirectory);
checkFile = Path.Combine(ecDirectory, fileName); string checkFile = Path.Combine(ecDirectory, fileName);
if (ecExists && !File.Exists(checkFile)) if (ecExists && !File.Exists(checkFile))
File.Copy(reportFullPath, checkFile); File.Copy(reportFullPath, checkFile);
checkFile = Path.Combine(ecDirectory, $"{logistics.DateTimeFromSequence.Ticks}.txt");
if (ecExists && !File.Exists(checkFile))
File.WriteAllText(checkFile, lines);
checkFile = Path.Combine(ecDirectory, $"{logistics.DateTimeFromSequence.Ticks}.json");
if (ecExists && !File.Exists(checkFile))
File.WriteAllText(checkFile, json);
checkFile = Path.Combine(ecDirectory, $"{logistics.DateTimeFromSequence.Ticks}.lbl");
if (ecExists && !File.Exists(checkFile))
File.WriteAllText(checkFile, processDataStandardFormat.Body[processDataStandardFormat.Body.Count - 1]);
} }
private static string GetCommandText(string[] iqsCopyValues)
{ // cSpell:disable
List<string> results = new();
if (iqsCopyValues.Length != 4)
throw new NotSupportedException();
string find = iqsCopyValues[1];
string replace = iqsCopyValues[3];
results.Add(" select pd.f_name [Part Name], ");
results.Add(" null [Part Revision], ");
results.Add($" '{replace}' [Test Name], ");
results.Add(" null [Description], ");
results.Add(" null [Lot Number], ");
results.Add(" null [Job Name], ");
results.Add(" null [Process Name], ");
results.Add(" case when sl.f_url = 0 then null else sl.f_url end [Reasonable Limit (Upper)], ");
results.Add(" case when sl.f_url = 0 then 0 else 1 end [Alarm Reasonable Limit (Upper)], ");
results.Add(" case when sl.f_usl = 0 then null else sl.f_usl end [Specification Limit (Upper)], ");
results.Add(" case when sl.f_usl = 0 then 0 else 1 end [Alarm Specification Limit (Upper)], ");
results.Add(" case when sl.f_ugb = 0 then null else sl.f_ugb end [Warning Limit (Upper)], ");
results.Add(" case when sl.f_ugb = 0 then 0 else 1 end [Alarm Warning Limit (Upper)], ");
results.Add(" case when sl.f_tar = 0 then null else sl.f_tar end [Specification Limit (Target)], ");
results.Add(" case when sl.f_lgb = 0 then null else sl.f_lgb end [Warning Limit (Lower)], ");
results.Add(" case when sl.f_lgb = 0 then 0 else 1 end [Alarm Warning Limit (Lower)], ");
results.Add(" case when sl.f_lsl = 0 then null else sl.f_lsl end [Specification Limit (Lower)], ");
results.Add(" case when sl.f_lsl = 0 then 0 else 1 end [Alarm Specification Limit (Lower)], ");
results.Add(" case when sl.f_lrl = 0 then null else sl.f_lrl end [Reasonable Limit (Lower)], ");
results.Add(" case when sl.f_lrl = 0 then 0 else 1 end [Alarm Reasonable Limit (Lower)], ");
results.Add(" td.f_name [Original Test Name], ");
results.Add(" td.f_test [Test Id], ");
results.Add(" ( ");
results.Add(" select count(sl_b.f_spec) ");
results.Add(" from [spcepiworld].[dbo].[spec_lim] sl_b ");
results.Add(" join [spcepiworld].[dbo].[part_dat] pd_b ");
results.Add(" on sl_b.f_part = pd_b.f_part ");
results.Add(" join [spcepiworld].[dbo].[test_dat] td_b ");
results.Add(" on sl_b.f_test = td_b.f_test ");
results.Add(" where sl_b.f_prcs = 0 ");
results.Add($" and td_b.f_name = '{replace}' ");
results.Add(" and pd_b.f_name = pd.f_name ");
results.Add(" and sl_b.f_url = sl.f_url ");
results.Add(" and sl_b.f_usl = sl.f_usl ");
results.Add(" and sl_b.f_ugb = sl.f_ugb ");
results.Add(" and sl_b.f_tar = sl.f_tar ");
results.Add(" and sl_b.f_lgb = sl.f_lgb ");
results.Add(" and sl_b.f_lsl = sl.f_lsl ");
results.Add(" and sl_b.f_lrl = sl.f_lrl ");
results.Add(" group by sl_b.f_spec ");
results.Add(" ) count ");
results.Add(" from [spcepiworld].[dbo].[spec_lim] sl ");
results.Add(" join [spcepiworld].[dbo].[part_dat] pd ");
results.Add(" on sl.f_part = pd.f_part ");
results.Add(" join [spcepiworld].[dbo].[test_dat] td ");
results.Add(" on sl.f_test = td.f_test ");
results.Add(" where sl.f_prcs = 0 ");
results.Add($" and td.f_name = '{find}' ");
results.Add(" and isnull(( ");
results.Add(" select count(sl_b.f_spec) ");
results.Add(" from [spcepiworld].[dbo].[spec_lim] sl_b ");
results.Add(" join [spcepiworld].[dbo].[part_dat] pd_b ");
results.Add(" on sl_b.f_part = pd_b.f_part ");
results.Add(" join [spcepiworld].[dbo].[test_dat] td_b ");
results.Add(" on sl_b.f_test = td_b.f_test ");
results.Add(" where sl_b.f_prcs = 0 ");
results.Add($" and td_b.f_name = '{replace}' ");
results.Add(" and pd_b.f_name = pd.f_name ");
results.Add(" and sl_b.f_url = sl.f_url ");
results.Add(" and sl_b.f_usl = sl.f_usl ");
results.Add(" and sl_b.f_ugb = sl.f_ugb ");
results.Add(" and sl_b.f_tar = sl.f_tar ");
results.Add(" and sl_b.f_lgb = sl.f_lgb ");
results.Add(" and sl_b.f_lsl = sl.f_lsl ");
results.Add(" and sl_b.f_lrl = sl.f_lrl ");
results.Add(" group by sl_b.f_spec ");
results.Add(" ), 0) = 0 ");
results.Add(" for json path ");
return string.Join(Environment.NewLine, results);
} // cSpell:restore
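// Writes the copy query's FOR JSON result to {name}.json beside the configured source file location (only when that file does not already exist); when the query returns nothing, the generated SQL is written out as {name}.sql instead.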
internal static void SaveCopy(string fileConnectorConfigurationSourceFileLocation, string connectionString, string name, string[] iqsCopyValues)
{
string checkFile = Path.Combine(fileConnectorConfigurationSourceFileLocation, $"{name}.json");
if (!File.Exists(checkFile))
{
string commandText = GetCommandText(iqsCopyValues);
StringBuilder stringBuilder = GetForJsonPath(connectionString, commandText);
if (stringBuilder.Length != 0)
File.WriteAllText(checkFile, stringBuilder.ToString());
else
File.WriteAllText(Path.Combine(fileConnectorConfigurationSourceFileLocation, $"{name}.sql"), commandText);
}
}
#nullable disable
}

View File

@@ -110,10 +110,10 @@ public class FileRead : Shared.FileRead, IFileRead
return results;
}
-private void SendData(string reportFullPath, DateTime dateTime, List<pcl.Description> descriptions)
+private void SendData(string reportFullPath, DateTime dateTime, JsonElement[] jsonElements, List<pcl.Description> descriptions)
{
string checkDirectory;
-WSRequest wsRequest = new(this, _Logistics, descriptions);
+WSRequest wsRequest = new(this, _Logistics, jsonElements, descriptions);
int weekOfYear = _Calendar.GetWeekOfYear(dateTime, CalendarWeekRule.FirstDay, DayOfWeek.Sunday);
string directory = Path.Combine(_OpenInsightMetrologyViewerFileShare, dateTime.Year.ToString(), $"WW{weekOfYear:00}");
checkDirectory = Path.Combine(directory, _Logistics.Sequence.ToString());
@@ -139,15 +139,16 @@ public class FileRead : Shared.FileRead, IFileRead
private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
-ProcessDataStandardFormat processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath);
+string[] lines = File.ReadAllLines(reportFullPath);
+ProcessDataStandardFormat processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath, lines);
_Logistics = new Logistics(reportFullPath, processDataStandardFormat);
SetFileParameterLotIDToLogisticsMID();
-JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(processDataStandardFormat);
+JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(reportFullPath, lines, processDataStandardFormat);
List<pcl.Description> descriptions = pcl.ProcessData.GetDescriptions(jsonElements);
Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
-SendData(reportFullPath, dateTime, descriptions);
+SendData(reportFullPath, dateTime, jsonElements, descriptions);
-results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics[0]), tests, jsonElements, new List<FileInfo>());
+results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics), tests, jsonElements, new List<FileInfo>());
return results;
}

View File

@@ -5,6 +5,7 @@ using System;
using System.Collections.Generic;
using System.IO;
using System.Text;
+using System.Text.Json;
namespace Adaptation.FileHandlers.OpenInsightMetrologyViewer;
@@ -20,6 +21,7 @@ public class WSRequest
public string AreaTotalMax { get; set; }
public string AreaTotalMin { get; set; }
public string AreaTotalStdDev { get; set; }
+public string AttemptCounter { get; set; }
public string Date { get; set; }
public string HazeAverageAvg { get; set; }
public string HazeAverageMax { get; set; }
@@ -29,6 +31,7 @@ public class WSRequest
public string HazeRegionMax { get; set; }
public string HazeRegionMin { get; set; }
public string HazeRegionStdDev { get; set; }
+public string IndexOf { get; set; }
public string Layer { get; set; }
public string LotID { get; set; }
public string LPDCM2Avg { get; set; }
@@ -70,7 +73,7 @@ public class WSRequest
[Obsolete("For json")] public WSRequest() { }
#pragma warning disable IDE0060
-internal WSRequest(IFileRead fileRead, Logistics logistics, List<pcl.Description> descriptions, string processDataStandardFormat = null)
+internal WSRequest(IFileRead fileRead, Logistics logistics, JsonElement[] jsonElements, List<pcl.Description> descriptions, string processDataStandardFormat = null)
#pragma warning restore IDE0060
{
i = -1;
@@ -92,6 +95,7 @@ public class WSRequest
AreaTotalMax = x.AreaTotalMax;
AreaTotalMin = x.AreaTotalMin;
AreaTotalStdDev = x.AreaTotalStdDev;
+AttemptCounter = x.AttemptCounter;
Date = x.Date;
HazeAverageAvg = x.HazeAverageAvg;
HazeAverageMax = x.HazeAverageMax;
@@ -101,6 +105,7 @@ public class WSRequest
HazeRegionMax = x.HazeRegionMax;
HazeRegionMin = x.HazeRegionMin;
HazeRegionStdDev = x.HazeRegionStdDev;
+IndexOf = x.IndexOf;
LotID = x.Lot;
LPDCM2Avg = x.LPDCM2Avg;
LPDCM2Max = x.LPDCM2Max;
@@ -290,14 +295,14 @@ public class WSRequest
pdDocument.close();
}
-internal static long GetHeaderId(IFileRead fileRead, Logistics logistics, string openInsightMetrologyViewerAPI, string openInsightMetrologyViewerFileShare, int weekOfYear, WS.Results results, List<pcl.Description> descriptions)
+internal static long GetHeaderId(IFileRead fileRead, Logistics logistics, string openInsightMetrologyViewerAPI, string openInsightMetrologyViewerFileShare, int weekOfYear, WS.Results results, JsonElement[] jsonElements, List<pcl.Description> descriptions)
{
long result;
if (results is not null && results.HeaderId is not null)
result = results.HeaderId.Value;
else
{
-WSRequest wsRequest = new(fileRead, logistics, descriptions);
+WSRequest wsRequest = new(fileRead, logistics, jsonElements, descriptions);
string directory = Path.Combine(openInsightMetrologyViewerFileShare, logistics.DateTimeFromSequence.Year.ToString(), $"WW{weekOfYear:00}");
(_, WS.Results wsResults) = WS.SendData(openInsightMetrologyViewerAPI, logistics.Sequence, directory, wsRequest);
if (wsResults.Success is null || !wsResults.Success.Value)

View File

@@ -135,7 +135,7 @@ public class FileRead : Shared.FileRead, IFileRead
return result;
}
-private void PostOpenInsightMetrologyViewerAttachments(List<pcl.Description> descriptions)
+private void PostOpenInsightMetrologyViewerAttachments(JsonElement[] jsonElements, List<pcl.Description> descriptions)
{
Shared.Metrology.WS.Results? results;
string jobIdDirectory = Path.Combine(Path.GetDirectoryName(_FileConnectorConfiguration.AlternateTargetFolder) ?? throw new Exception(), _Logistics.JobID);
@@ -151,7 +151,7 @@ public class FileRead : Shared.FileRead, IFileRead
results = wsResults[0];
}
int weekOfYear = _Calendar.GetWeekOfYear(_Logistics.DateTimeFromSequence, CalendarWeekRule.FirstDay, DayOfWeek.Sunday);
-long headerId = !_IsEAFHosted ? -1 : OpenInsightMetrologyViewer.WSRequest.GetHeaderId(this, _Logistics, _OpenInsightMetrologyViewerAPI, _OpenInsightMetrologyViewerFileShare, weekOfYear, results, descriptions);
+long headerId = !_IsEAFHosted ? -1 : OpenInsightMetrologyViewer.WSRequest.GetHeaderId(this, _Logistics, _OpenInsightMetrologyViewerAPI, _OpenInsightMetrologyViewerFileShare, weekOfYear, results, jsonElements, descriptions);
string? headerIdDirectory = GetHeaderIdDirectory(headerId);
if (string.IsNullOrEmpty(headerIdDirectory))
throw new Exception($"Didn't find header id directory <{headerId}>");
@@ -163,15 +163,16 @@ public class FileRead : Shared.FileRead, IFileRead
if (dateTime == DateTime.MinValue)
throw new ArgumentNullException(nameof(dateTime));
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
-ProcessDataStandardFormat processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath);
+string[] lines = File.ReadAllLines(reportFullPath);
+ProcessDataStandardFormat processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath, lines);
_Logistics = new Logistics(reportFullPath, processDataStandardFormat);
SetFileParameterLotIDToLogisticsMID();
-JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(processDataStandardFormat);
+JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(reportFullPath, lines, processDataStandardFormat);
List<pcl.Description> descriptions = pcl.ProcessData.GetDescriptions(jsonElements);
Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
-PostOpenInsightMetrologyViewerAttachments(descriptions);
+PostOpenInsightMetrologyViewerAttachments(jsonElements, descriptions);
-results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics[0]), tests, jsonElements, new List<FileInfo>());
+results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics), tests, jsonElements, new List<FileInfo>());
return results;
}

View File

@@ -109,7 +109,7 @@ public class FileRead : Shared.FileRead, IFileRead
}
#pragma warning disable IDE0060
-private void DirectoryMove(string reportFullPath, DateTime dateTime, List<pcl.Description> descriptions)
+private void DirectoryMove(string reportFullPath, DateTime dateTime, JsonElement[] jsonElements, List<pcl.Description> descriptions)
#pragma warning restore IDE0060
{
FileInfo fileInfo = new(reportFullPath);
@@ -122,7 +122,7 @@ public class FileRead : Shared.FileRead, IFileRead
throw new Exception("Didn't find directory by logistics sequence");
if (fileInfo.Exists && fileInfo.LastWriteTime < fileInfo.CreationTime)
File.SetLastWriteTime(reportFullPath, fileInfo.CreationTime);
-OpenInsightMetrologyViewer.WSRequest wsRequest = new(this, _Logistics, descriptions);
+OpenInsightMetrologyViewer.WSRequest wsRequest = new(this, _Logistics, jsonElements, descriptions);
JsonSerializerOptions jsonSerializerOptions = new() { WriteIndented = true };
string json = JsonSerializer.Serialize(wsRequest, wsRequest.GetType(), jsonSerializerOptions);
string directoryName = $"{Path.GetFileName(matchDirectories[0]).Split(new string[] { logisticsSequence }, StringSplitOptions.None)[0]}{_Logistics.DateTimeFromSequence:yyyy-MM-dd_hh;mm_tt_}{DateTime.Now.Ticks - _Logistics.Sequence}";
@@ -166,23 +166,24 @@ public class FileRead : Shared.FileRead, IFileRead
private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
-ProcessDataStandardFormat processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath);
+string[] lines = File.ReadAllLines(reportFullPath);
+ProcessDataStandardFormat processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath, lines);
_Logistics = new Logistics(reportFullPath, processDataStandardFormat);
SetFileParameterLotIDToLogisticsMID();
-JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(processDataStandardFormat);
+JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(reportFullPath, lines, processDataStandardFormat);
List<pcl.Description> descriptions = pcl.ProcessData.GetDescriptions(jsonElements);
Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
-results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics[0]), tests, jsonElements, new List<FileInfo>());
+results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics), tests, jsonElements, new List<FileInfo>());
if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
-DirectoryMove(reportFullPath, dateTime, descriptions);
+DirectoryMove(reportFullPath, dateTime, jsonElements, descriptions);
else if (!_IsEAFHosted)
{
-OpenInsightMetrologyViewer.WSRequest wsRequest = new(this, _Logistics, descriptions);
+OpenInsightMetrologyViewer.WSRequest wsRequest = new(this, _Logistics, jsonElements, descriptions);
JsonSerializerOptions jsonSerializerOptions = new() { WriteIndented = true };
-string json = JsonSerializer.Serialize(wsRequest, wsRequest.GetType(), jsonSerializerOptions);
+string check = JsonSerializer.Serialize(wsRequest, wsRequest.GetType(), jsonSerializerOptions);
string jsonFileName = Path.ChangeExtension(reportFullPath, ".json");
string historicalText = File.ReadAllText(jsonFileName);
-if (json != historicalText)
+if (check != historicalText)
throw new Exception("File doesn't match historical!");
}
return results;

View File

@@ -125,7 +125,7 @@ public class FileRead : Shared.FileRead, IFileRead
Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
FileCopy(reportFullPath, dateTime, descriptions);
-results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics[0]), tests, jsonElements, new List<FileInfo>());
+results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics), tests, jsonElements, new List<FileInfo>());
return results;
}

View File

@@ -0,0 +1,250 @@
using Adaptation.Eaf.Management.ConfigurationData.CellAutomation;
using Adaptation.Ifx.Eaf.EquipmentConnector.File.Configuration;
using Adaptation.Shared;
using Adaptation.Shared.Duplicator;
using Adaptation.Shared.Methods;
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.Linq;
using System.Net.Sockets;
using System.Reflection;
using System.Text;
using System.Text.Json;
using System.Threading;
namespace Adaptation.FileHandlers.TransmissionControlProtocol;
#nullable enable
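// Maintains a TCP connection to a remote endpoint (parsed from SourceFileLocation as <rawDirectory>/<ip>/<port>) instead of watching a file share: each read is buffered to an {IPAddress}-{ticks}.raw file, and once no delimiter pattern has been seen for the configured quiet period the raw pieces are concatenated into a single file in the target file location.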
public class FileRead : Shared.FileRead, IFileRead
{
private readonly int _Port;
private readonly Timer _Timer;
private static Record? _Record;
private static long _LastWrite;
private readonly string _IPAddress;
private readonly string _RawDirectory;
private readonly int _DelimiterSeconds;
private readonly string[] _DelimiterPatterns;
private static readonly object _Lock = new();
public FileRead(ISMTP smtp, Dictionary<string, string> fileParameter, string cellInstanceName, int? connectionCount, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList<ModelObjectParameterDefinition> modelObjectParameters, string equipmentDictionaryName, Dictionary<string, List<long>> dummyRuns, Dictionary<long, List<Shared.Metrology.WS.Results>> staticRuns, bool useCyclicalForDescription, bool isEAFHosted) :
base(new Description(), false, smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null)
{
_MinFileLength = 10;
_NullData = string.Empty;
_Logistics = new(this);
if (_FileParameter is null)
throw new Exception(cellInstanceConnectionName);
if (_ModelObjectParameterDefinitions is null)
throw new Exception(cellInstanceConnectionName);
if (_IsDuplicator)
throw new Exception(cellInstanceConnectionName);
string sourceFileLocation = fileConnectorConfiguration.SourceFileLocation.TrimEnd(Path.DirectorySeparatorChar, Path.AltDirectorySeparatorChar);
string? ipAddress = Path.GetDirectoryName(sourceFileLocation);
string port = Path.GetFileName(sourceFileLocation);
_Timer = new Timer(Callback, null, Timeout.Infinite, Timeout.Infinite);
_Port = int.Parse(port, System.Globalization.CultureInfo.InvariantCulture);
_DelimiterPatterns = fileConnectorConfiguration.SourceFileFilter.Split('*');
_IPAddress = Path.GetFileName(ipAddress) ?? throw new Exception(sourceFileLocation);
_RawDirectory = Path.GetDirectoryName(ipAddress) ?? throw new Exception(sourceFileLocation);
DateTime fileAgeThresholdTimeOnly = GetFileAgeThresholdTimeOnly(_FileConnectorConfiguration.FileAgeThreshold);
_DelimiterSeconds = fileAgeThresholdTimeOnly.Second;
if (Debugger.IsAttached || fileConnectorConfiguration.PreProcessingMode == FileConnectorConfiguration.PreProcessingModeEnum.Process || _FileConnectorConfiguration.FileScanningIntervalInSeconds is null)
Callback(null);
else
{
TimeSpan timeSpan = new(DateTime.Now.AddSeconds(_FileConnectorConfiguration.FileScanningIntervalInSeconds.Value).Ticks - DateTime.Now.Ticks);
_ = _Timer.Change((long)timeSpan.TotalMilliseconds, Timeout.Infinite);
}
}
void IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, Exception exception) => Move(extractResults);
void IFileRead.WaitForThread() => WaitForThread(thread: null, threadExceptions: null);
string IFileRead.GetEventDescription()
{
string result = _Description.GetEventDescription();
return result;
}
List<string> IFileRead.GetHeaderNames()
{
List<string> results = _Description.GetHeaderNames();
return results;
}
string[] IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, string to, string from, string resolvedFileLocation, Exception exception)
{
string[] results = Move(extractResults, to, from, resolvedFileLocation, exception);
return results;
}
JsonProperty[] IFileRead.GetDefault()
{
JsonProperty[] results = _Description.GetDefault(this, _Logistics);
return results;
}
Dictionary<string, string> IFileRead.GetDisplayNamesJsonElement()
{
Dictionary<string, string> results = _Description.GetDisplayNamesJsonElement(this);
return results;
}
List<IDescription> IFileRead.GetDescriptions(IFileRead fileRead, List<Test> tests, IProcessData processData)
{
List<IDescription> results = _Description.GetDescriptions(fileRead, _Logistics, tests, processData);
return results;
}
Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.GetExtractResult(string reportFullPath, string eventName) => throw new Exception(string.Concat("See ", nameof(Callback)));
Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.ReExtract() => throw new Exception(string.Concat("See ", nameof(Callback)));
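// FileAgeThreshold is parsed as colon-separated day:hour:minute:second segments; only the seconds component is used below as the delimiter quiet period (_DelimiterSeconds).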
private static DateTime GetFileAgeThresholdTimeOnly(string fileAgeThreshold)
{
DateTime result = DateTime.MinValue;
string[] segments = fileAgeThreshold.Split(':');
for (int i = 0; i < segments.Length; i++)
{
result = i switch
{
0 => result.AddDays(double.Parse(segments[i])),
1 => result.AddHours(double.Parse(segments[i])),
2 => result.AddMinutes(double.Parse(segments[i])),
3 => result.AddSeconds(double.Parse(segments[i])),
_ => throw new Exception(),
};
}
return result;
}
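// Concatenates every buffered {ipAddress}-*.raw capture into one file in the target file location, then deletes the individual raw pieces.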
private static void ReadFiles(log4net.ILog log, FileConnectorConfiguration fileConnectorConfiguration, string ipAddress, string rawDirectory)
{
List<byte> bytes = new();
string[] files = Directory.GetFiles(rawDirectory, $"{ipAddress}-*.raw", SearchOption.TopDirectoryOnly);
log.Info($"Read {files.Length} files");
foreach (string file in files)
{
foreach (byte @byte in File.ReadAllBytes(file))
bytes.Add(@byte);
}
if (bytes.Count > 0)
{
string bytesFile = Path.Combine(fileConnectorConfiguration.TargetFileLocation, $"{ipAddress}-{DateTime.Now.Ticks}{fileConnectorConfiguration.TargetFileName}");
File.WriteAllBytes(bytesFile, bytes.ToArray());
foreach (string file in files)
File.Delete(file);
}
}
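// Opens the TCP client and caches the stream plus the non-public Socket property (via reflection) so the callback can later check Socket.Connected.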
private static void CreateClient(log4net.ILog log, string ipAddress, int port)
{
log.Debug(ipAddress);
TcpClient tcpClient = new(ipAddress, port);
NetworkStream networkStream = tcpClient.GetStream();
Type baseType = typeof(NetworkStream);
PropertyInfo? propertyInfo = baseType.GetProperty("Socket", BindingFlags.Instance | BindingFlags.NonPublic);
_Record = new(binaryReader: new(networkStream), binaryWriter: new(networkStream), networkStream: networkStream, propertyInfo: propertyInfo, readTimes: new());
}
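// Drains the network stream in 1 KB chunks until DataAvailable reports nothing pending.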
private static byte[] GetBytes(NetworkStream networkStream)
{
List<byte> results = new();
byte[] bytes = new byte[1024];
do
{
int count = networkStream.Read(bytes, 0, bytes.Length);
if (count > 0)
results.AddRange(bytes.Take(count));
}
while (networkStream.DataAvailable);
return results.ToArray();
}
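// Reconnects when the socket is down, writes a single NUL byte at most once per minute (presumably a keep-alive probe), saves any available data to a .raw file, and flushes the buffered captures once the quiet period has elapsed since the last delimiter match.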
private void Callback()
{
if (_Record?.NetworkStream is null || _Record.PropertyInfo is null || _Record.PropertyInfo.GetValue(_Record.NetworkStream) is not Socket socket || !socket.Connected)
CreateClient(_Log, _IPAddress, _Port);
if (_Record is not null)
{
TimeSpan timeSpan = new(DateTime.Now.Ticks - _LastWrite);
if (_LastWrite == 0 || timeSpan.TotalMinutes > 1)
{
try
{
_Record.NetworkStream.WriteByte(Convert.ToByte('\0'));
_LastWrite = DateTime.Now.Ticks;
}
catch (Exception)
{ }
}
}
if (_Record?.NetworkStream is not null && _Record.NetworkStream.CanRead && _Record.NetworkStream.DataAvailable)
{
byte[] bytes = GetBytes(_Record.NetworkStream);
_Log.Info($"Read {bytes.Length} bytes");
if (bytes.Length > 0)
{
string path = Path.Combine(_RawDirectory, $"{_IPAddress}-{DateTime.Now.Ticks}.raw");
File.WriteAllBytes(path, bytes);
string content = Encoding.ASCII.GetString(bytes);
_Log.Debug($"Content {content}");
foreach (string delimiterPattern in _DelimiterPatterns)
{
if (content.Contains(delimiterPattern))
_Record.ReadTimes.Add(DateTime.Now.Ticks);
}
if (_Record.ReadTimes.Count > 0)
_Record.ReadTimes.Add(DateTime.Now.Ticks);
}
}
if (_Record is not null && _Record.ReadTimes.Count > 0 && _DelimiterSeconds > 0)
{
TimeSpan? timeSpan = new(DateTime.Now.Ticks - _Record.ReadTimes.Last());
if (timeSpan.Value.TotalSeconds > _DelimiterSeconds)
{
ReadFiles(_Log, _FileConnectorConfiguration, _IPAddress, _RawDirectory);
_Record.ReadTimes.Clear();
}
}
}
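// Timer entry point: serializes work through _Lock, emails a high-priority alert on failure, and always re-arms the timer for the next scan interval.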
private void Callback(object? state)
{
try
{
lock (_Lock)
Callback();
}
catch (Exception exception)
{
string subject = string.Concat("Exception:", _CellInstanceConnectionName);
string body = string.Concat(exception.Message, Environment.NewLine, Environment.NewLine, exception.StackTrace);
try
{ _SMTP.SendHighPriorityEmailMessage(subject, body); }
catch (Exception) { }
}
try
{
if (_FileConnectorConfiguration.FileScanningIntervalInSeconds is null)
throw new Exception(_CellInstanceConnectionName);
TimeSpan timeSpan = new(DateTime.Now.AddSeconds(_FileConnectorConfiguration.FileScanningIntervalInSeconds.Value).Ticks - DateTime.Now.Ticks);
_ = _Timer.Change((long)timeSpan.TotalMilliseconds, Timeout.Infinite);
}
catch (Exception exception)
{
string subject = string.Concat("Exception:", _CellInstanceConnectionName);
string body = string.Concat(exception.Message, Environment.NewLine, Environment.NewLine, exception.StackTrace);
try
{ _SMTP.SendHighPriorityEmailMessage(subject, body); }
catch (Exception) { }
}
}
}

View File

@@ -0,0 +1,28 @@
using System.Collections.Generic;
using System.IO;
using System.Net.Sockets;
using System.Reflection;
namespace Adaptation.FileHandlers.TransmissionControlProtocol;
#nullable enable
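// Bundles the state of one open connection (reader/writer, the network stream, the reflected Socket property, and the ticks of delimiter matches) so it can be shared across timer callbacks.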
internal class Record
{
public BinaryReader BinaryReader { get; }
public BinaryWriter BinaryWriter { get; }
public NetworkStream NetworkStream { get; }
public PropertyInfo? PropertyInfo { get; }
public List<long> ReadTimes { get; }
public Record(BinaryReader binaryReader, BinaryWriter binaryWriter, NetworkStream networkStream, PropertyInfo? propertyInfo, List<long> readTimes)
{
BinaryReader = binaryReader;
BinaryWriter = binaryWriter;
NetworkStream = networkStream;
PropertyInfo = propertyInfo;
ReadTimes = readTimes;
}
}

View File

@@ -9,5 +9,7 @@ internal class Constant
public string Date { get; } = "Date:";
public string StdDev { get; } = "Std Dev:";
public string Average { get; } = "Average:";
+public string Statistics { get; } = "Statistics:";
+public string DatabaseId { get; } = "Database ID:";
}

View File

@@ -42,7 +42,7 @@ internal class Convert
RedirectStandardOutput = true,
};
Process process = Process.Start(processStartInfo);
-_ = process.WaitForExit(30000);
+_ = process.WaitForExit(300000);
string text;
string checkFile;
string[] pdfFiles = Directory.GetFiles(sourcePath, "*.pdf", SearchOption.TopDirectoryOnly);
@@ -75,6 +75,7 @@ internal class Convert
string[] txtFiles = Directory.GetFiles(sourcePath, $"{sourceFileNameWithoutExtension}_*.txt", SearchOption.TopDirectoryOnly);
if (txtFiles.Length != 0)
{
+txtFiles = (from l in txtFiles orderby l.Length, l select l).ToArray();
foreach (string txtFile in txtFiles)
{
sourceFiles.Add(txtFile);

View File

@@ -4,103 +4,106 @@ using System;
using System.Collections.Generic;
using System.Linq;
using System.Text.Json;
+using System.Text.Json.Serialization;
namespace Adaptation.FileHandlers.pcl;
public class Description : IDescription, Shared.Properties.IDescription
{
-public int Test { get; set; }
+[JsonPropertyName("EventId")] public int Test { get; set; }
-public int Count { get; set; }
+[JsonPropertyName("Count")] public int Count { get; set; }
-public int Index { get; set; }
+[JsonPropertyName("Index")] public int Index { get; set; }
//
public string EventName { get; set; }
public string NullData { get; set; }
public string JobID { get; set; }
public string Sequence { get; set; }
-public string MesEntity { get; set; }
+[JsonPropertyName("MesEntity")] public string MesEntity { get; set; }
public string ReportFullPath { get; set; }
public string ProcessJobID { get; set; }
public string MID { get; set; }
//
-public string Date { get; set; }
+[JsonPropertyName("Date")] public string Date { get; set; }
-public string Employee { get; set; }
+[JsonPropertyName("Employee")] public string Employee { get; set; }
public string Lot { get; set; }
-public string PSN { get; set; }
+[JsonPropertyName("PSN")] public string PSN { get; set; }
-public string Reactor { get; set; }
+[JsonPropertyName("Reactor")] public string Reactor { get; set; }
-public string Recipe { get; set; }
+[JsonPropertyName("Recipe")] public string Recipe { get; set; }
+[JsonPropertyName("IndexOf")] public string IndexOf { get; set; }
+[JsonPropertyName("AttemptCounter")] public string AttemptCounter { get; set; }
//
-public string Comments { get; set; }
+[JsonPropertyName("Comments")] public string Comments { get; set; }
-public string Diameter { get; set; }
+[JsonPropertyName("Diameter")] public string Diameter { get; set; }
-public string Exclusion { get; set; }
+[JsonPropertyName("Exclusion")] public string Exclusion { get; set; }
-public string Gain { get; set; }
+[JsonPropertyName("Gain")] public string Gain { get; set; }
public string HeaderUniqueId { get; set; }
-public string Laser { get; set; }
+[JsonPropertyName("Laser")] public string Laser { get; set; }
-public string ParseErrorText { get; set; }
+[JsonPropertyName("ParseErrorText")] public string ParseErrorText { get; set; }
-public string RDS { get; set; }
+[JsonPropertyName("RDS")] public string RDS { get; set; }
-public string Slot { get; set; }
+[JsonPropertyName("WaferId")] public string Slot { get; set; }
public string UniqueId { get; set; }
//
-public string AreaCount { get; set; }
+[JsonPropertyName("AreaCount")] public string AreaCount { get; set; }
-public string AreaCountAvg { get; set; }
+[JsonPropertyName("AreaCountAvg")] public string AreaCountAvg { get; set; }
-public string AreaCountMax { get; set; }
+[JsonPropertyName("AreaCountMax")] public string AreaCountMax { get; set; }
-public string AreaCountMin { get; set; }
+[JsonPropertyName("AreaCountMin")] public string AreaCountMin { get; set; }
-public string AreaCountStdDev { get; set; }
+[JsonPropertyName("AreaCountStdDev")] public string AreaCountStdDev { get; set; }
-public string AreaTotal { get; set; }
+[JsonPropertyName("WaferAreaTotal")] public string AreaTotal { get; set; }
-public string AreaTotalAvg { get; set; }
+[JsonPropertyName("AreaTotalAvg")] public string AreaTotalAvg { get; set; }
-public string AreaTotalMax { get; set; }
+[JsonPropertyName("AreaTotalMax")] public string AreaTotalMax { get; set; }
-public string AreaTotalMin { get; set; }
+[JsonPropertyName("AreaTotalMin")] public string AreaTotalMin { get; set; }
-public string AreaTotalStdDev { get; set; }
+[JsonPropertyName("AreaTotalStdDev")] public string AreaTotalStdDev { get; set; }
-public string Bin1 { get; set; }
+[JsonPropertyName("Bin1")] public string Bin1 { get; set; }
-public string Bin2 { get; set; }
+[JsonPropertyName("Bin2")] public string Bin2 { get; set; }
-public string Bin3 { get; set; }
+[JsonPropertyName("Bin3")] public string Bin3 { get; set; }
-public string Bin4 { get; set; }
+[JsonPropertyName("Bin4")] public string Bin4 { get; set; }
-public string Bin5 { get; set; }
+[JsonPropertyName("Bin5")] public string Bin5 { get; set; }
-public string Bin6 { get; set; }
+[JsonPropertyName("Bin6")] public string Bin6 { get; set; }
-public string Bin7 { get; set; }
+[JsonPropertyName("Bin7")] public string Bin7 { get; set; }
-public string Bin8 { get; set; }
+[JsonPropertyName("Bin8")] public string Bin8 { get; set; }
-public string HazeAverage { get; set; }
+[JsonPropertyName("WaferHazeAverage")] public string HazeAverage { get; set; }
-public string HazeAverageAvg { get; set; }
+[JsonPropertyName("HazeAverageAvg")] public string HazeAverageAvg { get; set; }
-public string HazeAverageMax { get; set; }
+[JsonPropertyName("HazeAverageMax")] public string HazeAverageMax { get; set; }
-public string HazeAverageMin { get; set; }
+[JsonPropertyName("HazeAverageMin")] public string HazeAverageMin { get; set; }
-public string HazeAverageStdDev { get; set; }
+[JsonPropertyName("HazeAverageStdDev")] public string HazeAverageStdDev { get; set; }
-public string HazePeak { get; set; }
+[JsonPropertyName("HazePeak")] public string HazePeak { get; set; }
-public string HazeRegion { get; set; }
+[JsonPropertyName("WaferHazeRegion")] public string HazeRegion { get; set; }
-public string HazeRegionAvg { get; set; }
+[JsonPropertyName("HazeRegionAvg")] public string HazeRegionAvg { get; set; }
-public string HazeRegionMax { get; set; }
+[JsonPropertyName("HazeRegionMax")] public string HazeRegionMax { get; set; }
-public string HazeRegionMin { get; set; }
+[JsonPropertyName("HazeRegionMin")] public string HazeRegionMin { get; set; }
-public string HazeRegionStdDev { get; set; }
+[JsonPropertyName("HazeRegionStdDev")] public string HazeRegionStdDev { get; set; }
-public string HazeRng { get; set; }
+[JsonPropertyName("HazeRng")] public string HazeRng { get; set; }
-public string LPDCM2 { get; set; }
+[JsonPropertyName("LPDCM2")] public string LPDCM2 { get; set; }
-public string LPDCM2Avg { get; set; }
+[JsonPropertyName("LPDCM2Avg")] public string LPDCM2Avg { get; set; }
-public string LPDCM2Max { get; set; }
+[JsonPropertyName("LPDCM2Max")] public string LPDCM2Max { get; set; }
-public string LPDCM2Min { get; set; }
+[JsonPropertyName("LPDCM2Min")] public string LPDCM2Min { get; set; }
-public string LPDCM2StdDev { get; set; }
+[JsonPropertyName("LPDCM2StdDev")] public string LPDCM2StdDev { get; set; }
-public string LPDCount { get; set; }
+[JsonPropertyName("LPDCount")] public string LPDCount { get; set; }
-public string LPDCountAvg { get; set; }
+[JsonPropertyName("LPDCountAvg")] public string LPDCountAvg { get; set; }
-public string LPDCountMax { get; set; }
+[JsonPropertyName("LPDCountMax")] public string LPDCountMax { get; set; }
-public string LPDCountMin { get; set; }
+[JsonPropertyName("LPDCountMin")] public string LPDCountMin { get; set; }
-public string LPDCountStdDev { get; set; }
+[JsonPropertyName("LPDCountStdDev")] public string LPDCountStdDev { get; set; }
-public string Mean { get; set; }
+[JsonPropertyName("Mean")] public string Mean { get; set; }
-public string ScratchCount { get; set; }
+[JsonPropertyName("ScratchCount")] public string ScratchCount { get; set; }
-public string ScratchCountAvg { get; set; }
+[JsonPropertyName("ScratchCountAvg")] public string ScratchCountAvg { get; set; }
-public string ScratchCountMax { get; set; }
+[JsonPropertyName("ScratchCountMax")] public string ScratchCountMax { get; set; }
-public string ScratchCountMin { get; set; }
+[JsonPropertyName("ScratchCountMin")] public string ScratchCountMin { get; set; }
-public string ScratchCountStdDev { get; set; }
+[JsonPropertyName("ScratchCountStdDev")] public string ScratchCountStdDev { get; set; }
-public string ScratchTotal { get; set; }
+[JsonPropertyName("WaferScratchTotal")] public string ScratchTotal { get; set; }
-public string ScratchTotalAvg { get; set; }
+[JsonPropertyName("ScratchTotalAvg")] public string ScratchTotalAvg { get; set; }
-public string ScratchTotalMax { get; set; }
+[JsonPropertyName("ScratchTotalMax")] public string ScratchTotalMax { get; set; }
-public string ScratchTotalMin { get; set; }
+[JsonPropertyName("ScratchTotalMin")] public string ScratchTotalMin { get; set; }
-public string ScratchTotalStdDev { get; set; }
+[JsonPropertyName("ScratchTotalStdDev")] public string ScratchTotalStdDev { get; set; }
-public string Sort { get; set; }
+[JsonPropertyName("Sort")] public string Sort { get; set; }
-public string StdDev { get; set; }
+[JsonPropertyName("StdDev")] public string StdDev { get; set; }
-public string SumOfDefects { get; set; }
+[JsonPropertyName("SumOfDefects")] public string SumOfDefects { get; set; }
-public string SumOfDefectsAvg { get; set; }
+[JsonPropertyName("SumOfDefectsAvg")] public string SumOfDefectsAvg { get; set; }
-public string SumOfDefectsMax { get; set; }
+[JsonPropertyName("SumOfDefectsMax")] public string SumOfDefectsMax { get; set; }
-public string SumOfDefectsMin { get; set; }
+[JsonPropertyName("SumOfDefectsMin")] public string SumOfDefectsMin { get; set; }
-public string SumOfDefectsStdDev { get; set; }
+[JsonPropertyName("SumOfDefectsStdDev")] public string SumOfDefectsStdDev { get; set; }
-public string Thresh { get; set; }
+[JsonPropertyName("Thresh")] public string Thresh { get; set; }
-public string Thruput { get; set; }
+[JsonPropertyName("Thruput")] public string Thruput { get; set; }
string IDescription.GetEventDescription() => "File Has been read and parsed";
@@ -306,6 +309,8 @@ public class Description : IDescription, Shared.Properties.IDescription
PSN = processData.PSN,
Reactor = processData.Reactor,
Recipe = processData.Recipe,
+IndexOf = nameof(IndexOf),
+AttemptCounter = nameof(AttemptCounter),
//
Comments = detail.Comments,
Diameter = detail.Diameter,
@@ -414,6 +419,8 @@ public class Description : IDescription, Shared.Properties.IDescription
PSN = nameof(PSN),
Reactor = nameof(Reactor),
Recipe = nameof(Recipe),
+IndexOf = nameof(IndexOf),
+AttemptCounter = nameof(AttemptCounter),
//
Comments = nameof(Comments),
Diameter = nameof(Diameter),
@@ -492,4 +499,16 @@ public class Description : IDescription, Shared.Properties.IDescription
internal static string GetDateFormat() => "MM/dd/yyyy hh:mm:ss tt";
}
+[JsonSourceGenerationOptions(WriteIndented = true, NumberHandling = JsonNumberHandling.AllowReadingFromString | JsonNumberHandling.WriteAsString)]
+[JsonSerializable(typeof(Description))]
+internal partial class DescriptionSourceGenerationContext : JsonSerializerContext
+{
+}
+[JsonSourceGenerationOptions(WriteIndented = true, NumberHandling = JsonNumberHandling.AllowReadingFromString | JsonNumberHandling.WriteAsString)]
+[JsonSerializable(typeof(Description[]))]
+internal partial class DescriptionArraySourceGenerationContext : JsonSerializerContext
+{
+}

View File

@@ -1,46 +1,60 @@
+using System.Text.Json.Serialization;
namespace Adaptation.FileHandlers.pcl;
public class Detail
{
public long Id { get; set; }
-public string AreaCount { get; set; }
+[JsonPropertyName("AreaCount")] public string AreaCount { get; set; }
-public string AreaTotal { get; set; }
+[JsonPropertyName("WaferAreaTotal")] public string AreaTotal { get; set; } // different name
-public string Bin1 { get; set; }
+[JsonPropertyName("Bin1")] public string Bin1 { get; set; }
-public string Bin2 { get; set; }
+[JsonPropertyName("Bin2")] public string Bin2 { get; set; }
-public string Bin3 { get; set; }
+[JsonPropertyName("Bin3")] public string Bin3 { get; set; }
-public string Bin4 { get; set; }
+[JsonPropertyName("Bin4")] public string Bin4 { get; set; }
-public string Bin5 { get; set; }
+[JsonPropertyName("Bin5")] public string Bin5 { get; set; }
-public string Bin6 { get; set; }
+[JsonPropertyName("Bin6")] public string Bin6 { get; set; }
-public string Bin7 { get; set; }
+[JsonPropertyName("Bin7")] public string Bin7 { get; set; }
-public string Bin8 { get; set; }
+[JsonPropertyName("Bin8")] public string Bin8 { get; set; }
-public string Comments { get; set; }
+[JsonPropertyName("Comments")] public string Comments { get; set; }
-public string Date { get; set; }
+[JsonPropertyName("Date")] public string Date { get; set; }
-public string Diameter { get; set; }
+[JsonPropertyName("Diameter")] public string Diameter { get; set; }
-public string Exclusion { get; set; }
+[JsonPropertyName("Exclusion")] public string Exclusion { get; set; }
-public string Gain { get; set; }
+[JsonPropertyName("Gain")] public string Gain { get; set; }
-public string HazeAverage { get; set; }
+[JsonPropertyName("WaferHazeAverage")] public string HazeAverage { get; set; } // different name
-public string HazePeak { get; set; }
+[JsonPropertyName("HazePeak")] public string HazePeak { get; set; }
-public string HazeRegion { get; set; }
+[JsonPropertyName("WaferHazeRegion")] public string HazeRegion { get; set; } // different name
-public string HazeRng { get; set; }
+[JsonPropertyName("HazeRng")] public string HazeRng { get; set; }
public string HeaderUniqueId { get; set; }
-public string LPDCM2 { get; set; }
+[JsonPropertyName("LPDCM2")] public string LPDCM2 { get; set; }
-public string LPDCount { get; set; }
+[JsonPropertyName("LPDCount")] public string LPDCount { get; set; }
-public string Laser { get; set; }
+[JsonPropertyName("Laser")] public string Laser { get; set; }
-public string Mean { get; set; }
+[JsonPropertyName("Mean")] public string Mean { get; set; }
-public string Recipe { get; set; }
+[JsonPropertyName("Recipe")] public string Recipe { get; set; }
-public string ScratchCount { get; set; }
+[JsonPropertyName("ScratchCount")] public string ScratchCount { get; set; }
-public string ScratchTotal { get; set; }
+[JsonPropertyName("WaferScratchTotal")] public string ScratchTotal { get; set; } // different name
-public string Slot { get; set; }
+[JsonPropertyName("WaferId")] public string Slot { get; set; } // different name
-public string Sort { get; set; }
+[JsonPropertyName("Sort")] public string Sort { get; set; }
-public string StdDev { get; set; }
+[JsonPropertyName("StdDev")] public string StdDev { get; set; }
-public string SumOfDefects { get; set; }
+[JsonPropertyName("SumOfDefects")] public string SumOfDefects { get; set; }
-public string Thresh { get; set; }
+[JsonPropertyName("Thresh")] public string Thresh { get; set; }
-public string Thruput { get; set; }
+[JsonPropertyName("Thruput")] public string Thruput { get; set; }
public string Title { get; set; }
public string UniqueId { get; set; }
public string Data { get; set; }
#pragma warning disable IDE1006
public int i { get; set; }
}
+[JsonSourceGenerationOptions(WriteIndented = true, NumberHandling = JsonNumberHandling.AllowReadingFromString | JsonNumberHandling.WriteAsString)]
+[JsonSerializable(typeof(Detail))]
+internal partial class DetailSourceGenerationContext : JsonSerializerContext
+{
+}
+[JsonSourceGenerationOptions(WriteIndented = true, NumberHandling = JsonNumberHandling.AllowReadingFromString | JsonNumberHandling.WriteAsString)]
+[JsonSerializable(typeof(Detail[]))]
+internal partial class DetailArraySourceGenerationContext : JsonSerializerContext
+{
+}

View File

@@ -1,6 +1,7 @@
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
+using System.Linq;
using System.Text.Json.Serialization;
namespace Adaptation.FileHandlers.pcl;
@@ -215,17 +216,35 @@ public class Header
return GetBefore(text, i, "\n", false);
}
-internal static Header Get(ReadOnlyDictionary<string, string> pages, Constant constant, string headerFileName)
+private static string? GetText(ReadOnlyDictionary<string, string> pages, Constant constant)
+{
+string? text;
+string? headerFileName = null;
+foreach (KeyValuePair<string, string> keyValuePair in pages)
+{
+if (!pages.TryGetValue(keyValuePair.Key, out text))
+throw new Exception();
+if (!text.Contains(constant.Statistics))
+continue;
+headerFileName = keyValuePair.Key;
+}
+headerFileName ??= pages.Count == 0 ? string.Empty : pages.ElementAt(pages.Count - 1).Key;
+if (pages.Count == 0 || !pages.TryGetValue(headerFileName, out text))
+text = null;
+return text;
+}
+internal static Header Get(ReadOnlyDictionary<string, string> pages, Constant constant)
{
Header? result;
string id;
-string? text;
string[] segmentsB;
string[] segmentsC;
int[] i = new int[] { 0 };
WaferSummary waferSummary;
List<WaferSummary> collection = new();
-if (!pages.TryGetValue(headerFileName, out text))
+string? text = GetText(pages, constant);
+if (string.IsNullOrEmpty(text))
throw new Exception();
ScanPast(text, i, constant.Date);
string date = GetToEOL(text, i);
@@ -317,6 +336,6 @@ public class Header
[JsonSourceGenerationOptions(WriteIndented = true)]
[JsonSerializable(typeof(Header))]
-internal partial class HeaderSourceGenerationContext : JsonSerializerContext
+internal partial class PCLHeaderSourceGenerationContext : JsonSerializerContext
{
}

View File

@@ -703,12 +703,11 @@ public class ProcessData : IProcessData
{
List<Description> results = new();
Description? description;
-JsonSerializerOptions jsonSerializerOptions = new() { NumberHandling = JsonNumberHandling.AllowReadingFromString | JsonNumberHandling.WriteAsString };
foreach (JsonElement jsonElement in jsonElements)
{
if (jsonElement.ValueKind != JsonValueKind.Object)
throw new Exception();
-description = JsonSerializer.Deserialize<Description>(jsonElement.ToString(), jsonSerializerOptions);
+description = JsonSerializer.Deserialize(jsonElement.ToString(), DescriptionSourceGenerationContext.Default.Description);
if (description is null)
continue;
results.Add(description);

View File

@@ -188,6 +188,6 @@ internal class Row
[JsonSourceGenerationOptions(WriteIndented = true)]
[JsonSerializable(typeof(Row))]
-internal partial class RowSourceGenerationContext : JsonSerializerContext
+internal partial class PCLRowSourceGenerationContext : JsonSerializerContext
{
}

View File

@@ -15,19 +15,41 @@ namespace Adaptation.FileHandlers.pcl;
internal class Run
{
+public Header Header { get; }
+public ReadOnlyCollection<Wafer> Wafers { get; }
public Run(Header header, ReadOnlyCollection<Wafer> wafers)
{
Header = header;
Wafers = wafers;
}
-public Header Header { get; }
-public ReadOnlyCollection<Wafer> Wafers { get; }
+private static ReadOnlyCollection<Wafer> GetLastWaferForEachSlot(ReadOnlyDictionary<string, string> pages, Constant constant, Header header)
+{
+List<Wafer> results = new();
+string id;
+Wafer wafer;
+ReadOnlyCollection<Wafer>? wafers;
+ReadOnlyDictionary<string, ReadOnlyCollection<Wafer>> keyValuePairs = Wafer.Get(pages, constant);
+ReadOnlyCollection<string> waferIds = GetWaferIds(header);
+for (int i = 0; i < waferIds.Count; i++)
+{
+id = waferIds[i];
+if (!keyValuePairs.TryGetValue(id, out wafers) || wafers.Count == 0)
+wafer = Wafer.Get(id);
+else
+wafer = (from l in wafers where l.Recipe == header.Recipe select l).Last();
+if (wafer is null)
+break;
+results.Add(wafer);
+}
+return results.AsReadOnly();
+}
private static void WriteJson(Logistics logistics, List<FileInfo> fileInfoCollection, Run result)
{
FileInfo fileInfo = new($"{logistics.ReportFullPath}.run.json");
-string json = JsonSerializer.Serialize(result, RunSourceGenerationContext.Default.Run);
+string json = JsonSerializer.Serialize(result, PCLRunSourceGenerationContext.Default.Run);
File.WriteAllText(fileInfo.FullName, json);
File.SetLastWriteTime(fileInfo.FullName, logistics.DateTimeFromSequence);
fileInfoCollection.Add(fileInfo);
@@ -101,15 +123,12 @@ internal class Run
{
Run? result;
Constant constant = new();
-string headerFileName = pages.ElementAt(pages.Count - 1).Key;
-Header? header = Header.Get(pages, constant, headerFileName);
+Header? header = Header.Get(pages, constant);
if (header is null)
result = null;
else
{
-ReadOnlyCollection<string> waferIds = GetWaferIds(header);
-ReadOnlyDictionary<string, Wafer> keyValuePairs = Wafer.Get(pages, constant, headerFileName, header);
-ReadOnlyCollection<Wafer> wafers = Wafer.Get(waferIds, keyValuePairs);
+ReadOnlyCollection<Wafer> wafers = GetLastWaferForEachSlot(pages, constant, header);
result = new(header, wafers);
WriteJson(logistics, fileInfoCollection, result);
WriteCommaSeparatedValues(logistics, result);
@@ -121,6 +140,6 @@ internal class Run
[JsonSourceGenerationOptions(WriteIndented = true)]
[JsonSerializable(typeof(Run))]
-internal partial class RunSourceGenerationContext : JsonSerializerContext
+internal partial class PCLRunSourceGenerationContext : JsonSerializerContext
{
}

View File

@@ -1,6 +1,7 @@
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
+using System.Linq;
using System.Text.Json.Serialization;
namespace Adaptation.FileHandlers.pcl;
@@ -46,7 +47,7 @@ public class Wafer
Recipe = recipe;
}
-private static Wafer? Get(string id) =>
+internal static Wafer Get(string id) =>
new(date: string.Empty,
id: id,
comments: string.Empty,
@@ -113,24 +114,21 @@ public class Wafer
public string Thruput { get; }
public string Recipe { get; }
-internal static ReadOnlyDictionary<string, Wafer> Get(ReadOnlyDictionary<string, string> pages, Constant constant, string headerFileName, Header header)
+internal static ReadOnlyDictionary<string, ReadOnlyCollection<Wafer>> Get(ReadOnlyDictionary<string, string> pages, Constant constant)
{
-Dictionary<string, Wafer> results = new();
+Dictionary<string, ReadOnlyCollection<Wafer>> results = new();
Wafer wafer;
string? text;
List<string> stringList;
int[] i = new int[] { 0 };
+Dictionary<string, List<Wafer>> keyValuePairs = new();
foreach (KeyValuePair<string, string> keyValuePair in pages)
{
-if (keyValuePair.Key == headerFileName)
-continue;
-if (!pages.ContainsKey(keyValuePair.Key))
-throw new Exception();
i[0] = 0;
stringList = new();
if (!pages.TryGetValue(keyValuePair.Key, out text))
throw new Exception();
-if (string.IsNullOrEmpty(text) || !text.Contains(constant.Id))
+if (string.IsNullOrEmpty(text) || !text.Contains(constant.Id) || text.Contains(constant.Statistics) || text.Contains(constant.DatabaseId))
continue;
Header.ScanPast(text, i, constant.Date);
string date = Header.GetToEOL(text, i);
@@ -139,8 +137,6 @@ public class Wafer
if (id.Length > 5)
id = string.Concat(id.Substring(0, 5), "... - ***");
id = id.Replace("*", "");
-if (results.ContainsKey(id))
-continue;
Header.ScanPast(text, i, "Comments:");
string comments = Header.GetToEOL(text, i);
Header.ScanPast(text, i, "Sort:");
@@ -190,8 +186,6 @@ public class Wafer
string thruput = Header.GetToEOL(text, i);
Header.ScanPast(text, i, "Recipe ID:");
string recipe = Header.GetToEOL(text, i);
-if (recipe != header.Recipe)
-continue;
wafer = new(date: date,
id: id,
comments: comments,
@@ -224,32 +218,19 @@ public class Wafer
hazeRng: hazeRng,
thruput: thruput,
recipe: recipe);
-results.Add(id, wafer);
+if (!keyValuePairs.ContainsKey(id))
+keyValuePairs.Add(id, new List<Wafer>());
+keyValuePairs[id].Add(wafer);
}
+foreach (KeyValuePair<string, List<Wafer>> keyValuePair in keyValuePairs)
+results.Add(keyValuePair.Key, keyValuePair.Value.AsReadOnly());
return new(results);
}
-internal static ReadOnlyCollection<Wafer> Get(ReadOnlyCollection<string> waferIds, ReadOnlyDictionary<string, Wafer> keyValuePairs)
-{
-List<Wafer> results = new();
-string id;
-Wafer? wafer;
-for (int i = 0; i < waferIds.Count; i++)
-{
-id = waferIds[i];
-if (!keyValuePairs.TryGetValue(id, out wafer))
-wafer = Get(id);
-if (wafer is null)
-break;
-results.Add(wafer);
-}
-return results.AsReadOnly();
-}
}
[JsonSourceGenerationOptions(WriteIndented = true)]
[JsonSerializable(typeof(Wafer))]
-internal partial class WaferSourceGenerationContext : JsonSerializerContext
+internal partial class PCLWaferSourceGenerationContext : JsonSerializerContext
{
}

View File

@ -38,6 +38,6 @@ public class WaferSummary
[JsonSourceGenerationOptions(WriteIndented = true)] [JsonSourceGenerationOptions(WriteIndented = true)]
[JsonSerializable(typeof(WaferSummary))] [JsonSerializable(typeof(WaferSummary))]
internal partial class WaferSummarySourceGenerationContext : JsonSerializerContext internal partial class PCLWaferSummarySourceGenerationContext : JsonSerializerContext
{ {
} }

View File

@ -0,0 +1,15 @@
namespace Adaptation.FileHandlers.pdsf;
internal class Constant
{
public string Id { get; } = "ID#";
public string Max { get; } = "Max:";
public string Min { get; } = "Min:";
public string Date { get; } = "Date:";
public string StdDev { get; } = "Std Dev:";
public string Average { get; } = "Average:";
public string Statistics { get; } = "Statistics:";
public string DatabaseId { get; } = "Database ID:";
}

View File

@ -0,0 +1,150 @@
using Adaptation.Shared;
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Diagnostics;
using System.IO;
using System.Linq;
namespace Adaptation.FileHandlers.pdsf;
internal class Convert
{
/// <summary>
/// Convert the raw data file to a parsable format - in this case from PCL to PDF
/// </summary>
/// <param name="ghostPCLFileName">path to the GhostPCL executable used for the conversion</param>
/// <param name="logistics">logistics for the source report to be converted to PDF</param>
/// <returns>full path of the generated PDF</returns>
private static string ConvertSourceFileToPdf(string ghostPCLFileName, Logistics logistics)
{
string result = Path.ChangeExtension(logistics.ReportFullPath, ".pdf");
if (!File.Exists(result))
{
//string arguments = string.Concat("-i \"", sourceFile, "\" -o \"", result, "\"");
string arguments = string.Concat("-dSAFER -dBATCH -dNOPAUSE -sOutputFile=\"", result, "\" -sDEVICE=pdfwrite \"", logistics.ReportFullPath, "\"");
//Process process = Process.Start(configData.LincPDFCFileName, arguments);
Process process = Process.Start(ghostPCLFileName, arguments);
_ = process.WaitForExit(30000);
if (!File.Exists(result))
throw new Exception("PDF file wasn't created");
}
return result;
}
private static Dictionary<string, string> PortableDocumentFormatSplit(string pdfTextStripperFileName, string sourcePath, string sourceFileNamePdf)
{
Dictionary<string, string> results = new();
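// Fallback splitter: run the external PDF-Text-Stripper over the source PDF, then pair each page .pdf in the folder with its extracted .txt sibling.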
ProcessStartInfo processStartInfo = new(pdfTextStripperFileName, $"s \"{sourceFileNamePdf}\"")
{
UseShellExecute = false,
RedirectStandardError = true,
RedirectStandardOutput = true,
};
Process process = Process.Start(processStartInfo);
_ = process.WaitForExit(30000);
string text;
string checkFile;
string[] pdfFiles = Directory.GetFiles(sourcePath, "*.pdf", SearchOption.TopDirectoryOnly);
string[] textFiles = Directory.GetFiles(sourcePath, "*.txt", SearchOption.TopDirectoryOnly);
foreach (string pdfFile in pdfFiles)
{
if (pdfFile == sourceFileNamePdf)
continue;
checkFile = Path.ChangeExtension(pdfFile, ".txt");
if (!textFiles.Contains(checkFile))
continue;
text = File.ReadAllText(checkFile);
results.Add(pdfFile, text);
}
return results;
}
internal static ReadOnlyDictionary<string, string> PDF(Logistics logistics, string ghostPCLFileName, string pdfTextStripperFileName, List<FileInfo> fileInfoCollection)
{
Dictionary<string, string> results = new();
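// results maps each page PDF path to the text extracted for that page; sourceFiles tracks every artifact that is added to fileInfoCollection at the end.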
object item;
string pageText;
string pagePDFFile;
string pageTextFile;
List<string> sourceFiles = new();
string sourceFileNamePdf = ConvertSourceFileToPdf(ghostPCLFileName, logistics);
sourceFiles.Add(sourceFileNamePdf);
string sourcePath = Path.GetDirectoryName(logistics.ReportFullPath) ?? throw new Exception();
string sourceFileNameWithoutExtension = Path.GetFileNameWithoutExtension(logistics.ReportFullPath);
string[] txtFiles = Directory.GetFiles(sourcePath, $"{sourceFileNameWithoutExtension}_*.txt", SearchOption.TopDirectoryOnly);
if (txtFiles.Length != 0)
{
txtFiles = (from l in txtFiles orderby l.Length, l select l).ToArray();
foreach (string txtFile in txtFiles)
{
sourceFiles.Add(txtFile);
pageText = File.ReadAllText(txtFile);
pagePDFFile = Path.ChangeExtension(txtFile, ".pdf");
if (!File.Exists(pagePDFFile))
continue;
results.Add(pagePDFFile, pageText);
}
}
if (results.Count == 0)
{
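// No pre-split page text was found next to the report: split the PDF into single pages with PDFBox and extract text per page, reusing any page artifacts that already exist.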
try
{
java.io.File file = new(sourceFileNamePdf);
org.apache.pdfbox.util.Splitter splitter = new();
org.apache.pdfbox.pdmodel.PDDocument pdDocument = org.apache.pdfbox.pdmodel.PDDocument.load(file);
java.util.List list = splitter.split(pdDocument);
java.util.ListIterator iterator = list.listIterator();
org.apache.pdfbox.util.PDFTextStripper dataStripper = new();
for (short i = 1; i < short.MaxValue; i++)
{
if (!iterator.hasNext())
break;
item = iterator.next();
pagePDFFile = string.Concat(sourcePath, @"\", sourceFileNameWithoutExtension, "_", i, ".pdf");
pageTextFile = Path.ChangeExtension(pagePDFFile, ".txt");
if (File.Exists(pageTextFile))
{
pageText = File.ReadAllText(pageTextFile);
sourceFiles.Add(pageTextFile);
if (item is not org.apache.pdfbox.pdmodel.PDDocument pd)
continue;
pd.close();
}
else if (File.Exists(pagePDFFile))
{
org.apache.pdfbox.pdmodel.PDDocument document = org.apache.pdfbox.pdmodel.PDDocument.load(pagePDFFile);
pageText = dataStripper.getText(document);
document.close();
sourceFiles.Add(pagePDFFile);
if (item is not org.apache.pdfbox.pdmodel.PDDocument pd)
continue;
pd.close();
}
else
{
if (item is not org.apache.pdfbox.pdmodel.PDDocument pd)
continue;
pageText = dataStripper.getText(pd);
pd.save(pagePDFFile);
sourceFiles.Add(pagePDFFile);
pd.close();
File.WriteAllText(pageTextFile, pageText);
sourceFiles.Add(pageTextFile);
}
results.Add(pagePDFFile, pageText);
}
pdDocument.close();
}
catch (MissingMethodException)
{
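// The PDFBox interop surface is missing at runtime; fall back to the external PDF-Text-Stripper executable.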
if (results.Count == 0)
results = PortableDocumentFormatSplit(pdfTextStripperFileName, sourcePath, sourceFileNamePdf);
}
}
foreach (string sourceFile in sourceFiles)
fileInfoCollection.Add(new FileInfo(sourceFile));
return new(results);
}
}

View File

@ -0,0 +1,136 @@
using Adaptation.Eaf.Management.ConfigurationData.CellAutomation;
using Adaptation.Ifx.Eaf.EquipmentConnector.File.Configuration;
using Adaptation.Shared;
using Adaptation.Shared.Duplicator;
using Adaptation.Shared.Methods;
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.IO;
using System.Text.Json;
namespace Adaptation.FileHandlers.pdsf;
public class FileRead : Shared.FileRead, IFileRead
{
private readonly string _GhostPCLFileName;
private readonly string _PDFTextStripperFileName;
public FileRead(ISMTP smtp, Dictionary<string, string> fileParameter, string cellInstanceName, int? connectionCount, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList<ModelObjectParameterDefinition> modelObjectParameters, string equipmentDictionaryName, Dictionary<string, List<long>> dummyRuns, Dictionary<long, List<Shared.Metrology.WS.Results>> staticRuns, bool useCyclicalForDescription, bool isEAFHosted) :
base(new Description(), false, smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null)
{
_MinFileLength = 15;
_NullData = string.Empty;
_Logistics = new(this);
if (_FileParameter is null)
throw new Exception(cellInstanceConnectionName);
if (_ModelObjectParameterDefinitions is null)
throw new Exception(cellInstanceConnectionName);
if (_IsDuplicator)
throw new Exception(cellInstanceConnectionName);
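// GhostPCL and PDF-Text-Stripper are external executables deployed next to the adapter; fail fast if either is missing.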
_GhostPCLFileName = Path.Combine(AppContext.BaseDirectory, "gpcl6win64.exe");
if (!File.Exists(_GhostPCLFileName))
throw new Exception("Ghost PCL FileName doesn't Exist!");
_PDFTextStripperFileName = Path.Combine(AppContext.BaseDirectory, "PDF-Text-Stripper.exe");
if (!File.Exists(_PDFTextStripperFileName))
throw new Exception("PDF-Text-Stripper FileName doesn't Exist!");
if (_IsEAFHosted)
NestExistingFiles(_FileConnectorConfiguration);
}
void IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, Exception exception) => Move(extractResults);
void IFileRead.WaitForThread() => WaitForThread(thread: null, threadExceptions: null);
string IFileRead.GetEventDescription()
{
string result = _Description.GetEventDescription();
return result;
}
List<string> IFileRead.GetHeaderNames()
{
List<string> results = _Description.GetHeaderNames();
return results;
}
string[] IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, string to, string from, string resolvedFileLocation, Exception exception)
{
string[] results = Move(extractResults, to, from, resolvedFileLocation, exception);
return results;
}
JsonProperty[] IFileRead.GetDefault()
{
JsonProperty[] results = _Description.GetDefault(this, _Logistics);
return results;
}
Dictionary<string, string> IFileRead.GetDisplayNamesJsonElement()
{
Dictionary<string, string> results = _Description.GetDisplayNamesJsonElement(this);
return results;
}
List<IDescription> IFileRead.GetDescriptions(IFileRead fileRead, List<Test> tests, IProcessData processData)
{
List<IDescription> results = _Description.GetDescriptions(fileRead, _Logistics, tests, processData);
return results;
}
Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.GetExtractResult(string reportFullPath, string eventName)
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
if (string.IsNullOrEmpty(eventName))
throw new Exception();
_ReportFullPath = reportFullPath;
DateTime dateTime = DateTime.Now;
results = GetExtractResult(reportFullPath, dateTime);
if (results.Item3 is null)
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(results.Item1, Array.Empty<Test>(), JsonSerializer.Deserialize<JsonElement[]>("[]"), results.Item4);
if (results.Item3.Length > 0 && _IsEAFHosted)
WritePDSF(this, results.Item3);
UpdateLastTicksDuration(DateTime.Now.Ticks - dateTime.Ticks);
return results;
}
Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.ReExtract()
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
List<string> headerNames = _Description.GetHeaderNames();
Dictionary<string, string> keyValuePairs = _Description.GetDisplayNamesJsonElement(this);
results = ReExtract(this, headerNames, keyValuePairs);
return results;
}
#nullable enable
private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
string result;
JsonElement[] jsonElements;
Test[] tests = Array.Empty<Test>();
List<FileInfo> fileInfoCollection = new();
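// Parse the PDSF report, rebuild logistics, split the source into per-page text, and assemble the Run from those pages.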
ProcessDataStandardFormat processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath);
_Logistics = new Logistics(reportFullPath, processDataStandardFormat);
SetFileParameterLotIDToLogisticsMID();
ReadOnlyDictionary<string, string> pages = Convert.PDF(_Logistics, _GhostPCLFileName, _PDFTextStripperFileName, fileInfoCollection);
Run? run = Run.Get(_Logistics, fileInfoCollection, pages);
if (run is null)
{
jsonElements = Array.Empty<JsonElement>();
result = string.Concat("A) No Data - ", dateTime.Ticks);
results = new(result, tests, jsonElements, fileInfoCollection);
}
else
{
result = string.Join(Environment.NewLine, _Logistics.Logistics1);
jsonElements = _IsEAFHosted ? Array.Empty<JsonElement>() : ProcessDataStandardFormat.GetArray(processDataStandardFormat);
results = new(result, tests, jsonElements, fileInfoCollection);
}
return results;
}
}

View File

@ -0,0 +1,341 @@
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Linq;
using System.Text.Json.Serialization;
namespace Adaptation.FileHandlers.pdsf;
#nullable enable
public class Header
{
public Header(string date,
string recipe,
string id,
ReadOnlyCollection<WaferSummary> waferSummary,
string lPDCountMin,
string lPDCM2Min,
string areaCountMin,
string areaTotalMin,
string scratchCountMin,
string scratchTotalMin,
string sumOfDefectsMin,
string hazeRegionMin,
string hazeAverageMin,
string lPDCountMax,
string lPDCM2Max,
string areaCountMax,
string areaTotalMax,
string scratchCountMax,
string scratchTotalMax,
string sumOfDefectsMax,
string hazeRegionMax,
string hazeAverageMax,
string lPDCountAvg,
string lPDCM2Avg,
string areaCountAvg,
string areaTotalAvg,
string scratchCountAvg,
string scratchTotalAvg,
string sumOfDefectsAvg,
string hazeRegionAvg,
string hazeAverageAvg,
string lPDCountStdDev,
string lPDCM2StdDev,
string areaCountStdDev,
string areaTotalStdDev,
string scratchCountStdDev,
string scratchTotalStdDev,
string sumOfDefectsStdDev,
string hazeRegionStdDev,
string hazeAverageStdDev)
{
Date = date;
Recipe = recipe;
Id = id;
WaferSummary = waferSummary;
LPDCountMin = lPDCountMin;
LPDCM2Min = lPDCM2Min;
AreaCountMin = areaCountMin;
AreaTotalMin = areaTotalMin;
ScratchCountMin = scratchCountMin;
ScratchTotalMin = scratchTotalMin;
SumOfDefectsMin = sumOfDefectsMin;
HazeRegionMin = hazeRegionMin;
HazeAverageMin = hazeAverageMin;
LPDCountMax = lPDCountMax;
LPDCM2Max = lPDCM2Max;
AreaCountMax = areaCountMax;
AreaTotalMax = areaTotalMax;
ScratchCountMax = scratchCountMax;
ScratchTotalMax = scratchTotalMax;
SumOfDefectsMax = sumOfDefectsMax;
HazeRegionMax = hazeRegionMax;
HazeAverageMax = hazeAverageMax;
LPDCountAvg = lPDCountAvg;
LPDCM2Avg = lPDCM2Avg;
AreaCountAvg = areaCountAvg;
AreaTotalAvg = areaTotalAvg;
ScratchCountAvg = scratchCountAvg;
ScratchTotalAvg = scratchTotalAvg;
SumOfDefectsAvg = sumOfDefectsAvg;
HazeRegionAvg = hazeRegionAvg;
HazeAverageAvg = hazeAverageAvg;
LPDCountStdDev = lPDCountStdDev;
LPDCM2StdDev = lPDCM2StdDev;
AreaCountStdDev = areaCountStdDev;
AreaTotalStdDev = areaTotalStdDev;
ScratchCountStdDev = scratchCountStdDev;
ScratchTotalStdDev = scratchTotalStdDev;
SumOfDefectsStdDev = sumOfDefectsStdDev;
HazeRegionStdDev = hazeRegionStdDev;
HazeAverageStdDev = hazeAverageStdDev;
}
public string Date { get; }
public string Recipe { get; }
public string Id { get; }
public ReadOnlyCollection<WaferSummary> WaferSummary { get; }
public string LPDCountMin { get; }
public string LPDCM2Min { get; }
public string AreaCountMin { get; }
public string AreaTotalMin { get; }
public string ScratchCountMin { get; }
public string ScratchTotalMin { get; }
public string SumOfDefectsMin { get; }
public string HazeRegionMin { get; }
public string HazeAverageMin { get; }
public string LPDCountMax { get; }
public string LPDCM2Max { get; }
public string AreaCountMax { get; }
public string AreaTotalMax { get; }
public string ScratchCountMax { get; }
public string ScratchTotalMax { get; }
public string SumOfDefectsMax { get; }
public string HazeRegionMax { get; }
public string HazeAverageMax { get; }
public string LPDCountAvg { get; }
public string LPDCM2Avg { get; }
public string AreaCountAvg { get; }
public string AreaTotalAvg { get; }
public string ScratchCountAvg { get; }
public string ScratchTotalAvg { get; }
public string SumOfDefectsAvg { get; }
public string HazeRegionAvg { get; }
public string HazeAverageAvg { get; }
public string LPDCountStdDev { get; }
public string LPDCM2StdDev { get; }
public string AreaCountStdDev { get; }
public string AreaTotalStdDev { get; }
public string ScratchCountStdDev { get; }
public string ScratchTotalStdDev { get; }
public string SumOfDefectsStdDev { get; }
public string HazeRegionStdDev { get; }
public string HazeAverageStdDev { get; }
private static ReadOnlyCollection<string> FixToEolArray(string[] toEol)
{
List<string> results = new();
const int MAX_COLUMNS = 9;
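// Statistics rows have nine fixed-width columns; when values run together the whitespace split yields fewer tokens, so pad missing columns on the left and break over-wide tokens apart using the expected widths.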
if (toEol.Length >= MAX_COLUMNS)
results.AddRange(toEol);
else
{
string leftVal, rightVal;
List<string> toEolList = new(toEol);
int[] mColumnWidths = new int[MAX_COLUMNS] { 8, 6, 6, 6, 6, 7, 7, 5, 7 };
if (string.IsNullOrEmpty(toEolList[toEolList.Count - 1]))
toEolList.RemoveAt(toEolList.Count - 1);
for (int i = toEolList.Count; i < MAX_COLUMNS; i++)
toEolList.Insert(0, "");
for (int i = MAX_COLUMNS - 1; i >= 0; i--)
{
if (toEolList[i].Length > mColumnWidths[i])
{
leftVal = toEolList[i].Substring(0, toEolList[i].Length - mColumnWidths[i]);
rightVal = toEolList[i].Substring(leftVal.Length);
toEolList[i] = rightVal;
toEolList.Insert(i, leftVal);
if (string.IsNullOrEmpty(toEolList[0]))
toEolList.RemoveAt(0);
}
}
results.AddRange(toEolList);
}
return results.AsReadOnly();
}
internal static void ScanPast(string text, int[] i, string search)
{
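// Advance i[0] just past the next occurrence of search, or to the end of text when it is not found.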
int num = text.IndexOf(search, i[0]);
if (num > -1)
i[0] = num + search.Length;
else
i[0] = text.Length;
}
internal static string GetBefore(string text, int[] i, string search)
{
int num = text.IndexOf(search, i[0]);
if (num > -1)
{
string str = text.Substring(i[0], num - i[0]);
i[0] = num + search.Length;
return str.Trim();
}
string str1 = text.Substring(i[0]);
i[0] = text.Length;
return str1.Trim();
}
private static string GetBefore(string text, int[] i, string search, bool trim)
{
if (trim)
return GetBefore(text, i, search);
int num = text.IndexOf(search, i[0]);
if (num > -1)
{
string str = text.Substring(i[0], num - i[0]);
i[0] = num + search.Length;
return str;
}
string str1 = text.Substring(i[0]);
i[0] = text.Length;
return str1;
}
internal static string GetToEOL(string text, int[] i) =>
GetBefore(text, i, "\n");
private static string GetToEOL(string text, int[] i, bool trim)
{
if (trim)
return GetToEOL(text, i);
return GetBefore(text, i, "\n", false);
}
private static string? GetText(ReadOnlyDictionary<string, string> pages, Constant constant)
{
string? text;
string? headerFileName = null;
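// Prefer the page that contains the statistics block; when none does, fall back to the last page in the dictionary.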
foreach (KeyValuePair<string, string> keyValuePair in pages)
{
if (!pages.TryGetValue(keyValuePair.Key, out text))
throw new Exception();
if (!text.Contains(constant.Statistics))
continue;
headerFileName = keyValuePair.Key;
}
headerFileName ??= pages.Count == 0 ? string.Empty : pages.ElementAt(pages.Count - 1).Key;
if (pages.Count == 0 || !pages.TryGetValue(headerFileName, out text))
text = null;
return text;
}
internal static Header Get(ReadOnlyDictionary<string, string> pages, Constant constant)
{
Header? result;
string id;
string[] segmentsB;
string[] segmentsC;
int[] i = new int[] { 0 };
WaferSummary waferSummary;
List<WaferSummary> collection = new();
string? text = GetText(pages, constant);
if (string.IsNullOrEmpty(text))
throw new Exception();
ScanPast(text, i, constant.Date);
string date = GetToEOL(text, i);
ScanPast(text, i, "Recipe ID:");
string recipe = GetBefore(text, i, "LotID:");
recipe = recipe.Replace(";", "");
if (text.Contains("[]"))
id = GetBefore(text, i, "[]");
else if (text.Contains("[7]"))
id = GetBefore(text, i, "[7]");
else
id = GetBefore(text, i, "[");
ScanPast(text, i, "*");
string[] segments = text.Substring(i[0]).Split('*');
string[] split = new string[] { Environment.NewLine };
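// Each '*'-delimited segment is one wafer summary row; its first line holds the space-separated column values, and missing trailing columns default to empty.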
foreach (string segment in segments)
{
segmentsB = segment.Split(split, StringSplitOptions.None);
segmentsC = segmentsB[0].Split(' ');
waferSummary = new(id: segmentsC.Length < 1 ? string.Empty : segmentsC[0].Trim(),
lPDCount: segmentsC.Length < 2 ? string.Empty : segmentsC[1].Trim(),
lPDCM2: segmentsC.Length < 3 ? string.Empty : segmentsC[2].Trim(),
areaCount: segmentsC.Length < 4 ? string.Empty : segmentsC[3].Trim(),
areaTotal: segmentsC.Length < 5 ? string.Empty : segmentsC[4].Trim(),
scratchCount: segmentsC.Length < 6 ? string.Empty : segmentsC[5].Trim(),
scratchTotal: segmentsC.Length < 7 ? string.Empty : segmentsC[6].Trim(),
sumOfDefects: segmentsC.Length < 8 ? string.Empty : segmentsC[7].Trim(),
hazeRegion: segmentsC.Length < 9 ? string.Empty : segmentsC[8].Trim(),
hazeAverage: segmentsC.Length < 10 ? string.Empty : segmentsC[9].Trim(),
grade: segmentsC.Length < 11 ? string.Empty : segmentsC[10].Trim());
collection.Add(waferSummary);
}
ScanPast(text, i, constant.Min);
string[] preToEol1 = GetToEOL(text, i, false).Trim().Split(' ');
ReadOnlyCollection<string> toEol1 = FixToEolArray(preToEol1);
ScanPast(text, i, constant.Max);
string[] preToEol2 = GetToEOL(text, i, false).Trim().Split(' ');
ReadOnlyCollection<string> toEol2 = FixToEolArray(preToEol2);
ScanPast(text, i, constant.Average);
string[] preToEol3 = GetToEOL(text, i, false).Trim().Split(' ');
ReadOnlyCollection<string> toEol3 = FixToEolArray(preToEol3);
ScanPast(text, i, constant.StdDev);
string[] preToEol4 = GetToEOL(text, i, false).Trim().Split(' ');
ReadOnlyCollection<string> toEol4 = FixToEolArray(preToEol4);
result = new(date: date,
recipe: recipe,
id: id,
waferSummary: collection.AsReadOnly(),
lPDCountMin: toEol1[0].Trim(),
lPDCM2Min: toEol1[1].Trim(),
areaCountMin: toEol1[2].Trim(),
areaTotalMin: toEol1[3].Trim(),
scratchCountMin: toEol1[4].Trim(),
scratchTotalMin: toEol1[5].Trim(),
sumOfDefectsMin: toEol1[6].Trim(),
hazeRegionMin: toEol1[7].Trim(),
hazeAverageMin: toEol1[8].Trim(),
lPDCountMax: toEol2[0].Trim(),
lPDCM2Max: toEol2[1].Trim(),
areaCountMax: toEol2[2].Trim(),
areaTotalMax: toEol2[3].Trim(),
scratchCountMax: toEol2[4].Trim(),
scratchTotalMax: toEol2[5].Trim(),
sumOfDefectsMax: toEol2[6].Trim(),
hazeRegionMax: toEol2[7].Trim(),
hazeAverageMax: toEol2[8].Trim(),
lPDCountAvg: toEol3[0].Trim(),
lPDCM2Avg: toEol3[1].Trim(),
areaCountAvg: toEol3[2].Trim(),
areaTotalAvg: toEol3[3].Trim(),
scratchCountAvg: toEol3[4].Trim(),
scratchTotalAvg: toEol3[5].Trim(),
sumOfDefectsAvg: toEol3[6].Trim(),
hazeRegionAvg: toEol3[7].Trim(),
hazeAverageAvg: toEol3[8].Trim(),
lPDCountStdDev: toEol4[0].Trim(),
lPDCM2StdDev: toEol4[1].Trim(),
areaCountStdDev: toEol4[2].Trim(),
areaTotalStdDev: toEol4[3].Trim(),
scratchCountStdDev: toEol4[4].Trim(),
scratchTotalStdDev: toEol4[5].Trim(),
sumOfDefectsStdDev: toEol4[6].Trim(),
hazeRegionStdDev: toEol4[7].Trim(),
hazeAverageStdDev: toEol4[8].Trim());
return result;
}
}
[JsonSourceGenerationOptions(WriteIndented = true)]
[JsonSerializable(typeof(Header))]
internal partial class HeaderSourceGenerationContext : JsonSerializerContext
{
}

View File

@ -0,0 +1,193 @@
using System.Text.Json.Serialization;
namespace Adaptation.FileHandlers.pdsf;
#nullable enable
internal class Row
{
public Row(Run run, int i)
{
Index = i;
//
Date = run.Header.Date;
Recipe = run.Header.Recipe;
Id = run.Header.Id;
//
WaferId = run.Header.WaferSummary[i].Id;
LPDCount = run.Header.WaferSummary[i].LPDCount;
LPDCM2 = run.Header.WaferSummary[i].LPDCM2;
AreaCount = run.Header.WaferSummary[i].AreaCount;
AreaTotal = run.Header.WaferSummary[i].AreaTotal;
ScratchCount = run.Header.WaferSummary[i].ScratchCount;
ScratchTotal = run.Header.WaferSummary[i].ScratchTotal;
SumOfDefects = run.Header.WaferSummary[i].SumOfDefects;
HazeRegion = run.Header.WaferSummary[i].HazeRegion;
HazeAverage = run.Header.WaferSummary[i].HazeAverage;
Grade = run.Header.WaferSummary[i].Grade;
//
LPDCountMin = run.Header.LPDCountMin;
LPDCM2Min = run.Header.LPDCM2Min;
AreaCountMin = run.Header.AreaCountMin;
AreaTotalMin = run.Header.AreaTotalMin;
ScratchCountMin = run.Header.ScratchCountMin;
ScratchTotalMin = run.Header.ScratchTotalMin;
SumOfDefectsMin = run.Header.SumOfDefectsMin;
HazeRegionMin = run.Header.HazeRegionMin;
HazeAverageMin = run.Header.HazeAverageMin;
LPDCountMax = run.Header.LPDCountMax;
LPDCM2Max = run.Header.LPDCM2Max;
AreaCountMax = run.Header.AreaCountMax;
AreaTotalMax = run.Header.AreaTotalMax;
ScratchCountMax = run.Header.ScratchCountMax;
ScratchTotalMax = run.Header.ScratchTotalMax;
SumOfDefectsMax = run.Header.SumOfDefectsMax;
HazeRegionMax = run.Header.HazeRegionMax;
HazeAverageMax = run.Header.HazeAverageMax;
LPDCountAvg = run.Header.LPDCountAvg;
LPDCM2Avg = run.Header.LPDCM2Avg;
AreaCountAvg = run.Header.AreaCountAvg;
AreaTotalAvg = run.Header.AreaTotalAvg;
ScratchCountAvg = run.Header.ScratchCountAvg;
ScratchTotalAvg = run.Header.ScratchTotalAvg;
SumOfDefectsAvg = run.Header.SumOfDefectsAvg;
HazeRegionAvg = run.Header.HazeRegionAvg;
HazeAverageAvg = run.Header.HazeAverageAvg;
LPDCountStdDev = run.Header.LPDCountStdDev;
LPDCM2StdDev = run.Header.LPDCM2StdDev;
AreaCountStdDev = run.Header.AreaCountStdDev;
AreaTotalStdDev = run.Header.AreaTotalStdDev;
ScratchCountStdDev = run.Header.ScratchCountStdDev;
ScratchTotalStdDev = run.Header.ScratchTotalStdDev;
SumOfDefectsStdDev = run.Header.SumOfDefectsStdDev;
HazeRegionStdDev = run.Header.HazeRegionStdDev;
HazeAverageStdDev = run.Header.HazeAverageStdDev;
//
WaferDate = run.Wafers[i].Date;
Comments = run.Wafers[i].Comments;
Sort = run.Wafers[i].Sort;
WaferLPDCount = run.Wafers[i].LPDCount;
WaferLPDCM2 = run.Wafers[i].LPDCM2;
Bin1 = run.Wafers[i].Bin1;
Bin2 = run.Wafers[i].Bin2;
Bin3 = run.Wafers[i].Bin3;
Bin4 = run.Wafers[i].Bin4;
Bin5 = run.Wafers[i].Bin5;
Bin6 = run.Wafers[i].Bin6;
Bin7 = run.Wafers[i].Bin7;
Bin8 = run.Wafers[i].Bin8;
Mean = run.Wafers[i].Mean;
StdDev = run.Wafers[i].StdDev;
WaferAreaCount = run.Wafers[i].AreaCount;
WaferAreaTotal = run.Wafers[i].AreaTotal;
WaferScratchCount = run.Wafers[i].ScratchCount;
WaferScratchTotal = run.Wafers[i].ScratchTotal;
WaferSumOfDefects = run.Wafers[i].SumOfDefects;
WaferHazeRegion = run.Wafers[i].HazeRegion;
WaferHazeAverage = run.Wafers[i].HazeAverage;
HazePeak = run.Wafers[i].HazePeak;
Laser = run.Wafers[i].Laser;
Gain = run.Wafers[i].Gain;
Diameter = run.Wafers[i].Diameter;
Thresh = run.Wafers[i].Thresh;
Exclusion = run.Wafers[i].Exclusion;
HazeRng = run.Wafers[i].HazeRng;
Thruput = run.Wafers[i].Thruput;
WaferRecipe = run.Wafers[i].Recipe;
}
public int Index { get; }
//
public string Date { get; }
public string Recipe { get; }
public string Id { get; }
//
public string WaferId { get; }
public string LPDCount { get; }
public string LPDCM2 { get; }
public string AreaCount { get; }
public string AreaTotal { get; }
public string ScratchCount { get; }
public string ScratchTotal { get; }
public string SumOfDefects { get; }
public string HazeRegion { get; }
public string HazeAverage { get; }
public string Grade { get; }
//
public string LPDCountMin { get; }
public string LPDCM2Min { get; }
public string AreaCountMin { get; }
public string AreaTotalMin { get; }
public string ScratchCountMin { get; }
public string ScratchTotalMin { get; }
public string SumOfDefectsMin { get; }
public string HazeRegionMin { get; }
public string HazeAverageMin { get; }
public string LPDCountMax { get; }
public string LPDCM2Max { get; }
public string AreaCountMax { get; }
public string AreaTotalMax { get; }
public string ScratchCountMax { get; }
public string ScratchTotalMax { get; }
public string SumOfDefectsMax { get; }
public string HazeRegionMax { get; }
public string HazeAverageMax { get; }
public string LPDCountAvg { get; }
public string LPDCM2Avg { get; }
public string AreaCountAvg { get; }
public string AreaTotalAvg { get; }
public string ScratchCountAvg { get; }
public string ScratchTotalAvg { get; }
public string SumOfDefectsAvg { get; }
public string HazeRegionAvg { get; }
public string HazeAverageAvg { get; }
public string LPDCountStdDev { get; }
public string LPDCM2StdDev { get; }
public string AreaCountStdDev { get; }
public string AreaTotalStdDev { get; }
public string ScratchCountStdDev { get; }
public string ScratchTotalStdDev { get; }
public string SumOfDefectsStdDev { get; }
public string HazeRegionStdDev { get; }
public string HazeAverageStdDev { get; }
//
public string WaferDate { get; }
public string Comments { get; }
public string Sort { get; }
public string WaferLPDCount { get; }
public string WaferLPDCM2 { get; }
public string Bin1 { get; }
public string Bin2 { get; }
public string Bin3 { get; }
public string Bin4 { get; }
public string Bin5 { get; }
public string Bin6 { get; }
public string Bin7 { get; }
public string Bin8 { get; }
public string Mean { get; }
public string StdDev { get; }
public string WaferAreaCount { get; }
public string WaferAreaTotal { get; }
public string WaferScratchCount { get; }
public string WaferScratchTotal { get; }
public string WaferSumOfDefects { get; }
public string WaferHazeRegion { get; }
public string WaferHazeAverage { get; }
public string HazePeak { get; }
public string Laser { get; }
public string Gain { get; }
public string Diameter { get; }
public string Thresh { get; }
public string Exclusion { get; }
public string HazeRng { get; }
public string Thruput { get; }
public string WaferRecipe { get; }
}
[JsonSourceGenerationOptions(WriteIndented = true)]
[JsonSerializable(typeof(Row))]
internal partial class RowSourceGenerationContext : JsonSerializerContext
{
}

View File

@ -0,0 +1,145 @@
using Adaptation.Shared;
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.IO;
using System.Linq;
using System.Text;
using System.Text.Json;
using System.Text.Json.Serialization;
namespace Adaptation.FileHandlers.pdsf;
#nullable enable
internal class Run
{
public Header Header { get; }
public ReadOnlyCollection<Wafer> Wafers { get; }
public Run(Header header, ReadOnlyCollection<Wafer> wafers)
{
Header = header;
Wafers = wafers;
}
private static ReadOnlyCollection<Wafer> GetLastWaferForEachSlot(ReadOnlyDictionary<string, string> pages, Constant constant, Header header)
{
List<Wafer> results = new();
string id;
Wafer wafer;
ReadOnlyCollection<Wafer>? wafers;
ReadOnlyDictionary<string, ReadOnlyCollection<Wafer>> keyValuePairs = Wafer.Get(pages, constant);
ReadOnlyCollection<string> waferIds = GetWaferIds(header);
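// For each wafer id listed in the header summary, keep the last parsed wafer page whose recipe matches the header; when no page was parsed for the id, substitute an empty placeholder. Last() assumes at least one parsed page per id matches the header recipe.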
for (int i = 0; i < waferIds.Count; i++)
{
id = waferIds[i];
if (!keyValuePairs.TryGetValue(id, out wafers) || wafers.Count == 0)
wafer = Wafer.Get(id);
else
wafer = (from l in wafers where l.Recipe == header.Recipe select l).Last();
if (wafer is null)
break;
results.Add(wafer);
}
return results.AsReadOnly();
}
private static void WriteJson(Logistics logistics, List<FileInfo> fileInfoCollection, Run result)
{
FileInfo fileInfo = new($"{logistics.ReportFullPath}.run.json");
string json = JsonSerializer.Serialize(result, RunSourceGenerationContext.Default.Run);
File.WriteAllText(fileInfo.FullName, json);
File.SetLastWriteTime(fileInfo.FullName, logistics.DateTimeFromSequence);
fileInfoCollection.Add(fileInfo);
}
private static ReadOnlyCollection<string> GetLines(Logistics logistics, JsonElement[]? jsonElements)
{
List<string> results = new();
int columns = 0;
StringBuilder stringBuilder = new();
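// CSV layout: a short preamble (count, sequence, MES entity), a header row built from the first element's property names prefixed with the run timestamp column, then one data row per element starting with the run timestamp.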
results.Add($"\"Count\",{jsonElements?.Length}");
results.Add($"\"{nameof(logistics.Sequence)}\",\"{logistics.Sequence}\"");
results.Add($"\"{nameof(logistics.MesEntity)}\",\"{logistics.MesEntity}\"");
string dateTimeFromSequence = logistics.DateTimeFromSequence.ToString("MM/dd/yyyy hh:mm:ss tt");
for (int i = 0; i < jsonElements?.Length;)
{
_ = stringBuilder.Append('"').Append(nameof(logistics.DateTimeFromSequence)).Append('"').Append(',');
foreach (JsonProperty jsonProperty in jsonElements[0].EnumerateObject())
{
columns += 1;
_ = stringBuilder.Append('"').Append(jsonProperty.Name).Append('"').Append(',');
}
break;
}
if (jsonElements?.Length != 0)
_ = stringBuilder.Remove(stringBuilder.Length - 1, 1);
results.Add(stringBuilder.ToString());
for (int i = 0; i < jsonElements?.Length; i++)
{
_ = stringBuilder.Clear();
_ = stringBuilder.Append('"').Append(dateTimeFromSequence).Append('"').Append(',');
foreach (JsonProperty jsonProperty in jsonElements[i].EnumerateObject())
{
if (jsonProperty.Value.ValueKind == JsonValueKind.Object)
_ = stringBuilder.Append(',');
else if (jsonProperty.Value.ValueKind != JsonValueKind.String)
_ = stringBuilder.Append(jsonProperty.Value).Append(',');
else
_ = stringBuilder.Append('"').Append(jsonProperty.Value).Append('"').Append(',');
}
_ = stringBuilder.Remove(stringBuilder.Length - 1, 1);
results.Add(stringBuilder.ToString());
}
return results.AsReadOnly();
}
private static void WriteCommaSeparatedValues(Logistics logistics, Run run)
{
List<Row> results = new();
Row row;
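// Flatten the run into one Row per wafer (header, summary, and wafer detail side by side) and write them as CSV next to the report.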
for (int i = 0; i < run.Wafers.Count; i++)
{
row = new(run, i);
results.Add(row);
}
string json = JsonSerializer.Serialize(results);
JsonElement[]? jsonElements = JsonSerializer.Deserialize<JsonElement[]>(json);
ReadOnlyCollection<string> lines = GetLines(logistics, jsonElements);
File.WriteAllText($"{logistics.ReportFullPath}.csv", string.Join(Environment.NewLine, lines));
}
private static ReadOnlyCollection<string> GetWaferIds(Header header)
{
List<string> results = new();
foreach (WaferSummary waferSummary in header.WaferSummary)
results.Add(waferSummary.Id);
return results.AsReadOnly();
}
internal static Run? Get(Logistics logistics, List<FileInfo> fileInfoCollection, ReadOnlyDictionary<string, string> pages)
{
Run? result;
Constant constant = new();
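// Parse the header page first; when it is available, collect one wafer per slot and persist JSON and CSV snapshots alongside the report.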
Header? header = Header.Get(pages, constant);
if (header is null)
result = null;
else
{
ReadOnlyCollection<Wafer> wafers = GetLastWaferForEachSlot(pages, constant, header);
result = new(header, wafers);
WriteJson(logistics, fileInfoCollection, result);
WriteCommaSeparatedValues(logistics, result);
}
return result;
}
}
[JsonSourceGenerationOptions(WriteIndented = true)]
[JsonSerializable(typeof(Run))]
internal partial class RunSourceGenerationContext : JsonSerializerContext
{
}

View File

@ -0,0 +1,242 @@
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Linq;
using System.Text.Json.Serialization;
namespace Adaptation.FileHandlers.pdsf;
#nullable enable
public class Wafer
{
public Wafer(string date, string id, string comments, string sort, string lPDCount, string lPDCM2, string bin1, string bin2, string bin3, string bin4, string bin5, string bin6, string bin7, string bin8, string mean, string stdDev, string areaCount, string areaTotal, string scratchCount, string scratchTotal, string sumOfDefects, string hazeRegion, string hazeAverage, string hazePeak, string laser, string gain, string diameter, string thresh, string exclusion, string hazeRng, string thruput, string recipe)
{
Date = date;
Id = id;
Comments = comments;
Sort = sort;
LPDCount = lPDCount;
LPDCM2 = lPDCM2;
Bin1 = bin1;
Bin2 = bin2;
Bin3 = bin3;
Bin4 = bin4;
Bin5 = bin5;
Bin6 = bin6;
Bin7 = bin7;
Bin8 = bin8;
Mean = mean;
StdDev = stdDev;
AreaCount = areaCount;
AreaTotal = areaTotal;
ScratchCount = scratchCount;
ScratchTotal = scratchTotal;
SumOfDefects = sumOfDefects;
HazeRegion = hazeRegion;
HazeAverage = hazeAverage;
HazePeak = hazePeak;
Laser = laser;
Gain = gain;
Diameter = diameter;
Thresh = thresh;
Exclusion = exclusion;
HazeRng = hazeRng;
Thruput = thruput;
Recipe = recipe;
}
internal static Wafer Get(string id) =>
new(date: string.Empty,
id: id,
comments: string.Empty,
sort: string.Empty,
lPDCount: string.Empty,
lPDCM2: string.Empty,
bin1: string.Empty,
bin2: string.Empty,
bin3: string.Empty,
bin4: string.Empty,
bin5: string.Empty,
bin6: string.Empty,
bin7: string.Empty,
bin8: string.Empty,
mean: string.Empty,
stdDev: string.Empty,
areaCount: string.Empty,
areaTotal: string.Empty,
scratchCount: string.Empty,
scratchTotal: string.Empty,
sumOfDefects: string.Empty,
hazeRegion: string.Empty,
hazeAverage: string.Empty,
hazePeak: string.Empty,
laser: string.Empty,
gain: string.Empty,
diameter: string.Empty,
thresh: string.Empty,
exclusion: string.Empty,
hazeRng: string.Empty,
thruput: string.Empty,
recipe: string.Empty);
public string Date { get; }
public string Id { get; }
public string Comments { get; }
public string Sort { get; }
public string LPDCount { get; }
public string LPDCM2 { get; }
public string Bin1 { get; }
public string Bin2 { get; }
public string Bin3 { get; }
public string Bin4 { get; }
public string Bin5 { get; }
public string Bin6 { get; }
public string Bin7 { get; }
public string Bin8 { get; }
public string Mean { get; }
public string StdDev { get; }
public string AreaCount { get; }
public string AreaTotal { get; }
public string ScratchCount { get; }
public string ScratchTotal { get; }
public string SumOfDefects { get; }
public string HazeRegion { get; }
public string HazeAverage { get; }
public string HazePeak { get; }
public string Laser { get; }
public string Gain { get; }
public string Diameter { get; }
public string Thresh { get; }
public string Exclusion { get; }
public string HazeRng { get; }
public string Thruput { get; }
public string Recipe { get; }
internal static ReadOnlyDictionary<string, ReadOnlyCollection<Wafer>> Get(ReadOnlyDictionary<string, string> pages, Constant constant)
{
Dictionary<string, ReadOnlyCollection<Wafer>> results = new();
Wafer wafer;
string? text;
List<string> stringList;
int[] i = new int[] { 0 };
Dictionary<string, List<Wafer>> keyValuePairs = new();
foreach (KeyValuePair<string, string> keyValuePair in pages)
{
i[0] = 0;
stringList = new();
if (!pages.TryGetValue(keyValuePair.Key, out text))
throw new Exception();
if (string.IsNullOrEmpty(text) || !text.Contains(constant.Id) || text.Contains(constant.Statistics) || text.Contains(constant.DatabaseId))
continue;
Header.ScanPast(text, i, constant.Date);
string date = Header.GetToEOL(text, i);
Header.ScanPast(text, i, constant.Id);
string id = Header.GetToEOL(text, i);
if (id.Length > 5)
id = string.Concat(id.Substring(0, 5), "... - ***");
id = id.Replace("*", "");
Header.ScanPast(text, i, "Comments:");
string comments = Header.GetToEOL(text, i);
Header.ScanPast(text, i, "Sort:");
string sort = Header.GetToEOL(text, i);
Header.ScanPast(text, i, "LPD Count:");
string lPDCount = Header.GetToEOL(text, i);
Header.ScanPast(text, i, "LPD / cm2:");
string lPDCM2 = Header.GetToEOL(text, i);
while (Header.GetBefore(text, i, ":").Contains("Bin"))
stringList.Add(Header.GetToEOL(text, i));
string bin1 = stringList.Count >= 1 ? stringList[0] : string.Empty;
string bin2 = stringList.Count >= 2 ? stringList[1] : string.Empty;
string bin3 = stringList.Count >= 3 ? stringList[2] : string.Empty;
string bin4 = stringList.Count >= 4 ? stringList[3] : string.Empty;
string bin5 = stringList.Count >= 5 ? stringList[4] : string.Empty;
string bin6 = stringList.Count >= 6 ? stringList[5] : string.Empty;
string bin7 = stringList.Count >= 7 ? stringList[6] : string.Empty;
string bin8 = stringList.Count >= 8 ? stringList[7] : string.Empty;
string mean = Header.GetToEOL(text, i);
Header.ScanPast(text, i, "Std Dev:");
string stdDev = Header.GetToEOL(text, i);
Header.ScanPast(text, i, "Area Count:");
string areaCount = Header.GetToEOL(text, i);
Header.ScanPast(text, i, "Area Total:");
string areaTotal = Header.GetToEOL(text, i);
Header.ScanPast(text, i, "Scratch Count:");
string scratchCount = Header.GetToEOL(text, i);
Header.ScanPast(text, i, "Scratch Total:");
string scratchTotal = Header.GetToEOL(text, i);
Header.ScanPast(text, i, "Sum of All Defects:");
string sumOfDefects = Header.GetToEOL(text, i);
Header.ScanPast(text, i, "Haze Region:");
string hazeRegion = Header.GetToEOL(text, i);
Header.ScanPast(text, i, "Haze Average:");
string hazeAverage = Header.GetToEOL(text, i);
Header.ScanPast(text, i, "Haze Peak:");
string hazePeak = Header.GetToEOL(text, i);
Header.ScanPast(text, i, "Laser:");
string laser = Header.GetBefore(text, i, "Gain:");
string gain = Header.GetBefore(text, i, "Diameter:");
string diameter = Header.GetToEOL(text, i);
Header.ScanPast(text, i, "Thresh:");
string thresh = Header.GetBefore(text, i, "Exclusion:");
string exclusion = Header.GetToEOL(text, i);
Header.ScanPast(text, i, "Haze Rng:");
string hazeRng = Header.GetBefore(text, i, "Thruput:");
string thruput = Header.GetToEOL(text, i);
Header.ScanPast(text, i, "Recipe ID:");
string recipe = Header.GetToEOL(text, i);
wafer = new(date: date,
id: id,
comments: comments,
sort: sort,
lPDCount: lPDCount,
lPDCM2: lPDCM2,
bin1: bin1,
bin2: bin2,
bin3: bin3,
bin4: bin4,
bin5: bin5,
bin6: bin6,
bin7: bin7,
bin8: bin8,
mean: mean,
stdDev: stdDev,
areaCount: areaCount,
areaTotal: areaTotal,
scratchCount: scratchCount,
scratchTotal: scratchTotal,
sumOfDefects: sumOfDefects,
hazeRegion: hazeRegion,
hazeAverage: hazeAverage,
hazePeak: hazePeak,
laser: laser,
gain: gain,
diameter: diameter,
thresh: thresh,
exclusion: exclusion,
hazeRng: hazeRng,
thruput: thruput,
recipe: recipe);
if (!keyValuePairs.ContainsKey(id))
keyValuePairs.Add(id, new List<Wafer>());
keyValuePairs[id].Add(wafer);
}
foreach (KeyValuePair<string, List<Wafer>> keyValuePair in keyValuePairs)
results.Add(keyValuePair.Key, keyValuePair.Value.AsReadOnly());
return new(results);
}
}
[JsonSourceGenerationOptions(WriteIndented = true)]
[JsonSerializable(typeof(Wafer))]
internal partial class WaferSourceGenerationContext : JsonSerializerContext
{
}

View File

@ -0,0 +1,43 @@
using System.Text.Json.Serialization;
namespace Adaptation.FileHandlers.pdsf;
#nullable enable
public class WaferSummary
{
public WaferSummary(string id, string lPDCount, string lPDCM2, string areaCount, string areaTotal, string scratchCount, string scratchTotal, string sumOfDefects, string hazeRegion, string hazeAverage, string grade)
{
Id = id;
LPDCount = lPDCount;
LPDCM2 = lPDCM2;
AreaCount = areaCount;
AreaTotal = areaTotal;
ScratchCount = scratchCount;
ScratchTotal = scratchTotal;
SumOfDefects = sumOfDefects;
HazeRegion = hazeRegion;
HazeAverage = hazeAverage;
Grade = grade;
}
public string Id { get; }
public string LPDCount { get; }
public string LPDCM2 { get; }
public string AreaCount { get; }
public string AreaTotal { get; }
public string ScratchCount { get; }
public string ScratchTotal { get; }
public string SumOfDefects { get; }
public string HazeRegion { get; }
public string HazeAverage { get; }
public string Grade { get; }
}
[JsonSourceGenerationOptions(WriteIndented = true)]
[JsonSerializable(typeof(WaferSummary))]
internal partial class WaferSummarySourceGenerationContext : JsonSerializerContext
{
}

View File

@ -226,9 +226,9 @@ public class MonIn : IMonIn, IDisposable
{ {
StringBuilder stringBuilder = new(); StringBuilder stringBuilder = new();
if (string.IsNullOrEmpty(subresource)) if (string.IsNullOrEmpty(subresource))
_ = stringBuilder.AppendFormat(_CultureInfo, "> {0} {1} \"{2}\" \"{3}\" {4} \n{5}", site.Trim(), timeStamp.HasValue ? GetDateTimeNowAsPosix(timeStamp.Value) : (object)"now", resource.Trim(), stateName.Trim(), state.Trim(), description.Trim()); _ = stringBuilder.AppendFormat(_CultureInfo, "> {0} {1} \"{2}\" \"{3}\" {4} \n{5}", site.Trim(), timeStamp.HasValue ? GetDateTimeNowAsPosix(timeStamp.Value) : "now", resource.Trim(), stateName.Trim(), state.Trim(), description.Trim());
else else
_ = stringBuilder.AppendFormat(_CultureInfo, "> {0} {1} \"{2}\" \"{3}\" \"{4}\" {5} \n{6}", site.Trim(), timeStamp.HasValue ? GetDateTimeNowAsPosix(timeStamp.Value) : (object)"now", resource.Trim(), subresource.Trim(), stateName.Trim(), state.Trim(), description.Trim()); _ = stringBuilder.AppendFormat(_CultureInfo, "> {0} {1} \"{2}\" \"{3}\" \"{4}\" {5} \n{6}", site.Trim(), timeStamp.HasValue ? GetDateTimeNowAsPosix(timeStamp.Value) : "now", resource.Trim(), subresource.Trim(), stateName.Trim(), state.Trim(), description.Trim());
return stringBuilder.ToString(); return stringBuilder.ToString();
} }
@ -247,14 +247,14 @@ public class MonIn : IMonIn, IDisposable
if (string.IsNullOrEmpty(subresource)) if (string.IsNullOrEmpty(subresource))
{ {
if (unit.Equals(string.Empty) && !interval.HasValue) if (unit.Equals(string.Empty) && !interval.HasValue)
_ = stringBuilder.AppendFormat(_CultureInfo, "> {0} {1} \"{2}\" \"{3}\" {4} \n{5}", site.Trim(), timeStamp.HasValue ? GetDateTimeNowAsPosix(timeStamp.Value) : (object)"now", resource.Trim(), performanceName.Trim(), value, description.Trim()); _ = stringBuilder.AppendFormat(_CultureInfo, "> {0} {1} \"{2}\" \"{3}\" {4} \n{5}", site.Trim(), timeStamp.HasValue ? GetDateTimeNowAsPosix(timeStamp.Value) : "now", resource.Trim(), performanceName.Trim(), value, description.Trim());
else else
_ = stringBuilder.AppendFormat(_CultureInfo, "> {0} {1} \"{2}\" \"{3}\" {4} {5} {{interval={6}, unit={7}}}\n", site.Trim(), timeStamp.HasValue ? GetDateTimeNowAsPosix(timeStamp.Value) : (object)"now", resource.Trim(), performanceName.Trim(), value, description.Trim(), interval.HasValue ? interval.Value.ToString() : (object)string.Empty, unit.Trim()); _ = stringBuilder.AppendFormat(_CultureInfo, "> {0} {1} \"{2}\" \"{3}\" {4} {5} {{interval={6}, unit={7}}}\n", site.Trim(), timeStamp.HasValue ? GetDateTimeNowAsPosix(timeStamp.Value) : "now", resource.Trim(), performanceName.Trim(), value, description.Trim(), interval.HasValue ? interval.Value.ToString() : string.Empty, unit.Trim());
} }
else if (unit.Equals(string.Empty) && !interval.HasValue) else if (unit.Equals(string.Empty) && !interval.HasValue)
_ = stringBuilder.AppendFormat(_CultureInfo, "> {0} {1} \"{2}\" \"{3}\" \"{4}\" {5} \n{6}", site.Trim(), timeStamp.HasValue ? GetDateTimeNowAsPosix(timeStamp.Value) : (object)"now", resource.Trim(), subresource.Trim(), performanceName.Trim(), value, description.Trim()); _ = stringBuilder.AppendFormat(_CultureInfo, "> {0} {1} \"{2}\" \"{3}\" \"{4}\" {5} \n{6}", site.Trim(), timeStamp.HasValue ? GetDateTimeNowAsPosix(timeStamp.Value) : "now", resource.Trim(), subresource.Trim(), performanceName.Trim(), value, description.Trim());
else else
_ = stringBuilder.AppendFormat(_CultureInfo, "> {0} {1} \"{2}\" \"{3}\" \"{4}\" {5} {6} {{interval={7}, unit={8}}}\n", site.Trim(), timeStamp.HasValue ? GetDateTimeNowAsPosix(timeStamp.Value) : (object)"now", resource.Trim(), subresource.Trim(), performanceName.Trim(), value, description.Trim(), interval.HasValue ? interval.Value.ToString() : (object)string.Empty, unit.Trim()); _ = stringBuilder.AppendFormat(_CultureInfo, "> {0} {1} \"{2}\" \"{3}\" \"{4}\" {5} {6} {{interval={7}, unit={8}}}\n", site.Trim(), timeStamp.HasValue ? GetDateTimeNowAsPosix(timeStamp.Value) : "now", resource.Trim(), subresource.Trim(), performanceName.Trim(), value, description.Trim(), interval.HasValue ? interval.Value.ToString() : string.Empty, unit.Trim());
return stringBuilder.ToString(); return stringBuilder.ToString();
} }

View File

@ -87,16 +87,16 @@
</None> </None>
</ItemGroup> </ItemGroup>
<ItemGroup> <ItemGroup>
<None Condition="'$(Configuration)' == 'Debug'" Include="\\mestsa003.infineon.com\EC_EAFRepository\Staging\DeploymentStorage\GhostPCL\gpcl6win64\gpcl6dll64.dll"> <None Condition="'$(Configuration)' == 'Debug'" Include="D:\EAF-Mesa-Integration\copy\GhostPCL\gpcl6win64\gpcl6dll64.dll">
<CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory> <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
</None> </None>
<None Condition="'$(Configuration)' == 'Debug'" Include="\\mestsa003.infineon.com\EC_EAFRepository\Staging\DeploymentStorage\GhostPCL\gpcl6win64\gpcl6win64.exe"> <None Condition="'$(Configuration)' == 'Debug'" Include="D:\EAF-Mesa-Integration\copy\GhostPCL\gpcl6win64\gpcl6win64.exe">
<CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory> <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
</None> </None>
<None Condition="'$(Configuration)' == 'Release'" Include="\\mesfs.infineon.com\EC_EAFRepository\Staging\DeploymentStorage\GhostPCL\gpcl6win64\gpcl6dll64.dll"> <None Condition="'$(Configuration)' == 'Release'" Include="D:\EAF-Mesa-Integration\copy\GhostPCL\gpcl6win64\gpcl6dll64.dll">
<CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory> <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
</None> </None>
<None Condition="'$(Configuration)' == 'Release'" Include="\\mesfs.infineon.com\EC_EAFRepository\Staging\DeploymentStorage\GhostPCL\gpcl6win64\gpcl6win64.exe"> <None Condition="'$(Configuration)' == 'Release'" Include="D:\EAF-Mesa-Integration\copy\GhostPCL\gpcl6win64\gpcl6win64.exe">
<CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory> <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
</None> </None>
</ItemGroup> </ItemGroup>

View File

@ -3,6 +3,7 @@ using System;
using System.Collections.Generic; using System.Collections.Generic;
using System.Linq; using System.Linq;
using System.Text.Json; using System.Text.Json;
using System.Text.Json.Serialization;
namespace Adaptation.Shared.Duplicator; namespace Adaptation.Shared.Duplicator;
@ -178,4 +179,16 @@ public class Description : IDescription, Properties.IDescription
internal static string GetDateFormat() => "MM/dd/yyyy hh:mm:ss tt"; internal static string GetDateFormat() => "MM/dd/yyyy hh:mm:ss tt";
}
[JsonSourceGenerationOptions(WriteIndented = true, NumberHandling = JsonNumberHandling.AllowReadingFromString | JsonNumberHandling.WriteAsString)]
[JsonSerializable(typeof(Description))]
internal partial class SharedDescriptionSourceGenerationContext : JsonSerializerContext
{
}
[JsonSourceGenerationOptions(WriteIndented = true, NumberHandling = JsonNumberHandling.AllowReadingFromString | JsonNumberHandling.WriteAsString)]
[JsonSerializable(typeof(Description[]))]
internal partial class SharedDescriptionArraySourceGenerationContext : JsonSerializerContext
{
} }

View File

@ -9,7 +9,6 @@ using System.IO;
using System.Linq; using System.Linq;
using System.Text; using System.Text;
using System.Text.Json; using System.Text.Json;
using System.Text.Json.Serialization;
using System.Threading; using System.Threading;
namespace Adaptation.Shared; namespace Adaptation.Shared;
@ -383,17 +382,24 @@ public class FileRead : Properties.IFileRead
else else
{ {
string[] files; string[] files;
string logisticsSequence = _Logistics.Sequence.ToString(); string[] directories;
string[] directories = Directory.GetDirectories(jobIdDirectory, $"*{logisticsSequence}*", SearchOption.TopDirectoryOnly); string logisticsSequence;
foreach (string directory in directories) for (int i = 0; i < 10; i++)
{ {
files = Directory.GetFiles(directory, "*", SearchOption.TopDirectoryOnly); logisticsSequence = (_Logistics.Sequence + -i).ToString();
if (files.Length == 0) directories = Directory.GetDirectories(jobIdDirectory, $"*{logisticsSequence}*", SearchOption.TopDirectoryOnly);
continue; foreach (string directory in directories)
results.Add(directory); {
files = Directory.GetFiles(directory, "*", SearchOption.TopDirectoryOnly);
if (files.Length == 0)
continue;
results.Add(directory);
}
if (results.Count == 1)
break;
} }
} }
if ((results is null) || results.Count != 1) if (results.Count != 1)
throw new Exception("Didn't find directory by logistics sequence"); throw new Exception("Didn't find directory by logistics sequence");
return results.ToArray(); return results.ToArray();
} }
@ -440,12 +446,13 @@ public class FileRead : Properties.IFileRead
{ {
List<Properties.IDescription> results = new(); List<Properties.IDescription> results = new();
Duplicator.Description description; Duplicator.Description description;
JsonSerializerOptions jsonSerializerOptions = new() { NumberHandling = JsonNumberHandling.AllowReadingFromString | JsonNumberHandling.WriteAsString };
foreach (JsonElement jsonElement in jsonElements) foreach (JsonElement jsonElement in jsonElements)
{ {
if (jsonElement.ValueKind != JsonValueKind.Object) if (jsonElement.ValueKind != JsonValueKind.Object)
throw new Exception(); throw new Exception();
description = JsonSerializer.Deserialize<Duplicator.Description>(jsonElement.ToString(), jsonSerializerOptions); description = JsonSerializer.Deserialize(jsonElement.ToString(), Duplicator.SharedDescriptionSourceGenerationContext.Default.Description);
if (description is null)
continue;
results.Add(description); results.Add(description);
} }
return results; return results;
@ -478,27 +485,14 @@ public class FileRead : Properties.IFileRead
} }
} }
protected void WritePDSF(IFileRead fileRead, JsonElement[] jsonElements) protected static void WritePDSF(IFileRead fileRead, JsonElement[] jsonElements)
{ {
string directory; #pragma warning disable CA1510
string day = $"{_Logistics.DateTimeFromSequence:yyyy-MM-dd}"; if (fileRead is null)
string weekOfYear = _Calendar.GetWeekOfYear(_Logistics.DateTimeFromSequence, CalendarWeekRule.FirstDay, DayOfWeek.Sunday).ToString("00"); throw new ArgumentNullException(nameof(fileRead));
string weekDirectory = $"{_Logistics.DateTimeFromSequence:yyyy}_Week_{weekOfYear}"; if (jsonElements is null)
if (!_CellInstanceConnectionName.StartsWith(_CellInstanceName) && _CellInstanceConnectionNameBase == _EquipmentType) throw new ArgumentNullException(nameof(jsonElements));
directory = Path.Combine(_TracePath, _EquipmentType, "Target", weekDirectory, day, _CellInstanceName, _CellInstanceConnectionName); #pragma warning restore CA1510
else
directory = Path.Combine(_TracePath, _EquipmentType, "Source", weekDirectory, day, _CellInstanceName, _CellInstanceConnectionName);
if (!Directory.Exists(directory))
_ = Directory.CreateDirectory(directory);
string file = Path.Combine(directory, string.Concat(_Logistics.MesEntity, "_", _Logistics.Sequence, ".ipdsf"));
string lines = ProcessDataStandardFormat.GetPDSFText(fileRead, _Logistics, jsonElements, logisticsText: string.Empty);
File.WriteAllText(file, lines);
if (_Logistics.TotalSecondsSinceLastWriteTimeFromSequence > 600)
{
try
{ File.SetLastWriteTime(file, _Logistics.DateTimeFromSequence); }
catch (Exception) { }
}
} }
protected void WaitForThread(Thread thread, List<Exception> threadExceptions) protected void WaitForThread(Thread thread, List<Exception> threadExceptions)

View File

@ -2,12 +2,14 @@ using Adaptation.Shared.Methods;
using System; using System;
using System.Collections.Generic; using System.Collections.Generic;
using System.Collections.ObjectModel; using System.Collections.ObjectModel;
using System.Diagnostics;
using System.Globalization; using System.Globalization;
using System.IO; using System.IO;
using System.Linq; using System.Linq;
using System.Text; using System.Text;
using System.Text.Json; using System.Text.Json;
using System.Text.Json.Serialization; using System.Text.Json.Serialization;
using System.Text.RegularExpressions;
namespace Adaptation.Shared; namespace Adaptation.Shared;
@ -136,6 +138,7 @@ internal class ProcessDataStandardFormat
internal static ProcessDataStandardFormat GetProcessDataStandardFormat(string reportFullPath, string[]? lines = null, int columnsLine = 6) internal static ProcessDataStandardFormat GetProcessDataStandardFormat(string reportFullPath, string[]? lines = null, int columnsLine = 6)
{ {
ProcessDataStandardFormat result; ProcessDataStandardFormat result;
long? sequence;
string segment; string segment;
string[] segments; string[] segments;
bool addToFooter = false; bool addToFooter = false;
@ -184,15 +187,27 @@ internal class ProcessDataStandardFormat
break; break;
} }
} }
string? linesOne = lines.Length > 0 && body.Count == 0 && columns.Count == 0 ? lines[1] : null; string? linesOne = lines.Length > 1 && body.Count == 0 && columns.Count == 0 ? lines[1] : null;
logistics = GetLogistics(footer, linesOne: linesOne); logistics = GetLogistics(footer, linesOne: linesOne);
if (logistics.Count == 0)
sequence = null;
else
{
segments = logistics[0].Split(new string[] { "SEQUENCE=" }, StringSplitOptions.None);
sequence = segments.Length < 2 || !long.TryParse(segments[1].Split(';')[0], out long s) ? null : s;
}
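// No usable SEQUENCE= entry in the logistics footer: fall back to the report file's last-write ticks.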
if (sequence is null && !string.IsNullOrEmpty(reportFullPath))
{
FileInfo fileInfo = new(reportFullPath);
sequence = fileInfo.LastWriteTime.Ticks;
}
result = new(body: body.AsReadOnly(), result = new(body: body.AsReadOnly(),
columns: columns.AsReadOnly(), columns: columns.AsReadOnly(),
footer: footer.AsReadOnly(), footer: footer.AsReadOnly(),
header: header.AsReadOnly(), header: header.AsReadOnly(),
inputPDSF: null, inputPDSF: null,
logistics: logistics, logistics: logistics,
sequence: null); sequence: sequence);
return result; return result;
} }
@ -214,19 +229,19 @@ internal class ProcessDataStandardFormat
return results.AsReadOnly(); return results.AsReadOnly();
} }
internal static ProcessDataStandardFormat GetProcessDataStandardFormat(string reportFullPath, ProcessDataStandardFormatMapping pdsfMapping) internal static ProcessDataStandardFormat GetProcessDataStandardFormat(string reportFullPath, ProcessDataStandardFormatMapping processDataStandardFormatMapping)
{ {
ProcessDataStandardFormat result; ProcessDataStandardFormat result;
const int columnsLine = 6; const int columnsLine = 6;
FileInfo fileInfo = new(reportFullPath); FileInfo fileInfo = new(reportFullPath);
ProcessDataStandardFormat processDataStandardFormat = GetProcessDataStandardFormat(fileInfo.LastWriteTime, columnsLine, fileInfo.FullName, lines: null); ProcessDataStandardFormat processDataStandardFormat = GetProcessDataStandardFormat(fileInfo.LastWriteTime, columnsLine, fileInfo.FullName, lines: null);
JsonElement[]? jsonElements = pdsfMapping.OldColumnNames.Count != pdsfMapping.ColumnIndices.Count ? null : GetFullArray(processDataStandardFormat); JsonElement[]? jsonElements = processDataStandardFormatMapping.OldColumnNames.Count == 0 ? null : GetFullArray(processDataStandardFormat);
JsonProperty[]? jsonProperties = jsonElements is null || jsonElements.Length == 0 ? null : jsonElements[0].EnumerateObject().ToArray(); JsonProperty[]? jsonProperties = jsonElements is null || jsonElements.Length == 0 ? null : jsonElements[0].EnumerateObject().ToArray();
if (jsonElements is null || jsonProperties is null || jsonProperties.Length != pdsfMapping.NewColumnNames.Count) if (jsonElements is null || jsonProperties is null || jsonProperties.Length != processDataStandardFormatMapping.NewColumnNames.Count)
result = processDataStandardFormat; result = processDataStandardFormat;
else else
{ {
result = GetProcessDataStandardFormat(pdsfMapping, jsonElements, processDataStandardFormat); result = GetProcessDataStandardFormat(processDataStandardFormatMapping, jsonElements, processDataStandardFormat);
if (result.Sequence is null || result.Columns.Count == 0 || result.Body.Count == 0 || result.Logistics.Count == 0) if (result.Sequence is null || result.Columns.Count == 0 || result.Body.Count == 0 || result.Logistics.Count == 0)
result = processDataStandardFormat; result = processDataStandardFormat;
} }
@ -236,7 +251,7 @@ internal class ProcessDataStandardFormat
private static ProcessDataStandardFormat GetProcessDataStandardFormat(DateTime lastWriteTime, int columnsLine, string path, string[]? lines) private static ProcessDataStandardFormat GetProcessDataStandardFormat(DateTime lastWriteTime, int columnsLine, string path, string[]? lines)
{ {
ProcessDataStandardFormat result; ProcessDataStandardFormat result;
long sequence; long? sequence;
string[] segments; string[] segments;
bool addToFooter = false; bool addToFooter = false;
List<string> body = new(); List<string> body = new();
@ -268,12 +283,13 @@ internal class ProcessDataStandardFormat
} }
logistics = GetLogistics(footer, linesOne: null); logistics = GetLogistics(footer, linesOne: null);
if (logistics.Count == 0) if (logistics.Count == 0)
sequence = lastWriteTime.Ticks; sequence = null;
else else
{ {
segments = logistics[0].Split(new string[] { "SEQUENCE=" }, StringSplitOptions.None); segments = logistics[0].Split(new string[] { "SEQUENCE=" }, StringSplitOptions.None);
sequence = segments.Length < 2 || !long.TryParse(segments[1].Split(';')[0], out long s) ? lastWriteTime.Ticks : s; sequence = segments.Length < 2 || !long.TryParse(segments[1].Split(';')[0], out long s) ? null : s;
} }
sequence ??= lastWriteTime.Ticks;
result = new(body: body.AsReadOnly(), result = new(body: body.AsReadOnly(),
columns: new(columns), columns: new(columns),
footer: footer.AsReadOnly(), footer: footer.AsReadOnly(),
@ -302,7 +318,7 @@ internal class ProcessDataStandardFormat
segments = bodyLine.Split('\t').ToList(); segments = bodyLine.Split('\t').ToList();
for (int c = 0; c < segments.Count; c++) for (int c = 0; c < segments.Count; c++)
{ {
value = segments[c].Replace("\"", "\\\"").Replace("\\", "\\\\"); value = segments[c].Replace("\\", "\\\\").Replace("\"", "\\\"");
_ = stringBuilder.Append('"').Append(processDataStandardFormat.Columns[c]).Append("\":\"").Append(value).Append("\","); _ = stringBuilder.Append('"').Append(processDataStandardFormat.Columns[c]).Append("\":\"").Append(value).Append("\",");
} }
_ = stringBuilder.Remove(stringBuilder.Length - 1, 1); _ = stringBuilder.Remove(stringBuilder.Length - 1, 1);
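The escaping order is swapped here because escaping quotes first and backslashes second re-escapes the backslash that was just inserted in front of each quote, producing invalid JSON. A small standalone illustration (values are made up):

string raw = "path\\file \"x\"";                                  // content: path\file "x"
string wrong = raw.Replace("\"", "\\\"").Replace("\\", "\\\\");   // content: path\\file \\"x\\"  (the quote ends up unescaped)
string right = raw.Replace("\\", "\\\\").Replace("\"", "\\\"");   // content: path\\file \"x\"    (valid JSON escaping)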
@ -321,12 +337,14 @@ internal class ProcessDataStandardFormat
int column; int column;
string value; string value;
JsonProperty jsonProperty; JsonProperty jsonProperty;
List<string> debug = new();
List<string> values = new(); List<string> values = new();
List<string> results = new(); List<string> results = new();
JsonProperty[] jsonProperties; JsonProperty[] jsonProperties;
List<string> unknownColumns = new(); List<string> unknownColumns = new();
for (int i = 0; i < jsonElements.Length; i++) for (int i = 0; i < jsonElements.Length; i++)
{ {
debug.Clear();
values.Clear(); values.Clear();
if (jsonElements[i].ValueKind != JsonValueKind.Object) if (jsonElements[i].ValueKind != JsonValueKind.Object)
{ {
@ -340,16 +358,22 @@ internal class ProcessDataStandardFormat
{ {
column = processDataStandardFormatMapping.ColumnIndices[c]; column = processDataStandardFormatMapping.ColumnIndices[c];
if (column == -1) if (column == -1)
{
value = processDataStandardFormatMapping.OldColumnNames[c]; value = processDataStandardFormatMapping.OldColumnNames[c];
debug.Add($"<Item C=-01 Name=\"{value}\" DataType=\"8\" XmlType=\"1\" XPath=\"//records/record/{value}\" />");
}
else else
{ {
jsonProperty = jsonProperties[column]; jsonProperty = jsonProperties[column];
value = jsonProperty.Value.ToString(); value = jsonProperty.Value.ToString();
debug.Add($"<Item C={column + 2:000} Name=\"{processDataStandardFormatMapping.OldColumnNames[c]}\" DataType=\"8\" XmlType=\"1\" XPath=\"//records/record/{jsonProperty.Name}\" />");
} }
values.Add(value); values.Add(value);
} }
results.Add(string.Join("\t", values)); results.Add(string.Join("\t", values));
} }
if (Debugger.IsAttached)
File.WriteAllText("../../.txt", string.Join(Environment.NewLine, debug.OrderBy(l => l)));
result = new(body: new(results), result = new(body: new(results),
columns: processDataStandardFormatMapping.OldColumnNames, columns: processDataStandardFormatMapping.OldColumnNames,
footer: processDataStandardFormat.Footer, footer: processDataStandardFormat.Footer,
@ -364,7 +388,6 @@ internal class ProcessDataStandardFormat
{ {
if (processDataStandardFormat.InputPDSF is null) if (processDataStandardFormat.InputPDSF is null)
throw new NullReferenceException(nameof(processDataStandardFormat.InputPDSF)); throw new NullReferenceException(nameof(processDataStandardFormat.InputPDSF));
#pragma warning disable CA1845, IDE0057
string result; string result;
string line; string line;
string value; string value;
@ -378,19 +401,27 @@ internal class ProcessDataStandardFormat
break; break;
for (int c = 0; c < segments.Length; c++) for (int c = 0; c < segments.Length; c++)
{ {
value = segments[c].Replace("\"", "\\\"").Replace("\\", "\\\\"); value = segments[c].Replace("\\", "\\\\").Replace("\"", "\\\"");
line += string.Concat('"', processDataStandardFormat.InputPDSF.Columns[c].Trim('"'), '"', ':', '"', value, '"', ','); line += string.Concat('"', processDataStandardFormat.InputPDSF.Columns[c].Trim('"'), '"', ':', '"', value, '"', ',');
} }
line = string.Concat(line.Substring(0, line.Length - 1), '}'); line = string.Concat(line.Substring(0, line.Length - 1), '}');
lines.Add(line); lines.Add(line);
} }
string? json = null;
if (processDataStandardFormat.Footer is not null && processDataStandardFormat.Footer.Count > 0)
{
Dictionary<string, string> footerKeyValuePairs = GetFooterKeyValuePairs(processDataStandardFormat.Footer);
Dictionary<string, Dictionary<string, string>> logisticKeyValuePairs = GetLogisticKeyValuePairs(processDataStandardFormat.Footer, footerKeyValuePairs);
json = JsonSerializer.Serialize(logisticKeyValuePairs, DictionaryStringDictionaryStringStringSourceGenerationContext.Default.DictionaryStringDictionaryStringString);
}
string footerText = string.IsNullOrEmpty(json) || json == "{}" ? string.Empty : $",{Environment.NewLine}\"PDSF\":{Environment.NewLine}{json}";
result = string.Concat( result = string.Concat(
'{', '{',
Environment.NewLine, Environment.NewLine,
'"', '"',
"Count", "Count",
'"', '"',
": ", ": ",
processDataStandardFormat.Body.Count, processDataStandardFormat.Body.Count,
',', ',',
Environment.NewLine, Environment.NewLine,
@ -409,17 +440,95 @@ internal class ProcessDataStandardFormat
'"', '"',
"Sequence", "Sequence",
'"', '"',
": ", ": ",
processDataStandardFormat.Sequence, processDataStandardFormat.Sequence,
Environment.NewLine, Environment.NewLine,
footerText,
Environment.NewLine,
'}'); '}');
return result; return result;
#pragma warning restore CA1845, IDE0057 }
private static Dictionary<string, string> GetFooterKeyValuePairs(ReadOnlyCollection<string> footerLines)
{
Dictionary<string, string> results = new();
string[] segments;
foreach (string footerLine in footerLines)
{
segments = footerLine.Split('\t');
if (segments.Length != 2 || string.IsNullOrEmpty(segments[1].Trim()))
{
continue;
}
if (segments[1].Contains(';'))
{
continue;
}
else
{
if (results.ContainsKey(segments[0]))
{
continue;
}
results.Add(segments[0], segments[1]);
}
}
return results;
}
private static Dictionary<string, Dictionary<string, string>> GetLogisticKeyValuePairs(ReadOnlyCollection<string> footerLines, Dictionary<string, string> footerKeyValuePairs)
{
Dictionary<string, Dictionary<string, string>> results = new();
string[] segments;
string[] subSegments;
string[] subSubSegments;
Dictionary<string, string>? keyValue;
results.Add("Footer", footerKeyValuePairs);
foreach (string footerLine in footerLines)
{
segments = footerLine.Split('\t');
if (segments.Length != 2 || string.IsNullOrEmpty(segments[1].Trim()))
{
continue;
}
if (!segments[1].Contains(';') || !segments[1].Contains('='))
{
continue;
}
else
{
subSegments = segments[1].Split(';');
if (subSegments.Length < 1)
{
continue;
}
if (!results.TryGetValue(segments[0], out keyValue))
{
results.Add(segments[0], new());
if (!results.TryGetValue(segments[0], out keyValue))
{
throw new Exception();
}
}
foreach (string segment in subSegments)
{
subSubSegments = segment.Split('=');
if (subSubSegments.Length != 2)
{
continue;
}
keyValue.Add(subSubSegments[0], subSubSegments[1]);
}
}
}
return results;
} }
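GetFooterKeyValuePairs keeps only simple two-column footer lines, while GetLogisticKeyValuePairs expands the semicolon/equals lists into a nested dictionary keyed by the footer label, with the simple pairs grouped under "Footer". A sketch of how one hypothetical logistics footer line expands (field names are illustrative):

string footerLine = "LOGISTICS_1\tA_JOBID=T1;SEQUENCE=638918057133464542";
string[] segments = footerLine.Split('\t');              // ["LOGISTICS_1", "A_JOBID=T1;SEQUENCE=638918057133464542"]
Dictionary<string, string> keyValue = new();
foreach (string segment in segments[1].Split(';'))
{
    string[] pair = segment.Split('=');
    if (pair.Length == 2)
        keyValue.Add(pair[0], pair[1]);                  // { "A_JOBID": "T1", "SEQUENCE": "638918057133464542" }
}
// GetJson then appends the serialized result as a trailing "PDSF" property:
// { "Footer": { ...simple pairs... }, "LOGISTICS_1": { "A_JOBID": "T1", "SEQUENCE": "638918057133464542" } }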
internal static void Write(string path, ProcessDataStandardFormat processDataStandardFormat, List<Metrology.WS.Results>? wsResults) internal static void Write(string path, ProcessDataStandardFormat processDataStandardFormat, List<Metrology.WS.Results>? wsResults)
{ {
List<string> results = new(); List<string> results = new();
if (processDataStandardFormat.InputPDSF is null)
throw new NullReferenceException(nameof(processDataStandardFormat.InputPDSF));
if (processDataStandardFormat.Sequence is null) if (processDataStandardFormat.Sequence is null)
throw new NullReferenceException(nameof(processDataStandardFormat.Sequence)); throw new NullReferenceException(nameof(processDataStandardFormat.Sequence));
string endOffset = "E#######T"; string endOffset = "E#######T";
@ -457,25 +566,25 @@ internal class ProcessDataStandardFormat
} }
} }
results.Add("END_HEADER"); results.Add("END_HEADER");
if (processDataStandardFormat.InputPDSF is not null) results.Add(string.Empty);
{ List<char> hyphens = new();
results.Add(string.Empty); results.AddRange(processDataStandardFormat.InputPDSF.Header.Select(l => $"|{l.Replace('\t', '|')}|"));
List<char> hyphens = new(); results.Add(string.Empty);
results.AddRange(processDataStandardFormat.InputPDSF.Header.Select(l => l.Replace('\t', '|'))); results.Add($"|{string.Join("|", processDataStandardFormat.InputPDSF.Columns)}|");
results.Add(string.Empty); for (int i = 0; i < processDataStandardFormat.InputPDSF.Columns.Count; i++)
results.Add($"|{string.Join("|", processDataStandardFormat.InputPDSF.Columns)}|"); hyphens.Add('-');
for (int i = 0; i < processDataStandardFormat.InputPDSF.Columns.Count; i++) results.Add($"|{string.Join("|", hyphens)}|");
hyphens.Add('-'); results.AddRange(processDataStandardFormat.InputPDSF.Body.Select(l => $"|{l.Replace('\t', '|')}|"));
results.Add($"|{string.Join("|", hyphens)}|"); results.Add(string.Empty);
results.AddRange(processDataStandardFormat.InputPDSF.Body.Select(l => l.Replace('\t', '|'))); results.AddRange(processDataStandardFormat.InputPDSF.Footer.Select(l => $"|{l.Replace('\t', '|')}|"));
results.Add(string.Empty); results.Add(string.Empty);
results.AddRange(processDataStandardFormat.InputPDSF.Footer.Select(l => l.Replace('\t', '|'))); string xml = GetXml(processDataStandardFormat);
results.Add(string.Empty); results.Add(xml);
results.Add("EOF"); results.Add(string.Empty);
results.Add(string.Empty); results.Add("EOF");
string json = GetJson(processDataStandardFormat); results.Add(string.Empty);
results.Add(json); string json = GetJson(processDataStandardFormat);
} results.Add(json);
File.WriteAllText(path, string.Join(Environment.NewLine, results)); File.WriteAllText(path, string.Join(Environment.NewLine, results));
} }
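With this change Write always expects an InputPDSF (it now throws otherwise) and appends the raw sections after the header block: the pipe-wrapped header, columns, body and footer, the XML records from GetXml, an EOF marker, and the JSON summary from GetJson. A rough sketch of the emitted layout, placeholder values shown as comments:

// END_HEADER
//
// |LOGISTICS_1|A_JOBID=...|         InputPDSF header, tabs replaced by pipes
//
// |Column_A|Column_B|               InputPDSF column names
// |-|-|                             one hyphen per column
// |value1|value2|                   InputPDSF body rows
//
// |FOOTER_KEY|FOOTER_VALUE|         InputPDSF footer
//
// <?xml version="1.0" ...?> ...     GetXml(processDataStandardFormat)
//
// EOF
//
// { "Count": ..., "Sequence": ... } GetJson(processDataStandardFormat)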
@ -518,7 +627,7 @@ internal class ProcessDataStandardFormat
{ {
for (int c = 1; c < segments.Length; c++) for (int c = 1; c < segments.Length; c++)
{ {
value = segments[c].Replace("\"", "\\\"").Replace("\\", "\\\\"); value = segments[c].Replace("\\", "\\\\").Replace("\"", "\\\"");
_ = stringBuilder.Append('"').Append(processDataStandardFormat.Columns[c]).Append("\":\"").Append(value).Append("\","); _ = stringBuilder.Append('"').Append(processDataStandardFormat.Columns[c]).Append("\":\"").Append(value).Append("\",");
} }
} }
@ -526,7 +635,7 @@ internal class ProcessDataStandardFormat
{ {
for (int c = 1; c < segments.Length; c++) for (int c = 1; c < segments.Length; c++)
{ {
value = segments[c].Replace("\"", "\\\"").Replace("\\", "\\\\"); value = segments[c].Replace("\\", "\\\\").Replace("\"", "\\\"");
if (string.IsNullOrEmpty(value)) if (string.IsNullOrEmpty(value))
_ = stringBuilder.Append('"').Append(processDataStandardFormat.Columns[c]).Append("\":").Append(value).Append("null,"); _ = stringBuilder.Append('"').Append(processDataStandardFormat.Columns[c]).Append("\":").Append(value).Append("null,");
else if (value.All(char.IsDigit)) else if (value.All(char.IsDigit))
@ -545,6 +654,17 @@ internal class ProcessDataStandardFormat
return results; return results;
} }
internal static JsonElement[] GetArray(string reportFullPath, string[] lines, ProcessDataStandardFormat processDataStandardFormat)
{
JsonElement[] results;
string? json = GetRecordsJson(reportFullPath, lines);
if (string.IsNullOrEmpty(json))
results = GetArray(processDataStandardFormat);
else
results = JsonSerializer.Deserialize(json, JsonElementCollectionSourceGenerationContext.Default.JsonElementArray) ?? throw new Exception();
return results;
}
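The new GetArray overload prefers the "Records" JSON already embedded in the report and only rebuilds the array from the tab-separated body when no such block is found. A hedged usage sketch from inside the assembly (the path is illustrative):

string reportFullPath = @"C:\Example\report_EQP.pdsf";   // hypothetical report written by Write
string[] lines = File.ReadAllLines(reportFullPath);
ProcessDataStandardFormat pdsf = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath, lines);
JsonElement[] records = ProcessDataStandardFormat.GetArray(reportFullPath, lines, pdsf);
// records comes from the embedded "Records" block when present, otherwise from the parsed body.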
internal static string GetPDSFText(IFileRead fileRead, Logistics logistics, JsonElement[] jsonElements, string logisticsText) internal static string GetPDSFText(IFileRead fileRead, Logistics logistics, JsonElement[] jsonElements, string logisticsText)
{ {
string result; string result;
@ -757,10 +877,126 @@ internal class ProcessDataStandardFormat
return result; return result;
} }
internal static string GetXml(ProcessDataStandardFormat processDataStandardFormat)
{
string result;
string tag;
string value;
string[] segments;
List<string> values;
Dictionary<string, List<string>> results = new();
ReadOnlyCollection<string> body = processDataStandardFormat.InputPDSF is null ?
processDataStandardFormat.Body : processDataStandardFormat.InputPDSF.Body;
ReadOnlyCollection<string> columns = processDataStandardFormat.InputPDSF is null ?
processDataStandardFormat.Columns : processDataStandardFormat.InputPDSF.Columns;
List<string> lines = new() { "<?xml version=\"1.0\" encoding=\"UTF-8\"?>", "<records>" };
for (int i = 0; i < body.Count; i++)
{
segments = body[i].Trim().Split('\t');
if (segments.Length != columns.Count)
break;
for (int c = 0; c < segments.Length; c++)
{
value = segments[c].Replace("&", "&amp;")
.Replace("<", "&lt;")
.Replace(">", "&gt;")
.Replace("\"", "&quot;")
.Replace("'", "&apos;");
tag = Regex.Replace(columns[c].Trim('"'), @"[^a-zA-Z0-9]", "_").Split('\r')[0].Split('\n')[0];
if (i == 0)
{
if (results.ContainsKey(tag))
continue;
results.Add(tag, new List<string>());
}
results[tag].Add(value);
}
}
foreach (KeyValuePair<string, List<string>> keyValuePair in results)
{
if (body.Count < 2)
break;
if (keyValuePair.Value.Count != body.Count)
continue;
values = keyValuePair.Value.Distinct().ToList();
if (values.Count == 2 && (string.IsNullOrEmpty(values[0]) || string.IsNullOrEmpty(values[1])))
{
for (int i = 0; i < body.Count; i++)
keyValuePair.Value[i] = string.Empty;
foreach (string v in values)
{
if (string.IsNullOrEmpty(v))
continue;
keyValuePair.Value[0] = v;
}
}
}
for (int i = 0; i < body.Count; i++)
{
lines.Add(" <record>");
foreach (KeyValuePair<string, List<string>> keyValuePair in results)
{
if (keyValuePair.Value.Count != body.Count)
continue;
lines.Add(string.Concat(" <", keyValuePair.Key, '>', keyValuePair.Value[i], "</", keyValuePair.Key, '>'));
}
lines.Add(" </record>");
}
lines.Add("</records>");
result = string.Join(Environment.NewLine, lines);
return result;
}
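GetXml pivots the tab-separated body into one <record> element per row: column names are sanitized into element names (non-alphanumerics become underscores), values are XML-escaped, and a column whose only distinct values are one value plus blanks is collapsed so just the first record keeps it. For two made-up rows with columns "Wafer" and "Lot<Id>", the output would look roughly like:

<?xml version="1.0" encoding="UTF-8"?>
<records>
 <record>
  <Wafer>1</Wafer>
  <Lot_Id_>ABC&amp;123</Lot_Id_>
 </record>
 <record>
  <Wafer>2</Wafer>
  <Lot_Id_>ABC&amp;123</Lot_Id_>
 </record>
</records>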
internal static string GetXml(string reportFullPath, string[]? lines = null)
{
string result;
bool foundXml = false;
List<string> results = new();
lines ??= File.ReadAllLines(reportFullPath);
foreach (string line in lines)
{
if (line.StartsWith("<?xml"))
foundXml = true;
if (!foundXml)
continue;
if (line.StartsWith("EOF"))
break;
results.Add(line);
}
result = string.Join(Environment.NewLine, results);
return result;
}
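The string overload simply slices that block back out of a written report: it starts copying at the first line beginning with "<?xml" and stops at the "EOF" marker. A usage sketch (path is illustrative):

string xml = ProcessDataStandardFormat.GetXml(@"C:\Example\report_EQP.pdsf");
// xml holds the "<?xml ...?> ... </records>" block written by Write, or an empty string when none is embedded.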
private static string? GetRecordsJson(string reportFullPath, string[] lines)
{
string? result;
bool foundRecords = false;
List<string> results = new();
lines ??= File.ReadAllLines(reportFullPath);
foreach (string line in lines)
{
if (line.StartsWith("\"Records\""))
foundRecords = true;
if (!foundRecords)
continue;
if (line == "],")
break;
results.Add(line);
}
result = results.Count == 0 ? null : $"{string.Join(Environment.NewLine, results.Skip(1))}{Environment.NewLine}]";
return result;
}
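GetRecordsJson performs the matching recovery for the JSON summary: it copies from the line starting with "Records" up to the lone "]," line, drops the property-name line, and re-closes the array so the text deserializes as a JsonElement[]. Assuming GetJson places the property name and the opening bracket on separate lines, the recovery looks roughly like this (row values are placeholders):

// Lines consumed from the report's JSON summary:
//   "Records":
//   [
//   { "Wafer": "1", "Lot": "ABC" },
//   { "Wafer": "2", "Lot": "ABC" }
// Returned string (first line skipped, array re-closed):
//   [
//   { "Wafer": "1", "Lot": "ABC" },
//   { "Wafer": "2", "Lot": "ABC" }
//   ]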
} }
[JsonSourceGenerationOptions(WriteIndented = true)] [JsonSourceGenerationOptions(WriteIndented = true)]
[JsonSerializable(typeof(JsonElement[]))] [JsonSerializable(typeof(JsonElement[]))]
internal partial class JsonElementCollectionSourceGenerationContext : JsonSerializerContext internal partial class JsonElementCollectionSourceGenerationContext : JsonSerializerContext
{ {
}
[JsonSourceGenerationOptions(WriteIndented = true, DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull)]
[JsonSerializable(typeof(Dictionary<string, Dictionary<string, string>>))]
internal partial class DictionaryStringDictionaryStringStringSourceGenerationContext : JsonSerializerContext
{
} }
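The added context gives the footer dictionaries a source-generated serializer (indented output, null values skipped), matching the call in GetJson. A hedged usage sketch with illustrative values:

Dictionary<string, Dictionary<string, string>> logistics = new()
{
    ["Footer"] = new() { ["WAFER_ID"] = "123456" },
    ["LOGISTICS_1"] = new() { ["A_JOBID"] = "T1", ["SEQUENCE"] = "638918057133464542" },
};
string json = JsonSerializer.Serialize(logistics,
    DictionaryStringDictionaryStringStringSourceGenerationContext.Default.DictionaryStringDictionaryStringString);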
View File
@ -1,33 +1,34 @@
using System.Collections.ObjectModel; using System.Collections.ObjectModel;
using System.Linq;
namespace Adaptation.Shared; namespace Adaptation.Shared;
public class ProcessDataStandardFormatMapping public class ProcessDataStandardFormatMapping
{ {
public ReadOnlyCollection<string> BackfillColumns { get; private set; }
public ReadOnlyCollection<int> ColumnIndices { get; private set; } public ReadOnlyCollection<int> ColumnIndices { get; private set; }
public ReadOnlyCollection<string> IgnoreColumns { get; private set; }
public ReadOnlyCollection<string> IndexOnlyColumns { get; private set; }
public ReadOnlyDictionary<string, string> KeyValuePairs { get; private set; }
public ReadOnlyCollection<string> NewColumnNames { get; private set; } public ReadOnlyCollection<string> NewColumnNames { get; private set; }
public ReadOnlyCollection<string> OldColumnNames { get; private set; } public ReadOnlyCollection<string> OldColumnNames { get; private set; }
public ProcessDataStandardFormatMapping(ReadOnlyCollection<string> backfillColumns, public ProcessDataStandardFormatMapping(ReadOnlyCollection<int> columnIndices,
ReadOnlyCollection<int> columnIndices,
ReadOnlyCollection<string> ignoreColumns,
ReadOnlyCollection<string> indexOnlyColumns,
ReadOnlyDictionary<string, string> keyValuePairs,
ReadOnlyCollection<string> newColumnNames, ReadOnlyCollection<string> newColumnNames,
ReadOnlyCollection<string> oldColumnNames) ReadOnlyCollection<string> oldColumnNames)
{ {
BackfillColumns = backfillColumns;
ColumnIndices = columnIndices; ColumnIndices = columnIndices;
IgnoreColumns = ignoreColumns;
IndexOnlyColumns = indexOnlyColumns;
KeyValuePairs = keyValuePairs;
NewColumnNames = newColumnNames; NewColumnNames = newColumnNames;
OldColumnNames = oldColumnNames; OldColumnNames = oldColumnNames;
} }
internal static ProcessDataStandardFormatMapping Get(string processDataStandardFormatMappingOldColumnNames, string processDataStandardFormatMappingNewColumnNames, string processDataStandardFormatMappingColumnIndices)
{
ProcessDataStandardFormatMapping result;
ReadOnlyCollection<string> newColumnNames = new(processDataStandardFormatMappingNewColumnNames.Split(','));
ReadOnlyCollection<string> oldColumnNames = new(processDataStandardFormatMappingOldColumnNames.Split(','));
ReadOnlyCollection<int> columnIndices = new(processDataStandardFormatMappingColumnIndices.Split(',').Select(int.Parse).ToArray());
result = new(columnIndices: columnIndices,
newColumnNames: newColumnNames,
oldColumnNames: oldColumnNames);
return result;
}
} }
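The slimmed-down mapping keeps only the three collections, and the new Get factory builds one from the comma-separated configuration strings: old column names, new column names, and for each old column the index of its source in the new column set (-1 when there is none). A usage sketch with made-up column names:

ProcessDataStandardFormatMapping mapping = ProcessDataStandardFormatMapping.Get(
    processDataStandardFormatMappingOldColumnNames: "Wafer,Lot,Recipe",
    processDataStandardFormatMappingNewColumnNames: "Lot,Wafer,Extra",
    processDataStandardFormatMappingColumnIndices: "1,0,-1");
// mapping.OldColumnNames -> ["Wafer", "Lot", "Recipe"]
// mapping.ColumnIndices  -> [1, 0, -1]   (-1: the old column has no counterpart in the new set)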
View File
@ -1,4 +1,4 @@
#if true #if v2_60_0
using Adaptation._Tests.Shared; using Adaptation._Tests.Shared;
using Microsoft.Extensions.Logging; using Microsoft.Extensions.Logging;
using Microsoft.VisualStudio.TestTools.UnitTesting; using Microsoft.VisualStudio.TestTools.UnitTesting;
View File
@ -1,4 +1,4 @@
#if true #if v2_60_0
using Adaptation._Tests.Shared; using Adaptation._Tests.Shared;
using Microsoft.Extensions.Logging; using Microsoft.Extensions.Logging;
using Microsoft.VisualStudio.TestTools.UnitTesting; using Microsoft.VisualStudio.TestTools.UnitTesting;
View File
@ -1,4 +1,4 @@
#if true #if v2_60_0
using Adaptation._Tests.Shared; using Adaptation._Tests.Shared;
using Microsoft.Extensions.Logging; using Microsoft.Extensions.Logging;
using Microsoft.VisualStudio.TestTools.UnitTesting; using Microsoft.VisualStudio.TestTools.UnitTesting;
@ -61,5 +61,16 @@ public class TENCOR2 : EAFLoggingUnitTesting
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Exit")); EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Exit"));
} }
[Ignore]
[TestMethod]
public void Production__v2_60_0__TENCOR2__pdsf()
{
string check = "*EQP_*.pdsf";
MethodBase methodBase = new StackFrame().GetMethod();
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Getting configuration"));
_ = AdaptationTesting.GetWriteConfigurationGetFileRead(methodBase, check, EAFLoggingUnitTesting.AdaptationTesting);
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Exit"));
}
} }
#endif #endif
View File
@ -1,4 +1,4 @@
#if true #if v2_60_0
using Adaptation._Tests.Shared; using Adaptation._Tests.Shared;
using Microsoft.Extensions.Logging; using Microsoft.Extensions.Logging;
using Microsoft.VisualStudio.TestTools.UnitTesting; using Microsoft.VisualStudio.TestTools.UnitTesting;
@ -61,5 +61,16 @@ public class TENCOR3 : EAFLoggingUnitTesting
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Exit")); EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Exit"));
} }
[Ignore]
[TestMethod]
public void Production__v2_60_0__TENCOR3__TransmissionControlProtocol()
{
string check = "Statistics";
MethodBase methodBase = new StackFrame().GetMethod();
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Getting configuration"));
_ = AdaptationTesting.GetWriteConfigurationGetFileRead(methodBase, check, EAFLoggingUnitTesting.AdaptationTesting);
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Exit"));
}
} }
#endif #endif
View File
@ -0,0 +1,182 @@
#if true
using Adaptation._Tests.Shared;
using Microsoft.Extensions.Logging;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using System;
using System.Diagnostics;
using System.IO;
using System.Reflection;
namespace Adaptation._Tests.CreateSelfDescription.Production.v2_61_1;
[TestClass]
public class MET08DDUPSFS6420 : EAFLoggingUnitTesting
{
#pragma warning disable CA2254
#pragma warning disable IDE0060
internal static string DummyRoot { get; private set; }
internal static MET08DDUPSFS6420 EAFLoggingUnitTesting { get; private set; }
static MET08DDUPSFS6420() => DummyRoot = @"\\mesfs.infineon.com\EC_Characterization_Si\Dummy";
public MET08DDUPSFS6420() : base(DummyRoot, testContext: null, declaringType: null, skipEquipmentDictionary: false)
{
if (EAFLoggingUnitTesting is null)
throw new Exception();
}
public MET08DDUPSFS6420(TestContext testContext) : base(DummyRoot, testContext, new StackFrame().GetMethod().DeclaringType, skipEquipmentDictionary: false)
{
}
[ClassInitialize]
public static void ClassInitialize(TestContext testContext)
{
EAFLoggingUnitTesting ??= new MET08DDUPSFS6420(testContext);
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(testContext.TestName, " - ClassInitialize"));
string[] fileNameAndText = EAFLoggingUnitTesting.AdaptationTesting.GetCSharpText(testContext.TestName);
File.WriteAllText(fileNameAndText[0], fileNameAndText[1]);
File.WriteAllText(fileNameAndText[2], fileNameAndText[3]);
}
[ClassCleanup()]
public static void ClassCleanup()
{
EAFLoggingUnitTesting?.Logger?.LogInformation("Cleanup");
EAFLoggingUnitTesting?.Dispose();
}
#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Production__v2_61_1__MET08DDUPSFS6420__MoveMatchingFiles()
{
string check = "*.pdsf";
MethodBase methodBase = new StackFrame().GetMethod();
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Getting configuration"));
_ = AdaptationTesting.GetWriteConfigurationGetFileRead(methodBase, check, EAFLoggingUnitTesting.AdaptationTesting);
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Exit"));
}
#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Production__v2_61_1__MET08DDUPSFS6420__OpenInsightMetrologyViewer()
{
string check = "*.pdsf";
MethodBase methodBase = new StackFrame().GetMethod();
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Getting configuration"));
_ = AdaptationTesting.GetWriteConfigurationGetFileRead(methodBase, check, EAFLoggingUnitTesting.AdaptationTesting);
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Exit"));
}
#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Production__v2_61_1__MET08DDUPSFS6420__IQSSi()
{
string check = "*.pdsf";
MethodBase methodBase = new StackFrame().GetMethod();
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Getting configuration"));
_ = AdaptationTesting.GetWriteConfigurationGetFileRead(methodBase, check, EAFLoggingUnitTesting.AdaptationTesting);
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Exit"));
}
#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Production__v2_61_1__MET08DDUPSFS6420__OpenInsight()
{
string check = "*.pdsf";
MethodBase methodBase = new StackFrame().GetMethod();
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Getting configuration"));
_ = AdaptationTesting.GetWriteConfigurationGetFileRead(methodBase, check, EAFLoggingUnitTesting.AdaptationTesting);
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Exit"));
}
#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Production__v2_61_1__MET08DDUPSFS6420__OpenInsightMetrologyViewerAttachments()
{
string check = "*.pdsf";
MethodBase methodBase = new StackFrame().GetMethod();
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Getting configuration"));
_ = AdaptationTesting.GetWriteConfigurationGetFileRead(methodBase, check, EAFLoggingUnitTesting.AdaptationTesting);
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Exit"));
}
#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Production__v2_61_1__MET08DDUPSFS6420__APC()
{
string check = "*.pdsf";
MethodBase methodBase = new StackFrame().GetMethod();
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Getting configuration"));
_ = AdaptationTesting.GetWriteConfigurationGetFileRead(methodBase, check, EAFLoggingUnitTesting.AdaptationTesting);
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Exit"));
}
#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Production__v2_61_1__MET08DDUPSFS6420__SPaCe()
{
string check = "*.pdsf";
MethodBase methodBase = new StackFrame().GetMethod();
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Getting configuration"));
_ = AdaptationTesting.GetWriteConfigurationGetFileRead(methodBase, check, EAFLoggingUnitTesting.AdaptationTesting);
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Exit"));
}
#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Production__v2_61_1__MET08DDUPSFS6420__Processed()
{
string check = "*.pdsf";
MethodBase methodBase = new StackFrame().GetMethod();
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Getting configuration"));
_ = AdaptationTesting.GetWriteConfigurationGetFileRead(methodBase, check, EAFLoggingUnitTesting.AdaptationTesting);
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Exit"));
}
#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Production__v2_61_1__MET08DDUPSFS6420__Archive()
{
string check = "*.pdsf";
MethodBase methodBase = new StackFrame().GetMethod();
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Getting configuration"));
_ = AdaptationTesting.GetWriteConfigurationGetFileRead(methodBase, check, EAFLoggingUnitTesting.AdaptationTesting);
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Exit"));
}
#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Production__v2_61_1__MET08DDUPSFS6420__Dummy()
{
string check = "637400762709163000.zip";
MethodBase methodBase = new StackFrame().GetMethod();
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Getting configuration"));
_ = AdaptationTesting.GetWriteConfigurationGetFileRead(methodBase, check, EAFLoggingUnitTesting.AdaptationTesting);
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Exit"));
}
}
#endif
View File
@ -0,0 +1,65 @@
#if true
using Adaptation._Tests.Shared;
using Microsoft.Extensions.Logging;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using System;
using System.Diagnostics;
using System.IO;
using System.Reflection;
namespace Adaptation._Tests.CreateSelfDescription.Production.v2_61_1;
[TestClass]
public class TENCOR1 : EAFLoggingUnitTesting
{
#pragma warning disable CA2254
#pragma warning disable IDE0060
internal static string DummyRoot { get; private set; }
internal static TENCOR1 EAFLoggingUnitTesting { get; private set; }
static TENCOR1() => DummyRoot = @"\\mesfs.infineon.com\EC_Characterization_Si\Dummy";
public TENCOR1() : base(DummyRoot, testContext: null, declaringType: null, skipEquipmentDictionary: false)
{
if (EAFLoggingUnitTesting is null)
throw new Exception();
}
public TENCOR1(TestContext testContext) : base(DummyRoot, testContext, new StackFrame().GetMethod().DeclaringType, skipEquipmentDictionary: false)
{
}
[ClassInitialize]
public static void ClassInitialize(TestContext testContext)
{
EAFLoggingUnitTesting ??= new TENCOR1(testContext);
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(testContext.TestName, " - ClassInitialize"));
string[] fileNameAndText = EAFLoggingUnitTesting.AdaptationTesting.GetCSharpText(testContext.TestName);
File.WriteAllText(fileNameAndText[0], fileNameAndText[1]);
File.WriteAllText(fileNameAndText[2], fileNameAndText[3]);
}
[ClassCleanup()]
public static void ClassCleanup()
{
EAFLoggingUnitTesting?.Logger?.LogInformation("Cleanup");
EAFLoggingUnitTesting?.Dispose();
}
#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Production__v2_61_1__TENCOR1__pcl()
{
string check = "*.pcl";
MethodBase methodBase = new StackFrame().GetMethod();
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Getting configuration"));
_ = AdaptationTesting.GetWriteConfigurationGetFileRead(methodBase, check, EAFLoggingUnitTesting.AdaptationTesting);
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Exit"));
}
}
#endif
View File
@ -0,0 +1,76 @@
#if true
using Adaptation._Tests.Shared;
using Microsoft.Extensions.Logging;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using System;
using System.Diagnostics;
using System.IO;
using System.Reflection;
namespace Adaptation._Tests.CreateSelfDescription.Production.v2_61_1;
[TestClass]
public class TENCOR2 : EAFLoggingUnitTesting
{
#pragma warning disable CA2254
#pragma warning disable IDE0060
internal static string DummyRoot { get; private set; }
internal static TENCOR2 EAFLoggingUnitTesting { get; private set; }
static TENCOR2() => DummyRoot = @"\\mesfs.infineon.com\EC_Characterization_Si\Dummy";
public TENCOR2() : base(DummyRoot, testContext: null, declaringType: null, skipEquipmentDictionary: false)
{
if (EAFLoggingUnitTesting is null)
throw new Exception();
}
public TENCOR2(TestContext testContext) : base(DummyRoot, testContext, new StackFrame().GetMethod().DeclaringType, skipEquipmentDictionary: false)
{
}
[ClassInitialize]
public static void ClassInitialize(TestContext testContext)
{
EAFLoggingUnitTesting ??= new TENCOR2(testContext);
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(testContext.TestName, " - ClassInitialize"));
string[] fileNameAndText = EAFLoggingUnitTesting.AdaptationTesting.GetCSharpText(testContext.TestName);
File.WriteAllText(fileNameAndText[0], fileNameAndText[1]);
File.WriteAllText(fileNameAndText[2], fileNameAndText[3]);
}
[ClassCleanup()]
public static void ClassCleanup()
{
EAFLoggingUnitTesting?.Logger?.LogInformation("Cleanup");
EAFLoggingUnitTesting?.Dispose();
}
#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Production__v2_61_1__TENCOR2__pcl()
{
string check = "*.pcl";
MethodBase methodBase = new StackFrame().GetMethod();
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Getting configuration"));
_ = AdaptationTesting.GetWriteConfigurationGetFileRead(methodBase, check, EAFLoggingUnitTesting.AdaptationTesting);
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Exit"));
}
[Ignore]
[TestMethod]
public void Production__v2_61_1__TENCOR2__pdsf()
{
string check = "*EQP_*.pdsf";
MethodBase methodBase = new StackFrame().GetMethod();
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Getting configuration"));
_ = AdaptationTesting.GetWriteConfigurationGetFileRead(methodBase, check, EAFLoggingUnitTesting.AdaptationTesting);
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Exit"));
}
}
#endif
View File
@ -0,0 +1,76 @@
#if true
using Adaptation._Tests.Shared;
using Microsoft.Extensions.Logging;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using System;
using System.Diagnostics;
using System.IO;
using System.Reflection;
namespace Adaptation._Tests.CreateSelfDescription.Production.v2_61_1;
[TestClass]
public class TENCOR3 : EAFLoggingUnitTesting
{
#pragma warning disable CA2254
#pragma warning disable IDE0060
internal static string DummyRoot { get; private set; }
internal static TENCOR3 EAFLoggingUnitTesting { get; private set; }
static TENCOR3() => DummyRoot = @"\\mesfs.infineon.com\EC_Characterization_Si\Dummy";
public TENCOR3() : base(DummyRoot, testContext: null, declaringType: null, skipEquipmentDictionary: false)
{
if (EAFLoggingUnitTesting is null)
throw new Exception();
}
public TENCOR3(TestContext testContext) : base(DummyRoot, testContext, new StackFrame().GetMethod().DeclaringType, skipEquipmentDictionary: false)
{
}
[ClassInitialize]
public static void ClassInitialize(TestContext testContext)
{
EAFLoggingUnitTesting ??= new TENCOR3(testContext);
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(testContext.TestName, " - ClassInitialize"));
string[] fileNameAndText = EAFLoggingUnitTesting.AdaptationTesting.GetCSharpText(testContext.TestName);
File.WriteAllText(fileNameAndText[0], fileNameAndText[1]);
File.WriteAllText(fileNameAndText[2], fileNameAndText[3]);
}
[ClassCleanup()]
public static void ClassCleanup()
{
EAFLoggingUnitTesting?.Logger?.LogInformation("Cleanup");
EAFLoggingUnitTesting?.Dispose();
}
#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Production__v2_61_1__TENCOR3__pcl()
{
string check = "*.pcl";
MethodBase methodBase = new StackFrame().GetMethod();
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Getting configuration"));
_ = AdaptationTesting.GetWriteConfigurationGetFileRead(methodBase, check, EAFLoggingUnitTesting.AdaptationTesting);
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Exit"));
}
[Ignore]
[TestMethod]
public void Production__v2_61_1__TENCOR3__TransmissionControlProtocol()
{
string check = "Statistics";
MethodBase methodBase = new StackFrame().GetMethod();
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Getting configuration"));
_ = AdaptationTesting.GetWriteConfigurationGetFileRead(methodBase, check, EAFLoggingUnitTesting.AdaptationTesting);
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Exit"));
}
}
#endif
View File
@ -1,4 +1,4 @@
#if true #if v2_60_0
using Adaptation.Shared; using Adaptation.Shared;
using Adaptation.Shared.Methods; using Adaptation.Shared.Methods;
using Microsoft.VisualStudio.TestTools.UnitTesting; using Microsoft.VisualStudio.TestTools.UnitTesting;
@ -37,6 +37,23 @@ public class MET08DDUPSFS6420
[TestMethod] [TestMethod]
public void Production__v2_60_0__MET08DDUPSFS6420__MoveMatchingFiles() => _MET08DDUPSFS6420.Production__v2_60_0__MET08DDUPSFS6420__MoveMatchingFiles(); public void Production__v2_60_0__MET08DDUPSFS6420__MoveMatchingFiles() => _MET08DDUPSFS6420.Production__v2_60_0__MET08DDUPSFS6420__MoveMatchingFiles();
#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Production__v2_60_0__MET08DDUPSFS6420__MoveMatchingFiles638918057133464542__Normal()
{
string check = "*.pdsf";
bool validatePDSF = false;
MethodBase methodBase = new StackFrame().GetMethod();
_MET08DDUPSFS6420.Production__v2_60_0__MET08DDUPSFS6420__MoveMatchingFiles();
string[] variables = _MET08DDUPSFS6420.AdaptationTesting.GetVariables(methodBase, check, validatePDSF);
IFileRead fileRead = _MET08DDUPSFS6420.AdaptationTesting.Get(methodBase, sourceFileLocation: variables[2], sourceFileFilter: variables[3], useCyclicalForDescription: false);
Logistics logistics = new(fileRead);
_ = Shared.AdaptationTesting.ReExtractCompareUpdatePassDirectory(variables, fileRead, logistics, validatePDSF);
NonThrowTryCatch();
}
#if DEBUG #if DEBUG
[Ignore] [Ignore]
#endif #endif
@ -47,7 +64,7 @@ public class MET08DDUPSFS6420
[Ignore] [Ignore]
#endif #endif
[TestMethod] [TestMethod]
public void Production__v2_60_0__MET08DDUPSFS6420__OpenInsightMetrologyViewer637810124350899080__Normal() public void Production__v2_60_0__MET08DDUPSFS6420__OpenInsightMetrologyViewer638851139271252054__Normal()
{ {
string check = "*.pdsf"; string check = "*.pdsf";
bool validatePDSF = false; bool validatePDSF = false;
@ -76,7 +93,7 @@ public class MET08DDUPSFS6420
[Ignore] [Ignore]
#endif #endif
[TestMethod] [TestMethod]
public void Production__v2_60_0__MET08DDUPSFS6420__OpenInsight638052163299572098__IqsSql() public void Production__v2_60_0__MET08DDUPSFS6420__OpenInsight638851304220990490__IqsSql()
{ {
string check = "*.pdsf"; string check = "*.pdsf";
MethodBase methodBase = new StackFrame().GetMethod(); MethodBase methodBase = new StackFrame().GetMethod();
@ -98,7 +115,7 @@ public class MET08DDUPSFS6420
[Ignore] [Ignore]
#endif #endif
[TestMethod] [TestMethod]
public void Production__v2_60_0__MET08DDUPSFS6420__OpenInsightMetrologyViewerAttachments638519735942138814__HeaderId() public void Production__v2_60_0__MET08DDUPSFS6420__OpenInsightMetrologyViewerAttachments638851355286349752__HeaderId()
{ {
string check = "*.pdsf"; string check = "*.pdsf";
MethodBase methodBase = new StackFrame().GetMethod(); MethodBase methodBase = new StackFrame().GetMethod();
View File
@ -1,4 +1,4 @@
#if true #if v2_60_0
using Adaptation.Shared; using Adaptation.Shared;
using Adaptation.Shared.Methods; using Adaptation.Shared.Methods;
using Microsoft.VisualStudio.TestTools.UnitTesting; using Microsoft.VisualStudio.TestTools.UnitTesting;
@ -41,26 +41,7 @@ public class TENCOR1
[Ignore] [Ignore]
#endif #endif
[TestMethod] [TestMethod]
[ExpectedException(typeof(MissingMethodException))] public void Production__v2_60_0__TENCOR1__pcl638851335365053074__Normal()
public void Production__v2_60_0__TENCOR1__pcl637955518212649513__Normal()
{
string check = "*.pcl";
bool validatePDSF = false;
_TENCOR1.Production__v2_60_0__TENCOR1__pcl();
MethodBase methodBase = new StackFrame().GetMethod();
string[] variables = _TENCOR1.AdaptationTesting.GetVariables(methodBase, check, validatePDSF);
IFileRead fileRead = _TENCOR1.AdaptationTesting.Get(methodBase, sourceFileLocation: variables[2], sourceFileFilter: variables[3], useCyclicalForDescription: false);
Logistics logistics = new(fileRead);
_ = Shared.AdaptationTesting.ReExtractCompareUpdatePassDirectory(variables, fileRead, logistics, validatePDSF);
NonThrowTryCatch();
}
#if DEBUG
[Ignore]
#endif
[TestMethod]
[ExpectedException(typeof(MissingMethodException))]
public void Production__v2_60_0__TENCOR1__pcl638838745567643708__TooMany()
{ {
string check = "*.pcl"; string check = "*.pcl";
bool validatePDSF = false; bool validatePDSF = false;
View File
@ -1,4 +1,5 @@
#if true #if v2_60_0
using Adaptation._Tests.Shared;
using Adaptation.Shared; using Adaptation.Shared;
using Adaptation.Shared.Methods; using Adaptation.Shared.Methods;
using Microsoft.VisualStudio.TestTools.UnitTesting; using Microsoft.VisualStudio.TestTools.UnitTesting;
@ -41,8 +42,7 @@ public class TENCOR2
[Ignore] [Ignore]
#endif #endif
[TestMethod] [TestMethod]
[ExpectedException(typeof(MissingMethodException))] public void Production__v2_60_0__TENCOR2__pcl638851352261289484__Normal()
public void Production__v2_60_0__TENCOR2__pcl637955534973701250__Normal()
{ {
string check = "*.pcl"; string check = "*.pcl";
bool validatePDSF = false; bool validatePDSF = false;
@ -51,7 +51,39 @@ public class TENCOR2
string[] variables = _TENCOR2.AdaptationTesting.GetVariables(methodBase, check, validatePDSF); string[] variables = _TENCOR2.AdaptationTesting.GetVariables(methodBase, check, validatePDSF);
IFileRead fileRead = _TENCOR2.AdaptationTesting.Get(methodBase, sourceFileLocation: variables[2], sourceFileFilter: variables[3], useCyclicalForDescription: false); IFileRead fileRead = _TENCOR2.AdaptationTesting.Get(methodBase, sourceFileLocation: variables[2], sourceFileFilter: variables[3], useCyclicalForDescription: false);
Logistics logistics = new(fileRead); Logistics logistics = new(fileRead);
_ = Shared.AdaptationTesting.ReExtractCompareUpdatePassDirectory(variables, fileRead, logistics, validatePDSF); _ = AdaptationTesting.ReExtractCompareUpdatePassDirectory(variables, fileRead, logistics, validatePDSF);
NonThrowTryCatch();
}
#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Production__v2_60_0__TENCOR2__pcl638860965797666706__TwoRuns()
{
string check = "*.pcl";
bool validatePDSF = false;
_TENCOR2.Production__v2_60_0__TENCOR2__pcl();
MethodBase methodBase = new StackFrame().GetMethod();
string[] variables = _TENCOR2.AdaptationTesting.GetVariables(methodBase, check, validatePDSF);
IFileRead fileRead = _TENCOR2.AdaptationTesting.Get(methodBase, sourceFileLocation: variables[2], sourceFileFilter: variables[3], useCyclicalForDescription: false);
Logistics logistics = new(fileRead);
_ = AdaptationTesting.ReExtractCompareUpdatePassDirectory(variables, fileRead, logistics, validatePDSF);
NonThrowTryCatch();
}
[Ignore]
[TestMethod]
public void Production__v2_60_0__TENCOR2__pdsf__Normal()
{
bool validatePDSF = false;
string check = "*EQP_*.pdsf";
_TENCOR2.Production__v2_60_0__TENCOR2__pdsf();
MethodBase methodBase = new StackFrame().GetMethod();
string[] variables = _TENCOR2.AdaptationTesting.GetVariables(methodBase, check, validatePDSF);
IFileRead fileRead = _TENCOR2.AdaptationTesting.Get(methodBase, sourceFileLocation: variables[2], sourceFileFilter: variables[3], useCyclicalForDescription: false);
Logistics logistics = new(fileRead);
_ = AdaptationTesting.ReExtractCompareUpdatePassDirectory(variables, fileRead, logistics, validatePDSF);
NonThrowTryCatch(); NonThrowTryCatch();
} }
View File
@ -1,4 +1,4 @@
#if true #if v2_60_0
using Adaptation.Shared; using Adaptation.Shared;
using Adaptation.Shared.Methods; using Adaptation.Shared.Methods;
using Microsoft.VisualStudio.TestTools.UnitTesting; using Microsoft.VisualStudio.TestTools.UnitTesting;
@ -41,8 +41,7 @@ public class TENCOR3
[Ignore] [Ignore]
#endif #endif
[TestMethod] [TestMethod]
[ExpectedException(typeof(MissingMethodException))] public void Production__v2_60_0__TENCOR3__pcl638851336413561558__Normal()
public void Production__v2_60_0__TENCOR3__pcl637955520360305921__Normal()
{ {
string check = "*.pcl"; string check = "*.pcl";
bool validatePDSF = false; bool validatePDSF = false;
@ -55,17 +54,14 @@ public class TENCOR3
NonThrowTryCatch(); NonThrowTryCatch();
} }
#if DEBUG
[Ignore] [Ignore]
#endif
[TestMethod] [TestMethod]
[ExpectedException(typeof(MissingMethodException))] public void Production__v2_60_0__TENCOR3__TransmissionControlProtocol638930712297063335__Normal()
public void Production__v2_60_0__TENCOR3__pcl638725158781216195__Parital()
{ {
string check = "*.pcl";
bool validatePDSF = false; bool validatePDSF = false;
_TENCOR3.Production__v2_60_0__TENCOR3__pcl(); string check = "Statistics";
MethodBase methodBase = new StackFrame().GetMethod(); MethodBase methodBase = new StackFrame().GetMethod();
_TENCOR3.Production__v2_60_0__TENCOR3__TransmissionControlProtocol();
string[] variables = _TENCOR3.AdaptationTesting.GetVariables(methodBase, check, validatePDSF); string[] variables = _TENCOR3.AdaptationTesting.GetVariables(methodBase, check, validatePDSF);
IFileRead fileRead = _TENCOR3.AdaptationTesting.Get(methodBase, sourceFileLocation: variables[2], sourceFileFilter: variables[3], useCyclicalForDescription: false); IFileRead fileRead = _TENCOR3.AdaptationTesting.Get(methodBase, sourceFileLocation: variables[2], sourceFileFilter: variables[3], useCyclicalForDescription: false);
Logistics logistics = new(fileRead); Logistics logistics = new(fileRead);
View File
@ -0,0 +1,159 @@
#if true
using Adaptation.Shared;
using Adaptation.Shared.Methods;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using System;
using System.Diagnostics;
using System.Reflection;
namespace Adaptation._Tests.Extract.Production.v2_61_1;
[TestClass]
public class MET08DDUPSFS6420
{
#pragma warning disable CA2254
#pragma warning disable IDE0060
private static CreateSelfDescription.Production.v2_61_1.MET08DDUPSFS6420 _MET08DDUPSFS6420;
[ClassInitialize]
public static void ClassInitialize(TestContext testContext)
{
CreateSelfDescription.Production.v2_61_1.MET08DDUPSFS6420.ClassInitialize(testContext);
_MET08DDUPSFS6420 = CreateSelfDescription.Production.v2_61_1.MET08DDUPSFS6420.EAFLoggingUnitTesting;
}
private static void NonThrowTryCatch()
{
try
{ throw new Exception(); }
catch (Exception) { }
}
#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Production__v2_61_1__MET08DDUPSFS6420__MoveMatchingFiles() => _MET08DDUPSFS6420.Production__v2_61_1__MET08DDUPSFS6420__MoveMatchingFiles();
[Ignore]
[TestMethod]
public void Production__v2_61_1__MET08DDUPSFS6420__MoveMatchingFiles638918057133464542__Normal()
{
string check = "*.pdsf";
bool validatePDSF = false;
MethodBase methodBase = new StackFrame().GetMethod();
_MET08DDUPSFS6420.Production__v2_61_1__MET08DDUPSFS6420__MoveMatchingFiles();
string[] variables = _MET08DDUPSFS6420.AdaptationTesting.GetVariables(methodBase, check, validatePDSF);
IFileRead fileRead = _MET08DDUPSFS6420.AdaptationTesting.Get(methodBase, sourceFileLocation: variables[2], sourceFileFilter: variables[3], useCyclicalForDescription: false);
Logistics logistics = new(fileRead);
_ = Shared.AdaptationTesting.ReExtractCompareUpdatePassDirectory(variables, fileRead, logistics, validatePDSF);
NonThrowTryCatch();
}
#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Production__v2_61_1__MET08DDUPSFS6420__OpenInsightMetrologyViewer() => _MET08DDUPSFS6420.Production__v2_61_1__MET08DDUPSFS6420__OpenInsightMetrologyViewer();
#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Production__v2_61_1__MET08DDUPSFS6420__OpenInsightMetrologyViewer638851139271252054__Normal()
{
string check = "*.pdsf";
bool validatePDSF = false;
MethodBase methodBase = new StackFrame().GetMethod();
_MET08DDUPSFS6420.Production__v2_61_1__MET08DDUPSFS6420__OpenInsightMetrologyViewer();
string[] variables = _MET08DDUPSFS6420.AdaptationTesting.GetVariables(methodBase, check, validatePDSF);
IFileRead fileRead = _MET08DDUPSFS6420.AdaptationTesting.Get(methodBase, sourceFileLocation: variables[2], sourceFileFilter: variables[3], useCyclicalForDescription: false);
Logistics logistics = new(fileRead);
_ = Shared.AdaptationTesting.ReExtractCompareUpdatePassDirectory(variables, fileRead, logistics, validatePDSF);
NonThrowTryCatch();
}
#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Production__v2_61_1__MET08DDUPSFS6420__IQSSi() => _MET08DDUPSFS6420.Production__v2_61_1__MET08DDUPSFS6420__IQSSi();
#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Production__v2_61_1__MET08DDUPSFS6420__OpenInsight() => _MET08DDUPSFS6420.Production__v2_61_1__MET08DDUPSFS6420__OpenInsight();
#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Production__v2_61_1__MET08DDUPSFS6420__OpenInsight638851304220990490__IqsSql()
{
string check = "*.pdsf";
MethodBase methodBase = new StackFrame().GetMethod();
_MET08DDUPSFS6420.Production__v2_61_1__MET08DDUPSFS6420__OpenInsight();
string[] variables = _MET08DDUPSFS6420.AdaptationTesting.GetVariables(methodBase, check, validatePDSF: false);
IFileRead fileRead = _MET08DDUPSFS6420.AdaptationTesting.Get(methodBase, sourceFileLocation: variables[2], sourceFileFilter: variables[3], useCyclicalForDescription: false);
Logistics logistics = new(fileRead);
_ = Shared.AdaptationTesting.ReExtractCompareUpdatePassDirectory(variables, fileRead, logistics);
NonThrowTryCatch();
}
#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Production__v2_61_1__MET08DDUPSFS6420__OpenInsightMetrologyViewerAttachments() => _MET08DDUPSFS6420.Production__v2_61_1__MET08DDUPSFS6420__OpenInsightMetrologyViewerAttachments();
#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Production__v2_61_1__MET08DDUPSFS6420__OpenInsightMetrologyViewerAttachments638851355286349752__HeaderId()
{
string check = "*.pdsf";
MethodBase methodBase = new StackFrame().GetMethod();
_MET08DDUPSFS6420.Production__v2_61_1__MET08DDUPSFS6420__OpenInsightMetrologyViewerAttachments();
string[] variables = _MET08DDUPSFS6420.AdaptationTesting.GetVariables(methodBase, check, validatePDSF: false);
IFileRead fileRead = _MET08DDUPSFS6420.AdaptationTesting.Get(methodBase, sourceFileLocation: variables[2], sourceFileFilter: variables[3], useCyclicalForDescription: false);
Logistics logistics = new(fileRead);
_ = Shared.AdaptationTesting.ReExtractCompareUpdatePassDirectory(variables, fileRead, logistics);
NonThrowTryCatch();
}
#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Production__v2_61_1__MET08DDUPSFS6420__APC() => _MET08DDUPSFS6420.Production__v2_61_1__MET08DDUPSFS6420__APC();
#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Production__v2_61_1__MET08DDUPSFS6420__SPaCe() => _MET08DDUPSFS6420.Production__v2_61_1__MET08DDUPSFS6420__SPaCe();
#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Production__v2_61_1__MET08DDUPSFS6420__Processed() => _MET08DDUPSFS6420.Production__v2_61_1__MET08DDUPSFS6420__Processed();
#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Production__v2_61_1__MET08DDUPSFS6420__Archive() => _MET08DDUPSFS6420.Production__v2_61_1__MET08DDUPSFS6420__Archive();
#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Production__v2_61_1__MET08DDUPSFS6420__Dummy() => _MET08DDUPSFS6420.Production__v2_61_1__MET08DDUPSFS6420__Dummy();
}
#endif
View File
@ -0,0 +1,73 @@
#if true
using Adaptation.Shared;
using Adaptation.Shared.Methods;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using System;
using System.Diagnostics;
using System.Reflection;
namespace Adaptation._Tests.Extract.Production.v2_61_1;
[TestClass]
public class TENCOR1
{
#pragma warning disable CA2254
#pragma warning disable IDE0060
private static CreateSelfDescription.Production.v2_61_1.TENCOR1 _TENCOR1;
[ClassInitialize]
public static void ClassInitialize(TestContext testContext)
{
CreateSelfDescription.Production.v2_61_1.TENCOR1.ClassInitialize(testContext);
_TENCOR1 = CreateSelfDescription.Production.v2_61_1.TENCOR1.EAFLoggingUnitTesting;
}
private static void NonThrowTryCatch()
{
try
{ throw new Exception(); }
catch (Exception) { }
}
#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Production__v2_61_1__TENCOR1__pcl() => _TENCOR1.Production__v2_61_1__TENCOR1__pcl();
#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Production__v2_61_1__TENCOR1__pcl638851335365053074__Normal()
{
string check = "*.pcl";
bool validatePDSF = false;
_TENCOR1.Production__v2_61_1__TENCOR1__pcl();
MethodBase methodBase = new StackFrame().GetMethod();
string[] variables = _TENCOR1.AdaptationTesting.GetVariables(methodBase, check, validatePDSF);
IFileRead fileRead = _TENCOR1.AdaptationTesting.Get(methodBase, sourceFileLocation: variables[2], sourceFileFilter: variables[3], useCyclicalForDescription: false);
Logistics logistics = new(fileRead);
_ = Shared.AdaptationTesting.ReExtractCompareUpdatePassDirectory(variables, fileRead, logistics, validatePDSF);
NonThrowTryCatch();
}
[Ignore]
[TestMethod]
public void Production__v2_61_1__TENCOR1__pcl638959627725124236__Extra()
{
string check = "*.pcl";
bool validatePDSF = false;
_TENCOR1.Production__v2_61_1__TENCOR1__pcl();
MethodBase methodBase = new StackFrame().GetMethod();
string[] variables = _TENCOR1.AdaptationTesting.GetVariables(methodBase, check, validatePDSF);
IFileRead fileRead = _TENCOR1.AdaptationTesting.Get(methodBase, sourceFileLocation: variables[2], sourceFileFilter: variables[3], useCyclicalForDescription: false);
Logistics logistics = new(fileRead);
_ = Shared.AdaptationTesting.ReExtractCompareUpdatePassDirectory(variables, fileRead, logistics, validatePDSF);
NonThrowTryCatch();
}
}
#endif
View File
@ -0,0 +1,87 @@
#if true
using Adaptation._Tests.Shared;
using Adaptation.Shared;
using Adaptation.Shared.Methods;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using System;
using System.Diagnostics;
using System.Reflection;
namespace Adaptation._Tests.Extract.Production.v2_61_1;
[TestClass]
public class TENCOR2
{
#pragma warning disable CA2254
#pragma warning disable IDE0060
private static CreateSelfDescription.Production.v2_61_1.TENCOR2 _TENCOR2;
[ClassInitialize]
public static void ClassInitialize(TestContext testContext)
{
CreateSelfDescription.Production.v2_61_1.TENCOR2.ClassInitialize(testContext);
_TENCOR2 = CreateSelfDescription.Production.v2_61_1.TENCOR2.EAFLoggingUnitTesting;
}
private static void NonThrowTryCatch()
{
try
{ throw new Exception(); }
catch (Exception) { }
}
#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Production__v2_61_1__TENCOR2__pcl() => _TENCOR2.Production__v2_61_1__TENCOR2__pcl();
[Ignore]
[TestMethod]
public void Production__v2_61_1__TENCOR2__pcl638851352261289484__Normal()
{
string check = "*.pcl";
bool validatePDSF = false;
_TENCOR2.Production__v2_61_1__TENCOR2__pcl();
MethodBase methodBase = new StackFrame().GetMethod();
string[] variables = _TENCOR2.AdaptationTesting.GetVariables(methodBase, check, validatePDSF);
IFileRead fileRead = _TENCOR2.AdaptationTesting.Get(methodBase, sourceFileLocation: variables[2], sourceFileFilter: variables[3], useCyclicalForDescription: false);
Logistics logistics = new(fileRead);
_ = AdaptationTesting.ReExtractCompareUpdatePassDirectory(variables, fileRead, logistics, validatePDSF);
NonThrowTryCatch();
}
[Ignore]
[TestMethod]
public void Production__v2_61_1__TENCOR2__pcl638860965797666706__TwoRuns()
{
string check = "*.pcl";
bool validatePDSF = false;
_TENCOR2.Production__v2_61_1__TENCOR2__pcl();
MethodBase methodBase = new StackFrame().GetMethod();
string[] variables = _TENCOR2.AdaptationTesting.GetVariables(methodBase, check, validatePDSF);
IFileRead fileRead = _TENCOR2.AdaptationTesting.Get(methodBase, sourceFileLocation: variables[2], sourceFileFilter: variables[3], useCyclicalForDescription: false);
Logistics logistics = new(fileRead);
_ = AdaptationTesting.ReExtractCompareUpdatePassDirectory(variables, fileRead, logistics, validatePDSF);
NonThrowTryCatch();
}
[Ignore]
[TestMethod]
public void Production__v2_61_1__TENCOR2__pdsf__Normal()
{
bool validatePDSF = false;
string check = "*EQP_*.pdsf";
_TENCOR2.Production__v2_61_1__TENCOR2__pdsf();
MethodBase methodBase = new StackFrame().GetMethod();
string[] variables = _TENCOR2.AdaptationTesting.GetVariables(methodBase, check, validatePDSF);
IFileRead fileRead = _TENCOR2.AdaptationTesting.Get(methodBase, sourceFileLocation: variables[2], sourceFileFilter: variables[3], useCyclicalForDescription: false);
Logistics logistics = new(fileRead);
_ = AdaptationTesting.ReExtractCompareUpdatePassDirectory(variables, fileRead, logistics, validatePDSF);
NonThrowTryCatch();
}
}
#endif

View File

@ -0,0 +1,73 @@
#if true
using Adaptation.Shared;
using Adaptation.Shared.Methods;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using System;
using System.Diagnostics;
using System.Reflection;
namespace Adaptation._Tests.Extract.Production.v2_61_1;
[TestClass]
public class TENCOR3
{
#pragma warning disable CA2254
#pragma warning disable IDE0060
private static CreateSelfDescription.Production.v2_61_1.TENCOR3 _TENCOR3;
[ClassInitialize]
public static void ClassInitialize(TestContext testContext)
{
CreateSelfDescription.Production.v2_61_1.TENCOR3.ClassInitialize(testContext);
_TENCOR3 = CreateSelfDescription.Production.v2_61_1.TENCOR3.EAFLoggingUnitTesting;
}
private static void NonThrowTryCatch()
{
try
{ throw new Exception(); }
catch (Exception) { }
}
#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Production__v2_61_1__TENCOR3__pcl() => _TENCOR3.Production__v2_61_1__TENCOR3__pcl();
#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Production__v2_61_1__TENCOR3__pcl638851336413561558__Normal()
{
string check = "*.pcl";
bool validatePDSF = false;
_TENCOR3.Production__v2_61_1__TENCOR3__pcl();
MethodBase methodBase = new StackFrame().GetMethod();
string[] variables = _TENCOR3.AdaptationTesting.GetVariables(methodBase, check, validatePDSF);
IFileRead fileRead = _TENCOR3.AdaptationTesting.Get(methodBase, sourceFileLocation: variables[2], sourceFileFilter: variables[3], useCyclicalForDescription: false);
Logistics logistics = new(fileRead);
_ = Shared.AdaptationTesting.ReExtractCompareUpdatePassDirectory(variables, fileRead, logistics, validatePDSF);
NonThrowTryCatch();
}
[Ignore]
[TestMethod]
public void Production__v2_61_1__TENCOR3__TransmissionControlProtocol638930712297063335__Normal()
{
bool validatePDSF = false;
string check = "Statistics";
MethodBase methodBase = new StackFrame().GetMethod();
_TENCOR3.Production__v2_61_1__TENCOR3__TransmissionControlProtocol();
string[] variables = _TENCOR3.AdaptationTesting.GetVariables(methodBase, check, validatePDSF);
IFileRead fileRead = _TENCOR3.AdaptationTesting.Get(methodBase, sourceFileLocation: variables[2], sourceFileFilter: variables[3], useCyclicalForDescription: false);
Logistics logistics = new(fileRead);
_ = Shared.AdaptationTesting.ReExtractCompareUpdatePassDirectory(variables, fileRead, logistics, validatePDSF);
NonThrowTryCatch();
}
}
#endif
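Note: the TransmissionControlProtocol test above drives the new TENCOR3 TCP-based handler. As a rough illustration only, and with no claim about the actual FileRead/Record API in this change set, a minimal sketch of reading a payload over a network stream (hypothetical host, port, and UTF-8 framing):

using System.IO;
using System.Net.Sockets;
using System.Text;

internal static class TcpReadSketch
{
    // Hypothetical endpoint and framing; the real handler's details are not shown in this diff.
    internal static string ReadAll(string host, int port)
    {
        using TcpClient client = new(host, port);
        using NetworkStream stream = client.GetStream();
        using MemoryStream memory = new();
        byte[] buffer = new byte[4096];
        int read;
        // Read until the remote side closes the connection.
        while ((read = stream.Read(buffer, 0, buffer.Length)) > 0)
            memory.Write(buffer, 0, read);
        return Encoding.UTF8.GetString(memory.ToArray());
    }
}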

View File

@ -193,7 +193,12 @@ public class AdaptationTesting : ISMTP
  segments = withActualCICN.Split(new string[] { ticks }, StringSplitOptions.None);
  dummyDirectory = Path.Combine(dummyRoot, cellInstanceName, ticks, string.Join(null, segments));
  if (!Directory.Exists(dummyDirectory))
+ {
  _ = Directory.CreateDirectory(dummyDirectory);
+ try
+ { Directory.SetLastWriteTime(Path.Combine(dummyRoot, cellInstanceName), DateTime.Now); }
+ catch { }
+ }
  }
  if (string.IsNullOrEmpty(ticks))
  {
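Note: a minimal sketch of the pattern this hunk introduces, assuming the surrounding AdaptationTesting names shown above (dummyRoot, cellInstanceName, ticks, segments); the helper name and the stated reason for the timestamp bump (presumably so checks keyed off directory age see recent activity) are illustrative, not taken from the repository.

using System;
using System.IO;

internal static class DummyDirectorySketch
{
    // Illustrative only: mirrors the change above, not the actual AdaptationTesting member.
    internal static string EnsureDummyDirectory(string dummyRoot, string cellInstanceName, string ticks, string[] segments)
    {
        string dummyDirectory = Path.Combine(dummyRoot, cellInstanceName, ticks, string.Join(null, segments));
        if (!Directory.Exists(dummyDirectory))
        {
            _ = Directory.CreateDirectory(dummyDirectory);
            try
            {
                // Touch the parent cell-instance folder; failures (e.g., permissions) are non-fatal.
                Directory.SetLastWriteTime(Path.Combine(dummyRoot, cellInstanceName), DateTime.Now);
            }
            catch { }
        }
        return dummyDirectory;
    }
}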

View File

@ -64,7 +64,7 @@ public class MET08DDUPSFS6420 : LoggingUnitTesting, IDisposable
  StringBuilder results = new();
  (string cellInstanceName, string cellInstanceVersionName)[] collection = new (string, string)[]
  {
- new("MET08DDUPSFS6420", "v2.60.0"),
+ new("MET08DDUPSFS6420", "v2.61.1"),
  };
  string production = "http://messa08ec.infineon.com:9003/CellInstanceServiceV2";
  Shared.PasteSpecialXml.EAF.XML.API.CellInstance.CellInstanceVersion cellInstanceVersion;

View File

@ -0,0 +1,36 @@
// getValue(getContextData('2', 'cds.NULL_DATA', ''));
function getValue(json) {
let result;
if (json == undefined || json.length === 0)
result = 'A) Invalid input!';
else {
let parsed;
try {
parsed = JSON.parse(json);
} catch (error) {
parsed = null;
}
if (parsed == null)
result = 'B) Invalid input!';
else {
let reactorType = parsed.rds == undefined ? '' : parsed.rds.reactorType == undefined ? '' : parsed.rds.reactorType;
if (parsed.rds == undefined)
result = '-';
else if (parsed.rds.loadLockSide == undefined)
result = '_ - ' + reactorType;
else if (parsed.rds.loadLockSide === 'L')
result = 'Left - ' + reactorType;
else if (parsed.rds.loadLockSide === 'R')
result = 'Right - ' + reactorType;
else
result = parsed.rds.loadLockSide + ' - ' + reactorType;
}
}
return result;
}
const json = '{"rds":{"prodSpec":{"recipesAndPatterns":[{"recipe":"6IN25_ROTR","pattern":"","patternSize":0,"tool":"TENCOR"}]}}}';
const testA = getValue(json);
if (testA !== '_ - ') // rds is present but loadLockSide and reactorType are absent, so getValue returns '_ - '
throw 'Test A failed: ' + testA;

View File

@ -203,9 +203,9 @@ public class PCL : LoggingUnitTesting, IDisposable
  StringBuilder results = new();
  (string cellInstanceName, string cellInstanceVersionName)[] collection = new (string, string)[]
  {
- new("TENCOR1", "v2.60.0"),
- new("TENCOR2", "v2.60.0"),
- new("TENCOR3", "v2.60.0"),
+ new("TENCOR1", "v2.61.1"),
+ new("TENCOR2", "v2.61.1"),
+ new("TENCOR3", "v2.61.1"),
  new("TENCOR1-EQPT", "v2.12.3"),
  new("TENCOR2-EQPT", "v2.12.3"),
  new("TENCOR3-EQPT", "v2.12.3"),

View File

@ -0,0 +1,115 @@
// Recipe 1 = Matched
// recipes-and-patterns.js under IndexOf
// RecipesAndPatternsMatch
// ($('dcp.TENCOR1/csv/Index', 0) + 1) == $('dcp.TENCOR1/csv/Count', 0)
// getValue('TENCOR', $('dcp.TENCOR1/csv/Count', 0), $('dcp.TENCOR1/csv/Session', ''), 'pattern', getContextData('2', 'cds.NULL_DATA', ''));
function getValue(tool, patternSize, recipe, pattern, json) {
let result;
if (tool == undefined || tool.length === 0 || patternSize == undefined || patternSize.length === 0 || recipe == undefined || recipe.length === 0 || pattern == undefined || pattern.length === 0 || json == undefined || json.length === 0)
result = 'A) Invalid input!';
else {
let parsed;
try {
parsed = JSON.parse(json);
} catch (error) {
parsed = null;
}
if (parsed == null)
result = 'B) Invalid input!';
else if (parsed.rds == undefined || parsed.rds.prodSpec == undefined || parsed.rds.prodSpec.recipesAndPatterns == undefined)
result = 'C) No Spec!';
else {
let toolMatches = [];
for (let index = 0; index < parsed.rds.prodSpec.recipesAndPatterns.length; index++) {
if (parsed.rds.prodSpec.recipesAndPatterns[index].tool === tool) {
toolMatches.push(parsed.rds.prodSpec.recipesAndPatterns[index]);
}
}
if (toolMatches == null || toolMatches.length === 0)
result = 'Tool [' + tool + '] not found in OI API results!';
else {
let debug = '';
let matches = 0;
for (let index = 0; index < toolMatches.length; index++) {
debug += 'patternSize: ' + toolMatches[index].patternSize +
'; recipe: ' + toolMatches[index].recipe +
'; pattern: ' + toolMatches[index].pattern + ';~';
if (toolMatches[index].recipe.localeCompare(recipe, ['en-US'], { sensitivity: 'base' }) === 0) {
matches++;
}
}
if (matches > 0)
result = '1';
else
result = 'Value not matched~Run~patternSize: ' + patternSize + '; recipe: ' + recipe + '; pattern: ' + pattern + ';~API~' + debug;
}
}
}
return result;
}
getValue('TENCOR', 0, '6IN25_ROTR', 'pattern', '{"rds":{"prodSpec":{"recipesAndPatterns":[{"recipe":"6IN25_ROTR","pattern":"","patternSize":0,"tool":"TENCOR"}]}}}');
let json;
let tool;
let recipe;
let pattern;
let patternSize;
tool = 'TENCOR';
patternSize = 0;
recipe = '6IN25_ROTR';
pattern = 'pattern';
json = '{"rds":{"prodSpec":{"recipesAndPatterns":[{"recipe":"6IN25_ROTR","pattern":"","patternSize":0,"tool":"TENCOR"}]}}}';
const testA = getValue(tool, patternSize, recipe, pattern, json);
if (testA !== '1')
throw 'Test A failed: ' + testA;
tool = null;
const testB = getValue(tool, patternSize, recipe, pattern, json);
if (testB !== 'A) Invalid input!')
throw 'Test B failed: ' + testB;
tool = '';
const testC = getValue(tool, patternSize, recipe, pattern, json);
if (testC !== 'A) Invalid input!')
throw 'Test C failed: ' + testC;
patternSize = null;
const testD = getValue(tool, patternSize, recipe, pattern, json);
if (testD !== 'A) Invalid input!')
throw 'Test D failed: ' + testD;
patternSize = '';
const testE = getValue(tool, patternSize, recipe, pattern, json);
if (testE !== 'A) Invalid input!')
throw 'Test E failed: ' + testE;
recipe = null;
const testF = getValue(tool, patternSize, recipe, pattern, json);
if (testF !== 'A) Invalid input!')
throw 'Test F failed: ' + testF;
recipe = '';
const testG = getValue(tool, patternSize, recipe, pattern, json);
if (testG !== 'A) Invalid input!')
throw 'Test G failed: ' + testG;
pattern = null;
const testH = getValue(tool, patternSize, recipe, pattern, json);
if (testH !== 'A) Invalid input!')
throw 'Test H failed: ' + testH;
pattern = '';
const testI = getValue(tool, patternSize, recipe, pattern, json);
if (testI !== 'A) Invalid input!')
throw 'Test I failed: ' + testI;
json = '';
const testK = getValue(tool, patternSize, recipe, pattern, json);
if (testK !== 'A) Invalid input!')
throw 'Test K failed: ' + testK;
json = 'invalid';
const testL = getValue(tool, patternSize, recipe, pattern, json);
if (testL !== 'B) Invalid input!')
throw 'Test L failed: ' + testL;
json = '{"rds":{}}';
const testM = getValue(tool, patternSize, recipe, pattern, json);
if (testM !== 'C) No Spec!')
throw 'Test M failed: ' + testM;
json = '{"rds":{"prodSpec":{"recipesAndPatterns":[]}}}';
const testN = getValue(tool, patternSize, recipe, pattern, json);
if (testN !== 'Tool [TENCOR] not found in OI API results!')
throw 'Test N failed: ' + testN;

View File

@ -132,8 +132,18 @@
<Compile Include="Adaptation\FileHandlers\pcl\Run.cs" /> <Compile Include="Adaptation\FileHandlers\pcl\Run.cs" />
<Compile Include="Adaptation\FileHandlers\pcl\Wafer.cs" /> <Compile Include="Adaptation\FileHandlers\pcl\Wafer.cs" />
<Compile Include="Adaptation\FileHandlers\pcl\WaferSummary.cs" /> <Compile Include="Adaptation\FileHandlers\pcl\WaferSummary.cs" />
<Compile Include="Adaptation\FileHandlers\pdsf\Constant.cs" />
<Compile Include="Adaptation\FileHandlers\pdsf\Convert.cs" />
<Compile Include="Adaptation\FileHandlers\pdsf\FileRead.cs" />
<Compile Include="Adaptation\FileHandlers\pdsf\Header.cs" />
<Compile Include="Adaptation\FileHandlers\pdsf\Row.cs" />
<Compile Include="Adaptation\FileHandlers\pdsf\Run.cs" />
<Compile Include="Adaptation\FileHandlers\pdsf\Wafer.cs" />
<Compile Include="Adaptation\FileHandlers\pdsf\WaferSummary.cs" />
<Compile Include="Adaptation\FileHandlers\Processed\FileRead.cs" /> <Compile Include="Adaptation\FileHandlers\Processed\FileRead.cs" />
<Compile Include="Adaptation\FileHandlers\SPaCe\FileRead.cs" /> <Compile Include="Adaptation\FileHandlers\SPaCe\FileRead.cs" />
<Compile Include="Adaptation\FileHandlers\TransmissionControlProtocol\FileRead.cs" />
<Compile Include="Adaptation\FileHandlers\TransmissionControlProtocol\Record.cs" />
<Compile Include="Adaptation\Ifx\Eaf\Common\Configuration\ConnectionSetting.cs" /> <Compile Include="Adaptation\Ifx\Eaf\Common\Configuration\ConnectionSetting.cs" />
<Compile Include="Adaptation\Ifx\Eaf\EquipmentConnector\File\Component\File.cs" /> <Compile Include="Adaptation\Ifx\Eaf\EquipmentConnector\File\Component\File.cs" />
<Compile Include="Adaptation\Ifx\Eaf\EquipmentConnector\File\Component\FilePathGenerator.cs" /> <Compile Include="Adaptation\Ifx\Eaf\EquipmentConnector\File\Component\FilePathGenerator.cs" />
@ -185,13 +195,13 @@
  <Version>7.2.4630.5</Version>
  </PackageReference>
  <PackageReference Include="Infineon.EAF.Runtime">
- <Version>2.60.0</Version>
+ <Version>2.61.1</Version>
  </PackageReference>
  <PackageReference Include="Pdfbox">
  <Version>1.1.1</Version>
  </PackageReference>
  <PackageReference Include="System.Text.Json">
- <Version>8.0.5</Version>
+ <Version>8.0.3</Version>
  </PackageReference>
  </ItemGroup>
  <Import Project="$(MSBuildToolsPath)\Microsoft.CSharp.targets" />

View File

@ -32,5 +32,5 @@ using System.Runtime.InteropServices;
  // You can specify all the values or you can default the Build and Revision Numbers
  // by using the '*' as shown below:
  // [assembly: AssemblyVersion("1.0.*")]
- [assembly: AssemblyVersion("2.60.0.0")]
- [assembly: AssemblyFileVersion("2.60.0.0")]
+ [assembly: AssemblyVersion("2.61.1.0")]
+ [assembly: AssemblyFileVersion("2.61.1.0")]