Compare commits

..

6 Commits

SHA1  Message  Date

5c2a3c97e9  Switched to xml for InfinityQS export  2025-09-02 07:55:26 -07:00
e6533e152f  Preparation to switch to xml for InfinityQS export  2025-08-27 11:53:36 -07:00
19b54a7cc8  load-lock-side from reactor and open-insight api  2025-08-25 09:31:01 -07:00
43024c6c30  Export run-data-sheet-root in last-update-user of logistics  2025-08-04 16:58:37 -07:00
            Started Bun support but CORS fails
            process-data-standard-format code alignment
e3910d700f  Removed save-open-insight-file to use process-data-standard-format instead  2025-07-22 15:28:15 -07:00
            Removed last logic
e651c2804c  Now relying on pipeline to copy files from file shares for ghost-pcl and linc-pdfc  2025-07-22 15:28:05 -07:00
            Now using entered-date-time-filter and load-signature-date-time-filter for logistics query
32 changed files with 1015 additions and 457 deletions

View File

@@ -110,7 +110,7 @@ dotnet_diagnostic.CA2254.severity = none # CA2254: The logging message template
 dotnet_diagnostic.IDE0001.severity = warning # IDE0001: Simplify name
 dotnet_diagnostic.IDE0002.severity = warning # Simplify (member access) - System.Version.Equals("1", "2"); Version.Equals("1", "2");
 dotnet_diagnostic.IDE0004.severity = warning # IDE0004: Cast is redundant.
-dotnet_diagnostic.IDE0005.severity = warning # Using directive is unnecessary
+dotnet_diagnostic.IDE0005.severity = none # Using directive is unnecessary
 dotnet_diagnostic.IDE0028.severity = none # IDE0028: Collection initialization can be simplified
 dotnet_diagnostic.IDE0031.severity = warning # Use null propagation (IDE0031)
 dotnet_diagnostic.IDE0047.severity = warning # IDE0047: Parentheses can be removed
@@ -122,6 +122,7 @@ dotnet_diagnostic.IDE0290.severity = none # Use primary constructor [Distance]cs
 dotnet_diagnostic.IDE0300.severity = none # IDE0300: Collection initialization can be simplified
 dotnet_diagnostic.IDE0301.severity = none #IDE0301: Collection initialization can be simplified
 dotnet_diagnostic.IDE0305.severity = none # IDE0305: Collection initialization can be simplified
+dotnet_diagnostic.MSTEST0015.severity = none # MSTEST0015: Test method {method} should not be ignored
 dotnet_diagnostic.MSTEST0037.severity = error # MSTEST0037: Use proper 'Assert' methods
 dotnet_diagnostic.SYSLIB1045.severity = none # SYSLIB1045: diagnostics for regex source generation
 dotnet_naming_rule.abstract_method_should_be_pascal_case.severity = warning
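
For reference, IDE0005 (whose severity the first hunk above lowers from warning to none) flags using directives that nothing in the file consumes. A minimal illustration, not taken from this repository:

using System;
using System.Text; // IDE0005 would flag this directive: nothing below references System.Text

internal static class Ide0005Demo
{
    private static void Main() => Console.WriteLine("only System is actually needed here");
}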

View File

@@ -4,7 +4,7 @@
 "name": ".NET Core Attach",
 "type": "coreclr",
 "request": "attach",
-"processId": 8912
+"processId": 13604
 }
 ]
 }

View File

@@ -7,6 +7,7 @@
 "CASS",
 "CEPIEPSILON",
 "CUST",
+"DDUPSFS",
 "DDUPSP",
 "EQPT",
 "GETJOBS",
@@ -31,6 +32,8 @@
 "substr",
 "SUSCEPTOR",
 "targ",
+"TENCOR",
+"THFTIRQS",
 "TIBCO",
 "Wafr"
 ],

View File

@@ -1,19 +1,134 @@
 {
 "version": "2.0.0",
+"inputs": [
+{
+"default": "Development",
+"description": "Which ASP Net Core Environment?",
+"id": "ASPNETCORE_ENVIRONMENT",
+"options": [
+"Development",
+"Production"
+],
+"type": "pickString"
+},
+{
+"default": "{AssemblyTitle}",
+"description": "What Assembly Title?",
+"id": "AssemblyTitle",
+"type": "promptString"
+},
+{
+"default": "{Build.BuildId}",
+"description": "Which Build BuildId?",
+"id": "Build.BuildId",
+"type": "promptString"
+},
+{
+"default": "{Build.Reason}",
+"description": "Which Build Reason?",
+"id": "Build.Reason",
+"type": "promptString"
+},
+{
+"default": "{Build.Repository.Id}",
+"description": "Which Build Repository Id?",
+"id": "Build.Repository.Id",
+"type": "promptString"
+},
+{
+"default": "{Build.Repository.Name}",
+"description": "Which Build Repository Name?",
+"id": "Build.Repository.Name",
+"type": "promptString"
+},
+{
+"default": "{Build.SourceVersion}",
+"description": "Which Build Source Version?",
+"id": "Build.SourceVersion",
+"type": "promptString"
+},
+{
+"default": "Debug",
+"description": "Which Configuration?",
+"id": "Configuration",
+"options": [
+"Debug",
+"Release"
+],
+"type": "pickString"
+},
+{
+"default": "net8.0",
+"description": "Which Core Version?",
+"id": "CoreVersion",
+"options": [
+"net8.0"
+],
+"type": "pickString"
+},
+{
+"default": "C:/Program Files (x86)/Microsoft Visual Studio/2022/BuildTools/MSBuild/Current/Bin/MSBuild.exe",
+"description": "Which MS Build?",
+"id": "MSBuild",
+"type": "promptString"
+},
+{
+"default": "https://artifactory.intra.infineon.com/artifactory/api/nuget/ngt-fi-package-main-vir/",
+"description": "Which Nuget Source?",
+"id": "NugetSource",
+"type": "promptString"
+},
+{
+"default": "win-x64",
+"description": "Which Runtime?",
+"id": "Runtime",
+"options": [
+"win-x64",
+"win-x32",
+"linux-x64",
+"linux-x32"
+],
+"type": "pickString"
+},
+{
+"default": "L:/",
+"description": "Which System DefaultWorkingDirectory?",
+"id": "System.DefaultWorkingDirectory",
+"options": [
+"L:/",
+"D:/",
+"C:/"
+],
+"type": "pickString"
+},
+{
+"default": "v4.8",
+"description": "Which Core Target Framework Version?",
+"id": "TargetFrameworkVersion",
+"options": [
+"v4.8"
+],
+"type": "pickString"
+},
+{
+"default": "{UserSecretsId}",
+"description": "Which Core User Secrets Id?",
+"id": "UserSecretsId",
+"type": "promptString"
+}
+],
 "tasks": [
 {
 "label": "Build",
 "command": "dotnet",
 "type": "process",
 "args": [
-"build",
-"/property:GenerateFullPaths=true",
-"/consoleloggerparameters:NoSummary"
+"build"
 ],
 "problemMatcher": "$msCompile"
 },
 {
-"label": "Test-Debug",
+"label": "Test Debug",
 "command": "dotnet",
 "type": "process",
 "args": [
@@ -24,7 +139,7 @@
 "problemMatcher": "$msCompile"
 },
 {
-"label": "Test-Release",
+"label": "Test Release",
 "command": "dotnet",
 "type": "process",
 "args": [
@@ -50,7 +165,7 @@
 "problemMatcher": "$msCompile"
 },
 {
-"label": "Format-Whitespaces",
+"label": "Format Whitespaces",
 "command": "dotnet",
 "type": "process",
 "args": [
@@ -87,13 +202,13 @@
 "problemMatcher": "$msCompile"
 },
 {
-"label": "Project",
+"label": "Code Project",
 "type": "shell",
 "command": "code ../MET08DDUPSP1TBI.csproj",
 "problemMatcher": []
 },
 {
-"label": "Readme",
+"label": "Code Read Me",
 "type": "shell",
 "command": "code ../README.md",
 "problemMatcher": []
@@ -113,7 +228,7 @@
 "problemMatcher": []
 },
 {
-"label": "Git Config",
+"label": "Code Git Config",
 "type": "shell",
 "command": "code ../.git/config",
 "problemMatcher": []

View File

@@ -128,7 +128,7 @@ public class FileRead : Shared.FileRead, IFileRead
 Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
 if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
 FileCopy(reportFullPath, dateTime, descriptions);
-results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics[0]), tests, jsonElements, new List<FileInfo>());
+results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics), tests, jsonElements, new List<FileInfo>());
 return results;
 }

View File

@@ -153,7 +153,7 @@ public class FileRead : Shared.FileRead, IFileRead
 Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
 if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
 MoveArchive(reportFullPath, dateTime);
-results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics[0]), tests, jsonElements, new List<FileInfo>());
+results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics), tests, jsonElements, new List<FileInfo>());
 return results;
 }

View File

@@ -8,6 +8,7 @@ using System;
 using System.Collections.Generic;
 using System.IO;
 using System.Linq;
+using System.Text;
 using System.Text.Json;
 namespace Adaptation.FileHandlers.IQSSi;
@@ -109,6 +110,59 @@ public class FileRead : Shared.FileRead, IFileRead
 return results;
 }
+private static Tuple<string, string> GetLines(Logistics logistics, List<txt.Description> descriptions)
+{
+StringBuilder result = new();
+char del = '\t';
+txt.Description x = descriptions[0];
+_ = result.Append(x.DcnLpdMin).Append(del). // 001 -
+Append(x.DcnLpdMax).Append(del). // 002 -
+Append(x.DcnLpdMean).Append(del). // 003 - DCN LPD
+Append(x.DcnAreaCountMin).Append(del). // 004 -
+Append(x.DcnAreaCountMax).Append(del). // 005 -
+Append(x.DcnAreaCountMean).Append(del).// 006 - DCN Area
+Append(x.DcnAreaMin).Append(del). // 007 -
+Append(x.DcnAreaMax).Append(del). // 008 -
+Append(x.Date).Append(del). // 009 -
+Append(x.DcnHazeAvgMean).Append(del). // 010 - Haze Average
+Append(string.Empty).Append(del). // 011 -
+Append(string.Empty).Append(del). // 012 -
+Append(string.Empty).Append(del). // 013 -
+Append(string.Empty).Append(del). // 014 -
+Append(string.Empty).Append(del). // 015 -
+Append(string.Empty).Append(del). // 016 -
+Append(string.Empty).Append(del). // 017 -
+Append(string.Empty).Append(del). // 018 -
+Append(string.Empty).Append(del). // 019 -
+Append(string.Empty).Append(del). // 020 -
+Append(string.Empty).Append(del). // 021 -
+Append(string.Empty).Append(del). // 022 -
+Append(string.Empty).Append(del). // 023 -
+Append(string.Empty).Append(del). // 024 -
+Append(string.Empty).Append(del). // 025 -
+Append(string.Empty).Append(del). // 026 -
+Append(string.Empty).Append(del). // 027 -
+Append(x.RDS).Append(del). // 028 - Lot
+Append(x.Reactor).Append(del). // 029 - Process
+Append(x.Recipe).Append(del). // 030 - Part
+Append(x.DcnScrMean).Append(del). // 031 - Scratch Count
+Append(string.Empty).Append(del). // 032 -
+Append(string.Empty).Append(del). // 033 -
+Append(string.Empty).Append(del). // 034 -
+Append(x.DcnMicroScrMean).Append(del). // 035 - Scratch Length
+Append(string.Empty).Append(del). // 036 -
+Append(string.Empty).Append(del). // 037 -
+Append(string.Empty).Append(del). // 038 -
+Append(x.DcnAllMean).Append(del). // 039 - Average Sum of Defects
+Append(x.DcnAllMax).Append(del). // 040 - Max Sum of defects
+Append(x.DcnAllMin).Append(del). // 041 - Min Sum of Defects
+Append(string.Empty).Append(del). // 042 -
+Append(logistics.MesEntity).Append(del). // 043 -
+Append(x.DcnAreaMean).Append(del). // 044 - DCN MM2
+AppendLine();
+return new Tuple<string, string>(result.ToString(), x.Date);
+}
 private void SaveIQSFile(string reportFullPath, DateTime dateTime, List<txt.Description> descriptions, Test[] tests)
 {
 if (tests.Length == 0)
@@ -116,7 +170,7 @@ public class FileRead : Shared.FileRead, IFileRead
 else
 {
 bool isDummyRun = false;
-Tuple<string, string> lines = OpenInsight.FileRead.GetLines(_Logistics, descriptions);
+Tuple<string, string> lines = GetLines(_Logistics, descriptions);
 string check = lines.Item1.Replace(lines.Item2, "$Date$");
 ScopeInfo scopeInfo = new(tests[0], _IQSFile);
 List<(Shared.Properties.IScopeInfo, string)> collection = new();
@@ -132,7 +186,7 @@ public class FileRead : Shared.FileRead, IFileRead
 }
 }
-private void FileCopy<T>(string reportFullPath, DateTime dateTime, List<T> descriptions) where T : Shared.Properties.IDescription
+private void WriteFile<T>(string reportFullPath, DateTime dateTime, List<T> descriptions) where T : Shared.Properties.IDescription
 {
 bool isDummyRun = false;
 string successDirectory = string.Empty;
@@ -140,8 +194,9 @@ public class FileRead : Shared.FileRead, IFileRead
 string duplicateDirectory = Path.Combine(_FileConnectorConfiguration.SourceFileLocation, _CellInstanceName);
 if (!Directory.Exists(duplicateDirectory))
 _ = Directory.CreateDirectory(duplicateDirectory);
-string duplicateFile = Path.Combine(duplicateDirectory, Path.GetFileName(reportFullPath));
-File.Copy(reportFullPath, duplicateFile, overwrite: true);
+string duplicateFile = Path.Combine(duplicateDirectory, $"{Path.GetFileName(reportFullPath)}.xml");
+string xml = ProcessDataStandardFormat.GetXml(reportFullPath);
+File.WriteAllText(duplicateFile, xml);
 WaitForFileConsumption(dateTime, descriptions, isDummyRun, successDirectory, duplicateDirectory, collection, duplicateFile);
 }
@@ -157,8 +212,8 @@ public class FileRead : Shared.FileRead, IFileRead
 if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
 SaveIQSFile(reportFullPath, dateTime, descriptions, tests);
 if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
-FileCopy(reportFullPath, dateTime, descriptions);
-results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics[0]), tests, jsonElements, new List<FileInfo>());
+WriteFile(reportFullPath, dateTime, descriptions);
+results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics), tests, jsonElements, new List<FileInfo>());
 return results;
 }
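
Taken together, the hunks above move GetLines into this handler and change the duplicate step from a raw File.Copy to writing an XML rendering of the report as "<name>.xml". A self-contained sketch of that write path only; the directory handling is simplified and the renderer is passed in as a delegate, standing in for the ProcessDataStandardFormat.GetXml call shown in the diff:

using System;
using System.IO;

internal static class DuplicateAsXmlSketch
{
    // Writes the XML rendering of a report next to the duplicate directory as
    // "<original name>.xml" instead of byte-copying the original file.
    internal static string Write(string reportFullPath, string duplicateDirectory, Func<string, string> getXml)
    {
        _ = Directory.CreateDirectory(duplicateDirectory); // mirrors the Directory.Exists guard above
        string duplicateFile = Path.Combine(duplicateDirectory, $"{Path.GetFileName(reportFullPath)}.xml");
        File.WriteAllText(duplicateFile, getXml(reportFullPath)); // getXml stands in for ProcessDataStandardFormat.GetXml
        return duplicateFile;
    }
}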

View File

@@ -88,7 +88,7 @@ public class FileRead : Shared.FileRead, IFileRead
 string processDataStandardFormatMappingOldColumnNames = GetPropertyValue(cellInstanceConnectionName, modelObjectParameters, "Process.Data.Standard.Format.Mapping.Old.Column.Names");
 string processDataStandardFormatMappingNewColumnNames = GetPropertyValue(cellInstanceConnectionName, modelObjectParameters, "Process.Data.Standard.Format.Mapping.New.Column.Names");
 string processDataStandardFormatMappingColumnIndices = GetPropertyValue(cellInstanceConnectionName, modelObjectParameters, "Process.Data.Standard.Format.Mapping.Column.Indices");
-_ProcessDataStandardFormatMapping = GetProcessDataStandardFormatMapping(processDataStandardFormatMappingOldColumnNames,
+_ProcessDataStandardFormatMapping = ProcessDataStandardFormatMapping.Get(processDataStandardFormatMappingOldColumnNames,
 processDataStandardFormatMappingNewColumnNames,
 processDataStandardFormatMappingColumnIndices);
 }
@@ -169,46 +169,6 @@ public class FileRead : Shared.FileRead, IFileRead
 return results;
 }
-private static ProcessDataStandardFormatMapping GetProcessDataStandardFormatMapping(string processDataStandardFormatMappingOldColumnNames, string processDataStandardFormatMappingNewColumnNames, string processDataStandardFormatMappingColumnIndices)
-{
-ProcessDataStandardFormatMapping result;
-string[] segmentsB;
-List<string> distinct = new();
-Dictionary<string, string> keyValuePairs = new();
-string args4 = "Time,HeaderUniqueId,UniqueId,Date,SP101";
-string args5 = ",SP101";
-string args6 = ",SP101";
-string args7 = "Test|EventId,Recipe|Session,|SP101";
// string args8 = "Time,A_LOGISTICS,B_LOGISTICS,Test,Count,Index,MesEntity,Date,Employee,Lot,PSN,Reactor,Recipe,Grade,HeaderUniqueId,RDS,Session,Side,SrcDest,UniqueId,WaferID,DcnAllMax,DcnAllMean,DcnAllMin,DcnAllStdDev,DcnAreaCountMax,DcnAreaCountMean,DcnAreaCountMin,DcnAreaCountStdDev,DcnAreaMax,DcnAreaMean,DcnAreaMin,DcnAreaStdDev,DcnBin1Max,DcnBin1Mean,DcnBin1Min,DcnBin1StdDev,DcnBin2Max,DcnBin2Mean,DcnBin2Min,DcnBin2StdDev,DcnBin3Max,DcnBin3Mean,DcnBin3Min,DcnBin3StdDev,DcnBin4Max,DcnBin4Mean,DcnBin4Min,DcnBin4StdDev,DcnBin5Max,DcnBin5Mean,DcnBin5Min,DcnBin5StdDev,DcnBin6Max,DcnBin6Mean,DcnBin6Min,DcnBin6StdDev,DcnBin7Max,DcnBin7Mean,DcnBin7Min,DcnBin7StdDev,DcnBin8Max,DcnBin8Mean,DcnBin8Min,DcnBin8StdDev,DcnHazeAvgMax,DcnHazeAvgMean,DcnHazeAvgMin,DcnHazeAvgStdDev,DcnHazeMedianMax,DcnHazeMedianMean,DcnHazeMedianMin,DcnHazeMedianStdDev,DcnHazeStdDevMax,DcnHazeStdDevMean,DcnHazeStdDevMin,DcnHazeStdDevStdDev,DcnLpdESMax,DcnLpdESMean,DcnLpdESMin,DcnLpdESStdDev,DcnLpdMax,DcnLpdMean,DcnLpdMin,DcnLpdNMax,DcnLpdNMean,DcnLpdNMin,DcnLpdNStdDev,DcnLpdStdDev,DcnMicroScrMax,DcnMicroScrMean,DcnMicroScrMin,DcnMicroScrStdDev,DcnScrMax,DcnScrMean,DcnScrMin,DcnScrStdDev,DcnSlipMax,DcnSlipMean,DcnSlipMin,DcnSlipStdDev,DnnAllMax,DnnAllMean,DnnAllMin,DnnAllStdDev,DnnAreaCountMax,DnnAreaCountMean,DnnAreaCountMin,DnnAreaCountStdDev,DnnAreaMax,DnnAreaMean,DnnAreaMin,DnnAreaStdDev,DnnBin1Max,DnnBin1Mean,DnnBin1Min,DnnBin1StdDev,DnnBin2Max,DnnBin2Mean,DnnBin2Min,DnnBin2StdDev,DnnBin3Max,DnnBin3Mean,DnnBin3Min,DnnBin3StdDev,DnnBin4Max,DnnBin4Mean,DnnBin4Min,DnnBin4StdDev,DnnBin5Max,DnnBin5Mean,DnnBin5Min,DnnBin5StdDev,DnnBin6Max,DnnBin6Mean,DnnBin6Min,DnnBin6StdDev,DnnBin7Max,DnnBin7Mean,DnnBin7Min,DnnBin7StdDev,DnnBin8Max,DnnBin8Mean,DnnBin8Min,DnnBin8StdDev,DnnHazeAvgMax,DnnHazeAvgMean,DnnHazeAvgMin,DnnHazeAvgStdDev,DnnHazeMedianMax,DnnHazeMedianMean,DnnHazeMedianMin,DnnHazeMedianStdDev,DnnHazeStdDevMax,DnnHazeStdDevMean,DnnHazeStdDevMin,DnnHazeStdDevStdDev,DnnLpdESMax,DnnLpdESMean,DnnLpdESMin,DnnLpdESStdDev,DnnLpdMax,DnnLpdMean,DnnLpdMin,DnnLpdNMax,DnnLpdNMean,DnnLpdNMin,DnnLpdNStdDev,DnnLpdStdDev,DnnMicroScrMax,DnnMicroScrMean,DnnMicroScrMin,DnnMicroScrStdDev,DnnScrMax,DnnScrMean,DnnScrMin,DnnScrStdDev,DnnSlipMax,DnnSlipMean,DnnSlipMin,DnnSlipStdDev,DwnAllMax,DwnAllMean,DwnAllMin,DwnAllStdDev,DwnAreaCountMax,DwnAreaCountMean,DwnAreaCountMin,DwnAreaCountStdDev,DwnAreaMax,DwnAreaMean,DwnAreaMin,DwnAreaStdDev,DwnBin1Max,DwnBin1Mean,DwnBin1Min,DwnBin1StdDev,DwnBin2Max,DwnBin2Mean,DwnBin2Min,DwnBin2StdDev,DwnBin3Max,DwnBin3Mean,DwnBin3Min,DwnBin3StdDev,DwnBin4Max,DwnBin4Mean,DwnBin4Min,DwnBin4StdDev,DwnBin5Max,DwnBin5Mean,DwnBin5Min,DwnBin5StdDev,DwnBin6Max,DwnBin6Mean,DwnBin6Min,DwnBin6StdDev,DwnBin7Max,DwnBin7Mean,DwnBin7Min,DwnBin7StdDev,DwnBin8Max,DwnBin8Mean,DwnBin8Min,DwnBin8StdDev,DwnHazeAvgMax,DwnHazeAvgMean,DwnHazeAvgMin,DwnHazeAvgStdDev,DwnHazeMedianMax,DwnHazeMedianMean,DwnHazeMedianMin,DwnHazeMedianStdDev,DwnHazeStdDevMax,DwnHazeStdDevMean,DwnHazeStdDevMin,DwnHazeStdDevStdDev,DwnLpdESMax,DwnLpdESMean,DwnLpdESMin,DwnLpdESStdDev,DwnLpdMax,DwnLpdMean,DwnLpdMin,DwnLpdNMax,DwnLpdNMean,DwnLpdNMin,DwnLpdNStdDev,DwnLpdStdDev,DwnMicroScrMax,DwnMicroScrMean,DwnMicroScrMin,DwnMicroScrStdDev,DwnScrMax,DwnScrMean,DwnScrMin,DwnScrStdDev,DwnSlipMax,DwnSlipMean,DwnSlipMin,DwnSlipStdDev,DcnAll,DcnArea,DcnAreaCount,DcnBin1,DcnBin2,DcnBin3,DcnBin4,DcnBin5,DcnBin6,DcnBin7,DcnBin8,DcnHazeAvg,DcnHazeMedian,DcnHazeStdDev,DcnLpd,DcnLpdES,DcnLpdN,DcnMicroScr,DcnScr,DcnSlip,DnnAll,DnnArea,DnnAreaCount,DnnBin1,DnnBin2,DnnBin3,Dn
nBin4,DnnBin5,DnnBin6,DnnBin7,DnnBin8,DnnHazeAvg,DnnHazeMedian,DnnHazeStdDev,DnnLpd,DnnLpdES,DnnLpdN,DnnMicroScr,DnnScr,DnnSlip,DwnAll,DwnArea,DwnAreaCount,DwnBin1,DwnBin2,DwnBin3,DwnBin4,DwnBin5,DwnBin6,DwnBin7,DwnBin8,DwnHazeAvg,DwnHazeMedian,DwnHazeStdDev,DwnLpd,DwnLpdES,DwnLpdN,DwnMicroScr,DwnScr,DwnSlip";
// string args9 = "Time,A_LOGISTICS,B_LOGISTICS,Count,Sequence,MesEntity,Index,Lot,Session,DcnAllMin,DcnLpdMin,DcnLpdNMin,DcnLpdESMin,DcnMicroScrMin,DcnScrMin,DcnSlipMin,DcnAreaCountMin,DcnAreaMin,DcnHazeAvgMin,DcnHazeMedianMin,DcnHazeStdDevMin,DcnBin1Min,DcnBin2Min,DcnBin3Min,DcnBin4Min,DcnBin5Min,DcnBin6Min,DcnBin7Min,DcnBin8Min,DcnAllMax,DcnLpdMax,DcnLpdNMax,DcnLpdESMax,DcnMicroScrMax,DcnScrMax,DcnSlipMax,DcnAreaCountMax,DcnAreaMax,DcnHazeAvgMax,DcnHazeMedianMax,DcnHazeStdDevMax,DcnBin1Max,DcnBin2Max,DcnBin3Max,DcnBin4Max,DcnBin5Max,DcnBin6Max,DcnBin7Max,DcnBin8Max,DcnAllMean,DcnLpdMean,DcnLpdNMean,DcnLpdESMean,DcnMicroScrMean,DcnScrMean,DcnSlipMean,DcnAreaCountMean,DcnAreaMean,DcnHazeAvgMean,DcnHazeMedianMean,DcnHazeStdDevMean,DcnBin1Mean,DcnBin2Mean,DcnBin3Mean,DcnBin4Mean,DcnBin5Mean,DcnBin6Mean,DcnBin7Mean,DcnBin8Mean,DcnAllStdDev,DcnLpdStdDev,DcnLpdNStdDev,DcnLpdESStdDev,DcnMicroScrStdDev,DcnScrStdDev,DcnSlipStdDev,DcnAreaCountStdDev,DcnAreaStdDev,DcnHazeAvgStdDev,DcnHazeMedianStdDev,DcnHazeStdDevStdDev,DcnBin1StdDev,DcnBin2StdDev,DcnBin3StdDev,DcnBin4StdDev,DcnBin5StdDev,DcnBin6StdDev,DcnBin7StdDev,DcnBin8StdDev,DwnAllMin,DwnLpdMin,DwnLpdNMin,DwnLpdESMin,DwnMicroScrMin,DwnScrMin,DwnSlipMin,DwnAreaCountMin,DwnAreaMin,DwnHazeAvgMin,DwnHazeMedianMin,DwnHazeStdDevMin,DwnBin1Min,DwnBin2Min,DwnBin3Min,DwnBin4Min,DwnBin5Min,DwnBin6Min,DwnBin7Min,DwnBin8Min,DwnAllMax,DwnLpdMax,DwnLpdNMax,DwnLpdESMax,DwnMicroScrMax,DwnScrMax,DwnSlipMax,DwnAreaCountMax,DwnAreaMax,DwnHazeAvgMax,DwnHazeMedianMax,DwnHazeStdDevMax,DwnBin1Max,DwnBin2Max,DwnBin3Max,DwnBin4Max,DwnBin5Max,DwnBin6Max,DwnBin7Max,DwnBin8Max,DwnAllMean,DwnLpdMean,DwnLpdNMean,DwnLpdESMean,DwnMicroScrMean,DwnScrMean,DwnSlipMean,DwnAreaCountMean,DwnAreaMean,DwnHazeAvgMean,DwnHazeMedianMean,DwnHazeStdDevMean,DwnBin1Mean,DwnBin2Mean,DwnBin3Mean,DwnBin4Mean,DwnBin5Mean,DwnBin6Mean,DwnBin7Mean,DwnBin8Mean,DwnAllStdDev,DwnLpdStdDev,DwnLpdNStdDev,DwnLpdESStdDev,DwnMicroScrStdDev,DwnScrStdDev,DwnSlipStdDev,DwnAreaCountStdDev,DwnAreaStdDev,DwnHazeAvgStdDev,DwnHazeMedianStdDev,DwnHazeStdDevStdDev,DwnBin1StdDev,DwnBin2StdDev,DwnBin3StdDev,DwnBin4StdDev,DwnBin5StdDev,DwnBin6StdDev,DwnBin7StdDev,DwnBin8StdDev,DnnAllMin,DnnLpdMin,DnnLpdNMin,DnnLpdESMin,DnnMicroScrMin,DnnScrMin,DnnSlipMin,DnnAreaCountMin,DnnAreaMin,DnnHazeAvgMin,DnnHazeMedianMin,DnnHazeStdDevMin,DnnBin1Min,DnnBin2Min,DnnBin3Min,DnnBin4Min,DnnBin5Min,DnnBin6Min,DnnBin7Min,DnnBin8Min,DnnAllMax,DnnLpdMax,DnnLpdNMax,DnnLpdESMax,DnnMicroScrMax,DnnScrMax,DnnSlipMax,DnnAreaCountMax,DnnAreaMax,DnnHazeAvgMax,DnnHazeMedianMax,DnnHazeStdDevMax,DnnBin1Max,DnnBin2Max,DnnBin3Max,DnnBin4Max,DnnBin5Max,DnnBin6Max,DnnBin7Max,DnnBin8Max,DnnAllMean,DnnLpdMean,DnnLpdNMean,DnnLpdESMean,DnnMicroScrMean,DnnScrMean,DnnSlipMean,DnnAreaCountMean,DnnAreaMean,DnnHazeAvgMean,DnnHazeMedianMean,DnnHazeStdDevMean,DnnBin1Mean,DnnBin2Mean,DnnBin3Mean,DnnBin4Mean,DnnBin5Mean,DnnBin6Mean,DnnBin7Mean,DnnBin8Mean,DnnAllStdDev,DnnLpdStdDev,DnnLpdNStdDev,DnnLpdESStdDev,DnnMicroScrStdDev,DnnScrStdDev,DnnSlipStdDev,DnnAreaCountStdDev,DnnAreaStdDev,DnnHazeAvgStdDev,DnnHazeMedianStdDev,DnnHazeStdDevStdDev,DnnBin1StdDev,DnnBin2StdDev,DnnBin3StdDev,DnnBin4StdDev,DnnBin5StdDev,DnnBin6StdDev,DnnBin7StdDev,DnnBin8StdDev,Side,WaferID,Grade,SrcDest,DcnAll,DcnLpd,DcnLpdN,DcnLpdES,DcnMicroScr,DcnScr,DcnSlip,DcnAreaCount,DcnArea,DcnHazeAvg,DcnHazeMedian,DcnHazeStdDev,DcnBin1,DcnBin2,DcnBin3,DcnBin4,DcnBin5,DcnBin6,DcnBin7,DcnBin8,DwnAll,DwnLpd,DwnLpdN,DwnLpdES,DwnMicroScr,DwnScr,DwnSlip,DwnAreaCount,DwnArea,DwnHazeAvg,DwnHazeMedian,DwnHaze
StdDev,DwnBin1,DwnBin2,DwnBin3,DwnBin4,DwnBin5,DwnBin6,DwnBin7,DwnBin8,DnnAll,DnnLpd,DnnLpdN,DnnLpdES,DnnMicroScr,DnnScr,DnnSlip,DnnAreaCount,DnnArea,DnnHazeAvg,DnnHazeMedian,DnnHazeStdDev,DnnBin1,DnnBin2,DnnBin3,DnnBin4,DnnBin5,DnnBin6,DnnBin7,DnnBin8,RDS,PSN,Reactor,Layer,Zone,Employee,InferredLot,Date,EventId";
// string args10 = "0,1,2,319,3,6,5,320,318,7,314,315,8,251,-1,313,8,249,252,-1,250,29,49,9,69,36,56,16,76,37,57,17,77,41,61,21,81,42,62,22,82,43,63,23,83,44,64,24,84,45,65,25,85,46,66,26,86,47,67,27,87,48,68,28,88,38,58,18,78,39,59,19,79,40,60,20,80,32,52,12,72,30,50,10,31,51,11,71,70,33,53,13,73,34,54,14,74,35,55,15,75,189,209,169,229,196,216,176,236,197,217,177,237,201,221,181,241,202,222,182,242,203,223,183,243,204,224,184,244,205,225,185,245,206,226,186,246,207,227,187,247,208,228,188,248,198,218,178,238,199,219,179,239,200,220,180,240,192,212,172,232,190,210,170,191,211,171,231,230,193,213,173,233,194,214,174,234,195,215,175,235,109,129,89,149,116,136,96,156,117,137,97,157,121,141,101,161,122,142,102,162,123,143,103,163,124,144,104,164,125,145,105,165,126,146,106,166,127,147,107,167,128,148,108,168,118,138,98,158,119,139,99,159,120,140,100,160,112,132,92,152,110,130,90,111,131,91,151,150,113,133,93,153,114,134,94,154,115,135,95,155,253,261,260,265,266,267,268,269,270,271,272,262,263,264,254,256,255,257,258,259,293,301,300,305,306,307,308,309,310,311,312,302,303,304,294,296,295,297,298,299,273,281,280,285,286,287,288,289,290,291,292,282,283,284,274,276,275,277,278,279";
-string[] segments = args7.Split(',');
-ReadOnlyCollection<string> ignoreColumns = new(args4.Split(','));
-ReadOnlyCollection<string> backfillColumns = new(args5.Split(','));
-ReadOnlyCollection<string> indexOnlyColumns = new(args6.Split(','));
-ReadOnlyCollection<string> newColumnNames = new(processDataStandardFormatMappingNewColumnNames.Split(','));
-ReadOnlyCollection<string> oldColumnNames = new(processDataStandardFormatMappingOldColumnNames.Split(','));
-ReadOnlyCollection<int> columnIndices = new(processDataStandardFormatMappingColumnIndices.Split(',').Select(int.Parse).ToArray());
-foreach (string segment in segments)
-{
-segmentsB = segment.Split('|');
-if (segmentsB.Length != 2)
-continue;
-if (distinct.Contains(segmentsB[0]))
-continue;
-distinct.Add(segmentsB[0]);
-keyValuePairs.Add(segmentsB[0], segmentsB[1]);
-}
-result = new(backfillColumns: backfillColumns,
-columnIndices: columnIndices,
-newColumnNames: newColumnNames,
-ignoreColumns: ignoreColumns,
-indexOnlyColumns: indexOnlyColumns,
-keyValuePairs: new(keyValuePairs),
-oldColumnNames: oldColumnNames);
-return result;
-}
 private static ReadOnlyCollection<PreWith> GetPreWithCollection(ReadOnlyCollection<Pre> preCollection)
 {
 List<PreWith> results = new();
@@ -268,7 +228,7 @@ public class FileRead : Shared.FileRead, IFileRead
 }
 }
-private static ReadOnlyCollection<Pre> GetPreCollection(int numberLength, string parentDirectory, ReadOnlyCollection<string> matchingFiles, bool _)
+private static ReadOnlyCollection<Pre> GetPreCollection(int numberLength, string parentDirectory, ReadOnlyCollection<string> matchingFiles)
 {
 List<Pre> results = new();
 Pre pre;
@@ -313,8 +273,13 @@ public class FileRead : Shared.FileRead, IFileRead
 continue;
 if (!_StaticRuns.TryGetValue(_Logistics.Sequence, out List<Shared.Metrology.WS.Results>? wsResults))
 wsResults = null;
+if (processDataStandardFormat.InputPDSF is null)
+File.Move(preWith.MatchingFile, preWith.CheckFile);
+else
+{
 ProcessDataStandardFormat.Write(preWith.CheckFile, processDataStandardFormat, wsResults);
 File.Delete(preWith.MatchingFile);
+}
 if (Directory.Exists(preWith.NoWaitDirectory))
 {
 post = new(preWith.CheckFile, preWith.ErrFile);
@@ -366,10 +331,7 @@ public class FileRead : Shared.FileRead, IFileRead
 { CreatePointerFile(numberLength, parentParentDirectory, matchingFiles); }
 catch (Exception) { }
 }
-JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(processDataStandardFormat);
-List<txt.Description> descriptions = txt.ProcessData.GetDescriptions(jsonElements);
-bool mesEntityMatchesProcess = descriptions.Count > 0 && descriptions[0].MesEntity == descriptions[0].Reactor;
-ReadOnlyCollection<Pre> preCollection = GetPreCollection(numberLength, parentParentDirectory, matchingFiles, mesEntityMatchesProcess);
+ReadOnlyCollection<Pre> preCollection = GetPreCollection(numberLength, parentParentDirectory, matchingFiles);
 ReadOnlyCollection<PreWith> preWithCollection = GetPreWithCollection(preCollection);
 MoveCollection(dateTime, processDataStandardFormat, preWithCollection);
 return results;

View File

@@ -9,7 +9,6 @@ using System.Collections.Generic;
 using System.Globalization;
 using System.IO;
 using System.Linq;
-using System.Text;
 using System.Text.Json;
 namespace Adaptation.FileHandlers.OpenInsight;
@@ -17,9 +16,7 @@ namespace Adaptation.FileHandlers.OpenInsight;
 public class FileRead : Shared.FileRead, IFileRead
 {
-private string _LastLines;
 private readonly string _IqsConnectionString;
-private readonly string _OpenInsightFilePattern;
 private readonly string _OpenInsightApiECDirectory;
 public FileRead(ISMTP smtp, Dictionary<string, string> fileParameter, string cellInstanceName, int? connectionCount, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList<ModelObjectParameterDefinition> modelObjectParameters, string equipmentDictionaryName, Dictionary<string, List<long>> dummyRuns, Dictionary<long, List<WS.Results>> staticRuns, bool useCyclicalForDescription, bool isEAFHosted) :
@@ -34,10 +31,8 @@ public class FileRead : Shared.FileRead, IFileRead
 throw new Exception(cellInstanceConnectionName);
 if (!_IsDuplicator)
 throw new Exception(cellInstanceConnectionName);
-_LastLines = string.Empty;
 _IqsConnectionString = GetPropertyValue(cellInstanceConnectionName, modelObjectParameters, "IQS.ConnectionString");
 _OpenInsightApiECDirectory = GetPropertyValue(cellInstanceConnectionName, modelObjectParameters, "API.EC.Directory");
-_OpenInsightFilePattern = GetPropertyValue(cellInstanceConnectionName, modelObjectParameters, "OpenInsight.FilePattern");
 }
 void IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, Exception exception)
@@ -115,104 +110,44 @@ public class FileRead : Shared.FileRead, IFileRead
 return results;
 }
-internal static Tuple<string, string> GetLines(Logistics logistics, List<txt.Description> descriptions)
-{
-StringBuilder result = new();
-char del = '\t';
-txt.Description x = descriptions[0];
-_ = result.Append(x.DcnLpdMin).Append(del). // 001 -
-Append(x.DcnLpdMax).Append(del). // 002 -
-Append(x.DcnLpdMean).Append(del). // 003 - DCN LPD
-Append(x.DcnAreaCountMin).Append(del). // 004 -
-Append(x.DcnAreaCountMax).Append(del). // 005 -
-Append(x.DcnAreaCountMean).Append(del).// 006 - DCN Area
-Append(x.DcnAreaMin).Append(del). // 007 -
-Append(x.DcnAreaMax).Append(del). // 008 -
-Append(x.Date).Append(del). // 009 -
-Append(x.DcnHazeAvgMean).Append(del). // 010 - Haze Average
-Append(string.Empty).Append(del). // 011 -
-Append(string.Empty).Append(del). // 012 -
-Append(string.Empty).Append(del). // 013 -
-Append(string.Empty).Append(del). // 014 -
-Append(string.Empty).Append(del). // 015 -
-Append(string.Empty).Append(del). // 016 -
-Append(string.Empty).Append(del). // 017 -
-Append(string.Empty).Append(del). // 018 -
-Append(string.Empty).Append(del). // 019 -
-Append(string.Empty).Append(del). // 020 -
-Append(string.Empty).Append(del). // 021 -
-Append(string.Empty).Append(del). // 022 -
-Append(string.Empty).Append(del). // 023 -
-Append(string.Empty).Append(del). // 024 -
-Append(string.Empty).Append(del). // 025 -
-Append(string.Empty).Append(del). // 026 -
-Append(string.Empty).Append(del). // 027 -
-Append(x.RDS).Append(del). // 028 - Lot
-Append(x.Reactor).Append(del). // 029 - Process
-Append(x.Recipe).Append(del). // 030 - Part
-Append(x.DcnScrMean).Append(del). // 031 - Scratch Count
-Append(string.Empty).Append(del). // 032 -
-Append(string.Empty).Append(del). // 033 -
-Append(string.Empty).Append(del). // 034 -
-Append(x.DcnMicroScrMean).Append(del). // 035 - Scratch Length
-Append(string.Empty).Append(del). // 036 -
-Append(string.Empty).Append(del). // 037 -
-Append(string.Empty).Append(del). // 038 -
-Append(x.DcnAllMean).Append(del). // 039 - Average Sum of Defects
-Append(x.DcnAllMax).Append(del). // 040 - Max Sum of defects
-Append(x.DcnAllMin).Append(del). // 041 - Min Sum of Defects
-Append(string.Empty).Append(del). // 042 -
-Append(logistics.MesEntity).Append(del). // 043 -
-Append(x.DcnAreaMean).Append(del). // 044 - DCN MM2
-AppendLine();
-return new Tuple<string, string>(result.ToString(), x.Date);
-}
 private void SaveOpenInsightFile(string reportFullPath, DateTime dateTime, ProcessDataStandardFormat processDataStandardFormat, List<txt.Description> descriptions, Test[] tests)
 {
+string duplicateFile;
 bool isDummyRun = false;
 List<(Shared.Properties.IScopeInfo, string)> collection = new();
 string duplicateDirectory = Path.Combine(_FileConnectorConfiguration.SourceFileLocation, _CellInstanceName);
 if (!Directory.Exists(duplicateDirectory))
 _ = Directory.CreateDirectory(duplicateDirectory);
 string successDirectory = _FileConnectorConfiguration.AlternateTargetFolder;
-if (!Directory.Exists(Path.Combine(duplicateDirectory, "1")))
-{
-string parentParent = GetParentParent(_FileConnectorConfiguration.SourceFileLocation);
-if (parentParent.Contains(_CellInstanceName))
-parentParent = Path.GetDirectoryName(parentParent);
-duplicateDirectory = Path.Combine(parentParent, "Data");
-if (!Directory.Exists(duplicateDirectory))
-_ = Directory.CreateDirectory(duplicateDirectory);
-}
-string duplicateFile = Path.Combine(duplicateDirectory, Path.GetFileName(reportFullPath));
 if (descriptions.Count == 0 || tests.Length == 0)
-_LastLines = string.Empty;
+duplicateFile = Path.Combine(duplicateDirectory, Path.GetFileName(reportFullPath));
 else
-{
-Tuple<string, string> lines = GetLines(_Logistics, descriptions);
-string check = lines.Item1.Replace(lines.Item2, "$Date$");
-bool save = string.IsNullOrEmpty(_LastLines) || check != _LastLines;
-if (save && !string.IsNullOrEmpty(check))
 {
 long? subgroupId;
-_LastLines = check;
+string fileName = Path.GetFileName(reportFullPath);
 long breakAfter = dateTime.AddSeconds(_BreakAfterSeconds).Ticks;
 long preWait = _FileConnectorConfiguration?.FileHandleWaitTime is null ? dateTime.AddMilliseconds(1234).Ticks : dateTime.AddMilliseconds(_FileConnectorConfiguration.FileHandleWaitTime.Value).Ticks;
 if (string.IsNullOrEmpty(descriptions[0].Reactor) || string.IsNullOrEmpty(descriptions[0].PSN))
 subgroupId = null;
 else
 (subgroupId, int? _, string _) = FromIQS.GetCommandText(_IqsConnectionString, _Logistics, descriptions[0], breakAfter, preWait);
-if (subgroupId is null)
-collection.Add(new(new ScopeInfo(tests[0], _OpenInsightFilePattern), lines.Item1));
-else
-collection.Add(new(new ScopeInfo(tests[0], $"{subgroupId.Value} {_OpenInsightFilePattern}"), lines.Item1));
-string weekOfYear = _Calendar.GetWeekOfYear(_Logistics.DateTimeFromSequence, CalendarWeekRule.FirstDay, DayOfWeek.Sunday).ToString("00");
-FromIQS.Save(_OpenInsightApiECDirectory, _Logistics, reportFullPath, processDataStandardFormat, descriptions.First(), lines.Item1, subgroupId, weekOfYear);
+if (_StaticRuns.TryGetValue(_Logistics.Sequence, out List<WS.Results> wsResults))
+{
+if (wsResults is null || wsResults.Count != 1)
+throw new NullReferenceException($"{nameof(wsResults)} {wsResults?.Count} != 1 {_Logistics.Sequence}!");
+lock (_StaticRuns)
+wsResults[0] = WS.Results.Get(wsResults[0], subgroupId);
+}
+if (!fileName.StartsWith("Viewer"))
+duplicateFile = Path.Combine(duplicateDirectory, $"{subgroupId} {fileName}".TrimStart());
+else
+duplicateFile = Path.Combine(duplicateDirectory, $"{$"Viewer {subgroupId}".TrimEnd()} {fileName.Replace("Viewer", string.Empty)}");
+string weekOfYear = _Calendar.GetWeekOfYear(_Logistics.DateTimeFromSequence, CalendarWeekRule.FirstDay, DayOfWeek.Sunday).ToString("00");
+FromIQS.Save(_OpenInsightApiECDirectory, _Logistics, reportFullPath, processDataStandardFormat, descriptions.First(), subgroupId, weekOfYear);
 }
-if (!Directory.Exists(duplicateDirectory))
-_ = Directory.CreateDirectory(duplicateDirectory);
 if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
-{
-File.Copy(reportFullPath, duplicateFile, overwrite: true);
 WaitForFileConsumption(dateTime, descriptions, isDummyRun, successDirectory, duplicateDirectory, collection, duplicateFile);
 }
 }
@@ -228,7 +163,7 @@ public class FileRead : Shared.FileRead, IFileRead
 Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
 if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
 SaveOpenInsightFile(reportFullPath, dateTime, processDataStandardFormat, descriptions, tests);
-results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics[0]), tests, jsonElements, new List<FileInfo>());
+results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics), tests, jsonElements, new List<FileInfo>());
 return results;
 }
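
The reworked SaveOpenInsightFile above drops the _LastLines and _OpenInsightFilePattern tracking and instead folds the IQS subgroup id into the duplicate file name, with a special case for reports already prefixed "Viewer". A stand-alone sketch of just that naming rule, as inferred from the diff:

using System.IO;

internal static class DuplicateNameSketch
{
    // "<subgroupId> <file>" normally, or "Viewer <subgroupId> <rest>" when the
    // report is already a Viewer file; TrimStart/TrimEnd keep the name tidy
    // when subgroupId is null.
    internal static string Get(string duplicateDirectory, string reportFullPath, long? subgroupId)
    {
        string fileName = Path.GetFileName(reportFullPath);
        if (!fileName.StartsWith("Viewer"))
            return Path.Combine(duplicateDirectory, $"{subgroupId} {fileName}".TrimStart());
        return Path.Combine(duplicateDirectory, $"{$"Viewer {subgroupId}".TrimEnd()} {fileName.Replace("Viewer", string.Empty)}");
    }
}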

View File

@@ -376,7 +376,7 @@ public class FromIQS
 return result;
 }
-internal static void Save(string openInsightApiECDirectory, Logistics logistics, string reportFullPath, ProcessDataStandardFormat processDataStandardFormat, txt.Description description, string lines, long? subGroupId, string weekOfYear)
+internal static void Save(string openInsightApiECDirectory, Logistics logistics, string reportFullPath, ProcessDataStandardFormat processDataStandardFormat, txt.Description description, long? subGroupId, string weekOfYear)
 {
 string checkFile;
 string fileName = Path.GetFileName(reportFullPath);
@@ -390,15 +390,9 @@
 checkFile = Path.Combine(ecDirectory, fileName);
 if (ecExists && !File.Exists(checkFile))
 File.Copy(reportFullPath, checkFile);
-checkFile = Path.Combine(ecDirectory, $"{logistics.DateTimeFromSequence.Ticks}.txt");
-if (ecExists && !File.Exists(checkFile))
-File.WriteAllText(checkFile, lines);
 checkFile = Path.Combine(ecDirectory, $"{logistics.DateTimeFromSequence.Ticks}.json");
 if (ecExists && !File.Exists(checkFile))
 File.WriteAllText(checkFile, json);
-checkFile = Path.Combine(ecDirectory, $"{logistics.DateTimeFromSequence.Ticks}.lbl");
-if (ecExists && !File.Exists(checkFile))
-File.WriteAllText(checkFile, processDataStandardFormat.Body[processDataStandardFormat.Body.Count - 1]);
 }
 private static string GetCommandText(string[] iqsCopyValues)

View File

@@ -147,7 +147,7 @@ public class FileRead : Shared.FileRead, IFileRead
 Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
 if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
 SendData(reportFullPath, dateTime, descriptions);
-results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics[0]), tests, jsonElements, new List<FileInfo>());
+results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics), tests, jsonElements, new List<FileInfo>());
 return results;
 }

View File

@@ -175,7 +175,7 @@ public class FileRead : Shared.FileRead, IFileRead
 Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
 if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
 PostOpenInsightMetrologyViewerAttachments(descriptions);
-results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics[0]), tests, jsonElements, new List<FileInfo>());
+results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics), tests, jsonElements, new List<FileInfo>());
 return results;
 }

View File

@@ -172,7 +172,7 @@ public class FileRead : Shared.FileRead, IFileRead
 JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(processDataStandardFormat);
 List<txt.Description> descriptions = txt.ProcessData.GetDescriptions(jsonElements);
 Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
-results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics[0]), tests, jsonElements, new List<FileInfo>());
+results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics), tests, jsonElements, new List<FileInfo>());
 if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
 DirectoryMove(reportFullPath, dateTime, descriptions);
 else if (!_IsEAFHosted)

View File

@@ -125,7 +125,7 @@ public class FileRead : Shared.FileRead, IFileRead
 Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
 if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
 FileCopy(reportFullPath, dateTime, descriptions);
-results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics[0]), tests, jsonElements, new List<FileInfo>());
+results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics), tests, jsonElements, new List<FileInfo>());
 return results;
 }

View File

@@ -11,13 +11,17 @@ public class Common
 public int? ReactorNumber { get; }
 public string? Zone { get; }
 public string? Employee { get; }
+public RunDataSheetRoot? RunDataSheetRoot { get; }
+public WorkOrder? WorkOrder { get; }
 public Common(string? layer,
 string? psn,
 int? rdsNumber,
 int? reactor,
 string? zone,
-string? employee)
+string? employee,
+RunDataSheetRoot? runDataSheetRoot,
+WorkOrder? workOrder)
 {
 Layer = layer;
 PSN = psn;
@@ -25,6 +29,8 @@ public class Common
 ReactorNumber = reactor;
 Zone = zone;
 Employee = employee;
+RunDataSheetRoot = runDataSheetRoot;
+WorkOrder = workOrder;
 }
 }

View File

@@ -3,18 +3,20 @@ namespace Adaptation.FileHandlers.TIBCO.Transport;
 public class CommonB
 {
-public string Comment { get; }
 public string Layer { get; }
+public string LoadLockSide { get; }
 public int? RDSNumber { get; }
+public string ReactorType { get; }
 public string PSN { get; }
 public int? ReactorNumber { get; }
 public string Zone { get; }
-public CommonB(string comment, string layer, int? rdsNumber, string psn, int? reactorNumber, string zone)
+public CommonB(string layer, string loadLockSide, int? rdsNumber, string reactorType, string psn, int? reactorNumber, string zone)
 {
-Comment = comment;
 Layer = layer;
+LoadLockSide = loadLockSide;
 RDSNumber = rdsNumber;
+ReactorType = reactorType;
 PSN = psn;
 ReactorNumber = reactorNumber;
 Zone = zone;
View File

@@ -7,15 +7,17 @@ public class Input
 public string? Area { get; }
 public string? EquipmentType { get; }
-public string? MID { get; }
-public string? Slot { get; }
+public string? LoadLock { get; }
 public string? MesEntity { get; }
+public string? MID { get; }
 public string? Recipe { get; }
 public string? Sequence { get; }
+public string? Slot { get; }
 [System.Text.Json.Serialization.JsonConstructor]
 public Input(string? area,
 string? equipmentType,
+string? loadLock,
 string? mid,
 string? slot,
 string? mesEntity,
@@ -25,6 +27,7 @@
 Area = area;
 EquipmentType = equipmentType;
+LoadLock = loadLock;
 MID = mid;
 Slot = slot;
 MesEntity = mesEntity;
@@ -36,6 +39,7 @@
 {
 Area = input.Area;
 EquipmentType = input.EquipmentType;
+LoadLock = input.LoadLock;
 MID = mid;
 Slot = input.Slot;
 MesEntity = input.MesEntity;

View File

@ -41,7 +41,7 @@ public partial class Job
public DateTime DateTime { get; } public DateTime DateTime { get; }
public List<Item> Items { get; } public List<Item> Items { get; }
public Job(string lsl2SQLConnectionString, string metrologyFileShare, string barcodeHostFileShare, HttpClient httpClient, string mid) public Job(string lsl2SQLConnectionString, string metrologyFileShare, string barcodeHostFileShare, HttpClient httpClient, string mid, DateTime enteredDateTimeFilter, DateTime loadSignatureDateTimeFilter)
{ {
const int zero = 0; const int zero = 0;
Items = new List<Item>(); Items = new List<Item>();
@ -52,10 +52,10 @@ public partial class Job
Common common; Common common;
CommonB commonB; CommonB commonB;
int? reactorNumber; int? reactorNumber;
WorkOrder workOrder;
const string hyphen = "-"; const string hyphen = "-";
const string bioRad2 = "BIORAD2"; const string bioRad2 = "BIORAD2";
const string bioRad3 = "BIORAD3"; const string bioRad3 = "BIORAD3";
RunDataSheetRoot? runDataSheetRoot;
const string twoAlphaPattern = "^[a-zA-z]{2,3}"; const string twoAlphaPattern = "^[a-zA-z]{2,3}";
const string reactorNumberPattern = @"^[0-9]{2}--"; const string reactorNumberPattern = @"^[0-9]{2}--";
Input input = JsonSerializer.Deserialize<Input>(mid) ?? throw new Exception(); Input input = JsonSerializer.Deserialize<Input>(mid) ?? throw new Exception();
@ -65,42 +65,53 @@ public partial class Job
DateTime = new DateTime(sequence); DateTime = new DateTime(sequence);
const string dep08CEPIEPSILON = "DEP08CEPIEPSILON"; const string dep08CEPIEPSILON = "DEP08CEPIEPSILON";
if (input.EquipmentType == dep08CEPIEPSILON) if (input.EquipmentType == dep08CEPIEPSILON)
(common, workOrder) = Get(input, httpClient); common = ReactorGet(input, httpClient);
else if (!string.IsNullOrEmpty(input.MID) && !string.IsNullOrEmpty(input.MesEntity) && Regex.IsMatch(input.MID, reactorNumberPattern) && input.MesEntity is bioRad2 or bioRad3) else if (!string.IsNullOrEmpty(input.MID) && !string.IsNullOrEmpty(input.MesEntity) && Regex.IsMatch(input.MID, reactorNumberPattern) && input.MesEntity is bioRad2 or bioRad3)
(common, workOrder) = Get(input, barcodeHostFileShare); common = Get(input, barcodeHostFileShare, httpClient);
else else
{ {
workOrder = GetWorkOrder(input);
reactorNumber = GetReactorNumber(input); reactorNumber = GetReactorNumber(input);
WorkOrder workOrder = GetWorkOrder(input);
if (workOrder.IsWorkOrder || reactorNumber.HasValue) if (workOrder.IsWorkOrder || reactorNumber.HasValue)
common = new(layer: null, common = new(layer: null,
psn: null, psn: null,
rdsNumber: null, rdsNumber: null,
reactor: null, reactor: null,
zone: null, zone: null,
employee: null); employee: null,
runDataSheetRoot: null,
workOrder: workOrder);
else if (!string.IsNullOrEmpty(input.MID) && input.MID.Length is 2 or 3 && Regex.IsMatch(input.MID, twoAlphaPattern)) else if (!string.IsNullOrEmpty(input.MID) && input.MID.Length is 2 or 3 && Regex.IsMatch(input.MID, twoAlphaPattern))
common = GetTwoAlphaPattern(metrologyFileShare, input); common = GetTwoAlphaPattern(metrologyFileShare, input);
else else
common = Get(input); common = Get(input, httpClient);
} }
bool isValid = IsValid(common.RDSNumber); bool isValid = IsValid(common.RDSNumber);
if (isValid) if (isValid)
commonB = GetWithValidRDS(lsl2SQLConnectionString, common.Layer, common.PSN, common.RDSNumber, common.ReactorNumber, common.Zone); commonB = GetWithValidRDS(lsl2SQLConnectionString, enteredDateTimeFilter, loadSignatureDateTimeFilter, common.Layer, common.PSN, common.RDSNumber, common.ReactorNumber, common.Zone);
else if (workOrder.IsWorkOrder || common.RDSNumber.HasValue) else if (common.WorkOrder is null || common.WorkOrder.IsWorkOrder || common.RDSNumber.HasValue)
commonB = Get(lsl2SQLConnectionString, common.Layer, common.PSN, common.ReactorNumber, workOrder.SlotNumber, workOrder.WorkOrderNumber, workOrder.WorkOrderCassette, common.Zone); commonB = Get(lsl2SQLConnectionString, enteredDateTimeFilter, loadSignatureDateTimeFilter, common);
else else
commonB = new(comment: hyphen, commonB = new(layer: hyphen,
layer: hyphen, loadLockSide: hyphen,
rdsNumber: common.RDSNumber, rdsNumber: common.RDSNumber,
reactorType: hyphen,
psn: common.PSN, psn: common.PSN,
reactorNumber: common.ReactorNumber, reactorNumber: common.ReactorNumber,
zone: hyphen); zone: hyphen);
if (commonB.RDSNumber is null || common.RunDataSheetRoot is not null)
runDataSheetRoot = common.RunDataSheetRoot;
else
{
try
{ runDataSheetRoot = GetRunDataSheetRoot(httpClient, commonB.RDSNumber.Value); }
catch (Exception)
{ runDataSheetRoot = null; }
}
Qty = "1"; Qty = "1";
Status = hyphen; // INFO Status = hyphen; // INFO
CreationUser = hyphen; // ? CreationUser = hyphen; // ?
LotState = hyphen; // LAYER2 LotState = hyphen; // LAYER2
LastUpdateUser = hyphen; // ?
Equipment = input.MesEntity; // ? Equipment = input.MesEntity; // ?
PackageName = hyphen; // WAFER_ID PackageName = hyphen; // WAFER_ID
Qty2 = input.Sequence; // SEQUENCE Qty2 = input.Sequence; // SEQUENCE
@ -108,17 +119,39 @@ public partial class Job
IsAreaSi = input.Area == "Si"; // N/A IsAreaSi = input.Area == "Si"; // N/A
StateModel = input.EquipmentType; // ? StateModel = input.EquipmentType; // ?
JobName = DateTime.Ticks.ToString(); // ? JobName = DateTime.Ticks.ToString(); // ?
BasicType = GetComment(hyphen, runDataSheetRoot, commonB); // BASIC_TYPE
AutomationMode = string.Concat(DateTime.Ticks, ".", input.MesEntity); // ? AutomationMode = string.Concat(DateTime.Ticks, ".", input.MesEntity); // ?
SpecName = !string.IsNullOrEmpty(commonB.Layer) ? commonB.Layer : hyphen; // LAYER SpecName = !string.IsNullOrEmpty(commonB.Layer) ? commonB.Layer : hyphen; // LAYER
ProductName = !string.IsNullOrEmpty(commonB.PSN) ? commonB.PSN : hyphen; // PRODUCT ProductName = !string.IsNullOrEmpty(commonB.PSN) ? commonB.PSN : hyphen; // PRODUCT
ProcessSpecName = !string.IsNullOrEmpty(commonB.Zone) ? commonB.Zone : hyphen; // WAFER_POS ProcessSpecName = !string.IsNullOrEmpty(commonB.Zone) ? commonB.Zone : hyphen; // WAFER_POS
BasicType = !string.IsNullOrEmpty(commonB.Comment) ? commonB.Comment : hyphen; // BASIC_TYPE
LotName = commonB.RDSNumber is not null ? commonB.RDSNumber.Value.ToString() : input.MID; // MID LotName = commonB.RDSNumber is not null ? commonB.RDSNumber.Value.ToString() : input.MID; // MID
LastUpdateUser = string.IsNullOrEmpty(runDataSheetRoot?.Json) ? "{}" : runDataSheetRoot.Json; // NULL_DATA
ProcessType = commonB.ReactorNumber is not null ? commonB.ReactorNumber.Value.ToString() : hyphen; // PROCESS_JOBID ProcessType = commonB.ReactorNumber is not null ? commonB.ReactorNumber.Value.ToString() : hyphen; // PROCESS_JOBID
Items.Add(new Item { Name = "0", Type = "NA", Number = (0 + 1).ToString(), Qty = "1", CarrierName = hyphen }); Items.Add(new Item { Name = "0", Type = "NA", Number = (0 + 1).ToString(), Qty = "1", CarrierName = hyphen });
} }
} }
private static string GetComment(string hyphen, RunDataSheetRoot? runDataSheetRoot, CommonB commonB)
{
string result;
string? loadLockSide = commonB.LoadLockSide;
if (string.IsNullOrEmpty(loadLockSide) && commonB.RDSNumber is not null)
loadLockSide = runDataSheetRoot?.RunDataSheet?.LoadLockSide;
if (string.IsNullOrEmpty(loadLockSide) || string.IsNullOrEmpty(commonB.ReactorType))
result = hyphen;
else
{
string loadLockSideFull = loadLockSide switch
{
"L" => "Left",
"R" => "Right",
_ => loadLockSide,
};
result = $"{loadLockSideFull} - {commonB.ReactorType}";
}
return result;
}
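A minimal sketch of the BASIC_TYPE rule GetComment encodes, kept standalone so the mapping is easy to see (the reactor-type value in the trailing comment is invented):

// Hedged sketch: expand the abbreviated load-lock side and join it with the
// reactor type; fall back to the hyphen placeholder when either piece is missing.
static string FormatBasicType(string? loadLockSide, string? reactorType, string hyphen = "-")
{
    if (string.IsNullOrEmpty(loadLockSide) || string.IsNullOrEmpty(reactorType))
        return hyphen;
    string loadLockSideFull = loadLockSide switch
    {
        "L" => "Left",
        "R" => "Right",
        _ => loadLockSide,
    };
    return $"{loadLockSideFull} - {reactorType}"; // e.g. ("L", "EpiPro") => "Left - EpiPro"
}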
private static int? GetReactorNumber(Input input) private static int? GetReactorNumber(Input input)
{ {
int? result; int? result;
@ -239,8 +272,9 @@ public partial class Job
return result; return result;
} }
private static Common Get(Input input) private static Common Get(Input input, HttpClient httpClient)
{ {
Common result;
string? psn; string? psn;
string? rds; string? rds;
int rdsCheck; int rdsCheck;
@ -250,6 +284,7 @@ public partial class Job
string? reactor; string? reactor;
string? employee; string? employee;
int? reactorNumber; int? reactorNumber;
RunDataSheetRoot? runDataSheetRoot;
string mid = string.IsNullOrEmpty(input.MID) ? string.Empty : input.MID; string mid = string.IsNullOrEmpty(input.MID) ? string.Empty : input.MID;
if (mid.Length > 2 && mid[0] == '1' && (mid[1] == 'T' || mid[1] == 't')) if (mid.Length > 2 && mid[0] == '1' && (mid[1] == 'T' || mid[1] == 't'))
mid = mid.Substring(2); mid = mid.Substring(2);
@ -259,13 +294,14 @@ public partial class Job
(reactor, rds) = GetReactorAndRDS(input.MID, mid, segments); (reactor, rds) = GetReactorAndRDS(input.MID, mid, segments);
rdsNumber = string.IsNullOrEmpty(rds) || !int.TryParse(rds, out rdsCheck) ? null : rdsCheck; rdsNumber = string.IsNullOrEmpty(rds) || !int.TryParse(rds, out rdsCheck) ? null : rdsCheck;
bool isInvalid = IsInvalid(rdsNumber); bool isInvalid = IsInvalid(rdsNumber);
if (isInvalid || !int.TryParse(reactor, out int reactorCheck)) if (rdsNumber is null || isInvalid || !int.TryParse(reactor, out int reactorCheck))
{ {
psn = null; psn = null;
zone = null; zone = null;
layer = null; layer = null;
employee = null; employee = null;
reactorNumber = null; reactorNumber = null;
runDataSheetRoot = null;
} }
else else
{ {
@ -273,13 +309,17 @@ public partial class Job
reactorNumber = reactorCheck; reactorNumber = reactorCheck;
(layer, psn) = GetLayerAndPSN(segments); (layer, psn) = GetLayerAndPSN(segments);
employee = segments.Length <= 4 ? null : segments[4]; employee = segments.Length <= 4 ? null : segments[4];
runDataSheetRoot = GetRunDataSheetRoot(httpClient, rdsNumber.Value);
} }
return new(layer: layer, result = new(layer: layer,
psn: psn, psn: psn,
rdsNumber: rdsNumber, rdsNumber: rdsNumber,
reactor: reactorNumber, reactor: reactorNumber,
zone: zone, zone: zone,
employee: employee); employee: employee,
runDataSheetRoot: runDataSheetRoot,
workOrder: null);
return result;
} }
private static string[] GetDirectories(string fileShare) private static string[] GetDirectories(string fileShare)
@ -298,6 +338,7 @@ public partial class Job
private static Common GetTwoAlphaPattern(string metrologyFileShare, Input input) private static Common GetTwoAlphaPattern(string metrologyFileShare, Input input)
{ {
Common result;
string lines; string lines;
const int zero = 0; const int zero = 0;
string? psn = null; string? psn = null;
@ -337,12 +378,15 @@ public partial class Job
zone = workMaterialOut.Zone; zone = workMaterialOut.Zone;
break; break;
} }
return new(layer: layer, result = new(layer: layer,
psn: psn, psn: psn,
rdsNumber: rdsNumber, rdsNumber: rdsNumber,
reactor: reactor, reactor: reactor,
zone: zone, zone: zone,
employee: null); employee: null,
runDataSheetRoot: null,
workOrder: null);
return result;
} }
private static List<string> GetFiles(Input input, string barcodeHostFileShare) private static List<string> GetFiles(Input input, string barcodeHostFileShare)
@ -381,84 +425,107 @@ public partial class Job
return result; return result;
} }
private static (Common common, WorkOrder workOrder) Get(Input input, HttpClient httpClient) private static Common ReactorGet(Input input, HttpClient httpClient)
{ {
int? rds; int? rds;
string psn; string? psn;
Common common; Common result;
WorkOrder workOrder;
Task<Stream> streamTask;
Task<HttpResponseMessage> httpResponseMessageTask;
string mid = string.IsNullOrEmpty(input.MID) ? string.Empty : input.MID; string mid = string.IsNullOrEmpty(input.MID) ? string.Empty : input.MID;
JsonSerializerOptions jsonSerializerOptions = new() { PropertyNameCaseInsensitive = true }; JsonSerializerOptions jsonSerializerOptions = new() { PropertyNameCaseInsensitive = true };
int? reactor = mid.Length < 2 || !int.TryParse(mid.Substring(0, 2), out int reactorNumber) ? null : reactorNumber; int? reactor = mid.Length < 2 || !int.TryParse(mid.Substring(0, 2), out int reactorNumber) ? null : reactorNumber;
httpResponseMessageTask = httpClient.GetAsync($"{httpClient.BaseAddress}/reactors/{reactor}"); char? loadLockSide = string.IsNullOrEmpty(input.LoadLock) || input.LoadLock[0] is not 'L' and not 'R' ? null : input.LoadLock[0];
httpResponseMessageTask.Wait(); ReactorRoot? reactorRoot = reactor is null || loadLockSide is null ? null : GetReactorRoot(httpClient, jsonSerializerOptions, reactor);
if (httpResponseMessageTask.Result.StatusCode != System.Net.HttpStatusCode.OK) rds = loadLockSide is null ? null : loadLockSide == 'L' ? reactorRoot?.Reactor?.LoadLockLeftRDS : loadLockSide == 'R' ? reactorRoot?.Reactor?.LoadLockRightRDS : null;
throw new Exception($"Unable to OI <{httpResponseMessageTask.Result.StatusCode}>"); if (reactorRoot?.Reactor is null || reactor != reactorRoot.Reactor.ReactorNo || rds is null)
streamTask = httpResponseMessageTask.Result.Content.ReadAsStreamAsync();
streamTask.Wait();
if (!streamTask.Result.CanRead)
throw new NullReferenceException(nameof(streamTask));
ReactorRoot? reactorRoot = JsonSerializer.Deserialize<ReactorRoot>(streamTask.Result, jsonSerializerOptions);
streamTask.Result.Dispose();
if (reactorRoot is null || reactor != reactorRoot.Reactor.ReactorNo || reactorRoot.Reactor.LoadedRDS is null || reactorRoot.Reactor.LoadedRDS.Length < 1)
{ {
rds = null; rds = null;
psn = string.Empty; psn = null;
workOrder = new(null, null, null, null, false); result = new(layer: null,
common = new(layer: null,
psn: psn, psn: psn,
rdsNumber: rds, rdsNumber: rds,
reactor: reactor, reactor: reactor,
zone: null, zone: null,
employee: null); employee: null,
runDataSheetRoot: null,
workOrder: null);
} }
else else
{ {
rds = reactorRoot.Reactor.LoadedRDS[0]; RunDataSheetRoot? runDataSheetRoot = GetRunDataSheetRoot(httpClient, rds.Value);
workOrder = new(null, null, null, null, false); if (runDataSheetRoot?.RunDataSheet is null || reactor != runDataSheetRoot.RunDataSheet.Reactor)
httpResponseMessageTask = httpClient.GetAsync($"{httpClient.BaseAddress}/materials/rds/{rds}");
httpResponseMessageTask.Wait();
if (httpResponseMessageTask.Result.StatusCode != System.Net.HttpStatusCode.OK)
throw new Exception($"Unable to OI <{httpResponseMessageTask.Result.StatusCode}>");
streamTask = httpResponseMessageTask.Result.Content.ReadAsStreamAsync();
streamTask.Wait();
if (!streamTask.Result.CanRead)
throw new NullReferenceException(nameof(streamTask));
RunDataSheetRoot? runDataSheetRoot = JsonSerializer.Deserialize<RunDataSheetRoot>(streamTask.Result, jsonSerializerOptions);
streamTask.Result.Dispose();
if (runDataSheetRoot is null || reactor != runDataSheetRoot.RunDataSheet.Reactor)
{ {
psn = string.Empty; psn = null;
common = new(layer: null, result = new(layer: null,
psn: psn, psn: psn,
rdsNumber: rds, rdsNumber: rds,
reactor: reactor, reactor: reactor,
zone: null, zone: null,
employee: null); employee: null,
runDataSheetRoot: runDataSheetRoot,
workOrder: null);
} }
else else
{ {
psn = runDataSheetRoot.RunDataSheet.PSN.ToString(); psn = runDataSheetRoot.RunDataSheet.PSN.ToString();
common = new(layer: null, result = new(layer: null,
psn: psn, psn: psn,
rdsNumber: rds, rdsNumber: rds,
reactor: reactor, reactor: reactor,
zone: null, zone: null,
employee: null); employee: null,
runDataSheetRoot: runDataSheetRoot,
workOrder: null);
} }
} }
return new(common, workOrder); return result;
} }
private static (Common common, WorkOrder workOrder) Get(Input input, string barcodeHostFileShare) private static ReactorRoot? GetReactorRoot(HttpClient httpClient, JsonSerializerOptions jsonSerializerOptions, int? reactor)
{ {
ReactorRoot? result;
try
{
Task<HttpResponseMessage> httpResponseMessageTask = httpClient.GetAsync($"{httpClient.BaseAddress}/reactors/{reactor}");
httpResponseMessageTask.Wait();
if (httpResponseMessageTask.Result.StatusCode != System.Net.HttpStatusCode.OK)
throw new Exception($"Unable to OI <{httpResponseMessageTask.Result.StatusCode}>");
using Task<Stream> streamTask = httpResponseMessageTask.Result.Content.ReadAsStreamAsync();
streamTask.Wait();
result = !streamTask.Result.CanRead ? null : JsonSerializer.Deserialize<ReactorRoot>(streamTask.Result, jsonSerializerOptions);
}
catch (Exception) { result = null; }
return result;
}
private static RunDataSheetRoot? GetRunDataSheetRoot(HttpClient httpClient, int rds)
{
RunDataSheetRoot? result;
try
{
JsonSerializerOptions jsonSerializerOptions = new() { PropertyNameCaseInsensitive = true };
using Task<HttpResponseMessage> httpResponseMessageTask = httpClient.GetAsync($"{httpClient.BaseAddress}/materials/rds/{rds}");
httpResponseMessageTask.Wait();
if (httpResponseMessageTask.Result.StatusCode != System.Net.HttpStatusCode.OK)
throw new Exception($"Unable to OI <{httpResponseMessageTask.Result.StatusCode}>");
Task<string> json = httpResponseMessageTask.Result.Content.ReadAsStringAsync();
json.Wait();
result = JsonSerializer.Deserialize<RunDataSheetRoot>(json.Result, jsonSerializerOptions);
result ??= new RunDataSheetRoot(null);
result.Json = json.Result;
}
catch (Exception) { result = null; }
return result;
}
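For orientation, a hedged sketch of the exchange GetRunDataSheetRoot performs; the endpoint and property names come from this diff, while the response body and values are invented:

using System.Text.Json;
using Adaptation.FileHandlers.TIBCO.Transport;

// GET {BaseAddress}/materials/rds/543210 (assumed sample response below)
string body = "{\"rds\":{\"loadLockSide\":\"R\",\"PSN\":5158,\"reactor\":45}}";
JsonSerializerOptions jsonSerializerOptions = new() { PropertyNameCaseInsensitive = true };
RunDataSheetRoot? runDataSheetRoot = JsonSerializer.Deserialize<RunDataSheetRoot>(body, jsonSerializerOptions);
runDataSheetRoot ??= new RunDataSheetRoot(null);
runDataSheetRoot.Json = body; // raw response retained; Job surfaces it through LastUpdateUser
// runDataSheetRoot.RunDataSheet now carries LoadLockSide "R", PSN 5158, Reactor 45.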
private static Common Get(Input input, string barcodeHostFileShare, HttpClient httpClient)
{
Common result;
if (string.IsNullOrEmpty(barcodeHostFileShare) || !Directory.Exists(barcodeHostFileShare)) if (string.IsNullOrEmpty(barcodeHostFileShare) || !Directory.Exists(barcodeHostFileShare))
throw new Exception($"Unable to access file-share <{barcodeHostFileShare}>"); throw new Exception($"Unable to access file-share <{barcodeHostFileShare}>");
int? rds; int? rds;
long sequence = 0; long sequence = 0;
WorkOrder workOrder; WorkOrder? workOrder;
RunDataSheetRoot? runDataSheetRoot;
string mid = string.IsNullOrEmpty(input.MID) ? string.Empty : input.MID; string mid = string.IsNullOrEmpty(input.MID) ? string.Empty : input.MID;
int? reactor = mid.Length < 2 || !int.TryParse(mid.Substring(0, 2), out int reactorNumber) ? null : reactorNumber; int? reactor = mid.Length < 2 || !int.TryParse(mid.Substring(0, 2), out int reactorNumber) ? null : reactorNumber;
bool parsed = !string.IsNullOrEmpty(input.Sequence) && long.TryParse(input.Sequence, out sequence); bool parsed = !string.IsNullOrEmpty(input.Sequence) && long.TryParse(input.Sequence, out sequence);
@ -471,29 +538,32 @@ public partial class Job
if (text is null || text.Length < 3) if (text is null || text.Length < 3)
{ {
rds = null; rds = null;
workOrder = new(null, null, null, null, false); workOrder = null;
runDataSheetRoot = null;
} }
else if (!text.Contains('.')) else if (!text.Contains('.'))
{ {
rds = !int.TryParse(text.Substring(2), out int rdsNumber) ? null : rdsNumber; workOrder = null;
workOrder = new(null, null, null, null, false); rds = !int.TryParse(text.Substring(2), out int rdsNumber) || IsInvalid(rdsNumber) ? null : rdsNumber;
runDataSheetRoot = rds is null ? null : GetRunDataSheetRoot(httpClient, rds.Value);
} }
else else
{ {
rds = null; rds = null;
runDataSheetRoot = null;
workOrder = GetWorkOrder(new(input, text.Substring(2))); workOrder = GetWorkOrder(new(input, text.Substring(2)));
} }
Common common = new(layer: null, result = new(layer: null,
psn: null, psn: null,
rdsNumber: rds, rdsNumber: rds,
reactor: reactor, reactor: reactor,
zone: null, zone: null,
employee: null); employee: null,
return new(common, workOrder); runDataSheetRoot: runDataSheetRoot,
workOrder: workOrder);
return result;
} }
#nullable disable
private static string GetRunJson(string lsl2SQLConnectionString, string commandText) private static string GetRunJson(string lsl2SQLConnectionString, string commandText)
{ {
StringBuilder result = new(); StringBuilder result = new();
@ -513,7 +583,7 @@ public partial class Job
return result.ToString(); return result.ToString();
} }
private static string GetCommandText(int? rds, int? workOrderNumber, int? workOrderCassette, int? slot, int? reactor) private static string GetCommandText(DateTime enteredDateTimeFilter, DateTime loadSignatureDateTimeFilter, int? rds, int? workOrderNumber, int? workOrderCassette, int? slot, int? reactor)
{ // cSpell:disable { // cSpell:disable
List<string> results = new(); List<string> results = new();
int rdsValue = rds is null ? -1 : rds.Value; int rdsValue = rds is null ? -1 : rds.Value;
@ -540,6 +610,7 @@ public partial class Job
results.Add(" ) zone "); results.Add(" ) zone ");
results.Add(" from lsl2sql.dbo.react_run rr "); results.Add(" from lsl2sql.dbo.react_run rr ");
results.Add($" where rr.rds_no = {rdsValue}"); results.Add($" where rr.rds_no = {rdsValue}");
results.Add($" and rr.enter_dtm > '{enteredDateTimeFilter:yyyy-MM-dd} 00:00:00.000' ");
results.Add(" union all "); results.Add(" union all ");
results.Add(" select "); results.Add(" select ");
results.Add(" rr.rds_no "); results.Add(" rr.rds_no ");
@ -589,80 +660,87 @@ public partial class Job
results.Add(" select max(qa.rds_no) "); results.Add(" select max(qa.rds_no) ");
results.Add(" from lsl2sql.dbo.react_run qa "); results.Add(" from lsl2sql.dbo.react_run qa ");
results.Add(" where qa.load_sig != '' "); results.Add(" where qa.load_sig != '' ");
results.Add(" and qa.load_sig_dtm > '2023-05-01 00:00:00.000' "); results.Add($" and qa.load_sig_dtm > '{loadSignatureDateTimeFilter:yyyy-MM-dd} 00:00:00.000' ");
results.Add($" and qa.reactor = {reactorValue}"); results.Add($" and qa.reactor = {reactorValue}");
results.Add(" ) "); results.Add(" ) ");
results.Add(" for json path "); results.Add(" for json path ");
return string.Join(Environment.NewLine, results); return string.Join(Environment.NewLine, results);
} // cSpell:restore } // cSpell:restore
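A small illustration of what the two new date-filter parameters render to inside the query built above; the dates are invented, while Main (further down in this diff) passes dateTime.AddDays(-356) and dateTime.AddDays(-4):

using System;

DateTime enteredDateTimeFilter = new(2024, 9, 11);       // illustrative value only
DateTime loadSignatureDateTimeFilter = new(2025, 8, 29); // illustrative value only
string enterClause = $" and rr.enter_dtm > '{enteredDateTimeFilter:yyyy-MM-dd} 00:00:00.000' ";
string loadSigClause = $" and qa.load_sig_dtm > '{loadSignatureDateTimeFilter:yyyy-MM-dd} 00:00:00.000' ";
// enterClause   => " and rr.enter_dtm > '2024-09-11 00:00:00.000' "
// loadSigClause => " and qa.load_sig_dtm > '2025-08-29 00:00:00.000' "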
private static CommonB Get(string lsl2SQLConnectionString, string layer, string psn, int? reactorNumber, int? slotNumber, int? workOrderNumber, int? workOrderCassette, string zone) private static CommonB Get(string lsl2SQLConnectionString, DateTime enteredDateTimeFilter, DateTime loadSignatureDateTimeFilter, Common common)
{ {
int? rdsNumber; int? rdsNumber;
string comment; string? psn;
string? zone;
string? layer;
const int zero = 0; const int zero = 0;
const string hyphen = "-"; int? reactorNumber;
string commandText = GetCommandText(rds: null, string? reactorType;
workOrderNumber: workOrderNumber, string? loadLockSide;
workOrderCassette: workOrderCassette, string commandText = GetCommandText(enteredDateTimeFilter,
slot: slotNumber, loadSignatureDateTimeFilter,
reactor: reactorNumber); rds: null,
workOrderNumber: common.WorkOrder?.WorkOrderNumber,
workOrderCassette: common.WorkOrder?.WorkOrderCassette,
slot: common.WorkOrder?.SlotNumber,
reactor: common.ReactorNumber);
string json = GetRunJson(lsl2SQLConnectionString, commandText); string json = GetRunJson(lsl2SQLConnectionString, commandText);
if (string.IsNullOrEmpty(json)) if (string.IsNullOrEmpty(json))
{ {
psn = common.PSN;
rdsNumber = null; rdsNumber = null;
comment = hyphen; reactorType = null;
psn = string.Empty; zone = common.Zone;
zone = string.Empty; loadLockSide = null;
layer = common.Layer;
reactorNumber = common.ReactorNumber;
} }
else else
{ {
Run[] runs; Run[]? runs;
try try
{ runs = JsonSerializer.Deserialize<Run[]>(json); } { runs = JsonSerializer.Deserialize<Run[]>(json); }
catch (Exception) catch (Exception)
{ runs = Array.Empty<Run>(); } { runs = Array.Empty<Run>(); }
if (runs.Length == 0) if (runs is null || runs.Length == 0)
{ {
psn = common.PSN;
rdsNumber = null; rdsNumber = null;
comment = hyphen; reactorType = null;
psn = string.Empty; zone = common.Zone;
zone = string.Empty; loadLockSide = null;
layer = common.Layer;
reactorNumber = common.ReactorNumber;
} }
else else
{ {
rdsNumber = runs[zero].RdsNo; reactorType = null;
if (string.IsNullOrEmpty(psn)) Run run = runs[zero];
psn = runs[zero].PSN; rdsNumber = run.RdsNo;
if (string.IsNullOrEmpty(zone)) reactorNumber = run.Reactor;
zone = runs[zero].Zone; loadLockSide = run.LoadLockSide;
if (string.IsNullOrEmpty(layer)) psn = string.IsNullOrEmpty(common.PSN) ? run.PSN : common.PSN;
layer = runs[zero].EpiLayer; zone = string.IsNullOrEmpty(common.Zone) ? run.Zone : common.Zone;
reactorNumber = runs[zero].Reactor; layer = string.IsNullOrEmpty(common.Layer) ? run.EpiLayer : common.Layer;
string loadLockSide = runs[zero].LoadLockSide;
string loadLockSideFull = loadLockSide switch
{
"L" => "Left",
"R" => "Right",
_ => loadLockSide,
};
comment = $"{loadLockSideFull} - {runs[zero].ReactorType}";
} }
} }
return new(comment: comment, return new(layer: layer,
layer: layer, loadLockSide: loadLockSide,
rdsNumber: rdsNumber, rdsNumber: rdsNumber,
psn: psn, psn: psn,
reactorNumber: reactorNumber, reactorNumber: reactorNumber,
reactorType: reactorType,
zone: zone); zone: zone);
} }
private static CommonB GetWithValidRDS(string lsl2SQLConnectionString, string layer, string psn, int? rdsNumber, int? reactorNumber, string zone) private static CommonB GetWithValidRDS(string lsl2SQLConnectionString, DateTime enteredDateTimeFilter, DateTime loadSignatureDateTimeFilter, string? layer, string? psn, int? rdsNumber, int? reactorNumber, string? zone)
{ {
string comment;
const int zero = 0; const int zero = 0;
const string hyphen = "-"; string? reactorType;
string commandText = GetCommandText(rds: rdsNumber, string? loadLockSide;
string commandText = GetCommandText(enteredDateTimeFilter,
loadSignatureDateTimeFilter,
rds: rdsNumber,
workOrderNumber: null, workOrderNumber: null,
workOrderCassette: null, workOrderCassette: null,
slot: null, slot: null,
@ -670,43 +748,41 @@ public partial class Job
string json = GetRunJson(lsl2SQLConnectionString, commandText); string json = GetRunJson(lsl2SQLConnectionString, commandText);
if (string.IsNullOrEmpty(json)) if (string.IsNullOrEmpty(json))
{ {
comment = hyphen; zone = null;
zone = string.Empty; reactorType = null;
loadLockSide = null;
} }
else else
{ {
Run[] runs; Run[]? runs;
try try
{ runs = JsonSerializer.Deserialize<Run[]>(json); } { runs = JsonSerializer.Deserialize<Run[]>(json); }
catch (Exception) catch (Exception)
{ runs = Array.Empty<Run>(); } { runs = Array.Empty<Run>(); }
if (runs.Length == 0) if (runs is null || runs.Length == 0)
{ {
comment = hyphen; zone = null;
zone = string.Empty; reactorType = null;
loadLockSide = null;
} }
else else
{ {
Run run = runs[zero];
if (string.IsNullOrEmpty(psn)) if (string.IsNullOrEmpty(psn))
psn = runs[zero].PSN; psn = run.PSN;
if (string.IsNullOrEmpty(zone)) if (string.IsNullOrEmpty(zone))
zone = runs[zero].Zone; zone = run.Zone;
if (string.IsNullOrEmpty(layer)) if (string.IsNullOrEmpty(layer))
layer = runs[zero].EpiLayer; layer = run.EpiLayer;
reactorNumber = runs[zero].Reactor is null ? reactorNumber : runs[zero].Reactor.Value; reactorNumber = run.Reactor is null ? reactorNumber : run.Reactor.Value;
string loadLockSide = runs[zero].LoadLockSide; loadLockSide = run.LoadLockSide;
string loadLockSideFull = loadLockSide switch reactorType = run.ReactorType;
{
"L" => "Left",
"R" => "Right",
_ => loadLockSide,
};
comment = $"{loadLockSideFull} - {runs[zero].ReactorType}";
} }
} }
return new(comment: comment, return new(layer: layer,
layer: layer, loadLockSide: loadLockSide,
rdsNumber: rdsNumber, rdsNumber: rdsNumber,
reactorType: reactorType,
psn: psn, psn: psn,
reactorNumber: reactorNumber, reactorNumber: reactorNumber,
zone: zone); zone: zone);

View File

@ -167,13 +167,12 @@ internal partial class Main
{ {
try try
{ {
string mid = string.Empty;
string[] sourceFiles = null; string[] sourceFiles = null;
DateTime dateTime = DateTime.Now; DateTime dateTime = DateTime.Now;
string pdsfFileLogistics = string.Empty;
IfxDoc envelopeDocument = ifxEnvelope.ExtractDocument();
CultureInfo cultureInfo = new("en-US"); CultureInfo cultureInfo = new("en-US");
string pdsfFileLogistics = string.Empty;
Calendar calendar = cultureInfo.Calendar; Calendar calendar = cultureInfo.Calendar;
IfxDoc envelopeDocument = ifxEnvelope.ExtractDocument();
string weekOfYear = calendar.GetWeekOfYear(dateTime, CalendarWeekRule.FirstDay, DayOfWeek.Sunday).ToString("00"); string weekOfYear = calendar.GetWeekOfYear(dateTime, CalendarWeekRule.FirstDay, DayOfWeek.Sunday).ToString("00");
string weekOfYearSegment = string.Concat(@"\", dateTime.ToString("yyyy"), "_Week_", weekOfYear, @"\", dateTime.ToString("yyyy-MM-dd")); string weekOfYearSegment = string.Concat(@"\", dateTime.ToString("yyyy"), "_Week_", weekOfYear, @"\", dateTime.ToString("yyyy-MM-dd"));
if (!string.IsNullOrEmpty(_FileConnectorConfiguration.SourceFileLocation)) if (!string.IsNullOrEmpty(_FileConnectorConfiguration.SourceFileLocation))
@ -188,8 +187,10 @@ internal partial class Main
} }
if (!subject.Contains(_TibcoParameterSubjectPrefix)) if (!subject.Contains(_TibcoParameterSubjectPrefix))
throw new Exception("Invalid Subject"); throw new Exception("Invalid Subject");
mid = GetJobsMID(envelopeDocument); string mid = GetJobsMID(envelopeDocument);
Job job = new(_LSL2SQLConnectionString, _MetrologyFileShare, _BarcodeHostFileShare, _HttpClient, mid); DateTime enteredDateTimeFilter = dateTime.AddDays(-356);
DateTime loadSignatureDateTimeFilter = dateTime.AddDays(-4);
Job job = new(_LSL2SQLConnectionString, _MetrologyFileShare, _BarcodeHostFileShare, _HttpClient, mid, enteredDateTimeFilter, loadSignatureDateTimeFilter);
if (job.IsAreaSi) if (job.IsAreaSi)
{ {
IfxDoc sendReply = GetJobsReply(job); IfxDoc sendReply = GetJobsReply(job);

View File

@ -6,13 +6,15 @@ public class Reactor
{ {
[JsonConstructor] [JsonConstructor]
public Reactor(int reactorNo, int[] loadedRDS) public Reactor(int reactorNo, int? loadLockLeftRDS, int? loadLockRightRDS)
{ {
LoadLockLeftRDS = loadLockLeftRDS;
LoadLockRightRDS = loadLockRightRDS;
ReactorNo = reactorNo; ReactorNo = reactorNo;
LoadedRDS = loadedRDS;
} }
[JsonPropertyName("reactorNo")] public int ReactorNo { get; } // { init; get; } [JsonPropertyName("reactorNo")] public int ReactorNo { get; } // { init; get; }
[JsonPropertyName("loadedRDS")] public int[] LoadedRDS { get; } // { init; get; } [JsonPropertyName("loadLockLeftRDS")] public int? LoadLockLeftRDS { get; } // { init; get; }
[JsonPropertyName("loadLockRightRDS")] public int? LoadLockRightRDS { get; } // { init; get; }
} }

View File

@ -2,13 +2,15 @@ using System.Text.Json.Serialization;
namespace Adaptation.FileHandlers.TIBCO.Transport; namespace Adaptation.FileHandlers.TIBCO.Transport;
#nullable enable
public class ReactorRoot public class ReactorRoot
{ {
[JsonConstructor] [JsonConstructor]
public ReactorRoot(Reactor reactor) => public ReactorRoot(Reactor? reactor) =>
Reactor = reactor; Reactor = reactor;
[JsonPropertyName("reactor")] public Reactor Reactor { get; } // { init; get; } [JsonPropertyName("reactor")] public Reactor? Reactor { get; } // { init; get; }
} }
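A hedged sketch of the payload the Reactor and ReactorRoot transport classes above now model, with one RDS per load lock instead of the old loadedRDS array (the values are invented):

using System.Text.Json;
using Adaptation.FileHandlers.TIBCO.Transport;

string body = "{\"reactor\":{\"reactorNo\":45,\"loadLockLeftRDS\":543210,\"loadLockRightRDS\":null}}";
JsonSerializerOptions jsonSerializerOptions = new() { PropertyNameCaseInsensitive = true };
ReactorRoot? reactorRoot = JsonSerializer.Deserialize<ReactorRoot>(body, jsonSerializerOptions);
// ReactorGet then reads reactorRoot.Reactor?.LoadLockLeftRDS when Input.LoadLock
// starts with 'L', or LoadLockRightRDS when it starts with 'R'.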

View File

@ -6,12 +6,14 @@ public class RunDataSheet
{ {
[JsonConstructor] [JsonConstructor]
public RunDataSheet(int psn, int reactor) public RunDataSheet(string loadLockSide, int psn, int reactor)
{ {
PSN = psn; PSN = psn;
LoadLockSide = loadLockSide;
Reactor = reactor; Reactor = reactor;
} }
[JsonPropertyName("loadLockSide")] public string LoadLockSide { get; } // { init; get; }
[JsonPropertyName("PSN")] public int PSN { get; } // { init; get; } [JsonPropertyName("PSN")] public int PSN { get; } // { init; get; }
[JsonPropertyName("reactor")] public int Reactor { get; } // { init; get; } [JsonPropertyName("reactor")] public int Reactor { get; } // { init; get; }

View File

@ -2,13 +2,16 @@ using System.Text.Json.Serialization;
namespace Adaptation.FileHandlers.TIBCO.Transport; namespace Adaptation.FileHandlers.TIBCO.Transport;
#nullable enable
public class RunDataSheetRoot public class RunDataSheetRoot
{ {
[JsonConstructor] [JsonConstructor]
public RunDataSheetRoot(RunDataSheet runDataSheet) => public RunDataSheetRoot(RunDataSheet? runDataSheet) =>
RunDataSheet = runDataSheet; RunDataSheet = runDataSheet;
[JsonPropertyName("rds")] public RunDataSheet RunDataSheet { get; } // { init; get; } public string? Json { get; set; }
[JsonPropertyName("rds")] public RunDataSheet? RunDataSheet { get; } // { init; get; }
} }

View File

@ -87,16 +87,16 @@
</None> </None>
</ItemGroup> </ItemGroup>
<ItemGroup> <ItemGroup>
<None Condition="'$(Configuration)' == 'Debug'" Include="\\mestsa003.infineon.com\EC_EAFRepository\Staging\DeploymentStorage\GhostPCL\gpcl6win64\gpcl6dll64.dll"> <None Condition="'$(Configuration)' == 'Debug'" Include="D:\EAF-Mesa-Integration\copy\GhostPCL\gpcl6win64\gpcl6dll64.dll">
<CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory> <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
</None> </None>
<None Condition="'$(Configuration)' == 'Debug'" Include="\\mestsa003.infineon.com\EC_EAFRepository\Staging\DeploymentStorage\GhostPCL\gpcl6win64\gpcl6win64.exe"> <None Condition="'$(Configuration)' == 'Debug'" Include="D:\EAF-Mesa-Integration\copy\GhostPCL\gpcl6win64\gpcl6win64.exe">
<CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory> <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
</None> </None>
<None Condition="'$(Configuration)' == 'Release'" Include="\\mesfs.infineon.com\EC_EAFRepository\Staging\DeploymentStorage\GhostPCL\gpcl6win64\gpcl6dll64.dll"> <None Condition="'$(Configuration)' == 'Release'" Include="D:\EAF-Mesa-Integration\copy\GhostPCL\gpcl6win64\gpcl6dll64.dll">
<CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory> <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
</None> </None>
<None Condition="'$(Configuration)' == 'Release'" Include="\\mesfs.infineon.com\EC_EAFRepository\Staging\DeploymentStorage\GhostPCL\gpcl6win64\gpcl6win64.exe"> <None Condition="'$(Configuration)' == 'Release'" Include="D:\EAF-Mesa-Integration\copy\GhostPCL\gpcl6win64\gpcl6win64.exe">
<CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory> <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
</None> </None>
</ItemGroup> </ItemGroup>

View File

@ -383,8 +383,12 @@ public class FileRead : Properties.IFileRead
else else
{ {
string[] files; string[] files;
string logisticsSequence = _Logistics.Sequence.ToString(); string[] directories;
string[] directories = Directory.GetDirectories(jobIdDirectory, $"*{logisticsSequence}*", SearchOption.TopDirectoryOnly); string logisticsSequence;
for (int i = 0; i < 10; i++)
{
logisticsSequence = (_Logistics.Sequence + -i).ToString();
directories = Directory.GetDirectories(jobIdDirectory, $"*{logisticsSequence}*", SearchOption.TopDirectoryOnly);
foreach (string directory in directories) foreach (string directory in directories)
{ {
files = Directory.GetFiles(directory, "*", SearchOption.TopDirectoryOnly); files = Directory.GetFiles(directory, "*", SearchOption.TopDirectoryOnly);
@ -392,8 +396,11 @@ public class FileRead : Properties.IFileRead
continue; continue;
results.Add(directory); results.Add(directory);
} }
if (results.Count == 1)
break;
} }
if ((results is null) || results.Count != 1) }
if (results.Count != 1)
throw new Exception("Didn't find directory by logistics sequence"); throw new Exception("Didn't find directory by logistics sequence");
return results.ToArray(); return results.ToArray();
} }
@ -478,27 +485,14 @@ public class FileRead : Properties.IFileRead
} }
} }
protected void WritePDSF(IFileRead fileRead, JsonElement[] jsonElements) protected static void WritePDSF(IFileRead fileRead, JsonElement[] jsonElements)
{ {
string directory; #pragma warning disable CA1510
string day = $"{_Logistics.DateTimeFromSequence:yyyy-MM-dd}"; if (fileRead is null)
string weekOfYear = _Calendar.GetWeekOfYear(_Logistics.DateTimeFromSequence, CalendarWeekRule.FirstDay, DayOfWeek.Sunday).ToString("00"); throw new ArgumentNullException(nameof(fileRead));
string weekDirectory = $"{_Logistics.DateTimeFromSequence:yyyy}_Week_{weekOfYear}"; if (jsonElements is null)
if (!_CellInstanceConnectionName.StartsWith(_CellInstanceName) && _CellInstanceConnectionNameBase == _EquipmentType) throw new ArgumentNullException(nameof(jsonElements));
directory = Path.Combine(_TracePath, _EquipmentType, "Target", weekDirectory, day, _CellInstanceName, _CellInstanceConnectionName); #pragma warning restore CA1510
else
directory = Path.Combine(_TracePath, _EquipmentType, "Source", weekDirectory, day, _CellInstanceName, _CellInstanceConnectionName);
if (!Directory.Exists(directory))
_ = Directory.CreateDirectory(directory);
string file = Path.Combine(directory, string.Concat(_Logistics.MesEntity, "_", _Logistics.Sequence, ".ipdsf"));
string lines = ProcessDataStandardFormat.GetPDSFText(fileRead, _Logistics, jsonElements, logisticsText: string.Empty);
File.WriteAllText(file, lines);
if (_Logistics.TotalSecondsSinceLastWriteTimeFromSequence > 600)
{
try
{ File.SetLastWriteTime(file, _Logistics.DateTimeFromSequence); }
catch (Exception) { }
}
} }
protected void WaitForThread(Thread thread, List<Exception> threadExceptions) protected void WaitForThread(Thread thread, List<Exception> threadExceptions)

View File

@ -2,12 +2,14 @@ using Adaptation.Shared.Methods;
using System; using System;
using System.Collections.Generic; using System.Collections.Generic;
using System.Collections.ObjectModel; using System.Collections.ObjectModel;
using System.Diagnostics;
using System.Globalization; using System.Globalization;
using System.IO; using System.IO;
using System.Linq; using System.Linq;
using System.Text; using System.Text;
using System.Text.Json; using System.Text.Json;
using System.Text.Json.Serialization; using System.Text.Json.Serialization;
using System.Text.RegularExpressions;
namespace Adaptation.Shared; namespace Adaptation.Shared;
@ -136,6 +138,7 @@ internal class ProcessDataStandardFormat
internal static ProcessDataStandardFormat GetProcessDataStandardFormat(string reportFullPath, string[]? lines = null, int columnsLine = 6) internal static ProcessDataStandardFormat GetProcessDataStandardFormat(string reportFullPath, string[]? lines = null, int columnsLine = 6)
{ {
ProcessDataStandardFormat result; ProcessDataStandardFormat result;
long? sequence;
string segment; string segment;
string[] segments; string[] segments;
bool addToFooter = false; bool addToFooter = false;
@ -186,13 +189,25 @@ internal class ProcessDataStandardFormat
} }
string? linesOne = lines.Length > 0 && body.Count == 0 && columns.Count == 0 ? lines[1] : null; string? linesOne = lines.Length > 0 && body.Count == 0 && columns.Count == 0 ? lines[1] : null;
logistics = GetLogistics(footer, linesOne: linesOne); logistics = GetLogistics(footer, linesOne: linesOne);
if (logistics.Count == 0)
sequence = null;
else
{
segments = logistics[0].Split(new string[] { "SEQUENCE=" }, StringSplitOptions.None);
sequence = segments.Length < 2 || !long.TryParse(segments[1].Split(';')[0], out long s) ? null : s;
}
if (sequence is null && !string.IsNullOrEmpty(reportFullPath))
{
FileInfo fileInfo = new(reportFullPath);
sequence = fileInfo.LastWriteTime.Ticks;
}
result = new(body: body.AsReadOnly(), result = new(body: body.AsReadOnly(),
columns: columns.AsReadOnly(), columns: columns.AsReadOnly(),
footer: footer.AsReadOnly(), footer: footer.AsReadOnly(),
header: header.AsReadOnly(), header: header.AsReadOnly(),
inputPDSF: null, inputPDSF: null,
logistics: logistics, logistics: logistics,
sequence: null); sequence: sequence);
return result; return result;
} }
@ -214,19 +229,19 @@ internal class ProcessDataStandardFormat
return results.AsReadOnly(); return results.AsReadOnly();
} }
internal static ProcessDataStandardFormat GetProcessDataStandardFormat(string reportFullPath, ProcessDataStandardFormatMapping pdsfMapping) internal static ProcessDataStandardFormat GetProcessDataStandardFormat(string reportFullPath, ProcessDataStandardFormatMapping processDataStandardFormatMapping)
{ {
ProcessDataStandardFormat result; ProcessDataStandardFormat result;
const int columnsLine = 6; const int columnsLine = 6;
FileInfo fileInfo = new(reportFullPath); FileInfo fileInfo = new(reportFullPath);
ProcessDataStandardFormat processDataStandardFormat = GetProcessDataStandardFormat(fileInfo.LastWriteTime, columnsLine, fileInfo.FullName, lines: null); ProcessDataStandardFormat processDataStandardFormat = GetProcessDataStandardFormat(fileInfo.LastWriteTime, columnsLine, fileInfo.FullName, lines: null);
JsonElement[]? jsonElements = pdsfMapping.OldColumnNames.Count != pdsfMapping.ColumnIndices.Count ? null : GetFullArray(processDataStandardFormat); JsonElement[]? jsonElements = processDataStandardFormatMapping.OldColumnNames.Count != processDataStandardFormatMapping.ColumnIndices.Count ? null : GetFullArray(processDataStandardFormat);
JsonProperty[]? jsonProperties = jsonElements is null || jsonElements.Length == 0 ? null : jsonElements[0].EnumerateObject().ToArray(); JsonProperty[]? jsonProperties = jsonElements is null || jsonElements.Length == 0 ? null : jsonElements[0].EnumerateObject().ToArray();
if (jsonElements is null || jsonProperties is null || jsonProperties.Length != pdsfMapping.NewColumnNames.Count) if (jsonElements is null || jsonProperties is null || jsonProperties.Length != processDataStandardFormatMapping.NewColumnNames.Count)
result = processDataStandardFormat; result = processDataStandardFormat;
else else
{ {
result = GetProcessDataStandardFormat(pdsfMapping, jsonElements, processDataStandardFormat); result = GetProcessDataStandardFormat(processDataStandardFormatMapping, jsonElements, processDataStandardFormat);
if (result.Sequence is null || result.Columns.Count == 0 || result.Body.Count == 0 || result.Logistics.Count == 0) if (result.Sequence is null || result.Columns.Count == 0 || result.Body.Count == 0 || result.Logistics.Count == 0)
result = processDataStandardFormat; result = processDataStandardFormat;
} }
@ -236,7 +251,7 @@ internal class ProcessDataStandardFormat
private static ProcessDataStandardFormat GetProcessDataStandardFormat(DateTime lastWriteTime, int columnsLine, string path, string[]? lines) private static ProcessDataStandardFormat GetProcessDataStandardFormat(DateTime lastWriteTime, int columnsLine, string path, string[]? lines)
{ {
ProcessDataStandardFormat result; ProcessDataStandardFormat result;
long sequence; long? sequence;
string[] segments; string[] segments;
bool addToFooter = false; bool addToFooter = false;
List<string> body = new(); List<string> body = new();
@ -268,12 +283,13 @@ internal class ProcessDataStandardFormat
} }
logistics = GetLogistics(footer, linesOne: null); logistics = GetLogistics(footer, linesOne: null);
if (logistics.Count == 0) if (logistics.Count == 0)
sequence = lastWriteTime.Ticks; sequence = null;
else else
{ {
segments = logistics[0].Split(new string[] { "SEQUENCE=" }, StringSplitOptions.None); segments = logistics[0].Split(new string[] { "SEQUENCE=" }, StringSplitOptions.None);
sequence = segments.Length < 2 || !long.TryParse(segments[1].Split(';')[0], out long s) ? lastWriteTime.Ticks : s; sequence = segments.Length < 2 || !long.TryParse(segments[1].Split(';')[0], out long s) ? null : s;
} }
sequence ??= lastWriteTime.Ticks;
result = new(body: body.AsReadOnly(), result = new(body: body.AsReadOnly(),
columns: new(columns), columns: new(columns),
footer: footer.AsReadOnly(), footer: footer.AsReadOnly(),
@ -302,7 +318,7 @@ internal class ProcessDataStandardFormat
segments = bodyLine.Split('\t').ToList(); segments = bodyLine.Split('\t').ToList();
for (int c = 0; c < segments.Count; c++) for (int c = 0; c < segments.Count; c++)
{ {
value = segments[c].Replace("\"", "\\\"").Replace("\\", "\\\\"); value = segments[c].Replace("\\", "\\\\").Replace("\"", "\\\"");
_ = stringBuilder.Append('"').Append(processDataStandardFormat.Columns[c]).Append("\":\"").Append(value).Append("\","); _ = stringBuilder.Append('"').Append(processDataStandardFormat.Columns[c]).Append("\":\"").Append(value).Append("\",");
} }
_ = stringBuilder.Remove(stringBuilder.Length - 1, 1); _ = stringBuilder.Remove(stringBuilder.Length - 1, 1);
@ -321,12 +337,14 @@ internal class ProcessDataStandardFormat
int column; int column;
string value; string value;
JsonProperty jsonProperty; JsonProperty jsonProperty;
List<string> debug = new();
List<string> values = new(); List<string> values = new();
List<string> results = new(); List<string> results = new();
JsonProperty[] jsonProperties; JsonProperty[] jsonProperties;
List<string> unknownColumns = new(); List<string> unknownColumns = new();
for (int i = 0; i < jsonElements.Length; i++) for (int i = 0; i < jsonElements.Length; i++)
{ {
debug.Clear();
values.Clear(); values.Clear();
if (jsonElements[i].ValueKind != JsonValueKind.Object) if (jsonElements[i].ValueKind != JsonValueKind.Object)
{ {
@ -340,16 +358,22 @@ internal class ProcessDataStandardFormat
{ {
column = processDataStandardFormatMapping.ColumnIndices[c]; column = processDataStandardFormatMapping.ColumnIndices[c];
if (column == -1) if (column == -1)
{
value = processDataStandardFormatMapping.OldColumnNames[c]; value = processDataStandardFormatMapping.OldColumnNames[c];
debug.Add($"<Item C=-01 Name=\"{value}\" DataType=\"8\" XmlType=\"1\" XPath=\"//records/record/{value}\" />");
}
else else
{ {
jsonProperty = jsonProperties[column]; jsonProperty = jsonProperties[column];
value = jsonProperty.Value.ToString(); value = jsonProperty.Value.ToString();
debug.Add($"<Item C={column + 2:000} Name=\"{processDataStandardFormatMapping.OldColumnNames[c]}\" DataType=\"8\" XmlType=\"1\" XPath=\"//records/record/{jsonProperty.Name}\" />");
} }
values.Add(value); values.Add(value);
} }
results.Add(string.Join("\t", values)); results.Add(string.Join("\t", values));
} }
if (Debugger.IsAttached)
File.WriteAllText("../../.txt", string.Join(Environment.NewLine, debug.OrderBy(l => l)));
result = new(body: new(results), result = new(body: new(results),
columns: processDataStandardFormatMapping.OldColumnNames, columns: processDataStandardFormatMapping.OldColumnNames,
footer: processDataStandardFormat.Footer, footer: processDataStandardFormat.Footer,
@ -364,7 +388,6 @@ internal class ProcessDataStandardFormat
{ {
if (processDataStandardFormat.InputPDSF is null) if (processDataStandardFormat.InputPDSF is null)
throw new NullReferenceException(nameof(processDataStandardFormat.InputPDSF)); throw new NullReferenceException(nameof(processDataStandardFormat.InputPDSF));
#pragma warning disable CA1845, IDE0057
string result; string result;
string line; string line;
string value; string value;
@ -378,12 +401,20 @@ internal class ProcessDataStandardFormat
break; break;
for (int c = 0; c < segments.Length; c++) for (int c = 0; c < segments.Length; c++)
{ {
value = segments[c].Replace("\"", "\\\"").Replace("\\", "\\\\"); value = segments[c].Replace("\\", "\\\\").Replace("\"", "\\\"");
line += string.Concat('"', processDataStandardFormat.InputPDSF.Columns[c].Trim('"'), '"', ':', '"', value, '"', ','); line += string.Concat('"', processDataStandardFormat.InputPDSF.Columns[c].Trim('"'), '"', ':', '"', value, '"', ',');
} }
line = string.Concat(line.Substring(0, line.Length - 1), '}'); line = string.Concat(line.Substring(0, line.Length - 1), '}');
lines.Add(line); lines.Add(line);
} }
string? json = null;
if (processDataStandardFormat.Footer is not null && processDataStandardFormat.Footer.Count > 0)
{
Dictionary<string, string> footerKeyValuePairs = GetFooterKeyValuePairs(processDataStandardFormat.Footer);
Dictionary<string, Dictionary<string, string>> logisticKeyValuePairs = GetLogisticKeyValuePairs(processDataStandardFormat.Footer, footerKeyValuePairs);
json = JsonSerializer.Serialize(logisticKeyValuePairs, DictionaryStringDictionaryStringStringSourceGenerationContext.Default.DictionaryStringDictionaryStringString);
}
string footerText = string.IsNullOrEmpty(json) || json == "{}" ? string.Empty : $",{Environment.NewLine}\"PDSF\":{Environment.NewLine}{json}";
result = string.Concat( result = string.Concat(
'{', '{',
Environment.NewLine, Environment.NewLine,
@ -412,14 +443,92 @@ internal class ProcessDataStandardFormat
": ", ": ",
processDataStandardFormat.Sequence, processDataStandardFormat.Sequence,
Environment.NewLine, Environment.NewLine,
footerText,
Environment.NewLine,
'}'); '}');
return result; return result;
#pragma warning restore CA1845, IDE0057 }
private static Dictionary<string, string> GetFooterKeyValuePairs(ReadOnlyCollection<string> footerLines)
{
Dictionary<string, string> results = new();
string[] segments;
foreach (string footerLine in footerLines)
{
segments = footerLine.Split('\t');
if (segments.Length != 2 || string.IsNullOrEmpty(segments[1].Trim()))
{
continue;
}
if (segments[1].Contains(';'))
{
continue;
}
else
{
if (results.ContainsKey(segments[0]))
{
continue;
}
results.Add(segments[0], segments[1]);
}
}
return results;
}
private static Dictionary<string, Dictionary<string, string>> GetLogisticKeyValuePairs(ReadOnlyCollection<string> footerLines, Dictionary<string, string> footerKeyValuePairs)
{
Dictionary<string, Dictionary<string, string>> results = new();
string[] segments;
string[] subSegments;
string[] subSubSegments;
Dictionary<string, string>? keyValue;
results.Add("Footer", footerKeyValuePairs);
foreach (string footerLine in footerLines)
{
segments = footerLine.Split('\t');
if (segments.Length != 2 || string.IsNullOrEmpty(segments[1].Trim()))
{
continue;
}
if (!segments[1].Contains(';') || !segments[1].Contains('='))
{
continue;
}
else
{
subSegments = segments[1].Split(';');
if (subSegments.Length < 1)
{
continue;
}
if (!results.TryGetValue(segments[0], out keyValue))
{
results.Add(segments[0], new());
if (!results.TryGetValue(segments[0], out keyValue))
{
throw new Exception();
}
}
foreach (string segment in subSegments)
{
subSubSegments = segment.Split('=');
if (subSubSegments.Length != 2)
{
continue;
}
keyValue.Add(subSubSegments[0], subSubSegments[1]);
}
}
}
return results;
} }
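A minimal sketch of the two footer shapes these helpers split apart, written as if called from inside this class; the footer lines and key names are invented:

ReadOnlyCollection<string> footerLines = new(new List<string>
{
    "MES_ENTITY\tCDE4",                             // plain key/value -> "Footer" bucket
    "LOGISTICS_1\tA_JOBID=543210;A_PRODUCT=EpiPro", // key=value pairs -> own bucket
});
Dictionary<string, string> footerKeyValuePairs = GetFooterKeyValuePairs(footerLines);
Dictionary<string, Dictionary<string, string>> logisticKeyValuePairs = GetLogisticKeyValuePairs(footerLines, footerKeyValuePairs);
// logisticKeyValuePairs => { "Footer": { "MES_ENTITY": "CDE4" },
//                            "LOGISTICS_1": { "A_JOBID": "543210", "A_PRODUCT": "EpiPro" } }
// The JSON writer above appends this dictionary as the trailing "PDSF" member.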
internal static void Write(string path, ProcessDataStandardFormat processDataStandardFormat, List<Metrology.WS.Results>? wsResults) internal static void Write(string path, ProcessDataStandardFormat processDataStandardFormat, List<Metrology.WS.Results>? wsResults)
{ {
List<string> results = new(); List<string> results = new();
if (processDataStandardFormat.InputPDSF is null)
throw new NullReferenceException(nameof(processDataStandardFormat.InputPDSF));
if (processDataStandardFormat.Sequence is null) if (processDataStandardFormat.Sequence is null)
throw new NullReferenceException(nameof(processDataStandardFormat.Sequence)); throw new NullReferenceException(nameof(processDataStandardFormat.Sequence));
string endOffset = "E#######T"; string endOffset = "E#######T";
@ -457,25 +566,25 @@ internal class ProcessDataStandardFormat
} }
} }
results.Add("END_HEADER"); results.Add("END_HEADER");
if (processDataStandardFormat.InputPDSF is not null)
{
results.Add(string.Empty); results.Add(string.Empty);
List<char> hyphens = new(); List<char> hyphens = new();
results.AddRange(processDataStandardFormat.InputPDSF.Header.Select(l => l.Replace('\t', '|'))); results.AddRange(processDataStandardFormat.InputPDSF.Header.Select(l => $"|{l.Replace('\t', '|')}|"));
results.Add(string.Empty); results.Add(string.Empty);
results.Add($"|{string.Join("|", processDataStandardFormat.InputPDSF.Columns)}|"); results.Add($"|{string.Join("|", processDataStandardFormat.InputPDSF.Columns)}|");
for (int i = 0; i < processDataStandardFormat.InputPDSF.Columns.Count; i++) for (int i = 0; i < processDataStandardFormat.InputPDSF.Columns.Count; i++)
hyphens.Add('-'); hyphens.Add('-');
results.Add($"|{string.Join("|", hyphens)}|"); results.Add($"|{string.Join("|", hyphens)}|");
results.AddRange(processDataStandardFormat.InputPDSF.Body.Select(l => l.Replace('\t', '|'))); results.AddRange(processDataStandardFormat.InputPDSF.Body.Select(l => $"|{l.Replace('\t', '|')}|"));
results.Add(string.Empty); results.Add(string.Empty);
results.AddRange(processDataStandardFormat.InputPDSF.Footer.Select(l => l.Replace('\t', '|'))); results.AddRange(processDataStandardFormat.InputPDSF.Footer.Select(l => $"|{l.Replace('\t', '|')}|"));
results.Add(string.Empty);
string xml = GetXml(processDataStandardFormat);
results.Add(xml);
results.Add(string.Empty); results.Add(string.Empty);
results.Add("EOF"); results.Add("EOF");
results.Add(string.Empty); results.Add(string.Empty);
string json = GetJson(processDataStandardFormat); string json = GetJson(processDataStandardFormat);
results.Add(json); results.Add(json);
}
File.WriteAllText(path, string.Join(Environment.NewLine, results)); File.WriteAllText(path, string.Join(Environment.NewLine, results));
} }
@ -518,7 +627,7 @@ internal class ProcessDataStandardFormat
{ {
for (int c = 1; c < segments.Length; c++) for (int c = 1; c < segments.Length; c++)
{ {
value = segments[c].Replace("\"", "\\\"").Replace("\\", "\\\\"); value = segments[c].Replace("\\", "\\\\").Replace("\"", "\\\"");
_ = stringBuilder.Append('"').Append(processDataStandardFormat.Columns[c]).Append("\":\"").Append(value).Append("\","); _ = stringBuilder.Append('"').Append(processDataStandardFormat.Columns[c]).Append("\":\"").Append(value).Append("\",");
} }
} }
@ -526,7 +635,7 @@ internal class ProcessDataStandardFormat
{ {
for (int c = 1; c < segments.Length; c++) for (int c = 1; c < segments.Length; c++)
{ {
value = segments[c].Replace("\"", "\\\"").Replace("\\", "\\\\"); value = segments[c].Replace("\\", "\\\\").Replace("\"", "\\\"");
if (string.IsNullOrEmpty(value)) if (string.IsNullOrEmpty(value))
_ = stringBuilder.Append('"').Append(processDataStandardFormat.Columns[c]).Append("\":").Append(value).Append("null,"); _ = stringBuilder.Append('"').Append(processDataStandardFormat.Columns[c]).Append("\":").Append(value).Append("null,");
else if (value.All(char.IsDigit)) else if (value.All(char.IsDigit))
@ -757,6 +866,96 @@ internal class ProcessDataStandardFormat
return result; return result;
} }
internal static string GetXml(ProcessDataStandardFormat processDataStandardFormat)
{
string result;
string tag;
string value;
string[] segments;
List<string> values;
Dictionary<string, List<string>> results = new();
ReadOnlyCollection<string> body = processDataStandardFormat.InputPDSF is null ?
processDataStandardFormat.Body : processDataStandardFormat.InputPDSF.Body;
ReadOnlyCollection<string> columns = processDataStandardFormat.InputPDSF is null ?
processDataStandardFormat.Columns : processDataStandardFormat.InputPDSF.Columns;
List<string> lines = new() { "<?xml version=\"1.0\" encoding=\"UTF-8\"?>", "<records>" };
for (int i = 0; i < body.Count; i++)
{
segments = body[i].Trim().Split('\t');
if (segments.Length != columns.Count)
break;
for (int c = 0; c < segments.Length; c++)
{
value = segments[c].Replace("&", "&amp;")
.Replace("<", "&lt;")
.Replace(">", "&gt;")
.Replace("\"", "&quot;")
.Replace("'", "&apos;");
tag = Regex.Replace(columns[c].Trim('"'), @"[^a-zA-Z0-9]", "_").Split('\r')[0].Split('\n')[0];
if (i == 0)
{
if (results.ContainsKey(tag))
continue;
results.Add(tag, new List<string>());
}
results[tag].Add(value);
}
}
foreach (KeyValuePair<string, List<string>> keyValuePair in results)
{
if (body.Count < 3)
break;
if (keyValuePair.Value.Count != body.Count)
continue;
values = keyValuePair.Value.Distinct().ToList();
if (values.Count == 2 && (string.IsNullOrEmpty(values[0]) || string.IsNullOrEmpty(values[1])))
{
for (int i = 0; i < body.Count; i++)
keyValuePair.Value[i] = string.Empty;
foreach (string v in values)
{
if (string.IsNullOrEmpty(v))
continue;
keyValuePair.Value[0] = v;
}
}
}
for (int i = 0; i < body.Count; i++)
{
lines.Add(" <record>");
foreach (KeyValuePair<string, List<string>> keyValuePair in results)
{
if (keyValuePair.Value.Count != body.Count)
continue;
lines.Add(string.Concat(" <", keyValuePair.Key, '>', keyValuePair.Value[i], "</", keyValuePair.Key, '>'));
}
lines.Add(" </record>");
}
lines.Add("</records>");
result = string.Join(Environment.NewLine, lines);
return result;
}
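A brief sketch of the per-cell sanitation GetXml applies, plus the record layout it emits for the InfinityQS export (the column name and values are invented):

using System.Text.RegularExpressions;

string tag = Regex.Replace("\"RDS Number\"".Trim('"'), @"[^a-zA-Z0-9]", "_"); // "RDS_Number"
string value = "A&B<C>".Replace("&", "&amp;").Replace("<", "&lt;").Replace(">", "&gt;")
                       .Replace("\"", "&quot;").Replace("'", "&apos;");       // "A&amp;B&lt;C&gt;"
// Emitted shape, one <record> per body row:
//   <?xml version="1.0" encoding="UTF-8"?>
//   <records>
//     <record>
//       <RDS_Number>543210</RDS_Number>
//     </record>
//   </records>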
internal static string GetXml(string reportFullPath, string[]? lines = null)
{
string result;
bool foundXml = false;
List<string> results = new();
lines ??= File.ReadAllLines(reportFullPath);
foreach (string line in lines)
{
if (line.StartsWith("<?xml"))
foundXml = true;
if (!foundXml)
continue;
if (line.StartsWith("EOF"))
break;
results.Add(line);
}
result = string.Join(Environment.NewLine, results);
return result;
}
} }
[JsonSourceGenerationOptions(WriteIndented = true)] [JsonSourceGenerationOptions(WriteIndented = true)]
@ -764,3 +963,9 @@ internal class ProcessDataStandardFormat
internal partial class JsonElementCollectionSourceGenerationContext : JsonSerializerContext internal partial class JsonElementCollectionSourceGenerationContext : JsonSerializerContext
{ {
} }
[JsonSourceGenerationOptions(WriteIndented = true, DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull)]
[JsonSerializable(typeof(Dictionary<string, Dictionary<string, string>>))]
internal partial class DictionaryStringDictionaryStringStringSourceGenerationContext : JsonSerializerContext
{
}

View File

@ -1,33 +1,34 @@
using System.Collections.ObjectModel; using System.Collections.ObjectModel;
using System.Linq;
namespace Adaptation.Shared; namespace Adaptation.Shared;
public class ProcessDataStandardFormatMapping public class ProcessDataStandardFormatMapping
{ {
public ReadOnlyCollection<string> BackfillColumns { get; private set; }
public ReadOnlyCollection<int> ColumnIndices { get; private set; } public ReadOnlyCollection<int> ColumnIndices { get; private set; }
public ReadOnlyCollection<string> IgnoreColumns { get; private set; }
public ReadOnlyCollection<string> IndexOnlyColumns { get; private set; }
public ReadOnlyDictionary<string, string> KeyValuePairs { get; private set; }
public ReadOnlyCollection<string> NewColumnNames { get; private set; } public ReadOnlyCollection<string> NewColumnNames { get; private set; }
public ReadOnlyCollection<string> OldColumnNames { get; private set; } public ReadOnlyCollection<string> OldColumnNames { get; private set; }
public ProcessDataStandardFormatMapping(ReadOnlyCollection<string> backfillColumns, public ProcessDataStandardFormatMapping(ReadOnlyCollection<int> columnIndices,
ReadOnlyCollection<int> columnIndices,
ReadOnlyCollection<string> ignoreColumns,
ReadOnlyCollection<string> indexOnlyColumns,
ReadOnlyDictionary<string, string> keyValuePairs,
ReadOnlyCollection<string> newColumnNames, ReadOnlyCollection<string> newColumnNames,
ReadOnlyCollection<string> oldColumnNames) ReadOnlyCollection<string> oldColumnNames)
{ {
BackfillColumns = backfillColumns;
ColumnIndices = columnIndices; ColumnIndices = columnIndices;
IgnoreColumns = ignoreColumns;
IndexOnlyColumns = indexOnlyColumns;
KeyValuePairs = keyValuePairs;
NewColumnNames = newColumnNames; NewColumnNames = newColumnNames;
OldColumnNames = oldColumnNames; OldColumnNames = oldColumnNames;
} }
internal static ProcessDataStandardFormatMapping Get(string processDataStandardFormatMappingOldColumnNames, string processDataStandardFormatMappingNewColumnNames, string processDataStandardFormatMappingColumnIndices)
{
ProcessDataStandardFormatMapping result;
ReadOnlyCollection<string> newColumnNames = new(processDataStandardFormatMappingNewColumnNames.Split(','));
ReadOnlyCollection<string> oldColumnNames = new(processDataStandardFormatMappingOldColumnNames.Split(','));
ReadOnlyCollection<int> columnIndices = new(processDataStandardFormatMappingColumnIndices.Split(',').Select(int.Parse).ToArray());
result = new(columnIndices: columnIndices,
newColumnNames: newColumnNames,
oldColumnNames: oldColumnNames);
return result;
}
} }
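A small usage sketch of the new Get helper (the column names and indices below are invented): the three comma-separated settings strings become the mapping's old names, new names, and per-column indices, where -1 marks an old column with no counterpart in the new set.

ProcessDataStandardFormatMapping mapping = ProcessDataStandardFormatMapping.Get(
    processDataStandardFormatMappingOldColumnNames: "Reactor,RDS,MesEntity",
    processDataStandardFormatMappingNewColumnNames: "ReactorNumber,RdsNumber",
    processDataStandardFormatMappingColumnIndices: "0,1,-1");
// mapping.OldColumnNames.Count == mapping.ColumnIndices.Count, as the PDSF reader expects;
// a -1 index makes the reader fall back to emitting the old column name itself.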

View File

@ -52,44 +52,57 @@ public class Job : LoggingUnitTesting, IDisposable
{ {
string mid; string mid;
FileHandlers.TIBCO.Transport.Job job; FileHandlers.TIBCO.Transport.Job job;
DateTime enteredDateTimeFilter = new(2023, 05, 01);
MethodBase methodBase = new StackFrame().GetMethod(); MethodBase methodBase = new StackFrame().GetMethod();
DateTime loadSignatureDateTimeFilter = new(2023, 05, 01);
string metrologyFileShare = FileHandlers.TIBCO.FileRead.MetrologyFileShare; string metrologyFileShare = FileHandlers.TIBCO.FileRead.MetrologyFileShare;
string barcodeHostFileShare = FileHandlers.TIBCO.FileRead.BarcodeHostFileShare; string barcodeHostFileShare = FileHandlers.TIBCO.FileRead.BarcodeHostFileShare;
string lsl2SQLConnectionString = FileHandlers.TIBCO.FileRead.LSL2SQLConnectionString; string lsl2SQLConnectionString = FileHandlers.TIBCO.FileRead.LSL2SQLConnectionString;
LoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Getting configuration")); LoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Getting configuration"));
HttpClient httpClient = new() { BaseAddress = new(FileHandlers.TIBCO.FileRead.OpenInsightApplicationProgrammingInterface) }; HttpClient httpClient = new() { BaseAddress = new(FileHandlers.TIBCO.FileRead.OpenInsightApplicationProgrammingInterface) };
mid = "{\"Area\": \"Si\", \"EquipmentType\": \"MET08RESIMAPCDE\", \"MesEntity\": \"CDE4\", \"Sequence\": \"123456789\", \"MID\": \"12-123456-1234\", \"Recipe\": \"Recipe\"}"; mid = """
job = new(lsl2SQLConnectionString, metrologyFileShare, barcodeHostFileShare, httpClient, mid); {"Area": "Si", "EquipmentType": "MET08RESIMAPCDE", "MesEntity": "CDE4", "Sequence": "123456789", "MID": "12-123456-1234", "Recipe": "Recipe"}
""";
job = new(lsl2SQLConnectionString, metrologyFileShare, barcodeHostFileShare, httpClient, mid, enteredDateTimeFilter, loadSignatureDateTimeFilter);
Assert.IsFalse(string.IsNullOrEmpty(job.ProcessType)); // == "21"); Assert.IsFalse(string.IsNullOrEmpty(job.ProcessType)); // == "21");
Assert.AreEqual("123456", job.LotName); Assert.AreEqual("123456", job.LotName);
Assert.IsFalse(string.IsNullOrEmpty(job.ProductName)); // == "4609"); Assert.IsFalse(string.IsNullOrEmpty(job.ProductName)); // == "4609");
mid = "{\"Area\": \"Si\", \"EquipmentType\": \"MET08RESIMAPCDE\", \"MesEntity\": \"CDE4\", \"Sequence\": \"123456789\", \"MID\": \"12-1234567-1234\", \"Recipe\": \"Recipe\"}"; mid = """
job = new(lsl2SQLConnectionString, metrologyFileShare, barcodeHostFileShare, httpClient, mid); {"Area": "Si", "EquipmentType": "MET08RESIMAPCDE", "MesEntity": "CDE4", "Sequence": "123456789", "MID": "12-1234567-1234", "Recipe": "Recipe"}
""";
job = new(lsl2SQLConnectionString, metrologyFileShare, barcodeHostFileShare, httpClient, mid, enteredDateTimeFilter, loadSignatureDateTimeFilter);
Assert.IsFalse(string.IsNullOrEmpty(job.ProcessType)); // == "21"); Assert.IsFalse(string.IsNullOrEmpty(job.ProcessType)); // == "21");
Assert.AreEqual("1234567", job.LotName); Assert.AreEqual("1234567", job.LotName);
Assert.IsFalse(string.IsNullOrEmpty(job.ProductName)); // == "4609"); Assert.IsFalse(string.IsNullOrEmpty(job.ProductName)); // == "4609");
mid = "{\"Area\": \"Si\", \"EquipmentType\": \"MET08RESIMAPCDE\", \"MesEntity\": \"CDE4\", \"Sequence\": \"123456789\", \"MID\": \"-544481-\", \"Recipe\": \"Recipe\"}"; mid = """
job = new(lsl2SQLConnectionString, metrologyFileShare, barcodeHostFileShare, httpClient, mid); {"Area": "Si", "EquipmentType": "MET08RESIMAPCDE", "MesEntity": "CDE4", "Sequence": "123456789", "MID": "-544481-", "Recipe": "Recipe"}
""";
job = new(lsl2SQLConnectionString, metrologyFileShare, barcodeHostFileShare, httpClient, mid, enteredDateTimeFilter, loadSignatureDateTimeFilter);
Assert.IsFalse(string.IsNullOrEmpty(job.ProcessType)); // == "51"); Assert.IsFalse(string.IsNullOrEmpty(job.ProcessType)); // == "51");
Assert.AreEqual("544481", job.LotName); Assert.AreEqual("544481", job.LotName);
Assert.IsFalse(string.IsNullOrEmpty(job.ProductName)); // == "5158"); Assert.IsFalse(string.IsNullOrEmpty(job.ProductName)); // == "5158");
mid = "{\"Area\": \"Si\", \"EquipmentType\": \"MET08RESIMAPCDE\", \"MesEntity\": \"CDE4\", \"Sequence\": \"123456789\", \"MID\": \"00-544481-0000\", \"Recipe\": \"Recipe\"}"; mid = """
job = new(lsl2SQLConnectionString, metrologyFileShare, barcodeHostFileShare, httpClient, mid); {"Area": "Si", "EquipmentType": "MET08RESIMAPCDE", "MesEntity": "CDE4", "Sequence": "123456789", "MID": "00-544481-0000", "Recipe": "Recipe"}
""";
job = new(lsl2SQLConnectionString, metrologyFileShare, barcodeHostFileShare, httpClient, mid, enteredDateTimeFilter, loadSignatureDateTimeFilter);
Assert.IsFalse(string.IsNullOrEmpty(job.ProcessType)); // == "51"); Assert.IsFalse(string.IsNullOrEmpty(job.ProcessType)); // == "51");
Assert.AreEqual("544481", job.LotName); Assert.AreEqual("544481", job.LotName);
Assert.IsFalse(string.IsNullOrEmpty(job.ProductName)); // == "5158"); Assert.IsFalse(string.IsNullOrEmpty(job.ProductName)); // == "5158");
mid = "{\"Area\": \"Si\", \"EquipmentType\": \"MET08RESIMAPCDE\", \"MesEntity\": \"CDE4\", \"Sequence\": \"123456789\", \"MID\": \"00-o171308.1.51-0000\", \"Recipe\": \"Recipe\", \"Slot\": \"11\"}"; mid = """
job = new(lsl2SQLConnectionString, metrologyFileShare, barcodeHostFileShare, httpClient, mid); {"Area": "Si", "EquipmentType": "MET08RESIMAPCDE", "MesEntity": "CDE4", "Sequence": "123456789", "MID": "00-o171308.1.51-0000", "Recipe": "Recipe", "Slot": "11"}
""";
job = new(lsl2SQLConnectionString, metrologyFileShare, barcodeHostFileShare, httpClient, mid, enteredDateTimeFilter, loadSignatureDateTimeFilter);
Assert.IsFalse(string.IsNullOrEmpty(job.ProcessType)); // == "54"); Assert.IsFalse(string.IsNullOrEmpty(job.ProcessType)); // == "54");
Assert.IsFalse(string.IsNullOrEmpty(job.LotName)); // == "547000"); Assert.IsFalse(string.IsNullOrEmpty(job.LotName)); // == "547000");
Assert.IsFalse(string.IsNullOrEmpty(job.ProductName)); // == "4445"); Assert.IsFalse(string.IsNullOrEmpty(job.ProductName)); // == "4445");
LoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Exit")); LoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Exit"));
mid = "{\"Area\": \"Si\", \"EquipmentType\": \"MET08RESIMAPCDE\", \"MesEntity\": \"CDE5\", \"Sequence\": \"638163023363575829\", \"MID\": \"B48\", \"Recipe\": \"lsl_6in \"}"; mid = """
job = new(lsl2SQLConnectionString, metrologyFileShare, barcodeHostFileShare, httpClient, mid); {"Area": "Si", "EquipmentType": "MET08RESIMAPCDE", "MesEntity": "CDE5", "Sequence": "638163023363575829", "MID": "B48", "Recipe": "lsl_6in "}
""";
job = new(lsl2SQLConnectionString, metrologyFileShare, barcodeHostFileShare, httpClient, mid, enteredDateTimeFilter, loadSignatureDateTimeFilter);
Assert.IsFalse(string.IsNullOrEmpty(job.ProcessType)); // == "54"); Assert.IsFalse(string.IsNullOrEmpty(job.ProcessType)); // == "54");
Assert.IsFalse(string.IsNullOrEmpty(job.LotName)); // == "547000"); Assert.IsFalse(string.IsNullOrEmpty(job.LotName)); // == "547000");
Assert.IsFalse(string.IsNullOrEmpty(job.ProductName)); // == "4445"); Assert.IsFalse(string.IsNullOrEmpty(job.ProductName)); // == "4445");
LoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Exit"));
NonThrowTryCatch(); NonThrowTryCatch();
} }
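Every Job test call in this file now passes enteredDateTimeFilter and loadSignatureDateTimeFilter to FileHandlers.TIBCO.Transport.Job. The constructor itself is not part of this compare, so the stub below only sketches the assumed parameter order; the class name, property names, and the comment about what the filters do are assumptions.

using System;
using System.Net.Http;

// Sketch of the assumed constructor shape; the real FileHandlers.TIBCO.Transport.Job is not shown in this compare.
public class JobSketch
{
    public DateTime EnteredDateTimeFilter { get; }
    public DateTime LoadSignatureDateTimeFilter { get; }

    public JobSketch(string lsl2SQLConnectionString, string metrologyFileShare, string barcodeHostFileShare,
                     HttpClient httpClient, string mid,
                     DateTime enteredDateTimeFilter, DateTime loadSignatureDateTimeFilter)
    {
        // Assumption: the two DateTime values act as lower bounds when the job queries historical records.
        EnteredDateTimeFilter = enteredDateTimeFilter;
        LoadSignatureDateTimeFilter = loadSignatureDateTimeFilter;
    }
}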
@ -99,16 +112,19 @@ public class Job : LoggingUnitTesting, IDisposable
[TestMethod] [TestMethod]
public void TestJobAA() public void TestJobAA()
{ {
string mid;
FileHandlers.TIBCO.Transport.Job job; FileHandlers.TIBCO.Transport.Job job;
DateTime enteredDateTimeFilter = new(2023, 05, 01);
MethodBase methodBase = new StackFrame().GetMethod(); MethodBase methodBase = new StackFrame().GetMethod();
DateTime loadSignatureDateTimeFilter = new(2023, 05, 01);
string metrologyFileShare = FileHandlers.TIBCO.FileRead.MetrologyFileShare; string metrologyFileShare = FileHandlers.TIBCO.FileRead.MetrologyFileShare;
string barcodeHostFileShare = FileHandlers.TIBCO.FileRead.BarcodeHostFileShare; string barcodeHostFileShare = FileHandlers.TIBCO.FileRead.BarcodeHostFileShare;
string lsl2SQLConnectionString = FileHandlers.TIBCO.FileRead.LSL2SQLConnectionString; string lsl2SQLConnectionString = FileHandlers.TIBCO.FileRead.LSL2SQLConnectionString;
LoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Getting configuration")); LoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Getting configuration"));
HttpClient httpClient = new() { BaseAddress = new(FileHandlers.TIBCO.FileRead.OpenInsightApplicationProgrammingInterface) }; HttpClient httpClient = new() { BaseAddress = new(FileHandlers.TIBCO.FileRead.OpenInsightApplicationProgrammingInterface) };
mid = "{\"Area\": \"Si\", \"EquipmentType\": \"MET08THFTIRQS408M\", \"MesEntity\": \"BIORAD2\", \"Sequence\": \"123456789\", \"MID\": \"37--\", \"Recipe\": \"Recipe\"}"; string mid = """
job = new(lsl2SQLConnectionString, metrologyFileShare, barcodeHostFileShare, httpClient, mid); {"Area": "Si", "EquipmentType": "MET08THFTIRQS408M", "MesEntity": "BIORAD2", "Sequence": "123456789", "MID": "37--", "Recipe": "Recipe"}
""";
job = new(lsl2SQLConnectionString, metrologyFileShare, barcodeHostFileShare, httpClient, mid, enteredDateTimeFilter, loadSignatureDateTimeFilter);
Assert.AreEqual("37", job.ProcessType); Assert.AreEqual("37", job.ProcessType);
Assert.IsFalse(string.IsNullOrEmpty(job.LotName)); // == "549918"); Assert.IsFalse(string.IsNullOrEmpty(job.LotName)); // == "549918");
Assert.IsFalse(string.IsNullOrEmpty(job.ProductName)); // == "5101"); Assert.IsFalse(string.IsNullOrEmpty(job.ProductName)); // == "5101");
@ -123,14 +139,18 @@ public class Job : LoggingUnitTesting, IDisposable
public void TestJobB() public void TestJobB()
{ {
FileHandlers.TIBCO.Transport.Job job; FileHandlers.TIBCO.Transport.Job job;
DateTime enteredDateTimeFilter = new(2023, 05, 01);
MethodBase methodBase = new StackFrame().GetMethod(); MethodBase methodBase = new StackFrame().GetMethod();
DateTime loadSignatureDateTimeFilter = new(2023, 05, 01);
string metrologyFileShare = FileHandlers.TIBCO.FileRead.MetrologyFileShare; string metrologyFileShare = FileHandlers.TIBCO.FileRead.MetrologyFileShare;
string barcodeHostFileShare = FileHandlers.TIBCO.FileRead.BarcodeHostFileShare; string barcodeHostFileShare = FileHandlers.TIBCO.FileRead.BarcodeHostFileShare;
string lsl2SQLConnectionString = FileHandlers.TIBCO.FileRead.LSL2SQLConnectionString; string lsl2SQLConnectionString = FileHandlers.TIBCO.FileRead.LSL2SQLConnectionString;
LoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Getting configuration")); LoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Getting configuration"));
HttpClient httpClient = new() { BaseAddress = new(FileHandlers.TIBCO.FileRead.OpenInsightApplicationProgrammingInterface) }; HttpClient httpClient = new() { BaseAddress = new(FileHandlers.TIBCO.FileRead.OpenInsightApplicationProgrammingInterface) };
string mid = "{\"Area\": \"Si\", \"EquipmentType\": \"MET08RESIMAPCDE\", \"MesEntity\": \"CDE4\", \"Sequence\": \"123456789\", \"MID\": \"P1234\", \"Recipe\": \"Recipe\"}"; string mid = """
job = new(lsl2SQLConnectionString, metrologyFileShare, barcodeHostFileShare, httpClient, mid); {"Area": "Si", "EquipmentType": "MET08RESIMAPCDE", "MesEntity": "CDE4", "Sequence": "123456789", "MID": "P1234", "Recipe": "Recipe"}
""";
job = new(lsl2SQLConnectionString, metrologyFileShare, barcodeHostFileShare, httpClient, mid, enteredDateTimeFilter, loadSignatureDateTimeFilter);
Assert.IsFalse(string.IsNullOrEmpty(job.ProcessType)); Assert.IsFalse(string.IsNullOrEmpty(job.ProcessType));
Assert.IsFalse(string.IsNullOrEmpty(job.LotName)); Assert.IsFalse(string.IsNullOrEmpty(job.LotName));
Assert.IsFalse(string.IsNullOrEmpty(job.ProductName)); Assert.IsFalse(string.IsNullOrEmpty(job.ProductName));
@ -145,14 +165,18 @@ public class Job : LoggingUnitTesting, IDisposable
public void TestJobC() public void TestJobC()
{ {
FileHandlers.TIBCO.Transport.Job job; FileHandlers.TIBCO.Transport.Job job;
DateTime enteredDateTimeFilter = new(2023, 05, 01);
MethodBase methodBase = new StackFrame().GetMethod(); MethodBase methodBase = new StackFrame().GetMethod();
DateTime loadSignatureDateTimeFilter = new(2023, 05, 01);
string metrologyFileShare = FileHandlers.TIBCO.FileRead.MetrologyFileShare; string metrologyFileShare = FileHandlers.TIBCO.FileRead.MetrologyFileShare;
string barcodeHostFileShare = FileHandlers.TIBCO.FileRead.BarcodeHostFileShare; string barcodeHostFileShare = FileHandlers.TIBCO.FileRead.BarcodeHostFileShare;
string lsl2SQLConnectionString = FileHandlers.TIBCO.FileRead.LSL2SQLConnectionString; string lsl2SQLConnectionString = FileHandlers.TIBCO.FileRead.LSL2SQLConnectionString;
LoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Getting configuration")); LoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Getting configuration"));
HttpClient httpClient = new() { BaseAddress = new(FileHandlers.TIBCO.FileRead.OpenInsightApplicationProgrammingInterface) }; HttpClient httpClient = new() { BaseAddress = new(FileHandlers.TIBCO.FileRead.OpenInsightApplicationProgrammingInterface) };
string mid = "{\"Area\": \"Si\", \"EquipmentType\": \"MET08RESIMAPCDE\", \"MesEntity\": \"BIORAD3\", \"Sequence\": \"638234699589174855\", \"MID\": \"33--\", \"Recipe\": \"Recipe\"}"; string mid = """
job = new(lsl2SQLConnectionString, metrologyFileShare, barcodeHostFileShare, httpClient, mid); {"Area": "Si", "EquipmentType": "MET08RESIMAPCDE", "MesEntity": "BIORAD3", "Sequence": "638234699589174855", "MID": "33--", "Recipe": "Recipe"}
""";
job = new(lsl2SQLConnectionString, metrologyFileShare, barcodeHostFileShare, httpClient, mid, enteredDateTimeFilter, loadSignatureDateTimeFilter);
Assert.IsFalse(string.IsNullOrEmpty(job.ProcessType)); Assert.IsFalse(string.IsNullOrEmpty(job.ProcessType));
Assert.IsFalse(string.IsNullOrEmpty(job.LotName)); Assert.IsFalse(string.IsNullOrEmpty(job.LotName));
Assert.IsFalse(string.IsNullOrEmpty(job.ProductName)); Assert.IsFalse(string.IsNullOrEmpty(job.ProductName));
@ -167,15 +191,18 @@ public class Job : LoggingUnitTesting, IDisposable
public void TestJobD() public void TestJobD()
{ {
FileHandlers.TIBCO.Transport.Job job; FileHandlers.TIBCO.Transport.Job job;
DateTime enteredDateTimeFilter = new(2023, 05, 01);
MethodBase methodBase = new StackFrame().GetMethod(); MethodBase methodBase = new StackFrame().GetMethod();
DateTime loadSignatureDateTimeFilter = new(2023, 05, 01);
string metrologyFileShare = FileHandlers.TIBCO.FileRead.MetrologyFileShare; string metrologyFileShare = FileHandlers.TIBCO.FileRead.MetrologyFileShare;
string barcodeHostFileShare = FileHandlers.TIBCO.FileRead.BarcodeHostFileShare; string barcodeHostFileShare = FileHandlers.TIBCO.FileRead.BarcodeHostFileShare;
string lsl2SQLConnectionString = FileHandlers.TIBCO.FileRead.LSL2SQLConnectionString; string lsl2SQLConnectionString = FileHandlers.TIBCO.FileRead.LSL2SQLConnectionString;
LoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Getting configuration")); LoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Getting configuration"));
HttpClient httpClient = new() { BaseAddress = new(FileHandlers.TIBCO.FileRead.OpenInsightApplicationProgrammingInterface) }; HttpClient httpClient = new() { BaseAddress = new(FileHandlers.TIBCO.FileRead.OpenInsightApplicationProgrammingInterface) };
string mid = "{\"Area\": \"Si\", \"EquipmentType\": \"DEP08CEPIEPSILON\", \"MesEntity\": \"R32\", \"Sequence\": \"\", \"MID\": \"32--\", \"Recipe\": \"Recipe\"}"; string mid = """
job = new(lsl2SQLConnectionString, metrologyFileShare, barcodeHostFileShare, httpClient, mid); {"Area": "Si", "EquipmentType": "DEP08CEPIEPSILON", "MesEntity": "R32", "Sequence": "", "MID": "32--", "Recipe": "Recipe"}
""";
job = new(lsl2SQLConnectionString, metrologyFileShare, barcodeHostFileShare, httpClient, mid, enteredDateTimeFilter, loadSignatureDateTimeFilter);
Assert.IsFalse(string.IsNullOrEmpty(job.ProcessType)); Assert.IsFalse(string.IsNullOrEmpty(job.ProcessType));
Assert.IsFalse(string.IsNullOrEmpty(job.LotName)); Assert.IsFalse(string.IsNullOrEmpty(job.LotName));
Assert.IsFalse(string.IsNullOrEmpty(job.ProductName)); Assert.IsFalse(string.IsNullOrEmpty(job.ProductName));
@ -191,7 +218,9 @@ public class Job : LoggingUnitTesting, IDisposable
public void TestJobE() public void TestJobE()
{ {
FileHandlers.TIBCO.Transport.Job job; FileHandlers.TIBCO.Transport.Job job;
DateTime enteredDateTimeFilter = new(2023, 05, 01);
MethodBase methodBase = new StackFrame().GetMethod(); MethodBase methodBase = new StackFrame().GetMethod();
DateTime loadSignatureDateTimeFilter = new(2023, 05, 01);
string metrologyFileShare = FileHandlers.TIBCO.FileRead.MetrologyFileShare; string metrologyFileShare = FileHandlers.TIBCO.FileRead.MetrologyFileShare;
string barcodeHostFileShare = FileHandlers.TIBCO.FileRead.BarcodeHostFileShare; string barcodeHostFileShare = FileHandlers.TIBCO.FileRead.BarcodeHostFileShare;
string lsl2SQLConnectionString = FileHandlers.TIBCO.FileRead.LSL2SQLConnectionString; string lsl2SQLConnectionString = FileHandlers.TIBCO.FileRead.LSL2SQLConnectionString;
@ -200,7 +229,7 @@ public class Job : LoggingUnitTesting, IDisposable
string mid = """ string mid = """
{"Area": "Si", "EquipmentType": "MET08RESIMAPCDE", "MesEntity": "CDE5", "Sequence": "638756365880000000", "MID": "38-660275-5095.1", "Recipe": "IRC6mm"} {"Area": "Si", "EquipmentType": "MET08RESIMAPCDE", "MesEntity": "CDE5", "Sequence": "638756365880000000", "MID": "38-660275-5095.1", "Recipe": "IRC6mm"}
"""; """;
job = new(lsl2SQLConnectionString, metrologyFileShare, barcodeHostFileShare, httpClient, mid); job = new(lsl2SQLConnectionString, metrologyFileShare, barcodeHostFileShare, httpClient, mid, enteredDateTimeFilter, loadSignatureDateTimeFilter);
Assert.IsFalse(string.IsNullOrEmpty(job.ProcessType)); Assert.IsFalse(string.IsNullOrEmpty(job.ProcessType));
Assert.IsFalse(string.IsNullOrEmpty(job.LotName)); Assert.IsFalse(string.IsNullOrEmpty(job.LotName));
Assert.IsFalse(string.IsNullOrEmpty(job.ProductName)); Assert.IsFalse(string.IsNullOrEmpty(job.ProductName));
@ -216,7 +245,9 @@ public class Job : LoggingUnitTesting, IDisposable
public void TestJobF() public void TestJobF()
{ {
FileHandlers.TIBCO.Transport.Job job; FileHandlers.TIBCO.Transport.Job job;
DateTime enteredDateTimeFilter = new(2023, 05, 01);
MethodBase methodBase = new StackFrame().GetMethod(); MethodBase methodBase = new StackFrame().GetMethod();
DateTime loadSignatureDateTimeFilter = new(2023, 05, 01);
string metrologyFileShare = FileHandlers.TIBCO.FileRead.MetrologyFileShare; string metrologyFileShare = FileHandlers.TIBCO.FileRead.MetrologyFileShare;
string barcodeHostFileShare = FileHandlers.TIBCO.FileRead.BarcodeHostFileShare; string barcodeHostFileShare = FileHandlers.TIBCO.FileRead.BarcodeHostFileShare;
string lsl2SQLConnectionString = FileHandlers.TIBCO.FileRead.LSL2SQLConnectionString; string lsl2SQLConnectionString = FileHandlers.TIBCO.FileRead.LSL2SQLConnectionString;
@ -225,7 +256,7 @@ public class Job : LoggingUnitTesting, IDisposable
string mid = """ string mid = """
{"Area": "Si", "EquipmentType": "MET08THFTIRQS408M", "MesEntity": "BIORAD2", "Sequence": "638757112479659597", "MID": "173308.1.5", "Recipe": "6inTHICK"} {"Area": "Si", "EquipmentType": "MET08THFTIRQS408M", "MesEntity": "BIORAD2", "Sequence": "638757112479659597", "MID": "173308.1.5", "Recipe": "6inTHICK"}
"""; """;
job = new(lsl2SQLConnectionString, metrologyFileShare, barcodeHostFileShare, httpClient, mid); job = new(lsl2SQLConnectionString, metrologyFileShare, barcodeHostFileShare, httpClient, mid, enteredDateTimeFilter, loadSignatureDateTimeFilter);
Assert.IsFalse(string.IsNullOrEmpty(job.ProcessType)); Assert.IsFalse(string.IsNullOrEmpty(job.ProcessType));
Assert.IsFalse(string.IsNullOrEmpty(job.LotName)); Assert.IsFalse(string.IsNullOrEmpty(job.LotName));
Assert.IsFalse(string.IsNullOrEmpty(job.ProductName)); Assert.IsFalse(string.IsNullOrEmpty(job.ProductName));
@ -241,7 +272,9 @@ public class Job : LoggingUnitTesting, IDisposable
public void TestJobG() public void TestJobG()
{ {
FileHandlers.TIBCO.Transport.Job job; FileHandlers.TIBCO.Transport.Job job;
DateTime enteredDateTimeFilter = new(2023, 05, 01);
MethodBase methodBase = new StackFrame().GetMethod(); MethodBase methodBase = new StackFrame().GetMethod();
DateTime loadSignatureDateTimeFilter = new(2023, 05, 01);
string metrologyFileShare = FileHandlers.TIBCO.FileRead.MetrologyFileShare; string metrologyFileShare = FileHandlers.TIBCO.FileRead.MetrologyFileShare;
string barcodeHostFileShare = FileHandlers.TIBCO.FileRead.BarcodeHostFileShare; string barcodeHostFileShare = FileHandlers.TIBCO.FileRead.BarcodeHostFileShare;
string lsl2SQLConnectionString = FileHandlers.TIBCO.FileRead.LSL2SQLConnectionString; string lsl2SQLConnectionString = FileHandlers.TIBCO.FileRead.LSL2SQLConnectionString;
@ -250,7 +283,7 @@ public class Job : LoggingUnitTesting, IDisposable
string mid = """ string mid = """
{"Area": "Si", "EquipmentType": "MET08DDUPSFS6420", "MesEntity": "TENCOR1", "Sequence": "638765945581765554", "MID": "1T661282", "Recipe": "8IN_THIN ROTR"} {"Area": "Si", "EquipmentType": "MET08DDUPSFS6420", "MesEntity": "TENCOR1", "Sequence": "638765945581765554", "MID": "1T661282", "Recipe": "8IN_THIN ROTR"}
"""; """;
job = new(lsl2SQLConnectionString, metrologyFileShare, barcodeHostFileShare, httpClient, mid); job = new(lsl2SQLConnectionString, metrologyFileShare, barcodeHostFileShare, httpClient, mid, enteredDateTimeFilter, loadSignatureDateTimeFilter);
Assert.IsFalse(string.IsNullOrEmpty(job.ProcessType)); Assert.IsFalse(string.IsNullOrEmpty(job.ProcessType));
Assert.IsFalse(string.IsNullOrEmpty(job.LotName)); Assert.IsFalse(string.IsNullOrEmpty(job.LotName));
Assert.IsFalse(string.IsNullOrEmpty(job.ProductName)); Assert.IsFalse(string.IsNullOrEmpty(job.ProductName));
@ -266,7 +299,9 @@ public class Job : LoggingUnitTesting, IDisposable
public void TestJobH() public void TestJobH()
{ {
FileHandlers.TIBCO.Transport.Job job; FileHandlers.TIBCO.Transport.Job job;
DateTime enteredDateTimeFilter = new(2023, 05, 01);
MethodBase methodBase = new StackFrame().GetMethod(); MethodBase methodBase = new StackFrame().GetMethod();
DateTime loadSignatureDateTimeFilter = new(2023, 05, 01);
string metrologyFileShare = FileHandlers.TIBCO.FileRead.MetrologyFileShare; string metrologyFileShare = FileHandlers.TIBCO.FileRead.MetrologyFileShare;
string barcodeHostFileShare = FileHandlers.TIBCO.FileRead.BarcodeHostFileShare; string barcodeHostFileShare = FileHandlers.TIBCO.FileRead.BarcodeHostFileShare;
string lsl2SQLConnectionString = FileHandlers.TIBCO.FileRead.LSL2SQLConnectionString; string lsl2SQLConnectionString = FileHandlers.TIBCO.FileRead.LSL2SQLConnectionString;
@ -275,7 +310,7 @@ public class Job : LoggingUnitTesting, IDisposable
string mid = """ string mid = """
{"Area": "Si", "EquipmentType": "MET08DDUPSFS6420", "MesEntity": "TENCOR1", "Sequence": "638765945581765554", "MID": "AK1PL2", "Recipe": "8INCLEAN"} {"Area": "Si", "EquipmentType": "MET08DDUPSFS6420", "MesEntity": "TENCOR1", "Sequence": "638765945581765554", "MID": "AK1PL2", "Recipe": "8INCLEAN"}
"""; """;
job = new(lsl2SQLConnectionString, metrologyFileShare, barcodeHostFileShare, httpClient, mid); job = new(lsl2SQLConnectionString, metrologyFileShare, barcodeHostFileShare, httpClient, mid, enteredDateTimeFilter, loadSignatureDateTimeFilter);
Assert.IsFalse(string.IsNullOrEmpty(job.ProcessType)); Assert.IsFalse(string.IsNullOrEmpty(job.ProcessType));
Assert.IsFalse(string.IsNullOrEmpty(job.LotName)); Assert.IsFalse(string.IsNullOrEmpty(job.LotName));
Assert.IsFalse(string.IsNullOrEmpty(job.ProductName)); Assert.IsFalse(string.IsNullOrEmpty(job.ProductName));
@ -284,4 +319,28 @@ public class Job : LoggingUnitTesting, IDisposable
NonThrowTryCatch(); NonThrowTryCatch();
} }
[TestMethod]
public void TestJobI()
{
FileHandlers.TIBCO.Transport.Job job;
DateTime enteredDateTimeFilter = new(2023, 05, 01);
MethodBase methodBase = new StackFrame().GetMethod();
DateTime loadSignatureDateTimeFilter = new(2023, 05, 01);
string metrologyFileShare = FileHandlers.TIBCO.FileRead.MetrologyFileShare;
string barcodeHostFileShare = FileHandlers.TIBCO.FileRead.BarcodeHostFileShare;
string lsl2SQLConnectionString = FileHandlers.TIBCO.FileRead.LSL2SQLConnectionString;
LoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Getting configuration"));
HttpClient httpClient = new() { BaseAddress = new(FileHandlers.TIBCO.FileRead.OpenInsightApplicationProgrammingInterface) };
string mid = """
{"Area": "Si", "EquipmentType": "MET08RESIMAPCDE", "MesEntity": "CDE5", "Sequence": "638163023363575829", "MID": "23-111111-5053", "Recipe": "lsl_6in "}
""";
job = new(lsl2SQLConnectionString, metrologyFileShare, barcodeHostFileShare, httpClient, mid, enteredDateTimeFilter, loadSignatureDateTimeFilter);
Assert.IsFalse(string.IsNullOrEmpty(job.ProcessType)); // == "23");
Assert.IsFalse(string.IsNullOrEmpty(job.LotName)); // == "111111");
Assert.IsFalse(string.IsNullOrEmpty(job.ProductName)); // == "5053");
LoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Exit"));
LoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Exit"));
NonThrowTryCatch();
}
} }

View File

@ -0,0 +1,20 @@
"use strict";
const rds = 'http://messa020ec.infineon.com:8080/api/oiWizard/materials/rds/1005602';
let workItemCollection = [];
await fetch(rds)
.then((res) => res.text())
.then((text) => {
const records = JSON.parse(text);
if (records != undefined) {
if (records != undefined) {
}
}
})
.catch((e) => console.error(e));
workItemCollection.forEach(element => {
console.log(`{"Id": "${element.Id}", "State": "${element.State}", "ClosedDate": "${element.ClosedDate}", "TargetDate": "${element.TargetDate}", "IterationPath": "${element.IterationPath}", "AssignedTo": "${element.AssignedTo}", "Title": "${element.Title}"},`);
});
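For comparison with the C# side of this repository, a minimal HttpClient sketch of the same materials/rds lookup follows. The URL is the one used by the script above; the response is treated as opaque text because its shape is not shown in this compare, and the class and method names are placeholders.

using System;
using System.Net.Http;
using System.Threading.Tasks;

internal static class RdsLookupSketch
{
    // Same endpoint as the script above.
    private const string Rds = "http://messa020ec.infineon.com:8080/api/oiWizard/materials/rds/1005602";

    internal static async Task RunAsync()
    {
        using HttpClient httpClient = new();
        try
        {
            // Read the payload as text; mapping it to a typed model is left out because the schema is not in this compare.
            string text = await httpClient.GetStringAsync(Rds);
            Console.WriteLine(text);
        }
        catch (Exception exception)
        {
            Console.Error.WriteLine(exception.Message);
        }
    }
}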

76 Adaptation/bun.lock Normal file
View File

@ -0,0 +1,76 @@
{
"lockfileVersion": 1,
"workspaces": {
"": {
"name": "adaptation",
"devDependencies": {
"@types/bun": "latest",
"axios": "^1.11.0",
},
"peerDependencies": {
"typescript": "^5",
},
},
},
"packages": {
"@types/bun": ["@types/bun@1.2.19", "", { "dependencies": { "bun-types": "1.2.19" } }, "sha512-d9ZCmrH3CJ2uYKXQIUuZ/pUnTqIvLDS0SK7pFmbx8ma+ziH/FRMoAq5bYpRG7y+w1gl+HgyNZbtqgMq4W4e2Lg=="],
"@types/node": ["@types/node@24.1.0", "", { "dependencies": { "undici-types": "~7.8.0" } }, "sha512-ut5FthK5moxFKH2T1CUOC6ctR67rQRvvHdFLCD2Ql6KXmMuCrjsSsRI9UsLCm9M18BMwClv4pn327UvB7eeO1w=="],
"@types/react": ["@types/react@19.1.9", "", { "dependencies": { "csstype": "^3.0.2" } }, "sha512-WmdoynAX8Stew/36uTSVMcLJJ1KRh6L3IZRx1PZ7qJtBqT3dYTgyDTx8H1qoRghErydW7xw9mSJ3wS//tCRpFA=="],
"asynckit": ["asynckit@0.4.0", "", {}, "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q=="],
"axios": ["axios@1.11.0", "", { "dependencies": { "follow-redirects": "^1.15.6", "form-data": "^4.0.4", "proxy-from-env": "^1.1.0" } }, "sha512-1Lx3WLFQWm3ooKDYZD1eXmoGO9fxYQjrycfHFC8P0sCfQVXyROp0p9PFWBehewBOdCwHc+f/b8I0fMto5eSfwA=="],
"bun-types": ["bun-types@1.2.19", "", { "dependencies": { "@types/node": "*" }, "peerDependencies": { "@types/react": "^19" } }, "sha512-uAOTaZSPuYsWIXRpj7o56Let0g/wjihKCkeRqUBhlLVM/Bt+Fj9xTo+LhC1OV1XDaGkz4hNC80et5xgy+9KTHQ=="],
"call-bind-apply-helpers": ["call-bind-apply-helpers@1.0.2", "", { "dependencies": { "es-errors": "^1.3.0", "function-bind": "^1.1.2" } }, "sha512-Sp1ablJ0ivDkSzjcaJdxEunN5/XvksFJ2sMBFfq6x0ryhQV/2b/KwFe21cMpmHtPOSij8K99/wSfoEuTObmuMQ=="],
"combined-stream": ["combined-stream@1.0.8", "", { "dependencies": { "delayed-stream": "~1.0.0" } }, "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg=="],
"csstype": ["csstype@3.1.3", "", {}, "sha512-M1uQkMl8rQK/szD0LNhtqxIPLpimGm8sOBwU7lLnCpSbTyY3yeU1Vc7l4KT5zT4s/yOxHH5O7tIuuLOCnLADRw=="],
"delayed-stream": ["delayed-stream@1.0.0", "", {}, "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ=="],
"dunder-proto": ["dunder-proto@1.0.1", "", { "dependencies": { "call-bind-apply-helpers": "^1.0.1", "es-errors": "^1.3.0", "gopd": "^1.2.0" } }, "sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A=="],
"es-define-property": ["es-define-property@1.0.1", "", {}, "sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g=="],
"es-errors": ["es-errors@1.3.0", "", {}, "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw=="],
"es-object-atoms": ["es-object-atoms@1.1.1", "", { "dependencies": { "es-errors": "^1.3.0" } }, "sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA=="],
"es-set-tostringtag": ["es-set-tostringtag@2.1.0", "", { "dependencies": { "es-errors": "^1.3.0", "get-intrinsic": "^1.2.6", "has-tostringtag": "^1.0.2", "hasown": "^2.0.2" } }, "sha512-j6vWzfrGVfyXxge+O0x5sh6cvxAog0a/4Rdd2K36zCMV5eJ+/+tOAngRO8cODMNWbVRdVlmGZQL2YS3yR8bIUA=="],
"follow-redirects": ["follow-redirects@1.15.11", "", {}, "sha512-deG2P0JfjrTxl50XGCDyfI97ZGVCxIpfKYmfyrQ54n5FO/0gfIES8C/Psl6kWVDolizcaaxZJnTS0QSMxvnsBQ=="],
"form-data": ["form-data@4.0.4", "", { "dependencies": { "asynckit": "^0.4.0", "combined-stream": "^1.0.8", "es-set-tostringtag": "^2.1.0", "hasown": "^2.0.2", "mime-types": "^2.1.12" } }, "sha512-KrGhL9Q4zjj0kiUt5OO4Mr/A/jlI2jDYs5eHBpYHPcBEVSiipAvn2Ko2HnPe20rmcuuvMHNdZFp+4IlGTMF0Ow=="],
"function-bind": ["function-bind@1.1.2", "", {}, "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA=="],
"get-intrinsic": ["get-intrinsic@1.3.0", "", { "dependencies": { "call-bind-apply-helpers": "^1.0.2", "es-define-property": "^1.0.1", "es-errors": "^1.3.0", "es-object-atoms": "^1.1.1", "function-bind": "^1.1.2", "get-proto": "^1.0.1", "gopd": "^1.2.0", "has-symbols": "^1.1.0", "hasown": "^2.0.2", "math-intrinsics": "^1.1.0" } }, "sha512-9fSjSaos/fRIVIp+xSJlE6lfwhES7LNtKaCBIamHsjr2na1BiABJPo0mOjjz8GJDURarmCPGqaiVg5mfjb98CQ=="],
"get-proto": ["get-proto@1.0.1", "", { "dependencies": { "dunder-proto": "^1.0.1", "es-object-atoms": "^1.0.0" } }, "sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g=="],
"gopd": ["gopd@1.2.0", "", {}, "sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg=="],
"has-symbols": ["has-symbols@1.1.0", "", {}, "sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ=="],
"has-tostringtag": ["has-tostringtag@1.0.2", "", { "dependencies": { "has-symbols": "^1.0.3" } }, "sha512-NqADB8VjPFLM2V0VvHUewwwsw0ZWBaIdgo+ieHtK3hasLz4qeCRjYcqfB6AQrBggRKppKF8L52/VqdVsO47Dlw=="],
"hasown": ["hasown@2.0.2", "", { "dependencies": { "function-bind": "^1.1.2" } }, "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ=="],
"math-intrinsics": ["math-intrinsics@1.1.0", "", {}, "sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g=="],
"mime-db": ["mime-db@1.52.0", "", {}, "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg=="],
"mime-types": ["mime-types@2.1.35", "", { "dependencies": { "mime-db": "1.52.0" } }, "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw=="],
"proxy-from-env": ["proxy-from-env@1.1.0", "", {}, "sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg=="],
"typescript": ["typescript@5.9.2", "", { "bin": { "tsc": "bin/tsc", "tsserver": "bin/tsserver" } }, "sha512-CWBzXQrc/qOkhidw1OzBTQuYRbfyxDXJMVJ1XNwUHGROVmuaeiEm3OslpZ1RV96d7SKKjZKrSJu3+t/xlw3R9A=="],
"undici-types": ["undici-types@7.8.0", "", {}, "sha512-9UJ2xGDvQ43tYyVMpuHlsgApydB8ZKfVYTsLDhXkFL/6gfkp+U8xTGdh8pMJv1SpZna0zxG1DwsKZsreLbXBxw=="],
}
}

View File

@ -1,18 +1,29 @@
{ {
"devDependencies": {
"@types/bun": "latest",
"axios": "^1.11.0"
},
"module": "index.ts",
"name": "adaptation",
"peerDependencies": {
"typescript": "^5"
},
"private": true,
"scripts": { "scripts": {
"AA-CreateSelfDescription.Staging.v2_47_1-SP101_EQPT": "dotnet test --runtime win-x64 --no-build --filter \"FullyQualifiedName~Adaptation._Tests.CreateSelfDescription.Staging.v2_47_1 & ClassName~SP101_EQPT\" -- TestRunParameters.Parameter(name=\\\"WaitFor\\\", value=\\\"Debugger.IsAttached\\\")", "AA-CreateSelfDescription.Staging.v2_47_1-SP101_EQPT": "dotnet test --runtime win-x64 --no-build --filter \"FullyQualifiedName~Adaptation._Tests.CreateSelfDescription.Staging.v2_47_1 & ClassName~SP101_EQPT\" -- TestRunParameters.Parameter(name=\\\"WaitFor\\\", value=\\\"Debugger.IsAttached\\\")",
"Alpha": "ABCDEFGHIJKLMNOPQRSTUVWXYZ",
"BA-CreateSelfDescription.Staging.v2_47_1-SP101": "dotnet test --runtime win-x64 --no-build --filter \"FullyQualifiedName~Adaptation._Tests.CreateSelfDescription.Staging.v2_47_1 & ClassName~SP101\" -- TestRunParameters.Parameter(name=\\\"WaitFor\\\", value=\\\"Debugger.IsAttached\\\")", "BA-CreateSelfDescription.Staging.v2_47_1-SP101": "dotnet test --runtime win-x64 --no-build --filter \"FullyQualifiedName~Adaptation._Tests.CreateSelfDescription.Staging.v2_47_1 & ClassName~SP101\" -- TestRunParameters.Parameter(name=\\\"WaitFor\\\", value=\\\"Debugger.IsAttached\\\")",
"CA-CreateSelfDescription.Staging.v2_47_1-MET08DDUPSP1TBI": "dotnet test --runtime win-x64 --no-build --filter \"FullyQualifiedName~Adaptation._Tests.CreateSelfDescription.Staging.v2_47_1 & ClassName~MET08DDUPSP1TBI\" -- TestRunParameters.Parameter(name=\\\"WaitFor\\\", value=\\\"Debugger.IsAttached\\\")", "CA-CreateSelfDescription.Staging.v2_47_1-MET08DDUPSP1TBI": "dotnet test --runtime win-x64 --no-build --filter \"FullyQualifiedName~Adaptation._Tests.CreateSelfDescription.Staging.v2_47_1 & ClassName~MET08DDUPSP1TBI\" -- TestRunParameters.Parameter(name=\\\"WaitFor\\\", value=\\\"Debugger.IsAttached\\\")",
"DA-CreateSelfDescription.Staging.v2_47_1": "dotnet test --runtime win-x64 --no-build --filter \"FullyQualifiedName~Adaptation._Tests.CreateSelfDescription.Staging.v2_47_1\" -- TestRunParameters.Parameter(name=\\\"WaitFor\\\", value=\\\"Debugger.IsAttached\\\")", "DA-CreateSelfDescription.Staging.v2_47_1": "dotnet test --runtime win-x64 --no-build --filter \"FullyQualifiedName~Adaptation._Tests.CreateSelfDescription.Staging.v2_47_1\" -- TestRunParameters.Parameter(name=\\\"WaitFor\\\", value=\\\"Debugger.IsAttached\\\")",
"EA-Extract.Staging.v2_47_1-SP101_EQPT-Staging__v2_47_1__SP101_EQPT__DownloadRsMFile637953072332628623__Normal": "dotnet test --runtime win-x64 --no-build --filter \"FullyQualifiedName~Adaptation._Tests.Extract.Staging.v2_47_1 & ClassName~SP101_EQPT & Name~Staging__v2_47_1__SP101_EQPT__DownloadRsMFile637953072332628623__Normal\" -- TestRunParameters.Parameter(name=\\\"WaitFor\\\", value=\\\"Debugger.IsAttached\\\")", "EA-Extract.Staging.v2_47_1-SP101_EQPT-Staging__v2_47_1__SP101_EQPT__DownloadRsMFile637953072332628623__Normal": "dotnet test --runtime win-x64 --no-build --filter \"FullyQualifiedName~Adaptation._Tests.Extract.Staging.v2_47_1 & ClassName~SP101_EQPT & Name~Staging__v2_47_1__SP101_EQPT__DownloadRsMFile637953072332628623__Normal\" -- TestRunParameters.Parameter(name=\\\"WaitFor\\\", value=\\\"Debugger.IsAttached\\\")",
"FA-Extract.Staging.v2_47_1-SP101-Staging__v2_47_1__SP101__txt637955319879801344__Normal": "dotnet test --filter \"FullyQualifiedName~Adaptation._Tests.Extract.Staging.v2_47_1 & ClassName~SP101 & Name~Staging__v2_47_1__SP101__txt637955319879801344__Normal\" -- TestRunParameters.Parameter(name=\\\"WaitFor\\\", value=\\\"Debugger.IsAttached\\\")", "FA-Extract.Staging.v2_47_1-SP101-Staging__v2_47_1__SP101__txt637955319879801344__Normal": "dotnet test --filter \"FullyQualifiedName~Adaptation._Tests.Extract.Staging.v2_47_1 & ClassName~SP101 & Name~Staging__v2_47_1__SP101__txt637955319879801344__Normal\" -- TestRunParameters.Parameter(name=\\\"WaitFor\\\", value=\\\"Debugger.IsAttached\\\")",
"GA-Extract.Staging.v2_47_1-MET08DDUPSP1TBI-Staging__v2_47_1__MET08DDUPSP1TBI__MoveMatchingFiles637955319879801344__Normal": "dotnet test --filter \"FullyQualifiedName~Adaptation._Tests.Extract.Staging.v2_47_1 & ClassName~MET08DDUPSP1TBI & Name~Staging__v2_47_1__MET08DDUPSP1TBI__MoveMatchingFiles637955319879801344__Normal\" -- TestRunParameters.Parameter(name=\\\"WaitFor\\\", value=\\\"Debugger.IsAttached\\\")", "GA-Extract.Staging.v2_47_1-MET08DDUPSP1TBI-Staging__v2_47_1__MET08DDUPSP1TBI__MoveMatchingFiles637955319879801344__Normal": "dotnet test --filter \"FullyQualifiedName~Adaptation._Tests.Extract.Staging.v2_47_1 & ClassName~MET08DDUPSP1TBI & Name~Staging__v2_47_1__MET08DDUPSP1TBI__MoveMatchingFiles637955319879801344__Normal\" -- TestRunParameters.Parameter(name=\\\"WaitFor\\\", value=\\\"Debugger.IsAttached\\\")",
"garbage-collect": "git gc",
"HA-Extract.Staging.v2_47_1-SP101-Staging__v2_47_1__SP101__pcl637812984345592512__MinFileLength": "dotnet test --filter \"FullyQualifiedName~Adaptation._Tests.Extract.Staging.v2_47_1 & ClassName~SP101 & Name~Staging__v2_47_1__SP101__pcl637812984345592512__MinFileLength\" -- TestRunParameters.Parameter(name=\\\"WaitFor\\\", value=\\\"Debugger.IsAttached\\\")", "HA-Extract.Staging.v2_47_1-SP101-Staging__v2_47_1__SP101__pcl637812984345592512__MinFileLength": "dotnet test --filter \"FullyQualifiedName~Adaptation._Tests.Extract.Staging.v2_47_1 & ClassName~SP101 & Name~Staging__v2_47_1__SP101__pcl637812984345592512__MinFileLength\" -- TestRunParameters.Parameter(name=\\\"WaitFor\\\", value=\\\"Debugger.IsAttached\\\")",
"HB-Extract.Staging.v2_47_1-MET08DDUPSP1TBI-Staging__v2_47_1__MET08DDUPSP1TBI__OpenInsight638052814829645888__IqsSql": "dotnet test --filter \"FullyQualifiedName~Adaptation._Tests.Extract.Staging.v2_47_1 & ClassName~MET08DDUPSP1TBI & Name~Staging__v2_47_1__MET08DDUPSP1TBI__OpenInsight638052814829645888__IqsSql\" -- TestRunParameters.Parameter(name=\\\"WaitFor\\\", value=\\\"Debugger.IsAttached\\\")", "HB-Extract.Staging.v2_47_1-MET08DDUPSP1TBI-Staging__v2_47_1__MET08DDUPSP1TBI__OpenInsight638052814829645888__IqsSql": "dotnet test --filter \"FullyQualifiedName~Adaptation._Tests.Extract.Staging.v2_47_1 & ClassName~MET08DDUPSP1TBI & Name~Staging__v2_47_1__MET08DDUPSP1TBI__OpenInsight638052814829645888__IqsSql\" -- TestRunParameters.Parameter(name=\\\"WaitFor\\\", value=\\\"Debugger.IsAttached\\\")",
"IA-Extract.Staging.v2_47_1-MET08DDUPSP1TBI-Staging__v2_47_1__MET08DDUPSP1TBI__TIBCO": "dotnet test --runtime win-x64 --no-build --filter \"FullyQualifiedName~Adaptation._Tests.Extract.Staging.v2_47_1 & ClassName~MET08DDUPSP1TBI & Name~Staging__v2_47_1__MET08DDUPSP1TBI__TIBCO\" -- TestRunParameters.Parameter(name=\\\"WaitFor\\\", value=\\\"Debugger.IsAttached\\\")", "IA-Extract.Staging.v2_47_1-MET08DDUPSP1TBI-Staging__v2_47_1__MET08DDUPSP1TBI__TIBCO": "dotnet test --runtime win-x64 --no-build --filter \"FullyQualifiedName~Adaptation._Tests.Extract.Staging.v2_47_1 & ClassName~MET08DDUPSP1TBI & Name~Staging__v2_47_1__MET08DDUPSP1TBI__TIBCO\" -- TestRunParameters.Parameter(name=\\\"WaitFor\\\", value=\\\"Debugger.IsAttached\\\")",
"Alpha": "ABCDEFGHIJKLMNOPQRSTUVWXYZ",
"kanbn.board": "kanbn board", "kanbn.board": "kanbn board",
"kanbn.board.json": "kanbn board -j > .kanbn/board.json", "kanbn.board.json": "kanbn board -j > .kanbn/board.json"
"garbage-collect": "git gc" },
} "type": "module"
} }

29 Adaptation/tsconfig.json Normal file
View File

@ -0,0 +1,29 @@
{
"compilerOptions": {
// Environment setup & latest features
"lib": ["ESNext"],
"target": "ESNext",
"module": "Preserve",
"moduleDetection": "force",
"jsx": "react-jsx",
"allowJs": true,
// Bundler mode
"moduleResolution": "bundler",
"allowImportingTsExtensions": true,
"verbatimModuleSyntax": true,
"noEmit": true,
// Best practices
"strict": true,
"skipLibCheck": true,
"noFallthroughCasesInSwitch": true,
"noUncheckedIndexedAccess": true,
"noImplicitOverride": true,
// Some stricter flags (disabled by default)
"noUnusedLocals": false,
"noUnusedParameters": false,
"noPropertyAccessFromIndexSignature": false
}
}