Compare commits

...

3 Commits

SHA1 Message Date
e37d2a0a56 Remove txt logic for standard BioRad runs 2025-08-12 10:31:03 -07:00
38bd1f5507 Push Stratus Qual runs at OI level back to Stratus (process-data-standard-format alignment; Tasks Input) 2025-08-05 12:30:08 -07:00
7556b12a0d Now relying on pipeline to copy files from file shares for ghost-pcl and linc-pdfc 2025-06-27 12:44:22 -07:00
15 changed files with 390 additions and 126 deletions

View File

@@ -110,7 +110,7 @@ dotnet_diagnostic.CA2254.severity = none # CA2254: The logging message template
dotnet_diagnostic.IDE0001.severity = warning # IDE0001: Simplify name
dotnet_diagnostic.IDE0002.severity = warning # Simplify (member access) - System.Version.Equals("1", "2"); Version.Equals("1", "2");
dotnet_diagnostic.IDE0004.severity = warning # IDE0004: Cast is redundant.
-dotnet_diagnostic.IDE0005.severity = warning # Using directive is unnecessary
+dotnet_diagnostic.IDE0005.severity = none # Using directive is unnecessary
dotnet_diagnostic.IDE0028.severity = none # IDE0028: Collection initialization can be simplified
dotnet_diagnostic.IDE0031.severity = warning # Use null propagation (IDE0031)
dotnet_diagnostic.IDE0047.severity = warning # IDE0047: Parentheses can be removed
@@ -122,6 +122,7 @@ dotnet_diagnostic.IDE0290.severity = none # Use primary constructor [Distance]cs
dotnet_diagnostic.IDE0300.severity = none # IDE0300: Collection initialization can be simplified
dotnet_diagnostic.IDE0301.severity = none #IDE0301: Collection initialization can be simplified
dotnet_diagnostic.IDE0305.severity = none # IDE0305: Collection initialization can be simplified
+dotnet_diagnostic.MSTEST0015.severity = none # MSTEST0015: Test method {method} should not be ignored
dotnet_diagnostic.MSTEST0037.severity = error # MSTEST0037: Use proper 'Assert' methods
dotnet_diagnostic.SYSLIB1045.severity = none # SYSLIB1045: diagnostics for regex source generation
dotnet_naming_rule.abstract_method_should_be_pascal_case.severity = warning

View File

@@ -1,19 +1,134 @@
{
"version": "2.0.0",
"inputs": [
{
"default": "Development",
"description": "Which ASP Net Core Environment?",
"id": "ASPNETCORE_ENVIRONMENT",
"options": [
"Development",
"Production"
],
"type": "pickString"
},
{
"default": "{AssemblyTitle}",
"description": "What Assembly Title?",
"id": "AssemblyTitle",
"type": "promptString"
},
{
"default": "{Build.BuildId}",
"description": "Which Build BuildId?",
"id": "Build.BuildId",
"type": "promptString"
},
{
"default": "{Build.Reason}",
"description": "Which Build Reason?",
"id": "Build.Reason",
"type": "promptString"
},
{
"default": "{Build.Repository.Id}",
"description": "Which Build Repository Id?",
"id": "Build.Repository.Id",
"type": "promptString"
},
{
"default": "{Build.Repository.Name}",
"description": "Which Build Repository Name?",
"id": "Build.Repository.Name",
"type": "promptString"
},
{
"default": "{Build.SourceVersion}",
"description": "Which Build Source Version?",
"id": "Build.SourceVersion",
"type": "promptString"
},
{
"default": "Debug",
"description": "Which Configuration?",
"id": "Configuration",
"options": [
"Debug",
"Release"
],
"type": "pickString"
},
{
"default": "net8.0",
"description": "Which Core Version?",
"id": "CoreVersion",
"options": [
"net8.0"
],
"type": "pickString"
},
{
"default": "C:/Program Files (x86)/Microsoft Visual Studio/2022/BuildTools/MSBuild/Current/Bin/MSBuild.exe",
"description": "Which MS Build?",
"id": "MSBuild",
"type": "promptString"
},
{
"default": "https://artifactory.intra.infineon.com/artifactory/api/nuget/ngt-fi-package-main-vir/",
"description": "Which Nuget Source?",
"id": "NugetSource",
"type": "promptString"
},
{
"default": "win-x64",
"description": "Which Runtime?",
"id": "Runtime",
"options": [
"win-x64",
"win-x32",
"linux-x64",
"linux-x32"
],
"type": "pickString"
},
{
"default": "L:/",
"description": "Which System DefaultWorkingDirectory?",
"id": "System.DefaultWorkingDirectory",
"options": [
"L:/",
"D:/",
"C:/"
],
"type": "pickString"
},
{
"default": "v4.8",
"description": "Which Core Target Framework Version?",
"id": "TargetFrameworkVersion",
"options": [
"v4.8"
],
"type": "pickString"
},
{
"default": "{UserSecretsId}",
"description": "Which Core User Secrets Id?",
"id": "UserSecretsId",
"type": "promptString"
}
],
"tasks": [ "tasks": [
{ {
"label": "Build", "label": "Build",
"command": "dotnet", "command": "dotnet",
"type": "process", "type": "process",
"args": [ "args": [
"build", "build"
"/property:GenerateFullPaths=true",
"/consoleloggerparameters:NoSummary"
], ],
"problemMatcher": "$msCompile" "problemMatcher": "$msCompile"
}, },
{ {
"label": "Test-Debug", "label": "Test Debug",
"command": "dotnet", "command": "dotnet",
"type": "process", "type": "process",
"args": [ "args": [
@ -24,7 +139,7 @@
"problemMatcher": "$msCompile" "problemMatcher": "$msCompile"
}, },
{ {
"label": "Test-Release", "label": "Test Release",
"command": "dotnet", "command": "dotnet",
"type": "process", "type": "process",
"args": [ "args": [
@ -50,7 +165,7 @@
"problemMatcher": "$msCompile" "problemMatcher": "$msCompile"
}, },
{ {
"label": "Format-Whitespaces", "label": "Format Whitespaces",
"command": "dotnet", "command": "dotnet",
"type": "process", "type": "process",
"args": [ "args": [
@ -87,13 +202,13 @@
"problemMatcher": "$msCompile" "problemMatcher": "$msCompile"
}, },
{ {
"label": "Project", "label": "Code Project",
"type": "shell", "type": "shell",
"command": "code ../MET08THFTIRQS408M.csproj", "command": "code ../MET08THFTIRQS408M.csproj",
"problemMatcher": [] "problemMatcher": []
}, },
{ {
"label": "Readme", "label": "Code Read Me",
"type": "shell", "type": "shell",
"command": "code ../README.md", "command": "code ../README.md",
"problemMatcher": [] "problemMatcher": []
@ -113,7 +228,7 @@
"problemMatcher": [] "problemMatcher": []
}, },
{ {
"label": "Git Config", "label": "Code Git Config",
"type": "shell", "type": "shell",
"command": "code ../.git/config", "command": "code ../.git/config",
"problemMatcher": [] "problemMatcher": []

View File

@@ -128,7 +128,7 @@ public class FileRead : Shared.FileRead, IFileRead
Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
FileCopy(reportFullPath, dateTime, descriptions);
-results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics[0]), tests, jsonElements, new List<FileInfo>());
+results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics), tests, jsonElements, new List<FileInfo>());
return results;
}

View File

@@ -153,7 +153,7 @@ public class FileRead : Shared.FileRead, IFileRead
Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
MoveArchive(reportFullPath, dateTime);
-results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics[0]), tests, jsonElements, new List<FileInfo>());
+results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics), tests, jsonElements, new List<FileInfo>());
return results;
}

View File

@@ -127,7 +127,7 @@ public class FileRead : Shared.FileRead, IFileRead
Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
FileCopy(reportFullPath, dateTime, descriptions);
-results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics[0]), tests, jsonElements, new List<FileInfo>());
+results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics), tests, jsonElements, new List<FileInfo>());
return results;
}

View File

@@ -268,7 +268,7 @@ public class FileRead : Shared.FileRead, IFileRead
}
}
-private static ReadOnlyCollection<Pre> GetPreCollection(int numberLength, string parentDirectory, ReadOnlyCollection<string> matchingFiles, bool _)
+private static ReadOnlyCollection<Pre> GetPreCollection(int numberLength, string parentDirectory, ReadOnlyCollection<string> matchingFiles)
{
List<Pre> results = new();
Pre pre;
@@ -313,8 +313,13 @@
continue;
if (!_StaticRuns.TryGetValue(_Logistics.Sequence, out List<Shared.Metrology.WS.Results>? wsResults))
wsResults = null;
-ProcessDataStandardFormat.Write(preWith.CheckFile, processDataStandardFormat, wsResults);
-File.Delete(preWith.MatchingFile);
+if (processDataStandardFormat.InputPDSF is null)
+File.Move(preWith.MatchingFile, preWith.CheckFile);
+else
+{
+ProcessDataStandardFormat.Write(preWith.CheckFile, processDataStandardFormat, wsResults);
+File.Delete(preWith.MatchingFile);
+}
if (Directory.Exists(preWith.NoWaitDirectory))
{
post = new(preWith.CheckFile, preWith.ErrFile);
@@ -366,10 +371,7 @@
{ CreatePointerFile(numberLength, parentParentDirectory, matchingFiles); }
catch (Exception) { }
}
-JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(processDataStandardFormat);
-List<QS408M.Description> descriptions = QS408M.ProcessData.GetDescriptions(jsonElements);
-bool mesEntityMatchesProcess = descriptions.Count > 0 && descriptions[0].MesEntity == descriptions[0].Reactor;
-ReadOnlyCollection<Pre> preCollection = GetPreCollection(numberLength, parentParentDirectory, matchingFiles, mesEntityMatchesProcess);
+ReadOnlyCollection<Pre> preCollection = GetPreCollection(numberLength, parentParentDirectory, matchingFiles);
ReadOnlyCollection<PreWith> preWithCollection = GetPreWithCollection(preCollection);
MoveCollection(dateTime, processDataStandardFormat, preWithCollection);
return results;

View File

@@ -113,76 +113,102 @@ public class FileRead : Shared.FileRead, IFileRead
return results;
}
-private static string GetLines(List<QS408M.Description> descriptions)
+private static string GetLines(Logistics logistics, List<QS408M.Description> descriptions, bool isStratusQual)
{
StringBuilder results = new();
char del = '\t';
QS408M.Description x = descriptions[0];
-_ = results.Append("Bio-Rad ").Append(x.UniqueId).Append(del).
-Append(x.Date).Append(del).
-Append(x.ThicknessFourteenCriticalPointsAverage).Append(del).
-Append(x.Recipe).Append(del).
-Append(x.Reactor).Append(del).
-Append(x.RDS).Append(del).
-Append(x.PSN).Append(del).
-Append(x.Layer).Append(del).
-Append(x.Zone).Append(del).
-Append(x.Cassette).Append(del).
-Append(x.Wafer).Append(del).
-Append(x.RVThickness);
-for (int i = 0; i < descriptions.Count; i++)
-_ = results.Append(del).Append(descriptions[i].Position).
-Append(del).Append(descriptions[i].Thickness);
+if (isStratusQual)
+{
+_ = results.Append("Stratus_").Append(logistics.MID).Append('_').Append(logistics.DateTimeFromSequence.ToString("yyyyMMddhhmmssfff")).Append(del).
+Append(x.Date).Append(del).
+Append(logistics.JobID).Append(del).
+Append("FQA Thickness").Append(del).
+Append(x.Employee).Append(del).
+Append(x.Recipe).Append(del).
+Append(x.Reactor).Append(del).
+Append(x.RDS).Append(del).
+Append(x.PSN).Append(del).
+Append(x.Lot).Append(del).
+Append(x.Cassette).Append(del).
+Append(x.MeanThickness);
+for (int i = 0; i < descriptions.Count; i++)
+_ = results.Append(del).Append(descriptions[i].Slot).
+Append(del).Append(descriptions[i].Thickness);
+}
+else
+{
+_ = results.Append("Bio-Rad ").Append(x.UniqueId).Append(del).
+Append(x.Date).Append(del).
+Append(x.ThicknessFourteenCriticalPointsAverage).Append(del).
+Append(x.Recipe).Append(del).
+Append(x.Reactor).Append(del).
+Append(x.RDS).Append(del).
+Append(x.PSN).Append(del).
+Append(x.Layer).Append(del).
+Append(x.Zone).Append(del).
+Append(x.Cassette).Append(del).
+Append(x.Wafer).Append(del).
+Append(x.RVThickness);
+for (int i = 0; i < descriptions.Count; i++)
+_ = results.Append(del).Append(descriptions[i].Position).
+Append(del).Append(descriptions[i].Thickness);
+}
return results.ToString();
}
private void SaveOpenInsightFile(string reportFullPath, DateTime dateTime, ProcessDataStandardFormat processDataStandardFormat, List<QS408M.Description> descriptions, Test[] tests)
{
+string duplicateFile;
bool isDummyRun = false;
List<(Shared.Properties.IScopeInfo, string)> collection = new();
string duplicateDirectory = Path.Combine(_FileConnectorConfiguration.SourceFileLocation, _CellInstanceName);
+bool isStratusQual = _Logistics.MesEntity is "BIORAD4" or "BIORAD5" && _Logistics.MesEntity == _Logistics.ProcessJobID;
+if (isStratusQual)
+duplicateDirectory = duplicateDirectory.Replace("MET08THFTIRQS408M", "MET08THFTIRSTRATUS");
if (!Directory.Exists(duplicateDirectory))
_ = Directory.CreateDirectory(duplicateDirectory);
string successDirectory = _FileConnectorConfiguration.AlternateTargetFolder;
-if (!Directory.Exists(Path.Combine(duplicateDirectory, "1")))
+if (descriptions.Count == 0 || tests.Length == 0)
+duplicateFile = Path.Combine(duplicateDirectory, Path.GetFileName(reportFullPath));
+else
{
-string parentParent = GetParentParent(_FileConnectorConfiguration.SourceFileLocation);
+long? subgroupId;
-if (parentParent.Contains(_CellInstanceName))
+string fileName = Path.GetFileName(reportFullPath);
-parentParent = Path.GetDirectoryName(parentParent);
+long breakAfter = dateTime.AddSeconds(_BreakAfterSeconds).Ticks;
-duplicateDirectory = Path.Combine(parentParent, "Data");
+long preWait = _FileConnectorConfiguration?.FileHandleWaitTime is null ? dateTime.AddMilliseconds(1234).Ticks : dateTime.AddMilliseconds(_FileConnectorConfiguration.FileHandleWaitTime.Value).Ticks;
-if (!Directory.Exists(duplicateDirectory))
+if (string.IsNullOrEmpty(descriptions[0].Reactor) || string.IsNullOrEmpty(descriptions[0].PSN))
-_ = Directory.CreateDirectory(duplicateDirectory);
+subgroupId = null;
-}
+else
-string duplicateFile = Path.Combine(duplicateDirectory, Path.GetFileName(reportFullPath));
+(subgroupId, int? _, string _) = FromIQS.GetCommandText(_IqsConnectionString, _Logistics, descriptions[0], breakAfter, preWait);
-if (descriptions.Count != 0 && tests.Length != 0)
+if (isStratusQual)
{
-string lines = GetLines(descriptions);
-if (!string.IsNullOrEmpty(lines))
-{
-long? subgroupId;
+string lines = GetLines(_Logistics, descriptions, isStratusQual);
-long breakAfter = dateTime.AddSeconds(_BreakAfterSeconds).Ticks;
-long preWait = _FileConnectorConfiguration?.FileHandleWaitTime is null ? dateTime.AddMilliseconds(1234).Ticks : dateTime.AddMilliseconds(_FileConnectorConfiguration.FileHandleWaitTime.Value).Ticks;
-if (string.IsNullOrEmpty(descriptions[0].Reactor) || string.IsNullOrEmpty(descriptions[0].PSN))
-subgroupId = null;
-else
-(subgroupId, int? _, string _) = FromIQS.GetCommandText(_IqsConnectionString, _Logistics, descriptions[0], breakAfter, preWait);
if (subgroupId is null)
collection.Add(new(new ScopeInfo(tests[0], _OpenInsightFilePattern), lines));
else
collection.Add(new(new ScopeInfo(tests[0], $"{subgroupId.Value} {_OpenInsightFilePattern}"), lines));
-if (_StaticRuns.TryGetValue(_Logistics.Sequence, out List<WS.Results> wsResults))
-{
-if (wsResults is null || wsResults.Count != 1)
-throw new NullReferenceException($"{nameof(wsResults)} {wsResults?.Count} != 1 {_Logistics.Sequence}!");
-lock (_StaticRuns)
-wsResults[0] = WS.Results.Get(wsResults[0], subgroupId);
-}
-string weekOfYear = _Calendar.GetWeekOfYear(_Logistics.DateTimeFromSequence, CalendarWeekRule.FirstDay, DayOfWeek.Sunday).ToString("00");
-FromIQS.Save(_OpenInsightApiECDirectory, _Logistics, reportFullPath, processDataStandardFormat, descriptions.First(), lines, subgroupId, weekOfYear);
-}
+if (_StaticRuns.TryGetValue(_Logistics.Sequence, out List<WS.Results> wsResults))
+{
+if (wsResults is null || wsResults.Count != 1)
+throw new NullReferenceException($"{nameof(wsResults)} {wsResults?.Count} != 1 {_Logistics.Sequence}!");
+lock (_StaticRuns)
+wsResults[0] = WS.Results.Get(wsResults[0], subgroupId);
+}
+if (!fileName.StartsWith("Viewer"))
+duplicateFile = Path.Combine(duplicateDirectory, $"{subgroupId} {fileName}".TrimStart());
+else
+duplicateFile = Path.Combine(duplicateDirectory, $"{$"Viewer {subgroupId}".TrimEnd()} {fileName.Replace("Viewer", string.Empty)}");
+string weekOfYear = _Calendar.GetWeekOfYear(_Logistics.DateTimeFromSequence, CalendarWeekRule.FirstDay, DayOfWeek.Sunday).ToString("00");
+FromIQS.Save(_OpenInsightApiECDirectory, _Logistics, reportFullPath, processDataStandardFormat, descriptions.First(), subgroupId, weekOfYear);
+}
if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
+{
+if (!isStratusQual)
+File.Copy(reportFullPath, duplicateFile, overwrite: true);
WaitForFileConsumption(dateTime, descriptions, isDummyRun, successDirectory, duplicateDirectory, collection, duplicateFile);
+}
}
private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime) private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)

View File

@@ -376,7 +376,7 @@ public class FromIQS
return result;
}
-internal static void Save(string openInsightApiECDirectory, Logistics logistics, string reportFullPath, ProcessDataStandardFormat processDataStandardFormat, QS408M.Description description, string lines, long? subGroupId, string weekOfYear)
+internal static void Save(string openInsightApiECDirectory, Logistics logistics, string reportFullPath, ProcessDataStandardFormat processDataStandardFormat, QS408M.Description description, long? subGroupId, string weekOfYear)
{
string checkFile;
string fileName = Path.GetFileName(reportFullPath);
@@ -390,15 +390,9 @@
checkFile = Path.Combine(ecDirectory, fileName);
if (ecExists && !File.Exists(checkFile))
File.Copy(reportFullPath, checkFile);
-checkFile = Path.Combine(ecDirectory, $"{logistics.DateTimeFromSequence.Ticks}.txt");
-if (ecExists && !File.Exists(checkFile))
-File.WriteAllText(checkFile, lines);
checkFile = Path.Combine(ecDirectory, $"{logistics.DateTimeFromSequence.Ticks}.json");
if (ecExists && !File.Exists(checkFile))
File.WriteAllText(checkFile, json);
-checkFile = Path.Combine(ecDirectory, $"{logistics.DateTimeFromSequence.Ticks}.lbl");
-if (ecExists && !File.Exists(checkFile))
-File.WriteAllText(checkFile, processDataStandardFormat.Body[processDataStandardFormat.Body.Count - 1]);
}
private static string GetCommandText(string[] iqsCopyValues) private static string GetCommandText(string[] iqsCopyValues)

View File

@@ -147,7 +147,7 @@ public class FileRead : Shared.FileRead, IFileRead
Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
SendData(reportFullPath, dateTime, descriptions);
-results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics[0]), tests, jsonElements, new List<FileInfo>());
+results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics), tests, jsonElements, new List<FileInfo>());
return results;
}

View File

@@ -171,7 +171,7 @@ public class FileRead : Shared.FileRead, IFileRead
Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
PostOpenInsightMetrologyViewerAttachments(descriptions);
-results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics[0]), tests, jsonElements, new List<FileInfo>());
+results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics), tests, jsonElements, new List<FileInfo>());
return results;
}

View File

@@ -172,7 +172,7 @@ public class FileRead : Shared.FileRead, IFileRead
JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(processDataStandardFormat);
List<QS408M.Description> descriptions = QS408M.ProcessData.GetDescriptions(jsonElements);
Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
-results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics[0]), tests, jsonElements, new List<FileInfo>());
+results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics), tests, jsonElements, new List<FileInfo>());
if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
DirectoryMove(reportFullPath, dateTime, descriptions);
else if (!_IsEAFHosted)

View File

@@ -125,7 +125,7 @@ public class FileRead : Shared.FileRead, IFileRead
Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
FileCopy(reportFullPath, dateTime, descriptions);
-results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics[0]), tests, jsonElements, new List<FileInfo>());
+results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics), tests, jsonElements, new List<FileInfo>());
return results;
}

View File

@@ -41,6 +41,24 @@ stages:
displayName: "Nuget Clear"
enabled: false
- task: CopyFiles@2
displayName: 'Copy GhostPCL Files to: D:\EAF-Mesa-Integration\copy'
inputs:
Contents: "*"
SourceFolder: '\\mesfs.infineon.com\EC_EAFRepository\Staging\DeploymentStorage\GhostPCL'
TargetFolder: 'D:\EAF-Mesa-Integration\copy\GhostPCL'
OverWrite: true
enabled: true
- task: CopyFiles@2
displayName: 'Copy LincPDFC Files to: D:\EAF-Mesa-Integration\copy'
inputs:
Contents: "*"
SourceFolder: '\\mesfs.infineon.com\EC_EAFRepository\Staging\DeploymentStorage\LincPDFC'
TargetFolder: 'D:\EAF-Mesa-Integration\copy\LincPDFC'
OverWrite: true
enabled: true
- script: |
"C:\program files\dotnet\dotnet.exe" user-secrets init
"C:\program files\dotnet\dotnet.exe" user-secrets set "BuildNumber" "$(Build.BuildId)"
@@ -184,6 +202,24 @@
displayName: "Nuget Clear"
enabled: false
- task: CopyFiles@2
displayName: 'Copy GhostPCL Files to: D:\EAF-Mesa-Integration\copy'
inputs:
Contents: "*"
SourceFolder: '\\mestsa003.infineon.com\EC_EAFRepository\Staging\DeploymentStorage\GhostPCL'
TargetFolder: 'D:\EAF-Mesa-Integration\copy\GhostPCL'
OverWrite: true
enabled: true
- task: CopyFiles@2
displayName: 'Copy LincPDFC Files to: D:\EAF-Mesa-Integration\copy'
inputs:
Contents: "*"
SourceFolder: '\\mestsa003.infineon.com\EC_EAFRepository\Staging\DeploymentStorage\LincPDFC'
TargetFolder: 'D:\EAF-Mesa-Integration\copy\LincPDFC'
OverWrite: true
enabled: true
- script: |
"C:\program files\dotnet\dotnet.exe" user-secrets init
"C:\program files\dotnet\dotnet.exe" user-secrets set "BuildNumber" "$(Build.BuildId)"

View File

@@ -478,27 +478,14 @@ public class FileRead : Properties.IFileRead
}
}
-protected void WritePDSF(IFileRead fileRead, JsonElement[] jsonElements)
+protected static void WritePDSF(IFileRead fileRead, JsonElement[] jsonElements)
{
-string directory;
-string day = $"{_Logistics.DateTimeFromSequence:yyyy-MM-dd}";
-string weekOfYear = _Calendar.GetWeekOfYear(_Logistics.DateTimeFromSequence, CalendarWeekRule.FirstDay, DayOfWeek.Sunday).ToString("00");
-string weekDirectory = $"{_Logistics.DateTimeFromSequence:yyyy}_Week_{weekOfYear}";
-if (!_CellInstanceConnectionName.StartsWith(_CellInstanceName) && _CellInstanceConnectionNameBase == _EquipmentType)
-directory = Path.Combine(_TracePath, _EquipmentType, "Target", weekDirectory, day, _CellInstanceName, _CellInstanceConnectionName);
-else
-directory = Path.Combine(_TracePath, _EquipmentType, "Source", weekDirectory, day, _CellInstanceName, _CellInstanceConnectionName);
-if (!Directory.Exists(directory))
-_ = Directory.CreateDirectory(directory);
-string file = Path.Combine(directory, string.Concat(_Logistics.MesEntity, "_", _Logistics.Sequence, ".ipdsf"));
-string lines = ProcessDataStandardFormat.GetPDSFText(fileRead, _Logistics, jsonElements, logisticsText: string.Empty);
-File.WriteAllText(file, lines);
-if (_Logistics.TotalSecondsSinceLastWriteTimeFromSequence > 600)
-{
-try
-{ File.SetLastWriteTime(file, _Logistics.DateTimeFromSequence); }
-catch (Exception) { }
-}
+#pragma warning disable CA1510
+if (fileRead is null)
+throw new ArgumentNullException(nameof(fileRead));
+if (jsonElements is null)
+throw new ArgumentNullException(nameof(jsonElements));
+#pragma warning restore CA1510
}
protected void WaitForThread(Thread thread, List<Exception> threadExceptions)

View File

@@ -136,6 +136,7 @@ internal class ProcessDataStandardFormat
internal static ProcessDataStandardFormat GetProcessDataStandardFormat(string reportFullPath, string[]? lines = null, int columnsLine = 6)
{
ProcessDataStandardFormat result;
+long? sequence;
string segment;
string[] segments;
bool addToFooter = false;
@@ -186,13 +187,25 @@
}
string? linesOne = lines.Length > 0 && body.Count == 0 && columns.Count == 0 ? lines[1] : null;
logistics = GetLogistics(footer, linesOne: linesOne);
if (logistics.Count == 0)
sequence = null;
else
{
segments = logistics[0].Split(new string[] { "SEQUENCE=" }, StringSplitOptions.None);
sequence = segments.Length < 2 || !long.TryParse(segments[1].Split(';')[0], out long s) ? null : s;
}
if (sequence is null && !string.IsNullOrEmpty(reportFullPath))
{
FileInfo fileInfo = new(reportFullPath);
sequence = fileInfo.LastWriteTime.Ticks;
}
result = new(body: body.AsReadOnly(),
columns: columns.AsReadOnly(),
footer: footer.AsReadOnly(),
header: header.AsReadOnly(),
inputPDSF: null,
logistics: logistics,
-sequence: null);
+sequence: sequence);
return result;
}
@@ -236,7 +249,7 @@
private static ProcessDataStandardFormat GetProcessDataStandardFormat(DateTime lastWriteTime, int columnsLine, string path, string[]? lines)
{
ProcessDataStandardFormat result;
-long sequence;
+long? sequence;
string[] segments;
bool addToFooter = false;
List<string> body = new();
@@ -268,12 +281,13 @@
}
logistics = GetLogistics(footer, linesOne: null);
if (logistics.Count == 0)
-sequence = lastWriteTime.Ticks;
+sequence = null;
else
{
segments = logistics[0].Split(new string[] { "SEQUENCE=" }, StringSplitOptions.None);
-sequence = segments.Length < 2 || !long.TryParse(segments[1].Split(';')[0], out long s) ? lastWriteTime.Ticks : s;
+sequence = segments.Length < 2 || !long.TryParse(segments[1].Split(';')[0], out long s) ? null : s;
}
+sequence ??= lastWriteTime.Ticks;
result = new(body: body.AsReadOnly(),
columns: new(columns),
footer: footer.AsReadOnly(),
@@ -302,7 +316,7 @@
segments = bodyLine.Split('\t').ToList();
for (int c = 0; c < segments.Count; c++)
{
-value = segments[c].Replace("\"", "\\\"").Replace("\\", "\\\\");
+value = segments[c].Replace("\\", "\\\\").Replace("\"", "\\\"");
_ = stringBuilder.Append('"').Append(processDataStandardFormat.Columns[c]).Append("\":\"").Append(value).Append("\",");
}
_ = stringBuilder.Remove(stringBuilder.Length - 1, 1);
@@ -378,19 +392,27 @@
break;
for (int c = 0; c < segments.Length; c++)
{
-value = segments[c].Replace("\"", "\\\"").Replace("\\", "\\\\");
+value = segments[c].Replace("\\", "\\\\").Replace("\"", "\\\"");
line += string.Concat('"', processDataStandardFormat.InputPDSF.Columns[c].Trim('"'), '"', ':', '"', value, '"', ',');
}
line = string.Concat(line.Substring(0, line.Length - 1), '}');
lines.Add(line);
}
string? json = null;
if (processDataStandardFormat.Footer is not null && processDataStandardFormat.Footer.Count > 0)
{
Dictionary<string, string> footerKeyValuePairs = GetFooterKeyValuePairs(processDataStandardFormat.Footer);
Dictionary<string, Dictionary<string, string>> logisticKeyValuePairs = GetLogisticKeyValuePairs(processDataStandardFormat.Footer, footerKeyValuePairs);
json = JsonSerializer.Serialize(logisticKeyValuePairs, DictionaryStringDictionaryStringStringSourceGenerationContext.Default.DictionaryStringDictionaryStringString);
}
string footerText = string.IsNullOrEmpty(json) || json == "{}" ? string.Empty : $",{Environment.NewLine}\"PDSF\":{Environment.NewLine}{json}";
result = string.Concat(
'{',
Environment.NewLine,
'"',
"Count",
'"',
": ",
processDataStandardFormat.Body.Count,
',',
Environment.NewLine,
@@ -409,17 +431,95 @@
'"',
"Sequence",
'"',
": ",
processDataStandardFormat.Sequence,
Environment.NewLine,
+footerText,
+Environment.NewLine,
'}');
return result;
-#pragma warning restore CA1845, IDE0057
+}
private static Dictionary<string, string> GetFooterKeyValuePairs(ReadOnlyCollection<string> footerLines)
{
Dictionary<string, string> results = new();
string[] segments;
foreach (string footerLine in footerLines)
{
segments = footerLine.Split('\t');
if (segments.Length != 2 || string.IsNullOrEmpty(segments[1].Trim()))
{
continue;
}
if (segments[1].Contains(';'))
{
continue;
}
else
{
if (results.ContainsKey(segments[0]))
{
continue;
}
results.Add(segments[0], segments[1]);
}
}
return results;
}
private static Dictionary<string, Dictionary<string, string>> GetLogisticKeyValuePairs(ReadOnlyCollection<string> footerLines, Dictionary<string, string> footerKeyValuePairs)
{
Dictionary<string, Dictionary<string, string>> results = new();
string[] segments;
string[] subSegments;
string[] subSubSegments;
Dictionary<string, string>? keyValue;
results.Add("Footer", footerKeyValuePairs);
foreach (string footerLine in footerLines)
{
segments = footerLine.Split('\t');
if (segments.Length != 2 || string.IsNullOrEmpty(segments[1].Trim()))
{
continue;
}
if (!segments[1].Contains(';') || !segments[1].Contains('='))
{
continue;
}
else
{
subSegments = segments[1].Split(';');
if (subSegments.Length < 1)
{
continue;
}
if (!results.TryGetValue(segments[0], out keyValue))
{
results.Add(segments[0], new());
if (!results.TryGetValue(segments[0], out keyValue))
{
throw new Exception();
}
}
foreach (string segment in subSegments)
{
subSubSegments = segment.Split('=');
if (subSubSegments.Length != 2)
{
continue;
}
keyValue.Add(subSubSegments[0], subSubSegments[1]);
}
}
}
return results;
} }
internal static void Write(string path, ProcessDataStandardFormat processDataStandardFormat, List<Metrology.WS.Results>? wsResults)
{
List<string> results = new();
+if (processDataStandardFormat.InputPDSF is null)
+throw new NullReferenceException(nameof(processDataStandardFormat.InputPDSF));
if (processDataStandardFormat.Sequence is null)
throw new NullReferenceException(nameof(processDataStandardFormat.Sequence));
string endOffset = "E#######T";
@@ -457,25 +557,22 @@
}
}
results.Add("END_HEADER");
-if (processDataStandardFormat.InputPDSF is not null)
-{
-results.Add(string.Empty);
-List<char> hyphens = new();
-results.AddRange(processDataStandardFormat.InputPDSF.Header.Select(l => l.Replace('\t', '|')));
-results.Add(string.Empty);
-results.Add($"|{string.Join("|", processDataStandardFormat.InputPDSF.Columns)}|");
-for (int i = 0; i < processDataStandardFormat.InputPDSF.Columns.Count; i++)
-hyphens.Add('-');
-results.Add($"|{string.Join("|", hyphens)}|");
-results.AddRange(processDataStandardFormat.InputPDSF.Body.Select(l => l.Replace('\t', '|')));
-results.Add(string.Empty);
-results.AddRange(processDataStandardFormat.InputPDSF.Footer.Select(l => l.Replace('\t', '|')));
-results.Add(string.Empty);
-results.Add("EOF");
-results.Add(string.Empty);
-string json = GetJson(processDataStandardFormat);
-results.Add(json);
-}
+results.Add(string.Empty);
+List<char> hyphens = new();
+results.AddRange(processDataStandardFormat.InputPDSF.Header.Select(l => $"|{l.Replace('\t', '|')}|"));
+results.Add(string.Empty);
+results.Add($"|{string.Join("|", processDataStandardFormat.InputPDSF.Columns)}|");
+for (int i = 0; i < processDataStandardFormat.InputPDSF.Columns.Count; i++)
+hyphens.Add('-');
+results.Add($"|{string.Join("|", hyphens)}|");
+results.AddRange(processDataStandardFormat.InputPDSF.Body.Select(l => $"|{l.Replace('\t', '|')}|"));
+results.Add(string.Empty);
+results.AddRange(processDataStandardFormat.InputPDSF.Footer.Select(l => $"|{l.Replace('\t', '|')}|"));
+results.Add(string.Empty);
+results.Add("EOF");
+results.Add(string.Empty);
+string json = GetJson(processDataStandardFormat);
+results.Add(json);
File.WriteAllText(path, string.Join(Environment.NewLine, results));
}
@@ -518,7 +615,7 @@
{
for (int c = 1; c < segments.Length; c++)
{
-value = segments[c].Replace("\"", "\\\"").Replace("\\", "\\\\");
+value = segments[c].Replace("\\", "\\\\").Replace("\"", "\\\"");
_ = stringBuilder.Append('"').Append(processDataStandardFormat.Columns[c]).Append("\":\"").Append(value).Append("\",");
}
}
@@ -526,7 +623,7 @@
{
for (int c = 1; c < segments.Length; c++)
{
-value = segments[c].Replace("\"", "\\\"").Replace("\\", "\\\\");
+value = segments[c].Replace("\\", "\\\\").Replace("\"", "\\\"");
if (string.IsNullOrEmpty(value))
_ = stringBuilder.Append('"').Append(processDataStandardFormat.Columns[c]).Append("\":").Append(value).Append("null,");
else if (value.All(char.IsDigit))
@@ -763,4 +860,10 @@
[JsonSerializable(typeof(JsonElement[]))]
internal partial class JsonElementCollectionSourceGenerationContext : JsonSerializerContext
{
}
[JsonSourceGenerationOptions(WriteIndented = true, DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull)]
[JsonSerializable(typeof(Dictionary<string, Dictionary<string, string>>))]
internal partial class DictionaryStringDictionaryStringStringSourceGenerationContext : JsonSerializerContext
{
}