Compare commits

...

2 Commits

17 changed files with 496 additions and 226 deletions

View File

@@ -110,7 +110,7 @@ dotnet_diagnostic.CA2254.severity = none # CA2254: The logging message template
 dotnet_diagnostic.IDE0001.severity = warning # IDE0001: Simplify name
 dotnet_diagnostic.IDE0002.severity = warning # Simplify (member access) - System.Version.Equals("1", "2"); Version.Equals("1", "2");
 dotnet_diagnostic.IDE0004.severity = warning # IDE0004: Cast is redundant.
-dotnet_diagnostic.IDE0005.severity = warning # Using directive is unnecessary
+dotnet_diagnostic.IDE0005.severity = none # Using directive is unnecessary
 dotnet_diagnostic.IDE0028.severity = none # IDE0028: Collection initialization can be simplified
 dotnet_diagnostic.IDE0031.severity = warning # Use null propagation (IDE0031)
 dotnet_diagnostic.IDE0047.severity = warning # IDE0047: Parentheses can be removed
@@ -122,6 +122,7 @@ dotnet_diagnostic.IDE0290.severity = none # Use primary constructor [Distance]cs
 dotnet_diagnostic.IDE0300.severity = none # IDE0300: Collection initialization can be simplified
 dotnet_diagnostic.IDE0301.severity = none #IDE0301: Collection initialization can be simplified
 dotnet_diagnostic.IDE0305.severity = none # IDE0305: Collection initialization can be simplified
+dotnet_diagnostic.MSTEST0015.severity = none # MSTEST0015: Test method {method} should not be ignored
 dotnet_diagnostic.MSTEST0037.severity = error # MSTEST0037: Use proper 'Assert' methods
 dotnet_diagnostic.SYSLIB1045.severity = none # SYSLIB1045: diagnostics for regex source generation
 dotnet_naming_rule.abstract_method_should_be_pascal_case.severity = warning

View File

@@ -1,16 +1,43 @@
 {
     "configurations": [
         {
-            "name": ".NET Core Attach",
-            "type": "coreclr",
-            "request": "attach",
-            "processId": 24012
+            "mode": "debug",
+            "name": "Go launch file",
+            "program": "${file}",
+            "request": "launch",
+            "type": "go"
         },
         {
-            "type": "node",
-            "request": "launch",
-            "name": "node Launch Current Opened File",
-            "program": "${file}"
+            "name": "node Launch Current Opened File",
+            "program": "${file}",
+            "request": "launch",
+            "type": "node"
+        },
+        {
+            "cwd": "${workspaceFolder}",
+            "internalConsoleOptions": "neverOpen",
+            "name": "Debug File",
+            "program": "${file}",
+            "request": "launch",
+            "stopOnEntry": false,
+            "type": "bun",
+            "watchMode": false
+        },
+        {
+            "cwd": "${workspaceFolder}",
+            "internalConsoleOptions": "neverOpen",
+            "name": "Run File",
+            "noDebug": true,
+            "program": "${file}",
+            "request": "launch",
+            "type": "bun",
+            "watchMode": false
+        },
+        {
+            "name": ".NET Core Attach",
+            "processId": 32760,
+            "request": "attach",
+            "type": "coreclr"
         }
     ]
 }

View File

@@ -1,19 +1,134 @@
 {
     "version": "2.0.0",
+    "inputs": [
+        {
+            "default": "Development",
+            "description": "Which ASP Net Core Environment?",
+            "id": "ASPNETCORE_ENVIRONMENT",
+            "options": [
+                "Development",
+                "Production"
+            ],
+            "type": "pickString"
+        },
+        {
+            "default": "{AssemblyTitle}",
+            "description": "What Assembly Title?",
+            "id": "AssemblyTitle",
+            "type": "promptString"
+        },
+        {
+            "default": "{Build.BuildId}",
+            "description": "Which Build BuildId?",
+            "id": "Build.BuildId",
+            "type": "promptString"
+        },
+        {
+            "default": "{Build.Reason}",
+            "description": "Which Build Reason?",
+            "id": "Build.Reason",
+            "type": "promptString"
+        },
+        {
+            "default": "{Build.Repository.Id}",
+            "description": "Which Build Repository Id?",
+            "id": "Build.Repository.Id",
+            "type": "promptString"
+        },
+        {
+            "default": "{Build.Repository.Name}",
+            "description": "Which Build Repository Name?",
+            "id": "Build.Repository.Name",
+            "type": "promptString"
+        },
+        {
+            "default": "{Build.SourceVersion}",
+            "description": "Which Build Source Version?",
+            "id": "Build.SourceVersion",
+            "type": "promptString"
+        },
+        {
+            "default": "Debug",
+            "description": "Which Configuration?",
+            "id": "Configuration",
+            "options": [
+                "Debug",
+                "Release"
+            ],
+            "type": "pickString"
+        },
+        {
+            "default": "net8.0",
+            "description": "Which Core Version?",
+            "id": "CoreVersion",
+            "options": [
+                "net8.0"
+            ],
+            "type": "pickString"
+        },
+        {
+            "default": "C:/Program Files (x86)/Microsoft Visual Studio/2022/BuildTools/MSBuild/Current/Bin/MSBuild.exe",
+            "description": "Which MS Build?",
+            "id": "MSBuild",
+            "type": "promptString"
+        },
+        {
+            "default": "https://artifactory.intra.infineon.com/artifactory/api/nuget/ngt-fi-package-main-vir/",
+            "description": "Which Nuget Source?",
+            "id": "NugetSource",
+            "type": "promptString"
+        },
+        {
+            "default": "win-x64",
+            "description": "Which Runtime?",
+            "id": "Runtime",
+            "options": [
+                "win-x64",
+                "win-x32",
+                "linux-x64",
+                "linux-x32"
+            ],
+            "type": "pickString"
+        },
+        {
+            "default": "L:/",
+            "description": "Which System DefaultWorkingDirectory?",
+            "id": "System.DefaultWorkingDirectory",
+            "options": [
+                "L:/",
+                "D:/",
+                "C:/"
+            ],
+            "type": "pickString"
+        },
+        {
+            "default": "v4.8",
+            "description": "Which Core Target Framework Version?",
+            "id": "TargetFrameworkVersion",
+            "options": [
+                "v4.8"
+            ],
+            "type": "pickString"
+        },
+        {
+            "default": "{UserSecretsId}",
+            "description": "Which Core User Secrets Id?",
+            "id": "UserSecretsId",
+            "type": "promptString"
+        }
+    ],
     "tasks": [
         {
             "label": "Build",
             "command": "dotnet",
             "type": "process",
             "args": [
-                "build",
-                "/property:GenerateFullPaths=true",
-                "/consoleloggerparameters:NoSummary"
+                "build"
             ],
             "problemMatcher": "$msCompile"
         },
         {
-            "label": "Test-Debug",
+            "label": "Test Debug",
             "command": "dotnet",
             "type": "process",
             "args": [
@@ -24,7 +139,7 @@
             "problemMatcher": "$msCompile"
         },
         {
-            "label": "Test-Release",
+            "label": "Test Release",
             "command": "dotnet",
             "type": "process",
             "args": [
@@ -50,7 +165,7 @@
             "problemMatcher": "$msCompile"
         },
         {
-            "label": "Format-Whitespaces",
+            "label": "Format Whitespaces",
             "command": "dotnet",
             "type": "process",
             "args": [
@@ -87,13 +202,13 @@
             "problemMatcher": "$msCompile"
         },
         {
-            "label": "Project",
+            "label": "Code Project",
             "type": "shell",
             "command": "code ../MET08THFTIRSTRATUS.csproj",
             "problemMatcher": []
         },
         {
-            "label": "Readme",
+            "label": "Code Read Me",
             "type": "shell",
             "command": "code ../README.md",
             "problemMatcher": []
@@ -113,7 +228,7 @@
             "problemMatcher": []
         },
         {
-            "label": "Git Config",
+            "label": "Code Git Config",
             "type": "shell",
             "command": "code ../.git/config",
             "problemMatcher": []

View File

@@ -128,7 +128,7 @@ public class FileRead : Shared.FileRead, IFileRead
         Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
         if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
             FileCopy(reportFullPath, dateTime, descriptions);
-        results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics[0]), tests, jsonElements, new List<FileInfo>());
+        results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics), tests, jsonElements, new List<FileInfo>());
         return results;
     }
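The Logistics[0] to Logistics swap above repeats in several of the FileRead variants further down. A minimal sketch of the difference, assuming Logistics is the ReadOnlyCollection<string> exposed by ProcessDataStandardFormat (the sample lines are hypothetical):

using System;
using System.Collections.ObjectModel;

internal static class LogisticsJoinDemo
{
    private static void Main()
    {
        // Hypothetical logistics lines; the real ones come from the PDSF footer.
        ReadOnlyCollection<string> logistics = new(new[]
        {
            "LOGISTICS_1\tA_JOBID=BIORAD4;A_SEQUENCE=638918243047505986;",
            "LOGISTICS_2\tB_JOBID=BIORAD4;B_SEQUENCE=638918243047505986;",
        });

        // Old call: only the first logistics line is passed, so only that line's
        // content reaches the extract-result header.
        Console.WriteLine(string.Join(Environment.NewLine, logistics[0]));

        // New call: every logistics line is included, one per line.
        Console.WriteLine(string.Join(Environment.NewLine, logistics));
    }
}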

View File

@@ -153,7 +153,7 @@ public class FileRead : Shared.FileRead, IFileRead
         Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
         if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
             MoveArchive(reportFullPath, dateTime);
-        results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics[0]), tests, jsonElements, new List<FileInfo>());
+        results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics), tests, jsonElements, new List<FileInfo>());
         return results;
     }

View File

@@ -103,7 +103,7 @@ public class FileRead : Shared.FileRead, IFileRead
         return results;
     }
 
-    private void FileCopy<T>(string reportFullPath, DateTime dateTime, List<T> descriptions) where T : Shared.Properties.IDescription
+    private void WriteFile<T>(string reportFullPath, DateTime dateTime, List<T> descriptions) where T : Shared.Properties.IDescription
     {
         bool isDummyRun = false;
         string successDirectory = string.Empty;
@@ -111,8 +111,9 @@ public class FileRead : Shared.FileRead, IFileRead
         string duplicateDirectory = Path.Combine(_FileConnectorConfiguration.SourceFileLocation, _CellInstanceName);
         if (!Directory.Exists(duplicateDirectory))
             _ = Directory.CreateDirectory(duplicateDirectory);
-        string duplicateFile = Path.Combine(duplicateDirectory, Path.GetFileName(reportFullPath));
-        File.Copy(reportFullPath, duplicateFile, overwrite: true);
+        string duplicateFile = Path.Combine(duplicateDirectory, $"{Path.GetFileName(reportFullPath)}.xml");
+        string xml = ProcessDataStandardFormat.GetXml(reportFullPath);
+        File.WriteAllText(duplicateFile, xml);
         WaitForFileConsumption(dateTime, descriptions, isDummyRun, successDirectory, duplicateDirectory, collection, duplicateFile);
     }
@@ -126,8 +127,8 @@ public class FileRead : Shared.FileRead, IFileRead
         List<Shared.Properties.IDescription> descriptions = GetDuplicatorDescriptions(jsonElements);
         Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
         if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
-            FileCopy(reportFullPath, dateTime, descriptions);
-        results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics[0]), tests, jsonElements, new List<FileInfo>());
+            WriteFile(reportFullPath, dateTime, descriptions);
+        results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics), tests, jsonElements, new List<FileInfo>());
         return results;
     }

View File

@@ -88,9 +88,9 @@ public class FileRead : Shared.FileRead, IFileRead
         string processDataStandardFormatMappingOldColumnNames = GetPropertyValue(cellInstanceConnectionName, modelObjectParameters, "Process.Data.Standard.Format.Mapping.Old.Column.Names");
         string processDataStandardFormatMappingNewColumnNames = GetPropertyValue(cellInstanceConnectionName, modelObjectParameters, "Process.Data.Standard.Format.Mapping.New.Column.Names");
         string processDataStandardFormatMappingColumnIndices = GetPropertyValue(cellInstanceConnectionName, modelObjectParameters, "Process.Data.Standard.Format.Mapping.Column.Indices");
-        _ProcessDataStandardFormatMapping = GetProcessDataStandardFormatMapping(processDataStandardFormatMappingOldColumnNames,
+        _ProcessDataStandardFormatMapping = ProcessDataStandardFormatMapping.Get(processDataStandardFormatMappingOldColumnNames,
                                                                                 processDataStandardFormatMappingNewColumnNames,
                                                                                 processDataStandardFormatMappingColumnIndices);
     }
 
     void IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, Exception exception)
@@ -169,46 +169,6 @@ public class FileRead : Shared.FileRead, IFileRead
         return results;
     }
 
-    private static ProcessDataStandardFormatMapping GetProcessDataStandardFormatMapping(string processDataStandardFormatMappingOldColumnNames, string processDataStandardFormatMappingNewColumnNames, string processDataStandardFormatMappingColumnIndices)
-    {
-        ProcessDataStandardFormatMapping result;
-        string[] segmentsB;
-        List<string> distinct = new();
-        Dictionary<string, string> keyValuePairs = new();
-        string args4 = "Time,HeaderUniqueId,UniqueId,Date,Wafer,Position,BIORAD4";
-        string args5 = ",BIORAD4";
-        string args6 = ",BIORAD4";
-        string args7 = "Test|EventId,Date|DateTime,Position|Slot,DeltaThicknessSlotsOneAndTwentyFive|Actual Delta Thick Pts 1 and 25,PercentDeltaThicknessSlotsOneAndTwentyFive|% Delta Thick Pts 1 and 25,MID|Cassette,Lot|Batch,Title|Batch,Wafer|Text,Thickness|Site,MeanThickness|GradeMean,|BIORAD4";
-        // string args8 = "Time,A_LOGISTICS,B_LOGISTICS,Test,Count,Index,MesEntity,MID,Date,Employee,Lot,PSN,Reactor,Recipe,Cassette,GradeStdDev,HeaderUniqueId,Layer,MeanThickness,PassFail,RDS,Slot,Title,UniqueId,Wafer,Zone,Mean,Position,StdDev,Thickness,ThicknessSlotOne,ThicknessSlotTwentyFive,DeltaThicknessSlotsOneAndTwentyFive,PercentDeltaThicknessSlotsOneAndTwentyFive";
-        // string args9 = "Time,A_LOGISTICS,B_LOGISTICS,Count,Sequence,MesEntity,Index,Batch,Cassette,DateTime,Destination,Mean,PassFail,Recipe,Reference,Site,Slot,Source,StdDev,Text,GradeMean,GradeStdDev,RDS,PSN,Reactor,Layer,Zone,Employee,InferredLot,Actual Delta Thick Pts 1 and 25,% Delta Thick Pts 1 and 25,EventId",
-        // string args10 = "0,1,2,31,3,6,5,8,9,27,7,23,24,13,8,21,-1,25,20,12,22,16,7,-1,19,26,11,16,18,15,-1,-1,29,30";
-        string[] segments = args7.Split(',');
-        ReadOnlyCollection<string> ignoreColumns = new(args4.Split(','));
-        ReadOnlyCollection<string> backfillColumns = new(args5.Split(','));
-        ReadOnlyCollection<string> indexOnlyColumns = new(args6.Split(','));
-        ReadOnlyCollection<string> newColumnNames = new(processDataStandardFormatMappingNewColumnNames.Split(','));
-        ReadOnlyCollection<string> oldColumnNames = new(processDataStandardFormatMappingOldColumnNames.Split(','));
-        ReadOnlyCollection<int> columnIndices = new(processDataStandardFormatMappingColumnIndices.Split(',').Select(int.Parse).ToArray());
-        foreach (string segment in segments)
-        {
-            segmentsB = segment.Split('|');
-            if (segmentsB.Length != 2)
-                continue;
-            if (distinct.Contains(segmentsB[0]))
-                continue;
-            distinct.Add(segmentsB[0]);
-            keyValuePairs.Add(segmentsB[0], segmentsB[1]);
-        }
-        result = new(backfillColumns: backfillColumns,
-                     columnIndices: columnIndices,
-                     newColumnNames: newColumnNames,
-                     ignoreColumns: ignoreColumns,
-                     indexOnlyColumns: indexOnlyColumns,
-                     keyValuePairs: new(keyValuePairs),
-                     oldColumnNames: oldColumnNames);
-        return result;
-    }
-
     private static ReadOnlyCollection<PreWith> GetPreWithCollection(ReadOnlyCollection<Pre> preCollection)
     {
         List<PreWith> results = new();
@@ -268,7 +228,7 @@ public class FileRead : Shared.FileRead, IFileRead
         }
     }
 
-    private static ReadOnlyCollection<Pre> GetPreCollection(int numberLength, string parentDirectory, ReadOnlyCollection<string> matchingFiles, bool mesEntityMatchesProcess)
+    private static ReadOnlyCollection<Pre> GetPreCollection(int numberLength, string parentDirectory, ReadOnlyCollection<string> matchingFiles)
     {
         List<Pre> results = new();
         Pre pre;
@@ -277,8 +237,6 @@ public class FileRead : Shared.FileRead, IFileRead
         foreach (string matchingFile in matchingFiles)
         {
             checkFile = $"{matchingFile[0]}{matchingFile.Substring(parentDirectoryLength + numberLength + 1)}";
-            if (mesEntityMatchesProcess)
-                checkFile = checkFile.Replace("MET08THFTIRSTRATUS", "MET08THFTIRQS408M");
             pre = new(matchingFile, checkFile);
             results.Add(pre);
         }
@@ -315,8 +273,13 @@ public class FileRead : Shared.FileRead, IFileRead
                 continue;
             if (!_StaticRuns.TryGetValue(_Logistics.Sequence, out List<Shared.Metrology.WS.Results>? wsResults))
                 wsResults = null;
-            ProcessDataStandardFormat.Write(preWith.CheckFile, processDataStandardFormat, wsResults);
-            File.Delete(preWith.MatchingFile);
+            if (processDataStandardFormat.InputPDSF is null)
+                File.Move(preWith.MatchingFile, preWith.CheckFile);
+            else
+            {
+                ProcessDataStandardFormat.Write(preWith.CheckFile, processDataStandardFormat, wsResults);
+                File.Delete(preWith.MatchingFile);
+            }
             if (Directory.Exists(preWith.NoWaitDirectory))
             {
                 post = new(preWith.CheckFile, preWith.ErrFile);
@@ -368,10 +331,7 @@ public class FileRead : Shared.FileRead, IFileRead
             { CreatePointerFile(numberLength, parentParentDirectory, matchingFiles); }
             catch (Exception) { }
         }
-        JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(processDataStandardFormat);
-        List<Stratus.Description> descriptions = Stratus.ProcessData.GetDescriptions(jsonElements);
-        bool mesEntityMatchesProcess = descriptions.Count > 0 && descriptions[0].MesEntity == descriptions[0].Reactor;
-        ReadOnlyCollection<Pre> preCollection = GetPreCollection(numberLength, parentParentDirectory, matchingFiles, mesEntityMatchesProcess);
+        ReadOnlyCollection<Pre> preCollection = GetPreCollection(numberLength, parentParentDirectory, matchingFiles);
         ReadOnlyCollection<PreWith> preWithCollection = GetPreWithCollection(preCollection);
         MoveCollection(dateTime, processDataStandardFormat, preWithCollection);
         return results;

View File

@@ -113,76 +113,46 @@ public class FileRead : Shared.FileRead, IFileRead
         return results;
     }
 
-    private static string GetLines(Logistics logistics, List<Stratus.Description> descriptions)
-    {
-        StringBuilder results = new();
-        char del = '\t';
-        Stratus.Description x = descriptions[0];
-        _ = results.Append("Stratus_").Append(logistics.MID).Append('_').Append(logistics.DateTimeFromSequence.ToString("yyyyMMddhhmmssfff")).Append(del).
-            Append(x.Date).Append(del).
-            Append(logistics.JobID).Append(del).
-            Append("FQA Thickness").Append(del).
-            Append(x.Employee).Append(del).
-            Append(x.Recipe).Append(del).
-            Append(x.Reactor).Append(del).
-            Append(x.RDS).Append(del).
-            Append(x.PSN).Append(del).
-            Append(x.Lot).Append(del).
-            Append(x.Cassette).Append(del).
-            Append(x.MeanThickness);
-        for (int i = 0; i < descriptions.Count; i++)
-            _ = results.Append(del).Append(descriptions[i].Slot).
-                Append(del).Append(descriptions[i].Mean);
-        return results.ToString();
-    }
-
     private void SaveOpenInsightFile(string reportFullPath, DateTime dateTime, ProcessDataStandardFormat processDataStandardFormat, List<Stratus.Description> descriptions, Test[] tests)
     {
+        string duplicateFile;
         bool isDummyRun = false;
         List<(Shared.Properties.IScopeInfo, string)> collection = new();
         string duplicateDirectory = Path.Combine(_FileConnectorConfiguration.SourceFileLocation, _CellInstanceName);
         if (!Directory.Exists(duplicateDirectory))
             _ = Directory.CreateDirectory(duplicateDirectory);
         string successDirectory = _FileConnectorConfiguration.AlternateTargetFolder;
-        if (!Directory.Exists(Path.Combine(duplicateDirectory, "1")))
+        if (descriptions.Count == 0 || tests.Length == 0)
+            duplicateFile = Path.Combine(duplicateDirectory, Path.GetFileName(reportFullPath));
+        else
         {
-            string parentParent = GetParentParent(_FileConnectorConfiguration.SourceFileLocation);
-            if (parentParent.Contains(_CellInstanceName))
-                parentParent = Path.GetDirectoryName(parentParent);
-            duplicateDirectory = Path.Combine(parentParent, "Data");
-            if (!Directory.Exists(duplicateDirectory))
-                _ = Directory.CreateDirectory(duplicateDirectory);
-        }
-        string duplicateFile = Path.Combine(duplicateDirectory, Path.GetFileName(reportFullPath));
-        if (descriptions.Count != 0 && tests.Length != 0)
-        {
-            string lines = GetLines(_Logistics, descriptions);
-            if (!string.IsNullOrEmpty(lines))
-            {
-                long? subgroupId;
-                long breakAfter = dateTime.AddSeconds(_BreakAfterSeconds).Ticks;
-                long preWait = _FileConnectorConfiguration?.FileHandleWaitTime is null ? dateTime.AddMilliseconds(1234).Ticks : dateTime.AddMilliseconds(_FileConnectorConfiguration.FileHandleWaitTime.Value).Ticks;
-                if (string.IsNullOrEmpty(descriptions[0].Reactor) || string.IsNullOrEmpty(descriptions[0].PSN))
-                    subgroupId = null;
-                else
-                    (subgroupId, int? _, string _) = FromIQS.GetCommandText(_IqsConnectionString, _Logistics, descriptions[0], breakAfter, preWait);
-                if (subgroupId is null)
-                    collection.Add(new(new ScopeInfo(tests[0], _OpenInsightFilePattern), lines));
-                else
-                    collection.Add(new(new ScopeInfo(tests[0], $"{subgroupId.Value} {_OpenInsightFilePattern}"), lines));
-                if (_StaticRuns.TryGetValue(_Logistics.Sequence, out List<WS.Results> wsResults))
-                {
-                    if (wsResults is null || wsResults.Count != 1)
-                        throw new NullReferenceException($"{nameof(wsResults)} {wsResults?.Count} != 1 {_Logistics.Sequence}!");
-                    lock (_StaticRuns)
-                        wsResults[0] = WS.Results.Get(wsResults[0], subgroupId);
-                }
-                string weekOfYear = _Calendar.GetWeekOfYear(_Logistics.DateTimeFromSequence, CalendarWeekRule.FirstDay, DayOfWeek.Sunday).ToString("00");
-                FromIQS.Save(_OpenInsightApiECDirectory, _Logistics, reportFullPath, processDataStandardFormat, descriptions.First(), lines, subgroupId, weekOfYear);
-            }
+            long? subgroupId;
+            string fileName = Path.GetFileName(reportFullPath);
+            long breakAfter = dateTime.AddSeconds(_BreakAfterSeconds).Ticks;
+            long preWait = _FileConnectorConfiguration?.FileHandleWaitTime is null ? dateTime.AddMilliseconds(1234).Ticks : dateTime.AddMilliseconds(_FileConnectorConfiguration.FileHandleWaitTime.Value).Ticks;
+            if (string.IsNullOrEmpty(descriptions[0].Reactor) || string.IsNullOrEmpty(descriptions[0].PSN))
+                subgroupId = null;
+            else
+                (subgroupId, int? _, string _) = FromIQS.GetCommandText(_IqsConnectionString, _Logistics, descriptions[0], breakAfter, preWait);
+            if (_StaticRuns.TryGetValue(_Logistics.Sequence, out List<WS.Results> wsResults))
+            {
+                if (wsResults is null || wsResults.Count != 1)
+                    throw new NullReferenceException($"{nameof(wsResults)} {wsResults?.Count} != 1 {_Logistics.Sequence}!");
+                lock (_StaticRuns)
+                    wsResults[0] = WS.Results.Get(wsResults[0], subgroupId);
+            }
+            if (!fileName.StartsWith("Viewer"))
+                duplicateFile = Path.Combine(duplicateDirectory, $"{subgroupId} {fileName}".TrimStart());
+            else
+                duplicateFile = Path.Combine(duplicateDirectory, $"{$"Viewer {subgroupId}".TrimEnd()} {fileName.Replace("Viewer", string.Empty)}");
+            string weekOfYear = _Calendar.GetWeekOfYear(_Logistics.DateTimeFromSequence, CalendarWeekRule.FirstDay, DayOfWeek.Sunday).ToString("00");
+            FromIQS.Save(_OpenInsightApiECDirectory, _Logistics, reportFullPath, processDataStandardFormat, descriptions.First(), subgroupId, weekOfYear);
         }
         if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
-        {
-            File.Copy(reportFullPath, duplicateFile, overwrite: true);
             WaitForFileConsumption(dateTime, descriptions, isDummyRun, successDirectory, duplicateDirectory, collection, duplicateFile);
-        }
     }
 
     private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
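A small sketch of the duplicate-file naming introduced above, with hypothetical subgroupId values and file names; the real code also prepends the duplicate directory via Path.Combine:

using System;

internal static class DuplicateFileNameDemo
{
    // Mirrors the two naming branches in SaveOpenInsightFile.
    private static string GetDuplicateFileName(long? subgroupId, string fileName) =>
        !fileName.StartsWith("Viewer")
            ? $"{subgroupId} {fileName}".TrimStart()
            : $"{$"Viewer {subgroupId}".TrimEnd()} {fileName.Replace("Viewer", string.Empty)}";

    private static void Main()
    {
        // Hypothetical values; subgroupId comes from FromIQS.GetCommandText in the real code.
        Console.WriteLine(GetDuplicateFileName(123456, "Stratus_Results.pdsf")); // "123456 Stratus_Results.pdsf"
        Console.WriteLine(GetDuplicateFileName(null, "Stratus_Results.pdsf"));   // "Stratus_Results.pdsf"
        Console.WriteLine(GetDuplicateFileName(123456, "Viewer_Results.pdsf"));  // "Viewer 123456 _Results.pdsf"
    }
}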

View File

@@ -376,7 +376,7 @@ public class FromIQS
         return result;
     }
 
-    internal static void Save(string openInsightApiECDirectory, Logistics logistics, string reportFullPath, ProcessDataStandardFormat processDataStandardFormat, Stratus.Description description, string lines, long? subGroupId, string weekOfYear)
+    internal static void Save(string openInsightApiECDirectory, Logistics logistics, string reportFullPath, ProcessDataStandardFormat processDataStandardFormat, Stratus.Description description, long? subGroupId, string weekOfYear)
     {
         string checkFile;
         string fileName = Path.GetFileName(reportFullPath);
@@ -390,15 +390,9 @@ public class FromIQS
         checkFile = Path.Combine(ecDirectory, fileName);
         if (ecExists && !File.Exists(checkFile))
             File.Copy(reportFullPath, checkFile);
-        checkFile = Path.Combine(ecDirectory, $"{logistics.DateTimeFromSequence.Ticks}.txt");
-        if (ecExists && !File.Exists(checkFile))
-            File.WriteAllText(checkFile, lines);
         checkFile = Path.Combine(ecDirectory, $"{logistics.DateTimeFromSequence.Ticks}.json");
         if (ecExists && !File.Exists(checkFile))
             File.WriteAllText(checkFile, json);
-        checkFile = Path.Combine(ecDirectory, $"{logistics.DateTimeFromSequence.Ticks}.lbl");
-        if (ecExists && !File.Exists(checkFile))
-            File.WriteAllText(checkFile, processDataStandardFormat.Body[processDataStandardFormat.Body.Count - 1]);
     }
 
     private static string GetCommandText(string[] iqsCopyValues)

View File

@@ -147,7 +147,7 @@ public class FileRead : Shared.FileRead, IFileRead
         Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
         if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
             SendData(reportFullPath, dateTime, descriptions);
-        results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics[0]), tests, jsonElements, new List<FileInfo>());
+        results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics), tests, jsonElements, new List<FileInfo>());
         return results;
     }

View File

@@ -173,7 +173,7 @@ public class FileRead : Shared.FileRead, IFileRead
         Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
         if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
             PostOpenInsightMetrologyViewerAttachments(descriptions);
-        results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics[0]), tests, jsonElements, new List<FileInfo>());
+        results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics), tests, jsonElements, new List<FileInfo>());
         return results;
     }

View File

@@ -172,7 +172,7 @@ public class FileRead : Shared.FileRead, IFileRead
         JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(processDataStandardFormat);
         List<Stratus.Description> descriptions = Stratus.ProcessData.GetDescriptions(jsonElements);
         Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
-        results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics[0]), tests, jsonElements, new List<FileInfo>());
+        results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics), tests, jsonElements, new List<FileInfo>());
         if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
             DirectoryMove(reportFullPath, dateTime, descriptions);
         else if (!_IsEAFHosted)

View File

@@ -125,7 +125,7 @@ public class FileRead : Shared.FileRead, IFileRead
         Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
         if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
             FileCopy(reportFullPath, dateTime, descriptions);
-        results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics[0]), tests, jsonElements, new List<FileInfo>());
+        results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics), tests, jsonElements, new List<FileInfo>());
         return results;
     }

View File

@@ -383,17 +383,24 @@ public class FileRead : Properties.IFileRead
         else
         {
             string[] files;
-            string logisticsSequence = _Logistics.Sequence.ToString();
-            string[] directories = Directory.GetDirectories(jobIdDirectory, $"*{logisticsSequence}*", SearchOption.TopDirectoryOnly);
-            foreach (string directory in directories)
+            string[] directories;
+            string logisticsSequence;
+            for (int i = 0; i < 10; i++)
             {
-                files = Directory.GetFiles(directory, "*", SearchOption.TopDirectoryOnly);
-                if (files.Length == 0)
-                    continue;
-                results.Add(directory);
+                logisticsSequence = (_Logistics.Sequence + -i).ToString();
+                directories = Directory.GetDirectories(jobIdDirectory, $"*{logisticsSequence}*", SearchOption.TopDirectoryOnly);
+                foreach (string directory in directories)
+                {
+                    files = Directory.GetFiles(directory, "*", SearchOption.TopDirectoryOnly);
+                    if (files.Length == 0)
+                        continue;
+                    results.Add(directory);
+                }
+                if (results.Count == 1)
+                    break;
             }
         }
-        if ((results is null) || results.Count != 1)
+        if (results.Count != 1)
             throw new Exception("Didn't find directory by logistics sequence");
         return results.ToArray();
     }
@@ -478,27 +485,14 @@ public class FileRead : Properties.IFileRead
         }
     }
 
-    protected void WritePDSF(IFileRead fileRead, JsonElement[] jsonElements)
+    protected static void WritePDSF(IFileRead fileRead, JsonElement[] jsonElements)
     {
-        string directory;
-        string day = $"{_Logistics.DateTimeFromSequence:yyyy-MM-dd}";
-        string weekOfYear = _Calendar.GetWeekOfYear(_Logistics.DateTimeFromSequence, CalendarWeekRule.FirstDay, DayOfWeek.Sunday).ToString("00");
-        string weekDirectory = $"{_Logistics.DateTimeFromSequence:yyyy}_Week_{weekOfYear}";
-        if (!_CellInstanceConnectionName.StartsWith(_CellInstanceName) && _CellInstanceConnectionNameBase == _EquipmentType)
-            directory = Path.Combine(_TracePath, _EquipmentType, "Target", weekDirectory, day, _CellInstanceName, _CellInstanceConnectionName);
-        else
-            directory = Path.Combine(_TracePath, _EquipmentType, "Source", weekDirectory, day, _CellInstanceName, _CellInstanceConnectionName);
-        if (!Directory.Exists(directory))
-            _ = Directory.CreateDirectory(directory);
-        string file = Path.Combine(directory, string.Concat(_Logistics.MesEntity, "_", _Logistics.Sequence, ".ipdsf"));
-        string lines = ProcessDataStandardFormat.GetPDSFText(fileRead, _Logistics, jsonElements, logisticsText: string.Empty);
-        File.WriteAllText(file, lines);
-        if (_Logistics.TotalSecondsSinceLastWriteTimeFromSequence > 600)
-        {
-            try
-            { File.SetLastWriteTime(file, _Logistics.DateTimeFromSequence); }
-            catch (Exception) { }
-        }
+#pragma warning disable CA1510
+        if (fileRead is null)
+            throw new ArgumentNullException(nameof(fileRead));
+        if (jsonElements is null)
+            throw new ArgumentNullException(nameof(jsonElements));
+#pragma warning restore CA1510
     }
 
     protected void WaitForThread(Thread thread, List<Exception> threadExceptions)

View File

@@ -2,12 +2,14 @@ using Adaptation.Shared.Methods;
 using System;
 using System.Collections.Generic;
 using System.Collections.ObjectModel;
+using System.Diagnostics;
 using System.Globalization;
 using System.IO;
 using System.Linq;
 using System.Text;
 using System.Text.Json;
 using System.Text.Json.Serialization;
+using System.Text.RegularExpressions;
 
 namespace Adaptation.Shared;
@@ -136,6 +138,7 @@ internal class ProcessDataStandardFormat
     internal static ProcessDataStandardFormat GetProcessDataStandardFormat(string reportFullPath, string[]? lines = null, int columnsLine = 6)
     {
         ProcessDataStandardFormat result;
+        long? sequence;
         string segment;
         string[] segments;
         bool addToFooter = false;
@@ -186,13 +189,25 @@ internal class ProcessDataStandardFormat
         }
         string? linesOne = lines.Length > 0 && body.Count == 0 && columns.Count == 0 ? lines[1] : null;
         logistics = GetLogistics(footer, linesOne: linesOne);
+        if (logistics.Count == 0)
+            sequence = null;
+        else
+        {
+            segments = logistics[0].Split(new string[] { "SEQUENCE=" }, StringSplitOptions.None);
+            sequence = segments.Length < 2 || !long.TryParse(segments[1].Split(';')[0], out long s) ? null : s;
+        }
+        if (sequence is null && !string.IsNullOrEmpty(reportFullPath))
+        {
+            FileInfo fileInfo = new(reportFullPath);
+            sequence = fileInfo.LastWriteTime.Ticks;
+        }
         result = new(body: body.AsReadOnly(),
                      columns: columns.AsReadOnly(),
                      footer: footer.AsReadOnly(),
                      header: header.AsReadOnly(),
                      inputPDSF: null,
                      logistics: logistics,
-                     sequence: null);
+                     sequence: sequence);
         return result;
     }
@@ -214,19 +229,19 @@ internal class ProcessDataStandardFormat
         return results.AsReadOnly();
     }
 
-    internal static ProcessDataStandardFormat GetProcessDataStandardFormat(string reportFullPath, ProcessDataStandardFormatMapping pdsfMapping)
+    internal static ProcessDataStandardFormat GetProcessDataStandardFormat(string reportFullPath, ProcessDataStandardFormatMapping processDataStandardFormatMapping)
     {
         ProcessDataStandardFormat result;
         const int columnsLine = 6;
         FileInfo fileInfo = new(reportFullPath);
         ProcessDataStandardFormat processDataStandardFormat = GetProcessDataStandardFormat(fileInfo.LastWriteTime, columnsLine, fileInfo.FullName, lines: null);
-        JsonElement[]? jsonElements = pdsfMapping.OldColumnNames.Count != pdsfMapping.ColumnIndices.Count ? null : GetFullArray(processDataStandardFormat);
+        JsonElement[]? jsonElements = processDataStandardFormatMapping.OldColumnNames.Count != processDataStandardFormatMapping.ColumnIndices.Count ? null : GetFullArray(processDataStandardFormat);
         JsonProperty[]? jsonProperties = jsonElements is null || jsonElements.Length == 0 ? null : jsonElements[0].EnumerateObject().ToArray();
-        if (jsonElements is null || jsonProperties is null || jsonProperties.Length != pdsfMapping.NewColumnNames.Count)
+        if (jsonElements is null || jsonProperties is null || jsonProperties.Length != processDataStandardFormatMapping.NewColumnNames.Count)
             result = processDataStandardFormat;
         else
         {
-            result = GetProcessDataStandardFormat(pdsfMapping, jsonElements, processDataStandardFormat);
+            result = GetProcessDataStandardFormat(processDataStandardFormatMapping, jsonElements, processDataStandardFormat);
             if (result.Sequence is null || result.Columns.Count == 0 || result.Body.Count == 0 || result.Logistics.Count == 0)
                 result = processDataStandardFormat;
         }
@@ -236,7 +251,7 @@ internal class ProcessDataStandardFormat
     private static ProcessDataStandardFormat GetProcessDataStandardFormat(DateTime lastWriteTime, int columnsLine, string path, string[]? lines)
     {
         ProcessDataStandardFormat result;
-        long sequence;
+        long? sequence;
         string[] segments;
         bool addToFooter = false;
         List<string> body = new();
@@ -268,12 +283,13 @@ internal class ProcessDataStandardFormat
         }
         logistics = GetLogistics(footer, linesOne: null);
         if (logistics.Count == 0)
-            sequence = lastWriteTime.Ticks;
+            sequence = null;
         else
         {
             segments = logistics[0].Split(new string[] { "SEQUENCE=" }, StringSplitOptions.None);
-            sequence = segments.Length < 2 || !long.TryParse(segments[1].Split(';')[0], out long s) ? lastWriteTime.Ticks : s;
+            sequence = segments.Length < 2 || !long.TryParse(segments[1].Split(';')[0], out long s) ? null : s;
         }
+        sequence ??= lastWriteTime.Ticks;
         result = new(body: body.AsReadOnly(),
                      columns: new(columns),
                      footer: footer.AsReadOnly(),
@@ -302,7 +318,7 @@ internal class ProcessDataStandardFormat
             segments = bodyLine.Split('\t').ToList();
             for (int c = 0; c < segments.Count; c++)
             {
-                value = segments[c].Replace("\"", "\\\"").Replace("\\", "\\\\");
+                value = segments[c].Replace("\\", "\\\\").Replace("\"", "\\\"");
                 _ = stringBuilder.Append('"').Append(processDataStandardFormat.Columns[c]).Append("\":\"").Append(value).Append("\",");
             }
             _ = stringBuilder.Remove(stringBuilder.Length - 1, 1);
@@ -321,12 +337,14 @@ internal class ProcessDataStandardFormat
         int column;
         string value;
         JsonProperty jsonProperty;
+        List<string> debug = new();
         List<string> values = new();
         List<string> results = new();
         JsonProperty[] jsonProperties;
         List<string> unknownColumns = new();
         for (int i = 0; i < jsonElements.Length; i++)
         {
+            debug.Clear();
             values.Clear();
             if (jsonElements[i].ValueKind != JsonValueKind.Object)
             {
@@ -340,16 +358,22 @@ internal class ProcessDataStandardFormat
             {
                 column = processDataStandardFormatMapping.ColumnIndices[c];
                 if (column == -1)
+                {
                     value = processDataStandardFormatMapping.OldColumnNames[c];
+                    debug.Add($"<Item C=-01 Name=\"{value}\" DataType=\"8\" XmlType=\"1\" XPath=\"//records/record/{value}\" />");
+                }
                 else
                 {
                     jsonProperty = jsonProperties[column];
                     value = jsonProperty.Value.ToString();
+                    debug.Add($"<Item C={column + 2:000} Name=\"{processDataStandardFormatMapping.OldColumnNames[c]}\" DataType=\"8\" XmlType=\"1\" XPath=\"//records/record/{jsonProperty.Name}\" />");
                 }
                 values.Add(value);
             }
             results.Add(string.Join("\t", values));
         }
+        if (Debugger.IsAttached)
+            File.WriteAllText("../../.txt", string.Join(Environment.NewLine, debug.OrderBy(l => l)));
         result = new(body: new(results),
                      columns: processDataStandardFormatMapping.OldColumnNames,
                      footer: processDataStandardFormat.Footer,
@@ -364,7 +388,6 @@ internal class ProcessDataStandardFormat
     {
         if (processDataStandardFormat.InputPDSF is null)
             throw new NullReferenceException(nameof(processDataStandardFormat.InputPDSF));
-#pragma warning disable CA1845, IDE0057
         string result;
         string line;
         string value;
@@ -378,19 +401,27 @@ internal class ProcessDataStandardFormat
                 break;
             for (int c = 0; c < segments.Length; c++)
             {
-                value = segments[c].Replace("\"", "\\\"").Replace("\\", "\\\\");
+                value = segments[c].Replace("\\", "\\\\").Replace("\"", "\\\"");
                 line += string.Concat('"', processDataStandardFormat.InputPDSF.Columns[c].Trim('"'), '"', ':', '"', value, '"', ',');
             }
             line = string.Concat(line.Substring(0, line.Length - 1), '}');
             lines.Add(line);
         }
+        string? json = null;
+        if (processDataStandardFormat.Footer is not null && processDataStandardFormat.Footer.Count > 0)
+        {
+            Dictionary<string, string> footerKeyValuePairs = GetFooterKeyValuePairs(processDataStandardFormat.Footer);
+            Dictionary<string, Dictionary<string, string>> logisticKeyValuePairs = GetLogisticKeyValuePairs(processDataStandardFormat.Footer, footerKeyValuePairs);
+            json = JsonSerializer.Serialize(logisticKeyValuePairs, DictionaryStringDictionaryStringStringSourceGenerationContext.Default.DictionaryStringDictionaryStringString);
+        }
+        string footerText = string.IsNullOrEmpty(json) || json == "{}" ? string.Empty : $",{Environment.NewLine}\"PDSF\":{Environment.NewLine}{json}";
         result = string.Concat(
             '{',
             Environment.NewLine,
             '"',
             "Count",
             '"',
             ": ",
             processDataStandardFormat.Body.Count,
             ',',
             Environment.NewLine,
@@ -409,17 +440,95 @@ internal class ProcessDataStandardFormat
             '"',
             "Sequence",
             '"',
             ": ",
             processDataStandardFormat.Sequence,
             Environment.NewLine,
+            footerText,
+            Environment.NewLine,
             '}');
         return result;
-#pragma warning restore CA1845, IDE0057
     }
 
+    private static Dictionary<string, string> GetFooterKeyValuePairs(ReadOnlyCollection<string> footerLines)
+    {
+        Dictionary<string, string> results = new();
+        string[] segments;
+        foreach (string footerLine in footerLines)
+        {
+            segments = footerLine.Split('\t');
+            if (segments.Length != 2 || string.IsNullOrEmpty(segments[1].Trim()))
+            {
+                continue;
+            }
+            if (segments[1].Contains(';'))
+            {
+                continue;
+            }
+            else
+            {
+                if (results.ContainsKey(segments[0]))
+                {
+                    continue;
+                }
+                results.Add(segments[0], segments[1]);
+            }
+        }
+        return results;
+    }
+
+    private static Dictionary<string, Dictionary<string, string>> GetLogisticKeyValuePairs(ReadOnlyCollection<string> footerLines, Dictionary<string, string> footerKeyValuePairs)
+    {
+        Dictionary<string, Dictionary<string, string>> results = new();
+        string[] segments;
+        string[] subSegments;
+        string[] subSubSegments;
+        Dictionary<string, string>? keyValue;
+        results.Add("Footer", footerKeyValuePairs);
+        foreach (string footerLine in footerLines)
+        {
+            segments = footerLine.Split('\t');
+            if (segments.Length != 2 || string.IsNullOrEmpty(segments[1].Trim()))
+            {
+                continue;
+            }
+            if (!segments[1].Contains(';') || !segments[1].Contains('='))
+            {
+                continue;
+            }
+            else
+            {
+                subSegments = segments[1].Split(';');
+                if (subSegments.Length < 1)
+                {
+                    continue;
+                }
+                if (!results.TryGetValue(segments[0], out keyValue))
+                {
+                    results.Add(segments[0], new());
+                    if (!results.TryGetValue(segments[0], out keyValue))
+                    {
+                        throw new Exception();
+                    }
+                }
+                foreach (string segment in subSegments)
+                {
+                    subSubSegments = segment.Split('=');
+                    if (subSubSegments.Length != 2)
+                    {
+                        continue;
+                    }
+                    keyValue.Add(subSubSegments[0], subSubSegments[1]);
+                }
+            }
+        }
+        return results;
+    }
+
     internal static void Write(string path, ProcessDataStandardFormat processDataStandardFormat, List<Metrology.WS.Results>? wsResults)
     {
         List<string> results = new();
+        if (processDataStandardFormat.InputPDSF is null)
+            throw new NullReferenceException(nameof(processDataStandardFormat.InputPDSF));
         if (processDataStandardFormat.Sequence is null)
             throw new NullReferenceException(nameof(processDataStandardFormat.Sequence));
         string endOffset = "E#######T";
@@ -457,25 +566,25 @@ internal class ProcessDataStandardFormat
             }
         }
         results.Add("END_HEADER");
-        if (processDataStandardFormat.InputPDSF is not null)
-        {
-            results.Add(string.Empty);
-            List<char> hyphens = new();
-            results.AddRange(processDataStandardFormat.InputPDSF.Header.Select(l => l.Replace('\t', '|')));
-            results.Add(string.Empty);
-            results.Add($"|{string.Join("|", processDataStandardFormat.InputPDSF.Columns)}|");
-            for (int i = 0; i < processDataStandardFormat.InputPDSF.Columns.Count; i++)
-                hyphens.Add('-');
-            results.Add($"|{string.Join("|", hyphens)}|");
-            results.AddRange(processDataStandardFormat.InputPDSF.Body.Select(l => l.Replace('\t', '|')));
-            results.Add(string.Empty);
-            results.AddRange(processDataStandardFormat.InputPDSF.Footer.Select(l => l.Replace('\t', '|')));
-            results.Add(string.Empty);
-            results.Add("EOF");
-            results.Add(string.Empty);
-            string json = GetJson(processDataStandardFormat);
-            results.Add(json);
-        }
+        results.Add(string.Empty);
+        List<char> hyphens = new();
+        results.AddRange(processDataStandardFormat.InputPDSF.Header.Select(l => $"|{l.Replace('\t', '|')}|"));
+        results.Add(string.Empty);
+        results.Add($"|{string.Join("|", processDataStandardFormat.InputPDSF.Columns)}|");
+        for (int i = 0; i < processDataStandardFormat.InputPDSF.Columns.Count; i++)
+            hyphens.Add('-');
+        results.Add($"|{string.Join("|", hyphens)}|");
+        results.AddRange(processDataStandardFormat.InputPDSF.Body.Select(l => $"|{l.Replace('\t', '|')}|"));
+        results.Add(string.Empty);
+        results.AddRange(processDataStandardFormat.InputPDSF.Footer.Select(l => $"|{l.Replace('\t', '|')}|"));
+        results.Add(string.Empty);
+        string xml = GetXml(processDataStandardFormat);
+        results.Add(xml);
+        results.Add(string.Empty);
+        results.Add("EOF");
+        results.Add(string.Empty);
+        string json = GetJson(processDataStandardFormat);
+        results.Add(json);
         File.WriteAllText(path, string.Join(Environment.NewLine, results));
     }
@@ -518,7 +627,7 @@ internal class ProcessDataStandardFormat
         {
             for (int c = 1; c < segments.Length; c++)
             {
-                value = segments[c].Replace("\"", "\\\"").Replace("\\", "\\\\");
+                value = segments[c].Replace("\\", "\\\\").Replace("\"", "\\\"");
                 _ = stringBuilder.Append('"').Append(processDataStandardFormat.Columns[c]).Append("\":\"").Append(value).Append("\",");
             }
         }
@@ -526,7 +635,7 @@ internal class ProcessDataStandardFormat
         {
             for (int c = 1; c < segments.Length; c++)
             {
-                value = segments[c].Replace("\"", "\\\"").Replace("\\", "\\\\");
+                value = segments[c].Replace("\\", "\\\\").Replace("\"", "\\\"");
                 if (string.IsNullOrEmpty(value))
                     _ = stringBuilder.Append('"').Append(processDataStandardFormat.Columns[c]).Append("\":").Append(value).Append("null,");
                 else if (value.All(char.IsDigit))
@@ -757,10 +866,106 @@ internal class ProcessDataStandardFormat
         return result;
     }
 
+    internal static string GetXml(ProcessDataStandardFormat processDataStandardFormat)
+    {
+        string result;
+        string tag;
+        string value;
+        string[] segments;
+        List<string> values;
+        Dictionary<string, List<string>> results = new();
+        ReadOnlyCollection<string> body = processDataStandardFormat.InputPDSF is null ?
+            processDataStandardFormat.Body : processDataStandardFormat.InputPDSF.Body;
+        ReadOnlyCollection<string> columns = processDataStandardFormat.InputPDSF is null ?
+            processDataStandardFormat.Columns : processDataStandardFormat.InputPDSF.Columns;
+        List<string> lines = new() { "<?xml version=\"1.0\" encoding=\"UTF-8\"?>", "<records>" };
+        for (int i = 0; i < body.Count; i++)
+        {
+            segments = body[i].Trim().Split('\t');
+            if (segments.Length != columns.Count)
+                break;
+            for (int c = 0; c < segments.Length; c++)
+            {
+                value = segments[c].Replace("&", "&amp;")
+                                   .Replace("<", "&lt;")
+                                   .Replace(">", "&gt;")
+                                   .Replace("\"", "&quot;")
+                                   .Replace("'", "&apos;");
+                tag = Regex.Replace(columns[c].Trim('"'), @"[^a-zA-Z0-9]", "_").Split('\r')[0].Split('\n')[0];
+                if (i == 0)
+                {
+                    if (results.ContainsKey(tag))
+                        continue;
+                    results.Add(tag, new List<string>());
+                }
+                results[tag].Add(value);
+            }
+        }
+        foreach (KeyValuePair<string, List<string>> keyValuePair in results)
+        {
+            if (body.Count < 3)
+                break;
+            if (keyValuePair.Value.Count != body.Count)
+                continue;
+            values = keyValuePair.Value.Distinct().ToList();
+            if (values.Count == 2 && (string.IsNullOrEmpty(values[0]) || string.IsNullOrEmpty(values[1])))
+            {
+                for (int i = 0; i < body.Count; i++)
+                    keyValuePair.Value[i] = string.Empty;
+                foreach (string v in values)
+                {
+                    if (string.IsNullOrEmpty(v))
+                        continue;
+                    keyValuePair.Value[0] = v;
+                }
+            }
+        }
+        for (int i = 0; i < body.Count; i++)
+        {
+            lines.Add(" <record>");
+            foreach (KeyValuePair<string, List<string>> keyValuePair in results)
+            {
+                if (keyValuePair.Value.Count != body.Count)
+                    continue;
+                lines.Add(string.Concat(" <", keyValuePair.Key, '>', keyValuePair.Value[i], "</", keyValuePair.Key, '>'));
+            }
+            lines.Add(" </record>");
+        }
+        lines.Add("</records>");
+        result = string.Join(Environment.NewLine, lines);
+        return result;
+    }
+
+    internal static string GetXml(string reportFullPath, string[]? lines = null)
+    {
+        string result;
+        bool foundXml = false;
+        List<string> results = new();
+        lines ??= File.ReadAllLines(reportFullPath);
+        foreach (string line in lines)
+        {
+            if (line.StartsWith("<?xml"))
+                foundXml = true;
+            if (!foundXml)
+                continue;
+            if (line.StartsWith("EOF"))
+                break;
+            results.Add(line);
+        }
+        result = string.Join(Environment.NewLine, results);
+        return result;
+    }
+
 }
 
 [JsonSourceGenerationOptions(WriteIndented = true)]
 [JsonSerializable(typeof(JsonElement[]))]
 internal partial class JsonElementCollectionSourceGenerationContext : JsonSerializerContext
 {
+}
+
+[JsonSourceGenerationOptions(WriteIndented = true, DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull)]
+[JsonSerializable(typeof(Dictionary<string, Dictionary<string, string>>))]
+internal partial class DictionaryStringDictionaryStringStringSourceGenerationContext : JsonSerializerContext
+{
 }
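A minimal sketch of the footer parsing that the new GetJson block serializes under the "PDSF" key, using hypothetical footer lines in the same KEY<TAB>A=1;B=2 shape the real PDSF footer uses:

using System;
using System.Collections.Generic;
using System.Text.Json;

internal static class FooterJsonDemo
{
    private static void Main()
    {
        // Hypothetical footer lines; plain "key<TAB>value" pairs land under "Footer",
        // while "key<TAB>a=1;b=2" lines become their own nested dictionaries,
        // mirroring GetFooterKeyValuePairs and GetLogisticKeyValuePairs above.
        string[] footerLines =
        {
            "WAFER_COUNT\t25",
            "LOGISTICS_1\tA_JOBID=BIORAD4;A_MES_ENTITY=BIORAD4;A_SEQUENCE=638918243047505986",
        };
        Dictionary<string, Dictionary<string, string>> results = new() { ["Footer"] = new() };
        foreach (string footerLine in footerLines)
        {
            string[] segments = footerLine.Split('\t');
            if (segments.Length != 2)
                continue;
            if (!segments[1].Contains(';') || !segments[1].Contains('='))
            {
                results["Footer"][segments[0]] = segments[1];
                continue;
            }
            Dictionary<string, string> keyValue = results[segments[0]] = new();
            foreach (string segment in segments[1].Split(';'))
            {
                string[] subSegments = segment.Split('=');
                if (subSegments.Length == 2)
                    keyValue[subSegments[0]] = subSegments[1];
            }
        }
        // The serialized shape is what GetJson appends as ",\n\"PDSF\":\n{...}".
        Console.WriteLine(JsonSerializer.Serialize(results, new JsonSerializerOptions() { WriteIndented = true }));
    }
}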

View File

@@ -1,33 +1,34 @@
 using System.Collections.ObjectModel;
+using System.Linq;
 
 namespace Adaptation.Shared;
 
 public class ProcessDataStandardFormatMapping
 {
 
-    public ReadOnlyCollection<string> BackfillColumns { get; private set; }
     public ReadOnlyCollection<int> ColumnIndices { get; private set; }
-    public ReadOnlyCollection<string> IgnoreColumns { get; private set; }
-    public ReadOnlyCollection<string> IndexOnlyColumns { get; private set; }
-    public ReadOnlyDictionary<string, string> KeyValuePairs { get; private set; }
     public ReadOnlyCollection<string> NewColumnNames { get; private set; }
     public ReadOnlyCollection<string> OldColumnNames { get; private set; }
 
-    public ProcessDataStandardFormatMapping(ReadOnlyCollection<string> backfillColumns,
-                                            ReadOnlyCollection<int> columnIndices,
-                                            ReadOnlyCollection<string> ignoreColumns,
-                                            ReadOnlyCollection<string> indexOnlyColumns,
-                                            ReadOnlyDictionary<string, string> keyValuePairs,
+    public ProcessDataStandardFormatMapping(ReadOnlyCollection<int> columnIndices,
                                             ReadOnlyCollection<string> newColumnNames,
                                             ReadOnlyCollection<string> oldColumnNames)
     {
-        BackfillColumns = backfillColumns;
         ColumnIndices = columnIndices;
-        IgnoreColumns = ignoreColumns;
-        IndexOnlyColumns = indexOnlyColumns;
-        KeyValuePairs = keyValuePairs;
         NewColumnNames = newColumnNames;
         OldColumnNames = oldColumnNames;
     }
 
+    internal static ProcessDataStandardFormatMapping Get(string processDataStandardFormatMappingOldColumnNames, string processDataStandardFormatMappingNewColumnNames, string processDataStandardFormatMappingColumnIndices)
+    {
+        ProcessDataStandardFormatMapping result;
+        ReadOnlyCollection<string> newColumnNames = new(processDataStandardFormatMappingNewColumnNames.Split(','));
+        ReadOnlyCollection<string> oldColumnNames = new(processDataStandardFormatMappingOldColumnNames.Split(','));
+        ReadOnlyCollection<int> columnIndices = new(processDataStandardFormatMappingColumnIndices.Split(',').Select(int.Parse).ToArray());
+        result = new(columnIndices: columnIndices,
+                     newColumnNames: newColumnNames,
+                     oldColumnNames: oldColumnNames);
+        return result;
+    }
+
 }
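A hedged usage sketch of the new ProcessDataStandardFormatMapping.Get factory with made-up column lists; the production values come from the Process.Data.Standard.Format.Mapping.* model object parameters read in FileRead, and the call has to live in the same assembly because Get is internal:

using System;
using Adaptation.Shared;

internal static class MappingGetDemo
{
    private static void Main()
    {
        // Hypothetical comma-separated inputs; each old column name lines up positionally
        // with a new column name and a column index (-1 marks "no source column").
        ProcessDataStandardFormatMapping mapping = ProcessDataStandardFormatMapping.Get(
            processDataStandardFormatMappingOldColumnNames: "Time,MID,Slot,Thickness",
            processDataStandardFormatMappingNewColumnNames: "DateTime,Cassette,Slot,Site",
            processDataStandardFormatMappingColumnIndices: "0,8,16,15");

        Console.WriteLine($"{mapping.OldColumnNames.Count} columns mapped");
    }
}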

View File

@@ -34,14 +34,16 @@ public class MET08THFTIRSTRATUS
     [Ignore]
 #endif
     [TestMethod]
-    public void Production__v2_60_0__MET08THFTIRSTRATUS__MoveMatchingFiles638014829236768048__Normal()
+    public void Production__v2_60_0__MET08THFTIRSTRATUS__MoveMatchingFiles638918243047505986__Normal()
     {
         string check = "*.pdsf";
+        bool validatePDSF = false;
+        MethodBase methodBase = new StackFrame().GetMethod();
         _MET08THFTIRSTRATUS.Production__v2_60_0__MET08THFTIRSTRATUS__MoveMatchingFiles();
-        MethodBase method = new StackFrame().GetMethod();
-        string[] variables = _MET08THFTIRSTRATUS.AdaptationTesting.GetVariables(method, check);
-        _ = _MET08THFTIRSTRATUS.AdaptationTesting.Get(method, variables[2], variables[3], false).ReExtract();
-        AdaptationTesting.UpdatePassDirectory(variables[2]);
+        string[] variables = _MET08THFTIRSTRATUS.AdaptationTesting.GetVariables(methodBase, check, validatePDSF);
+        IFileRead fileRead = _MET08THFTIRSTRATUS.AdaptationTesting.Get(methodBase, sourceFileLocation: variables[2], sourceFileFilter: variables[3], useCyclicalForDescription: false);
+        Logistics logistics = new(fileRead);
+        _ = AdaptationTesting.ReExtractCompareUpdatePassDirectory(variables, fileRead, logistics, validatePDSF);
     }
 
 #if DEBUG