diff --git a/Adaptation/.editorconfig b/Adaptation/.editorconfig
index d117dc5..b02c0bc 100644
--- a/Adaptation/.editorconfig
+++ b/Adaptation/.editorconfig
@@ -109,7 +109,7 @@ dotnet_diagnostic.CA2254.severity = none # CA2254: The logging message template
 dotnet_diagnostic.IDE0001.severity = warning # IDE0001: Simplify name
 dotnet_diagnostic.IDE0002.severity = warning # Simplify (member access) - System.Version.Equals("1", "2"); Version.Equals("1", "2");
 dotnet_diagnostic.IDE0004.severity = warning # IDE0004: Cast is redundant.
-dotnet_diagnostic.IDE0005.severity = warning # Using directive is unnecessary
+dotnet_diagnostic.IDE0005.severity = none # Using directive is unnecessary
 dotnet_diagnostic.IDE0028.severity = none # IDE0028: Collection initialization can be simplified
 dotnet_diagnostic.IDE0031.severity = warning # Use null propagation (IDE0031)
 dotnet_diagnostic.IDE0047.severity = warning # IDE0047: Parentheses can be removed
@@ -121,6 +121,7 @@ dotnet_diagnostic.IDE0290.severity = none # Use primary constructor [Distance]cs
 dotnet_diagnostic.IDE0300.severity = none # IDE0300: Collection initialization can be simplified
 dotnet_diagnostic.IDE0301.severity = none #IDE0301: Collection initialization can be simplified
 dotnet_diagnostic.IDE0305.severity = none # IDE0305: Collection initialization can be simplified
+dotnet_diagnostic.MSTEST0015.severity = none # MSTEST0015: Test method {method} should not be ignored
 dotnet_diagnostic.MSTEST0037.severity = error # MSTEST0037: Use proper 'Assert' methods
 dotnet_diagnostic.SYSLIB1045.severity = none # SYSLIB1045: diagnostics for regex source generation
 dotnet_naming_rule.abstract_method_should_be_pascal_case.severity = warning
diff --git a/Adaptation/.vscode/tasks.json b/Adaptation/.vscode/tasks.json
index 7b6c0ce..79f62c3 100644
--- a/Adaptation/.vscode/tasks.json
+++ b/Adaptation/.vscode/tasks.json
@@ -1,19 +1,134 @@
 {
   "version": "2.0.0",
+  "inputs": [
+    {
+      "default": "Development",
+      "description": "Which ASP Net Core Environment?",
+      "id": "ASPNETCORE_ENVIRONMENT",
+      "options": [
+        "Development",
+        "Production"
+      ],
+      "type": "pickString"
+    },
+    {
+      "default": "{AssemblyTitle}",
+      "description": "What Assembly Title?",
+      "id": "AssemblyTitle",
+      "type": "promptString"
+    },
+    {
+      "default": "{Build.BuildId}",
+      "description": "Which Build BuildId?",
+      "id": "Build.BuildId",
+      "type": "promptString"
+    },
+    {
+      "default": "{Build.Reason}",
+      "description": "Which Build Reason?",
+      "id": "Build.Reason",
+      "type": "promptString"
+    },
+    {
+      "default": "{Build.Repository.Id}",
+      "description": "Which Build Repository Id?",
+      "id": "Build.Repository.Id",
+      "type": "promptString"
+    },
+    {
+      "default": "{Build.Repository.Name}",
+      "description": "Which Build Repository Name?",
+      "id": "Build.Repository.Name",
+      "type": "promptString"
+    },
+    {
+      "default": "{Build.SourceVersion}",
+      "description": "Which Build Source Version?",
+      "id": "Build.SourceVersion",
+      "type": "promptString"
+    },
+    {
+      "default": "Debug",
+      "description": "Which Configuration?",
+      "id": "Configuration",
+      "options": [
+        "Debug",
+        "Release"
+      ],
+      "type": "pickString"
+    },
+    {
+      "default": "net8.0",
+      "description": "Which Core Version?",
+      "id": "CoreVersion",
+      "options": [
+        "net8.0"
+      ],
+      "type": "pickString"
+    },
+    {
+      "default": "C:/Program Files (x86)/Microsoft Visual Studio/2022/BuildTools/MSBuild/Current/Bin/MSBuild.exe",
+      "description": "Which MS Build?",
+      "id": "MSBuild",
+      "type": "promptString"
+    },
+    {
+      "default": "https://artifactory.intra.infineon.com/artifactory/api/nuget/ngt-fi-package-main-vir/",
+      "description": "Which Nuget Source?",
+      "id": "NugetSource",
+      "type": "promptString"
+    },
+    {
+      "default": "win-x64",
+      "description": "Which Runtime?",
+      "id": "Runtime",
+      "options": [
+        "win-x64",
+        "win-x32",
+        "linux-x64",
+        "linux-x32"
+      ],
+      "type": "pickString"
+    },
+    {
+      "default": "L:/",
+      "description": "Which System DefaultWorkingDirectory?",
+      "id": "System.DefaultWorkingDirectory",
+      "options": [
+        "L:/",
+        "D:/",
+        "C:/"
+      ],
+      "type": "pickString"
+    },
+    {
+      "default": "v4.8",
+      "description": "Which Core Target Framework Version?",
+      "id": "TargetFrameworkVersion",
+      "options": [
+        "v4.8"
+      ],
+      "type": "pickString"
+    },
+    {
+      "default": "{UserSecretsId}",
+      "description": "Which Core User Secrets Id?",
+      "id": "UserSecretsId",
+      "type": "promptString"
+    }
+  ],
   "tasks": [
     {
       "label": "Build",
       "command": "dotnet",
       "type": "process",
       "args": [
-        "build",
-        "/property:GenerateFullPaths=true",
-        "/consoleloggerparameters:NoSummary"
+        "build"
       ],
       "problemMatcher": "$msCompile"
     },
     {
-      "label": "Test-Debug",
+      "label": "Test Debug",
       "command": "dotnet",
       "type": "process",
       "args": [
@@ -24,7 +139,7 @@
       "problemMatcher": "$msCompile"
     },
     {
-      "label": "Test-Release",
+      "label": "Test Release",
       "command": "dotnet",
       "type": "process",
       "args": [
@@ -77,13 +192,13 @@
       "problemMatcher": "$msCompile"
     },
     {
-      "label": "Project",
+      "label": "Code Project",
       "type": "shell",
       "command": "code ../MET08RESISRP2100.csproj",
       "problemMatcher": []
     },
     {
-      "label": "Readme",
+      "label": "Code Read Me",
       "type": "shell",
       "command": "code ../README.md",
       "problemMatcher": []
@@ -103,7 +218,7 @@
       "problemMatcher": []
     },
     {
-      "label": "Git Config",
+      "label": "Code Git Config",
       "type": "shell",
       "command": "code ../.git/config",
       "problemMatcher": []
diff --git a/Adaptation/FileHandlers/APC/FileRead.cs b/Adaptation/FileHandlers/APC/FileRead.cs
index a9ed0b0..a90c002 100644
--- a/Adaptation/FileHandlers/APC/FileRead.cs
+++ b/Adaptation/FileHandlers/APC/FileRead.cs
@@ -128,7 +128,7 @@ public class FileRead : Shared.FileRead, IFileRead
         Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
         if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
             FileCopy(reportFullPath, dateTime, descriptions);
-        results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics[0]), tests, jsonElements, new List<FileInfo>());
+        results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics), tests, jsonElements, new List<FileInfo>());
         return results;
     }
 
diff --git a/Adaptation/FileHandlers/Archive/FileRead.cs b/Adaptation/FileHandlers/Archive/FileRead.cs
index 26fc9fd..03029d6 100644
--- a/Adaptation/FileHandlers/Archive/FileRead.cs
+++ b/Adaptation/FileHandlers/Archive/FileRead.cs
@@ -152,7 +152,7 @@ public class FileRead : Shared.FileRead, IFileRead
         Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
         if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
             MoveArchive(reportFullPath, dateTime);
-        results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics[0]), tests, jsonElements, new List<FileInfo>());
+        results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics), tests, jsonElements, new List<FileInfo>());
         return results;
     }
 
diff --git a/Adaptation/FileHandlers/IQSSi/FileRead.cs b/Adaptation/FileHandlers/IQSSi/FileRead.cs
index a60ec91..92f59fd 100644
--- a/Adaptation/FileHandlers/IQSSi/FileRead.cs
+++ b/Adaptation/FileHandlers/IQSSi/FileRead.cs
@@ -103,7 +103,7 @@ public class FileRead : Shared.FileRead, IFileRead
         return results;
     }
 
-    private void FileCopy<T>(string reportFullPath, DateTime dateTime, List<T> descriptions) where T : Shared.Properties.IDescription
+    private void WriteFile<T>(string reportFullPath, DateTime dateTime, ProcessDataStandardFormat processDataStandardFormat, List<T> descriptions) where T : Shared.Properties.IDescription
     {
         bool isDummyRun = false;
         string successDirectory = string.Empty;
@@ -111,8 +111,9 @@ public class FileRead : Shared.FileRead, IFileRead
         string duplicateDirectory = Path.Combine(_FileConnectorConfiguration.SourceFileLocation, _CellInstanceName);
         if (!Directory.Exists(duplicateDirectory))
             _ = Directory.CreateDirectory(duplicateDirectory);
-        string duplicateFile = Path.Combine(duplicateDirectory, Path.GetFileName(reportFullPath));
-        File.Copy(reportFullPath, duplicateFile, overwrite: true);
+        string duplicateFile = Path.Combine(duplicateDirectory, $"{Path.GetFileName(reportFullPath)}.xml");
+        string xml = ProcessDataStandardFormat.GetXml(processDataStandardFormat);
+        File.WriteAllText(duplicateFile, xml);
         WaitForFileConsumption(dateTime, descriptions, isDummyRun, successDirectory, duplicateDirectory, collection, duplicateFile);
     }
 
@@ -126,8 +127,8 @@ public class FileRead : Shared.FileRead, IFileRead
         List<Shared.Properties.IDescription> descriptions = GetDuplicatorDescriptions(jsonElements);
         Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
         if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
-            FileCopy(reportFullPath, dateTime, descriptions);
-        results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics[0]), tests, jsonElements, new List<FileInfo>());
+            WriteFile(reportFullPath, dateTime, processDataStandardFormat, descriptions);
+        results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics), tests, jsonElements, new List<FileInfo>());
         return results;
     }
 
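Aside (illustrative, not part of the change set): with the IQSSi change above, the duplicate file handed to the consumer is no longer a byte copy of the report but an XML rendering of the PDSF rows produced by ProcessDataStandardFormat.GetXml, added near the end of this diff. A minimal sketch of that rendering, assuming the reconstructed element names records/record and made-up column names and data:

// Sketch only: mirrors the per-cell escaping and element-per-column layout of GetXml.
// The element names, columns, and sample row are assumptions for illustration.
using System;

internal static class XmlDuplicateSketch
{
    private static string Escape(string value) =>
        value.Replace("&", "&amp;")
             .Replace("<", "&lt;")
             .Replace(">", "&gt;")
             .Replace("\"", "&quot;")
             .Replace("'", "&apos;");

    private static void Main()
    {
        string[] columns = { "Reactor", "Res" };
        string row = "R47\t<1.5";
        string[] cells = row.Split('\t');
        Console.WriteLine("<records>");
        Console.WriteLine("  <record>");
        for (int c = 0; c < cells.Length; c++)
            Console.WriteLine($"    <{columns[c]}>{Escape(cells[c])}</{columns[c]}>");
        Console.WriteLine("  </record>");
        Console.WriteLine("</records>");
    }
}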
diff --git a/Adaptation/FileHandlers/MoveMatchingFiles/FileRead.cs b/Adaptation/FileHandlers/MoveMatchingFiles/FileRead.cs
index 2ee4e12..f0dca14 100644
--- a/Adaptation/FileHandlers/MoveMatchingFiles/FileRead.cs
+++ b/Adaptation/FileHandlers/MoveMatchingFiles/FileRead.cs
@@ -305,8 +305,13 @@ public class FileRead : Shared.FileRead, IFileRead
                 continue;
             if (!_StaticRuns.TryGetValue(_Logistics.Sequence, out List<Shared.Metrology.WS.Results>? wsResults))
                 wsResults = null;
-            ProcessDataStandardFormat.Write(preWith.CheckFile, processDataStandardFormat, wsResults);
-            File.Delete(preWith.MatchingFile);
+            if (processDataStandardFormat.InputPDSF is null)
+                File.Move(preWith.MatchingFile, preWith.CheckFile);
+            else
+            {
+                ProcessDataStandardFormat.Write(preWith.CheckFile, processDataStandardFormat, wsResults);
+                File.Delete(preWith.MatchingFile);
+            }
             if (Directory.Exists(preWith.NoWaitDirectory))
             {
                 post = new(preWith.CheckFile, preWith.ErrFile);
diff --git a/Adaptation/FileHandlers/OpenInsight/FileRead.cs b/Adaptation/FileHandlers/OpenInsight/FileRead.cs
index 458f72a..4874013 100644
--- a/Adaptation/FileHandlers/OpenInsight/FileRead.cs
+++ b/Adaptation/FileHandlers/OpenInsight/FileRead.cs
@@ -119,15 +119,6 @@ public class FileRead : Shared.FileRead, IFileRead
         if (!Directory.Exists(duplicateDirectory))
             _ = Directory.CreateDirectory(duplicateDirectory);
         string successDirectory = _FileConnectorConfiguration.AlternateTargetFolder;
-        if (!Directory.Exists(Path.Combine(duplicateDirectory, "1")))
-        {
-            string parentParent = GetParentParent(_FileConnectorConfiguration.SourceFileLocation);
-            if (parentParent.Contains(_CellInstanceName))
-                parentParent = Path.GetDirectoryName(parentParent);
-            duplicateDirectory = Path.Combine(parentParent, "Data");
-            if (!Directory.Exists(duplicateDirectory))
-                _ = Directory.CreateDirectory(duplicateDirectory);
-        }
         if (descriptions.Count == 0 || tests.Length == 0)
             duplicateFile = Path.Combine(duplicateDirectory, Path.GetFileName(reportFullPath));
         else
@@ -173,7 +164,7 @@ public class FileRead : Shared.FileRead, IFileRead
         Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
         if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
             SaveOpenInsightFile(reportFullPath, dateTime, processDataStandardFormat, descriptions, tests);
-        results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics[0]), tests, jsonElements, new List<FileInfo>());
+        results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics), tests, jsonElements, new List<FileInfo>());
         return results;
     }
 
diff --git a/Adaptation/FileHandlers/OpenInsightMetrologyViewer/FileRead.cs b/Adaptation/FileHandlers/OpenInsightMetrologyViewer/FileRead.cs
index 462a9c6..404d728 100644
--- a/Adaptation/FileHandlers/OpenInsightMetrologyViewer/FileRead.cs
+++ b/Adaptation/FileHandlers/OpenInsightMetrologyViewer/FileRead.cs
@@ -147,7 +147,7 @@ public class FileRead : Shared.FileRead, IFileRead
         Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
         if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
             SendData(reportFullPath, dateTime, descriptions);
-        results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics[0]), tests, jsonElements, new List<FileInfo>());
+        results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics), tests, jsonElements, new List<FileInfo>());
         return results;
     }
 
diff --git a/Adaptation/FileHandlers/OpenInsightMetrologyViewerAttachments/FileRead.cs b/Adaptation/FileHandlers/OpenInsightMetrologyViewerAttachments/FileRead.cs
index 22c9afa..a89f16d 100644
--- a/Adaptation/FileHandlers/OpenInsightMetrologyViewerAttachments/FileRead.cs
+++ b/Adaptation/FileHandlers/OpenInsightMetrologyViewerAttachments/FileRead.cs
@@ -171,7 +171,7 @@ public class FileRead : Shared.FileRead, IFileRead
         Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
         if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
             PostOpenInsightMetrologyViewerAttachments(descriptions);
-        results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics[0]), tests, jsonElements, new List<FileInfo>());
+        results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics), tests, jsonElements, new List<FileInfo>());
         return results;
     }
 
diff --git a/Adaptation/FileHandlers/Processed/FileRead.cs b/Adaptation/FileHandlers/Processed/FileRead.cs
index fb77b2d..23fc7b1 100644
--- a/Adaptation/FileHandlers/Processed/FileRead.cs
+++ b/Adaptation/FileHandlers/Processed/FileRead.cs
@@ -172,7 +172,7 @@ public class FileRead : Shared.FileRead, IFileRead
         JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(processDataStandardFormat);
         List<json.Description> descriptions = json.ProcessData.GetDescriptions(jsonElements);
         Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
-        results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics[0]), tests, jsonElements, new List<FileInfo>());
+        results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics), tests, jsonElements, new List<FileInfo>());
         if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
             DirectoryMove(reportFullPath, dateTime, descriptions);
         else if (!_IsEAFHosted)
diff --git a/Adaptation/FileHandlers/SPaCe/FileRead.cs b/Adaptation/FileHandlers/SPaCe/FileRead.cs
index 2e0f55a..e258bd6 100644
--- a/Adaptation/FileHandlers/SPaCe/FileRead.cs
+++ b/Adaptation/FileHandlers/SPaCe/FileRead.cs
@@ -125,7 +125,7 @@ public class FileRead : Shared.FileRead, IFileRead
         Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
         if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
             FileCopy(reportFullPath, dateTime, descriptions);
-        results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics[0]), tests, jsonElements, new List<FileInfo>());
+        results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics), tests, jsonElements, new List<FileInfo>());
         return results;
     }
 
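Aside (illustrative): the recurring edit in the handlers above replaces processDataStandardFormat.Logistics[0] with processDataStandardFormat.Logistics inside string.Join. Assuming Logistics is a collection of strings, the two calls bind to different string.Join overloads, so the old form newline-joined the characters of the first logistics line while the new form newline-joins the lines themselves. A minimal sketch with made-up logistics lines:

// Sketch only: demonstrates the overload difference behind the Logistics[0] -> Logistics edit.
// The logistics lines below are invented for illustration.
using System;
using System.Collections.ObjectModel;

internal static class JoinSketch
{
    private static void Main()
    {
        ReadOnlyCollection<string> logistics = new(new[]
        {
            "LOGISTICS_1\tA_JOBID=SRP2100;SEQUENCE=133418871;",
            "LOGISTICS_2\tA_MES_ENTITY=SRP2100;"
        });

        // Old: a lone string argument binds to Join<char>(separator, IEnumerable<char>),
        // so every character of the first line ends up separated by a newline.
        string before = string.Join(Environment.NewLine, logistics[0]);

        // New: the collection binds to Join(separator, IEnumerable<string>),
        // so each logistics line stays intact, one per line.
        string after = string.Join(Environment.NewLine, logistics);

        Console.WriteLine(before.Split(new[] { Environment.NewLine }, StringSplitOptions.None).Length); // one entry per character
        Console.WriteLine(after.Split(new[] { Environment.NewLine }, StringSplitOptions.None).Length);  // 2
    }
}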
diff --git a/Adaptation/MET08RESISRP2100.yml b/Adaptation/MET08RESISRP2100.yml
index 1e480b1..29f3baa 100644
--- a/Adaptation/MET08RESISRP2100.yml
+++ b/Adaptation/MET08RESISRP2100.yml
@@ -41,6 +41,24 @@ stages:
       displayName: "Nuget Clear"
       enabled: false
 
+    - task: CopyFiles@2
+      displayName: 'Copy GhostPCL Files to: D:\EAF-Mesa-Integration\copy'
+      inputs:
+        Contents: "*"
+        SourceFolder: '\\mesfs.infineon.com\EC_EAFRepository\Staging\DeploymentStorage\GhostPCL'
+        TargetFolder: 'D:\EAF-Mesa-Integration\copy\GhostPCL'
+        OverWrite: true
+      enabled: true
+
+    - task: CopyFiles@2
+      displayName: 'Copy LincPDFC Files to: D:\EAF-Mesa-Integration\copy'
+      inputs:
+        Contents: "*"
+        SourceFolder: '\\mesfs.infineon.com\EC_EAFRepository\Staging\DeploymentStorage\LincPDFC'
+        TargetFolder: 'D:\EAF-Mesa-Integration\copy\LincPDFC'
+        OverWrite: true
+      enabled: true
+
     - script: |
         "C:\program files\dotnet\dotnet.exe" user-secrets init
         "C:\program files\dotnet\dotnet.exe" user-secrets set "BuildNumber" "$(Build.BuildId)"
@@ -184,6 +202,24 @@
       displayName: "Nuget Clear"
       enabled: false
 
+    - task: CopyFiles@2
+      displayName: 'Copy GhostPCL Files to: D:\EAF-Mesa-Integration\copy'
+      inputs:
+        Contents: "*"
+        SourceFolder: '\\mestsa003.infineon.com\EC_EAFRepository\Staging\DeploymentStorage\GhostPCL'
+        TargetFolder: 'D:\EAF-Mesa-Integration\copy\GhostPCL'
+        OverWrite: true
+      enabled: true
+
+    - task: CopyFiles@2
+      displayName: 'Copy LincPDFC Files to: D:\EAF-Mesa-Integration\copy'
+      inputs:
+        Contents: "*"
+        SourceFolder: '\\mestsa003.infineon.com\EC_EAFRepository\Staging\DeploymentStorage\LincPDFC'
+        TargetFolder: 'D:\EAF-Mesa-Integration\copy\LincPDFC'
+        OverWrite: true
+      enabled: true
+
     - script: |
         "C:\program files\dotnet\dotnet.exe" user-secrets init
         "C:\program files\dotnet\dotnet.exe" user-secrets set "BuildNumber" "$(Build.BuildId)"
diff --git a/Adaptation/Shared/FileRead.cs b/Adaptation/Shared/FileRead.cs
index 0c8551c..ffbe9de 100644
--- a/Adaptation/Shared/FileRead.cs
+++ b/Adaptation/Shared/FileRead.cs
@@ -769,17 +769,24 @@ public class FileRead : Properties.IFileRead
         else
         {
             string[] files;
-            string logisticsSequence = _Logistics.Sequence.ToString();
-            string[] directories = Directory.GetDirectories(jobIdDirectory, $"*{logisticsSequence}*", SearchOption.TopDirectoryOnly);
-            foreach (string directory in directories)
+            string[] directories;
+            string logisticsSequence;
+            for (int i = 0; i < 10; i++)
             {
-                files = Directory.GetFiles(directory, "*", SearchOption.TopDirectoryOnly);
-                if (files.Length == 0)
-                    continue;
-                results.Add(directory);
+                logisticsSequence = (_Logistics.Sequence + -i).ToString();
+                directories = Directory.GetDirectories(jobIdDirectory, $"*{logisticsSequence}*", SearchOption.TopDirectoryOnly);
+                foreach (string directory in directories)
+                {
+                    files = Directory.GetFiles(directory, "*", SearchOption.TopDirectoryOnly);
+                    if (files.Length == 0)
+                        continue;
+                    results.Add(directory);
+                }
+                if (results.Count == 1)
+                    break;
             }
         }
-        if ((results is null) || results.Count != 1)
+        if (results.Count != 1)
             throw new Exception("Didn't find directory by logistics sequence");
         return results.ToArray();
     }
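Aside (illustrative): the Shared/FileRead.cs change above makes the directory lookup tolerant of small sequence drift by probing Sequence, Sequence - 1, ... Sequence - 9 and stopping as soon as exactly one populated directory is found. A standalone sketch of the same search, with a hypothetical jobIdDirectory and sequence value:

// Sketch only: restates the lookback search added above; jobIdDirectory and
// the sequence value are hypothetical inputs.
using System;
using System.Collections.Generic;
using System.IO;

internal static class LookbackSketch
{
    internal static string[] FindBySequence(string jobIdDirectory, long sequence)
    {
        List<string> results = new();
        for (int i = 0; i < 10; i++)
        {
            // Try the exact sequence first, then progressively older sequences.
            string pattern = $"*{sequence - i}*";
            foreach (string directory in Directory.GetDirectories(jobIdDirectory, pattern, SearchOption.TopDirectoryOnly))
            {
                // Only keep candidate directories that actually contain files.
                if (Directory.GetFiles(directory, "*", SearchOption.TopDirectoryOnly).Length == 0)
                    continue;
                results.Add(directory);
            }
            if (results.Count == 1)
                break;
        }
        if (results.Count != 1)
            throw new Exception("Didn't find directory by logistics sequence");
        return results.ToArray();
    }
}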
diff --git a/Adaptation/Shared/ProcessDataStandardFormat.cs b/Adaptation/Shared/ProcessDataStandardFormat.cs
index a86241d..1fb56c5 100644
--- a/Adaptation/Shared/ProcessDataStandardFormat.cs
+++ b/Adaptation/Shared/ProcessDataStandardFormat.cs
@@ -136,6 +136,7 @@ internal static ProcessDataStandardFormat GetProcessDataStandardFormat(string reportFullPath, string[]? lines = null, int columnsLine = 6)
     {
         ProcessDataStandardFormat result;
+        long? sequence;
         string segment;
         string[] segments;
         bool addToFooter = false;
@@ -186,13 +187,25 @@
         }
         string? linesOne = lines.Length > 0 && body.Count == 0 && columns.Count == 0 ? lines[1] : null;
         logistics = GetLogistics(footer, linesOne: linesOne);
+        if (logistics.Count == 0)
+            sequence = null;
+        else
+        {
+            segments = logistics[0].Split(new string[] { "SEQUENCE=" }, StringSplitOptions.None);
+            sequence = segments.Length < 2 || !long.TryParse(segments[1].Split(';')[0], out long s) ? null : s;
+        }
+        if (sequence is null && !string.IsNullOrEmpty(reportFullPath))
+        {
+            FileInfo fileInfo = new(reportFullPath);
+            sequence = fileInfo.LastWriteTime.Ticks;
+        }
         result = new(body: body.AsReadOnly(),
                      columns: columns.AsReadOnly(),
                      footer: footer.AsReadOnly(),
                      header: header.AsReadOnly(),
                      inputPDSF: null,
                      logistics: logistics,
-                     sequence: null);
+                     sequence: sequence);
         return result;
     }
 
@@ -236,7 +249,7 @@ private static ProcessDataStandardFormat GetProcessDataStandardFormat(DateTime lastWriteTime, int columnsLine, string path, string[]? lines)
     {
         ProcessDataStandardFormat result;
-        long sequence;
+        long? sequence;
         string[] segments;
         bool addToFooter = false;
         List<string> body = new();
@@ -268,12 +281,13 @@
         }
         logistics = GetLogistics(footer, linesOne: null);
         if (logistics.Count == 0)
-            sequence = lastWriteTime.Ticks;
+            sequence = null;
         else
         {
             segments = logistics[0].Split(new string[] { "SEQUENCE=" }, StringSplitOptions.None);
-            sequence = segments.Length < 2 || !long.TryParse(segments[1].Split(';')[0], out long s) ? lastWriteTime.Ticks : s;
+            sequence = segments.Length < 2 || !long.TryParse(segments[1].Split(';')[0], out long s) ? null : s;
         }
+        sequence ??= lastWriteTime.Ticks;
         result = new(body: body.AsReadOnly(),
                      columns: new(columns),
                      footer: footer.AsReadOnly(),
@@ -302,7 +316,7 @@
             segments = bodyLine.Split('\t').ToList();
             for (int c = 0; c < segments.Count; c++)
             {
-                value = segments[c].Replace("\"", "\\\"").Replace("\\", "\\\\");
+                value = segments[c].Replace("\\", "\\\\").Replace("\"", "\\\"");
                 _ = stringBuilder.Append('"').Append(processDataStandardFormat.Columns[c]).Append("\":\"").Append(value).Append("\",");
             }
             _ = stringBuilder.Remove(stringBuilder.Length - 1, 1);
@@ -364,7 +378,6 @@
     {
         if (processDataStandardFormat.InputPDSF is null)
             throw new NullReferenceException(nameof(processDataStandardFormat.InputPDSF));
-#pragma warning disable CA1845, IDE0057
         string result;
         string line;
         string value;
@@ -378,19 +391,27 @@
                 break;
             for (int c = 0; c < segments.Length; c++)
             {
-                value = segments[c].Replace("\"", "\\\"").Replace("\\", "\\\\");
+                value = segments[c].Replace("\\", "\\\\").Replace("\"", "\\\"");
                 line += string.Concat('"', processDataStandardFormat.InputPDSF.Columns[c].Trim('"'), '"', ':', '"', value, '"', ',');
             }
             line = string.Concat(line.Substring(0, line.Length - 1), '}');
             lines.Add(line);
         }
+        string? json = null;
+        if (processDataStandardFormat.Footer is not null && processDataStandardFormat.Footer.Count > 0)
+        {
+            Dictionary<string, string> footerKeyValuePairs = GetFooterKeyValuePairs(processDataStandardFormat.Footer);
+            Dictionary<string, Dictionary<string, string>> logisticKeyValuePairs = GetLogisticKeyValuePairs(processDataStandardFormat.Footer, footerKeyValuePairs);
+            json = JsonSerializer.Serialize(logisticKeyValuePairs, DictionaryStringDictionaryStringStringSourceGenerationContext.Default.DictionaryStringDictionaryStringString);
+        }
+        string footerText = string.IsNullOrEmpty(json) || json == "{}" ? string.Empty : $",{Environment.NewLine}\"PDSF\":{Environment.NewLine}{json}";
         result = string.Concat(
             '{',
             Environment.NewLine,
             '"',
             "Count",
             '"',
-            ": ",
+            ": ",
             processDataStandardFormat.Body.Count,
             ',',
             Environment.NewLine,
@@ -409,17 +430,95 @@
             '"',
             "Sequence",
             '"',
-            ": ",
+            ": ",
             processDataStandardFormat.Sequence,
             Environment.NewLine,
+            footerText,
+            Environment.NewLine,
             '}');
         return result;
-#pragma warning restore CA1845, IDE0057
+    }
+
+    private static Dictionary<string, string> GetFooterKeyValuePairs(ReadOnlyCollection<string> footerLines)
+    {
+        Dictionary<string, string> results = new();
+        string[] segments;
+        foreach (string footerLine in footerLines)
+        {
+            segments = footerLine.Split('\t');
+            if (segments.Length != 2 || string.IsNullOrEmpty(segments[1].Trim()))
+            {
+                continue;
+            }
+            if (segments[1].Contains(';'))
+            {
+                continue;
+            }
+            else
+            {
+                if (results.ContainsKey(segments[0]))
+                {
+                    continue;
+                }
+                results.Add(segments[0], segments[1]);
+            }
+        }
+        return results;
+    }
+
+    private static Dictionary<string, Dictionary<string, string>> GetLogisticKeyValuePairs(ReadOnlyCollection<string> footerLines, Dictionary<string, string> footerKeyValuePairs)
+    {
+        Dictionary<string, Dictionary<string, string>> results = new();
+        string[] segments;
+        string[] subSegments;
+        string[] subSubSegments;
+        Dictionary<string, string>? keyValue;
+        results.Add("Footer", footerKeyValuePairs);
+        foreach (string footerLine in footerLines)
+        {
+            segments = footerLine.Split('\t');
+            if (segments.Length != 2 || string.IsNullOrEmpty(segments[1].Trim()))
+            {
+                continue;
+            }
+            if (!segments[1].Contains(';') || !segments[1].Contains('='))
+            {
+                continue;
+            }
+            else
+            {
+                subSegments = segments[1].Split(';');
+                if (subSegments.Length < 1)
+                {
+                    continue;
+                }
+                if (!results.TryGetValue(segments[0], out keyValue))
+                {
+                    results.Add(segments[0], new());
+                    if (!results.TryGetValue(segments[0], out keyValue))
+                    {
+                        throw new Exception();
+                    }
+                }
+                foreach (string segment in subSegments)
+                {
+                    subSubSegments = segment.Split('=');
+                    if (subSubSegments.Length != 2)
+                    {
+                        continue;
+                    }
+                    keyValue.Add(subSubSegments[0], subSubSegments[1]);
+                }
+            }
+        }
+        return results;
     }
 
     internal static void Write(string path, ProcessDataStandardFormat processDataStandardFormat, List<Metrology.WS.Results>? wsResults)
     {
         List<string> results = new();
+        if (processDataStandardFormat.InputPDSF is null)
+            throw new NullReferenceException(nameof(processDataStandardFormat.InputPDSF));
         if (processDataStandardFormat.Sequence is null)
             throw new NullReferenceException(nameof(processDataStandardFormat.Sequence));
         string endOffset = "E#######T";
@@ -457,25 +556,22 @@
             }
         }
         results.Add("END_HEADER");
-        if (processDataStandardFormat.InputPDSF is not null)
-        {
-            results.Add(string.Empty);
-            List<char> hyphens = new();
-            results.AddRange(processDataStandardFormat.InputPDSF.Header.Select(l => l.Replace('\t', '|')));
-            results.Add(string.Empty);
-            results.Add($"|{string.Join("|", processDataStandardFormat.InputPDSF.Columns)}|");
-            for (int i = 0; i < processDataStandardFormat.InputPDSF.Columns.Count; i++)
-                hyphens.Add('-');
-            results.Add($"|{string.Join("|", hyphens)}|");
-            results.AddRange(processDataStandardFormat.InputPDSF.Body.Select(l => l.Replace('\t', '|')));
-            results.Add(string.Empty);
-            results.AddRange(processDataStandardFormat.InputPDSF.Footer.Select(l => l.Replace('\t', '|')));
-            results.Add(string.Empty);
-            results.Add("EOF");
-            results.Add(string.Empty);
-            string json = GetJson(processDataStandardFormat);
-            results.Add(json);
-        }
+        results.Add(string.Empty);
+        List<char> hyphens = new();
+        results.AddRange(processDataStandardFormat.InputPDSF.Header.Select(l => $"|{l.Replace('\t', '|')}|"));
+        results.Add(string.Empty);
+        results.Add($"|{string.Join("|", processDataStandardFormat.InputPDSF.Columns)}|");
+        for (int i = 0; i < processDataStandardFormat.InputPDSF.Columns.Count; i++)
+            hyphens.Add('-');
+        results.Add($"|{string.Join("|", hyphens)}|");
+        results.AddRange(processDataStandardFormat.InputPDSF.Body.Select(l => $"|{l.Replace('\t', '|')}|"));
+        results.Add(string.Empty);
+        results.AddRange(processDataStandardFormat.InputPDSF.Footer.Select(l => $"|{l.Replace('\t', '|')}|"));
+        results.Add(string.Empty);
+        results.Add("EOF");
+        results.Add(string.Empty);
+        string json = GetJson(processDataStandardFormat);
+        results.Add(json);
         File.WriteAllText(path, string.Join(Environment.NewLine, results));
     }
 
@@ -518,7 +614,7 @@
         {
             for (int c = 1; c < segments.Length; c++)
             {
-                value = segments[c].Replace("\"", "\\\"").Replace("\\", "\\\\");
+                value = segments[c].Replace("\\", "\\\\").Replace("\"", "\\\"");
                 _ = stringBuilder.Append('"').Append(processDataStandardFormat.Columns[c]).Append("\":\"").Append(value).Append("\",");
             }
         }
@@ -526,7 +622,7 @@
         {
             for (int c = 1; c < segments.Length; c++)
             {
-                value = segments[c].Replace("\"", "\\\"").Replace("\\", "\\\\");
+                value = segments[c].Replace("\\", "\\\\").Replace("\"", "\\\"");
                 if (string.IsNullOrEmpty(value))
                     _ = stringBuilder.Append('"').Append(processDataStandardFormat.Columns[c]).Append("\":").Append(value).Append("null,");
                 else if (value.All(char.IsDigit))
@@ -757,10 +853,48 @@
         return result;
     }
 
+    internal static string GetXml(ProcessDataStandardFormat processDataStandardFormat)
+    {
+        string result;
+        string value;
+        string[] segments;
+        ReadOnlyCollection<string> body = processDataStandardFormat.InputPDSF is null ?
+            processDataStandardFormat.Body : processDataStandardFormat.InputPDSF.Body;
+        ReadOnlyCollection<string> columns = processDataStandardFormat.InputPDSF is null ?
+            processDataStandardFormat.Columns : processDataStandardFormat.InputPDSF.Columns;
+        List<string> lines = new() { "<?xml version=\"1.0\" encoding=\"utf-8\"?>", "<records>" };
+        for (int i = 0; i < body.Count; i++)
+        {
+            lines.Add("  <record>");
+            segments = body[i].Trim().Split('\t');
+            if (segments.Length != columns.Count)
+                break;
+            for (int c = 0; c < segments.Length; c++)
+            {
+                value = segments[c].Replace("&", "&amp;")
+                                   .Replace("<", "&lt;")
+                                   .Replace(">", "&gt;")
+                                   .Replace("\"", "&quot;")
+                                   .Replace("'", "&apos;");
+                lines.Add(string.Concat("    <", columns[c].Trim('"'), '>', value, "</", columns[c].Trim('"'), '>'));
+            }
+            lines.Add("  </record>");
+        }
+        lines.Add("</records>");
+        result = string.Join(Environment.NewLine, lines);
+        return result;
+    }
+
 }
 
 [JsonSourceGenerationOptions(WriteIndented = true)]
 [JsonSerializable(typeof(JsonElement[]))]
 internal partial class JsonElementCollectionSourceGenerationContext : JsonSerializerContext
 {
+}
+
+[JsonSourceGenerationOptions(WriteIndented = true, DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull)]
+[JsonSerializable(typeof(Dictionary<string, Dictionary<string, string>>))]
+internal partial class DictionaryStringDictionaryStringStringSourceGenerationContext : JsonSerializerContext
+{
 }
\ No newline at end of file
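Aside (illustrative): the other recurring edit in ProcessDataStandardFormat.cs swaps the order of the two Replace calls used when hand-building JSON strings. Escaping backslashes after quotes re-escapes the backslash that the quote escape just introduced, while the new order (backslashes first, then quotes) leaves valid JSON string content. A minimal demonstration:

// Sketch only: shows why backslashes must be escaped before double quotes
// when hand-building JSON string content, as the hunks above now do.
using System;

internal static class EscapeOrderSketch
{
    private static void Main()
    {
        string value = "2\" wafer";

        // Old order: the \" from the first Replace is rewritten to \\" by the
        // second Replace, which terminates the JSON string early.
        string oldOrder = value.Replace("\"", "\\\"").Replace("\\", "\\\\");

        // New order: backslashes first, then quotes.
        string newOrder = value.Replace("\\", "\\\\").Replace("\"", "\\\"");

        Console.WriteLine($"\"{oldOrder}\""); // "2\\" wafer"  (broken)
        Console.WriteLine($"\"{newOrder}\""); // "2\" wafer"   (valid)
    }
}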