diff --git a/Adaptation/.editorconfig b/Adaptation/.editorconfig
index d117dc5..b02c0bc 100644
--- a/Adaptation/.editorconfig
+++ b/Adaptation/.editorconfig
@@ -109,7 +109,7 @@ dotnet_diagnostic.CA2254.severity = none # CA2254: The logging message template
 dotnet_diagnostic.IDE0001.severity = warning # IDE0001: Simplify name
 dotnet_diagnostic.IDE0002.severity = warning # Simplify (member access) - System.Version.Equals("1", "2"); Version.Equals("1", "2");
 dotnet_diagnostic.IDE0004.severity = warning # IDE0004: Cast is redundant.
-dotnet_diagnostic.IDE0005.severity = warning # Using directive is unnecessary
+dotnet_diagnostic.IDE0005.severity = none # Using directive is unnecessary
 dotnet_diagnostic.IDE0028.severity = none # IDE0028: Collection initialization can be simplified
 dotnet_diagnostic.IDE0031.severity = warning # Use null propagation (IDE0031)
 dotnet_diagnostic.IDE0047.severity = warning # IDE0047: Parentheses can be removed
@@ -121,6 +121,7 @@ dotnet_diagnostic.IDE0290.severity = none # Use primary constructor [Distance]cs
 dotnet_diagnostic.IDE0300.severity = none # IDE0300: Collection initialization can be simplified
 dotnet_diagnostic.IDE0301.severity = none #IDE0301: Collection initialization can be simplified
 dotnet_diagnostic.IDE0305.severity = none # IDE0305: Collection initialization can be simplified
+dotnet_diagnostic.MSTEST0015.severity = none # MSTEST0015: Test method {method} should not be ignored
 dotnet_diagnostic.MSTEST0037.severity = error # MSTEST0037: Use proper 'Assert' methods
 dotnet_diagnostic.SYSLIB1045.severity = none # SYSLIB1045: diagnostics for regex source generation
 dotnet_naming_rule.abstract_method_should_be_pascal_case.severity = warning
diff --git a/Adaptation/.vscode/launch.json b/Adaptation/.vscode/launch.json
index 6f63fba..f8bf7e8 100644
--- a/Adaptation/.vscode/launch.json
+++ b/Adaptation/.vscode/launch.json
@@ -1,10 +1,43 @@
 {
     "configurations": [
+        {
+            "mode": "debug",
+            "name": "Go launch file",
+            "program": "${file}",
+            "request": "launch",
+            "type": "go"
+        },
+        {
+            "name": "node Launch Current Opened File",
+            "program": "${file}",
+            "request": "launch",
+            "type": "node"
+        },
+        {
+            "cwd": "${workspaceFolder}",
+            "internalConsoleOptions": "neverOpen",
+            "name": "Debug File",
+            "program": "${file}",
+            "request": "launch",
+            "stopOnEntry": false,
+            "type": "bun",
+            "watchMode": false
+        },
+        {
+            "cwd": "${workspaceFolder}",
+            "internalConsoleOptions": "neverOpen",
+            "name": "Run File",
+            "noDebug": true,
+            "program": "${file}",
+            "request": "launch",
+            "type": "bun",
+            "watchMode": false
+        },
         {
             "name": ".NET Core Attach",
-            "type": "coreclr",
+            "processId": 32760,
             "request": "attach",
-            "processId": 23636
+            "type": "coreclr"
         }
     ]
-}
+}
\ No newline at end of file
diff --git a/Adaptation/.vscode/tasks.json b/Adaptation/.vscode/tasks.json
index 7b6c0ce..79f62c3 100644
--- a/Adaptation/.vscode/tasks.json
+++ b/Adaptation/.vscode/tasks.json
@@ -1,19 +1,134 @@
 {
     "version": "2.0.0",
+    "inputs": [
+        {
+            "default": "Development",
+            "description": "Which ASP Net Core Environment?",
+            "id": "ASPNETCORE_ENVIRONMENT",
+            "options": [
+                "Development",
+                "Production"
+            ],
+            "type": "pickString"
+        },
+        {
+            "default": "{AssemblyTitle}",
+            "description": "What Assembly Title?",
+            "id": "AssemblyTitle",
+            "type": "promptString"
+        },
+        {
+            "default": "{Build.BuildId}",
+            "description": "Which Build BuildId?",
+            "id": "Build.BuildId",
+            "type": "promptString"
+        },
+        {
+            "default": "{Build.Reason}",
+            "description": "Which Build Reason?",
+            "id": "Build.Reason",
+            "type": "promptString"
+        },
+        {
+            "default": "{Build.Repository.Id}",
+            "description": "Which Build Repository Id?",
+            "id": "Build.Repository.Id",
+            "type": "promptString"
+        },
+        {
+            "default": "{Build.Repository.Name}",
+            "description": "Which Build Repository Name?",
+            "id": "Build.Repository.Name",
+            "type": "promptString"
+        },
+        {
+            "default": "{Build.SourceVersion}",
+            "description": "Which Build Source Version?",
+            "id": "Build.SourceVersion",
+            "type": "promptString"
+        },
+        {
+            "default": "Debug",
+            "description": "Which Configuration?",
+            "id": "Configuration",
+            "options": [
+                "Debug",
+                "Release"
+            ],
+            "type": "pickString"
+        },
+        {
+            "default": "net8.0",
+            "description": "Which Core Version?",
+            "id": "CoreVersion",
+            "options": [
+                "net8.0"
+            ],
+            "type": "pickString"
+        },
+        {
+            "default": "C:/Program Files (x86)/Microsoft Visual Studio/2022/BuildTools/MSBuild/Current/Bin/MSBuild.exe",
+            "description": "Which MS Build?",
+            "id": "MSBuild",
+            "type": "promptString"
+        },
+        {
+            "default": "https://artifactory.intra.infineon.com/artifactory/api/nuget/ngt-fi-package-main-vir/",
+            "description": "Which Nuget Source?",
+            "id": "NugetSource",
+            "type": "promptString"
+        },
+        {
+            "default": "win-x64",
+            "description": "Which Runtime?",
+            "id": "Runtime",
+            "options": [
+                "win-x64",
+                "win-x32",
+                "linux-x64",
+                "linux-x32"
+            ],
+            "type": "pickString"
+        },
+        {
+            "default": "L:/",
+            "description": "Which System DefaultWorkingDirectory?",
+            "id": "System.DefaultWorkingDirectory",
+            "options": [
+                "L:/",
+                "D:/",
+                "C:/"
+            ],
+            "type": "pickString"
+        },
+        {
+            "default": "v4.8",
+            "description": "Which Core Target Framework Version?",
+            "id": "TargetFrameworkVersion",
+            "options": [
+                "v4.8"
+            ],
+            "type": "pickString"
+        },
+        {
+            "default": "{UserSecretsId}",
+            "description": "Which Core User Secrets Id?",
+            "id": "UserSecretsId",
+            "type": "promptString"
+        }
+    ],
     "tasks": [
         {
             "label": "Build",
             "command": "dotnet",
             "type": "process",
             "args": [
-                "build",
-                "/property:GenerateFullPaths=true",
-                "/consoleloggerparameters:NoSummary"
+                "build"
             ],
             "problemMatcher": "$msCompile"
         },
         {
-            "label": "Test-Debug",
+            "label": "Test Debug",
             "command": "dotnet",
             "type": "process",
             "args": [
@@ -24,7 +139,7 @@
             "problemMatcher": "$msCompile"
         },
         {
-            "label": "Test-Release",
+            "label": "Test Release",
             "command": "dotnet",
             "type": "process",
             "args": [
@@ -77,13 +192,13 @@
             "problemMatcher": "$msCompile"
         },
         {
-            "label": "Project",
+            "label": "Code Project",
             "type": "shell",
             "command": "code ../MET08RESISRP2100.csproj",
             "problemMatcher": []
         },
         {
-            "label": "Readme",
+            "label": "Code Read Me",
             "type": "shell",
             "command": "code ../README.md",
             "problemMatcher": []
@@ -103,7 +218,7 @@
             "problemMatcher": []
         },
         {
-            "label": "Git Config",
+            "label": "Code Git Config",
             "type": "shell",
             "command": "code ../.git/config",
             "problemMatcher": []
diff --git a/Adaptation/FileHandlers/APC/FileRead.cs b/Adaptation/FileHandlers/APC/FileRead.cs
index a9ed0b0..a90c002 100644
--- a/Adaptation/FileHandlers/APC/FileRead.cs
+++ b/Adaptation/FileHandlers/APC/FileRead.cs
@@ -128,7 +128,7 @@ public class FileRead : Shared.FileRead, IFileRead
         Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
         if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
             FileCopy(reportFullPath, dateTime, descriptions);
-        results = new Tuple>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics[0]), tests, jsonElements, new List());
+        results = new Tuple>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics), tests, jsonElements, new List());
         return results;
     }
 
diff --git a/Adaptation/FileHandlers/Archive/FileRead.cs b/Adaptation/FileHandlers/Archive/FileRead.cs
index 26fc9fd..03029d6 100644
--- a/Adaptation/FileHandlers/Archive/FileRead.cs
+++ b/Adaptation/FileHandlers/Archive/FileRead.cs
@@ -152,7 +152,7 @@ public class FileRead : Shared.FileRead, IFileRead
         Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
         if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
             MoveArchive(reportFullPath, dateTime);
-        results = new Tuple>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics[0]), tests, jsonElements, new List());
+        results = new Tuple>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics), tests, jsonElements, new List());
         return results;
     }
 
diff --git a/Adaptation/FileHandlers/IQSSi/FileRead.cs b/Adaptation/FileHandlers/IQSSi/FileRead.cs
index a60ec91..92f59fd 100644
--- a/Adaptation/FileHandlers/IQSSi/FileRead.cs
+++ b/Adaptation/FileHandlers/IQSSi/FileRead.cs
@@ -103,7 +103,7 @@ public class FileRead : Shared.FileRead, IFileRead
         return results;
     }
 
-    private void FileCopy(string reportFullPath, DateTime dateTime, List descriptions) where T : Shared.Properties.IDescription
+    private void WriteFile(string reportFullPath, DateTime dateTime, ProcessDataStandardFormat processDataStandardFormat, List descriptions) where T : Shared.Properties.IDescription
     {
         bool isDummyRun = false;
         string successDirectory = string.Empty;
@@ -111,8 +111,9 @@ public class FileRead : Shared.FileRead, IFileRead
         string duplicateDirectory = Path.Combine(_FileConnectorConfiguration.SourceFileLocation, _CellInstanceName);
         if (!Directory.Exists(duplicateDirectory))
             _ = Directory.CreateDirectory(duplicateDirectory);
-        string duplicateFile = Path.Combine(duplicateDirectory, Path.GetFileName(reportFullPath));
-        File.Copy(reportFullPath, duplicateFile, overwrite: true);
+        string duplicateFile = Path.Combine(duplicateDirectory, $"{Path.GetFileName(reportFullPath)}.xml");
+        string xml = ProcessDataStandardFormat.GetXml(processDataStandardFormat);
+        File.WriteAllText(duplicateFile, xml);
         WaitForFileConsumption(dateTime, descriptions, isDummyRun, successDirectory, duplicateDirectory, collection, duplicateFile);
     }
 
@@ -126,8 +127,8 @@ public class FileRead : Shared.FileRead, IFileRead
         List descriptions = GetDuplicatorDescriptions(jsonElements);
         Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
         if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
-            FileCopy(reportFullPath, dateTime, descriptions);
-        results = new Tuple>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics[0]), tests, jsonElements, new List());
+            WriteFile(reportFullPath, dateTime, processDataStandardFormat, descriptions);
+        results = new Tuple>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics), tests, jsonElements, new List());
         return results;
     }
 
diff --git a/Adaptation/FileHandlers/MoveMatchingFiles/FileRead.cs b/Adaptation/FileHandlers/MoveMatchingFiles/FileRead.cs
index 2ee4e12..e04cef5 100644
--- a/Adaptation/FileHandlers/MoveMatchingFiles/FileRead.cs
+++ b/Adaptation/FileHandlers/MoveMatchingFiles/FileRead.cs
@@ -161,46 +161,6 @@ public class FileRead : Shared.FileRead, IFileRead
         return results;
     }
 
-    private static ProcessDataStandardFormatMapping GetProcessDataStandardFormatMapping(string processDataStandardFormatMappingOldColumnNames, string processDataStandardFormatMappingNewColumnNames, string processDataStandardFormatMappingColumnIndices)
-    {
-        ProcessDataStandardFormatMapping result;
-        string[] segmentsB;
-        List distinct = new();
-        Dictionary keyValuePairs = new();
-        string args4 = "Time,Test,Count,MesEntity,HeaderUniqueId,UniqueId,Id,Recipe,Date,AreaDeltaFromLastRun,GLimit,HGCV1";
-        string args5 = "Nine10mmEdgeMean,Nine4mmEdgeMean,NineCriticalPointsAverage,NineCriticalPointsPhaseAngleAverage,NineCriticalPointsStdDev,NineEdgeMeanDelta,NineMean,NineResRangePercent,AreaDeltaFromLastRun,Variation,Percentage HgCV 4PP Delta,HGCV1";
-        string args6 = "RhoAvg01,RhoAvg02,RhoAvg03,RhoAvg04,RhoAvg05,RhoAvg06,RhoAvg07,RhoAvg08,RhoAvg09,HGCV1";
-        string args7 = "FlatZMean|MeanFlatZ,GradeMean|MeanGrade,NAvgMean|MeanNAvg,NslMean|MeanNsl,PhaseMean|MeanPhase,RhoAvgMean|MeanRhoAvg,RhoslMean|MeanRhosl,RsMean|MeanRs,VdMean|MeanVd,FlatZRadialGradient|RadialGradientFlatZ,GradeRadialGradient|RadialGradientGrade,NAvgRadialGradient|RadialGradientNAvg,NslRadialGradient|RadialGradientNsl,PhaseRadialGradient|RadialGradientPhase,RhoAvgRadialGradient|RadialGradientRhoAvg,RhoslRadialGradient|RadialGradientRhosl,RsRadialGradient|RadialGradientRs,VdRadialGradient|RadialGradientVd,FlatZStdDev|StandardDeviationPercentageFlatZ,GradeStdDev|StandardDeviationPercentageGrade,NAvgStdDev|StandardDeviationPercentageNAvg,NslStdDev|StandardDeviationPercentageNsl,PhaseStdDev|StandardDeviationPercentagePhase,RhoAvgStdDev|StandardDeviationPercentageRhoAvg,RhoslStdDev|StandardDeviationPercentageRhosl,RsStdDev|StandardDeviationPercentageRs,VdStdDev|StandardDeviationPercentageVd,|HGCV1";
-        // string args8 = "Time,A_LOGISTICS,B_LOGISTICS,Test,Count,Index,MesEntity,Date,Employee,Lot,PSN,Reactor,Recipe,Area,Folder,HeaderUniqueId,Id,Layer,Model,Pattern,Phase,Plan,RampRate,RDS,SetupFile,StartVoltage,StopVoltage,UniqueId,Wafer,WaferSize,Zone,Ccomp,CondType,FlatZ,FlatZMean,FlatZRadialGradient,FlatZStdDev,GLimit,Grade,GradeMean,GradeRadialGradient,GradeStdDev,NAvg,NAvgMean,NAvgRadialGradient,NAvgStdDev,Nsl,NslMean,NslRadialGradient,NslStdDev,PhaseMean,PhaseRadialGradient,PhaseStdDev,RhoAvg,RhoAvgMean,RhoAvgRadialGradient,RhoAvgStdDev,RhoMethod,Rhosl,RhoslMean,RhoslRadialGradient,RhoslStdDev,RsMean,RsRadialGradient,RsStdDev,Vd,VdMean,VdRadialGradient,VdStdDev,Variation,AreaDeltaFromLastRun,Nine10mmEdgeMean,Nine4mmEdgeMean,NineCriticalPointsAverage,NineCriticalPointsPhaseAngleAverage,NineCriticalPointsStdDev,NineEdgeMeanDelta,NineMean,NineResRangePercent,RhoAvg01,RhoAvg02,RhoAvg03,RhoAvg04,RhoAvg05,RhoAvg06,RhoAvg07,RhoAvg08,RhoAvg09";
-        // string args9 = "Time,A_LOGISTICS,B_LOGISTICS,Index,Operator,StartVoltage,Wafer,StopVoltage,Lot,RampRate,Plan,GLimit,Date,Time,SetupFile,WaferSize,Folder,Ccomp,Pattern,Area,CondType,RhoMethod,Model,MeanNAvg,MeanNsl,MeanVd,MeanFlatZ,MeanRhoAvg,MeanRhosl,MeanPhase,MeanGrade,MeanRs,StandardDeviationPercentageNAvg,StandardDeviationPercentageNsl,StandardDeviationPercentageVd,StandardDeviationPercentageFlatZ,StandardDeviationPercentageRhoAvg,StandardDeviationPercentageRhosl,StandardDeviationPercentagePhase,StandardDeviationPercentageGrade,StandardDeviationPercentageRs,RadialGradientNAvg,RadialGradientNsl,RadialGradientVd,RadialGradientFlatZ,RadialGradientRhoAvg,RadialGradientRhosl,RadialGradientPhase,RadialGradientGrade,RadialGradientRs,Site,X,Y,NAvg,RhoAvg,Nsl,Rhosl,Vd,Phase,FlatZ,Grade,XLeft,XRight,BottomY,TopY,RDS,PSN,Reactor,Layer,Zone,Employee,InferredLot,Nine10mmEdgeMean,Nine4mmEdgeMean,NineCriticalPointsAverage,NineCriticalPointsPhaseAngleAverage,NineCriticalPointsStdDev,NineEdgeMeanDelta,NineMean,NineResRangePercent,AreaDeltaFromLastRun,Variation,Percentage HgCV 4PP Delta,RhoAvg01,RhoAvg02,RhoAvg03,RhoAvg04,RhoAvg05,RhoAvg06,RhoAvg07,RhoAvg08,RhoAvg09";
-        // string args10 = "0,1,2,-1,-1,3,-1,12,70,8,66,67,-1,19,16,-1,-1,68,22,18,58,10,9,65,14,5,7,-1,6,15,69,17,20,59,26,44,35,11,60,30,48,39,53,23,41,32,55,24,42,33,29,47,38,54,27,45,36,21,56,28,46,37,31,49,40,57,25,43,34,81,80,72,73,74,75,76,77,78,79,83,84,85,86,87,88,89,90,91";
-        string[] segments = args7.Split(',');
-        ReadOnlyCollection ignoreColumns = new(args4.Split(','));
-        ReadOnlyCollection backfillColumns = new(args5.Split(','));
-        ReadOnlyCollection indexOnlyColumns = new(args6.Split(','));
-        ReadOnlyCollection newColumnNames = new(processDataStandardFormatMappingNewColumnNames.Split(','));
-        ReadOnlyCollection oldColumnNames = new(processDataStandardFormatMappingOldColumnNames.Split(','));
-        ReadOnlyCollection columnIndices = new(processDataStandardFormatMappingColumnIndices.Split(',').Select(int.Parse).ToArray());
-        foreach (string segment in segments)
-        {
-            segmentsB = segment.Split('|');
-            if (segmentsB.Length != 2)
-                continue;
-            if (distinct.Contains(segmentsB[0]))
-                continue;
-            distinct.Add(segmentsB[0]);
-            keyValuePairs.Add(segmentsB[0], segmentsB[1]);
-        }
-        result = new(backfillColumns: backfillColumns,
-                     columnIndices: columnIndices,
-                     newColumnNames: newColumnNames,
-                     ignoreColumns: ignoreColumns,
-                     indexOnlyColumns: indexOnlyColumns,
-                     keyValuePairs: new(keyValuePairs),
-                     oldColumnNames: oldColumnNames);
-        return result;
-    }
-
     private static ReadOnlyCollection GetPreWithCollection(ReadOnlyCollection preCollection)
     {
         List results = new();
@@ -305,8 +265,13 @@ public class FileRead : Shared.FileRead, IFileRead
                 continue;
             if (!_StaticRuns.TryGetValue(_Logistics.Sequence, out List? wsResults))
                 wsResults = null;
-            ProcessDataStandardFormat.Write(preWith.CheckFile, processDataStandardFormat, wsResults);
-            File.Delete(preWith.MatchingFile);
+            if (processDataStandardFormat.InputPDSF is null)
+                File.Move(preWith.MatchingFile, preWith.CheckFile);
+            else
+            {
+                ProcessDataStandardFormat.Write(preWith.CheckFile, processDataStandardFormat, wsResults);
+                File.Delete(preWith.MatchingFile);
+            }
             if (Directory.Exists(preWith.NoWaitDirectory))
             {
                 post = new(preWith.CheckFile, preWith.ErrFile);
diff --git a/Adaptation/FileHandlers/OpenInsight/FileRead.cs b/Adaptation/FileHandlers/OpenInsight/FileRead.cs
index 458f72a..c48f085 100644
--- a/Adaptation/FileHandlers/OpenInsight/FileRead.cs
+++ b/Adaptation/FileHandlers/OpenInsight/FileRead.cs
@@ -110,7 +110,7 @@ public class FileRead : Shared.FileRead, IFileRead
         return results;
     }
 
-    private void SaveOpenInsightFile(string reportFullPath, DateTime dateTime, ProcessDataStandardFormat processDataStandardFormat, List descriptions, Test[] tests)
+    private void SaveOpenInsightFile(string reportFullPath, DateTime dateTime, List descriptions, Test[] tests)
     {
         string duplicateFile;
         bool isDummyRun = false;
@@ -119,15 +119,6 @@ public class FileRead : Shared.FileRead, IFileRead
         if (!Directory.Exists(duplicateDirectory))
             _ = Directory.CreateDirectory(duplicateDirectory);
         string successDirectory = _FileConnectorConfiguration.AlternateTargetFolder;
-        if (!Directory.Exists(Path.Combine(duplicateDirectory, "1")))
-        {
-            string parentParent = GetParentParent(_FileConnectorConfiguration.SourceFileLocation);
-            if (parentParent.Contains(_CellInstanceName))
-                parentParent = Path.GetDirectoryName(parentParent);
-            duplicateDirectory = Path.Combine(parentParent, "Data");
-            if (!Directory.Exists(duplicateDirectory))
-                _ = Directory.CreateDirectory(duplicateDirectory);
-        }
         if (descriptions.Count == 0 || tests.Length == 0)
             duplicateFile = Path.Combine(duplicateDirectory, Path.GetFileName(reportFullPath));
         else
@@ -152,7 +143,7 @@ public class FileRead : Shared.FileRead, IFileRead
             else
                 duplicateFile = Path.Combine(duplicateDirectory, $"{$"Viewer {subgroupId}".TrimEnd()} {fileName.Replace("Viewer", string.Empty)}");
             string weekOfYear = _Calendar.GetWeekOfYear(_Logistics.DateTimeFromSequence, CalendarWeekRule.FirstDay, DayOfWeek.Sunday).ToString("00");
-            FromIQS.Save(_OpenInsightApiECDirectory, _Logistics, reportFullPath, processDataStandardFormat, descriptions.First(), subgroupId, weekOfYear);
+            FromIQS.Save(_OpenInsightApiECDirectory, _Logistics, reportFullPath, descriptions.First(), subgroupId, weekOfYear);
         }
         if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
         {
@@ -172,8 +163,8 @@ public class FileRead : Shared.FileRead, IFileRead
         List descriptions = json.ProcessData.GetDescriptions(jsonElements);
         Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
         if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
-            SaveOpenInsightFile(reportFullPath, dateTime, processDataStandardFormat, descriptions, tests);
-        results = new Tuple>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics[0]), tests, jsonElements, new List());
+            SaveOpenInsightFile(reportFullPath, dateTime, descriptions, tests);
+        results = new Tuple>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics), tests, jsonElements, new List());
         return results;
     }
 
diff --git a/Adaptation/FileHandlers/OpenInsight/FromIQS.cs b/Adaptation/FileHandlers/OpenInsight/FromIQS.cs
index b2eaad3..dcf49f0 100644
--- a/Adaptation/FileHandlers/OpenInsight/FromIQS.cs
+++ b/Adaptation/FileHandlers/OpenInsight/FromIQS.cs
@@ -324,74 +324,18 @@ public class FromIQS
         return new(result, count, commandText);
     }
 
-    private static string GetJson(Logistics logistics, ProcessDataStandardFormat processDataStandardFormat, json.Description description)
+    internal static void Save(string openInsightApiECDirectory, Logistics logistics, string reportFullPath, json.Description description, long? subGroupId, string weekOfYear)
     {
-        string result;
-        StringBuilder stringBuilder = new();
-        var @object = new
-        {
-            description.MesEntity,
-            description.Employee,
-            description.Layer,
-            description.PSN,
-            description.RDS,
-            description.Reactor,
-            description.Recipe,
-            description.Zone,
-            logistics.DateTimeFromSequence.Ticks
-        };
-        string[] pair;
-        string safeValue;
-        string[] segments;
-        string serializerValue;
-        foreach (string line in processDataStandardFormat.Logistics)
-        {
-            segments = line.Split('\t');
-            if (segments.Length < 2)
-                continue;
-            segments = segments[1].Split(';');
-            _ = stringBuilder.Append('{');
-            foreach (string segment in segments)
-            {
-                pair = segment.Split('=');
-                if (pair.Length != 2 || pair[0].Length < 3)
-                    continue;
-                serializerValue = JsonSerializer.Serialize(pair[1]);
-                safeValue = serializerValue.Substring(1, serializerValue.Length - 2);
-                _ = stringBuilder.Append('"').Append(pair[0].Substring(2)).Append('"').Append(':').Append('"').Append(safeValue).Append('"').Append(',');
-            }
-            if (stringBuilder.Length > 0)
-                _ = stringBuilder.Remove(stringBuilder.Length - 1, 1);
-            _ = stringBuilder.Append('}').Append(',');
-        }
-        if (stringBuilder.Length > 0)
-            _ = stringBuilder.Remove(stringBuilder.Length - 1, 1);
-        _ = stringBuilder.Append(']').Append('}');
-        _ = stringBuilder.Insert(0, ",\"Logistics\":[");
-        string json = JsonSerializer.Serialize(@object);
-        _ = stringBuilder.Insert(0, json.Substring(0, json.Length - 1));
-        JsonElement? jsonElement = JsonSerializer.Deserialize(stringBuilder.ToString());
-        result = jsonElement is null ? "{}" : JsonSerializer.Serialize(jsonElement, new JsonSerializerOptions { WriteIndented = true });
-        return result;
-    }
-
-    internal static void Save(string openInsightApiECDirectory, Logistics logistics, string reportFullPath, ProcessDataStandardFormat processDataStandardFormat, json.Description description, long? subGroupId, string weekOfYear)
-    {
-        string checkFile;
         string fileName = Path.GetFileName(reportFullPath);
-        string json = GetJson(logistics, processDataStandardFormat, description);
         string? ecPathRoot = Path.GetPathRoot(openInsightApiECDirectory);
         bool ecExists = ecPathRoot is not null && Directory.Exists(ecPathRoot);
         string weekYear = $"{logistics.DateTimeFromSequence:yyyy}_Week_{weekOfYear}";
         string ecDirectory = Path.Combine(openInsightApiECDirectory, weekYear, $"-{description.PSN}", $"-{description.Reactor}", $"-{description.RDS}", $"-{subGroupId}");
         if (ecExists && !Directory.Exists(ecDirectory))
             _ = Directory.CreateDirectory(ecDirectory);
-        checkFile = Path.Combine(ecDirectory, fileName);
+        string checkFile = Path.Combine(ecDirectory, fileName);
         if (ecExists && !File.Exists(checkFile))
             File.Copy(reportFullPath, checkFile);
-        checkFile = Path.Combine(ecDirectory, $"{logistics.DateTimeFromSequence.Ticks}.json");
-        if (ecExists && !File.Exists(checkFile))
-            File.WriteAllText(checkFile, json);
     }
 
     private static string GetCommandText(string[] iqsCopyValues)
diff --git a/Adaptation/FileHandlers/OpenInsightMetrologyViewer/FileRead.cs b/Adaptation/FileHandlers/OpenInsightMetrologyViewer/FileRead.cs
index 462a9c6..8bde7d1 100644
--- a/Adaptation/FileHandlers/OpenInsightMetrologyViewer/FileRead.cs
+++ b/Adaptation/FileHandlers/OpenInsightMetrologyViewer/FileRead.cs
@@ -110,10 +110,10 @@ public class FileRead : Shared.FileRead, IFileRead
         return results;
     }
 
-    private void SendData(string reportFullPath, DateTime dateTime, List descriptions)
+    private void SendData(string reportFullPath, DateTime dateTime, JsonElement[] jsonElements, List descriptions)
     {
         string checkDirectory;
-        WSRequest wsRequest = new(this, _Logistics, descriptions);
+        WSRequest wsRequest = new(this, _Logistics, jsonElements, descriptions);
         int weekOfYear = _Calendar.GetWeekOfYear(dateTime, CalendarWeekRule.FirstDay, DayOfWeek.Sunday);
         string directory = Path.Combine(_OpenInsightMetrologyViewerFileShare, dateTime.Year.ToString(), $"WW{weekOfYear:00}");
         checkDirectory = Path.Combine(directory, _Logistics.Sequence.ToString());
@@ -139,15 +139,16 @@ public class FileRead : Shared.FileRead, IFileRead
     private Tuple> GetExtractResult(string reportFullPath, DateTime dateTime)
     {
         Tuple> results;
-        ProcessDataStandardFormat processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath);
+        string[] lines = File.ReadAllLines(reportFullPath);
+        ProcessDataStandardFormat processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath, lines);
         _Logistics = new Logistics(reportFullPath, processDataStandardFormat);
         SetFileParameterLotIDToLogisticsMID();
-        JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(processDataStandardFormat);
+        JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(reportFullPath, lines, processDataStandardFormat);
         List descriptions = json.ProcessData.GetDescriptions(jsonElements);
         Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
         if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
-            SendData(reportFullPath, dateTime, descriptions);
-        results = new Tuple>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics[0]), tests, jsonElements, new List());
+            SendData(reportFullPath, dateTime, jsonElements, descriptions);
+        results = new Tuple>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics), tests, jsonElements, new List());
         return results;
     }
 
diff --git a/Adaptation/FileHandlers/OpenInsightMetrologyViewer/WSRequest.cs b/Adaptation/FileHandlers/OpenInsightMetrologyViewer/WSRequest.cs
index 7f54224..79ec9c2 100644
--- a/Adaptation/FileHandlers/OpenInsightMetrologyViewer/WSRequest.cs
+++ b/Adaptation/FileHandlers/OpenInsightMetrologyViewer/WSRequest.cs
@@ -5,6 +5,7 @@ using System;
 using System.Collections.Generic;
 using System.IO;
 using System.Linq;
+using System.Text.Json;
 
 namespace Adaptation.FileHandlers.OpenInsightMetrologyViewer;
 
@@ -33,7 +34,7 @@ public class WSRequest
     [Obsolete("For json")] public WSRequest() { }
 
 #pragma warning disable IDE0060
-    internal WSRequest(IFileRead fileRead, Logistics logistics, List descriptions, string processDataStandardFormat = null)
+    internal WSRequest(IFileRead fileRead, Logistics logistics, JsonElement[] jsonElements, List descriptions, string processDataStandardFormat = null)
 #pragma warning restore IDE0060
     {
         Id = -1;
@@ -76,14 +77,14 @@ public class WSRequest
             throw new Exception();
     }
 
-    internal static long GetHeaderId(IFileRead fileRead, Logistics logistics, string openInsightMetrologyViewerAPI, string openInsightMetrologyViewerFileShare, int weekOfYear, WS.Results results, List descriptions)
+    internal static long GetHeaderId(IFileRead fileRead, Logistics logistics, string openInsightMetrologyViewerAPI, string openInsightMetrologyViewerFileShare, int weekOfYear, WS.Results results, JsonElement[] jsonElements, List descriptions)
     {
         long result;
         if (results is not null && results.HeaderId is not null)
             result = results.HeaderId.Value;
         else
         {
-            WSRequest wsRequest = new(fileRead, logistics, descriptions);
+            WSRequest wsRequest = new(fileRead, logistics, jsonElements, descriptions);
             string directory = Path.Combine(openInsightMetrologyViewerFileShare, logistics.DateTimeFromSequence.Year.ToString(), $"WW{weekOfYear:00}");
             (_, WS.Results wsResults) = WS.SendData(openInsightMetrologyViewerAPI, logistics.Sequence, directory, wsRequest);
             if (wsResults.Success is null || !wsResults.Success.Value)
diff --git a/Adaptation/FileHandlers/OpenInsightMetrologyViewerAttachments/FileRead.cs b/Adaptation/FileHandlers/OpenInsightMetrologyViewerAttachments/FileRead.cs
index 22c9afa..c3e8f2f 100644
--- a/Adaptation/FileHandlers/OpenInsightMetrologyViewerAttachments/FileRead.cs
+++ b/Adaptation/FileHandlers/OpenInsightMetrologyViewerAttachments/FileRead.cs
@@ -135,7 +135,7 @@ public class FileRead : Shared.FileRead, IFileRead
         return result;
     }
 
-    private void PostOpenInsightMetrologyViewerAttachments(List descriptions)
+    private void PostOpenInsightMetrologyViewerAttachments(JsonElement[] jsonElements, List descriptions)
     {
         Shared.Metrology.WS.Results? results;
         string jobIdDirectory = Path.Combine(Path.GetDirectoryName(_FileConnectorConfiguration.AlternateTargetFolder) ?? throw new Exception(), _Logistics.JobID);
@@ -151,7 +151,7 @@ public class FileRead : Shared.FileRead, IFileRead
             results = wsResults[0];
         }
         int weekOfYear = _Calendar.GetWeekOfYear(_Logistics.DateTimeFromSequence, CalendarWeekRule.FirstDay, DayOfWeek.Sunday);
-        long headerId = !_IsEAFHosted ? -1 : OpenInsightMetrologyViewer.WSRequest.GetHeaderId(this, _Logistics, _OpenInsightMetrologyViewerAPI, _OpenInsightMetrologyViewerFileShare, weekOfYear, results, descriptions);
+        long headerId = !_IsEAFHosted ? -1 : OpenInsightMetrologyViewer.WSRequest.GetHeaderId(this, _Logistics, _OpenInsightMetrologyViewerAPI, _OpenInsightMetrologyViewerFileShare, weekOfYear, results, jsonElements, descriptions);
         string? headerIdDirectory = GetHeaderIdDirectory(headerId);
         if (string.IsNullOrEmpty(headerIdDirectory))
             throw new Exception($"Didn't find header id directory <{headerId}>");
@@ -163,15 +163,16 @@ public class FileRead : Shared.FileRead, IFileRead
 #pragma warning restore IDE0060
     {
         Tuple> results;
-        ProcessDataStandardFormat processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath);
+        string[] lines = File.ReadAllLines(reportFullPath);
+        ProcessDataStandardFormat processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath, lines);
         _Logistics = new Logistics(reportFullPath, processDataStandardFormat);
         SetFileParameterLotIDToLogisticsMID();
-        JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(processDataStandardFormat);
+        JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(reportFullPath, lines, processDataStandardFormat);
         List descriptions = json.ProcessData.GetDescriptions(jsonElements);
         Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
         if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
-            PostOpenInsightMetrologyViewerAttachments(descriptions);
-        results = new Tuple>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics[0]), tests, jsonElements, new List());
+            PostOpenInsightMetrologyViewerAttachments(jsonElements, descriptions);
+        results = new Tuple>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics), tests, jsonElements, new List());
         return results;
     }
 
diff --git a/Adaptation/FileHandlers/Processed/FileRead.cs b/Adaptation/FileHandlers/Processed/FileRead.cs
index fb77b2d..e56de3a 100644
--- a/Adaptation/FileHandlers/Processed/FileRead.cs
+++ b/Adaptation/FileHandlers/Processed/FileRead.cs
@@ -109,7 +109,7 @@ public class FileRead : Shared.FileRead, IFileRead
     }
 
 #pragma warning disable IDE0060
-    private void DirectoryMove(string reportFullPath, DateTime dateTime, List descriptions)
+    private void DirectoryMove(string reportFullPath, DateTime dateTime, JsonElement[] jsonElements, List descriptions)
 #pragma warning restore IDE0060
     {
         FileInfo fileInfo = new(reportFullPath);
@@ -122,7 +122,7 @@ public class FileRead : Shared.FileRead, IFileRead
             throw new Exception("Didn't find directory by logistics sequence");
         if (fileInfo.Exists && fileInfo.LastWriteTime < fileInfo.CreationTime)
             File.SetLastWriteTime(reportFullPath, fileInfo.CreationTime);
-        OpenInsightMetrologyViewer.WSRequest wsRequest = new(this, _Logistics, descriptions);
+        OpenInsightMetrologyViewer.WSRequest wsRequest = new(this, _Logistics, jsonElements, descriptions);
         JsonSerializerOptions jsonSerializerOptions = new() { WriteIndented = true };
         string json = JsonSerializer.Serialize(wsRequest, wsRequest.GetType(), jsonSerializerOptions);
         string directoryName = $"{Path.GetFileName(matchDirectories[0]).Split(new string[] { logisticsSequence }, StringSplitOptions.None)[0]}{_Logistics.DateTimeFromSequence:yyyy-MM-dd_hh;mm_tt_}{DateTime.Now.Ticks - _Logistics.Sequence}";
@@ -166,23 +166,24 @@ public class FileRead : Shared.FileRead, IFileRead
     private Tuple> GetExtractResult(string reportFullPath, DateTime dateTime)
     {
         Tuple> results;
-        ProcessDataStandardFormat processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath);
+        string[] lines = File.ReadAllLines(reportFullPath);
+        ProcessDataStandardFormat processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath, lines);
         _Logistics = new Logistics(reportFullPath, processDataStandardFormat);
         SetFileParameterLotIDToLogisticsMID();
-        JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(processDataStandardFormat);
+        JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(reportFullPath, lines, processDataStandardFormat);
         List descriptions = json.ProcessData.GetDescriptions(jsonElements);
         Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
-        results = new Tuple>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics[0]), tests, jsonElements, new List());
+        results = new Tuple>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics), tests, jsonElements, new List());
         if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
-            DirectoryMove(reportFullPath, dateTime, descriptions);
+            DirectoryMove(reportFullPath, dateTime, jsonElements, descriptions);
         else if (!_IsEAFHosted)
         {
-            OpenInsightMetrologyViewer.WSRequest wsRequest = new(this, _Logistics, descriptions);
+            OpenInsightMetrologyViewer.WSRequest wsRequest = new(this, _Logistics, jsonElements, descriptions);
             JsonSerializerOptions jsonSerializerOptions = new() { WriteIndented = true };
-            string json = JsonSerializer.Serialize(wsRequest, wsRequest.GetType(), jsonSerializerOptions);
+            string check = JsonSerializer.Serialize(wsRequest, wsRequest.GetType(), jsonSerializerOptions);
             string jsonFileName = Path.ChangeExtension(reportFullPath, ".json");
             string historicalText = File.ReadAllText(jsonFileName);
-            if (json != historicalText)
+            if (check != historicalText)
                 throw new Exception("File doesn't match historical!");
         }
         return results;
diff --git a/Adaptation/FileHandlers/SPaCe/FileRead.cs b/Adaptation/FileHandlers/SPaCe/FileRead.cs
index 2e0f55a..e258bd6 100644
--- a/Adaptation/FileHandlers/SPaCe/FileRead.cs
+++ b/Adaptation/FileHandlers/SPaCe/FileRead.cs
@@ -125,7 +125,7 @@ public class FileRead : Shared.FileRead, IFileRead
         Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
         if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
             FileCopy(reportFullPath, dateTime, descriptions);
-        results = new Tuple>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics[0]), tests, jsonElements, new List());
+        results = new Tuple>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics), tests, jsonElements, new List());
         return results;
     }
 
diff --git a/Adaptation/FileHandlers/csv/Description.cs b/Adaptation/FileHandlers/csv/Description.cs
index 3e7b724..3964348 100644
--- a/Adaptation/FileHandlers/csv/Description.cs
+++ b/Adaptation/FileHandlers/csv/Description.cs
@@ -4,6 +4,7 @@ using System;
 using System.Collections.Generic;
 using System.Linq;
 using System.Text.Json;
+using System.Text.Json.Serialization;
 
 namespace Adaptation.FileHandlers.csv;
 
@@ -59,6 +60,7 @@ public class Description : IDescription, Shared.Properties.IDescription
         List results = new()
         {
             nameof(RDS),
+            nameof(Reactor),
         };
         return results;
     }
@@ -185,6 +187,7 @@ public class Description : IDescription, Shared.Properties.IDescription
             MID = logistics.MID,
             //
             RDS = nameof(RDS),
+            Reactor = nameof(Reactor),
             //
             Path = nameof(Path),
         };
@@ -193,4 +196,16 @@ public class Description : IDescription, Shared.Properties.IDescription
 
     internal static string GetDateFormat() => "MM/dd/yyyy hh:mm:ss tt";
 
+}
+
+[JsonSourceGenerationOptions(WriteIndented = true, NumberHandling = JsonNumberHandling.AllowReadingFromString | JsonNumberHandling.WriteAsString)]
+[JsonSerializable(typeof(Description))]
+internal partial class CsvDescriptionSourceGenerationContext : JsonSerializerContext
+{
+}
+
+[JsonSourceGenerationOptions(WriteIndented = true, NumberHandling = JsonNumberHandling.AllowReadingFromString | JsonNumberHandling.WriteAsString)]
+[JsonSerializable(typeof(Description[]))]
+internal partial class CsvDescriptionArraySourceGenerationContext : JsonSerializerContext
+{
 }
\ No newline at end of file
diff --git a/Adaptation/FileHandlers/csv/ProcessData.cs b/Adaptation/FileHandlers/csv/ProcessData.cs
index 55d5860..b8e0645 100644
--- a/Adaptation/FileHandlers/csv/ProcessData.cs
+++ b/Adaptation/FileHandlers/csv/ProcessData.cs
@@ -118,12 +118,11 @@ public partial class ProcessData : IProcessData
     {
         List results = new();
         Description? description;
-        JsonSerializerOptions jsonSerializerOptions = new() { NumberHandling = JsonNumberHandling.AllowReadingFromString | JsonNumberHandling.WriteAsString };
         foreach (JsonElement jsonElement in jsonElements)
         {
             if (jsonElement.ValueKind != JsonValueKind.Object)
                 throw new Exception();
-            description = JsonSerializer.Deserialize(jsonElement.ToString(), jsonSerializerOptions);
+            description = JsonSerializer.Deserialize(jsonElement.ToString(), CsvDescriptionSourceGenerationContext.Default.Description);
             if (description is null)
                 continue;
             results.Add(description);
diff --git a/Adaptation/FileHandlers/json/Description.cs b/Adaptation/FileHandlers/json/Description.cs
index 9c949b7..8a7f349 100644
--- a/Adaptation/FileHandlers/json/Description.cs
+++ b/Adaptation/FileHandlers/json/Description.cs
@@ -4,6 +4,7 @@ using System;
 using System.Collections.Generic;
 using System.Linq;
 using System.Text.Json;
+using System.Text.Json.Serialization;
 
 namespace Adaptation.FileHandlers.json;
 
@@ -252,4 +253,16 @@ public class Description : IDescription, Shared.Properties.IDescription
 
     internal static string GetDateFormat() => "MM/dd/yyyy hh:mm:ss tt";
 
+}
+
+[JsonSourceGenerationOptions(WriteIndented = true, NumberHandling = JsonNumberHandling.AllowReadingFromString | JsonNumberHandling.WriteAsString)]
+[JsonSerializable(typeof(Description))]
+internal partial class JsonDescriptionSourceGenerationContext : JsonSerializerContext
+{
+}
+
+[JsonSourceGenerationOptions(WriteIndented = true, NumberHandling = JsonNumberHandling.AllowReadingFromString | JsonNumberHandling.WriteAsString)]
+[JsonSerializable(typeof(Description[]))]
+internal partial class JsonDescriptionArraySourceGenerationContext : JsonSerializerContext
+{
 }
\ No newline at end of file
diff --git a/Adaptation/FileHandlers/json/ProcessData.cs b/Adaptation/FileHandlers/json/ProcessData.cs
index 665bd8d..e10ae20 100644
--- a/Adaptation/FileHandlers/json/ProcessData.cs
+++ b/Adaptation/FileHandlers/json/ProcessData.cs
@@ -291,12 +291,11 @@ public partial class ProcessData : IProcessData
     {
         List results = new();
         Description? description;
-        JsonSerializerOptions jsonSerializerOptions = new() { NumberHandling = JsonNumberHandling.AllowReadingFromString | JsonNumberHandling.WriteAsString };
         foreach (JsonElement jsonElement in jsonElements)
         {
             if (jsonElement.ValueKind != JsonValueKind.Object)
                 throw new Exception();
-            description = JsonSerializer.Deserialize(jsonElement.ToString(), jsonSerializerOptions);
+            description = JsonSerializer.Deserialize(jsonElement.ToString(), JsonDescriptionSourceGenerationContext.Default.Description);
             if (description is null)
                 continue;
             results.Add(description);
diff --git a/Adaptation/Infineon/Monitoring/MonA/MonIn.cs b/Adaptation/Infineon/Monitoring/MonA/MonIn.cs
index 7a8a711..fcd16ca 100644
--- a/Adaptation/Infineon/Monitoring/MonA/MonIn.cs
+++ b/Adaptation/Infineon/Monitoring/MonA/MonIn.cs
@@ -226,9 +226,9 @@ public class MonIn : IMonIn, IDisposable
     {
         StringBuilder stringBuilder = new();
         if (string.IsNullOrEmpty(subresource))
-            _ = stringBuilder.AppendFormat(_CultureInfo, "> {0} {1} \"{2}\" \"{3}\" {4} \n{5}", site.Trim(), timeStamp.HasValue ? GetDateTimeNowAsPosix(timeStamp.Value) : (object)"now", resource.Trim(), stateName.Trim(), state.Trim(), description.Trim());
+            _ = stringBuilder.AppendFormat(_CultureInfo, "> {0} {1} \"{2}\" \"{3}\" {4} \n{5}", site.Trim(), timeStamp.HasValue ? GetDateTimeNowAsPosix(timeStamp.Value) : "now", resource.Trim(), stateName.Trim(), state.Trim(), description.Trim());
         else
-            _ = stringBuilder.AppendFormat(_CultureInfo, "> {0} {1} \"{2}\" \"{3}\" \"{4}\" {5} \n{6}", site.Trim(), timeStamp.HasValue ? GetDateTimeNowAsPosix(timeStamp.Value) : (object)"now", resource.Trim(), subresource.Trim(), stateName.Trim(), state.Trim(), description.Trim());
+            _ = stringBuilder.AppendFormat(_CultureInfo, "> {0} {1} \"{2}\" \"{3}\" \"{4}\" {5} \n{6}", site.Trim(), timeStamp.HasValue ? GetDateTimeNowAsPosix(timeStamp.Value) : "now", resource.Trim(), subresource.Trim(), stateName.Trim(), state.Trim(), description.Trim());
         return stringBuilder.ToString();
     }
 
@@ -247,14 +247,14 @@ public class MonIn : IMonIn, IDisposable
         if (string.IsNullOrEmpty(subresource))
         {
             if (unit.Equals(string.Empty) && !interval.HasValue)
-                _ = stringBuilder.AppendFormat(_CultureInfo, "> {0} {1} \"{2}\" \"{3}\" {4} \n{5}", site.Trim(), timeStamp.HasValue ? GetDateTimeNowAsPosix(timeStamp.Value) : (object)"now", resource.Trim(), performanceName.Trim(), value, description.Trim());
+                _ = stringBuilder.AppendFormat(_CultureInfo, "> {0} {1} \"{2}\" \"{3}\" {4} \n{5}", site.Trim(), timeStamp.HasValue ? GetDateTimeNowAsPosix(timeStamp.Value) : "now", resource.Trim(), performanceName.Trim(), value, description.Trim());
             else
-                _ = stringBuilder.AppendFormat(_CultureInfo, "> {0} {1} \"{2}\" \"{3}\" {4} {5} {{interval={6}, unit={7}}}\n", site.Trim(), timeStamp.HasValue ? GetDateTimeNowAsPosix(timeStamp.Value) : (object)"now", resource.Trim(), performanceName.Trim(), value, description.Trim(), interval.HasValue ? interval.Value.ToString() : (object)string.Empty, unit.Trim());
+                _ = stringBuilder.AppendFormat(_CultureInfo, "> {0} {1} \"{2}\" \"{3}\" {4} {5} {{interval={6}, unit={7}}}\n", site.Trim(), timeStamp.HasValue ? GetDateTimeNowAsPosix(timeStamp.Value) : "now", resource.Trim(), performanceName.Trim(), value, description.Trim(), interval.HasValue ? interval.Value.ToString() : string.Empty, unit.Trim());
         }
         else if (unit.Equals(string.Empty) && !interval.HasValue)
-            _ = stringBuilder.AppendFormat(_CultureInfo, "> {0} {1} \"{2}\" \"{3}\" \"{4}\" {5} \n{6}", site.Trim(), timeStamp.HasValue ? GetDateTimeNowAsPosix(timeStamp.Value) : (object)"now", resource.Trim(), subresource.Trim(), performanceName.Trim(), value, description.Trim());
+            _ = stringBuilder.AppendFormat(_CultureInfo, "> {0} {1} \"{2}\" \"{3}\" \"{4}\" {5} \n{6}", site.Trim(), timeStamp.HasValue ? GetDateTimeNowAsPosix(timeStamp.Value) : "now", resource.Trim(), subresource.Trim(), performanceName.Trim(), value, description.Trim());
         else
-            _ = stringBuilder.AppendFormat(_CultureInfo, "> {0} {1} \"{2}\" \"{3}\" \"{4}\" {5} {6} {{interval={7}, unit={8}}}\n", site.Trim(), timeStamp.HasValue ? GetDateTimeNowAsPosix(timeStamp.Value) : (object)"now", resource.Trim(), subresource.Trim(), performanceName.Trim(), value, description.Trim(), interval.HasValue ? interval.Value.ToString() : (object)string.Empty, unit.Trim());
+            _ = stringBuilder.AppendFormat(_CultureInfo, "> {0} {1} \"{2}\" \"{3}\" \"{4}\" {5} {6} {{interval={7}, unit={8}}}\n", site.Trim(), timeStamp.HasValue ? GetDateTimeNowAsPosix(timeStamp.Value) : "now", resource.Trim(), subresource.Trim(), performanceName.Trim(), value, description.Trim(), interval.HasValue ? interval.Value.ToString() : string.Empty, unit.Trim());
         return stringBuilder.ToString();
     }
 
diff --git a/Adaptation/Shared/Duplicator/Description.cs b/Adaptation/Shared/Duplicator/Description.cs
index 964612e..d9bb3b8 100644
--- a/Adaptation/Shared/Duplicator/Description.cs
+++ b/Adaptation/Shared/Duplicator/Description.cs
@@ -3,6 +3,7 @@ using System;
 using System.Collections.Generic;
 using System.Linq;
 using System.Text.Json;
+using System.Text.Json.Serialization;
 
 namespace Adaptation.Shared.Duplicator;
 
@@ -178,4 +179,16 @@ public class Description : IDescription, Properties.IDescription
 
     internal static string GetDateFormat() => "MM/dd/yyyy hh:mm:ss tt";
 
+}
+
+[JsonSourceGenerationOptions(WriteIndented = true, NumberHandling = JsonNumberHandling.AllowReadingFromString | JsonNumberHandling.WriteAsString)]
+[JsonSerializable(typeof(Description))]
+internal partial class SharedDescriptionSourceGenerationContext : JsonSerializerContext
+{
+}
+
+[JsonSourceGenerationOptions(WriteIndented = true, NumberHandling = JsonNumberHandling.AllowReadingFromString | JsonNumberHandling.WriteAsString)]
+[JsonSerializable(typeof(Description[]))]
+internal partial class SharedDescriptionArraySourceGenerationContext : JsonSerializerContext
+{
 }
\ No newline at end of file
diff --git a/Adaptation/Shared/FileRead.cs b/Adaptation/Shared/FileRead.cs
index 0c8551c..af8cdbf 100644
--- a/Adaptation/Shared/FileRead.cs
+++ b/Adaptation/Shared/FileRead.cs
@@ -9,7 +9,6 @@ using System.IO;
 using System.Linq;
 using System.Text;
 using System.Text.Json;
-using System.Text.Json.Serialization;
 using System.Threading;
 
 namespace Adaptation.Shared;
@@ -551,12 +550,13 @@ public class FileRead : Properties.IFileRead
     {
         List results = new();
         Duplicator.Description description;
-        JsonSerializerOptions jsonSerializerOptions = new() { NumberHandling = JsonNumberHandling.AllowReadingFromString | JsonNumberHandling.WriteAsString };
         foreach (JsonElement jsonElement in jsonElements)
         {
             if (jsonElement.ValueKind != JsonValueKind.Object)
                 throw new Exception();
-            description = JsonSerializer.Deserialize(jsonElement.ToString(), jsonSerializerOptions);
+            description = JsonSerializer.Deserialize(jsonElement.ToString(), Duplicator.SharedDescriptionSourceGenerationContext.Default.Description);
+            if (description is null)
+                continue;
             results.Add(description);
         }
         return results;
@@ -769,17 +769,24 @@ public class FileRead : Properties.IFileRead
         else
         {
             string[] files;
-            string logisticsSequence = _Logistics.Sequence.ToString();
-            string[] directories = Directory.GetDirectories(jobIdDirectory, $"*{logisticsSequence}*", SearchOption.TopDirectoryOnly);
-            foreach (string directory in directories)
+            string[] directories;
+            string logisticsSequence;
+            for (int i = 0; i < 10; i++)
             {
-                files = Directory.GetFiles(directory, "*", SearchOption.TopDirectoryOnly);
-                if (files.Length == 0)
-                    continue;
-                results.Add(directory);
+                logisticsSequence = (_Logistics.Sequence + -i).ToString();
+                directories = Directory.GetDirectories(jobIdDirectory, $"*{logisticsSequence}*", SearchOption.TopDirectoryOnly);
+                foreach (string directory in directories)
+                {
+                    files = Directory.GetFiles(directory, "*", SearchOption.TopDirectoryOnly);
+                    if (files.Length == 0)
+                        continue;
+                    results.Add(directory);
+                }
+                if (results.Count == 1)
+                    break;
             }
         }
-        if ((results is null) || results.Count != 1)
+        if (results.Count != 1)
             throw new Exception("Didn't find directory by logistics sequence");
         return results.ToArray();
     }
diff --git a/Adaptation/Shared/ProcessDataStandardFormat.cs b/Adaptation/Shared/ProcessDataStandardFormat.cs
index a86241d..bc3c424 100644
--- a/Adaptation/Shared/ProcessDataStandardFormat.cs
+++ b/Adaptation/Shared/ProcessDataStandardFormat.cs
@@ -2,12 +2,14 @@ using Adaptation.Shared.Methods;
 using System;
 using System.Collections.Generic;
 using System.Collections.ObjectModel;
+using System.Diagnostics;
 using System.Globalization;
 using System.IO;
 using System.Linq;
 using System.Text;
 using System.Text.Json;
 using System.Text.Json.Serialization;
+using System.Text.RegularExpressions;
 
 namespace Adaptation.Shared;
 
@@ -136,6 +138,7 @@ internal class ProcessDataStandardFormat
     internal static ProcessDataStandardFormat GetProcessDataStandardFormat(string reportFullPath, string[]? lines = null, int columnsLine = 6)
     {
         ProcessDataStandardFormat result;
+        long? sequence;
         string segment;
         string[] segments;
         bool addToFooter = false;
@@ -184,15 +187,27 @@ internal class ProcessDataStandardFormat
                     break;
             }
         }
-        string? linesOne = lines.Length > 0 && body.Count == 0 && columns.Count == 0 ? lines[1] : null;
+        string? linesOne = lines.Length > 1 && body.Count == 0 && columns.Count == 0 ? lines[1] : null;
         logistics = GetLogistics(footer, linesOne: linesOne);
+        if (logistics.Count == 0)
+            sequence = null;
+        else
+        {
+            segments = logistics[0].Split(new string[] { "SEQUENCE=" }, StringSplitOptions.None);
+            sequence = segments.Length < 2 || !long.TryParse(segments[1].Split(';')[0], out long s) ? null : s;
+        }
+        if (sequence is null && !string.IsNullOrEmpty(reportFullPath))
+        {
+            FileInfo fileInfo = new(reportFullPath);
+            sequence = fileInfo.LastWriteTime.Ticks;
+        }
         result = new(body: body.AsReadOnly(),
                      columns: columns.AsReadOnly(),
                      footer: footer.AsReadOnly(),
                      header: header.AsReadOnly(),
                      inputPDSF: null,
                      logistics: logistics,
-                     sequence: null);
+                     sequence: sequence);
         return result;
     }
 
@@ -214,19 +229,19 @@ internal class ProcessDataStandardFormat
         return results.AsReadOnly();
     }
 
-    internal static ProcessDataStandardFormat GetProcessDataStandardFormat(string reportFullPath, ProcessDataStandardFormatMapping pdsfMapping)
+    internal static ProcessDataStandardFormat GetProcessDataStandardFormat(string reportFullPath, ProcessDataStandardFormatMapping processDataStandardFormatMapping)
     {
         ProcessDataStandardFormat result;
         const int columnsLine = 6;
         FileInfo fileInfo = new(reportFullPath);
         ProcessDataStandardFormat processDataStandardFormat = GetProcessDataStandardFormat(fileInfo.LastWriteTime, columnsLine, fileInfo.FullName, lines: null);
-        JsonElement[]? jsonElements = pdsfMapping.OldColumnNames.Count != pdsfMapping.ColumnIndices.Count ? null : GetFullArray(processDataStandardFormat);
+        JsonElement[]? jsonElements = processDataStandardFormatMapping.OldColumnNames.Count == 0 ? null : GetFullArray(processDataStandardFormat);
         JsonProperty[]? jsonProperties = jsonElements is null || jsonElements.Length == 0 ? null : jsonElements[0].EnumerateObject().ToArray();
-        if (jsonElements is null || jsonProperties is null || jsonProperties.Length != pdsfMapping.NewColumnNames.Count)
+        if (jsonElements is null || jsonProperties is null || jsonProperties.Length != processDataStandardFormatMapping.NewColumnNames.Count)
             result = processDataStandardFormat;
         else
         {
-            result = GetProcessDataStandardFormat(pdsfMapping, jsonElements, processDataStandardFormat);
+            result = GetProcessDataStandardFormat(processDataStandardFormatMapping, jsonElements, processDataStandardFormat);
             if (result.Sequence is null || result.Columns.Count == 0 || result.Body.Count == 0 || result.Logistics.Count == 0)
                 result = processDataStandardFormat;
         }
@@ -236,7 +251,7 @@ internal class ProcessDataStandardFormat
     private static ProcessDataStandardFormat GetProcessDataStandardFormat(DateTime lastWriteTime, int columnsLine, string path, string[]? lines)
     {
         ProcessDataStandardFormat result;
-        long sequence;
+        long? sequence;
         string[] segments;
         bool addToFooter = false;
         List body = new();
@@ -268,12 +283,13 @@ internal class ProcessDataStandardFormat
         }
         logistics = GetLogistics(footer, linesOne: null);
         if (logistics.Count == 0)
-            sequence = lastWriteTime.Ticks;
+            sequence = null;
         else
         {
             segments = logistics[0].Split(new string[] { "SEQUENCE=" }, StringSplitOptions.None);
-            sequence = segments.Length < 2 || !long.TryParse(segments[1].Split(';')[0], out long s) ? lastWriteTime.Ticks : s;
+            sequence = segments.Length < 2 || !long.TryParse(segments[1].Split(';')[0], out long s) ? null : s;
         }
+        sequence ??= lastWriteTime.Ticks;
         result = new(body: body.AsReadOnly(),
                      columns: new(columns),
                      footer: footer.AsReadOnly(),
@@ -302,7 +318,7 @@ internal class ProcessDataStandardFormat
                 segments = bodyLine.Split('\t').ToList();
                 for (int c = 0; c < segments.Count; c++)
                 {
-                    value = segments[c].Replace("\"", "\\\"").Replace("\\", "\\\\");
+                    value = segments[c].Replace("\\", "\\\\").Replace("\"", "\\\"");
                     _ = stringBuilder.Append('"').Append(processDataStandardFormat.Columns[c]).Append("\":\"").Append(value).Append("\",");
                 }
                 _ = stringBuilder.Remove(stringBuilder.Length - 1, 1);
@@ -321,12 +337,14 @@ internal class ProcessDataStandardFormat
         int column;
         string value;
         JsonProperty jsonProperty;
+        List<string> debug = new();
         List<string> values = new();
         List<string> results = new();
         JsonProperty[] jsonProperties;
         List<string> unknownColumns = new();
         for (int i = 0; i < jsonElements.Length; i++)
         {
+            debug.Clear();
             values.Clear();
             if (jsonElements[i].ValueKind != JsonValueKind.Object)
             {
@@ -340,16 +358,22 @@ internal class ProcessDataStandardFormat
             {
                 column = processDataStandardFormatMapping.ColumnIndices[c];
                 if (column == -1)
+                {
                     value = processDataStandardFormatMapping.OldColumnNames[c];
+                    debug.Add($"");
+                }
                 else
                 {
                     jsonProperty = jsonProperties[column];
                     value = jsonProperty.Value.ToString();
+                    debug.Add($"");
                 }
                 values.Add(value);
             }
             results.Add(string.Join("\t", values));
         }
+        if (Debugger.IsAttached)
+            File.WriteAllText("../../.txt", string.Join(Environment.NewLine, debug.OrderBy(l => l)));
         result = new(body: new(results),
                      columns: processDataStandardFormatMapping.OldColumnNames,
                      footer: processDataStandardFormat.Footer,
@@ -364,7 +388,6 @@ internal class ProcessDataStandardFormat
     {
         if (processDataStandardFormat.InputPDSF is null)
             throw new NullReferenceException(nameof(processDataStandardFormat.InputPDSF));
-#pragma warning disable CA1845, IDE0057
         string result;
         string line;
         string value;
@@ -378,19 +401,27 @@ internal class ProcessDataStandardFormat
                 break;
             for (int c = 0; c < segments.Length; c++)
             {
-                value = segments[c].Replace("\"", "\\\"").Replace("\\", "\\\\");
+                value = segments[c].Replace("\\", "\\\\").Replace("\"", "\\\"");
                 line += string.Concat('"', processDataStandardFormat.InputPDSF.Columns[c].Trim('"'), '"', ':', '"', value, '"', ',');
             }
             line = string.Concat(line.Substring(0, line.Length - 1), '}');
             lines.Add(line);
         }
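+        // Append the footer, parsed into flat and logistics key/value groups, as a "PDSF" object when it yields any data.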
+        string? json = null;
+        if (processDataStandardFormat.Footer is not null && processDataStandardFormat.Footer.Count > 0)
+        {
+            Dictionary<string, string> footerKeyValuePairs = GetFooterKeyValuePairs(processDataStandardFormat.Footer);
+            Dictionary<string, Dictionary<string, string>> logisticKeyValuePairs = GetLogisticKeyValuePairs(processDataStandardFormat.Footer, footerKeyValuePairs);
+            json = JsonSerializer.Serialize(logisticKeyValuePairs, DictionaryStringDictionaryStringStringSourceGenerationContext.Default.DictionaryStringDictionaryStringString);
+        }
+        string footerText = string.IsNullOrEmpty(json) || json == "{}" ? string.Empty : $",{Environment.NewLine}\"PDSF\":{Environment.NewLine}{json}";
         result = string.Concat(
             '{',
             Environment.NewLine,
             '"',
             "Count",
             '"',
-            ": ",            
+            ": ",
             processDataStandardFormat.Body.Count,
             ',',
             Environment.NewLine,
@@ -409,17 +440,95 @@ internal class ProcessDataStandardFormat
             '"',
             "Sequence",
             '"',
-            ": ",            
+            ": ",
             processDataStandardFormat.Sequence,
             Environment.NewLine,
+            footerText,
+            Environment.NewLine,
             '}');
         return result;
-#pragma warning restore CA1845, IDE0057
+    }
+
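+    // Collects simple two-segment footer lines (key and value separated by a tab) into a flat dictionary; values containing ';' and duplicate keys are skipped.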
+    private static Dictionary<string, string> GetFooterKeyValuePairs(ReadOnlyCollection<string> footerLines)
+    {
+        Dictionary<string, string> results = new();
+        string[] segments;
+        foreach (string footerLine in footerLines)
+        {
+            segments = footerLine.Split('\t');
+            if (segments.Length != 2 || string.IsNullOrEmpty(segments[1].Trim()))
+            {
+                continue;
+            }
+            if (segments[1].Contains(';'))
+            {
+                continue;
+            }
+            else
+            {
+                if (results.ContainsKey(segments[0]))
+                {
+                    continue;
+                }
+                results.Add(segments[0], segments[1]);
+            }
+        }
+        return results;
+    }
+
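+    // Expands footer lines whose value is a ';'-separated list of 'name=value' pairs into nested dictionaries keyed by the line's first segment,
+    // and stores the flat pairs from GetFooterKeyValuePairs under "Footer".
+    // e.g. a hypothetical footer line "LOGISTICS_1\tJOBID=123;MES_ENTITY=TOOL" would yield { "LOGISTICS_1": { "JOBID": "123", "MES_ENTITY": "TOOL" } }.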
+    private static Dictionary<string, Dictionary<string, string>> GetLogisticKeyValuePairs(ReadOnlyCollection<string> footerLines, Dictionary<string, string> footerKeyValuePairs)
+    {
+        Dictionary<string, Dictionary<string, string>> results = new();
+        string[] segments;
+        string[] subSegments;
+        string[] subSubSegments;
+        Dictionary<string, string>? keyValue;
+        results.Add("Footer", footerKeyValuePairs);
+        foreach (string footerLine in footerLines)
+        {
+            segments = footerLine.Split('\t');
+            if (segments.Length != 2 || string.IsNullOrEmpty(segments[1].Trim()))
+            {
+                continue;
+            }
+            if (!segments[1].Contains(';') || !segments[1].Contains('='))
+            {
+                continue;
+            }
+            else
+            {
+                subSegments = segments[1].Split(';');
+                if (subSegments.Length < 1)
+                {
+                    continue;
+                }
+                if (!results.TryGetValue(segments[0], out keyValue))
+                {
+                    results.Add(segments[0], new());
+                    if (!results.TryGetValue(segments[0], out keyValue))
+                    {
+                        throw new Exception();
+                    }
+                }
+                foreach (string segment in subSegments)
+                {
+                    subSubSegments = segment.Split('=');
+                    if (subSubSegments.Length != 2)
+                    {
+                        continue;
+                    }
+                    keyValue.Add(subSubSegments[0], subSubSegments[1]);
+                }
+            }
+        }
+        return results;
     }
 
     internal static void Write(string path, ProcessDataStandardFormat processDataStandardFormat, List? wsResults)
     {
         List<string> results = new();
+        if (processDataStandardFormat.InputPDSF is null)
+            throw new NullReferenceException(nameof(processDataStandardFormat.InputPDSF));
         if (processDataStandardFormat.Sequence is null)
             throw new NullReferenceException(nameof(processDataStandardFormat.Sequence));
         string endOffset = "E#######T";
@@ -457,25 +566,25 @@ internal class ProcessDataStandardFormat
             }
         }
         results.Add("END_HEADER");
-        if (processDataStandardFormat.InputPDSF is not null)
-        {
-            results.Add(string.Empty);
-            List<char> hyphens = new();
-            results.AddRange(processDataStandardFormat.InputPDSF.Header.Select(l => l.Replace('\t', '|')));
-            results.Add(string.Empty);
-            results.Add($"|{string.Join("|", processDataStandardFormat.InputPDSF.Columns)}|");
-            for (int i = 0; i < processDataStandardFormat.InputPDSF.Columns.Count; i++)
-                hyphens.Add('-');
-            results.Add($"|{string.Join("|", hyphens)}|");
-            results.AddRange(processDataStandardFormat.InputPDSF.Body.Select(l => l.Replace('\t', '|')));
-            results.Add(string.Empty);
-            results.AddRange(processDataStandardFormat.InputPDSF.Footer.Select(l => l.Replace('\t', '|')));
-            results.Add(string.Empty);
-            results.Add("EOF");
-            results.Add(string.Empty);
-            string json = GetJson(processDataStandardFormat);
-            results.Add(json);
-        }
+        results.Add(string.Empty);
+        List<char> hyphens = new();
+        results.AddRange(processDataStandardFormat.InputPDSF.Header.Select(l => $"|{l.Replace('\t', '|')}|"));
+        results.Add(string.Empty);
+        results.Add($"|{string.Join("|", processDataStandardFormat.InputPDSF.Columns)}|");
+        for (int i = 0; i < processDataStandardFormat.InputPDSF.Columns.Count; i++)
+            hyphens.Add('-');
+        results.Add($"|{string.Join("|", hyphens)}|");
+        results.AddRange(processDataStandardFormat.InputPDSF.Body.Select(l => $"|{l.Replace('\t', '|')}|"));
+        results.Add(string.Empty);
+        results.AddRange(processDataStandardFormat.InputPDSF.Footer.Select(l => $"|{l.Replace('\t', '|')}|"));
+        results.Add(string.Empty);
+        string xml = GetXml(processDataStandardFormat);
+        results.Add(xml);
+        results.Add(string.Empty);
+        results.Add("EOF");
+        results.Add(string.Empty);
+        string json = GetJson(processDataStandardFormat);
+        results.Add(json);
         File.WriteAllText(path, string.Join(Environment.NewLine, results));
     }
 
@@ -518,7 +627,7 @@ internal class ProcessDataStandardFormat
                 {
                     for (int c = 1; c < segments.Length; c++)
                     {
-                        value = segments[c].Replace("\"", "\\\"").Replace("\\", "\\\\");
+                        value = segments[c].Replace("\\", "\\\\").Replace("\"", "\\\"");
                         _ = stringBuilder.Append('"').Append(processDataStandardFormat.Columns[c]).Append("\":\"").Append(value).Append("\",");
                     }
                 }
@@ -526,7 +635,7 @@ internal class ProcessDataStandardFormat
                 {
                     for (int c = 1; c < segments.Length; c++)
                     {
-                        value = segments[c].Replace("\"", "\\\"").Replace("\\", "\\\\");
+                        value = segments[c].Replace("\\", "\\\\").Replace("\"", "\\\"");
                         if (string.IsNullOrEmpty(value))
                             _ = stringBuilder.Append('"').Append(processDataStandardFormat.Columns[c]).Append("\":").Append(value).Append("null,");
                         else if (value.All(char.IsDigit))
@@ -545,6 +654,17 @@ internal class ProcessDataStandardFormat
         return results;
     }
 
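+    // Uses the "Records" JSON block embedded in an existing file when GetRecordsJson finds one; otherwise falls back to GetArray(processDataStandardFormat).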
+    internal static JsonElement[] GetArray(string reportFullPath, string[] lines, ProcessDataStandardFormat processDataStandardFormat)
+    {
+        JsonElement[] results;
+        string? json = GetRecordsJson(reportFullPath, lines);
+        if (string.IsNullOrEmpty(json))
+            results = GetArray(processDataStandardFormat);
+        else
+            results = JsonSerializer.Deserialize(json, JsonElementCollectionSourceGenerationContext.Default.JsonElementArray) ?? throw new Exception();
+        return results;
+    }
+
     internal static string GetPDSFText(IFileRead fileRead, Logistics logistics, JsonElement[] jsonElements, string logisticsText)
     {
         string result;
@@ -757,10 +877,126 @@ internal class ProcessDataStandardFormat
         return result;
     }
 
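+    // Pivots the tab-separated body (or the raw InputPDSF body when present) into one XML element per column, escaping XML special characters
+    // and sanitizing column names into valid element names; columns holding a single repeated value plus blanks are collapsed into the first record.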
+    internal static string GetXml(ProcessDataStandardFormat processDataStandardFormat)
+    {
+        string result;
+        string tag;
+        string value;
+        string[] segments;
+        List<string> values;
+        Dictionary<string, List<string>> results = new();
+        ReadOnlyCollection<string> body = processDataStandardFormat.InputPDSF is null ?
+            processDataStandardFormat.Body : processDataStandardFormat.InputPDSF.Body;
+        ReadOnlyCollection<string> columns = processDataStandardFormat.InputPDSF is null ?
+            processDataStandardFormat.Columns : processDataStandardFormat.InputPDSF.Columns;
+        List<string> lines = new() { "", "" };
+        for (int i = 0; i < body.Count; i++)
+        {
+            segments = body[i].Trim().Split('\t');
+            if (segments.Length != columns.Count)
+                break;
+            for (int c = 0; c < segments.Length; c++)
+            {
+                value = segments[c].Replace("&", "&amp;")
+                                   .Replace("<", "&lt;")
+                                   .Replace(">", "&gt;")
+                                   .Replace("\"", "&quot;")
+                                   .Replace("'", "&apos;");
+                tag = Regex.Replace(columns[c].Trim('"'), @"[^a-zA-Z0-9]", "_").Split('\r')[0].Split('\n')[0];
+                if (i == 0)
+                {
+                    if (results.ContainsKey(tag))
+                        continue;
+                    results.Add(tag, new List<string>());
+                }
+                results[tag].Add(value);
+            }
+        }
+        foreach (KeyValuePair<string, List<string>> keyValuePair in results)
+        {
+            if (body.Count < 2)
+                break;
+            if (keyValuePair.Value.Count != body.Count)
+                continue;
+            values = keyValuePair.Value.Distinct().ToList();
+            if (values.Count == 2 && (string.IsNullOrEmpty(values[0]) || string.IsNullOrEmpty(values[1])))
+            {
+                for (int i = 0; i < body.Count; i++)
+                    keyValuePair.Value[i] = string.Empty;
+                foreach (string v in values)
+                {
+                    if (string.IsNullOrEmpty(v))
+                        continue;
+                    keyValuePair.Value[0] = v;
+                }
+            }
+        }
+        for (int i = 0; i < body.Count; i++)
+        {
+            lines.Add("  ");
+            foreach (KeyValuePair<string, List<string>> keyValuePair in results)
+            {
+                if (keyValuePair.Value.Count != body.Count)
+                    continue;
+                lines.Add(string.Concat("    <", keyValuePair.Key, '>', keyValuePair.Value[i], "</", keyValuePair.Key, '>'));
+            }
+            lines.Add("  ");
+        }
+        lines.Add("");
+        result = string.Join(Environment.NewLine, lines);
+        return result;
+    }
+
+    internal static string GetXml(string reportFullPath, string[]? lines = null)
+    {
+        string result;
+        bool foundXml = false;
+        List<string> results = new();
+        lines ??= File.ReadAllLines(reportFullPath);
+        foreach (string line in lines)
+        {
+            if (line.StartsWith(" results = new();
+        lines ??= File.ReadAllLines(reportFullPath);
+        foreach (string line in lines)
+        {
+            if (line.StartsWith("\"Records\""))
+                foundRecords = true;
+            if (!foundRecords)
+                continue;
+            if (line == "],")
+                break;
+            results.Add(line);
+        }
+        result = results.Count == 0 ? null : $"{string.Join(Environment.NewLine, results.Skip(1))}{Environment.NewLine}]";
+        return result;
+    }
+
 }
 
 [JsonSourceGenerationOptions(WriteIndented = true)]
 [JsonSerializable(typeof(JsonElement[]))]
 internal partial class JsonElementCollectionSourceGenerationContext : JsonSerializerContext
 {
+}
+
+[JsonSourceGenerationOptions(WriteIndented = true, DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull)]
+[JsonSerializable(typeof(Dictionary<string, Dictionary<string, string>>))]
+internal partial class DictionaryStringDictionaryStringStringSourceGenerationContext : JsonSerializerContext
+{
 }
\ No newline at end of file
diff --git a/Adaptation/Shared/ProcessDataStandardFormatMapping.cs b/Adaptation/Shared/ProcessDataStandardFormatMapping.cs
index c5a75ec..f99db0a 100644
--- a/Adaptation/Shared/ProcessDataStandardFormatMapping.cs
+++ b/Adaptation/Shared/ProcessDataStandardFormatMapping.cs
@@ -1,33 +1,34 @@
 using System.Collections.ObjectModel;
+using System.Linq;
 
 namespace Adaptation.Shared;
 
 public class ProcessDataStandardFormatMapping
 {
 
-    public ReadOnlyCollection<string> BackfillColumns { get; private set; }
     public ReadOnlyCollection<int> ColumnIndices { get; private set; }
-    public ReadOnlyCollection<string> IgnoreColumns { get; private set; }
-    public ReadOnlyCollection<string> IndexOnlyColumns { get; private set; }
-    public ReadOnlyDictionary<string, string> KeyValuePairs { get; private set; }
     public ReadOnlyCollection<string> NewColumnNames { get; private set; }
     public ReadOnlyCollection<string> OldColumnNames { get; private set; }
 
-    public ProcessDataStandardFormatMapping(ReadOnlyCollection<string> backfillColumns,
-                                            ReadOnlyCollection<int> columnIndices,
-                                            ReadOnlyCollection<string> ignoreColumns,
-                                            ReadOnlyCollection<string> indexOnlyColumns,
-                                            ReadOnlyDictionary<string, string> keyValuePairs,
+    public ProcessDataStandardFormatMapping(ReadOnlyCollection<int> columnIndices,
                                             ReadOnlyCollection<string> newColumnNames,
                                             ReadOnlyCollection<string> oldColumnNames)
     {
-        BackfillColumns = backfillColumns;
         ColumnIndices = columnIndices;
-        IgnoreColumns = ignoreColumns;
-        IndexOnlyColumns = indexOnlyColumns;
-        KeyValuePairs = keyValuePairs;
         NewColumnNames = newColumnNames;
         OldColumnNames = oldColumnNames;
     }
 
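+    // Builds a mapping from three comma-separated configuration strings (old names, new names, column indices).
+    // Example with hypothetical values: Get("OldA,OldB,OldC", "NewA,NewB,NewC", "0,-1,2");
+    // an index of -1 marks an old column with no source column, so its name is written out as the value.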
+    internal static ProcessDataStandardFormatMapping Get(string processDataStandardFormatMappingOldColumnNames, string processDataStandardFormatMappingNewColumnNames, string processDataStandardFormatMappingColumnIndices)
+    {
+        ProcessDataStandardFormatMapping result;
+        ReadOnlyCollection<string> newColumnNames = new(processDataStandardFormatMappingNewColumnNames.Split(','));
+        ReadOnlyCollection<string> oldColumnNames = new(processDataStandardFormatMappingOldColumnNames.Split(','));
+        ReadOnlyCollection<int> columnIndices = new(processDataStandardFormatMappingColumnIndices.Split(',').Select(int.Parse).ToArray());
+        result = new(columnIndices: columnIndices,
+                     newColumnNames: newColumnNames,
+                     oldColumnNames: oldColumnNames);
+        return result;
+    }
+
 }
\ No newline at end of file
diff --git a/Adaptation/_Tests/Shared/AdaptationTesting.cs b/Adaptation/_Tests/Shared/AdaptationTesting.cs
index 8c0bc93..e49ae57 100644
--- a/Adaptation/_Tests/Shared/AdaptationTesting.cs
+++ b/Adaptation/_Tests/Shared/AdaptationTesting.cs
@@ -193,7 +193,12 @@ public class AdaptationTesting : ISMTP
             segments = withActualCICN.Split(new string[] { ticks }, StringSplitOptions.None);
             dummyDirectory = Path.Combine(dummyRoot, cellInstanceName, ticks, string.Join(null, segments));
             if (!Directory.Exists(dummyDirectory))
+            {
                 _ = Directory.CreateDirectory(dummyDirectory);
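+                // Touch the cell instance directory so its last write time reflects the newly created dummy directory; failures are intentionally ignored.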
+                try
+                { Directory.SetLastWriteTime(Path.Combine(dummyRoot, cellInstanceName), DateTime.Now); }
+                catch { }
+            }
         }
         if (string.IsNullOrEmpty(ticks))
         {