diff --git a/Adaptation/.editorconfig b/Adaptation/.editorconfig
index b22ed15..e393d5b 100644
--- a/Adaptation/.editorconfig
+++ b/Adaptation/.editorconfig
@@ -110,7 +110,7 @@ dotnet_diagnostic.CA2254.severity = none # CA2254: The logging message template
 dotnet_diagnostic.IDE0001.severity = warning # IDE0001: Simplify name
 dotnet_diagnostic.IDE0002.severity = warning # Simplify (member access) - System.Version.Equals("1", "2"); Version.Equals("1", "2");
 dotnet_diagnostic.IDE0004.severity = warning # IDE0004: Cast is redundant.
-dotnet_diagnostic.IDE0005.severity = warning # Using directive is unnecessary
+dotnet_diagnostic.IDE0005.severity = none # Using directive is unnecessary
 dotnet_diagnostic.IDE0028.severity = none # IDE0028: Collection initialization can be simplified
 dotnet_diagnostic.IDE0031.severity = warning # Use null propagation (IDE0031)
 dotnet_diagnostic.IDE0047.severity = warning # IDE0047: Parentheses can be removed
@@ -122,6 +122,7 @@ dotnet_diagnostic.IDE0290.severity = none # Use primary constructor [Distance]cs
 dotnet_diagnostic.IDE0300.severity = none # IDE0300: Collection initialization can be simplified
 dotnet_diagnostic.IDE0301.severity = none #IDE0301: Collection initialization can be simplified
 dotnet_diagnostic.IDE0305.severity = none # IDE0305: Collection initialization can be simplified
+dotnet_diagnostic.MSTEST0015.severity = none # MSTEST0015: Test method {method} should not be ignored
 dotnet_diagnostic.MSTEST0037.severity = error # MSTEST0037: Use proper 'Assert' methods
 dotnet_diagnostic.SYSLIB1045.severity = none # SYSLIB1045: diagnostics for regex source generation
 dotnet_naming_rule.abstract_method_should_be_pascal_case.severity = warning
diff --git a/Adaptation/.vscode/launch.json b/Adaptation/.vscode/launch.json
index cfb4688..f8bf7e8 100644
--- a/Adaptation/.vscode/launch.json
+++ b/Adaptation/.vscode/launch.json
@@ -1,10 +1,43 @@
 {
   "configurations": [
+    {
+      "mode": "debug",
+      "name": "Go launch file",
+      "program": "${file}",
+      "request": "launch",
+      "type": "go"
+    },
+    {
+      "name": "node Launch Current Opened File",
+      "program": "${file}",
+      "request": "launch",
+      "type": "node"
+    },
+    {
+      "cwd": "${workspaceFolder}",
+      "internalConsoleOptions": "neverOpen",
+      "name": "Debug File",
+      "program": "${file}",
+      "request": "launch",
+      "stopOnEntry": false,
+      "type": "bun",
+      "watchMode": false
+    },
+    {
+      "cwd": "${workspaceFolder}",
+      "internalConsoleOptions": "neverOpen",
+      "name": "Run File",
+      "noDebug": true,
+      "program": "${file}",
+      "request": "launch",
+      "type": "bun",
+      "watchMode": false
+    },
     {
       "name": ".NET Core Attach",
-      "type": "coreclr",
+      "processId": 32760,
       "request": "attach",
-      "processId": 13036
+      "type": "coreclr"
     }
   ]
-}
+}
\ No newline at end of file
diff --git a/Adaptation/.vscode/tasks.json b/Adaptation/.vscode/tasks.json
index c2181e2..866a952 100644
--- a/Adaptation/.vscode/tasks.json
+++ b/Adaptation/.vscode/tasks.json
@@ -1,19 +1,134 @@
 {
   "version": "2.0.0",
+  "inputs": [
+    {
+      "default": "Development",
+      "description": "Which ASP Net Core Environment?",
+      "id": "ASPNETCORE_ENVIRONMENT",
+      "options": [
+        "Development",
+        "Production"
+      ],
+      "type": "pickString"
+    },
+    {
+      "default": "{AssemblyTitle}",
+      "description": "What Assembly Title?",
+      "id": "AssemblyTitle",
+      "type": "promptString"
+    },
+    {
+      "default": "{Build.BuildId}",
+      "description": "Which Build BuildId?",
+      "id": "Build.BuildId",
+      "type": "promptString"
+    },
+    {
+      "default": "{Build.Reason}",
+      "description": "Which Build Reason?",
+      "id": "Build.Reason",
+      "type": "promptString"
+    },
+    {
+      "default": "{Build.Repository.Id}",
+      "description": "Which Build Repository Id?",
+      "id": "Build.Repository.Id",
+      "type": "promptString"
+    },
+    {
+      "default": "{Build.Repository.Name}",
+      "description": "Which Build Repository Name?",
+      "id": "Build.Repository.Name",
+      "type": "promptString"
+    },
+    {
+      "default": "{Build.SourceVersion}",
+      "description": "Which Build Source Version?",
+      "id": "Build.SourceVersion",
+      "type": "promptString"
+    },
+    {
+      "default": "Debug",
+      "description": "Which Configuration?",
+      "id": "Configuration",
+      "options": [
+        "Debug",
+        "Release"
+      ],
+      "type": "pickString"
+    },
+    {
+      "default": "net8.0",
+      "description": "Which Core Version?",
+      "id": "CoreVersion",
+      "options": [
+        "net8.0"
+      ],
+      "type": "pickString"
+    },
+    {
+      "default": "C:/Program Files (x86)/Microsoft Visual Studio/2022/BuildTools/MSBuild/Current/Bin/MSBuild.exe",
+      "description": "Which MS Build?",
+      "id": "MSBuild",
+      "type": "promptString"
+    },
+    {
+      "default": "https://artifactory.intra.infineon.com/artifactory/api/nuget/ngt-fi-package-main-vir/",
+      "description": "Which Nuget Source?",
+      "id": "NugetSource",
+      "type": "promptString"
+    },
+    {
+      "default": "win-x64",
+      "description": "Which Runtime?",
+      "id": "Runtime",
+      "options": [
+        "win-x64",
+        "win-x32",
+        "linux-x64",
+        "linux-x32"
+      ],
+      "type": "pickString"
+    },
+    {
+      "default": "L:/",
+      "description": "Which System DefaultWorkingDirectory?",
+      "id": "System.DefaultWorkingDirectory",
+      "options": [
+        "L:/",
+        "D:/",
+        "C:/"
+      ],
+      "type": "pickString"
+    },
+    {
+      "default": "v4.8",
+      "description": "Which Core Target Framework Version?",
+      "id": "TargetFrameworkVersion",
+      "options": [
+        "v4.8"
+      ],
+      "type": "pickString"
+    },
+    {
+      "default": "{UserSecretsId}",
+      "description": "Which Core User Secrets Id?",
+      "id": "UserSecretsId",
+      "type": "promptString"
+    }
+  ],
   "tasks": [
     {
       "label": "Build",
       "command": "dotnet",
       "type": "process",
       "args": [
-        "build",
-        "/property:GenerateFullPaths=true",
-        "/consoleloggerparameters:NoSummary"
+        "build"
       ],
       "problemMatcher": "$msCompile"
     },
     {
-      "label": "Test-Debug",
+      "label": "Test Debug",
       "command": "dotnet",
       "type": "process",
       "args": [
@@ -24,7 +139,7 @@
       "problemMatcher": "$msCompile"
     },
     {
-      "label": "Test-Release",
+      "label": "Test Release",
       "command": "dotnet",
       "type": "process",
       "args": [
@@ -50,7 +165,7 @@
       "problemMatcher": "$msCompile"
     },
     {
-      "label": "Format-Whitespaces",
+      "label": "Format Whitespaces",
       "command": "dotnet",
       "type": "process",
       "args": [
@@ -87,13 +202,13 @@
       "problemMatcher": "$msCompile"
     },
     {
-      "label": "Project",
+      "label": "Code Project",
       "type": "shell",
       "command": "code ../MET08DDUPSFS6420.csproj",
       "problemMatcher": []
     },
     {
-      "label": "Readme",
+      "label": "Code Read Me",
       "type": "shell",
       "command": "code ../README.md",
       "problemMatcher": []
@@ -113,7 +228,7 @@
       "problemMatcher": []
     },
     {
-      "label": "Git Config",
+      "label": "Code Git Config",
       "type": "shell",
       "command": "code ../.git/config",
       "problemMatcher": []
diff --git a/Adaptation/FileHandlers/APC/FileRead.cs b/Adaptation/FileHandlers/APC/FileRead.cs
index a9ed0b0..a90c002 100644
--- a/Adaptation/FileHandlers/APC/FileRead.cs
+++ b/Adaptation/FileHandlers/APC/FileRead.cs
@@ -128,7 +128,7 @@ public class FileRead : Shared.FileRead, IFileRead
         Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
         if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
             FileCopy(reportFullPath, dateTime, descriptions);
-        results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics[0]), tests, jsonElements, new List<FileInfo>());
+        results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics), tests, jsonElements, new List<FileInfo>());
         return results;
     }
 
diff --git a/Adaptation/FileHandlers/Archive/FileRead.cs b/Adaptation/FileHandlers/Archive/FileRead.cs
index 80325c0..5cdb4f2 100644
--- a/Adaptation/FileHandlers/Archive/FileRead.cs
+++ b/Adaptation/FileHandlers/Archive/FileRead.cs
@@ -153,7 +153,7 @@ public class FileRead : Shared.FileRead, IFileRead
         Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
         if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
             MoveArchive(reportFullPath, dateTime);
-        results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics[0]), tests, jsonElements, new List<FileInfo>());
+        results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics), tests, jsonElements, new List<FileInfo>());
         return results;
     }
 
diff --git a/Adaptation/FileHandlers/IQSSi/FileRead.cs b/Adaptation/FileHandlers/IQSSi/FileRead.cs
index 7d0f3d7..f160ef5 100644
--- a/Adaptation/FileHandlers/IQSSi/FileRead.cs
+++ b/Adaptation/FileHandlers/IQSSi/FileRead.cs
@@ -204,6 +204,30 @@ public class FileRead : Shared.FileRead, IFileRead
         WaitForFileConsumption(dateTime, descriptions, isDummyRun, successDirectory, duplicateDirectory, collection, duplicateFile);
     }
 
+    private void WriteFile<T>(string reportFullPath, DateTime dateTime, List<T> descriptions) where T : Shared.Properties.IDescription
+    {
+        bool isDummyRun = false;
+        string successDirectory = string.Empty;
+        List<(Shared.Properties.IScopeInfo, string)> collection = new();
+        string duplicateDirectory = Path.Combine(_FileConnectorConfiguration.SourceFileLocation, _CellInstanceName);
+        if (!Directory.Exists(duplicateDirectory))
+            _ = Directory.CreateDirectory(duplicateDirectory);
+        string duplicateFile = Path.Combine(duplicateDirectory, $"{Path.GetFileName(reportFullPath)}.xml");
+        string xml = ProcessDataStandardFormat.GetXml(reportFullPath)
+            .Replace("ppm[ ", ">")
+            .Replace(">*0", ">")
+            .Replace(">*", ">")
+            .Replace("%
 
     private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
     {
         Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
@@ -213,11 +237,13 @@
         JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(processDataStandardFormat);
         List<pcl.Description> descriptions = pcl.ProcessData.GetDescriptions(jsonElements);
         Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
-        results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics[0]), tests, jsonElements, new List<FileInfo>());
+        results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics), tests, jsonElements, new List<FileInfo>());
         if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
             SaveIQSFile(reportFullPath, dateTime, descriptions, tests);
         if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
             FileCopy(reportFullPath, dateTime, descriptions);
+        if (string.IsNullOrEmpty(reportFullPath) && _IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
+            WriteFile(reportFullPath, dateTime, descriptions);
         return results;
     }
 
diff --git a/Adaptation/FileHandlers/MoveMatchingFiles/FileRead.cs b/Adaptation/FileHandlers/MoveMatchingFiles/FileRead.cs
index 2e419d3..73c6d12 100644
--- a/Adaptation/FileHandlers/MoveMatchingFiles/FileRead.cs
+++ b/Adaptation/FileHandlers/MoveMatchingFiles/FileRead.cs
@@ -88,9 +88,9 @@ public class FileRead : Shared.FileRead, IFileRead
         string processDataStandardFormatMappingOldColumnNames = GetPropertyValue(cellInstanceConnectionName, modelObjectParameters, "Process.Data.Standard.Format.Mapping.Old.Column.Names");
         string processDataStandardFormatMappingNewColumnNames = GetPropertyValue(cellInstanceConnectionName, modelObjectParameters, "Process.Data.Standard.Format.Mapping.New.Column.Names");
         string processDataStandardFormatMappingColumnIndices = GetPropertyValue(cellInstanceConnectionName, modelObjectParameters, "Process.Data.Standard.Format.Mapping.Column.Indices");
-        _ProcessDataStandardFormatMapping = GetProcessDataStandardFormatMapping(processDataStandardFormatMappingOldColumnNames,
-                                                                                processDataStandardFormatMappingNewColumnNames,
-                                                                                processDataStandardFormatMappingColumnIndices);
+        _ProcessDataStandardFormatMapping = ProcessDataStandardFormatMapping.Get(processDataStandardFormatMappingOldColumnNames,
+                                                                                 processDataStandardFormatMappingNewColumnNames,
+                                                                                 processDataStandardFormatMappingColumnIndices);
     }
 
     void IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, Exception exception)
@@ -169,46 +169,6 @@ public class FileRead : Shared.FileRead, IFileRead
         return results;
     }
 
-    private static ProcessDataStandardFormatMapping GetProcessDataStandardFormatMapping(string processDataStandardFormatMappingOldColumnNames, string processDataStandardFormatMappingNewColumnNames, string processDataStandardFormatMappingColumnIndices)
-    {
-        ProcessDataStandardFormatMapping result;
-        string[] segmentsB;
-        List<string> distinct = new();
-        Dictionary<string, string> keyValuePairs = new();
-        string args4 = "Time,HeaderUniqueId,UniqueId,Date";
-        string args5 = "";
-        string args6 = "";
-        string args7 = "Test|EventId,Lot|Id,Slot|WaferId,AreaTotal|WaferAreaTotal,HazeAverage|WaferHazeAverage,HazeRegion|WaferHazeRegion,ScratchTotal|WaferScratchTotal";
-        // string args8 = "Time,A_LOGISTICS,B_LOGISTICS,Test,Count,Index,MesEntity,Date,Employee,Lot,PSN,Reactor,Recipe,Comments,Diameter,Exclusion,Gain,HeaderUniqueId,Laser,ParseErrorText,RDS,Slot,UniqueId,AreaCount,AreaCountAvg,AreaCountMax,AreaCountMin,AreaCountStdDev,AreaTotal,AreaTotalAvg,AreaTotalMax,AreaTotalMin,AreaTotalStdDev,Bin1,Bin2,Bin3,Bin4,Bin5,Bin6,Bin7,Bin8,HazeAverage,HazeAverageAvg,HazeAverageMax,HazeAverageMin,HazeAverageStdDev,HazePeak,HazeRegion,HazeRegionAvg,HazeRegionMax,HazeRegionMin,HazeRegionStdDev,HazeRng,LPDCM2,LPDCM2Avg,LPDCM2Max,LPDCM2Min,LPDCM2StdDev,LPDCount,LPDCountAvg,LPDCountMax,LPDCountMin,LPDCountStdDev,Mean,ScratchCount,ScratchCountAvg,ScratchCountMax,ScratchCountMin,ScratchCountStdDev,ScratchTotal,ScratchTotalAvg,ScratchTotalMax,ScratchTotalMin,ScratchTotalStdDev,Sort,StdDev,SumOfDefects,SumOfDefectsAvg,SumOfDefectsMax,SumOfDefectsMin,SumOfDefectsStdDev,Thresh,Thruput";
-        // string args9 = "Time,A_LOGISTICS,B_LOGISTICS,Count,Sequence,MesEntity,Index,Date,Recipe,Id,WaferId,LPDCount,LPDCM2,AreaCount,AreaTotal,ScratchCount,ScratchTotal,SumOfDefects,HazeRegion,HazeAverage,Grade,LPDCountMin,LPDCM2Min,AreaCountMin,AreaTotalMin,ScratchCountMin,ScratchTotalMin,SumOfDefectsMin,HazeRegionMin,HazeAverageMin,LPDCountMax,LPDCM2Max,AreaCountMax,AreaTotalMax,ScratchCountMax,ScratchTotalMax,SumOfDefectsMax,HazeRegionMax,HazeAverageMax,LPDCountAvg,LPDCM2Avg,AreaCountAvg,AreaTotalAvg,ScratchCountAvg,ScratchTotalAvg,SumOfDefectsAvg,HazeRegionAvg,HazeAverageAvg,LPDCountStdDev,LPDCM2StdDev,AreaCountStdDev,AreaTotalStdDev,ScratchCountStdDev,ScratchTotalStdDev,SumOfDefectsStdDev,HazeRegionStdDev,HazeAverageStdDev,WaferDate,Comments,Sort,WaferLPDCount,WaferLPDCM2,Bin1,Bin2,Bin3,Bin4,Bin5,Bin6,Bin7,Bin8,Mean,StdDev,WaferAreaCount,WaferAreaTotal,WaferScratchCount,WaferScratchTotal,WaferSumOfDefects,WaferHazeRegion,WaferHazeAverage,HazePeak,Laser,Gain,Diameter,Thresh,Exclusion,HazeRng,Thruput,WaferRecipe,RDS,PSN,Reactor,Layer,Zone,Employee,InferredLot,EventId";
-        // string args10 = "0,1,2,95,3,6,5,7,93,9,89,90,8,58,82,84,81,-1,80,-1,88,10,-1,13,41,32,23,50,73,42,33,24,51,62,63,64,65,66,67,68,69,78,47,38,29,56,79,77,46,37,28,55,85,12,40,31,22,49,11,39,30,21,48,70,15,43,34,25,52,75,44,35,26,53,59,71,17,45,36,27,54,83,86";
-        string[] segments = args7.Split(',');
-        ReadOnlyCollection<string> ignoreColumns = new(args4.Split(','));
-        ReadOnlyCollection<string> backfillColumns = new(args5.Split(','));
-        ReadOnlyCollection<string> indexOnlyColumns = new(args6.Split(','));
-        ReadOnlyCollection<string> newColumnNames = new(processDataStandardFormatMappingNewColumnNames.Split(','));
-        ReadOnlyCollection<string> oldColumnNames = new(processDataStandardFormatMappingOldColumnNames.Split(','));
-        ReadOnlyCollection<int> columnIndices = new(processDataStandardFormatMappingColumnIndices.Split(',').Select(int.Parse).ToArray());
-        foreach (string segment in segments)
-        {
-            segmentsB = segment.Split('|');
-            if (segmentsB.Length != 2)
-                continue;
-            if (distinct.Contains(segmentsB[0]))
-                continue;
-            distinct.Add(segmentsB[0]);
-            keyValuePairs.Add(segmentsB[0], segmentsB[1]);
-        }
-        result = new(backfillColumns: backfillColumns,
-                     columnIndices: columnIndices,
-                     newColumnNames: newColumnNames,
-                     ignoreColumns: ignoreColumns,
-                     indexOnlyColumns: indexOnlyColumns,
-                     keyValuePairs: new(keyValuePairs),
-                     oldColumnNames: oldColumnNames);
-        return result;
-    }
-
     private static ReadOnlyCollection<PreWith> GetPreWithCollection(ReadOnlyCollection<Pre> preCollection)
     {
         List<PreWith> results = new();
@@ -313,8 +273,13 @@ public class FileRead : Shared.FileRead, IFileRead
                 continue;
             if (!_StaticRuns.TryGetValue(_Logistics.Sequence, out List<Shared.Metrology.WS.Results>? wsResults))
                 wsResults = null;
-            ProcessDataStandardFormat.Write(preWith.CheckFile, processDataStandardFormat, wsResults);
-            File.Delete(preWith.MatchingFile);
+            if (processDataStandardFormat.InputPDSF is null)
+                File.Move(preWith.MatchingFile, preWith.CheckFile);
+            else
+            {
+                ProcessDataStandardFormat.Write(preWith.CheckFile, processDataStandardFormat, wsResults);
+                File.Delete(preWith.MatchingFile);
+            }
             if (Directory.Exists(preWith.NoWaitDirectory))
             {
                 post = new(preWith.CheckFile, preWith.ErrFile);
diff --git a/Adaptation/FileHandlers/OpenInsight/FileRead.cs b/Adaptation/FileHandlers/OpenInsight/FileRead.cs
index 19895c6..74c48ff 100644
--- a/Adaptation/FileHandlers/OpenInsight/FileRead.cs
+++ b/Adaptation/FileHandlers/OpenInsight/FileRead.cs
@@ -119,15 +119,6 @@ public class FileRead : Shared.FileRead, IFileRead
         if (!Directory.Exists(duplicateDirectory))
             _ = Directory.CreateDirectory(duplicateDirectory);
         string successDirectory = _FileConnectorConfiguration.AlternateTargetFolder;
-        if (!Directory.Exists(Path.Combine(duplicateDirectory, "1")))
-        {
-            string parentParent = GetParentParent(_FileConnectorConfiguration.SourceFileLocation);
-            if (parentParent.Contains(_CellInstanceName))
-                parentParent = Path.GetDirectoryName(parentParent);
-            duplicateDirectory = Path.Combine(parentParent, "Data");
-            if (!Directory.Exists(duplicateDirectory))
-                _ = Directory.CreateDirectory(duplicateDirectory);
-        }
         if (descriptions.Count == 0 || tests.Length == 0)
             duplicateFile = Path.Combine(duplicateDirectory, Path.GetFileName(reportFullPath));
         else
@@ -172,7 +163,7 @@ public class FileRead : Shared.FileRead, IFileRead
         Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
         if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
             SaveOpenInsightFile(reportFullPath, dateTime, processDataStandardFormat, descriptions, tests);
-        results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics[0]), tests, jsonElements, new List<FileInfo>());
+        results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics), tests, jsonElements, new List<FileInfo>());
         return results;
     }
 
diff --git a/Adaptation/FileHandlers/OpenInsightMetrologyViewer/FileRead.cs b/Adaptation/FileHandlers/OpenInsightMetrologyViewer/FileRead.cs
index 96959ac..c8266ef 100644
--- a/Adaptation/FileHandlers/OpenInsightMetrologyViewer/FileRead.cs
+++ b/Adaptation/FileHandlers/OpenInsightMetrologyViewer/FileRead.cs
@@ -147,7 +147,7 @@ public class FileRead : Shared.FileRead, IFileRead
         Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
         if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
             SendData(reportFullPath, dateTime, descriptions);
-        results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics[0]), tests, jsonElements, new List<FileInfo>());
+        results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics), tests, jsonElements, new List<FileInfo>());
         return results;
     }
 
diff --git a/Adaptation/FileHandlers/OpenInsightMetrologyViewerAttachments/FileRead.cs b/Adaptation/FileHandlers/OpenInsightMetrologyViewerAttachments/FileRead.cs
index 6f1307e..8a58f5e 100644
--- a/Adaptation/FileHandlers/OpenInsightMetrologyViewerAttachments/FileRead.cs
+++ b/Adaptation/FileHandlers/OpenInsightMetrologyViewerAttachments/FileRead.cs
@@ -171,7 +171,7 @@ public class FileRead : Shared.FileRead, IFileRead
         Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
         if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
             PostOpenInsightMetrologyViewerAttachments(descriptions);
-        results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics[0]), tests, jsonElements, new List<FileInfo>());
+        results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics), tests, jsonElements, new List<FileInfo>());
         return results;
     }
 
diff --git a/Adaptation/FileHandlers/Processed/FileRead.cs b/Adaptation/FileHandlers/Processed/FileRead.cs
index 34ef86b..7f0a221 100644
--- a/Adaptation/FileHandlers/Processed/FileRead.cs
+++ b/Adaptation/FileHandlers/Processed/FileRead.cs
@@ -172,7 +172,7 @@ public class FileRead : Shared.FileRead, IFileRead
         JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(processDataStandardFormat);
         List<pcl.Description> descriptions = pcl.ProcessData.GetDescriptions(jsonElements);
         Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
-        results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics[0]), tests, jsonElements, new List<FileInfo>());
+        results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics), tests, jsonElements, new List<FileInfo>());
         if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
             DirectoryMove(reportFullPath, dateTime, descriptions);
         else if (!_IsEAFHosted)
diff --git a/Adaptation/FileHandlers/SPaCe/FileRead.cs b/Adaptation/FileHandlers/SPaCe/FileRead.cs
index 2e0f55a..e258bd6 100644
--- a/Adaptation/FileHandlers/SPaCe/FileRead.cs
+++ b/Adaptation/FileHandlers/SPaCe/FileRead.cs
@@ -125,7 +125,7 @@ public class FileRead : Shared.FileRead, IFileRead
         Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
         if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
             FileCopy(reportFullPath, dateTime, descriptions);
-        results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics[0]), tests, jsonElements, new List<FileInfo>());
+        results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics), tests, jsonElements, new List<FileInfo>());
         return results;
     }
 
diff --git a/Adaptation/FileHandlers/pcl/Convert.cs b/Adaptation/FileHandlers/pcl/Convert.cs
index fb723dc..3fedd48 100644
--- a/Adaptation/FileHandlers/pcl/Convert.cs
+++ b/Adaptation/FileHandlers/pcl/Convert.cs
@@ -75,6 +75,7 @@ internal class Convert
         string[] txtFiles = Directory.GetFiles(sourcePath, $"{sourceFileNameWithoutExtension}_*.txt", SearchOption.TopDirectoryOnly);
         if (txtFiles.Length != 0)
         {
+            txtFiles = (from l in txtFiles orderby l.Length, l select l).ToArray();
             foreach (string txtFile in txtFiles)
             {
                 sourceFiles.Add(txtFile);
diff --git a/Adaptation/MET08DDUPSFS6420.Tests.csproj b/Adaptation/MET08DDUPSFS6420.Tests.csproj
index 496f8fa..e79870e 100644
--- a/Adaptation/MET08DDUPSFS6420.Tests.csproj
+++ b/Adaptation/MET08DDUPSFS6420.Tests.csproj
@@ -87,16 +87,16 @@
         
     
     
-        
+        
             PreserveNewest
         
-        
+        
             PreserveNewest
         
-        
+        
             PreserveNewest
         
-        
+        
             PreserveNewest
         
     
diff --git a/Adaptation/MET08DDUPSFS6420.yml b/Adaptation/MET08DDUPSFS6420.yml
index 60c1b2c..13aa50f 100644
--- a/Adaptation/MET08DDUPSFS6420.yml
+++ b/Adaptation/MET08DDUPSFS6420.yml
@@ -41,6 +41,24 @@ stages:
             displayName: "Nuget Clear"
             enabled: false
 
+          - task: CopyFiles@2
+            displayName: 'Copy GhostPCL Files to: D:\EAF-Mesa-Integration\copy'
+            inputs:
+              Contents: "*"
+              SourceFolder: '\\mesfs.infineon.com\EC_EAFRepository\Staging\DeploymentStorage\GhostPCL'
+              TargetFolder: 'D:\EAF-Mesa-Integration\copy\GhostPCL'
+              OverWrite: true
+            enabled: true
+
+          - task: CopyFiles@2
+            displayName: 'Copy LincPDFC Files to: D:\EAF-Mesa-Integration\copy'
+            inputs:
+              Contents: "*"
+              SourceFolder: '\\mesfs.infineon.com\EC_EAFRepository\Staging\DeploymentStorage\LincPDFC'
+              TargetFolder: 'D:\EAF-Mesa-Integration\copy\LincPDFC'
+              OverWrite: true
+            enabled: false
+
           - script: |
               "C:\program files\dotnet\dotnet.exe" user-secrets init
               "C:\program files\dotnet\dotnet.exe" user-secrets set "BuildNumber" "$(Build.BuildId)"
@@ -184,6 +202,24 @@ stages:
             displayName: "Nuget Clear"
             enabled: false
 
+          - task: CopyFiles@2
+            displayName: 'Copy GhostPCL Files to: D:\EAF-Mesa-Integration\copy'
+            inputs:
+              Contents: "*"
+              SourceFolder: '\\mestsa003.infineon.com\EC_EAFRepository\Staging\DeploymentStorage\GhostPCL'
+              TargetFolder: 'D:\EAF-Mesa-Integration\copy\GhostPCL'
+              OverWrite: true
+            enabled: true
+
+          - task: CopyFiles@2
+            displayName: 'Copy LincPDFC Files to: D:\EAF-Mesa-Integration\copy'
+            inputs:
+              Contents: "*"
+              SourceFolder: '\\mestsa003.infineon.com\EC_EAFRepository\Staging\DeploymentStorage\LincPDFC'
+              TargetFolder: 'D:\EAF-Mesa-Integration\copy\LincPDFC'
+              OverWrite: true
+            enabled: false
+
           - script: |
               "C:\program files\dotnet\dotnet.exe" user-secrets init
               "C:\program files\dotnet\dotnet.exe" user-secrets set "BuildNumber" "$(Build.BuildId)"
diff --git a/Adaptation/Shared/FileRead.cs b/Adaptation/Shared/FileRead.cs
index 49a4526..48aacab 100644
--- a/Adaptation/Shared/FileRead.cs
+++ b/Adaptation/Shared/FileRead.cs
@@ -383,17 +383,24 @@ public class FileRead : Properties.IFileRead
         else
         {
             string[] files;
-            string logisticsSequence = _Logistics.Sequence.ToString();
-            string[] directories = Directory.GetDirectories(jobIdDirectory, $"*{logisticsSequence}*", SearchOption.TopDirectoryOnly);
-            foreach (string directory in directories)
+            string[] directories;
+            string logisticsSequence;
+            for (int i = 0; i < 10; i++)
             {
-                files = Directory.GetFiles(directory, "*", SearchOption.TopDirectoryOnly);
-                if (files.Length == 0)
-                    continue;
-                results.Add(directory);
+                logisticsSequence = (_Logistics.Sequence + -i).ToString();
+                directories = Directory.GetDirectories(jobIdDirectory, $"*{logisticsSequence}*", SearchOption.TopDirectoryOnly);
+                foreach (string directory in directories)
+                {
+                    files = Directory.GetFiles(directory, "*", SearchOption.TopDirectoryOnly);
+                    if (files.Length == 0)
+                        continue;
+                    results.Add(directory);
+                }
+                if (results.Count == 1)
+                    break;
             }
         }
-        if ((results is null) || results.Count != 1)
+        if (results.Count != 1)
             throw new Exception("Didn't find directory by logistics sequence");
         return results.ToArray();
     }
@@ -478,27 +485,14 @@ public class FileRead : Properties.IFileRead
         }
     }
 
-    protected void WritePDSF(IFileRead fileRead, JsonElement[] jsonElements)
+    protected static void WritePDSF(IFileRead fileRead, JsonElement[] jsonElements)
     {
-        string directory;
-        string day = $"{_Logistics.DateTimeFromSequence:yyyy-MM-dd}";
-        string weekOfYear = _Calendar.GetWeekOfYear(_Logistics.DateTimeFromSequence, CalendarWeekRule.FirstDay, DayOfWeek.Sunday).ToString("00");
-        string weekDirectory = $"{_Logistics.DateTimeFromSequence:yyyy}_Week_{weekOfYear}";
-        if (!_CellInstanceConnectionName.StartsWith(_CellInstanceName) && _CellInstanceConnectionNameBase == _EquipmentType)
-            directory = Path.Combine(_TracePath, _EquipmentType, "Target", weekDirectory, day, _CellInstanceName, _CellInstanceConnectionName);
-        else
-            directory = Path.Combine(_TracePath, _EquipmentType, "Source", weekDirectory, day, _CellInstanceName, _CellInstanceConnectionName);
-        if (!Directory.Exists(directory))
-            _ = Directory.CreateDirectory(directory);
-        string file = Path.Combine(directory, string.Concat(_Logistics.MesEntity, "_", _Logistics.Sequence, ".ipdsf"));
-        string lines = ProcessDataStandardFormat.GetPDSFText(fileRead, _Logistics, jsonElements, logisticsText: string.Empty);
-        File.WriteAllText(file, lines);
-        if (_Logistics.TotalSecondsSinceLastWriteTimeFromSequence > 600)
-        {
-            try
-            { File.SetLastWriteTime(file, _Logistics.DateTimeFromSequence); }
-            catch (Exception) { }
-        }
+#pragma warning disable CA1510
+        if (fileRead is null)
+            throw new ArgumentNullException(nameof(fileRead));
+        if (jsonElements is null)
+            throw new ArgumentNullException(nameof(jsonElements));
+#pragma warning restore CA1510
     }
 
     protected void WaitForThread(Thread thread, List<Exception> threadExceptions)
diff --git a/Adaptation/Shared/ProcessDataStandardFormat.cs b/Adaptation/Shared/ProcessDataStandardFormat.cs
index a86241d..0b7a561 100644
--- a/Adaptation/Shared/ProcessDataStandardFormat.cs
+++ b/Adaptation/Shared/ProcessDataStandardFormat.cs
@@ -2,12 +2,14 @@ using Adaptation.Shared.Methods;
 using System;
 using System.Collections.Generic;
 using System.Collections.ObjectModel;
+using System.Diagnostics;
 using System.Globalization;
 using System.IO;
 using System.Linq;
 using System.Text;
 using System.Text.Json;
 using System.Text.Json.Serialization;
+using System.Text.RegularExpressions;
 
 namespace Adaptation.Shared;
 
@@ -136,6 +138,7 @@ internal class ProcessDataStandardFormat
     internal static ProcessDataStandardFormat GetProcessDataStandardFormat(string reportFullPath, string[]? lines = null, int columnsLine = 6)
     {
         ProcessDataStandardFormat result;
+        long? sequence;
         string segment;
         string[] segments;
         bool addToFooter = false;
@@ -186,13 +189,25 @@ internal class ProcessDataStandardFormat
         }
         string? linesOne = lines.Length > 0 && body.Count == 0 && columns.Count == 0 ? lines[1] : null;
         logistics = GetLogistics(footer, linesOne: linesOne);
+        if (logistics.Count == 0)
+            sequence = null;
+        else
+        {
+            segments = logistics[0].Split(new string[] { "SEQUENCE=" }, StringSplitOptions.None);
+            sequence = segments.Length < 2 || !long.TryParse(segments[1].Split(';')[0], out long s) ? null : s;
+        }
+        if (sequence is null && !string.IsNullOrEmpty(reportFullPath))
+        {
+            FileInfo fileInfo = new(reportFullPath);
+            sequence = fileInfo.LastWriteTime.Ticks;
+        }
         result = new(body: body.AsReadOnly(),
                      columns: columns.AsReadOnly(),
                      footer: footer.AsReadOnly(),
                      header: header.AsReadOnly(),
                      inputPDSF: null,
                      logistics: logistics,
-                     sequence: null);
+                     sequence: sequence);
         return result;
     }
 
@@ -214,19 +229,19 @@ internal class ProcessDataStandardFormat
         return results.AsReadOnly();
     }
 
-    internal static ProcessDataStandardFormat GetProcessDataStandardFormat(string reportFullPath, ProcessDataStandardFormatMapping pdsfMapping)
+    internal static ProcessDataStandardFormat GetProcessDataStandardFormat(string reportFullPath, ProcessDataStandardFormatMapping processDataStandardFormatMapping)
     {
         ProcessDataStandardFormat result;
         const int columnsLine = 6;
         FileInfo fileInfo = new(reportFullPath);
         ProcessDataStandardFormat processDataStandardFormat = GetProcessDataStandardFormat(fileInfo.LastWriteTime, columnsLine, fileInfo.FullName, lines: null);
-        JsonElement[]? jsonElements = pdsfMapping.OldColumnNames.Count != pdsfMapping.ColumnIndices.Count ? null : GetFullArray(processDataStandardFormat);
+        JsonElement[]? jsonElements = processDataStandardFormatMapping.OldColumnNames.Count != processDataStandardFormatMapping.ColumnIndices.Count ? null : GetFullArray(processDataStandardFormat);
         JsonProperty[]? jsonProperties = jsonElements is null || jsonElements.Length == 0 ? null : jsonElements[0].EnumerateObject().ToArray();
-        if (jsonElements is null || jsonProperties is null || jsonProperties.Length != pdsfMapping.NewColumnNames.Count)
+        if (jsonElements is null || jsonProperties is null || jsonProperties.Length != processDataStandardFormatMapping.NewColumnNames.Count)
             result = processDataStandardFormat;
         else
         {
-            result = GetProcessDataStandardFormat(pdsfMapping, jsonElements, processDataStandardFormat);
+            result = GetProcessDataStandardFormat(processDataStandardFormatMapping, jsonElements, processDataStandardFormat);
             if (result.Sequence is null || result.Columns.Count == 0 || result.Body.Count == 0 || result.Logistics.Count == 0)
                 result = processDataStandardFormat;
         }
@@ -236,7 +251,7 @@ internal class ProcessDataStandardFormat
     private static ProcessDataStandardFormat GetProcessDataStandardFormat(DateTime lastWriteTime, int columnsLine, string path, string[]? lines)
     {
         ProcessDataStandardFormat result;
-        long sequence;
+        long? sequence;
         string[] segments;
         bool addToFooter = false;
         List<string> body = new();
@@ -268,12 +283,13 @@ internal class ProcessDataStandardFormat
         }
         logistics = GetLogistics(footer, linesOne: null);
         if (logistics.Count == 0)
-            sequence = lastWriteTime.Ticks;
+            sequence = null;
         else
         {
             segments = logistics[0].Split(new string[] { "SEQUENCE=" }, StringSplitOptions.None);
-            sequence = segments.Length < 2 || !long.TryParse(segments[1].Split(';')[0], out long s) ? lastWriteTime.Ticks : s;
+            sequence = segments.Length < 2 || !long.TryParse(segments[1].Split(';')[0], out long s) ? null : s;
         }
+        sequence ??= lastWriteTime.Ticks;
         result = new(body: body.AsReadOnly(),
                      columns: new(columns),
                      footer: footer.AsReadOnly(),
@@ -302,7 +318,7 @@ internal class ProcessDataStandardFormat
                 segments = bodyLine.Split('\t').ToList();
                 for (int c = 0; c < segments.Count; c++)
                 {
-                    value = segments[c].Replace("\"", "\\\"").Replace("\\", "\\\\");
+                    value = segments[c].Replace("\\", "\\\\").Replace("\"", "\\\"");
                     _ = stringBuilder.Append('"').Append(processDataStandardFormat.Columns[c]).Append("\":\"").Append(value).Append("\",");
                 }
                 _ = stringBuilder.Remove(stringBuilder.Length - 1, 1);
@@ -321,12 +337,14 @@ internal class ProcessDataStandardFormat
         int column;
         string value;
         JsonProperty jsonProperty;
+        List<string> debug = new();
         List<string> values = new();
         List<string> results = new();
         JsonProperty[] jsonProperties;
         List<string> unknownColumns = new();
         for (int i = 0; i < jsonElements.Length; i++)
         {
+            debug.Clear();
             values.Clear();
             if (jsonElements[i].ValueKind != JsonValueKind.Object)
             {
@@ -340,16 +358,22 @@ internal class ProcessDataStandardFormat
             {
                 column = processDataStandardFormatMapping.ColumnIndices[c];
                 if (column == -1)
+                {
                     value = processDataStandardFormatMapping.OldColumnNames[c];
+                    debug.Add($"");
+                }
                 else
                 {
                     jsonProperty = jsonProperties[column];
                     value = jsonProperty.Value.ToString();
+                    debug.Add($"");
                 }
                 values.Add(value);
             }
             results.Add(string.Join("\t", values));
         }
+        if (Debugger.IsAttached)
+            File.WriteAllText("../../.txt", string.Join(Environment.NewLine, debug.OrderBy(l => l)));
         result = new(body: new(results),
                      columns: processDataStandardFormatMapping.OldColumnNames,
                      footer: processDataStandardFormat.Footer,
@@ -364,7 +388,6 @@ internal class ProcessDataStandardFormat
     {
         if (processDataStandardFormat.InputPDSF is null)
             throw new NullReferenceException(nameof(processDataStandardFormat.InputPDSF));
-#pragma warning disable CA1845, IDE0057
         string result;
         string line;
         string value;
@@ -378,19 +401,27 @@ internal class ProcessDataStandardFormat
                 break;
             for (int c = 0; c < segments.Length; c++)
             {
-                value = segments[c].Replace("\"", "\\\"").Replace("\\", "\\\\");
+                value = segments[c].Replace("\\", "\\\\").Replace("\"", "\\\"");
                 line += string.Concat('"', processDataStandardFormat.InputPDSF.Columns[c].Trim('"'), '"', ':', '"', value, '"', ',');
             }
             line = string.Concat(line.Substring(0, line.Length - 1), '}');
             lines.Add(line);
         }
+        string? json = null;
+        if (processDataStandardFormat.Footer is not null && processDataStandardFormat.Footer.Count > 0)
+        {
+            Dictionary<string, string> footerKeyValuePairs = GetFooterKeyValuePairs(processDataStandardFormat.Footer);
+            Dictionary<string, Dictionary<string, string>> logisticKeyValuePairs = GetLogisticKeyValuePairs(processDataStandardFormat.Footer, footerKeyValuePairs);
+            json = JsonSerializer.Serialize(logisticKeyValuePairs, DictionaryStringDictionaryStringStringSourceGenerationContext.Default.DictionaryStringDictionaryStringString);
+        }
+        string footerText = string.IsNullOrEmpty(json) || json == "{}" ? string.Empty : $",{Environment.NewLine}\"PDSF\":{Environment.NewLine}{json}";
         result = string.Concat(
             '{',
             Environment.NewLine,
             '"',
             "Count",
             '"',
-            ": ",            
+            ": ",
             processDataStandardFormat.Body.Count,
             ',',
             Environment.NewLine,
@@ -409,17 +440,95 @@ internal class ProcessDataStandardFormat
             '"',
             "Sequence",
             '"',
-            ": ",            
+            ": ",
             processDataStandardFormat.Sequence,
             Environment.NewLine,
+            footerText,
+            Environment.NewLine,
             '}');
         return result;
-#pragma warning restore CA1845, IDE0057
+    }
+
+    private static Dictionary<string, string> GetFooterKeyValuePairs(ReadOnlyCollection<string> footerLines)
+    {
+        Dictionary<string, string> results = new();
+        string[] segments;
+        foreach (string footerLine in footerLines)
+        {
+            segments = footerLine.Split('\t');
+            if (segments.Length != 2 || string.IsNullOrEmpty(segments[1].Trim()))
+            {
+                continue;
+            }
+            if (segments[1].Contains(';'))
+            {
+                continue;
+            }
+            else
+            {
+                if (results.ContainsKey(segments[0]))
+                {
+                    continue;
+                }
+                results.Add(segments[0], segments[1]);
+            }
+        }
+        return results;
+    }
+
+    private static Dictionary<string, Dictionary<string, string>> GetLogisticKeyValuePairs(ReadOnlyCollection<string> footerLines, Dictionary<string, string> footerKeyValuePairs)
+    {
+        Dictionary<string, Dictionary<string, string>> results = new();
+        string[] segments;
+        string[] subSegments;
+        string[] subSubSegments;
+        Dictionary<string, string>? keyValue;
+        results.Add("Footer", footerKeyValuePairs);
+        foreach (string footerLine in footerLines)
+        {
+            segments = footerLine.Split('\t');
+            if (segments.Length != 2 || string.IsNullOrEmpty(segments[1].Trim()))
+            {
+                continue;
+            }
+            if (!segments[1].Contains(';') || !segments[1].Contains('='))
+            {
+                continue;
+            }
+            else
+            {
+                subSegments = segments[1].Split(';');
+                if (subSegments.Length < 1)
+                {
+                    continue;
+                }
+                if (!results.TryGetValue(segments[0], out keyValue))
+                {
+                    results.Add(segments[0], new());
+                    if (!results.TryGetValue(segments[0], out keyValue))
+                    {
+                        throw new Exception();
+                    }
+                }
+                foreach (string segment in subSegments)
+                {
+                    subSubSegments = segment.Split('=');
+                    if (subSubSegments.Length != 2)
+                    {
+                        continue;
+                    }
+                    keyValue.Add(subSubSegments[0], subSubSegments[1]);
+                }
+            }
+        }
+        return results;
     }
 
     internal static void Write(string path, ProcessDataStandardFormat processDataStandardFormat, List<Metrology.WS.Results>? wsResults)
     {
         List<string> results = new();
+        if (processDataStandardFormat.InputPDSF is null)
+            throw new NullReferenceException(nameof(processDataStandardFormat.InputPDSF));
         if (processDataStandardFormat.Sequence is null)
             throw new NullReferenceException(nameof(processDataStandardFormat.Sequence));
         string endOffset = "E#######T";
@@ -457,25 +566,25 @@ internal class ProcessDataStandardFormat
             }
         }
         results.Add("END_HEADER");
-        if (processDataStandardFormat.InputPDSF is not null)
-        {
-            results.Add(string.Empty);
-            List<char> hyphens = new();
-            results.AddRange(processDataStandardFormat.InputPDSF.Header.Select(l => l.Replace('\t', '|')));
-            results.Add(string.Empty);
-            results.Add($"|{string.Join("|", processDataStandardFormat.InputPDSF.Columns)}|");
-            for (int i = 0; i < processDataStandardFormat.InputPDSF.Columns.Count; i++)
-                hyphens.Add('-');
-            results.Add($"|{string.Join("|", hyphens)}|");
-            results.AddRange(processDataStandardFormat.InputPDSF.Body.Select(l => l.Replace('\t', '|')));
-            results.Add(string.Empty);
-            results.AddRange(processDataStandardFormat.InputPDSF.Footer.Select(l => l.Replace('\t', '|')));
-            results.Add(string.Empty);
-            results.Add("EOF");
-            results.Add(string.Empty);
-            string json = GetJson(processDataStandardFormat);
-            results.Add(json);
-        }
+        results.Add(string.Empty);
+        List<char> hyphens = new();
+        results.AddRange(processDataStandardFormat.InputPDSF.Header.Select(l => $"|{l.Replace('\t', '|')}|"));
+        results.Add(string.Empty);
+        results.Add($"|{string.Join("|", processDataStandardFormat.InputPDSF.Columns)}|");
+        for (int i = 0; i < processDataStandardFormat.InputPDSF.Columns.Count; i++)
+            hyphens.Add('-');
+        results.Add($"|{string.Join("|", hyphens)}|");
+        results.AddRange(processDataStandardFormat.InputPDSF.Body.Select(l => $"|{l.Replace('\t', '|')}|"));
+        results.Add(string.Empty);
+        results.AddRange(processDataStandardFormat.InputPDSF.Footer.Select(l => $"|{l.Replace('\t', '|')}|"));
+        results.Add(string.Empty);
+        string xml = GetXml(processDataStandardFormat);
+        results.Add(xml);
+        results.Add(string.Empty);
+        results.Add("EOF");
+        results.Add(string.Empty);
+        string json = GetJson(processDataStandardFormat);
+        results.Add(json);
         File.WriteAllText(path, string.Join(Environment.NewLine, results));
     }
 
@@ -518,7 +627,7 @@ internal class ProcessDataStandardFormat
                 {
                     for (int c = 1; c < segments.Length; c++)
                     {
-                        value = segments[c].Replace("\"", "\\\"").Replace("\\", "\\\\");
+                        value = segments[c].Replace("\\", "\\\\").Replace("\"", "\\\"");
                         _ = stringBuilder.Append('"').Append(processDataStandardFormat.Columns[c]).Append("\":\"").Append(value).Append("\",");
                     }
                 }
@@ -526,7 +635,7 @@ internal class ProcessDataStandardFormat
                 {
                     for (int c = 1; c < segments.Length; c++)
                     {
-                        value = segments[c].Replace("\"", "\\\"").Replace("\\", "\\\\");
+                        value = segments[c].Replace("\\", "\\\\").Replace("\"", "\\\"");
                         if (string.IsNullOrEmpty(value))
                             _ = stringBuilder.Append('"').Append(processDataStandardFormat.Columns[c]).Append("\":").Append(value).Append("null,");
                         else if (value.All(char.IsDigit))
@@ -757,10 +866,70 @@ internal class ProcessDataStandardFormat
         return result;
     }
 
+    internal static string GetXml(ProcessDataStandardFormat processDataStandardFormat)
+    {
+        string result;
+        string tag;
+        string value;
+        string[] segments;
+        ReadOnlyCollection<string> body = processDataStandardFormat.InputPDSF is null ?
+            processDataStandardFormat.Body : processDataStandardFormat.InputPDSF.Body;
+        ReadOnlyCollection<string> columns = processDataStandardFormat.InputPDSF is null ?
+            processDataStandardFormat.Columns : processDataStandardFormat.InputPDSF.Columns;
+        List<string> lines = new() { "", "" };
+        for (int i = 0; i < body.Count; i++)
+        {
+            lines.Add("  ");
+            segments = body[i].Trim().Split('\t');
+            if (segments.Length != columns.Count)
+                break;
+            for (int c = 0; c < segments.Length; c++)
+            {
+                value = segments[c].Replace("&", "&amp;")
+                                   .Replace("<", "&lt;")
+                                   .Replace(">", "&gt;")
+                                   .Replace("\"", "&quot;")
+                                   .Replace("'", "&apos;");
+                tag = Regex.Replace(columns[c].Trim('"'), @"[^a-zA-Z0-9]", "_").Split('\r')[0].Split('\n')[0];
+                lines.Add(string.Concat("    <", tag, '>', value, "</", tag, '>'));
+            }
+            lines.Add("  ");
+        }
+        lines.Add("");
+        result = string.Join(Environment.NewLine, lines);
+        return result;
+    }
+
+    internal static string GetXml(string reportFullPath, string[]? lines = null)
+    {
+        string result;
+        bool foundXml = false;
+        List<string> results = new();
+        lines ??= File.ReadAllLines(reportFullPath);
+        foreach (string line in lines)
+        {
+            if (line.StartsWith(">))]
+internal partial class DictionaryStringDictionaryStringStringSourceGenerationContext : JsonSerializerContext
+{
 }
\ No newline at end of file
diff --git a/Adaptation/Shared/ProcessDataStandardFormatMapping.cs b/Adaptation/Shared/ProcessDataStandardFormatMapping.cs
index c5a75ec..f99db0a 100644
--- a/Adaptation/Shared/ProcessDataStandardFormatMapping.cs
+++ b/Adaptation/Shared/ProcessDataStandardFormatMapping.cs
@@ -1,33 +1,34 @@
 using System.Collections.ObjectModel;
+using System.Linq;
 
 namespace Adaptation.Shared;
 
 public class ProcessDataStandardFormatMapping
 {
 
-    public ReadOnlyCollection<string> BackfillColumns { get; private set; }
     public ReadOnlyCollection<int> ColumnIndices { get; private set; }
-    public ReadOnlyCollection<string> IgnoreColumns { get; private set; }
-    public ReadOnlyCollection<string> IndexOnlyColumns { get; private set; }
-    public ReadOnlyDictionary<string, string> KeyValuePairs { get; private set; }
     public ReadOnlyCollection<string> NewColumnNames { get; private set; }
     public ReadOnlyCollection<string> OldColumnNames { get; private set; }
 
-    public ProcessDataStandardFormatMapping(ReadOnlyCollection<string> backfillColumns,
-                                            ReadOnlyCollection<int> columnIndices,
-                                            ReadOnlyCollection<string> ignoreColumns,
-                                            ReadOnlyCollection<string> indexOnlyColumns,
-                                            ReadOnlyDictionary<string, string> keyValuePairs,
+    public ProcessDataStandardFormatMapping(ReadOnlyCollection<int> columnIndices,
                                             ReadOnlyCollection<string> newColumnNames,
                                             ReadOnlyCollection<string> oldColumnNames)
     {
-        BackfillColumns = backfillColumns;
         ColumnIndices = columnIndices;
-        IgnoreColumns = ignoreColumns;
-        IndexOnlyColumns = indexOnlyColumns;
-        KeyValuePairs = keyValuePairs;
         NewColumnNames = newColumnNames;
         OldColumnNames = oldColumnNames;
     }
 
+    internal static ProcessDataStandardFormatMapping Get(string processDataStandardFormatMappingOldColumnNames, string processDataStandardFormatMappingNewColumnNames, string processDataStandardFormatMappingColumnIndices)
+    {
+        ProcessDataStandardFormatMapping result;
+        ReadOnlyCollection<string> newColumnNames = new(processDataStandardFormatMappingNewColumnNames.Split(','));
+        ReadOnlyCollection<string> oldColumnNames = new(processDataStandardFormatMappingOldColumnNames.Split(','));
+        ReadOnlyCollection<int> columnIndices = new(processDataStandardFormatMappingColumnIndices.Split(',').Select(int.Parse).ToArray());
+        result = new(columnIndices: columnIndices,
+                     newColumnNames: newColumnNames,
+                     oldColumnNames: oldColumnNames);
+        return result;
+    }
+
 }
\ No newline at end of file
diff --git a/Adaptation/_Tests/Extract/Production/v2.60.0/MET08DDUPSFS6420.cs b/Adaptation/_Tests/Extract/Production/v2.60.0/MET08DDUPSFS6420.cs
index dbfe2c7..5396be8 100644
--- a/Adaptation/_Tests/Extract/Production/v2.60.0/MET08DDUPSFS6420.cs
+++ b/Adaptation/_Tests/Extract/Production/v2.60.0/MET08DDUPSFS6420.cs
@@ -37,6 +37,23 @@ public class MET08DDUPSFS6420
     [TestMethod]
     public void Production__v2_60_0__MET08DDUPSFS6420__MoveMatchingFiles() => _MET08DDUPSFS6420.Production__v2_60_0__MET08DDUPSFS6420__MoveMatchingFiles();
 
+#if DEBUG
+    [Ignore]
+#endif
+    [TestMethod]
+    public void Production__v2_60_0__MET08DDUPSFS6420__MoveMatchingFiles638918057133464542__Normal()
+    {
+        string check = "*.pdsf";
+        bool validatePDSF = false;
+        MethodBase methodBase = new StackFrame().GetMethod();
+        _MET08DDUPSFS6420.Production__v2_60_0__MET08DDUPSFS6420__MoveMatchingFiles();
+        string[] variables = _MET08DDUPSFS6420.AdaptationTesting.GetVariables(methodBase, check, validatePDSF);
+        IFileRead fileRead = _MET08DDUPSFS6420.AdaptationTesting.Get(methodBase, sourceFileLocation: variables[2], sourceFileFilter: variables[3], useCyclicalForDescription: false);
+        Logistics logistics = new(fileRead);
+        _ = Shared.AdaptationTesting.ReExtractCompareUpdatePassDirectory(variables, fileRead, logistics, validatePDSF);
+        NonThrowTryCatch();
+    }
+
 #if DEBUG
     [Ignore]
 #endif
diff --git a/Adaptation/_Tests/Extract/Production/v2.60.0/TENCOR2.cs b/Adaptation/_Tests/Extract/Production/v2.60.0/TENCOR2.cs
index f164e4e..7d3b8bb 100644
--- a/Adaptation/_Tests/Extract/Production/v2.60.0/TENCOR2.cs
+++ b/Adaptation/_Tests/Extract/Production/v2.60.0/TENCOR2.cs
@@ -54,5 +54,22 @@ public class TENCOR2
         NonThrowTryCatch();
     }
 
+#if DEBUG
+    [Ignore]
+#endif
+    [TestMethod]
+    public void Production__v2_60_0__TENCOR2__pcl638860965797666706__TwoRuns()
+    {
+        string check = "*.pcl";
+        bool validatePDSF = false;
+        _TENCOR2.Production__v2_60_0__TENCOR2__pcl();
+        MethodBase methodBase = new StackFrame().GetMethod();
+        string[] variables = _TENCOR2.AdaptationTesting.GetVariables(methodBase, check, validatePDSF);
+        IFileRead fileRead = _TENCOR2.AdaptationTesting.Get(methodBase, sourceFileLocation: variables[2], sourceFileFilter: variables[3], useCyclicalForDescription: false);
+        Logistics logistics = new(fileRead);
+        _ = Shared.AdaptationTesting.ReExtractCompareUpdatePassDirectory(variables, fileRead, logistics, validatePDSF);
+        NonThrowTryCatch();
+    }
+
 }
 #endif
\ No newline at end of file