diff --git a/Adaptation/.editorconfig b/Adaptation/.editorconfig index 69ecc38..b02c0bc 100644 --- a/Adaptation/.editorconfig +++ b/Adaptation/.editorconfig @@ -109,7 +109,7 @@ dotnet_diagnostic.CA2254.severity = none # CA2254: The logging message template dotnet_diagnostic.IDE0001.severity = warning # IDE0001: Simplify name dotnet_diagnostic.IDE0002.severity = warning # Simplify (member access) - System.Version.Equals("1", "2"); Version.Equals("1", "2"); dotnet_diagnostic.IDE0004.severity = warning # IDE0004: Cast is redundant. -dotnet_diagnostic.IDE0005.severity = warning # Using directive is unnecessary +dotnet_diagnostic.IDE0005.severity = none # Using directive is unnecessary dotnet_diagnostic.IDE0028.severity = none # IDE0028: Collection initialization can be simplified dotnet_diagnostic.IDE0031.severity = warning # Use null propagation (IDE0031) dotnet_diagnostic.IDE0047.severity = warning # IDE0047: Parentheses can be removed @@ -121,6 +121,8 @@ dotnet_diagnostic.IDE0290.severity = none # Use primary constructor [Distance]cs dotnet_diagnostic.IDE0300.severity = none # IDE0300: Collection initialization can be simplified dotnet_diagnostic.IDE0301.severity = none #IDE0301: Collection initialization can be simplified dotnet_diagnostic.IDE0305.severity = none # IDE0305: Collection initialization can be simplified +dotnet_diagnostic.MSTEST0015.severity = none # MSTEST0015: Test method {method} should not be ignored +dotnet_diagnostic.MSTEST0037.severity = error # MSTEST0037: Use proper 'Assert' methods dotnet_diagnostic.SYSLIB1045.severity = none # SYSLIB1045: diagnostics for regex source generation dotnet_naming_rule.abstract_method_should_be_pascal_case.severity = warning dotnet_naming_rule.abstract_method_should_be_pascal_case.style = pascal_case diff --git a/Adaptation/.vscode/tasks.json b/Adaptation/.vscode/tasks.json index 48512b9..d234c1d 100644 --- a/Adaptation/.vscode/tasks.json +++ b/Adaptation/.vscode/tasks.json @@ -1,19 +1,134 @@ { "version": 
"2.0.0", + "inputs": [ + { + "default": "Development", + "description": "Which ASP Net Core Environment?", + "id": "ASPNETCORE_ENVIRONMENT", + "options": [ + "Development", + "Production" + ], + "type": "pickString" + }, + { + "default": "{AssemblyTitle}", + "description": "What Assembly Title?", + "id": "AssemblyTitle", + "type": "promptString" + }, + { + "default": "{Build.BuildId}", + "description": "Which Build BuildId?", + "id": "Build.BuildId", + "type": "promptString" + }, + { + "default": "{Build.Reason}", + "description": "Which Build Reason?", + "id": "Build.Reason", + "type": "promptString" + }, + { + "default": "{Build.Repository.Id}", + "description": "Which Build Repository Id?", + "id": "Build.Repository.Id", + "type": "promptString" + }, + { + "default": "{Build.Repository.Name}", + "description": "Which Build Repository Name?", + "id": "Build.Repository.Name", + "type": "promptString" + }, + { + "default": "{Build.SourceVersion}", + "description": "Which Build Source Version?", + "id": "Build.SourceVersion", + "type": "promptString" + }, + { + "default": "Debug", + "description": "Which Configuration?", + "id": "Configuration", + "options": [ + "Debug", + "Release" + ], + "type": "pickString" + }, + { + "default": "net8.0", + "description": "Which Core Version?", + "id": "CoreVersion", + "options": [ + "net8.0" + ], + "type": "pickString" + }, + { + "default": "C:/Program Files (x86)/Microsoft Visual Studio/2022/BuildTools/MSBuild/Current/Bin/MSBuild.exe", + "description": "Which MS Build?", + "id": "MSBuild", + "type": "promptString" + }, + { + "default": "https://artifactory.intra.infineon.com/artifactory/api/nuget/ngt-fi-package-main-vir/", + "description": "Which Nuget Source?", + "id": "NugetSource", + "type": "promptString" + }, + { + "default": "win-x64", + "description": "Which Runtime?", + "id": "Runtime", + "options": [ + "win-x64", + "win-x32", + "linux-x64", + "linux-x32" + ], + "type": "pickString" + }, + { + "default": "L:/", + 
"description": "Which System DefaultWorkingDirectory?", + "id": "System.DefaultWorkingDirectory", + "options": [ + "L:/", + "D:/", + "C:/" + ], + "type": "pickString" + }, + { + "default": "v4.8", + "description": "Which Core Target Framework Version?", + "id": "TargetFrameworkVersion", + "options": [ + "v4.8" + ], + "type": "pickString" + }, + { + "default": "{UserSecretsId}", + "description": "Which Core User Secrets Id?", + "id": "UserSecretsId", + "type": "promptString" + } + ], "tasks": [ { "label": "Build", "command": "dotnet", "type": "process", "args": [ - "build", - "/property:GenerateFullPaths=true", - "/consoleloggerparameters:NoSummary" + "build" ], "problemMatcher": "$msCompile" }, { - "label": "Test-Debug", + "label": "Test Debug", "command": "dotnet", "type": "process", "args": [ @@ -24,7 +139,7 @@ "problemMatcher": "$msCompile" }, { - "label": "Test-Release", + "label": "Test Release", "command": "dotnet", "type": "process", "args": [ @@ -77,13 +192,33 @@ "problemMatcher": "$msCompile" }, { - "label": "Project", + "label": "Code Project", "type": "shell", "command": "code ../METCLIMATEC.csproj", "problemMatcher": [] }, { - "label": "Git Config", + "label": "Code Read Me", + "type": "shell", + "command": "code ../README.md", + "problemMatcher": [] + }, + { + "label": "File-Folder-Helper AOT s X Day-Helper-2025-03-20", + "type": "shell", + "command": "L:/DevOps/Mesa_FI/File-Folder-Helper/bin/Release/net8.0/win-x64/publish/File-Folder-Helper.exe", + "args": [ + "s", + "X", + "L:/DevOps/EAF-Mesa-Integration/METCLIMATEC", + "Day-Helper-2025-03-20", + "false", + "4" + ], + "problemMatcher": [] + }, + { + "label": "Code Git Config", "type": "shell", "command": "code ../.git/config", "problemMatcher": [] diff --git a/Adaptation/FileHandlers/APC/FileRead.cs b/Adaptation/FileHandlers/APC/FileRead.cs index 5e2ab67..a90c002 100644 --- a/Adaptation/FileHandlers/APC/FileRead.cs +++ b/Adaptation/FileHandlers/APC/FileRead.cs @@ -14,7 +14,7 @@ namespace 
Adaptation.FileHandlers.APC; public class FileRead : Shared.FileRead, IFileRead { - public FileRead(ISMTP smtp, Dictionary fileParameter, string cellInstanceName, int? connectionCount, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList modelObjectParameters, string equipmentDictionaryName, Dictionary> dummyRuns, Dictionary> staticRuns, bool useCyclicalForDescription, bool isEAFHosted) : + public FileRead(ISMTP smtp, Dictionary fileParameter, string cellInstanceName, int? connectionCount, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList modelObjectParameters, string equipmentDictionaryName, Dictionary> dummyRuns, Dictionary> staticRuns, bool useCyclicalForDescription, bool isEAFHosted) : base(new Description(), false, smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null) { _MinFileLength = 10; @@ -120,15 +120,15 @@ public class FileRead : Shared.FileRead, IFileRead private Tuple> GetExtractResult(string reportFullPath, DateTime dateTime) { Tuple> results; - Tuple pdsf = ProcessDataStandardFormat.GetLogisticsColumnsAndBody(reportFullPath); - _Logistics = new Logistics(reportFullPath, pdsf.Item1); + ProcessDataStandardFormat processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath); + _Logistics = new Logistics(reportFullPath, processDataStandardFormat); SetFileParameterLotIDToLogisticsMID(); - JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(pdsf); + JsonElement[] jsonElements = 
ProcessDataStandardFormat.GetArray(processDataStandardFormat); List descriptions = GetDuplicatorDescriptions(jsonElements); Test[] tests = (from l in descriptions select (Test)l.Test).ToArray(); if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0) FileCopy(reportFullPath, dateTime, descriptions); - results = new Tuple>(pdsf.Item1, tests, jsonElements, new List()); + results = new Tuple>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics), tests, jsonElements, new List()); return results; } diff --git a/Adaptation/FileHandlers/Archive/FileRead.cs b/Adaptation/FileHandlers/Archive/FileRead.cs index 0e79541..03029d6 100644 --- a/Adaptation/FileHandlers/Archive/FileRead.cs +++ b/Adaptation/FileHandlers/Archive/FileRead.cs @@ -18,7 +18,7 @@ public class FileRead : Shared.FileRead, IFileRead private readonly string _JobIdParentDirectory; private readonly string _JobIdArchiveParentDirectory; - public FileRead(ISMTP smtp, Dictionary fileParameter, string cellInstanceName, int? connectionCount, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList modelObjectParameters, string equipmentDictionaryName, Dictionary> dummyRuns, Dictionary> staticRuns, bool useCyclicalForDescription, bool isEAFHosted) : + public FileRead(ISMTP smtp, Dictionary fileParameter, string cellInstanceName, int? 
connectionCount, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList modelObjectParameters, string equipmentDictionaryName, Dictionary> dummyRuns, Dictionary> staticRuns, bool useCyclicalForDescription, bool isEAFHosted) : base(new Description(), false, smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null) { _MinFileLength = 10; @@ -144,15 +144,15 @@ public class FileRead : Shared.FileRead, IFileRead private Tuple> GetExtractResult(string reportFullPath, DateTime dateTime) { Tuple> results; - Tuple pdsf = ProcessDataStandardFormat.GetLogisticsColumnsAndBody(reportFullPath); - _Logistics = new Logistics(reportFullPath, pdsf.Item1); + ProcessDataStandardFormat processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath); + _Logistics = new Logistics(reportFullPath, processDataStandardFormat); SetFileParameterLotIDToLogisticsMID(); - JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(pdsf); + JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(processDataStandardFormat); List descriptions = GetDuplicatorDescriptions(jsonElements); Test[] tests = (from l in descriptions select (Test)l.Test).ToArray(); if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0) MoveArchive(reportFullPath, dateTime); - results = new Tuple>(pdsf.Item1, tests, jsonElements, new List()); + results = new Tuple>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics), tests, jsonElements, new List()); return results; } diff --git a/Adaptation/FileHandlers/CellInstanceConnectionName.cs 
b/Adaptation/FileHandlers/CellInstanceConnectionName.cs index 67cb69e..1275155 100644 --- a/Adaptation/FileHandlers/CellInstanceConnectionName.cs +++ b/Adaptation/FileHandlers/CellInstanceConnectionName.cs @@ -9,7 +9,7 @@ namespace Adaptation.FileHandlers; public class CellInstanceConnectionName { - internal static IFileRead Get(ISMTP smtp, Dictionary fileParameter, string cellInstanceName, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList modelObjectParameters, string equipmentDictionaryName, Dictionary> dummyRuns, Dictionary> staticRuns, bool useCyclicalForDescription, int? connectionCount) + internal static IFileRead Get(ISMTP smtp, Dictionary fileParameter, string cellInstanceName, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList modelObjectParameters, string equipmentDictionaryName, Dictionary> dummyRuns, Dictionary> staticRuns, bool useCyclicalForDescription, int? connectionCount) { IFileRead result = cellInstanceConnectionName switch { diff --git a/Adaptation/FileHandlers/Dummy/FileRead.cs b/Adaptation/FileHandlers/Dummy/FileRead.cs index 916f5d7..3b777eb 100644 --- a/Adaptation/FileHandlers/Dummy/FileRead.cs +++ b/Adaptation/FileHandlers/Dummy/FileRead.cs @@ -23,7 +23,7 @@ public class FileRead : Shared.FileRead, IFileRead private int _LastDummyRunIndex; private readonly string[] _CellNames; - public FileRead(ISMTP smtp, Dictionary fileParameter, string cellInstanceName, int? 
connectionCount, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList modelObjectParameters, string equipmentDictionaryName, Dictionary> dummyRuns, Dictionary> staticRuns, bool useCyclicalForDescription, bool isEAFHosted) : + public FileRead(ISMTP smtp, Dictionary fileParameter, string cellInstanceName, int? connectionCount, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList modelObjectParameters, string equipmentDictionaryName, Dictionary> dummyRuns, Dictionary> staticRuns, bool useCyclicalForDescription, bool isEAFHosted) : base(new Description(), false, smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null) { _MinFileLength = 10; diff --git a/Adaptation/FileHandlers/IQSSi/FileRead.cs b/Adaptation/FileHandlers/IQSSi/FileRead.cs index 12bc214..92f59fd 100644 --- a/Adaptation/FileHandlers/IQSSi/FileRead.cs +++ b/Adaptation/FileHandlers/IQSSi/FileRead.cs @@ -14,7 +14,7 @@ namespace Adaptation.FileHandlers.IQSSi; public class FileRead : Shared.FileRead, IFileRead { - public FileRead(ISMTP smtp, Dictionary fileParameter, string cellInstanceName, int? connectionCount, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList modelObjectParameters, string equipmentDictionaryName, Dictionary> dummyRuns, Dictionary> staticRuns, bool useCyclicalForDescription, bool isEAFHosted) : + public FileRead(ISMTP smtp, Dictionary fileParameter, string cellInstanceName, int? 
connectionCount, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList modelObjectParameters, string equipmentDictionaryName, Dictionary> dummyRuns, Dictionary> staticRuns, bool useCyclicalForDescription, bool isEAFHosted) : base(new Description(), false, smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null) { _MinFileLength = 10; @@ -103,7 +103,7 @@ public class FileRead : Shared.FileRead, IFileRead return results; } - private void FileCopy(string reportFullPath, DateTime dateTime, List descriptions) where T : Shared.Properties.IDescription + private void WriteFile(string reportFullPath, DateTime dateTime, ProcessDataStandardFormat processDataStandardFormat, List descriptions) where T : Shared.Properties.IDescription { bool isDummyRun = false; string successDirectory = string.Empty; @@ -111,23 +111,24 @@ public class FileRead : Shared.FileRead, IFileRead string duplicateDirectory = Path.Combine(_FileConnectorConfiguration.SourceFileLocation, _CellInstanceName); if (!Directory.Exists(duplicateDirectory)) _ = Directory.CreateDirectory(duplicateDirectory); - string duplicateFile = Path.Combine(duplicateDirectory, Path.GetFileName(reportFullPath)); - File.Copy(reportFullPath, duplicateFile, overwrite: true); + string duplicateFile = Path.Combine(duplicateDirectory, $"{Path.GetFileName(reportFullPath)}.xml"); + string xml = ProcessDataStandardFormat.GetXml(processDataStandardFormat); + File.WriteAllText(duplicateFile, xml); WaitForFileConsumption(dateTime, descriptions, isDummyRun, successDirectory, duplicateDirectory, collection, duplicateFile); } private Tuple> GetExtractResult(string reportFullPath, 
DateTime dateTime) { Tuple> results; - Tuple pdsf = ProcessDataStandardFormat.GetLogisticsColumnsAndBody(reportFullPath); - _Logistics = new Logistics(reportFullPath, pdsf.Item1); + ProcessDataStandardFormat processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath); + _Logistics = new Logistics(reportFullPath, processDataStandardFormat); SetFileParameterLotIDToLogisticsMID(); - JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(pdsf); + JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(processDataStandardFormat); List descriptions = GetDuplicatorDescriptions(jsonElements); Test[] tests = (from l in descriptions select (Test)l.Test).ToArray(); if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0) - FileCopy(reportFullPath, dateTime, descriptions); - results = new Tuple>(pdsf.Item1, tests, jsonElements, new List()); + WriteFile(reportFullPath, dateTime, processDataStandardFormat, descriptions); + results = new Tuple>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics), tests, jsonElements, new List()); return results; } diff --git a/Adaptation/FileHandlers/MoveMatchingFiles/FileRead.cs b/Adaptation/FileHandlers/MoveMatchingFiles/FileRead.cs index 02c5520..e04cef5 100644 --- a/Adaptation/FileHandlers/MoveMatchingFiles/FileRead.cs +++ b/Adaptation/FileHandlers/MoveMatchingFiles/FileRead.cs @@ -5,17 +5,73 @@ using Adaptation.Shared.Duplicator; using Adaptation.Shared.Methods; using System; using System.Collections.Generic; +using System.Collections.ObjectModel; using System.IO; +using System.Linq; using System.Text; using System.Text.Json; using System.Threading; namespace Adaptation.FileHandlers.MoveMatchingFiles; +#nullable enable + public class FileRead : Shared.FileRead, IFileRead { - public FileRead(ISMTP smtp, Dictionary fileParameter, string cellInstanceName, int? 
connectionCount, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList modelObjectParameters, string equipmentDictionaryName, Dictionary> dummyRuns, Dictionary> staticRuns, bool useCyclicalForDescription, bool isEAFHosted) : + internal class PreWith + { + + internal string ErrFile { get; private set; } + internal string CheckFile { get; private set; } + internal string MatchingFile { get; private set; } + internal string CheckDirectory { get; private set; } + internal string NoWaitDirectory { get; private set; } + + internal PreWith(string checkDirectory, + string checkFile, + string errFile, + string matchingFile, + string noWaitDirectory) + { + ErrFile = errFile; + CheckFile = checkFile; + MatchingFile = matchingFile; + CheckDirectory = checkDirectory; + NoWaitDirectory = noWaitDirectory; + } + + } + + internal class Pre + { + + internal string MatchingFile { get; private set; } + internal string CheckFile { get; private set; } + + internal Pre(string matchingFile, string checkFile) + { + MatchingFile = matchingFile; + CheckFile = checkFile; + } + + } + + internal class Post + { + + internal string ErrFile { get; private set; } + internal string CheckFile { get; private set; } + + internal Post(string checkFile, string errFile) + { + ErrFile = errFile; + CheckFile = checkFile; + } + + } + + public FileRead(ISMTP smtp, Dictionary fileParameter, string cellInstanceName, int? 
connectionCount, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList modelObjectParameters, string equipmentDictionaryName, Dictionary> dummyRuns, Dictionary> staticRuns, bool useCyclicalForDescription, bool isEAFHosted) : base(new Description(), false, smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null) { _MinFileLength = 10; @@ -41,7 +97,8 @@ public class FileRead : Shared.FileRead, IFileRead Move(extractResults); } - void IFileRead.WaitForThread() => WaitForThread(thread: null, threadExceptions: null); + void IFileRead.WaitForThread() => + WaitForThread(thread: null, threadExceptions: null); string IFileRead.GetEventDescription() { @@ -88,7 +145,7 @@ public class FileRead : Shared.FileRead, IFileRead DateTime dateTime = DateTime.Now; results = GetExtractResult(reportFullPath, dateTime); if (results.Item3 is null) - results = new Tuple>(results.Item1, Array.Empty(), JsonSerializer.Deserialize("[]"), results.Item4); + results = new Tuple>(results.Item1, Array.Empty(), JsonSerializer.Deserialize("[]") ?? throw new Exception(), results.Item4); if (results.Item3.Length > 0 && _IsEAFHosted) WritePDSF(this, results.Item3); UpdateLastTicksDuration(DateTime.Now.Ticks - dateTime.Ticks); @@ -104,7 +161,33 @@ public class FileRead : Shared.FileRead, IFileRead return results; } - private static List GetSearchDirectories(int numberLength, string parentDirectory) + private static ReadOnlyCollection GetPreWithCollection(ReadOnlyCollection
 preCollection)
+    {
+        List results = new();
+        string errFile;
+        PreWith preWith;
+        string? checkDirectory;
+        string noWaitDirectory;
+        foreach (Pre pre in preCollection)
+        {
+            errFile = string.Concat(pre.CheckFile, ".err");
+            checkDirectory = Path.GetDirectoryName(pre.CheckFile);
+            if (string.IsNullOrEmpty(checkDirectory))
+                continue;
+            if (!Directory.Exists(checkDirectory))
+                _ = Directory.CreateDirectory(checkDirectory);
+            noWaitDirectory = Path.Combine(checkDirectory, "NoWaitDirectory");
+            preWith = new(checkDirectory: checkDirectory,
+                          checkFile: pre.CheckFile,
+                          errFile: errFile,
+                          matchingFile: pre.MatchingFile,
+                          noWaitDirectory: noWaitDirectory);
+            results.Add(preWith);
+        }
+        return results.AsReadOnly();
+    }
+
+    private static ReadOnlyCollection GetSearchDirectories(int numberLength, string parentDirectory)
     {
         List results = new();
         string[] directories = Directory.GetDirectories(parentDirectory, "*", SearchOption.TopDirectoryOnly);
@@ -115,10 +198,138 @@ public class FileRead : Shared.FileRead, IFileRead
             results.Add(directory);
         }
         results.Sort();
+        return results.AsReadOnly();
+    }
+
+    private static void CreatePointerFile(int numberLength, string parentDirectory, ReadOnlyCollection matchingFiles)
+    {
+        string checkFile;
+        string writeFile;
+        string? directoryName;
+        int parentDirectoryLength = parentDirectory.Length;
+        foreach (string matchingFile in matchingFiles)
+        {
+            directoryName = Path.GetDirectoryName(matchingFile);
+            if (directoryName is null)
+                continue;
+            checkFile = $"{matchingFile[0]}{directoryName.Substring(parentDirectoryLength + numberLength + 1)}";
+            writeFile = Path.Combine(parentDirectory, $"{directoryName.Substring(parentDirectory.Length + 1, numberLength)}.txt");
+            if (File.Exists(writeFile))
+                continue;
+            File.AppendAllLines(writeFile, new string[] { parentDirectory, matchingFile, directoryName, checkFile });
+        }
+    }
+
+    private static ReadOnlyCollection
 GetPreCollection(int numberLength, string parentDirectory, ReadOnlyCollection matchingFiles)
+    {
+        List
 results = new();
+        Pre pre;
+        string checkFile;
+        int parentDirectoryLength = parentDirectory.Length;
+        foreach (string matchingFile in matchingFiles)
+        {
+            checkFile = $"{matchingFile[0]}{matchingFile.Substring(parentDirectoryLength + numberLength + 1)}";
+            pre = new(matchingFile, checkFile);
+            results.Add(pre);
+        }
+        return results.AsReadOnly();
+    }
+
+    private void MoveCollection(DateTime dateTime, ProcessDataStandardFormat processDataStandardFormat, ReadOnlyCollection preWithCollection)
+    {
+        ReadOnlyCollection postCollection = GetPostCollection(dateTime, processDataStandardFormat, preWithCollection);
+        if (postCollection.Count != 0)
+        {
+            Thread.Sleep(500);
+            StringBuilder stringBuilder = new();
+            foreach (Post post in postCollection)
+            {
+                if (File.Exists(post.ErrFile))
+                    _ = stringBuilder.AppendLine(File.ReadAllText(post.ErrFile));
+                if (File.Exists(post.CheckFile))
+                    _ = stringBuilder.AppendLine($"<{post.CheckFile}> was not consumed by the end!");
+            }
+            if (stringBuilder.Length > 0)
+                throw new Exception(stringBuilder.ToString());
+        }
+    }
+
+    private ReadOnlyCollection GetPostCollection(DateTime dateTime, ProcessDataStandardFormat processDataStandardFormat, ReadOnlyCollection preWithCollection)
+    {
+        List results = new();
+        Post post;
+        long preWait;
+        foreach (PreWith preWith in preWithCollection)
+        {
+            if (!_IsEAFHosted)
+                continue;
+            if (!_StaticRuns.TryGetValue(_Logistics.Sequence, out List? wsResults))
+                wsResults = null;
+            if (processDataStandardFormat.InputPDSF is null)
+                File.Move(preWith.MatchingFile, preWith.CheckFile);
+            else
+            {
+                ProcessDataStandardFormat.Write(preWith.CheckFile, processDataStandardFormat, wsResults);
+                File.Delete(preWith.MatchingFile);
+            }
+            if (Directory.Exists(preWith.NoWaitDirectory))
+            {
+                post = new(preWith.CheckFile, preWith.ErrFile);
+                results.Add(post);
+                continue;
+            }
+            if (_FileConnectorConfiguration?.FileHandleWaitTime is null)
+                preWait = DateTime.Now.AddMilliseconds(1234).Ticks;
+            else
+                preWait = DateTime.Now.AddMilliseconds(_FileConnectorConfiguration.FileHandleWaitTime.Value).Ticks;
+            for (short i = 0; i < short.MaxValue; i++)
+            {
+                if (DateTime.Now.Ticks > preWait)
+                    break;
+                Thread.Sleep(500);
+            }
+            for (int i = 0; i < int.MaxValue; i++)
+            {
+                if (File.Exists(preWith.ErrFile))
+                    throw new Exception(File.ReadAllText(preWith.ErrFile));
+                if (!File.Exists(preWith.CheckFile))
+                    break;
+                if (new TimeSpan(DateTime.Now.Ticks - dateTime.Ticks).TotalSeconds > _BreakAfterSeconds)
+                    throw new Exception($"Not all files were consumed after {_BreakAfterSeconds} second(s)!");
+                Thread.Sleep(500);
+            }
+        }
+        return results.AsReadOnly();
+    }
+
+    private Tuple> GetExtractResult(string reportFullPath, DateTime dateTime)
+    {
+        Tuple> results = new(string.Empty, Array.Empty(), Array.Empty(), new List());
+        ProcessDataStandardFormat processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath);
+        _Logistics = new Logistics(reportFullPath, processDataStandardFormat);
+        if (!_IsEAFHosted)
+            ProcessDataStandardFormat.Write("../../.pdsf", processDataStandardFormat, wsResults: null);
+        SetFileParameterLotIDToLogisticsMID();
+        int numberLength = 2;
+        long ticks = dateTime.Ticks;
+        string parentParentDirectory = GetParentParent(reportFullPath);
+        ReadOnlyCollection searchDirectories = GetSearchDirectories(numberLength, parentParentDirectory);
+        ReadOnlyCollection matchingFiles = GetMatchingFiles(ticks, reportFullPath, searchDirectories);
+        if (matchingFiles.Count != searchDirectories.Count)
+            throw new Exception($"Didn't find all files after {_BreakAfterSeconds} second(s)!");
+        if (_IsEAFHosted)
+        {
+            try
+            { CreatePointerFile(numberLength, parentParentDirectory, matchingFiles); }
+            catch (Exception) { } // NOTE(review): deliberate best-effort — pointer-file creation failures are swallowed; consider logging the exception
+        }
+        ReadOnlyCollection
 preCollection = GetPreCollection(numberLength, parentParentDirectory, matchingFiles);
+        ReadOnlyCollection preWithCollection = GetPreWithCollection(preCollection);
+        MoveCollection(dateTime, processDataStandardFormat, preWithCollection);
         return results;
     }
 
-    private List GetMatchingFiles(long ticks, string reportFullPath, List searchDirectories)
+    private ReadOnlyCollection GetMatchingFiles(long ticks, string reportFullPath, ReadOnlyCollection searchDirectories)
     {
         List results = new();
         string[] found;
@@ -137,131 +348,7 @@ public class FileRead : Shared.FileRead, IFileRead
                     break;
             }
         }
-        return results;
-    }
-
-    private static List<(string matchingFile, string checkFile)> GetCollection(int numberLength, string parentDirectory, List matchingFiles)
-    {
-        List<(string matchingFile, string checkFile)> results = new();
-        string checkFile;
-        int parentDirectoryLength = parentDirectory.Length;
-        foreach (string matchingFile in matchingFiles)
-        {
-            checkFile = $"{matchingFile[0]}{matchingFile.Substring(parentDirectoryLength + numberLength + 1)}";
-            results.Add(new(matchingFile, checkFile));
-        }
-        return results;
-    }
-
-    private static List<(string, string, string, string, string)> GetCollection(List<(string matchingFile, string checkFile)> collection)
-    {
-        List<(string, string, string, string, string)> results = new();
-        string errFile;
-        string checkDirectory;
-        string noWaitDirectory;
-        foreach ((string matchingFile, string checkFile) in collection)
-        {
-            errFile = string.Concat(checkFile, ".err");
-            checkDirectory = Path.GetDirectoryName(checkFile);
-            if (!Directory.Exists(checkDirectory))
-                _ = Directory.CreateDirectory(checkDirectory);
-            noWaitDirectory = Path.Combine(checkDirectory, "NoWaitDirectory");
-            results.Add(new(matchingFile, checkFile, errFile, checkDirectory, noWaitDirectory));
-        }
-        return results;
-    }
-
-    private void MoveCollection(DateTime dateTime, List<(string matchingFile, string checkFile)> collection)
-    {
-        long preWait;
-        List<(string checkFile, string errFile)> postCollection = new();
-        foreach ((string matchingFile, string checkFile, string errFile, string checkDirectory, string noWaitDirectory) in GetCollection(collection))
-        {
-            File.Move(matchingFile, checkFile);
-            if (Directory.Exists(noWaitDirectory))
-            {
-                postCollection.Add(new(checkFile, errFile));
-                continue;
-            }
-            if (_FileConnectorConfiguration?.FileHandleWaitTime is null)
-                preWait = DateTime.Now.AddMilliseconds(1234).Ticks;
-            else
-                preWait = DateTime.Now.AddMilliseconds(_FileConnectorConfiguration.FileHandleWaitTime.Value).Ticks;
-            for (short i = 0; i < short.MaxValue; i++)
-            {
-                if (DateTime.Now.Ticks > preWait)
-                    break;
-                Thread.Sleep(500);
-            }
-            for (int i = 0; i < int.MaxValue; i++)
-            {
-                if (File.Exists(errFile))
-                    throw new Exception(File.ReadAllText(errFile));
-                if (!File.Exists(checkFile))
-                    break;
-                if (new TimeSpan(DateTime.Now.Ticks - dateTime.Ticks).TotalSeconds > _BreakAfterSeconds)
-                    throw new Exception($"Not all files were consumed after {_BreakAfterSeconds} second(s)!");
-                Thread.Sleep(500);
-            }
-        }
-        if (postCollection.Count != 0)
-        {
-            Thread.Sleep(500);
-            StringBuilder stringBuilder = new();
-            foreach ((string checkFile, string errFile) in postCollection)
-            {
-                if (File.Exists(errFile))
-                    _ = stringBuilder.AppendLine(File.ReadAllText(errFile));
-                if (File.Exists(checkFile))
-                    _ = stringBuilder.AppendLine($"<{checkFile}> was not consumed by the end!");
-            }
-            if (stringBuilder.Length > 0)
-                throw new Exception(stringBuilder.ToString());
-        }
-    }
-
-    private static void CreatePointerFile(int numberLength, string parentDirectory, List matchingFiles)
-    {
-#nullable enable
-        string checkFile;
-        string writeFile;
-        string? directoryName;
-        int parentDirectoryLength = parentDirectory.Length;
-        foreach (string matchingFile in matchingFiles)
-        {
-            directoryName = Path.GetDirectoryName(matchingFile);
-            if (directoryName is null)
-                continue;
-            checkFile = $"{matchingFile[0]}{directoryName.Substring(parentDirectoryLength + numberLength + 1)}";
-            writeFile = Path.Combine(parentDirectory, $"{directoryName.Substring(parentDirectory.Length + 1, numberLength)}.txt");
-            if (File.Exists(writeFile))
-                continue;
-            File.AppendAllLines(writeFile, new string[] { parentDirectory, matchingFile, directoryName, checkFile });
-        }
-#nullable disable
-    }
-
-    private Tuple> GetExtractResult(string reportFullPath, DateTime dateTime)
-    {
-        Tuple> results = new(string.Empty, null, null, new List());
-        Tuple pdsf = ProcessDataStandardFormat.GetLogisticsColumnsAndBody(reportFullPath);
-        _Logistics = new Logistics(reportFullPath, pdsf.Item1);
-        SetFileParameterLotIDToLogisticsMID();
-        if (string.IsNullOrEmpty(_Logistics.JobID) || _Logistics.JobID == "null")
-            throw new NotSupportedException("JobId is null! Please update Logistics.");
-        int numberLength = 2;
-        long ticks = dateTime.Ticks;
-        string parentParentDirectory = GetParentParent(reportFullPath);
-        List searchDirectories = GetSearchDirectories(numberLength, parentParentDirectory);
-        List matchingFiles = GetMatchingFiles(ticks, reportFullPath, searchDirectories);
-        if (matchingFiles.Count != searchDirectories.Count)
-            throw new Exception($"Didn't find all files after {_BreakAfterSeconds} second(s)!");
-        try
-        { CreatePointerFile(numberLength, parentParentDirectory, matchingFiles); }
-        catch (Exception) { }
-        List<(string matchingFile, string checkFile)> collection = GetCollection(numberLength, parentParentDirectory, matchingFiles);
-        MoveCollection(dateTime, collection);
-        return results;
+        return results.AsReadOnly();
     }
 
 }
\ No newline at end of file
diff --git a/Adaptation/FileHandlers/Processed/FileRead.cs b/Adaptation/FileHandlers/Processed/FileRead.cs
index cf64e0a..1333c8b 100644
--- a/Adaptation/FileHandlers/Processed/FileRead.cs
+++ b/Adaptation/FileHandlers/Processed/FileRead.cs
@@ -17,7 +17,7 @@ public class FileRead : Shared.FileRead, IFileRead
     private readonly string _JobIdParentDirectory;
     private readonly string _JobIdProcessParentDirectory;
 
-    public FileRead(ISMTP smtp, Dictionary fileParameter, string cellInstanceName, int? connectionCount, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList modelObjectParameters, string equipmentDictionaryName, Dictionary> dummyRuns, Dictionary> staticRuns, bool useCyclicalForDescription, bool isEAFHosted) :
+    public FileRead(ISMTP smtp, Dictionary fileParameter, string cellInstanceName, int? connectionCount, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList modelObjectParameters, string equipmentDictionaryName, Dictionary> dummyRuns, Dictionary> staticRuns, bool useCyclicalForDescription, bool isEAFHosted) :
         base(new Description(), false, smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null)
     {
         _MinFileLength = 10;
@@ -129,6 +129,7 @@ public class FileRead : Shared.FileRead, IFileRead
         string destinationJobIdDirectory = Path.Combine(_JobIdProcessParentDirectory, _Logistics.JobID, directoryName);
         string sequenceDirectory = Path.Combine(destinationJobIdDirectory, logisticsSequence);
         // string jsonFileName = Path.Combine(sequenceDirectory, $"{Path.GetFileNameWithoutExtension(reportFullPath)}.json");
+        MoveMatchingFile(jobIdDirectory, matchDirectories[0]);
         Directory.Move(matchDirectories[0], destinationJobIdDirectory);
         if (!Directory.Exists(sequenceDirectory))
             _ = Directory.CreateDirectory(sequenceDirectory);
@@ -136,16 +137,40 @@ public class FileRead : Shared.FileRead, IFileRead
         // File.WriteAllText(jsonFileName, json);
     }
 
+    private static void MoveMatchingFile(string jobIdDirectory, string matchDirectory)
+    {
+        string checkFile;
+        string jobIdDirectoryFileName;
+        string matchDirectoryFileName;
+        string[] jobIdDirectoryFiles = Directory.GetFiles(jobIdDirectory, "*", SearchOption.TopDirectoryOnly);
+        string[] matchDirectoryFiles = Directory.GetFiles(matchDirectory, "*", SearchOption.TopDirectoryOnly);
+        foreach (string jobIdDirectoryFile in jobIdDirectoryFiles)
+        {
+            jobIdDirectoryFileName = Path.GetFileName(jobIdDirectoryFile);
+            foreach (string matchDirectoryFile in matchDirectoryFiles)
+            {
+                matchDirectoryFileName = Path.GetFileName(matchDirectoryFile);
+                if (jobIdDirectoryFileName.StartsWith(matchDirectoryFileName))
+                {
+                    checkFile = Path.Combine(matchDirectory, jobIdDirectoryFileName);
+                    if (File.Exists(checkFile))
+                        continue;
+                    File.Move(jobIdDirectoryFile, checkFile);
+                }
+            }
+        }
+    }
+
     private Tuple> GetExtractResult(string reportFullPath, DateTime dateTime)
     {
         Tuple> results;
-        Tuple pdsf = ProcessDataStandardFormat.GetLogisticsColumnsAndBody(reportFullPath);
-        _Logistics = new Logistics(reportFullPath, pdsf.Item1);
+        ProcessDataStandardFormat processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath);
+        _Logistics = new Logistics(reportFullPath, processDataStandardFormat);
         SetFileParameterLotIDToLogisticsMID();
-        JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(pdsf);
+        JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(processDataStandardFormat);
         List descriptions = csv.ProcessData.GetDescriptions(jsonElements);
         Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
-        results = new Tuple>(pdsf.Item1, tests, jsonElements, new List());
+        results = new Tuple>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics), tests, jsonElements, new List());
         if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
             DirectoryMove(reportFullPath, dateTime, descriptions);
         else if (!_IsEAFHosted)
diff --git a/Adaptation/FileHandlers/SPaCe/FileRead.cs b/Adaptation/FileHandlers/SPaCe/FileRead.cs
index a8155b6..e258bd6 100644
--- a/Adaptation/FileHandlers/SPaCe/FileRead.cs
+++ b/Adaptation/FileHandlers/SPaCe/FileRead.cs
@@ -14,7 +14,7 @@ namespace Adaptation.FileHandlers.SPaCe;
 public class FileRead : Shared.FileRead, IFileRead
 {
 
-    public FileRead(ISMTP smtp, Dictionary fileParameter, string cellInstanceName, int? connectionCount, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList modelObjectParameters, string equipmentDictionaryName, Dictionary> dummyRuns, Dictionary> staticRuns, bool useCyclicalForDescription, bool isEAFHosted) :
+    public FileRead(ISMTP smtp, Dictionary fileParameter, string cellInstanceName, int? connectionCount, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList modelObjectParameters, string equipmentDictionaryName, Dictionary> dummyRuns, Dictionary> staticRuns, bool useCyclicalForDescription, bool isEAFHosted) :
         base(new Description(), false, smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null)
     {
         _MinFileLength = 10;
@@ -117,15 +117,15 @@ public class FileRead : Shared.FileRead, IFileRead
     private Tuple> GetExtractResult(string reportFullPath, DateTime dateTime)
     {
         Tuple> results;
-        Tuple pdsf = ProcessDataStandardFormat.GetLogisticsColumnsAndBody(reportFullPath);
-        _Logistics = new Logistics(reportFullPath, pdsf.Item1);
+        ProcessDataStandardFormat processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath);
+        _Logistics = new Logistics(reportFullPath, processDataStandardFormat);
         SetFileParameterLotIDToLogisticsMID();
-        JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(pdsf);
+        JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(processDataStandardFormat);
         List descriptions = GetDuplicatorDescriptions(jsonElements);
         Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
         if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
             FileCopy(reportFullPath, dateTime, descriptions);
-        results = new Tuple>(pdsf.Item1, tests, jsonElements, new List());
+        results = new Tuple>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics), tests, jsonElements, new List());
         return results;
     }
 
diff --git a/Adaptation/FileHandlers/csv/Description.cs b/Adaptation/FileHandlers/csv/Description.cs
index 6aa5505..5382a8c 100644
--- a/Adaptation/FileHandlers/csv/Description.cs
+++ b/Adaptation/FileHandlers/csv/Description.cs
@@ -24,7 +24,7 @@ public class Description : IDescription, Shared.Properties.IDescription
     public string MID { get; set; }
     //
     public string Date { get; set; }
-    public string Lot { get; set; }
+    public string RDS { get; set; }
     public string Part { get; set; }
     public string Process { get; set; }
     public string Recipe { get; set; }
@@ -65,7 +65,7 @@ public class Description : IDescription, Shared.Properties.IDescription
         List results = new()
         {
             nameof(Date),
-            nameof(Lot),
+            nameof(RDS),
             nameof(Part),
             nameof(Process),
             nameof(Recipe)
@@ -165,7 +165,7 @@ public class Description : IDescription, Shared.Properties.IDescription
                     MID = logistics.MID,
                     //
                     Date = detail.Date,
-                    Lot = processData.MetaData.Date,
+                    RDS = processData.MetaData.Date,
                     Part = $"{processData.MetaData.DeviceId}-{processData.MetaData.DeviceType}-{processData.MetaData.DeviceNumber}",
                     Process = $"{processData.MetaData.DescriptionName}",
                     Recipe = processData.MetaData.Frequency,
@@ -204,7 +204,7 @@ public class Description : IDescription, Shared.Properties.IDescription
             MID = logistics.MID,
             //
             Date = nameof(Date),
-            Lot = nameof(Lot),
+            RDS = nameof(RDS),
             Part = nameof(Part),
             Process = nameof(Process),
             Recipe = nameof(Recipe),
diff --git a/Adaptation/FileHandlers/csv/FileRead.cs b/Adaptation/FileHandlers/csv/FileRead.cs
index a0e0526..e75e215 100644
--- a/Adaptation/FileHandlers/csv/FileRead.cs
+++ b/Adaptation/FileHandlers/csv/FileRead.cs
@@ -15,7 +15,7 @@ public class FileRead : Shared.FileRead, IFileRead
     private long? _TickOffset;
     private readonly CommaSeparatedValuesConfiguration _CommaSeparatedValuesConfiguration;
 
-    public FileRead(ISMTP smtp, Dictionary fileParameter, string cellInstanceName, int? connectionCount, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList modelObjectParameters, string equipmentDictionaryName, Dictionary> dummyRuns, Dictionary> staticRuns, bool useCyclicalForDescription, bool isEAFHosted) :
+    public FileRead(ISMTP smtp, Dictionary fileParameter, string cellInstanceName, int? connectionCount, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList modelObjectParameters, string equipmentDictionaryName, Dictionary> dummyRuns, Dictionary> staticRuns, bool useCyclicalForDescription, bool isEAFHosted) :
         base(new Description(), true, smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null)
     {
         _MinFileLength = 1000;
diff --git a/Adaptation/FileHandlers/csv/ProcessData.cs b/Adaptation/FileHandlers/csv/ProcessData.cs
index bcc727e..f4c7763 100644
--- a/Adaptation/FileHandlers/csv/ProcessData.cs
+++ b/Adaptation/FileHandlers/csv/ProcessData.cs
@@ -25,7 +25,6 @@ public class ProcessData : IProcessData
     public ProcessData(IFileRead fileRead, Logistics logistics, List fileInfoCollection, CommaSeparatedValuesConfiguration commaSeparatedValuesConfiguration, MetaData metaData)
     {
         JobID = logistics.JobID;
-        fileInfoCollection.Clear();
         _Details = new List();
         MesEntity = logistics.MesEntity;
         Parse(fileRead, logistics, fileInfoCollection, commaSeparatedValuesConfiguration, metaData);
@@ -49,9 +48,13 @@ public class ProcessData : IProcessData
             if (description.Test != (int)tests[i])
                 throw new Exception();
         }
+        FileInfo fileInfo = new($"{logistics.ReportFullPath}.descriptions.json");
         List fileReadDescriptions = (from l in descriptions select (Description)l).ToList();
         string json = JsonSerializer.Serialize(fileReadDescriptions, fileReadDescriptions.GetType());
-        JsonElement[] jsonElements = JsonSerializer.Deserialize(json);
+        File.WriteAllText(fileInfo.FullName, json);
+        File.SetLastWriteTime(fileInfo.FullName, logistics.DateTimeFromSequence);
+        fileInfoCollection.Add(fileInfo);
+        JsonElement[] jsonElements = JsonSerializer.Deserialize(json) ?? throw new Exception();
         results = new Tuple>(logistics.Logistics1[0], tests.ToArray(), jsonElements, fileInfoCollection);
         return results;
     }
diff --git a/Adaptation/Shared/Duplicator/Description.cs b/Adaptation/Shared/Duplicator/Description.cs
index f725b29..964612e 100644
--- a/Adaptation/Shared/Duplicator/Description.cs
+++ b/Adaptation/Shared/Duplicator/Description.cs
@@ -12,7 +12,7 @@ public class Description : IDescription, Properties.IDescription
     public int Test { get; set; }
     public int Count { get; set; }
     public int Index { get; set; }
-    public string Lot { get; set; }
+    public string RDS { get; set; }
     //
     public string EventName { get; set; }
     public string NullData { get; set; }
@@ -141,7 +141,7 @@ public class Description : IDescription, Properties.IDescription
                     MID = logistics.MID,
                     //
                     Date = DateTime.Now.ToString(GetDateFormat()),
-                    Lot = string.Empty,
+                    RDS = string.Empty,
                 };
                 results.Add(description);
             }
diff --git a/Adaptation/Shared/FileRead.cs b/Adaptation/Shared/FileRead.cs
index 464ef0b..5d6ef60 100644
--- a/Adaptation/Shared/FileRead.cs
+++ b/Adaptation/Shared/FileRead.cs
@@ -44,9 +44,9 @@ public class FileRead : Properties.IFileRead
     protected readonly string _CellInstanceConnectionNameBase;
     protected readonly Dictionary> _DummyRuns;
     protected readonly Dictionary _FileParameter;
-    protected readonly Dictionary> _StaticRuns;
     protected readonly string _ParameterizedModelObjectDefinitionType;
     protected readonly FileConnectorConfiguration _FileConnectorConfiguration;
+    protected readonly Dictionary> _StaticRuns;
     protected readonly IList _ModelObjectParameterDefinitions;
 
     bool Properties.IFileRead.IsEvent => _IsEvent;
@@ -63,203 +63,6 @@ public class FileRead : Properties.IFileRead
     string Properties.IFileRead.CellInstanceConnectionName => _CellInstanceConnectionName;
     string Properties.IFileRead.ParameterizedModelObjectDefinitionType => _ParameterizedModelObjectDefinitionType;
 
-    public FileRead(IDescription description, bool isEvent, ISMTP smtp, Dictionary fileParameter, string cellInstanceName, int? connectionCount, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList modelObjectParameters, string equipmentDictionaryName, Dictionary> dummyRuns, Dictionary> staticRuns, bool useCyclicalForDescription, bool isEAFHosted)
-    {
-        _SMTP = smtp;
-        _IsEvent = isEvent;
-        _DummyRuns = dummyRuns;
-        _LastTicksDuration = 0;
-        _StaticRuns = staticRuns;
-        _IsEAFHosted = isEAFHosted;
-        _Description = description;
-        _FileParameter = fileParameter;
-        _ReportFullPath = string.Empty;
-        _CellInstanceName = cellInstanceName;
-        _Calendar = new CultureInfo("en-US").Calendar;
-        _Log = LogManager.GetLogger(typeof(FileRead));
-        _UseCyclicalForDescription = useCyclicalForDescription;
-        _CellInstanceConnectionName = cellInstanceConnectionName;
-        _ModelObjectParameterDefinitions = modelObjectParameters;
-        _FileConnectorConfiguration = fileConnectorConfiguration;
-        _ParameterizedModelObjectDefinitionType = parameterizedModelObjectDefinitionType;
-        _IsSourceTimer = fileConnectorConfiguration.SourceFileFilter.StartsWith("*Timer.txt");
-        string cellInstanceConnectionNameBase = cellInstanceConnectionName.Replace("-", string.Empty);
-        _Hyphens = cellInstanceConnectionName.Length - cellInstanceConnectionNameBase.Length;
-        _TracePath = GetPropertyValue(cellInstanceConnectionName, modelObjectParameters, "Path.Trace");
-        _ExceptionSubject = string.Concat("Exception:", _CellInstanceConnectionName, _FileConnectorConfiguration?.SourceDirectoryCloaking);
-        string suffix;
-        string[] segments = _ParameterizedModelObjectDefinitionType.Split('.');
-        string @namespace = segments[0];
-        string eventNameFileRead = "FileRead";
-        string eventName = segments[segments.Length - 1];
-        bool isDuplicator = segments[0] == cellInstanceName;
-        _IsDuplicator = isDuplicator;
-        _CellInstanceConnectionNameBase = cellInstanceConnectionNameBase;
-        if (eventName == eventNameFileRead)
-            suffix = string.Empty;
-        else
-            suffix = string.Concat('_', eventName.Split(new string[] { eventNameFileRead }, StringSplitOptions.RemoveEmptyEntries)[1]);
-        string parameterizedModelObjectDefinitionTypeAppended = string.Concat(@namespace, suffix);
-        if (!isEAFHosted)
-        {
-            if (string.IsNullOrEmpty(equipmentTypeName) || equipmentTypeName != parameterizedModelObjectDefinitionTypeAppended)
-                throw new Exception(cellInstanceConnectionName);
-            if (string.IsNullOrEmpty(equipmentDictionaryName) && isEvent)
-                throw new Exception(cellInstanceConnectionName);
-            if (!string.IsNullOrEmpty(equipmentDictionaryName) && !isEvent && connectionCount > 1)
-                throw new Exception(cellInstanceConnectionName);
-            // if (string.IsNullOrEmpty(equipmentDictionaryName) && !isEvent)
-            //     throw new Exception(cellInstanceConnectionName);
-            // if (!string.IsNullOrEmpty(equipmentDictionaryName) && isEvent)
-            //     throw new Exception(cellInstanceConnectionName);
-        }
-        if (isDuplicator)
-            _MesEntity = string.Empty;
-        else
-            _MesEntity = GetPropertyValue(cellInstanceConnectionName, modelObjectParameters, string.Concat("CellInstance.", cellInstanceName, ".Alias"));
-        _EventName = eventName;
-        _EventNameFileRead = eventNameFileRead;
-        _EquipmentType = parameterizedModelObjectDefinitionTypeAppended;
-        long breakAfterSeconds;
-        if (_FileConnectorConfiguration is null)
-            breakAfterSeconds = 360;
-        else
-        {
-            if (_FileConnectorConfiguration.FileScanningOption == FileConnectorConfiguration.FileScanningOptionEnum.TimeBased)
-                breakAfterSeconds = Math.Abs(_FileConnectorConfiguration.FileHandleTimeout.Value);
-            else if (_FileConnectorConfiguration.FileScanningOption == FileConnectorConfiguration.FileScanningOptionEnum.FileWatcher)
-                breakAfterSeconds = Math.Abs(_FileConnectorConfiguration.FileScanningIntervalInSeconds.Value);
-            else
-                throw new Exception();
-        }
-        _BreakAfterSeconds = breakAfterSeconds;
-        UpdateLastTicksDuration(breakAfterSeconds * 10000000);
-        if (_IsDuplicator)
-        {
-            if (string.IsNullOrEmpty(_FileConnectorConfiguration.TargetFileLocation) || string.IsNullOrEmpty(_FileConnectorConfiguration.ErrorTargetFileLocation))
-                throw new Exception("_Configuration is empty?");
-            if (_FileConnectorConfiguration.TargetFileLocation.Contains('%') || _FileConnectorConfiguration.ErrorTargetFileLocation.Contains('%'))
-                throw new Exception("_Configuration is incorrect for a duplicator!");
-            // if (_FileConnectorConfiguration is not null)
-            // {
-            //     if (string.IsNullOrEmpty(_FileConnectorConfiguration.SourceDirectoryCloaking))
-            //         throw new Exception("SourceDirectoryCloaking is empty?");
-            //     if (!_FileConnectorConfiguration.SourceDirectoryCloaking.StartsWith("~"))
-            //         throw new Exception("SourceDirectoryCloaking is incorrect for a duplicator!");
-            // }
-        }
-    }
-
-    protected static string GetPropertyValue(string cellInstanceConnectionName, IList modelObjectParameters, string propertyName)
-    {
-        string result;
-        List results = (from l in modelObjectParameters where l.Name == propertyName select l.Value).ToList();
-        if (results.Count != 1)
-            throw new Exception(cellInstanceConnectionName);
-        result = results[0];
-        return result;
-    }
-
-    protected static ModelObjectParameterDefinition[] GetProperties(string cellInstanceConnectionName, IList modelObjectParameters, string propertyNamePrefix)
-    {
-        ModelObjectParameterDefinition[] results = (from l in modelObjectParameters where l.Name.StartsWith(propertyNamePrefix) select l).ToArray();
-        if (results.Length == 0)
-            throw new Exception(cellInstanceConnectionName);
-        return results;
-    }
-
-    protected static ModelObjectParameterDefinition[] GetProperties(string cellInstanceConnectionName, IList modelObjectParameters, string propertyNamePrefix, string propertyNameSuffix)
-    {
-        ModelObjectParameterDefinition[] results = (from l in modelObjectParameters where l.Name.StartsWith(propertyNamePrefix) && l.Name.EndsWith(propertyNameSuffix) select l).ToArray();
-        if (results.Length == 0)
-            throw new Exception(cellInstanceConnectionName);
-        return results;
-    }
-
-    protected void UpdateLastTicksDuration(long ticksDuration)
-    {
-        if (ticksDuration < 50000000)
-            ticksDuration = 50000000;
-        _LastTicksDuration = (long)Math.Ceiling(ticksDuration * .667);
-        _Log.Info($"{new TimeSpan(ticksDuration).TotalMilliseconds} TotalMillisecond(s) to process{Environment.NewLine}{_CellInstanceConnectionName}{Environment.NewLine}<{_ReportFullPath}>");
-    }
-
-    protected void WaitForThread(Thread thread, List threadExceptions)
-    {
-        if (thread is not null)
-        {
-            ThreadState threadState;
-            for (short i = 0; i < short.MaxValue; i++)
-            {
-                if (thread is null)
-                    break;
-                else
-                {
-                    threadState = thread.ThreadState;
-                    if (threadState is not ThreadState.Running and not ThreadState.WaitSleepJoin)
-                        break;
-                }
-                Thread.Sleep(500);
-            }
-            lock (threadExceptions)
-            {
-                if (threadExceptions.Count != 0)
-                {
-                    foreach (Exception item in threadExceptions)
-                        _Log.Error(string.Concat(item.Message, Environment.NewLine, Environment.NewLine, item.StackTrace));
-                    Exception exception = threadExceptions[0];
-                    threadExceptions.Clear();
-                    throw exception;
-                }
-            }
-        }
-    }
-
-    private void WriteAllLines(string to, string[] exceptionLines)
-    {
-        string fileName = string.Concat(to, @"\readme.txt");
-        try
-        {
-            if (!Directory.Exists(to))
-                _ = Directory.CreateDirectory(to);
-            File.WriteAllLines(fileName, exceptionLines);
-        }
-        catch (Exception ex) { _Log.Error(ex.Message); }
-    }
-
-    protected string[] Move(Tuple> extractResults, string to, string from, string resolvedFileLocation, Exception exception)
-    {
-        string[] results;
-        bool isErrorFile = exception is not null;
-        if (!to.EndsWith(@"\"))
-            _ = string.Concat(to, @"\");
-        if (!isErrorFile)
-            results = Array.Empty();
-        else
-        {
-            results = new string[] { _Logistics.Sequence.ToString(), _Logistics.ReportFullPath, from, resolvedFileLocation, to, string.Empty, string.Empty, exception.Message, string.Empty, string.Empty, exception.StackTrace };
-            if (!_IsDuplicator)
-                WriteAllLines(to, results);
-        }
-        if (extractResults is not null && extractResults.Item4 is not null && extractResults.Item4.Count != 0)
-        {
-            string itemFile;
-            List directories = new();
-            foreach (FileInfo sourceFile in extractResults.Item4)
-            {
-                if (sourceFile.FullName != _Logistics.ReportFullPath)
-                {
-                    itemFile = sourceFile.FullName.Replace(from, to);
-                    Shared1880(itemFile, directories, sourceFile, isErrorFile);
-                }
-                else if (!isErrorFile && _Logistics is not null)
-                    Shared1811(to, sourceFile);
-            }
-            Shared0231(directories);
-        }
-        return results;
-    }
-
     protected static string GetTupleFile(Logistics logistics, List descriptions, Properties.IScopeInfo scopeInfo, string duplicateDirectory, string duplicateFile) where T : Properties.IDescription
     {
         string result;
@@ -268,10 +71,10 @@ public class FileRead : Properties.IFileRead
         string dateValue;
         string rdsPlaceholder = "%RDS%";
         string mesEntityPlaceholder = "%MesEntity%";
-        if (descriptions.Count == 0 || string.IsNullOrEmpty(descriptions[0].Lot))
+        if (descriptions.Count == 0 || string.IsNullOrEmpty(descriptions[0].RDS))
             rds = logistics.MID;
         else
-            rds = descriptions[0].Lot;
+            rds = descriptions[0].RDS;
         string[] segments = scopeInfo.FileName.Split(new string[] { "DateTime:" }, StringSplitOptions.RemoveEmptyEntries);
         if (segments.Length == 0)
             result = string.Concat(duplicateDirectory, @"\", scopeInfo.FileNameWithoutExtension.Replace(rdsPlaceholder, rds).Replace(mesEntityPlaceholder, logistics.MesEntity));
@@ -384,191 +187,131 @@ public class FileRead : Properties.IFileRead
         }
     }
 
-    protected void SetFileParameter(string key, string value)
+    protected void WaitForFileConsumption<T>(DateTime dateTime, List<T> descriptions, bool isDummyRun, string successDirectory, string duplicateDirectory, List<(Properties.IScopeInfo, string)> collection, string duplicateFile) where T : Properties.IDescription
     {
-        if (_FileConnectorConfiguration is null || _FileConnectorConfiguration.TargetFileLocation.Contains(string.Concat("%", key, "%")) || _FileConnectorConfiguration.ErrorTargetFileLocation.Contains(string.Concat("%", key, "%")) || _FileConnectorConfiguration.TargetFileName.Contains(string.Concat("%", key, "%")) || _FileConnectorConfiguration.ErrorTargetFileName.Contains(string.Concat("%", key, "%")))
-        {
-            if (_FileParameter.ContainsKey(key))
-                _FileParameter[key] = value;
-            else
-                _FileParameter.Add(key, value);
-        }
-    }
-
-    protected void SetFileParameterLotIDToLogisticsMID(bool includeLogisticsSequence = true)
-    {
-        string key;
-        if (!includeLogisticsSequence)
-            key = "LotID";
-        else
-            key = "LotIDWithLogisticsSequence";
-        string value = string.Concat(_Logistics.MID, "_", _Logistics.Sequence, "_", DateTime.Now.Ticks - _Logistics.Sequence);
-        SetFileParameter(key, value);
-    }
-
-    protected void SetFileParameterLotID(string value, bool includeLogisticsSequence = true)
-    {
-        string key;
-        if (!includeLogisticsSequence)
-            key = "LotID";
+        if (!isDummyRun && _IsEAFHosted)
+            WaitForFileConsumption(_FileConnectorConfiguration.SourceDirectoryCloaking, _Logistics, dateTime, descriptions, successDirectory, duplicateDirectory, duplicateFile, collection);
         else
         {
-            key = "LotIDWithLogisticsSequence";
-            value = string.Concat(value, "_", _Logistics.Sequence, "_", DateTime.Now.Ticks - _Logistics.Sequence);
-        }
-        SetFileParameter(key, value);
-    }
-
-    protected void WritePDSF(IFileRead fileRead, JsonElement[] jsonElements)
-    {
-        string directory;
-        string weekOfYear = _Calendar.GetWeekOfYear(_Logistics.DateTimeFromSequence, CalendarWeekRule.FirstDay, DayOfWeek.Sunday).ToString("00");
-        string weekDirectory = $"{_Logistics.DateTimeFromSequence:yyyy}_Week_{weekOfYear}{@"\"}{_Logistics.DateTimeFromSequence:yyyy-MM-dd}";
-        if (!_CellInstanceConnectionName.StartsWith(_CellInstanceName) && _CellInstanceConnectionNameBase == _EquipmentType)
-            directory = Path.Combine(_TracePath, _EquipmentType, "Target", weekDirectory, _CellInstanceName, _CellInstanceConnectionName);
-        else
-            directory = Path.Combine(_TracePath, _EquipmentType, "Source", weekDirectory, _CellInstanceName, _CellInstanceConnectionName);
-        if (!Directory.Exists(directory))
-            _ = Directory.CreateDirectory(directory);
-        string file = Path.Combine(directory, string.Concat(_Logistics.MesEntity, "_", _Logistics.Sequence, ".ipdsf"));
-        string lines = ProcessDataStandardFormat.GetPDSFText(fileRead, _Logistics, jsonElements, logisticsText: string.Empty);
-        File.WriteAllText(file, lines);
-        if (_Logistics.TotalSecondsSinceLastWriteTimeFromSequence > 600)
-        {
-            try
-            { File.SetLastWriteTime(file, _Logistics.DateTimeFromSequence); }
-            catch (Exception) { }
-        }
-    }
-
-    protected void Move(Tuple> extractResults)
-    {
-        if (!_IsEAFHosted)
-        {
-            string to;
-            if (!_FileConnectorConfiguration.TargetFileLocation.EndsWith(Path.DirectorySeparatorChar.ToString()))
-                to = _FileConnectorConfiguration.TargetFileLocation;
-            else
-                to = Path.GetDirectoryName(_FileConnectorConfiguration.TargetFileLocation);
-            foreach (KeyValuePair keyValuePair in _FileParameter)
-                to = to.Replace(string.Concat('%', keyValuePair.Key, '%'), keyValuePair.Value);
-            if (to.Contains('%'))
-                _Log.Debug("Can't debug without EAF Hosting");
-            else
-                _ = Move(extractResults, to, _FileConnectorConfiguration.SourceFileLocation, resolvedFileLocation: string.Empty, exception: null);
-        }
-    }
-
-    protected void TriggerEvents(Tuple> extractResults, List headerNames, Dictionary keyValuePairs)
-    {
-        object value;
-        string segments;
-        string description;
-        List list;
-        for (int i = 0; i < extractResults.Item3.Length; i++)
-        {
-            _Log.Debug(string.Concat("TriggerEvent - {", _Logistics.ReportFullPath, "} ", i, " of ", extractResults.Item3.Length));
-            foreach (JsonProperty jsonProperty in extractResults.Item3[i].EnumerateObject())
+            long breakAfter = DateTime.Now.AddSeconds(_FileConnectorConfiguration.FileHandleWaitTime.Value).Ticks;
+            for (short i = 0; i < short.MaxValue; i++)
             {
-                if (jsonProperty.Value.ValueKind != JsonValueKind.String || !keyValuePairs.TryGetValue(jsonProperty.Name, out segments))
-                    description = string.Empty;
-                else
-                    description = segments.Split('|')[0];
-                if (!_UseCyclicalForDescription || headerNames.Contains(jsonProperty.Name))
-                    value = jsonProperty.Value.ToString();
-                else
-                {
-                    list = new List();
-                    for (int z = 0; z < extractResults.Item3.Length; z++)
-                        list.Add(new object[] { z, extractResults.Item3[z].GetProperty(jsonProperty.Name).ToString() });
-                    value = list;
-                }
-            }
-            if (_UseCyclicalForDescription)
-                break;
-        }
-    }
-
-    protected Tuple> ReExtract(IFileRead fileRead, List headerNames, Dictionary keyValuePairs)
-    {
-        Tuple> results;
-        if (!Directory.Exists(_FileConnectorConfiguration.SourceFileLocation))
-            results = null;
-        else
-        {
-            string[] segments;
-            string[] matches = null;
-            foreach (string subSourceFileFilter in _FileConnectorConfiguration.SourceFileFilters)
-            {
-                segments = subSourceFileFilter.Split('\\');
-                if (_FileConnectorConfiguration.IncludeSubDirectories.Value)
-                    matches = Directory.GetFiles(_FileConnectorConfiguration.SourceFileLocation, segments.Last(), SearchOption.AllDirectories);
-                else
-                    matches = Directory.GetFiles(_FileConnectorConfiguration.SourceFileLocation, segments.Last(), SearchOption.TopDirectoryOnly);
-                if (matches.Length != 0)
+                if (!_IsEAFHosted || DateTime.Now.Ticks > breakAfter)
                     break;
-            }
-            if (matches is null || matches.Length == 0)
-                results = null;
-            else
-            {
-                _ReportFullPath = matches[0];
-                results = fileRead.GetExtractResult(_ReportFullPath, _EventName);
-                if (!_IsEAFHosted)
-                    TriggerEvents(results, headerNames, keyValuePairs);
+                Thread.Sleep(500);
             }
         }
-        return results;
     }
 
-    protected static List GetDuplicatorDescriptions(JsonElement[] jsonElements)
+    public FileRead(IDescription description, bool isEvent, ISMTP smtp, Dictionary fileParameter, string cellInstanceName, int? connectionCount, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList modelObjectParameters, string equipmentDictionaryName, Dictionary> dummyRuns, Dictionary> staticRuns, bool useCyclicalForDescription, bool isEAFHosted)
     {
-        List results = new();
-        Duplicator.Description description;
-        JsonSerializerOptions jsonSerializerOptions = new() { NumberHandling = JsonNumberHandling.AllowReadingFromString | JsonNumberHandling.WriteAsString };
-        foreach (JsonElement jsonElement in jsonElements)
+        _SMTP = smtp;
+        _IsEvent = isEvent;
+        _DummyRuns = dummyRuns;
+        _LastTicksDuration = 0;
+        _StaticRuns = staticRuns;
+        _IsEAFHosted = isEAFHosted;
+        _Description = description;
+        _FileParameter = fileParameter;
+        _ReportFullPath = string.Empty;
+        _CellInstanceName = cellInstanceName;
+        _Calendar = new CultureInfo("en-US").Calendar;
+        _Log = LogManager.GetLogger(typeof(FileRead));
+        _UseCyclicalForDescription = useCyclicalForDescription;
+        _CellInstanceConnectionName = cellInstanceConnectionName;
+        _ModelObjectParameterDefinitions = modelObjectParameters;
+        _FileConnectorConfiguration = fileConnectorConfiguration;
+        _ParameterizedModelObjectDefinitionType = parameterizedModelObjectDefinitionType;
+        _IsSourceTimer = fileConnectorConfiguration.SourceFileFilter.StartsWith("*Timer.txt");
+        string cellInstanceConnectionNameBase = cellInstanceConnectionName.Replace("-", string.Empty);
+        _Hyphens = cellInstanceConnectionName.Length - cellInstanceConnectionNameBase.Length;
+        _TracePath = GetPropertyValue(cellInstanceConnectionName, modelObjectParameters, "Path.Trace");
+        _ExceptionSubject = string.Concat("Exception:", _CellInstanceConnectionName, _FileConnectorConfiguration?.SourceDirectoryCloaking);
+        string suffix;
+        string[] segments = _ParameterizedModelObjectDefinitionType.Split('.');
+        string @namespace = segments[0];
+        string eventNameFileRead = "FileRead";
+        string eventName = segments[segments.Length - 1];
+        bool isDuplicator = segments[0] == cellInstanceName;
+        _IsDuplicator = isDuplicator;
+        _CellInstanceConnectionNameBase = cellInstanceConnectionNameBase;
+        if (eventName == eventNameFileRead)
+            suffix = string.Empty;
+        else
+            suffix = string.Concat('_', eventName.Split(new string[] { eventNameFileRead }, StringSplitOptions.RemoveEmptyEntries)[1]);
+        string parameterizedModelObjectDefinitionTypeAppended = string.Concat(@namespace, suffix);
+        if (!isEAFHosted)
         {
-            if (jsonElement.ValueKind != JsonValueKind.Object)
+            if (string.IsNullOrEmpty(equipmentTypeName) || equipmentTypeName != parameterizedModelObjectDefinitionTypeAppended)
+                throw new Exception(cellInstanceConnectionName);
+            if (string.IsNullOrEmpty(equipmentDictionaryName) && isEvent)
+                throw new Exception(cellInstanceConnectionName);
+            if (!string.IsNullOrEmpty(equipmentDictionaryName) && !isEvent && connectionCount > 1)
+                throw new Exception(cellInstanceConnectionName);
+            // if (string.IsNullOrEmpty(equipmentDictionaryName) && !isEvent)
+            //     throw new Exception(cellInstanceConnectionName);
+            // if (!string.IsNullOrEmpty(equipmentDictionaryName) && isEvent)
+            //     throw new Exception(cellInstanceConnectionName);
+        }
+        if (isDuplicator)
+            _MesEntity = string.Empty;
+        else
+            _MesEntity = GetPropertyValue(cellInstanceConnectionName, modelObjectParameters, string.Concat("CellInstance.", cellInstanceName, ".Alias"));
+        _EventName = eventName;
+        _EventNameFileRead = eventNameFileRead;
+        _EquipmentType = parameterizedModelObjectDefinitionTypeAppended;
+        long breakAfterSeconds;
+        if (_FileConnectorConfiguration is null)
+            breakAfterSeconds = 360;
+        else
+        {
+            if (_FileConnectorConfiguration.FileScanningOption == FileConnectorConfiguration.FileScanningOptionEnum.TimeBased)
+                breakAfterSeconds = Math.Abs(_FileConnectorConfiguration.FileHandleTimeout.Value);
+            else if (_FileConnectorConfiguration.FileScanningOption == FileConnectorConfiguration.FileScanningOptionEnum.FileWatcher)
+                breakAfterSeconds = Math.Abs(_FileConnectorConfiguration.FileScanningIntervalInSeconds.Value);
+            else
                 throw new Exception();
-            description = JsonSerializer.Deserialize(jsonElement.ToString(), jsonSerializerOptions);
-            results.Add(description);
         }
-        return results;
+        _BreakAfterSeconds = breakAfterSeconds;
+        UpdateLastTicksDuration(breakAfterSeconds * 10000000);
+        if (_IsDuplicator)
+        {
+            if (string.IsNullOrEmpty(_FileConnectorConfiguration.TargetFileLocation) || string.IsNullOrEmpty(_FileConnectorConfiguration.ErrorTargetFileLocation))
+                throw new Exception("_Configuration is empty?");
+            if (_FileConnectorConfiguration.TargetFileLocation.Contains('%') || _FileConnectorConfiguration.ErrorTargetFileLocation.Contains('%'))
+                throw new Exception("_Configuration is incorrect for a duplicator!");
+            // if (_FileConnectorConfiguration is not null)
+            // {
+            //     if (string.IsNullOrEmpty(_FileConnectorConfiguration.SourceDirectoryCloaking))
+            //         throw new Exception("SourceDirectoryCloaking is empty?");
+            //     if (!_FileConnectorConfiguration.SourceDirectoryCloaking.StartsWith("~"))
+            //         throw new Exception("SourceDirectoryCloaking is incorrect for a duplicator!");
+            // }
+        }
     }
 
-    private void Shared1880(string itemFile, List directories, FileInfo sourceFile, bool isErrorFile)
+    protected static string GetPropertyValue(string cellInstanceConnectionName, IList<ModelObjectParameterDefinition> modelObjectParameters, string propertyName)
     {
-        string itemDirectory;
-        directories.Add(Path.GetDirectoryName(sourceFile.FullName));
-        itemDirectory = Path.GetDirectoryName(itemFile);
-        FileConnectorConfiguration.PostProcessingModeEnum processingModeEnum;
-        if (!isErrorFile)
-            processingModeEnum = _FileConnectorConfiguration.PostProcessingMode.Value;
-        else
-            processingModeEnum = _FileConnectorConfiguration.ErrorPostProcessingMode.Value;
-        if (processingModeEnum != FileConnectorConfiguration.PostProcessingModeEnum.Delete && !Directory.Exists(itemDirectory))
-        {
-            _ = Directory.CreateDirectory(itemDirectory);
-            FileInfo fileInfo = new(_Logistics.ReportFullPath);
-            Directory.SetCreationTime(itemDirectory, fileInfo.LastWriteTime);
-        }
-        if (_IsEAFHosted)
-        {
-            switch (processingModeEnum)
-            {
-                case FileConnectorConfiguration.PostProcessingModeEnum.Move:
-                    File.Move(sourceFile.FullName, itemFile);
-                    break;
-                case FileConnectorConfiguration.PostProcessingModeEnum.Copy:
-                    File.Copy(sourceFile.FullName, itemFile);
-                    break;
-                case FileConnectorConfiguration.PostProcessingModeEnum.Delete:
-                    File.Delete(sourceFile.FullName);
-                    break;
-                default:
-                    throw new Exception();
-            }
-        }
+        string result;
+        List<string> results = (from l in modelObjectParameters where l.Name == propertyName select l.Value).ToList();
+        if (results.Count != 1)
+            throw new Exception(cellInstanceConnectionName);
+        result = results[0];
+        return result;
+    }
+
+    protected void UpdateLastTicksDuration(long ticksDuration)
+    {
+        if (ticksDuration < 50000000)
+            ticksDuration = 50000000;
+        _LastTicksDuration = (long)Math.Ceiling(ticksDuration * .667);
+        _Log.Info($"{new TimeSpan(ticksDuration).TotalMilliseconds} TotalMillisecond(s) to process{Environment.NewLine}{_CellInstanceConnectionName}{Environment.NewLine}<{_ReportFullPath}>");
+    }
+
+    internal static string GetParentParent(string value)
+    {
+        string result = Path.GetDirectoryName(Path.GetDirectoryName(value));
+        return result;
     }
 
     internal static List<string> GetDirectoryNames(string directory)
@@ -609,6 +352,320 @@ public class FileRead : Properties.IFileRead
 #nullable disable
     }
 
+    internal static string GetJobIdParentDirectory(string directory)
+    {
+        string result;
+        if (!string.IsNullOrEmpty(Path.GetFileName(directory)))
+            result = Path.GetFullPath(GetParentParent(directory));
+        else
+            result = Path.GetFullPath(GetParentParent(Path.GetDirectoryName(directory)));
+        if (!Directory.Exists(result))
+            _ = Directory.CreateDirectory(result);
+        return result;
+    }
+
+    internal static string GetFileNameAfterUnderscoreSplit(string reportFullPath)
+    {
+        string result;
+        string[] segments = Path.GetFileNameWithoutExtension(reportFullPath).Split('_');
+        if (segments.Length <= 2)
+            result = segments[0];
+        else
+            result = string.Concat(segments[0], segments[2]);
+        return result;
+    }
+
+    internal string[] GetInProcessDirectory(string jobIdDirectory)
+    {
+        List<string> results = new();
+        if (!_IsEAFHosted)
+            results = new string[] { jobIdDirectory }.ToList();
+        else
+        {
+            string[] files;
+            string[] directories;
+            string logisticsSequence;
+            for (int i = 0; i < 10; i++)
+            {
+                logisticsSequence = (_Logistics.Sequence + -i).ToString();
+                directories = Directory.GetDirectories(jobIdDirectory, $"*{logisticsSequence}*", SearchOption.TopDirectoryOnly);
+                foreach (string directory in directories)
+                {
+                    files = Directory.GetFiles(directory, "*", SearchOption.TopDirectoryOnly);
+                    if (files.Length == 0)
+                        continue;
+                    results.Add(directory);
+                }
+                if (results.Count == 1)
+                    break;
+            }
+        }
+        if (results.Count != 1)
+            throw new Exception("Didn't find directory by logistics sequence");
+        return results.ToArray();
+    }
+
+    protected static string[] GetMatches(FileConnectorConfiguration fileConnectorConfiguration)
+    {
+        string[] segments;
+        string[] results = null;
+        foreach (string subSourceFileFilter in fileConnectorConfiguration.SourceFileFilters)
+        {
+            segments = subSourceFileFilter.Split('\\');
+            if (fileConnectorConfiguration.IncludeSubDirectories.Value)
+                results = Directory.GetFiles(fileConnectorConfiguration.SourceFileLocation, segments.Last(), SearchOption.AllDirectories);
+            else
+                results = Directory.GetFiles(fileConnectorConfiguration.SourceFileLocation, segments.Last(), SearchOption.TopDirectoryOnly);
+            if (results.Length != 0)
+                break;
+        }
+        return results;
+    }
+
+    protected static void NestExistingFiles(FileConnectorConfiguration fileConnectorConfiguration)
+    {
+        // if (!fileConnectorConfiguration.IncludeSubDirectories.Value && fileConnectorConfiguration.TriggerOnCreated is not null && fileConnectorConfiguration.TriggerOnCreated.Value)
+        if (!fileConnectorConfiguration.IncludeSubDirectories.Value)
+        {
+            string[] matches = GetMatches(fileConnectorConfiguration);
+            if (matches is not null && matches.Length > 0)
+            {
+                string fileName;
+                string nestedDirectory = Path.Combine(fileConnectorConfiguration.SourceFileLocation, DateTime.Now.Ticks.ToString());
+                if (!Directory.Exists(nestedDirectory))
+                    _ = Directory.CreateDirectory(nestedDirectory);
+                foreach (string match in matches)
+                {
+                    fileName = Path.GetFileName(match);
+                    File.Move(match, Path.Combine(nestedDirectory, fileName));
+                }
+            }
+        }
+    }
+
+    protected static List GetDuplicatorDescriptions(JsonElement[] jsonElements)
+    {
+        List<Duplicator.Description> results = new();
+        Duplicator.Description description;
+        JsonSerializerOptions jsonSerializerOptions = new() { NumberHandling = JsonNumberHandling.AllowReadingFromString | JsonNumberHandling.WriteAsString };
+        foreach (JsonElement jsonElement in jsonElements)
+        {
+            if (jsonElement.ValueKind != JsonValueKind.Object)
+                throw new Exception();
+            description = JsonSerializer.Deserialize<Duplicator.Description>(jsonElement.ToString(), jsonSerializerOptions);
+            results.Add(description);
+        }
+        return results;
+    }
+
+    protected static ModelObjectParameterDefinition[] GetProperties(string cellInstanceConnectionName, IList<ModelObjectParameterDefinition> modelObjectParameters, string propertyNamePrefix)
+    {
+        ModelObjectParameterDefinition[] results = (from l in modelObjectParameters where l.Name.StartsWith(propertyNamePrefix) select l).ToArray();
+        if (results.Length == 0)
+            throw new Exception(cellInstanceConnectionName);
+        return results;
+    }
+
+    protected static ModelObjectParameterDefinition[] GetProperties(string cellInstanceConnectionName, IList<ModelObjectParameterDefinition> modelObjectParameters, string propertyNamePrefix, string propertyNameSuffix)
+    {
+        ModelObjectParameterDefinition[] results = (from l in modelObjectParameters where l.Name.StartsWith(propertyNamePrefix) && l.Name.EndsWith(propertyNameSuffix) select l).ToArray();
+        if (results.Length == 0)
+            throw new Exception(cellInstanceConnectionName);
+        return results;
+    }
+
+    protected void SetFileParameter(string key, string value)
+    {
+        if (_FileConnectorConfiguration is null || _FileConnectorConfiguration.TargetFileLocation.Contains(string.Concat("%", key, "%")) || _FileConnectorConfiguration.ErrorTargetFileLocation.Contains(string.Concat("%", key, "%")) || _FileConnectorConfiguration.TargetFileName.Contains(string.Concat("%", key, "%")) || _FileConnectorConfiguration.ErrorTargetFileName.Contains(string.Concat("%", key, "%")))
+        {
+            if (_FileParameter.ContainsKey(key))
+                _FileParameter[key] = value;
+            else
+                _FileParameter.Add(key, value);
+        }
+    }
+
+    protected static void WritePDSF(IFileRead fileRead, JsonElement[] jsonElements)
+    {
+#pragma warning disable CA1510
+        if (fileRead is null)
+            throw new ArgumentNullException(nameof(fileRead));
+        if (jsonElements is null)
+            throw new ArgumentNullException(nameof(jsonElements));
+#pragma warning restore CA1510
+    }
+
+    protected void WaitForThread(Thread thread, List<Exception> threadExceptions)
+    {
+        if (thread is not null)
+        {
+            ThreadState threadState;
+            for (short i = 0; i < short.MaxValue; i++)
+            {
+                if (thread is null)
+                    break;
+                else
+                {
+                    threadState = thread.ThreadState;
+                    if (threadState is not ThreadState.Running and not ThreadState.WaitSleepJoin)
+                        break;
+                }
+                Thread.Sleep(500);
+            }
+            lock (threadExceptions)
+            {
+                if (threadExceptions.Count != 0)
+                {
+                    foreach (Exception item in threadExceptions)
+                        _Log.Error(string.Concat(item.Message, Environment.NewLine, Environment.NewLine, item.StackTrace));
+                    Exception exception = threadExceptions[0];
+                    threadExceptions.Clear();
+                    throw exception;
+                }
+            }
+        }
+    }
+
+    protected void Move(Tuple> extractResults)
+    {
+        if (!_IsEAFHosted)
+        {
+            string to;
+            if (!_FileConnectorConfiguration.TargetFileLocation.EndsWith(Path.DirectorySeparatorChar.ToString()))
+                to = _FileConnectorConfiguration.TargetFileLocation;
+            else
+                to = Path.GetDirectoryName(_FileConnectorConfiguration.TargetFileLocation);
+            foreach (KeyValuePair<string, string> keyValuePair in _FileParameter)
+                to = to.Replace(string.Concat('%', keyValuePair.Key, '%'), keyValuePair.Value);
+            if (to.Contains('%'))
+                _Log.Debug("Can't debug without EAF Hosting");
+            else
+                _ = Move(extractResults, to, _FileConnectorConfiguration.SourceFileLocation, resolvedFileLocation: string.Empty, exception: null);
+        }
+    }
+
+    protected string[] Move(Tuple> extractResults, string to, string from, string resolvedFileLocation, Exception exception)
+    {
+        string[] results;
+        bool isErrorFile = exception is not null;
+        if (!to.EndsWith(@"\"))
+            _ = string.Concat(to, @"\");
+        if (!isErrorFile)
+            results = Array.Empty<string>();
+        else
+        {
+            results = new string[] { _Logistics.Sequence.ToString(), _Logistics.ReportFullPath, from, resolvedFileLocation, to, string.Empty, string.Empty, exception.Message, string.Empty, string.Empty, exception.StackTrace };
+            if (!_IsDuplicator)
+                WriteAllLines(to, results);
+        }
+        if (extractResults is not null && extractResults.Item4 is not null && extractResults.Item4.Count != 0)
+        {
+            string itemFile;
+            List<string> directories = new();
+            foreach (FileInfo sourceFile in extractResults.Item4)
+            {
+                if (sourceFile.FullName != _Logistics.ReportFullPath)
+                {
+                    itemFile = sourceFile.FullName.Replace(from, to);
+                    Shared1880(itemFile, directories, sourceFile, isErrorFile);
+                }
+                else if (!isErrorFile && _Logistics is not null)
+                    Shared1811(to, sourceFile);
+            }
+            Shared0231(directories);
+        }
+        return results;
+    }
+
+    private void WriteAllLines(string to, string[] exceptionLines)
+    {
+        string fileName = string.Concat(to, @"\readme.txt");
+        try
+        {
+            if (!Directory.Exists(to))
+                _ = Directory.CreateDirectory(to);
+            File.WriteAllLines(fileName, exceptionLines);
+        }
+        catch (Exception ex) { _Log.Error(ex.Message); }
+    }
+
+    private void Shared1880(string itemFile, List<string> directories, FileInfo sourceFile, bool isErrorFile)
+    {
+        string itemDirectory;
+        directories.Add(Path.GetDirectoryName(sourceFile.FullName));
+        itemDirectory = Path.GetDirectoryName(itemFile);
+        FileConnectorConfiguration.PostProcessingModeEnum processingModeEnum;
+        if (!isErrorFile)
+            processingModeEnum = _FileConnectorConfiguration.PostProcessingMode.Value;
+        else
+            processingModeEnum = _FileConnectorConfiguration.ErrorPostProcessingMode.Value;
+        if (processingModeEnum != FileConnectorConfiguration.PostProcessingModeEnum.Delete && !Directory.Exists(itemDirectory))
+        {
+            _ = Directory.CreateDirectory(itemDirectory);
+            FileInfo fileInfo = new(_Logistics.ReportFullPath);
+            Directory.SetCreationTime(itemDirectory, fileInfo.LastWriteTime);
+        }
+        if (_IsEAFHosted)
+        {
+            switch (processingModeEnum)
+            {
+                case FileConnectorConfiguration.PostProcessingModeEnum.Move:
+                    File.Move(sourceFile.FullName, itemFile);
+                    break;
+                case FileConnectorConfiguration.PostProcessingModeEnum.Copy:
+                    File.Copy(sourceFile.FullName, itemFile);
+                    break;
+                case FileConnectorConfiguration.PostProcessingModeEnum.Delete:
+                    File.Delete(sourceFile.FullName);
+                    break;
+                case FileConnectorConfiguration.PostProcessingModeEnum.None:
+                    File.Move(sourceFile.FullName, itemFile);
+                    break;
+                default:
+                    throw new Exception();
+            }
+        }
+    }
+
+    private void Shared1811(string to, FileInfo sourceFile)
+    {
+        if (!_IsDuplicator && _FileConnectorConfiguration.SourceFileFilter != "*" && sourceFile.Exists && sourceFile.Length < _MinFileLength)
+        {
+            string directoryName = Path.GetFileName(to);
+            string jobIdDirectory = GetJobIdDirectory(to);
+            DateTime dateTime = DateTime.Now.AddMinutes(-15);
+            string day = $"{_Logistics.DateTimeFromSequence:yyyy-MM-dd}";
+            string weekOfYear = _Calendar.GetWeekOfYear(_Logistics.DateTimeFromSequence, CalendarWeekRule.FirstDay, DayOfWeek.Sunday).ToString("00");
+            string weekDirectory = $"{_Logistics.DateTimeFromSequence:yyyy}_Week_{weekOfYear}";
+            string destinationDirectory = Path.Combine(jobIdDirectory, "_ Ignore 100 bytes", weekDirectory, day, directoryName);
+            if (!Directory.Exists(destinationDirectory))
+                _ = Directory.CreateDirectory(destinationDirectory);
+            File.Move(sourceFile.FullName, string.Concat(destinationDirectory, @"\", sourceFile.Name));
+            try
+            {
+                string[] checkDirectories = Directory.GetDirectories(jobIdDirectory, "*", SearchOption.TopDirectoryOnly);
+                foreach (string checkDirectory in checkDirectories)
+                {
+                    if (!checkDirectory.Contains('_'))
+                        continue;
+                    if (Directory.GetDirectories(checkDirectory, "*", SearchOption.TopDirectoryOnly).Length != 0)
+                        continue;
+                    if (Directory.GetFiles(checkDirectory, "*", SearchOption.TopDirectoryOnly).Length != 0)
+                        continue;
+                    if (Directory.GetDirectories(checkDirectory, "*", SearchOption.AllDirectories).Length != 0)
+                        continue;
+                    if (Directory.GetFiles(checkDirectory, "*", SearchOption.AllDirectories).Length != 0)
+                        continue;
+                    if (new DirectoryInfo(checkDirectory).CreationTime > dateTime)
+                        continue;
+                    Directory.Delete(checkDirectory, recursive: false);
+                }
+            }
+            catch (Exception) { throw; }
+            DeleteEmptyTopDirectories(jobIdDirectory);
+        }
+    }
+
     private string GetJobIdDirectory(string path)
     {
         string result;
@@ -654,44 +711,6 @@ public class FileRead : Properties.IFileRead
         }
     }
 
-    private void Shared1811(string to, FileInfo sourceFile)
-    {
-        if (!_IsDuplicator && _FileConnectorConfiguration.SourceFileFilter != "*" && sourceFile.Exists && sourceFile.Length < _MinFileLength)
-        {
-            string directoryName = Path.GetFileName(to);
-            string jobIdDirectory = GetJobIdDirectory(to);
-            DateTime dateTime = DateTime.Now.AddMinutes(-15);
-            string weekOfYear = _Calendar.GetWeekOfYear(_Logistics.DateTimeFromSequence, CalendarWeekRule.FirstDay, DayOfWeek.Sunday).ToString("00");
-            string weekDirectory = $"{_Logistics.DateTimeFromSequence:yyyy}_Week_{weekOfYear}{@"\"}{_Logistics.DateTimeFromSequence:yyyy-MM-dd}";
-            string destinationDirectory = Path.Combine(jobIdDirectory, "_ Ignore 100 bytes", weekDirectory, directoryName);
-            if (!Directory.Exists(destinationDirectory))
-                _ = Directory.CreateDirectory(destinationDirectory);
-            File.Move(sourceFile.FullName, string.Concat(destinationDirectory, @"\", sourceFile.Name));
-            try
-            {
-                string[] checkDirectories = Directory.GetDirectories(jobIdDirectory, "*", SearchOption.TopDirectoryOnly);
-                foreach (string checkDirectory in checkDirectories)
-                {
-                    if (!checkDirectory.Contains('_'))
-                        continue;
-                    if (Directory.GetDirectories(checkDirectory, "*", SearchOption.TopDirectoryOnly).Length != 0)
-                        continue;
-                    if (Directory.GetFiles(checkDirectory, "*", SearchOption.TopDirectoryOnly).Length != 0)
-                        continue;
-                    if (Directory.GetDirectories(checkDirectory, "*", SearchOption.AllDirectories).Length != 0)
-                        continue;
-                    if (Directory.GetFiles(checkDirectory, "*", SearchOption.AllDirectories).Length != 0)
-                        continue;
-                    if (new DirectoryInfo(checkDirectory).CreationTime > dateTime)
-                        continue;
-                    Directory.Delete(checkDirectory, recursive: false);
-                }
-            }
-            catch (Exception) { throw; }
-            DeleteEmptyTopDirectories(jobIdDirectory);
-        }
-    }
-
     private void Shared0231(List<string> directories)
     {
         if (_FileConnectorConfiguration.PostProcessingMode != FileConnectorConfiguration.PostProcessingModeEnum.Copy)
@@ -704,66 +723,81 @@ public class FileRead : Properties.IFileRead
         }
     }
 
-    protected void WaitForFileConsumption<T>(DateTime dateTime, List<T> descriptions, bool isDummyRun, string successDirectory, string duplicateDirectory, List<(Properties.IScopeInfo, string)> collection, string duplicateFile) where T : Properties.IDescription
+    protected void SetFileParameterLotID(string value, bool includeLogisticsSequence = true)
     {
-        if (!isDummyRun && _IsEAFHosted)
-            WaitForFileConsumption(_FileConnectorConfiguration.SourceDirectoryCloaking, _Logistics, dateTime, descriptions, successDirectory, duplicateDirectory, duplicateFile, collection);
+        string key;
+        if (!includeLogisticsSequence)
+            key = "LotID";
         else
         {
-            long breakAfter = DateTime.Now.AddSeconds(_FileConnectorConfiguration.FileHandleWaitTime.Value).Ticks;
-            for (short i = 0; i < short.MaxValue; i++)
+            key = "LotIDWithLogisticsSequence";
+            value = string.Concat(value, "_", _Logistics.Sequence, "_", DateTime.Now.Ticks - _Logistics.Sequence);
+        }
+        SetFileParameter(key, value);
+    }
+
+    protected void SetFileParameterLotIDToLogisticsMID(bool includeLogisticsSequence = true)
+    {
+        string key;
+        if (!includeLogisticsSequence)
+            key = "LotID";
+        else
+            key = "LotIDWithLogisticsSequence";
+        string value = string.Concat(_Logistics.MID, "_", _Logistics.Sequence, "_", DateTime.Now.Ticks - _Logistics.Sequence);
+        SetFileParameter(key, value);
+    }
+
+    protected Tuple<string, Test[], JsonElement[], List<FileInfo>> ReExtract(IFileRead fileRead, List<string> headerNames, Dictionary<string, string> keyValuePairs)
+    {
+        Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
+        if (!Directory.Exists(_FileConnectorConfiguration.SourceFileLocation))
+            results = null;
+        else
+        {
+            string[] matches = GetMatches(_FileConnectorConfiguration);
+            if (matches is null || matches.Length == 0)
+                results = null;
+            else
             {
-                if (!_IsEAFHosted || DateTime.Now.Ticks > breakAfter)
-                    break;
-                Thread.Sleep(500);
+                _ReportFullPath = matches[0];
+                results = fileRead.GetExtractResult(_ReportFullPath, _EventName);
+                if (!_IsEAFHosted)
+                    TriggerEvents(results, headerNames, keyValuePairs);
             }
         }
-    }
-
-    internal static string GetJobIdParentDirectory(string directory)
-    {
-        string result;
-        if (!string.IsNullOrEmpty(Path.GetFileName(directory)))
-            result = Path.GetFullPath(GetParentParent(directory));
-        else
-            result = Path.GetFullPath(GetParentParent(Path.GetDirectoryName(directory)));
-        if (!Directory.Exists(result))
-            _ = Directory.CreateDirectory(result);
-        return result;
-    }
-
-    internal string[] GetInProcessDirectory(string jobIdDirectory)
-    {
-        string[] results;
-        if (!_IsEAFHosted)
-            results = new string[] { jobIdDirectory };
-        else
-        {
-            string logisticsSequence = _Logistics.Sequence.ToString();
-            results = Directory.GetDirectories(jobIdDirectory, string.Concat(_Logistics.MID, '*', logisticsSequence, '*'), SearchOption.TopDirectoryOnly);
-        }
-        if ((results is null) || results.Length != 1)
-            throw new Exception("Didn't find directory by logistics sequence");
         return results;
     }
 
-    internal static string GetFileNameAfterUnderscoreSplit(string reportFullPath)
+    protected void TriggerEvents(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, List<string> headerNames, Dictionary<string, string> keyValuePairs)
     {
-        string result;
-        string[] segments = Path.GetFileNameWithoutExtension(reportFullPath).Split('_');
-        if (segments.Length <= 2)
-            result = segments[0];
-        else
-            result = string.Concat(segments[0], segments[2]);
-        return result;
-    }
-
-    internal static string GetParentParent(string value)
-    {
-        string result = Path.GetDirectoryName(Path.GetDirectoryName(value));
-        return result;
+        object value;
+        string segments;
+        string description;
+        List<object[]> list;
+        for (int i = 0; i < extractResults.Item3.Length; i++)
+        {
+            _Log.Debug(string.Concat("TriggerEvent - {", _Logistics.ReportFullPath, "} ", i, " of ", extractResults.Item3.Length));
+            foreach (JsonProperty jsonProperty in extractResults.Item3[i].EnumerateObject())
+            {
+                if (jsonProperty.Value.ValueKind != JsonValueKind.String || !keyValuePairs.TryGetValue(jsonProperty.Name, out segments))
+                    description = string.Empty;
+                else
+                    description = segments.Split('|')[0];
+                if (!_UseCyclicalForDescription || headerNames.Contains(jsonProperty.Name))
+                    value = jsonProperty.Value.ToString();
+                else
+                {
+                    list = new List<object[]>();
+                    for (int z = 0; z < extractResults.Item3.Length; z++)
+                        list.Add(new object[] { z, extractResults.Item3[z].GetProperty(jsonProperty.Name).ToString() });
+                    value = list;
+                }
+            }
+            if (_UseCyclicalForDescription)
+                break;
+        }
     }
 
 }
 
-// 2022-06-08 -> Shared - FileRead
\ No newline at end of file
+// 2025-03-25 -> Shared - FileRead
\ No newline at end of file
diff --git a/Adaptation/Shared/Logistics.cs b/Adaptation/Shared/Logistics.cs
index cb1f805..4f187f8 100644
--- a/Adaptation/Shared/Logistics.cs
+++ b/Adaptation/Shared/Logistics.cs
@@ -35,6 +35,9 @@ public class Logistics : ILogistics
     public long Sequence => _Sequence;
     public double TotalSecondsSinceLastWriteTimeFromSequence => _TotalSecondsSinceLastWriteTimeFromSequence;
 
+    private static string DefaultMesEntity(DateTime dateTime) =>
+        string.Concat(dateTime.Ticks, "_MES_ENTITY");
+
     public Logistics(IFileRead fileRead)
     {
         DateTime dateTime = DateTime.Now;
@@ -84,13 +87,13 @@ public class Logistics : ILogistics
         _Logistics2 = new List<Logistics2>();
     }
 
-    public Logistics(string reportFullPath, string logistics)
+    internal Logistics(string reportFullPath, ProcessDataStandardFormat processDataStandardFormat)
     {
         string key;
         DateTime dateTime;
         string[] segments;
         _FileInfo = new(reportFullPath);
-        _Logistics1 = logistics.Split(new string[] { Environment.NewLine }, StringSplitOptions.RemoveEmptyEntries).ToList();
+        _Logistics1 = processDataStandardFormat.Logistics.ToList();
         if (Logistics1.Count == 0 || !Logistics1[0].StartsWith("LOGISTICS_1"))
         {
             _NullData = null;
@@ -190,8 +193,6 @@ public class Logistics : ILogistics
         }
     }
 
-    private static string DefaultMesEntity(DateTime dateTime) => string.Concat(dateTime.Ticks, "_MES_ENTITY");
-
     internal void Update(string mid, string processJobID)
     {
         _MID = mid;
diff --git a/Adaptation/Shared/Metrology/WS.Attachment.cs b/Adaptation/Shared/Metrology/WS.Attachment.cs
index 8edb116..0a7950e 100644
--- a/Adaptation/Shared/Metrology/WS.Attachment.cs
+++ b/Adaptation/Shared/Metrology/WS.Attachment.cs
@@ -6,23 +6,25 @@ public partial class WS
     public class Attachment
     {
 
-        public string SubGroupId { get; set; }
-        public long HeaderId { get; set; }
-        public string HeaderIdDirectory { get; set; }
-        public string UniqueId { get; set; }
-        public string DestinationFileName { get; set; }
-        public string SourceFileName { get; set; }
-        public string AttachmentId { get; set; }
+#nullable enable
 
-        public Attachment(string subGroupId, long headerId, string headerIdDirectory, string uniqueId, string destinationFileName, string sourceFileName)
+        public long HeaderId { get; set; }
+        public string UniqueId { get; set; }
+        public string SubGroupId { get; set; }
+        public string AttachmentId { get; set; }
+        public string SourceFileName { get; set; }
+        public string HeaderIdDirectory { get; set; }
+        public string DestinationFileName { get; set; }
+
+        public Attachment(Results? results, string headerIdDirectory, string uniqueId, string destinationFileName, string sourceFileName)
         {
-            SubGroupId = subGroupId;
-            HeaderId = headerId;
-            HeaderIdDirectory = headerIdDirectory;
             UniqueId = uniqueId;
-            DestinationFileName = destinationFileName;
             SourceFileName = sourceFileName;
+            HeaderIdDirectory = headerIdDirectory;
+            DestinationFileName = destinationFileName;
             AttachmentId = System.Guid.NewGuid().ToString();
+            HeaderId = results?.HeaderId is null ? -1 : results.HeaderId.Value;
+            SubGroupId = results?.SubgroupId is null ? string.Empty : results.SubgroupId.Value.ToString();
         }
 
     }
diff --git a/Adaptation/Shared/Metrology/WS.Results.cs b/Adaptation/Shared/Metrology/WS.Results.cs
index 2d1c603..07685a3 100644
--- a/Adaptation/Shared/Metrology/WS.Results.cs
+++ b/Adaptation/Shared/Metrology/WS.Results.cs
@@ -1,27 +1,75 @@
-using System.Collections.Generic;
+using System;
+using System.Collections.Generic;
+using System.Text;
 using System.Text.Json;
+using System.Text.Json.Serialization;
 
 namespace Adaptation.Shared.Metrology;
 
 public partial class WS
 {
-    // this class represents the response from the Inbound API endpoint
+
     public class Results
     {
-        // true or false if data was written to the database
-        public bool Success { get; set; }
 
-        // if true, contains ID of the Header record in the database
-        public long HeaderID { get; set; }
+#nullable enable
 
-        // if false, this collection will contain a list of errors
-        public List<string> Errors { get; set; }
+        [JsonConstructor]
+        public Results(List<string>? errors,
+                       long? headerId,
+                       long? subgroupId,
+                       bool? success,
+                       List<string>? warnings)
+        {
+            Errors = errors;
+            Success = success;
+            HeaderId = headerId;
+            Warnings = warnings;
+            SubgroupId = subgroupId;
+        }
 
-        // this collection will contain a list of warnings, they will not prevent data from being saved
-        public List<string> Warnings { get; set; }
+        [JsonPropertyName("errors")] public List? Errors { get; set; }
+        [JsonPropertyName("headerID")] public long? HeaderId { get; set; }
+        [JsonPropertyName("subgroupId")] public long? SubgroupId { get; set; }
+        [JsonPropertyName("success")] public bool? Success { get; set; }
+        [JsonPropertyName("warnings")] public List? Warnings { get; set; }
+
+        public override string ToString()
+        {
+            string result = JsonSerializer.Serialize(this, ResultsSourceGenerationContext.Default.Results);
+            return result;
+        }
+
+        internal static Results Get(Results results, long? subgroupId) =>
+            new(results.Errors, results.HeaderId, subgroupId, results.Success, results.Warnings);
+
+        internal static Results Get(string resultsJson, Exception e)
+        {
+            Results results;
+            Exception? exception = e;
+            List<string> errors = new();
+            StringBuilder stringBuilder = new();
+            while (exception is not null)
+            {
+                _ = stringBuilder.AppendLine(exception.Message);
+                exception = exception.InnerException;
+            }
+            errors.Add(resultsJson);
+            errors.Add(stringBuilder.ToString());
+            results = new(errors: errors,
+                          headerId: null,
+                          subgroupId: null,
+                          success: false,
+                          warnings: new());
+            return results;
+        }
 
-        // this is just a helper function to make displaying the results easier
-        public override string ToString() => JsonSerializer.Serialize(this, GetType());
     }
 
+}
+
+[JsonSourceGenerationOptions(WriteIndented = true)]
+[JsonSerializable(typeof(WS.Results))]
+internal partial class ResultsSourceGenerationContext : JsonSerializerContext
+{
 }
\ No newline at end of file
diff --git a/Adaptation/Shared/Metrology/WS.cs b/Adaptation/Shared/Metrology/WS.cs
index c49e61d..b7666db 100644
--- a/Adaptation/Shared/Metrology/WS.cs
+++ b/Adaptation/Shared/Metrology/WS.cs
@@ -10,9 +10,11 @@ namespace Adaptation.Shared.Metrology;
 public partial class WS
 {
 
+#nullable enable
+
     public static (string, Results) SendData(string url, long sequence, string directory, object payload, int timeoutSeconds = 120)
     {
-        Results results = new();
+        Results? wsResults = null;
         string resultsJson = string.Empty;
         try
         {
@@ -30,29 +32,20 @@ public partial class WS
                 };
                 HttpResponseMessage httpResponseMessage = httpClient.SendAsync(httpRequestMessage, HttpCompletionOption.ResponseContentRead).Result;
                 resultsJson = httpResponseMessage.Content.ReadAsStringAsync().Result;
-                results = JsonSerializer.Deserialize<Results>(resultsJson, new JsonSerializerOptions { PropertyNameCaseInsensitive = true });
-                string checkDirectory = Path.Combine(directory, $"-{results.HeaderID}");
+                wsResults = JsonSerializer.Deserialize(resultsJson, ResultsSourceGenerationContext.Default.Results);
+                if (wsResults is null)
+                    throw new NullReferenceException(nameof(wsResults));
+                string checkDirectory = Path.Combine(directory, $"-{wsResults.HeaderId}");
                 if (!Directory.Exists(checkDirectory))
                     _ = Directory.CreateDirectory(checkDirectory);
                 File.WriteAllText(Path.Combine(checkDirectory, $"{sequence}.json"), json);
             }
-            if (!results.Success)
-                results.Errors.Add(results.ToString());
+            if (wsResults.Success is null || !wsResults.Success.Value)
+                wsResults.Errors?.Add(wsResults.ToString());
         }
         catch (Exception e)
-        {
-            Exception exception = e;
-            StringBuilder stringBuilder = new();
-            while (exception is not null)
-            {
-                _ = stringBuilder.AppendLine(exception.Message);
-                exception = exception.InnerException;
-            }
-            results.Errors ??= new List<string>();
-            results.Errors.Add(resultsJson);
-            results.Errors.Add(stringBuilder.ToString());
-        }
-        return new(resultsJson, results);
+        { wsResults ??= Results.Get(resultsJson, e); }
+        return new(resultsJson, wsResults);
     }
 
     public static void AttachFile(string url, Attachment attachment, int timeoutSeconds = 60)
@@ -69,16 +62,20 @@ public partial class WS
         }
     }
 
-    public static void AttachFiles(string url, List<Attachment> headerAttachments = null, List<Attachment> dataAttachments = null)
+    public static void AttachFiles(string url, List<Attachment>? headerAttachments = null, List<Attachment>? dataAttachments = null)
     {
         string directory;
         try
         {
+            string? directoryName;
             if (headerAttachments is not null)
             {
                 foreach (Attachment attachment in headerAttachments)
                 {
-                    directory = Path.Combine(Path.GetDirectoryName(attachment.HeaderIdDirectory), attachment.AttachmentId) ?? throw new Exception();
+                    directoryName = Path.GetDirectoryName(attachment.HeaderIdDirectory);
+                    if (string.IsNullOrEmpty(directoryName))
+                        continue;
+                    directory = Path.Combine(directoryName, attachment.AttachmentId) ?? throw new Exception();
                     if (!Directory.Exists(directory))
                         _ = Directory.CreateDirectory(directory);
                     File.Copy(attachment.SourceFileName, Path.Combine(directory, attachment.DestinationFileName), overwrite: true);
@@ -88,7 +85,10 @@ public partial class WS
             {
                 foreach (Attachment attachment in dataAttachments)
                 {
-                    directory = Path.Combine(Path.GetDirectoryName(attachment.HeaderIdDirectory.Replace("Header", "Data")), attachment.AttachmentId) ?? throw new Exception();
+                    directoryName = Path.GetDirectoryName(attachment.HeaderIdDirectory.Replace("Header", "Data"));
+                    if (string.IsNullOrEmpty(directoryName))
+                        continue;
+                    directory = Path.Combine(directoryName, attachment.AttachmentId) ?? throw new Exception();
                     if (!Directory.Exists(directory))
                         _ = Directory.CreateDirectory(directory);
                     File.Copy(attachment.SourceFileName, Path.Combine(directory, attachment.DestinationFileName), overwrite: true);
@@ -108,7 +108,7 @@ public partial class WS
         }
         catch (Exception e)
         {
-            Exception exception = e;
+            Exception? exception = e;
             StringBuilder stringBuilder = new();
             while (exception is not null)
             {
diff --git a/Adaptation/Shared/ProcessDataStandardFormat.cs b/Adaptation/Shared/ProcessDataStandardFormat.cs
index b2ca7b6..a4df161 100644
--- a/Adaptation/Shared/ProcessDataStandardFormat.cs
+++ b/Adaptation/Shared/ProcessDataStandardFormat.cs
@@ -1,18 +1,23 @@
 using Adaptation.Shared.Methods;
 using System;
 using System.Collections.Generic;
+using System.Collections.ObjectModel;
+using System.Diagnostics;
 using System.Globalization;
 using System.IO;
 using System.Linq;
 using System.Text;
 using System.Text.Json;
+using System.Text.Json.Serialization;
 
 namespace Adaptation.Shared;
 
-public class ProcessDataStandardFormat
+#nullable enable
+
+internal class ProcessDataStandardFormat
 {
 
-    public enum SearchFor
+    internal enum SearchFor
     {
         EquipmentIntegration = 1,
         BusinessIntegration = 2,
@@ -20,325 +25,47 @@ public class ProcessDataStandardFormat
         Archive = 4
     }
 
-    public static string GetPDSFText(IFileRead fileRead, Logistics logistics, JsonElement[] jsonElements, string logisticsText)
+    internal long? Sequence { get; private set; }
+    internal ReadOnlyCollection<string> Body { get; private set; }
+    internal ReadOnlyCollection<string> Footer { get; private set; }
+    internal ReadOnlyCollection<string> Header { get; private set; }
+    internal ReadOnlyCollection<string> Columns { get; private set; }
+    internal ProcessDataStandardFormat? InputPDSF { get; private set; }
+    internal ReadOnlyCollection<string> Logistics { get; private set; }
+
+    internal ProcessDataStandardFormat(ReadOnlyCollection<string> body,
+                                       ReadOnlyCollection<string> columns,
+                                       ReadOnlyCollection<string> footer,
+                                       ReadOnlyCollection<string> header,
+                                       ProcessDataStandardFormat? inputPDSF,
+                                       ReadOnlyCollection<string> logistics,
+                                       long? sequence)
     {
-        string result;
-        if (jsonElements.Length == 0)
-            result = string.Empty;
-        else
-        {
-            int columns = 0;
-            List<string> lines;
-            string endOffset = "E#######T";
-            string dataOffset = "D#######T";
-            string headerOffset = "H#######T";
-            string format = "MM/dd/yyyy HH:mm:ss";
-            StringBuilder stringBuilder = new();
-            lines = new string[] { "HEADER_TAG\tHEADER_VALUE", "FORMAT\t2.00", "NUMBER_PASSES\t0001", string.Concat("HEADER_OFFSET\t", headerOffset), string.Concat("DATA_OFFSET\t", dataOffset), string.Concat("END_OFFSET\t", endOffset) }.ToList();
-            _ = stringBuilder.Append("\"Time\"").Append('\t');
-            _ = stringBuilder.Append("\"A_LOGISTICS\"").Append('\t');
-            _ = stringBuilder.Append("\"B_LOGISTICS\"").Append('\t');
-            for (int i = 0; i < jsonElements.Length;)
-            {
-                foreach (JsonProperty jsonProperty in jsonElements[0].EnumerateObject())
-                {
-                    columns += 1;
-                    _ = stringBuilder.Append('"').Append(jsonProperty.Name).Append('"').Append('\t');
-                }
-                break;
-            }
-            _ = stringBuilder.Remove(stringBuilder.Length - 1, 1);
-            lines.Add(stringBuilder.ToString());
-            for (int i = 0; i < jsonElements.Length; i++)
-            {
-                _ = stringBuilder.Clear();
-                _ = stringBuilder.Append("0.1").Append('\t');
-                _ = stringBuilder.Append('1').Append('\t');
-                _ = stringBuilder.Append('2').Append('\t');
-                foreach (JsonProperty jsonProperty in jsonElements[i].EnumerateObject())
-                    _ = stringBuilder.Append(jsonProperty.Value).Append('\t');
-                _ = stringBuilder.Remove(stringBuilder.Length - 1, 1);
-                lines.Add(stringBuilder.ToString());
-            }
-            lines.Add(string.Concat("NUM_DATA_ROWS ", jsonElements.Length.ToString().PadLeft(9, '0')));
-            lines.Add(string.Concat("NUM_DATA_COLUMNS ", (columns + 3).ToString().PadLeft(9, '0')));
-            lines.Add("DELIMITER	;");
-            lines.Add(string.Concat("START_TIME_FORMAT	", format));
-            lines.Add(string.Concat("START_TIME ", logistics.DateTimeFromSequence.ToString(format))); //12/26/2019 15:22:44
-            lines.Add(string.Concat("LOGISTICS_COLUMN", '\t', "A_LOGISTICS"));
-            lines.Add(string.Concat("LOGISTICS_COLUMN", '\t', "B_LOGISTICS"));
-            if (!string.IsNullOrEmpty(logisticsText))
-                lines.Add(logisticsText);
-            else
-            {
-                lines.Add(string.Concat("LOGISTICS_1", '\t', "A_CHAMBER=;A_INFO=", fileRead.EventName, ";A_INFO2=", fileRead.EquipmentType, ";A_JOBID=", fileRead.CellInstanceName, ";A_MES_ENTITY=", fileRead.MesEntity, ";A_MID=", logistics.MID, ";A_NULL_DATA=", fileRead.NullData, ";A_PPID=NO_PPID;A_PROCESS_JOBID=", logistics.ProcessJobID, ";A_PRODUCT=;A_SEQUENCE=", logistics.Sequence, ";A_WAFER_ID=;"));
-                lines.Add(string.Concat("LOGISTICS_2", '\t', "B_CHAMBER=;B_INFO=", fileRead.EventName, ";B_INFO2=", fileRead.EquipmentType, ";B_JOBID=", fileRead.CellInstanceName, ";B_MES_ENTITY=", fileRead.MesEntity, ";B_MID=", logistics.MID, ";B_NULL_DATA=", fileRead.NullData, ";B_PPID=NO_PPID;B_PROCESS_JOBID=", logistics.ProcessJobID, ";B_PRODUCT=;B_SEQUENCE=", logistics.Sequence, ";B_WAFER_ID=;"));
-                lines.Add("END_HEADER");
-            }
-            _ = stringBuilder.Clear();
-            foreach (string line in lines)
-                _ = stringBuilder.AppendLine(line);
-            result = stringBuilder.ToString();
-            result = result.Replace(headerOffset, result.IndexOf("NUM_DATA_ROWS").ToString().PadLeft(9, '0')).
-                Replace(dataOffset, result.IndexOf('"').ToString().PadLeft(9, '0')).
-                Replace(endOffset, result.Length.ToString().PadLeft(9, '0'));
-        }
-        return result;
+        Body = body;
+        Columns = columns;
+        Footer = footer;
+        Header = header;
+        InputPDSF = inputPDSF;
+        Logistics = logistics;
+        Sequence = sequence;
     }
 
-    public static Tuple<string, string[], string[]> GetLogisticsColumnsAndBody(string reportFullPath, string[] lines = null)
-    {
-        string segment;
-        List<string> body = new();
-        StringBuilder logistics = new();
-        lines ??= File.ReadAllLines(reportFullPath);
-        string[] segments;
-        if (lines.Length < 7)
-            segments = Array.Empty<string>();
-        else
-            segments = lines[6].Trim().Split('\t');
-        List<string> columns = new();
-        for (int c = 0; c < segments.Length; c++)
-        {
-            segment = segments[c].Substring(1, segments[c].Length - 2);
-            if (!columns.Contains(segment))
-                columns.Add(segment);
-            else
-            {
-                for (short i = 1; i < short.MaxValue; i++)
-                {
-                    segment = string.Concat(segment, "_", i);
-                    if (!columns.Contains(segment))
-                    {
-                        columns.Add(segment);
-                        break;
-                    }
-                }
-            }
-        }
-        bool lookForLogistics = false;
-        for (int r = 7; r < lines.Length; r++)
-        {
-            if (lines[r].StartsWith("NUM_DATA_ROWS"))
-                lookForLogistics = true;
-            if (!lookForLogistics)
-            {
-                body.Add(lines[r]);
-                continue;
-            }
-            if (lines[r].StartsWith("LOGISTICS_1"))
-            {
-                for (int i = r; i < lines.Length; i++)
-                {
-                    if (lines[r].StartsWith("END_HEADER"))
-                        break;
-                    _ = logistics.AppendLine(lines[i]);
-                }
-                break;
-            }
-        }
-        return new Tuple<string, string[], string[]>(logistics.ToString(), columns.ToArray(), body.ToArray());
-    }
+    internal static string EquipmentIntegration(bool addSpaces = true, char separator = ' ') =>
+        GetString(SearchFor.EquipmentIntegration, addSpaces, separator);
 
-    public static JsonElement[] GetArray(Tuple<string, string[], string[]> pdsf, bool lookForNumbers = false)
-    {
-        JsonElement[] results;
-        string logistics = pdsf.Item1;
-        string[] columns = pdsf.Item2;
-        string[] bodyLines = pdsf.Item3;
-        if (bodyLines.Length == 0 || !bodyLines[0].Contains('\t'))
-            results = JsonSerializer.Deserialize("[]");
-        else
-        {
-            string value;
-            string[] segments;
-            List<string> lines = new();
-            StringBuilder stringBuilder = new();
-            foreach (string bodyLine in bodyLines)
-            {
-                _ = stringBuilder.Clear();
-                _ = stringBuilder.Append('{');
-                segments = bodyLine.Trim().Split('\t');
-                if (!lookForNumbers)
-                {
-                    for (int c = 1; c < segments.Length; c++)
-                    {
-                        value = segments[c].Replace("\"", "\\\"").Replace("\\", "\\\\");
-                        _ = stringBuilder.Append('"').Append(columns[c]).Append("\":\"").Append(value).Append("\",");
-                    }
-                }
-                else
-                {
-                    for (int c = 1; c < segments.Length; c++)
-                    {
-                        value = segments[c].Replace("\"", "\\\"").Replace("\\", "\\\\");
-                        if (string.IsNullOrEmpty(value))
-                            _ = stringBuilder.Append('"').Append(columns[c]).Append("\":").Append(value).Append("null,");
-                        else if (value.All(char.IsDigit))
-                            _ = stringBuilder.Append('"').Append(columns[c]).Append("\":").Append(value).Append(',');
-                        else
-                            _ = stringBuilder.Append('"').Append(columns[c]).Append("\":\"").Append(value).Append("\",");
-                    }
-                }
-                _ = stringBuilder.Remove(stringBuilder.Length - 1, 1);
-                _ = stringBuilder.AppendLine("}");
-                lines.Add(stringBuilder.ToString());
-            }
-            string json = $"[{string.Join(",", lines)}]";
-            results = JsonSerializer.Deserialize<JsonElement[]>(json);
-        }
-        return results;
-    }
+    internal static string BusinessIntegration(bool addSpaces = true, char separator = ' ') =>
+        GetString(SearchFor.BusinessIntegration, addSpaces, separator);
 
-    public static Dictionary<string, List<string>> GetDictionary(Tuple<string, string[], string[]> pdsf)
-    {
-        Dictionary<string, List<string>> results = new();
-        string[] segments;
-        string[] columns = pdsf.Item2;
-        string[] bodyLines = pdsf.Item3;
-        foreach (string column in columns)
-            results.Add(column, new List<string>());
-        foreach (string bodyLine in bodyLines)
-        {
-            segments = bodyLine.Split('\t');
-            for (int c = 1; c < segments.Length; c++)
-            {
-                if (c >= columns.Length)
-                    continue;
-                results[columns[c]].Add(segments[c]);
-            }
-        }
-        return results;
-    }
+    internal static string SystemExport(bool addSpaces = true, char separator = ' ') =>
+        GetString(SearchFor.SystemExport, addSpaces, separator);
 
-    public static Tuple<string, Dictionary<Test, Dictionary<string, List<string>>>> GetTestDictionary(Tuple<string, string[], string[]> pdsf)
-    {
-        Dictionary<Test, Dictionary<string, List<string>>> results = new();
-        List<string> collection;
-        string testColumn = nameof(Test);
-        Dictionary<string, List<string>> keyValuePairs = GetDictionary(pdsf);
-        if (!keyValuePairs.TryGetValue(testColumn, out collection))
-            throw new Exception();
-        int min;
-        int max;
-        Test testKey;
-        List<string> vs;
-        string columnKey;
-        Dictionary<Test, List<int>> tests = new();
-        for (int i = 0; i < collection.Count; i++)
-        {
-            if (Enum.TryParse(collection[i], out Test test))
-            {
-                if (!results.ContainsKey(test))
-                {
-                    tests.Add(test, new List());
-                    results.Add(test, new Dictionary>());
-                }
-                tests[test].Add(i);
-            }
-        }
-        foreach (KeyValuePair> testKeyValuePair in tests)
-        {
-            testKey = testKeyValuePair.Key;
-            min = testKeyValuePair.Value.Min();
-            max = testKeyValuePair.Value.Max() + 1;
-            foreach (KeyValuePair> keyValuePair in keyValuePairs)
-                results[testKey].Add(keyValuePair.Key, new List());
-            foreach (KeyValuePair> keyValuePair in keyValuePairs)
-            {
-                vs = keyValuePair.Value;
-                columnKey = keyValuePair.Key;
-                for (int i = min; i < max; i++)
-                {
-                    if (vs.Count > i)
-                        results[testKey][columnKey].Add(vs[i]);
-                    else
-                        results[testKey][columnKey].Add(string.Empty);
-                }
-            }
-        }
-        return new Tuple>>>(pdsf.Item1, results);
-    }
+    internal static string Archive(bool addSpaces = true, char separator = ' ') =>
+        GetString(SearchFor.Archive, addSpaces, separator);
 
-    private static string GetString(SearchFor searchFor, bool addSpaces, char separator = ' ')
-    {
-        if (!addSpaces)
-            return string.Concat(((int)searchFor).ToString().PadLeft(2, '0'), searchFor);
-        else
-            return string.Concat(((int)searchFor).ToString().PadLeft(2, '0'), separator, searchFor.ToString().Replace("In", string.Concat(separator, "In")).Replace("Ex", string.Concat(separator, "Ex")));
-    }
+    internal static ProcessDataStandardFormat GetEmpty(Logistics logistics) =>
+        new(new(Array.Empty()), new(Array.Empty()), new(Array.Empty()), new(Array.Empty()), null, new(logistics.Logistics1), null);
 
-    public static string EquipmentIntegration(bool addSpaces = true, char separator = ' ') => GetString(SearchFor.EquipmentIntegration, addSpaces, separator);
-
-    public static string BusinessIntegration(bool addSpaces = true, char separator = ' ') => GetString(SearchFor.BusinessIntegration, addSpaces, separator);
-
-    public static string SystemExport(bool addSpaces = true, char separator = ' ') => GetString(SearchFor.SystemExport, addSpaces, separator);
-
-    public static string Archive(bool addSpaces = true, char separator = ' ') => GetString(SearchFor.Archive, addSpaces, separator);
-
-    public static string GetLines(Logistics logistics, Properties.IScopeInfo scopeInfo, List names, Dictionary> keyValuePairs, string dateFormat, string timeFormat, List pairedParameterNames, bool useDateTimeFromSequence = true, string format = "", List ignoreParameterNames = null)
-    {
-        StringBuilder result = new();
-        ignoreParameterNames ??= new List();
-        if (useDateTimeFromSequence && !string.IsNullOrEmpty(format))
-            throw new Exception();
-        else if (!useDateTimeFromSequence && string.IsNullOrEmpty(format))
-            throw new Exception();
-        string nullData;
-        const string columnDate = "Date";
-        const string columnTime = "Time";
-        const string firstDuplicate = "_1";
-        _ = result.AppendLine(scopeInfo.Header);
-        StringBuilder line = new();
-        if (logistics.NullData is null)
-            nullData = string.Empty;
-        else
-            nullData = logistics.NullData.ToString();
-        int count = (from l in keyValuePairs select l.Value.Count).Min();
-        for (int r = 0; r < count; r++)
-        {
-            _ = line.Clear();
-            _ = line.Append('!');
-            foreach (KeyValuePair> keyValuePair in keyValuePairs)
-            {
-                if (!names.Contains(keyValuePair.Key))
-                    continue;
-                if (ignoreParameterNames.Contains(keyValuePair.Key))
-                    continue;
-                if (pairedParameterNames.Contains(keyValuePair.Key))
-                {
-                    if (string.IsNullOrEmpty(keyValuePair.Value[r]) || keyValuePair.Value[r] == nullData)
-                        continue;
-                    else
-                        _ = result.Append(line).Append(keyValuePair.Key).Append(';').AppendLine(keyValuePair.Value[r]);
-                }
-                else
-                {
-                    if (useDateTimeFromSequence && keyValuePair.Key == columnDate)
-                        _ = line.Append(logistics.DateTimeFromSequence.ToString(dateFormat));
-                    else if (useDateTimeFromSequence && keyValuePair.Key == columnTime)
-                        _ = line.Append(logistics.DateTimeFromSequence.ToString(timeFormat));
-                    else if (!useDateTimeFromSequence && keyValuePair.Key == columnDate && keyValuePair.Value[r].Length == format.Length)
-                        _ = line.Append(DateTime.ParseExact(keyValuePair.Value[r], format, CultureInfo.InvariantCulture).ToString(dateFormat));
-                    else if (!useDateTimeFromSequence && keyValuePair.Key == columnTime && keyValuePairs.ContainsKey(string.Concat(keyValuePair.Key, firstDuplicate)) && keyValuePairs[string.Concat(keyValuePair.Key, firstDuplicate)][r].Length == format.Length)
-                        _ = line.Append(DateTime.ParseExact(keyValuePairs[string.Concat(keyValuePair.Key, firstDuplicate)][r], format, CultureInfo.InvariantCulture).ToString(timeFormat));
-                    else if (string.IsNullOrEmpty(keyValuePair.Value[r]) || keyValuePair.Value[r] == nullData)
-                        _ = line.Append(nullData);
-                    else
-                        _ = line.Append(keyValuePair.Value[r]);
-                    _ = line.Append(';');
-                }
-            }
-            if (pairedParameterNames.Count == 0)
-            {
-                _ = line.Remove(line.Length - 1, 1);
-                _ = result.AppendLine(line.ToString());
-            }
-        }
-        return result.ToString();
-    }
-
-    public static List PDSFToFixedWidth(string reportFullPath)
+    internal static List PDSFToFixedWidth(string reportFullPath)
     {
         List results = new();
         if (!File.Exists(reportFullPath))
@@ -407,4 +134,776 @@ public class ProcessDataStandardFormat
         return results;
     }
 
+    internal static ProcessDataStandardFormat GetProcessDataStandardFormat(string reportFullPath, string[]? lines = null, int columnsLine = 6)
+    {
+        ProcessDataStandardFormat result;
+        long? sequence;
+        string segment;
+        string[] segments;
+        bool addToFooter = false;
+        List body = new();
+        List header = new();
+        List footer = new();
+        List columns = new();
+        ReadOnlyCollection logistics;
+        lines ??= File.ReadAllLines(reportFullPath);
+        if (lines.Length < columnsLine + 1)
+            segments = Array.Empty();
+        else
+        {
+            segments = lines[columnsLine].Trim().Split('\t');
+            for (int i = 0; i < columnsLine; i++)
+                header.Add(lines[i]);
+        }
+        for (int c = 0; c < segments.Length; c++)
+        {
+            segment = segments[c].Substring(1, segments[c].Length - 2);
+            if (!columns.Contains(segment))
+                columns.Add(segment);
+            else
+            {
+                for (short i = 1; i < short.MaxValue; i++)
+                {
+                    segment = string.Concat(segment, "_", i);
+                    if (!columns.Contains(segment))
+                    {
+                        columns.Add(segment);
+                        break;
+                    }
+                }
+            }
+        }
+        for (int r = columnsLine + 1; r < lines.Length; r++)
+        {
+            if (lines[r].StartsWith("NUM_DATA_ROWS"))
+                addToFooter = true;
+            if (!addToFooter)
+                body.Add(lines[r]);
+            else
+            {
+                footer.Add(lines[r]);
+                if (lines[r].StartsWith("END_HEADER"))
+                    break;
+            }
+        }
+        string? linesOne = lines.Length > 0 && body.Count == 0 && columns.Count == 0 ? lines[1] : null;
+        logistics = GetLogistics(footer, linesOne: linesOne);
+        if (logistics.Count == 0)
+            sequence = null;
+        else
+        {
+            segments = logistics[0].Split(new string[] { "SEQUENCE=" }, StringSplitOptions.None);
+            sequence = segments.Length < 2 || !long.TryParse(segments[1].Split(';')[0], out long s) ? null : s;
+        }
+        if (sequence is null && !string.IsNullOrEmpty(reportFullPath))
+        {
+            FileInfo fileInfo = new(reportFullPath);
+            sequence = fileInfo.LastWriteTime.Ticks;
+        }
+        result = new(body: body.AsReadOnly(),
+                     columns: columns.AsReadOnly(),
+                     footer: footer.AsReadOnly(),
+                     header: header.AsReadOnly(),
+                     inputPDSF: null,
+                     logistics: logistics,
+                     sequence: sequence);
+        return result;
+    }
+
+    private static ReadOnlyCollection GetLogistics(List footer, string? linesOne)
+    {
+        List results = new();
+        bool foundLogistics1 = false;
+        foreach (string line in footer)
+        {
+            if (line.StartsWith("END_HEADER"))
+                break;
+            if (line.StartsWith("LOGISTICS_1"))
+                foundLogistics1 = true;
+            if (foundLogistics1 && line.StartsWith("LOGISTICS_"))
+                results.Add(line);
+        }
+        if (!string.IsNullOrEmpty(linesOne) && results.Count == 0)
+            results.Add(linesOne);
+        return results.AsReadOnly();
+    }
+
+    internal static ProcessDataStandardFormat GetProcessDataStandardFormat(string reportFullPath, ProcessDataStandardFormatMapping processDataStandardFormatMapping)
+    {
+        ProcessDataStandardFormat result;
+        const int columnsLine = 6;
+        FileInfo fileInfo = new(reportFullPath);
+        ProcessDataStandardFormat processDataStandardFormat = GetProcessDataStandardFormat(fileInfo.LastWriteTime, columnsLine, fileInfo.FullName, lines: null);
+        JsonElement[]? jsonElements = processDataStandardFormatMapping.OldColumnNames.Count != processDataStandardFormatMapping.ColumnIndices.Count ? null : GetFullArray(processDataStandardFormat);
+        JsonProperty[]? jsonProperties = jsonElements is null || jsonElements.Length == 0 ? null : jsonElements[0].EnumerateObject().ToArray();
+        if (jsonElements is null || jsonProperties is null || jsonProperties.Length != processDataStandardFormatMapping.NewColumnNames.Count)
+            result = processDataStandardFormat;
+        else
+        {
+            result = GetProcessDataStandardFormat(processDataStandardFormatMapping, jsonElements, processDataStandardFormat);
+            if (result.Sequence is null || result.Columns.Count == 0 || result.Body.Count == 0 || result.Logistics.Count == 0)
+                result = processDataStandardFormat;
+        }
+        return result;
+    }
+
+    private static ProcessDataStandardFormat GetProcessDataStandardFormat(DateTime lastWriteTime, int columnsLine, string path, string[]? lines)
+    {
+        ProcessDataStandardFormat result;
+        long? sequence;
+        string[] segments;
+        bool addToFooter = false;
+        List body = new();
+        List header = new();
+        List footer = new();
+        ReadOnlyCollection logistics;
+        lines ??= File.ReadAllLines(path);
+        if (lines.Length <= columnsLine)
+            segments = Array.Empty();
+        else
+        {
+            segments = lines[columnsLine].Split('\t');
+            for (int i = 0; i < columnsLine; i++)
+                header.Add(lines[i]);
+        }
+        string[] columns = segments.Select(l => l.Trim('"')).ToArray();
+        for (int r = columnsLine + 1; r < lines.Length; r++)
+        {
+            if (lines[r].StartsWith("NUM_DATA_ROWS"))
+                addToFooter = true;
+            if (!addToFooter)
+                body.Add(lines[r]);
+            else
+            {
+                footer.Add(lines[r]);
+                if (lines[r].StartsWith("END_HEADER"))
+                    break;
+            }
+        }
+        logistics = GetLogistics(footer, linesOne: null);
+        if (logistics.Count == 0)
+            sequence = null;
+        else
+        {
+            segments = logistics[0].Split(new string[] { "SEQUENCE=" }, StringSplitOptions.None);
+            sequence = segments.Length < 2 || !long.TryParse(segments[1].Split(';')[0], out long s) ? null : s;
+        }
+        sequence ??= lastWriteTime.Ticks;
+        result = new(body: body.AsReadOnly(),
+                     columns: new(columns),
+                     footer: footer.AsReadOnly(),
+                     header: header.AsReadOnly(),
+                     inputPDSF: null,
+                     logistics: logistics,
+                     sequence: sequence);
+        return result;
+    }
+
+    private static JsonElement[]? GetFullArray(ProcessDataStandardFormat processDataStandardFormat)
+    {
+        JsonElement[]? results;
+        if (processDataStandardFormat.Body.Count == 0 || !processDataStandardFormat.Body[0].Contains('\t'))
+            results = JsonSerializer.Deserialize("[]", JsonElementCollectionSourceGenerationContext.Default.JsonElementArray) ?? throw new Exception();
+        else
+        {
+            string value;
+            List segments;
+            List lines = new();
+            StringBuilder stringBuilder = new();
+            foreach (string bodyLine in processDataStandardFormat.Body)
+            {
+                _ = stringBuilder.Clear();
+                _ = stringBuilder.Append('{');
+                segments = bodyLine.Split('\t').ToList();
+                for (int c = 0; c < segments.Count; c++)
+                {
+                    value = segments[c].Replace("\\", "\\\\").Replace("\"", "\\\"");
+                    _ = stringBuilder.Append('"').Append(processDataStandardFormat.Columns[c]).Append("\":\"").Append(value).Append("\",");
+                }
+                _ = stringBuilder.Remove(stringBuilder.Length - 1, 1);
+                _ = stringBuilder.AppendLine("}");
+                lines.Add(stringBuilder.ToString());
+            }
+            string json = $"[{string.Join(",", lines)}]";
+            results = JsonSerializer.Deserialize(json, JsonElementCollectionSourceGenerationContext.Default.JsonElementArray);
+        }
+        return results;
+    }
+
+    private static ProcessDataStandardFormat GetProcessDataStandardFormat(ProcessDataStandardFormatMapping processDataStandardFormatMapping, JsonElement[] jsonElements, ProcessDataStandardFormat processDataStandardFormat)
+    {
+        ProcessDataStandardFormat result;
+        int column;
+        string value;
+        JsonProperty jsonProperty;
+        List debug = new();
+        List values = new();
+        List results = new();
+        JsonProperty[] jsonProperties;
+        List unknownColumns = new();
+        for (int i = 0; i < jsonElements.Length; i++)
+        {
+            debug.Clear();
+            values.Clear();
+            if (jsonElements[i].ValueKind != JsonValueKind.Object)
+            {
+                unknownColumns.Add(string.Empty);
+                break;
+            }
+            jsonProperties = jsonElements[i].EnumerateObject().ToArray();
+            if (jsonProperties.Length != processDataStandardFormatMapping.NewColumnNames.Count)
+                continue;
+            for (int c = 0; c < processDataStandardFormatMapping.ColumnIndices.Count; c++)
+            {
+                column = processDataStandardFormatMapping.ColumnIndices[c];
+                if (column == -1)
+                {
+                    value = processDataStandardFormatMapping.OldColumnNames[c];
+                    debug.Add($"");
+                }
+                else
+                {
+                    jsonProperty = jsonProperties[column];
+                    value = jsonProperty.Value.ToString();
+                    debug.Add($"");
+                }
+                values.Add(value);
+            }
+            results.Add(string.Join("\t", values));
+        }
+        if (Debugger.IsAttached)
+            File.WriteAllText("../../.txt", string.Join(Environment.NewLine, debug.OrderBy(l => l)));
+        result = new(body: new(results),
+                     columns: processDataStandardFormatMapping.OldColumnNames,
+                     footer: processDataStandardFormat.Footer,
+                     header: processDataStandardFormat.Header,
+                     inputPDSF: processDataStandardFormat,
+                     logistics: processDataStandardFormat.Logistics,
+                     sequence: processDataStandardFormat.Sequence);
+        return result;
+    }
+
+    private static string GetJson(ProcessDataStandardFormat processDataStandardFormat)
+    {
+        if (processDataStandardFormat.InputPDSF is null)
+            throw new NullReferenceException(nameof(processDataStandardFormat.InputPDSF));
+        string result;
+        string line;
+        string value;
+        string[] segments;
+        List lines = new();
+        for (int i = 0; i < processDataStandardFormat.InputPDSF.Body.Count; i++)
+        {
+            line = "{";
+            segments = processDataStandardFormat.InputPDSF.Body[i].Trim().Split('\t');
+            if (segments.Length != processDataStandardFormat.InputPDSF.Columns.Count)
+                break;
+            for (int c = 0; c < segments.Length; c++)
+            {
+                value = segments[c].Replace("\\", "\\\\").Replace("\"", "\\\"");
+                line += string.Concat('"', processDataStandardFormat.InputPDSF.Columns[c].Trim('"'), '"', ':', '"', value, '"', ',');
+            }
+            line = string.Concat(line.Substring(0, line.Length - 1), '}');
+            lines.Add(line);
+        }
+        string? json = null;
+        if (processDataStandardFormat.Footer is not null && processDataStandardFormat.Footer.Count > 0)
+        {
+            Dictionary footerKeyValuePairs = GetFooterKeyValuePairs(processDataStandardFormat.Footer);
+            Dictionary> logisticKeyValuePairs = GetLogisticKeyValuePairs(processDataStandardFormat.Footer, footerKeyValuePairs);
+            json = JsonSerializer.Serialize(logisticKeyValuePairs, DictionaryStringDictionaryStringStringSourceGenerationContext.Default.DictionaryStringDictionaryStringString);
+        }
+        string footerText = string.IsNullOrEmpty(json) || json == "{}" ? string.Empty : $",{Environment.NewLine}\"PDSF\":{Environment.NewLine}{json}";
+        result = string.Concat(
+            '{',
+            Environment.NewLine,
+            '"',
+            "Count",
+            '"',
+            ": ",
+            processDataStandardFormat.Body.Count,
+            ',',
+            Environment.NewLine,
+            '"',
+            "Records",
+            '"',
+            ": ",
+            Environment.NewLine,
+            '[',
+            Environment.NewLine,
+            string.Join($",{Environment.NewLine}", lines),
+            Environment.NewLine,
+            ']',
+            ',',
+            Environment.NewLine,
+            '"',
+            "Sequence",
+            '"',
+            ": ",
+            processDataStandardFormat.Sequence,
+            Environment.NewLine,
+            footerText,
+            Environment.NewLine,
+            '}');
+        return result;
+    }
+
+    private static Dictionary GetFooterKeyValuePairs(ReadOnlyCollection footerLines)
+    {
+        Dictionary results = new();
+        string[] segments;
+        foreach (string footerLine in footerLines)
+        {
+            segments = footerLine.Split('\t');
+            if (segments.Length != 2 || string.IsNullOrEmpty(segments[1].Trim()))
+            {
+                continue;
+            }
+            if (segments[1].Contains(';'))
+            {
+                continue;
+            }
+            else
+            {
+                if (results.ContainsKey(segments[0]))
+                {
+                    continue;
+                }
+                results.Add(segments[0], segments[1]);
+            }
+        }
+        return results;
+    }
+
+    private static Dictionary> GetLogisticKeyValuePairs(ReadOnlyCollection footerLines, Dictionary footerKeyValuePairs)
+    {
+        Dictionary> results = new();
+        string[] segments;
+        string[] subSegments;
+        string[] subSubSegments;
+        Dictionary? keyValue;
+        results.Add("Footer", footerKeyValuePairs);
+        foreach (string footerLine in footerLines)
+        {
+            segments = footerLine.Split('\t');
+            if (segments.Length != 2 || string.IsNullOrEmpty(segments[1].Trim()))
+            {
+                continue;
+            }
+            if (!segments[1].Contains(';') || !segments[1].Contains('='))
+            {
+                continue;
+            }
+            else
+            {
+                subSegments = segments[1].Split(';');
+                if (subSegments.Length < 1)
+                {
+                    continue;
+                }
+                if (!results.TryGetValue(segments[0], out keyValue))
+                {
+                    results.Add(segments[0], new());
+                    if (!results.TryGetValue(segments[0], out keyValue))
+                    {
+                        throw new Exception();
+                    }
+                }
+                foreach (string segment in subSegments)
+                {
+                    subSubSegments = segment.Split('=');
+                    if (subSubSegments.Length != 2)
+                    {
+                        continue;
+                    }
+                    keyValue.Add(subSubSegments[0], subSubSegments[1]);
+                }
+            }
+        }
+        return results;
+    }
+
+    internal static void Write(string path, ProcessDataStandardFormat processDataStandardFormat, List? wsResults)
+    {
+        List results = new();
+        if (processDataStandardFormat.InputPDSF is null)
+            throw new NullReferenceException(nameof(processDataStandardFormat.InputPDSF));
+        if (processDataStandardFormat.Sequence is null)
+            throw new NullReferenceException(nameof(processDataStandardFormat.Sequence));
+        string endOffset = "E#######T";
+        string dataOffset = "D#######T";
+        string headerOffset = "H#######T";
+        string format = "MM/dd/yyyy HH:mm:ss";
+        string startTime = new DateTime(processDataStandardFormat.Sequence.Value).ToString(format);
+        results.Add("HEADER_TAG\tHEADER_VALUE");
+        results.Add("FORMAT\t2.00");
+        results.Add("NUMBER_PASSES\t0001");
+        results.Add($"HEADER_OFFSET\t{headerOffset}");
+        results.Add($"DATA_OFFSET\t{dataOffset}");
+        results.Add($"END_OFFSET\t{endOffset}");
+        results.Add($"\"{string.Join("\"\t\"", processDataStandardFormat.Columns)}\"");
+        results.AddRange(processDataStandardFormat.Body);
+        results.Add($"NUM_DATA_ROWS\t{processDataStandardFormat.Body.Count.ToString().PadLeft(9, '0')}");
+        results.Add($"NUM_DATA_COLUMNS\t{processDataStandardFormat.Columns.Count.ToString().PadLeft(9, '0')}");
+        results.Add("DELIMITER\t;");
+        results.Add($"START_TIME_FORMAT\t{format}");
+        results.Add($"START_TIME\t{startTime}");
+        results.Add("LOGISTICS_COLUMN\tA_LOGISTICS");
+        results.Add("LOGISTICS_COLUMN\tB_LOGISTICS");
+        if (wsResults is null || wsResults.Count != 1)
+            results.AddRange(processDataStandardFormat.Logistics);
+        else
+        {
+            string[] segments;
+            foreach (string logistics in processDataStandardFormat.Logistics)
+            {
+                segments = logistics.Split(new string[] { "\t" }, StringSplitOptions.None);
+                if (segments.Length != 2 || string.IsNullOrEmpty(segments[1]))
+                    results.Add(logistics);
+                else
+                    results.Add($"{segments[0]}\t{segments[1][0]}_HeaderId={wsResults[0].HeaderId};{segments[1][0]}_SubgroupId={wsResults[0].SubgroupId};{segments[1]}");
+            }
+        }
+        results.Add("END_HEADER");
+        results.Add(string.Empty);
+        List hyphens = new();
+        results.AddRange(processDataStandardFormat.InputPDSF.Header.Select(l => $"|{l.Replace('\t', '|')}|"));
+        results.Add(string.Empty);
+        results.Add($"|{string.Join("|", processDataStandardFormat.InputPDSF.Columns)}|");
+        for (int i = 0; i < processDataStandardFormat.InputPDSF.Columns.Count; i++)
+            hyphens.Add('-');
+        results.Add($"|{string.Join("|", hyphens)}|");
+        results.AddRange(processDataStandardFormat.InputPDSF.Body.Select(l => $"|{l.Replace('\t', '|')}|"));
+        results.Add(string.Empty);
+        results.AddRange(processDataStandardFormat.InputPDSF.Footer.Select(l => $"|{l.Replace('\t', '|')}|"));
+        results.Add(string.Empty);
+        results.Add("EOF");
+        results.Add(string.Empty);
+        string json = GetJson(processDataStandardFormat);
+        results.Add(json);
+        File.WriteAllText(path, string.Join(Environment.NewLine, results));
+    }
+
+    internal static Dictionary> GetDictionary(ProcessDataStandardFormat processDataStandardFormat)
+    {
+        Dictionary> results = new();
+        string[] segments;
+        foreach (string column in processDataStandardFormat.Columns)
+            results.Add(column, new List());
+        foreach (string bodyLine in processDataStandardFormat.Body)
+        {
+            segments = bodyLine.Split('\t');
+            for (int c = 1; c < segments.Length; c++)
+            {
+                if (c >= processDataStandardFormat.Columns.Count)
+                    continue;
+                results[processDataStandardFormat.Columns[c]].Add(segments[c]);
+            }
+        }
+        return results;
+    }
+
+    internal static JsonElement[] GetArray(ProcessDataStandardFormat processDataStandardFormat, bool lookForNumbers = false)
+    {
+        JsonElement[] results;
+        if (processDataStandardFormat.Body.Count == 0 || !processDataStandardFormat.Body[0].Contains('\t'))
+            results = JsonSerializer.Deserialize("[]", JsonElementCollectionSourceGenerationContext.Default.JsonElementArray) ?? throw new Exception();
+        else
+        {
+            string value;
+            string[] segments;
+            List lines = new();
+            StringBuilder stringBuilder = new();
+            foreach (string bodyLine in processDataStandardFormat.Body)
+            {
+                _ = stringBuilder.Clear();
+                _ = stringBuilder.Append('{');
+                segments = bodyLine.Trim().Split('\t');
+                if (!lookForNumbers)
+                {
+                    for (int c = 1; c < segments.Length; c++)
+                    {
+                        value = segments[c].Replace("\\", "\\\\").Replace("\"", "\\\"");
+                        _ = stringBuilder.Append('"').Append(processDataStandardFormat.Columns[c]).Append("\":\"").Append(value).Append("\",");
+                    }
+                }
+                else
+                {
+                    for (int c = 1; c < segments.Length; c++)
+                    {
+                        value = segments[c].Replace("\\", "\\\\").Replace("\"", "\\\"");
+                        if (string.IsNullOrEmpty(value))
+                            _ = stringBuilder.Append('"').Append(processDataStandardFormat.Columns[c]).Append("\":").Append(value).Append("null,");
+                        else if (value.All(char.IsDigit))
+                            _ = stringBuilder.Append('"').Append(processDataStandardFormat.Columns[c]).Append("\":").Append(value).Append(',');
+                        else
+                            _ = stringBuilder.Append('"').Append(processDataStandardFormat.Columns[c]).Append("\":\"").Append(value).Append("\",");
+                    }
+                }
+                _ = stringBuilder.Remove(stringBuilder.Length - 1, 1);
+                _ = stringBuilder.AppendLine("}");
+                lines.Add(stringBuilder.ToString());
+            }
+            string json = $"[{string.Join(",", lines)}]";
+            results = JsonSerializer.Deserialize(json) ?? throw new Exception();
+        }
+        return results;
+    }
+
+    internal static string GetPDSFText(IFileRead fileRead, Logistics logistics, JsonElement[] jsonElements, string logisticsText)
+    {
+        string result;
+        if (jsonElements.Length == 0)
+            result = string.Empty;
+        else
+        {
+            int columns = 0;
+            List lines;
+            string endOffset = "E#######T";
+            string dataOffset = "D#######T";
+            string headerOffset = "H#######T";
+            string format = "MM/dd/yyyy HH:mm:ss";
+            StringBuilder stringBuilder = new();
+            lines = new string[] { "HEADER_TAG\tHEADER_VALUE", "FORMAT\t2.00", "NUMBER_PASSES\t0001", string.Concat("HEADER_OFFSET\t", headerOffset), string.Concat("DATA_OFFSET\t", dataOffset), string.Concat("END_OFFSET\t", endOffset) }.ToList();
+            _ = stringBuilder.Append("\"Time\"").Append('\t');
+            _ = stringBuilder.Append("\"A_LOGISTICS\"").Append('\t');
+            _ = stringBuilder.Append("\"B_LOGISTICS\"").Append('\t');
+            for (int i = 0; i < jsonElements.Length;)
+            {
+                foreach (JsonProperty jsonProperty in jsonElements[0].EnumerateObject())
+                {
+                    columns += 1;
+                    _ = stringBuilder.Append('"').Append(jsonProperty.Name).Append('"').Append('\t');
+                }
+                break;
+            }
+            _ = stringBuilder.Remove(stringBuilder.Length - 1, 1);
+            lines.Add(stringBuilder.ToString());
+            for (int i = 0; i < jsonElements.Length; i++)
+            {
+                _ = stringBuilder.Clear();
+                _ = stringBuilder.Append("0.1").Append('\t');
+                _ = stringBuilder.Append('1').Append('\t');
+                _ = stringBuilder.Append('2').Append('\t');
+                foreach (JsonProperty jsonProperty in jsonElements[i].EnumerateObject())
+                    _ = stringBuilder.Append(jsonProperty.Value).Append('\t');
+                _ = stringBuilder.Remove(stringBuilder.Length - 1, 1);
+                lines.Add(stringBuilder.ToString());
+            }
+            lines.Add(string.Concat("NUM_DATA_ROWS ", jsonElements.Length.ToString().PadLeft(9, '0')));
+            lines.Add(string.Concat("NUM_DATA_COLUMNS ", (columns + 3).ToString().PadLeft(9, '0')));
+            lines.Add("DELIMITER	;");
+            lines.Add(string.Concat("START_TIME_FORMAT	", format));
+            lines.Add(string.Concat("START_TIME ", logistics.DateTimeFromSequence.ToString(format))); //12/26/2019 15:22:44
+            lines.Add(string.Concat("LOGISTICS_COLUMN", '\t', "A_LOGISTICS"));
+            lines.Add(string.Concat("LOGISTICS_COLUMN", '\t', "B_LOGISTICS"));
+            if (!string.IsNullOrEmpty(logisticsText))
+                lines.Add(logisticsText);
+            else
+            {
+                lines.Add(string.Concat("LOGISTICS_1", '\t', "A_CHAMBER=;A_INFO=", fileRead.EventName, ";A_INFO2=", fileRead.EquipmentType, ";A_JOBID=", fileRead.CellInstanceName, ";A_MES_ENTITY=", fileRead.MesEntity, ";A_MID=", logistics.MID, ";A_NULL_DATA=", fileRead.NullData, ";A_PPID=NO_PPID;A_PROCESS_JOBID=", logistics.ProcessJobID, ";A_PRODUCT=;A_SEQUENCE=", logistics.Sequence, ";A_WAFER_ID=;"));
+                lines.Add(string.Concat("LOGISTICS_2", '\t', "B_CHAMBER=;B_INFO=", fileRead.EventName, ";B_INFO2=", fileRead.EquipmentType, ";B_JOBID=", fileRead.CellInstanceName, ";B_MES_ENTITY=", fileRead.MesEntity, ";B_MID=", logistics.MID, ";B_NULL_DATA=", fileRead.NullData, ";B_PPID=NO_PPID;B_PROCESS_JOBID=", logistics.ProcessJobID, ";B_PRODUCT=;B_SEQUENCE=", logistics.Sequence, ";B_WAFER_ID=;"));
+                lines.Add("END_HEADER");
+            }
+            _ = stringBuilder.Clear();
+            foreach (string line in lines)
+                _ = stringBuilder.AppendLine(line);
+            result = stringBuilder.ToString();
+            result = result.Replace(headerOffset, result.IndexOf("NUM_DATA_ROWS").ToString().PadLeft(9, '0')).
+                Replace(dataOffset, result.IndexOf('"').ToString().PadLeft(9, '0')).
+                Replace(endOffset, result.Length.ToString().PadLeft(9, '0'));
+        }
+        return result;
+    }
+
    // Splits the flat PDSF column dictionary into one sub-dictionary per Test
    // value, pairing the result with the first logistics line.
    // NOTE(review): generic type arguments were stripped from this snippet
    // ("Tuple>>>", "Dictionary>>", "List?"); restore them from the original
    // source (presumably Tuple<string, Dictionary<Test, Dictionary<string,
    // List<string>>>> and friends) before compiling — TODO confirm.
    internal static Tuple>>> GetTestDictionary(ProcessDataStandardFormat processDataStandardFormat)
    {
        Dictionary>> results = new();
        List? collection;
        string testColumn = nameof(Test);
        // Column-name -> values view of the PDSF; the "Test" column drives grouping.
        Dictionary> keyValuePairs = GetDictionary(processDataStandardFormat);
        if (!keyValuePairs.TryGetValue(testColumn, out collection))
            throw new Exception();
        int min;
        int max;
        Test testKey;
        List vs;
        string columnKey;
        Dictionary> tests = new();
        // First pass: record the row indices at which each parsable Test value occurs.
        for (int i = 0; i < collection.Count; i++)
        {
            if (Enum.TryParse(collection[i], out Test test))
            {
                if (!results.ContainsKey(test))
                {
                    tests.Add(test, new List());
                    results.Add(test, new Dictionary>());
                }
                tests[test].Add(i);
            }
        }
        // Second pass: for each test, copy every column's values over the
        // contiguous index range [min, max); columns shorter than the range
        // are padded with empty strings.
        foreach (KeyValuePair> testKeyValuePair in tests)
        {
            testKey = testKeyValuePair.Key;
            min = testKeyValuePair.Value.Min();
            max = testKeyValuePair.Value.Max() + 1;
            foreach (KeyValuePair> keyValuePair in keyValuePairs)
                results[testKey].Add(keyValuePair.Key, new List());
            foreach (KeyValuePair> keyValuePair in keyValuePairs)
            {
                vs = keyValuePair.Value;
                columnKey = keyValuePair.Key;
                for (int i = min; i < max; i++)
                {
                    if (vs.Count > i)
                        results[testKey][columnKey].Add(vs[i]);
                    else
                        results[testKey][columnKey].Add(string.Empty);
                }
            }
        }
        return new Tuple>>>(processDataStandardFormat.Logistics[0], results);
    }
+
    // Renders the column dictionary as delimited report lines beneath
    // scopeInfo.Header, rewriting Date/Time columns into the requested formats.
    // NOTE(review): generic type arguments were stripped from this snippet
    // ("List names", "Dictionary> keyValuePairs", "List pairedParameterNames",
    // "List? ignoreParameterNames"); restore List<string> /
    // Dictionary<string, List<string>> from the original source — TODO confirm.
    internal static string GetLines(Logistics logistics, Properties.IScopeInfo scopeInfo, List names, Dictionary> keyValuePairs, string dateFormat, string timeFormat, List pairedParameterNames, bool useDateTimeFromSequence = true, string format = "", List? ignoreParameterNames = null)
    {
        StringBuilder result = new();
        ignoreParameterNames ??= new List();
        // Exactly one time source must be configured: either derive timestamps
        // from the logistics sequence, or parse raw values using `format`.
        if (useDateTimeFromSequence && !string.IsNullOrEmpty(format))
            throw new Exception();
        else if (!useDateTimeFromSequence && string.IsNullOrEmpty(format))
            throw new Exception();
        string? nullData;
        const string columnDate = "Date";
        const string columnTime = "Time";
        const string firstDuplicate = "_1";
        _ = result.AppendLine(scopeInfo.Header);
        StringBuilder line = new();
        if (logistics.NullData is null)
            nullData = string.Empty;
        else
            nullData = logistics.NullData.ToString();
        // Only iterate as many rows as the shortest column provides.
        int count = (from l in keyValuePairs select l.Value.Count).Min();
        for (int r = 0; r < count; r++)
        {
            _ = line.Clear();
            _ = line.Append('!');
            foreach (KeyValuePair> keyValuePair in keyValuePairs)
            {
                if (!names.Contains(keyValuePair.Key))
                    continue;
                if (ignoreParameterNames.Contains(keyValuePair.Key))
                    continue;
                if (pairedParameterNames.Contains(keyValuePair.Key))
                {
                    // Paired parameters each emit a complete "<prefix><key>;<value>"
                    // line, skipping empty/null-data cells.
                    if (string.IsNullOrEmpty(keyValuePair.Value[r]) || keyValuePair.Value[r] == nullData)
                        continue;
                    else
                        _ = result.Append(line).Append(keyValuePair.Key).Append(';').AppendLine(keyValuePair.Value[r]);
                }
                else
                {
                    // Date/Time cells are reformatted; the Time value may come
                    // from its "_1" duplicate column when parsing raw values.
                    if (useDateTimeFromSequence && keyValuePair.Key == columnDate)
                        _ = line.Append(logistics.DateTimeFromSequence.ToString(dateFormat));
                    else if (useDateTimeFromSequence && keyValuePair.Key == columnTime)
                        _ = line.Append(logistics.DateTimeFromSequence.ToString(timeFormat));
                    else if (!useDateTimeFromSequence && keyValuePair.Key == columnDate && keyValuePair.Value[r].Length == format.Length)
                        _ = line.Append(DateTime.ParseExact(keyValuePair.Value[r], format, CultureInfo.InvariantCulture).ToString(dateFormat));
                    else if (!useDateTimeFromSequence && keyValuePair.Key == columnTime && keyValuePairs.ContainsKey(string.Concat(keyValuePair.Key, firstDuplicate)) && keyValuePairs[string.Concat(keyValuePair.Key, firstDuplicate)][r].Length == format.Length)
                        _ = line.Append(DateTime.ParseExact(keyValuePairs[string.Concat(keyValuePair.Key, firstDuplicate)][r], format, CultureInfo.InvariantCulture).ToString(timeFormat));
                    else if (string.IsNullOrEmpty(keyValuePair.Value[r]) || keyValuePair.Value[r] == nullData)
                        _ = line.Append(nullData);
                    else
                        _ = line.Append(keyValuePair.Value[r]);
                    _ = line.Append(';');
                }
            }
            // Without paired parameters the accumulated line itself is the
            // record; drop the trailing ';' before emitting it.
            if (pairedParameterNames.Count == 0)
            {
                _ = line.Remove(line.Length - 1, 1);
                _ = result.AppendLine(line.ToString());
            }
        }
        return result.ToString();
    }
+
+    private static string GetString(SearchFor searchFor, bool addSpaces, char separator = ' ')
+    {
+        if (!addSpaces)
+            return string.Concat(((int)searchFor).ToString().PadLeft(2, '0'), searchFor);
+        else
+            return string.Concat(((int)searchFor).ToString().PadLeft(2, '0'), separator, searchFor.ToString().Replace("In", string.Concat(separator, "In")).Replace("Ex", string.Concat(separator, "Ex")));
+    }
+
+    private static int? TryGetPropertyIndex(JsonProperty[] jsonProperties, string propertyName)
+    {
+        int? result = null;
+        for (int i = 0; i < jsonProperties.Length; i++)
+        {
+            if (jsonProperties[i].Name != propertyName)
+                continue;
+            result = i;
+            break;
+        }
+        if (result is null)
+        {
+            for (int i = 0; i < jsonProperties.Length; i++)
+            {
+                if (jsonProperties[i].Name[0] != propertyName[0])
+                    continue;
+                if (jsonProperties[i].Name.Length != propertyName.Length)
+                    continue;
+                if (jsonProperties[i].Name != propertyName)
+                    continue;
+                result = i;
+                break;
+            }
+        }
+        return result;
+    }
+
    // Serializes the PDSF body rows (tab-separated cells) as XML-style text,
    // preferring the inner InputPDSF when present.
    // NOTE(review): this snippet is corrupted — the XML tag string literals and
    // entity replacements were stripped by a sanitizer (e.g. Replace("&", "&")
    // was presumably Replace("&", "&amp;"), the "" / "  " entries were
    // opening/closing element tags, and the final Concat contains an
    // unterminated string literal). Restore these literals from the original
    // file; this block will not compile as-is.
    internal static string GetXml(ProcessDataStandardFormat processDataStandardFormat)
    {
        string result;
        string value;
        string[] segments;
        // Use the wrapped input PDSF's body/columns when one exists.
        ReadOnlyCollection body = processDataStandardFormat.InputPDSF is null ?
            processDataStandardFormat.Body : processDataStandardFormat.InputPDSF.Body;
        ReadOnlyCollection columns = processDataStandardFormat.InputPDSF is null ?
            processDataStandardFormat.Columns : processDataStandardFormat.InputPDSF.Columns;
        List lines = new() { "", "" };
        for (int i = 0; i < body.Count; i++)
        {
            lines.Add("  ");
            segments = body[i].Trim().Split('\t');
            // Stop once a row's cell count no longer matches the header row.
            if (segments.Length != columns.Count)
                break;
            for (int c = 0; c < segments.Length; c++)
            {
                value = segments[c].Replace("&", "&")
                                   .Replace("<", "<")
                                   .Replace(">", ">")
                                   .Replace("\"", """)
                                   .Replace("'", "'");
                lines.Add(string.Concat("    <", columns[c].Trim('"'), '>', value, "'));
            }
            lines.Add("  ");
        }
        lines.Add("");
        result = string.Join(Environment.NewLine, lines);
        return result;
    }
+
+}
+
// System.Text.Json source-generation context for (de)serializing JsonElement
// arrays without runtime reflection; serialized output is pretty-printed.
[JsonSourceGenerationOptions(WriteIndented = true)]
[JsonSerializable(typeof(JsonElement[]))]
internal partial class JsonElementCollectionSourceGenerationContext : JsonSerializerContext
{
}
+
// System.Text.Json source-generation context for a nested string dictionary;
// null values are omitted from output and serialization is pretty-printed.
// NOTE(review): the typeof() generic arguments were stripped from this snippet —
// the class name suggests Dictionary<string, Dictionary<string, string>>;
// restore from the original source before compiling — TODO confirm.
[JsonSourceGenerationOptions(WriteIndented = true, DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull)]
[JsonSerializable(typeof(Dictionary>))]
internal partial class DictionaryStringDictionaryStringStringSourceGenerationContext : JsonSerializerContext
{
}
\ No newline at end of file
diff --git a/Adaptation/Shared/ProcessDataStandardFormatMapping.cs b/Adaptation/Shared/ProcessDataStandardFormatMapping.cs
new file mode 100644
index 0000000..c5a75ec
--- /dev/null
+++ b/Adaptation/Shared/ProcessDataStandardFormatMapping.cs
@@ -0,0 +1,33 @@
+using System.Collections.ObjectModel;
+
+namespace Adaptation.Shared;
+
// Immutable description of how PDSF columns map between an old and a new
// layout; every member is assigned exactly once by the constructor.
// NOTE(review): generic element types were stripped from this snippet
// ("ReadOnlyCollection BackfillColumns" etc.) — presumably
// ReadOnlyCollection<string> for the column-name collections,
// ReadOnlyCollection<int> for ColumnIndices, and
// ReadOnlyDictionary<string, string> for KeyValuePairs; restore them from the
// original source before compiling — TODO confirm.
public class ProcessDataStandardFormatMapping
{

    public ReadOnlyCollection BackfillColumns { get; private set; }
    public ReadOnlyCollection ColumnIndices { get; private set; }
    public ReadOnlyCollection IgnoreColumns { get; private set; }
    public ReadOnlyCollection IndexOnlyColumns { get; private set; }
    public ReadOnlyDictionary KeyValuePairs { get; private set; }
    public ReadOnlyCollection NewColumnNames { get; private set; }
    public ReadOnlyCollection OldColumnNames { get; private set; }

    // Straight member-wise assignment; callers supply already-read-only views.
    public ProcessDataStandardFormatMapping(ReadOnlyCollection backfillColumns,
                                            ReadOnlyCollection columnIndices,
                                            ReadOnlyCollection ignoreColumns,
                                            ReadOnlyCollection indexOnlyColumns,
                                            ReadOnlyDictionary keyValuePairs,
                                            ReadOnlyCollection newColumnNames,
                                            ReadOnlyCollection oldColumnNames)
    {
        BackfillColumns = backfillColumns;
        ColumnIndices = columnIndices;
        IgnoreColumns = ignoreColumns;
        IndexOnlyColumns = indexOnlyColumns;
        KeyValuePairs = keyValuePairs;
        NewColumnNames = newColumnNames;
        OldColumnNames = oldColumnNames;
    }

}
\ No newline at end of file
diff --git a/Adaptation/Shared/Properties/IDescription.cs b/Adaptation/Shared/Properties/IDescription.cs
index 34b92af..1166e17 100644
--- a/Adaptation/Shared/Properties/IDescription.cs
+++ b/Adaptation/Shared/Properties/IDescription.cs
@@ -6,6 +6,6 @@ public interface IDescription
     int Test { get; }
     int Count { get; }
     int Index { get; }
-    string Lot { get; }
+    string RDS { get; }
 
 }
\ No newline at end of file
diff --git a/FileHandlers/FileRead.cs b/FileHandlers/FileRead.cs
index f556172..2a2e3d0 100644
--- a/FileHandlers/FileRead.cs
+++ b/FileHandlers/FileRead.cs
@@ -37,7 +37,7 @@ public partial class FileRead : FileReaderHandler, ISMTP
     private FilePathGenerator _FilePathGeneratorForTarget;
     private readonly List _EquipmentParameters;
     private static readonly Dictionary> _DummyRuns;
-    private static readonly Dictionary> _StaticRuns;
+    private static readonly Dictionary> _StaticRuns;
 
     static FileRead()
     {