diff --git a/Adaptation/.vscode/launch.json b/Adaptation/.vscode/launch.json index 681323a..7732306 100644 --- a/Adaptation/.vscode/launch.json +++ b/Adaptation/.vscode/launch.json @@ -4,13 +4,7 @@ "name": ".NET Core Attach", "type": "coreclr", "request": "attach", - "processId": 25140 - }, - { - "type": "node", - "request": "launch", - "name": "node Launch Current Opened File", - "program": "${file}" + "processId": 23840 } ] } diff --git a/Adaptation/.vscode/tasks.json b/Adaptation/.vscode/tasks.json index 397decd..28a62b0 100644 --- a/Adaptation/.vscode/tasks.json +++ b/Adaptation/.vscode/tasks.json @@ -92,6 +92,26 @@ "command": "code ../MESAFIBACKLOG.csproj", "problemMatcher": [] }, + { + "label": "Readme", + "type": "shell", + "command": "code ../README.md", + "problemMatcher": [] + }, + { + "label": "File-Folder-Helper AOT s X Day-Helper-2025-03-20", + "type": "shell", + "command": "L:/DevOps/Mesa_FI/File-Folder-Helper/bin/Release/net8.0/win-x64/publish/File-Folder-Helper.exe", + "args": [ + "s", + "X", + "L:/DevOps/EAF-Mesa-Integration/MESAFIBACKLOG", + "Day-Helper-2025-03-20", + "false", + "4" + ], + "problemMatcher": [] + }, { "label": "Git Config", "type": "shell", diff --git a/Adaptation/FileHandlers/ADO/FileRead.cs b/Adaptation/FileHandlers/ADO/FileRead.cs index 7215e01..6a277e1 100644 --- a/Adaptation/FileHandlers/ADO/FileRead.cs +++ b/Adaptation/FileHandlers/ADO/FileRead.cs @@ -103,7 +103,9 @@ public class FileRead : Shared.FileRead, IFileRead { Tuple> results = new(string.Empty, null, null, new List()); _TickOffset ??= 0; // new FileInfo(reportFullPath).LastWriteTime.Ticks - dateTime.Ticks; - _Logistics = new Logistics(reportFullPath, $"LOGISTICS_1{'\t'}A_JOBID={"BACKLOG"};A_MES_ENTITY={"BACKLOG"};"); + string[] lines = new string[] { string.Empty, "NUM_DATA_ROWS", $"LOGISTICS_1{'\t'}A_JOBID={"BACKLOG"};A_MES_ENTITY={"BACKLOG"};" }; + ProcessDataStandardFormat processDataStandardFormat = 
ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath, lines); + _Logistics = new Logistics(reportFullPath, processDataStandardFormat); SetFileParameterLotIDToLogisticsMID(); if (_Logistics.FileInfo.Length < _MinFileLength) results.Item4.Add(_Logistics.FileInfo); diff --git a/Adaptation/FileHandlers/APC/FileRead.cs b/Adaptation/FileHandlers/APC/FileRead.cs index 2595250..ea8b0ed 100644 --- a/Adaptation/FileHandlers/APC/FileRead.cs +++ b/Adaptation/FileHandlers/APC/FileRead.cs @@ -120,15 +120,15 @@ public class FileRead : Shared.FileRead, IFileRead private Tuple> GetExtractResult(string reportFullPath, DateTime dateTime) { Tuple> results; - ProcessData processData = ProcessDataStandardFormat.GetProcessData(reportFullPath); - _Logistics = new Logistics(reportFullPath, processData.Logistics); + ProcessDataStandardFormat processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath); + _Logistics = new Logistics(reportFullPath, processDataStandardFormat); SetFileParameterLotIDToLogisticsMID(); - JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(processData); + JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(processDataStandardFormat); List descriptions = GetDuplicatorDescriptions(jsonElements); Test[] tests = (from l in descriptions select (Test)l.Test).ToArray(); if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0) FileCopy(reportFullPath, dateTime, descriptions); - results = new Tuple>(processData.Logistics, tests, jsonElements, new List()); + results = new Tuple>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics[0]), tests, jsonElements, new List()); return results; } diff --git a/Adaptation/FileHandlers/Archive/FileRead.cs b/Adaptation/FileHandlers/Archive/FileRead.cs index c14a09f..aab4662 100644 --- a/Adaptation/FileHandlers/Archive/FileRead.cs +++ b/Adaptation/FileHandlers/Archive/FileRead.cs @@ -120,9 +120,10 @@ public class 
FileRead : Shared.FileRead, IFileRead if (dateTime == DateTime.MinValue) throw new ArgumentNullException(nameof(dateTime)); string logisticsSequence = _Logistics.Sequence.ToString(); + string day = $"{_Logistics.DateTimeFromSequence:yyyy-MM-dd}"; string weekOfYear = _Calendar.GetWeekOfYear(_Logistics.DateTimeFromSequence, CalendarWeekRule.FirstDay, DayOfWeek.Sunday).ToString("00"); - string weekDirectory = $"{_Logistics.DateTimeFromSequence:yyyy}_Week_{weekOfYear}{@"\"}{_Logistics.DateTimeFromSequence:yyyy-MM-dd}"; - string destinationArchiveDirectory = Path.Combine(_JobIdArchiveParentDirectory, _Logistics.JobID, weekDirectory); + string weekDirectory = $"{_Logistics.DateTimeFromSequence:yyyy}_Week_{weekOfYear}"; + string destinationArchiveDirectory = Path.Combine(_JobIdArchiveParentDirectory, _Logistics.JobID, weekDirectory, day); if (!Directory.Exists(destinationArchiveDirectory)) _ = Directory.CreateDirectory(destinationArchiveDirectory); string jobIdDirectory = Path.Combine(_JobIdParentDirectory, _Logistics.JobID); @@ -144,15 +145,15 @@ public class FileRead : Shared.FileRead, IFileRead private Tuple> GetExtractResult(string reportFullPath, DateTime dateTime) { Tuple> results; - ProcessData processData = ProcessDataStandardFormat.GetProcessData(reportFullPath); - _Logistics = new Logistics(reportFullPath, processData.Logistics); + ProcessDataStandardFormat processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath); + _Logistics = new Logistics(reportFullPath, processDataStandardFormat); SetFileParameterLotIDToLogisticsMID(); - JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(processData); + JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(processDataStandardFormat); List descriptions = GetDuplicatorDescriptions(jsonElements); Test[] tests = (from l in descriptions select (Test)l.Test).ToArray(); if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0) 
MoveArchive(reportFullPath, dateTime); - results = new Tuple>(processData.Logistics, tests, jsonElements, new List()); + results = new Tuple>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics[0]), tests, jsonElements, new List()); return results; } diff --git a/Adaptation/FileHandlers/CellInstanceConnectionName.cs b/Adaptation/FileHandlers/CellInstanceConnectionName.cs index 1f19893..0d59fc1 100644 --- a/Adaptation/FileHandlers/CellInstanceConnectionName.cs +++ b/Adaptation/FileHandlers/CellInstanceConnectionName.cs @@ -29,6 +29,7 @@ public class CellInstanceConnectionName nameof(Priority) => new Priority.FileRead(smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null), nameof(Processed) => new Processed.FileRead(smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null), nameof(SPaCe) => new SPaCe.FileRead(smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null), + nameof(Violation) => new Violation.FileRead(smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null), 
_ => throw new Exception($"\"{cellInstanceConnectionName}\" not mapped") }; return result; diff --git a/Adaptation/FileHandlers/IQSSi/FileRead.cs b/Adaptation/FileHandlers/IQSSi/FileRead.cs index 77ca839..9e7054f 100644 --- a/Adaptation/FileHandlers/IQSSi/FileRead.cs +++ b/Adaptation/FileHandlers/IQSSi/FileRead.cs @@ -119,15 +119,15 @@ public class FileRead : Shared.FileRead, IFileRead private Tuple> GetExtractResult(string reportFullPath, DateTime dateTime) { Tuple> results; - ProcessData processData = ProcessDataStandardFormat.GetProcessData(reportFullPath); - _Logistics = new Logistics(reportFullPath, processData.Logistics); + ProcessDataStandardFormat processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath); + _Logistics = new Logistics(reportFullPath, processDataStandardFormat); SetFileParameterLotIDToLogisticsMID(); - JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(processData); + JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(processDataStandardFormat); List descriptions = GetDuplicatorDescriptions(jsonElements); Test[] tests = (from l in descriptions select (Test)l.Test).ToArray(); if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0) FileCopy(reportFullPath, dateTime, descriptions); - results = new Tuple>(processData.Logistics, tests, jsonElements, new List()); + results = new Tuple>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics[0]), tests, jsonElements, new List()); return results; } diff --git a/Adaptation/FileHandlers/Kanban/FileRead.cs b/Adaptation/FileHandlers/Kanban/FileRead.cs index 2bc6a55..0e491ad 100644 --- a/Adaptation/FileHandlers/Kanban/FileRead.cs +++ b/Adaptation/FileHandlers/Kanban/FileRead.cs @@ -103,7 +103,9 @@ public class FileRead : Shared.FileRead, IFileRead { Tuple> results = new(string.Empty, null, null, new List()); _TickOffset ??= 0; // new FileInfo(reportFullPath).LastWriteTime.Ticks - dateTime.Ticks; - 
_Logistics = new Logistics(reportFullPath, $"LOGISTICS_1{'\t'}A_JOBID={"BACKLOG"};A_MES_ENTITY={"BACKLOG"};"); + string[] lines = new string[] { string.Empty, "NUM_DATA_ROWS", $"LOGISTICS_1{'\t'}A_JOBID={"BACKLOG"};A_MES_ENTITY={"BACKLOG"};" }; + ProcessDataStandardFormat processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath, lines); + _Logistics = new Logistics(reportFullPath, processDataStandardFormat); SetFileParameterLotIDToLogisticsMID(); if (_Logistics.FileInfo.Length < _MinFileLength) results.Item4.Add(_Logistics.FileInfo); diff --git a/Adaptation/FileHandlers/Kanban/ProcessData.cs b/Adaptation/FileHandlers/Kanban/ProcessData.cs index b78c411..a528559 100644 --- a/Adaptation/FileHandlers/Kanban/ProcessData.cs +++ b/Adaptation/FileHandlers/Kanban/ProcessData.cs @@ -13,6 +13,8 @@ using System.Text.Json.Serialization; namespace Adaptation.FileHandlers.Kanban; +#nullable enable + public class ProcessData : IProcessData { @@ -22,125 +24,25 @@ public class ProcessData : IProcessData private readonly ILog _Log; - public ProcessData(IFileRead fileRead, Logistics logistics, Calendar calendar, string targetFileLocation, string url, List fileInfoCollection) - { - if (fileRead.IsEAFHosted) - { } - if (url is null) - throw new ArgumentNullException(nameof(url)); - _Details = new List(); - _Log = LogManager.GetLogger(typeof(ProcessData)); - WriteFiles(fileRead, logistics, calendar, targetFileLocation, fileInfoCollection); - } - string IProcessData.GetCurrentReactor(IFileRead fileRead, Logistics logistics, Dictionary reactors) => throw new Exception(string.Concat("See ", nameof(WriteFiles))); Tuple> IProcessData.GetResults(IFileRead fileRead, Logistics logistics, List fileInfoCollection) => new(logistics.Logistics1[0], Array.Empty(), Array.Empty(), fileInfoCollection); -#nullable enable - - internal static List GetDescriptions(JsonElement[] jsonElements) + public ProcessData(IFileRead fileRead, Logistics logistics, Calendar 
calendar, string targetFileLocation, string url, List fileInfoCollection) { - List results = new(); - Description? description; - JsonSerializerOptions jsonSerializerOptions = new() { NumberHandling = JsonNumberHandling.AllowReadingFromString | JsonNumberHandling.WriteAsString }; - foreach (JsonElement jsonElement in jsonElements) + if (fileRead.IsEAFHosted) { - if (jsonElement.ValueKind != JsonValueKind.Object) - throw new Exception(); - description = JsonSerializer.Deserialize(jsonElement.ToString(), jsonSerializerOptions); - if (description is null) - continue; - results.Add(description); + } - return results; + if (url is null) + throw new ArgumentNullException(nameof(url)); + _Details = new List(); + _Log = LogManager.GetLogger(typeof(ProcessData)); + WriteFiles(fileRead, logistics, calendar, targetFileLocation, fileInfoCollection); } - private static ReadOnlyDictionary GetKeyValuePairs(ReadOnlyDictionary keyValuePairs, bool keepRelations) - { - Dictionary results = new(); - Record record; - List nests = new(); - WorkItem? 
parentWorkItem; - ReadOnlyCollection childRecords; - ReadOnlyCollection relatedRecords; - ReadOnlyCollection successorRecords; - foreach (KeyValuePair keyValuePair in keyValuePairs) - { - nests.Clear(); - if (keyValuePair.Value.Parent is null) - parentWorkItem = null; - else - _ = keyValuePairs.TryGetValue(keyValuePair.Value.Parent.Value, out parentWorkItem); - try - { - childRecords = Record.GetKeyValuePairs(keyValuePairs, keyValuePair.Value, "Child", nests, keepRelations); // Forward - relatedRecords = Record.GetKeyValuePairs(keyValuePairs, keyValuePair.Value, "Related", nests, keepRelations); // Related - successorRecords = Record.GetKeyValuePairs(keyValuePairs, keyValuePair.Value, "Successor", nests, keepRelations); // Forward - // predecessorRecords = Record.GetKeyValuePairs(keyValuePairs, keyValuePair.Value, "Predecessor", nests, keepRelations); // Reverse - record = Record.Get(keyValuePair.Value, parentWorkItem, childRecords, relatedRecords, successorRecords, keepRelations); - } - catch (Exception) - { - record = new(keyValuePair.Value, parentWorkItem, Array.Empty(), Array.Empty(), Array.Empty()); - } - results.Add(keyValuePair.Key, record); - } - return new(results); - } - - private static ReadOnlyDictionary GetWorkItems(WorkItem[] workItems, bool keepRelations) - { - ReadOnlyDictionary results; - Dictionary keyValuePairs = new(); - foreach (WorkItem workItem in workItems) - keyValuePairs.Add(workItem.Id, workItem); - results = GetKeyValuePairs(new(keyValuePairs), keepRelations); - return results; - } - - private static void WriteFiles(IFileRead fileRead, DirectoryInfo tasksDirectory, Record[] records) - { - string old; - string json; - string checkFile; - WorkItem workItem; - foreach (Record record in records) - { - workItem = record.WorkItem; - json = JsonSerializer.Serialize(workItem, WorkItemSourceGenerationContext.Default.WorkItem); - checkFile = Path.Combine(tasksDirectory.FullName, $"{workItem.Id}.json"); - old = File.Exists(checkFile) ? 
File.ReadAllText(checkFile) : string.Empty; - if (!fileRead.IsEAFHosted || old == json) - continue; - File.WriteAllText(checkFile, json); - } - } - - private static string GetTaskText(string directory) => - string.Join(Environment.NewLine, new string[] - { - "{", - "\"version\": \"2.0.0\",", - "\"tasks\": [", - "{", - "\"label\": \"File-Folder-Helper AOT s X Day-Helper-2025-02-04\",", - "\"type\": \"shell\",", - "\"command\": \"L:/DevOps/Mesa_FI/File-Folder-Helper/bin/Release/net8.0/win-x64/publish/File-Folder-Helper.exe\",", - "\"args\": [", - "\"s\",", - "\"X\",", - $"\"{directory.Replace('\\', '/')}\",", - "\"Day-Helper-2025-02-04\",", - "],", - "\"problemMatcher\": []", - "}", - "]", - "}", - }); - private static void WriteFiles(IFileRead fileRead, Calendar calendar, string destinationDirectory, bool keepRelations, WorkItem[] workItems) { string json; @@ -193,6 +95,22 @@ public class ProcessData : IProcessData } } + private void WriteFiles(IFileRead fileRead, Logistics logistics, Calendar calendar, string destinationDirectory, List fileInfoCollection) + { + if (fileInfoCollection is null) + throw new ArgumentNullException(nameof(fileInfoCollection)); + bool keepRelations = true; + string json = File.ReadAllText(logistics.ReportFullPath); + WorkItem[]? 
workItems = JsonSerializer.Deserialize(json); + if (workItems is null) + throw new Exception(nameof(workItems)); + _Details.Add(workItems); + if (!Directory.Exists(destinationDirectory)) + _ = Directory.CreateDirectory(destinationDirectory); + WriteFiles(fileRead, calendar, destinationDirectory, workItems); + WriteFiles(fileRead, calendar, destinationDirectory, keepRelations, workItems); + } + private static void WriteFiles(IFileRead fileRead, Calendar calendar, string destinationDirectory, WorkItem[] workItems) { string old; @@ -215,20 +133,104 @@ public class ProcessData : IProcessData } } - private void WriteFiles(IFileRead fileRead, Logistics logistics, Calendar calendar, string destinationDirectory, List fileInfoCollection) + private static ReadOnlyDictionary GetWorkItems(WorkItem[] workItems, bool keepRelations) { - if (fileInfoCollection is null) - throw new ArgumentNullException(nameof(fileInfoCollection)); - bool keepRelations = true; - string json = File.ReadAllText(logistics.ReportFullPath); - WorkItem[]? workItems = JsonSerializer.Deserialize(json); - if (workItems is null) - throw new Exception(nameof(workItems)); - _Details.Add(workItems); - if (!Directory.Exists(destinationDirectory)) - _ = Directory.CreateDirectory(destinationDirectory); - WriteFiles(fileRead, calendar, destinationDirectory, workItems); - WriteFiles(fileRead, calendar, destinationDirectory, keepRelations, workItems); + ReadOnlyDictionary results; + Dictionary keyValuePairs = new(); + foreach (WorkItem workItem in workItems) + keyValuePairs.Add(workItem.Id, workItem); + results = GetKeyValuePairs(new(keyValuePairs), keepRelations); + return results; + } + + private static ReadOnlyDictionary GetKeyValuePairs(ReadOnlyDictionary keyValuePairs, bool keepRelations) + { + Dictionary results = new(); + Record record; + List nests = new(); + WorkItem? 
parentWorkItem; + ReadOnlyCollection childRecords; + ReadOnlyCollection relatedRecords; + ReadOnlyCollection successorRecords; + foreach (KeyValuePair keyValuePair in keyValuePairs) + { + nests.Clear(); + if (keyValuePair.Value.Parent is null) + parentWorkItem = null; + else + _ = keyValuePairs.TryGetValue(keyValuePair.Value.Parent.Value, out parentWorkItem); + try + { + childRecords = Record.GetKeyValuePairs(keyValuePairs, keyValuePair.Value, "Child", nests, keepRelations); // Forward + relatedRecords = Record.GetKeyValuePairs(keyValuePairs, keyValuePair.Value, "Related", nests, keepRelations); // Related + successorRecords = Record.GetKeyValuePairs(keyValuePairs, keyValuePair.Value, "Successor", nests, keepRelations); // Forward + // predecessorRecords = Record.GetKeyValuePairs(keyValuePairs, keyValuePair.Value, "Predecessor", nests, keepRelations); // Reverse + record = Record.Get(keyValuePair.Value, parentWorkItem, childRecords, relatedRecords, successorRecords, keepRelations); + } + catch (Exception) + { + record = new(keyValuePair.Value, parentWorkItem, Array.Empty(), Array.Empty(), Array.Empty()); + } + results.Add(keyValuePair.Key, record); + } + return new(results); + } + + private static string GetTaskText(string directory) => + string.Join(Environment.NewLine, new string[] + { + "{", + "\"version\": \"2.0.0\",", + "\"tasks\": [", + "{", + "\"label\": \"File-Folder-Helper AOT s X Day-Helper-2025-02-04\",", + "\"type\": \"shell\",", + "\"command\": \"L:/DevOps/Mesa_FI/File-Folder-Helper/bin/Release/net8.0/win-x64/publish/File-Folder-Helper.exe\",", + "\"args\": [", + "\"s\",", + "\"X\",", + $"\"{directory.Replace('\\', '/')}\",", + "\"Day-Helper-2025-02-04\",", + "],", + "\"problemMatcher\": []", + "}", + "]", + "}", + }); + + private static void WriteFiles(IFileRead fileRead, DirectoryInfo tasksDirectory, Record[] records) + { + string old; + string json; + string checkFile; + WorkItem workItem; + foreach (Record record in records) + { + workItem = 
record.WorkItem; + json = JsonSerializer.Serialize(workItem, WorkItemSourceGenerationContext.Default.WorkItem); + checkFile = Path.Combine(tasksDirectory.FullName, $"{workItem.Id}.json"); + old = File.Exists(checkFile) ? File.ReadAllText(checkFile) : string.Empty; + if (!fileRead.IsEAFHosted || old == json) + continue; + File.WriteAllText(checkFile, json); + } + } + + internal static List GetDescriptions(JsonElement[] jsonElements) + { + List results = new(); + Description? description; + JsonSerializerOptions jsonSerializerOptions = new() { NumberHandling = JsonNumberHandling.AllowReadingFromString | JsonNumberHandling.WriteAsString }; + foreach (JsonElement jsonElement in jsonElements) + { + if (jsonElement.ValueKind != JsonValueKind.Object) + throw new Exception(); + description = JsonSerializer.Deserialize(jsonElement.ToString(), jsonSerializerOptions); + if (description is null) + continue; + results.Add(description); + } + return results; } } \ No newline at end of file diff --git a/Adaptation/FileHandlers/Markdown/FileRead.cs b/Adaptation/FileHandlers/Markdown/FileRead.cs index 62cf504..db98ba3 100644 --- a/Adaptation/FileHandlers/Markdown/FileRead.cs +++ b/Adaptation/FileHandlers/Markdown/FileRead.cs @@ -107,7 +107,9 @@ public class FileRead : Shared.FileRead, IFileRead { Tuple> results = new(string.Empty, null, null, new List()); _TickOffset ??= 0; // new FileInfo(reportFullPath).LastWriteTime.Ticks - dateTime.Ticks; - _Logistics = new Logistics(reportFullPath, $"LOGISTICS_1{'\t'}A_JOBID={"BACKLOG"};A_MES_ENTITY={"BACKLOG"};"); + string[] lines = new string[] { string.Empty, "NUM_DATA_ROWS", $"LOGISTICS_1{'\t'}A_JOBID={"BACKLOG"};A_MES_ENTITY={"BACKLOG"};" }; + ProcessDataStandardFormat processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath, lines); + _Logistics = new Logistics(reportFullPath, processDataStandardFormat); SetFileParameterLotIDToLogisticsMID(); if (_Logistics.FileInfo.Length < _MinFileLength) 
results.Item4.Add(_Logistics.FileInfo); diff --git a/Adaptation/FileHandlers/Markdown/ProcessData.cs b/Adaptation/FileHandlers/Markdown/ProcessData.cs index 822e328..9b40e23 100644 --- a/Adaptation/FileHandlers/Markdown/ProcessData.cs +++ b/Adaptation/FileHandlers/Markdown/ProcessData.cs @@ -13,6 +13,8 @@ using System.Text.Json.Serialization; namespace Adaptation.FileHandlers.Markdown; +#nullable enable + public class ProcessData : IProcessData { @@ -22,38 +24,46 @@ public class ProcessData : IProcessData private readonly ILog _Log; - public ProcessData(IFileRead fileRead, Logistics logistics, string targetFileLocation, string url, ReadOnlyCollection workItemTypes, List fileInfoCollection) - { - if (fileRead.IsEAFHosted) - { } - _Details = new List(); - _Log = LogManager.GetLogger(typeof(ProcessData)); - WriteFiles(fileRead, logistics, url, workItemTypes, targetFileLocation, fileInfoCollection); - } - string IProcessData.GetCurrentReactor(IFileRead fileRead, Logistics logistics, Dictionary reactors) => throw new Exception(string.Concat("See ", nameof(WriteFiles))); Tuple> IProcessData.GetResults(IFileRead fileRead, Logistics logistics, List fileInfoCollection) => new(logistics.Logistics1[0], Array.Empty(), Array.Empty(), fileInfoCollection); -#nullable enable - - internal static List GetDescriptions(JsonElement[] jsonElements) + public ProcessData(IFileRead fileRead, Logistics logistics, string targetFileLocation, string url, ReadOnlyCollection workItemTypes, List fileInfoCollection) { - List results = new(); - Description? 
description; - JsonSerializerOptions jsonSerializerOptions = new() { NumberHandling = JsonNumberHandling.AllowReadingFromString | JsonNumberHandling.WriteAsString }; - foreach (JsonElement jsonElement in jsonElements) + if (fileRead.IsEAFHosted) { - if (jsonElement.ValueKind != JsonValueKind.Object) - throw new Exception(); - description = JsonSerializer.Deserialize(jsonElement.ToString(), jsonSerializerOptions); - if (description is null) - continue; - results.Add(description); + } - return results; + _Details = new List(); + _Log = LogManager.GetLogger(typeof(ProcessData)); + WriteFiles(fileRead, logistics, url, workItemTypes, targetFileLocation, fileInfoCollection); + } + + private static void WriteFiles(IFileRead fileRead, string destinationDirectory, List fileInfoCollection, ReadOnlyCollection lines, ReadOnlyCollection records, string fileName) + { + string markdown = string.Join(Environment.NewLine, lines); + string markdownFile = Path.Combine(destinationDirectory, $"{fileName}.md"); + string markdownOld = !File.Exists(markdownFile) ? 
string.Empty : File.ReadAllText(markdownFile); + if (markdown != markdownOld) + File.WriteAllText(markdownFile, markdown); + if (!fileRead.IsEAFHosted) + fileInfoCollection.Add(new(markdownFile)); + string html = CommonMark.CommonMarkConverter.Convert(markdown).Replace(" workItemTypes, string destinationDirectory, List fileInfoCollection) @@ -83,13 +93,11 @@ public class ProcessData : IProcessData ReadOnlyCollection results; ReadOnlyDictionary keyValuePairs = GetWorkItems(workItems, keepRelations); ReadOnlyCollection records = new(keyValuePairs.Values.ToArray()); - ReadOnlyCollection userStoryWorkItemTypes = new(new string[] { "User Story" }); ReadOnlyCollection bugFeatureWorkItemTypes = new(new string[] { "Bug", "Feature" }); ReadOnlyCollection bugUserStoryWorkItemTypes = new(new string[] { "Bug", "User Story" }); - ReadOnlyCollection bugUserStoryTaskWorkItemTypes = new(new string[] { "Bug", "User Story", "Task" }); messages.AddRange(WriteFile(fileRead, destinationDirectory, fileInfoCollection, records, "records")); - messages.AddRange(WriteWithPartentsFile(fileRead, destinationDirectory, fileInfoCollection, records, bugFeatureWorkItemTypes, "bugs-features-with-parents")); - messages.AddRange(WriteWithPartentsFile(fileRead, destinationDirectory, fileInfoCollection, records, bugUserStoryWorkItemTypes, "bugs-user-stories-with-parents")); + messages.AddRange(WriteWithParentsFile(fileRead, destinationDirectory, fileInfoCollection, records, bugFeatureWorkItemTypes, "bugs-features-with-parents")); + messages.AddRange(WriteWithParentsFile(fileRead, destinationDirectory, fileInfoCollection, records, bugUserStoryWorkItemTypes, "bugs-user-stories-with-parents")); foreach (string workItemType in workItemTypes) { lines.Clear(); @@ -100,98 +108,10 @@ public class ProcessData : IProcessData WriteFiles(fileRead, destinationDirectory, fileInfoCollection, new(lines), results, workItemType); _Details.Add(results); } - { - lines.Clear(); - string workItemType = "Feature"; - 
lines.Add($"# {nameof(FeatureCheckIterationPath122508)}"); - lines.Add(string.Empty); - results = FeatureCheckIterationPath122508(url, lines, bugUserStoryTaskWorkItemTypes, keyValuePairs, workItemType); - WriteFiles(fileRead, destinationDirectory, fileInfoCollection, new(lines), results, "check-122508"); - _Details.Add(results); - } - { - lines.Clear(); - string workItemType = "Feature"; - lines.Add($"# {nameof(FeatureCheckTag122514)}"); - lines.Add(string.Empty); - results = FeatureCheckTag122514(url, lines, bugUserStoryWorkItemTypes, keyValuePairs, workItemType); - WriteFiles(fileRead, destinationDirectory, fileInfoCollection, new(lines), results, "check-122514"); - _Details.Add(results); - } - { - lines.Clear(); - string workItemType = "Feature"; - lines.Add($"# {nameof(FeatureCheckPriority126169)}"); - lines.Add(string.Empty); - results = FeatureCheckPriority126169(url, lines, bugUserStoryWorkItemTypes, keyValuePairs, workItemType); - WriteFiles(fileRead, destinationDirectory, fileInfoCollection, new(lines), results, "check-126169"); - _Details.Add(results); - } - { - lines.Clear(); - string workItemType = "Feature"; - lines.Add($"# {nameof(FeatureCheckState123066)}"); - lines.Add(string.Empty); - results = FeatureCheckState123066(url, lines, bugUserStoryTaskWorkItemTypes, keyValuePairs, workItemType); - WriteFiles(fileRead, destinationDirectory, fileInfoCollection, new(lines), results, "check-123066"); - _Details.Add(results); - } - { - lines.Clear(); - string workItemType = "Feature"; - lines.Add($"# {nameof(FeatureCheckState123067)}"); - lines.Add(string.Empty); - results = FeatureCheckState123067(url, lines, bugUserStoryTaskWorkItemTypes, keyValuePairs, workItemType); - WriteFiles(fileRead, destinationDirectory, fileInfoCollection, new(lines), results, "check-123067"); - _Details.Add(results); - } - { - lines.Clear(); - string workItemType = "Feature"; - lines.Add($"# {nameof(FeatureCheckStart122517)}"); - lines.Add(string.Empty); - results = 
FeatureCheckStart122517(url, lines, bugUserStoryTaskWorkItemTypes, keyValuePairs, workItemType); - WriteFiles(fileRead, destinationDirectory, fileInfoCollection, new(lines), results, "check-122517"); - _Details.Add(results); - } - { - lines.Clear(); - string workItemType = "User Story"; - lines.Add($"# {nameof(UserStoryCheckIterationPath228385)}"); - lines.Add(string.Empty); - results = UserStoryCheckIterationPath228385(url, lines, userStoryWorkItemTypes, keyValuePairs, workItemType); - WriteFiles(fileRead, destinationDirectory, fileInfoCollection, new(lines), results, "check-228385"); - _Details.Add(results); - } if (messages.Count > 0) throw new Exception($"{messages.Count}{Environment.NewLine}{string.Join(Environment.NewLine, messages)}"); } - private static void WriteFiles(IFileRead fileRead, string destinationDirectory, List fileInfoCollection, ReadOnlyCollection lines, ReadOnlyCollection records, string fileName) - { - string markdown = string.Join(Environment.NewLine, lines); - string markdownFile = Path.Combine(destinationDirectory, $"{fileName}.md"); - string markdownOld = !File.Exists(markdownFile) ? string.Empty : File.ReadAllText(markdownFile); - if (markdown != markdownOld) - File.WriteAllText(markdownFile, markdown); - if (!fileRead.IsEAFHosted) - fileInfoCollection.Add(new(markdownFile)); - string html = CommonMark.CommonMarkConverter.Convert(markdown).Replace(" GetWorkItems(IEnumerable workItems, bool keepRelations) { ReadOnlyDictionary results; @@ -202,6 +122,74 @@ public class ProcessData : IProcessData return results; } + private static ReadOnlyDictionary GetKeyValuePairs(ReadOnlyDictionary keyValuePairs, bool keepRelations) + { + Dictionary results = new(); + Record record; + List nests = new(); + WorkItem? 
parentWorkItem; + ReadOnlyCollection childRecords; + ReadOnlyCollection relatedRecords; + ReadOnlyCollection successorRecords; + foreach (KeyValuePair keyValuePair in keyValuePairs) + { + nests.Clear(); + if (keyValuePair.Value.Parent is null) + parentWorkItem = null; + else + _ = keyValuePairs.TryGetValue(keyValuePair.Value.Parent.Value, out parentWorkItem); + try + { + childRecords = Record.GetKeyValuePairs(keyValuePairs, keyValuePair.Value, "Child", nests, keepRelations); // Forward + relatedRecords = Record.GetKeyValuePairs(keyValuePairs, keyValuePair.Value, "Related", nests, keepRelations); // Related + successorRecords = Record.GetKeyValuePairs(keyValuePairs, keyValuePair.Value, "Successor", nests, keepRelations); // Forward + // predecessorRecords = Record.GetKeyValuePairs(keyValuePairs, keyValuePair.Value, "Predecessor", nests, keepRelations); // Reverse + record = Record.Get(keyValuePair.Value, parentWorkItem, childRecords, relatedRecords, successorRecords, keepRelations); + } + catch (Exception) + { + record = new(keyValuePair.Value, parentWorkItem, Array.Empty(), Array.Empty(), Array.Empty()); + } + results.Add(keyValuePair.Key, record); + } + return new(results); + } + + private static ReadOnlyCollection WriteFile(IFileRead fileRead, string destinationDirectory, List fileInfoCollection, ReadOnlyCollection records, string fileName) + { + List results = new(); + string? json = GetJson(records, results); + string jsonFile = Path.Combine(destinationDirectory, $"{fileName}.json"); + string jsonOld = !File.Exists(jsonFile) ? 
string.Empty : File.ReadAllText(jsonFile); + if (!string.IsNullOrEmpty(json) && json != jsonOld) + File.WriteAllText(jsonFile, json); + if (!fileRead.IsEAFHosted) + fileInfoCollection.Add(new(jsonFile)); + return new(results); + } + + private static ReadOnlyCollection WriteWithParentsFile(IFileRead fileRead, string destinationDirectory, List fileInfoCollection, ReadOnlyCollection records, ReadOnlyCollection workItemTypes, string fileName) + { + List results = new(); + Record record; + List filtered = new(); + foreach (Record r in records) + { + if (r.WorkItem.State == "Removed" || !workItemTypes.Contains(r.WorkItem.WorkItemType)) + continue; + record = new(r.WorkItem, r.Parent, Array.Empty(), Array.Empty(), Array.Empty()); + filtered.Add(record); + } + string? json = GetJson(filtered, results); + string jsonFile = Path.Combine(destinationDirectory, $"{fileName}.json"); + string jsonOld = !File.Exists(jsonFile) ? string.Empty : File.ReadAllText(jsonFile); + if (!string.IsNullOrEmpty(json) && json != jsonOld) + File.WriteAllText(jsonFile, json); + if (!fileRead.IsEAFHosted) + fileInfoCollection.Add(new(jsonFile)); + return new(results); + } + private static string? GetJson(IEnumerable records, List results) { string? result; @@ -221,39 +209,22 @@ public class ProcessData : IProcessData return result; } - private static ReadOnlyCollection WriteFile(IFileRead fileRead, string destinationDirectory, List fileInfoCollection, ReadOnlyCollection records, string fileName) + private static void AppendLines(List spaces, List lines, Record record, bool condensed, bool sprintOnly) { - List results = new(); - string? json = GetJson(records, results); - string jsonFile = Path.Combine(destinationDirectory, $"{fileName}.json"); - string jsonOld = !File.Exists(jsonFile) ? 
string.Empty : File.ReadAllText(jsonFile); - if (!string.IsNullOrEmpty(json) && json != jsonOld) - File.WriteAllText(jsonFile, json); - if (!fileRead.IsEAFHosted) - fileInfoCollection.Add(new(jsonFile)); - return new(results); - } - - private static ReadOnlyCollection WriteWithPartentsFile(IFileRead fileRead, string destinationDirectory, List fileInfoCollection, ReadOnlyCollection records, ReadOnlyCollection workItemTypes, string fileName) - { - List results = new(); - List filtered = new(); - Record record; - foreach (Record r in records) + string line; + spaces.Add('\t'); + WorkItem workItem; + if (record.Children is not null) { - if (r.WorkItem.State == "Removed" || !workItemTypes.Contains(r.WorkItem.WorkItemType)) - continue; - record = new(r.WorkItem, r.Parent, Array.Empty(), Array.Empty(), Array.Empty()); - filtered.Add(record); + foreach (Record child in record.Children) + { + workItem = child.WorkItem; + line = GetLine(spaces, workItem, child, condensed, sprintOnly).TrimEnd(); + lines.Add(line); + AppendLines(spaces, lines, child, condensed, sprintOnly); + } } - string? json = GetJson(filtered, results); - string jsonFile = Path.Combine(destinationDirectory, $"{fileName}.json"); - string jsonOld = !File.Exists(jsonFile) ? 
string.Empty : File.ReadAllText(jsonFile); - if (!string.IsNullOrEmpty(json) && json != jsonOld) - File.WriteAllText(jsonFile, json); - if (!fileRead.IsEAFHosted) - fileInfoCollection.Add(new(jsonFile)); - return new(results); + spaces.RemoveAt(0); } private static void AppendLines(string url, List spaces, List lines, ReadOnlyCollection records, string workItemType) @@ -304,57 +275,6 @@ public class ProcessData : IProcessData } } - private static void AppendLines(List spaces, List lines, Record record, bool condensed, bool sprintOnly) - { - string line; - spaces.Add('\t'); - WorkItem workItem; - if (record.Children is not null) - { - foreach (Record child in record.Children) - { - workItem = child.WorkItem; - line = GetLine(spaces, workItem, child, condensed, sprintOnly).TrimEnd(); - lines.Add(line); - AppendLines(spaces, lines, child, condensed, sprintOnly); - } - } - spaces.RemoveAt(0); - } - - private static ReadOnlyDictionary GetKeyValuePairs(ReadOnlyDictionary keyValuePairs, bool keepRelations) - { - Dictionary results = new(); - Record record; - List nests = new(); - WorkItem? 
parentWorkItem; - ReadOnlyCollection childRecords; - ReadOnlyCollection relatedRecords; - ReadOnlyCollection successorRecords; - foreach (KeyValuePair keyValuePair in keyValuePairs) - { - nests.Clear(); - if (keyValuePair.Value.Parent is null) - parentWorkItem = null; - else - _ = keyValuePairs.TryGetValue(keyValuePair.Value.Parent.Value, out parentWorkItem); - try - { - childRecords = Record.GetKeyValuePairs(keyValuePairs, keyValuePair.Value, "Child", nests, keepRelations); // Forward - relatedRecords = Record.GetKeyValuePairs(keyValuePairs, keyValuePair.Value, "Related", nests, keepRelations); // Related - successorRecords = Record.GetKeyValuePairs(keyValuePairs, keyValuePair.Value, "Successor", nests, keepRelations); // Forward - // predecessorRecords = Record.GetKeyValuePairs(keyValuePairs, keyValuePair.Value, "Predecessor", nests, keepRelations); // Reverse - record = Record.Get(keyValuePair.Value, parentWorkItem, childRecords, relatedRecords, successorRecords, keepRelations); - } - catch (Exception) - { - record = new(keyValuePair.Value, parentWorkItem, Array.Empty(), Array.Empty(), Array.Empty()); - } - results.Add(keyValuePair.Key, record); - } - return new(results); - } - private static string GetLine(List spaces, WorkItem workItem, Record record, bool condensed, bool sprintOnly) { string result; @@ -368,485 +288,21 @@ public class ProcessData : IProcessData private static string GetClosed(WorkItem workItem) => workItem.State != "Closed" ? "[ ]" : "[x]"; - private static void FilterChildren(ReadOnlyCollection workItemTypes, Record record, List results) + internal static List GetDescriptions(JsonElement[] jsonElements) { - if (record.Children is not null) + List results = new(); + Description? 
description; + JsonSerializerOptions jsonSerializerOptions = new() { NumberHandling = JsonNumberHandling.AllowReadingFromString | JsonNumberHandling.WriteAsString }; + foreach (JsonElement jsonElement in jsonElements) { - foreach (Record r in record.Children) - { - if (!workItemTypes.Contains(r.WorkItem.WorkItemType)) - continue; - results.Add(r); - FilterChildren(workItemTypes, r, results); - } + if (jsonElement.ValueKind != JsonValueKind.Object) + throw new Exception(); + description = JsonSerializer.Deserialize(jsonElement.ToString(), jsonSerializerOptions); + if (description is null) + continue; + results.Add(description); } - } - - private static ReadOnlyCollection FilterChildren(ReadOnlyCollection workItemTypes, Record record) - { - List results = new(); - FilterChildren(workItemTypes, record, results); - return new(results); - } - - private static int GetState(WorkItem workItem) => - workItem.State switch - { - "New" => 1, - "Active" => 2, - "Resolved" => 3, - "Closed" => 4, - "Removed" => 5, - _ => 8 - }; - - private static ReadOnlyCollection GetMaxIterationPaths122508(ReadOnlyCollection records) - { - List results; - List? collection; - Dictionary> keyValuePairs = new(); - foreach (Record record in records) - { - if (!keyValuePairs.TryGetValue(record.WorkItem.IterationPath, out collection)) - { - keyValuePairs.Add(record.WorkItem.IterationPath, new()); - if (!keyValuePairs.TryGetValue(record.WorkItem.IterationPath, out collection)) - throw new Exception(); - - } - collection.Add(record); - } - string? max = keyValuePairs.Keys.Max(); - results = string.IsNullOrEmpty(max) ? 
new() : keyValuePairs[max]; - return results.AsReadOnly(); - } - - private static ReadOnlyCollection FeatureCheckIterationPath122508(string url, List lines, ReadOnlyCollection workItemTypes, ReadOnlyDictionary keyValuePairs, string workItemType) - { - List results = new(); - Record record; - List violations = new(); - List collection = new(); - ReadOnlyCollection records; - ReadOnlyCollection maxIterationPaths; - foreach (KeyValuePair keyValuePair in keyValuePairs) - { - record = keyValuePair.Value; - if (record.WorkItem.State is "Removed") - continue; - if (!record.WorkItem.IterationPath.Contains('\\')) - continue; - if (record.WorkItem.WorkItemType != workItemType) - continue; - collection.Clear(); - violations.Clear(); - if (record.Children is null || record.Children.Length == 0) - continue; - records = FilterChildren(workItemTypes, record); - maxIterationPaths = GetMaxIterationPaths122508(records); - foreach (Record r in maxIterationPaths) - { - if (string.IsNullOrEmpty(r.WorkItem.IterationPath) || record.WorkItem.IterationPath == r.WorkItem.IterationPath) - continue; - violations.Add($"{r.WorkItem.Id}:{r.WorkItem.IterationPath};"); - } - if (violations.Count > 0) - { - collection.Insert(0, string.Empty); - collection.Insert(0, $"## {record.WorkItem.AssignedTo} - {record.WorkItem.Id} - {record.WorkItem.Title}"); - lines.AddRange(collection); - violations.Insert(0, $"IterationPath:{record.WorkItem.IterationPath};"); - results.Add(Record.GetWithoutNesting(record, string.Join(" ", violations))); - } - } - return new(results); - } - - private static ReadOnlyCollection GetWorkItemsNotMatching122514(Record record, ReadOnlyCollection records) - { - List results = new(); - string[] segments; - string[] parentTags = record.WorkItem.Tags.Split(';').Select(l => l.Trim()).ToArray(); - foreach (Record r in records) - { - segments = string.IsNullOrEmpty(r.WorkItem.Tags) ? 
Array.Empty() : r.WorkItem.Tags.Split(';').Select(l => l.Trim()).ToArray(); - if (segments.Length > 0 && parentTags.Any(l => segments.Contains(l))) - continue; - results.Add(r); - } - return new(results); - } - - private static ReadOnlyCollection FeatureCheckTag122514(string url, List lines, ReadOnlyCollection workItemTypes, ReadOnlyDictionary keyValuePairs, string workItemType) - { - List results = new(); - Record record; - List collection = new(); - List violations = new(); - ReadOnlyCollection records; - ReadOnlyCollection recordsNotMatching; - foreach (KeyValuePair keyValuePair in keyValuePairs) - { - record = keyValuePair.Value; - if (record.WorkItem.State is "Removed") - continue; - if (record.WorkItem.WorkItemType != workItemType) - continue; - collection.Clear(); - violations.Clear(); - if (record.Children is null || record.Children.Length == 0) - continue; - if (string.IsNullOrEmpty(record.WorkItem.Tags)) - recordsNotMatching = new(new Record[] { record }); - else - { - records = FilterChildren(workItemTypes, record); - recordsNotMatching = GetWorkItemsNotMatching122514(record, records); - if (!string.IsNullOrEmpty(record.WorkItem.Tags) && recordsNotMatching.Count == 0) - continue; - } - collection.Add($"## {record.WorkItem.AssignedTo} - {record.WorkItem.Id} - {record.WorkItem.Title}"); - collection.Add(string.Empty); - foreach (Record r in recordsNotMatching) - collection.Add($"- [ ] [{r.WorkItem}]({url}{r.WorkItem}) {nameof(record.WorkItem.Tags)} != {record.WorkItem.Tags}"); - collection.Add(string.Empty); - lines.AddRange(collection); - violations.Add($"Tag:{record.WorkItem.Tags};"); - foreach (Record r in recordsNotMatching) - violations.Add($"{r.WorkItem.Id}:{r.WorkItem.Tags};"); - results.Add(Record.GetWithoutNesting(record, string.Join(" ", violations))); - } - return new(results); - } - - private static ReadOnlyCollection GetWorkItemsNotMatching126169(Record record, ReadOnlyCollection records) - { - List results = new(); - foreach (Record r in 
records) - { - if (record.WorkItem.Priority is null) - { - results.Add(record); - break; - } - if (r.WorkItem.Priority == record.WorkItem.Priority.Value) - continue; - results.Add(r); - } - return new(results); - } - - private static ReadOnlyCollection FeatureCheckPriority126169(string url, List lines, ReadOnlyCollection workItemTypes, ReadOnlyDictionary keyValuePairs, string workItemType) - { - List results = new(); - Record record; - List collection = new(); - List violations = new(); - ReadOnlyCollection records; - ReadOnlyCollection recordsNotMatching; - foreach (KeyValuePair keyValuePair in keyValuePairs) - { - record = keyValuePair.Value; - if (record.WorkItem.State is "Removed") - continue; - if (record.WorkItem.WorkItemType != workItemType) - continue; - collection.Clear(); - violations.Clear(); - if (record.Children is null || record.Children.Length == 0) - continue; - records = FilterChildren(workItemTypes, record); - recordsNotMatching = GetWorkItemsNotMatching126169(record, records); - if (recordsNotMatching.Count == 0) - continue; - collection.Add($"## {record.WorkItem.AssignedTo} - {record.WorkItem.Id} - {record.WorkItem.Title}"); - collection.Add(string.Empty); - collection.Add($"- [{record.WorkItem.Id}]({url}{record.WorkItem.Id})"); - foreach (Record r in recordsNotMatching) - collection.Add($"- [ ] [{r.WorkItem.Id}]({url}{r.WorkItem.Id}) {nameof(record.WorkItem.Priority)} != {record.WorkItem.Priority}"); - collection.Add(string.Empty); - lines.AddRange(collection); - violations.Add($"Priority:{record.WorkItem.Priority};"); - foreach (Record r in recordsNotMatching) - violations.Add($"{r.WorkItem.Id}:{r.WorkItem.Priority};"); - results.Add(Record.GetWithoutNesting(record, string.Join(" ", violations))); - } - return new(results); - } - - private static ReadOnlyCollection GetWorkItemsNotMatching123066(Record record, ReadOnlyCollection records) - { - List results = new(); - int check; - int state = GetState(record.WorkItem); - List> collection = 
new(); - foreach (Record r in records) - { - if (r.WorkItem.State is "Removed") - continue; - check = GetState(r.WorkItem); - if (check == state) - continue; - collection.Add(new(check, r)); - } - if (collection.Count > 0) - { - KeyValuePair[] notNewState = (from l in collection where l.Value.WorkItem.State != "New" select l).ToArray(); - if (notNewState.Length == 0 && record.WorkItem.State is "New" or "Active") - collection.Clear(); - else if (notNewState.Length > 0) - { - int minimum = notNewState.Min(l => l.Key); - if (minimum == state) - collection.Clear(); - else if (minimum == 1 && record.WorkItem.State == "New") - collection.Clear(); - else if (notNewState.Length > 0 && record.WorkItem.State == "Active") - collection.Clear(); - } - } - foreach (KeyValuePair keyValuePair in collection.OrderByDescending(l => l.Key)) - results.Add(keyValuePair.Value); - return new(results); - } - - private static ReadOnlyCollection GetWorkItemsNotMatching123067(Record record, ReadOnlyCollection records) - { - List results = new(); - int check; - int state = GetState(record.WorkItem); - List> collection = new(); - foreach (Record r in records) - { - if (r.WorkItem.State is "Removed") - continue; - check = GetState(r.WorkItem); - if (check == state) - continue; - collection.Add(new(check, r)); - } - if (collection.Count > 0) - { - KeyValuePair[] notNewState = (from l in collection where l.Value.WorkItem.State != "New" select l).ToArray(); - if (notNewState.Length == 0 && record.WorkItem.State is "New" or "Active") - collection.Clear(); - else if (notNewState.Length > 0) - { - int minimum = notNewState.Min(l => l.Key); - if (minimum == state) - collection.Clear(); - else if (minimum == 1 && record.WorkItem.State == "New") - collection.Clear(); - else if (notNewState.Length > 0 && record.WorkItem.State == "Active") - collection.Clear(); - } - } - foreach (KeyValuePair keyValuePair in collection.OrderByDescending(l => l.Key)) - results.Add(keyValuePair.Value); - return new(results); 
- } - - private static ReadOnlyCollection GetWorkItemsNotMatching122517(Record record, ReadOnlyCollection records) - { - List results = new(); - if (record.WorkItem.StartDate is null) - throw new Exception(); - DateTime dateTime = record.WorkItem.StartDate.Value; - List> collection = new(); - foreach (Record r in records) - { - if (r.WorkItem.State is "Removed") - continue; - if (r.WorkItem.ActivatedDate is null) - continue; - if (dateTime >= r.WorkItem.ActivatedDate.Value) - continue; - collection.Add(new(r.WorkItem.ActivatedDate.Value.Ticks, r)); - } - foreach (KeyValuePair keyValuePair in collection.OrderBy(l => l.Key)) - results.Add(keyValuePair.Value); - return new(results); - } - - private static ReadOnlyCollection FeatureCheckState123066(string url, List lines, ReadOnlyCollection workItemTypes, ReadOnlyDictionary keyValuePairs, string workItemType) - { - List results = new(); - Record record; - List collection = new(); - List violations = new(); - ReadOnlyCollection records; - ReadOnlyCollection recordsNotMatching; - foreach (KeyValuePair keyValuePair in keyValuePairs) - { - record = keyValuePair.Value; - if (record.WorkItem.State is "Removed") - continue; - if (record.WorkItem.WorkItemType != workItemType) - continue; - collection.Clear(); - violations.Clear(); - if (record.Children is null || record.Children.Length == 0) - continue; - records = FilterChildren(workItemTypes, record); - recordsNotMatching = GetWorkItemsNotMatching123066(record, records); - if (recordsNotMatching.Count == 0) - continue; - collection.Add($"## {record.WorkItem.AssignedTo} - {record.WorkItem.Id} - {record.WorkItem.Title}"); - collection.Add(string.Empty); - collection.Add($"- [{record.WorkItem.Id}]({url}{record.WorkItem.Id})"); - foreach (Record r in recordsNotMatching) - collection.Add($"- [ ] [{r.WorkItem.Id}]({url}{r.WorkItem.Id}) {nameof(record.WorkItem.State)} != {record.WorkItem.State}"); - collection.Add(string.Empty); - lines.AddRange(collection); - 
violations.Add($"State:{record.WorkItem.State};"); - foreach (Record r in recordsNotMatching) - violations.Add($"{r.WorkItem.Id}:{r.WorkItem.State};"); - results.Add(Record.GetWithoutNesting(record, string.Join(" ", violations))); - } - return new(results); - } - - private static ReadOnlyCollection FeatureCheckState123067(string url, List lines, ReadOnlyCollection workItemTypes, ReadOnlyDictionary keyValuePairs, string workItemType) - { - List results = new(); - Record record; - List collection = new(); - List violations = new(); - ReadOnlyCollection records; - ReadOnlyCollection recordsNotMatching; - foreach (KeyValuePair keyValuePair in keyValuePairs) - { - record = keyValuePair.Value; - if (record.WorkItem.State is "Removed") - continue; - if (record.WorkItem.WorkItemType != workItemType) - continue; - collection.Clear(); - violations.Clear(); - if (record.Children is null || record.Children.Length == 0) - continue; - records = FilterChildren(workItemTypes, record); - recordsNotMatching = GetWorkItemsNotMatching123067(record, records); - if (recordsNotMatching.Count == 0) - continue; - collection.Add($"## {record.WorkItem.AssignedTo} - {record.WorkItem.Id} - {record.WorkItem.Title}"); - collection.Add(string.Empty); - collection.Add($"- [{record.WorkItem.Id}]({url}{record.WorkItem.Id})"); - foreach (Record r in recordsNotMatching) - collection.Add($"- [ ] [{r.WorkItem.Id}]({url}{r.WorkItem.Id}) {nameof(record.WorkItem.State)} != {record.WorkItem.State}"); - collection.Add(string.Empty); - lines.AddRange(collection); - violations.Add($"State:{record.WorkItem.State};"); - foreach (Record r in recordsNotMatching) - violations.Add($"{r.WorkItem.Id}:{r.WorkItem.State};"); - results.Add(Record.GetWithoutNesting(record, string.Join(" ", violations))); - } - return new(results); - } - - private static ReadOnlyCollection FeatureCheckStart122517(string url, List lines, ReadOnlyCollection workItemTypes, ReadOnlyDictionary keyValuePairs, string workItemType) - { - List 
results = new(); - Record record; - List collection = new(); - List violations = new(); - ReadOnlyCollection records; - ReadOnlyCollection recordsNotMatching; - foreach (KeyValuePair keyValuePair in keyValuePairs) - { - record = keyValuePair.Value; - if (record.WorkItem.State is "Removed") - continue; - if (record.WorkItem.WorkItemType != workItemType) - continue; - collection.Clear(); - violations.Clear(); - if (record.Children is null || record.Children.Length == 0) - continue; - if (record.WorkItem.StartDate is null) - continue; - records = FilterChildren(workItemTypes, record); - recordsNotMatching = GetWorkItemsNotMatching122517(record, records); - if (recordsNotMatching.Count == 0) - continue; - collection.Add($"## {record.WorkItem.AssignedTo} - {record.WorkItem.Id} - {record.WorkItem.Title}"); - collection.Add(string.Empty); - collection.Add($"- [{record.WorkItem.Id}]({url}{record.WorkItem.Id})"); - foreach (Record r in recordsNotMatching) - collection.Add($"- [ ] [{r.WorkItem.Id}]({url}{r.WorkItem.Id}) {nameof(record.WorkItem.ActivatedDate)} != {record.WorkItem.ActivatedDate}"); - collection.Add(string.Empty); - lines.AddRange(collection); - violations.Add($"StartDate:{record.WorkItem.StartDate};"); - foreach (Record r in recordsNotMatching) - violations.Add($"{r.WorkItem.Id}:{r.WorkItem.ActivatedDate};"); - results.Add(Record.GetWithoutNesting(record, string.Join(" ", violations))); - } - return new(results); - } - - private static ReadOnlyDictionary> GetWorkItemsMatching228385(ReadOnlyCollection records) - { - Dictionary> results = new(); - string key; - List? 
collection; - foreach (Record record in records) - { - key = $"{record.WorkItem.IterationPath}-{record.WorkItem.AssignedTo}"; - if (!results.TryGetValue(key, out collection)) - { - results.Add(key, new()); - if (!results.TryGetValue(key, out collection)) - throw new Exception(); - } - collection.Add(record); - } - return new(results); - } - - private static ReadOnlyDictionary> GetWorkItemsMatching228385(ReadOnlyDictionary keyValuePairs, string workItemType) - { - ReadOnlyDictionary> results; - Record record; - List records = new(); - foreach (KeyValuePair keyValuePair in keyValuePairs) - { - record = keyValuePair.Value; - if (record.WorkItem.State is "Removed" or "Closed") - continue; - if (!record.WorkItem.IterationPath.Contains('\\')) - continue; - if (record.WorkItem.StoryPoints is null) - continue; - if (record.WorkItem.WorkItemType != workItemType) - continue; - records.Add(record); - } - Record[] sorted = records.OrderByDescending(l => l.WorkItem.IterationPath).ToArray(); - results = GetWorkItemsMatching228385(new(sorted)); return results; } - private static ReadOnlyCollection UserStoryCheckIterationPath228385(string url, List lines, ReadOnlyCollection _, ReadOnlyDictionary keyValuePairs, string workItemType) - { - List results = new(); - long totalStoryPoints; - ReadOnlyDictionary> records = GetWorkItemsMatching228385(keyValuePairs, workItemType); - foreach (KeyValuePair> keyValuePair in records) - { - totalStoryPoints = 0; - foreach (Record record in keyValuePair.Value) - { - if (record.WorkItem.StoryPoints is null) - continue; - totalStoryPoints += record.WorkItem.StoryPoints.Value; - } - lines.Add(string.Empty); - lines.Add($"## {keyValuePair.Key} => {totalStoryPoints}"); - lines.Add(string.Empty); - foreach (Record record in keyValuePair.Value) - lines.Add($"- [ ] [{record.WorkItem.Id}]({url}{record.WorkItem.Id}) - {record.WorkItem.Title}"); - } - return new(results); - } - } \ No newline at end of file diff --git 
a/Adaptation/FileHandlers/MoveMatchingFiles/FileRead.cs b/Adaptation/FileHandlers/MoveMatchingFiles/FileRead.cs index a6135db..65bc0f0 100644 --- a/Adaptation/FileHandlers/MoveMatchingFiles/FileRead.cs +++ b/Adaptation/FileHandlers/MoveMatchingFiles/FileRead.cs @@ -5,16 +5,68 @@ using Adaptation.Shared.Duplicator; using Adaptation.Shared.Methods; using System; using System.Collections.Generic; +using System.Collections.ObjectModel; using System.IO; +using System.Linq; using System.Text; using System.Text.Json; using System.Threading; namespace Adaptation.FileHandlers.MoveMatchingFiles; +#nullable enable + public class FileRead : Shared.FileRead, IFileRead { + internal class PreWith + { + + internal string MatchingFile { get; private set; } + internal string CheckFile { get; private set; } + internal string ErrFile { get; private set; } + internal string CheckDirectory { get; private set; } + internal string NoWaitDirectory { get; private set; } + + internal PreWith(string matchingFile, string checkFile, string errFile, string checkDirectory, string noWaitDirectory) + { + MatchingFile = matchingFile; + CheckFile = checkFile; + ErrFile = errFile; + CheckDirectory = checkDirectory; + NoWaitDirectory = noWaitDirectory; + } + + } + + internal class Pre + { + + internal string MatchingFile { get; private set; } + internal string CheckFile { get; private set; } + + internal Pre(string matchingFile, string checkFile) + { + MatchingFile = matchingFile; + CheckFile = checkFile; + } + + } + + internal class Post + { + + internal string ErrFile { get; private set; } + internal string CheckFile { get; private set; } + + internal Post(string checkFile, string errFile) + { + ErrFile = errFile; + CheckFile = checkFile; + } + + } + public FileRead(ISMTP smtp, Dictionary fileParameter, string cellInstanceName, int? 
connectionCount, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList modelObjectParameters, string equipmentDictionaryName, Dictionary> dummyRuns, Dictionary> staticRuns, bool useCyclicalForDescription, bool isEAFHosted) : base(new Description(), false, smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null) { @@ -41,7 +93,8 @@ public class FileRead : Shared.FileRead, IFileRead Move(extractResults); } - void IFileRead.WaitForThread() => WaitForThread(thread: null, threadExceptions: null); + void IFileRead.WaitForThread() => + WaitForThread(thread: null, threadExceptions: null); string IFileRead.GetEventDescription() { @@ -88,7 +141,7 @@ public class FileRead : Shared.FileRead, IFileRead DateTime dateTime = DateTime.Now; results = GetExtractResult(reportFullPath, dateTime); if (results.Item3 is null) - results = new Tuple>(results.Item1, Array.Empty(), JsonSerializer.Deserialize("[]"), results.Item4); + results = new Tuple>(results.Item1, Array.Empty(), JsonSerializer.Deserialize("[]") ?? 
throw new Exception(), results.Item4); if (results.Item3.Length > 0 && _IsEAFHosted) WritePDSF(this, results.Item3); UpdateLastTicksDuration(DateTime.Now.Ticks - dateTime.Ticks); @@ -104,7 +157,69 @@ public class FileRead : Shared.FileRead, IFileRead return results; } - private static List GetSearchDirectories(int numberLength, string parentDirectory) + private static ProcessDataStandardFormatMapping GetProcessDataStandardFormatMapping() + { + ProcessDataStandardFormatMapping result; + string[] segmentsB; + List distinct = new(); + Dictionary keyValuePairs = new(); + string args4 = "Time,Test,Count,MesEntity,HeaderUniqueId,UniqueId,Id,Recipe,Date,AreaDeltaFromLastRun,GLimit,HGCV1"; + string args5 = "Nine10mmEdgeMean,Nine4mmEdgeMean,NineCriticalPointsAverage,NineCriticalPointsPhaseAngleAverage,NineCriticalPointsStdDev,NineEdgeMeanDelta,NineMean,NineResRangePercent,AreaDeltaFromLastRun,Variation,Percentage HgCV 4PP Delta,HGCV1"; + string args6 = "RhoAvg01,RhoAvg02,RhoAvg03,RhoAvg04,RhoAvg05,RhoAvg06,RhoAvg07,RhoAvg08,RhoAvg09,HGCV1"; + string args7 = "FlatZMean|MeanFlatZ,GradeMean|MeanGrade,NAvgMean|MeanNAvg,NslMean|MeanNsl,PhaseMean|MeanPhase,RhoAvgMean|MeanRhoAvg,RhoslMean|MeanRhosl,RsMean|MeanRs,VdMean|MeanVd,FlatZRadialGradient|RadialGradientFlatZ,GradeRadialGradient|RadialGradientGrade,NAvgRadialGradient|RadialGradientNAvg,NslRadialGradient|RadialGradientNsl,PhaseRadialGradient|RadialGradientPhase,RhoAvgRadialGradient|RadialGradientRhoAvg,RhoslRadialGradient|RadialGradientRhosl,RsRadialGradient|RadialGradientRs,VdRadialGradient|RadialGradientVd,FlatZStdDev|StandardDeviationPercentageFlatZ,GradeStdDev|StandardDeviationPercentageGrade,NAvgStdDev|StandardDeviationPercentageNAvg,NslStdDev|StandardDeviationPercentageNsl,PhaseStdDev|StandardDeviationPercentagePhase,RhoAvgStdDev|StandardDeviationPercentageRhoAvg,RhoslStdDev|StandardDeviationPercentageRhosl,RsStdDev|StandardDeviationPercentageRs,VdStdDev|StandardDeviationPercentageVd,|HGCV1"; + string args8 = 
"Time,A_LOGISTICS,B_LOGISTICS,Test,Count,Index,MesEntity,Date,Employee,Lot,PSN,Reactor,Recipe,Area,Folder,HeaderUniqueId,Id,Layer,Model,Pattern,Phase,Plan,RampRate,RDS,SetupFile,StartVoltage,StopVoltage,UniqueId,Wafer,WaferSize,Zone,Ccomp,CondType,FlatZ,FlatZMean,FlatZRadialGradient,FlatZStdDev,GLimit,Grade,GradeMean,GradeRadialGradient,GradeStdDev,NAvg,NAvgMean,NAvgRadialGradient,NAvgStdDev,Nsl,NslMean,NslRadialGradient,NslStdDev,PhaseMean,PhaseRadialGradient,PhaseStdDev,RhoAvg,RhoAvgMean,RhoAvgRadialGradient,RhoAvgStdDev,RhoMethod,Rhosl,RhoslMean,RhoslRadialGradient,RhoslStdDev,RsMean,RsRadialGradient,RsStdDev,Vd,VdMean,VdRadialGradient,VdStdDev,Variation,AreaDeltaFromLastRun,Nine10mmEdgeMean,Nine4mmEdgeMean,NineCriticalPointsAverage,NineCriticalPointsPhaseAngleAverage,NineCriticalPointsStdDev,NineEdgeMeanDelta,NineMean,NineResRangePercent,RhoAvg01,RhoAvg02,RhoAvg03,RhoAvg04,RhoAvg05,RhoAvg06,RhoAvg07,RhoAvg08,RhoAvg09"; + string args9 = "Time,A_LOGISTICS,B_LOGISTICS,Index,Operator,StartVoltage,Wafer,StopVoltage,Lot,RampRate,Plan,GLimit,Date,Time,SetupFile,WaferSize,Folder,Ccomp,Pattern,Area,CondType,RhoMethod,Model,MeanNAvg,MeanNsl,MeanVd,MeanFlatZ,MeanRhoAvg,MeanRhosl,MeanPhase,MeanGrade,MeanRs,StandardDeviationPercentageNAvg,StandardDeviationPercentageNsl,StandardDeviationPercentageVd,StandardDeviationPercentageFlatZ,StandardDeviationPercentageRhoAvg,StandardDeviationPercentageRhosl,StandardDeviationPercentagePhase,StandardDeviationPercentageGrade,StandardDeviationPercentageRs,RadialGradientNAvg,RadialGradientNsl,RadialGradientVd,RadialGradientFlatZ,RadialGradientRhoAvg,RadialGradientRhosl,RadialGradientPhase,RadialGradientGrade,RadialGradientRs,Site,X,Y,NAvg,RhoAvg,Nsl,Rhosl,Vd,Phase,FlatZ,Grade,XLeft,XRight,BottomY,TopY,RDS,PSN,Reactor,Layer,Zone,Employee,InferredLot,Nine10mmEdgeMean,Nine4mmEdgeMean,NineCriticalPointsAverage,NineCriticalPointsPhaseAngleAverage,NineCriticalPointsStdDev,NineEdgeMeanDelta,NineMean,NineResRangePercent,AreaDeltaFromLastRun,Variati
on,Percentage HgCV 4PP Delta,RhoAvg01,RhoAvg02,RhoAvg03,RhoAvg04,RhoAvg05,RhoAvg06,RhoAvg07,RhoAvg08,RhoAvg09"; + string args10 = "0,1,2,-1,-1,3,-1,12,70,8,66,67,-1,19,16,-1,-1,68,22,18,58,10,9,65,14,5,7,-1,6,15,69,17,20,59,26,44,35,11,60,30,48,39,53,23,41,32,55,24,42,33,29,47,38,54,27,45,36,21,56,28,46,37,31,49,40,57,25,43,34,81,80,72,73,74,75,76,77,78,79,83,84,85,86,87,88,89,90,91"; + string[] segments = args7.Split(','); + ReadOnlyCollection ignoreColumns = new(args4.Split(',')); + ReadOnlyCollection newColumnNames = new(args9.Split(',')); + ReadOnlyCollection oldColumnNames = new(args8.Split(',')); + ReadOnlyCollection backfillColumns = new(args5.Split(',')); + ReadOnlyCollection indexOnlyColumns = new(args6.Split(',')); + ReadOnlyCollection columnIndices = new(args10.Split(',').Select(int.Parse).ToArray()); + foreach (string segment in segments) + { + segmentsB = segment.Split('|'); + if (segmentsB.Length != 2) + continue; + if (distinct.Contains(segmentsB[0])) + continue; + distinct.Add(segmentsB[0]); + keyValuePairs.Add(segmentsB[0], segmentsB[1]); + } + result = new(backfillColumns: backfillColumns, + columnIndices: columnIndices, + newColumnNames: newColumnNames, + ignoreColumns: ignoreColumns, + indexOnlyColumns: indexOnlyColumns, + keyValuePairs: new(keyValuePairs), + oldColumnNames: oldColumnNames); + return result; + } + + private static ReadOnlyCollection GetPreWithCollection(ReadOnlyCollection
 preCollection)
+    {
+        List results = new();
+        string errFile;
+        PreWith preWith;
+        string? checkDirectory;
+        string noWaitDirectory;
+        foreach (Pre pre in preCollection)
+        {
+            errFile = string.Concat(pre.CheckFile, ".err");
+            checkDirectory = Path.GetDirectoryName(pre.CheckFile);
+            if (string.IsNullOrEmpty(checkDirectory))
+                continue;
+            if (!Directory.Exists(checkDirectory))
+                _ = Directory.CreateDirectory(checkDirectory);
+            noWaitDirectory = Path.Combine(checkDirectory, "NoWaitDirectory");
+            preWith = new(pre.MatchingFile, pre.CheckFile, errFile, checkDirectory, noWaitDirectory);
+            results.Add(preWith);
+        }
+        return results.AsReadOnly();
+    }
+
+    private static ReadOnlyCollection GetSearchDirectories(int numberLength, string parentDirectory)
     {
         List results = new();
         string[] directories = Directory.GetDirectories(parentDirectory, "*", SearchOption.TopDirectoryOnly);
@@ -115,10 +230,137 @@ public class FileRead : Shared.FileRead, IFileRead
             results.Add(directory);
         }
         results.Sort();
+        return results.AsReadOnly();
+    }
+
+    private static void CreatePointerFile(int numberLength, string parentDirectory, ReadOnlyCollection matchingFiles)
+    {
+        string checkFile;
+        string writeFile;
+        string? directoryName;
+        int parentDirectoryLength = parentDirectory.Length;
+        foreach (string matchingFile in matchingFiles)
+        {
+            directoryName = Path.GetDirectoryName(matchingFile);
+            if (directoryName is null)
+                continue;
+            checkFile = $"{matchingFile[0]}{directoryName.Substring(parentDirectoryLength + numberLength + 1)}";
+            writeFile = Path.Combine(parentDirectory, $"{directoryName.Substring(parentDirectory.Length + 1, numberLength)}.txt");
+            if (File.Exists(writeFile))
+                continue;
+            File.AppendAllLines(writeFile, new string[] { parentDirectory, matchingFile, directoryName, checkFile });
+        }
+    }
+
+    private static ReadOnlyCollection
 GetPreCollection(int numberLength, string parentDirectory, ReadOnlyCollection matchingFiles)
+    {
+        List
 results = new();
+        Pre pre;
+        string checkFile;
+        int parentDirectoryLength = parentDirectory.Length;
+        foreach (string matchingFile in matchingFiles)
+        {
+            checkFile = $"{matchingFile[0]}{matchingFile.Substring(parentDirectoryLength + numberLength + 1)}";
+            pre = new(matchingFile, checkFile);
+            results.Add(pre);
+        }
+        return results.AsReadOnly();
+    }
+
+    private void MoveCollection(DateTime dateTime, ProcessDataStandardFormat? processDataStandardFormat, ReadOnlyCollection preWithCollection)
+    {
+        ReadOnlyCollection postCollection = GetPostCollection(dateTime, processDataStandardFormat, preWithCollection);
+        if (postCollection.Count != 0)
+        {
+            Thread.Sleep(500);
+            StringBuilder stringBuilder = new();
+            foreach (Post post in postCollection)
+            {
+                if (File.Exists(post.ErrFile))
+                    _ = stringBuilder.AppendLine(File.ReadAllText(post.ErrFile));
+                if (File.Exists(post.CheckFile))
+                    _ = stringBuilder.AppendLine($"<{post.CheckFile}> was not consumed by the end!");
+            }
+            if (stringBuilder.Length > 0)
+                throw new Exception(stringBuilder.ToString());
+        }
+    }
+
+    private ReadOnlyCollection GetPostCollection(DateTime dateTime, ProcessDataStandardFormat? processDataStandardFormat, ReadOnlyCollection preWithCollection)
+    {
+        List results = new();
+        Post post;
+        long preWait;
+        foreach (PreWith preWith in preWithCollection)
+        {
+            if (processDataStandardFormat is null)
+                File.Move(preWith.MatchingFile, preWith.CheckFile);
+            else
+            {
+                ProcessDataStandardFormat.Write(preWith.CheckFile, processDataStandardFormat);
+                File.Delete(preWith.MatchingFile);
+            }
+            if (Directory.Exists(preWith.NoWaitDirectory))
+            {
+                post = new(preWith.CheckFile, preWith.ErrFile);
+                results.Add(post);
+                continue;
+            }
+            if (_FileConnectorConfiguration?.FileHandleWaitTime is null)
+                preWait = DateTime.Now.AddMilliseconds(1234).Ticks;
+            else
+                preWait = DateTime.Now.AddMilliseconds(_FileConnectorConfiguration.FileHandleWaitTime.Value).Ticks;
+            for (short i = 0; i < short.MaxValue; i++)
+            {
+                if (DateTime.Now.Ticks > preWait)
+                    break;
+                Thread.Sleep(500);
+            }
+            for (int i = 0; i < int.MaxValue; i++)
+            {
+                if (File.Exists(preWith.ErrFile))
+                    throw new Exception(File.ReadAllText(preWith.ErrFile));
+                if (!File.Exists(preWith.CheckFile))
+                    break;
+                if (new TimeSpan(DateTime.Now.Ticks - dateTime.Ticks).TotalSeconds > _BreakAfterSeconds)
+                    throw new Exception($"Not all files were consumed after {_BreakAfterSeconds} second(s)!");
+                Thread.Sleep(500);
+            }
+        }
+        return results.AsReadOnly();
+    }
+
+    private Tuple> GetExtractResult(string reportFullPath, DateTime dateTime)
+    {
+        Tuple> results = new(string.Empty, Array.Empty(), Array.Empty(), new List());
+        ProcessDataStandardFormatMapping processDataStandardFormatMapping = GetProcessDataStandardFormatMapping();
+        ProcessDataStandardFormat? processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath, processDataStandardFormatMapping);
+        if (processDataStandardFormat is not null)
+            _Logistics = new Logistics(reportFullPath, processDataStandardFormat);
+        else
+        {
+            processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath);
+            _Logistics = new Logistics(reportFullPath, processDataStandardFormat);
+            processDataStandardFormat = null;
+        }
+        SetFileParameterLotIDToLogisticsMID();
+        int numberLength = 2;
+        long ticks = dateTime.Ticks;
+        string parentParentDirectory = GetParentParent(reportFullPath);
+        ReadOnlyCollection searchDirectories = GetSearchDirectories(numberLength, parentParentDirectory);
+        ReadOnlyCollection matchingFiles = GetMatchingFiles(ticks, reportFullPath, searchDirectories);
+        if (matchingFiles.Count != searchDirectories.Count)
+            throw new Exception($"Didn't find all files after {_BreakAfterSeconds} second(s)!");
+        try
+        { CreatePointerFile(numberLength, parentParentDirectory, matchingFiles); }
+        catch (Exception) { }
+        ReadOnlyCollection
 preCollection = GetPreCollection(numberLength, parentParentDirectory, matchingFiles);
+        ReadOnlyCollection preWithCollection = GetPreWithCollection(preCollection);
+        MoveCollection(dateTime, processDataStandardFormat, preWithCollection);
         return results;
     }
 
-    private List GetMatchingFiles(long ticks, string reportFullPath, List searchDirectories)
+    private ReadOnlyCollection GetMatchingFiles(long ticks, string reportFullPath, ReadOnlyCollection searchDirectories)
     {
         List results = new();
         string[] found;
@@ -137,129 +379,7 @@ public class FileRead : Shared.FileRead, IFileRead
                     break;
             }
         }
-        return results;
-    }
-
-    private static List<(string matchingFile, string checkFile)> GetCollection(int numberLength, string parentDirectory, List matchingFiles)
-    {
-        List<(string matchingFile, string checkFile)> results = new();
-        string checkFile;
-        int parentDirectoryLength = parentDirectory.Length;
-        foreach (string matchingFile in matchingFiles)
-        {
-            checkFile = $"{matchingFile[0]}{matchingFile.Substring(parentDirectoryLength + numberLength + 1)}";
-            results.Add(new(matchingFile, checkFile));
-        }
-        return results;
-    }
-
-    private static List<(string, string, string, string, string)> GetCollection(List<(string matchingFile, string checkFile)> collection)
-    {
-        List<(string, string, string, string, string)> results = new();
-        string errFile;
-        string checkDirectory;
-        string noWaitDirectory;
-        foreach ((string matchingFile, string checkFile) in collection)
-        {
-            errFile = string.Concat(checkFile, ".err");
-            checkDirectory = Path.GetDirectoryName(checkFile);
-            if (!Directory.Exists(checkDirectory))
-                _ = Directory.CreateDirectory(checkDirectory);
-            noWaitDirectory = Path.Combine(checkDirectory, "NoWaitDirectory");
-            results.Add(new(matchingFile, checkFile, errFile, checkDirectory, noWaitDirectory));
-        }
-        return results;
-    }
-
-    private void MoveCollection(DateTime dateTime, List<(string matchingFile, string checkFile)> collection)
-    {
-        long preWait;
-        List<(string checkFile, string errFile)> postCollection = new();
-        foreach ((string matchingFile, string checkFile, string errFile, string checkDirectory, string noWaitDirectory) in GetCollection(collection))
-        {
-            File.Move(matchingFile, checkFile);
-            if (Directory.Exists(noWaitDirectory))
-            {
-                postCollection.Add(new(checkFile, errFile));
-                continue;
-            }
-            if (_FileConnectorConfiguration?.FileHandleWaitTime is null)
-                preWait = DateTime.Now.AddMilliseconds(1234).Ticks;
-            else
-                preWait = DateTime.Now.AddMilliseconds(_FileConnectorConfiguration.FileHandleWaitTime.Value).Ticks;
-            for (short i = 0; i < short.MaxValue; i++)
-            {
-                if (DateTime.Now.Ticks > preWait)
-                    break;
-                Thread.Sleep(500);
-            }
-            for (int i = 0; i < int.MaxValue; i++)
-            {
-                if (File.Exists(errFile))
-                    throw new Exception(File.ReadAllText(errFile));
-                if (!File.Exists(checkFile))
-                    break;
-                if (new TimeSpan(DateTime.Now.Ticks - dateTime.Ticks).TotalSeconds > _BreakAfterSeconds)
-                    throw new Exception($"Not all files were consumed after {_BreakAfterSeconds} second(s)!");
-                Thread.Sleep(500);
-            }
-        }
-        if (postCollection.Count != 0)
-        {
-            Thread.Sleep(500);
-            StringBuilder stringBuilder = new();
-            foreach ((string checkFile, string errFile) in postCollection)
-            {
-                if (File.Exists(errFile))
-                    _ = stringBuilder.AppendLine(File.ReadAllText(errFile));
-                if (File.Exists(checkFile))
-                    _ = stringBuilder.AppendLine($"<{checkFile}> was not consumed by the end!");
-            }
-            if (stringBuilder.Length > 0)
-                throw new Exception(stringBuilder.ToString());
-        }
-    }
-
-    private static void CreatePointerFile(int numberLength, string parentDirectory, List matchingFiles)
-    {
-#nullable enable
-        string checkFile;
-        string writeFile;
-        string? directoryName;
-        int parentDirectoryLength = parentDirectory.Length;
-        foreach (string matchingFile in matchingFiles)
-        {
-            directoryName = Path.GetDirectoryName(matchingFile);
-            if (directoryName is null)
-                continue;
-            checkFile = $"{matchingFile[0]}{directoryName.Substring(parentDirectoryLength + numberLength + 1)}";
-            writeFile = Path.Combine(parentDirectory, $"{directoryName.Substring(parentDirectory.Length + 1, numberLength)}.txt");
-            if (File.Exists(writeFile))
-                continue;
-            File.AppendAllLines(writeFile, new string[] { parentDirectory, matchingFile, directoryName, checkFile });
-        }
-#nullable disable
-    }
-
-    private Tuple> GetExtractResult(string reportFullPath, DateTime dateTime)
-    {
-        Tuple> results = new(string.Empty, null, null, new List());
-        ProcessData processData = ProcessDataStandardFormat.GetProcessData(reportFullPath);
-        _Logistics = new Logistics(reportFullPath, processData.Logistics);
-        SetFileParameterLotIDToLogisticsMID();
-        int numberLength = 2;
-        long ticks = dateTime.Ticks;
-        string parentParentDirectory = GetParentParent(reportFullPath);
-        List searchDirectories = GetSearchDirectories(numberLength, parentParentDirectory);
-        List matchingFiles = GetMatchingFiles(ticks, reportFullPath, searchDirectories);
-        if (matchingFiles.Count != searchDirectories.Count)
-            throw new Exception($"Didn't find all files after {_BreakAfterSeconds} second(s)!");
-        try
-        { CreatePointerFile(numberLength, parentParentDirectory, matchingFiles); }
-        catch (Exception) { }
-        List<(string matchingFile, string checkFile)> collection = GetCollection(numberLength, parentParentDirectory, matchingFiles);
-        MoveCollection(dateTime, collection);
-        return results;
+        return results.AsReadOnly();
     }
 
 }
\ No newline at end of file
diff --git a/Adaptation/FileHandlers/OpenInsight/FileRead.cs b/Adaptation/FileHandlers/OpenInsight/FileRead.cs
index 5706703..5dadde5 100644
--- a/Adaptation/FileHandlers/OpenInsight/FileRead.cs
+++ b/Adaptation/FileHandlers/OpenInsight/FileRead.cs
@@ -118,15 +118,15 @@ public class FileRead : Shared.FileRead, IFileRead
     private Tuple> GetExtractResult(string reportFullPath, DateTime dateTime)
     {
         Tuple> results;
-        ProcessData processData = ProcessDataStandardFormat.GetProcessData(reportFullPath);
-        _Logistics = new Logistics(reportFullPath, processData.Logistics);
+        ProcessDataStandardFormat processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath);
+        _Logistics = new Logistics(reportFullPath, processDataStandardFormat);
         SetFileParameterLotIDToLogisticsMID();
-        JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(processData);
+        JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(processDataStandardFormat);
         List descriptions = json.ProcessData.GetDescriptions(jsonElements);
         Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
         if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
             SaveOpenInsightFile(reportFullPath, dateTime, descriptions, tests);
-        results = new Tuple>(processData.Logistics, tests, jsonElements, new List());
+        results = new Tuple>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics[0]), tests, jsonElements, new List());
         return results;
     }
 
diff --git a/Adaptation/FileHandlers/OpenInsightMetrologyViewer/FileRead.cs b/Adaptation/FileHandlers/OpenInsightMetrologyViewer/FileRead.cs
index 8c0b66a..a9a686b 100644
--- a/Adaptation/FileHandlers/OpenInsightMetrologyViewer/FileRead.cs
+++ b/Adaptation/FileHandlers/OpenInsightMetrologyViewer/FileRead.cs
@@ -129,15 +129,15 @@ public class FileRead : Shared.FileRead, IFileRead
     private Tuple> GetExtractResult(string reportFullPath, DateTime dateTime)
     {
         Tuple> results;
-        ProcessData processData = ProcessDataStandardFormat.GetProcessData(reportFullPath);
-        _Logistics = new Logistics(reportFullPath, processData.Logistics);
+        ProcessDataStandardFormat processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath);
+        _Logistics = new Logistics(reportFullPath, processDataStandardFormat);
         SetFileParameterLotIDToLogisticsMID();
-        JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(processData);
+        JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(processDataStandardFormat);
         List descriptions = json.ProcessData.GetDescriptions(jsonElements);
         Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
         if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
             SendData(reportFullPath, dateTime, descriptions);
-        results = new Tuple>(processData.Logistics, tests, jsonElements, new List());
+        results = new Tuple>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics[0]), tests, jsonElements, new List());
         return results;
     }
 
diff --git a/Adaptation/FileHandlers/OpenInsightMetrologyViewerAttachments/FileRead.cs b/Adaptation/FileHandlers/OpenInsightMetrologyViewerAttachments/FileRead.cs
index 2242792..52bbdd9 100644
--- a/Adaptation/FileHandlers/OpenInsightMetrologyViewerAttachments/FileRead.cs
+++ b/Adaptation/FileHandlers/OpenInsightMetrologyViewerAttachments/FileRead.cs
@@ -146,15 +146,15 @@ public class FileRead : Shared.FileRead, IFileRead
         if (dateTime == DateTime.MinValue)
             throw new ArgumentNullException(nameof(dateTime));
         Tuple> results;
-        ProcessData processData = ProcessDataStandardFormat.GetProcessData(reportFullPath);
-        _Logistics = new Logistics(reportFullPath, processData.Logistics);
+        ProcessDataStandardFormat processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath);
+        _Logistics = new Logistics(reportFullPath, processDataStandardFormat);
         SetFileParameterLotIDToLogisticsMID();
-        JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(processData);
+        JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(processDataStandardFormat);
         List descriptions = json.ProcessData.GetDescriptions(jsonElements);
         Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
         if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
             PostOpenInsightMetrologyViewerAttachments(descriptions);
-        results = new Tuple>(processData.Logistics, tests, jsonElements, new List());
+        results = new Tuple>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics[0]), tests, jsonElements, new List());
         return results;
     }
 
diff --git a/Adaptation/FileHandlers/Priority/FileRead.cs b/Adaptation/FileHandlers/Priority/FileRead.cs
index d2c7e1e..e67bd2e 100644
--- a/Adaptation/FileHandlers/Priority/FileRead.cs
+++ b/Adaptation/FileHandlers/Priority/FileRead.cs
@@ -131,7 +131,9 @@ public class FileRead : Shared.FileRead, IFileRead
         Tuple> results;
         if (dateTime == DateTime.MinValue)
             throw new ArgumentNullException(nameof(dateTime));
-        _Logistics = new Logistics(reportFullPath, $"LOGISTICS_1{'\t'}A_JOBID={"BACKLOG"};A_MES_ENTITY={"BACKLOG"};");
+        string[] lines = new string[] { string.Empty, "NUM_DATA_ROWS", $"LOGISTICS_1{'\t'}A_JOBID={"BACKLOG"};A_MES_ENTITY={"BACKLOG"};" };
+        ProcessDataStandardFormat processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath, lines);
+        _Logistics = new Logistics(reportFullPath, processDataStandardFormat);
         results = new(_Logistics.Logistics1[0], Array.Empty(), Array.Empty(), new List());
         return results;
     }
diff --git a/Adaptation/FileHandlers/Processed/FileRead.cs b/Adaptation/FileHandlers/Processed/FileRead.cs
index 5dd6706..8e06842 100644
--- a/Adaptation/FileHandlers/Processed/FileRead.cs
+++ b/Adaptation/FileHandlers/Processed/FileRead.cs
@@ -143,13 +143,13 @@ public class FileRead : Shared.FileRead, IFileRead
     private Tuple> GetExtractResult(string reportFullPath, DateTime dateTime)
     {
         Tuple> results;
-        ProcessData processData = ProcessDataStandardFormat.GetProcessData(reportFullPath);
-        _Logistics = new Logistics(reportFullPath, processData.Logistics);
+        ProcessDataStandardFormat processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath);
+        _Logistics = new Logistics(reportFullPath, processDataStandardFormat);
         SetFileParameterLotIDToLogisticsMID();
-        JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(processData);
+        JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(processDataStandardFormat);
         List descriptions = json.ProcessData.GetDescriptions(jsonElements);
         Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
-        results = new Tuple>(processData.Logistics, tests, jsonElements, new List());
+        results = new Tuple>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics[0]), tests, jsonElements, new List());
         if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
             DirectoryMove(reportFullPath, dateTime, descriptions);
         else if (!_IsEAFHosted)
diff --git a/Adaptation/FileHandlers/SPaCe/FileRead.cs b/Adaptation/FileHandlers/SPaCe/FileRead.cs
index dc59c67..39efb63 100644
--- a/Adaptation/FileHandlers/SPaCe/FileRead.cs
+++ b/Adaptation/FileHandlers/SPaCe/FileRead.cs
@@ -117,15 +117,15 @@ public class FileRead : Shared.FileRead, IFileRead
     private Tuple> GetExtractResult(string reportFullPath, DateTime dateTime)
     {
         Tuple> results;
-        ProcessData processData = ProcessDataStandardFormat.GetProcessData(reportFullPath);
-        _Logistics = new Logistics(reportFullPath, processData.Logistics);
+        ProcessDataStandardFormat processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath);
+        _Logistics = new Logistics(reportFullPath, processDataStandardFormat);
         SetFileParameterLotIDToLogisticsMID();
-        JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(processData);
+        JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(processDataStandardFormat);
         List descriptions = GetDuplicatorDescriptions(jsonElements);
         Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
         if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
             FileCopy(reportFullPath, dateTime, descriptions);
-        results = new Tuple>(processData.Logistics, tests, jsonElements, new List());
+        results = new Tuple>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics[0]), tests, jsonElements, new List());
         return results;
     }
 
diff --git a/Adaptation/FileHandlers/Violation/FileRead.cs b/Adaptation/FileHandlers/Violation/FileRead.cs
new file mode 100644
index 0000000..a47b518
--- /dev/null
+++ b/Adaptation/FileHandlers/Violation/FileRead.cs
@@ -0,0 +1,127 @@
+using Adaptation.Eaf.Management.ConfigurationData.CellAutomation;
+using Adaptation.Ifx.Eaf.EquipmentConnector.File.Configuration;
+using Adaptation.Shared;
+using Adaptation.Shared.Duplicator;
+using Adaptation.Shared.Methods;
+using System;
+using System.Collections.Generic;
+using System.Collections.ObjectModel;
+using System.IO;
+using System.Text.Json;
+
+namespace Adaptation.FileHandlers.Violation;
+
+public class FileRead : Shared.FileRead, IFileRead
+{
+
+    private long? _TickOffset;
+    private readonly string _URL;
+    private readonly ReadOnlyCollection _WorkItemTypes;
+
+    public FileRead(ISMTP smtp, Dictionary fileParameter, string cellInstanceName, int? connectionCount, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList modelObjectParameters, string equipmentDictionaryName, Dictionary> dummyRuns, Dictionary> staticRuns, bool useCyclicalForDescription, bool isEAFHosted) :
+        base(new Description(), false, smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null)
+    {
+        _MinFileLength = 10;
+        _NullData = string.Empty;
+        _Logistics = new(this);
+        if (_FileParameter is null)
+            throw new Exception(cellInstanceConnectionName);
+        if (_ModelObjectParameterDefinitions is null)
+            throw new Exception(cellInstanceConnectionName);
+        if (!_IsDuplicator)
+            throw new Exception(cellInstanceConnectionName);
+        string cellInstanceNamed = string.Concat("CellInstance.", _EquipmentType);
+        _URL = GetPropertyValue(cellInstanceConnectionName, modelObjectParameters, $"{cellInstanceNamed}.URL");
+        string workItemTypes = GetPropertyValue(cellInstanceConnectionName, modelObjectParameters, $"{cellInstanceNamed}.WorkItemTypes");
+        _WorkItemTypes = new(workItemTypes.Split('|'));
+        if (_IsEAFHosted)
+            NestExistingFiles(_FileConnectorConfiguration);
+    }
+
+    void IFileRead.Move(Tuple> extractResults, Exception exception) => Move(extractResults);
+
+    void IFileRead.WaitForThread() => WaitForThread(thread: null, threadExceptions: null);
+
+    string IFileRead.GetEventDescription()
+    {
+        string result = _Description.GetEventDescription();
+        return result;
+    }
+
+    List IFileRead.GetHeaderNames()
+    {
+        List results = _Description.GetHeaderNames();
+        return results;
+    }
+
+    string[] IFileRead.Move(Tuple> extractResults, string to, string from, string resolvedFileLocation, Exception exception)
+    {
+        string[] results = Move(extractResults, to, from, resolvedFileLocation, exception);
+        return results;
+    }
+
+    JsonProperty[] IFileRead.GetDefault()
+    {
+        JsonProperty[] results = _Description.GetDefault(this, _Logistics);
+        return results;
+    }
+
+    Dictionary IFileRead.GetDisplayNamesJsonElement()
+    {
+        Dictionary results = _Description.GetDisplayNamesJsonElement(this);
+        return results;
+    }
+
+    List IFileRead.GetDescriptions(IFileRead fileRead, List tests, IProcessData processData)
+    {
+        List results = _Description.GetDescriptions(fileRead, _Logistics, tests, processData);
+        return results;
+    }
+
+    Tuple> IFileRead.GetExtractResult(string reportFullPath, string eventName)
+    {
+        Tuple> results;
+        if (string.IsNullOrEmpty(eventName))
+            throw new Exception();
+        _ReportFullPath = reportFullPath;
+        DateTime dateTime = DateTime.Now;
+        results = GetExtractResult(reportFullPath, dateTime);
+        if (results.Item3 is null)
+            results = new Tuple>(results.Item1, Array.Empty(), JsonSerializer.Deserialize("[]"), results.Item4);
+        if (results.Item3.Length > 0 && _IsEAFHosted)
+            WritePDSF(this, results.Item3);
+        UpdateLastTicksDuration(DateTime.Now.Ticks - dateTime.Ticks);
+        return results;
+    }
+
+    Tuple> IFileRead.ReExtract()
+    {
+        Tuple> results;
+        List headerNames = _Description.GetHeaderNames();
+        Dictionary keyValuePairs = _Description.GetDisplayNamesJsonElement(this);
+        results = ReExtract(this, headerNames, keyValuePairs);
+        return results;
+    }
+
+    private Tuple> GetExtractResult(string reportFullPath, DateTime dateTime)
+    {
+        Tuple> results = new(string.Empty, null, null, new List());
+        _TickOffset ??= 0; // new FileInfo(reportFullPath).LastWriteTime.Ticks - dateTime.Ticks;
+        string[] lines = new string[] { string.Empty, "NUM_DATA_ROWS", $"LOGISTICS_1{'\t'}A_JOBID={"BACKLOG"};A_MES_ENTITY={"BACKLOG"};" };
+        ProcessDataStandardFormat processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath, lines);
+        _Logistics = new Logistics(reportFullPath, processDataStandardFormat);
+        SetFileParameterLotIDToLogisticsMID();
+        if (_Logistics.FileInfo.Length < _MinFileLength)
+            results.Item4.Add(_Logistics.FileInfo);
+        else
+        {
+            IProcessData iProcessData = new ProcessData(this, _Logistics, _FileConnectorConfiguration.TargetFileLocation, _URL, _WorkItemTypes, results.Item4);
+            if (iProcessData.Details.Count == 0)
+                results = new(string.Concat("B) No Data - ", dateTime.Ticks), Array.Empty(), Array.Empty(), results.Item4);
+            else
+                results = iProcessData.GetResults(this, _Logistics, results.Item4);
+        }
+        return results;
+    }
+
+}
\ No newline at end of file
diff --git a/Adaptation/FileHandlers/Violation/ProcessData.cs b/Adaptation/FileHandlers/Violation/ProcessData.cs
new file mode 100644
index 0000000..51c2da9
--- /dev/null
+++ b/Adaptation/FileHandlers/Violation/ProcessData.cs
@@ -0,0 +1,706 @@
+using Adaptation.FileHandlers.json.WorkItems;
+using Adaptation.Shared;
+using Adaptation.Shared.Duplicator;
+using Adaptation.Shared.Methods;
+using log4net;
+using System;
+using System.Collections.Generic;
+using System.Collections.ObjectModel;
+using System.IO;
+using System.Linq;
+using System.Text.Json;
+using System.Text.Json.Serialization;
+
+namespace Adaptation.FileHandlers.Violation;
+
+#nullable enable
+
+public class ProcessData : IProcessData
+{
+
+    private readonly List _Details;
+
+    List Shared.Properties.IProcessData.Details => _Details;
+
+    private readonly ILog _Log;
+
+    string IProcessData.GetCurrentReactor(IFileRead fileRead, Logistics logistics, Dictionary reactors) =>
+        throw new Exception(string.Concat("See ", nameof(WriteFiles)));
+
+    Tuple> IProcessData.GetResults(IFileRead fileRead, Logistics logistics, List fileInfoCollection) =>
+        new(logistics.Logistics1[0], Array.Empty(), Array.Empty(), fileInfoCollection);
+
+    public ProcessData(IFileRead fileRead, Logistics logistics, string targetFileLocation, string url, ReadOnlyCollection workItemTypes, List fileInfoCollection)
+    {
+        if (fileRead.IsEAFHosted)
+        { }
+        _Details = new List();
+        _Log = LogManager.GetLogger(typeof(ProcessData));
+        WriteFiles(fileRead, logistics, targetFileLocation, url, fileInfoCollection, workItemTypes);
+    }
+
+    private void WriteFiles(IFileRead fileRead, Logistics logistics, string destinationDirectory, string url, List fileInfoCollection, ReadOnlyCollection __)
+    {
+        if (!Directory.Exists(destinationDirectory))
+            _ = Directory.CreateDirectory(destinationDirectory);
+        string json = File.ReadAllText(logistics.ReportFullPath);
+        // WorkItem[]? workItems = JsonSerializer.Deserialize(json);
+        // if (workItems is null)
+        //     throw new Exception(nameof(workItems));
+        JsonElement[]? jsonElements = JsonSerializer.Deserialize(json);
+        if (jsonElements is null)
+            throw new Exception(nameof(jsonElements));
+        WorkItem? workItem;
+        List workItems = new();
+        foreach (JsonElement jsonElement in jsonElements)
+        {
+            workItem = JsonSerializer.Deserialize(jsonElement.ToString());
+            if (workItem is null)
+                continue;
+            workItems.Add(workItem);
+        }
+        List spaces = new();
+        bool keepRelations = false;
+        List lines = new();
+        List messages = new();
+        ReadOnlyCollection results;
+        ReadOnlyDictionary keyValuePairs = GetWorkItems(workItems, keepRelations);
+        ReadOnlyCollection records = new(keyValuePairs.Values.ToArray());
+        ReadOnlyCollection userStoryWorkItemTypes = new(new string[] { "User Story" });
+        ReadOnlyCollection bugFeatureWorkItemTypes = new(new string[] { "Bug", "Feature" });
+        ReadOnlyCollection bugUserStoryWorkItemTypes = new(new string[] { "Bug", "User Story" });
+        ReadOnlyCollection bugUserStoryTaskWorkItemTypes = new(new string[] { "Bug", "User Story", "Task" });
+        {
+            lines.Clear();
+            string workItemType = "Feature";
+            lines.Add($"# {nameof(FeatureCheckIterationPath122508)}");
+            lines.Add(string.Empty);
+            results = FeatureCheckIterationPath122508(url, lines, bugUserStoryTaskWorkItemTypes, keyValuePairs, workItemType);
+            WriteFiles(fileRead, destinationDirectory, fileInfoCollection, new(lines), workItemType, results, "check-122508");
+            _Details.Add(results);
+        }
+        {
+            lines.Clear();
+            string workItemType = "Feature";
+            lines.Add($"# {nameof(FeatureCheckTag122514)}");
+            lines.Add(string.Empty);
+            results = FeatureCheckTag122514(url, lines, bugUserStoryWorkItemTypes, keyValuePairs, workItemType);
+            WriteFiles(fileRead, destinationDirectory, fileInfoCollection, new(lines), workItemType, results, "check-122514");
+            _Details.Add(results);
+        }
+        {
+            lines.Clear();
+            string workItemType = "Feature";
+            lines.Add($"# {nameof(FeatureCheckPriority126169)}");
+            lines.Add(string.Empty);
+            results = FeatureCheckPriority126169(url, lines, bugUserStoryWorkItemTypes, keyValuePairs, workItemType);
+            WriteFiles(fileRead, destinationDirectory, fileInfoCollection, new(lines), workItemType, results, "check-126169");
+            _Details.Add(results);
+        }
+        {
+            lines.Clear();
+            string workItemType = "Feature";
+            lines.Add($"# {nameof(FeatureCheckState123066)}");
+            lines.Add(string.Empty);
+            results = FeatureCheckState123066(url, lines, bugUserStoryTaskWorkItemTypes, keyValuePairs, workItemType);
+            WriteFiles(fileRead, destinationDirectory, fileInfoCollection, new(lines), workItemType, results, "check-123066");
+            _Details.Add(results);
+        }
+        {
+            lines.Clear();
+            string workItemType = "Feature";
+            lines.Add($"# {nameof(FeatureCheckState123067)}");
+            lines.Add(string.Empty);
+            results = FeatureCheckState123067(url, lines, bugUserStoryTaskWorkItemTypes, keyValuePairs, workItemType);
+            WriteFiles(fileRead, destinationDirectory, fileInfoCollection, new(lines), workItemType, results, "check-123067");
+            _Details.Add(results);
+        }
+        {
+            lines.Clear();
+            string workItemType = "Feature";
+            lines.Add($"# {nameof(FeatureCheckStart122517)}");
+            lines.Add(string.Empty);
+            results = FeatureCheckStart122517(url, lines, bugUserStoryTaskWorkItemTypes, keyValuePairs, workItemType);
+            WriteFiles(fileRead, destinationDirectory, fileInfoCollection, new(lines), workItemType, results, "check-122517");
+            _Details.Add(results);
+        }
+        {
+            lines.Clear();
+            string workItemType = "User Story";
+            lines.Add($"# {nameof(UserStoryCheckIterationPath228385)}");
+            lines.Add(string.Empty);
+            results = UserStoryCheckIterationPath228385(url, lines, userStoryWorkItemTypes, keyValuePairs, workItemType);
+            WriteFiles(fileRead, destinationDirectory, fileInfoCollection, new(lines), workItemType, results, "check-228385");
+            _Details.Add(results);
+        }
+        if (messages.Count > 0)
+            throw new Exception($"{messages.Count}{Environment.NewLine}{string.Join(Environment.NewLine, messages)}");
+    }
+
+    private static ReadOnlyDictionary GetWorkItems(IEnumerable workItems, bool keepRelations)
+    {
+        ReadOnlyDictionary results;
+        Dictionary keyValuePairs = new();
+        foreach (WorkItem workItem in workItems)
+            keyValuePairs.Add(workItem.Id, workItem);
+        results = GetKeyValuePairs(new(keyValuePairs), keepRelations);
+        return results;
+    }
+
+    private static ReadOnlyDictionary GetKeyValuePairs(ReadOnlyDictionary keyValuePairs, bool keepRelations)
+    {
+        Dictionary results = new();
+        Record record;
+        List nests = new();
+        WorkItem? parentWorkItem;
+        ReadOnlyCollection childRecords;
+        ReadOnlyCollection relatedRecords;
+        ReadOnlyCollection successorRecords;
+        foreach (KeyValuePair keyValuePair in keyValuePairs)
+        {
+            nests.Clear();
+            if (keyValuePair.Value.Parent is null)
+                parentWorkItem = null;
+            else
+                _ = keyValuePairs.TryGetValue(keyValuePair.Value.Parent.Value, out parentWorkItem);
+            try
+            {
+                childRecords = Record.GetKeyValuePairs(keyValuePairs, keyValuePair.Value, "Child", nests, keepRelations); // Forward
+                relatedRecords = Record.GetKeyValuePairs(keyValuePairs, keyValuePair.Value, "Related", nests, keepRelations); // Related
+                successorRecords = Record.GetKeyValuePairs(keyValuePairs, keyValuePair.Value, "Successor", nests, keepRelations); // Forward
+                // predecessorRecords = Record.GetKeyValuePairs(keyValuePairs, keyValuePair.Value, "Predecessor", nests, keepRelations); // Reverse
+                record = Record.Get(keyValuePair.Value, parentWorkItem, childRecords, relatedRecords, successorRecords, keepRelations);
+            }
+            catch (Exception)
+            {
+                record = new(keyValuePair.Value, parentWorkItem, Array.Empty(), Array.Empty(), Array.Empty());
+            }
+            results.Add(keyValuePair.Key, record);
+        }
+        return new(results);
+    }
+
+    private static ReadOnlyCollection FeatureCheckIterationPath122508(string url, List lines, ReadOnlyCollection workItemTypes, ReadOnlyDictionary keyValuePairs, string workItemType)
+    {
+        List results = new();
+        Record record;
+        List violations = new();
+        List collection = new();
+        ReadOnlyCollection records;
+        ReadOnlyCollection maxIterationPaths;
+        foreach (KeyValuePair keyValuePair in keyValuePairs)
+        {
+            record = keyValuePair.Value;
+            if (record.WorkItem.State is "Removed")
+                continue;
+            if (!record.WorkItem.IterationPath.Contains('\\'))
+                continue;
+            if (record.WorkItem.WorkItemType != workItemType)
+                continue;
+            collection.Clear();
+            violations.Clear();
+            if (record.Children is null || record.Children.Length == 0)
+                continue;
+            records = FilterChildren(workItemTypes, record);
+            maxIterationPaths = GetMaxIterationPaths122508(records);
+            foreach (Record r in maxIterationPaths)
+            {
+                if (string.IsNullOrEmpty(r.WorkItem.IterationPath) || record.WorkItem.IterationPath == r.WorkItem.IterationPath)
+                    continue;
+                violations.Add($"{r.WorkItem.Id}:{r.WorkItem.IterationPath};");
+            }
+            if (violations.Count > 0)
+            {
+                collection.Insert(0, string.Empty);
+                collection.Insert(0, $"## {record.WorkItem.AssignedTo} - {record.WorkItem.Id} - {record.WorkItem.Title}");
+                lines.AddRange(collection);
+                violations.Insert(0, $"IterationPath:{record.WorkItem.IterationPath};");
+                results.Add(Record.GetWithoutNesting(record, string.Join(" ", violations)));
+            }
+        }
+        return new(results);
+    }
+
+    private static ReadOnlyCollection GetMaxIterationPaths122508(ReadOnlyCollection records)
+    {
+        List results;
+        List? collection;
+        Dictionary> keyValuePairs = new();
+        foreach (Record record in records)
+        {
+            if (!keyValuePairs.TryGetValue(record.WorkItem.IterationPath, out collection))
+            {
+                keyValuePairs.Add(record.WorkItem.IterationPath, new());
+                if (!keyValuePairs.TryGetValue(record.WorkItem.IterationPath, out collection))
+                    throw new Exception();
+
+            }
+            collection.Add(record);
+        }
+        string? max = keyValuePairs.Keys.Max();
+        results = string.IsNullOrEmpty(max) ? new() : keyValuePairs[max];
+        return results.AsReadOnly();
+    }
+
+    private static void WriteFiles(IFileRead fileRead, string destinationDirectory, List fileInfoCollection, ReadOnlyCollection lines, string _, ReadOnlyCollection records, string fileName)
+    {
+        string markdown = string.Join(Environment.NewLine, lines);
+        string markdownFile = Path.Combine(destinationDirectory, $"{fileName}.md");
+        string markdownOld = !File.Exists(markdownFile) ? string.Empty : File.ReadAllText(markdownFile);
+        if (markdown != markdownOld)
+            File.WriteAllText(markdownFile, markdown);
+        if (!fileRead.IsEAFHosted)
+            fileInfoCollection.Add(new(markdownFile));
+        string html = CommonMark.CommonMarkConverter.Convert(markdown).Replace(" FeatureCheckTag122514(string url, List lines, ReadOnlyCollection workItemTypes, ReadOnlyDictionary keyValuePairs, string workItemType)
+    {
+        List results = new();
+        Record record;
+        List collection = new();
+        List violations = new();
+        ReadOnlyCollection records;
+        ReadOnlyCollection recordsNotMatching;
+        foreach (KeyValuePair keyValuePair in keyValuePairs)
+        {
+            record = keyValuePair.Value;
+            if (record.WorkItem.State is "Removed")
+                continue;
+            if (record.WorkItem.WorkItemType != workItemType)
+                continue;
+            collection.Clear();
+            violations.Clear();
+            if (record.Children is null || record.Children.Length == 0)
+                continue;
+            if (string.IsNullOrEmpty(record.WorkItem.Tags))
+                recordsNotMatching = new(new Record[] { record });
+            else
+            {
+                records = FilterChildren(workItemTypes, record);
+                recordsNotMatching = GetWorkItemsNotMatching122514(record, records);
+                if (!string.IsNullOrEmpty(record.WorkItem.Tags) && recordsNotMatching.Count == 0)
+                    continue;
+            }
+            collection.Add($"## {record.WorkItem.AssignedTo} - {record.WorkItem.Id} - {record.WorkItem.Title}");
+            collection.Add(string.Empty);
+            foreach (Record r in recordsNotMatching)
+                collection.Add($"- [ ] [{r.WorkItem}]({url}{r.WorkItem}) {nameof(record.WorkItem.Tags)} != {record.WorkItem.Tags}");
+            collection.Add(string.Empty);
+            lines.AddRange(collection);
+            violations.Add($"Tag:{record.WorkItem.Tags};");
+            foreach (Record r in recordsNotMatching)
+                violations.Add($"{r.WorkItem.Id}:{r.WorkItem.Tags};");
+            results.Add(Record.GetWithoutNesting(record, string.Join(" ", violations)));
+        }
+        return new(results);
+    }
+
+    private static ReadOnlyCollection FilterChildren(ReadOnlyCollection workItemTypes, Record record)
+    {
+        List results = new();
+        FilterChildren(workItemTypes, record, results);
+        return new(results);
+    }
+
+    private static void FilterChildren(ReadOnlyCollection workItemTypes, Record record, List results)
+    {
+        if (record.Children is not null)
+        {
+            foreach (Record r in record.Children)
+            {
+                if (!workItemTypes.Contains(r.WorkItem.WorkItemType))
+                    continue;
+                results.Add(r);
+                FilterChildren(workItemTypes, r, results);
+            }
+        }
+    }
+
+    private static ReadOnlyCollection GetWorkItemsNotMatching122514(Record record, ReadOnlyCollection records)
+    {
+        List results = new();
+        string[] segments;
+        string[] parentTags = record.WorkItem.Tags.Split(';').Select(l => l.Trim()).ToArray();
+        foreach (Record r in records)
+        {
+            segments = string.IsNullOrEmpty(r.WorkItem.Tags) ? Array.Empty() : r.WorkItem.Tags.Split(';').Select(l => l.Trim()).ToArray();
+            if (segments.Length > 0 && parentTags.Any(l => segments.Contains(l)))
+                continue;
+            results.Add(r);
+        }
+        return new(results);
+    }
+
+    private static ReadOnlyCollection FeatureCheckPriority126169(string url, List lines, ReadOnlyCollection workItemTypes, ReadOnlyDictionary keyValuePairs, string workItemType)
+    {
+        List results = new();
+        Record record;
+        List collection = new();
+        List violations = new();
+        ReadOnlyCollection records;
+        ReadOnlyCollection recordsNotMatching;
+        foreach (KeyValuePair keyValuePair in keyValuePairs)
+        {
+            record = keyValuePair.Value;
+            if (record.WorkItem.State is "Removed")
+                continue;
+            if (record.WorkItem.WorkItemType != workItemType)
+                continue;
+            collection.Clear();
+            violations.Clear();
+            if (record.Children is null || record.Children.Length == 0)
+                continue;
+            records = FilterChildren(workItemTypes, record);
+            recordsNotMatching = GetWorkItemsNotMatching126169(record, records);
+            if (recordsNotMatching.Count == 0)
+                continue;
+            collection.Add($"## {record.WorkItem.AssignedTo} - {record.WorkItem.Id} - {record.WorkItem.Title}");
+            collection.Add(string.Empty);
+            collection.Add($"- [{record.WorkItem.Id}]({url}{record.WorkItem.Id})");
+            foreach (Record r in recordsNotMatching)
+                collection.Add($"- [ ] [{r.WorkItem.Id}]({url}{r.WorkItem.Id}) {nameof(record.WorkItem.Priority)} != {record.WorkItem.Priority}");
+            collection.Add(string.Empty);
+            lines.AddRange(collection);
+            violations.Add($"Priority:{record.WorkItem.Priority};");
+            foreach (Record r in recordsNotMatching)
+                violations.Add($"{r.WorkItem.Id}:{r.WorkItem.Priority};");
+            results.Add(Record.GetWithoutNesting(record, string.Join(" ", violations)));
+        }
+        return new(results);
+    }
+
+    private static ReadOnlyCollection GetWorkItemsNotMatching126169(Record record, ReadOnlyCollection records)
+    {
+        List results = new();
+        foreach (Record r in records)
+        {
+            if (record.WorkItem.Priority is null)
+            {
+                results.Add(record);
+                break;
+            }
+            if (r.WorkItem.Priority == record.WorkItem.Priority.Value)
+                continue;
+            results.Add(r);
+        }
+        return new(results);
+    }
+
+    private static ReadOnlyCollection FeatureCheckState123066(string url, List lines, ReadOnlyCollection workItemTypes, ReadOnlyDictionary keyValuePairs, string workItemType)
+    {
+        List results = new();
+        Record record;
+        List collection = new();
+        List violations = new();
+        ReadOnlyCollection records;
+        ReadOnlyCollection recordsNotMatching;
+        foreach (KeyValuePair keyValuePair in keyValuePairs)
+        {
+            record = keyValuePair.Value;
+            if (record.WorkItem.State is "Removed")
+                continue;
+            if (record.WorkItem.WorkItemType != workItemType)
+                continue;
+            collection.Clear();
+            violations.Clear();
+            if (record.Children is null || record.Children.Length == 0)
+                continue;
+            records = FilterChildren(workItemTypes, record);
+            recordsNotMatching = GetWorkItemsNotMatching123066(record, records);
+            if (recordsNotMatching.Count == 0)
+                continue;
+            collection.Add($"## {record.WorkItem.AssignedTo} - {record.WorkItem.Id} - {record.WorkItem.Title}");
+            collection.Add(string.Empty);
+            collection.Add($"- [{record.WorkItem.Id}]({url}{record.WorkItem.Id})");
+            foreach (Record r in recordsNotMatching)
+                collection.Add($"- [ ] [{r.WorkItem.Id}]({url}{r.WorkItem.Id}) {nameof(record.WorkItem.State)} != {record.WorkItem.State}");
+            collection.Add(string.Empty);
+            lines.AddRange(collection);
+            violations.Add($"State:{record.WorkItem.State};");
+            foreach (Record r in recordsNotMatching)
+                violations.Add($"{r.WorkItem.Id}:{r.WorkItem.State};");
+            results.Add(Record.GetWithoutNesting(record, string.Join(" ", violations)));
+        }
+        return new(results);
+    }
+
+    private static ReadOnlyCollection GetWorkItemsNotMatching123066(Record record, ReadOnlyCollection records)
+    {
+        List results = new();
+        int check;
+        int state = GetState(record.WorkItem);
+        List> collection = new();
+        foreach (Record r in records)
+        {
+            if (r.WorkItem.State is "Removed")
+                continue;
+            check = GetState(r.WorkItem);
+            if (check == state)
+                continue;
+            collection.Add(new(check, r));
+        }
+        if (collection.Count > 0)
+        {
+            KeyValuePair[] notNewState = (from l in collection where l.Value.WorkItem.State != "New" select l).ToArray();
+            if (notNewState.Length == 0 && record.WorkItem.State is "New" or "Active")
+                collection.Clear();
+            else if (notNewState.Length > 0)
+            {
+                int minimum = notNewState.Min(l => l.Key);
+                if (minimum == state)
+                    collection.Clear();
+                else if (minimum == 1 && record.WorkItem.State == "New")
+                    collection.Clear();
+                else if (notNewState.Length > 0 && record.WorkItem.State == "Active")
+                    collection.Clear();
+            }
+        }
+        foreach (KeyValuePair keyValuePair in collection.OrderByDescending(l => l.Key))
+            results.Add(keyValuePair.Value);
+        return new(results);
+    }
+
+    private static int GetState(WorkItem workItem) =>
+        workItem.State switch
+        {
+            "New" => 1,
+            "Active" => 2,
+            "Resolved" => 3,
+            "Closed" => 4,
+            "Removed" => 5,
+            _ => 8
+        };
+
+    private static ReadOnlyCollection FeatureCheckState123067(string url, List lines, ReadOnlyCollection workItemTypes, ReadOnlyDictionary keyValuePairs, string workItemType)
+    {
+        List results = new();
+        Record record;
+        List collection = new();
+        List violations = new();
+        ReadOnlyCollection records;
+        ReadOnlyCollection recordsNotMatching;
+        foreach (KeyValuePair keyValuePair in keyValuePairs)
+        {
+            record = keyValuePair.Value;
+            if (record.WorkItem.State is "Removed")
+                continue;
+            if (record.WorkItem.WorkItemType != workItemType)
+                continue;
+            collection.Clear();
+            violations.Clear();
+            if (record.Children is null || record.Children.Length == 0)
+                continue;
+            records = FilterChildren(workItemTypes, record);
+            recordsNotMatching = GetWorkItemsNotMatching123067(record, records);
+            if (recordsNotMatching.Count == 0)
+                continue;
+            collection.Add($"## {record.WorkItem.AssignedTo} - {record.WorkItem.Id} - {record.WorkItem.Title}");
+            collection.Add(string.Empty);
+            collection.Add($"- [{record.WorkItem.Id}]({url}{record.WorkItem.Id})");
+            foreach (Record r in recordsNotMatching)
+                collection.Add($"- [ ] [{r.WorkItem.Id}]({url}{r.WorkItem.Id}) {nameof(record.WorkItem.State)} != {record.WorkItem.State}");
+            collection.Add(string.Empty);
+            lines.AddRange(collection);
+            violations.Add($"State:{record.WorkItem.State};");
+            foreach (Record r in recordsNotMatching)
+                violations.Add($"{r.WorkItem.Id}:{r.WorkItem.State};");
+            results.Add(Record.GetWithoutNesting(record, string.Join(" ", violations)));
+        }
+        return new(results);
+    }
+
+    private static ReadOnlyCollection GetWorkItemsNotMatching123067(Record record, ReadOnlyCollection records)
+    {
+        List results = new();
+        int check;
+        int state = GetState(record.WorkItem);
+        List> collection = new();
+        foreach (Record r in records)
+        {
+            if (r.WorkItem.State is "Removed")
+                continue;
+            check = GetState(r.WorkItem);
+            if (check == state)
+                continue;
+            collection.Add(new(check, r));
+        }
+        if (collection.Count > 0)
+        {
+            KeyValuePair[] notNewState = (from l in collection where l.Value.WorkItem.State != "New" select l).ToArray();
+            if (notNewState.Length == 0 && record.WorkItem.State is "New" or "Active")
+                collection.Clear();
+            else if (notNewState.Length > 0)
+            {
+                int minimum = notNewState.Min(l => l.Key);
+                if (minimum == state)
+                    collection.Clear();
+                else if (minimum == 1 && record.WorkItem.State == "New")
+                    collection.Clear();
+                else if (notNewState.Length > 0 && record.WorkItem.State == "Active")
+                    collection.Clear();
+            }
+        }
+        foreach (KeyValuePair keyValuePair in collection.OrderByDescending(l => l.Key))
+            results.Add(keyValuePair.Value);
+        return new(results);
+    }
+
+    private static ReadOnlyCollection FeatureCheckStart122517(string url, List lines, ReadOnlyCollection workItemTypes, ReadOnlyDictionary keyValuePairs, string workItemType)
+    {
+        List results = new();
+        Record record;
+        List collection = new();
+        List violations = new();
+        ReadOnlyCollection records;
+        ReadOnlyCollection recordsNotMatching;
+        foreach (KeyValuePair keyValuePair in keyValuePairs)
+        {
+            record = keyValuePair.Value;
+            if (record.WorkItem.State is "Removed")
+                continue;
+            if (record.WorkItem.WorkItemType != workItemType)
+                continue;
+            collection.Clear();
+            violations.Clear();
+            if (record.Children is null || record.Children.Length == 0)
+                continue;
+            if (record.WorkItem.StartDate is null)
+                continue;
+            records = FilterChildren(workItemTypes, record);
+            recordsNotMatching = GetWorkItemsNotMatching122517(record, records);
+            if (recordsNotMatching.Count == 0)
+                continue;
+            collection.Add($"## {record.WorkItem.AssignedTo} - {record.WorkItem.Id} - {record.WorkItem.Title}");
+            collection.Add(string.Empty);
+            collection.Add($"- [{record.WorkItem.Id}]({url}{record.WorkItem.Id})");
+            foreach (Record r in recordsNotMatching)
+                collection.Add($"- [ ] [{r.WorkItem.Id}]({url}{r.WorkItem.Id}) {nameof(record.WorkItem.ActivatedDate)} != {record.WorkItem.ActivatedDate}");
+            collection.Add(string.Empty);
+            lines.AddRange(collection);
+            violations.Add($"StartDate:{record.WorkItem.StartDate};");
+            foreach (Record r in recordsNotMatching)
+                violations.Add($"{r.WorkItem.Id}:{r.WorkItem.ActivatedDate};");
+            results.Add(Record.GetWithoutNesting(record, string.Join(" ", violations)));
+        }
+        return new(results);
+    }
+
+    private static ReadOnlyCollection GetWorkItemsNotMatching122517(Record record, ReadOnlyCollection records)
+    {
+        List results = new();
+        if (record.WorkItem.StartDate is null)
+            throw new Exception();
+        DateTime dateTime = record.WorkItem.StartDate.Value;
+        List> collection = new();
+        foreach (Record r in records)
+        {
+            if (r.WorkItem.State is "Removed")
+                continue;
+            if (r.WorkItem.ActivatedDate is null)
+                continue;
+            if (dateTime >= r.WorkItem.ActivatedDate.Value)
+                continue;
+            collection.Add(new(r.WorkItem.ActivatedDate.Value.Ticks, r));
+        }
+        foreach (KeyValuePair keyValuePair in collection.OrderBy(l => l.Key))
+            results.Add(keyValuePair.Value);
+        return new(results);
+    }
+
+    private static ReadOnlyCollection UserStoryCheckIterationPath228385(string url, List lines, ReadOnlyCollection _, ReadOnlyDictionary keyValuePairs, string workItemType)
+    {
+        List results = new();
+        long totalStoryPoints;
+        ReadOnlyDictionary> records = GetWorkItemsMatching228385(keyValuePairs, workItemType);
+        foreach (KeyValuePair> keyValuePair in records)
+        {
+            totalStoryPoints = 0;
+            foreach (Record record in keyValuePair.Value)
+            {
+                if (record.WorkItem.StoryPoints is null)
+                    continue;
+                totalStoryPoints += record.WorkItem.StoryPoints.Value;
+            }
+            lines.Add(string.Empty);
+            lines.Add($"## {keyValuePair.Key} => {totalStoryPoints}");
+            lines.Add(string.Empty);
+            foreach (Record record in keyValuePair.Value)
+                lines.Add($"- [ ] [{record.WorkItem.Id}]({url}{record.WorkItem.Id}) - {record.WorkItem.Title}");
+        }
+        return new(results);
+    }
+
+    private static ReadOnlyDictionary> GetWorkItemsMatching228385(ReadOnlyDictionary keyValuePairs, string workItemType)
+    {
+        ReadOnlyDictionary> results;
+        Record record;
+        List records = new();
+        foreach (KeyValuePair keyValuePair in keyValuePairs)
+        {
+            record = keyValuePair.Value;
+            if (record.WorkItem.State is "Removed" or "Closed")
+                continue;
+            if (!record.WorkItem.IterationPath.Contains('\\'))
+                continue;
+            if (record.WorkItem.StoryPoints is null)
+                continue;
+            if (record.WorkItem.WorkItemType != workItemType)
+                continue;
+            records.Add(record);
+        }
+        Record[] sorted = records.OrderByDescending(l => l.WorkItem.IterationPath).ToArray();
+        results = GetWorkItemsMatching228385(new(sorted));
+        return results;
+    }
+
+    private static ReadOnlyDictionary> GetWorkItemsMatching228385(ReadOnlyCollection records)
+    {
+        Dictionary> results = new();
+        string key;
+        List? collection;
+        foreach (Record record in records)
+        {
+            key = $"{record.WorkItem.IterationPath}-{record.WorkItem.AssignedTo}";
+            if (!results.TryGetValue(key, out collection))
+            {
+                results.Add(key, new());
+                if (!results.TryGetValue(key, out collection))
+                    throw new Exception();
+            }
+            collection.Add(record);
+        }
+        return new(results);
+    }
+
+    internal static List GetDescriptions(JsonElement[] jsonElements)
+    {
+        List results = new();
+        Description? description;
+        JsonSerializerOptions jsonSerializerOptions = new() { NumberHandling = JsonNumberHandling.AllowReadingFromString | JsonNumberHandling.WriteAsString };
+        foreach (JsonElement jsonElement in jsonElements)
+        {
+            if (jsonElement.ValueKind != JsonValueKind.Object)
+                throw new Exception();
+            description = JsonSerializer.Deserialize(jsonElement.ToString(), jsonSerializerOptions);
+            if (description is null)
+                continue;
+            results.Add(description);
+        }
+        return results;
+    }
+
+}
\ No newline at end of file
diff --git a/Adaptation/FileHandlers/json/FileRead.cs b/Adaptation/FileHandlers/json/FileRead.cs
index 2dfbdf6..d5c2e27 100644
--- a/Adaptation/FileHandlers/json/FileRead.cs
+++ b/Adaptation/FileHandlers/json/FileRead.cs
@@ -358,7 +358,9 @@ public class FileRead : Shared.FileRead, IFileRead
     private Tuple> GetExtractResult(string reportFullPath, DateTime dateTime)
     {
         Tuple> results;
-        _Logistics = new Logistics(reportFullPath, $"LOGISTICS_1{'\t'}A_JOBID={"BACKLOG"};A_MES_ENTITY={"BACKLOG"};");
+        string[] lines = new string[] { string.Empty, "NUM_DATA_ROWS", $"LOGISTICS_1{'\t'}A_JOBID={"BACKLOG"};A_MES_ENTITY={"BACKLOG"};" };
+        ProcessDataStandardFormat processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath, lines);
+        _Logistics = new Logistics(reportFullPath, processDataStandardFormat);
         if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
             MoveJson(reportFullPath, dateTime);
         results = new(_Logistics.Logistics1[0], Array.Empty(), Array.Empty(), new List());
diff --git a/Adaptation/Shared/FileRead.cs b/Adaptation/Shared/FileRead.cs
index bc160cf..7c2fe14 100644
--- a/Adaptation/Shared/FileRead.cs
+++ b/Adaptation/Shared/FileRead.cs
@@ -63,203 +63,6 @@ public class FileRead : Properties.IFileRead
     string Properties.IFileRead.CellInstanceConnectionName => _CellInstanceConnectionName;
     string Properties.IFileRead.ParameterizedModelObjectDefinitionType => _ParameterizedModelObjectDefinitionType;
 
-    public FileRead(IDescription description, bool isEvent, ISMTP smtp, Dictionary fileParameter, string cellInstanceName, int? connectionCount, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList modelObjectParameters, string equipmentDictionaryName, Dictionary> dummyRuns, Dictionary> staticRuns, bool useCyclicalForDescription, bool isEAFHosted)
-    {
-        _SMTP = smtp;
-        _IsEvent = isEvent;
-        _DummyRuns = dummyRuns;
-        _LastTicksDuration = 0;
-        _StaticRuns = staticRuns;
-        _IsEAFHosted = isEAFHosted;
-        _Description = description;
-        _FileParameter = fileParameter;
-        _ReportFullPath = string.Empty;
-        _CellInstanceName = cellInstanceName;
-        _Calendar = new CultureInfo("en-US").Calendar;
-        _Log = LogManager.GetLogger(typeof(FileRead));
-        _UseCyclicalForDescription = useCyclicalForDescription;
-        _CellInstanceConnectionName = cellInstanceConnectionName;
-        _ModelObjectParameterDefinitions = modelObjectParameters;
-        _FileConnectorConfiguration = fileConnectorConfiguration;
-        _ParameterizedModelObjectDefinitionType = parameterizedModelObjectDefinitionType;
-        _IsSourceTimer = fileConnectorConfiguration.SourceFileFilter.StartsWith("*Timer.txt");
-        string cellInstanceConnectionNameBase = cellInstanceConnectionName.Replace("-", string.Empty);
-        _Hyphens = cellInstanceConnectionName.Length - cellInstanceConnectionNameBase.Length;
-        _TracePath = GetPropertyValue(cellInstanceConnectionName, modelObjectParameters, "Path.Trace");
-        _ExceptionSubject = string.Concat("Exception:", _CellInstanceConnectionName, _FileConnectorConfiguration?.SourceDirectoryCloaking);
-        string suffix;
-        string[] segments = _ParameterizedModelObjectDefinitionType.Split('.');
-        string @namespace = segments[0];
-        string eventNameFileRead = "FileRead";
-        string eventName = segments[segments.Length - 1];
-        bool isDuplicator = segments[0] == cellInstanceName;
-        _IsDuplicator = isDuplicator;
-        _CellInstanceConnectionNameBase = cellInstanceConnectionNameBase;
-        if (eventName == eventNameFileRead)
-            suffix = string.Empty;
-        else
-            suffix = string.Concat('_', eventName.Split(new string[] { eventNameFileRead }, StringSplitOptions.RemoveEmptyEntries)[1]);
-        string parameterizedModelObjectDefinitionTypeAppended = string.Concat(@namespace, suffix);
-        if (!isEAFHosted)
-        {
-            if (string.IsNullOrEmpty(equipmentTypeName) || equipmentTypeName != parameterizedModelObjectDefinitionTypeAppended)
-                throw new Exception(cellInstanceConnectionName);
-            if (string.IsNullOrEmpty(equipmentDictionaryName) && isEvent)
-                throw new Exception(cellInstanceConnectionName);
-            if (!string.IsNullOrEmpty(equipmentDictionaryName) && !isEvent && connectionCount > 1)
-                throw new Exception(cellInstanceConnectionName);
-            // if (string.IsNullOrEmpty(equipmentDictionaryName) && !isEvent)
-            //     throw new Exception(cellInstanceConnectionName);
-            // if (!string.IsNullOrEmpty(equipmentDictionaryName) && isEvent)
-            //     throw new Exception(cellInstanceConnectionName);
-        }
-        if (isDuplicator)
-            _MesEntity = string.Empty;
-        else
-            _MesEntity = GetPropertyValue(cellInstanceConnectionName, modelObjectParameters, string.Concat("CellInstance.", cellInstanceName, ".Alias"));
-        _EventName = eventName;
-        _EventNameFileRead = eventNameFileRead;
-        _EquipmentType = parameterizedModelObjectDefinitionTypeAppended;
-        long breakAfterSeconds;
-        if (_FileConnectorConfiguration is null)
-            breakAfterSeconds = 360;
-        else
-        {
-            if (_FileConnectorConfiguration.FileScanningOption == FileConnectorConfiguration.FileScanningOptionEnum.TimeBased)
-                breakAfterSeconds = Math.Abs(_FileConnectorConfiguration.FileHandleTimeout.Value);
-            else if (_FileConnectorConfiguration.FileScanningOption == FileConnectorConfiguration.FileScanningOptionEnum.FileWatcher)
-                breakAfterSeconds = Math.Abs(_FileConnectorConfiguration.FileScanningIntervalInSeconds.Value);
-            else
-                throw new Exception();
-        }
-        _BreakAfterSeconds = breakAfterSeconds;
-        UpdateLastTicksDuration(breakAfterSeconds * 10000000);
-        if (_IsDuplicator)
-        {
-            if (string.IsNullOrEmpty(_FileConnectorConfiguration.TargetFileLocation) || string.IsNullOrEmpty(_FileConnectorConfiguration.ErrorTargetFileLocation))
-                throw new Exception("_Configuration is empty?");
-            if (_FileConnectorConfiguration.TargetFileLocation.Contains('%') || _FileConnectorConfiguration.ErrorTargetFileLocation.Contains('%'))
-                throw new Exception("_Configuration is incorrect for a duplicator!");
-            // if (_FileConnectorConfiguration is not null)
-            // {
-            //     if (string.IsNullOrEmpty(_FileConnectorConfiguration.SourceDirectoryCloaking))
-            //         throw new Exception("SourceDirectoryCloaking is empty?");
-            //     if (!_FileConnectorConfiguration.SourceDirectoryCloaking.StartsWith("~"))
-            //         throw new Exception("SourceDirectoryCloaking is incorrect for a duplicator!");
-            // }
-        }
-    }
-
-    protected static string GetPropertyValue(string cellInstanceConnectionName, IList modelObjectParameters, string propertyName)
-    {
-        string result;
-        List results = (from l in modelObjectParameters where l.Name == propertyName select l.Value).ToList();
-        if (results.Count != 1)
-            throw new Exception(cellInstanceConnectionName);
-        result = results[0];
-        return result;
-    }
-
-    protected static ModelObjectParameterDefinition[] GetProperties(string cellInstanceConnectionName, IList modelObjectParameters, string propertyNamePrefix)
-    {
-        ModelObjectParameterDefinition[] results = (from l in modelObjectParameters where l.Name.StartsWith(propertyNamePrefix) select l).ToArray();
-        if (results.Length == 0)
-            throw new Exception(cellInstanceConnectionName);
-        return results;
-    }
-
-    protected static ModelObjectParameterDefinition[] GetProperties(string cellInstanceConnectionName, IList modelObjectParameters, string propertyNamePrefix, string propertyNameSuffix)
-    {
-        ModelObjectParameterDefinition[] results = (from l in modelObjectParameters where l.Name.StartsWith(propertyNamePrefix) && l.Name.EndsWith(propertyNameSuffix) select l).ToArray();
-        if (results.Length == 0)
-            throw new Exception(cellInstanceConnectionName);
-        return results;
-    }
-
-    protected void UpdateLastTicksDuration(long ticksDuration)
-    {
-        if (ticksDuration < 50000000)
-            ticksDuration = 50000000;
-        _LastTicksDuration = (long)Math.Ceiling(ticksDuration * .667);
-        _Log.Info($"{new TimeSpan(ticksDuration).TotalMilliseconds} TotalMillisecond(s) to process{Environment.NewLine}{_CellInstanceConnectionName}{Environment.NewLine}<{_ReportFullPath}>");
-    }
-
-    protected void WaitForThread(Thread thread, List threadExceptions)
-    {
-        if (thread is not null)
-        {
-            ThreadState threadState;
-            for (short i = 0; i < short.MaxValue; i++)
-            {
-                if (thread is null)
-                    break;
-                else
-                {
-                    threadState = thread.ThreadState;
-                    if (threadState is not ThreadState.Running and not ThreadState.WaitSleepJoin)
-                        break;
-                }
-                Thread.Sleep(500);
-            }
-            lock (threadExceptions)
-            {
-                if (threadExceptions.Count != 0)
-                {
-                    foreach (Exception item in threadExceptions)
-                        _Log.Error(string.Concat(item.Message, Environment.NewLine, Environment.NewLine, item.StackTrace));
-                    Exception exception = threadExceptions[0];
-                    threadExceptions.Clear();
-                    throw exception;
-                }
-            }
-        }
-    }
-
-    private void WriteAllLines(string to, string[] exceptionLines)
-    {
-        string fileName = string.Concat(to, @"\readme.txt");
-        try
-        {
-            if (!Directory.Exists(to))
-                _ = Directory.CreateDirectory(to);
-            File.WriteAllLines(fileName, exceptionLines);
-        }
-        catch (Exception ex) { _Log.Error(ex.Message); }
-    }
-
-    protected string[] Move(Tuple> extractResults, string to, string from, string resolvedFileLocation, Exception exception)
-    {
-        string[] results;
-        bool isErrorFile = exception is not null;
-        if (!to.EndsWith(@"\"))
-            _ = string.Concat(to, @"\");
-        if (!isErrorFile)
-            results = Array.Empty();
-        else
-        {
-            results = new string[] { _Logistics.Sequence.ToString(), _Logistics.ReportFullPath, from, resolvedFileLocation, to, string.Empty, string.Empty, exception.Message, string.Empty, string.Empty, exception.StackTrace };
-            if (!_IsDuplicator)
-                WriteAllLines(to, results);
-        }
-        if (extractResults is not null && extractResults.Item4 is not null && extractResults.Item4.Count != 0)
-        {
-            string itemFile;
-            List directories = new();
-            foreach (FileInfo sourceFile in extractResults.Item4)
-            {
-                if (sourceFile.FullName != _Logistics.ReportFullPath)
-                {
-                    itemFile = sourceFile.FullName.Replace(from, to);
-                    Shared1880(itemFile, directories, sourceFile, isErrorFile);
-                }
-                else if (!isErrorFile && _Logistics is not null)
-                    Shared1811(to, sourceFile);
-            }
-            Shared0231(directories);
-        }
-        return results;
-    }
-
     protected static string GetTupleFile(Logistics logistics, List descriptions, Properties.IScopeInfo scopeInfo, string duplicateDirectory, string duplicateFile) where T : Properties.IDescription
     {
         string result;
@@ -384,217 +187,131 @@ public class FileRead : Properties.IFileRead
         }
     }
 
-    protected void SetFileParameter(string key, string value)
+    protected void WaitForFileConsumption(DateTime dateTime, List descriptions, bool isDummyRun, string successDirectory, string duplicateDirectory, List<(Properties.IScopeInfo, string)> collection, string duplicateFile) where T : Properties.IDescription
     {
-        if (_FileConnectorConfiguration is null || _FileConnectorConfiguration.TargetFileLocation.Contains(string.Concat("%", key, "%")) || _FileConnectorConfiguration.ErrorTargetFileLocation.Contains(string.Concat("%", key, "%")) || _FileConnectorConfiguration.TargetFileName.Contains(string.Concat("%", key, "%")) || _FileConnectorConfiguration.ErrorTargetFileName.Contains(string.Concat("%", key, "%")))
-        {
-            if (_FileParameter.ContainsKey(key))
-                _FileParameter[key] = value;
-            else
-                _FileParameter.Add(key, value);
-        }
-    }
-
-    protected void SetFileParameterLotIDToLogisticsMID(bool includeLogisticsSequence = true)
-    {
-        string key;
-        if (!includeLogisticsSequence)
-            key = "LotID";
-        else
-            key = "LotIDWithLogisticsSequence";
-        string value = string.Concat(_Logistics.MID, "_", _Logistics.Sequence, "_", DateTime.Now.Ticks - _Logistics.Sequence);
-        SetFileParameter(key, value);
-    }
-
-    protected void SetFileParameterLotID(string value, bool includeLogisticsSequence = true)
-    {
-        string key;
-        if (!includeLogisticsSequence)
-            key = "LotID";
+        if (!isDummyRun && _IsEAFHosted)
+            WaitForFileConsumption(_FileConnectorConfiguration.SourceDirectoryCloaking, _Logistics, dateTime, descriptions, successDirectory, duplicateDirectory, duplicateFile, collection);
         else
         {
-            key = "LotIDWithLogisticsSequence";
-            value = string.Concat(value, "_", _Logistics.Sequence, "_", DateTime.Now.Ticks - _Logistics.Sequence);
-        }
-        SetFileParameter(key, value);
-    }
-
-    protected void WritePDSF(IFileRead fileRead, JsonElement[] jsonElements)
-    {
-        string directory;
-        string weekOfYear = _Calendar.GetWeekOfYear(_Logistics.DateTimeFromSequence, CalendarWeekRule.FirstDay, DayOfWeek.Sunday).ToString("00");
-        string weekDirectory = $"{_Logistics.DateTimeFromSequence:yyyy}_Week_{weekOfYear}{@"\"}{_Logistics.DateTimeFromSequence:yyyy-MM-dd}";
-        if (!_CellInstanceConnectionName.StartsWith(_CellInstanceName) && _CellInstanceConnectionNameBase == _EquipmentType)
-            directory = Path.Combine(_TracePath, _EquipmentType, "Target", weekDirectory, _CellInstanceName, _CellInstanceConnectionName);
-        else
-            directory = Path.Combine(_TracePath, _EquipmentType, "Source", weekDirectory, _CellInstanceName, _CellInstanceConnectionName);
-        if (!Directory.Exists(directory))
-            _ = Directory.CreateDirectory(directory);
-        string file = Path.Combine(directory, string.Concat(_Logistics.MesEntity, "_", _Logistics.Sequence, ".ipdsf"));
-        string lines = ProcessDataStandardFormat.GetPDSFText(fileRead, _Logistics, jsonElements, logisticsText: string.Empty);
-        File.WriteAllText(file, lines);
-        if (_Logistics.TotalSecondsSinceLastWriteTimeFromSequence > 600)
-        {
-            try
-            { File.SetLastWriteTime(file, _Logistics.DateTimeFromSequence); }
-            catch (Exception) { }
-        }
-    }
-
-    protected void Move(Tuple> extractResults)
-    {
-        if (!_IsEAFHosted)
-        {
-            string to;
-            if (!_FileConnectorConfiguration.TargetFileLocation.EndsWith(Path.DirectorySeparatorChar.ToString()))
-                to = _FileConnectorConfiguration.TargetFileLocation;
-            else
-                to = Path.GetDirectoryName(_FileConnectorConfiguration.TargetFileLocation);
-            foreach (KeyValuePair keyValuePair in _FileParameter)
-                to = to.Replace(string.Concat('%', keyValuePair.Key, '%'), keyValuePair.Value);
-            if (to.Contains('%'))
-                _Log.Debug("Can't debug without EAF Hosting");
-            else
-                _ = Move(extractResults, to, _FileConnectorConfiguration.SourceFileLocation, resolvedFileLocation: string.Empty, exception: null);
-        }
-    }
-
-    protected void TriggerEvents(Tuple> extractResults, List headerNames, Dictionary keyValuePairs)
-    {
-        object value;
-        string segments;
-        string description;
-        List list;
-        for (int i = 0; i < extractResults.Item3.Length; i++)
-        {
-            _Log.Debug(string.Concat("TriggerEvent - {", _Logistics.ReportFullPath, "} ", i, " of ", extractResults.Item3.Length));
-            foreach (JsonProperty jsonProperty in extractResults.Item3[i].EnumerateObject())
+            long breakAfter = DateTime.Now.AddSeconds(_FileConnectorConfiguration.FileHandleWaitTime.Value).Ticks;
+            for (short i = 0; i < short.MaxValue; i++)
             {
-                if (jsonProperty.Value.ValueKind != JsonValueKind.String || !keyValuePairs.TryGetValue(jsonProperty.Name, out segments))
-                    description = string.Empty;
-                else
-                    description = segments.Split('|')[0];
-                if (!_UseCyclicalForDescription || headerNames.Contains(jsonProperty.Name))
-                    value = jsonProperty.Value.ToString();
-                else
-                {
-                    list = new List();
-                    for (int z = 0; z < extractResults.Item3.Length; z++)
-                        list.Add(new object[] { z, extractResults.Item3[z].GetProperty(jsonProperty.Name).ToString() });
-                    value = list;
-                }
-            }
-            if (_UseCyclicalForDescription)
-                break;
-        }
-    }
-
-    protected static void NestExistingFiles(FileConnectorConfiguration fileConnectorConfiguration)
-    {
-        if (!fileConnectorConfiguration.IncludeSubDirectories.Value && fileConnectorConfiguration.TriggerOnCreated is not null && fileConnectorConfiguration.TriggerOnCreated.Value)
-        {
-            string[] matches = GetMatches(fileConnectorConfiguration);
-            if (matches is not null && matches.Length > 0)
-            {
-                string fileName;
-                string nestedDirectory = Path.Combine(fileConnectorConfiguration.SourceFileLocation, DateTime.Now.Ticks.ToString());
-                if (!Directory.Exists(nestedDirectory))
-                    _ = Directory.CreateDirectory(nestedDirectory);
-                foreach (string match in matches)
-                {
-                    fileName = Path.GetFileName(match);
-                    File.Move(match, Path.Combine(nestedDirectory, fileName));
-                }
+                if (!_IsEAFHosted || DateTime.Now.Ticks > breakAfter)
+                    break;
+                Thread.Sleep(500);
             }
         }
     }
 
-    protected static string[] GetMatches(FileConnectorConfiguration fileConnectorConfiguration)
+    public FileRead(IDescription description, bool isEvent, ISMTP smtp, Dictionary fileParameter, string cellInstanceName, int? connectionCount, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList modelObjectParameters, string equipmentDictionaryName, Dictionary> dummyRuns, Dictionary> staticRuns, bool useCyclicalForDescription, bool isEAFHosted)
     {
-        string[] segments;
-        string[] results = null;
-        foreach (string subSourceFileFilter in fileConnectorConfiguration.SourceFileFilters)
+        _SMTP = smtp;
+        _IsEvent = isEvent;
+        _DummyRuns = dummyRuns;
+        _LastTicksDuration = 0;
+        _StaticRuns = staticRuns;
+        _IsEAFHosted = isEAFHosted;
+        _Description = description;
+        _FileParameter = fileParameter;
+        _ReportFullPath = string.Empty;
+        _CellInstanceName = cellInstanceName;
+        _Calendar = new CultureInfo("en-US").Calendar;
+        _Log = LogManager.GetLogger(typeof(FileRead));
+        _UseCyclicalForDescription = useCyclicalForDescription;
+        _CellInstanceConnectionName = cellInstanceConnectionName;
+        _ModelObjectParameterDefinitions = modelObjectParameters;
+        _FileConnectorConfiguration = fileConnectorConfiguration;
+        _ParameterizedModelObjectDefinitionType = parameterizedModelObjectDefinitionType;
+        _IsSourceTimer = fileConnectorConfiguration.SourceFileFilter.StartsWith("*Timer.txt");
+        string cellInstanceConnectionNameBase = cellInstanceConnectionName.Replace("-", string.Empty);
+        _Hyphens = cellInstanceConnectionName.Length - cellInstanceConnectionNameBase.Length;
+        _TracePath = GetPropertyValue(cellInstanceConnectionName, modelObjectParameters, "Path.Trace");
+        _ExceptionSubject = string.Concat("Exception:", _CellInstanceConnectionName, _FileConnectorConfiguration?.SourceDirectoryCloaking);
+        string suffix;
+        string[] segments = _ParameterizedModelObjectDefinitionType.Split('.');
+        string @namespace = segments[0];
+        string eventNameFileRead = "FileRead";
+        string eventName = segments[segments.Length - 1];
+        bool isDuplicator = segments[0] == cellInstanceName;
+        _IsDuplicator = isDuplicator;
+        _CellInstanceConnectionNameBase = cellInstanceConnectionNameBase;
+        if (eventName == eventNameFileRead)
+            suffix = string.Empty;
+        else
+            suffix = string.Concat('_', eventName.Split(new string[] { eventNameFileRead }, StringSplitOptions.RemoveEmptyEntries)[1]);
+        string parameterizedModelObjectDefinitionTypeAppended = string.Concat(@namespace, suffix);
+        if (!isEAFHosted)
         {
-            segments = subSourceFileFilter.Split('\\');
-            if (fileConnectorConfiguration.IncludeSubDirectories.Value)
-                results = Directory.GetFiles(fileConnectorConfiguration.SourceFileLocation, segments.Last(), SearchOption.AllDirectories);
-            else
-                results = Directory.GetFiles(fileConnectorConfiguration.SourceFileLocation, segments.Last(), SearchOption.TopDirectoryOnly);
-            if (results.Length != 0)
-                break;
+            if (string.IsNullOrEmpty(equipmentTypeName) || equipmentTypeName != parameterizedModelObjectDefinitionTypeAppended)
+                throw new Exception(cellInstanceConnectionName);
+            if (string.IsNullOrEmpty(equipmentDictionaryName) && isEvent)
+                throw new Exception(cellInstanceConnectionName);
+            if (!string.IsNullOrEmpty(equipmentDictionaryName) && !isEvent && connectionCount > 1)
+                throw new Exception(cellInstanceConnectionName);
+            // if (string.IsNullOrEmpty(equipmentDictionaryName) && !isEvent)
+            //     throw new Exception(cellInstanceConnectionName);
+            // if (!string.IsNullOrEmpty(equipmentDictionaryName) && isEvent)
+            //     throw new Exception(cellInstanceConnectionName);
         }
-        return results;
-    }
-
-    protected Tuple> ReExtract(IFileRead fileRead, List headerNames, Dictionary keyValuePairs)
-    {
-        Tuple> results;
-        if (!Directory.Exists(_FileConnectorConfiguration.SourceFileLocation))
-            results = null;
+        if (isDuplicator)
+            _MesEntity = string.Empty;
+        else
+            _MesEntity = GetPropertyValue(cellInstanceConnectionName, modelObjectParameters, string.Concat("CellInstance.", cellInstanceName, ".Alias"));
+        _EventName = eventName;
+        _EventNameFileRead = eventNameFileRead;
+        _EquipmentType = parameterizedModelObjectDefinitionTypeAppended;
+        long breakAfterSeconds;
+        if (_FileConnectorConfiguration is null)
+            breakAfterSeconds = 360;
         else
         {
-            string[] matches = GetMatches(_FileConnectorConfiguration);
-            if (matches is null || matches.Length == 0)
-                results = null;
+            if (_FileConnectorConfiguration.FileScanningOption == FileConnectorConfiguration.FileScanningOptionEnum.TimeBased)
+                breakAfterSeconds = Math.Abs(_FileConnectorConfiguration.FileHandleTimeout.Value);
+            else if (_FileConnectorConfiguration.FileScanningOption == FileConnectorConfiguration.FileScanningOptionEnum.FileWatcher)
+                breakAfterSeconds = Math.Abs(_FileConnectorConfiguration.FileScanningIntervalInSeconds.Value);
             else
-            {
-                _ReportFullPath = matches[0];
-                results = fileRead.GetExtractResult(_ReportFullPath, _EventName);
-                if (!_IsEAFHosted)
-                    TriggerEvents(results, headerNames, keyValuePairs);
-            }
-        }
-        return results;
-    }
-
-    protected static List GetDuplicatorDescriptions(JsonElement[] jsonElements)
-    {
-        List results = new();
-        Duplicator.Description description;
-        JsonSerializerOptions jsonSerializerOptions = new() { NumberHandling = JsonNumberHandling.AllowReadingFromString | JsonNumberHandling.WriteAsString };
-        foreach (JsonElement jsonElement in jsonElements)
-        {
-            if (jsonElement.ValueKind != JsonValueKind.Object)
                 throw new Exception();
-            description = JsonSerializer.Deserialize(jsonElement.ToString(), jsonSerializerOptions);
-            results.Add(description);
         }
-        return results;
+        _BreakAfterSeconds = breakAfterSeconds;
+        UpdateLastTicksDuration(breakAfterSeconds * 10000000);
+        if (_IsDuplicator)
+        {
+            if (string.IsNullOrEmpty(_FileConnectorConfiguration.TargetFileLocation) || string.IsNullOrEmpty(_FileConnectorConfiguration.ErrorTargetFileLocation))
+                throw new Exception("_Configuration is empty?");
+            if (_FileConnectorConfiguration.TargetFileLocation.Contains('%') || _FileConnectorConfiguration.ErrorTargetFileLocation.Contains('%'))
+                throw new Exception("_Configuration is incorrect for a duplicator!");
+            // if (_FileConnectorConfiguration is not null)
+            // {
+            //     if (string.IsNullOrEmpty(_FileConnectorConfiguration.SourceDirectoryCloaking))
+            //         throw new Exception("SourceDirectoryCloaking is empty?");
+            //     if (!_FileConnectorConfiguration.SourceDirectoryCloaking.StartsWith("~"))
+            //         throw new Exception("SourceDirectoryCloaking is incorrect for a duplicator!");
+            // }
+        }
     }
 
-    private void Shared1880(string itemFile, List directories, FileInfo sourceFile, bool isErrorFile)
+    protected static string GetPropertyValue(string cellInstanceConnectionName, IList modelObjectParameters, string propertyName)
     {
-        string itemDirectory;
-        directories.Add(Path.GetDirectoryName(sourceFile.FullName));
-        itemDirectory = Path.GetDirectoryName(itemFile);
-        FileConnectorConfiguration.PostProcessingModeEnum processingModeEnum;
-        if (!isErrorFile)
-            processingModeEnum = _FileConnectorConfiguration.PostProcessingMode.Value;
-        else
-            processingModeEnum = _FileConnectorConfiguration.ErrorPostProcessingMode.Value;
-        if (processingModeEnum != FileConnectorConfiguration.PostProcessingModeEnum.Delete && !Directory.Exists(itemDirectory))
-        {
-            _ = Directory.CreateDirectory(itemDirectory);
-            FileInfo fileInfo = new(_Logistics.ReportFullPath);
-            Directory.SetCreationTime(itemDirectory, fileInfo.LastWriteTime);
-        }
-        if (_IsEAFHosted)
-        {
-            switch (processingModeEnum)
-            {
-                case FileConnectorConfiguration.PostProcessingModeEnum.Move:
-                    File.Move(sourceFile.FullName, itemFile);
-                    break;
-                case FileConnectorConfiguration.PostProcessingModeEnum.Copy:
-                    File.Copy(sourceFile.FullName, itemFile);
-                    break;
-                case FileConnectorConfiguration.PostProcessingModeEnum.Delete:
-                    File.Delete(sourceFile.FullName);
-                    break;
-                default:
-                    throw new Exception();
-            }
-        }
+        string result;
+        List results = (from l in modelObjectParameters where l.Name == propertyName select l.Value).ToList();
+        if (results.Count != 1)
+            throw new Exception(cellInstanceConnectionName);
+        result = results[0];
+        return result;
+    }
+
+    protected void UpdateLastTicksDuration(long ticksDuration)
+    {
+        if (ticksDuration < 50000000)
+            ticksDuration = 50000000;
+        _LastTicksDuration = (long)Math.Ceiling(ticksDuration * .667);
+        _Log.Info($"{new TimeSpan(ticksDuration).TotalMilliseconds} TotalMillisecond(s) to process{Environment.NewLine}{_CellInstanceConnectionName}{Environment.NewLine}<{_ReportFullPath}>");
+    }
+
+    internal static string GetParentParent(string value)
+    {
+        string result = Path.GetDirectoryName(Path.GetDirectoryName(value));
+        return result;
     }
 
     internal static List GetDirectoryNames(string directory)
@@ -635,6 +352,315 @@ public class FileRead : Properties.IFileRead
 #nullable disable
     }
 
+    internal static string GetJobIdParentDirectory(string directory)
+    {
+        string result;
+        if (!string.IsNullOrEmpty(Path.GetFileName(directory)))
+            result = Path.GetFullPath(GetParentParent(directory));
+        else
+            result = Path.GetFullPath(GetParentParent(Path.GetDirectoryName(directory)));
+        if (!Directory.Exists(result))
+            _ = Directory.CreateDirectory(result);
+        return result;
+    }
+
+    internal static string GetFileNameAfterUnderscoreSplit(string reportFullPath)
+    {
+        string result;
+        string[] segments = Path.GetFileNameWithoutExtension(reportFullPath).Split('_');
+        if (segments.Length <= 2)
+            result = segments[0];
+        else
+            result = string.Concat(segments[0], segments[2]);
+        return result;
+    }
+
+    internal string[] GetInProcessDirectory(string jobIdDirectory)
+    {
+        string[] results;
+        if (!_IsEAFHosted)
+            results = new string[] { jobIdDirectory };
+        else
+        {
+            string logisticsSequence = _Logistics.Sequence.ToString();
+            results = Directory.GetDirectories(jobIdDirectory, string.Concat(_Logistics.MID, '*', logisticsSequence, '*'), SearchOption.TopDirectoryOnly);
+        }
+        if ((results is null) || results.Length != 1)
+            throw new Exception("Didn't find directory by logistics sequence");
+        return results;
+    }
+
+    protected static string[] GetMatches(FileConnectorConfiguration fileConnectorConfiguration)
+    {
+        string[] segments;
+        string[] results = null;
+        foreach (string subSourceFileFilter in fileConnectorConfiguration.SourceFileFilters)
+        {
+            segments = subSourceFileFilter.Split('\\');
+            if (fileConnectorConfiguration.IncludeSubDirectories.Value)
+                results = Directory.GetFiles(fileConnectorConfiguration.SourceFileLocation, segments.Last(), SearchOption.AllDirectories);
+            else
+                results = Directory.GetFiles(fileConnectorConfiguration.SourceFileLocation, segments.Last(), SearchOption.TopDirectoryOnly);
+            if (results.Length != 0)
+                break;
+        }
+        return results;
+    }
+
+    protected static void NestExistingFiles(FileConnectorConfiguration fileConnectorConfiguration)
+    {
+        // if (!fileConnectorConfiguration.IncludeSubDirectories.Value && fileConnectorConfiguration.TriggerOnCreated is not null && fileConnectorConfiguration.TriggerOnCreated.Value)
+        if (!fileConnectorConfiguration.IncludeSubDirectories.Value)
+        {
+            string[] matches = GetMatches(fileConnectorConfiguration);
+            if (matches is not null && matches.Length > 0)
+            {
+                string fileName;
+                string nestedDirectory = Path.Combine(fileConnectorConfiguration.SourceFileLocation, DateTime.Now.Ticks.ToString());
+                if (!Directory.Exists(nestedDirectory))
+                    _ = Directory.CreateDirectory(nestedDirectory);
+                foreach (string match in matches)
+                {
+                    fileName = Path.GetFileName(match);
+                    File.Move(match, Path.Combine(nestedDirectory, fileName));
+                }
+            }
+        }
+    }
+
+    protected static List<Duplicator.Description> GetDuplicatorDescriptions(JsonElement[] jsonElements)
+    {
+        List<Duplicator.Description> results = new();
+        Duplicator.Description description;
+        JsonSerializerOptions jsonSerializerOptions = new() { NumberHandling = JsonNumberHandling.AllowReadingFromString | JsonNumberHandling.WriteAsString };
+        foreach (JsonElement jsonElement in jsonElements)
+        {
+            if (jsonElement.ValueKind != JsonValueKind.Object)
+                throw new Exception();
+            description = JsonSerializer.Deserialize<Duplicator.Description>(jsonElement.ToString(), jsonSerializerOptions);
+            results.Add(description);
+        }
+        return results;
+    }
+
+    protected static ModelObjectParameterDefinition[] GetProperties(string cellInstanceConnectionName, IList<ModelObjectParameterDefinition> modelObjectParameters, string propertyNamePrefix)
+    {
+        ModelObjectParameterDefinition[] results = (from l in modelObjectParameters where l.Name.StartsWith(propertyNamePrefix) select l).ToArray();
+        if (results.Length == 0)
+            throw new Exception(cellInstanceConnectionName);
+        return results;
+    }
+
+    protected static ModelObjectParameterDefinition[] GetProperties(string cellInstanceConnectionName, IList<ModelObjectParameterDefinition> modelObjectParameters, string propertyNamePrefix, string propertyNameSuffix)
+    {
+        ModelObjectParameterDefinition[] results = (from l in modelObjectParameters where l.Name.StartsWith(propertyNamePrefix) && l.Name.EndsWith(propertyNameSuffix) select l).ToArray();
+        if (results.Length == 0)
+            throw new Exception(cellInstanceConnectionName);
+        return results;
+    }
+
+    protected void SetFileParameter(string key, string value)
+    {
+        if (_FileConnectorConfiguration is null || _FileConnectorConfiguration.TargetFileLocation.Contains(string.Concat("%", key, "%")) || _FileConnectorConfiguration.ErrorTargetFileLocation.Contains(string.Concat("%", key, "%")) || _FileConnectorConfiguration.TargetFileName.Contains(string.Concat("%", key, "%")) || _FileConnectorConfiguration.ErrorTargetFileName.Contains(string.Concat("%", key, "%")))
+        {
+            if (_FileParameter.ContainsKey(key))
+                _FileParameter[key] = value;
+            else
+                _FileParameter.Add(key, value);
+        }
+    }
+
+    protected void WritePDSF(IFileRead fileRead, JsonElement[] jsonElements)
+    {
+        string directory;
+        string day = $"{_Logistics.DateTimeFromSequence:yyyy-MM-dd}";
+        string weekOfYear = _Calendar.GetWeekOfYear(_Logistics.DateTimeFromSequence, CalendarWeekRule.FirstDay, DayOfWeek.Sunday).ToString("00");
+        string weekDirectory = $"{_Logistics.DateTimeFromSequence:yyyy}_Week_{weekOfYear}";
+        if (!_CellInstanceConnectionName.StartsWith(_CellInstanceName) && _CellInstanceConnectionNameBase == _EquipmentType)
+            directory = Path.Combine(_TracePath, _EquipmentType, "Target", weekDirectory, day, _CellInstanceName, _CellInstanceConnectionName);
+        else
+            directory = Path.Combine(_TracePath, _EquipmentType, "Source", weekDirectory, day, _CellInstanceName, _CellInstanceConnectionName);
+        if (!Directory.Exists(directory))
+            _ = Directory.CreateDirectory(directory);
+        string file = Path.Combine(directory, string.Concat(_Logistics.MesEntity, "_", _Logistics.Sequence, ".ipdsf"));
+        string lines = ProcessDataStandardFormat.GetPDSFText(fileRead, _Logistics, jsonElements, logisticsText: string.Empty);
+        File.WriteAllText(file, lines);
+        if (_Logistics.TotalSecondsSinceLastWriteTimeFromSequence > 600)
+        {
+            try
+            { File.SetLastWriteTime(file, _Logistics.DateTimeFromSequence); }
+            catch (Exception) { }
+        }
+    }
+
+    protected void WaitForThread(Thread thread, List<Exception> threadExceptions)
+    {
+        if (thread is not null)
+        {
+            ThreadState threadState;
+            for (short i = 0; i < short.MaxValue; i++)
+            {
+                if (thread is null)
+                    break;
+                else
+                {
+                    threadState = thread.ThreadState;
+                    if (threadState is not ThreadState.Running and not ThreadState.WaitSleepJoin)
+                        break;
+                }
+                Thread.Sleep(500);
+            }
+            lock (threadExceptions)
+            {
+                if (threadExceptions.Count != 0)
+                {
+                    foreach (Exception item in threadExceptions)
+                        _Log.Error(string.Concat(item.Message, Environment.NewLine, Environment.NewLine, item.StackTrace));
+                    Exception exception = threadExceptions[0];
+                    threadExceptions.Clear();
+                    throw exception;
+                }
+            }
+        }
+    }
+
+    protected void Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults)
+    {
+        if (!_IsEAFHosted)
+        {
+            string to;
+            if (!_FileConnectorConfiguration.TargetFileLocation.EndsWith(Path.DirectorySeparatorChar.ToString()))
+                to = _FileConnectorConfiguration.TargetFileLocation;
+            else
+                to = Path.GetDirectoryName(_FileConnectorConfiguration.TargetFileLocation);
+            foreach (KeyValuePair<string, string> keyValuePair in _FileParameter)
+                to = to.Replace(string.Concat('%', keyValuePair.Key, '%'), keyValuePair.Value);
+            if (to.Contains('%'))
+                _Log.Debug("Can't debug without EAF Hosting");
+            else
+                _ = Move(extractResults, to, _FileConnectorConfiguration.SourceFileLocation, resolvedFileLocation: string.Empty, exception: null);
+        }
+    }
+
+    protected string[] Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, string to, string from, string resolvedFileLocation, Exception exception)
+    {
+        string[] results;
+        bool isErrorFile = exception is not null;
+        if (!to.EndsWith(@"\"))
+            _ = string.Concat(to, @"\");
+        if (!isErrorFile)
+            results = Array.Empty<string>();
+        else
+        {
+            results = new string[] { _Logistics.Sequence.ToString(), _Logistics.ReportFullPath, from, resolvedFileLocation, to, string.Empty, string.Empty, exception.Message, string.Empty, string.Empty, exception.StackTrace };
+            if (!_IsDuplicator)
+                WriteAllLines(to, results);
+        }
+        if (extractResults is not null && extractResults.Item4 is not null && extractResults.Item4.Count != 0)
+        {
+            string itemFile;
+            List<string> directories = new();
+            foreach (FileInfo sourceFile in extractResults.Item4)
+            {
+                if (sourceFile.FullName != _Logistics.ReportFullPath)
+                {
+                    itemFile = sourceFile.FullName.Replace(from, to);
+                    Shared1880(itemFile, directories, sourceFile, isErrorFile);
+                }
+                else if (!isErrorFile && _Logistics is not null)
+                    Shared1811(to, sourceFile);
+            }
+            Shared0231(directories);
+        }
+        return results;
+    }
+
+    private void WriteAllLines(string to, string[] exceptionLines)
+    {
+        string fileName = string.Concat(to, @"\readme.txt");
+        try
+        {
+            if (!Directory.Exists(to))
+                _ = Directory.CreateDirectory(to);
+            File.WriteAllLines(fileName, exceptionLines);
+        }
+        catch (Exception ex) { _Log.Error(ex.Message); }
+    }
+
+    private void Shared1880(string itemFile, List directories, FileInfo sourceFile, bool isErrorFile)
+    {
+        string itemDirectory;
+        directories.Add(Path.GetDirectoryName(sourceFile.FullName));
+        itemDirectory = Path.GetDirectoryName(itemFile);
+        FileConnectorConfiguration.PostProcessingModeEnum processingModeEnum;
+        if (!isErrorFile)
+            processingModeEnum = _FileConnectorConfiguration.PostProcessingMode.Value;
+        else
+            processingModeEnum = _FileConnectorConfiguration.ErrorPostProcessingMode.Value;
+        if (processingModeEnum != FileConnectorConfiguration.PostProcessingModeEnum.Delete && !Directory.Exists(itemDirectory))
+        {
+            _ = Directory.CreateDirectory(itemDirectory);
+            FileInfo fileInfo = new(_Logistics.ReportFullPath);
+            Directory.SetCreationTime(itemDirectory, fileInfo.LastWriteTime);
+        }
+        if (_IsEAFHosted)
+        {
+            switch (processingModeEnum)
+            {
+                case FileConnectorConfiguration.PostProcessingModeEnum.Move:
+                    File.Move(sourceFile.FullName, itemFile);
+                    break;
+                case FileConnectorConfiguration.PostProcessingModeEnum.Copy:
+                    File.Copy(sourceFile.FullName, itemFile);
+                    break;
+                case FileConnectorConfiguration.PostProcessingModeEnum.Delete:
+                    File.Delete(sourceFile.FullName);
+                    break;
+                default:
+                    throw new Exception();
+            }
+        }
+    }
+
+    private void Shared1811(string to, FileInfo sourceFile)
+    {
+        if (!_IsDuplicator && _FileConnectorConfiguration.SourceFileFilter != "*" && sourceFile.Exists && sourceFile.Length < _MinFileLength)
+        {
+            string directoryName = Path.GetFileName(to);
+            string jobIdDirectory = GetJobIdDirectory(to);
+            DateTime dateTime = DateTime.Now.AddMinutes(-15);
+            string day = $"{_Logistics.DateTimeFromSequence:yyyy-MM-dd}";
+            string weekOfYear = _Calendar.GetWeekOfYear(_Logistics.DateTimeFromSequence, CalendarWeekRule.FirstDay, DayOfWeek.Sunday).ToString("00");
+            string weekDirectory = $"{_Logistics.DateTimeFromSequence:yyyy}_Week_{weekOfYear}";
+            string destinationDirectory = Path.Combine(jobIdDirectory, "_ Ignore 100 bytes", weekDirectory, day, directoryName);
+            if (!Directory.Exists(destinationDirectory))
+                _ = Directory.CreateDirectory(destinationDirectory);
+            File.Move(sourceFile.FullName, string.Concat(destinationDirectory, @"\", sourceFile.Name));
+            try
+            {
+                string[] checkDirectories = Directory.GetDirectories(jobIdDirectory, "*", SearchOption.TopDirectoryOnly);
+                foreach (string checkDirectory in checkDirectories)
+                {
+                    if (!checkDirectory.Contains('_'))
+                        continue;
+                    if (Directory.GetDirectories(checkDirectory, "*", SearchOption.TopDirectoryOnly).Length != 0)
+                        continue;
+                    if (Directory.GetFiles(checkDirectory, "*", SearchOption.TopDirectoryOnly).Length != 0)
+                        continue;
+                    if (Directory.GetDirectories(checkDirectory, "*", SearchOption.AllDirectories).Length != 0)
+                        continue;
+                    if (Directory.GetFiles(checkDirectory, "*", SearchOption.AllDirectories).Length != 0)
+                        continue;
+                    if (new DirectoryInfo(checkDirectory).CreationTime > dateTime)
+                        continue;
+                    Directory.Delete(checkDirectory, recursive: false);
+                }
+            }
+            catch (Exception) { throw; }
+            DeleteEmptyTopDirectories(jobIdDirectory);
+        }
+    }
+
     private string GetJobIdDirectory(string path)
     {
         string result;
@@ -680,44 +706,6 @@ public class FileRead : Properties.IFileRead
         }
     }
 
-    private void Shared1811(string to, FileInfo sourceFile)
-    {
-        if (!_IsDuplicator && _FileConnectorConfiguration.SourceFileFilter != "*" && sourceFile.Exists && sourceFile.Length < _MinFileLength)
-        {
-            string directoryName = Path.GetFileName(to);
-            string jobIdDirectory = GetJobIdDirectory(to);
-            DateTime dateTime = DateTime.Now.AddMinutes(-15);
-            string weekOfYear = _Calendar.GetWeekOfYear(_Logistics.DateTimeFromSequence, CalendarWeekRule.FirstDay, DayOfWeek.Sunday).ToString("00");
-            string weekDirectory = $"{_Logistics.DateTimeFromSequence:yyyy}_Week_{weekOfYear}{@"\"}{_Logistics.DateTimeFromSequence:yyyy-MM-dd}";
-            string destinationDirectory = Path.Combine(jobIdDirectory, "_ Ignore 100 bytes", weekDirectory, directoryName);
-            if (!Directory.Exists(destinationDirectory))
-                _ = Directory.CreateDirectory(destinationDirectory);
-            File.Move(sourceFile.FullName, string.Concat(destinationDirectory, @"\", sourceFile.Name));
-            try
-            {
-                string[] checkDirectories = Directory.GetDirectories(jobIdDirectory, "*", SearchOption.TopDirectoryOnly);
-                foreach (string checkDirectory in checkDirectories)
-                {
-                    if (!checkDirectory.Contains('_'))
-                        continue;
-                    if (Directory.GetDirectories(checkDirectory, "*", SearchOption.TopDirectoryOnly).Length != 0)
-                        continue;
-                    if (Directory.GetFiles(checkDirectory, "*", SearchOption.TopDirectoryOnly).Length != 0)
-                        continue;
-                    if (Directory.GetDirectories(checkDirectory, "*", SearchOption.AllDirectories).Length != 0)
-                        continue;
-                    if (Directory.GetFiles(checkDirectory, "*", SearchOption.AllDirectories).Length != 0)
-                        continue;
-                    if (new DirectoryInfo(checkDirectory).CreationTime > dateTime)
-                        continue;
-                    Directory.Delete(checkDirectory, recursive: false);
-                }
-            }
-            catch (Exception) { throw; }
-            DeleteEmptyTopDirectories(jobIdDirectory);
-        }
-    }
-
     private void Shared0231(List directories)
     {
         if (_FileConnectorConfiguration.PostProcessingMode != FileConnectorConfiguration.PostProcessingModeEnum.Copy)
@@ -730,66 +718,81 @@ public class FileRead : Properties.IFileRead
         }
     }
 
-    protected void WaitForFileConsumption<T>(DateTime dateTime, List<T> descriptions, bool isDummyRun, string successDirectory, string duplicateDirectory, List<(Properties.IScopeInfo, string)> collection, string duplicateFile) where T : Properties.IDescription
+    protected void SetFileParameterLotID(string value, bool includeLogisticsSequence = true)
     {
-        if (!isDummyRun && _IsEAFHosted)
-            WaitForFileConsumption(_FileConnectorConfiguration.SourceDirectoryCloaking, _Logistics, dateTime, descriptions, successDirectory, duplicateDirectory, duplicateFile, collection);
+        string key;
+        if (!includeLogisticsSequence)
+            key = "LotID";
         else
         {
-            long breakAfter = DateTime.Now.AddSeconds(_FileConnectorConfiguration.FileHandleWaitTime.Value).Ticks;
-            for (short i = 0; i < short.MaxValue; i++)
+            key = "LotIDWithLogisticsSequence";
+            value = string.Concat(value, "_", _Logistics.Sequence, "_", DateTime.Now.Ticks - _Logistics.Sequence);
+        }
+        SetFileParameter(key, value);
+    }
+
+    protected void SetFileParameterLotIDToLogisticsMID(bool includeLogisticsSequence = true)
+    {
+        string key;
+        if (!includeLogisticsSequence)
+            key = "LotID";
+        else
+            key = "LotIDWithLogisticsSequence";
+        string value = string.Concat(_Logistics.MID, "_", _Logistics.Sequence, "_", DateTime.Now.Ticks - _Logistics.Sequence);
+        SetFileParameter(key, value);
+    }
+
+    protected Tuple<string, Test[], JsonElement[], List<FileInfo>> ReExtract(IFileRead fileRead, List<string> headerNames, Dictionary<string, string> keyValuePairs)
+    {
+        Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
+        if (!Directory.Exists(_FileConnectorConfiguration.SourceFileLocation))
+            results = null;
+        else
+        {
+            string[] matches = GetMatches(_FileConnectorConfiguration);
+            if (matches is null || matches.Length == 0)
+                results = null;
+            else
             {
-                if (!_IsEAFHosted || DateTime.Now.Ticks > breakAfter)
-                    break;
-                Thread.Sleep(500);
+                _ReportFullPath = matches[0];
+                results = fileRead.GetExtractResult(_ReportFullPath, _EventName);
+                if (!_IsEAFHosted)
+                    TriggerEvents(results, headerNames, keyValuePairs);
             }
         }
-    }
-
-    internal static string GetJobIdParentDirectory(string directory)
-    {
-        string result;
-        if (!string.IsNullOrEmpty(Path.GetFileName(directory)))
-            result = Path.GetFullPath(GetParentParent(directory));
-        else
-            result = Path.GetFullPath(GetParentParent(Path.GetDirectoryName(directory)));
-        if (!Directory.Exists(result))
-            _ = Directory.CreateDirectory(result);
-        return result;
-    }
-
-    internal string[] GetInProcessDirectory(string jobIdDirectory)
-    {
-        string[] results;
-        if (!_IsEAFHosted)
-            results = new string[] { jobIdDirectory };
-        else
-        {
-            string logisticsSequence = _Logistics.Sequence.ToString();
-            results = Directory.GetDirectories(jobIdDirectory, string.Concat(_Logistics.MID, '*', logisticsSequence, '*'), SearchOption.TopDirectoryOnly);
-        }
-        if ((results is null) || results.Length != 1)
-            throw new Exception("Didn't find directory by logistics sequence");
         return results;
     }
 
-    internal static string GetFileNameAfterUnderscoreSplit(string reportFullPath)
+    protected void TriggerEvents(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, List<string> headerNames, Dictionary<string, string> keyValuePairs)
     {
-        string result;
-        string[] segments = Path.GetFileNameWithoutExtension(reportFullPath).Split('_');
-        if (segments.Length <= 2)
-            result = segments[0];
-        else
-            result = string.Concat(segments[0], segments[2]);
-        return result;
-    }
-
-    internal static string GetParentParent(string value)
-    {
-        string result = Path.GetDirectoryName(Path.GetDirectoryName(value));
-        return result;
+        object value;
+        string segments;
+        string description;
+        List<object[]> list;
+        for (int i = 0; i < extractResults.Item3.Length; i++)
+        {
+            _Log.Debug(string.Concat("TriggerEvent - {", _Logistics.ReportFullPath, "} ", i, " of ", extractResults.Item3.Length));
+            foreach (JsonProperty jsonProperty in extractResults.Item3[i].EnumerateObject())
+            {
+                if (jsonProperty.Value.ValueKind != JsonValueKind.String || !keyValuePairs.TryGetValue(jsonProperty.Name, out segments))
+                    description = string.Empty;
+                else
+                    description = segments.Split('|')[0];
+                if (!_UseCyclicalForDescription || headerNames.Contains(jsonProperty.Name))
+                    value = jsonProperty.Value.ToString();
+                else
+                {
+                    list = new List<object[]>();
+                    for (int z = 0; z < extractResults.Item3.Length; z++)
+                        list.Add(new object[] { z, extractResults.Item3[z].GetProperty(jsonProperty.Name).ToString() });
+                    value = list;
+                }
+            }
+            if (_UseCyclicalForDescription)
+                break;
+        }
     }
 
 }
 
-// 2022-06-08 -> Shared - FileRead
\ No newline at end of file
+// 2025-03-25 -> Shared - FileRead
\ No newline at end of file
diff --git a/Adaptation/Shared/Logistics.cs b/Adaptation/Shared/Logistics.cs
index cb1f805..4f187f8 100644
--- a/Adaptation/Shared/Logistics.cs
+++ b/Adaptation/Shared/Logistics.cs
@@ -35,6 +35,9 @@ public class Logistics : ILogistics
     public long Sequence => _Sequence;
     public double TotalSecondsSinceLastWriteTimeFromSequence => _TotalSecondsSinceLastWriteTimeFromSequence;
 
+    private static string DefaultMesEntity(DateTime dateTime) =>
+        string.Concat(dateTime.Ticks, "_MES_ENTITY");
+
     public Logistics(IFileRead fileRead)
     {
         DateTime dateTime = DateTime.Now;
@@ -84,13 +87,13 @@ public class Logistics : ILogistics
         _Logistics2 = new List<Logistics2>();
     }
 
-    public Logistics(string reportFullPath, string logistics)
+    internal Logistics(string reportFullPath, ProcessDataStandardFormat processDataStandardFormat)
     {
         string key;
         DateTime dateTime;
         string[] segments;
         _FileInfo = new(reportFullPath);
-        _Logistics1 = logistics.Split(new string[] { Environment.NewLine }, StringSplitOptions.RemoveEmptyEntries).ToList();
+        _Logistics1 = processDataStandardFormat.Logistics.ToList();
         if (Logistics1.Count == 0 || !Logistics1[0].StartsWith("LOGISTICS_1"))
         {
             _NullData = null;
@@ -190,8 +193,6 @@ public class Logistics : ILogistics
         }
     }
 
-    private static string DefaultMesEntity(DateTime dateTime) => string.Concat(dateTime.Ticks, "_MES_ENTITY");
-
     internal void Update(string mid, string processJobID)
     {
         _MID = mid;
diff --git a/Adaptation/Shared/ProcessData.cs b/Adaptation/Shared/ProcessData.cs
index cbee8cd..5f28270 100644
--- a/Adaptation/Shared/ProcessData.cs
+++ b/Adaptation/Shared/ProcessData.cs
@@ -1,21 +1 @@
-namespace Adaptation.Shared;
-
-public class ProcessData
-{
-
-    protected readonly string _Logistics;
-    protected readonly string[] _Columns;
-    protected readonly string[] _BodyLines;
-
-    public string Logistics => _Logistics;
-    public string[] Columns => _Columns;
-    public string[] BodyLines => _BodyLines;
-
-    public ProcessData(string logistics, string[] columns, string[] bodyLines)
-    {
-        _Logistics = logistics;
-        _Columns = columns;
-        _BodyLines = bodyLines;
-    }
-
-}
\ No newline at end of file
+
\ No newline at end of file
diff --git a/Adaptation/Shared/ProcessDataStandardFormat.cs b/Adaptation/Shared/ProcessDataStandardFormat.cs
index 894f23b..a066f50 100644
--- a/Adaptation/Shared/ProcessDataStandardFormat.cs
+++ b/Adaptation/Shared/ProcessDataStandardFormat.cs
@@ -1,18 +1,22 @@
 using Adaptation.Shared.Methods;
 using System;
 using System.Collections.Generic;
+using System.Collections.ObjectModel;
 using System.Globalization;
 using System.IO;
 using System.Linq;
 using System.Text;
 using System.Text.Json;
+using System.Text.Json.Serialization;
 
 namespace Adaptation.Shared;
 
-public class ProcessDataStandardFormat
+#nullable enable
+
+internal class ProcessDataStandardFormat
 {
 
-    public enum SearchFor
+    internal enum SearchFor
     {
         EquipmentIntegration = 1,
         BusinessIntegration = 2,
@@ -20,320 +24,38 @@ public class ProcessDataStandardFormat
         Archive = 4
     }
 
-    public static string GetPDSFText(IFileRead fileRead, Logistics logistics, JsonElement[] jsonElements, string logisticsText)
+    internal long? Sequence { get; private set; }
+    internal ReadOnlyCollection Body { get; private set; }
+    internal ReadOnlyCollection Columns { get; private set; }
+    internal ReadOnlyCollection Logistics { get; private set; }
+
+    internal ProcessDataStandardFormat(ReadOnlyCollection body,
+                                       ReadOnlyCollection columns,
+                                       ReadOnlyCollection logistics,
+                                       long? sequence)
     {
-        string result;
-        if (jsonElements.Length == 0)
-            result = string.Empty;
-        else
-        {
-            int columns = 0;
-            List lines;
-            string endOffset = "E#######T";
-            string dataOffset = "D#######T";
-            string headerOffset = "H#######T";
-            string format = "MM/dd/yyyy HH:mm:ss";
-            StringBuilder stringBuilder = new();
-            lines = new string[] { "HEADER_TAG\tHEADER_VALUE", "FORMAT\t2.00", "NUMBER_PASSES\t0001", string.Concat("HEADER_OFFSET\t", headerOffset), string.Concat("DATA_OFFSET\t", dataOffset), string.Concat("END_OFFSET\t", endOffset) }.ToList();
-            _ = stringBuilder.Append("\"Time\"").Append('\t');
-            _ = stringBuilder.Append("\"A_LOGISTICS\"").Append('\t');
-            _ = stringBuilder.Append("\"B_LOGISTICS\"").Append('\t');
-            for (int i = 0; i < jsonElements.Length;)
-            {
-                foreach (JsonProperty jsonProperty in jsonElements[0].EnumerateObject())
-                {
-                    columns += 1;
-                    _ = stringBuilder.Append('"').Append(jsonProperty.Name).Append('"').Append('\t');
-                }
-                break;
-            }
-            _ = stringBuilder.Remove(stringBuilder.Length - 1, 1);
-            lines.Add(stringBuilder.ToString());
-            for (int i = 0; i < jsonElements.Length; i++)
-            {
-                _ = stringBuilder.Clear();
-                _ = stringBuilder.Append("0.1").Append('\t');
-                _ = stringBuilder.Append('1').Append('\t');
-                _ = stringBuilder.Append('2').Append('\t');
-                foreach (JsonProperty jsonProperty in jsonElements[i].EnumerateObject())
-                    _ = stringBuilder.Append(jsonProperty.Value).Append('\t');
-                _ = stringBuilder.Remove(stringBuilder.Length - 1, 1);
-                lines.Add(stringBuilder.ToString());
-            }
-            lines.Add(string.Concat("NUM_DATA_ROWS ", jsonElements.Length.ToString().PadLeft(9, '0')));
-            lines.Add(string.Concat("NUM_DATA_COLUMNS ", (columns + 3).ToString().PadLeft(9, '0')));
-            lines.Add("DELIMITER	;");
-            lines.Add(string.Concat("START_TIME_FORMAT	", format));
-            lines.Add(string.Concat("START_TIME ", logistics.DateTimeFromSequence.ToString(format))); //12/26/2019 15:22:44
-            lines.Add(string.Concat("LOGISTICS_COLUMN", '\t', "A_LOGISTICS"));
-            lines.Add(string.Concat("LOGISTICS_COLUMN", '\t', "B_LOGISTICS"));
-            if (!string.IsNullOrEmpty(logisticsText))
-                lines.Add(logisticsText);
-            else
-            {
-                lines.Add(string.Concat("LOGISTICS_1", '\t', "A_CHAMBER=;A_INFO=", fileRead.EventName, ";A_INFO2=", fileRead.EquipmentType, ";A_JOBID=", fileRead.CellInstanceName, ";A_MES_ENTITY=", fileRead.MesEntity, ";A_MID=", logistics.MID, ";A_NULL_DATA=", fileRead.NullData, ";A_PPID=NO_PPID;A_PROCESS_JOBID=", logistics.ProcessJobID, ";A_PRODUCT=;A_SEQUENCE=", logistics.Sequence, ";A_WAFER_ID=;"));
-                lines.Add(string.Concat("LOGISTICS_2", '\t', "B_CHAMBER=;B_INFO=", fileRead.EventName, ";B_INFO2=", fileRead.EquipmentType, ";B_JOBID=", fileRead.CellInstanceName, ";B_MES_ENTITY=", fileRead.MesEntity, ";B_MID=", logistics.MID, ";B_NULL_DATA=", fileRead.NullData, ";B_PPID=NO_PPID;B_PROCESS_JOBID=", logistics.ProcessJobID, ";B_PRODUCT=;B_SEQUENCE=", logistics.Sequence, ";B_WAFER_ID=;"));
-                lines.Add("END_HEADER");
-            }
-            _ = stringBuilder.Clear();
-            foreach (string line in lines)
-                _ = stringBuilder.AppendLine(line);
-            result = stringBuilder.ToString();
-            result = result.Replace(headerOffset, result.IndexOf("NUM_DATA_ROWS").ToString().PadLeft(9, '0')).
-                Replace(dataOffset, result.IndexOf('"').ToString().PadLeft(9, '0')).
-                Replace(endOffset, result.Length.ToString().PadLeft(9, '0'));
-        }
-        return result;
+        Body = body;
+        Columns = columns;
+        Logistics = logistics;
+        Sequence = sequence;
     }
 
-    public static ProcessData GetProcessData(string reportFullPath, string[] lines = null)
-    {
-        string segment;
-        List body = new();
-        StringBuilder logistics = new();
-        lines ??= File.ReadAllLines(reportFullPath);
-        string[] segments;
-        if (lines.Length < 7)
-            segments = Array.Empty();
-        else
-            segments = lines[6].Trim().Split('\t');
-        List columns = new();
-        for (int c = 0; c < segments.Length; c++)
-        {
-            segment = segments[c].Substring(1, segments[c].Length - 2);
-            if (!columns.Contains(segment))
-                columns.Add(segment);
-            else
-            {
-                for (short i = 1; i < short.MaxValue; i++)
-                {
-                    segment = string.Concat(segment, "_", i);
-                    if (!columns.Contains(segment))
-                    {
-                        columns.Add(segment);
-                        break;
-                    }
-                }
-            }
-        }
-        bool lookForLogistics = false;
-        for (int r = 7; r < lines.Length; r++)
-        {
-            if (lines[r].StartsWith("NUM_DATA_ROWS"))
-                lookForLogistics = true;
-            if (!lookForLogistics)
-            {
-                body.Add(lines[r]);
-                continue;
-            }
-            if (lines[r].StartsWith("LOGISTICS_1"))
-            {
-                for (int i = r; i < lines.Length; i++)
-                {
-                    if (lines[r].StartsWith("END_HEADER"))
-                        break;
-                    _ = logistics.AppendLine(lines[i]);
-                }
-                break;
-            }
-        }
-        return new(logistics.ToString(), columns.ToArray(), body.ToArray());
-    }
+    internal static string EquipmentIntegration(bool addSpaces = true, char separator = ' ') =>
+        GetString(SearchFor.EquipmentIntegration, addSpaces, separator);
 
-    public static JsonElement[] GetArray(ProcessData processData, bool lookForNumbers = false)
-    {
-        JsonElement[] results;
-        if (processData.BodyLines.Length == 0 || !processData.BodyLines[0].Contains('\t'))
-            results = JsonSerializer.Deserialize("[]");
-        else
-        {
-            string value;
-            string[] segments;
-            List lines = new();
-            StringBuilder stringBuilder = new();
-            foreach (string bodyLine in processData.BodyLines)
-            {
-                _ = stringBuilder.Clear();
-                _ = stringBuilder.Append('{');
-                segments = bodyLine.Trim().Split('\t');
-                if (!lookForNumbers)
-                {
-                    for (int c = 1; c < segments.Length; c++)
-                    {
-                        value = segments[c].Replace("\"", "\\\"").Replace("\\", "\\\\");
-                        _ = stringBuilder.Append('"').Append(processData.Columns[c]).Append("\":\"").Append(value).Append("\",");
-                    }
-                }
-                else
-                {
-                    for (int c = 1; c < segments.Length; c++)
-                    {
-                        value = segments[c].Replace("\"", "\\\"").Replace("\\", "\\\\");
-                        if (string.IsNullOrEmpty(value))
-                            _ = stringBuilder.Append('"').Append(processData.Columns[c]).Append("\":").Append(value).Append("null,");
-                        else if (value.All(char.IsDigit))
-                            _ = stringBuilder.Append('"').Append(processData.Columns[c]).Append("\":").Append(value).Append(',');
-                        else
-                            _ = stringBuilder.Append('"').Append(processData.Columns[c]).Append("\":\"").Append(value).Append("\",");
-                    }
-                }
-                _ = stringBuilder.Remove(stringBuilder.Length - 1, 1);
-                _ = stringBuilder.AppendLine("}");
-                lines.Add(stringBuilder.ToString());
-            }
-            string json = $"[{string.Join(",", lines)}]";
-            results = JsonSerializer.Deserialize(json);
-        }
-        return results;
-    }
+    internal static string BusinessIntegration(bool addSpaces = true, char separator = ' ') =>
+        GetString(SearchFor.BusinessIntegration, addSpaces, separator);
 
-    public static Dictionary> GetDictionary(ProcessData processData)
-    {
-        Dictionary> results = new();
-        string[] segments;
-        foreach (string column in processData.Columns)
-            results.Add(column, new List());
-        foreach (string bodyLine in processData.BodyLines)
-        {
-            segments = bodyLine.Split('\t');
-            for (int c = 1; c < segments.Length; c++)
-            {
-                if (c >= processData.Columns.Length)
-                    continue;
-                results[processData.Columns[c]].Add(segments[c]);
-            }
-        }
-        return results;
-    }
+    internal static string SystemExport(bool addSpaces = true, char separator = ' ') =>
+        GetString(SearchFor.SystemExport, addSpaces, separator);
 
-    public static Tuple>>> GetTestDictionary(ProcessData processData)
-    {
-        Dictionary>> results = new();
-        List collection;
-        string testColumn = nameof(Test);
-        Dictionary> keyValuePairs = GetDictionary(processData);
-        if (!keyValuePairs.TryGetValue(testColumn, out collection))
-            throw new Exception();
-        int min;
-        int max;
-        Test testKey;
-        List vs;
-        string columnKey;
-        Dictionary> tests = new();
-        for (int i = 0; i < collection.Count; i++)
-        {
-            if (Enum.TryParse(collection[i], out Test test))
-            {
-                if (!results.ContainsKey(test))
-                {
-                    tests.Add(test, new List());
-                    results.Add(test, new Dictionary>());
-                }
-                tests[test].Add(i);
-            }
-        }
-        foreach (KeyValuePair> testKeyValuePair in tests)
-        {
-            testKey = testKeyValuePair.Key;
-            min = testKeyValuePair.Value.Min();
-            max = testKeyValuePair.Value.Max() + 1;
-            foreach (KeyValuePair> keyValuePair in keyValuePairs)
-                results[testKey].Add(keyValuePair.Key, new List());
-            foreach (KeyValuePair> keyValuePair in keyValuePairs)
-            {
-                vs = keyValuePair.Value;
-                columnKey = keyValuePair.Key;
-                for (int i = min; i < max; i++)
-                {
-                    if (vs.Count > i)
-                        results[testKey][columnKey].Add(vs[i]);
-                    else
-                        results[testKey][columnKey].Add(string.Empty);
-                }
-            }
-        }
-        return new Tuple>>>(processData.Logistics, results);
-    }
+    internal static string Archive(bool addSpaces = true, char separator = ' ') =>
+        GetString(SearchFor.Archive, addSpaces, separator);
 
-    private static string GetString(SearchFor searchFor, bool addSpaces, char separator = ' ')
-    {
-        if (!addSpaces)
-            return string.Concat(((int)searchFor).ToString().PadLeft(2, '0'), searchFor);
-        else
-            return string.Concat(((int)searchFor).ToString().PadLeft(2, '0'), separator, searchFor.ToString().Replace("In", string.Concat(separator, "In")).Replace("Ex", string.Concat(separator, "Ex")));
-    }
+    internal static ProcessDataStandardFormat GetEmpty() =>
+        new(new(Array.Empty()), new(Array.Empty()), new(Array.Empty()), null);
 
-    public static string EquipmentIntegration(bool addSpaces = true, char separator = ' ') => GetString(SearchFor.EquipmentIntegration, addSpaces, separator);
-
-    public static string BusinessIntegration(bool addSpaces = true, char separator = ' ') => GetString(SearchFor.BusinessIntegration, addSpaces, separator);
-
-    public static string SystemExport(bool addSpaces = true, char separator = ' ') => GetString(SearchFor.SystemExport, addSpaces, separator);
-
-    public static string Archive(bool addSpaces = true, char separator = ' ') => GetString(SearchFor.Archive, addSpaces, separator);
-
-    public static string GetLines(Logistics logistics, Properties.IScopeInfo scopeInfo, List names, Dictionary> keyValuePairs, string dateFormat, string timeFormat, List pairedParameterNames, bool useDateTimeFromSequence = true, string format = "", List ignoreParameterNames = null)
-    {
-        StringBuilder result = new();
-        ignoreParameterNames ??= new List();
-        if (useDateTimeFromSequence && !string.IsNullOrEmpty(format))
-            throw new Exception();
-        else if (!useDateTimeFromSequence && string.IsNullOrEmpty(format))
-            throw new Exception();
-        string nullData;
-        const string columnDate = "Date";
-        const string columnTime = "Time";
-        const string firstDuplicate = "_1";
-        _ = result.AppendLine(scopeInfo.Header);
-        StringBuilder line = new();
-        if (logistics.NullData is null)
-            nullData = string.Empty;
-        else
-            nullData = logistics.NullData.ToString();
-        int count = (from l in keyValuePairs select l.Value.Count).Min();
-        for (int r = 0; r < count; r++)
-        {
-            _ = line.Clear();
-            _ = line.Append('!');
-            foreach (KeyValuePair> keyValuePair in keyValuePairs)
-            {
-                if (!names.Contains(keyValuePair.Key))
-                    continue;
-                if (ignoreParameterNames.Contains(keyValuePair.Key))
-                    continue;
-                if (pairedParameterNames.Contains(keyValuePair.Key))
-                {
-                    if (string.IsNullOrEmpty(keyValuePair.Value[r]) || keyValuePair.Value[r] == nullData)
-                        continue;
-                    else
-                        _ = result.Append(line).Append(keyValuePair.Key).Append(';').AppendLine(keyValuePair.Value[r]);
-                }
-                else
-                {
-                    if (useDateTimeFromSequence && keyValuePair.Key == columnDate)
-                        _ = line.Append(logistics.DateTimeFromSequence.ToString(dateFormat));
-                    else if (useDateTimeFromSequence && keyValuePair.Key == columnTime)
-                        _ = line.Append(logistics.DateTimeFromSequence.ToString(timeFormat));
-                    else if (!useDateTimeFromSequence && keyValuePair.Key == columnDate && keyValuePair.Value[r].Length == format.Length)
-                        _ = line.Append(DateTime.ParseExact(keyValuePair.Value[r], format, CultureInfo.InvariantCulture).ToString(dateFormat));
-                    else if (!useDateTimeFromSequence && keyValuePair.Key == columnTime && keyValuePairs.TryGetValue(string.Concat(keyValuePair.Key, firstDuplicate), out List value) && value[r].Length == format.Length)
-                        _ = line.Append(DateTime.ParseExact(keyValuePairs[string.Concat(keyValuePair.Key, firstDuplicate)][r], format, CultureInfo.InvariantCulture).ToString(timeFormat));
-                    else if (string.IsNullOrEmpty(keyValuePair.Value[r]) || keyValuePair.Value[r] == nullData)
-                        _ = line.Append(nullData);
-                    else
-                        _ = line.Append(keyValuePair.Value[r]);
-                    _ = line.Append(';');
-                }
-            }
-            if (pairedParameterNames.Count == 0)
-            {
-                _ = line.Remove(line.Length - 1, 1);
-                _ = result.AppendLine(line.ToString());
-            }
-        }
-        return result.ToString();
-    }
-
-    public static List PDSFToFixedWidth(string reportFullPath)
+    internal static List PDSFToFixedWidth(string reportFullPath)
     {
         List results = new();
         if (!File.Exists(reportFullPath))
@@ -402,4 +124,534 @@ public class ProcessDataStandardFormat
         return results;
     }
 
+    internal static ProcessDataStandardFormat GetProcessDataStandardFormat(string reportFullPath, string[]? lines = null)
+    {
+        ProcessDataStandardFormat result;
+        string segment;
+        List body = new();
+        List logistics = new();
+        lines ??= File.ReadAllLines(reportFullPath);
+        string[] segments;
+        if (lines.Length < 7)
+            segments = Array.Empty();
+        else
+            segments = lines[6].Trim().Split('\t');
+        List columns = new();
+        for (int c = 0; c < segments.Length; c++)
+        {
+            segment = segments[c].Substring(1, segments[c].Length - 2);
+            if (!columns.Contains(segment))
+                columns.Add(segment);
+            else
+            {
+                for (short i = 1; i < short.MaxValue; i++)
+                {
+                    segment = string.Concat(segment, "_", i);
+                    if (!columns.Contains(segment))
+                    {
+                        columns.Add(segment);
+                        break;
+                    }
+                }
+            }
+        }
+        bool lookForLogistics = false;
+        for (int r = 7; r < lines.Length; r++)
+        {
+            if (lines[r].StartsWith("NUM_DATA_ROWS"))
+                lookForLogistics = true;
+            if (!lookForLogistics)
+            {
+                body.Add(lines[r]);
+                continue;
+            }
+            if (lines[r].StartsWith("LOGISTICS_1"))
+            {
+                for (int i = r; i < lines.Length; i++)
+                {
+                    if (lines[i].StartsWith("END_HEADER"))
+                        break;
+                    logistics.Add(lines[i]);
+                }
+                break;
+            }
+        }
+        result = new(body.AsReadOnly(), columns.AsReadOnly(), logistics.AsReadOnly(), null);
+        return result;
+    }
+
+    internal static ProcessDataStandardFormat? GetProcessDataStandardFormat(string reportFullPath, ProcessDataStandardFormatMapping pdsfMapping)
+    {
+        ProcessDataStandardFormat? result;
+        const int columnsLine = 6;
+        FileInfo fileInfo = new(reportFullPath);
+        ProcessDataStandardFormat processDataStandardFormat = GetProcessDataStandardFormat(fileInfo.LastWriteTime, pdsfMapping.NewColumnNames.Count, columnsLine, fileInfo.FullName, lines: null);
+        JsonElement[]? jsonElements = GetArray(pdsfMapping.NewColumnNames.Count, processDataStandardFormat, lookForNumbers: false);
+        if (jsonElements is null || pdsfMapping.OldColumnNames.Count != pdsfMapping.ColumnIndices.Count)
+            result = null;
+        else
+        {
+            result = GetProcessDataStandardFormat(pdsfMapping, jsonElements, processDataStandardFormat);
+            if (result.Sequence is null || result.Columns.Count == 0 || result.Body.Count == 0 || result.Logistics.Count == 0)
+                result = null;
+        }
+        return result;
+    }
+
+    private static ProcessDataStandardFormat GetProcessDataStandardFormat(DateTime lastWriteTime, int expectedColumns, int columnsLine, string path, string[]? lines)
+    {
+        ProcessDataStandardFormat result;
+        long sequence;
+        string[] segments;
+        List body = new();
+        bool lookForLogistics = false;
+        List logistics = new();
+        lines ??= File.ReadAllLines(path);
+        if (lines.Length <= columnsLine)
+            segments = Array.Empty();
+        else
+        {
+            segments = lines[columnsLine].Split('\t');
+            if (segments.Length != expectedColumns)
+                segments = Array.Empty();
+        }
+        string[] columns = segments.Select(l => l.Trim('"')).ToArray();
+        for (int r = columnsLine + 1; r < lines.Length; r++)
+        {
+            if (lines[r].StartsWith("NUM_DATA_ROWS"))
+                lookForLogistics = true;
+            if (!lookForLogistics)
+            {
+                body.Add(lines[r]);
+                continue;
+            }
+            if (lines[r].StartsWith("LOGISTICS_1"))
+            {
+                for (int i = r; i < lines.Length; i++)
+                {
+                    if (lines[i].StartsWith("END_HEADER"))
+                        break;
+                    logistics.Add(lines[i]);
+                }
+                break;
+            }
+        }
+        if (logistics.Count == 0)
+            sequence = lastWriteTime.Ticks;
+        else
+        {
+            segments = logistics[0].Split(new string[] { "SEQUENCE=" }, StringSplitOptions.None);
+            sequence = segments.Length < 2 || !long.TryParse(segments[1].Split(';')[0], out long s) ? lastWriteTime.Ticks : s;
+        }
+        result = new(body: body.AsReadOnly(),
+                     columns: new(columns),
+                     logistics: logistics.AsReadOnly(),
+                     sequence: sequence);
+        return result;
+    }
+
+    private static JsonElement[]? GetArray(int expectedColumns, ProcessDataStandardFormat processDataStandardFormat, bool lookForNumbers)
+    {
+        JsonElement[]? results;
+        if (processDataStandardFormat.Body.Count == 0 || !processDataStandardFormat.Body[0].Contains('\t'))
+            results = JsonSerializer.Deserialize("[]", JsonElementCollectionSourceGenerationContext.Default.JsonElementArray) ?? throw new Exception();
+        else
+        {
+            string value;
+            string[] segments;
+            List lines = new();
+            StringBuilder stringBuilder = new();
+            foreach (string bodyLine in processDataStandardFormat.Body)
+            {
+                _ = stringBuilder.Clear();
+                _ = stringBuilder.Append('{');
+                segments = bodyLine.Split('\t');
+                if (segments.Length != expectedColumns)
+                    continue;
+                if (!lookForNumbers)
+                {
+                    for (int c = 0; c < segments.Length; c++)
+                    {
+                        value = segments[c].Replace("\\", "\\\\").Replace("\"", "\\\"");
+                        _ = stringBuilder.Append('"').Append(processDataStandardFormat.Columns[c]).Append("\":\"").Append(value).Append("\",");
+                    }
+                }
+                else
+                {
+                    for (int c = 0; c < segments.Length; c++)
+                    {
+                        value = segments[c].Replace("\\", "\\\\").Replace("\"", "\\\"");
+                        if (string.IsNullOrEmpty(value))
+                            _ = stringBuilder.Append('"').Append(processDataStandardFormat.Columns[c]).Append("\":").Append(value).Append("null,");
+                        else if (value.All(char.IsDigit))
+                            _ = stringBuilder.Append('"').Append(processDataStandardFormat.Columns[c]).Append("\":").Append(value).Append(',');
+                        else
+                            _ = stringBuilder.Append('"').Append(processDataStandardFormat.Columns[c]).Append("\":\"").Append(value).Append("\",");
+                    }
+                }
+                _ = stringBuilder.Remove(stringBuilder.Length - 1, 1);
+                _ = stringBuilder.AppendLine("}");
+                lines.Add(stringBuilder.ToString());
+            }
+            string json = $"[{string.Join(",", lines)}]";
+            results = JsonSerializer.Deserialize(json, JsonElementCollectionSourceGenerationContext.Default.JsonElementArray);
+        }
+        return results;
+    }
+
+    private static ProcessDataStandardFormat GetProcessDataStandardFormat(ProcessDataStandardFormatMapping processDataStandardFormatMapping, JsonElement[] jsonElements, ProcessDataStandardFormat processDataStandardFormat)
+    {
+        ProcessDataStandardFormat result;
+        int column;
+        string value;
+        JsonProperty jsonProperty;
+        List values = new();
+        List results = new();
+        JsonProperty[] jsonProperties;
+        List unknownColumns = new();
+        for (int i = 0; i < jsonElements.Length; i++)
+        {
+            values.Clear();
+            if (jsonElements[i].ValueKind != JsonValueKind.Object)
+            {
+                unknownColumns.Add(string.Empty);
+                break;
+            }
+            jsonProperties = jsonElements[i].EnumerateObject().ToArray();
+            if (jsonProperties.Length != processDataStandardFormatMapping.NewColumnNames.Count)
+                continue;
+            for (int c = 0; c < processDataStandardFormatMapping.ColumnIndices.Count; c++)
+            {
+                column = processDataStandardFormatMapping.ColumnIndices[c];
+                if (column == -1)
+                    value = processDataStandardFormatMapping.OldColumnNames[c];
+                else
+                {
+                    jsonProperty = jsonProperties[column];
+                    value = jsonProperty.Value.ToString();
+                }
+                values.Add(value);
+            }
+            results.Add(string.Join("\t", values));
+        }
+        result = new(body: new(results),
+                     columns: processDataStandardFormat.Columns,
+                     logistics: processDataStandardFormat.Logistics,
+                     sequence: processDataStandardFormat.Sequence);
+        return result;
+    }
+
+    internal static void Write(string path, ProcessDataStandardFormat processDataStandardFormat)
+    {
+        List results = new();
+        if (processDataStandardFormat.Sequence is null)
+            throw new InvalidOperationException(nameof(processDataStandardFormat.Sequence));
+        string endOffset = "E#######T";
+        string dataOffset = "D#######T";
+        string headerOffset = "H#######T";
+        string format = "MM/dd/yyyy HH:mm:ss";
+        string startTime = new DateTime(processDataStandardFormat.Sequence.Value).ToString(format);
+        results.Add("HEADER_TAG\tHEADER_VALUE");
+        results.Add("FORMAT\t2.00");
+        results.Add("NUMBER_PASSES\t0001");
+        results.Add($"HEADER_OFFSET\t{headerOffset}");
+        results.Add($"DATA_OFFSET\t{dataOffset}");
+        results.Add($"END_OFFSET\t{endOffset}");
+        results.Add($"\"{string.Join("\"\t\"", processDataStandardFormat.Columns)}\"");
+        results.AddRange(processDataStandardFormat.Body);
+        results.Add($"NUM_DATA_ROWS\t{processDataStandardFormat.Body.Count.ToString().PadLeft(9, '0')}");
+        results.Add($"NUM_DATA_COLUMNS\t{processDataStandardFormat.Columns.Count.ToString().PadLeft(9, '0')}");
+        results.Add("DELIMITER\t;");
+        results.Add($"START_TIME_FORMAT\t{format}");
+        results.Add($"START_TIME\t{startTime}");
+        results.Add("LOGISTICS_COLUMN\tA_LOGISTICS");
+        results.Add("LOGISTICS_COLUMN\tB_LOGISTICS");
+        results.AddRange(processDataStandardFormat.Logistics);
+        File.WriteAllText(path, string.Join(Environment.NewLine, results));
+    }
+
+    internal static Dictionary> GetDictionary(ProcessDataStandardFormat processDataStandardFormat)
+    {
+        Dictionary> results = new();
+        string[] segments;
+        foreach (string column in processDataStandardFormat.Columns)
+            results.Add(column, new List());
+        foreach (string bodyLine in processDataStandardFormat.Body)
+        {
+            segments = bodyLine.Split('\t');
+            for (int c = 1; c < segments.Length; c++)
+            {
+                if (c >= processDataStandardFormat.Columns.Count)
+                    continue;
+                results[processDataStandardFormat.Columns[c]].Add(segments[c]);
+            }
+        }
+        return results;
+    }
+
+    internal static JsonElement[] GetArray(ProcessDataStandardFormat processDataStandardFormat, bool lookForNumbers = false)
+    {
+        JsonElement[] results;
+        if (processDataStandardFormat.Body.Count == 0 || !processDataStandardFormat.Body[0].Contains('\t'))
+            results = JsonSerializer.Deserialize("[]", JsonElementCollectionSourceGenerationContext.Default.JsonElementArray) ?? throw new Exception();
+        else
+        {
+            string value;
+            string[] segments;
+            List lines = new();
+            StringBuilder stringBuilder = new();
+            foreach (string bodyLine in processDataStandardFormat.Body)
+            {
+                _ = stringBuilder.Clear();
+                _ = stringBuilder.Append('{');
+                segments = bodyLine.Trim().Split('\t');
+                if (!lookForNumbers)
+                {
+                    for (int c = 1; c < segments.Length; c++)
+                    {
+                        value = segments[c].Replace("\\", "\\\\").Replace("\"", "\\\"");
+                        _ = stringBuilder.Append('"').Append(processDataStandardFormat.Columns[c]).Append("\":\"").Append(value).Append("\",");
+                    }
+                }
+                else
+                {
+                    for (int c = 1; c < segments.Length; c++)
+                    {
+                        value = segments[c].Replace("\\", "\\\\").Replace("\"", "\\\"");
+                        if (string.IsNullOrEmpty(value))
+                            _ = stringBuilder.Append('"').Append(processDataStandardFormat.Columns[c]).Append("\":").Append(value).Append("null,");
+                        else if (value.All(char.IsDigit))
+                            _ = stringBuilder.Append('"').Append(processDataStandardFormat.Columns[c]).Append("\":").Append(value).Append(',');
+                        else
+                            _ = stringBuilder.Append('"').Append(processDataStandardFormat.Columns[c]).Append("\":\"").Append(value).Append("\",");
+                    }
+                }
+                _ = stringBuilder.Remove(stringBuilder.Length - 1, 1);
+                _ = stringBuilder.AppendLine("}");
+                lines.Add(stringBuilder.ToString());
+            }
+            string json = $"[{string.Join(",", lines)}]";
+            results = JsonSerializer.Deserialize(json) ?? throw new Exception();
+        }
+        return results;
+    }
+
+    internal static string GetPDSFText(IFileRead fileRead, Logistics logistics, JsonElement[] jsonElements, string logisticsText)
+    {
+        string result;
+        if (jsonElements.Length == 0)
+            result = string.Empty;
+        else
+        {
+            int columns = 0;
+            List lines;
+            string endOffset = "E#######T";
+            string dataOffset = "D#######T";
+            string headerOffset = "H#######T";
+            string format = "MM/dd/yyyy HH:mm:ss";
+            StringBuilder stringBuilder = new();
+            lines = new string[] { "HEADER_TAG\tHEADER_VALUE", "FORMAT\t2.00", "NUMBER_PASSES\t0001", string.Concat("HEADER_OFFSET\t", headerOffset), string.Concat("DATA_OFFSET\t", dataOffset), string.Concat("END_OFFSET\t", endOffset) }.ToList();
+            _ = stringBuilder.Append("\"Time\"").Append('\t');
+            _ = stringBuilder.Append("\"A_LOGISTICS\"").Append('\t');
+            _ = stringBuilder.Append("\"B_LOGISTICS\"").Append('\t');
+            for (int i = 0; i < jsonElements.Length;)
+            {
+                foreach (JsonProperty jsonProperty in jsonElements[0].EnumerateObject())
+                {
+                    columns += 1;
+                    _ = stringBuilder.Append('"').Append(jsonProperty.Name).Append('"').Append('\t');
+                }
+                break;
+            }
+            _ = stringBuilder.Remove(stringBuilder.Length - 1, 1);
+            lines.Add(stringBuilder.ToString());
+            for (int i = 0; i < jsonElements.Length; i++)
+            {
+                _ = stringBuilder.Clear();
+                _ = stringBuilder.Append("0.1").Append('\t');
+                _ = stringBuilder.Append('1').Append('\t');
+                _ = stringBuilder.Append('2').Append('\t');
+                foreach (JsonProperty jsonProperty in jsonElements[i].EnumerateObject())
+                    _ = stringBuilder.Append(jsonProperty.Value).Append('\t');
+                _ = stringBuilder.Remove(stringBuilder.Length - 1, 1);
+                lines.Add(stringBuilder.ToString());
+            }
+            lines.Add(string.Concat("NUM_DATA_ROWS ", jsonElements.Length.ToString().PadLeft(9, '0')));
+            lines.Add(string.Concat("NUM_DATA_COLUMNS ", (columns + 3).ToString().PadLeft(9, '0')));
+            lines.Add("DELIMITER	;");
+            lines.Add(string.Concat("START_TIME_FORMAT	", format));
+            lines.Add(string.Concat("START_TIME ", logistics.DateTimeFromSequence.ToString(format))); //12/26/2019 15:22:44
+            lines.Add(string.Concat("LOGISTICS_COLUMN", '\t', "A_LOGISTICS"));
+            lines.Add(string.Concat("LOGISTICS_COLUMN", '\t', "B_LOGISTICS"));
+            if (!string.IsNullOrEmpty(logisticsText))
+                lines.Add(logisticsText);
+            else
+            {
+                lines.Add(string.Concat("LOGISTICS_1", '\t', "A_CHAMBER=;A_INFO=", fileRead.EventName, ";A_INFO2=", fileRead.EquipmentType, ";A_JOBID=", fileRead.CellInstanceName, ";A_MES_ENTITY=", fileRead.MesEntity, ";A_MID=", logistics.MID, ";A_NULL_DATA=", fileRead.NullData, ";A_PPID=NO_PPID;A_PROCESS_JOBID=", logistics.ProcessJobID, ";A_PRODUCT=;A_SEQUENCE=", logistics.Sequence, ";A_WAFER_ID=;"));
+                lines.Add(string.Concat("LOGISTICS_2", '\t', "B_CHAMBER=;B_INFO=", fileRead.EventName, ";B_INFO2=", fileRead.EquipmentType, ";B_JOBID=", fileRead.CellInstanceName, ";B_MES_ENTITY=", fileRead.MesEntity, ";B_MID=", logistics.MID, ";B_NULL_DATA=", fileRead.NullData, ";B_PPID=NO_PPID;B_PROCESS_JOBID=", logistics.ProcessJobID, ";B_PRODUCT=;B_SEQUENCE=", logistics.Sequence, ";B_WAFER_ID=;"));
+                lines.Add("END_HEADER");
+            }
+            _ = stringBuilder.Clear();
+            foreach (string line in lines)
+                _ = stringBuilder.AppendLine(line);
+            result = stringBuilder.ToString();
+            result = result.Replace(headerOffset, result.IndexOf("NUM_DATA_ROWS").ToString().PadLeft(9, '0')).
+                Replace(dataOffset, result.IndexOf('"').ToString().PadLeft(9, '0')).
+                Replace(endOffset, result.Length.ToString().PadLeft(9, '0'));
+        }
+        return result;
+    }
+
+    internal static Tuple<string, Dictionary<Test, Dictionary<string, List<string>>>> GetTestDictionary(ProcessDataStandardFormat processDataStandardFormat)
+    {
+        Dictionary<Test, Dictionary<string, List<string>>> results = new();
+        List<string>? collection;
+        string testColumn = nameof(Test);
+        Dictionary<string, List<string>> keyValuePairs = GetDictionary(processDataStandardFormat);
+        if (!keyValuePairs.TryGetValue(testColumn, out collection))
+            throw new Exception();
+        int min;
+        int max;
+        Test testKey;
+        List<string> vs;
+        string columnKey;
+        Dictionary<Test, List<int>> tests = new();
+        for (int i = 0; i < collection.Count; i++)
+        {
+            if (Enum.TryParse(collection[i], out Test test))
+            {
+                if (!results.ContainsKey(test))
+                {
+                    tests.Add(test, new List<int>());
+                    results.Add(test, new Dictionary<string, List<string>>());
+                }
+                tests[test].Add(i);
+            }
+        }
+        foreach (KeyValuePair<Test, List<int>> testKeyValuePair in tests)
+        {
+            testKey = testKeyValuePair.Key;
+            min = testKeyValuePair.Value.Min();
+            max = testKeyValuePair.Value.Max() + 1;
+            foreach (KeyValuePair<string, List<string>> keyValuePair in keyValuePairs)
+                results[testKey].Add(keyValuePair.Key, new List<string>());
+            foreach (KeyValuePair<string, List<string>> keyValuePair in keyValuePairs)
+            {
+                vs = keyValuePair.Value;
+                columnKey = keyValuePair.Key;
+                for (int i = min; i < max; i++)
+                {
+                    if (vs.Count > i)
+                        results[testKey][columnKey].Add(vs[i]);
+                    else
+                        results[testKey][columnKey].Add(string.Empty);
+                }
+            }
+        }
+        return new Tuple<string, Dictionary<Test, Dictionary<string, List<string>>>>(processDataStandardFormat.Logistics[0], results);
+    }
+
+    internal static string GetLines(Logistics logistics, Properties.IScopeInfo scopeInfo, List<string> names, Dictionary<string, List<string>> keyValuePairs, string dateFormat, string timeFormat, List<string> pairedParameterNames, bool useDateTimeFromSequence = true, string format = "", List<string>? ignoreParameterNames = null)
+    {
+        StringBuilder result = new();
+        ignoreParameterNames ??= new List<string>();
+        if (useDateTimeFromSequence && !string.IsNullOrEmpty(format))
+            throw new Exception();
+        else if (!useDateTimeFromSequence && string.IsNullOrEmpty(format))
+            throw new Exception();
+        string? nullData;
+        const string columnDate = "Date";
+        const string columnTime = "Time";
+        const string firstDuplicate = "_1";
+        _ = result.AppendLine(scopeInfo.Header);
+        StringBuilder line = new();
+        if (logistics.NullData is null)
+            nullData = string.Empty;
+        else
+            nullData = logistics.NullData.ToString();
+        int count = (from l in keyValuePairs select l.Value.Count).Min();
+        for (int r = 0; r < count; r++)
+        {
+            _ = line.Clear();
+            _ = line.Append('!');
+            foreach (KeyValuePair<string, List<string>> keyValuePair in keyValuePairs)
+            {
+                if (!names.Contains(keyValuePair.Key))
+                    continue;
+                if (ignoreParameterNames.Contains(keyValuePair.Key))
+                    continue;
+                if (pairedParameterNames.Contains(keyValuePair.Key))
+                {
+                    if (string.IsNullOrEmpty(keyValuePair.Value[r]) || keyValuePair.Value[r] == nullData)
+                        continue;
+                    else
+                        _ = result.Append(line).Append(keyValuePair.Key).Append(';').AppendLine(keyValuePair.Value[r]);
+                }
+                else
+                {
+                    if (useDateTimeFromSequence && keyValuePair.Key == columnDate)
+                        _ = line.Append(logistics.DateTimeFromSequence.ToString(dateFormat));
+                    else if (useDateTimeFromSequence && keyValuePair.Key == columnTime)
+                        _ = line.Append(logistics.DateTimeFromSequence.ToString(timeFormat));
+                    else if (!useDateTimeFromSequence && keyValuePair.Key == columnDate && keyValuePair.Value[r].Length == format.Length)
+                        _ = line.Append(DateTime.ParseExact(keyValuePair.Value[r], format, CultureInfo.InvariantCulture).ToString(dateFormat));
+                    else if (!useDateTimeFromSequence && keyValuePair.Key == columnTime && keyValuePairs.ContainsKey(string.Concat(keyValuePair.Key, firstDuplicate)) && keyValuePairs[string.Concat(keyValuePair.Key, firstDuplicate)][r].Length == format.Length)
+                        _ = line.Append(DateTime.ParseExact(keyValuePairs[string.Concat(keyValuePair.Key, firstDuplicate)][r], format, CultureInfo.InvariantCulture).ToString(timeFormat));
+                    else if (string.IsNullOrEmpty(keyValuePair.Value[r]) || keyValuePair.Value[r] == nullData)
+                        _ = line.Append(nullData);
+                    else
+                        _ = line.Append(keyValuePair.Value[r]);
+                    _ = line.Append(';');
+                }
+            }
+            if (pairedParameterNames.Count == 0)
+            {
+                _ = line.Remove(line.Length - 1, 1);
+                _ = result.AppendLine(line.ToString());
+            }
+        }
+        return result.ToString();
+    }
+
+    private static string GetString(SearchFor searchFor, bool addSpaces, char separator = ' ')
+    {
+        if (!addSpaces)
+            return string.Concat(((int)searchFor).ToString().PadLeft(2, '0'), searchFor);
+        else
+            return string.Concat(((int)searchFor).ToString().PadLeft(2, '0'), separator, searchFor.ToString().Replace("In", string.Concat(separator, "In")).Replace("Ex", string.Concat(separator, "Ex")));
+    }
+
+    private static int? TryGetPropertyIndex(JsonProperty[] jsonProperties, string propertyName)
+    {
+        int? result = null;
+        for (int i = 0; i < jsonProperties.Length; i++)
+        {
+            if (jsonProperties[i].Name != propertyName)
+                continue;
+            result = i;
+            break;
+        }
+        if (result is null)
+        {
+            for (int i = 0; i < jsonProperties.Length; i++)
+            {
+                if (jsonProperties[i].Name[0] != propertyName[0])
+                    continue;
+                if (jsonProperties[i].Name.Length != propertyName.Length)
+                    continue;
+                if (jsonProperties[i].Name != propertyName)
+                    continue;
+                result = i;
+                break;
+            }
+        }
+        return result;
+    }
+
+}
+
+[JsonSourceGenerationOptions(WriteIndented = true)]
+[JsonSerializable(typeof(JsonElement[]))]
+internal partial class JsonElementCollectionSourceGenerationContext : JsonSerializerContext
+{
 }
\ No newline at end of file
diff --git a/Adaptation/Shared/ProcessDataStandardFormatMapping.cs b/Adaptation/Shared/ProcessDataStandardFormatMapping.cs
new file mode 100644
index 0000000..c5a75ec
--- /dev/null
+++ b/Adaptation/Shared/ProcessDataStandardFormatMapping.cs
@@ -0,0 +1,33 @@
+using System.Collections.ObjectModel;
+
+namespace Adaptation.Shared;
+
+public class ProcessDataStandardFormatMapping
+{
+
+    public ReadOnlyCollection<string> BackfillColumns { get; private set; }
+    public ReadOnlyCollection<int> ColumnIndices { get; private set; }
+    public ReadOnlyCollection<string> IgnoreColumns { get; private set; }
+    public ReadOnlyCollection<string> IndexOnlyColumns { get; private set; }
+    public ReadOnlyDictionary<string, string> KeyValuePairs { get; private set; }
+    public ReadOnlyCollection<string> NewColumnNames { get; private set; }
+    public ReadOnlyCollection<string> OldColumnNames { get; private set; }
+
+    public ProcessDataStandardFormatMapping(ReadOnlyCollection<string> backfillColumns,
+                                            ReadOnlyCollection<int> columnIndices,
+                                            ReadOnlyCollection<string> ignoreColumns,
+                                            ReadOnlyCollection<string> indexOnlyColumns,
+                                            ReadOnlyDictionary<string, string> keyValuePairs,
+                                            ReadOnlyCollection<string> newColumnNames,
+                                            ReadOnlyCollection<string> oldColumnNames)
+    {
+        BackfillColumns = backfillColumns;
+        ColumnIndices = columnIndices;
+        IgnoreColumns = ignoreColumns;
+        IndexOnlyColumns = indexOnlyColumns;
+        KeyValuePairs = keyValuePairs;
+        NewColumnNames = newColumnNames;
+        OldColumnNames = oldColumnNames;
+    }
+
+}
\ No newline at end of file
diff --git a/Adaptation/_Tests/CreateSelfDescription/Development/v2.59.0/MESAFIBACKLOG.cs b/Adaptation/_Tests/CreateSelfDescription/Development/v2.59.0/MESAFIBACKLOG.cs
index a312aba..2dd6ab9 100644
--- a/Adaptation/_Tests/CreateSelfDescription/Development/v2.59.0/MESAFIBACKLOG.cs
+++ b/Adaptation/_Tests/CreateSelfDescription/Development/v2.59.0/MESAFIBACKLOG.cs
@@ -100,5 +100,18 @@ public class MESAFIBACKLOG : EAFLoggingUnitTesting
         EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Exit"));
     }
 
+#if DEBUG
+    [Ignore]
+#endif
+    [TestMethod]
+    public void Development__v2_59_0__MESAFIBACKLOG__Violation()
+    {
+        string check = "*.json";
+        MethodBase methodBase = new StackFrame().GetMethod();
+        EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Getting configuration"));
+        _ = AdaptationTesting.GetWriteConfigurationGetFileRead(methodBase, check, EAFLoggingUnitTesting.AdaptationTesting);
+        EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Exit"));
+    }
+
 }
 #endif
\ No newline at end of file
diff --git a/Adaptation/_Tests/Extract/Development/v2.59.0/MESAFIBACKLOG.cs b/Adaptation/_Tests/Extract/Development/v2.59.0/MESAFIBACKLOG.cs
index 3943681..0bf095b 100644
--- a/Adaptation/_Tests/Extract/Development/v2.59.0/MESAFIBACKLOG.cs
+++ b/Adaptation/_Tests/Extract/Development/v2.59.0/MESAFIBACKLOG.cs
@@ -51,8 +51,8 @@ public class MESAFIBACKLOG
     {
         string check = "*.json";
         bool validatePDSF = false;
-        _MESAFIBACKLOG.Development__v2_59_0__MESAFIBACKLOG__Kanban();
         MethodBase methodBase = new StackFrame().GetMethod();
+        _MESAFIBACKLOG.Development__v2_59_0__MESAFIBACKLOG__Kanban();
         Assert.IsFalse(string.IsNullOrEmpty(_MESAFIBACKLOG.AdaptationTesting.TestContext.FullyQualifiedTestClassName));
         string[] variables = _MESAFIBACKLOG.AdaptationTesting.GetVariables(methodBase, check, validatePDSF);
         IFileRead fileRead = _MESAFIBACKLOG.AdaptationTesting.Get(methodBase, sourceFileLocation: variables[2], sourceFileFilter: variables[3], useCyclicalForDescription: false);
@@ -141,8 +141,8 @@ public class MESAFIBACKLOG
     {
         string check = "*.json";
         bool validatePDSF = false;
-        _MESAFIBACKLOG.Development__v2_59_0__MESAFIBACKLOG__Markdown();
         MethodBase methodBase = new StackFrame().GetMethod();
+        _MESAFIBACKLOG.Development__v2_59_0__MESAFIBACKLOG__Markdown();
         Assert.IsFalse(string.IsNullOrEmpty(_MESAFIBACKLOG.AdaptationTesting.TestContext.FullyQualifiedTestClassName));
         string[] variables = _MESAFIBACKLOG.AdaptationTesting.GetVariables(methodBase, check, validatePDSF);
         IFileRead fileRead = _MESAFIBACKLOG.AdaptationTesting.Get(methodBase, sourceFileLocation: variables[2], sourceFileFilter: variables[3], useCyclicalForDescription: false);
@@ -150,19 +150,6 @@ public class MESAFIBACKLOG
         Assert.IsFalse(string.IsNullOrEmpty(extractResult?.Item1));
         Assert.IsNotNull(extractResult.Item3);
         Assert.IsNotNull(extractResult.Item4);
-        ReadOnlyDictionary<string, string> keyValuePairs = GetKeyValuePairs(extractResult.Item4);
-        Assert.IsTrue(keyValuePairs.ContainsKey("check-122508.json"));
-        Assert.IsTrue(keyValuePairs.ContainsKey("check-122514.json"));
-        Assert.IsTrue(keyValuePairs.ContainsKey("check-126169.json"));
-        Assert.IsTrue(keyValuePairs.ContainsKey("check-123066.json"));
-        Assert.IsTrue(keyValuePairs.ContainsKey("check-123067.json"));
-        Assert.IsTrue(keyValuePairs.ContainsKey("check-122517.json"));
-        Verify122508(keyValuePairs["check-122508.json"]);
-        Verify122514(keyValuePairs["check-122514.json"]);
-        Verify126169(keyValuePairs["check-126169.json"]);
-        Verify123066(keyValuePairs["check-123066.json"]);
-        Verify123067(keyValuePairs["check-123067.json"]);
-        Verify122517(keyValuePairs["check-122517.json"]);
         NonThrowTryCatch();
     }
 
@@ -226,8 +213,8 @@ public class MESAFIBACKLOG
     {
         string check = "*.json";
         bool validatePDSF = false;
-        _MESAFIBACKLOG.Development__v2_59_0__MESAFIBACKLOG__Markdown();
         MethodBase methodBase = new StackFrame().GetMethod();
+        _MESAFIBACKLOG.Development__v2_59_0__MESAFIBACKLOG__Markdown();
         Assert.IsFalse(string.IsNullOrEmpty(_MESAFIBACKLOG.AdaptationTesting.TestContext.FullyQualifiedTestClassName));
         string[] variables = _MESAFIBACKLOG.AdaptationTesting.GetVariables(methodBase, check, validatePDSF);
         IFileRead fileRead = _MESAFIBACKLOG.AdaptationTesting.Get(methodBase, sourceFileLocation: variables[2], sourceFileFilter: variables[3], useCyclicalForDescription: false);
@@ -238,5 +225,38 @@ public class MESAFIBACKLOG
         NonThrowTryCatch();
     }
 
+#if DEBUG
+    [Ignore]
+#endif
+    [TestMethod]
+    public void Development__v2_59_0__MESAFIBACKLOG__Violation638779784153157287__Normal()
+    {
+        string check = "*.json";
+        bool validatePDSF = false;
+        MethodBase methodBase = new StackFrame().GetMethod();
+        _MESAFIBACKLOG.Development__v2_59_0__MESAFIBACKLOG__Violation();
+        Assert.IsFalse(string.IsNullOrEmpty(_MESAFIBACKLOG.AdaptationTesting.TestContext.FullyQualifiedTestClassName));
+        string[] variables = _MESAFIBACKLOG.AdaptationTesting.GetVariables(methodBase, check, validatePDSF);
+        IFileRead fileRead = _MESAFIBACKLOG.AdaptationTesting.Get(methodBase, sourceFileLocation: variables[2], sourceFileFilter: variables[3], useCyclicalForDescription: false);
+        Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResult = fileRead.ReExtract();
+        Assert.IsFalse(string.IsNullOrEmpty(extractResult?.Item1));
+        Assert.IsNotNull(extractResult.Item3);
+        Assert.IsNotNull(extractResult.Item4);
+        ReadOnlyDictionary<string, string> keyValuePairs = GetKeyValuePairs(extractResult.Item4);
+        Assert.IsTrue(keyValuePairs.ContainsKey("check-122508.json"));
+        Assert.IsTrue(keyValuePairs.ContainsKey("check-122514.json"));
+        Assert.IsTrue(keyValuePairs.ContainsKey("check-126169.json"));
+        Assert.IsTrue(keyValuePairs.ContainsKey("check-123066.json"));
+        Assert.IsTrue(keyValuePairs.ContainsKey("check-123067.json"));
+        Assert.IsTrue(keyValuePairs.ContainsKey("check-122517.json"));
+        Verify122508(keyValuePairs["check-122508.json"]);
+        Verify122514(keyValuePairs["check-122514.json"]);
+        Verify126169(keyValuePairs["check-126169.json"]);
+        Verify123066(keyValuePairs["check-123066.json"]);
+        Verify123067(keyValuePairs["check-123067.json"]);
+        Verify122517(keyValuePairs["check-122517.json"]);
+        NonThrowTryCatch();
+    }
+
 }
 #endif
\ No newline at end of file
diff --git a/Adaptation/_Tests/Shared/AdaptationTesting.cs b/Adaptation/_Tests/Shared/AdaptationTesting.cs
index 64393eb..e58e25e 100644
--- a/Adaptation/_Tests/Shared/AdaptationTesting.cs
+++ b/Adaptation/_Tests/Shared/AdaptationTesting.cs
@@ -56,11 +56,33 @@ public class AdaptationTesting : ISMTP
     public Dictionary ParameterizedModelObjectDefinitionTypes => _ParameterizedModelObjectDefinitionTypes;
     public Dictionary>> EquipmentDictionaryEventDescriptions => _EquipmentDictionaryEventDescriptions;
 
-    void ISMTP.SendLowPriorityEmailMessage(string subject, string body) => throw new NotImplementedException();
+    void ISMTP.SendLowPriorityEmailMessage(string subject, string body) =>
+        throw new NotImplementedException();
 
-    void ISMTP.SendHighPriorityEmailMessage(string subject, string body) => throw new NotImplementedException();
+    void ISMTP.SendHighPriorityEmailMessage(string subject, string body) =>
+        throw new NotImplementedException();
 
-    void ISMTP.SendNormalPriorityEmailMessage(string subject, string body) => throw new NotImplementedException();
+    void ISMTP.SendNormalPriorityEmailMessage(string subject, string body) =>
+        throw new NotImplementedException();
+
+    internal static T ParseXML<T>(string @this, bool throwExceptions) where T : class
+    {
+        object result = null;
+        try
+        {
+            Stream stream = ToStream(@this.Trim());
+            XmlReader xmlReader = XmlReader.Create(stream, new XmlReaderSettings() { ConformanceLevel = ConformanceLevel.Document });
+            XmlSerializer xmlSerializer = new(typeof(T), typeof(T).GetNestedTypes());
+            result = xmlSerializer.Deserialize(xmlReader);
+            stream.Dispose();
+        }
+        catch (Exception)
+        {
+            if (throwExceptions)
+                throw;
+        }
+        return result as T;
+    }
 
     public AdaptationTesting(string dummyRoot, TestContext testContext, bool skipEquipmentDictionary, string testContextPropertiesAsJson, bool hasWaitForProperty)
     {
@@ -105,93 +127,6 @@ public class AdaptationTesting : ISMTP
         return result;
     }
 
-    public static string GetTestResultsDirectory(string testContextTestResultsDirectory, bool hasWaitForProperty)
-    {
-        string result = string.Empty;
-        string testResults = "05_TestResults";
-        string checkDirectory = testContextTestResultsDirectory;
-        if (hasWaitForProperty && (string.IsNullOrEmpty(checkDirectory) || !checkDirectory.Contains(testResults)))
-            throw new Exception($"A:{checkDirectory}; B:{testResults};");
-        else if (!hasWaitForProperty && (string.IsNullOrEmpty(checkDirectory) || !checkDirectory.Contains(testResults)))
-            result = testContextTestResultsDirectory;
-        else
-        {
-            string rootDirectory = Path.GetPathRoot(checkDirectory);
-            for (int i = 0; i < int.MaxValue; i++)
-            {
-                checkDirectory = Path.GetDirectoryName(checkDirectory);
-                if (string.IsNullOrEmpty(checkDirectory) || checkDirectory == rootDirectory)
-                    break;
-                if (checkDirectory.EndsWith(testResults) && Directory.Exists(checkDirectory))
-                {
-                    result = checkDirectory;
-                    break;
-                }
-            }
-        }
-        if (string.IsNullOrEmpty(result))
-            throw new Exception();
-        return result;
-    }
-
-    private string GetTestResultsDirectory(bool hasWaitForProperty)
-    {
-        string result = GetTestResultsDirectory(_TestContext.TestResultsDirectory, hasWaitForProperty);
-        return result;
-    }
-
-    protected static string GetCellInstanceConnectionName(string cellInstanceConnectionName)
-    {
-        string result;
-        if (string.IsNullOrEmpty(cellInstanceConnectionName) || cellInstanceConnectionName[cellInstanceConnectionName.Length - 1] != '_')
-            result = cellInstanceConnectionName;
-        else
-        {
-            bool check = false;
-            List<char> chars = new();
-            StringBuilder stringBuilder = new();
-            for (int i = cellInstanceConnectionName.Length - 1; i > -1; i--)
-            {
-                if (!check && cellInstanceConnectionName[i] != '_')
-                    check = true;
-                else if (!check && cellInstanceConnectionName[i] == '_')
-                    chars.Add('-');
-                if (check)
-                    chars.Add(cellInstanceConnectionName[i]);
-            }
-            for (int i = chars.Count - 1; i > -1; i--)
-                _ = stringBuilder.Append(chars[i]);
-            result = stringBuilder.ToString();
-        }
-        return result;
-    }
-
-    private static string GetMethodBaseNameWithActualCICN(string methodBaseName, string cellInstanceName, string cellInstanceConnectionNameFromMethodBaseName, string cellInstanceConnectionName, string ticks)
-    {
-        string results;
-        if (string.IsNullOrEmpty(cellInstanceConnectionNameFromMethodBaseName) || string.IsNullOrEmpty(cellInstanceConnectionName))
-            results = methodBaseName;
-        else if (cellInstanceConnectionNameFromMethodBaseName.Length != cellInstanceConnectionName.Length)
-            throw new Exception();
-        else
-        {
-            string[] segments = methodBaseName.Split(new string[] { cellInstanceName }, StringSplitOptions.None);
-            if (segments.Length == 2)
-                results = methodBaseName.Replace(cellInstanceConnectionNameFromMethodBaseName, cellInstanceConnectionName);
-            else if (segments.Length != 3)
-                throw new Exception();
-            else if (string.IsNullOrEmpty(ticks))
-                results = string.Concat(segments[0], cellInstanceName, segments[1], cellInstanceConnectionName);
-            else if (!segments[2].Contains(ticks))
-                throw new Exception();
-            else
-                results = string.Concat(segments[0], cellInstanceName, segments[1], cellInstanceConnectionName, ticks, segments[2].Split(new string[] { ticks }, StringSplitOptions.None)[1]);
-        }
-        if (methodBaseName.Length != results.Length)
-            throw new Exception();
-        return results;
-    }
-
     public static MethodBaseName GetMethodBaseName(string dummyRoot, string environment, bool hasWaitForProperty, string methodBaseName, string testResultsDirectory)
     {
         MethodBaseName result;
@@ -275,74 +210,58 @@ public class AdaptationTesting : ISMTP
         return result;
     }
 
-    private MethodBaseName GetMethodBaseName(MethodBase methodBase)
+    protected static string GetCellInstanceConnectionName(string cellInstanceConnectionName)
     {
-        MethodBaseName result;
-        string testResultsDirectory = GetTestResultsDirectory(_HasWaitForProperty);
-        result = GetMethodBaseName(_DummyRoot, _Environment, _HasWaitForProperty, methodBase.Name, testResultsDirectory);
+        string result;
+        if (string.IsNullOrEmpty(cellInstanceConnectionName) || cellInstanceConnectionName[cellInstanceConnectionName.Length - 1] != '_')
+            result = cellInstanceConnectionName;
+        else
+        {
+            bool check = false;
+            List<char> chars = new();
+            StringBuilder stringBuilder = new();
+            for (int i = cellInstanceConnectionName.Length - 1; i > -1; i--)
+            {
+                if (!check && cellInstanceConnectionName[i] != '_')
+                    check = true;
+                else if (!check && cellInstanceConnectionName[i] == '_')
+                    chars.Add('-');
+                if (check)
+                    chars.Add(cellInstanceConnectionName[i]);
+            }
+            for (int i = chars.Count - 1; i > -1; i--)
+                _ = stringBuilder.Append(chars[i]);
+            result = stringBuilder.ToString();
+        }
         return result;
     }
 
-    private string[] GetTextFiles(MethodBaseName mbn)
+    private static string GetMethodBaseNameWithActualCICN(string methodBaseName, string cellInstanceName, string cellInstanceConnectionNameFromMethodBaseName, string cellInstanceConnectionName, string ticks)
     {
-        string[] results;
-        if (string.IsNullOrEmpty(mbn.TextFileDirectory))
-            results = Array.Empty<string>();
-        else if (!Directory.Exists(mbn.TextFileDirectory))
-        {
-            results = Array.Empty<string>();
-            if (!_HasWaitForProperty)
-                _ = Directory.CreateDirectory(mbn.TextFileDirectory);
-            else
-            {
-                string renameDirectory = Path.Combine(Path.GetDirectoryName(mbn.TextFileDirectory), $"_Rename - {Path.GetFileName(mbn.TextFileDirectory)}");
-                _ = Directory.CreateDirectory(renameDirectory);
-                _ = Process.Start("explorer.exe", renameDirectory);
-                File.WriteAllText(Path.Combine(renameDirectory, $"{nameof(FileConnectorConfiguration.SourceFileFilter)}.txt"), string.Empty);
-                File.WriteAllText(Path.Combine(renameDirectory, $"{nameof(FileConnectorConfiguration.SourceFileLocation)}.txt"), string.Empty);
-            }
-        }
+        string results;
+        if (string.IsNullOrEmpty(cellInstanceConnectionNameFromMethodBaseName) || string.IsNullOrEmpty(cellInstanceConnectionName))
+            results = methodBaseName;
+        else if (cellInstanceConnectionNameFromMethodBaseName.Length != cellInstanceConnectionName.Length)
+            throw new Exception();
         else
         {
-            results = Directory.GetFiles(mbn.TextFileDirectory, "*.txt", SearchOption.TopDirectoryOnly);
-            if (!string.IsNullOrEmpty(mbn.Ticks) && _HasWaitForProperty && results.Length == 0)
-            {
-                _ = Process.Start("explorer.exe", mbn.TextFileDirectory);
-                File.WriteAllText(Path.Combine(mbn.TextFileDirectory, "_ Why.why"), string.Empty);
-            }
+            string[] segments = methodBaseName.Split(new string[] { cellInstanceName }, StringSplitOptions.None);
+            if (segments.Length == 2)
+                results = methodBaseName.Replace(cellInstanceConnectionNameFromMethodBaseName, cellInstanceConnectionName);
+            else if (segments.Length != 3)
+                throw new Exception();
+            else if (string.IsNullOrEmpty(ticks))
+                results = string.Concat(segments[0], cellInstanceName, segments[1], cellInstanceConnectionName);
+            else if (!segments[2].Contains(ticks))
+                throw new Exception();
+            else
+                results = string.Concat(segments[0], cellInstanceName, segments[1], cellInstanceConnectionName, ticks, segments[2].Split(new string[] { ticks }, StringSplitOptions.None)[1]);
         }
+        if (methodBaseName.Length != results.Length)
+            throw new Exception();
         return results;
     }
 
-    protected static Stream ToStream(string @this)
-    {
-        MemoryStream memoryStream = new();
-        StreamWriter streamWriter = new(memoryStream);
-        streamWriter.Write(@this);
-        streamWriter.Flush();
-        memoryStream.Position = 0;
-        return memoryStream;
-    }
-
-    internal static T ParseXML(string @this, bool throwExceptions) where T : class
-    {
-        object result = null;
-        try
-        {
-            Stream stream = ToStream(@this.Trim());
-            XmlReader xmlReader = XmlReader.Create(stream, new XmlReaderSettings() { ConformanceLevel = ConformanceLevel.Document });
-            XmlSerializer xmlSerializer = new(typeof(T), typeof(T).GetNestedTypes());
-            result = xmlSerializer.Deserialize(xmlReader);
-            stream.Dispose();
-        }
-        catch (Exception)
-        {
-            if (throwExceptions)
-                throw;
-        }
-        return result as T;
-    }
-
     public static CellInstanceVersion GetCellInstanceVersion(string url)
     {
         CellInstanceVersion result;
@@ -368,6 +287,54 @@ public class AdaptationTesting : ISMTP
         return result;
     }
 
+    public static string GetTestResultsDirectory(string testContextTestResultsDirectory, bool hasWaitForProperty)
+    {
+        string result = string.Empty;
+        string testResults = "05_TestResults";
+        string checkDirectory = testContextTestResultsDirectory;
+        if (hasWaitForProperty && (string.IsNullOrEmpty(checkDirectory) || !checkDirectory.Contains(testResults)))
+            throw new Exception($"A:{checkDirectory}; B:{testResults};");
+        else if (!hasWaitForProperty && (string.IsNullOrEmpty(checkDirectory) || !checkDirectory.Contains(testResults)))
+            result = testContextTestResultsDirectory;
+        else
+        {
+            string rootDirectory = Path.GetPathRoot(checkDirectory);
+            for (int i = 0; i < int.MaxValue; i++)
+            {
+                checkDirectory = Path.GetDirectoryName(checkDirectory);
+                if (string.IsNullOrEmpty(checkDirectory) || checkDirectory == rootDirectory)
+                    break;
+                if (checkDirectory.EndsWith(testResults) && Directory.Exists(checkDirectory))
+                {
+                    result = checkDirectory;
+                    break;
+                }
+            }
+        }
+        if (string.IsNullOrEmpty(result))
+            throw new Exception();
+        return result;
+    }
+
+    public string[] GetCSharpText(string testName)
+    {
+        string[] results;
+        string testResultsDirectory = GetTestResultsDirectory(_HasWaitForProperty);
+        MethodBaseName mbn = GetMethodBaseName(_DummyRoot, _Environment, _HasWaitForProperty, testName, testResultsDirectory);
+        FileInfo fileInfo = new(mbn.FileFullName);
+        if (!string.IsNullOrEmpty(mbn.CellInstanceConnectionName) && !Directory.Exists(fileInfo.DirectoryName))
+            _ = Directory.CreateDirectory(fileInfo.Directory.FullName);
+        Tuple cellInstanceVersionTuple = GetCellInstanceVersionTuple(mbn.CellInstanceName, mbn.CellInstanceVersionName);
+        results = GetCSharpTextB(fileInfo, mbn.CellInstanceName, mbn.CellInstanceVersionName, cellInstanceVersionTuple.Item2);
+        return results;
+    }
+
+    private string GetTestResultsDirectory(bool hasWaitForProperty)
+    {
+        string result = GetTestResultsDirectory(_TestContext.TestResultsDirectory, hasWaitForProperty);
+        return result;
+    }
+
     protected Tuple GetCellInstanceVersionTuple(string cellInstanceName, string cellInstanceVersionName)
     {
         Tuple result;
@@ -382,41 +349,6 @@ public class AdaptationTesting : ISMTP
         return result;
     }
 
-    protected static Dictionary GetComponentModelComponentsIndexes(CellInstanceVersion cellInstanceVersion, string cellInstanceConnectionName)
-    {
-        Dictionary results = new();
-        ComponentsCellComponent componentsCellComponent;
-        if (cellInstanceVersion.ComponentModel.Components is not null)
-        {
-            for (int i = 0; i < cellInstanceVersion.ComponentModel.Components.Length; i++)
-            {
-                componentsCellComponent = cellInstanceVersion.ComponentModel.Components[i];
-                for (int j = 0; j < componentsCellComponent.Children.Length; j++)
-                {
-                    if (string.IsNullOrEmpty(componentsCellComponent.Children[j].Equipment.Name))
-                        continue;
-                    results.Add(componentsCellComponent.Children[j].Name, new int[] { i, j });
-                }
-            }
-        }
-        if (results.Count == 0 || (!string.IsNullOrEmpty(cellInstanceConnectionName) && !results.ContainsKey(cellInstanceConnectionName)))
-            throw new Exception("Match not found (check test method name matches Mango)!");
-        return results;
-    }
-
-    protected static int[] GetCellInstanceConnectionNameIndexes(string cellInstanceConnectionName, Dictionary componentModelComponentsIndexes)
-    {
-        int[] result;
-        if (string.IsNullOrEmpty(cellInstanceConnectionName))
-            result = componentModelComponentsIndexes.ElementAt(0).Value;
-        else
-        {
-            if (componentModelComponentsIndexes is null || !componentModelComponentsIndexes.TryGetValue(cellInstanceConnectionName, out result))
-                throw new Exception();
-        }
-        return result;
-    }
-
     protected string[] GetCSharpTextB(FileInfo fileInfo, string cellInstanceName, string cellInstanceVersionName, CellInstanceVersion cellInstanceVersion)
     {
         List results = new();
@@ -608,6 +540,62 @@ public class AdaptationTesting : ISMTP
         return results.ToArray();
     }
 
+    public string[] GetConfiguration(MethodBase methodBase)
+    {
+        string[] results;
+        MethodBaseName mbn = GetMethodBaseName(methodBase);
+        FileInfo fileInfo = new(mbn.FileFullName);
+        if (!string.IsNullOrEmpty(mbn.CellInstanceConnectionName) && !Directory.Exists(fileInfo.DirectoryName))
+            _ = Directory.CreateDirectory(fileInfo.Directory.FullName);
+        Tuple cellInstanceVersionTuple = GetCellInstanceVersionTuple(mbn.CellInstanceName, mbn.CellInstanceVersionName);
+        Tuple fileConnectorConfigurationTuple = GetFileConnectorConfigurationTuple(cellInstanceVersionTuple, mbn.CellInstanceConnectionName);
+        if (string.IsNullOrEmpty(mbn.Ticks) && fileConnectorConfigurationTuple.Item2?.FileScanningIntervalInSeconds is not null)
+        {
+            string fileScanningIntervalInSecondsLine;
+            string versionDirectory = Path.GetDirectoryName(fileInfo.DirectoryName);
+            if (fileConnectorConfigurationTuple.Item2.FileScanningIntervalInSeconds.Value < 0)
+                fileScanningIntervalInSecondsLine = $"-\t{fileConnectorConfigurationTuple.Item2.FileScanningIntervalInSeconds.Value:0000}\t{Path.GetFileName(fileInfo.DirectoryName)}";
+            else
+                fileScanningIntervalInSecondsLine = $"+\t{fileConnectorConfigurationTuple.Item2.FileScanningIntervalInSeconds.Value:+0000}\t{Path.GetFileName(fileInfo.DirectoryName)}";
+            File.AppendAllLines(Path.Combine(versionDirectory, "FileScanningIntervalInSeconds.txt"), new string[] { fileScanningIntervalInSecondsLine });
+        }
+        Tuple equipmentTypeVersionTuple = GetEquipmentTypeVersionTuple(cellInstanceVersionTuple.Item2, mbn.CellInstanceConnectionName);
+        Tuple parameterizedModelObjectDefinitionTypeTuple = GetParameterizedModelObjectDefinitionTypeTuple(equipmentTypeVersionTuple);
+        Tuple> modelObjectParametersTuple = GetModelObjectParameters(equipmentTypeVersionTuple);
+        Tuple equipmentDictionaryVersionTuple = GetEquipmentDictionaryVersionTuple(cellInstanceVersionTuple.Item2, mbn.CellInstanceConnectionName, equipmentTypeVersionTuple.Item4);
+        Tuple>> equipmentDictionaryIsAlwaysEnabledEventsTuple = GetEquipmentDictionaryIsAlwaysEnabledEventsTuple(equipmentDictionaryVersionTuple);
+        Dictionary objects = GetKeyValuePairs(mbn.CellInstanceName, mbn.CellInstanceVersionName, mbn.CellInstanceConnectionName, fileConnectorConfigurationTuple.Item2, equipmentTypeVersionTuple.Item2, parameterizedModelObjectDefinitionTypeTuple.Item2, modelObjectParametersTuple.Item2, equipmentDictionaryVersionTuple.Item2, equipmentDictionaryIsAlwaysEnabledEventsTuple.Item2, cellInstanceVersionTuple.Item2.EdaConnection.PortNumber);
+        string json = JsonSerializer.Serialize(objects, new JsonSerializerOptions { WriteIndented = true });
+        results = new string[] { fileInfo.FullName, json };
+        return results;
+    }
+
+    private MethodBaseName GetMethodBaseName(MethodBase methodBase)
+    {
+        MethodBaseName result;
+        string testResultsDirectory = GetTestResultsDirectory(_HasWaitForProperty);
+        result = GetMethodBaseName(_DummyRoot, _Environment, _HasWaitForProperty, methodBase.Name, testResultsDirectory);
+        return result;
+    }
+
+    protected Tuple GetFileConnectorConfigurationTuple(Tuple cellInstanceVersionTuple, string cellInstanceConnectionName)
+    {
+        Tuple result;
+        FileConnectorConfiguration fileConnectorConfiguration;
+        string cellInstanceServiceV2With = string.Concat(cellInstanceVersionTuple.Item1, '/', cellInstanceConnectionName);
+        if (!_FileConnectorConfigurations.TryGetValue(cellInstanceServiceV2With, out fileConnectorConfiguration))
+        {
+            Dictionary componentModelComponentsIndexes = GetComponentModelComponentsIndexes(cellInstanceVersionTuple.Item2, cellInstanceConnectionName);
+            int[] cellInstanceConnectionNameIndexes = GetCellInstanceConnectionNameIndexes(cellInstanceConnectionName, componentModelComponentsIndexes);
+            ComponentsCellComponentCellComponent componentsCellComponentCellComponent = cellInstanceVersionTuple.Item2.ComponentModel.Components[cellInstanceConnectionNameIndexes[0]].Children[cellInstanceConnectionNameIndexes[1]];
+            string json = JsonSerializer.Serialize(componentsCellComponentCellComponent.Equipment, new JsonSerializerOptions { WriteIndented = true });
+            fileConnectorConfiguration = GetFileConnectorConfiguration(json, componentsCellComponentCellComponent);
+            _FileConnectorConfigurations.Add(cellInstanceServiceV2With, fileConnectorConfiguration);
+        }
+        result = new Tuple(cellInstanceServiceV2With, fileConnectorConfiguration);
+        return result;
+    }
+
     protected static FileConnectorConfiguration GetFileConnectorConfiguration(string json, ComponentsCellComponentCellComponent componentsCellComponentCellComponent)
     {
         FileConnectorConfiguration result;
@@ -640,21 +628,55 @@ public class AdaptationTesting : ISMTP
         return result;
     }
 
-    protected Tuple GetFileConnectorConfigurationTuple(Tuple cellInstanceVersionTuple, string cellInstanceConnectionName)
+    protected Tuple GetEquipmentTypeVersionTuple(CellInstanceVersion cellInstanceVersion, string cellInstanceConnectionName)
     {
-        Tuple result;
-        FileConnectorConfiguration fileConnectorConfiguration;
-        string cellInstanceServiceV2With = string.Concat(cellInstanceVersionTuple.Item1, '/', cellInstanceConnectionName);
-        if (!_FileConnectorConfigurations.TryGetValue(cellInstanceServiceV2With, out fileConnectorConfiguration))
+        Tuple result;
+        EquipmentTypeVersion equipmentTypeVersion;
+        Dictionary componentModelComponentsIndexes = GetComponentModelComponentsIndexes(cellInstanceVersion, cellInstanceConnectionName);
+        int[] cellInstanceConnectionNameIndexes = GetCellInstanceConnectionNameIndexes(cellInstanceConnectionName, componentModelComponentsIndexes);
+        ComponentsCellComponentCellComponent componentsCellComponentCellComponent = cellInstanceVersion.ComponentModel.Components[cellInstanceConnectionNameIndexes[0]].Children[cellInstanceConnectionNameIndexes[1]];
+        string equipmentTypeServiceV2 = string.Concat("http://", _HostNameAndPort, "/EquipmentTypeServiceV2/", componentsCellComponentCellComponent.Equipment.EquipmentType.Name, "/", componentsCellComponentCellComponent.Equipment.EquipmentType.Version, "/configuration");
+        if (!_EquipmentTypeVersions.TryGetValue(equipmentTypeServiceV2, out equipmentTypeVersion))
         {
-            Dictionary componentModelComponentsIndexes = GetComponentModelComponentsIndexes(cellInstanceVersionTuple.Item2, cellInstanceConnectionName);
-            int[] cellInstanceConnectionNameIndexes = GetCellInstanceConnectionNameIndexes(cellInstanceConnectionName, componentModelComponentsIndexes);
-            ComponentsCellComponentCellComponent componentsCellComponentCellComponent = cellInstanceVersionTuple.Item2.ComponentModel.Components[cellInstanceConnectionNameIndexes[0]].Children[cellInstanceConnectionNameIndexes[1]];
-            string json = JsonSerializer.Serialize(componentsCellComponentCellComponent.Equipment, new JsonSerializerOptions { WriteIndented = true });
-            fileConnectorConfiguration = GetFileConnectorConfiguration(json, componentsCellComponentCellComponent);
-            _FileConnectorConfigurations.Add(cellInstanceServiceV2With, fileConnectorConfiguration);
+            equipmentTypeVersion = GetEquipmentTypeVersion(equipmentTypeServiceV2);
+            _EquipmentTypeVersions.Add(equipmentTypeServiceV2, equipmentTypeVersion);
+        }
+        result = new Tuple(equipmentTypeServiceV2, componentsCellComponentCellComponent.Equipment.EquipmentType.Name, componentsCellComponentCellComponent.Equipment.EquipmentType.Version, equipmentTypeVersion);
+        return result;
+    }
+
+    protected static Dictionary GetComponentModelComponentsIndexes(CellInstanceVersion cellInstanceVersion, string cellInstanceConnectionName)
+    {
+        Dictionary results = new();
+        ComponentsCellComponent componentsCellComponent;
+        if (cellInstanceVersion.ComponentModel.Components is not null)
+        {
+            for (int i = 0; i < cellInstanceVersion.ComponentModel.Components.Length; i++)
+            {
+                componentsCellComponent = cellInstanceVersion.ComponentModel.Components[i];
+                for (int j = 0; j < componentsCellComponent.Children.Length; j++)
+                {
+                    if (string.IsNullOrEmpty(componentsCellComponent.Children[j].Equipment.Name))
+                        continue;
+                    results.Add(componentsCellComponent.Children[j].Name, new int[] { i, j });
+                }
+            }
+        }
+        if (results.Count == 0 || (!string.IsNullOrEmpty(cellInstanceConnectionName) && !results.ContainsKey(cellInstanceConnectionName)))
+            throw new Exception("Match not found (check test method name matches Mango)!");
+        return results;
+    }
+
+    protected static int[] GetCellInstanceConnectionNameIndexes(string cellInstanceConnectionName, Dictionary componentModelComponentsIndexes)
+    {
+        int[] result;
+        if (string.IsNullOrEmpty(cellInstanceConnectionName))
+            result = componentModelComponentsIndexes.ElementAt(0).Value;
+        else
+        {
+            if (componentModelComponentsIndexes is null || !componentModelComponentsIndexes.TryGetValue(cellInstanceConnectionName, out result))
+                throw new Exception();
         }
-        result = new Tuple(cellInstanceServiceV2With, fileConnectorConfiguration);
         return result;
     }
 
@@ -683,35 +705,6 @@ public class AdaptationTesting : ISMTP
         return result;
     }
 
-    protected Tuple GetEquipmentTypeVersionTuple(CellInstanceVersion cellInstanceVersion, string cellInstanceConnectionName)
-    {
-        Tuple result;
-        EquipmentTypeVersion equipmentTypeVersion;
-        Dictionary componentModelComponentsIndexes = GetComponentModelComponentsIndexes(cellInstanceVersion, cellInstanceConnectionName);
-        int[] cellInstanceConnectionNameIndexes = GetCellInstanceConnectionNameIndexes(cellInstanceConnectionName, componentModelComponentsIndexes);
-        ComponentsCellComponentCellComponent componentsCellComponentCellComponent = cellInstanceVersion.ComponentModel.Components[cellInstanceConnectionNameIndexes[0]].Children[cellInstanceConnectionNameIndexes[1]];
-        string equipmentTypeServiceV2 = string.Concat("http://", _HostNameAndPort, "/EquipmentTypeServiceV2/", componentsCellComponentCellComponent.Equipment.EquipmentType.Name, "/", componentsCellComponentCellComponent.Equipment.EquipmentType.Version, "/configuration");
-        if (!_EquipmentTypeVersions.TryGetValue(equipmentTypeServiceV2, out equipmentTypeVersion))
-        {
-            equipmentTypeVersion = GetEquipmentTypeVersion(equipmentTypeServiceV2);
-            _EquipmentTypeVersions.Add(equipmentTypeServiceV2, equipmentTypeVersion);
-        }
-        result = new Tuple(equipmentTypeServiceV2, componentsCellComponentCellComponent.Equipment.EquipmentType.Name, componentsCellComponentCellComponent.Equipment.EquipmentType.Version, equipmentTypeVersion);
-        return result;
-    }
-
-    protected Tuple GetParameterizedModelObjectDefinitionTypeTuple(Tuple equipmentTypeVersionTuple)
-    {
-        Tuple result;
-        string parameterizedModelObjectDefinitionType;
-        if (_FileConnectorConfigurations.ContainsKey(equipmentTypeVersionTuple.Item1))
-            parameterizedModelObjectDefinitionType = _ParameterizedModelObjectDefinitionTypes[equipmentTypeVersionTuple.Item1];
-        else
-            parameterizedModelObjectDefinitionType = equipmentTypeVersionTuple.Item4.FileHandlerObjectTypes.ParameterizedModelObjectDefinition.Type;
-        result = new Tuple(equipmentTypeVersionTuple.Item1, parameterizedModelObjectDefinitionType);
-        return result;
-    }
-
     protected IList GetModelObjectParameters(string json)
     {
         IList results;
@@ -736,18 +729,38 @@ public class AdaptationTesting : ISMTP
         return results;
     }
 
-    protected Tuple> GetModelObjectParameters(Tuple equipmentTypeVersionTuple)
+    protected Tuple GetEquipmentDictionaryVersionTuple(CellInstanceVersion cellInstanceVersion, string cellInstanceConnectionName, EquipmentTypeVersion equipmentTypeVersion)
     {
-        Tuple> result;
-        IList modelObjectParameters;
-        if (_FileConnectorConfigurations.ContainsKey(equipmentTypeVersionTuple.Item1))
-            modelObjectParameters = _ModelObjectParameters[equipmentTypeVersionTuple.Item1];
+        Tuple result;
+        string equipmentDictionaryName;
+        string equipmentDictionaryVersionName;
+        EquipmentDictionaryVersion equipmentDictionaryVersion;
+        Dictionary componentModelComponentsIndexes = GetComponentModelComponentsIndexes(cellInstanceVersion, cellInstanceConnectionName);
+        int[] cellInstanceConnectionNameIndexes = GetCellInstanceConnectionNameIndexes(cellInstanceConnectionName, componentModelComponentsIndexes);
+        ComponentsCellComponentCellComponent componentsCellComponentCellComponent = cellInstanceVersion.ComponentModel.Components[cellInstanceConnectionNameIndexes[0]].Children[cellInstanceConnectionNameIndexes[1]];
+        string[] segments = GetEquipmentDictionaryStrings(componentsCellComponentCellComponent.Equipment, equipmentTypeVersion);
+        if (_SkipEquipmentDictionary || segments is null || segments.Length != 2 || string.IsNullOrEmpty(segments[0]) || string.IsNullOrEmpty(segments[1]))
+        {
+            equipmentDictionaryName = string.Empty;
+            equipmentDictionaryVersionName = string.Empty;
+        }
         else
         {
-            string json = JsonSerializer.Serialize(equipmentTypeVersionTuple.Item4, new JsonSerializerOptions { WriteIndented = true });
-            modelObjectParameters = GetModelObjectParameters(json);
+            equipmentDictionaryName = segments[0];
+            equipmentDictionaryVersionName = segments[1];
         }
-        result = new Tuple>(equipmentTypeVersionTuple.Item1, modelObjectParameters);
+        string equipmentDictionaryServiceV2 = string.Concat("http://", _HostNameAndPort, "/EquipmentDictionaryServiceV2/", equipmentDictionaryName, "/", equipmentDictionaryVersionName, "/configuration");
+        if (string.IsNullOrEmpty(equipmentDictionaryName) || string.IsNullOrEmpty(equipmentDictionaryVersionName))
+            equipmentDictionaryVersion = null;
+        else
+        {
+            if (!_EquipmentDictionaryVersions.TryGetValue(equipmentDictionaryServiceV2, out equipmentDictionaryVersion))
+            {
+                equipmentDictionaryVersion = GetEquipmentDictionaryVersion(equipmentDictionaryServiceV2);
+                _EquipmentDictionaryVersions.Add(equipmentDictionaryServiceV2, equipmentDictionaryVersion);
+            }
+        }
+        result = new Tuple(equipmentDictionaryServiceV2, equipmentDictionaryName, equipmentDictionaryVersionName, equipmentDictionaryVersion);
         return result;
     }
 
@@ -802,70 +815,6 @@ public class AdaptationTesting : ISMTP
         return result;
     }
 
-    protected Tuple GetEquipmentDictionaryVersionTuple(CellInstanceVersion cellInstanceVersion, string cellInstanceConnectionName, EquipmentTypeVersion equipmentTypeVersion)
-    {
-        Tuple result;
-        string equipmentDictionaryName;
-        string equipmentDictionaryVersionName;
-        EquipmentDictionaryVersion equipmentDictionaryVersion;
-        Dictionary componentModelComponentsIndexes = GetComponentModelComponentsIndexes(cellInstanceVersion, cellInstanceConnectionName);
-        int[] cellInstanceConnectionNameIndexes = GetCellInstanceConnectionNameIndexes(cellInstanceConnectionName, componentModelComponentsIndexes);
-        ComponentsCellComponentCellComponent componentsCellComponentCellComponent = cellInstanceVersion.ComponentModel.Components[cellInstanceConnectionNameIndexes[0]].Children[cellInstanceConnectionNameIndexes[1]];
-        string[] segments = GetEquipmentDictionaryStrings(componentsCellComponentCellComponent.Equipment, equipmentTypeVersion);
-        if (_SkipEquipmentDictionary || segments is null || segments.Length != 2 || string.IsNullOrEmpty(segments[0]) || string.IsNullOrEmpty(segments[1]))
-        {
-            equipmentDictionaryName = string.Empty;
-            equipmentDictionaryVersionName = string.Empty;
-        }
-        else
-        {
-            equipmentDictionaryName = segments[0];
-            equipmentDictionaryVersionName = segments[1];
-        }
-        string equipmentDictionaryServiceV2 = string.Concat("http://", _HostNameAndPort, "/EquipmentDictionaryServiceV2/", equipmentDictionaryName, "/", equipmentDictionaryVersionName, "/configuration");
-        if (string.IsNullOrEmpty(equipmentDictionaryName) || string.IsNullOrEmpty(equipmentDictionaryVersionName))
-            equipmentDictionaryVersion = null;
-        else
-        {
-            if (!_EquipmentDictionaryVersions.TryGetValue(equipmentDictionaryServiceV2, out equipmentDictionaryVersion))
-            {
-                equipmentDictionaryVersion = GetEquipmentDictionaryVersion(equipmentDictionaryServiceV2);
-                _EquipmentDictionaryVersions.Add(equipmentDictionaryServiceV2, equipmentDictionaryVersion);
-            }
-        }
-        result = new Tuple(equipmentDictionaryServiceV2, equipmentDictionaryName, equipmentDictionaryVersionName, equipmentDictionaryVersion);
-        return result;
-    }
-
-    protected Tuple>> GetEquipmentDictionaryIsAlwaysEnabledEventsTuple(Tuple equipmentDictionaryVersionTuple)
-    {
-        Tuple>> result;
-        List> results;
-        List> collection;
-        if (_SkipEquipmentDictionary)
-            results = new List>();
-        else if (string.IsNullOrEmpty(equipmentDictionaryVersionTuple.Item1))
-            throw new Exception();
-        else if (equipmentDictionaryVersionTuple?.Item4?.Events?.Event is null)
-            results = new List>();
-        else if (_EquipmentDictionaryEventDescriptions.TryGetValue(equipmentDictionaryVersionTuple.Item1, out collection))
-            results = collection;
-        else
-        {
-            results = new List>();
-            foreach (EquipmentDictionaryVersionEventsEvent equipmentDictionaryVersionEventsEvent in equipmentDictionaryVersionTuple.Item4.Events.Event)
-            {
-                if (string.IsNullOrEmpty(equipmentDictionaryVersionEventsEvent.Description))
-                    continue;
-                if (!equipmentDictionaryVersionEventsEvent.IsAlwaysEnabled)
-                    continue;
-                results.Add(new Tuple(equipmentDictionaryVersionEventsEvent.Name, equipmentDictionaryVersionEventsEvent.Description));
-            }
-        }
-        result = new Tuple>>(equipmentDictionaryVersionTuple.Item1, results);
-        return result;
-    }
-
     protected Dictionary GetKeyValuePairs(string cellInstanceName, string cellInstanceVersionName, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList modelObjectParameters, string equipmentDictionaryName, List> equipmentDictionaryIsAlwaysEnabledEvents, int edaConnectionPortNumber)
     {
         Dictionary results = new()
@@ -886,108 +835,6 @@ public class AdaptationTesting : ISMTP
         return results;
     }
 
-    public string[] GetCSharpText(string testName)
-    {
-        string[] results;
-        string testResultsDirectory = GetTestResultsDirectory(_HasWaitForProperty);
-        MethodBaseName mbn = GetMethodBaseName(_DummyRoot, _Environment, _HasWaitForProperty, testName, testResultsDirectory);
-        FileInfo fileInfo = new(mbn.FileFullName);
-        if (!string.IsNullOrEmpty(mbn.CellInstanceConnectionName) && !Directory.Exists(fileInfo.DirectoryName))
-            _ = Directory.CreateDirectory(fileInfo.Directory.FullName);
-        Tuple cellInstanceVersionTuple = GetCellInstanceVersionTuple(mbn.CellInstanceName, mbn.CellInstanceVersionName);
-        results = GetCSharpTextB(fileInfo, mbn.CellInstanceName, mbn.CellInstanceVersionName, cellInstanceVersionTuple.Item2);
-        return results;
-    }
-
-    public string[] GetConfiguration(MethodBase methodBase)
-    {
-        string[] results;
-        MethodBaseName mbn = GetMethodBaseName(methodBase);
-        FileInfo fileInfo = new(mbn.FileFullName);
-        if (!string.IsNullOrEmpty(mbn.CellInstanceConnectionName) && !Directory.Exists(fileInfo.DirectoryName))
-            _ = Directory.CreateDirectory(fileInfo.Directory.FullName);
-        Tuple cellInstanceVersionTuple = GetCellInstanceVersionTuple(mbn.CellInstanceName, mbn.CellInstanceVersionName);
-        Tuple fileConnectorConfigurationTuple = GetFileConnectorConfigurationTuple(cellInstanceVersionTuple, mbn.CellInstanceConnectionName);
-        if (string.IsNullOrEmpty(mbn.Ticks) && fileConnectorConfigurationTuple.Item2?.FileScanningIntervalInSeconds is not null)
-        {
-            string fileScanningIntervalInSecondsLine;
-            string versionDirectory = Path.GetDirectoryName(fileInfo.DirectoryName);
-            if (fileConnectorConfigurationTuple.Item2.FileScanningIntervalInSeconds.Value < 0)
-                fileScanningIntervalInSecondsLine = $"-\t{fileConnectorConfigurationTuple.Item2.FileScanningIntervalInSeconds.Value:0000}\t{Path.GetFileName(fileInfo.DirectoryName)}";
-            else
-                fileScanningIntervalInSecondsLine = $"+\t{fileConnectorConfigurationTuple.Item2.FileScanningIntervalInSeconds.Value:+0000}\t{Path.GetFileName(fileInfo.DirectoryName)}";
-            File.AppendAllLines(Path.Combine(versionDirectory, "FileScanningIntervalInSeconds.txt"), new string[] { fileScanningIntervalInSecondsLine });
-        }
-        Tuple equipmentTypeVersionTuple = GetEquipmentTypeVersionTuple(cellInstanceVersionTuple.Item2, mbn.CellInstanceConnectionName);
-        Tuple parameterizedModelObjectDefinitionTypeTuple = GetParameterizedModelObjectDefinitionTypeTuple(equipmentTypeVersionTuple);
-        Tuple> modelObjectParametersTuple = GetModelObjectParameters(equipmentTypeVersionTuple);
-        Tuple equipmentDictionaryVersionTuple = GetEquipmentDictionaryVersionTuple(cellInstanceVersionTuple.Item2, mbn.CellInstanceConnectionName, equipmentTypeVersionTuple.Item4);
-        Tuple>> equipmentDictionaryIsAlwaysEnabledEventsTuple = GetEquipmentDictionaryIsAlwaysEnabledEventsTuple(equipmentDictionaryVersionTuple);
-        Dictionary objects = GetKeyValuePairs(mbn.CellInstanceName, mbn.CellInstanceVersionName, mbn.CellInstanceConnectionName, fileConnectorConfigurationTuple.Item2, equipmentTypeVersionTuple.Item2, parameterizedModelObjectDefinitionTypeTuple.Item2, modelObjectParametersTuple.Item2, equipmentDictionaryVersionTuple.Item2, equipmentDictionaryIsAlwaysEnabledEventsTuple.Item2, cellInstanceVersionTuple.Item2.EdaConnection.PortNumber);
-        string json = JsonSerializer.Serialize(objects, new JsonSerializerOptions { WriteIndented = true });
-        results = new string[] { fileInfo.FullName, json };
-        return results;
-    }
-
-    public IFileRead Get(MethodBase methodBase, string sourceFileLocation, string sourceFileFilter, bool useCyclicalForDescription)
-    {
-        IFileRead result;
-        MethodBaseName mbn = GetMethodBaseName(methodBase);
-        FileInfo fileInfo = new(mbn.FileFullName);
-        Dictionary fileParameter = new();
-        if (!string.IsNullOrEmpty(mbn.CellInstanceConnectionName) && !Directory.Exists(fileInfo.DirectoryName))
-            _ = Directory.CreateDirectory(fileInfo.Directory.FullName);
-        Dictionary> dummyRuns = new();
-        Dictionary> staticRuns = new();
-        Tuple cellInstanceVersionTuple = GetCellInstanceVersionTuple(mbn.CellInstanceName, mbn.CellInstanceVersionName);
-        Tuple fileConnectorConfigurationTuple = GetFileConnectorConfigurationTuple(cellInstanceVersionTuple, mbn.CellInstanceConnectionName);
-        Tuple equipmentTypeVersionTuple = GetEquipmentTypeVersionTuple(cellInstanceVersionTuple.Item2, mbn.CellInstanceConnectionName);
-        Tuple parameterizedModelObjectDefinitionTypeTuple = GetParameterizedModelObjectDefinitionTypeTuple(equipmentTypeVersionTuple);
-        Tuple> modelObjectParametersTuple = GetModelObjectParameters(equipmentTypeVersionTuple);
-        Tuple equipmentDictionaryVersionTuple = GetEquipmentDictionaryVersionTuple(cellInstanceVersionTuple.Item2, mbn.CellInstanceConnectionName, equipmentTypeVersionTuple.Item4);
-        _ = GetEquipmentDictionaryIsAlwaysEnabledEventsTuple(equipmentDictionaryVersionTuple);
-        if (!string.IsNullOrEmpty(sourceFileLocation) && sourceFileLocation != fileConnectorConfigurationTuple.Item2.SourceFileLocation)
-            fileConnectorConfigurationTuple.Item2.SourceFileLocation = sourceFileLocation;
-        if (!string.IsNullOrEmpty(sourceFileFilter) && sourceFileFilter != fileConnectorConfigurationTuple.Item2.SourceFileFilter)
-        {
-            fileConnectorConfigurationTuple.Item2.SourceFileFilter = sourceFileFilter;
-            fileConnectorConfigurationTuple.Item2.SourceFileFilters = sourceFileFilter.Split('|').ToList();
-        }
-        if (_TestContext.FullyQualifiedTestClassName.Contains(nameof(Extract)))
-        {
-            try
-            {
-                if (!string.IsNullOrEmpty(fileConnectorConfigurationTuple.Item2.ErrorTargetFileLocation))
-                {
-                    if (!Directory.Exists(fileConnectorConfigurationTuple.Item2.ErrorTargetFileLocation))
-                        _ = Directory.CreateDirectory(fileConnectorConfigurationTuple.Item2.ErrorTargetFileLocation);
-                }
-                if (!string.IsNullOrEmpty(fileConnectorConfigurationTuple.Item2.SourceFileLocation))
-                {
-                    if (!Directory.Exists(fileConnectorConfigurationTuple.Item2.SourceFileLocation))
-                        _ = Directory.CreateDirectory(fileConnectorConfigurationTuple.Item2.SourceFileLocation);
-                }
-                if (!string.IsNullOrEmpty(fileConnectorConfigurationTuple.Item2.TargetFileLocation))
-                {
-                    if (!Directory.Exists(fileConnectorConfigurationTuple.Item2.TargetFileLocation))
-                        _ = Directory.CreateDirectory(fileConnectorConfigurationTuple.Item2.TargetFileLocation);
-                }
-                if (!string.IsNullOrEmpty(fileConnectorConfigurationTuple.Item2.AlternateTargetFolder))
-                {
-                    if (!Directory.Exists(fileConnectorConfigurationTuple.Item2.AlternateTargetFolder.Split('|')[0]))
-                        _ = Directory.CreateDirectory(fileConnectorConfigurationTuple.Item2.AlternateTargetFolder.Split('|')[0]);
-                }
-            }
-            catch (IOException ex)
-            {
-                if (!ex.Message.Contains("SMB1"))
-                    throw;
-            }
-        }
-        result = FileHandlers.CellInstanceConnectionName.Get(this, fileParameter, mbn.CellInstanceName, mbn.CellInstanceConnectionName, fileConnectorConfigurationTuple.Item2, equipmentTypeVersionTuple.Item2, parameterizedModelObjectDefinitionTypeTuple.Item2, modelObjectParametersTuple.Item2, equipmentDictionaryVersionTuple.Item2, dummyRuns, staticRuns, useCyclicalForDescription, connectionCount: cellInstanceVersionTuple.Item2.EquipmentConnections.Length);
-        return result;
-    }
-
     public string[] GetVariables(MethodBase methodBase, string check, bool validatePDSF = true)
     {
         string[] results;
@@ -1090,80 +937,118 @@ public class AdaptationTesting : ISMTP
         return results;
     }
 
-    internal static ProcessData GetProcessData(string fileFullName)
+    private string[] GetTextFiles(MethodBaseName mbn)
     {
-        ProcessData results;
-        results = ProcessDataStandardFormat.GetProcessData(fileFullName);
-        Assert.IsFalse(string.IsNullOrEmpty(results.Logistics));
-        Assert.IsTrue(results.Columns.Length > 0, "Column check");
-        Assert.IsTrue(results.BodyLines.Length > 0, "Body check");
-        return results;
-    }
-
-    internal static ProcessData GetProcessData(string searchDirectory, string searchPattern)
-    {
-        ProcessData results;
-        if (searchPattern.Length > 3 && !searchPattern.Contains('*') && File.Exists(searchPattern))
-            results = GetProcessData(searchPattern);
+        string[] results;
+        if (string.IsNullOrEmpty(mbn.TextFileDirectory))
+            results = Array.Empty<string>();
+        else if (!Directory.Exists(mbn.TextFileDirectory))
+        {
+            results = Array.Empty<string>();
+            if (!_HasWaitForProperty)
+                _ = Directory.CreateDirectory(mbn.TextFileDirectory);
+            else
+            {
+                string renameDirectory = Path.Combine(Path.GetDirectoryName(mbn.TextFileDirectory), $"_Rename - {Path.GetFileName(mbn.TextFileDirectory)}");
+                _ = Directory.CreateDirectory(renameDirectory);
+                _ = Process.Start("explorer.exe", renameDirectory);
+                File.WriteAllText(Path.Combine(renameDirectory, $"{nameof(FileConnectorConfiguration.SourceFileFilter)}.txt"), string.Empty);
+                File.WriteAllText(Path.Combine(renameDirectory, $"{nameof(FileConnectorConfiguration.SourceFileLocation)}.txt"), string.Empty);
+            }
+        }
         else
         {
-            string[] pdsfFiles;
-            pdsfFiles = Directory.GetFiles(searchDirectory, searchPattern, SearchOption.TopDirectoryOnly);
-            if (pdsfFiles.Length == 0)
-                _ = Process.Start("explorer.exe", searchDirectory);
-            Assert.AreNotEqual(0, pdsfFiles.Length, "GetFiles check");
-            results = GetProcessData(pdsfFiles[0]);
+            results = Directory.GetFiles(mbn.TextFileDirectory, "*.txt", SearchOption.TopDirectoryOnly);
+            if (!string.IsNullOrEmpty(mbn.Ticks) && _HasWaitForProperty && results.Length == 0)
+            {
+                _ = Process.Start("explorer.exe", mbn.TextFileDirectory);
+                File.WriteAllText(Path.Combine(mbn.TextFileDirectory, "_ Why.why"), string.Empty);
+            }
         }
-        Assert.IsFalse(string.IsNullOrEmpty(results.Logistics));
-        Assert.IsTrue(results.Columns.Length > 0, "Column check");
-        Assert.IsTrue(results.BodyLines.Length > 0, "Body check");
         return results;
     }
 
-    internal static ProcessData GetProcessData(IFileRead fileRead, Logistics logistics, Tuple> extractResult, ProcessData processData)
+    public IFileRead Get(MethodBase methodBase, string sourceFileLocation, string sourceFileFilter, bool useCyclicalForDescription)
     {
-        ProcessData results;
-        string text = ProcessDataStandardFormat.GetPDSFText(fileRead, logistics, extractResult.Item3, logisticsText: processData.Logistics);
-        string[] lines = text.Split(new string[] { System.Environment.NewLine }, StringSplitOptions.RemoveEmptyEntries);
-        results = ProcessDataStandardFormat.GetProcessData(logistics.ReportFullPath, lines);
-        Assert.IsFalse(string.IsNullOrEmpty(results.Logistics));
-        Assert.IsTrue(results.Columns.Length > 0, "Column check");
-        Assert.IsTrue(results.BodyLines.Length > 0, "Body check");
-        return results;
-    }
-
-    internal static string[] GetItem2(ProcessData processData, ProcessData processDataNew)
-    {
-        JsonSerializerOptions jsonSerializerOptions = new() { WriteIndented = true };
-        string jsonOld = JsonSerializer.Serialize(processData.Columns, processData.Columns.GetType(), jsonSerializerOptions);
-        string jsonNew = JsonSerializer.Serialize(processDataNew.Columns, processDataNew.Columns.GetType(), jsonSerializerOptions);
-        return new string[] { jsonOld, jsonNew };
-    }
-
-    internal static string[] GetItem3(ProcessData processData, ProcessData processDataNew)
-    {
-        string joinOld = string.Join(System.Environment.NewLine, from l in processData.BodyLines select string.Join('\t', from t in l.Split('\t') where !t.Contains(@"\\") select t));
-        string joinNew = string.Join(System.Environment.NewLine, from l in processDataNew.BodyLines select string.Join('\t', from t in l.Split('\t') where !t.Contains(@"\\") select t));
-        return new string[] { joinOld, joinNew };
-    }
-
-    internal static void UpdatePassDirectory(string searchDirectory)
-    {
-        DateTime dateTime = DateTime.Now;
-        try
-        { Directory.SetLastWriteTime(searchDirectory, dateTime); }
-        catch (Exception) { }
-        string ticksDirectory = Path.GetDirectoryName(searchDirectory);
-        try
-        { Directory.SetLastWriteTime(ticksDirectory, dateTime); }
-        catch (Exception) { }
-        string[] directories = Directory.GetDirectories(searchDirectory, "*", SearchOption.TopDirectoryOnly);
-        foreach (string directory in directories)
+        IFileRead result;
+        MethodBaseName mbn = GetMethodBaseName(methodBase);
+        FileInfo fileInfo = new(mbn.FileFullName);
+        Dictionary fileParameter = new();
+        if (!string.IsNullOrEmpty(mbn.CellInstanceConnectionName) && !Directory.Exists(fileInfo.DirectoryName))
+            _ = Directory.CreateDirectory(fileInfo.Directory.FullName);
+        Dictionary> dummyRuns = new();
+        Dictionary> staticRuns = new();
+        Tuple cellInstanceVersionTuple = GetCellInstanceVersionTuple(mbn.CellInstanceName, mbn.CellInstanceVersionName);
+        Tuple fileConnectorConfigurationTuple = GetFileConnectorConfigurationTuple(cellInstanceVersionTuple, mbn.CellInstanceConnectionName);
+        Tuple equipmentTypeVersionTuple = GetEquipmentTypeVersionTuple(cellInstanceVersionTuple.Item2, mbn.CellInstanceConnectionName);
+        Tuple parameterizedModelObjectDefinitionTypeTuple = GetParameterizedModelObjectDefinitionTypeTuple(equipmentTypeVersionTuple);
+        Tuple> modelObjectParametersTuple = GetModelObjectParameters(equipmentTypeVersionTuple);
+        Tuple equipmentDictionaryVersionTuple = GetEquipmentDictionaryVersionTuple(cellInstanceVersionTuple.Item2, mbn.CellInstanceConnectionName, equipmentTypeVersionTuple.Item4);
+        _ = GetEquipmentDictionaryIsAlwaysEnabledEventsTuple(equipmentDictionaryVersionTuple);
+        if (!string.IsNullOrEmpty(sourceFileLocation) && sourceFileLocation != fileConnectorConfigurationTuple.Item2.SourceFileLocation)
+            fileConnectorConfigurationTuple.Item2.SourceFileLocation = sourceFileLocation;
+        if (!string.IsNullOrEmpty(sourceFileFilter) && sourceFileFilter != fileConnectorConfigurationTuple.Item2.SourceFileFilter)
+        {
+            fileConnectorConfigurationTuple.Item2.SourceFileFilter = sourceFileFilter;
+            fileConnectorConfigurationTuple.Item2.SourceFileFilters = sourceFileFilter.Split('|').ToList();
+        }
+        if (_TestContext.FullyQualifiedTestClassName.Contains(nameof(Extract)))
         {
             try
-            { Directory.SetLastWriteTime(directory, dateTime); }
-            catch (Exception) { }
+            {
+                if (!string.IsNullOrEmpty(fileConnectorConfigurationTuple.Item2.ErrorTargetFileLocation))
+                {
+                    if (!Directory.Exists(fileConnectorConfigurationTuple.Item2.ErrorTargetFileLocation))
+                        _ = Directory.CreateDirectory(fileConnectorConfigurationTuple.Item2.ErrorTargetFileLocation);
+                }
+                if (!string.IsNullOrEmpty(fileConnectorConfigurationTuple.Item2.SourceFileLocation))
+                {
+                    if (!Directory.Exists(fileConnectorConfigurationTuple.Item2.SourceFileLocation))
+                        _ = Directory.CreateDirectory(fileConnectorConfigurationTuple.Item2.SourceFileLocation);
+                }
+                if (!string.IsNullOrEmpty(fileConnectorConfigurationTuple.Item2.TargetFileLocation))
+                {
+                    if (!Directory.Exists(fileConnectorConfigurationTuple.Item2.TargetFileLocation))
+                        _ = Directory.CreateDirectory(fileConnectorConfigurationTuple.Item2.TargetFileLocation);
+                }
+                if (!string.IsNullOrEmpty(fileConnectorConfigurationTuple.Item2.AlternateTargetFolder))
+                {
+                    if (!Directory.Exists(fileConnectorConfigurationTuple.Item2.AlternateTargetFolder.Split('|')[0]))
+                        _ = Directory.CreateDirectory(fileConnectorConfigurationTuple.Item2.AlternateTargetFolder.Split('|')[0]);
+                }
+            }
+            catch (IOException ex)
+            {
+                if (!ex.Message.Contains("SMB1"))
+                    throw;
+            }
         }
+        result = FileHandlers.CellInstanceConnectionName.Get(this, fileParameter, mbn.CellInstanceName, mbn.CellInstanceConnectionName, fileConnectorConfigurationTuple.Item2, equipmentTypeVersionTuple.Item2, parameterizedModelObjectDefinitionTypeTuple.Item2, modelObjectParametersTuple.Item2, equipmentDictionaryVersionTuple.Item2, dummyRuns, staticRuns, useCyclicalForDescription, connectionCount: cellInstanceVersionTuple.Item2.EquipmentConnections.Length);
+        return result;
+    }
+
+    internal static ProcessDataStandardFormat GetProcessDataStandardFormat(string fileFullName)
+    {
+        ProcessDataStandardFormat result;
+        result = ProcessDataStandardFormat.GetProcessDataStandardFormat(fileFullName);
+        Assert.IsTrue(result.Logistics.Count > 0, "Logistics check");
+        Assert.IsFalse(string.IsNullOrEmpty(result.Logistics[0]));
+        Assert.IsTrue(result.Columns.Count > 0, "Column check");
+        Assert.IsTrue(result.Body.Count > 0, "Body check");
+        return result;
+    }
+
+    internal static ProcessDataStandardFormat GetProcessDataStandardFormat(IFileRead fileRead, Logistics logistics, Tuple> extractResult, ProcessDataStandardFormat processDataStandardFormat)
+    {
+        ProcessDataStandardFormat result;
+        string text = ProcessDataStandardFormat.GetPDSFText(fileRead, logistics, extractResult.Item3, logisticsText: processDataStandardFormat.Logistics[0]);
+        string[] lines = text.Split(new string[] { System.Environment.NewLine }, StringSplitOptions.RemoveEmptyEntries);
+        result = ProcessDataStandardFormat.GetProcessDataStandardFormat(logistics.ReportFullPath, lines);
+        Assert.IsTrue(result.Logistics.Count > 0, "Logistics check");
+        Assert.IsFalse(string.IsNullOrEmpty(result.Logistics[0]));
+        Assert.IsTrue(result.Columns.Count > 0, "Column check");
+        Assert.IsTrue(result.Body.Count > 0, "Body check");
+        return result;
     }
 
     internal static string GetFileName(MethodBase methodBase)
@@ -1199,6 +1084,25 @@ public class AdaptationTesting : ISMTP
         return result;
     }
 
+    internal static void UpdatePassDirectory(string searchDirectory)
+    {
+        DateTime dateTime = DateTime.Now;
+        try
+        { Directory.SetLastWriteTime(searchDirectory, dateTime); }
+        catch (Exception) { }
+        string ticksDirectory = Path.GetDirectoryName(searchDirectory);
+        try
+        { Directory.SetLastWriteTime(ticksDirectory, dateTime); }
+        catch (Exception) { }
+        string[] directories = Directory.GetDirectories(searchDirectory, "*", SearchOption.TopDirectoryOnly);
+        foreach (string directory in directories)
+        {
+            try
+            { Directory.SetLastWriteTime(directory, dateTime); }
+            catch (Exception) { }
+        }
+    }
+
     internal static void CompareSaveTSV(string textFileDirectory, string[] join)
     {
         if (join[0] != join[1])
@@ -1219,14 +1123,25 @@ public class AdaptationTesting : ISMTP
         }
     }
 
-    internal static void CompareSave(string textFileDirectory, ProcessData processData, ProcessData processDataNew)
+    internal static ProcessDataStandardFormat GetProcessDataStandardFormat(string searchDirectory, string searchPattern)
     {
-        if (processData.Logistics != processDataNew.Logistics)
+        ProcessDataStandardFormat result;
+        if (searchPattern.Length > 3 && !searchPattern.Contains('*') && File.Exists(searchPattern))
+            result = GetProcessDataStandardFormat(searchPattern);
+        else
         {
-            _ = Process.Start("explorer.exe", textFileDirectory);
-            File.WriteAllText(Path.Combine(textFileDirectory, "0.dat"), processData.Logistics);
-            File.WriteAllText(Path.Combine(textFileDirectory, "1.dat"), processDataNew.Logistics);
+            string[] pdsfFiles;
+            pdsfFiles = Directory.GetFiles(searchDirectory, searchPattern, SearchOption.TopDirectoryOnly);
+            if (pdsfFiles.Length == 0)
+                _ = Process.Start("explorer.exe", searchDirectory);
+            Assert.AreNotEqual(0, pdsfFiles.Length, "GetFiles check");
+            result = GetProcessDataStandardFormat(pdsfFiles[0]);
         }
+        Assert.IsTrue(result.Logistics.Count > 0, "Logistics check");
+        Assert.IsFalse(string.IsNullOrEmpty(result.Logistics[0]));
+        Assert.IsTrue(result.Columns.Count > 0, "Column check");
+        Assert.IsTrue(result.Body.Count > 0, "Body check");
+        return result;
     }
 
     internal static IFileRead GetWriteConfigurationGetFileRead(MethodBase methodBase, string check, AdaptationTesting adaptationTesting)
@@ -1240,6 +1155,21 @@ public class AdaptationTesting : ISMTP
         return result;
     }
 
+    internal static string[] GetItem2(ProcessDataStandardFormat processDataStandardFormat, ProcessDataStandardFormat processDataStandardFormatNew)
+    {
+        JsonSerializerOptions jsonSerializerOptions = new() { WriteIndented = true };
+        string jsonOld = JsonSerializer.Serialize(processDataStandardFormat.Columns, processDataStandardFormat.Columns.GetType(), jsonSerializerOptions);
+        string jsonNew = JsonSerializer.Serialize(processDataStandardFormatNew.Columns, processDataStandardFormatNew.Columns.GetType(), jsonSerializerOptions);
+        return new string[] { jsonOld, jsonNew };
+    }
+
+    internal static string[] GetItem3(ProcessDataStandardFormat processDataStandardFormat, ProcessDataStandardFormat processDataStandardFormatNew)
+    {
+        string joinOld = string.Join(System.Environment.NewLine, from l in processDataStandardFormat.Body select string.Join('\t', from t in l.Split('\t') where !t.Contains(@"\\") select t));
+        string joinNew = string.Join(System.Environment.NewLine, from l in processDataStandardFormatNew.Body select string.Join('\t', from t in l.Split('\t') where !t.Contains(@"\\") select t));
+        return new string[] { joinOld, joinNew };
+    }
+
     internal static string ReExtractCompareUpdatePassDirectory(string[] variables, IFileRead fileRead, Logistics logistics, bool validatePDSF = true)
     {
         string result;
@@ -1252,18 +1182,18 @@ public class AdaptationTesting : ISMTP
             Assert.IsNotNull(extractResult.Item3);
             Assert.IsNotNull(extractResult.Item4);
             if (!validatePDSF)
-                _ = GetProcessData(fileRead, logistics, extractResult, new(string.Empty, Array.Empty(), Array.Empty()));
+                _ = GetProcessDataStandardFormat(fileRead, logistics, extractResult, ProcessDataStandardFormat.GetEmpty());
             else
             {
                 Assert.IsTrue(extractResult.Item3.Length > 0, "extractResult Array Length check!");
-                ProcessData processData = GetProcessData(variables[2], variables[4]);
-                ProcessData processDataNew = GetProcessData(fileRead, logistics, extractResult, processData);
-                CompareSave(variables[5], processData, processDataNew);
-                Assert.AreEqual(processDataNew.Logistics, processData.Logistics, "Item1 check!");
-                string[] json = GetItem2(processData, processDataNew);
+                ProcessDataStandardFormat processDataStandardFormat = GetProcessDataStandardFormat(variables[2], variables[4]);
+                ProcessDataStandardFormat processDataStandardFormatNew = GetProcessDataStandardFormat(fileRead, logistics, extractResult, processDataStandardFormat);
+                CompareSave(variables[5], processDataStandardFormat, processDataStandardFormatNew);
+                Assert.AreEqual(processDataStandardFormatNew.Logistics, processDataStandardFormat.Logistics, "Item1 check!");
+                string[] json = GetItem2(processDataStandardFormat, processDataStandardFormatNew);
                 CompareSaveJSON(variables[5], json);
                 Assert.AreEqual(json[1], json[0], "Item2 check!");
-                string[] join = GetItem3(processData, processDataNew);
+                string[] join = GetItem3(processDataStandardFormat, processDataStandardFormatNew);
                 CompareSaveTSV(variables[5], join);
                 Assert.AreEqual(join[1], join[0], "Item3 (Join) check!");
             }
@@ -1273,6 +1203,89 @@ public class AdaptationTesting : ISMTP
         return result;
     }
 
-}
-// namespace Adaptation._Tests.Helpers { public class AdaptationTesting { } }
-// 2022-08-05 -> AdaptationTesting
\ No newline at end of file
+    internal static void CompareSave(string textFileDirectory, ProcessDataStandardFormat processDataStandardFormat, ProcessDataStandardFormat processDataStandardFormatNew)
+    {
+        if (processDataStandardFormat.Logistics[0] != processDataStandardFormatNew.Logistics[0])
+        {
+            _ = Process.Start("explorer.exe", textFileDirectory);
+            File.WriteAllText(Path.Combine(textFileDirectory, "0.dat"), processDataStandardFormat.Logistics[0]);
+            File.WriteAllText(Path.Combine(textFileDirectory, "1.dat"), processDataStandardFormatNew.Logistics[0]);
+        }
+    }
+
+    protected static Stream ToStream(string @this)
+    {
+        MemoryStream memoryStream = new();
+        StreamWriter streamWriter = new(memoryStream);
+        streamWriter.Write(@this);
+        streamWriter.Flush();
+        memoryStream.Position = 0;
+        return memoryStream;
+    }
+
+    protected Tuple GetParameterizedModelObjectDefinitionTypeTuple(Tuple equipmentTypeVersionTuple)
+    {
+        Tuple<string, string> result;
+        string parameterizedModelObjectDefinitionType;
+        if (_FileConnectorConfigurations.ContainsKey(equipmentTypeVersionTuple.Item1))
+            parameterizedModelObjectDefinitionType = _ParameterizedModelObjectDefinitionTypes[equipmentTypeVersionTuple.Item1];
+        else
+            parameterizedModelObjectDefinitionType = equipmentTypeVersionTuple.Item4.FileHandlerObjectTypes.ParameterizedModelObjectDefinition.Type;
+        result = new Tuple<string, string>(equipmentTypeVersionTuple.Item1, parameterizedModelObjectDefinitionType);
+        return result;
+    }
+
+    protected Tuple> GetModelObjectParameters(Tuple equipmentTypeVersionTuple)
+    {
+        Tuple> result;
+        IList modelObjectParameters;
+        if (_FileConnectorConfigurations.ContainsKey(equipmentTypeVersionTuple.Item1))
+            modelObjectParameters = _ModelObjectParameters[equipmentTypeVersionTuple.Item1];
+        else
+        {
+            string json = JsonSerializer.Serialize(equipmentTypeVersionTuple.Item4, new JsonSerializerOptions { WriteIndented = true });
+            modelObjectParameters = GetModelObjectParameters(json);
+        }
+        result = new Tuple>(equipmentTypeVersionTuple.Item1, modelObjectParameters);
+        return result;
+    }
+
+    protected Tuple>> GetEquipmentDictionaryIsAlwaysEnabledEventsTuple(Tuple equipmentDictionaryVersionTuple)
+    {
+        Tuple<string, List<Tuple<string, string>>> result;
+        List<Tuple<string, string>> results;
+        List<Tuple<string, string>> collection;
+        if (_SkipEquipmentDictionary)
+            results = new List<Tuple<string, string>>();
+        else if (string.IsNullOrEmpty(equipmentDictionaryVersionTuple.Item1))
+            throw new Exception();
+        else if (equipmentDictionaryVersionTuple?.Item4?.Events?.Event is null)
+            results = new List<Tuple<string, string>>();
+        else if (_EquipmentDictionaryEventDescriptions.TryGetValue(equipmentDictionaryVersionTuple.Item1, out collection))
+            results = collection;
+        else
+        {
+            results = new List<Tuple<string, string>>();
+            foreach (EquipmentDictionaryVersionEventsEvent equipmentDictionaryVersionEventsEvent in equipmentDictionaryVersionTuple.Item4.Events.Event)
+            {
+                if (string.IsNullOrEmpty(equipmentDictionaryVersionEventsEvent.Description))
+                    continue;
+                if (!equipmentDictionaryVersionEventsEvent.IsAlwaysEnabled)
+                    continue;
+                results.Add(new Tuple<string, string>(equipmentDictionaryVersionEventsEvent.Name, equipmentDictionaryVersionEventsEvent.Description));
+            }
+        }
+        result = new Tuple<string, List<Tuple<string, string>>>(equipmentDictionaryVersionTuple.Item1, results);
+        return result;
+    }
+
+    public (string i, string v, string c, string n, int p, string f) GetCellInstanceVersionCore(string testName)
+    {
+        (string, string, string, string, int, string) results;
+        MethodBaseName mbn = GetMethodBaseName(_DummyRoot, _Environment, _HasWaitForProperty, testName, @"D:\Tmp\Phares");
+        Tuple cellInstanceVersionTuple = GetCellInstanceVersionTuple(mbn.CellInstanceName, mbn.CellInstanceVersionName);
+        results = new(mbn.CellInstanceName, mbn.CellInstanceVersionName, cellInstanceVersionTuple.Item2.CellCommunicatingRule, cellInstanceVersionTuple.Item2.CellNotCommunicatingRule, cellInstanceVersionTuple.Item2.EdaConnection.PortNumber, cellInstanceVersionTuple.Item2.FrozenBy);
+        return results;
+    }
+
+}
\ No newline at end of file
diff --git a/MESAFIBACKLOG.csproj b/MESAFIBACKLOG.csproj
index b3a0aec..1f49220 100644
--- a/MESAFIBACKLOG.csproj
+++ b/MESAFIBACKLOG.csproj
@@ -157,6 +157,8 @@
     
     
     
+    
+    
     
     
     
@@ -186,8 +188,8 @@
     
     
     
-    
     
+    
     
     
     
diff --git a/Properties/AssemblyInfo.cs b/Properties/AssemblyInfo.cs
index 0e774cf..23e2e00 100644
--- a/Properties/AssemblyInfo.cs
+++ b/Properties/AssemblyInfo.cs
@@ -32,5 +32,5 @@ using System.Runtime.InteropServices;
 // You can specify all the values or you can default the Build and Revision Numbers
 // by using the '*' as shown below:
 // [assembly: AssemblyVersion("1.0.*")]
-[assembly: AssemblyVersion("2.58.0.0")]
-[assembly: AssemblyFileVersion("2.58.0.0")]
+[assembly: AssemblyVersion("2.59.0.0")]
+[assembly: AssemblyFileVersion("2.59.0.0")]