// met08ddupsfs6420/Adaptation/Shared/ProcessDataStandardFormat.cs

using Adaptation.Shared.Methods;
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Text;
using System.Text.Json;
using System.Text.Json.Serialization;
namespace Adaptation.Shared;
#nullable enable
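// Parses, transforms, and re-emits Process Data Standard Format (PDSF) report files:
// tab-delimited text with a fixed-size header block, a quoted column-name line, data rows,
// and a footer (NUM_DATA_ROWS through END_HEADER) that carries LOGISTICS_* metadata.
// Typical use (sketch only, hypothetical file name):
//   ProcessDataStandardFormat pdsf = ProcessDataStandardFormat.GetProcessDataStandardFormat("example.pdsf");
//   Dictionary<string, List<string>> columns = ProcessDataStandardFormat.GetDictionary(pdsf);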
internal class ProcessDataStandardFormat
{
internal enum SearchFor
{
EquipmentIntegration = 1,
BusinessIntegration = 2,
SystemExport = 3,
Archive = 4
}
internal long? Sequence { get; private set; }
internal ReadOnlyCollection<string> Body { get; private set; }
internal ReadOnlyCollection<string> Footer { get; private set; }
internal ReadOnlyCollection<string> Header { get; private set; }
internal ReadOnlyCollection<string> Columns { get; private set; }
internal ProcessDataStandardFormat? InputPDSF { get; private set; }
internal ReadOnlyCollection<string> Logistics { get; private set; }
internal ProcessDataStandardFormat(ReadOnlyCollection<string> body,
ReadOnlyCollection<string> columns,
ReadOnlyCollection<string> footer,
ReadOnlyCollection<string> header,
ProcessDataStandardFormat? inputPDSF,
ReadOnlyCollection<string> logistics,
long? sequence)
{
Body = body;
Columns = columns;
Footer = footer;
Header = header;
InputPDSF = inputPDSF;
Logistics = logistics;
Sequence = sequence;
}
internal static string EquipmentIntegration(bool addSpaces = true, char separator = ' ') =>
GetString(SearchFor.EquipmentIntegration, addSpaces, separator);
internal static string BusinessIntegration(bool addSpaces = true, char separator = ' ') =>
GetString(SearchFor.BusinessIntegration, addSpaces, separator);
internal static string SystemExport(bool addSpaces = true, char separator = ' ') =>
GetString(SearchFor.SystemExport, addSpaces, separator);
internal static string Archive(bool addSpaces = true, char separator = ' ') =>
GetString(SearchFor.Archive, addSpaces, separator);
internal static ProcessDataStandardFormat GetEmpty() =>
new(new(Array.Empty<string>()), new(Array.Empty<string>()), new(Array.Empty<string>()), new(Array.Empty<string>()), null, new(new string[] { "LOGISTICS_1" }), null);
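// Renders each run of lines sharing the same tab-separated column count as a fixed-width block:
// a numeric header row followed by the values, every column padded to the widest entry in that block.
// Example (hypothetical file name): List<string> padded = ProcessDataStandardFormat.PDSFToFixedWidth("example.pdsf");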
internal static List<string> PDSFToFixedWidth(string reportFullPath)
{
List<string> results = new();
if (!File.Exists(reportFullPath))
throw new Exception();
int[] group;
string line;
int startsAt = 0;
string[] segments;
int? currentGroup = null;
char inputSeparator = '\t';
char outputSeparator = '\t';
List<int> vs = new();
List<int[]> groups = new();
string[] lines = File.ReadAllLines(reportFullPath);
StringBuilder stringBuilder = new();
for (int i = 0; i < lines.Length; i++)
{
if (string.IsNullOrEmpty(lines[i]))
continue;
segments = lines[i].Split(inputSeparator);
currentGroup ??= segments.Length;
if (segments.Length != currentGroup)
{
currentGroup = segments.Length;
groups.Add(new int[] { startsAt, i - 1 });
startsAt = i;
}
}
if (startsAt == lines.Length - 1 && lines[0].Split(inputSeparator).Length != currentGroup)
groups.Add(new int[] { lines.Length - 1, lines.Length - 1 });
for (int g = 0; g < groups.Count; g++)
{
vs.Clear();
group = groups[g];
line = lines[group[0]];
segments = line.Split(inputSeparator);
for (int s = 0; s < segments.Length; s++)
vs.Add(segments[s].Length);
for (int i = group[0]; i <= group[1]; i++)
{
line = lines[i];
segments = line.Split(inputSeparator);
for (int s = 0; s < segments.Length; s++)
{
if (vs[s] < segments[s].Length)
vs[s] = segments[s].Length;
}
}
_ = stringBuilder.Clear();
for (int s = 0; s < segments.Length; s++)
_ = stringBuilder.Append((s + 1).ToString().PadLeft(vs[s], ' ')).Append(outputSeparator);
_ = stringBuilder.Remove(stringBuilder.Length - 1, 1);
results.Add(stringBuilder.ToString());
for (int i = group[0]; i <= group[1]; i++)
{
line = lines[i];
_ = stringBuilder.Clear();
segments = line.Split(inputSeparator);
for (int s = 0; s < segments.Length; s++)
_ = stringBuilder.Append(segments[s].PadLeft(vs[s], ' ')).Append(outputSeparator);
_ = stringBuilder.Remove(stringBuilder.Length - 1, 1);
results.Add(stringBuilder.ToString());
}
results.Add(string.Empty);
}
return results;
}
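// Parses a PDSF file (or pre-read lines) into header, column names, body, footer, and
// LOGISTICS_* lines; duplicate column names receive a numeric suffix.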
internal static ProcessDataStandardFormat GetProcessDataStandardFormat(string reportFullPath, string[]? lines = null, int columnsLine = 6)
{
ProcessDataStandardFormat result;
string segment;
string[] segments;
bool addToFooter = false;
List<string> body = new();
List<string> header = new();
List<string> footer = new();
List<string> columns = new();
ReadOnlyCollection<string> logistics;
lines ??= File.ReadAllLines(reportFullPath);
if (lines.Length < columnsLine + 1)
segments = Array.Empty<string>();
else
{
segments = lines[columnsLine].Trim().Split('\t');
for (int i = 0; i < columnsLine; i++)
header.Add(lines[i]);
}
for (int c = 0; c < segments.Length; c++)
{
segment = segments[c].Substring(1, segments[c].Length - 2);
if (!columns.Contains(segment))
columns.Add(segment);
else
{
string uniqueSegment;
for (short i = 1; i < short.MaxValue; i++)
{
// Build each candidate from the base name so suffixes don't accumulate (e.g. "Name_1_2").
uniqueSegment = string.Concat(segment, "_", i);
if (!columns.Contains(uniqueSegment))
{
columns.Add(uniqueSegment);
break;
}
}
}
}
for (int r = columnsLine + 1; r < lines.Length; r++)
{
if (lines[r].StartsWith("NUM_DATA_ROWS"))
addToFooter = true;
if (!addToFooter)
body.Add(lines[r]);
else
{
footer.Add(lines[r]);
if (lines[r].StartsWith("END_HEADER"))
break;
}
}
string? linesOne = lines.Length > 1 && body.Count == 0 && columns.Count == 0 ? lines[1] : null; // lines.Length > 1 guards the lines[1] access
logistics = GetLogistics(footer, linesOne: linesOne);
result = new(body: body.AsReadOnly(),
columns: columns.AsReadOnly(),
footer: footer.AsReadOnly(),
header: header.AsReadOnly(),
inputPDSF: null,
logistics: logistics,
sequence: null);
return result;
}
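// Collects the LOGISTICS_* lines from the footer (starting at LOGISTICS_1, stopping at END_HEADER);
// falls back to linesOne when the footer holds none.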
private static ReadOnlyCollection<string> GetLogistics(List<string> footer, string? linesOne)
{
List<string> results = new();
bool foundLogistics1 = false;
foreach (string line in footer)
{
if (line.StartsWith("END_HEADER"))
break;
if (line.StartsWith("LOGISTICS_1"))
foundLogistics1 = true;
if (foundLogistics1 && line.StartsWith("LOGISTICS_"))
results.Add(line);
}
if (!string.IsNullOrEmpty(linesOne) && results.Count == 0)
results.Add(linesOne);
return results.AsReadOnly();
}
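// Parses a PDSF file and remaps its columns through the supplied mapping; returns null when the
// file does not parse into the expected shape.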
internal static ProcessDataStandardFormat? GetProcessDataStandardFormat(string reportFullPath, ProcessDataStandardFormatMapping pdsfMapping)
{
ProcessDataStandardFormat? result;
const int columnsLine = 6;
FileInfo fileInfo = new(reportFullPath);
ProcessDataStandardFormat processDataStandardFormat = GetProcessDataStandardFormat(fileInfo.LastWriteTime, pdsfMapping.NewColumnNames.Count, columnsLine, fileInfo.FullName, lines: null);
JsonElement[]? jsonElements = GetArray(pdsfMapping.NewColumnNames.Count, processDataStandardFormat, lookForNumbers: false);
if (jsonElements is null || jsonElements.Length == 0 || pdsfMapping.OldColumnNames.Count != pdsfMapping.ColumnIndices.Count)
result = null;
else
{
result = GetProcessDataStandardFormat(pdsfMapping, jsonElements, processDataStandardFormat);
if (result.Sequence is null || result.Columns.Count == 0 || result.Body.Count == 0 || result.Logistics.Count == 0)
result = null;
}
return result;
}
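// Same parse as the public overload, but validates the column count against expectedColumns and
// derives Sequence from the SEQUENCE= token in LOGISTICS_1, falling back to the file's last write time.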
private static ProcessDataStandardFormat GetProcessDataStandardFormat(DateTime lastWriteTime, int expectedColumns, int columnsLine, string path, string[]? lines)
{
ProcessDataStandardFormat result;
long sequence;
string[] segments;
bool addToFooter = false;
List<string> body = new();
List<string> header = new();
List<string> footer = new();
ReadOnlyCollection<string> logistics;
lines ??= File.ReadAllLines(path);
if (lines.Length <= columnsLine)
segments = Array.Empty<string>();
else
{
segments = lines[columnsLine].Split('\t');
if (segments.Length != expectedColumns)
segments = Array.Empty<string>();
for (int i = 0; i < columnsLine; i++)
header.Add(lines[i]);
}
string[] columns = segments.Select(l => l.Trim('"')).ToArray();
for (int r = columnsLine + 1; r < lines.Length; r++)
{
if (lines[r].StartsWith("NUM_DATA_ROWS"))
addToFooter = true;
if (!addToFooter)
body.Add(lines[r]);
else
{
footer.Add(lines[r]);
if (lines[r].StartsWith("END_HEADER"))
break;
}
}
logistics = GetLogistics(footer, linesOne: null);
if (logistics.Count == 0)
sequence = lastWriteTime.Ticks;
else
{
segments = logistics[0].Split(new string[] { "SEQUENCE=" }, StringSplitOptions.None);
sequence = segments.Length < 2 || !long.TryParse(segments[1].Split(';')[0], out long s) ? lastWriteTime.Ticks : s;
}
result = new(body: body.AsReadOnly(),
columns: new(columns),
footer: footer.AsReadOnly(),
header: header.AsReadOnly(),
inputPDSF: null,
logistics: logistics,
sequence: sequence);
return result;
}
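// Converts body rows into JsonElement objects keyed by column name; rows whose segment count does
// not match expectedColumns are skipped. With lookForNumbers, digit-only values become JSON numbers
// and empty values become null.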
private static JsonElement[]? GetArray(int expectedColumns, ProcessDataStandardFormat processDataStandardFormat, bool lookForNumbers)
{
JsonElement[]? results;
if (processDataStandardFormat.Body.Count == 0 || !processDataStandardFormat.Body[0].Contains('\t'))
results = JsonSerializer.Deserialize("[]", JsonElementCollectionSourceGenerationContext.Default.JsonElementArray) ?? throw new Exception();
else
{
string value;
string[] segments;
List<string> lines = new();
StringBuilder stringBuilder = new();
foreach (string bodyLine in processDataStandardFormat.Body)
{
_ = stringBuilder.Clear();
_ = stringBuilder.Append('{');
segments = bodyLine.Split('\t');
if (segments.Length != expectedColumns)
continue;
if (!lookForNumbers)
{
for (int c = 0; c < segments.Length; c++)
{
value = segments[c].Replace("\\", "\\\\").Replace("\"", "\\\""); // escape backslashes before quotes to keep the generated JSON valid
_ = stringBuilder.Append('"').Append(processDataStandardFormat.Columns[c]).Append("\":\"").Append(value).Append("\",");
}
}
else
{
for (int c = 0; c < segments.Length; c++)
{
value = segments[c].Replace("\\", "\\\\").Replace("\"", "\\\""); // escape backslashes before quotes to keep the generated JSON valid
if (string.IsNullOrEmpty(value))
_ = stringBuilder.Append('"').Append(processDataStandardFormat.Columns[c]).Append("\":null,");
else if (value.All(char.IsDigit))
_ = stringBuilder.Append('"').Append(processDataStandardFormat.Columns[c]).Append("\":").Append(value).Append(',');
else
_ = stringBuilder.Append('"').Append(processDataStandardFormat.Columns[c]).Append("\":\"").Append(value).Append("\",");
}
}
_ = stringBuilder.Remove(stringBuilder.Length - 1, 1);
_ = stringBuilder.AppendLine("}");
lines.Add(stringBuilder.ToString());
}
string json = $"[{string.Join(",", lines)}]";
results = JsonSerializer.Deserialize(json, JsonElementCollectionSourceGenerationContext.Default.JsonElementArray);
}
return results;
}
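// Rebuilds the body in the old column order by picking each mapped property from the parsed rows;
// a column index of -1 falls back to the old column name itself.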
private static ProcessDataStandardFormat GetProcessDataStandardFormat(ProcessDataStandardFormatMapping processDataStandardFormatMapping, JsonElement[] jsonElements, ProcessDataStandardFormat processDataStandardFormat)
{
ProcessDataStandardFormat result;
int column;
string value;
JsonProperty jsonProperty;
List<string> values = new();
List<string> results = new();
JsonProperty[] jsonProperties;
List<string> unknownColumns = new();
for (int i = 0; i < jsonElements.Length; i++)
{
values.Clear();
if (jsonElements[i].ValueKind != JsonValueKind.Object)
{
unknownColumns.Add(string.Empty);
break;
}
jsonProperties = jsonElements[i].EnumerateObject().ToArray();
if (jsonProperties.Length != processDataStandardFormatMapping.NewColumnNames.Count)
continue;
for (int c = 0; c < processDataStandardFormatMapping.ColumnIndices.Count; c++)
{
column = processDataStandardFormatMapping.ColumnIndices[c];
if (column == -1)
value = processDataStandardFormatMapping.OldColumnNames[c];
else
{
jsonProperty = jsonProperties[column];
value = jsonProperty.Value.ToString();
}
values.Add(value);
}
results.Add(string.Join("\t", values));
}
result = new(body: new(results),
columns: processDataStandardFormatMapping.OldColumnNames,
footer: processDataStandardFormat.Footer,
header: processDataStandardFormat.Header,
inputPDSF: processDataStandardFormat,
logistics: processDataStandardFormat.Logistics,
sequence: processDataStandardFormat.Sequence);
return result;
}
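// Writes the PDSF to disk: header tags, quoted column line, body, footer, and the LOGISTICS_* lines
// (optionally stamped with the single WS result's HeaderId/SubgroupId). When InputPDSF is attached,
// its content is appended as a pipe-delimited, Markdown-style table.
// Example (hypothetical path): ProcessDataStandardFormat.Write("out.pdsf", pdsf, wsResults: null);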
internal static void Write(string path, ProcessDataStandardFormat processDataStandardFormat, List<Metrology.WS.Results>? wsResults)
{
List<string> results = new();
if (processDataStandardFormat.Sequence is null)
throw new NullReferenceException(nameof(processDataStandardFormat.Sequence));
string endOffset = "E#######T";
string dataOffset = "D#######T";
string headerOffset = "H#######T";
string format = "MM/dd/yyyy HH:mm:ss";
string startTime = new DateTime(processDataStandardFormat.Sequence.Value).ToString(format);
results.Add("HEADER_TAG\tHEADER_VALUE");
results.Add("FORMAT\t2.00");
results.Add("NUMBER_PASSES\t0001");
results.Add($"HEADER_OFFSET\t{headerOffset}");
results.Add($"DATA_OFFSET\t{dataOffset}");
results.Add($"END_OFFSET\t{endOffset}");
results.Add($"\"{string.Join("\"\t\"", processDataStandardFormat.Columns)}\"");
results.AddRange(processDataStandardFormat.Body);
results.Add($"NUM_DATA_ROWS\t{processDataStandardFormat.Body.Count.ToString().PadLeft(9, '0')}");
results.Add($"NUM_DATA_COLUMNS\t{processDataStandardFormat.Columns.Count.ToString().PadLeft(9, '0')}");
results.Add("DELIMITER\t;");
results.Add($"START_TIME_FORMAT\t{format}");
results.Add($"START_TIME\t{startTime}");
results.Add("LOGISTICS_COLUMN\tA_LOGISTICS");
results.Add("LOGISTICS_COLUMN\tB_LOGISTICS");
if (wsResults is null || wsResults.Count != 1)
results.AddRange(processDataStandardFormat.Logistics);
else
{
string[] segments;
foreach (string logistics in processDataStandardFormat.Logistics)
{
segments = logistics.Split(new string[] { "\t" }, StringSplitOptions.None);
if (segments.Length != 2 || string.IsNullOrEmpty(segments[1]))
results.Add(logistics);
else
results.Add($"{segments[0]}\t{segments[1][0]}_HeaderId={wsResults[0].HeaderId};{segments[1][0]}_SubgroupId={wsResults[0].SubgroupId};{segments[1]}");
}
}
results.Add("EOF");
if (processDataStandardFormat.InputPDSF is not null)
{
List<char> hyphens = new();
results.AddRange(processDataStandardFormat.InputPDSF.Header.Select(l => l.Replace('\t', '|')));
results.Add(string.Empty);
results.Add($"|{string.Join("|", processDataStandardFormat.InputPDSF.Columns)}|");
for (int i = 0; i < processDataStandardFormat.InputPDSF.Columns.Count; i++)
hyphens.Add('-');
results.Add($"|{string.Join("|", hyphens)}|");
results.AddRange(processDataStandardFormat.InputPDSF.Body.Select(l => l.Replace('\t', '|')));
results.Add(string.Empty);
results.AddRange(processDataStandardFormat.InputPDSF.Footer.Select(l => l.Replace('\t', '|')));
}
File.WriteAllText(path, string.Join(Environment.NewLine, results));
}
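// Pivots the body into one List<string> per column name; segment 0 of each row is never collected,
// so the first column's list stays empty.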
internal static Dictionary<string, List<string>> GetDictionary(ProcessDataStandardFormat processDataStandardFormat)
{
Dictionary<string, List<string>> results = new();
string[] segments;
foreach (string column in processDataStandardFormat.Columns)
results.Add(column, new List<string>());
foreach (string bodyLine in processDataStandardFormat.Body)
{
segments = bodyLine.Split('\t');
for (int c = 1; c < segments.Length; c++)
{
if (c >= processDataStandardFormat.Columns.Count)
continue;
results[processDataStandardFormat.Columns[c]].Add(segments[c]);
}
}
return results;
}
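// Same JSON conversion as the private GetArray above, but without a column-count check and skipping
// the first segment of every row.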
internal static JsonElement[] GetArray(ProcessDataStandardFormat processDataStandardFormat, bool lookForNumbers = false)
{
JsonElement[] results;
if (processDataStandardFormat.Body.Count == 0 || !processDataStandardFormat.Body[0].Contains('\t'))
results = JsonSerializer.Deserialize("[]", JsonElementCollectionSourceGenerationContext.Default.JsonElementArray) ?? throw new Exception();
else
{
string value;
string[] segments;
List<string> lines = new();
StringBuilder stringBuilder = new();
foreach (string bodyLine in processDataStandardFormat.Body)
{
_ = stringBuilder.Clear();
_ = stringBuilder.Append('{');
segments = bodyLine.Trim().Split('\t');
if (!lookForNumbers)
{
for (int c = 1; c < segments.Length; c++)
{
value = segments[c].Replace("\\", "\\\\").Replace("\"", "\\\""); // escape backslashes before quotes to keep the generated JSON valid
_ = stringBuilder.Append('"').Append(processDataStandardFormat.Columns[c]).Append("\":\"").Append(value).Append("\",");
}
}
else
{
for (int c = 1; c < segments.Length; c++)
{
value = segments[c].Replace("\\", "\\\\").Replace("\"", "\\\""); // escape backslashes before quotes to keep the generated JSON valid
if (string.IsNullOrEmpty(value))
_ = stringBuilder.Append('"').Append(processDataStandardFormat.Columns[c]).Append("\":null,");
else if (value.All(char.IsDigit))
_ = stringBuilder.Append('"').Append(processDataStandardFormat.Columns[c]).Append("\":").Append(value).Append(',');
else
_ = stringBuilder.Append('"').Append(processDataStandardFormat.Columns[c]).Append("\":\"").Append(value).Append("\",");
}
}
_ = stringBuilder.Remove(stringBuilder.Length - 1, 1);
_ = stringBuilder.AppendLine("}");
lines.Add(stringBuilder.ToString());
}
string json = $"[{string.Join(",", lines)}]";
results = JsonSerializer.Deserialize(json, JsonElementCollectionSourceGenerationContext.Default.JsonElementArray) ?? throw new Exception();
}
return results;
}
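// Builds complete PDSF text from JSON rows: header tags, a column line prefixed with
// Time/A_LOGISTICS/B_LOGISTICS, one data row per element, then the footer; the offset placeholders
// are patched with real character offsets before returning.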
internal static string GetPDSFText(IFileRead fileRead, Logistics logistics, JsonElement[] jsonElements, string logisticsText)
{
string result;
if (jsonElements.Length == 0)
result = string.Empty;
else
{
int columns = 0;
List<string> lines;
string endOffset = "E#######T";
string dataOffset = "D#######T";
string headerOffset = "H#######T";
string format = "MM/dd/yyyy HH:mm:ss";
StringBuilder stringBuilder = new();
lines = new string[] { "HEADER_TAG\tHEADER_VALUE", "FORMAT\t2.00", "NUMBER_PASSES\t0001", string.Concat("HEADER_OFFSET\t", headerOffset), string.Concat("DATA_OFFSET\t", dataOffset), string.Concat("END_OFFSET\t", endOffset) }.ToList();
_ = stringBuilder.Append("\"Time\"").Append('\t');
_ = stringBuilder.Append("\"A_LOGISTICS\"").Append('\t');
_ = stringBuilder.Append("\"B_LOGISTICS\"").Append('\t');
// Only the first element's property names are needed for the column-name line.
foreach (JsonProperty jsonProperty in jsonElements[0].EnumerateObject())
{
columns += 1;
_ = stringBuilder.Append('"').Append(jsonProperty.Name).Append('"').Append('\t');
}
_ = stringBuilder.Remove(stringBuilder.Length - 1, 1);
lines.Add(stringBuilder.ToString());
for (int i = 0; i < jsonElements.Length; i++)
{
_ = stringBuilder.Clear();
_ = stringBuilder.Append("0.1").Append('\t');
_ = stringBuilder.Append('1').Append('\t');
_ = stringBuilder.Append('2').Append('\t');
foreach (JsonProperty jsonProperty in jsonElements[i].EnumerateObject())
_ = stringBuilder.Append(jsonProperty.Value).Append('\t');
_ = stringBuilder.Remove(stringBuilder.Length - 1, 1);
lines.Add(stringBuilder.ToString());
}
lines.Add(string.Concat("NUM_DATA_ROWS ", jsonElements.Length.ToString().PadLeft(9, '0')));
lines.Add(string.Concat("NUM_DATA_COLUMNS ", (columns + 3).ToString().PadLeft(9, '0')));
lines.Add("DELIMITER ;");
lines.Add(string.Concat("START_TIME_FORMAT ", format));
lines.Add(string.Concat("START_TIME ", logistics.DateTimeFromSequence.ToString(format))); //12/26/2019 15:22:44
lines.Add(string.Concat("LOGISTICS_COLUMN", '\t', "A_LOGISTICS"));
lines.Add(string.Concat("LOGISTICS_COLUMN", '\t', "B_LOGISTICS"));
if (!string.IsNullOrEmpty(logisticsText))
lines.Add(logisticsText);
else
{
lines.Add(string.Concat("LOGISTICS_1", '\t', "A_CHAMBER=;A_INFO=", fileRead.EventName, ";A_INFO2=", fileRead.EquipmentType, ";A_JOBID=", fileRead.CellInstanceName, ";A_MES_ENTITY=", fileRead.MesEntity, ";A_MID=", logistics.MID, ";A_NULL_DATA=", fileRead.NullData, ";A_PPID=NO_PPID;A_PROCESS_JOBID=", logistics.ProcessJobID, ";A_PRODUCT=;A_SEQUENCE=", logistics.Sequence, ";A_WAFER_ID=;"));
lines.Add(string.Concat("LOGISTICS_2", '\t', "B_CHAMBER=;B_INFO=", fileRead.EventName, ";B_INFO2=", fileRead.EquipmentType, ";B_JOBID=", fileRead.CellInstanceName, ";B_MES_ENTITY=", fileRead.MesEntity, ";B_MID=", logistics.MID, ";B_NULL_DATA=", fileRead.NullData, ";B_PPID=NO_PPID;B_PROCESS_JOBID=", logistics.ProcessJobID, ";B_PRODUCT=;B_SEQUENCE=", logistics.Sequence, ";B_WAFER_ID=;"));
lines.Add("END_HEADER");
}
_ = stringBuilder.Clear();
foreach (string line in lines)
_ = stringBuilder.AppendLine(line);
result = stringBuilder.ToString();
result = result.Replace(headerOffset, result.IndexOf("NUM_DATA_ROWS").ToString().PadLeft(9, '0')).
Replace(dataOffset, result.IndexOf('"').ToString().PadLeft(9, '0')).
Replace(endOffset, result.Length.ToString().PadLeft(9, '0'));
}
return result;
}
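// Splits the column dictionary into one dictionary per Test value found in the Test column, using
// the minimum and maximum row index of each test to slice the other columns.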
internal static Tuple<string, Dictionary<Test, Dictionary<string, List<string>>>> GetTestDictionary(ProcessDataStandardFormat processDataStandardFormat)
{
Dictionary<Test, Dictionary<string, List<string>>> results = new();
List<string>? collection;
string testColumn = nameof(Test);
Dictionary<string, List<string>> keyValuePairs = GetDictionary(processDataStandardFormat);
if (!keyValuePairs.TryGetValue(testColumn, out collection))
throw new Exception();
int min;
int max;
Test testKey;
List<string> vs;
string columnKey;
Dictionary<Test, List<int>> tests = new();
for (int i = 0; i < collection.Count; i++)
{
if (Enum.TryParse(collection[i], out Test test))
{
if (!results.ContainsKey(test))
{
tests.Add(test, new List<int>());
results.Add(test, new Dictionary<string, List<string>>());
}
tests[test].Add(i);
}
}
foreach (KeyValuePair<Test, List<int>> testKeyValuePair in tests)
{
testKey = testKeyValuePair.Key;
min = testKeyValuePair.Value.Min();
max = testKeyValuePair.Value.Max() + 1;
foreach (KeyValuePair<string, List<string>> keyValuePair in keyValuePairs)
results[testKey].Add(keyValuePair.Key, new List<string>());
foreach (KeyValuePair<string, List<string>> keyValuePair in keyValuePairs)
{
vs = keyValuePair.Value;
columnKey = keyValuePair.Key;
for (int i = min; i < max; i++)
{
if (vs.Count > i)
results[testKey][columnKey].Add(vs[i]);
else
results[testKey][columnKey].Add(string.Empty);
}
}
}
return new Tuple<string, Dictionary<Test, Dictionary<string, List<string>>>>(processDataStandardFormat.Logistics[0], results);
}
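// Renders keyValuePairs as report lines prefixed with '!', formatting Date/Time either from the
// logistics sequence or by re-parsing stored values with the supplied format; paired parameter
// names are emitted on their own output lines as name;value appended to the current prefix.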
internal static string GetLines(Logistics logistics, Properties.IScopeInfo scopeInfo, List<string> names, Dictionary<string, List<string>> keyValuePairs, string dateFormat, string timeFormat, List<string> pairedParameterNames, bool useDateTimeFromSequence = true, string format = "", List<string>? ignoreParameterNames = null)
{
StringBuilder result = new();
ignoreParameterNames ??= new List<string>();
if (useDateTimeFromSequence && !string.IsNullOrEmpty(format))
throw new Exception();
else if (!useDateTimeFromSequence && string.IsNullOrEmpty(format))
throw new Exception();
string? nullData;
const string columnDate = "Date";
const string columnTime = "Time";
const string firstDuplicate = "_1";
_ = result.AppendLine(scopeInfo.Header);
StringBuilder line = new();
if (logistics.NullData is null)
nullData = string.Empty;
else
nullData = logistics.NullData.ToString();
int count = (from l in keyValuePairs select l.Value.Count).Min();
for (int r = 0; r < count; r++)
{
_ = line.Clear();
_ = line.Append('!');
foreach (KeyValuePair<string, List<string>> keyValuePair in keyValuePairs)
{
if (!names.Contains(keyValuePair.Key))
continue;
if (ignoreParameterNames.Contains(keyValuePair.Key))
continue;
if (pairedParameterNames.Contains(keyValuePair.Key))
{
if (string.IsNullOrEmpty(keyValuePair.Value[r]) || keyValuePair.Value[r] == nullData)
continue;
else
_ = result.Append(line).Append(keyValuePair.Key).Append(';').AppendLine(keyValuePair.Value[r]);
}
else
{
if (useDateTimeFromSequence && keyValuePair.Key == columnDate)
_ = line.Append(logistics.DateTimeFromSequence.ToString(dateFormat));
else if (useDateTimeFromSequence && keyValuePair.Key == columnTime)
_ = line.Append(logistics.DateTimeFromSequence.ToString(timeFormat));
else if (!useDateTimeFromSequence && keyValuePair.Key == columnDate && keyValuePair.Value[r].Length == format.Length)
_ = line.Append(DateTime.ParseExact(keyValuePair.Value[r], format, CultureInfo.InvariantCulture).ToString(dateFormat));
else if (!useDateTimeFromSequence && keyValuePair.Key == columnTime && keyValuePairs.ContainsKey(string.Concat(keyValuePair.Key, firstDuplicate)) && keyValuePairs[string.Concat(keyValuePair.Key, firstDuplicate)][r].Length == format.Length)
_ = line.Append(DateTime.ParseExact(keyValuePairs[string.Concat(keyValuePair.Key, firstDuplicate)][r], format, CultureInfo.InvariantCulture).ToString(timeFormat));
else if (string.IsNullOrEmpty(keyValuePair.Value[r]) || keyValuePair.Value[r] == nullData)
_ = line.Append(nullData);
else
_ = line.Append(keyValuePair.Value[r]);
_ = line.Append(';');
}
}
if (pairedParameterNames.Count == 0)
{
_ = line.Remove(line.Length - 1, 1);
_ = result.AppendLine(line.ToString());
}
}
return result.ToString();
}
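// Formats a SearchFor value as a zero-padded number plus its name, optionally inserting the
// separator before the "In"/"Ex" in the enum name (e.g. "01 Equipment Integration").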
private static string GetString(SearchFor searchFor, bool addSpaces, char separator = ' ')
{
if (!addSpaces)
return string.Concat(((int)searchFor).ToString().PadLeft(2, '0'), searchFor);
else
return string.Concat(((int)searchFor).ToString().PadLeft(2, '0'), separator, searchFor.ToString().Replace("In", string.Concat(separator, "In")).Replace("Ex", string.Concat(separator, "Ex")));
}
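// Returns the index of the property whose name exactly matches propertyName, or null when absent.
// (The second pass repeats the same exact-match comparison, so it cannot find anything the first pass missed.)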
private static int? TryGetPropertyIndex(JsonProperty[] jsonProperties, string propertyName)
{
int? result = null;
for (int i = 0; i < jsonProperties.Length; i++)
{
if (jsonProperties[i].Name != propertyName)
continue;
result = i;
break;
}
if (result is null)
{
for (int i = 0; i < jsonProperties.Length; i++)
{
if (jsonProperties[i].Name[0] != propertyName[0])
continue;
if (jsonProperties[i].Name.Length != propertyName.Length)
continue;
if (jsonProperties[i].Name != propertyName)
continue;
result = i;
break;
}
}
return result;
}
}
[JsonSourceGenerationOptions(WriteIndented = true)]
[JsonSerializable(typeof(JsonElement[]))]
internal partial class JsonElementCollectionSourceGenerationContext : JsonSerializerContext
{
}