Total User Story Points by Site - Iteration - Assigned To (Initials)

Mike Phares 2025-05-08 21:00:30 -07:00
parent 736a39245f
commit 7df7d5f4d6
8 changed files with 187 additions and 165 deletions

View File

@@ -30,6 +30,9 @@ public class ProcessData : IProcessData
Tuple<string, Test[], JsonElement[], List<FileInfo>> IProcessData.GetResults(IFileRead fileRead, Logistics logistics, List<FileInfo> fileInfoCollection) =>
new(logistics.Logistics1[0], Array.Empty<Test>(), Array.Empty<JsonElement>(), fileInfoCollection);
private static string GetClosed(WorkItem workItem) =>
workItem.State != "Closed" ? "[ ]" : "[x]";
public ProcessData(IFileRead fileRead, Logistics logistics, string targetFileLocation, string url, ReadOnlyCollection<string> workItemTypes, List<FileInfo> fileInfoCollection)
{
_Details = new List<object>();
@@ -90,6 +93,7 @@ public class ProcessData : IProcessData
ReadOnlyCollection<Record> results;
ReadOnlyDictionary<int, Record> keyValuePairs = GetWorkItems(workItems, keepRelations);
ReadOnlyCollection<Record> records = new(keyValuePairs.Values.ToArray());
ReadOnlyCollection<string> userStoryWorkItemTypes = new(new string[] { "User Story" });
ReadOnlyCollection<string> bugFeatureWorkItemTypes = new(new string[] { "Bug", "Feature" });
ReadOnlyCollection<string> bugUserStoryWorkItemTypes = new(new string[] { "Bug", "User Story" });
messages.AddRange(WriteFile(fileRead, destinationDirectory, fileInfoCollection, records, "records"));
@@ -105,6 +109,15 @@ public class ProcessData : IProcessData
WriteFiles(fileRead, destinationDirectory, fileInfoCollection, new(lines), results, workItemType);
_Details.Add(results);
}
{
lines.Clear();
string workItemType = "User Story";
lines.Add($"# Total User Story Points by Site - Iteration - Assigned To (Initials)");
lines.Add(string.Empty);
results = UserStoryCheckIterationPath228385(url, lines, userStoryWorkItemTypes, keyValuePairs, workItemType);
WriteFiles(fileRead, destinationDirectory, fileInfoCollection, new(lines), results, $"{workItemType} check 228385");
_Details.Add(results);
}
if (messages.Count > 0)
throw new Exception($"{messages.Count}{Environment.NewLine}{string.Join(Environment.NewLine, messages)}");
}
@@ -283,8 +296,104 @@ public class ProcessData : IProcessData
return result;
}
private static string GetClosed(WorkItem workItem) =>
workItem.State != "Closed" ? "[ ]" : "[x]";
private static ReadOnlyCollection<Record> UserStoryCheckIterationPath228385(string url, List<string> lines, ReadOnlyCollection<string> _, ReadOnlyDictionary<int, Record> keyValuePairs, string workItemType)
{
List<Record> results = new();
long totalStoryPoints;
List<long> collection = new();
ReadOnlyDictionary<string, List<Record>> records = GetWorkItemsMatching228385(keyValuePairs, workItemType);
lines.Add("<table border='1'>");
lines.Add($"<tr><td>{string.Join("</td><td>", records.Select(l => l.Key))}</td></tr>");
foreach (KeyValuePair<string, List<Record>> keyValuePair in records)
{
totalStoryPoints = 0;
foreach (Record record in keyValuePair.Value)
{
if (record.WorkItem.StoryPoints is null)
continue;
totalStoryPoints += record.WorkItem.StoryPoints.Value;
}
collection.Add(totalStoryPoints);
}
lines.Add($"<tr><td>{string.Join("</td><td>", collection)}</td></tr>");
lines.Add("</table>");
lines.Add(string.Empty);
foreach (KeyValuePair<string, List<Record>> keyValuePair in records)
{
totalStoryPoints = 0;
foreach (Record record in keyValuePair.Value)
{
if (record.WorkItem.StoryPoints is null)
continue;
totalStoryPoints += record.WorkItem.StoryPoints.Value;
}
lines.Add(string.Empty);
lines.Add($"## {keyValuePair.Key} => {totalStoryPoints}");
lines.Add(string.Empty);
foreach (Record record in keyValuePair.Value)
lines.Add($"- [ ] [{record.WorkItem.Id}]({url}{record.WorkItem.Id}) - {record.WorkItem.Title}");
}
return new(results);
}
private static ReadOnlyDictionary<string, List<Record>> GetWorkItemsMatching228385(ReadOnlyDictionary<int, Record> keyValuePairs, string workItemType)
{
ReadOnlyDictionary<string, List<Record>> results;
Record record;
List<Record> records = new();
foreach (KeyValuePair<int, Record> keyValuePair in keyValuePairs)
{
record = keyValuePair.Value;
if (record.WorkItem.State is "Removed" or "Closed")
continue;
if (!record.WorkItem.IterationPath.Contains('\\'))
continue;
if (record.WorkItem.StoryPoints is null)
continue;
if (record.WorkItem.WorkItemType != workItemType)
continue;
records.Add(record);
}
Record[] sorted = (from l in records orderby l.WorkItem.AreaPath, l.WorkItem.IterationPath, l.WorkItem.AssignedTo select l).ToArray();
results = GetWorkItemsMatching228385(new(sorted));
return results;
}
private static ReadOnlyDictionary<string, List<Record>> GetWorkItemsMatching228385(ReadOnlyCollection<Record> records)
{
Dictionary<string, List<Record>> results = new();
string key;
string[] segments;
List<Record>? collection;
foreach (Record record in records)
{
key = $"{record.WorkItem.AreaPath.Split('\\').Last()}-{record.WorkItem.IterationPath.Split('\\').Last().Split(' ').Last()}";
if (!results.TryGetValue(key, out collection))
{
results.Add(key, new());
if (!results.TryGetValue(key, out collection))
throw new Exception();
}
collection.Add(record);
}
foreach (Record record in records)
{
if (string.IsNullOrEmpty(record.WorkItem.AssignedTo))
continue;
segments = record.WorkItem.AssignedTo.Split(' ');
if (segments.Length < 3)
continue;
key = $"{record.WorkItem.IterationPath.Split('\\').Last().Split(' ').Last()}-{segments[0][0]}{segments[1][0]}";
if (!results.TryGetValue(key, out collection))
{
results.Add(key, new());
if (!results.TryGetValue(key, out collection))
throw new Exception();
}
collection.Add(record);
}
return new(results);
}
internal static List<Description> GetDescriptions(JsonElement[] jsonElements)
{

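For reference, a minimal standalone sketch of how the two grouping keys in `GetWorkItemsMatching228385` are derived, first one bucket per Site-Iteration, then one per Iteration-Initials. The `areaPath`, `iterationPath`, and `assignedTo` values below are hypothetical, not taken from the repository.

```csharp
using System;
using System.Linq;

// Hypothetical sample values; the real shapes come from the Azure DevOps work items.
string areaPath = @"ProjectX\MES";
string iterationPath = @"ProjectX\2025\Iteration 23";
string assignedTo = "Jane Doe (Example)";

// First pass: one bucket per Site-Iteration, e.g. "MES-23".
string site = areaPath.Split('\\').Last();
string iteration = iterationPath.Split('\\').Last().Split(' ').Last();
Console.WriteLine($"{site}-{iteration}");

// Second pass: one bucket per Iteration-Initials, e.g. "23-JD"; records with an
// empty AssignedTo or fewer than three space-separated segments are skipped,
// matching the `segments.Length < 3` guard above.
string[] segments = assignedTo.Split(' ');
if (segments.Length >= 3)
    Console.WriteLine($"{iteration}-{segments[0][0]}{segments[1][0]}");
```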
View File

@@ -22,17 +22,21 @@ public class FileRead : Shared.FileRead, IFileRead
internal class PreWith
{
internal string MatchingFile { get; private set; }
internal string CheckFile { get; private set; }
internal string ErrFile { get; private set; }
internal string CheckFile { get; private set; }
internal string MatchingFile { get; private set; }
internal string CheckDirectory { get; private set; }
internal string NoWaitDirectory { get; private set; }
internal PreWith(string matchingFile, string checkFile, string errFile, string checkDirectory, string noWaitDirectory)
internal PreWith(string checkDirectory,
string checkFile,
string errFile,
string matchingFile,
string noWaitDirectory)
{
MatchingFile = matchingFile;
CheckFile = checkFile;
ErrFile = errFile;
CheckFile = checkFile;
MatchingFile = matchingFile;
CheckDirectory = checkDirectory;
NoWaitDirectory = noWaitDirectory;
}
@@ -221,7 +225,11 @@ public class FileRead : Shared.FileRead, IFileRead
if (!Directory.Exists(checkDirectory))
_ = Directory.CreateDirectory(checkDirectory);
noWaitDirectory = Path.Combine(checkDirectory, "NoWaitDirectory");
preWith = new(pre.MatchingFile, pre.CheckFile, errFile, checkDirectory, noWaitDirectory);
preWith = new(checkDirectory: checkDirectory,
checkFile: pre.CheckFile,
errFile: errFile,
matchingFile: pre.MatchingFile,
noWaitDirectory: noWaitDirectory);
results.Add(preWith);
}
return results.AsReadOnly();
@@ -260,7 +268,7 @@ public class FileRead : Shared.FileRead, IFileRead
}
}
private static ReadOnlyCollection<Pre> GetPreCollection(int numberLength, string parentDirectory, ReadOnlyCollection<string> matchingFiles)
private static ReadOnlyCollection<Pre> GetPreCollection(int numberLength, string parentDirectory, ReadOnlyCollection<string> matchingFiles, bool _)
{
List<Pre> results = new();
Pre pre;
@@ -275,7 +283,7 @@ public class FileRead : Shared.FileRead, IFileRead
return results.AsReadOnly();
}
private void MoveCollection(DateTime dateTime, ProcessDataStandardFormat? processDataStandardFormat, ReadOnlyCollection<PreWith> preWithCollection)
private void MoveCollection(DateTime dateTime, ProcessDataStandardFormat processDataStandardFormat, ReadOnlyCollection<PreWith> preWithCollection)
{
ReadOnlyCollection<Post> postCollection = GetPostCollection(dateTime, processDataStandardFormat, preWithCollection);
if (postCollection.Count != 0)
@@ -294,7 +302,7 @@ public class FileRead : Shared.FileRead, IFileRead
}
}
private ReadOnlyCollection<Post> GetPostCollection(DateTime dateTime, ProcessDataStandardFormat? processDataStandardFormat, ReadOnlyCollection<PreWith> preWithCollection)
private ReadOnlyCollection<Post> GetPostCollection(DateTime dateTime, ProcessDataStandardFormat processDataStandardFormat, ReadOnlyCollection<PreWith> preWithCollection)
{
List<Post> results = new();
Post post;
@@ -303,15 +311,10 @@ public class FileRead : Shared.FileRead, IFileRead
{
if (!_IsEAFHosted)
continue;
if (processDataStandardFormat is null)
File.Move(preWith.MatchingFile, preWith.CheckFile);
else
{
if (!_StaticRuns.TryGetValue(_Logistics.Sequence, out List<Shared.Metrology.WS.Results>? wsResults))
wsResults = null;
ProcessDataStandardFormat.Write(preWith.CheckFile, processDataStandardFormat, wsResults);
File.Delete(preWith.MatchingFile);
}
if (!_StaticRuns.TryGetValue(_Logistics.Sequence, out List<Shared.Metrology.WS.Results>? wsResults))
wsResults = null;
ProcessDataStandardFormat.Write(preWith.CheckFile, processDataStandardFormat, wsResults);
File.Delete(preWith.MatchingFile);
if (Directory.Exists(preWith.NoWaitDirectory))
{
post = new(preWith.CheckFile, preWith.ErrFile);
@@ -345,16 +348,9 @@ public class FileRead : Shared.FileRead, IFileRead
private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results = new(string.Empty, Array.Empty<Test>(), Array.Empty<JsonElement>(), new List<FileInfo>());
ProcessDataStandardFormat? processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath, _ProcessDataStandardFormatMapping);
if (processDataStandardFormat is not null)
_Logistics = new Logistics(reportFullPath, processDataStandardFormat);
else
{
processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath);
_Logistics = new Logistics(reportFullPath, processDataStandardFormat);
processDataStandardFormat = null;
}
if (!_IsEAFHosted && processDataStandardFormat is not null)
ProcessDataStandardFormat processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath, _ProcessDataStandardFormatMapping);
_Logistics = new Logistics(reportFullPath, processDataStandardFormat);
if (!_IsEAFHosted)
ProcessDataStandardFormat.Write(".pdsf", processDataStandardFormat, wsResults: null);
SetFileParameterLotIDToLogisticsMID();
int numberLength = 2;
@@ -364,10 +360,14 @@ public class FileRead : Shared.FileRead, IFileRead
ReadOnlyCollection<string> matchingFiles = GetMatchingFiles(ticks, reportFullPath, searchDirectories);
if (matchingFiles.Count != searchDirectories.Count)
throw new Exception($"Didn't find all files after {_BreakAfterSeconds} second(s)!");
try
{ CreatePointerFile(numberLength, parentParentDirectory, matchingFiles); }
catch (Exception) { }
ReadOnlyCollection<Pre> preCollection = GetPreCollection(numberLength, parentParentDirectory, matchingFiles);
if (_IsEAFHosted)
{
try
{ CreatePointerFile(numberLength, parentParentDirectory, matchingFiles); }
catch (Exception) { }
}
bool mesEntityMatchesProcess = false;
ReadOnlyCollection<Pre> preCollection = GetPreCollection(numberLength, parentParentDirectory, matchingFiles, mesEntityMatchesProcess);
ReadOnlyCollection<PreWith> preWithCollection = GetPreWithCollection(preCollection);
MoveCollection(dateTime, processDataStandardFormat, preWithCollection);
return results;

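A short sketch of the design choice behind the `PreWith` call-site change above: with five look-alike string parameters, named arguments keep each value bound to the intended property even after the constructor's parameter order is rearranged. `PreWithSketch` is a stand-in type written for this sketch, not the production class, and the paths are made up.

```csharp
using System;

// Named arguments bind by name, so this call stays correct regardless of how
// the constructor orders its five string parameters.
PreWithSketch preWith = new(
    checkDirectory: @"C:\EAF\Check",
    checkFile: @"C:\EAF\Check\run.pdsf",
    errFile: @"C:\EAF\Check\run.err",
    matchingFile: @"C:\EAF\Inbox\run.pdsf",
    noWaitDirectory: @"C:\EAF\Check\NoWaitDirectory");
Console.WriteLine(preWith.CheckFile); // C:\EAF\Check\run.pdsf

// Stand-in for the production PreWith: same five strings, alphabetical parameter order.
internal sealed class PreWithSketch
{
    internal string CheckDirectory { get; }
    internal string CheckFile { get; }
    internal string ErrFile { get; }
    internal string MatchingFile { get; }
    internal string NoWaitDirectory { get; }

    internal PreWithSketch(string checkDirectory, string checkFile, string errFile, string matchingFile, string noWaitDirectory)
    {
        CheckDirectory = checkDirectory;
        CheckFile = checkFile;
        ErrFile = errFile;
        MatchingFile = matchingFile;
        NoWaitDirectory = noWaitDirectory;
    }
}
```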
View File

@@ -140,7 +140,7 @@ public class FileRead : Shared.FileRead, IFileRead
string[] lines = new string[] { string.Empty, "NUM_DATA_ROWS", $"LOGISTICS_1{'\t'}A_JOBID={"BACKLOG"};A_MES_ENTITY={"BACKLOG"};" };
ProcessDataStandardFormat processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath, lines);
_Logistics = new Logistics(reportFullPath, processDataStandardFormat);
results = new(_Logistics.Logistics1[0], Array.Empty<Test>(), Array.Empty<JsonElement>(), new List<FileInfo>());
results = new(string.Join(Environment.NewLine, _Logistics.Logistics1), Array.Empty<Test>(), Array.Empty<JsonElement>(), new List<FileInfo>());
return results;
}

View File

@@ -30,6 +30,17 @@ public class ProcessData : IProcessData
Tuple<string, Test[], JsonElement[], List<FileInfo>> IProcessData.GetResults(IFileRead fileRead, Logistics logistics, List<FileInfo> fileInfoCollection) =>
new(logistics.Logistics1[0], Array.Empty<Test>(), Array.Empty<JsonElement>(), fileInfoCollection);
private static int GetState(WorkItem workItem) =>
workItem.State switch
{
"New" => 1,
"Active" => 2,
"Resolved" => 3,
"Closed" => 4,
"Removed" => 5,
_ => 8
};
public ProcessData(IFileRead fileRead, Logistics logistics, string targetFileLocation, string url, ReadOnlyCollection<string> workItemTypes, List<FileInfo> fileInfoCollection)
{
if (fileRead.IsEAFHosted)
@@ -66,8 +77,6 @@ public class ProcessData : IProcessData
ReadOnlyCollection<Record> results;
ReadOnlyDictionary<int, Record> keyValuePairs = GetWorkItems(workItems, keepRelations);
ReadOnlyCollection<Record> records = new(keyValuePairs.Values.ToArray());
ReadOnlyCollection<string> userStoryWorkItemTypes = new(new string[] { "User Story" });
ReadOnlyCollection<string> bugFeatureWorkItemTypes = new(new string[] { "Bug", "Feature" });
ReadOnlyCollection<string> bugUserStoryWorkItemTypes = new(new string[] { "Bug", "User Story" });
ReadOnlyCollection<string> bugUserStoryTaskWorkItemTypes = new(new string[] { "Bug", "User Story", "Task" });
{
@@ -124,15 +133,6 @@ public class ProcessData : IProcessData
WriteFiles(fileRead, destinationDirectory, fileInfoCollection, new(lines), workItemType, results, "check-122517");
_Details.Add(results);
}
{
lines.Clear();
string workItemType = "User Story";
lines.Add($"# {nameof(UserStoryCheckIterationPath228385)}");
lines.Add(string.Empty);
results = UserStoryCheckIterationPath228385(url, lines, userStoryWorkItemTypes, keyValuePairs, workItemType);
WriteFiles(fileRead, destinationDirectory, fileInfoCollection, new(lines), workItemType, results, "check-228385");
_Details.Add(results);
}
if (messages.Count > 0)
throw new Exception($"{messages.Count}{Environment.NewLine}{string.Join(Environment.NewLine, messages)}");
}
@@ -475,17 +475,6 @@ public class ProcessData : IProcessData
return new(results);
}
private static int GetState(WorkItem workItem) =>
workItem.State switch
{
"New" => 1,
"Active" => 2,
"Resolved" => 3,
"Closed" => 4,
"Removed" => 5,
_ => 8
};
private static ReadOnlyCollection<Record> FeatureCheckState123067(string url, List<string> lines, ReadOnlyCollection<string> workItemTypes, ReadOnlyDictionary<int, Record> keyValuePairs, string workItemType)
{
List<Record> results = new();
@@ -622,71 +611,6 @@ public class ProcessData : IProcessData
return new(results);
}
private static ReadOnlyCollection<Record> UserStoryCheckIterationPath228385(string url, List<string> lines, ReadOnlyCollection<string> _, ReadOnlyDictionary<int, Record> keyValuePairs, string workItemType)
{
List<Record> results = new();
long totalStoryPoints;
ReadOnlyDictionary<string, List<Record>> records = GetWorkItemsMatching228385(keyValuePairs, workItemType);
foreach (KeyValuePair<string, List<Record>> keyValuePair in records)
{
totalStoryPoints = 0;
foreach (Record record in keyValuePair.Value)
{
if (record.WorkItem.StoryPoints is null)
continue;
totalStoryPoints += record.WorkItem.StoryPoints.Value;
}
lines.Add(string.Empty);
lines.Add($"## {keyValuePair.Key} => {totalStoryPoints}");
lines.Add(string.Empty);
foreach (Record record in keyValuePair.Value)
lines.Add($"- [ ] [{record.WorkItem.Id}]({url}{record.WorkItem.Id}) - {record.WorkItem.Title}");
}
return new(results);
}
private static ReadOnlyDictionary<string, List<Record>> GetWorkItemsMatching228385(ReadOnlyDictionary<int, Record> keyValuePairs, string workItemType)
{
ReadOnlyDictionary<string, List<Record>> results;
Record record;
List<Record> records = new();
foreach (KeyValuePair<int, Record> keyValuePair in keyValuePairs)
{
record = keyValuePair.Value;
if (record.WorkItem.State is "Removed" or "Closed")
continue;
if (!record.WorkItem.IterationPath.Contains('\\'))
continue;
if (record.WorkItem.StoryPoints is null)
continue;
if (record.WorkItem.WorkItemType != workItemType)
continue;
records.Add(record);
}
Record[] sorted = records.OrderByDescending(l => l.WorkItem.IterationPath).ToArray();
results = GetWorkItemsMatching228385(new(sorted));
return results;
}
private static ReadOnlyDictionary<string, List<Record>> GetWorkItemsMatching228385(ReadOnlyCollection<Record> records)
{
Dictionary<string, List<Record>> results = new();
string key;
List<Record>? collection;
foreach (Record record in records)
{
key = $"{record.WorkItem.IterationPath}-{record.WorkItem.AssignedTo}";
if (!results.TryGetValue(key, out collection))
{
results.Add(key, new());
if (!results.TryGetValue(key, out collection))
throw new Exception();
}
collection.Add(record);
}
return new(results);
}
internal static List<Description> GetDescriptions(JsonElement[] jsonElements)
{
List<Description> results = new();

View File

@@ -363,7 +363,7 @@ public class FileRead : Shared.FileRead, IFileRead
_Logistics = new Logistics(reportFullPath, processDataStandardFormat);
if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
MoveJson(reportFullPath, dateTime);
results = new(_Logistics.Logistics1[0], Array.Empty<Test>(), Array.Empty<JsonElement>(), new List<FileInfo>());
results = new(string.Join(Environment.NewLine, _Logistics.Logistics1), Array.Empty<Test>(), Array.Empty<JsonElement>(), new List<FileInfo>());
return results;
}

View File

@@ -377,17 +377,25 @@ public class FileRead : Properties.IFileRead
internal string[] GetInProcessDirectory(string jobIdDirectory)
{
string[] results;
List<string> results = new();
if (!_IsEAFHosted)
results = new string[] { jobIdDirectory };
results = new string[] { jobIdDirectory }.ToList();
else
{
string[] files;
string logisticsSequence = _Logistics.Sequence.ToString();
results = Directory.GetDirectories(jobIdDirectory, $"*{logisticsSequence}*", SearchOption.TopDirectoryOnly);
string[] directories = Directory.GetDirectories(jobIdDirectory, $"*{logisticsSequence}*", SearchOption.TopDirectoryOnly);
foreach (string directory in directories)
{
files = Directory.GetFiles(directory, "*", SearchOption.TopDirectoryOnly);
if (files.Length == 0)
continue;
results.Add(directory);
}
}
if ((results is null) || results.Length != 1)
if ((results is null) || results.Count != 1)
throw new Exception("Didn't find directory by logistics sequence");
return results;
return results.ToArray();
}
protected static string[] GetMatches(FileConnectorConfiguration fileConnectorConfiguration)

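A standalone sketch of the revised `GetInProcessDirectory` lookup: sequence-matched directories now also have to contain at least one file before they count toward the single expected result. The root path and sequence value below are hypothetical.

```csharp
using System;
using System.Collections.Generic;
using System.IO;

string jobIdDirectory = Path.Combine(Path.GetTempPath(), "job-id-sketch"); // hypothetical root
string logisticsSequence = "638824321234567890";                           // hypothetical sequence
List<string> results = new();
if (Directory.Exists(jobIdDirectory))
{
    foreach (string directory in Directory.GetDirectories(jobIdDirectory, $"*{logisticsSequence}*", SearchOption.TopDirectoryOnly))
    {
        // A directory that matched the sequence but is still empty is treated as not ready.
        if (Directory.GetFiles(directory, "*", SearchOption.TopDirectoryOnly).Length == 0)
            continue;
        results.Add(directory);
    }
}
Console.WriteLine(results.Count == 1
    ? results[0]
    : "Didn't find directory by logistics sequence");
```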
View File

@@ -61,8 +61,8 @@ internal class ProcessDataStandardFormat
internal static string Archive(bool addSpaces = true, char separator = ' ') =>
GetString(SearchFor.Archive, addSpaces, separator);
internal static ProcessDataStandardFormat GetEmpty() =>
new(new(Array.Empty<string>()), new(Array.Empty<string>()), new(Array.Empty<string>()), new(Array.Empty<string>()), null, new(Array.Empty<string>()), null);
internal static ProcessDataStandardFormat GetEmpty(Logistics logistics) =>
new(new(Array.Empty<string>()), new(Array.Empty<string>()), new(Array.Empty<string>()), new(Array.Empty<string>()), null, new(logistics.Logistics1), null);
internal static List<string> PDSFToFixedWidth(string reportFullPath)
{
@@ -214,25 +214,26 @@ internal class ProcessDataStandardFormat
return results.AsReadOnly();
}
internal static ProcessDataStandardFormat? GetProcessDataStandardFormat(string reportFullPath, ProcessDataStandardFormatMapping pdsfMapping)
internal static ProcessDataStandardFormat GetProcessDataStandardFormat(string reportFullPath, ProcessDataStandardFormatMapping pdsfMapping)
{
ProcessDataStandardFormat? result;
ProcessDataStandardFormat result;
const int columnsLine = 6;
FileInfo fileInfo = new(reportFullPath);
ProcessDataStandardFormat processDataStandardFormat = GetProcessDataStandardFormat(fileInfo.LastWriteTime, pdsfMapping.NewColumnNames.Count, columnsLine, fileInfo.FullName, lines: null);
JsonElement[]? jsonElements = GetArray(pdsfMapping.NewColumnNames.Count, processDataStandardFormat, lookForNumbers: false);
if (jsonElements is null || jsonElements.Length == 0 || pdsfMapping.OldColumnNames.Count != pdsfMapping.ColumnIndices.Count)
result = null;
ProcessDataStandardFormat processDataStandardFormat = GetProcessDataStandardFormat(fileInfo.LastWriteTime, columnsLine, fileInfo.FullName, lines: null);
JsonElement[]? jsonElements = pdsfMapping.OldColumnNames.Count != pdsfMapping.ColumnIndices.Count ? null : GetFullArray(processDataStandardFormat);
JsonProperty[]? jsonProperties = jsonElements is null || jsonElements.Length == 0 ? null : jsonElements[0].EnumerateObject().ToArray();
if (jsonElements is null || jsonProperties is null || jsonProperties.Length != pdsfMapping.NewColumnNames.Count)
result = processDataStandardFormat;
else
{
result = GetProcessDataStandardFormat(pdsfMapping, jsonElements, processDataStandardFormat);
if (result.Sequence is null || result.Columns.Count == 0 || result.Body.Count == 0 || result.Logistics.Count == 0)
result = null;
result = processDataStandardFormat;
}
return result;
}
private static ProcessDataStandardFormat GetProcessDataStandardFormat(DateTime lastWriteTime, int expectedColumns, int columnsLine, string path, string[]? lines)
private static ProcessDataStandardFormat GetProcessDataStandardFormat(DateTime lastWriteTime, int columnsLine, string path, string[]? lines)
{
ProcessDataStandardFormat result;
long sequence;
@@ -248,8 +249,6 @@ internal class ProcessDataStandardFormat
else
{
segments = lines[columnsLine].Split('\t');
if (segments.Length != expectedColumns)
segments = Array.Empty<string>();
for (int i = 0; i < columnsLine; i++)
header.Add(lines[i]);
}
@@ -285,7 +284,7 @@ internal class ProcessDataStandardFormat
return result;
}
private static JsonElement[]? GetArray(int expectedColumns, ProcessDataStandardFormat processDataStandardFormat, bool lookForNumbers)
private static JsonElement[]? GetFullArray(ProcessDataStandardFormat processDataStandardFormat)
{
JsonElement[]? results;
if (processDataStandardFormat.Body.Count == 0 || !processDataStandardFormat.Body[0].Contains('\t'))
@@ -293,36 +292,18 @@ internal class ProcessDataStandardFormat
else
{
string value;
string[] segments;
List<string> segments;
List<string> lines = new();
StringBuilder stringBuilder = new();
foreach (string bodyLine in processDataStandardFormat.Body)
{
_ = stringBuilder.Clear();
_ = stringBuilder.Append('{');
segments = bodyLine.Split('\t');
if (segments.Length != expectedColumns)
continue;
if (!lookForNumbers)
segments = bodyLine.Split('\t').ToList();
for (int c = 0; c < segments.Count; c++)
{
for (int c = 0; c < segments.Length; c++)
{
value = segments[c].Replace("\"", "\\\"").Replace("\\", "\\\\");
_ = stringBuilder.Append('"').Append(processDataStandardFormat.Columns[c]).Append("\":\"").Append(value).Append("\",");
}
}
else
{
for (int c = 0; c < segments.Length; c++)
{
value = segments[c].Replace("\"", "\\\"").Replace("\\", "\\\\");
if (string.IsNullOrEmpty(value))
_ = stringBuilder.Append('"').Append(processDataStandardFormat.Columns[c]).Append("\":").Append(value).Append("null,");
else if (value.All(char.IsDigit))
_ = stringBuilder.Append('"').Append(processDataStandardFormat.Columns[c]).Append("\":").Append(value).Append(',');
else
_ = stringBuilder.Append('"').Append(processDataStandardFormat.Columns[c]).Append("\":\"").Append(value).Append("\",");
}
value = segments[c].Replace("\"", "\\\"").Replace("\\", "\\\\");
_ = stringBuilder.Append('"').Append(processDataStandardFormat.Columns[c]).Append("\":\"").Append(value).Append("\",");
}
_ = stringBuilder.Remove(stringBuilder.Length - 1, 1);
_ = stringBuilder.AppendLine("}");

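A minimal sketch of what `GetFullArray` now emits for each body row: one JSON object per tab-separated line, keyed by the PDSF column names, with every value kept as a string (the numeric-versus-string branching was removed). The column names and row below are made up, and `System.Text.Json` is used only to keep the sketch short and escape-safe; the production code assembles the string with a `StringBuilder`.

```csharp
using System;
using System.Collections.Generic;
using System.Text.Json;

string[] columns = { "Id", "Title", "StoryPoints" };   // hypothetical PDSF column header
string bodyLine = "228385\tSample user story\t5";      // hypothetical tab-separated body row

// Pair each tab-separated segment with its column name, keeping everything as a string.
string[] segments = bodyLine.Split('\t');
Dictionary<string, string> row = new();
for (int c = 0; c < segments.Length; c++)
    row[columns[c]] = segments[c];

Console.WriteLine(JsonSerializer.Serialize(row));
// {"Id":"228385","Title":"Sample user story","StoryPoints":"5"}
```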
View File

@@ -1182,7 +1182,7 @@ public class AdaptationTesting : ISMTP
Assert.IsNotNull(extractResult.Item3);
Assert.IsNotNull(extractResult.Item4);
if (!validatePDSF)
_ = GetProcessDataStandardFormat(fileRead, logistics, extractResult, ProcessDataStandardFormat.GetEmpty());
_ = GetProcessDataStandardFormat(fileRead, logistics, extractResult, ProcessDataStandardFormat.GetEmpty(logistics));
else
{
Assert.IsTrue(extractResult.Item3.Length > 0, "extractResult Array Length check!");