ProcessData over Tuple

Mike Phares 2025-03-17 15:07:57 -07:00
parent 534d0ccc5c
commit e02b70e258
24 changed files with 151 additions and 115 deletions

View File

@ -121,6 +121,7 @@ dotnet_diagnostic.IDE0290.severity = none # Use primary constructor [Distance]cs
dotnet_diagnostic.IDE0300.severity = none # IDE0300: Collection initialization can be simplified
dotnet_diagnostic.IDE0301.severity = none #IDE0301: Collection initialization can be simplified
dotnet_diagnostic.IDE0305.severity = none # IDE0305: Collection initialization can be simplified
dotnet_diagnostic.MSTEST0037.severity = error # MSTEST0037: Use proper 'Assert' methods
dotnet_diagnostic.SYSLIB1045.severity = none # SYSLIB1045: diagnostics for regex source generation
dotnet_naming_rule.abstract_method_should_be_pascal_case.severity = warning
dotnet_naming_rule.abstract_method_should_be_pascal_case.style = pascal_case
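Raising MSTEST0037 to error is what drives the Assert.IsTrue-to-Assert.AreEqual changes later in this commit. A minimal sketch of the two assertion shapes, using the same DateTime comparison the tests below exercise (illustrative only, not part of the diff):

// Assumes the usual MSTest directive: using Microsoft.VisualStudio.TestTools.UnitTesting;
DateTime dateTime = DateTime.Now;
// Flagged by MSTEST0037: the boolean comparison hides expected and actual values from the failure message.
Assert.IsTrue(dateTime.ToString("M/d/yyyy h:mm:ss tt") == dateTime.ToString());
// Preferred: Assert.AreEqual(expected, actual) reports both values when the assertion fails.
Assert.AreEqual(dateTime.ToString(), dateTime.ToString("M/d/yyyy h:mm:ss tt"));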

View File

@ -1 +1,20 @@
[]
[
{
"DocumentId": {
"ProjectId": {
"Id": "88b6bb05-fef2-487e-bb6c-9ae68922c0bb"
},
"Id": "63c5cda5-30ee-4e20-9ec8-d45777057452"
},
"FileName": "MonIn.cs",
"FilePath": "L:\\DevOps\\EAF-Mesa-Integration\\mesafibacklog\\Adaptation\\Infineon\\Monitoring\\MonA\\MonIn.cs",
"FileChanges": [
{
"LineNumber": 268,
"CharNumber": 17,
"DiagnosticId": "CA1816",
"FormatDescription": "warning CA1816: Change MonIn.Dispose() to call GC.SuppressFinalize(object). This will prevent derived types that introduce a finalizer from needing to re-implement \u0027IDisposable\u0027 to call it."
}
]
}
]
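The CA1816 entry above asks MonIn.Dispose() to call GC.SuppressFinalize(object). A minimal sketch of the conventional dispose pattern the analyzer points to; MonIn's actual fields and cleanup are not shown in this diff, so the body here is illustrative:

public void Dispose()
{
    Dispose(disposing: true);
    GC.SuppressFinalize(this); // prevents derived finalizable types from running a redundant finalizer
}

protected virtual void Dispose(bool disposing)
{
    if (disposing)
    {
        // release managed resources here
    }
}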

View File

@ -28,7 +28,6 @@ public class ProcessData : IProcessData
{ }
if (url is null)
throw new ArgumentNullException(nameof(url));
fileInfoCollection.Clear();
_Details = new List<object>();
_Log = LogManager.GetLogger(typeof(ProcessData));
WriteFiles(fileRead, logistics, targetFileLocation, fileInfoCollection);

View File

@ -120,15 +120,15 @@ public class FileRead : Shared.FileRead, IFileRead
private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
Tuple<string, string[], string[]> pdsf = ProcessDataStandardFormat.GetLogisticsColumnsAndBody(reportFullPath);
_Logistics = new Logistics(reportFullPath, pdsf.Item1);
ProcessData processData = ProcessDataStandardFormat.GetProcessData(reportFullPath);
_Logistics = new Logistics(reportFullPath, processData.Logistics);
SetFileParameterLotIDToLogisticsMID();
JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(pdsf);
JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(processData);
List<Shared.Properties.IDescription> descriptions = GetDuplicatorDescriptions(jsonElements);
Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
FileCopy(reportFullPath, dateTime, descriptions);
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(pdsf.Item1, tests, jsonElements, new List<FileInfo>());
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(processData.Logistics, tests, jsonElements, new List<FileInfo>());
return results;
}
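The same call-site substitution repeats across the FileRead implementations in the hunks below; in each one, positional tuple access is replaced by the named properties of the new Shared.ProcessData class:

// Before: positional access on Tuple<string, string[], string[]>
Tuple<string, string[], string[]> pdsf = ProcessDataStandardFormat.GetLogisticsColumnsAndBody(reportFullPath);
_Logistics = new Logistics(reportFullPath, pdsf.Item1);
// After: named access on ProcessData
ProcessData processData = ProcessDataStandardFormat.GetProcessData(reportFullPath);
_Logistics = new Logistics(reportFullPath, processData.Logistics);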

View File

@ -144,15 +144,15 @@ public class FileRead : Shared.FileRead, IFileRead
private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
Tuple<string, string[], string[]> pdsf = ProcessDataStandardFormat.GetLogisticsColumnsAndBody(reportFullPath);
_Logistics = new Logistics(reportFullPath, pdsf.Item1);
ProcessData processData = ProcessDataStandardFormat.GetProcessData(reportFullPath);
_Logistics = new Logistics(reportFullPath, processData.Logistics);
SetFileParameterLotIDToLogisticsMID();
JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(pdsf);
JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(processData);
List<Shared.Properties.IDescription> descriptions = GetDuplicatorDescriptions(jsonElements);
Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
MoveArchive(reportFullPath, dateTime);
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(pdsf.Item1, tests, jsonElements, new List<FileInfo>());
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(processData.Logistics, tests, jsonElements, new List<FileInfo>());
return results;
}

View File

@ -119,15 +119,15 @@ public class FileRead : Shared.FileRead, IFileRead
private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
Tuple<string, string[], string[]> pdsf = ProcessDataStandardFormat.GetLogisticsColumnsAndBody(reportFullPath);
_Logistics = new Logistics(reportFullPath, pdsf.Item1);
ProcessData processData = ProcessDataStandardFormat.GetProcessData(reportFullPath);
_Logistics = new Logistics(reportFullPath, processData.Logistics);
SetFileParameterLotIDToLogisticsMID();
JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(pdsf);
JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(processData);
List<Shared.Properties.IDescription> descriptions = GetDuplicatorDescriptions(jsonElements);
Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
FileCopy(reportFullPath, dateTime, descriptions);
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(pdsf.Item1, tests, jsonElements, new List<FileInfo>());
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(processData.Logistics, tests, jsonElements, new List<FileInfo>());
return results;
}

View File

@ -28,7 +28,6 @@ public class ProcessData : IProcessData
{ }
if (url is null)
throw new ArgumentNullException(nameof(url));
fileInfoCollection.Clear();
_Details = new List<object>();
_Log = LogManager.GetLogger(typeof(ProcessData));
WriteFiles(fileRead, logistics, calendar, targetFileLocation, fileInfoCollection);

View File

@ -26,7 +26,6 @@ public class ProcessData : IProcessData
{
if (fileRead.IsEAFHosted)
{ }
fileInfoCollection.Clear();
_Details = new List<object>();
_Log = LogManager.GetLogger(typeof(ProcessData));
WriteFiles(fileRead, logistics, url, workItemTypes, targetFileLocation, fileInfoCollection);

View File

@ -244,8 +244,8 @@ public class FileRead : Shared.FileRead, IFileRead
private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results = new(string.Empty, null, null, new List<FileInfo>());
Tuple<string, string[], string[]> pdsf = ProcessDataStandardFormat.GetLogisticsColumnsAndBody(reportFullPath);
_Logistics = new Logistics(reportFullPath, pdsf.Item1);
ProcessData processData = ProcessDataStandardFormat.GetProcessData(reportFullPath);
_Logistics = new Logistics(reportFullPath, processData.Logistics);
SetFileParameterLotIDToLogisticsMID();
int numberLength = 2;
long ticks = dateTime.Ticks;

View File

@ -118,15 +118,15 @@ public class FileRead : Shared.FileRead, IFileRead
private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
Tuple<string, string[], string[]> pdsf = ProcessDataStandardFormat.GetLogisticsColumnsAndBody(reportFullPath);
_Logistics = new Logistics(reportFullPath, pdsf.Item1);
ProcessData processData = ProcessDataStandardFormat.GetProcessData(reportFullPath);
_Logistics = new Logistics(reportFullPath, processData.Logistics);
SetFileParameterLotIDToLogisticsMID();
JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(pdsf);
JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(processData);
List<Description> descriptions = json.ProcessData.GetDescriptions(jsonElements);
Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
SaveOpenInsightFile(reportFullPath, dateTime, descriptions, tests);
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(pdsf.Item1, tests, jsonElements, new List<FileInfo>());
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(processData.Logistics, tests, jsonElements, new List<FileInfo>());
return results;
}

View File

@ -129,15 +129,15 @@ public class FileRead : Shared.FileRead, IFileRead
private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
Tuple<string, string[], string[]> pdsf = ProcessDataStandardFormat.GetLogisticsColumnsAndBody(reportFullPath);
_Logistics = new Logistics(reportFullPath, pdsf.Item1);
ProcessData processData = ProcessDataStandardFormat.GetProcessData(reportFullPath);
_Logistics = new Logistics(reportFullPath, processData.Logistics);
SetFileParameterLotIDToLogisticsMID();
JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(pdsf);
JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(processData);
List<Description> descriptions = json.ProcessData.GetDescriptions(jsonElements);
Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
SendData(reportFullPath, dateTime, descriptions);
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(pdsf.Item1, tests, jsonElements, new List<FileInfo>());
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(processData.Logistics, tests, jsonElements, new List<FileInfo>());
return results;
}

View File

@ -146,15 +146,15 @@ public class FileRead : Shared.FileRead, IFileRead
if (dateTime == DateTime.MinValue)
throw new ArgumentNullException(nameof(dateTime));
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
Tuple<string, string[], string[]> pdsf = ProcessDataStandardFormat.GetLogisticsColumnsAndBody(reportFullPath);
_Logistics = new Logistics(reportFullPath, pdsf.Item1);
ProcessData processData = ProcessDataStandardFormat.GetProcessData(reportFullPath);
_Logistics = new Logistics(reportFullPath, processData.Logistics);
SetFileParameterLotIDToLogisticsMID();
JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(pdsf);
JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(processData);
List<Description> descriptions = json.ProcessData.GetDescriptions(jsonElements);
Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
PostOpenInsightMetrologyViewerAttachments(descriptions);
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(pdsf.Item1, tests, jsonElements, new List<FileInfo>());
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(processData.Logistics, tests, jsonElements, new List<FileInfo>());
return results;
}

View File

@ -143,13 +143,13 @@ public class FileRead : Shared.FileRead, IFileRead
private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
Tuple<string, string[], string[]> pdsf = ProcessDataStandardFormat.GetLogisticsColumnsAndBody(reportFullPath);
_Logistics = new Logistics(reportFullPath, pdsf.Item1);
ProcessData processData = ProcessDataStandardFormat.GetProcessData(reportFullPath);
_Logistics = new Logistics(reportFullPath, processData.Logistics);
SetFileParameterLotIDToLogisticsMID();
JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(pdsf);
JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(processData);
List<Description> descriptions = json.ProcessData.GetDescriptions(jsonElements);
Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(pdsf.Item1, tests, jsonElements, new List<FileInfo>());
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(processData.Logistics, tests, jsonElements, new List<FileInfo>());
if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
DirectoryMove(reportFullPath, dateTime, descriptions);
else if (!_IsEAFHosted)

View File

@ -117,15 +117,15 @@ public class FileRead : Shared.FileRead, IFileRead
private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
Tuple<string, string[], string[]> pdsf = ProcessDataStandardFormat.GetLogisticsColumnsAndBody(reportFullPath);
_Logistics = new Logistics(reportFullPath, pdsf.Item1);
ProcessData processData = ProcessDataStandardFormat.GetProcessData(reportFullPath);
_Logistics = new Logistics(reportFullPath, processData.Logistics);
SetFileParameterLotIDToLogisticsMID();
JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(pdsf);
JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(processData);
List<Shared.Properties.IDescription> descriptions = GetDuplicatorDescriptions(jsonElements);
Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
FileCopy(reportFullPath, dateTime, descriptions);
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(pdsf.Item1, tests, jsonElements, new List<FileInfo>());
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(processData.Logistics, tests, jsonElements, new List<FileInfo>());
return results;
}

View File

@ -19,7 +19,6 @@ public class ProcessData : IProcessData
Logistics logistics,
List<FileInfo> fileInfoCollection)
{
fileInfoCollection.Clear();
_Details = new List<object>();
Parse();
}

View File

@ -94,7 +94,7 @@
<PackageReference Include="Microsoft.TeamFoundationServer.Client" Version="16.205.1" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="Infineon.Mesa.PDF.Text.Stripper" Version="4.8.0.1">
<PackageReference Include="Infineon.Mesa.PDF.Text.Stripper" Version="4.8.0.2">
<NoWarn>NU1701</NoWarn>
</PackageReference>
</ItemGroup>

View File

@ -25,7 +25,7 @@ stages:
nugetSource: "https://artifactory.intra.infineon.com/artifactory/api/nuget/ngt-fi-package-main-vir/"
jobs:
- job: SetupEnviroment
- job: SetupEnvironment
steps:
- script: |
echo $(Build.BuildId)
@ -51,7 +51,7 @@ stages:
- job: BuildDebug
dependsOn:
- SetupEnviroment
- SetupEnvironment
steps:
- script: |
set configuration=Debug
@ -66,7 +66,7 @@ stages:
- job: BuildRelease
dependsOn:
- SetupEnviroment
- SetupEnvironment
steps:
- script: |
set configuration=Release
@ -98,7 +98,7 @@ stages:
- job: TestDebug
dependsOn:
- SetupEnviroment
- SetupEnvironment
- BuildDebug
- BuildRelease
steps:
@ -168,7 +168,7 @@ stages:
nugetSource: "https://artifactory.intra.infineon.com/artifactory/api/nuget/ngt-fi-package-main-vir/"
jobs:
- job: SetupEnviroment
- job: SetupEnvironment
steps:
- script: |
echo $(Build.BuildId)
@ -194,7 +194,7 @@ stages:
- job: BuildRelease
dependsOn:
- SetupEnviroment
- SetupEnvironment
steps:
- script: |
set configuration=Release
@ -226,7 +226,7 @@ stages:
- job: TestRelease
dependsOn:
- SetupEnviroment
- SetupEnvironment
- BuildRelease
steps:
- script: |

View File

@ -0,0 +1,21 @@
namespace Adaptation.Shared;
public class ProcessData
{
protected readonly string _Logistics;
protected readonly string[] _Columns;
protected readonly string[] _BodyLines;
public string Logistics => _Logistics;
public string[] Columns => _Columns;
public string[] BodyLines => _BodyLines;
public ProcessData(string logistics, string[] columns, string[] bodyLines)
{
_Logistics = logistics;
_Columns = columns;
_BodyLines = bodyLines;
}
}
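A brief usage sketch of the new class (values here are hypothetical); the named properties map one-to-one onto the old tuple's positional items:

ProcessData processData = new(
    logistics: "LOGISTICS_1",
    columns: new[] { "Id", "Test", "Value" },
    bodyLines: new[] { "0\t1\t2.5" });
string logistics = processData.Logistics;   // was pdsf.Item1
string[] columns = processData.Columns;     // was pdsf.Item2
string[] bodyLines = processData.BodyLines; // was pdsf.Item3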

View File

@ -86,7 +86,7 @@ public class ProcessDataStandardFormat
return result;
}
public static Tuple<string, string[], string[]> GetLogisticsColumnsAndBody(string reportFullPath, string[] lines = null)
public static ProcessData GetProcessData(string reportFullPath, string[] lines = null)
{
string segment;
List<string> body = new();
@ -137,24 +137,23 @@ public class ProcessDataStandardFormat
break;
}
}
return new Tuple<string, string[], string[]>(logistics.ToString(), columns.ToArray(), body.ToArray());
return new(logistics.ToString(), columns.ToArray(), body.ToArray());
}
public static JsonElement[] GetArray(Tuple<string, string[], string[]> pdsf, bool lookForNumbers = false)
public static JsonElement[] GetArray(ProcessData processData, bool lookForNumbers = false)
{
JsonElement[] results;
string logistics = pdsf.Item1;
string[] columns = pdsf.Item2;
string[] bodyLines = pdsf.Item3;
if (bodyLines.Length == 0 || !bodyLines[0].Contains('\t'))
if (processData.BodyLines.Length == 0 || !processData.BodyLines[0].Contains('\t'))
results = JsonSerializer.Deserialize<JsonElement[]>("[]");
else
{
string value;
string[] segments;
List<string> lines = new();
StringBuilder stringBuilder = new();
foreach (string bodyLine in bodyLines)
foreach (string bodyLine in processData.BodyLines)
{
_ = stringBuilder.Clear();
_ = stringBuilder.Append('{');
segments = bodyLine.Trim().Split('\t');
if (!lookForNumbers)
@ -162,7 +161,7 @@ public class ProcessDataStandardFormat
for (int c = 1; c < segments.Length; c++)
{
value = segments[c].Replace("\"", "\\\"").Replace("\\", "\\\\");
_ = stringBuilder.Append('"').Append(columns[c]).Append("\":\"").Append(value).Append("\",");
_ = stringBuilder.Append('"').Append(processData.Columns[c]).Append("\":\"").Append(value).Append("\",");
}
}
else
@ -171,49 +170,48 @@ public class ProcessDataStandardFormat
{
value = segments[c].Replace("\"", "\\\"").Replace("\\", "\\\\");
if (string.IsNullOrEmpty(value))
_ = stringBuilder.Append('"').Append(columns[c]).Append("\":").Append(value).Append("null,");
_ = stringBuilder.Append('"').Append(processData.Columns[c]).Append("\":").Append(value).Append("null,");
else if (value.All(char.IsDigit))
_ = stringBuilder.Append('"').Append(columns[c]).Append("\":").Append(value).Append(',');
_ = stringBuilder.Append('"').Append(processData.Columns[c]).Append("\":").Append(value).Append(',');
else
_ = stringBuilder.Append('"').Append(columns[c]).Append("\":\"").Append(value).Append("\",");
_ = stringBuilder.Append('"').Append(processData.Columns[c]).Append("\":\"").Append(value).Append("\",");
}
}
_ = stringBuilder.Remove(stringBuilder.Length - 1, 1);
_ = stringBuilder.AppendLine("},");
_ = stringBuilder.AppendLine("}");
lines.Add(stringBuilder.ToString());
}
_ = stringBuilder.Remove(stringBuilder.Length - 3, 3);
results = JsonSerializer.Deserialize<JsonElement[]>(string.Concat("[", stringBuilder, "]"));
string json = $"[{string.Join(",", lines)}]";
results = JsonSerializer.Deserialize<JsonElement[]>(json);
}
return results;
}
public static Dictionary<string, List<string>> GetDictionary(Tuple<string, string[], string[]> pdsf)
public static Dictionary<string, List<string>> GetDictionary(ProcessData processData)
{
Dictionary<string, List<string>> results = new();
string[] segments;
string[] columns = pdsf.Item2;
string[] bodyLines = pdsf.Item3;
foreach (string column in columns)
foreach (string column in processData.Columns)
results.Add(column, new List<string>());
foreach (string bodyLine in bodyLines)
foreach (string bodyLine in processData.BodyLines)
{
segments = bodyLine.Split('\t');
for (int c = 1; c < segments.Length; c++)
{
if (c >= columns.Length)
if (c >= processData.Columns.Length)
continue;
results[columns[c]].Add(segments[c]);
results[processData.Columns[c]].Add(segments[c]);
}
}
return results;
}
public static Tuple<string, Dictionary<Test, Dictionary<string, List<string>>>> GetTestDictionary(Tuple<string, string[], string[]> pdsf)
public static Tuple<string, Dictionary<Test, Dictionary<string, List<string>>>> GetTestDictionary(ProcessData processData)
{
Dictionary<Test, Dictionary<string, List<string>>> results = new();
List<string> collection;
string testColumn = nameof(Test);
Dictionary<string, List<string>> keyValuePairs = GetDictionary(pdsf);
Dictionary<string, List<string>> keyValuePairs = GetDictionary(processData);
if (!keyValuePairs.TryGetValue(testColumn, out collection))
throw new Exception();
int min;
@ -254,7 +252,7 @@ public class ProcessDataStandardFormat
}
}
}
return new Tuple<string, Dictionary<Test, Dictionary<string, List<string>>>>(pdsf.Item1, results);
return new Tuple<string, Dictionary<Test, Dictionary<string, List<string>>>>(processData.Logistics, results);
}
private static string GetString(SearchFor searchFor, bool addSpaces, char separator = ' ')
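One detail of the GetArray change above that is easy to miss: each row is now built into its own JSON object string and the rows are joined with commas, instead of trimming a trailing separator off a single shared StringBuilder. A self-contained sketch of that assembly, with hypothetical columns and rows and the quote-escaping from the real method omitted:

// using System.Collections.Generic; using System.Text; using System.Text.Json;
string[] columns = { "Id", "Test", "Value" };
string[] bodyLines = { "0\tA\t1", "1\tB\t2" };
List<string> lines = new();
StringBuilder stringBuilder = new();
foreach (string bodyLine in bodyLines)
{
    _ = stringBuilder.Clear();
    _ = stringBuilder.Append('{');
    string[] segments = bodyLine.Trim().Split('\t');
    for (int c = 1; c < segments.Length; c++)
        _ = stringBuilder.Append('"').Append(columns[c]).Append("\":\"").Append(segments[c]).Append("\",");
    _ = stringBuilder.Remove(stringBuilder.Length - 1, 1); // drop the object's trailing comma
    _ = stringBuilder.AppendLine("}");
    lines.Add(stringBuilder.ToString());
}
string json = $"[{string.Join(",", lines)}]"; // a JSON array with one object per body line
JsonElement[] results = JsonSerializer.Deserialize<JsonElement[]>(json);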

View File

@ -1090,60 +1090,60 @@ public class AdaptationTesting : ISMTP
return results;
}
internal static Tuple<string, string[], string[]> GetLogisticsColumnsAndBody(string fileFullName)
internal static ProcessData GetProcessData(string fileFullName)
{
Tuple<string, string[], string[]> results;
results = ProcessDataStandardFormat.GetLogisticsColumnsAndBody(fileFullName);
Assert.IsFalse(string.IsNullOrEmpty(results.Item1));
Assert.IsTrue(results.Item2.Length > 0, "Column check");
Assert.IsTrue(results.Item3.Length > 0, "Body check");
ProcessData results;
results = ProcessDataStandardFormat.GetProcessData(fileFullName);
Assert.IsFalse(string.IsNullOrEmpty(results.Logistics));
Assert.IsTrue(results.Columns.Length > 0, "Column check");
Assert.IsTrue(results.BodyLines.Length > 0, "Body check");
return results;
}
internal static Tuple<string, string[], string[]> GetLogisticsColumnsAndBody(string searchDirectory, string searchPattern)
internal static ProcessData GetProcessData(string searchDirectory, string searchPattern)
{
Tuple<string, string[], string[]> results;
ProcessData results;
if (searchPattern.Length > 3 && !searchPattern.Contains('*') && File.Exists(searchPattern))
results = GetLogisticsColumnsAndBody(searchPattern);
results = GetProcessData(searchPattern);
else
{
string[] pdsfFiles;
pdsfFiles = Directory.GetFiles(searchDirectory, searchPattern, SearchOption.TopDirectoryOnly);
if (pdsfFiles.Length == 0)
_ = Process.Start("explorer.exe", searchDirectory);
Assert.IsTrue(pdsfFiles.Length != 0, "GetFiles check");
results = GetLogisticsColumnsAndBody(pdsfFiles[0]);
Assert.AreNotEqual(0, pdsfFiles.Length, "GetFiles check");
results = GetProcessData(pdsfFiles[0]);
}
Assert.IsFalse(string.IsNullOrEmpty(results.Item1));
Assert.IsTrue(results.Item2.Length > 0, "Column check");
Assert.IsTrue(results.Item3.Length > 0, "Body check");
Assert.IsFalse(string.IsNullOrEmpty(results.Logistics));
Assert.IsTrue(results.Columns.Length > 0, "Column check");
Assert.IsTrue(results.BodyLines.Length > 0, "Body check");
return results;
}
internal static Tuple<string, string[], string[]> GetLogisticsColumnsAndBody(IFileRead fileRead, Logistics logistics, Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResult, Tuple<string, string[], string[]> pdsf)
internal static ProcessData GetProcessData(IFileRead fileRead, Logistics logistics, Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResult, ProcessData processData)
{
Tuple<string, string[], string[]> results;
string text = ProcessDataStandardFormat.GetPDSFText(fileRead, logistics, extractResult.Item3, logisticsText: pdsf.Item1);
ProcessData results;
string text = ProcessDataStandardFormat.GetPDSFText(fileRead, logistics, extractResult.Item3, logisticsText: processData.Logistics);
string[] lines = text.Split(new string[] { System.Environment.NewLine }, StringSplitOptions.RemoveEmptyEntries);
results = ProcessDataStandardFormat.GetLogisticsColumnsAndBody(logistics.ReportFullPath, lines);
Assert.IsFalse(string.IsNullOrEmpty(results.Item1));
Assert.IsTrue(results.Item2.Length > 0, "Column check");
Assert.IsTrue(results.Item3.Length > 0, "Body check");
results = ProcessDataStandardFormat.GetProcessData(logistics.ReportFullPath, lines);
Assert.IsFalse(string.IsNullOrEmpty(results.Logistics));
Assert.IsTrue(results.Columns.Length > 0, "Column check");
Assert.IsTrue(results.BodyLines.Length > 0, "Body check");
return results;
}
internal static string[] GetItem2(Tuple<string, string[], string[]> pdsf, Tuple<string, string[], string[]> pdsfNew)
internal static string[] GetItem2(ProcessData processData, ProcessData processDataNew)
{
JsonSerializerOptions jsonSerializerOptions = new() { WriteIndented = true };
string jsonOld = JsonSerializer.Serialize(pdsf.Item2, pdsf.Item2.GetType(), jsonSerializerOptions);
string jsonNew = JsonSerializer.Serialize(pdsfNew.Item2, pdsfNew.Item2.GetType(), jsonSerializerOptions);
string jsonOld = JsonSerializer.Serialize(processData.Columns, processData.Columns.GetType(), jsonSerializerOptions);
string jsonNew = JsonSerializer.Serialize(processDataNew.Columns, processDataNew.Columns.GetType(), jsonSerializerOptions);
return new string[] { jsonOld, jsonNew };
}
internal static string[] GetItem3(Tuple<string, string[], string[]> pdsf, Tuple<string, string[], string[]> pdsfNew)
internal static string[] GetItem3(ProcessData processData, ProcessData processDataNew)
{
string joinOld = string.Join(System.Environment.NewLine, from l in pdsf.Item3 select string.Join('\t', from t in l.Split('\t') where !t.Contains(@"\\") select t));
string joinNew = string.Join(System.Environment.NewLine, from l in pdsfNew.Item3 select string.Join('\t', from t in l.Split('\t') where !t.Contains(@"\\") select t));
string joinOld = string.Join(System.Environment.NewLine, from l in processData.BodyLines select string.Join('\t', from t in l.Split('\t') where !t.Contains(@"\\") select t));
string joinNew = string.Join(System.Environment.NewLine, from l in processDataNew.BodyLines select string.Join('\t', from t in l.Split('\t') where !t.Contains(@"\\") select t));
return new string[] { joinOld, joinNew };
}
@ -1219,13 +1219,13 @@ public class AdaptationTesting : ISMTP
}
}
internal static void CompareSave(string textFileDirectory, Tuple<string, string[], string[]> pdsf, Tuple<string, string[], string[]> pdsfNew)
internal static void CompareSave(string textFileDirectory, ProcessData processData, ProcessData processDataNew)
{
if (pdsf.Item1 != pdsfNew.Item1)
if (processData.Logistics != processDataNew.Logistics)
{
_ = Process.Start("explorer.exe", textFileDirectory);
File.WriteAllText(Path.Combine(textFileDirectory, "0.dat"), pdsf.Item1);
File.WriteAllText(Path.Combine(textFileDirectory, "1.dat"), pdsfNew.Item1);
File.WriteAllText(Path.Combine(textFileDirectory, "0.dat"), processData.Logistics);
File.WriteAllText(Path.Combine(textFileDirectory, "1.dat"), processDataNew.Logistics);
}
}
@ -1252,20 +1252,20 @@ public class AdaptationTesting : ISMTP
Assert.IsNotNull(extractResult.Item3);
Assert.IsNotNull(extractResult.Item4);
if (!validatePDSF)
_ = GetLogisticsColumnsAndBody(fileRead, logistics, extractResult, new(string.Empty, Array.Empty<string>(), Array.Empty<string>()));
_ = GetProcessData(fileRead, logistics, extractResult, new(string.Empty, Array.Empty<string>(), Array.Empty<string>()));
else
{
Assert.IsTrue(extractResult.Item3.Length > 0, "extractResult Array Length check!");
Tuple<string, string[], string[]> pdsf = GetLogisticsColumnsAndBody(variables[2], variables[4]);
Tuple<string, string[], string[]> pdsfNew = GetLogisticsColumnsAndBody(fileRead, logistics, extractResult, pdsf);
CompareSave(variables[5], pdsf, pdsfNew);
Assert.IsTrue(pdsf.Item1 == pdsfNew.Item1, "Item1 check!");
string[] json = GetItem2(pdsf, pdsfNew);
ProcessData processData = GetProcessData(variables[2], variables[4]);
ProcessData processDataNew = GetProcessData(fileRead, logistics, extractResult, processData);
CompareSave(variables[5], processData, processDataNew);
Assert.AreEqual(processDataNew.Logistics, processData.Logistics, "Item1 check!");
string[] json = GetItem2(processData, processDataNew);
CompareSaveJSON(variables[5], json);
Assert.IsTrue(json[0] == json[1], "Item2 check!");
string[] join = GetItem3(pdsf, pdsfNew);
Assert.AreEqual(json[1], json[0], "Item2 check!");
string[] join = GetItem3(processData, processDataNew);
CompareSaveTSV(variables[5], join);
Assert.IsTrue(join[0] == join[1], "Item3 (Join) check!");
Assert.AreEqual(join[1], join[0], "Item3 (Join) check!");
}
UpdatePassDirectory(variables[2]);
}

View File

@ -51,7 +51,7 @@ public class ALIGNMENT : LoggingUnitTesting, IDisposable
public void TestDateTime()
{
DateTime dateTime = DateTime.Now;
Assert.IsTrue(dateTime.ToString("M/d/yyyy h:mm:ss tt") == dateTime.ToString());
Assert.AreEqual(dateTime.ToString(), dateTime.ToString("M/d/yyyy h:mm:ss tt"));
}
#if DEBUG

View File

@ -51,7 +51,7 @@ public class BACKLOG : LoggingUnitTesting, IDisposable
public void TestDateTime()
{
DateTime dateTime = DateTime.Now;
Assert.IsTrue(dateTime.ToString("M/d/yyyy h:mm:ss tt") == dateTime.ToString());
Assert.AreEqual(dateTime.ToString(), dateTime.ToString("M/d/yyyy h:mm:ss tt"));
}
#if DEBUG

View File

@ -51,7 +51,7 @@ public class MESAFIBACKLOG : LoggingUnitTesting, IDisposable
public void TestDateTime()
{
DateTime dateTime = DateTime.Now;
Assert.IsTrue(dateTime.ToString("M/d/yyyy h:mm:ss tt") == dateTime.ToString());
Assert.AreEqual(dateTime.ToString(), dateTime.ToString("M/d/yyyy h:mm:ss tt"));
}
#if DEBUG

View File

@ -186,6 +186,7 @@
<Compile Include="Adaptation\Shared\Metrology\WS.cs" />
<Compile Include="Adaptation\Shared\Metrology\WS.Results.cs" />
<Compile Include="Adaptation\Shared\ParameterType.cs" />
<Compile Include="Adaptation\Shared\ProcessData.cs" />
<Compile Include="Adaptation\Shared\ProcessDataStandardFormat.cs" />
<Compile Include="Adaptation\Shared\Properties\IDescription.cs" />
<Compile Include="Adaptation\Shared\Properties\IFileRead.cs" />