json in process-data-standard-format
when mes-entity-matches-process save to MET08THFTIRQS408M EDA Logic for GV
parent 13bd899101
commit 374e375c5e
@@ -22,17 +22,21 @@ public class FileRead : Shared.FileRead, IFileRead
     internal class PreWith
     {

-        internal string MatchingFile { get; private set; }
-        internal string CheckFile { get; private set; }
         internal string ErrFile { get; private set; }
+        internal string CheckFile { get; private set; }
+        internal string MatchingFile { get; private set; }
         internal string CheckDirectory { get; private set; }
         internal string NoWaitDirectory { get; private set; }

-        internal PreWith(string matchingFile, string checkFile, string errFile, string checkDirectory, string noWaitDirectory)
+        internal PreWith(string checkDirectory,
+                         string checkFile,
+                         string errFile,
+                         string matchingFile,
+                         string noWaitDirectory)
         {
-            MatchingFile = matchingFile;
-            CheckFile = checkFile;
             ErrFile = errFile;
+            CheckFile = checkFile;
+            MatchingFile = matchingFile;
             CheckDirectory = checkDirectory;
             NoWaitDirectory = noWaitDirectory;
         }
@@ -221,7 +225,11 @@ public class FileRead : Shared.FileRead, IFileRead
             if (!Directory.Exists(checkDirectory))
                 _ = Directory.CreateDirectory(checkDirectory);
             noWaitDirectory = Path.Combine(checkDirectory, "NoWaitDirectory");
-            preWith = new(pre.MatchingFile, pre.CheckFile, errFile, checkDirectory, noWaitDirectory);
+            preWith = new(checkDirectory: checkDirectory,
+                          checkFile: pre.CheckFile,
+                          errFile: errFile,
+                          matchingFile: pre.MatchingFile,
+                          noWaitDirectory: noWaitDirectory);
             results.Add(preWith);
         }
         return results.AsReadOnly();
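A minimal, self-contained sketch (hypothetical names, not part of this commit) of the design choice behind the reordered constructor plus the named arguments used above: arguments bind by parameter name, so the call site cannot silently swap values if the positional order changes again.

    internal sealed class PreWithSketch
    {
        internal string CheckFile { get; }
        internal string MatchingFile { get; }

        internal PreWithSketch(string checkFile, string matchingFile)
        {
            CheckFile = checkFile;
            MatchingFile = matchingFile;
        }

        internal static PreWithSketch Example() =>
            // Named arguments still bind to the right properties even if the
            // positional order of checkFile/matchingFile were swapped later.
            new(matchingFile: "A2_sample.pdsf", checkFile: "A_sample.pdsf");
    }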
@@ -260,7 +268,7 @@ public class FileRead : Shared.FileRead, IFileRead
         }
     }

-    private static ReadOnlyCollection<Pre> GetPreCollection(int numberLength, string parentDirectory, ReadOnlyCollection<string> matchingFiles)
+    private static ReadOnlyCollection<Pre> GetPreCollection(int numberLength, string parentDirectory, ReadOnlyCollection<string> matchingFiles, bool mesEntityMatchesProcess)
     {
         List<Pre> results = new();
         Pre pre;
@@ -269,13 +277,15 @@ public class FileRead : Shared.FileRead, IFileRead
         foreach (string matchingFile in matchingFiles)
         {
             checkFile = $"{matchingFile[0]}{matchingFile.Substring(parentDirectoryLength + numberLength + 1)}";
+            if (mesEntityMatchesProcess)
+                checkFile = checkFile.Replace("MET08THFTIRSTRATUS", "MET08THFTIRQS408M");
             pre = new(matchingFile, checkFile);
             results.Add(pre);
         }
         return results.AsReadOnly();
     }

-    private void MoveCollection(DateTime dateTime, ProcessDataStandardFormat? processDataStandardFormat, ReadOnlyCollection<PreWith> preWithCollection)
+    private void MoveCollection(DateTime dateTime, ProcessDataStandardFormat processDataStandardFormat, ReadOnlyCollection<PreWith> preWithCollection)
     {
         ReadOnlyCollection<Post> postCollection = GetPostCollection(dateTime, processDataStandardFormat, preWithCollection);
         if (postCollection.Count != 0)
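A short sketch (hypothetical path, not from this commit) of what the added branch does to checkFile when the MES entity matches the process:

    // Hypothetical input; the real value is built from matchingFile above.
    string checkFile = @"\\share\MET08THFTIRSTRATUS\Source\checkFile.pdsf";
    bool mesEntityMatchesProcess = true;
    if (mesEntityMatchesProcess)
        checkFile = checkFile.Replace("MET08THFTIRSTRATUS", "MET08THFTIRQS408M");
    // checkFile now points under MET08THFTIRQS408M, so the run is saved to that
    // cell instance instead of MET08THFTIRSTRATUS.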
@@ -294,26 +304,19 @@ public class FileRead : Shared.FileRead, IFileRead
         }
     }

-    private ReadOnlyCollection<Post> GetPostCollection(DateTime dateTime, ProcessDataStandardFormat? processDataStandardFormat, ReadOnlyCollection<PreWith> preWithCollection)
+    private ReadOnlyCollection<Post> GetPostCollection(DateTime dateTime, ProcessDataStandardFormat processDataStandardFormat, ReadOnlyCollection<PreWith> preWithCollection)
     {
         List<Post> results = new();
         Post post;
         long preWait;
-        PreWith preWith;
-        for (int p = 0; p < preWithCollection.Count; p++)
+        foreach (PreWith preWith in preWithCollection)
         {
             if (!_IsEAFHosted)
                 continue;
-            preWith = preWithCollection[p];
-            if (processDataStandardFormat is null)
-                File.Move(preWith.MatchingFile, preWith.CheckFile);
-            else
-            {
             if (!_StaticRuns.TryGetValue(_Logistics.Sequence, out List<Shared.Metrology.WS.Results>? wsResults))
                 wsResults = null;
             ProcessDataStandardFormat.Write(preWith.CheckFile, processDataStandardFormat, wsResults);
             File.Delete(preWith.MatchingFile);
-            }
             if (Directory.Exists(preWith.NoWaitDirectory))
             {
                 post = new(preWith.CheckFile, preWith.ErrFile);
@@ -347,17 +350,10 @@ public class FileRead : Shared.FileRead, IFileRead
     private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
     {
         Tuple<string, Test[], JsonElement[], List<FileInfo>> results = new(string.Empty, Array.Empty<Test>(), Array.Empty<JsonElement>(), new List<FileInfo>());
-        ProcessDataStandardFormat? processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath, _ProcessDataStandardFormatMapping);
-        if (processDataStandardFormat is not null)
+        ProcessDataStandardFormat processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath, _ProcessDataStandardFormatMapping);
         _Logistics = new Logistics(reportFullPath, processDataStandardFormat);
-        else
-        {
-            processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath);
-            _Logistics = new Logistics(reportFullPath, processDataStandardFormat);
-            processDataStandardFormat = null;
-        }
-        if (!_IsEAFHosted && processDataStandardFormat is not null)
-            ProcessDataStandardFormat.Write(".pdsf", processDataStandardFormat, wsResults: null);
+        if (!_IsEAFHosted)
+            ProcessDataStandardFormat.Write("../../.pdsf", processDataStandardFormat, wsResults: null);
         SetFileParameterLotIDToLogisticsMID();
         int numberLength = 2;
         long ticks = dateTime.Ticks;
@@ -366,10 +362,16 @@ public class FileRead : Shared.FileRead, IFileRead
         ReadOnlyCollection<string> matchingFiles = GetMatchingFiles(ticks, reportFullPath, searchDirectories);
         if (matchingFiles.Count != searchDirectories.Count)
             throw new Exception($"Didn't find all files after {_BreakAfterSeconds} second(s)!");
+        if (_IsEAFHosted)
+        {
             try
             { CreatePointerFile(numberLength, parentParentDirectory, matchingFiles); }
             catch (Exception) { }
-        ReadOnlyCollection<Pre> preCollection = GetPreCollection(numberLength, parentParentDirectory, matchingFiles);
+        }
+        JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(processDataStandardFormat);
+        List<Stratus.Description> descriptions = Stratus.ProcessData.GetDescriptions(jsonElements);
+        bool mesEntityMatchesProcess = descriptions.Count > 0 && descriptions[0].MesEntity == descriptions[0].Reactor;
+        ReadOnlyCollection<Pre> preCollection = GetPreCollection(numberLength, parentParentDirectory, matchingFiles, mesEntityMatchesProcess);
         ReadOnlyCollection<PreWith> preWithCollection = GetPreWithCollection(preCollection);
         MoveCollection(dateTime, processDataStandardFormat, preWithCollection);
         return results;
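A condensed sketch of the decision added above; the Description type here is a simplified stand-in (only the two fields the check reads are shown):

    using System.Collections.Generic;

    // Simplified stand-in for Stratus.Description; the real type has many more fields.
    internal sealed record DescriptionSketch(string MesEntity, string Reactor);

    internal static class MesEntityCheckSketch
    {
        // Mirrors the logic above: true only when at least one description exists
        // and its MesEntity equals its Reactor.
        internal static bool MesEntityMatchesProcess(IReadOnlyList<DescriptionSketch> descriptions) =>
            descriptions.Count > 0 && descriptions[0].MesEntity == descriptions[0].Reactor;
    }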
@@ -642,7 +642,8 @@ public partial class ProcessData : IProcessData
             || string.IsNullOrEmpty(details[0].Thickness)
             || string.IsNullOrEmpty(details[1].Thickness)
             || !decimal.TryParse(details[0].Thickness, out decimal thick01)
-            || !decimal.TryParse(details[1].Thickness, out decimal thick25))
+            || !decimal.TryParse(details[1].Thickness, out decimal thick25)
+            || (thick01 == 0 && thick25 == 0))
         {
             ThicknessSlotOne = string.Empty;
             ThicknessSlotTwentyFive = string.Empty;
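A small sketch (hypothetical helper, not from this commit) of the strengthened guard: in addition to unparsable readings, a pair of readings that both parse to zero is now also rejected, after which the real code clears ThicknessSlotOne and ThicknessSlotTwentyFive.

    internal static class ThicknessGuardSketch
    {
        // Mirrors the condition above: discard empty or unparsable readings and,
        // newly, a pair where both parsed thicknesses are zero.
        internal static bool ShouldDiscard(string thicknessSlot01, string thicknessSlot25) =>
            string.IsNullOrEmpty(thicknessSlot01)
            || string.IsNullOrEmpty(thicknessSlot25)
            || !decimal.TryParse(thicknessSlot01, out decimal thick01)
            || !decimal.TryParse(thicknessSlot25, out decimal thick25)
            || (thick01 == 0 && thick25 == 0);
    }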
@@ -26,13 +26,13 @@ internal class Run
     public ReadOnlyCollection<Wafer> Wafers { get; }
     public Grade Grade { get; }

-    private static void WriteJson(Logistics logistics, List<FileInfo> fileInfoCollection, int r, Run result)
+    private static void WriteJson(Logistics logistics, List<FileInfo> _, int r, Run result)
     {
         FileInfo fileInfo = new($"{logistics.ReportFullPath}-{r}.run.json");
         string json = JsonSerializer.Serialize(result, RunSourceGenerationContext.Default.Run);
         File.WriteAllText(fileInfo.FullName, json);
         File.SetLastWriteTime(fileInfo.FullName, logistics.DateTimeFromSequence);
-        fileInfoCollection.Add(fileInfo);
+        // Can't add until old parse is removed // fileInfoCollection.Add(fileInfo);
     }

     private static ReadOnlyCollection<string> GetLines(Logistics logistics, JsonElement[]? jsonElements)
@@ -61,8 +61,8 @@ internal class ProcessDataStandardFormat
    internal static string Archive(bool addSpaces = true, char separator = ' ') =>
        GetString(SearchFor.Archive, addSpaces, separator);

-    internal static ProcessDataStandardFormat GetEmpty() =>
-        new(new(Array.Empty<string>()), new(Array.Empty<string>()), new(Array.Empty<string>()), new(Array.Empty<string>()), null, new(Array.Empty<string>()), null);
+    internal static ProcessDataStandardFormat GetEmpty(Logistics logistics) =>
+        new(new(Array.Empty<string>()), new(Array.Empty<string>()), new(Array.Empty<string>()), new(Array.Empty<string>()), null, new(logistics.Logistics1), null);

    internal static List<string> PDSFToFixedWidth(string reportFullPath)
    {
@@ -214,25 +214,26 @@ internal class ProcessDataStandardFormat
        return results.AsReadOnly();
    }

-    internal static ProcessDataStandardFormat? GetProcessDataStandardFormat(string reportFullPath, ProcessDataStandardFormatMapping pdsfMapping)
+    internal static ProcessDataStandardFormat GetProcessDataStandardFormat(string reportFullPath, ProcessDataStandardFormatMapping pdsfMapping)
    {
-        ProcessDataStandardFormat? result;
+        ProcessDataStandardFormat result;
        const int columnsLine = 6;
        FileInfo fileInfo = new(reportFullPath);
-        ProcessDataStandardFormat processDataStandardFormat = GetProcessDataStandardFormat(fileInfo.LastWriteTime, pdsfMapping.NewColumnNames.Count, columnsLine, fileInfo.FullName, lines: null);
-        JsonElement[]? jsonElements = GetArray(pdsfMapping.NewColumnNames.Count, processDataStandardFormat, lookForNumbers: false);
-        if (jsonElements is null || jsonElements.Length == 0 || pdsfMapping.OldColumnNames.Count != pdsfMapping.ColumnIndices.Count)
-            result = null;
+        ProcessDataStandardFormat processDataStandardFormat = GetProcessDataStandardFormat(fileInfo.LastWriteTime, columnsLine, fileInfo.FullName, lines: null);
+        JsonElement[]? jsonElements = pdsfMapping.OldColumnNames.Count != pdsfMapping.ColumnIndices.Count ? null : GetFullArray(processDataStandardFormat);
+        JsonProperty[]? jsonProperties = jsonElements is null || jsonElements.Length == 0 ? null : jsonElements[0].EnumerateObject().ToArray();
+        if (jsonElements is null || jsonProperties is null || jsonProperties.Length != pdsfMapping.NewColumnNames.Count)
+            result = processDataStandardFormat;
        else
        {
            result = GetProcessDataStandardFormat(pdsfMapping, jsonElements, processDataStandardFormat);
            if (result.Sequence is null || result.Columns.Count == 0 || result.Body.Count == 0 || result.Logistics.Count == 0)
-                result = null;
+                result = processDataStandardFormat;
        }
        return result;
    }

-    private static ProcessDataStandardFormat GetProcessDataStandardFormat(DateTime lastWriteTime, int expectedColumns, int columnsLine, string path, string[]? lines)
+    private static ProcessDataStandardFormat GetProcessDataStandardFormat(DateTime lastWriteTime, int columnsLine, string path, string[]? lines)
    {
        ProcessDataStandardFormat result;
        long sequence;
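The method no longer returns null: when the column mapping cannot be applied (old and new column counts disagree, or the parsed rows do not line up with the expected columns), the unmapped ProcessDataStandardFormat is returned as-is. A minimal sketch (hypothetical names) of the same fall-back shape:

    using System;

    internal static class MappingFallbackSketch
    {
        // Generic shape of the change above: if the mapping step cannot produce a
        // usable result, fall back to the raw input instead of returning null,
        // so callers such as GetExtractResult no longer need a null branch.
        internal static T MapOrPassThrough<T>(T input, Func<T, T?> tryMap) where T : class =>
            tryMap(input) ?? input;
    }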
@@ -248,8 +249,6 @@ internal class ProcessDataStandardFormat
        else
        {
            segments = lines[columnsLine].Split('\t');
-            if (segments.Length != expectedColumns)
-                segments = Array.Empty<string>();
            for (int i = 0; i < columnsLine; i++)
                header.Add(lines[i]);
        }
@@ -285,7 +284,7 @@ internal class ProcessDataStandardFormat
        return result;
    }

-    private static JsonElement[]? GetArray(int expectedColumns, ProcessDataStandardFormat processDataStandardFormat, bool lookForNumbers)
+    private static JsonElement[]? GetFullArray(ProcessDataStandardFormat processDataStandardFormat)
    {
        JsonElement[]? results;
        if (processDataStandardFormat.Body.Count == 0 || !processDataStandardFormat.Body[0].Contains('\t'))
@@ -293,37 +292,19 @@ internal class ProcessDataStandardFormat
        else
        {
            string value;
-            string[] segments;
+            List<string> segments;
            List<string> lines = new();
            StringBuilder stringBuilder = new();
            foreach (string bodyLine in processDataStandardFormat.Body)
            {
                _ = stringBuilder.Clear();
                _ = stringBuilder.Append('{');
-                segments = bodyLine.Split('\t');
-                if (segments.Length != expectedColumns)
-                    continue;
-                if (!lookForNumbers)
-                {
-                    for (int c = 0; c < segments.Length; c++)
+                segments = bodyLine.Split('\t').ToList();
+                for (int c = 0; c < segments.Count; c++)
                {
                    value = segments[c].Replace("\"", "\\\"").Replace("\\", "\\\\");
                    _ = stringBuilder.Append('"').Append(processDataStandardFormat.Columns[c]).Append("\":\"").Append(value).Append("\",");
                }
-                }
-                else
-                {
-                    for (int c = 0; c < segments.Length; c++)
-                    {
-                        value = segments[c].Replace("\"", "\\\"").Replace("\\", "\\\\");
-                        if (string.IsNullOrEmpty(value))
-                            _ = stringBuilder.Append('"').Append(processDataStandardFormat.Columns[c]).Append("\":").Append(value).Append("null,");
-                        else if (value.All(char.IsDigit))
-                            _ = stringBuilder.Append('"').Append(processDataStandardFormat.Columns[c]).Append("\":").Append(value).Append(',');
-                        else
-                            _ = stringBuilder.Append('"').Append(processDataStandardFormat.Columns[c]).Append("\":\"").Append(value).Append("\",");
-                    }
-                }
                _ = stringBuilder.Remove(stringBuilder.Length - 1, 1);
                _ = stringBuilder.AppendLine("}");
                lines.Add(stringBuilder.ToString());
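For orientation, a tiny sketch (hypothetical columns and values, escaping omitted) of the per-row JSON the rewritten loop builds: every cell is emitted as a quoted string keyed by its column name, with no numeric detection any more.

    using System.Text;

    internal static class RowJsonSketch
    {
        // Builds one JSON object for a tab-separated body line, the same way the
        // loop above does; the real code also escapes quotes and backslashes.
        internal static string ToRowJson(string[] columns, string bodyLine)
        {
            StringBuilder stringBuilder = new();
            _ = stringBuilder.Append('{');
            string[] segments = bodyLine.Split('\t');
            for (int c = 0; c < segments.Length; c++)
                _ = stringBuilder.Append('"').Append(columns[c]).Append("\":\"").Append(segments[c]).Append("\",");
            _ = stringBuilder.Remove(stringBuilder.Length - 1, 1);
            return stringBuilder.Append('}').ToString();
        }
        // ToRowJson(new[] { "Reactor", "Thickness" }, "R73\t1.234")
        //   => {"Reactor":"R73","Thickness":"1.234"}
    }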
@@ -379,6 +360,63 @@ internal class ProcessDataStandardFormat
        return result;
    }

+    private static string GetJson(ProcessDataStandardFormat processDataStandardFormat)
+    {
+        if (processDataStandardFormat.InputPDSF is null)
+            throw new NullReferenceException(nameof(processDataStandardFormat.InputPDSF));
+#pragma warning disable CA1845, IDE0057
+        string result;
+        string line;
+        string value;
+        string[] segments;
+        List<string> lines = new();
+        for (int i = 0; i < processDataStandardFormat.InputPDSF.Body.Count; i++)
+        {
+            line = "{";
+            segments = processDataStandardFormat.InputPDSF.Body[i].Trim().Split('\t');
+            if (segments.Length != processDataStandardFormat.InputPDSF.Columns.Count)
+                break;
+            for (int c = 0; c < segments.Length; c++)
+            {
+                value = segments[c].Replace("\"", "\\\"").Replace("\\", "\\\\");
+                line += string.Concat('"', processDataStandardFormat.InputPDSF.Columns[c].Trim('"'), '"', ':', '"', value, '"', ',');
+            }
+            line = string.Concat(line.Substring(0, line.Length - 1), '}');
+            lines.Add(line);
+        }
+        result = string.Concat(
+            '{',
+            Environment.NewLine,
+            '"',
+            "Count",
+            '"',
+            ": ",
+            processDataStandardFormat.Body.Count,
+            ',',
+            Environment.NewLine,
+            '"',
+            "Records",
+            '"',
+            ": ",
+            Environment.NewLine,
+            '[',
+            Environment.NewLine,
+            string.Join($",{Environment.NewLine}", lines),
+            Environment.NewLine,
+            ']',
+            ',',
+            Environment.NewLine,
+            '"',
+            "Sequence",
+            '"',
+            ": ",
+            processDataStandardFormat.Sequence,
+            Environment.NewLine,
+            '}');
+        return result;
+#pragma warning restore CA1845, IDE0057
+    }
+
    internal static void Write(string path, ProcessDataStandardFormat processDataStandardFormat, List<Metrology.WS.Results>? wsResults)
    {
        List<string> results = new();
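Based on the string building above, the JSON block that GetJson produces has roughly this shape (values here are hypothetical; Count comes from the mapped body while Records are built from InputPDSF.Body):

    {
    "Count": 2,
    "Records": 
    [
    {"Reactor":"R73","Thickness":"1.234"},
    {"Reactor":"R73","Thickness":"1.199"}
    ],
    "Sequence": 638000000000000000
    }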
@@ -418,12 +456,12 @@ internal class ProcessDataStandardFormat
                results.Add($"{segments[0]}\t{segments[1][0]}_HeaderId={wsResults[0].HeaderId};{segments[1][0]}_SubgroupId={wsResults[0].SubgroupId};{segments[1]}");
            }
        }
-        results.Add("EOF");
+        results.Add("END_HEADER");
        if (processDataStandardFormat.InputPDSF is not null)
        {
+            results.Add(string.Empty);
            List<char> hyphens = new();
            results.AddRange(processDataStandardFormat.InputPDSF.Header.Select(l => l.Replace('\t', '|')));
-            results.Add(string.Empty);
            results.Add($"|{string.Join("|", processDataStandardFormat.InputPDSF.Columns)}|");
            for (int i = 0; i < processDataStandardFormat.InputPDSF.Columns.Count; i++)
                hyphens.Add('-');
@@ -431,6 +469,11 @@ internal class ProcessDataStandardFormat
            results.AddRange(processDataStandardFormat.InputPDSF.Body.Select(l => l.Replace('\t', '|')));
            results.Add(string.Empty);
            results.AddRange(processDataStandardFormat.InputPDSF.Footer.Select(l => l.Replace('\t', '|')));
+            results.Add(string.Empty);
+            results.Add("EOF");
+            results.Add(string.Empty);
+            string json = GetJson(processDataStandardFormat);
+            results.Add(json);
        }
        File.WriteAllText(path, string.Join(Environment.NewLine, results));
    }
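Putting the Write changes together, a file written for a run that carries InputPDSF now ends with roughly this layout (a sketch assembled from the code above, not a captured file):

    <mapped PDSF header/body and HeaderId/SubgroupId lines>
    END_HEADER

    <InputPDSF header, tabs replaced with |>
    |Column1|Column2|...|
    |-|-|...|
    <InputPDSF body rows, tabs replaced with |>

    <InputPDSF footer>

    EOF

    <JSON block from GetJson>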
@@ -30,6 +30,20 @@ public class MET08THFTIRSTRATUS
    [TestMethod]
    public void Production__v2_59_0__MET08THFTIRSTRATUS__MoveMatchingFiles() => _MET08THFTIRSTRATUS.Production__v2_59_0__MET08THFTIRSTRATUS__MoveMatchingFiles();

+#if DEBUG
+    [Ignore]
+#endif
+    [TestMethod]
+    public void Production__v2_59_0__MET08THFTIRSTRATUS__MoveMatchingFiles638014829236768048__Normal()
+    {
+        string check = "*.pdsf";
+        _MET08THFTIRSTRATUS.Production__v2_59_0__MET08THFTIRSTRATUS__MoveMatchingFiles();
+        MethodBase method = new StackFrame().GetMethod();
+        string[] variables = _MET08THFTIRSTRATUS.AdaptationTesting.GetVariables(method, check);
+        _ = _MET08THFTIRSTRATUS.AdaptationTesting.Get(method, variables[2], variables[3], false).ReExtract();
+        AdaptationTesting.UpdatePassDirectory(variables[2]);
+    }
+
#if DEBUG
    [Ignore]
#endif
|
@ -1182,7 +1182,7 @@ public class AdaptationTesting : ISMTP
|
|||||||
Assert.IsNotNull(extractResult.Item3);
|
Assert.IsNotNull(extractResult.Item3);
|
||||||
Assert.IsNotNull(extractResult.Item4);
|
Assert.IsNotNull(extractResult.Item4);
|
||||||
if (!validatePDSF)
|
if (!validatePDSF)
|
||||||
_ = GetProcessDataStandardFormat(fileRead, logistics, extractResult, ProcessDataStandardFormat.GetEmpty());
|
_ = GetProcessDataStandardFormat(fileRead, logistics, extractResult, ProcessDataStandardFormat.GetEmpty(logistics));
|
||||||
else
|
else
|
||||||
{
|
{
|
||||||
Assert.IsTrue(extractResult.Item3.Length > 0, "extractResult Array Length check!");
|
Assert.IsTrue(extractResult.Item3.Length > 0, "extractResult Array Length check!");
|
||||||
|
@@ -1,11 +1,11 @@
"use strict";

-// getValue($('dcp.BIORAD4/csv/Count', ''), $('dcp.BIORAD4/csv/Index', ''), $('dcp.BIORAD4/csv/Slot', ''), $('dcp.BIORAD4/csv/Mean', ''));
+// getValue($('gv.thicknessMeanSlotOne', ''), $('dcp.BIORAD5/csv/Count', ''), $('dcp.BIORAD5/csv/Index', ''), $('dcp.BIORAD5/csv/Slot', ''), $('dcp.BIORAD5/csv/Mean', ''));

-function getValue5(count, index, slot, mean) {
+function getValue5(thicknessMeanSlotOne, count, index, slot, mean) {
    let result = null;
-    if (count !== 2 || index !== 0 || slot !== "1" || mean == '')
-        result = '';
+    if (count !== 2 || index !== 0 || slot !== '1' || mean == '')
+        result = thicknessMeanSlotOne;
    else
        result = mean;
    return result;
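Note on the GV script change here and in getValue31 below: each function now receives the previously stored gv.thicknessMeanSlotOne / gv.thicknessMeanSlotTwentyFive value as its first argument and returns that value, instead of an empty string, when the incoming BIORAD5 row is not the slot it is watching for; the stored thickness is only overwritten when a matching slot-1 (or slot-25) mean arrives.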
@@ -26,12 +26,12 @@ values14.forEach(element => {
    console.info('Match');
});

-// getValue($('dcp.BIORAD4/csv/Count', ''), $('dcp.BIORAD4/csv/Index', ''), $('dcp.BIORAD4/csv/Slot', ''), $('dcp.BIORAD4/csv/Mean', ''));
+// getValue($('gv.thicknessMeanSlotTwentyFive', ''), $('dcp.BIORAD5/csv/Count', ''), $('dcp.BIORAD5/csv/Index', ''), $('dcp.BIORAD5/csv/Slot', ''), $('dcp.BIORAD5/csv/Mean', ''));

-function getValue31(count, index, slot, mean) {
+function getValue31(thicknessMeanSlotTwentyFive, count, index, slot, mean) {
    let result = null;
-    if (count !== 2 || index !== 1 || slot !== "25" || mean == '')
-        result = '';
+    if (count !== 2 || index !== 1 || slot !== '25' || mean == '')
+        result = thicknessMeanSlotTwentyFive;
    else
        result = mean;
    return result;
@@ -52,7 +52,7 @@ values40.forEach(element => {
    console.info('Match');
});

-// getValue($('dcp.BIORAD4/csv/Count', ''), $('dcp.BIORAD4/csv/Index', ''), $('dcp.BIORAD4/csv/Slot', ''), $('gv.thicknessMeanSlotOne', ''), $('gv.thicknessMeanSlotTwentyFive', ''));
+// getValue($('dcp.BIORAD5/csv/Count', ''), $('dcp.BIORAD5/csv/Index', ''), $('dcp.BIORAD5/csv/Slot', ''), $('gv.thicknessMeanSlotOne', ''), $('gv.thicknessMeanSlotTwentyFive', ''));

function getValue57(count, index, slot, thicknessMeanSlotOne, thicknessMeanSlotTwentyFive) {
    let result = null;
@@ -78,7 +78,7 @@ values66.forEach(element => {
    console.info('Match');
});

-// getValue($('dcp.BIORAD4/csv/Count', ''), $('dcp.BIORAD4/csv/Index', ''), $('dcp.BIORAD4/csv/Slot', ''), $('gv.thicknessMeanSlotOne', ''), $('gv.thicknessMeanSlotTwentyFive', ''));
+// getValue($('dcp.BIORAD5/csv/Count', ''), $('dcp.BIORAD5/csv/Index', ''), $('dcp.BIORAD5/csv/Slot', ''), $('gv.thicknessMeanSlotOne', ''), $('gv.thicknessMeanSlotTwentyFive', ''));

function getValue(count, index, slot, thicknessMeanSlotOne, thicknessMeanSlotTwentyFive) {
    let result = null;
|
Loading…
x
Reference in New Issue
Block a user