UniqueId replacement for attachments

Write input PDSF in output after EOF

GetPropertyValue for MoveMatchingFiles

ProcessDataStandardFormat over Tuple

MoveMatchingFiles to use ProcessDataStandardFormatMapping
Mike Phares 2025-04-23 13:45:07 -07:00
parent 4e8348ebc8
commit 61188f434d
43 changed files with 2281 additions and 1578 deletions

View File

@@ -5,6 +5,12 @@
"type": "coreclr",
"request": "attach",
"processId": 24012
},
{
"type": "node",
"request": "launch",
"name": "node Launch Current Opened File",
"program": "${file}"
}
]
}

View File

@@ -92,6 +92,26 @@
"command": "code ../MET08THFTIRSTRATUS.csproj",
"problemMatcher": []
},
{
"label": "Readme",
"type": "shell",
"command": "code ../README.md",
"problemMatcher": []
},
{
"label": "File-Folder-Helper AOT s X Day-Helper-2025-03-20",
"type": "shell",
"command": "L:/DevOps/Mesa_FI/File-Folder-Helper/bin/Release/net8.0/win-x64/publish/File-Folder-Helper.exe",
"args": [
"s",
"X",
"L:/DevOps/EAF-Mesa-Integration/MET08THFTIRSTRATUS",
"Day-Helper-2025-03-20",
"false",
"4"
],
"problemMatcher": []
},
{
"label": "Git Config",
"type": "shell",

View File

@@ -120,15 +120,15 @@ public class FileRead : Shared.FileRead, IFileRead
private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
Tuple<string, string[], string[]> pdsf = ProcessDataStandardFormat.GetLogisticsColumnsAndBody(reportFullPath);
_Logistics = new Logistics(reportFullPath, pdsf.Item1);
ProcessDataStandardFormat processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath);
_Logistics = new Logistics(reportFullPath, processDataStandardFormat);
SetFileParameterLotIDToLogisticsMID();
JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(pdsf);
JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(processDataStandardFormat);
List<Shared.Properties.IDescription> descriptions = GetDuplicatorDescriptions(jsonElements);
Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
FileCopy(reportFullPath, dateTime, descriptions);
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(pdsf.Item1, tests, jsonElements, new List<FileInfo>());
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics[0]), tests, jsonElements, new List<FileInfo>());
return results;
}
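// Note (added for illustration, not part of the diff): across these handlers the
// Tuple<string, string[], string[]> returned by GetLogisticsColumnsAndBody is replaced
// by a ProcessDataStandardFormat object. A minimal sketch of the shape these call sites
// rely on, member names inferred from usage in this commit (Logistics[0],
// Body[Count - 1]); the real type appears in Adaptation/Shared further down:
// internal class ProcessDataStandardFormat
// {
//     internal ReadOnlyCollection<string> Logistics { get; } // LOGISTICS_1... header lines
//     internal ReadOnlyCollection<string> Columns { get; }   // assumed: parsed column names
//     internal ReadOnlyCollection<string> Body { get; }      // tab-delimited data rows
// }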

View File

@@ -120,9 +120,10 @@ public class FileRead : Shared.FileRead, IFileRead
if (dateTime == DateTime.MinValue)
throw new ArgumentNullException(nameof(dateTime));
string logisticsSequence = _Logistics.Sequence.ToString();
string day = $"{_Logistics.DateTimeFromSequence:yyyy-MM-dd}";
string weekOfYear = _Calendar.GetWeekOfYear(_Logistics.DateTimeFromSequence, CalendarWeekRule.FirstDay, DayOfWeek.Sunday).ToString("00");
string weekDirectory = $"{_Logistics.DateTimeFromSequence:yyyy}_Week_{weekOfYear}{@"\"}{_Logistics.DateTimeFromSequence:yyyy-MM-dd}";
string destinationArchiveDirectory = Path.Combine(_JobIdArchiveParentDirectory, _Logistics.JobID, weekDirectory);
string weekDirectory = $"{_Logistics.DateTimeFromSequence:yyyy}_Week_{weekOfYear}";
string destinationArchiveDirectory = Path.Combine(_JobIdArchiveParentDirectory, _Logistics.JobID, weekDirectory, day);
if (!Directory.Exists(destinationArchiveDirectory))
_ = Directory.CreateDirectory(destinationArchiveDirectory);
string jobIdDirectory = Path.Combine(_JobIdParentDirectory, _Logistics.JobID);
@@ -144,15 +145,15 @@ public class FileRead : Shared.FileRead, IFileRead
private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
Tuple<string, string[], string[]> pdsf = ProcessDataStandardFormat.GetLogisticsColumnsAndBody(reportFullPath);
_Logistics = new Logistics(reportFullPath, pdsf.Item1);
ProcessDataStandardFormat processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath);
_Logistics = new Logistics(reportFullPath, processDataStandardFormat);
SetFileParameterLotIDToLogisticsMID();
JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(pdsf);
JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(processDataStandardFormat);
List<Shared.Properties.IDescription> descriptions = GetDuplicatorDescriptions(jsonElements);
Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
MoveArchive(reportFullPath, dateTime);
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(pdsf.Item1, tests, jsonElements, new List<FileInfo>());
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics[0]), tests, jsonElements, new List<FileInfo>());
return results;
}

View File

@@ -119,15 +119,15 @@ public class FileRead : Shared.FileRead, IFileRead
private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
Tuple<string, string[], string[]> pdsf = ProcessDataStandardFormat.GetLogisticsColumnsAndBody(reportFullPath);
_Logistics = new Logistics(reportFullPath, pdsf.Item1);
ProcessDataStandardFormat processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath);
_Logistics = new Logistics(reportFullPath, processDataStandardFormat);
SetFileParameterLotIDToLogisticsMID();
JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(pdsf);
JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(processDataStandardFormat);
List<Shared.Properties.IDescription> descriptions = GetDuplicatorDescriptions(jsonElements);
Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
FileCopy(reportFullPath, dateTime, descriptions);
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(pdsf.Item1, tests, jsonElements, new List<FileInfo>());
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics[0]), tests, jsonElements, new List<FileInfo>());
return results;
}

View File

@@ -5,16 +5,70 @@ using Adaptation.Shared.Duplicator;
using Adaptation.Shared.Methods;
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.IO;
using System.Linq;
using System.Text;
using System.Text.Json;
using System.Threading;
namespace Adaptation.FileHandlers.MoveMatchingFiles;
#nullable enable
public class FileRead : Shared.FileRead, IFileRead
{
internal class PreWith
{
internal string MatchingFile { get; private set; }
internal string CheckFile { get; private set; }
internal string ErrFile { get; private set; }
internal string CheckDirectory { get; private set; }
internal string NoWaitDirectory { get; private set; }
internal PreWith(string matchingFile, string checkFile, string errFile, string checkDirectory, string noWaitDirectory)
{
MatchingFile = matchingFile;
CheckFile = checkFile;
ErrFile = errFile;
CheckDirectory = checkDirectory;
NoWaitDirectory = noWaitDirectory;
}
}
internal class Pre
{
internal string MatchingFile { get; private set; }
internal string CheckFile { get; private set; }
internal Pre(string matchingFile, string checkFile)
{
MatchingFile = matchingFile;
CheckFile = checkFile;
}
}
internal class Post
{
internal string ErrFile { get; private set; }
internal string CheckFile { get; private set; }
internal Post(string checkFile, string errFile)
{
ErrFile = errFile;
CheckFile = checkFile;
}
}
private readonly ProcessDataStandardFormatMapping _ProcessDataStandardFormatMapping;
public FileRead(ISMTP smtp, Dictionary<string, string> fileParameter, string cellInstanceName, int? connectionCount, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList<ModelObjectParameterDefinition> modelObjectParameters, string equipmentDictionaryName, Dictionary<string, List<long>> dummyRuns, Dictionary<long, List<string>> staticRuns, bool useCyclicalForDescription, bool isEAFHosted) :
base(new Description(), false, smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null)
{
@@ -27,6 +81,12 @@ public class FileRead : Shared.FileRead, IFileRead
throw new Exception(cellInstanceConnectionName);
if (!_IsDuplicator)
throw new Exception(cellInstanceConnectionName);
string processDataStandardFormatMappingOldColumnNames = GetPropertyValue(cellInstanceConnectionName, modelObjectParameters, "Process.Data.Standard.Format.Mapping.Old.Column.Names");
string processDataStandardFormatMappingNewColumnNames = GetPropertyValue(cellInstanceConnectionName, modelObjectParameters, "Process.Data.Standard.Format.Mapping.New.Column.Names");
string processDataStandardFormatMappingColumnIndices = GetPropertyValue(cellInstanceConnectionName, modelObjectParameters, "Process.Data.Standard.Format.Mapping.Column.Indices");
_ProcessDataStandardFormatMapping = GetProcessDataStandardFormatMapping(processDataStandardFormatMappingOldColumnNames,
processDataStandardFormatMappingNewColumnNames,
processDataStandardFormatMappingColumnIndices);
}
void IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, Exception exception)
@@ -41,7 +101,8 @@ public class FileRead : Shared.FileRead, IFileRead
Move(extractResults);
}
void IFileRead.WaitForThread() => WaitForThread(thread: null, threadExceptions: null);
void IFileRead.WaitForThread() =>
WaitForThread(thread: null, threadExceptions: null);
string IFileRead.GetEventDescription()
{
@@ -88,7 +149,7 @@ public class FileRead : Shared.FileRead, IFileRead
DateTime dateTime = DateTime.Now;
results = GetExtractResult(reportFullPath, dateTime);
if (results.Item3 is null)
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(results.Item1, Array.Empty<Test>(), JsonSerializer.Deserialize<JsonElement[]>("[]"), results.Item4);
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(results.Item1, Array.Empty<Test>(), JsonSerializer.Deserialize<JsonElement[]>("[]") ?? throw new Exception(), results.Item4);
if (results.Item3.Length > 0 && _IsEAFHosted)
WritePDSF(this, results.Item3);
UpdateLastTicksDuration(DateTime.Now.Ticks - dateTime.Ticks);
@@ -104,7 +165,69 @@ public class FileRead : Shared.FileRead, IFileRead
return results;
}
private static List<string> GetSearchDirectories(int numberLength, string parentDirectory)
private static ProcessDataStandardFormatMapping GetProcessDataStandardFormatMapping(string processDataStandardFormatMappingOldColumnNames, string processDataStandardFormatMappingNewColumnNames, string processDataStandardFormatMappingColumnIndices)
{
ProcessDataStandardFormatMapping result;
string[] segmentsB;
List<string> distinct = new();
Dictionary<string, string> keyValuePairs = new();
string args4 = "Time,HeaderUniqueId,UniqueId,Date,Wafer,Position,BIORAD4";
string args5 = ",BIORAD4";
string args6 = ",BIORAD4";
string args7 = "Test|EventId,Date|DateTime,Position|Slot,ThicknessSlotOne|Thickness First Slot,ThicknessSlotTwentyFive|Thickness Last Slot,DeltaThicknessSlotsOneAndTwentyFive|Actual Delta Thick Pts 1 and 25,PercentDeltaThicknessSlotsOneAndTwentyFive|% Delta Thick Pts 1 and 25,MID|Cassette,Lot|Batch,Title|Batch,Wafer|Text,Thickness|Site,MeanThickness|GradeMean,|BIORAD4";
// string args8 = "Time,A_LOGISTICS,B_LOGISTICS,Test,Count,Index,MesEntity,MID,Date,Employee,Lot,PSN,Reactor,Recipe,Cassette,GradeStdDev,HeaderUniqueId,Layer,MeanThickness,PassFail,RDS,Slot,Title,UniqueId,Wafer,Zone,Mean,Position,StdDev,Thickness,ThicknessSlotOne,ThicknessSlotTwentyFive,DeltaThicknessSlotsOneAndTwentyFive,PercentDeltaThicknessSlotsOneAndTwentyFive";
// string args9 = "Time,A_LOGISTICS,B_LOGISTICS,Count,Sequence,MesEntity,Index,Batch,Cassette,DateTime,Destination,Mean,PassFail,Recipe,Reference,Site,Slot,Source,StdDev,Text,GradeMean,GradeStdDev,RDS,PSN,Reactor,Layer,Zone,Employee,InferredLot,Actual Delta Thick Pts 1 and 25,% Delta Thick Pts 1 and 25,EventId",
// string args10 = "0,1,2,31,3,6,5,8,-1,27,7,23,24,13,8,21,-1,25,20,12,22,16,7,-1,19,26,11,16,18,15,-1,-1,29,30";
string[] segments = args7.Split(',');
ReadOnlyCollection<string> ignoreColumns = new(args4.Split(','));
ReadOnlyCollection<string> backfillColumns = new(args5.Split(','));
ReadOnlyCollection<string> indexOnlyColumns = new(args6.Split(','));
ReadOnlyCollection<string> newColumnNames = new(processDataStandardFormatMappingNewColumnNames.Split(','));
ReadOnlyCollection<string> oldColumnNames = new(processDataStandardFormatMappingOldColumnNames.Split(','));
ReadOnlyCollection<int> columnIndices = new(processDataStandardFormatMappingColumnIndices.Split(',').Select(int.Parse).ToArray());
foreach (string segment in segments)
{
segmentsB = segment.Split('|');
if (segmentsB.Length != 2)
continue;
if (distinct.Contains(segmentsB[0]))
continue;
distinct.Add(segmentsB[0]);
keyValuePairs.Add(segmentsB[0], segmentsB[1]);
}
result = new(backfillColumns: backfillColumns,
columnIndices: columnIndices,
newColumnNames: newColumnNames,
ignoreColumns: ignoreColumns,
indexOnlyColumns: indexOnlyColumns,
keyValuePairs: new(keyValuePairs),
oldColumnNames: oldColumnNames);
return result;
}
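// Worked example (added for illustration, not part of the diff): each comma-separated
// segment of args7 above splits on '|' into newName|oldName and lands in keyValuePairs:
//   "Test|EventId"  -> keyValuePairs["Test"] = "EventId"
//   "Date|DateTime" -> keyValuePairs["Date"] = "DateTime"
//   "MID|Cassette"  -> keyValuePairs["MID"]  = "Cassette"
// Segments that do not split into exactly two parts, or whose key was already seen,
// are skipped by the two continue checks in the loop.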
private static ReadOnlyCollection<PreWith> GetPreWithCollection(ReadOnlyCollection<Pre> preCollection)
{
List<PreWith> results = new();
string errFile;
PreWith preWith;
string? checkDirectory;
string noWaitDirectory;
foreach (Pre pre in preCollection)
{
errFile = string.Concat(pre.CheckFile, ".err");
checkDirectory = Path.GetDirectoryName(pre.CheckFile);
if (string.IsNullOrEmpty(checkDirectory))
continue;
if (!Directory.Exists(checkDirectory))
_ = Directory.CreateDirectory(checkDirectory);
noWaitDirectory = Path.Combine(checkDirectory, "NoWaitDirectory");
preWith = new(pre.MatchingFile, pre.CheckFile, errFile, checkDirectory, noWaitDirectory);
results.Add(preWith);
}
return results.AsReadOnly();
}
private static ReadOnlyCollection<string> GetSearchDirectories(int numberLength, string parentDirectory)
{
List<string> results = new();
string[] directories = Directory.GetDirectories(parentDirectory, "*", SearchOption.TopDirectoryOnly);
@@ -115,10 +238,140 @@ public class FileRead : Shared.FileRead, IFileRead
results.Add(directory);
}
results.Sort();
return results.AsReadOnly();
}
private static void CreatePointerFile(int numberLength, string parentDirectory, ReadOnlyCollection<string> matchingFiles)
{
string checkFile;
string writeFile;
string? directoryName;
int parentDirectoryLength = parentDirectory.Length;
foreach (string matchingFile in matchingFiles)
{
directoryName = Path.GetDirectoryName(matchingFile);
if (directoryName is null)
continue;
checkFile = $"{matchingFile[0]}{directoryName.Substring(parentDirectoryLength + numberLength + 1)}";
writeFile = Path.Combine(parentDirectory, $"{directoryName.Substring(parentDirectory.Length + 1, numberLength)}.txt");
if (File.Exists(writeFile))
continue;
File.AppendAllLines(writeFile, new string[] { parentDirectory, matchingFile, directoryName, checkFile });
}
}
private static ReadOnlyCollection<Pre> GetPreCollection(int numberLength, string parentDirectory, ReadOnlyCollection<string> matchingFiles)
{
List<Pre> results = new();
Pre pre;
string checkFile;
int parentDirectoryLength = parentDirectory.Length;
foreach (string matchingFile in matchingFiles)
{
checkFile = $"{matchingFile[0]}{matchingFile.Substring(parentDirectoryLength + numberLength + 1)}";
pre = new(matchingFile, checkFile);
results.Add(pre);
}
return results.AsReadOnly();
}
private void MoveCollection(DateTime dateTime, ProcessDataStandardFormat? processDataStandardFormat, ReadOnlyCollection<PreWith> preWithCollection)
{
ReadOnlyCollection<Post> postCollection = GetPostCollection(dateTime, processDataStandardFormat, preWithCollection);
if (postCollection.Count != 0)
{
Thread.Sleep(500);
StringBuilder stringBuilder = new();
foreach (Post post in postCollection)
{
if (File.Exists(post.ErrFile))
_ = stringBuilder.AppendLine(File.ReadAllText(post.ErrFile));
if (File.Exists(post.CheckFile))
_ = stringBuilder.AppendLine($"<{post.CheckFile}> was not consumed by the end!");
}
if (stringBuilder.Length > 0)
throw new Exception(stringBuilder.ToString());
}
}
private ReadOnlyCollection<Post> GetPostCollection(DateTime dateTime, ProcessDataStandardFormat? processDataStandardFormat, ReadOnlyCollection<PreWith> preWithCollection)
{
List<Post> results = new();
Post post;
long preWait;
foreach (PreWith preWith in preWithCollection)
{
if (!_IsEAFHosted)
continue;
if (processDataStandardFormat is null)
File.Move(preWith.MatchingFile, preWith.CheckFile);
else
{
ProcessDataStandardFormat.Write(preWith.CheckFile, processDataStandardFormat);
File.Delete(preWith.MatchingFile);
}
if (Directory.Exists(preWith.NoWaitDirectory))
{
post = new(preWith.CheckFile, preWith.ErrFile);
results.Add(post);
continue;
}
if (_FileConnectorConfiguration?.FileHandleWaitTime is null)
preWait = DateTime.Now.AddMilliseconds(1234).Ticks;
else
preWait = DateTime.Now.AddMilliseconds(_FileConnectorConfiguration.FileHandleWaitTime.Value).Ticks;
for (short i = 0; i < short.MaxValue; i++)
{
if (DateTime.Now.Ticks > preWait)
break;
Thread.Sleep(500);
}
for (int i = 0; i < int.MaxValue; i++)
{
if (File.Exists(preWith.ErrFile))
throw new Exception(File.ReadAllText(preWith.ErrFile));
if (!File.Exists(preWith.CheckFile))
break;
if (new TimeSpan(DateTime.Now.Ticks - dateTime.Ticks).TotalSeconds > _BreakAfterSeconds)
throw new Exception($"Not all files were consumed after {_BreakAfterSeconds} second(s)!");
Thread.Sleep(500);
}
}
return results.AsReadOnly();
}
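// Timing sketch (added for illustration, not part of the diff): with FileHandleWaitTime
// unset, the first loop polls every 500 ms until roughly 1234 ms have elapsed; the second
// loop then polls every 500 ms until the check file is consumed, an .err file appears
// (which throws with its contents), or _BreakAfterSeconds is exceeded (which also throws).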
private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results = new(string.Empty, Array.Empty<Test>(), Array.Empty<JsonElement>(), new List<FileInfo>());
ProcessDataStandardFormat? processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath, _ProcessDataStandardFormatMapping);
if (processDataStandardFormat is not null)
_Logistics = new Logistics(reportFullPath, processDataStandardFormat);
else
{
processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath);
_Logistics = new Logistics(reportFullPath, processDataStandardFormat);
processDataStandardFormat = null;
}
if (!_IsEAFHosted && processDataStandardFormat is not null)
ProcessDataStandardFormat.Write(".pdsf", processDataStandardFormat);
SetFileParameterLotIDToLogisticsMID();
int numberLength = 2;
long ticks = dateTime.Ticks;
string parentParentDirectory = GetParentParent(reportFullPath);
ReadOnlyCollection<string> searchDirectories = GetSearchDirectories(numberLength, parentParentDirectory);
ReadOnlyCollection<string> matchingFiles = GetMatchingFiles(ticks, reportFullPath, searchDirectories);
if (matchingFiles.Count != searchDirectories.Count)
throw new Exception($"Didn't find all files after {_BreakAfterSeconds} second(s)!");
try
{ CreatePointerFile(numberLength, parentParentDirectory, matchingFiles); }
catch (Exception) { }
ReadOnlyCollection<Pre> preCollection = GetPreCollection(numberLength, parentParentDirectory, matchingFiles);
ReadOnlyCollection<PreWith> preWithCollection = GetPreWithCollection(preCollection);
MoveCollection(dateTime, processDataStandardFormat, preWithCollection);
return results;
}
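// Flow note (added for illustration, not part of the diff): when the mapped parse returns
// null, the file is re-parsed without the mapping solely to populate _Logistics, and
// processDataStandardFormat is reset to null, so GetPostCollection falls back to
// File.Move instead of ProcessDataStandardFormat.Write for that run.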
private List<string> GetMatchingFiles(long ticks, string reportFullPath, List<string> searchDirectories)
private ReadOnlyCollection<string> GetMatchingFiles(long ticks, string reportFullPath, ReadOnlyCollection<string> searchDirectories)
{
List<string> results = new();
string[] found;
@@ -137,129 +390,7 @@ public class FileRead : Shared.FileRead, IFileRead
break;
}
}
return results;
}
private static List<(string matchingFile, string checkFile)> GetCollection(int numberLength, string parentDirectory, List<string> matchingFiles)
{
List<(string matchingFile, string checkFile)> results = new();
string checkFile;
int parentDirectoryLength = parentDirectory.Length;
foreach (string matchingFile in matchingFiles)
{
checkFile = $"{matchingFile[0]}{matchingFile.Substring(parentDirectoryLength + numberLength + 1)}";
results.Add(new(matchingFile, checkFile));
}
return results;
}
private static List<(string, string, string, string, string)> GetCollection(List<(string matchingFile, string checkFile)> collection)
{
List<(string, string, string, string, string)> results = new();
string errFile;
string checkDirectory;
string noWaitDirectory;
foreach ((string matchingFile, string checkFile) in collection)
{
errFile = string.Concat(checkFile, ".err");
checkDirectory = Path.GetDirectoryName(checkFile);
if (!Directory.Exists(checkDirectory))
_ = Directory.CreateDirectory(checkDirectory);
noWaitDirectory = Path.Combine(checkDirectory, "NoWaitDirectory");
results.Add(new(matchingFile, checkFile, errFile, checkDirectory, noWaitDirectory));
}
return results;
}
private void MoveCollection(DateTime dateTime, List<(string matchingFile, string checkFile)> collection)
{
long preWait;
List<(string checkFile, string errFile)> postCollection = new();
foreach ((string matchingFile, string checkFile, string errFile, string checkDirectory, string noWaitDirectory) in GetCollection(collection))
{
File.Move(matchingFile, checkFile);
if (Directory.Exists(noWaitDirectory))
{
postCollection.Add(new(checkFile, errFile));
continue;
}
if (_FileConnectorConfiguration?.FileHandleWaitTime is null)
preWait = DateTime.Now.AddMilliseconds(1234).Ticks;
else
preWait = DateTime.Now.AddMilliseconds(_FileConnectorConfiguration.FileHandleWaitTime.Value).Ticks;
for (short i = 0; i < short.MaxValue; i++)
{
if (DateTime.Now.Ticks > preWait)
break;
Thread.Sleep(500);
}
for (int i = 0; i < int.MaxValue; i++)
{
if (File.Exists(errFile))
throw new Exception(File.ReadAllText(errFile));
if (!File.Exists(checkFile))
break;
if (new TimeSpan(DateTime.Now.Ticks - dateTime.Ticks).TotalSeconds > _BreakAfterSeconds)
throw new Exception($"Not all files were consumed after {_BreakAfterSeconds} second(s)!");
Thread.Sleep(500);
}
}
if (postCollection.Count != 0)
{
Thread.Sleep(500);
StringBuilder stringBuilder = new();
foreach ((string checkFile, string errFile) in postCollection)
{
if (File.Exists(errFile))
_ = stringBuilder.AppendLine(File.ReadAllText(errFile));
if (File.Exists(checkFile))
_ = stringBuilder.AppendLine($"<{checkFile}> was not consumed by the end!");
}
if (stringBuilder.Length > 0)
throw new Exception(stringBuilder.ToString());
}
}
private static void CreatePointerFile(int numberLength, string parentDirectory, List<string> matchingFiles)
{
#nullable enable
string checkFile;
string writeFile;
string? directoryName;
int parentDirectoryLength = parentDirectory.Length;
foreach (string matchingFile in matchingFiles)
{
directoryName = Path.GetDirectoryName(matchingFile);
if (directoryName is null)
continue;
checkFile = $"{matchingFile[0]}{directoryName.Substring(parentDirectoryLength + numberLength + 1)}";
writeFile = Path.Combine(parentDirectory, $"{directoryName.Substring(parentDirectory.Length + 1, numberLength)}.txt");
if (File.Exists(writeFile))
continue;
File.AppendAllLines(writeFile, new string[] { parentDirectory, matchingFile, directoryName, checkFile });
}
#nullable disable
}
private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results = new(string.Empty, null, null, new List<FileInfo>());
Tuple<string, string[], string[]> pdsf = ProcessDataStandardFormat.GetLogisticsColumnsAndBody(reportFullPath);
_Logistics = new Logistics(reportFullPath, pdsf.Item1);
SetFileParameterLotIDToLogisticsMID();
int numberLength = 2;
long ticks = dateTime.Ticks;
string parentParentDirectory = GetParentParent(reportFullPath);
List<string> searchDirectories = GetSearchDirectories(numberLength, parentParentDirectory);
List<string> matchingFiles = GetMatchingFiles(ticks, reportFullPath, searchDirectories);
if (matchingFiles.Count != searchDirectories.Count)
throw new Exception($"Didn't find all files after {_BreakAfterSeconds} second(s)!");
try
{ CreatePointerFile(numberLength, parentParentDirectory, matchingFiles); }
catch (Exception) { }
List<(string matchingFile, string checkFile)> collection = GetCollection(numberLength, parentParentDirectory, matchingFiles);
MoveCollection(dateTime, collection);
return results;
return results.AsReadOnly();
}
}

View File

@@ -113,14 +113,14 @@ public class FileRead : Shared.FileRead, IFileRead
return results;
}
private string GetLines(List<Stratus.Description> descriptions)
private static string GetLines(Logistics logistics, List<Stratus.Description> descriptions)
{
StringBuilder results = new();
char del = '\t';
Stratus.Description x = descriptions[0];
_ = results.Append("Stratus_").Append(_Logistics.MID).Append('_').Append(_Logistics.DateTimeFromSequence.ToString("yyyyMMddhhmmssfff")).Append(del).
_ = results.Append("Stratus_").Append(logistics.MID).Append('_').Append(logistics.DateTimeFromSequence.ToString("yyyyMMddhhmmssfff")).Append(del).
Append(x.Date).Append(del).
Append(_Logistics.JobID).Append(del).
Append(logistics.JobID).Append(del).
Append("FQA Thickness").Append(del).
Append(x.Employee).Append(del).
Append(x.Recipe).Append(del).
@@ -136,7 +136,7 @@ public class FileRead : Shared.FileRead, IFileRead
return results.ToString();
}
private void SaveOpenInsightFile(string reportFullPath, DateTime dateTime, string logistics, List<Stratus.Description> descriptions, Test[] tests)
private void SaveOpenInsightFile(string reportFullPath, DateTime dateTime, ProcessDataStandardFormat processDataStandardFormat, List<Stratus.Description> descriptions, Test[] tests)
{
bool isDummyRun = false;
List<(Shared.Properties.IScopeInfo, string)> collection = new();
@@ -156,7 +156,7 @@ public class FileRead : Shared.FileRead, IFileRead
string duplicateFile = Path.Combine(duplicateDirectory, Path.GetFileName(reportFullPath));
if (descriptions.Count != 0 && tests.Length != 0)
{
string lines = GetLines(descriptions);
string lines = GetLines(_Logistics, descriptions);
if (!string.IsNullOrEmpty(lines))
{
long? subGroupId;
@@ -177,7 +177,7 @@ public class FileRead : Shared.FileRead, IFileRead
values[0] = $"{values[0]}|{subGroupId}";
}
string weekOfYear = _Calendar.GetWeekOfYear(_Logistics.DateTimeFromSequence, CalendarWeekRule.FirstDay, DayOfWeek.Sunday).ToString("00");
FromIQS.Save(_OpenInsightApiECDirectory, _Logistics, reportFullPath, logistics, descriptions.First(), lines, subGroupId, weekOfYear);
FromIQS.Save(_OpenInsightApiECDirectory, _Logistics, reportFullPath, processDataStandardFormat, descriptions.First(), lines, subGroupId, weekOfYear);
}
}
if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
@@ -187,15 +187,15 @@ public class FileRead : Shared.FileRead, IFileRead
private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
Tuple<string, string[], string[]> pdsf = ProcessDataStandardFormat.GetLogisticsColumnsAndBody(reportFullPath);
_Logistics = new Logistics(reportFullPath, pdsf.Item1);
ProcessDataStandardFormat processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath);
_Logistics = new Logistics(reportFullPath, processDataStandardFormat);
SetFileParameterLotIDToLogisticsMID();
JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(pdsf);
JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(processDataStandardFormat);
List<Stratus.Description> descriptions = Stratus.ProcessData.GetDescriptions(jsonElements);
Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
SaveOpenInsightFile(reportFullPath, dateTime, pdsf.Item1, descriptions, tests);
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(pdsf.Item1, tests, jsonElements, new List<FileInfo>());
SaveOpenInsightFile(reportFullPath, dateTime, processDataStandardFormat, descriptions, tests);
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics[0]), tests, jsonElements, new List<FileInfo>());
return results;
}

View File

@@ -325,7 +325,7 @@ public class FromIQS
return new(result, count, commandText);
}
private static string GetJson(Logistics logistics, string logisticLines, Stratus.Description description)
private static string GetJson(Logistics logistics, ProcessDataStandardFormat processDataStandardFormat, Stratus.Description description)
{
string result;
StringBuilder stringBuilder = new();
@@ -345,7 +345,7 @@
string safeValue;
string[] segments;
string serializerValue;
foreach (string line in logisticLines.Split(new string[] { Environment.NewLine }, StringSplitOptions.None))
foreach (string line in processDataStandardFormat.Logistics)
{
segments = line.Split('\t');
if (segments.Length < 2)
@@ -376,11 +376,11 @@
return result;
}
internal static void Save(string openInsightApiECDirectory, Logistics logistics, string reportFullPath, string logisticLines, Stratus.Description description, string lines, long? subGroupId, string weekOfYear)
internal static void Save(string openInsightApiECDirectory, Logistics logistics, string reportFullPath, ProcessDataStandardFormat processDataStandardFormat, Stratus.Description description, string lines, long? subGroupId, string weekOfYear)
{
string checkFile;
string fileName = Path.GetFileName(reportFullPath);
string json = GetJson(logistics, logisticLines, description);
string json = GetJson(logistics, processDataStandardFormat, description);
string? ecPathRoot = Path.GetPathRoot(openInsightApiECDirectory);
bool ecExists = ecPathRoot is not null && Directory.Exists(ecPathRoot);
string weekYear = $"{logistics.DateTimeFromSequence:yyyy}_Week_{weekOfYear}";
@@ -396,6 +396,9 @@
checkFile = Path.Combine(ecDirectory, $"{logistics.DateTimeFromSequence.Ticks}.json");
if (ecExists && !File.Exists(checkFile))
File.WriteAllText(checkFile, json);
checkFile = Path.Combine(ecDirectory, $"{logistics.DateTimeFromSequence.Ticks}.lbl");
if (ecExists && !File.Exists(checkFile))
File.WriteAllText(checkFile, processDataStandardFormat.Body[processDataStandardFormat.Body.Count - 1]);
}
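// Note (added for illustration, not part of the diff): alongside the existing {Ticks}.json,
// Save now also writes a {Ticks}.lbl file holding the final tab-delimited row of
// processDataStandardFormat.Body, e.g. a hypothetical 638812345678901234.lbl next to
// 638812345678901234.json.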
private static string GetCommandText(string[] iqsCopyValues)

View File

@@ -139,15 +139,15 @@ public class FileRead : Shared.FileRead, IFileRead
private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
Tuple<string, string[], string[]> pdsf = ProcessDataStandardFormat.GetLogisticsColumnsAndBody(reportFullPath);
_Logistics = new Logistics(reportFullPath, pdsf.Item1);
ProcessDataStandardFormat processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath);
_Logistics = new Logistics(reportFullPath, processDataStandardFormat);
SetFileParameterLotIDToLogisticsMID();
JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(pdsf);
JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(processDataStandardFormat);
List<Stratus.Description> descriptions = Stratus.ProcessData.GetDescriptions(jsonElements);
Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
SendData(reportFullPath, dateTime, descriptions);
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(pdsf.Item1, tests, jsonElements, new List<FileInfo>());
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics[0]), tests, jsonElements, new List<FileInfo>());
return results;
}

View File

@@ -85,8 +85,12 @@ public class WSRequest
Details.Add(detail);
}
Date ??= logistics.DateTimeFromSequence.ToString();
if (UniqueId is null && Details.Count != 0)
UniqueId = Details[0].HeaderUniqueId;
UniqueId = $"{logistics.JobID}_{logistics.MID}_{logistics.DateTimeFromSequence:yyyyMMddHHmmssffff}";
for (int i = 0; i < Details.Count; i++)
{
Details[i].HeaderUniqueId = UniqueId;
Details[i].UniqueId = $"{logistics.JobID}_{logistics.MID}_{logistics.DateTimeFromSequence:yyyyMMddHHmmssffff}_Item-{i + 1}";
}
}
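// Example (added for illustration, hypothetical logistics values): JobID "BIORAD4",
// MID "LOT123", and a sequence time of 2025-04-23 13:45:07.0010 would yield
//   UniqueId            = "BIORAD4_LOT123_202504231345070010"
//   Details[0].UniqueId = "BIORAD4_LOT123_202504231345070010_Item-1"
// and every Details[i].HeaderUniqueId is overwritten with the header's UniqueId,
// replacing the old fallback that copied Details[0].HeaderUniqueId.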
internal static long GetHeaderId(IFileRead fileRead, Logistics logistics, string openInsightMetrologyViewerAPI, string openInsightMetrologyViewerFileShare, int weekOfYear, string json, List<Stratus.Description> descriptions)
@@ -135,7 +139,7 @@ public class WSRequest
pdDocument.close();
List<WS.Attachment> headerAttachments = new()
{
new WS.Attachment(subGroupId, headerId, headerIdDirectory, descriptions[0].HeaderUniqueId, "Data.pdf", dataPDFFile)
new WS.Attachment(subGroupId, headerId, headerIdDirectory, $"{logistics.JobID}_{logistics.MID}_{logistics.DateTimeFromSequence:yyyyMMddHHmmssffff}", "Data.pdf", dataPDFFile)
};
WS.AttachFiles(openInsightMetrologyViewerAPI, headerAttachments, dataAttachments: null);
}

View File

@@ -170,15 +170,15 @@ public class FileRead : Shared.FileRead, IFileRead
if (dateTime == DateTime.MinValue)
throw new ArgumentNullException(nameof(dateTime));
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
Tuple<string, string[], string[]> pdsf = ProcessDataStandardFormat.GetLogisticsColumnsAndBody(reportFullPath);
_Logistics = new Logistics(reportFullPath, pdsf.Item1);
ProcessDataStandardFormat processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath);
_Logistics = new Logistics(reportFullPath, processDataStandardFormat);
SetFileParameterLotIDToLogisticsMID();
JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(pdsf);
JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(processDataStandardFormat);
List<Stratus.Description> descriptions = Stratus.ProcessData.GetDescriptions(jsonElements);
Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
PostOpenInsightMetrologyViewerAttachments(descriptions);
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(pdsf.Item1, tests, jsonElements, new List<FileInfo>());
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics[0]), tests, jsonElements, new List<FileInfo>());
return results;
}

View File

@@ -129,6 +129,7 @@ public class FileRead : Shared.FileRead, IFileRead
string destinationJobIdDirectory = Path.Combine(_JobIdProcessParentDirectory, _Logistics.JobID, directoryName);
string sequenceDirectory = Path.Combine(destinationJobIdDirectory, logisticsSequence);
string jsonFileName = Path.Combine(sequenceDirectory, $"{Path.GetFileNameWithoutExtension(reportFullPath)}.json");
MoveMatchingFile(jobIdDirectory, matchDirectories[0]);
Directory.Move(matchDirectories[0], destinationJobIdDirectory);
if (!Directory.Exists(sequenceDirectory))
_ = Directory.CreateDirectory(sequenceDirectory);
@@ -136,16 +137,40 @@ public class FileRead : Shared.FileRead, IFileRead
File.WriteAllText(jsonFileName, json);
}
private static void MoveMatchingFile(string jobIdDirectory, string matchDirectory)
{
string checkFile;
string jobIdDirectoryFileName;
string matchDirectoryFileName;
string[] jobIdDirectoryFiles = Directory.GetFiles(jobIdDirectory, "*", SearchOption.TopDirectoryOnly);
string[] matchDirectoryFiles = Directory.GetFiles(matchDirectory, "*", SearchOption.TopDirectoryOnly);
foreach (string jobIdDirectoryFile in jobIdDirectoryFiles)
{
jobIdDirectoryFileName = Path.GetFileName(jobIdDirectoryFile);
foreach (string matchDirectoryFile in matchDirectoryFiles)
{
matchDirectoryFileName = Path.GetFileName(matchDirectoryFile);
if (jobIdDirectoryFileName.StartsWith(matchDirectoryFileName))
{
checkFile = Path.Combine(matchDirectory, jobIdDirectoryFileName);
if (File.Exists(checkFile))
continue;
File.Move(jobIdDirectoryFile, checkFile);
}
}
}
}
private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
Tuple<string, string[], string[]> pdsf = ProcessDataStandardFormat.GetLogisticsColumnsAndBody(reportFullPath);
_Logistics = new Logistics(reportFullPath, pdsf.Item1);
ProcessDataStandardFormat processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath);
_Logistics = new Logistics(reportFullPath, processDataStandardFormat);
SetFileParameterLotIDToLogisticsMID();
JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(pdsf);
JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(processDataStandardFormat);
List<Stratus.Description> descriptions = Stratus.ProcessData.GetDescriptions(jsonElements);
Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(pdsf.Item1, tests, jsonElements, new List<FileInfo>());
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics[0]), tests, jsonElements, new List<FileInfo>());
if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
DirectoryMove(reportFullPath, dateTime, descriptions);
else if (!_IsEAFHosted)

View File

@@ -117,15 +117,15 @@ public class FileRead : Shared.FileRead, IFileRead
private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
Tuple<string, string[], string[]> pdsf = ProcessDataStandardFormat.GetLogisticsColumnsAndBody(reportFullPath);
_Logistics = new Logistics(reportFullPath, pdsf.Item1);
ProcessDataStandardFormat processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath);
_Logistics = new Logistics(reportFullPath, processDataStandardFormat);
SetFileParameterLotIDToLogisticsMID();
JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(pdsf);
JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(processDataStandardFormat);
List<Shared.Properties.IDescription> descriptions = GetDuplicatorDescriptions(jsonElements);
Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
FileCopy(reportFullPath, dateTime, descriptions);
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(pdsf.Item1, tests, jsonElements, new List<FileInfo>());
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics[0]), tests, jsonElements, new List<FileInfo>());
return results;
}

View File

@@ -13,10 +13,14 @@ internal class Constant
public string Source { get; } = "Source:";
public string Started { get; } = "started";
public string Cassette { get; } = "Cassette";
public string Finished { get; } = "finished.";
public string Reference { get; } = "Reference";
public string StartedAt { get; } = "started at";
public string Thickness { get; } = "Thickness,";
public string Destination { get; } = "Destination:";
public string ProcessFailed { get; } = "------------- Process failed -------------";
public string IsPut { get; } = "is put to the slot";
public string WaferParentheses { get; } = "Wafer (";
public string IsTaken { get; } = "is taken from the slot";
public string ProcessFailed { get; } = "- Process failed -";
}

View File

@@ -47,6 +47,11 @@ public class Description : IDescription, Shared.Properties.IDescription
public string Position { get; set; }
public string StdDev { get; set; }
public string Thickness { get; set; }
//
public string ThicknessSlotOne { get; set; }
public string ThicknessSlotTwentyFive { get; set; }
public string DeltaThicknessSlotsOneAndTwentyFive { get; set; }
public string PercentDeltaThicknessSlotsOneAndTwentyFive { get; set; }
string IDescription.GetEventDescription() => "File Has been read and parsed";
@@ -215,7 +220,13 @@ public class Description : IDescription, Shared.Properties.IDescription
Mean = detail.Mean,
Position = detail.Position,
StdDev = detail.StdDev,
Thickness = detail.Thickness
Thickness = detail.Thickness,
//
ThicknessSlotOne = processData.ThicknessSlotOne,
ThicknessSlotTwentyFive = processData.ThicknessSlotTwentyFive,
//
DeltaThicknessSlotsOneAndTwentyFive = processData.DeltaThicknessSlotsOneAndTwentyFive,
PercentDeltaThicknessSlotsOneAndTwentyFive = processData.PercentDeltaThicknessSlotsOneAndTwentyFive,
};
results.Add(description);
}
@@ -269,7 +280,12 @@ public class Description : IDescription, Shared.Properties.IDescription
Mean = nameof(Mean),
Position = nameof(Position),
StdDev = nameof(StdDev),
Thickness = nameof(Thickness)
Thickness = nameof(Thickness),
//
ThicknessSlotOne = nameof(ThicknessSlotOne),
ThicknessSlotTwentyFive = nameof(ThicknessSlotTwentyFive),
DeltaThicknessSlotsOneAndTwentyFive = nameof(DeltaThicknessSlotsOneAndTwentyFive),
PercentDeltaThicknessSlotsOneAndTwentyFive = nameof(PercentDeltaThicknessSlotsOneAndTwentyFive),
};
return result;
}

View File

@@ -110,7 +110,7 @@ public class FileRead : Shared.FileRead, IFileRead
results.Item4.Add(_Logistics.FileInfo);
else
{
Run? run = Run.Get(_Logistics, results.Item4);
Run? run = Run.Get(_Logistics);
if (run is null)
throw new Exception(string.Concat("A) No Data - ", dateTime.Ticks));
IProcessData iProcessData = new ProcessData(this, _Logistics, results.Item4, _OriginalDataBioRad, run, dataText: string.Empty);

View File

@@ -9,36 +9,39 @@ public class Grade
public Grade(string meanThickness, string stdDev)
{
Mean = meanThickness;
MeanThickness = meanThickness;
StdDev = stdDev;
}
public string Mean { get; }
public string MeanThickness { get; }
public string StdDev { get; }
internal static Grade? Get(Constant constant, ReadOnlyCollection<string> groups)
{
Grade? result;
string? mean = null;
string? stdDev = null;
int[] j = new int[] { 0 };
string stdDev = string.Empty;
string meanThickness = string.Empty;
foreach (string groupText in groups)
{
if (groupText.Contains(constant.Destination))
if (!groupText.Contains(constant.Finished))
continue;
mean = string.Empty;
stdDev = string.Empty;
meanThickness = string.Empty;
Header.ScanPast(groupText, j, constant.Mean);
meanThickness = Wafer.GetToken(groupText, j);
if (meanThickness.EndsWith(","))
meanThickness = meanThickness.Remove(meanThickness.Length - 1, 1);
mean = Wafer.GetToken(groupText, j);
if (mean.EndsWith(","))
mean = mean.Remove(mean.Length - 1, 1);
Header.ScanPast(groupText, j, constant.STDD);
stdDev = Wafer.GetToken(groupText, j);
if (stdDev.EndsWith(","))
stdDev = stdDev.Remove(stdDev.Length - 1, 1);
}
result = new(meanThickness: meanThickness,
stdDev: stdDev);
if (mean is null || stdDev is null)
result = null;
else
result = new(meanThickness: mean,
stdDev: stdDev);
return result;
}

View File

@@ -35,6 +35,11 @@ public partial class ProcessData : IProcessData
public string Title { get; set; }
public string UniqueId { get; set; }
public string Zone { get; set; }
//
public string ThicknessSlotOne { get; set; }
public string ThicknessSlotTwentyFive { get; set; }
public string DeltaThicknessSlotsOneAndTwentyFive { get; set; }
public string PercentDeltaThicknessSlotsOneAndTwentyFive { get; set; }
List<object> Shared.Properties.IProcessData.Details => _Details;
@@ -631,12 +636,31 @@ public partial class ProcessData : IProcessData
_ = stringBuilder.Remove(stringBuilder.Length - 1, 1);
detail.Position = stringBuilder.ToString();
}
if (details.Count != 2
|| details[0].Slot != "1"
|| details[1].Slot != "25"
|| string.IsNullOrEmpty(details[0].Thickness)
|| string.IsNullOrEmpty(details[1].Thickness)
|| !decimal.TryParse(details[0].Thickness, out decimal thick01)
|| !decimal.TryParse(details[1].Thickness, out decimal thick25))
{
ThicknessSlotOne = string.Empty;
ThicknessSlotTwentyFive = string.Empty;
DeltaThicknessSlotsOneAndTwentyFive = string.Empty;
PercentDeltaThicknessSlotsOneAndTwentyFive = string.Empty;
}
else
{
ThicknessSlotOne = details[0].Thickness;
ThicknessSlotTwentyFive = details[1].Thickness;
DeltaThicknessSlotsOneAndTwentyFive = (thick01 - thick25).ToString();
// https://www.calculatorsoup.com/calculators/algebra/percent-difference-calculator.php
PercentDeltaThicknessSlotsOneAndTwentyFive = $"{Math.Abs(thick01 - thick25) / ((thick01 + thick25) / 2) * 100:0.000}";
}
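// Worked example (added for illustration, hypothetical thicknesses): thick01 = 102.5
// and thick25 = 97.5 give
//   DeltaThicknessSlotsOneAndTwentyFive        = 102.5 - 97.5 = "5.0"
//   PercentDeltaThicknessSlotsOneAndTwentyFive = |5.0| / ((102.5 + 97.5) / 2) * 100
//                                              = 5.0 / 100.0 * 100 -> "5.000"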
fileInfoCollection.Add(logistics.FileInfo);
_Details.AddRange(details);
}
#nullable enable
internal static List<Description> GetDescriptions(JsonElement[] jsonElements)
{
List<Description> results = new();

View File

@@ -26,7 +26,7 @@ internal class Row
StdDev = run.Wafers[i].StdDev;
Text = run.Wafers[i].Text;
//
GradeMean = run.Grade.Mean;
GradeMean = run.Grade.MeanThickness;
GradeStdDev = run.Grade.StdDev;
}

View File

@@ -1,9 +1,6 @@
using Adaptation.Shared;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.IO;
using System.Text;
using System.Text.Json;
using System.Text.Json.Serialization;
namespace Adaptation.FileHandlers.Stratus;
@@ -24,57 +21,7 @@ internal class Run
public ReadOnlyCollection<Wafer> Wafers { get; }
public Grade Grade { get; }
private static void WriteJson(Logistics logistics, List<FileInfo> fileInfoCollection, Run result)
{
FileInfo fileInfo = new($"{logistics.ReportFullPath}.run.json");
string json = JsonSerializer.Serialize(result, StratusRunSourceGenerationContext.Default.Run);
File.WriteAllText(fileInfo.FullName, json);
File.SetLastWriteTime(fileInfo.FullName, logistics.DateTimeFromSequence);
fileInfoCollection.Add(fileInfo);
}
private static ReadOnlyCollection<string> GetLines(Logistics logistics, JsonElement[]? jsonElements)
{
List<string> results = new();
int columns = 0;
StringBuilder stringBuilder = new();
results.Add($"\"Count\",{jsonElements?.Length}");
results.Add($"\"{nameof(logistics.Sequence)}\",\"{logistics.Sequence}\"");
results.Add($"\"{nameof(logistics.MesEntity)}\",\"{logistics.MesEntity}\"");
string dateTimeFromSequence = logistics.DateTimeFromSequence.ToString("MM/dd/yyyy hh:mm:ss tt");
for (int i = 0; i < jsonElements?.Length;)
{
_ = stringBuilder.Append('"').Append(nameof(logistics.DateTimeFromSequence)).Append('"').Append(',');
foreach (JsonProperty jsonProperty in jsonElements[0].EnumerateObject())
{
columns += 1;
_ = stringBuilder.Append('"').Append(jsonProperty.Name).Append('"').Append(',');
}
break;
}
if (jsonElements?.Length != 0)
_ = stringBuilder.Remove(stringBuilder.Length - 1, 1);
results.Add(stringBuilder.ToString());
for (int i = 0; i < jsonElements?.Length; i++)
{
_ = stringBuilder.Clear();
_ = stringBuilder.Append('"').Append(dateTimeFromSequence).Append('"').Append(',');
foreach (JsonProperty jsonProperty in jsonElements[i].EnumerateObject())
{
if (jsonProperty.Value.ValueKind == JsonValueKind.Object)
_ = stringBuilder.Append(',');
else if (jsonProperty.Value.ValueKind != JsonValueKind.String)
_ = stringBuilder.Append(jsonProperty.Value).Append(',');
else
_ = stringBuilder.Append('"').Append(jsonProperty.Value).Append('"').Append(',');
}
_ = stringBuilder.Remove(stringBuilder.Length - 1, 1);
results.Add(stringBuilder.ToString());
}
return results.AsReadOnly();
}
internal static Run? Get(Logistics logistics, List<FileInfo> fileInfoCollection)
internal static Run? Get(Logistics logistics)
{
Run? result;
Constant constant = new();
@@ -99,10 +46,9 @@ internal class Run
if (grade is null)
result = null;
else
{
result = new(header, wafers, grade);
WriteJson(logistics, fileInfoCollection, result);
}
// WriteJson(logistics, fileInfoCollection, result);
// WriteCommaSeparatedValues(logistics, result);
}
}
}

View File

@@ -93,11 +93,16 @@ public class Wafer
List<string> group = new();
foreach (string line in lines)
{
group.Add(line);
if (!line.StartsWith(constant.Destination))
if (string.IsNullOrEmpty(line.Trim()))
continue;
results.Add(string.Join(Environment.NewLine, group));
group.Clear();
group.Add(line);
if (line.StartsWith(constant.Destination)
|| line.Contains(constant.ProcessFailed)
|| line.StartsWith(constant.WaferParentheses) && line.Contains(constant.IsPut))
{
results.Add(string.Join(Environment.NewLine, group));
group.Clear();
}
}
results.Add(string.Join(Environment.NewLine, group));
}
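// Behavior sketch (added for illustration, not part of the diff): blank lines are now
// skipped; every other line joins the current group, and a group is flushed when a line
// starts with "Destination:", contains the process-failed marker, or is a "Wafer (" line
// containing "is put to the slot"; the trailing Add flushes whatever remains.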

View File

@@ -18,6 +18,9 @@ internal class Constant
public string StartedAt { get; } = "started at";
public string Thickness { get; } = "Thickness,";
public string Destination { get; } = "Destination:";
public string ProcessFailed { get; } = "------------- Process failed -------------";
public string IsPut { get; } = "is put to the slot";
public string WaferParentheses { get; } = "Wafer (";
public string IsTaken { get; } = "is taken from the slot";
public string ProcessFailed { get; } = "- Process failed -";
}

View File

@@ -116,7 +116,7 @@ public class FileRead : Shared.FileRead, IFileRead
{
try
{
ReadOnlyCollection<Run>? runs = Run.Get(_TickOffset.Value, _Logistics);
ReadOnlyCollection<Run>? runs = Run.Get(_TickOffset.Value, _Logistics, results.Item4);
// if (run is null)
// throw new Exception(string.Concat("A) No Data - ", dateTime.Ticks));
if (runs.Count == 0)

View File

@@ -7,13 +7,13 @@ namespace Adaptation.FileHandlers.txt;
public class Grade
{
public Grade(string mean, string stdDev)
public Grade(string meanThickness, string stdDev)
{
Mean = mean;
MeanThickness = meanThickness;
StdDev = stdDev;
}
public string Mean { get; }
public string MeanThickness { get; }
public string StdDev { get; }
internal static Grade? Get(Constant constant, ReadOnlyCollection<string> groups)
@@ -24,7 +24,7 @@ public class Grade
int[] j = new int[] { 0 };
foreach (string groupText in groups)
{
if (!groupText.Trim().StartsWith(constant.Cassette) || !groupText.Contains(constant.Finished))
if (!groupText.Contains(constant.Finished))
continue;
mean = string.Empty;
stdDev = string.Empty;
@@ -37,8 +37,11 @@ public class Grade
if (stdDev.EndsWith(","))
stdDev = stdDev.Remove(stdDev.Length - 1, 1);
}
result = mean is null || stdDev is null ? null : new(mean: mean,
stdDev: stdDev);
if (mean is null || stdDev is null)
result = null;
else
result = new(meanThickness: mean,
stdDev: stdDev);
return result;
}

View File

@@ -26,7 +26,7 @@ internal class Row
StdDev = run.Wafers[i].StdDev;
Text = run.Wafers[i].Text;
//
GradeMean = run.Grade.Mean;
GradeMean = run.Grade.MeanThickness;
GradeStdDev = run.Grade.StdDev;
}

View File

@@ -26,6 +26,15 @@ internal class Run
public ReadOnlyCollection<Wafer> Wafers { get; }
public Grade Grade { get; }
private static void WriteJson(Logistics logistics, List<FileInfo> fileInfoCollection, int r, Run result)
{
FileInfo fileInfo = new($"{logistics.ReportFullPath}-{r}.run.json");
string json = JsonSerializer.Serialize(result, RunSourceGenerationContext.Default.Run);
File.WriteAllText(fileInfo.FullName, json);
File.SetLastWriteTime(fileInfo.FullName, logistics.DateTimeFromSequence);
fileInfoCollection.Add(fileInfo);
}
private static ReadOnlyCollection<string> GetLines(Logistics logistics, JsonElement[]? jsonElements)
{
List<string> results = new();
@@ -67,16 +76,16 @@ internal class Run
return results.AsReadOnly();
}
private static void WriteCommaSeparatedValues(Logistics logistics, Run run)
private static void WriteCommaSeparatedValues(Logistics logistics, int r, Run run)
{
List<Row> results = new();
Row row;
int index = 0;
for (int i = 0; i < run.Wafers.Count; i++)
for (int w = 0; w < run.Wafers.Count; w++)
{
for (int j = 0; j < run.Wafers[i].Sites.Count; j++)
for (int s = 0; s < run.Wafers[w].Sites.Count; s++)
{
row = new(run, index, i, j);
row = new(run, index, w, s);
results.Add(row);
index += 1;
}
@@ -84,7 +93,7 @@ internal class Run
string json = JsonSerializer.Serialize(results);
JsonElement[]? jsonElements = JsonSerializer.Deserialize<JsonElement[]>(json);
ReadOnlyCollection<string> lines = GetLines(logistics, jsonElements);
File.WriteAllText($"{logistics.ReportFullPath}_{logistics.DateTimeFromSequence.Ticks}.csv", string.Join(Environment.NewLine, lines));
File.WriteAllText($"{logistics.ReportFullPath}-{r}.csv", string.Join(Environment.NewLine, lines));
}
private static ReadOnlyCollection<string> GetRuns(Constant constant, string text)
@@ -111,8 +120,8 @@ internal class Run
private static ReadOnlyCollection<Run> GetRuns(Logistics logistics)
{
List<Run> results = new();
int[] i;
Constant constant = new();
int[] i = new int[] { 0 };
string allText = File.ReadAllText(logistics.ReportFullPath);
string[] segments = allText.Split(new string[] { constant.Finished }, StringSplitOptions.None);
if (segments.Length > 1)
@@ -121,6 +130,7 @@ internal class Run
ReadOnlyCollection<string> runs = GetRuns(constant, allText);
foreach (string text in runs)
{
i = new int[] { 0 };
Header? header = Header.Get(text, constant, i);
if (header is not null)
{
@@ -131,11 +141,10 @@ internal class Run
if (wafers.Count > 0)
{
Grade? grade = Grade.Get(constant, groups);
if (grade is not null)
{
run = new(header, wafers, grade);
results.Add(run);
}
if (grade is null)
continue;
run = new(header, wafers, grade);
results.Add(run);
}
}
}
@@ -144,14 +153,17 @@ internal class Run
return results.AsReadOnly();
}
internal static ReadOnlyCollection<Run> Get(long tickOffset, Logistics logistics)
internal static ReadOnlyCollection<Run> Get(long tickOffset, Logistics logistics, List<FileInfo> fileInfoCollection)
{
ReadOnlyCollection<Run> results = GetRuns(logistics);
DateTime afterCheck = new(File.GetLastWriteTime(logistics.ReportFullPath).Ticks + tickOffset);
if (logistics.DateTimeFromSequence != afterCheck)
results = new(new List<Run>());
foreach (Run run in results)
WriteCommaSeparatedValues(logistics, run);
for (int i = 0; i < results.Count; i++)
{
WriteJson(logistics, fileInfoCollection, i, results[i]);
WriteCommaSeparatedValues(logistics, i, results[i]);
}
return results;
}
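// Example output (added for illustration, hypothetical report path): a log at
// X:\runs\log.txt containing two runs produces log.txt-0.run.json / log.txt-0.csv and
// log.txt-1.run.json / log.txt-1.csv; only the .run.json files are stamped with
// DateTimeFromSequence and added to fileInfoCollection.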

View File

@@ -93,11 +93,16 @@ public class Wafer
List<string> group = new();
foreach (string line in lines)
{
group.Add(line);
if (!line.StartsWith(constant.Destination))
if (string.IsNullOrEmpty(line.Trim()))
continue;
results.Add(string.Join(Environment.NewLine, group));
group.Clear();
group.Add(line);
if (line.StartsWith(constant.Destination)
|| line.Contains(constant.ProcessFailed)
|| line.StartsWith(constant.WaferParentheses) && line.Contains(constant.IsPut))
{
results.Add(string.Join(Environment.NewLine, group));
group.Clear();
}
}
results.Add(string.Join(Environment.NewLine, group));
}
@@ -145,17 +150,39 @@ public class Wafer
stdDev = string.Empty;
passFail = string.Empty;
waferText = string.Empty;
if (!groupText.Contains(constant.IsTaken))
source = string.Empty;
else
{
Header.ScanPast(groupText, j, constant.IsTaken);
source = Header.GetToEOL(groupText, j).Trim();
}
Header.ScanPast(groupText, j, constant.Reference);
reference = Header.GetToEOL(groupText, j);
Header.ScanPast(groupText, j, constant.Source);
source = Header.GetToEOL(groupText, j).Trim();
Header.ScanPast(groupText, j, constant.Destination);
destination = Header.GetToEOL(groupText, j).Trim();
if (groupText.Contains(constant.Destination))
{
Header.ScanPast(groupText, j, constant.Source);
source = Header.GetToEOL(groupText, j).Trim();
Header.ScanPast(groupText, j, constant.Destination);
destination = Header.GetToEOL(groupText, j).Trim();
}
else
{
Header.ScanPast(groupText, j, constant.IsPut);
destination = Header.GetToEOL(groupText, j).Trim();
}
}
else
{
if (!groupText.Contains(constant.Wafer))
continue;
if (!groupText.Contains(constant.IsTaken))
source = string.Empty;
else
{
Header.ScanPast(groupText, j, constant.IsTaken);
source = Header.GetToEOL(groupText, j).Trim();
}
Header.ScanPast(groupText, j, constant.Wafer);
waferText = Header.GetToEOL(groupText, j);
if (waferText.EndsWith("."))
@@ -206,10 +233,20 @@ public class Wafer
if (stdDev.EndsWith("."))
stdDev = stdDev.Remove(stdDev.Length - 1, 1);
reference = string.Empty;
Header.ScanPast(groupText, j, constant.Source);
source = Header.GetToEOL(groupText, j).Trim();
Header.ScanPast(groupText, j, constant.Destination);
destination = Header.GetToEOL(groupText, j).Trim();
if (groupText.Contains(constant.Destination))
{
Header.ScanPast(groupText, j, constant.Source);
source = Header.GetToEOL(groupText, j).Trim();
Header.ScanPast(groupText, j, constant.Destination);
destination = Header.GetToEOL(groupText, j).Trim();
}
else
{
Header.ScanPast(groupText, j, constant.IsTaken);
source = Header.GetToEOL(groupText, j).Trim();
Header.ScanPast(groupText, j, constant.IsPut);
destination = Header.GetToEOL(groupText, j).Trim();
}
}
wafer = new(destination: destination,
mean: mean,

View File

@ -25,7 +25,7 @@ stages:
nugetSource: "https://artifactory.intra.infineon.com/artifactory/api/nuget/ngt-fi-package-main-vir/"
jobs:
- job: SetupEnviroment
- job: SetupEnvironment
steps:
- script: |
echo $(Build.BuildId)
@ -51,7 +51,7 @@ stages:
- job: BuildDebug
dependsOn:
- SetupEnviroment
- SetupEnvironment
steps:
- script: |
set configuration=Debug
@ -66,7 +66,7 @@ stages:
- job: BuildRelease
dependsOn:
- SetupEnviroment
- SetupEnvironment
steps:
- script: |
set configuration=Release
@ -98,7 +98,7 @@ stages:
- job: TestDebug
dependsOn:
- SetupEnviroment
- SetupEnvironment
- BuildDebug
- BuildRelease
steps:
@ -168,7 +168,7 @@ stages:
nugetSource: "https://artifactory.intra.infineon.com/artifactory/api/nuget/ngt-fi-package-main-vir/"
jobs:
- job: SetupEnviroment
- job: SetupEnvironment
steps:
- script: |
echo $(Build.BuildId)
@ -194,7 +194,7 @@ stages:
- job: BuildRelease
dependsOn:
- SetupEnviroment
- SetupEnvironment
steps:
- script: |
set configuration=Release
@ -226,7 +226,7 @@ stages:
- job: TestRelease
dependsOn:
- SetupEnviroment
- SetupEnvironment
- BuildRelease
steps:
- script: |

File diff suppressed because it is too large

View File

@ -35,6 +35,9 @@ public class Logistics : ILogistics
public long Sequence => _Sequence;
public double TotalSecondsSinceLastWriteTimeFromSequence => _TotalSecondsSinceLastWriteTimeFromSequence;
private static string DefaultMesEntity(DateTime dateTime) =>
string.Concat(dateTime.Ticks, "_MES_ENTITY");
public Logistics(IFileRead fileRead)
{
DateTime dateTime = DateTime.Now;
@ -84,13 +87,13 @@ public class Logistics : ILogistics
_Logistics2 = new List<Logistics2>();
}
public Logistics(string reportFullPath, string logistics)
internal Logistics(string reportFullPath, ProcessDataStandardFormat processDataStandardFormat)
{
string key;
DateTime dateTime;
string[] segments;
_FileInfo = new(reportFullPath);
_Logistics1 = logistics.Split(new string[] { Environment.NewLine }, StringSplitOptions.RemoveEmptyEntries).ToList();
_Logistics1 = processDataStandardFormat.Logistics.ToList();
if (Logistics1.Count == 0 || !Logistics1[0].StartsWith("LOGISTICS_1"))
{
_NullData = null;
@ -190,8 +193,6 @@ public class Logistics : ILogistics
}
}
private static string DefaultMesEntity(DateTime dateTime) => string.Concat(dateTime.Ticks, "_MES_ENTITY");
internal void Update(string mid, string processJobID)
{
_MID = mid;

View File

@ -0,0 +1 @@


View File

@ -1,18 +1,22 @@
using Adaptation.Shared.Methods;
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Text;
using System.Text.Json;
using System.Text.Json.Serialization;
namespace Adaptation.Shared;
public class ProcessDataStandardFormat
#nullable enable
internal class ProcessDataStandardFormat
{
public enum SearchFor
internal enum SearchFor
{
EquipmentIntegration = 1,
BusinessIntegration = 2,
@ -20,325 +24,47 @@ public class ProcessDataStandardFormat
Archive = 4
}
public static string GetPDSFText(IFileRead fileRead, Logistics logistics, JsonElement[] jsonElements, string logisticsText)
internal long? Sequence { get; private set; }
internal ReadOnlyCollection<string> Body { get; private set; }
internal ReadOnlyCollection<string> Footer { get; private set; }
internal ReadOnlyCollection<string> Header { get; private set; }
internal ReadOnlyCollection<string> Columns { get; private set; }
internal ProcessDataStandardFormat? InputPDSF { get; private set; }
internal ReadOnlyCollection<string> Logistics { get; private set; }
internal ProcessDataStandardFormat(ReadOnlyCollection<string> body,
ReadOnlyCollection<string> columns,
ReadOnlyCollection<string> footer,
ReadOnlyCollection<string> header,
ProcessDataStandardFormat? inputPDSF,
ReadOnlyCollection<string> logistics,
long? sequence)
{
string result;
if (jsonElements.Length == 0)
result = string.Empty;
else
{
int columns = 0;
List<string> lines;
string endOffset = "E#######T";
string dataOffset = "D#######T";
string headerOffset = "H#######T";
string format = "MM/dd/yyyy HH:mm:ss";
StringBuilder stringBuilder = new();
lines = new string[] { "HEADER_TAG\tHEADER_VALUE", "FORMAT\t2.00", "NUMBER_PASSES\t0001", string.Concat("HEADER_OFFSET\t", headerOffset), string.Concat("DATA_OFFSET\t", dataOffset), string.Concat("END_OFFSET\t", endOffset) }.ToList();
_ = stringBuilder.Append("\"Time\"").Append('\t');
_ = stringBuilder.Append("\"A_LOGISTICS\"").Append('\t');
_ = stringBuilder.Append("\"B_LOGISTICS\"").Append('\t');
for (int i = 0; i < jsonElements.Length;)
{
foreach (JsonProperty jsonProperty in jsonElements[0].EnumerateObject())
{
columns += 1;
_ = stringBuilder.Append('"').Append(jsonProperty.Name).Append('"').Append('\t');
}
break;
}
_ = stringBuilder.Remove(stringBuilder.Length - 1, 1);
lines.Add(stringBuilder.ToString());
for (int i = 0; i < jsonElements.Length; i++)
{
_ = stringBuilder.Clear();
_ = stringBuilder.Append("0.1").Append('\t');
_ = stringBuilder.Append('1').Append('\t');
_ = stringBuilder.Append('2').Append('\t');
foreach (JsonProperty jsonProperty in jsonElements[i].EnumerateObject())
_ = stringBuilder.Append(jsonProperty.Value).Append('\t');
_ = stringBuilder.Remove(stringBuilder.Length - 1, 1);
lines.Add(stringBuilder.ToString());
}
lines.Add(string.Concat("NUM_DATA_ROWS ", jsonElements.Length.ToString().PadLeft(9, '0')));
lines.Add(string.Concat("NUM_DATA_COLUMNS ", (columns + 3).ToString().PadLeft(9, '0')));
lines.Add("DELIMITER ;");
lines.Add(string.Concat("START_TIME_FORMAT ", format));
lines.Add(string.Concat("START_TIME ", logistics.DateTimeFromSequence.ToString(format))); //12/26/2019 15:22:44
lines.Add(string.Concat("LOGISTICS_COLUMN", '\t', "A_LOGISTICS"));
lines.Add(string.Concat("LOGISTICS_COLUMN", '\t', "B_LOGISTICS"));
if (!string.IsNullOrEmpty(logisticsText))
lines.Add(logisticsText);
else
{
lines.Add(string.Concat("LOGISTICS_1", '\t', "A_CHAMBER=;A_INFO=", fileRead.EventName, ";A_INFO2=", fileRead.EquipmentType, ";A_JOBID=", fileRead.CellInstanceName, ";A_MES_ENTITY=", fileRead.MesEntity, ";A_MID=", logistics.MID, ";A_NULL_DATA=", fileRead.NullData, ";A_PPID=NO_PPID;A_PROCESS_JOBID=", logistics.ProcessJobID, ";A_PRODUCT=;A_SEQUENCE=", logistics.Sequence, ";A_WAFER_ID=;"));
lines.Add(string.Concat("LOGISTICS_2", '\t', "B_CHAMBER=;B_INFO=", fileRead.EventName, ";B_INFO2=", fileRead.EquipmentType, ";B_JOBID=", fileRead.CellInstanceName, ";B_MES_ENTITY=", fileRead.MesEntity, ";B_MID=", logistics.MID, ";B_NULL_DATA=", fileRead.NullData, ";B_PPID=NO_PPID;B_PROCESS_JOBID=", logistics.ProcessJobID, ";B_PRODUCT=;B_SEQUENCE=", logistics.Sequence, ";B_WAFER_ID=;"));
lines.Add("END_HEADER");
}
_ = stringBuilder.Clear();
foreach (string line in lines)
_ = stringBuilder.AppendLine(line);
result = stringBuilder.ToString();
result = result.Replace(headerOffset, result.IndexOf("NUM_DATA_ROWS").ToString().PadLeft(9, '0')).
Replace(dataOffset, result.IndexOf('"').ToString().PadLeft(9, '0')).
Replace(endOffset, result.Length.ToString().PadLeft(9, '0'));
}
return result;
Body = body;
Columns = columns;
Footer = footer;
Header = header;
InputPDSF = inputPDSF;
Logistics = logistics;
Sequence = sequence;
}
public static Tuple<string, string[], string[]> GetLogisticsColumnsAndBody(string reportFullPath, string[] lines = null)
{
string segment;
List<string> body = new();
StringBuilder logistics = new();
lines ??= File.ReadAllLines(reportFullPath);
string[] segments;
if (lines.Length < 7)
segments = Array.Empty<string>();
else
segments = lines[6].Trim().Split('\t');
List<string> columns = new();
for (int c = 0; c < segments.Length; c++)
{
segment = segments[c].Substring(1, segments[c].Length - 2);
if (!columns.Contains(segment))
columns.Add(segment);
else
{
for (short i = 1; i < short.MaxValue; i++)
{
segment = string.Concat(segment, "_", i);
if (!columns.Contains(segment))
{
columns.Add(segment);
break;
}
}
}
}
bool lookForLogistics = false;
for (int r = 7; r < lines.Length; r++)
{
if (lines[r].StartsWith("NUM_DATA_ROWS"))
lookForLogistics = true;
if (!lookForLogistics)
{
body.Add(lines[r]);
continue;
}
if (lines[r].StartsWith("LOGISTICS_1"))
{
for (int i = r; i < lines.Length; i++)
{
if (lines[r].StartsWith("END_HEADER"))
break;
_ = logistics.AppendLine(lines[i]);
}
break;
}
}
return new Tuple<string, string[], string[]>(logistics.ToString(), columns.ToArray(), body.ToArray());
}
internal static string EquipmentIntegration(bool addSpaces = true, char separator = ' ') =>
GetString(SearchFor.EquipmentIntegration, addSpaces, separator);
public static JsonElement[] GetArray(Tuple<string, string[], string[]> pdsf, bool lookForNumbers = false)
{
JsonElement[] results;
string logistics = pdsf.Item1;
string[] columns = pdsf.Item2;
string[] bodyLines = pdsf.Item3;
if (bodyLines.Length == 0 || !bodyLines[0].Contains('\t'))
results = JsonSerializer.Deserialize<JsonElement[]>("[]");
else
{
string value;
string[] segments;
List<string> lines = new();
StringBuilder stringBuilder = new();
foreach (string bodyLine in bodyLines)
{
_ = stringBuilder.Clear();
_ = stringBuilder.Append('{');
segments = bodyLine.Trim().Split('\t');
if (!lookForNumbers)
{
for (int c = 1; c < segments.Length; c++)
{
value = segments[c].Replace("\"", "\\\"").Replace("\\", "\\\\");
_ = stringBuilder.Append('"').Append(columns[c]).Append("\":\"").Append(value).Append("\",");
}
}
else
{
for (int c = 1; c < segments.Length; c++)
{
value = segments[c].Replace("\"", "\\\"").Replace("\\", "\\\\");
if (string.IsNullOrEmpty(value))
_ = stringBuilder.Append('"').Append(columns[c]).Append("\":").Append(value).Append("null,");
else if (value.All(char.IsDigit))
_ = stringBuilder.Append('"').Append(columns[c]).Append("\":").Append(value).Append(',');
else
_ = stringBuilder.Append('"').Append(columns[c]).Append("\":\"").Append(value).Append("\",");
}
}
_ = stringBuilder.Remove(stringBuilder.Length - 1, 1);
_ = stringBuilder.AppendLine("}");
lines.Add(stringBuilder.ToString());
}
string json = $"[{string.Join(",", lines)}]";
results = JsonSerializer.Deserialize<JsonElement[]>(json);
}
return results;
}
internal static string BusinessIntegration(bool addSpaces = true, char separator = ' ') =>
GetString(SearchFor.BusinessIntegration, addSpaces, separator);
public static Dictionary<string, List<string>> GetDictionary(Tuple<string, string[], string[]> pdsf)
{
Dictionary<string, List<string>> results = new();
string[] segments;
string[] columns = pdsf.Item2;
string[] bodyLines = pdsf.Item3;
foreach (string column in columns)
results.Add(column, new List<string>());
foreach (string bodyLine in bodyLines)
{
segments = bodyLine.Split('\t');
for (int c = 1; c < segments.Length; c++)
{
if (c >= columns.Length)
continue;
results[columns[c]].Add(segments[c]);
}
}
return results;
}
internal static string SystemExport(bool addSpaces = true, char separator = ' ') =>
GetString(SearchFor.SystemExport, addSpaces, separator);
public static Tuple<string, Dictionary<Test, Dictionary<string, List<string>>>> GetTestDictionary(Tuple<string, string[], string[]> pdsf)
{
Dictionary<Test, Dictionary<string, List<string>>> results = new();
List<string> collection;
string testColumn = nameof(Test);
Dictionary<string, List<string>> keyValuePairs = GetDictionary(pdsf);
if (!keyValuePairs.TryGetValue(testColumn, out collection))
throw new Exception();
int min;
int max;
Test testKey;
List<string> vs;
string columnKey;
Dictionary<Test, List<int>> tests = new();
for (int i = 0; i < collection.Count; i++)
{
if (Enum.TryParse(collection[i], out Test test))
{
if (!results.ContainsKey(test))
{
tests.Add(test, new List<int>());
results.Add(test, new Dictionary<string, List<string>>());
}
tests[test].Add(i);
}
}
foreach (KeyValuePair<Test, List<int>> testKeyValuePair in tests)
{
testKey = testKeyValuePair.Key;
min = testKeyValuePair.Value.Min();
max = testKeyValuePair.Value.Max() + 1;
foreach (KeyValuePair<string, List<string>> keyValuePair in keyValuePairs)
results[testKey].Add(keyValuePair.Key, new List<string>());
foreach (KeyValuePair<string, List<string>> keyValuePair in keyValuePairs)
{
vs = keyValuePair.Value;
columnKey = keyValuePair.Key;
for (int i = min; i < max; i++)
{
if (vs.Count > i)
results[testKey][columnKey].Add(vs[i]);
else
results[testKey][columnKey].Add(string.Empty);
}
}
}
return new Tuple<string, Dictionary<Test, Dictionary<string, List<string>>>>(pdsf.Item1, results);
}
internal static string Archive(bool addSpaces = true, char separator = ' ') =>
GetString(SearchFor.Archive, addSpaces, separator);
private static string GetString(SearchFor searchFor, bool addSpaces, char separator = ' ')
{
if (!addSpaces)
return string.Concat(((int)searchFor).ToString().PadLeft(2, '0'), searchFor);
else
return string.Concat(((int)searchFor).ToString().PadLeft(2, '0'), separator, searchFor.ToString().Replace("In", string.Concat(separator, "In")).Replace("Ex", string.Concat(separator, "Ex")));
}
internal static ProcessDataStandardFormat GetEmpty() =>
new(new(Array.Empty<string>()), new(Array.Empty<string>()), new(Array.Empty<string>()), new(Array.Empty<string>()), null, new(Array.Empty<string>()), null);
public static string EquipmentIntegration(bool addSpaces = true, char separator = ' ') => GetString(SearchFor.EquipmentIntegration, addSpaces, separator);
public static string BusinessIntegration(bool addSpaces = true, char separator = ' ') => GetString(SearchFor.BusinessIntegration, addSpaces, separator);
public static string SystemExport(bool addSpaces = true, char separator = ' ') => GetString(SearchFor.SystemExport, addSpaces, separator);
public static string Archive(bool addSpaces = true, char separator = ' ') => GetString(SearchFor.Archive, addSpaces, separator);
public static string GetLines(Logistics logistics, Properties.IScopeInfo scopeInfo, List<string> names, Dictionary<string, List<string>> keyValuePairs, string dateFormat, string timeFormat, List<string> pairedParameterNames, bool useDateTimeFromSequence = true, string format = "", List<string> ignoreParameterNames = null)
{
StringBuilder result = new();
ignoreParameterNames ??= new List<string>();
if (useDateTimeFromSequence && !string.IsNullOrEmpty(format))
throw new Exception();
else if (!useDateTimeFromSequence && string.IsNullOrEmpty(format))
throw new Exception();
string nullData;
const string columnDate = "Date";
const string columnTime = "Time";
const string firstDuplicate = "_1";
_ = result.AppendLine(scopeInfo.Header);
StringBuilder line = new();
if (logistics.NullData is null)
nullData = string.Empty;
else
nullData = logistics.NullData.ToString();
int count = (from l in keyValuePairs select l.Value.Count).Min();
for (int r = 0; r < count; r++)
{
_ = line.Clear();
_ = line.Append('!');
foreach (KeyValuePair<string, List<string>> keyValuePair in keyValuePairs)
{
if (!names.Contains(keyValuePair.Key))
continue;
if (ignoreParameterNames.Contains(keyValuePair.Key))
continue;
if (pairedParameterNames.Contains(keyValuePair.Key))
{
if (string.IsNullOrEmpty(keyValuePair.Value[r]) || keyValuePair.Value[r] == nullData)
continue;
else
_ = result.Append(line).Append(keyValuePair.Key).Append(';').AppendLine(keyValuePair.Value[r]);
}
else
{
if (useDateTimeFromSequence && keyValuePair.Key == columnDate)
_ = line.Append(logistics.DateTimeFromSequence.ToString(dateFormat));
else if (useDateTimeFromSequence && keyValuePair.Key == columnTime)
_ = line.Append(logistics.DateTimeFromSequence.ToString(timeFormat));
else if (!useDateTimeFromSequence && keyValuePair.Key == columnDate && keyValuePair.Value[r].Length == format.Length)
_ = line.Append(DateTime.ParseExact(keyValuePair.Value[r], format, CultureInfo.InvariantCulture).ToString(dateFormat));
else if (!useDateTimeFromSequence && keyValuePair.Key == columnTime && keyValuePairs.ContainsKey(string.Concat(keyValuePair.Key, firstDuplicate)) && keyValuePairs[string.Concat(keyValuePair.Key, firstDuplicate)][r].Length == format.Length)
_ = line.Append(DateTime.ParseExact(keyValuePairs[string.Concat(keyValuePair.Key, firstDuplicate)][r], format, CultureInfo.InvariantCulture).ToString(timeFormat));
else if (string.IsNullOrEmpty(keyValuePair.Value[r]) || keyValuePair.Value[r] == nullData)
_ = line.Append(nullData);
else
_ = line.Append(keyValuePair.Value[r]);
_ = line.Append(';');
}
}
if (pairedParameterNames.Count == 0)
{
_ = line.Remove(line.Length - 1, 1);
_ = result.AppendLine(line.ToString());
}
}
return result.ToString();
}
public static List<string> PDSFToFixedWidth(string reportFullPath)
internal static List<string> PDSFToFixedWidth(string reportFullPath)
{
List<string> results = new();
if (!File.Exists(reportFullPath))
@ -407,4 +133,577 @@ public class ProcessDataStandardFormat
return results;
}
internal static ProcessDataStandardFormat GetProcessDataStandardFormat(string reportFullPath, string[]? lines = null, int columnsLine = 6)
{
ProcessDataStandardFormat result;
string segment;
string[] segments;
bool addToFooter = false;
List<string> body = new();
List<string> header = new();
List<string> footer = new();
List<string> columns = new();
ReadOnlyCollection<string> logistics;
lines ??= File.ReadAllLines(reportFullPath);
if (lines.Length < columnsLine + 1)
segments = Array.Empty<string>();
else
{
segments = lines[columnsLine].Trim().Split('\t');
for (int i = 0; i < columnsLine; i++)
header.Add(lines[i]);
}
for (int c = 0; c < segments.Length; c++)
{
segment = segments[c].Substring(1, segments[c].Length - 2);
if (!columns.Contains(segment))
columns.Add(segment);
else
{
for (short i = 1; i < short.MaxValue; i++)
{
segment = string.Concat(segment, "_", i);
if (!columns.Contains(segment))
{
columns.Add(segment);
break;
}
}
}
}
for (int r = columnsLine + 1; r < lines.Length; r++)
{
if (lines[r].StartsWith("NUM_DATA_ROWS"))
addToFooter = true;
if (!addToFooter)
body.Add(lines[r]);
else
{
footer.Add(lines[r]);
if (lines[r].StartsWith("END_HEADER"))
break;
}
}
string? linesOne = lines.Length > 0 && body.Count == 0 && columns.Count == 0 ? lines[1] : null;
logistics = GetLogistics(footer, linesOne: linesOne);
result = new(body: body.AsReadOnly(),
columns: columns.AsReadOnly(),
footer: footer.AsReadOnly(),
header: header.AsReadOnly(),
inputPDSF: null,
logistics: logistics,
sequence: null);
return result;
}
private static ReadOnlyCollection<string> GetLogistics(List<string> footer, string? linesOne)
{
List<string> results = new();
bool foundLogistics1 = false;
foreach (string line in footer)
{
if (line.StartsWith("END_HEADER"))
break;
if (line.StartsWith("LOGISTICS_1"))
foundLogistics1 = true;
if (foundLogistics1 && line.StartsWith("LOGISTICS_"))
results.Add(line);
}
if (!string.IsNullOrEmpty(linesOne) && results.Count == 0)
results.Add(linesOne);
return results.AsReadOnly();
}
internal static ProcessDataStandardFormat? GetProcessDataStandardFormat(string reportFullPath, ProcessDataStandardFormatMapping pdsfMapping)
{
ProcessDataStandardFormat? result;
const int columnsLine = 6;
FileInfo fileInfo = new(reportFullPath);
ProcessDataStandardFormat processDataStandardFormat = GetProcessDataStandardFormat(fileInfo.LastWriteTime, pdsfMapping.NewColumnNames.Count, columnsLine, fileInfo.FullName, lines: null);
JsonElement[]? jsonElements = GetArray(pdsfMapping.NewColumnNames.Count, processDataStandardFormat, lookForNumbers: false);
if (jsonElements is null || jsonElements.Length == 0 || pdsfMapping.OldColumnNames.Count != pdsfMapping.ColumnIndices.Count)
result = null;
else
{
result = GetProcessDataStandardFormat(pdsfMapping, jsonElements, processDataStandardFormat);
if (result.Sequence is null || result.Columns.Count == 0 || result.Body.Count == 0 || result.Logistics.Count == 0)
result = null;
}
return result;
}
private static ProcessDataStandardFormat GetProcessDataStandardFormat(DateTime lastWriteTime, int expectedColumns, int columnsLine, string path, string[]? lines)
{
ProcessDataStandardFormat result;
long sequence;
string[] segments;
bool addToFooter = false;
List<string> body = new();
List<string> header = new();
List<string> footer = new();
ReadOnlyCollection<string> logistics;
lines ??= File.ReadAllLines(path);
if (lines.Length <= columnsLine)
segments = Array.Empty<string>();
else
{
segments = lines[columnsLine].Split('\t');
if (segments.Length != expectedColumns)
segments = Array.Empty<string>();
for (int i = 0; i < columnsLine; i++)
header.Add(lines[i]);
}
string[] columns = segments.Select(l => l.Trim('"')).ToArray();
for (int r = columnsLine + 1; r < lines.Length; r++)
{
if (lines[r].StartsWith("NUM_DATA_ROWS"))
addToFooter = true;
if (!addToFooter)
body.Add(lines[r]);
else
{
footer.Add(lines[r]);
if (lines[r].StartsWith("END_HEADER"))
break;
}
}
logistics = GetLogistics(footer, linesOne: null);
if (logistics.Count == 0)
sequence = lastWriteTime.Ticks;
else
{
segments = logistics[0].Split(new string[] { "SEQUENCE=" }, StringSplitOptions.None);
sequence = segments.Length < 2 || !long.TryParse(segments[1].Split(';')[0], out long s) ? lastWriteTime.Ticks : s;
}
result = new(body: body.AsReadOnly(),
columns: new(columns),
footer: footer.AsReadOnly(),
header: header.AsReadOnly(),
inputPDSF: null,
logistics: logistics,
sequence: sequence);
return result;
}
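
The SEQUENCE= fallback above can be exercised in isolation; a sketch against an assumed LOGISTICS_1 line:

using System;
using System.IO;

string path = @"C:\Temp\example.pdsf";  // hypothetical
string line = "LOGISTICS_1\tA_CHAMBER=;A_SEQUENCE=638812345000000000;A_WAFER_ID=;";
string[] parts = line.Split(new string[] { "SEQUENCE=" }, StringSplitOptions.None);
long sequence = parts.Length < 2 || !long.TryParse(parts[1].Split(';')[0], out long s)
    ? File.GetLastWriteTime(path).Ticks  // fall back to last-write ticks
    : s;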
private static JsonElement[]? GetArray(int expectedColumns, ProcessDataStandardFormat processDataStandardFormat, bool lookForNumbers)
{
JsonElement[]? results;
if (processDataStandardFormat.Body.Count == 0 || !processDataStandardFormat.Body[0].Contains('\t'))
results = JsonSerializer.Deserialize("[]", JsonElementCollectionSourceGenerationContext.Default.JsonElementArray) ?? throw new Exception();
else
{
string value;
string[] segments;
List<string> lines = new();
StringBuilder stringBuilder = new();
foreach (string bodyLine in processDataStandardFormat.Body)
{
_ = stringBuilder.Clear();
_ = stringBuilder.Append('{');
segments = bodyLine.Split('\t');
if (segments.Length != expectedColumns)
continue;
if (!lookForNumbers)
{
for (int c = 0; c < segments.Length; c++)
{
value = segments[c].Replace("\"", "\\\"").Replace("\\", "\\\\");
_ = stringBuilder.Append('"').Append(processDataStandardFormat.Columns[c]).Append("\":\"").Append(value).Append("\",");
}
}
else
{
for (int c = 0; c < segments.Length; c++)
{
value = segments[c].Replace("\"", "\\\"").Replace("\\", "\\\\");
if (string.IsNullOrEmpty(value))
_ = stringBuilder.Append('"').Append(processDataStandardFormat.Columns[c]).Append("\":").Append(value).Append("null,");
else if (value.All(char.IsDigit))
_ = stringBuilder.Append('"').Append(processDataStandardFormat.Columns[c]).Append("\":").Append(value).Append(',');
else
_ = stringBuilder.Append('"').Append(processDataStandardFormat.Columns[c]).Append("\":\"").Append(value).Append("\",");
}
}
_ = stringBuilder.Remove(stringBuilder.Length - 1, 1);
_ = stringBuilder.AppendLine("}");
lines.Add(stringBuilder.ToString());
}
string json = $"[{string.Join(",", lines)}]";
results = JsonSerializer.Deserialize(json, JsonElementCollectionSourceGenerationContext.Default.JsonElementArray);
}
return results;
}
private static ProcessDataStandardFormat GetProcessDataStandardFormat(ProcessDataStandardFormatMapping processDataStandardFormatMapping, JsonElement[] jsonElements, ProcessDataStandardFormat processDataStandardFormat)
{
ProcessDataStandardFormat result;
int column;
string value;
JsonProperty jsonProperty;
List<string> values = new();
List<string> results = new();
JsonProperty[] jsonProperties;
List<string> unknownColumns = new();
for (int i = 0; i < jsonElements.Length; i++)
{
values.Clear();
if (jsonElements[i].ValueKind != JsonValueKind.Object)
{
unknownColumns.Add(string.Empty);
break;
}
jsonProperties = jsonElements[i].EnumerateObject().ToArray();
if (jsonProperties.Length != processDataStandardFormatMapping.NewColumnNames.Count)
continue;
for (int c = 0; c < processDataStandardFormatMapping.ColumnIndices.Count; c++)
{
column = processDataStandardFormatMapping.ColumnIndices[c];
if (column == -1)
value = processDataStandardFormatMapping.OldColumnNames[c];
else
{
jsonProperty = jsonProperties[column];
value = jsonProperty.Value.ToString();
}
values.Add(value);
}
results.Add(string.Join("\t", values));
}
result = new(body: new(results),
columns: processDataStandardFormatMapping.OldColumnNames,
footer: processDataStandardFormat.Footer,
header: processDataStandardFormat.Header,
inputPDSF: processDataStandardFormat,
logistics: processDataStandardFormat.Logistics,
sequence: processDataStandardFormat.Sequence);
return result;
}
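
The remap above walks ColumnIndices once per row: an index of -1 has no matching new column, so the old column name itself is emitted as a placeholder, while any other index copies that property's value from the parsed row. A one-line sketch of the rule:

using System.Text.Json;

// -1 = no matching new column; emit the old name as a literal placeholder.
static string Remap(int columnIndex, string oldColumnName, JsonProperty[] row) =>
    columnIndex == -1 ? oldColumnName : row[columnIndex].Value.ToString();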
internal static void Write(string path, ProcessDataStandardFormat processDataStandardFormat)
{
List<string> results = new();
if (processDataStandardFormat.Sequence is null)
throw new NullReferenceException(nameof(processDataStandardFormat.Sequence));
string endOffset = "E#######T";
string dataOffset = "D#######T";
string headerOffset = "H#######T";
string format = "MM/dd/yyyy HH:mm:ss";
string startTime = new DateTime(processDataStandardFormat.Sequence.Value).ToString(format);
results.Add("HEADER_TAG\tHEADER_VALUE");
results.Add("FORMAT\t2.00");
results.Add("NUMBER_PASSES\t0001");
results.Add($"HEADER_OFFSET\t{headerOffset}");
results.Add($"DATA_OFFSET\t{dataOffset}");
results.Add($"END_OFFSET\t{endOffset}");
results.Add($"\"{string.Join("\"\t\"", processDataStandardFormat.Columns)}\"");
results.AddRange(processDataStandardFormat.Body);
results.Add($"NUM_DATA_ROWS\t{processDataStandardFormat.Body.Count.ToString().PadLeft(9, '0')}");
results.Add($"NUM_DATA_COLUMNS\t{processDataStandardFormat.Columns.Count.ToString().PadLeft(9, '0')}");
results.Add("DELIMITER\t;");
results.Add($"START_TIME_FORMAT\t{format}");
results.Add($"START_TIME\t{startTime}");
results.Add("LOGISTICS_COLUMN\tA_LOGISTICS");
results.Add("LOGISTICS_COLUMN\tB_LOGISTICS");
results.AddRange(processDataStandardFormat.Logistics);
results.Add("EOF");
if (processDataStandardFormat.InputPDSF is not null)
{
List<char> hyphens = new();
results.AddRange(processDataStandardFormat.InputPDSF.Header.Select(l => l.Replace('\t', '|')));
results.Add(string.Empty);
results.Add($"|{string.Join("|", processDataStandardFormat.InputPDSF.Columns)}|");
for (int i = 0; i < processDataStandardFormat.InputPDSF.Columns.Count; i++)
hyphens.Add('-');
results.Add($"|{string.Join("|", hyphens)}|");
results.AddRange(processDataStandardFormat.InputPDSF.Body.Select(l => l.Replace('\t', '|')));
results.Add(string.Empty);
results.AddRange(processDataStandardFormat.InputPDSF.Footer.Select(l => l.Replace('\t', '|')));
}
File.WriteAllText(path, string.Join(Environment.NewLine, results));
}
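
When an InputPDSF is attached, Write appends the original file after the EOF marker with tabs swapped for pipes, so the tail reads as a Markdown-style table. The written tail looks roughly like this (columns and values illustrative):

EOF
HEADER_TAG|HEADER_VALUE
FORMAT|2.00

|Time|A_LOGISTICS|B_LOGISTICS|
|-|-|-|
0.1|1|2

NUM_DATA_ROWS|000000001
END_HEADER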
internal static Dictionary<string, List<string>> GetDictionary(ProcessDataStandardFormat processDataStandardFormat)
{
Dictionary<string, List<string>> results = new();
string[] segments;
foreach (string column in processDataStandardFormat.Columns)
results.Add(column, new List<string>());
foreach (string bodyLine in processDataStandardFormat.Body)
{
segments = bodyLine.Split('\t');
for (int c = 1; c < segments.Length; c++)
{
if (c >= processDataStandardFormat.Columns.Count)
continue;
results[processDataStandardFormat.Columns[c]].Add(segments[c]);
}
}
return results;
}
internal static JsonElement[] GetArray(ProcessDataStandardFormat processDataStandardFormat, bool lookForNumbers = false)
{
JsonElement[] results;
if (processDataStandardFormat.Body.Count == 0 || !processDataStandardFormat.Body[0].Contains('\t'))
results = JsonSerializer.Deserialize("[]", JsonElementCollectionSourceGenerationContext.Default.JsonElementArray) ?? throw new Exception();
else
{
string value;
string[] segments;
List<string> lines = new();
StringBuilder stringBuilder = new();
foreach (string bodyLine in processDataStandardFormat.Body)
{
_ = stringBuilder.Clear();
_ = stringBuilder.Append('{');
segments = bodyLine.Trim().Split('\t');
if (!lookForNumbers)
{
for (int c = 1; c < segments.Length; c++)
{
value = segments[c].Replace("\"", "\\\"").Replace("\\", "\\\\");
_ = stringBuilder.Append('"').Append(processDataStandardFormat.Columns[c]).Append("\":\"").Append(value).Append("\",");
}
}
else
{
for (int c = 1; c < segments.Length; c++)
{
value = segments[c].Replace("\"", "\\\"").Replace("\\", "\\\\");
if (string.IsNullOrEmpty(value))
_ = stringBuilder.Append('"').Append(processDataStandardFormat.Columns[c]).Append("\":").Append(value).Append("null,");
else if (value.All(char.IsDigit))
_ = stringBuilder.Append('"').Append(processDataStandardFormat.Columns[c]).Append("\":").Append(value).Append(',');
else
_ = stringBuilder.Append('"').Append(processDataStandardFormat.Columns[c]).Append("\":\"").Append(value).Append("\",");
}
}
_ = stringBuilder.Remove(stringBuilder.Length - 1, 1);
_ = stringBuilder.AppendLine("}");
lines.Add(stringBuilder.ToString());
}
string json = $"[{string.Join(",", lines)}]";
results = JsonSerializer.Deserialize<JsonElement[]>(json) ?? throw new Exception();
}
return results;
}
internal static string GetPDSFText(IFileRead fileRead, Logistics logistics, JsonElement[] jsonElements, string logisticsText)
{
string result;
if (jsonElements.Length == 0)
result = string.Empty;
else
{
int columns = 0;
List<string> lines;
string endOffset = "E#######T";
string dataOffset = "D#######T";
string headerOffset = "H#######T";
string format = "MM/dd/yyyy HH:mm:ss";
StringBuilder stringBuilder = new();
lines = new string[] { "HEADER_TAG\tHEADER_VALUE", "FORMAT\t2.00", "NUMBER_PASSES\t0001", string.Concat("HEADER_OFFSET\t", headerOffset), string.Concat("DATA_OFFSET\t", dataOffset), string.Concat("END_OFFSET\t", endOffset) }.ToList();
_ = stringBuilder.Append("\"Time\"").Append('\t');
_ = stringBuilder.Append("\"A_LOGISTICS\"").Append('\t');
_ = stringBuilder.Append("\"B_LOGISTICS\"").Append('\t');
for (int i = 0; i < jsonElements.Length;)
{
foreach (JsonProperty jsonProperty in jsonElements[0].EnumerateObject())
{
columns += 1;
_ = stringBuilder.Append('"').Append(jsonProperty.Name).Append('"').Append('\t');
}
break;
}
_ = stringBuilder.Remove(stringBuilder.Length - 1, 1);
lines.Add(stringBuilder.ToString());
for (int i = 0; i < jsonElements.Length; i++)
{
_ = stringBuilder.Clear();
_ = stringBuilder.Append("0.1").Append('\t');
_ = stringBuilder.Append('1').Append('\t');
_ = stringBuilder.Append('2').Append('\t');
foreach (JsonProperty jsonProperty in jsonElements[i].EnumerateObject())
_ = stringBuilder.Append(jsonProperty.Value).Append('\t');
_ = stringBuilder.Remove(stringBuilder.Length - 1, 1);
lines.Add(stringBuilder.ToString());
}
lines.Add(string.Concat("NUM_DATA_ROWS ", jsonElements.Length.ToString().PadLeft(9, '0')));
lines.Add(string.Concat("NUM_DATA_COLUMNS ", (columns + 3).ToString().PadLeft(9, '0')));
lines.Add("DELIMITER ;");
lines.Add(string.Concat("START_TIME_FORMAT ", format));
lines.Add(string.Concat("START_TIME ", logistics.DateTimeFromSequence.ToString(format))); //12/26/2019 15:22:44
lines.Add(string.Concat("LOGISTICS_COLUMN", '\t', "A_LOGISTICS"));
lines.Add(string.Concat("LOGISTICS_COLUMN", '\t', "B_LOGISTICS"));
if (!string.IsNullOrEmpty(logisticsText))
lines.Add(logisticsText);
else
{
lines.Add(string.Concat("LOGISTICS_1", '\t', "A_CHAMBER=;A_INFO=", fileRead.EventName, ";A_INFO2=", fileRead.EquipmentType, ";A_JOBID=", fileRead.CellInstanceName, ";A_MES_ENTITY=", fileRead.MesEntity, ";A_MID=", logistics.MID, ";A_NULL_DATA=", fileRead.NullData, ";A_PPID=NO_PPID;A_PROCESS_JOBID=", logistics.ProcessJobID, ";A_PRODUCT=;A_SEQUENCE=", logistics.Sequence, ";A_WAFER_ID=;"));
lines.Add(string.Concat("LOGISTICS_2", '\t', "B_CHAMBER=;B_INFO=", fileRead.EventName, ";B_INFO2=", fileRead.EquipmentType, ";B_JOBID=", fileRead.CellInstanceName, ";B_MES_ENTITY=", fileRead.MesEntity, ";B_MID=", logistics.MID, ";B_NULL_DATA=", fileRead.NullData, ";B_PPID=NO_PPID;B_PROCESS_JOBID=", logistics.ProcessJobID, ";B_PRODUCT=;B_SEQUENCE=", logistics.Sequence, ";B_WAFER_ID=;"));
lines.Add("END_HEADER");
}
_ = stringBuilder.Clear();
foreach (string line in lines)
_ = stringBuilder.AppendLine(line);
result = stringBuilder.ToString();
result = result.Replace(headerOffset, result.IndexOf("NUM_DATA_ROWS").ToString().PadLeft(9, '0')).
Replace(dataOffset, result.IndexOf('"').ToString().PadLeft(9, '0')).
Replace(endOffset, result.Length.ToString().PadLeft(9, '0'));
}
return result;
}
internal static Tuple<string, Dictionary<Test, Dictionary<string, List<string>>>> GetTestDictionary(ProcessDataStandardFormat processDataStandardFormat)
{
Dictionary<Test, Dictionary<string, List<string>>> results = new();
List<string>? collection;
string testColumn = nameof(Test);
Dictionary<string, List<string>> keyValuePairs = GetDictionary(processDataStandardFormat);
if (!keyValuePairs.TryGetValue(testColumn, out collection))
throw new Exception();
int min;
int max;
Test testKey;
List<string> vs;
string columnKey;
Dictionary<Test, List<int>> tests = new();
for (int i = 0; i < collection.Count; i++)
{
if (Enum.TryParse(collection[i], out Test test))
{
if (!results.ContainsKey(test))
{
tests.Add(test, new List<int>());
results.Add(test, new Dictionary<string, List<string>>());
}
tests[test].Add(i);
}
}
foreach (KeyValuePair<Test, List<int>> testKeyValuePair in tests)
{
testKey = testKeyValuePair.Key;
min = testKeyValuePair.Value.Min();
max = testKeyValuePair.Value.Max() + 1;
foreach (KeyValuePair<string, List<string>> keyValuePair in keyValuePairs)
results[testKey].Add(keyValuePair.Key, new List<string>());
foreach (KeyValuePair<string, List<string>> keyValuePair in keyValuePairs)
{
vs = keyValuePair.Value;
columnKey = keyValuePair.Key;
for (int i = min; i < max; i++)
{
if (vs.Count > i)
results[testKey][columnKey].Add(vs[i]);
else
results[testKey][columnKey].Add(string.Empty);
}
}
}
return new Tuple<string, Dictionary<Test, Dictionary<string, List<string>>>>(processDataStandardFormat.Logistics[0], results);
}
internal static string GetLines(Logistics logistics, Properties.IScopeInfo scopeInfo, List<string> names, Dictionary<string, List<string>> keyValuePairs, string dateFormat, string timeFormat, List<string> pairedParameterNames, bool useDateTimeFromSequence = true, string format = "", List<string>? ignoreParameterNames = null)
{
StringBuilder result = new();
ignoreParameterNames ??= new List<string>();
if (useDateTimeFromSequence && !string.IsNullOrEmpty(format))
throw new Exception();
else if (!useDateTimeFromSequence && string.IsNullOrEmpty(format))
throw new Exception();
string? nullData;
const string columnDate = "Date";
const string columnTime = "Time";
const string firstDuplicate = "_1";
_ = result.AppendLine(scopeInfo.Header);
StringBuilder line = new();
if (logistics.NullData is null)
nullData = string.Empty;
else
nullData = logistics.NullData.ToString();
int count = (from l in keyValuePairs select l.Value.Count).Min();
for (int r = 0; r < count; r++)
{
_ = line.Clear();
_ = line.Append('!');
foreach (KeyValuePair<string, List<string>> keyValuePair in keyValuePairs)
{
if (!names.Contains(keyValuePair.Key))
continue;
if (ignoreParameterNames.Contains(keyValuePair.Key))
continue;
if (pairedParameterNames.Contains(keyValuePair.Key))
{
if (string.IsNullOrEmpty(keyValuePair.Value[r]) || keyValuePair.Value[r] == nullData)
continue;
else
_ = result.Append(line).Append(keyValuePair.Key).Append(';').AppendLine(keyValuePair.Value[r]);
}
else
{
if (useDateTimeFromSequence && keyValuePair.Key == columnDate)
_ = line.Append(logistics.DateTimeFromSequence.ToString(dateFormat));
else if (useDateTimeFromSequence && keyValuePair.Key == columnTime)
_ = line.Append(logistics.DateTimeFromSequence.ToString(timeFormat));
else if (!useDateTimeFromSequence && keyValuePair.Key == columnDate && keyValuePair.Value[r].Length == format.Length)
_ = line.Append(DateTime.ParseExact(keyValuePair.Value[r], format, CultureInfo.InvariantCulture).ToString(dateFormat));
else if (!useDateTimeFromSequence && keyValuePair.Key == columnTime && keyValuePairs.ContainsKey(string.Concat(keyValuePair.Key, firstDuplicate)) && keyValuePairs[string.Concat(keyValuePair.Key, firstDuplicate)][r].Length == format.Length)
_ = line.Append(DateTime.ParseExact(keyValuePairs[string.Concat(keyValuePair.Key, firstDuplicate)][r], format, CultureInfo.InvariantCulture).ToString(timeFormat));
else if (string.IsNullOrEmpty(keyValuePair.Value[r]) || keyValuePair.Value[r] == nullData)
_ = line.Append(nullData);
else
_ = line.Append(keyValuePair.Value[r]);
_ = line.Append(';');
}
}
if (pairedParameterNames.Count == 0)
{
_ = line.Remove(line.Length - 1, 1);
_ = result.AppendLine(line.ToString());
}
}
return result.ToString();
}
private static string GetString(SearchFor searchFor, bool addSpaces, char separator = ' ')
{
if (!addSpaces)
return string.Concat(((int)searchFor).ToString().PadLeft(2, '0'), searchFor);
else
return string.Concat(((int)searchFor).ToString().PadLeft(2, '0'), separator, searchFor.ToString().Replace("In", string.Concat(separator, "In")).Replace("Ex", string.Concat(separator, "Ex")));
}
private static int? TryGetPropertyIndex(JsonProperty[] jsonProperties, string propertyName)
{
int? result = null;
for (int i = 0; i < jsonProperties.Length; i++)
{
if (jsonProperties[i].Name != propertyName)
continue;
result = i;
break;
}
if (result is null)
{
for (int i = 0; i < jsonProperties.Length; i++)
{
if (jsonProperties[i].Name[0] != propertyName[0])
continue;
if (jsonProperties[i].Name.Length != propertyName.Length)
continue;
if (jsonProperties[i].Name != propertyName)
continue;
result = i;
break;
}
}
return result;
}
}
[JsonSourceGenerationOptions(WriteIndented = true)]
[JsonSerializable(typeof(JsonElement[]))]
internal partial class JsonElementCollectionSourceGenerationContext : JsonSerializerContext
{
}
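
The source-generated context above backs the Deserialize calls in this file, avoiding reflection-based serialization (useful when publishing AOT). A minimal round trip, assuming a JSON string is at hand:

using System.Text.Json;

string json = "[{\"Test\":\"1\"}]";  // hypothetical row
JsonElement[] rows = JsonSerializer.Deserialize(
    json, JsonElementCollectionSourceGenerationContext.Default.JsonElementArray)
    ?? throw new System.Exception();
string indented = JsonSerializer.Serialize(
    rows, JsonElementCollectionSourceGenerationContext.Default.JsonElementArray);  // WriteIndented = true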

View File

@ -0,0 +1,33 @@
using System.Collections.ObjectModel;
namespace Adaptation.Shared;
public class ProcessDataStandardFormatMapping
{
public ReadOnlyCollection<string> BackfillColumns { get; private set; }
public ReadOnlyCollection<int> ColumnIndices { get; private set; }
public ReadOnlyCollection<string> IgnoreColumns { get; private set; }
public ReadOnlyCollection<string> IndexOnlyColumns { get; private set; }
public ReadOnlyDictionary<string, string> KeyValuePairs { get; private set; }
public ReadOnlyCollection<string> NewColumnNames { get; private set; }
public ReadOnlyCollection<string> OldColumnNames { get; private set; }
public ProcessDataStandardFormatMapping(ReadOnlyCollection<string> backfillColumns,
ReadOnlyCollection<int> columnIndices,
ReadOnlyCollection<string> ignoreColumns,
ReadOnlyCollection<string> indexOnlyColumns,
ReadOnlyDictionary<string, string> keyValuePairs,
ReadOnlyCollection<string> newColumnNames,
ReadOnlyCollection<string> oldColumnNames)
{
BackfillColumns = backfillColumns;
ColumnIndices = columnIndices;
IgnoreColumns = ignoreColumns;
IndexOnlyColumns = indexOnlyColumns;
KeyValuePairs = keyValuePairs;
NewColumnNames = newColumnNames;
OldColumnNames = oldColumnNames;
}
}
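
A hypothetical construction, only to show the shape; the real lists come from each cell instance's mapping, and these column names are assumptions. Note that OldColumnNames and ColumnIndices must have matching counts for the mapping to be usable:

using System.Collections.Generic;
using System.Collections.ObjectModel;

ProcessDataStandardFormatMapping mapping = new(
    backfillColumns: new ReadOnlyCollection<string>(new[] { "Thickness" }),
    columnIndices: new ReadOnlyCollection<int>(new[] { 0, 1, -1 }),  // -1 = keep old name
    ignoreColumns: new ReadOnlyCollection<string>(new[] { "Comment" }),
    indexOnlyColumns: new ReadOnlyCollection<string>(new[] { "Index" }),
    keyValuePairs: new ReadOnlyDictionary<string, string>(new Dictionary<string, string>()),
    newColumnNames: new ReadOnlyCollection<string>(new[] { "Time", "Mean", "StdDev" }),
    oldColumnNames: new ReadOnlyCollection<string>(new[] { "Time", "Mean", "Supplier" }));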

View File

@ -46,7 +46,7 @@ public class BIORAD4
Shared.AdaptationTesting.UpdatePassDirectory(variables[2]);
}
#if DEBUG
#if (!DEBUG)
[Ignore]
#endif
[TestMethod]
@ -62,7 +62,7 @@ public class BIORAD4
Shared.AdaptationTesting.UpdatePassDirectory(variables[2]);
}
#if DEBUG
#if (!DEBUG)
[Ignore]
#endif
[TestMethod]
@ -78,7 +78,7 @@ public class BIORAD4
Shared.AdaptationTesting.UpdatePassDirectory(variables[2]);
}
#if DEBUG
#if (!DEBUG)
[Ignore]
#endif
[TestMethod]
@ -94,7 +94,7 @@ public class BIORAD4
Shared.AdaptationTesting.UpdatePassDirectory(variables[2]);
}
#if DEBUG
#if (!DEBUG)
[Ignore]
#endif
[TestMethod]
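Note the inverted guard: with #if DEBUG the [Ignore] attribute was compiled into Debug builds, so these extract tests only ran in Release; with #if (!DEBUG) they run during local Debug sessions and are skipped in Release/CI. The pattern, with a hypothetical test name:

#if (!DEBUG)
[Ignore]          // compiled in for Release/CI only
#endif
[TestMethod]
public void SomeExtract__sketch() { }  // executes in Debug builds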

View File

@ -37,7 +37,7 @@ public class BIORAD5
[TestMethod]
public void Production__v2_59_0__BIORAD5__txt() => _BIORAD5.Production__v2_59_0__BIORAD5__txt();
#if DEBUG
#if (!DEBUG)
[Ignore]
#endif
[TestMethod]
@ -60,7 +60,7 @@ public class BIORAD5
[TestMethod]
public void Production__v2_59_0__BIORAD5__Stratus() => _BIORAD5.Production__v2_59_0__BIORAD5__Stratus();
#if DEBUG
#if (!DEBUG)
[Ignore]
#endif
[TestMethod]

View File

@ -41,7 +41,7 @@ public class BIORAD4
MethodBase methodBase = new StackFrame().GetMethod();
string[] variables = _BIORAD4.AdaptationTesting.GetVariables(methodBase, check);
_ = Shared.AdaptationTesting.GetLogisticsColumnsAndBody(variables[2], variables[4]);
_ = Shared.AdaptationTesting.GetProcessData(variables[2], variables[4]);
IFileRead fileRead = _BIORAD4.AdaptationTesting.Get(methodBase, sourceFileLocation: variables[2], sourceFileFilter: variables[3], useCyclicalForDescription: false);
_ = fileRead.ReExtract();
@ -59,7 +59,7 @@ public class BIORAD4
MethodBase methodBase = new StackFrame().GetMethod();
string[] variables = _BIORAD4.AdaptationTesting.GetVariables(methodBase, check);
_ = Shared.AdaptationTesting.GetLogisticsColumnsAndBody(variables[2], variables[4]);
_ = Shared.AdaptationTesting.GetProcessData(variables[2], variables[4]);
IFileRead fileRead = _BIORAD4.AdaptationTesting.Get(methodBase, sourceFileLocation: variables[2], sourceFileFilter: variables[3], useCyclicalForDescription: false);
_ = fileRead.ReExtract();

View File

@ -51,7 +51,7 @@ public class MET08THFTIRSTRATUS
_MET08THFTIRSTRATUS.Staging__v2_36_3__MET08THFTIRSTRATUS__MET08THFTIRSTRATUS__();
string[] variables = _MET08THFTIRSTRATUS.AdaptationTesting.GetVariables(methodBase, check);
_ = Shared.AdaptationTesting.GetLogisticsColumnsAndBody(variables[2], variables[4]);
_ = Shared.AdaptationTesting.GetProcessData(variables[2], variables[4]);
IFileRead fileRead = _MET08THFTIRSTRATUS.AdaptationTesting.Get(methodBase, sourceFileLocation: variables[2], sourceFileFilter: variables[3], useCyclicalForDescription: false);
_ = fileRead.ReExtract();
@ -75,7 +75,7 @@ public class MET08THFTIRSTRATUS
_MET08THFTIRSTRATUS.Staging__v2_36_3__MET08THFTIRSTRATUS__MET08THFTIRSTRATUS___();
string[] variables = _MET08THFTIRSTRATUS.AdaptationTesting.GetVariables(methodBase, check);
_ = Shared.AdaptationTesting.GetLogisticsColumnsAndBody(variables[2], variables[4]);
_ = Shared.AdaptationTesting.GetProcessData(variables[2], variables[4]);
IFileRead fileRead = _MET08THFTIRSTRATUS.AdaptationTesting.Get(methodBase, sourceFileLocation: variables[2], sourceFileFilter: variables[3], useCyclicalForDescription: false);
_ = fileRead.ReExtract();

View File

@ -41,7 +41,7 @@ public class BIORAD4
MethodBase methodBase = new StackFrame().GetMethod();
string[] variables = _BIORAD4.AdaptationTesting.GetVariables(methodBase, check);
_ = Shared.AdaptationTesting.GetLogisticsColumnsAndBody(variables[2], variables[4]);
_ = Shared.AdaptationTesting.GetProcessData(variables[2], variables[4]);
IFileRead fileRead = _BIORAD4.AdaptationTesting.Get(methodBase, sourceFileLocation: variables[2], sourceFileFilter: variables[3], useCyclicalForDescription: false);
_ = fileRead.ReExtract();

View File

@ -51,7 +51,7 @@ public class MET08THFTIRSTRATUS
_MET08THFTIRSTRATUS.Staging__v2_39_0__MET08THFTIRSTRATUS__MET08THFTIRSTRATUS__();
string[] variables = _MET08THFTIRSTRATUS.AdaptationTesting.GetVariables(methodBase, check);
_ = Shared.AdaptationTesting.GetLogisticsColumnsAndBody(variables[2], variables[4]);
_ = Shared.AdaptationTesting.GetProcessData(variables[2], variables[4]);
IFileRead fileRead = _MET08THFTIRSTRATUS.AdaptationTesting.Get(methodBase, sourceFileLocation: variables[2], sourceFileFilter: variables[3], useCyclicalForDescription: false);
_ = fileRead.ReExtract();
@ -75,7 +75,7 @@ public class MET08THFTIRSTRATUS
_MET08THFTIRSTRATUS.Staging__v2_39_0__MET08THFTIRSTRATUS__MET08THFTIRSTRATUS___();
string[] variables = _MET08THFTIRSTRATUS.AdaptationTesting.GetVariables(methodBase, check);
_ = Shared.AdaptationTesting.GetLogisticsColumnsAndBody(variables[2], variables[4]);
_ = Shared.AdaptationTesting.GetProcessData(variables[2], variables[4]);
IFileRead fileRead = _MET08THFTIRSTRATUS.AdaptationTesting.Get(methodBase, sourceFileLocation: variables[2], sourceFileFilter: variables[3], useCyclicalForDescription: false);
_ = fileRead.ReExtract();

View File

@ -41,7 +41,7 @@ public class BIORAD4
MethodBase methodBase = new StackFrame().GetMethod();
string[] variables = _BIORAD4.AdaptationTesting.GetVariables(methodBase, check);
_ = Shared.AdaptationTesting.GetLogisticsColumnsAndBody(variables[2], variables[4]);
_ = Shared.AdaptationTesting.GetProcessData(variables[2], variables[4]);
IFileRead fileRead = _BIORAD4.AdaptationTesting.Get(methodBase, sourceFileLocation: variables[2], sourceFileFilter: variables[3], useCyclicalForDescription: false);
_ = fileRead.ReExtract();

File diff suppressed because it is too large

View File

@ -0,0 +1,109 @@
"use strict";
// getValue($('dcp.BIORAD4/csv/Count', ''), $('dcp.BIORAD4/csv/Index', ''), $('dcp.BIORAD4/csv/Slot', ''), $('dcp.BIORAD4/csv/Mean', ''));
function getValue5(count, index, slot, mean) {
let result = null;
if (count !== 2 || index !== 0 || slot !== "1" || mean == '')
result = '';
else
result = mean;
return result;
}
const values14 = [
{ count: 2, index: 0, slot: '1', mean: '1.23', result: '1.23' },
{ count: 3, index: 0, slot: '1', mean: '1.23', result: '' },
{ count: 2, index: 1, slot: '1', mean: '1.23', result: '' },
{ count: 2, index: 0, slot: '2', mean: '1.23', result: '' },
];
values14.forEach(element => {
let result = getValue5(element.count, element.index, element.slot, element.mean);
if (result != element.result)
console.error("result doesn't match!");
else
console.info('Match');
});
// getValue($('dcp.BIORAD4/csv/Count', ''), $('dcp.BIORAD4/csv/Index', ''), $('dcp.BIORAD4/csv/Slot', ''), $('dcp.BIORAD4/csv/Mean', ''));
function getValue31(count, index, slot, mean) {
let result = null;
if (count !== 2 || index !== 1 || slot !== "25" || mean == '')
result = '';
else
result = mean;
return result;
}
const values40 = [
{ count: 2, index: 1, slot: '25', mean: '1.23', result: '1.23' },
{ count: 3, index: 0, slot: '25', mean: '1.23', result: '' },
{ count: 2, index: 0, slot: '25', mean: '1.23', result: '' },
{ count: 2, index: 0, slot: '24', mean: '1.23', result: '' },
];
values40.forEach(element => {
let result = getValue31(element.count, element.index, element.slot, element.mean);
if (result != element.result)
console.error("result doesn't match!");
else
console.info('Match');
});
// getValue($('dcp.BIORAD4/csv/Count', ''), $('dcp.BIORAD4/csv/Index', ''), $('dcp.BIORAD4/csv/Slot', ''), $('gv.thicknessMeanSlotOne', ''), $('gv.thicknessMeanSlotTwentyFive', ''));
function getValue57(count, index, slot, thicknessMeanSlotOne, thicknessMeanSlotTwentyFive) {
let result = null;
if (count !== 2 || index !== 1 || slot !== "25" || thicknessMeanSlotOne == '' || thicknessMeanSlotTwentyFive == '')
result = '';
else
result = Math.round((parseFloat(thicknessMeanSlotOne) - parseFloat(thicknessMeanSlotTwentyFive)) * 1000) / 1000;
return result;
}
const values66 = [
{ count: 2, index: 1, slot: '25', thicknessMeanSlotOne: '9.163', thicknessMeanSlotTwentyFive: '9.112', result: '0.051' },
{ count: 3, index: 0, slot: '25', thicknessMeanSlotOne: '9.163', thicknessMeanSlotTwentyFive: '9.112', result: '' },
{ count: 2, index: 0, slot: '25', thicknessMeanSlotOne: '9.163', thicknessMeanSlotTwentyFive: '9.112', result: '' },
{ count: 2, index: 0, slot: '24', thicknessMeanSlotOne: '9.163', thicknessMeanSlotTwentyFive: '9.112', result: '' },
];
values66.forEach(element => {
let result = getValue57(element.count, element.index, element.slot, element.thicknessMeanSlotOne, element.thicknessMeanSlotTwentyFive);
if (result != element.result)
console.error("result doesn't match!");
else
console.info('Match');
});
// getValue($('dcp.BIORAD4/csv/Count', ''), $('dcp.BIORAD4/csv/Index', ''), $('dcp.BIORAD4/csv/Slot', ''), $('gv.thicknessMeanSlotOne', ''), $('gv.thicknessMeanSlotTwentyFive', ''));
function getValue(count, index, slot, thicknessMeanSlotOne, thicknessMeanSlotTwentyFive) {
let result = null;
if (count !== 2 || index !== 1 || slot !== "25" || thicknessMeanSlotOne == '' || thicknessMeanSlotTwentyFive == '')
result = '';
else {
const thick01 = parseFloat(thicknessMeanSlotOne);
const thick25 = parseFloat(thicknessMeanSlotTwentyFive);
const percent = Math.abs(thick01 - thick25) / ((thick01 + thick25) / 2) * 100;
result = (Math.round(percent * 1000)) / 1000;
}
return result;
}
const values92 = [
{ count: 2, index: 1, slot: '25', thicknessMeanSlotOne: '9.163', thicknessMeanSlotTwentyFive: '9.112', result: '0.558' },
{ count: 3, index: 0, slot: '25', thicknessMeanSlotOne: '9.163', thicknessMeanSlotTwentyFive: '9.112', result: '' },
{ count: 2, index: 0, slot: '25', thicknessMeanSlotOne: '9.163', thicknessMeanSlotTwentyFive: '9.112', result: '' },
{ count: 2, index: 0, slot: '24', thicknessMeanSlotOne: '9.163', thicknessMeanSlotTwentyFive: '9.112', result: '' },
];
values92.forEach(element => {
let result = getValue(element.count, element.index, element.slot, element.thicknessMeanSlotOne, element.thicknessMeanSlotTwentyFive);
if (result != element.result)
console.error("result doesn't match!");
else
console.info('Match');
});

View File

@ -171,6 +171,7 @@
<Compile Include="Adaptation\Shared\Metrology\WS.Results.cs" />
<Compile Include="Adaptation\Shared\ParameterType.cs" />
<Compile Include="Adaptation\Shared\ProcessDataStandardFormat.cs" />
<Compile Include="Adaptation\Shared\ProcessDataStandardFormatMapping.cs" />
<Compile Include="Adaptation\Shared\Properties\IDescription.cs" />
<Compile Include="Adaptation\Shared\Properties\IFileRead.cs" />
<Compile Include="Adaptation\Shared\Properties\ILogistics.cs" />