ProcessDataStandardFormat over Tuple
MoveMatchingFiles to use ProcessDataStandardFormatMapping
This commit is contained in:
parent 3966b75da7
commit 3e9fd6224e

Changed files:
Adaptation/.vscode/tasks.json (vendored): 20 lines changed
@@ -92,6 +92,26 @@
             "command": "code ../MET08DDUPSFS6420.csproj",
             "problemMatcher": []
         },
+        {
+            "label": "Readme",
+            "type": "shell",
+            "command": "code ../README.md",
+            "problemMatcher": []
+        },
+        {
+            "label": "File-Folder-Helper AOT s X Day-Helper-2025-03-20",
+            "type": "shell",
+            "command": "L:/DevOps/Mesa_FI/File-Folder-Helper/bin/Release/net8.0/win-x64/publish/File-Folder-Helper.exe",
+            "args": [
+                "s",
+                "X",
+                "L:/DevOps/EAF-Mesa-Integration/MET08DDUPSFS6420",
+                "Day-Helper-2025-03-20",
+                "false",
+                "4"
+            ],
+            "problemMatcher": []
+        },
         {
             "label": "Git Config",
             "type": "shell",
@@ -120,15 +120,15 @@ public class FileRead : Shared.FileRead, IFileRead

    private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
    {
        Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
-       Tuple<string, string[], string[]> pdsf = ProcessDataStandardFormat.GetLogisticsColumnsAndBody(reportFullPath);
-       _Logistics = new Logistics(reportFullPath, pdsf.Item1);
+       ProcessDataStandardFormat processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath);
+       _Logistics = new Logistics(reportFullPath, processDataStandardFormat);
        SetFileParameterLotIDToLogisticsMID();
-       JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(pdsf);
+       JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(processDataStandardFormat);
        List<Shared.Properties.IDescription> descriptions = GetDuplicatorDescriptions(jsonElements);
        Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
        if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
            FileCopy(reportFullPath, dateTime, descriptions);
-       results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(pdsf.Item1, tests, jsonElements, new List<FileInfo>());
+       results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics[0]), tests, jsonElements, new List<FileInfo>());
        return results;
    }
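The same Tuple-to-object migration repeats in every handler below. For reference, a standalone sketch of the shape of the change, using hypothetical stand-in types rather than the repository's shared classes:

using System;
using System.Collections.ObjectModel;

internal sealed class ProcessDataStandardFormatSketch
{
    // Stand-in for Adaptation.Shared.ProcessDataStandardFormat (hypothetical).
    public ReadOnlyCollection<string> Logistics { get; }
    public ProcessDataStandardFormatSketch(ReadOnlyCollection<string> logistics) => Logistics = logistics;
}

internal static class MigrationSketch
{
    // Before: positional tuple access; Item1 carried the joined logistics text.
    private static string Before(Tuple<string, string[], string[]> pdsf) => pdsf.Item1;

    // After: named properties; callers take the first LOGISTICS_1 line directly.
    private static string After(ProcessDataStandardFormatSketch pdsf) => pdsf.Logistics[0];

    private static void Main()
    {
        ProcessDataStandardFormatSketch typed = new(new(new[] { "LOGISTICS_1\tA_JOBID=JOB;" }));
        Console.WriteLine(Before(Tuple.Create("LOGISTICS_1\tA_JOBID=JOB;", Array.Empty<string>(), Array.Empty<string>())));
        Console.WriteLine(After(typed)); // same text, no Item1/Item2 indirection
    }
}

Named members replace positional tuple items, so call sites stop depending on the tuple's field order.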
@@ -120,9 +120,10 @@ public class FileRead : Shared.FileRead, IFileRead
        if (dateTime == DateTime.MinValue)
            throw new ArgumentNullException(nameof(dateTime));
        string logisticsSequence = _Logistics.Sequence.ToString();
+       string day = $"{_Logistics.DateTimeFromSequence:yyyy-MM-dd}";
        string weekOfYear = _Calendar.GetWeekOfYear(_Logistics.DateTimeFromSequence, CalendarWeekRule.FirstDay, DayOfWeek.Sunday).ToString("00");
-       string weekDirectory = $"{_Logistics.DateTimeFromSequence:yyyy}_Week_{weekOfYear}{@"\"}{_Logistics.DateTimeFromSequence:yyyy-MM-dd}";
-       string destinationArchiveDirectory = Path.Combine(_JobIdArchiveParentDirectory, _Logistics.JobID, weekDirectory);
+       string weekDirectory = $"{_Logistics.DateTimeFromSequence:yyyy}_Week_{weekOfYear}";
+       string destinationArchiveDirectory = Path.Combine(_JobIdArchiveParentDirectory, _Logistics.JobID, weekDirectory, day);
        if (!Directory.Exists(destinationArchiveDirectory))
            _ = Directory.CreateDirectory(destinationArchiveDirectory);
        string jobIdDirectory = Path.Combine(_JobIdParentDirectory, _Logistics.JobID);
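A quick illustration of the resulting layout: the day folder moves from being embedded in the week directory's name (with a hard-coded backslash) to a separate Path.Combine segment. Hypothetical values; the real roots come from _JobIdArchiveParentDirectory and the logistics sequence:

using System;
using System.IO;

internal static class ArchivePathSketch
{
    private static void Main()
    {
        DateTime dt = new(2025, 3, 20);
        string weekOfYear = "12"; // GetWeekOfYear(...).ToString("00") in the handler
        // Before: day folder baked into weekDirectory via a literal backslash.
        string before = Path.Combine("Archive", "JOBID", $"{dt:yyyy}_Week_{weekOfYear}\\{dt:yyyy-MM-dd}");
        // After: the day is its own segment, composed by Path.Combine.
        string day = $"{dt:yyyy-MM-dd}";
        string after = Path.Combine("Archive", "JOBID", $"{dt:yyyy}_Week_{weekOfYear}", day);
        Console.WriteLine(before); // Archive\JOBID\2025_Week_12\2025-03-20 on Windows
        Console.WriteLine(after);  // same target, but built portably
    }
}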
@@ -144,15 +145,15 @@ public class FileRead : Shared.FileRead, IFileRead

    private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
    {
        Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
-       Tuple<string, string[], string[]> pdsf = ProcessDataStandardFormat.GetLogisticsColumnsAndBody(reportFullPath);
-       _Logistics = new Logistics(reportFullPath, pdsf.Item1);
+       ProcessDataStandardFormat processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath);
+       _Logistics = new Logistics(reportFullPath, processDataStandardFormat);
        SetFileParameterLotIDToLogisticsMID();
-       JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(pdsf);
+       JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(processDataStandardFormat);
        List<Shared.Properties.IDescription> descriptions = GetDuplicatorDescriptions(jsonElements);
        Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
        if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
            MoveArchive(reportFullPath, dateTime);
-       results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(pdsf.Item1, tests, jsonElements, new List<FileInfo>());
+       results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics[0]), tests, jsonElements, new List<FileInfo>());
        return results;
    }
@@ -154,13 +154,13 @@ public class FileRead : Shared.FileRead, IFileRead

    private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
    {
        Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
-       Tuple<string, string[], string[]> pdsf = ProcessDataStandardFormat.GetLogisticsColumnsAndBody(reportFullPath);
-       _Logistics = new Logistics(reportFullPath, pdsf.Item1);
+       ProcessDataStandardFormat processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath);
+       _Logistics = new Logistics(reportFullPath, processDataStandardFormat);
        SetFileParameterLotIDToLogisticsMID();
-       JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(pdsf);
+       JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(processDataStandardFormat);
        List<pcl.Description> descriptions = pcl.ProcessData.GetDescriptions(jsonElements);
        Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
-       results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(pdsf.Item1, tests, jsonElements, new List<FileInfo>());
+       results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics[0]), tests, jsonElements, new List<FileInfo>());
        if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
            SaveIQSFile(reportFullPath, dateTime, descriptions, tests);
        if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
@@ -5,16 +5,70 @@ using Adaptation.Shared.Duplicator;
 using Adaptation.Shared.Methods;
 using System;
 using System.Collections.Generic;
+using System.Collections.ObjectModel;
 using System.IO;
 using System.Linq;
 using System.Text;
 using System.Text.Json;
 using System.Threading;

 namespace Adaptation.FileHandlers.MoveMatchingFiles;

+#nullable enable
+
 public class FileRead : Shared.FileRead, IFileRead
 {

+   internal class PreWith
+   {
+
+       internal string MatchingFile { get; private set; }
+       internal string CheckFile { get; private set; }
+       internal string ErrFile { get; private set; }
+       internal string CheckDirectory { get; private set; }
+       internal string NoWaitDirectory { get; private set; }
+
+       internal PreWith(string matchingFile, string checkFile, string errFile, string checkDirectory, string noWaitDirectory)
+       {
+           MatchingFile = matchingFile;
+           CheckFile = checkFile;
+           ErrFile = errFile;
+           CheckDirectory = checkDirectory;
+           NoWaitDirectory = noWaitDirectory;
+       }
+
+   }
+
+   internal class Pre
+   {
+
+       internal string MatchingFile { get; private set; }
+       internal string CheckFile { get; private set; }
+
+       internal Pre(string matchingFile, string checkFile)
+       {
+           MatchingFile = matchingFile;
+           CheckFile = checkFile;
+       }
+
+   }
+
+   internal class Post
+   {
+
+       internal string ErrFile { get; private set; }
+       internal string CheckFile { get; private set; }
+
+       internal Post(string checkFile, string errFile)
+       {
+           ErrFile = errFile;
+           CheckFile = checkFile;
+       }
+
+   }
+
+   private readonly ProcessDataStandardFormatMapping _ProcessDataStandardFormatMapping;
+
    public FileRead(ISMTP smtp, Dictionary<string, string> fileParameter, string cellInstanceName, int? connectionCount, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList<ModelObjectParameterDefinition> modelObjectParameters, string equipmentDictionaryName, Dictionary<string, List<long>> dummyRuns, Dictionary<long, List<string>> staticRuns, bool useCyclicalForDescription, bool isEAFHosted) :
        base(new Description(), false, smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null)
    {
@@ -27,6 +81,7 @@ public class FileRead : Shared.FileRead, IFileRead
            throw new Exception(cellInstanceConnectionName);
        if (!_IsDuplicator)
            throw new Exception(cellInstanceConnectionName);
+       _ProcessDataStandardFormatMapping = GetProcessDataStandardFormatMapping();
    }

    void IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, Exception exception)
@@ -41,7 +96,8 @@ public class FileRead : Shared.FileRead, IFileRead
        Move(extractResults);
    }

-   void IFileRead.WaitForThread() => WaitForThread(thread: null, threadExceptions: null);
+   void IFileRead.WaitForThread() =>
+       WaitForThread(thread: null, threadExceptions: null);

    string IFileRead.GetEventDescription()
    {
@@ -88,7 +144,7 @@ public class FileRead : Shared.FileRead, IFileRead
            DateTime dateTime = DateTime.Now;
            results = GetExtractResult(reportFullPath, dateTime);
            if (results.Item3 is null)
-               results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(results.Item1, Array.Empty<Test>(), JsonSerializer.Deserialize<JsonElement[]>("[]"), results.Item4);
+               results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(results.Item1, Array.Empty<Test>(), JsonSerializer.Deserialize<JsonElement[]>("[]") ?? throw new Exception(), results.Item4);
            if (results.Item3.Length > 0 && _IsEAFHosted)
                WritePDSF(this, results.Item3);
            UpdateLastTicksDuration(DateTime.Now.Ticks - dateTime.Ticks);
@@ -104,7 +160,69 @@ public class FileRead : Shared.FileRead, IFileRead
        return results;
    }

-   private static List<string> GetSearchDirectories(int numberLength, string parentDirectory)
+   private static ProcessDataStandardFormatMapping GetProcessDataStandardFormatMapping()
+   {
+       ProcessDataStandardFormatMapping result;
+       string[] segmentsB;
+       List<string> distinct = new();
+       Dictionary<string, string> keyValuePairs = new();
+       string args4 = "Time,HeaderUniqueId,UniqueId,Date";
+       string args5 = "";
+       string args6 = "";
+       string args7 = "Test|EventId,Lot|Id,Slot|WaferId,AreaTotal|WaferAreaTotal,HazeAverage|WaferHazeAverage,HazeRegion|WaferHazeRegion,ScratchTotal|WaferScratchTotal";
+       string args8 = "Time,A_LOGISTICS,B_LOGISTICS,Test,Count,Index,MesEntity,Date,Employee,Lot,PSN,Reactor,Recipe,Comments,Diameter,Exclusion,Gain,HeaderUniqueId,Laser,ParseErrorText,RDS,Slot,UniqueId,AreaCount,AreaCountAvg,AreaCountMax,AreaCountMin,AreaCountStdDev,AreaTotal,AreaTotalAvg,AreaTotalMax,AreaTotalMin,AreaTotalStdDev,Bin1,Bin2,Bin3,Bin4,Bin5,Bin6,Bin7,Bin8,HazeAverage,HazeAverageAvg,HazeAverageMax,HazeAverageMin,HazeAverageStdDev,HazePeak,HazeRegion,HazeRegionAvg,HazeRegionMax,HazeRegionMin,HazeRegionStdDev,HazeRng,LPDCM2,LPDCM2Avg,LPDCM2Max,LPDCM2Min,LPDCM2StdDev,LPDCount,LPDCountAvg,LPDCountMax,LPDCountMin,LPDCountStdDev,Mean,ScratchCount,ScratchCountAvg,ScratchCountMax,ScratchCountMin,ScratchCountStdDev,ScratchTotal,ScratchTotalAvg,ScratchTotalMax,ScratchTotalMin,ScratchTotalStdDev,Sort,StdDev,SumOfDefects,SumOfDefectsAvg,SumOfDefectsMax,SumOfDefectsMin,SumOfDefectsStdDev,Thresh,Thruput";
+       string args9 = "Time,A_LOGISTICS,B_LOGISTICS,Count,Sequence,MesEntity,Index,Date,Recipe,Id,WaferId,LPDCount,LPDCM2,AreaCount,AreaTotal,ScratchCount,ScratchTotal,SumOfDefects,HazeRegion,HazeAverage,Grade,LPDCountMin,LPDCM2Min,AreaCountMin,AreaTotalMin,ScratchCountMin,ScratchTotalMin,SumOfDefectsMin,HazeRegionMin,HazeAverageMin,LPDCountMax,LPDCM2Max,AreaCountMax,AreaTotalMax,ScratchCountMax,ScratchTotalMax,SumOfDefectsMax,HazeRegionMax,HazeAverageMax,LPDCountAvg,LPDCM2Avg,AreaCountAvg,AreaTotalAvg,ScratchCountAvg,ScratchTotalAvg,SumOfDefectsAvg,HazeRegionAvg,HazeAverageAvg,LPDCountStdDev,LPDCM2StdDev,AreaCountStdDev,AreaTotalStdDev,ScratchCountStdDev,ScratchTotalStdDev,SumOfDefectsStdDev,HazeRegionStdDev,HazeAverageStdDev,WaferDate,Comments,Sort,WaferLPDCount,WaferLPDCM2,Bin1,Bin2,Bin3,Bin4,Bin5,Bin6,Bin7,Bin8,Mean,StdDev,WaferAreaCount,WaferAreaTotal,WaferScratchCount,WaferScratchTotal,WaferSumOfDefects,WaferHazeRegion,WaferHazeAverage,HazePeak,Laser,Gain,Diameter,Thresh,Exclusion,HazeRng,Thruput,WaferRecipe,RDS,PSN,Reactor,Layer,Zone,Employee,InferredLot,EventId";
+       string args10 = "0,1,2,95,3,6,5,7,93,9,89,90,8,58,82,84,81,-1,80,-1,88,10,-1,13,41,32,23,50,73,42,33,24,51,62,63,64,65,66,67,68,69,78,47,38,29,56,79,77,46,37,28,55,85,12,40,31,22,49,11,39,30,21,48,70,15,43,34,25,52,75,44,35,26,53,59,71,17,45,36,27,54,83,86";
+       string[] segments = args7.Split(',');
+       ReadOnlyCollection<string> ignoreColumns = new(args4.Split(','));
+       ReadOnlyCollection<string> newColumnNames = new(args9.Split(','));
+       ReadOnlyCollection<string> oldColumnNames = new(args8.Split(','));
+       ReadOnlyCollection<string> backfillColumns = new(args5.Split(','));
+       ReadOnlyCollection<string> indexOnlyColumns = new(args6.Split(','));
+       ReadOnlyCollection<int> columnIndices = new(args10.Split(',').Select(int.Parse).ToArray());
+       foreach (string segment in segments)
+       {
+           segmentsB = segment.Split('|');
+           if (segmentsB.Length != 2)
+               continue;
+           if (distinct.Contains(segmentsB[0]))
+               continue;
+           distinct.Add(segmentsB[0]);
+           keyValuePairs.Add(segmentsB[0], segmentsB[1]);
+       }
+       result = new(backfillColumns: backfillColumns,
+           columnIndices: columnIndices,
+           newColumnNames: newColumnNames,
+           ignoreColumns: ignoreColumns,
+           indexOnlyColumns: indexOnlyColumns,
+           keyValuePairs: new(keyValuePairs),
+           oldColumnNames: oldColumnNames);
+       return result;
+   }
+
+   private static ReadOnlyCollection<PreWith> GetPreWithCollection(ReadOnlyCollection<Pre> preCollection)
+   {
+       List<PreWith> results = new();
+       string errFile;
+       PreWith preWith;
+       string? checkDirectory;
+       string noWaitDirectory;
+       foreach (Pre pre in preCollection)
+       {
+           errFile = string.Concat(pre.CheckFile, ".err");
+           checkDirectory = Path.GetDirectoryName(pre.CheckFile);
+           if (string.IsNullOrEmpty(checkDirectory))
+               continue;
+           if (!Directory.Exists(checkDirectory))
+               _ = Directory.CreateDirectory(checkDirectory);
+           noWaitDirectory = Path.Combine(checkDirectory, "NoWaitDirectory");
+           preWith = new(pre.MatchingFile, pre.CheckFile, errFile, checkDirectory, noWaitDirectory);
+           results.Add(preWith);
+       }
+       return results.AsReadOnly();
+   }
+
+   private static ReadOnlyCollection<string> GetSearchDirectories(int numberLength, string parentDirectory)
    {
        List<string> results = new();
        string[] directories = Directory.GetDirectories(parentDirectory, "*", SearchOption.TopDirectoryOnly);
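For clarity, the pipe-delimited pairs in args7 are what become the old-name to new-name dictionary; the loop above in isolation, as a runnable rendition:

using System;
using System.Collections.Generic;

internal static class MappingPairsSketch
{
    private static void Main()
    {
        string args7 = "Test|EventId,Lot|Id,Slot|WaferId"; // abbreviated from the method above
        List<string> distinct = new();
        Dictionary<string, string> keyValuePairs = new();
        foreach (string segment in args7.Split(','))
        {
            string[] segmentsB = segment.Split('|');
            if (segmentsB.Length != 2 || distinct.Contains(segmentsB[0]))
                continue; // skip malformed entries and duplicate keys
            distinct.Add(segmentsB[0]);
            keyValuePairs.Add(segmentsB[0], segmentsB[1]);
        }
        foreach (KeyValuePair<string, string> kvp in keyValuePairs)
            Console.WriteLine($"{kvp.Key} -> {kvp.Value}"); // Test -> EventId, Lot -> Id, Slot -> WaferId
    }
}

args8/args9 carry the old and new column-name lists and args10 the index remap; the pairs only cover columns whose names changed between the two schemas.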
@@ -115,10 +233,136 @@ public class FileRead : Shared.FileRead, IFileRead
            results.Add(directory);
        }
        results.Sort();
        return results.AsReadOnly();
    }

+   private static void CreatePointerFile(int numberLength, string parentDirectory, ReadOnlyCollection<string> matchingFiles)
+   {
+       string checkFile;
+       string writeFile;
+       string? directoryName;
+       int parentDirectoryLength = parentDirectory.Length;
+       foreach (string matchingFile in matchingFiles)
+       {
+           directoryName = Path.GetDirectoryName(matchingFile);
+           if (directoryName is null)
+               continue;
+           checkFile = $"{matchingFile[0]}{directoryName.Substring(parentDirectoryLength + numberLength + 1)}";
+           writeFile = Path.Combine(parentDirectory, $"{directoryName.Substring(parentDirectory.Length + 1, numberLength)}.txt");
+           if (File.Exists(writeFile))
+               continue;
+           File.AppendAllLines(writeFile, new string[] { parentDirectory, matchingFile, directoryName, checkFile });
+       }
+   }
+
+   private static ReadOnlyCollection<Pre> GetPreCollection(int numberLength, string parentDirectory, ReadOnlyCollection<string> matchingFiles)
+   {
+       List<Pre> results = new();
+       Pre pre;
+       string checkFile;
+       int parentDirectoryLength = parentDirectory.Length;
+       foreach (string matchingFile in matchingFiles)
+       {
+           checkFile = $"{matchingFile[0]}{matchingFile.Substring(parentDirectoryLength + numberLength + 1)}";
+           pre = new(matchingFile, checkFile);
+           results.Add(pre);
+       }
+       return results.AsReadOnly();
+   }
+
+   private void MoveCollection(DateTime dateTime, ProcessDataStandardFormat? processDataStandardFormat, ReadOnlyCollection<PreWith> preWithCollection)
+   {
+       ReadOnlyCollection<Post> postCollection = GetPostCollection(dateTime, processDataStandardFormat, preWithCollection);
+       if (postCollection.Count != 0)
+       {
+           Thread.Sleep(500);
+           StringBuilder stringBuilder = new();
+           foreach (Post post in postCollection)
+           {
+               if (File.Exists(post.ErrFile))
+                   _ = stringBuilder.AppendLine(File.ReadAllText(post.ErrFile));
+               if (File.Exists(post.CheckFile))
+                   _ = stringBuilder.AppendLine($"<{post.CheckFile}> was not consumed by the end!");
+           }
+           if (stringBuilder.Length > 0)
+               throw new Exception(stringBuilder.ToString());
+       }
+   }
+
+   private ReadOnlyCollection<Post> GetPostCollection(DateTime dateTime, ProcessDataStandardFormat? processDataStandardFormat, ReadOnlyCollection<PreWith> preWithCollection)
+   {
+       List<Post> results = new();
+       Post post;
+       long preWait;
+       foreach (PreWith preWith in preWithCollection)
+       {
+           if (processDataStandardFormat is null)
+               File.Move(preWith.MatchingFile, preWith.CheckFile);
+           else
+           {
+               ProcessDataStandardFormat.Write(preWith.CheckFile, processDataStandardFormat);
+               File.Delete(preWith.MatchingFile);
+           }
+           if (Directory.Exists(preWith.NoWaitDirectory))
+           {
+               post = new(preWith.CheckFile, preWith.ErrFile);
+               results.Add(post);
+               continue;
+           }
+           if (_FileConnectorConfiguration?.FileHandleWaitTime is null)
+               preWait = DateTime.Now.AddMilliseconds(1234).Ticks;
+           else
+               preWait = DateTime.Now.AddMilliseconds(_FileConnectorConfiguration.FileHandleWaitTime.Value).Ticks;
+           for (short i = 0; i < short.MaxValue; i++)
+           {
+               if (DateTime.Now.Ticks > preWait)
+                   break;
+               Thread.Sleep(500);
+           }
+           for (int i = 0; i < int.MaxValue; i++)
+           {
+               if (File.Exists(preWith.ErrFile))
+                   throw new Exception(File.ReadAllText(preWith.ErrFile));
+               if (!File.Exists(preWith.CheckFile))
+                   break;
+               if (new TimeSpan(DateTime.Now.Ticks - dateTime.Ticks).TotalSeconds > _BreakAfterSeconds)
+                   throw new Exception($"Not all files were consumed after {_BreakAfterSeconds} second(s)!");
+               Thread.Sleep(500);
+           }
+       }
+       return results.AsReadOnly();
+   }
+
+   private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
+   {
+       Tuple<string, Test[], JsonElement[], List<FileInfo>> results = new(string.Empty, Array.Empty<Test>(), Array.Empty<JsonElement>(), new List<FileInfo>());
+       ProcessDataStandardFormat? processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath, _ProcessDataStandardFormatMapping);
+       if (processDataStandardFormat is not null)
+           _Logistics = new Logistics(reportFullPath, processDataStandardFormat);
+       else
+       {
+           processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath);
+           _Logistics = new Logistics(reportFullPath, processDataStandardFormat);
+           processDataStandardFormat = null;
+       }
+       SetFileParameterLotIDToLogisticsMID();
+       int numberLength = 2;
+       long ticks = dateTime.Ticks;
+       string parentParentDirectory = GetParentParent(reportFullPath);
+       ReadOnlyCollection<string> searchDirectories = GetSearchDirectories(numberLength, parentParentDirectory);
+       ReadOnlyCollection<string> matchingFiles = GetMatchingFiles(ticks, reportFullPath, searchDirectories);
+       if (matchingFiles.Count != searchDirectories.Count)
+           throw new Exception($"Didn't find all files after {_BreakAfterSeconds} second(s)!");
+       try
+       { CreatePointerFile(numberLength, parentParentDirectory, matchingFiles); }
+       catch (Exception) { }
+       ReadOnlyCollection<Pre> preCollection = GetPreCollection(numberLength, parentParentDirectory, matchingFiles);
+       ReadOnlyCollection<PreWith> preWithCollection = GetPreWithCollection(preCollection);
+       MoveCollection(dateTime, processDataStandardFormat, preWithCollection);
+       return results;
+   }
+
-   private List<string> GetMatchingFiles(long ticks, string reportFullPath, List<string> searchDirectories)
+   private ReadOnlyCollection<string> GetMatchingFiles(long ticks, string reportFullPath, ReadOnlyCollection<string> searchDirectories)
    {
        List<string> results = new();
        string[] found;
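GetPostCollection's wait strategy, condensed: an initial settle delay (FileHandleWaitTime, defaulting to 1234 ms), then polling every 500 ms until the downstream consumer deletes the check file, surfaces an .err file, or the deadline passes. A standalone sketch with a hypothetical deadline constant (the handler reads _BreakAfterSeconds from shared configuration):

using System;
using System.IO;
using System.Threading;

internal static class ConsumeWaitSketch
{
    private const int BreakAfterSeconds = 360; // hypothetical value

    private static void WaitForConsumer(string checkFile, string errFile, DateTime start, int? fileHandleWaitTimeMs)
    {
        long preWait = DateTime.Now.AddMilliseconds(fileHandleWaitTimeMs ?? 1234).Ticks;
        while (DateTime.Now.Ticks <= preWait)
            Thread.Sleep(500); // settle delay before the first check
        while (true)
        {
            if (File.Exists(errFile))
                throw new Exception(File.ReadAllText(errFile)); // consumer reported failure
            if (!File.Exists(checkFile))
                break; // consumer picked the file up
            if ((DateTime.Now - start).TotalSeconds > BreakAfterSeconds)
                throw new Exception($"Not all files were consumed after {BreakAfterSeconds} second(s)!");
            Thread.Sleep(500);
        }
    }

    private static void Main()
    {
        string dir = Path.Combine(Path.GetTempPath(), "consume-wait-sketch");
        _ = Directory.CreateDirectory(dir);
        string checkFile = Path.Combine(dir, "example.pdsf"); // never created: loop exits on first poll
        WaitForConsumer(checkFile, checkFile + ".err", DateTime.Now, fileHandleWaitTimeMs: null);
        Console.WriteLine("consumed (check file absent)");
    }
}

Files routed to a NoWaitDirectory skip this loop entirely and are verified once, in bulk, by MoveCollection.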
@@ -137,129 +381,7 @@ public class FileRead : Shared.FileRead, IFileRead
                break;
            }
        }
-       return results;
+       return results.AsReadOnly();
    }

-   private static List<(string matchingFile, string checkFile)> GetCollection(int numberLength, string parentDirectory, List<string> matchingFiles)
-   {
-       List<(string matchingFile, string checkFile)> results = new();
-       string checkFile;
-       int parentDirectoryLength = parentDirectory.Length;
-       foreach (string matchingFile in matchingFiles)
-       {
-           checkFile = $"{matchingFile[0]}{matchingFile.Substring(parentDirectoryLength + numberLength + 1)}";
-           results.Add(new(matchingFile, checkFile));
-       }
-       return results;
-   }
-
-   private static List<(string, string, string, string, string)> GetCollection(List<(string matchingFile, string checkFile)> collection)
-   {
-       List<(string, string, string, string, string)> results = new();
-       string errFile;
-       string checkDirectory;
-       string noWaitDirectory;
-       foreach ((string matchingFile, string checkFile) in collection)
-       {
-           errFile = string.Concat(checkFile, ".err");
-           checkDirectory = Path.GetDirectoryName(checkFile);
-           if (!Directory.Exists(checkDirectory))
-               _ = Directory.CreateDirectory(checkDirectory);
-           noWaitDirectory = Path.Combine(checkDirectory, "NoWaitDirectory");
-           results.Add(new(matchingFile, checkFile, errFile, checkDirectory, noWaitDirectory));
-       }
-       return results;
-   }
-
-   private void MoveCollection(DateTime dateTime, List<(string matchingFile, string checkFile)> collection)
-   {
-       long preWait;
-       List<(string checkFile, string errFile)> postCollection = new();
-       foreach ((string matchingFile, string checkFile, string errFile, string checkDirectory, string noWaitDirectory) in GetCollection(collection))
-       {
-           File.Move(matchingFile, checkFile);
-           if (Directory.Exists(noWaitDirectory))
-           {
-               postCollection.Add(new(checkFile, errFile));
-               continue;
-           }
-           if (_FileConnectorConfiguration?.FileHandleWaitTime is null)
-               preWait = DateTime.Now.AddMilliseconds(1234).Ticks;
-           else
-               preWait = DateTime.Now.AddMilliseconds(_FileConnectorConfiguration.FileHandleWaitTime.Value).Ticks;
-           for (short i = 0; i < short.MaxValue; i++)
-           {
-               if (DateTime.Now.Ticks > preWait)
-                   break;
-               Thread.Sleep(500);
-           }
-           for (int i = 0; i < int.MaxValue; i++)
-           {
-               if (File.Exists(errFile))
-                   throw new Exception(File.ReadAllText(errFile));
-               if (!File.Exists(checkFile))
-                   break;
-               if (new TimeSpan(DateTime.Now.Ticks - dateTime.Ticks).TotalSeconds > _BreakAfterSeconds)
-                   throw new Exception($"Not all files were consumed after {_BreakAfterSeconds} second(s)!");
-               Thread.Sleep(500);
-           }
-       }
-       if (postCollection.Count != 0)
-       {
-           Thread.Sleep(500);
-           StringBuilder stringBuilder = new();
-           foreach ((string checkFile, string errFile) in postCollection)
-           {
-               if (File.Exists(errFile))
-                   _ = stringBuilder.AppendLine(File.ReadAllText(errFile));
-               if (File.Exists(checkFile))
-                   _ = stringBuilder.AppendLine($"<{checkFile}> was not consumed by the end!");
-           }
-           if (stringBuilder.Length > 0)
-               throw new Exception(stringBuilder.ToString());
-       }
-   }
-
-   private static void CreatePointerFile(int numberLength, string parentDirectory, List<string> matchingFiles)
-   {
-#nullable enable
-       string checkFile;
-       string writeFile;
-       string? directoryName;
-       int parentDirectoryLength = parentDirectory.Length;
-       foreach (string matchingFile in matchingFiles)
-       {
-           directoryName = Path.GetDirectoryName(matchingFile);
-           if (directoryName is null)
-               continue;
-           checkFile = $"{matchingFile[0]}{directoryName.Substring(parentDirectoryLength + numberLength + 1)}";
-           writeFile = Path.Combine(parentDirectory, $"{directoryName.Substring(parentDirectory.Length + 1, numberLength)}.txt");
-           if (File.Exists(writeFile))
-               continue;
-           File.AppendAllLines(writeFile, new string[] { parentDirectory, matchingFile, directoryName, checkFile });
-       }
-#nullable disable
-   }
-
-   private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
-   {
-       Tuple<string, Test[], JsonElement[], List<FileInfo>> results = new(string.Empty, null, null, new List<FileInfo>());
-       Tuple<string, string[], string[]> pdsf = ProcessDataStandardFormat.GetLogisticsColumnsAndBody(reportFullPath);
-       _Logistics = new Logistics(reportFullPath, pdsf.Item1);
-       SetFileParameterLotIDToLogisticsMID();
-       int numberLength = 2;
-       long ticks = dateTime.Ticks;
-       string parentParentDirectory = GetParentParent(reportFullPath);
-       List<string> searchDirectories = GetSearchDirectories(numberLength, parentParentDirectory);
-       List<string> matchingFiles = GetMatchingFiles(ticks, reportFullPath, searchDirectories);
-       if (matchingFiles.Count != searchDirectories.Count)
-           throw new Exception($"Didn't find all files after {_BreakAfterSeconds} second(s)!");
-       try
-       { CreatePointerFile(numberLength, parentParentDirectory, matchingFiles); }
-       catch (Exception) { }
-       List<(string matchingFile, string checkFile)> collection = GetCollection(numberLength, parentParentDirectory, matchingFiles);
-       MoveCollection(dateTime, collection);
-       return results;
-   }

 }
@@ -238,7 +238,35 @@ public class FileRead : Shared.FileRead, IFileRead
        return result.ToString();
    }

-   private void SaveOpenInsightFile(string reportFullPath, DateTime dateTime, string logistics, List<pcl.Description> descriptions, Test[] tests)
+   private static string GetJson(int columnsLine, string[] columns, string[] body)
+   {
+#pragma warning disable CA1845, IDE0057
+       string result = "[\n";
+       string line;
+       string value;
+       string[] segments;
+       if (columns.Length == 0)
+           columns = body[columnsLine].Trim().Split('\t');
+       for (int i = columnsLine + 1; i < body.Length; i++)
+       {
+           line = "{";
+           segments = body[i].Trim().Split('\t');
+           if (segments.Length != columns.Length)
+               break;
+           for (int c = 1; c < segments.Length; c++)
+           {
+               value = segments[c].Replace("\"", "\\\"").Replace("\\", "\\\\");
+               line += '"' + columns[c].Trim('"') + '"' + ':' + '"' + value + '"' + ',';
+           }
+           line = line.Substring(0, line.Length - 1) + '}' + ',' + '\n';
+           result += line;
+       }
+       result = result.Substring(0, result.Length - 1) + ']';
+       return result;
+#pragma warning restore CA1845, IDE0057
+   }
+
+   private void SaveOpenInsightFile(string reportFullPath, DateTime dateTime, ProcessDataStandardFormat processDataStandardFormat, List<pcl.Description> descriptions, Test[] tests)
    {
        bool isDummyRun = false;
        List<(Shared.Properties.IScopeInfo, string)> collection = new();
@@ -283,7 +311,7 @@ public class FileRead : Shared.FileRead, IFileRead
                values[0] = $"{values[0]}|{subGroupId}";
            }
            string weekOfYear = _Calendar.GetWeekOfYear(_Logistics.DateTimeFromSequence, CalendarWeekRule.FirstDay, DayOfWeek.Sunday).ToString("00");
-           FromIQS.Save(_OpenInsightApiECDirectory, _Logistics, reportFullPath, logistics, descriptions.First(), lines, subGroupId, weekOfYear);
+           FromIQS.Save(_OpenInsightApiECDirectory, _Logistics, reportFullPath, processDataStandardFormat, descriptions.First(), lines, subGroupId, weekOfYear);
            try
            { FromIQS.SaveCopy(_FileConnectorConfiguration.SourceFileLocation, _IqsConnectionString, modelObjectParameterDefinition.Name, modelObjectParameterDefinition.Value.Split('|')); }
            catch (Exception exception)
@@ -303,15 +331,15 @@ public class FileRead : Shared.FileRead, IFileRead
    private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
    {
        Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
-       Tuple<string, string[], string[]> pdsf = ProcessDataStandardFormat.GetLogisticsColumnsAndBody(reportFullPath);
-       _Logistics = new Logistics(reportFullPath, pdsf.Item1);
+       ProcessDataStandardFormat processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath);
+       _Logistics = new Logistics(reportFullPath, processDataStandardFormat);
        SetFileParameterLotIDToLogisticsMID();
-       JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(pdsf);
+       JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(processDataStandardFormat);
        List<pcl.Description> descriptions = pcl.ProcessData.GetDescriptions(jsonElements);
        Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
        if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
-           SaveOpenInsightFile(reportFullPath, dateTime, pdsf.Item1, descriptions, tests);
-       results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(pdsf.Item1, tests, jsonElements, new List<FileInfo>());
+           SaveOpenInsightFile(reportFullPath, dateTime, processDataStandardFormat, descriptions, tests);
+       results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics[0]), tests, jsonElements, new List<FileInfo>());
        return results;
    }
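One caution on the new GetJson: it escapes quotes before backslashes, so the backslash inserted for a quote is itself doubled by the second Replace. If field values can contain " or \, delegating to System.Text.Json avoids hand-escaping entirely. A serializer-based sketch of the same row-to-object conversion, not the committed implementation (it clamps to the shorter of columns/segments instead of breaking, for brevity):

using System;
using System.Collections.Generic;
using System.Text.Json;

internal static class RowJsonSketch
{
    // Builds one JSON object from a tab-separated row, skipping column 0 like GetJson above.
    private static string ToJson(string[] columns, string row)
    {
        string[] segments = row.Trim().Split('\t');
        Dictionary<string, string> pairs = new();
        for (int c = 1; c < segments.Length && c < columns.Length; c++)
            pairs[columns[c].Trim('"')] = segments[c];
        return JsonSerializer.Serialize(pairs); // serializer handles ", \ and control characters
    }

    private static void Main() =>
        Console.WriteLine(ToJson(new[] { "Time", "Lot", "Comments" }, "0.1\tLOT42\tsays \"ok\" C:\\path"));
}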
@@ -325,7 +325,7 @@ public class FromIQS
        return new(result, count, commandText);
    }

-   private static string GetJson(Logistics logistics, string logisticLines, pcl.Description description)
+   private static string GetJson(Logistics logistics, ProcessDataStandardFormat processDataStandardFormat, pcl.Description description)
    {
        string result;
        StringBuilder stringBuilder = new();
@@ -345,7 +345,7 @@ public class FromIQS
        string safeValue;
        string[] segments;
        string serializerValue;
-       foreach (string line in logisticLines.Split(new string[] { Environment.NewLine }, StringSplitOptions.None))
+       foreach (string line in processDataStandardFormat.Logistics)
        {
            segments = line.Split('\t');
            if (segments.Length < 2)
@@ -376,11 +376,11 @@ public class FromIQS
        return result;
    }

-   internal static void Save(string openInsightApiECDirectory, Logistics logistics, string reportFullPath, string logisticLines, pcl.Description description, string lines, long? subGroupId, string weekOfYear)
+   internal static void Save(string openInsightApiECDirectory, Logistics logistics, string reportFullPath, ProcessDataStandardFormat processDataStandardFormat, pcl.Description description, string lines, long? subGroupId, string weekOfYear)
    {
        string checkFile;
        string fileName = Path.GetFileName(reportFullPath);
-       string json = GetJson(logistics, logisticLines, description);
+       string json = GetJson(logistics, processDataStandardFormat, description);
        string? ecPathRoot = Path.GetPathRoot(openInsightApiECDirectory);
        bool ecExists = ecPathRoot is not null && Directory.Exists(ecPathRoot);
        string weekYear = $"{logistics.DateTimeFromSequence:yyyy}_Week_{weekOfYear}";
@@ -396,6 +396,9 @@ public class FromIQS
        checkFile = Path.Combine(ecDirectory, $"{logistics.DateTimeFromSequence.Ticks}.json");
        if (ecExists && !File.Exists(checkFile))
            File.WriteAllText(checkFile, json);
+       checkFile = Path.Combine(ecDirectory, $"{logistics.DateTimeFromSequence.Ticks}.lbl");
+       if (ecExists && !File.Exists(checkFile))
+           File.WriteAllText(checkFile, processDataStandardFormat.Body[processDataStandardFormat.Body.Count - 1]);
    }

    private static string GetCommandText(string[] iqsCopyValues)
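The new .lbl sidecar carries just the last Body row alongside the .json payload. The shape of that write in isolation, with hypothetical paths and data:

using System;
using System.Collections.ObjectModel;
using System.IO;

internal static class SidecarSketch
{
    private static void Main()
    {
        ReadOnlyCollection<string> body = new(new[] { "row1\t...", "row2\t...", "lastRow\t..." });
        string ecDirectory = Path.Combine(Path.GetTempPath(), "ec-sketch");
        _ = Directory.CreateDirectory(ecDirectory);
        long ticks = DateTime.Now.Ticks; // the handler uses DateTimeFromSequence.Ticks
        string checkFile = Path.Combine(ecDirectory, $"{ticks}.lbl");
        if (!File.Exists(checkFile))
            File.WriteAllText(checkFile, body[body.Count - 1]); // last data row only
        Console.WriteLine(File.ReadAllText(checkFile));
    }
}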
@@ -139,15 +139,15 @@ public class FileRead : Shared.FileRead, IFileRead
    private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
    {
        Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
-       Tuple<string, string[], string[]> pdsf = ProcessDataStandardFormat.GetLogisticsColumnsAndBody(reportFullPath);
-       _Logistics = new Logistics(reportFullPath, pdsf.Item1);
+       ProcessDataStandardFormat processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath);
+       _Logistics = new Logistics(reportFullPath, processDataStandardFormat);
        SetFileParameterLotIDToLogisticsMID();
-       JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(pdsf);
+       JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(processDataStandardFormat);
        List<pcl.Description> descriptions = pcl.ProcessData.GetDescriptions(jsonElements);
        Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
        if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
            SendData(reportFullPath, dateTime, descriptions);
-       results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(pdsf.Item1, tests, jsonElements, new List<FileInfo>());
+       results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics[0]), tests, jsonElements, new List<FileInfo>());
        return results;
    }

@@ -168,15 +168,15 @@ public class FileRead : Shared.FileRead, IFileRead
        if (dateTime == DateTime.MinValue)
            throw new ArgumentNullException(nameof(dateTime));
        Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
-       Tuple<string, string[], string[]> pdsf = ProcessDataStandardFormat.GetLogisticsColumnsAndBody(reportFullPath);
-       _Logistics = new Logistics(reportFullPath, pdsf.Item1);
+       ProcessDataStandardFormat processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath);
+       _Logistics = new Logistics(reportFullPath, processDataStandardFormat);
        SetFileParameterLotIDToLogisticsMID();
-       JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(pdsf);
+       JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(processDataStandardFormat);
        List<pcl.Description> descriptions = pcl.ProcessData.GetDescriptions(jsonElements);
        Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
        if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
            PostOpenInsightMetrologyViewerAttachments(descriptions);
-       results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(pdsf.Item1, tests, jsonElements, new List<FileInfo>());
+       results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics[0]), tests, jsonElements, new List<FileInfo>());
        return results;
    }
@@ -139,13 +139,13 @@ public class FileRead : Shared.FileRead, IFileRead
    private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
    {
        Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
-       Tuple<string, string[], string[]> pdsf = ProcessDataStandardFormat.GetLogisticsColumnsAndBody(reportFullPath);
-       _Logistics = new Logistics(reportFullPath, pdsf.Item1);
+       ProcessDataStandardFormat processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath);
+       _Logistics = new Logistics(reportFullPath, processDataStandardFormat);
        SetFileParameterLotIDToLogisticsMID();
-       JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(pdsf);
+       JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(processDataStandardFormat);
        List<pcl.Description> descriptions = pcl.ProcessData.GetDescriptions(jsonElements);
        Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
-       results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(pdsf.Item1, tests, jsonElements, new List<FileInfo>());
+       results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics[0]), tests, jsonElements, new List<FileInfo>());
        if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
            DirectoryMove(reportFullPath, dateTime, descriptions);
        else if (!_IsEAFHosted)
@@ -117,15 +117,15 @@ public class FileRead : Shared.FileRead, IFileRead
    private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
    {
        Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
-       Tuple<string, string[], string[]> pdsf = ProcessDataStandardFormat.GetLogisticsColumnsAndBody(reportFullPath);
-       _Logistics = new Logistics(reportFullPath, pdsf.Item1);
+       ProcessDataStandardFormat processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath);
+       _Logistics = new Logistics(reportFullPath, processDataStandardFormat);
        SetFileParameterLotIDToLogisticsMID();
-       JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(pdsf);
+       JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(processDataStandardFormat);
        List<Shared.Properties.IDescription> descriptions = GetDuplicatorDescriptions(jsonElements);
        Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
        if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
            FileCopy(reportFullPath, dateTime, descriptions);
-       results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(pdsf.Item1, tests, jsonElements, new List<FileInfo>());
+       results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics[0]), tests, jsonElements, new List<FileInfo>());
        return results;
    }
@@ -25,7 +25,7 @@ stages:
      nugetSource: "https://artifactory.intra.infineon.com/artifactory/api/nuget/ngt-fi-package-main-vir/"

    jobs:
-     - job: SetupEnviroment
+     - job: SetupEnvironment
       steps:
         - script: |
             echo $(Build.BuildId)
@@ -51,7 +51,7 @@ stages:

     - job: BuildDebug
       dependsOn:
-       - SetupEnviroment
+       - SetupEnvironment
       steps:
         - script: |
            set configuration=Debug
@@ -66,7 +66,7 @@ stages:

     - job: BuildRelease
       dependsOn:
-       - SetupEnviroment
+       - SetupEnvironment
       steps:
         - script: |
            set configuration=Release
@@ -98,7 +98,7 @@ stages:

     - job: TestDebug
       dependsOn:
-       - SetupEnviroment
+       - SetupEnvironment
        - BuildDebug
        - BuildRelease
       steps:
@@ -168,7 +168,7 @@ stages:
      nugetSource: "https://artifactory.intra.infineon.com/artifactory/api/nuget/ngt-fi-package-main-vir/"

    jobs:
-     - job: SetupEnviroment
+     - job: SetupEnvironment
       steps:
         - script: |
             echo $(Build.BuildId)
@@ -194,7 +194,7 @@ stages:

     - job: BuildRelease
       dependsOn:
-       - SetupEnviroment
+       - SetupEnvironment
       steps:
         - script: |
            set configuration=Release
@@ -226,7 +226,7 @@ stages:

     - job: TestRelease
       dependsOn:
-       - SetupEnviroment
+       - SetupEnvironment
        - BuildRelease
       steps:
        - script: |
(File diff suppressed because it is too large.)
@@ -35,6 +35,9 @@ public class Logistics : ILogistics
    public long Sequence => _Sequence;
    public double TotalSecondsSinceLastWriteTimeFromSequence => _TotalSecondsSinceLastWriteTimeFromSequence;

+   private static string DefaultMesEntity(DateTime dateTime) =>
+       string.Concat(dateTime.Ticks, "_MES_ENTITY");
+
    public Logistics(IFileRead fileRead)
    {
        DateTime dateTime = DateTime.Now;
@@ -84,13 +87,13 @@ public class Logistics : ILogistics
        _Logistics2 = new List<Logistics2>();
    }

-   public Logistics(string reportFullPath, string logistics)
+   internal Logistics(string reportFullPath, ProcessDataStandardFormat processDataStandardFormat)
    {
        string key;
        DateTime dateTime;
        string[] segments;
        _FileInfo = new(reportFullPath);
-       _Logistics1 = logistics.Split(new string[] { Environment.NewLine }, StringSplitOptions.RemoveEmptyEntries).ToList();
+       _Logistics1 = processDataStandardFormat.Logistics.ToList();
        if (Logistics1.Count == 0 || !Logistics1[0].StartsWith("LOGISTICS_1"))
        {
            _NullData = null;
@@ -190,8 +193,6 @@ public class Logistics : ILogistics
        }
    }

-   private static string DefaultMesEntity(DateTime dateTime) => string.Concat(dateTime.Ticks, "_MES_ENTITY");
-
    internal void Update(string mid, string processJobID)
    {
        _MID = mid;
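The constructor now consumes the parsed object's Logistics lines directly instead of re-splitting a joined string. Equivalent before/after in isolation, as a sketch with inline data rather than the repository's types:

using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Linq;

internal static class LogisticsCtorSketch
{
    private static void Main()
    {
        string[] lines = { "LOGISTICS_1\tA_JOBID=JOB;A_MID=MID1;", "LOGISTICS_2\tB_JOBID=JOB;" };

        // Before: callers joined the lines into one string and the constructor split it again.
        string joined = string.Join(Environment.NewLine, lines);
        List<string> before = joined.Split(new[] { Environment.NewLine }, StringSplitOptions.RemoveEmptyEntries).ToList();

        // After: the ReadOnlyCollection from ProcessDataStandardFormat is copied directly.
        ReadOnlyCollection<string> logistics = new(lines);
        List<string> after = logistics.ToList();

        Console.WriteLine(before.SequenceEqual(after)); // True: same data, one fewer round-trip
    }
}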
@ -1,18 +1,22 @@
|
||||
using Adaptation.Shared.Methods;
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Collections.ObjectModel;
|
||||
using System.Globalization;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Text;
|
||||
using System.Text.Json;
|
||||
using System.Text.Json.Serialization;
|
||||
|
||||
namespace Adaptation.Shared;
|
||||
|
||||
public class ProcessDataStandardFormat
|
||||
#nullable enable
|
||||
|
||||
internal class ProcessDataStandardFormat
|
||||
{
|
||||
|
||||
public enum SearchFor
|
||||
internal enum SearchFor
|
||||
{
|
||||
EquipmentIntegration = 1,
|
||||
BusinessIntegration = 2,
|
||||
@ -20,325 +24,38 @@ public class ProcessDataStandardFormat
|
||||
Archive = 4
|
||||
}
|
||||
|
||||
public static string GetPDSFText(IFileRead fileRead, Logistics logistics, JsonElement[] jsonElements, string logisticsText)
|
||||
internal long? Sequence { get; private set; }
|
||||
internal ReadOnlyCollection<string> Body { get; private set; }
|
||||
internal ReadOnlyCollection<string> Columns { get; private set; }
|
||||
internal ReadOnlyCollection<string> Logistics { get; private set; }
|
||||
|
||||
internal ProcessDataStandardFormat(ReadOnlyCollection<string> body,
|
||||
ReadOnlyCollection<string> columns,
|
||||
ReadOnlyCollection<string> logistics,
|
||||
long? sequence)
|
||||
{
|
||||
string result;
|
||||
if (jsonElements.Length == 0)
|
||||
result = string.Empty;
|
||||
else
|
||||
{
|
||||
int columns = 0;
|
||||
List<string> lines;
|
||||
string endOffset = "E#######T";
|
||||
string dataOffset = "D#######T";
|
||||
string headerOffset = "H#######T";
|
||||
string format = "MM/dd/yyyy HH:mm:ss";
|
||||
StringBuilder stringBuilder = new();
|
||||
lines = new string[] { "HEADER_TAG\tHEADER_VALUE", "FORMAT\t2.00", "NUMBER_PASSES\t0001", string.Concat("HEADER_OFFSET\t", headerOffset), string.Concat("DATA_OFFSET\t", dataOffset), string.Concat("END_OFFSET\t", endOffset) }.ToList();
|
||||
_ = stringBuilder.Append("\"Time\"").Append('\t');
|
||||
_ = stringBuilder.Append("\"A_LOGISTICS\"").Append('\t');
|
||||
_ = stringBuilder.Append("\"B_LOGISTICS\"").Append('\t');
|
||||
for (int i = 0; i < jsonElements.Length;)
|
||||
{
|
||||
foreach (JsonProperty jsonProperty in jsonElements[0].EnumerateObject())
|
||||
{
|
||||
columns += 1;
|
||||
_ = stringBuilder.Append('"').Append(jsonProperty.Name).Append('"').Append('\t');
|
||||
}
|
||||
break;
|
||||
}
|
||||
_ = stringBuilder.Remove(stringBuilder.Length - 1, 1);
|
||||
lines.Add(stringBuilder.ToString());
|
||||
for (int i = 0; i < jsonElements.Length; i++)
|
||||
{
|
||||
_ = stringBuilder.Clear();
|
||||
_ = stringBuilder.Append("0.1").Append('\t');
|
||||
_ = stringBuilder.Append('1').Append('\t');
|
||||
_ = stringBuilder.Append('2').Append('\t');
|
||||
foreach (JsonProperty jsonProperty in jsonElements[i].EnumerateObject())
|
||||
_ = stringBuilder.Append(jsonProperty.Value).Append('\t');
|
||||
_ = stringBuilder.Remove(stringBuilder.Length - 1, 1);
|
||||
lines.Add(stringBuilder.ToString());
|
||||
}
|
||||
lines.Add(string.Concat("NUM_DATA_ROWS ", jsonElements.Length.ToString().PadLeft(9, '0')));
|
||||
lines.Add(string.Concat("NUM_DATA_COLUMNS ", (columns + 3).ToString().PadLeft(9, '0')));
|
||||
lines.Add("DELIMITER ;");
|
||||
lines.Add(string.Concat("START_TIME_FORMAT ", format));
|
||||
lines.Add(string.Concat("START_TIME ", logistics.DateTimeFromSequence.ToString(format))); //12/26/2019 15:22:44
|
||||
lines.Add(string.Concat("LOGISTICS_COLUMN", '\t', "A_LOGISTICS"));
|
||||
lines.Add(string.Concat("LOGISTICS_COLUMN", '\t', "B_LOGISTICS"));
|
||||
if (!string.IsNullOrEmpty(logisticsText))
|
||||
lines.Add(logisticsText);
|
||||
else
|
||||
{
|
||||
lines.Add(string.Concat("LOGISTICS_1", '\t', "A_CHAMBER=;A_INFO=", fileRead.EventName, ";A_INFO2=", fileRead.EquipmentType, ";A_JOBID=", fileRead.CellInstanceName, ";A_MES_ENTITY=", fileRead.MesEntity, ";A_MID=", logistics.MID, ";A_NULL_DATA=", fileRead.NullData, ";A_PPID=NO_PPID;A_PROCESS_JOBID=", logistics.ProcessJobID, ";A_PRODUCT=;A_SEQUENCE=", logistics.Sequence, ";A_WAFER_ID=;"));
|
||||
lines.Add(string.Concat("LOGISTICS_2", '\t', "B_CHAMBER=;B_INFO=", fileRead.EventName, ";B_INFO2=", fileRead.EquipmentType, ";B_JOBID=", fileRead.CellInstanceName, ";B_MES_ENTITY=", fileRead.MesEntity, ";B_MID=", logistics.MID, ";B_NULL_DATA=", fileRead.NullData, ";B_PPID=NO_PPID;B_PROCESS_JOBID=", logistics.ProcessJobID, ";B_PRODUCT=;B_SEQUENCE=", logistics.Sequence, ";B_WAFER_ID=;"));
|
||||
lines.Add("END_HEADER");
|
||||
}
|
||||
_ = stringBuilder.Clear();
|
||||
foreach (string line in lines)
|
||||
_ = stringBuilder.AppendLine(line);
|
||||
result = stringBuilder.ToString();
|
||||
result = result.Replace(headerOffset, result.IndexOf("NUM_DATA_ROWS").ToString().PadLeft(9, '0')).
|
||||
Replace(dataOffset, result.IndexOf('"').ToString().PadLeft(9, '0')).
|
||||
Replace(endOffset, result.Length.ToString().PadLeft(9, '0'));
|
||||
}
|
||||
return result;
|
||||
Body = body;
|
||||
Columns = columns;
|
||||
Logistics = logistics;
|
||||
Sequence = sequence;
|
||||
}
|
||||
|
||||
public static Tuple<string, string[], string[]> GetLogisticsColumnsAndBody(string reportFullPath, string[] lines = null)
|
||||
{
|
||||
string segment;
|
||||
List<string> body = new();
|
||||
StringBuilder logistics = new();
|
||||
lines ??= File.ReadAllLines(reportFullPath);
|
||||
string[] segments;
|
||||
if (lines.Length < 7)
|
||||
segments = Array.Empty<string>();
|
||||
else
|
||||
segments = lines[6].Trim().Split('\t');
|
||||
List<string> columns = new();
|
||||
for (int c = 0; c < segments.Length; c++)
|
||||
{
|
||||
segment = segments[c].Substring(1, segments[c].Length - 2);
|
||||
if (!columns.Contains(segment))
|
||||
columns.Add(segment);
|
||||
else
|
||||
{
|
||||
for (short i = 1; i < short.MaxValue; i++)
|
||||
{
|
||||
segment = string.Concat(segment, "_", i);
|
||||
if (!columns.Contains(segment))
|
||||
{
|
||||
columns.Add(segment);
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
bool lookForLogistics = false;
|
||||
for (int r = 7; r < lines.Length; r++)
|
||||
{
|
||||
if (lines[r].StartsWith("NUM_DATA_ROWS"))
|
||||
lookForLogistics = true;
|
||||
if (!lookForLogistics)
|
||||
{
|
||||
body.Add(lines[r]);
|
||||
continue;
|
||||
}
|
||||
if (lines[r].StartsWith("LOGISTICS_1"))
|
||||
{
|
||||
for (int i = r; i < lines.Length; i++)
|
||||
{
|
||||
if (lines[r].StartsWith("END_HEADER"))
|
||||
break;
|
||||
_ = logistics.AppendLine(lines[i]);
|
||||
}
|
||||
break;
|
||||
}
|
||||
}
|
||||
return new Tuple<string, string[], string[]>(logistics.ToString(), columns.ToArray(), body.ToArray());
|
||||
}
|
||||
internal static string EquipmentIntegration(bool addSpaces = true, char separator = ' ') =>
|
||||
GetString(SearchFor.EquipmentIntegration, addSpaces, separator);
|
||||
|
||||
public static JsonElement[] GetArray(Tuple<string, string[], string[]> pdsf, bool lookForNumbers = false)
|
||||
{
|
||||
JsonElement[] results;
|
||||
string logistics = pdsf.Item1;
|
||||
string[] columns = pdsf.Item2;
|
||||
string[] bodyLines = pdsf.Item3;
|
||||
if (bodyLines.Length == 0 || !bodyLines[0].Contains('\t'))
|
||||
results = JsonSerializer.Deserialize<JsonElement[]>("[]");
|
||||
else
|
||||
{
|
||||
string value;
|
||||
string[] segments;
|
||||
List<string> lines = new();
|
||||
StringBuilder stringBuilder = new();
|
||||
foreach (string bodyLine in bodyLines)
|
||||
{
|
||||
_ = stringBuilder.Clear();
|
||||
_ = stringBuilder.Append('{');
|
||||
segments = bodyLine.Trim().Split('\t');
|
||||
if (!lookForNumbers)
|
||||
{
|
||||
for (int c = 1; c < segments.Length; c++)
|
||||
{
|
||||
value = segments[c].Replace("\"", "\\\"").Replace("\\", "\\\\");
|
||||
_ = stringBuilder.Append('"').Append(columns[c]).Append("\":\"").Append(value).Append("\",");
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
for (int c = 1; c < segments.Length; c++)
|
||||
{
|
||||
value = segments[c].Replace("\"", "\\\"").Replace("\\", "\\\\");
|
||||
if (string.IsNullOrEmpty(value))
|
||||
_ = stringBuilder.Append('"').Append(columns[c]).Append("\":").Append(value).Append("null,");
|
||||
else if (value.All(char.IsDigit))
|
||||
_ = stringBuilder.Append('"').Append(columns[c]).Append("\":").Append(value).Append(',');
|
||||
else
|
||||
_ = stringBuilder.Append('"').Append(columns[c]).Append("\":\"").Append(value).Append("\",");
|
||||
}
|
||||
}
|
||||
_ = stringBuilder.Remove(stringBuilder.Length - 1, 1);
|
||||
_ = stringBuilder.AppendLine("}");
|
||||
lines.Add(stringBuilder.ToString());
|
||||
}
|
||||
string json = $"[{string.Join(",", lines)}]";
|
||||
results = JsonSerializer.Deserialize<JsonElement[]>(json);
|
||||
}
|
||||
return results;
|
||||
}
|
||||
internal static string BusinessIntegration(bool addSpaces = true, char separator = ' ') =>
|
||||
GetString(SearchFor.BusinessIntegration, addSpaces, separator);
|
||||
|
||||
public static Dictionary<string, List<string>> GetDictionary(Tuple<string, string[], string[]> pdsf)
|
||||
{
|
||||
Dictionary<string, List<string>> results = new();
|
||||
string[] segments;
|
||||
string[] columns = pdsf.Item2;
|
||||
string[] bodyLines = pdsf.Item3;
|
||||
foreach (string column in columns)
|
||||
results.Add(column, new List<string>());
|
||||
foreach (string bodyLine in bodyLines)
|
||||
{
|
||||
segments = bodyLine.Split('\t');
|
||||
for (int c = 1; c < segments.Length; c++)
|
||||
{
|
||||
if (c >= columns.Length)
|
||||
continue;
|
||||
results[columns[c]].Add(segments[c]);
|
||||
}
|
||||
}
|
||||
return results;
|
||||
}
|
||||
internal static string SystemExport(bool addSpaces = true, char separator = ' ') =>
|
||||
GetString(SearchFor.SystemExport, addSpaces, separator);
|
||||
|
||||
public static Tuple<string, Dictionary<Test, Dictionary<string, List<string>>>> GetTestDictionary(Tuple<string, string[], string[]> pdsf)
|
||||
{
|
||||
Dictionary<Test, Dictionary<string, List<string>>> results = new();
|
||||
List<string> collection;
|
||||
string testColumn = nameof(Test);
|
||||
Dictionary<string, List<string>> keyValuePairs = GetDictionary(pdsf);
|
||||
if (!keyValuePairs.TryGetValue(testColumn, out collection))
|
||||
throw new Exception();
|
||||
int min;
|
||||
int max;
|
||||
Test testKey;
|
||||
List<string> vs;
|
||||
string columnKey;
|
||||
Dictionary<Test, List<int>> tests = new();
|
||||
for (int i = 0; i < collection.Count; i++)
|
||||
{
|
||||
if (Enum.TryParse(collection[i], out Test test))
|
||||
{
|
||||
if (!results.ContainsKey(test))
|
||||
{
|
||||
tests.Add(test, new List<int>());
|
||||
results.Add(test, new Dictionary<string, List<string>>());
|
||||
}
|
||||
tests[test].Add(i);
|
||||
}
|
||||
}
|
||||
foreach (KeyValuePair<Test, List<int>> testKeyValuePair in tests)
|
||||
{
|
||||
testKey = testKeyValuePair.Key;
|
||||
min = testKeyValuePair.Value.Min();
|
||||
max = testKeyValuePair.Value.Max() + 1;
|
||||
foreach (KeyValuePair<string, List<string>> keyValuePair in keyValuePairs)
|
||||
results[testKey].Add(keyValuePair.Key, new List<string>());
|
||||
foreach (KeyValuePair<string, List<string>> keyValuePair in keyValuePairs)
|
||||
{
|
||||
vs = keyValuePair.Value;
|
||||
columnKey = keyValuePair.Key;
|
||||
for (int i = min; i < max; i++)
|
||||
{
|
||||
if (vs.Count > i)
|
||||
results[testKey][columnKey].Add(vs[i]);
|
||||
else
|
||||
results[testKey][columnKey].Add(string.Empty);
|
||||
}
|
||||
}
|
||||
}
|
||||
return new Tuple<string, Dictionary<Test, Dictionary<string, List<string>>>>(pdsf.Item1, results);
|
||||
}
|

internal static string Archive(bool addSpaces = true, char separator = ' ') =>
    GetString(SearchFor.Archive, addSpaces, separator);

private static string GetString(SearchFor searchFor, bool addSpaces, char separator = ' ')
{
    if (!addSpaces)
        return string.Concat(((int)searchFor).ToString().PadLeft(2, '0'), searchFor);
    else
        return string.Concat(((int)searchFor).ToString().PadLeft(2, '0'), separator, searchFor.ToString().Replace("In", string.Concat(separator, "In")).Replace("Ex", string.Concat(separator, "Ex")));
}

internal static ProcessDataStandardFormat GetEmpty() =>
    new(new(Array.Empty<string>()), new(Array.Empty<string>()), new(Array.Empty<string>()), null);

public static string EquipmentIntegration(bool addSpaces = true, char separator = ' ') => GetString(SearchFor.EquipmentIntegration, addSpaces, separator);

public static string BusinessIntegration(bool addSpaces = true, char separator = ' ') => GetString(SearchFor.BusinessIntegration, addSpaces, separator);

public static string SystemExport(bool addSpaces = true, char separator = ' ') => GetString(SearchFor.SystemExport, addSpaces, separator);

public static string Archive(bool addSpaces = true, char separator = ' ') => GetString(SearchFor.Archive, addSpaces, separator);

public static string GetLines(Logistics logistics, Properties.IScopeInfo scopeInfo, List<string> names, Dictionary<string, List<string>> keyValuePairs, string dateFormat, string timeFormat, List<string> pairedParameterNames, bool useDateTimeFromSequence = true, string format = "", List<string> ignoreParameterNames = null)
{
    StringBuilder result = new();
    ignoreParameterNames ??= new List<string>();
    if (useDateTimeFromSequence && !string.IsNullOrEmpty(format))
        throw new Exception();
    else if (!useDateTimeFromSequence && string.IsNullOrEmpty(format))
        throw new Exception();
    string nullData;
    const string columnDate = "Date";
    const string columnTime = "Time";
    const string firstDuplicate = "_1";
    _ = result.AppendLine(scopeInfo.Header);
    StringBuilder line = new();
    if (logistics.NullData is null)
        nullData = string.Empty;
    else
        nullData = logistics.NullData.ToString();
    int count = (from l in keyValuePairs select l.Value.Count).Min();
    for (int r = 0; r < count; r++)
    {
        _ = line.Clear();
        _ = line.Append('!');
        foreach (KeyValuePair<string, List<string>> keyValuePair in keyValuePairs)
        {
            if (!names.Contains(keyValuePair.Key))
                continue;
            if (ignoreParameterNames.Contains(keyValuePair.Key))
                continue;
            if (pairedParameterNames.Contains(keyValuePair.Key))
            {
                if (string.IsNullOrEmpty(keyValuePair.Value[r]) || keyValuePair.Value[r] == nullData)
                    continue;
                else
                    _ = result.Append(line).Append(keyValuePair.Key).Append(';').AppendLine(keyValuePair.Value[r]);
            }
            else
            {
                if (useDateTimeFromSequence && keyValuePair.Key == columnDate)
                    _ = line.Append(logistics.DateTimeFromSequence.ToString(dateFormat));
                else if (useDateTimeFromSequence && keyValuePair.Key == columnTime)
                    _ = line.Append(logistics.DateTimeFromSequence.ToString(timeFormat));
                else if (!useDateTimeFromSequence && keyValuePair.Key == columnDate && keyValuePair.Value[r].Length == format.Length)
                    _ = line.Append(DateTime.ParseExact(keyValuePair.Value[r], format, CultureInfo.InvariantCulture).ToString(dateFormat));
                else if (!useDateTimeFromSequence && keyValuePair.Key == columnTime && keyValuePairs.ContainsKey(string.Concat(keyValuePair.Key, firstDuplicate)) && keyValuePairs[string.Concat(keyValuePair.Key, firstDuplicate)][r].Length == format.Length)
                    _ = line.Append(DateTime.ParseExact(keyValuePairs[string.Concat(keyValuePair.Key, firstDuplicate)][r], format, CultureInfo.InvariantCulture).ToString(timeFormat));
                else if (string.IsNullOrEmpty(keyValuePair.Value[r]) || keyValuePair.Value[r] == nullData)
                    _ = line.Append(nullData);
                else
                    _ = line.Append(keyValuePair.Value[r]);
                _ = line.Append(';');
            }
        }
        if (pairedParameterNames.Count == 0)
        {
            _ = line.Remove(line.Length - 1, 1);
            _ = result.AppendLine(line.ToString());
        }
    }
    return result.ToString();
}

public static List<string> PDSFToFixedWidth(string reportFullPath)
internal static List<string> PDSFToFixedWidth(string reportFullPath)
{
    List<string> results = new();
    if (!File.Exists(reportFullPath))
@ -407,4 +124,537 @@ public class ProcessDataStandardFormat
    return results;
}

internal static ProcessDataStandardFormat GetProcessDataStandardFormat(string reportFullPath, string[]? lines = null)
{
    ProcessDataStandardFormat result;
    string segment;
    List<string> body = new();
    List<string> logistics = new();
    lines ??= File.ReadAllLines(reportFullPath);
    string[] segments;
    if (lines.Length < 7)
        segments = Array.Empty<string>();
    else
        segments = lines[6].Trim().Split('\t');
    List<string> columns = new();
    for (int c = 0; c < segments.Length; c++)
    {
        segment = segments[c].Substring(1, segments[c].Length - 2);
        if (!columns.Contains(segment))
            columns.Add(segment);
        else
        {
            for (short i = 1; i < short.MaxValue; i++)
            {
                segment = string.Concat(segment, "_", i);
                if (!columns.Contains(segment))
                {
                    columns.Add(segment);
                    break;
                }
            }
        }
    }
    bool lookForLogistics = false;
    for (int r = 7; r < lines.Length; r++)
    {
        if (lines[r].StartsWith("NUM_DATA_ROWS"))
            lookForLogistics = true;
        if (!lookForLogistics)
        {
            body.Add(lines[r]);
            continue;
        }
        if (lines[r].StartsWith("LOGISTICS_1"))
        {
            for (int i = r; i < lines.Length; i++)
            {
                if (lines[i].StartsWith("END_HEADER"))
                    break;
                logistics.Add(lines[i]);
            }
            break;
        }
    }
    result = new(body: body.AsReadOnly(),
                 columns: columns.AsReadOnly(),
                 logistics: logistics.AsReadOnly(),
                 sequence: null);
    return result;
}
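
For orientation, a minimal usage sketch of the parser above (the path and the printed properties are illustrative only; assumes the surrounding Adaptation.Shared types are referenced):

// Hypothetical caller; "report.pdsf" is a placeholder path.
ProcessDataStandardFormat pdsf = ProcessDataStandardFormat.GetProcessDataStandardFormat("report.pdsf");
// Columns come from line 7 (index 6); body rows run until NUM_DATA_ROWS;
// logistics lines run from LOGISTICS_1 to END_HEADER.
Console.WriteLine($"{pdsf.Columns.Count} columns, {pdsf.Body.Count} rows, {pdsf.Logistics.Count} logistics lines");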

internal static ProcessDataStandardFormat? GetProcessDataStandardFormat(string reportFullPath, ProcessDataStandardFormatMapping pdsfMapping)
{
    ProcessDataStandardFormat? result;
    const int columnsLine = 6;
    FileInfo fileInfo = new(reportFullPath);
    ProcessDataStandardFormat processDataStandardFormat = GetProcessDataStandardFormat(fileInfo.LastWriteTime, pdsfMapping.NewColumnNames.Count, columnsLine, fileInfo.FullName, lines: null);
    JsonElement[]? jsonElements = GetArray(pdsfMapping.NewColumnNames.Count, processDataStandardFormat, lookForNumbers: false);
    if (jsonElements is null || pdsfMapping.OldColumnNames.Count != pdsfMapping.ColumnIndices.Count)
        result = null;
    else
    {
        result = GetProcessDataStandardFormat(pdsfMapping, jsonElements, processDataStandardFormat);
        if (result.Sequence is null || result.Columns.Count == 0 || result.Body.Count == 0 || result.Logistics.Count == 0)
            result = null;
    }
    return result;
}
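
This overload returns null whenever the on-disk column count, the mapping's index table, or the parsed logistics fail validation, so null means "not a usable PDSF" rather than an error. A hedged sketch of that calling pattern (the mapping construction is elided; see ProcessDataStandardFormatMapping below):

// Hypothetical caller; 'mapping' must already describe this file family.
ProcessDataStandardFormat? mapped = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath, mapping);
if (mapped is null)
    throw new InvalidOperationException($"Unsupported or malformed PDSF: {reportFullPath}");
ProcessDataStandardFormat.Write(targetPath, mapped); // Sequence is guaranteed non-null here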

private static ProcessDataStandardFormat GetProcessDataStandardFormat(DateTime lastWriteTime, int expectedColumns, int columnsLine, string path, string[]? lines)
{
    ProcessDataStandardFormat result;
    long sequence;
    string[] segments;
    List<string> body = new();
    bool lookForLogistics = false;
    List<string> logistics = new();
    lines ??= File.ReadAllLines(path);
    if (lines.Length <= columnsLine)
        segments = Array.Empty<string>();
    else
    {
        segments = lines[columnsLine].Split('\t');
        if (segments.Length != expectedColumns)
            segments = Array.Empty<string>();
    }
    string[] columns = segments.Select(l => l.Trim('"')).ToArray();
    for (int r = columnsLine + 1; r < lines.Length; r++)
    {
        if (lines[r].StartsWith("NUM_DATA_ROWS"))
            lookForLogistics = true;
        if (!lookForLogistics)
        {
            body.Add(lines[r]);
            continue;
        }
        if (lines[r].StartsWith("LOGISTICS_1"))
        {
            for (int i = r; i < lines.Length; i++)
            {
                if (lines[i].StartsWith("END_HEADER"))
                    break;
                logistics.Add(lines[i]);
            }
            break;
        }
    }
    if (logistics.Count == 0)
        sequence = lastWriteTime.Ticks;
    else
    {
        segments = logistics[0].Split(new string[] { "SEQUENCE=" }, StringSplitOptions.None);
        sequence = segments.Length < 2 || !long.TryParse(segments[1].Split(';')[0], out long s) ? lastWriteTime.Ticks : s;
    }
    result = new(body: body.AsReadOnly(),
                 columns: new(columns),
                 logistics: logistics.AsReadOnly(),
                 sequence: sequence);
    return result;
}

private static JsonElement[]? GetArray(int expectedColumns, ProcessDataStandardFormat processDataStandardFormat, bool lookForNumbers)
{
    JsonElement[]? results;
    if (processDataStandardFormat.Body.Count == 0 || !processDataStandardFormat.Body[0].Contains('\t'))
        results = JsonSerializer.Deserialize("[]", JsonElementCollectionSourceGenerationContext.Default.JsonElementArray) ?? throw new Exception();
    else
    {
        string value;
        string[] segments;
        List<string> lines = new();
        StringBuilder stringBuilder = new();
        foreach (string bodyLine in processDataStandardFormat.Body)
        {
            _ = stringBuilder.Clear();
            _ = stringBuilder.Append('{');
            segments = bodyLine.Split('\t');
            if (segments.Length != expectedColumns)
                continue;
            if (!lookForNumbers)
            {
                for (int c = 0; c < segments.Length; c++)
                {
                    value = segments[c].Replace("\"", "\\\"").Replace("\\", "\\\\");
                    _ = stringBuilder.Append('"').Append(processDataStandardFormat.Columns[c]).Append("\":\"").Append(value).Append("\",");
                }
            }
            else
            {
                for (int c = 0; c < segments.Length; c++)
                {
                    value = segments[c].Replace("\"", "\\\"").Replace("\\", "\\\\");
                    if (string.IsNullOrEmpty(value))
                        _ = stringBuilder.Append('"').Append(processDataStandardFormat.Columns[c]).Append("\":").Append(value).Append("null,");
                    else if (value.All(char.IsDigit))
                        _ = stringBuilder.Append('"').Append(processDataStandardFormat.Columns[c]).Append("\":").Append(value).Append(',');
                    else
                        _ = stringBuilder.Append('"').Append(processDataStandardFormat.Columns[c]).Append("\":\"").Append(value).Append("\",");
                }
            }
            _ = stringBuilder.Remove(stringBuilder.Length - 1, 1);
            _ = stringBuilder.AppendLine("}");
            lines.Add(stringBuilder.ToString());
        }
        string json = $"[{string.Join(",", lines)}]";
        results = JsonSerializer.Deserialize(json, JsonElementCollectionSourceGenerationContext.Default.JsonElementArray);
    }
    return results;
}

private static ProcessDataStandardFormat GetProcessDataStandardFormat(ProcessDataStandardFormatMapping processDataStandardFormatMapping, JsonElement[] jsonElements, ProcessDataStandardFormat processDataStandardFormat)
{
    ProcessDataStandardFormat result;
    int column;
    string value;
    JsonProperty jsonProperty;
    List<string> values = new();
    List<string> results = new();
    JsonProperty[] jsonProperties;
    List<string> unknownColumns = new();
    for (int i = 0; i < jsonElements.Length; i++)
    {
        values.Clear();
        if (jsonElements[i].ValueKind != JsonValueKind.Object)
        {
            unknownColumns.Add(string.Empty);
            break;
        }
        jsonProperties = jsonElements[i].EnumerateObject().ToArray();
        if (jsonProperties.Length != processDataStandardFormatMapping.NewColumnNames.Count)
            continue;
        for (int c = 0; c < processDataStandardFormatMapping.ColumnIndices.Count; c++)
        {
            column = processDataStandardFormatMapping.ColumnIndices[c];
            if (column == -1)
                value = processDataStandardFormatMapping.OldColumnNames[c];
            else
            {
                jsonProperty = jsonProperties[column];
                value = jsonProperty.Value.ToString();
            }
            values.Add(value);
        }
        results.Add(string.Join("\t", values));
    }
    result = new(body: new(results),
                 columns: processDataStandardFormatMapping.OldColumnNames,
                 logistics: processDataStandardFormat.Logistics,
                 sequence: processDataStandardFormat.Sequence);
    return result;
}

internal static void Write(string path, ProcessDataStandardFormat processDataStandardFormat)
{
    List<string> results = new();
    if (processDataStandardFormat.Sequence is null)
        throw new NullReferenceException(nameof(processDataStandardFormat.Sequence));
    string endOffset = "E#######T";
    string dataOffset = "D#######T";
    string headerOffset = "H#######T";
    string format = "MM/dd/yyyy HH:mm:ss";
    string startTime = new DateTime(processDataStandardFormat.Sequence.Value).ToString(format);
    results.Add("HEADER_TAG\tHEADER_VALUE");
    results.Add("FORMAT\t2.00");
    results.Add("NUMBER_PASSES\t0001");
    results.Add($"HEADER_OFFSET\t{headerOffset}");
    results.Add($"DATA_OFFSET\t{dataOffset}");
    results.Add($"END_OFFSET\t{endOffset}");
    results.Add($"\"{string.Join("\"\t\"", processDataStandardFormat.Columns)}\"");
    results.AddRange(processDataStandardFormat.Body);
    results.Add($"NUM_DATA_ROWS\t{processDataStandardFormat.Body.Count.ToString().PadLeft(9, '0')}");
    results.Add($"NUM_DATA_COLUMNS\t{processDataStandardFormat.Columns.Count.ToString().PadLeft(9, '0')}");
    results.Add("DELIMITER\t;");
    results.Add($"START_TIME_FORMAT\t{format}");
    results.Add($"START_TIME\t{startTime}");
    results.Add("LOGISTICS_COLUMN\tA_LOGISTICS");
    results.Add("LOGISTICS_COLUMN\tB_LOGISTICS");
    results.AddRange(processDataStandardFormat.Logistics);
    File.WriteAllText(path, string.Join(Environment.NewLine, results));
}
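
Note that Write leaves the H/D/E offset placeholders literal, while GetPDSFText further down back-patches them with character indices. If a consumer needed real offsets, a caller could post-process the written file the same way; a hedged sketch mirroring GetPDSFText's approach:

// Hypothetical back-patching of the placeholder offsets, copied from GetPDSFText.
string text = File.ReadAllText(path);
text = text.Replace("H#######T", text.IndexOf("NUM_DATA_ROWS").ToString().PadLeft(9, '0'))
           .Replace("D#######T", text.IndexOf('"').ToString().PadLeft(9, '0'))
           .Replace("E#######T", text.Length.ToString().PadLeft(9, '0'));
File.WriteAllText(path, text);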

internal static Dictionary<string, List<string>> GetDictionary(ProcessDataStandardFormat processDataStandardFormat)
{
    Dictionary<string, List<string>> results = new();
    string[] segments;
    foreach (string column in processDataStandardFormat.Columns)
        results.Add(column, new List<string>());
    foreach (string bodyLine in processDataStandardFormat.Body)
    {
        segments = bodyLine.Split('\t');
        for (int c = 1; c < segments.Length; c++)
        {
            if (c >= processDataStandardFormat.Columns.Count)
                continue;
            results[processDataStandardFormat.Columns[c]].Add(segments[c]);
        }
    }
    return results;
}

internal static JsonElement[] GetArray(ProcessDataStandardFormat processDataStandardFormat, bool lookForNumbers = false)
{
    JsonElement[] results;
    if (processDataStandardFormat.Body.Count == 0 || !processDataStandardFormat.Body[0].Contains('\t'))
        results = JsonSerializer.Deserialize("[]", JsonElementCollectionSourceGenerationContext.Default.JsonElementArray) ?? throw new Exception();
    else
    {
        string value;
        string[] segments;
        List<string> lines = new();
        StringBuilder stringBuilder = new();
        foreach (string bodyLine in processDataStandardFormat.Body)
        {
            _ = stringBuilder.Clear();
            _ = stringBuilder.Append('{');
            segments = bodyLine.Trim().Split('\t');
            if (!lookForNumbers)
            {
                for (int c = 1; c < segments.Length; c++)
                {
                    value = segments[c].Replace("\"", "\\\"").Replace("\\", "\\\\");
                    _ = stringBuilder.Append('"').Append(processDataStandardFormat.Columns[c]).Append("\":\"").Append(value).Append("\",");
                }
            }
            else
            {
                for (int c = 1; c < segments.Length; c++)
                {
                    value = segments[c].Replace("\"", "\\\"").Replace("\\", "\\\\");
                    if (string.IsNullOrEmpty(value))
                        _ = stringBuilder.Append('"').Append(processDataStandardFormat.Columns[c]).Append("\":").Append(value).Append("null,");
                    else if (value.All(char.IsDigit))
                        _ = stringBuilder.Append('"').Append(processDataStandardFormat.Columns[c]).Append("\":").Append(value).Append(',');
                    else
                        _ = stringBuilder.Append('"').Append(processDataStandardFormat.Columns[c]).Append("\":\"").Append(value).Append("\",");
                }
            }
            _ = stringBuilder.Remove(stringBuilder.Length - 1, 1);
            _ = stringBuilder.AppendLine("}");
            lines.Add(stringBuilder.ToString());
        }
        string json = $"[{string.Join(",", lines)}]";
        results = JsonSerializer.Deserialize<JsonElement[]>(json) ?? throw new Exception();
    }
    return results;
}
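
The array produced here is row-oriented: one JSON object per body line, keyed by column name, with the loop starting at column 1 so the first column is omitted. Note also the escape order (quotes before backslashes), which would garble a value containing either character; PDSF bodies presumably contain neither. A minimal consumption sketch ("Test" follows GetTestDictionary below; everything else is illustrative):

// Hypothetical consumer of the row objects.
JsonElement[] rows = ProcessDataStandardFormat.GetArray(pdsf);
foreach (JsonElement row in rows)
{
    if (row.TryGetProperty("Test", out JsonElement test))
        Console.WriteLine(test.ToString());
}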

internal static string GetPDSFText(IFileRead fileRead, Logistics logistics, JsonElement[] jsonElements, string logisticsText)
{
    string result;
    if (jsonElements.Length == 0)
        result = string.Empty;
    else
    {
        int columns = 0;
        List<string> lines;
        string endOffset = "E#######T";
        string dataOffset = "D#######T";
        string headerOffset = "H#######T";
        string format = "MM/dd/yyyy HH:mm:ss";
        StringBuilder stringBuilder = new();
        lines = new string[] { "HEADER_TAG\tHEADER_VALUE", "FORMAT\t2.00", "NUMBER_PASSES\t0001", string.Concat("HEADER_OFFSET\t", headerOffset), string.Concat("DATA_OFFSET\t", dataOffset), string.Concat("END_OFFSET\t", endOffset) }.ToList();
        _ = stringBuilder.Append("\"Time\"").Append('\t');
        _ = stringBuilder.Append("\"A_LOGISTICS\"").Append('\t');
        _ = stringBuilder.Append("\"B_LOGISTICS\"").Append('\t');
        for (int i = 0; i < jsonElements.Length;)
        {
            foreach (JsonProperty jsonProperty in jsonElements[0].EnumerateObject())
            {
                columns += 1;
                _ = stringBuilder.Append('"').Append(jsonProperty.Name).Append('"').Append('\t');
            }
            break;
        }
        _ = stringBuilder.Remove(stringBuilder.Length - 1, 1);
        lines.Add(stringBuilder.ToString());
        for (int i = 0; i < jsonElements.Length; i++)
        {
            _ = stringBuilder.Clear();
            _ = stringBuilder.Append("0.1").Append('\t');
            _ = stringBuilder.Append('1').Append('\t');
            _ = stringBuilder.Append('2').Append('\t');
            foreach (JsonProperty jsonProperty in jsonElements[i].EnumerateObject())
                _ = stringBuilder.Append(jsonProperty.Value).Append('\t');
            _ = stringBuilder.Remove(stringBuilder.Length - 1, 1);
            lines.Add(stringBuilder.ToString());
        }
        lines.Add(string.Concat("NUM_DATA_ROWS ", jsonElements.Length.ToString().PadLeft(9, '0')));
        lines.Add(string.Concat("NUM_DATA_COLUMNS ", (columns + 3).ToString().PadLeft(9, '0')));
        lines.Add("DELIMITER ;");
        lines.Add(string.Concat("START_TIME_FORMAT ", format));
        lines.Add(string.Concat("START_TIME ", logistics.DateTimeFromSequence.ToString(format))); //12/26/2019 15:22:44
        lines.Add(string.Concat("LOGISTICS_COLUMN", '\t', "A_LOGISTICS"));
        lines.Add(string.Concat("LOGISTICS_COLUMN", '\t', "B_LOGISTICS"));
        if (!string.IsNullOrEmpty(logisticsText))
            lines.Add(logisticsText);
        else
        {
            lines.Add(string.Concat("LOGISTICS_1", '\t', "A_CHAMBER=;A_INFO=", fileRead.EventName, ";A_INFO2=", fileRead.EquipmentType, ";A_JOBID=", fileRead.CellInstanceName, ";A_MES_ENTITY=", fileRead.MesEntity, ";A_MID=", logistics.MID, ";A_NULL_DATA=", fileRead.NullData, ";A_PPID=NO_PPID;A_PROCESS_JOBID=", logistics.ProcessJobID, ";A_PRODUCT=;A_SEQUENCE=", logistics.Sequence, ";A_WAFER_ID=;"));
            lines.Add(string.Concat("LOGISTICS_2", '\t', "B_CHAMBER=;B_INFO=", fileRead.EventName, ";B_INFO2=", fileRead.EquipmentType, ";B_JOBID=", fileRead.CellInstanceName, ";B_MES_ENTITY=", fileRead.MesEntity, ";B_MID=", logistics.MID, ";B_NULL_DATA=", fileRead.NullData, ";B_PPID=NO_PPID;B_PROCESS_JOBID=", logistics.ProcessJobID, ";B_PRODUCT=;B_SEQUENCE=", logistics.Sequence, ";B_WAFER_ID=;"));
            lines.Add("END_HEADER");
        }
        _ = stringBuilder.Clear();
        foreach (string line in lines)
            _ = stringBuilder.AppendLine(line);
        result = stringBuilder.ToString();
        result = result.Replace(headerOffset, result.IndexOf("NUM_DATA_ROWS").ToString().PadLeft(9, '0')).
            Replace(dataOffset, result.IndexOf('"').ToString().PadLeft(9, '0')).
            Replace(endOffset, result.Length.ToString().PadLeft(9, '0'));
    }
    return result;
}

internal static Tuple<string, Dictionary<Test, Dictionary<string, List<string>>>> GetTestDictionary(ProcessDataStandardFormat processDataStandardFormat)
{
    Dictionary<Test, Dictionary<string, List<string>>> results = new();
    List<string>? collection;
    string testColumn = nameof(Test);
    Dictionary<string, List<string>> keyValuePairs = GetDictionary(processDataStandardFormat);
    if (!keyValuePairs.TryGetValue(testColumn, out collection))
        throw new Exception();
    int min;
    int max;
    Test testKey;
    List<string> vs;
    string columnKey;
    Dictionary<Test, List<int>> tests = new();
    for (int i = 0; i < collection.Count; i++)
    {
        if (Enum.TryParse(collection[i], out Test test))
        {
            if (!results.ContainsKey(test))
            {
                tests.Add(test, new List<int>());
                results.Add(test, new Dictionary<string, List<string>>());
            }
            tests[test].Add(i);
        }
    }
    foreach (KeyValuePair<Test, List<int>> testKeyValuePair in tests)
    {
        testKey = testKeyValuePair.Key;
        min = testKeyValuePair.Value.Min();
        max = testKeyValuePair.Value.Max() + 1;
        foreach (KeyValuePair<string, List<string>> keyValuePair in keyValuePairs)
            results[testKey].Add(keyValuePair.Key, new List<string>());
        foreach (KeyValuePair<string, List<string>> keyValuePair in keyValuePairs)
        {
            vs = keyValuePair.Value;
            columnKey = keyValuePair.Key;
            for (int i = min; i < max; i++)
            {
                if (vs.Count > i)
                    results[testKey][columnKey].Add(vs[i]);
                else
                    results[testKey][columnKey].Add(string.Empty);
            }
        }
    }
    return new Tuple<string, Dictionary<Test, Dictionary<string, List<string>>>>(processDataStandardFormat.Logistics[0], results);
}
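
GetTestDictionary slices every column by the contiguous row range each parsed Test value occupies, so downstream code can work one test at a time; Item1 carries the first logistics line. A hedged sketch (the property access is illustrative):

// Hypothetical usage: iterate the per-test column slices.
Tuple<string, Dictionary<Test, Dictionary<string, List<string>>>> testDictionary =
    ProcessDataStandardFormat.GetTestDictionary(pdsf);
foreach (KeyValuePair<Test, Dictionary<string, List<string>>> pair in testDictionary.Item2)
    Console.WriteLine($"{pair.Key}: {pair.Value.Count} columns");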

internal static string GetLines(Logistics logistics, Properties.IScopeInfo scopeInfo, List<string> names, Dictionary<string, List<string>> keyValuePairs, string dateFormat, string timeFormat, List<string> pairedParameterNames, bool useDateTimeFromSequence = true, string format = "", List<string>? ignoreParameterNames = null)
{
    StringBuilder result = new();
    ignoreParameterNames ??= new List<string>();
    if (useDateTimeFromSequence && !string.IsNullOrEmpty(format))
        throw new Exception();
    else if (!useDateTimeFromSequence && string.IsNullOrEmpty(format))
        throw new Exception();
    string? nullData;
    const string columnDate = "Date";
    const string columnTime = "Time";
    const string firstDuplicate = "_1";
    _ = result.AppendLine(scopeInfo.Header);
    StringBuilder line = new();
    if (logistics.NullData is null)
        nullData = string.Empty;
    else
        nullData = logistics.NullData.ToString();
    int count = (from l in keyValuePairs select l.Value.Count).Min();
    for (int r = 0; r < count; r++)
    {
        _ = line.Clear();
        _ = line.Append('!');
        foreach (KeyValuePair<string, List<string>> keyValuePair in keyValuePairs)
        {
            if (!names.Contains(keyValuePair.Key))
                continue;
            if (ignoreParameterNames.Contains(keyValuePair.Key))
                continue;
            if (pairedParameterNames.Contains(keyValuePair.Key))
            {
                if (string.IsNullOrEmpty(keyValuePair.Value[r]) || keyValuePair.Value[r] == nullData)
                    continue;
                else
                    _ = result.Append(line).Append(keyValuePair.Key).Append(';').AppendLine(keyValuePair.Value[r]);
            }
            else
            {
                if (useDateTimeFromSequence && keyValuePair.Key == columnDate)
                    _ = line.Append(logistics.DateTimeFromSequence.ToString(dateFormat));
                else if (useDateTimeFromSequence && keyValuePair.Key == columnTime)
                    _ = line.Append(logistics.DateTimeFromSequence.ToString(timeFormat));
                else if (!useDateTimeFromSequence && keyValuePair.Key == columnDate && keyValuePair.Value[r].Length == format.Length)
                    _ = line.Append(DateTime.ParseExact(keyValuePair.Value[r], format, CultureInfo.InvariantCulture).ToString(dateFormat));
                else if (!useDateTimeFromSequence && keyValuePair.Key == columnTime && keyValuePairs.ContainsKey(string.Concat(keyValuePair.Key, firstDuplicate)) && keyValuePairs[string.Concat(keyValuePair.Key, firstDuplicate)][r].Length == format.Length)
                    _ = line.Append(DateTime.ParseExact(keyValuePairs[string.Concat(keyValuePair.Key, firstDuplicate)][r], format, CultureInfo.InvariantCulture).ToString(timeFormat));
                else if (string.IsNullOrEmpty(keyValuePair.Value[r]) || keyValuePair.Value[r] == nullData)
                    _ = line.Append(nullData);
                else
                    _ = line.Append(keyValuePair.Value[r]);
                _ = line.Append(';');
            }
        }
        if (pairedParameterNames.Count == 0)
        {
            _ = line.Remove(line.Length - 1, 1);
            _ = result.AppendLine(line.ToString());
        }
    }
    return result.ToString();
}

private static string GetString(SearchFor searchFor, bool addSpaces, char separator = ' ')
{
    if (!addSpaces)
        return string.Concat(((int)searchFor).ToString().PadLeft(2, '0'), searchFor);
    else
        return string.Concat(((int)searchFor).ToString().PadLeft(2, '0'), separator, searchFor.ToString().Replace("In", string.Concat(separator, "In")).Replace("Ex", string.Concat(separator, "Ex")));
}

private static int? TryGetPropertyIndex(JsonProperty[] jsonProperties, string propertyName)
{
    int? result = null;
    for (int i = 0; i < jsonProperties.Length; i++)
    {
        if (jsonProperties[i].Name != propertyName)
            continue;
        result = i;
        break;
    }
    if (result is null)
    {
        for (int i = 0; i < jsonProperties.Length; i++)
        {
            if (jsonProperties[i].Name[0] != propertyName[0])
                continue;
            if (jsonProperties[i].Name.Length != propertyName.Length)
                continue;
            if (jsonProperties[i].Name != propertyName)
                continue;
            result = i;
            break;
        }
    }
    return result;
}
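
TryGetPropertyIndex scans for an exact name match, then re-scans with cheap first-character and length pre-checks before the same exact comparison, so the second pass can only find what the first pass already would; it reads as scaffolding for a looser match. A hedged sketch of the intended lookup (names are illustrative, and the method is private, so this only applies inside the class):

// Hypothetical lookup against one row object.
JsonProperty[] properties = rows[0].EnumerateObject().ToArray();
int? index = TryGetPropertyIndex(properties, "A_LOGISTICS");
// null means no property of that name exists on this row.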

}

[JsonSourceGenerationOptions(WriteIndented = true)]
[JsonSerializable(typeof(JsonElement[]))]
internal partial class JsonElementCollectionSourceGenerationContext : JsonSerializerContext
{
}
33
Adaptation/Shared/ProcessDataStandardFormatMapping.cs
Normal file
@ -0,0 +1,33 @@
using System.Collections.ObjectModel;

namespace Adaptation.Shared;

public class ProcessDataStandardFormatMapping
{

    public ReadOnlyCollection<string> BackfillColumns { get; private set; }
    public ReadOnlyCollection<int> ColumnIndices { get; private set; }
    public ReadOnlyCollection<string> IgnoreColumns { get; private set; }
    public ReadOnlyCollection<string> IndexOnlyColumns { get; private set; }
    public ReadOnlyDictionary<string, string> KeyValuePairs { get; private set; }
    public ReadOnlyCollection<string> NewColumnNames { get; private set; }
    public ReadOnlyCollection<string> OldColumnNames { get; private set; }

    public ProcessDataStandardFormatMapping(ReadOnlyCollection<string> backfillColumns,
                                            ReadOnlyCollection<int> columnIndices,
                                            ReadOnlyCollection<string> ignoreColumns,
                                            ReadOnlyCollection<string> indexOnlyColumns,
                                            ReadOnlyDictionary<string, string> keyValuePairs,
                                            ReadOnlyCollection<string> newColumnNames,
                                            ReadOnlyCollection<string> oldColumnNames)
    {
        BackfillColumns = backfillColumns;
        ColumnIndices = columnIndices;
        IgnoreColumns = ignoreColumns;
        IndexOnlyColumns = indexOnlyColumns;
        KeyValuePairs = keyValuePairs;
        NewColumnNames = newColumnNames;
        OldColumnNames = oldColumnNames;
    }

}
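
ColumnIndices pairs position-for-position with OldColumnNames: each entry is an index into the new (on-disk) column order, with -1 meaning "no source column; emit the old column name itself", per the private GetProcessDataStandardFormat above. A hedged construction sketch with made-up columns:

// Hypothetical three-column mapping; every name below is illustrative.
ProcessDataStandardFormatMapping mapping = new(
    backfillColumns: new(new[] { "Date" }),
    columnIndices: new(new[] { 0, 2, -1 }),
    ignoreColumns: new(Array.Empty<string>()),
    indexOnlyColumns: new(Array.Empty<string>()),
    keyValuePairs: new(new Dictionary<string, string>()),
    newColumnNames: new(new[] { "Time", "A_LOGISTICS", "B_LOGISTICS" }),
    oldColumnNames: new(new[] { "Time", "B_LOGISTICS", "MissingColumn" }));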
File diff suppressed because it is too large
@ -166,6 +166,7 @@
    <Compile Include="Adaptation\Shared\Metrology\WS.Results.cs" />
    <Compile Include="Adaptation\Shared\ParameterType.cs" />
    <Compile Include="Adaptation\Shared\ProcessDataStandardFormat.cs" />
    <Compile Include="Adaptation\Shared\ProcessDataStandardFormatMapping.cs" />
    <Compile Include="Adaptation\Shared\Properties\IDescription.cs" />
    <Compile Include="Adaptation\Shared\Properties\IFileRead.cs" />
    <Compile Include="Adaptation\Shared\Properties\ILogistics.cs" />
@ -35,6 +35,9 @@ public class Logistics : ILogistics
    public long Sequence => _Sequence;
    public double TotalSecondsSinceLastWriteTimeFromSequence => _TotalSecondsSinceLastWriteTimeFromSequence;

    private static string DefaultMesEntity(DateTime dateTime) =>
        string.Concat(dateTime.Ticks, "_MES_ENTITY");

    public Logistics(IFileRead fileRead)
    {
        DateTime dateTime = DateTime.Now;
@ -84,14 +87,14 @@ public class Logistics : ILogistics
        _Logistics2 = new List<Logistics2>();
    }

    public Logistics(string reportFullPath, string logistics)
    internal Logistics(string reportFullPath, ProcessDataStandardFormat processDataStandardFormat)
    {
        string key;
        DateTime dateTime;
        string[] segments;
        _FileInfo = new(reportFullPath);
        _Logistics1 = logistics.Split(new string[] { Environment.NewLine }, StringSplitOptions.RemoveEmptyEntries).ToList();
        if (!Logistics1.Any() || !Logistics1[0].StartsWith("LOGISTICS_1"))
        _Logistics1 = processDataStandardFormat.Logistics.ToList();
        if (Logistics1.Count == 0 || !Logistics1[0].StartsWith("LOGISTICS_1"))
        {
            _NullData = null;
            _JobID = "null";
@ -190,8 +193,6 @@ public class Logistics : ILogistics
        }
    }

    private static string DefaultMesEntity(DateTime dateTime) => string.Concat(dateTime.Ticks, "_MES_ENTITY");

    internal void Update(string mid, string processJobID)
    {
        _MID = mid;