Hardcode Bio-Rad in OI output

UniqueId replacement for attachments

Write input PDSF in output after EOF

GetPropertyValue for MoveMatchingFiles

ProcessDataStandardFormat over Tuple

MoveMatchingFiles to use ProcessDataStandardFormatMapping
Mike Phares 2025-04-17 15:47:54 -07:00
parent 35abc2569b
commit 6b8626d426
24 changed files with 2250 additions and 1455 deletions

View File

@ -5,6 +5,12 @@
"type": "coreclr",
"request": "attach",
"processId": 16660
},
{
"type": "node",
"request": "launch",
"name": "node Launch Current Opened File",
"program": "${file}"
}
]
}

View File

@ -92,6 +92,26 @@
"command": "code ../MET08THFTIRQS408M.csproj",
"problemMatcher": []
},
{
"label": "Readme",
"type": "shell",
"command": "code ../README.md",
"problemMatcher": []
},
{
"label": "File-Folder-Helper AOT s X Day-Helper-2025-03-20",
"type": "shell",
"command": "L:/DevOps/Mesa_FI/File-Folder-Helper/bin/Release/net8.0/win-x64/publish/File-Folder-Helper.exe",
"args": [
"s",
"X",
"L:/DevOps/EAF-Mesa-Integration/MET08THFTIRQS408M",
"Day-Helper-2025-03-20",
"false",
"4"
],
"problemMatcher": []
},
{
"label": "Git Config",
"type": "shell",

View File

@ -120,15 +120,15 @@ public class FileRead : Shared.FileRead, IFileRead
private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
Tuple<string, string[], string[]> pdsf = ProcessDataStandardFormat.GetLogisticsColumnsAndBody(reportFullPath);
_Logistics = new Logistics(reportFullPath, pdsf.Item1);
ProcessDataStandardFormat processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath);
_Logistics = new Logistics(reportFullPath, processDataStandardFormat);
SetFileParameterLotIDToLogisticsMID();
JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(pdsf);
JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(processDataStandardFormat);
List<Shared.Properties.IDescription> descriptions = GetDuplicatorDescriptions(jsonElements);
Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
FileCopy(reportFullPath, dateTime, descriptions);
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(pdsf.Item1, tests, jsonElements, new List<FileInfo>());
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics), tests, jsonElements, new List<FileInfo>()); // join the logistics lines; indexing [0] would enumerate the first line's characters
return results;
}
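The same tuple-to-object refactor repeats in the handlers below; a minimal sketch of the access-pattern change (member names follow this diff, bodies simplified):

// Before: positional tuple fields; Item1 carried the logistics text, Item2 the columns, Item3 the body.
Tuple<string, string[], string[]> pdsf = ProcessDataStandardFormat.GetLogisticsColumnsAndBody(reportFullPath);
string logisticsText = pdsf.Item1;
// After: named read-only members on a dedicated type.
ProcessDataStandardFormat processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath);
ReadOnlyCollection<string> logisticsLines = processDataStandardFormat.Logistics;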

View File

@ -120,9 +120,10 @@ public class FileRead : Shared.FileRead, IFileRead
if (dateTime == DateTime.MinValue)
throw new ArgumentNullException(nameof(dateTime));
string logisticsSequence = _Logistics.Sequence.ToString();
string day = $"{_Logistics.DateTimeFromSequence:yyyy-MM-dd}";
string weekOfYear = _Calendar.GetWeekOfYear(_Logistics.DateTimeFromSequence, CalendarWeekRule.FirstDay, DayOfWeek.Sunday).ToString("00");
string weekDirectory = $"{_Logistics.DateTimeFromSequence:yyyy}_Week_{weekOfYear}{@"\"}{_Logistics.DateTimeFromSequence:yyyy-MM-dd}";
string destinationArchiveDirectory = Path.Combine(_JobIdArchiveParentDirectory, _Logistics.JobID, weekDirectory);
string weekDirectory = $"{_Logistics.DateTimeFromSequence:yyyy}_Week_{weekOfYear}";
string destinationArchiveDirectory = Path.Combine(_JobIdArchiveParentDirectory, _Logistics.JobID, weekDirectory, day);
if (!Directory.Exists(destinationArchiveDirectory))
_ = Directory.CreateDirectory(destinationArchiveDirectory);
string jobIdDirectory = Path.Combine(_JobIdParentDirectory, _Logistics.JobID);
@ -144,15 +145,15 @@ public class FileRead : Shared.FileRead, IFileRead
private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
Tuple<string, string[], string[]> pdsf = ProcessDataStandardFormat.GetLogisticsColumnsAndBody(reportFullPath);
_Logistics = new Logistics(reportFullPath, pdsf.Item1);
ProcessDataStandardFormat processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath);
_Logistics = new Logistics(reportFullPath, processDataStandardFormat);
SetFileParameterLotIDToLogisticsMID();
JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(pdsf);
JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(processDataStandardFormat);
List<Shared.Properties.IDescription> descriptions = GetDuplicatorDescriptions(jsonElements);
Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
MoveArchive(reportFullPath, dateTime);
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(pdsf.Item1, tests, jsonElements, new List<FileInfo>());
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics), tests, jsonElements, new List<FileInfo>());
return results;
}
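A sketch of the archive-path change above, with an assumed sequence date of 2025-04-17 in week 16:

// Before: the day folder was embedded in weekDirectory via a literal backslash:
//   Path.Combine(_JobIdArchiveParentDirectory, _Logistics.JobID, @"2025_Week_16\2025-04-17")
// After: week and day are separate Path.Combine segments; the resulting path is identical:
//   Path.Combine(_JobIdArchiveParentDirectory, _Logistics.JobID, "2025_Week_16", "2025-04-17")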

View File

@ -119,15 +119,15 @@ public class FileRead : Shared.FileRead, IFileRead
private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
Tuple<string, string[], string[]> pdsf = ProcessDataStandardFormat.GetLogisticsColumnsAndBody(reportFullPath);
_Logistics = new Logistics(reportFullPath, pdsf.Item1);
ProcessDataStandardFormat processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath);
_Logistics = new Logistics(reportFullPath, processDataStandardFormat);
SetFileParameterLotIDToLogisticsMID();
JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(pdsf);
JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(processDataStandardFormat);
List<Shared.Properties.IDescription> descriptions = GetDuplicatorDescriptions(jsonElements);
Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
FileCopy(reportFullPath, dateTime, descriptions);
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(pdsf.Item1, tests, jsonElements, new List<FileInfo>());
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics), tests, jsonElements, new List<FileInfo>());
return results;
}

View File

@ -5,16 +5,70 @@ using Adaptation.Shared.Duplicator;
using Adaptation.Shared.Methods;
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.IO;
using System.Linq;
using System.Text;
using System.Text.Json;
using System.Threading;
namespace Adaptation.FileHandlers.MoveMatchingFiles;
#nullable enable
public class FileRead : Shared.FileRead, IFileRead
{
internal class PreWith
{
internal string MatchingFile { get; private set; }
internal string CheckFile { get; private set; }
internal string ErrFile { get; private set; }
internal string CheckDirectory { get; private set; }
internal string NoWaitDirectory { get; private set; }
internal PreWith(string matchingFile, string checkFile, string errFile, string checkDirectory, string noWaitDirectory)
{
MatchingFile = matchingFile;
CheckFile = checkFile;
ErrFile = errFile;
CheckDirectory = checkDirectory;
NoWaitDirectory = noWaitDirectory;
}
}
internal class Pre
{
internal string MatchingFile { get; private set; }
internal string CheckFile { get; private set; }
internal Pre(string matchingFile, string checkFile)
{
MatchingFile = matchingFile;
CheckFile = checkFile;
}
}
internal class Post
{
internal string ErrFile { get; private set; }
internal string CheckFile { get; private set; }
internal Post(string checkFile, string errFile)
{
ErrFile = errFile;
CheckFile = checkFile;
}
}
private readonly ProcessDataStandardFormatMapping _ProcessDataStandardFormatMapping;
public FileRead(ISMTP smtp, Dictionary<string, string> fileParameter, string cellInstanceName, int? connectionCount, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList<ModelObjectParameterDefinition> modelObjectParameters, string equipmentDictionaryName, Dictionary<string, List<long>> dummyRuns, Dictionary<long, List<string>> staticRuns, bool useCyclicalForDescription, bool isEAFHosted) :
base(new Description(), false, smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null)
{
@ -27,6 +81,12 @@ public class FileRead : Shared.FileRead, IFileRead
throw new Exception(cellInstanceConnectionName);
if (!_IsDuplicator)
throw new Exception(cellInstanceConnectionName);
string processDataStandardFormatMappingOldColumnNames = GetPropertyValue(cellInstanceConnectionName, modelObjectParameters, "Process.Data.Standard.Format.Mapping.Old.Column.Names");
string processDataStandardFormatMappingNewColumnNames = GetPropertyValue(cellInstanceConnectionName, modelObjectParameters, "Process.Data.Standard.Format.Mapping.New.Column.Names");
string processDataStandardFormatMappingColumnIndices = GetPropertyValue(cellInstanceConnectionName, modelObjectParameters, "Process.Data.Standard.Format.Mapping.Column.Indices");
_ProcessDataStandardFormatMapping = GetProcessDataStandardFormatMapping(processDataStandardFormatMappingOldColumnNames,
processDataStandardFormatMappingNewColumnNames,
processDataStandardFormatMappingColumnIndices);
}
void IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, Exception exception)
@ -41,7 +101,8 @@ public class FileRead : Shared.FileRead, IFileRead
Move(extractResults);
}
void IFileRead.WaitForThread() => WaitForThread(thread: null, threadExceptions: null);
void IFileRead.WaitForThread() =>
WaitForThread(thread: null, threadExceptions: null);
string IFileRead.GetEventDescription()
{
@ -88,7 +149,7 @@ public class FileRead : Shared.FileRead, IFileRead
DateTime dateTime = DateTime.Now;
results = GetExtractResult(reportFullPath, dateTime);
if (results.Item3 is null)
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(results.Item1, Array.Empty<Test>(), JsonSerializer.Deserialize<JsonElement[]>("[]"), results.Item4);
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(results.Item1, Array.Empty<Test>(), JsonSerializer.Deserialize<JsonElement[]>("[]") ?? throw new Exception(), results.Item4);
if (results.Item3.Length > 0 && _IsEAFHosted)
WritePDSF(this, results.Item3);
UpdateLastTicksDuration(DateTime.Now.Ticks - dateTime.Ticks);
@ -104,7 +165,69 @@ public class FileRead : Shared.FileRead, IFileRead
return results;
}
private static List<string> GetSearchDirectories(int numberLength, string parentDirectory)
private static ProcessDataStandardFormatMapping GetProcessDataStandardFormatMapping(string processDataStandardFormatMappingOldColumnNames, string processDataStandardFormatMappingNewColumnNames, string processDataStandardFormatMappingColumnIndices)
{
ProcessDataStandardFormatMapping result;
string[] segmentsB;
List<string> distinct = new();
Dictionary<string, string> keyValuePairs = new();
string args4 = "Time,HeaderUniqueId,UniqueId,Date";
string args5 = "Thickness 14 3mm Edge Mean,Thickness 14 3mm Edge % from R/2,Thickness 14 5mm Edge Mean,Thickness 14 5mm Edge % from R/2,Thickness 14 Center Mean,Thickness 14 Average,Thickness 14 Std Dev,Thickness 14 R/2 Mean";
string args6 = "Thickness01,Thickness02,Thickness03,Thickness04,Thickness05,Thickness06,Thickness07,Thickness08,Thickness09,Thickness10,Thickness11,Thickness12,Thickness13,Thickness14";
string args7 = "Test|EventId,Employee|Operator,Lot|Wafer,MeanThickness|WaferMeanThickness,RVThickness|RadialVariationThickness,ThicknessFourteen3mmEdgeMean|Thickness 14 3mm Edge Mean,ThicknessFourteen3mmEdgePercent|Thickness 14 3mm Edge % from R/2,ThicknessFourteen5mmEdgeMean|Thickness 14 5mm Edge Mean,ThicknessFourteen5mmEdgePercent|Thickness 14 5mm Edge % from R/2,ThicknessFourteenCenterMean|Thickness 14 Center Mean,ThicknessFourteenCriticalPointsAverage|Thickness 14 Average,ThicknessFourteenCriticalPointsStdDev|Thickness 14 Std Dev,ThicknessFourteenMeanFrom|Thickness 14 R/2 Mean,|BIORAD2";
// string args8 = "Time,A_LOGISTICS,B_LOGISTICS,Test,Count,Index,MesEntity,Date,Employee,Lot,PSN,Reactor,Recipe,Cassette,HeaderUniqueId,Layer,PassFail,Position,RDS,Title,UniqueId,Wafer,Zone,MeanThickness,RVThickness,StdDev,Thickness,Slot,ThicknessFourteen3mmEdgeMean,ThicknessFourteen3mmEdgePercent,ThicknessFourteen5mmEdgeMean,ThicknessFourteen5mmEdgePercent,ThicknessFourteenCenterMean,ThicknessFourteenCriticalPointsAverage,ThicknessFourteenCriticalPointsStdDev,ThicknessFourteenMeanFrom,Thickness01,Thickness02,Thickness03,Thickness04,Thickness05,Thickness06,Thickness07,Thickness08,Thickness09,Thickness10,Thickness11,Thickness12,Thickness13,Thickness14";
// string args9 = "Time,A_LOGISTICS,B_LOGISTICS,Count,Sequence,MesEntity,Index,Title,Recipe,DateTime,Operator,Batch,Cassette,UsedLast,Wafer,Position,Thickness,WaferMeanThickness,StdDev,PassFail,Line,RadialVariationThickness,Slot,RDS,PSN,Reactor,Layer,Zone,Employee,InferredLot,Thickness 14 3mm Edge Mean,Thickness 14 3mm Edge % from R/2,Thickness 14 5mm Edge Mean,Thickness 14 5mm Edge % from R/2,Thickness 14 Center Mean,Thickness 14 Average,Thickness 14 Std Dev,Thickness 14 R 2/Mean,Thickness01,Thickness02,Thickness03,Thickness04,Thickness05,Thickness06,Thickness07,Thickness08,Thickness09,Thickness10,Thickness11,Thickness12,Thickness13,Thickness14,EventId";
// string args10 = "0,1,2,52,3,6,5,9,10,14,24,25,8,12,-1,26,19,15,23,7,-1,14,27,17,21,18,16,22,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51";
string[] segments = args7.Split(',');
ReadOnlyCollection<string> ignoreColumns = new(args4.Split(','));
ReadOnlyCollection<string> backfillColumns = new(args5.Split(','));
ReadOnlyCollection<string> indexOnlyColumns = new(args6.Split(','));
ReadOnlyCollection<string> newColumnNames = new(processDataStandardFormatMappingNewColumnNames.Split(','));
ReadOnlyCollection<string> oldColumnNames = new(processDataStandardFormatMappingOldColumnNames.Split(','));
ReadOnlyCollection<int> columnIndices = new(processDataStandardFormatMappingColumnIndices.Split(',').Select(int.Parse).ToArray());
foreach (string segment in segments)
{
segmentsB = segment.Split('|');
if (segmentsB.Length != 2)
continue;
if (distinct.Contains(segmentsB[0]))
continue;
distinct.Add(segmentsB[0]);
keyValuePairs.Add(segmentsB[0], segmentsB[1]);
}
result = new(backfillColumns: backfillColumns,
columnIndices: columnIndices,
newColumnNames: newColumnNames,
ignoreColumns: ignoreColumns,
indexOnlyColumns: indexOnlyColumns,
keyValuePairs: new(keyValuePairs),
oldColumnNames: oldColumnNames);
return result;
}
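How the pipe-delimited pairs in args7 fold into keyValuePairs, traced on three of the entries above:

// "Test|EventId"      -> keyValuePairs["Test"] = "EventId"
// "Employee|Operator" -> keyValuePairs["Employee"] = "Operator"
// "|BIORAD2"          -> keyValuePairs[""] = "BIORAD2" (an empty old name still splits into two segments)
// A segment that does not split into exactly two parts is skipped, as is any
// old name already seen (the distinct check).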
private static ReadOnlyCollection<PreWith> GetPreWithCollection(ReadOnlyCollection<Pre> preCollection)
{
List<PreWith> results = new();
string errFile;
PreWith preWith;
string? checkDirectory;
string noWaitDirectory;
foreach (Pre pre in preCollection)
{
errFile = string.Concat(pre.CheckFile, ".err");
checkDirectory = Path.GetDirectoryName(pre.CheckFile);
if (string.IsNullOrEmpty(checkDirectory))
continue;
if (!Directory.Exists(checkDirectory))
_ = Directory.CreateDirectory(checkDirectory);
noWaitDirectory = Path.Combine(checkDirectory, "NoWaitDirectory");
preWith = new(pre.MatchingFile, pre.CheckFile, errFile, checkDirectory, noWaitDirectory);
results.Add(preWith);
}
return results.AsReadOnly();
}
private static ReadOnlyCollection<string> GetSearchDirectories(int numberLength, string parentDirectory)
{
List<string> results = new();
string[] directories = Directory.GetDirectories(parentDirectory, "*", SearchOption.TopDirectoryOnly);
@ -115,10 +238,136 @@ public class FileRead : Shared.FileRead, IFileRead
results.Add(directory);
}
results.Sort();
return results.AsReadOnly();
}
private static void CreatePointerFile(int numberLength, string parentDirectory, ReadOnlyCollection<string> matchingFiles)
{
string checkFile;
string writeFile;
string? directoryName;
int parentDirectoryLength = parentDirectory.Length;
foreach (string matchingFile in matchingFiles)
{
directoryName = Path.GetDirectoryName(matchingFile);
if (directoryName is null)
continue;
checkFile = $"{matchingFile[0]}{directoryName.Substring(parentDirectoryLength + numberLength + 1)}";
writeFile = Path.Combine(parentDirectory, $"{directoryName.Substring(parentDirectory.Length + 1, numberLength)}.txt");
if (File.Exists(writeFile))
continue;
File.AppendAllLines(writeFile, new string[] { parentDirectory, matchingFile, directoryName, checkFile });
}
}
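The substring arithmetic in CreatePointerFile, traced with assumed paths (parentDirectory = @"D:\Root", numberLength = 2):

// matchingFile  = @"D:\Root\03\Sub\run.pdsf"
// directoryName = @"D:\Root\03\Sub"
// checkFile = matchingFile[0] + directoryName.Substring(7 + 2 + 1)              -> "D" + @"\Sub" = @"D\Sub"
// writeFile = Path.Combine(@"D:\Root", directoryName.Substring(8, 2) + ".txt")  -> @"D:\Root\03.txt"
// The pointer file then records four lines: parentDirectory, matchingFile, directoryName, checkFile.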
private static ReadOnlyCollection<Pre> GetPreCollection(int numberLength, string parentDirectory, ReadOnlyCollection<string> matchingFiles)
{
List<Pre> results = new();
Pre pre;
string checkFile;
int parentDirectoryLength = parentDirectory.Length;
foreach (string matchingFile in matchingFiles)
{
checkFile = $"{matchingFile[0]}{matchingFile.Substring(parentDirectoryLength + numberLength + 1)}";
pre = new(matchingFile, checkFile);
results.Add(pre);
}
return results.AsReadOnly();
}
private void MoveCollection(DateTime dateTime, ProcessDataStandardFormat? processDataStandardFormat, ReadOnlyCollection<PreWith> preWithCollection)
{
ReadOnlyCollection<Post> postCollection = GetPostCollection(dateTime, processDataStandardFormat, preWithCollection);
if (postCollection.Count != 0)
{
Thread.Sleep(500);
StringBuilder stringBuilder = new();
foreach (Post post in postCollection)
{
if (File.Exists(post.ErrFile))
_ = stringBuilder.AppendLine(File.ReadAllText(post.ErrFile));
if (File.Exists(post.CheckFile))
_ = stringBuilder.AppendLine($"<{post.CheckFile}> was not consumed by the end!");
}
if (stringBuilder.Length > 0)
throw new Exception(stringBuilder.ToString());
}
}
private ReadOnlyCollection<Post> GetPostCollection(DateTime dateTime, ProcessDataStandardFormat? processDataStandardFormat, ReadOnlyCollection<PreWith> preWithCollection)
{
List<Post> results = new();
Post post;
long preWait;
foreach (PreWith preWith in preWithCollection)
{
if (processDataStandardFormat is null)
File.Move(preWith.MatchingFile, preWith.CheckFile);
else
{
ProcessDataStandardFormat.Write(preWith.CheckFile, processDataStandardFormat);
File.Delete(preWith.MatchingFile);
}
if (Directory.Exists(preWith.NoWaitDirectory))
{
post = new(preWith.CheckFile, preWith.ErrFile);
results.Add(post);
continue;
}
if (_FileConnectorConfiguration?.FileHandleWaitTime is null)
preWait = DateTime.Now.AddMilliseconds(1234).Ticks;
else
preWait = DateTime.Now.AddMilliseconds(_FileConnectorConfiguration.FileHandleWaitTime.Value).Ticks;
for (short i = 0; i < short.MaxValue; i++)
{
if (DateTime.Now.Ticks > preWait)
break;
Thread.Sleep(500);
}
for (int i = 0; i < int.MaxValue; i++)
{
if (File.Exists(preWith.ErrFile))
throw new Exception(File.ReadAllText(preWith.ErrFile));
if (!File.Exists(preWith.CheckFile))
break;
if (new TimeSpan(DateTime.Now.Ticks - dateTime.Ticks).TotalSeconds > _BreakAfterSeconds)
throw new Exception($"Not all files were consumed after {_BreakAfterSeconds} second(s)!");
Thread.Sleep(500);
}
}
return results.AsReadOnly();
}
private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results = new(string.Empty, Array.Empty<Test>(), Array.Empty<JsonElement>(), new List<FileInfo>());
ProcessDataStandardFormat? processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath, _ProcessDataStandardFormatMapping);
if (processDataStandardFormat is not null)
_Logistics = new Logistics(reportFullPath, processDataStandardFormat);
else
{
processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath);
_Logistics = new Logistics(reportFullPath, processDataStandardFormat);
processDataStandardFormat = null;
}
SetFileParameterLotIDToLogisticsMID();
int numberLength = 2;
long ticks = dateTime.Ticks;
string parentParentDirectory = GetParentParent(reportFullPath);
ReadOnlyCollection<string> searchDirectories = GetSearchDirectories(numberLength, parentParentDirectory);
ReadOnlyCollection<string> matchingFiles = GetMatchingFiles(ticks, reportFullPath, searchDirectories);
if (matchingFiles.Count != searchDirectories.Count)
throw new Exception($"Didn't find all files after {_BreakAfterSeconds} second(s)!");
try
{ CreatePointerFile(numberLength, parentParentDirectory, matchingFiles); }
catch (Exception) { }
ReadOnlyCollection<Pre> preCollection = GetPreCollection(numberLength, parentParentDirectory, matchingFiles);
ReadOnlyCollection<PreWith> preWithCollection = GetPreWithCollection(preCollection);
MoveCollection(dateTime, processDataStandardFormat, preWithCollection);
return results;
}
private List<string> GetMatchingFiles(long ticks, string reportFullPath, List<string> searchDirectories)
private ReadOnlyCollection<string> GetMatchingFiles(long ticks, string reportFullPath, ReadOnlyCollection<string> searchDirectories)
{
List<string> results = new();
string[] found;
@ -137,129 +386,7 @@ public class FileRead : Shared.FileRead, IFileRead
break;
}
}
return results;
}
private static List<(string matchingFile, string checkFile)> GetCollection(int numberLength, string parentDirectory, List<string> matchingFiles)
{
List<(string matchingFile, string checkFile)> results = new();
string checkFile;
int parentDirectoryLength = parentDirectory.Length;
foreach (string matchingFile in matchingFiles)
{
checkFile = $"{matchingFile[0]}{matchingFile.Substring(parentDirectoryLength + numberLength + 1)}";
results.Add(new(matchingFile, checkFile));
}
return results;
}
private static List<(string, string, string, string, string)> GetCollection(List<(string matchingFile, string checkFile)> collection)
{
List<(string, string, string, string, string)> results = new();
string errFile;
string checkDirectory;
string noWaitDirectory;
foreach ((string matchingFile, string checkFile) in collection)
{
errFile = string.Concat(checkFile, ".err");
checkDirectory = Path.GetDirectoryName(checkFile);
if (!Directory.Exists(checkDirectory))
_ = Directory.CreateDirectory(checkDirectory);
noWaitDirectory = Path.Combine(checkDirectory, "NoWaitDirectory");
results.Add(new(matchingFile, checkFile, errFile, checkDirectory, noWaitDirectory));
}
return results;
}
private void MoveCollection(DateTime dateTime, List<(string matchingFile, string checkFile)> collection)
{
long preWait;
List<(string checkFile, string errFile)> postCollection = new();
foreach ((string matchingFile, string checkFile, string errFile, string checkDirectory, string noWaitDirectory) in GetCollection(collection))
{
File.Move(matchingFile, checkFile);
if (Directory.Exists(noWaitDirectory))
{
postCollection.Add(new(checkFile, errFile));
continue;
}
if (_FileConnectorConfiguration?.FileHandleWaitTime is null)
preWait = DateTime.Now.AddMilliseconds(1234).Ticks;
else
preWait = DateTime.Now.AddMilliseconds(_FileConnectorConfiguration.FileHandleWaitTime.Value).Ticks;
for (short i = 0; i < short.MaxValue; i++)
{
if (DateTime.Now.Ticks > preWait)
break;
Thread.Sleep(500);
}
for (int i = 0; i < int.MaxValue; i++)
{
if (File.Exists(errFile))
throw new Exception(File.ReadAllText(errFile));
if (!File.Exists(checkFile))
break;
if (new TimeSpan(DateTime.Now.Ticks - dateTime.Ticks).TotalSeconds > _BreakAfterSeconds)
throw new Exception($"Not all files were consumed after {_BreakAfterSeconds} second(s)!");
Thread.Sleep(500);
}
}
if (postCollection.Count != 0)
{
Thread.Sleep(500);
StringBuilder stringBuilder = new();
foreach ((string checkFile, string errFile) in postCollection)
{
if (File.Exists(errFile))
_ = stringBuilder.AppendLine(File.ReadAllText(errFile));
if (File.Exists(checkFile))
_ = stringBuilder.AppendLine($"<{checkFile}> was not consumed by the end!");
}
if (stringBuilder.Length > 0)
throw new Exception(stringBuilder.ToString());
}
}
private static void CreatePointerFile(int numberLength, string parentDirectory, List<string> matchingFiles)
{
#nullable enable
string checkFile;
string writeFile;
string? directoryName;
int parentDirectoryLength = parentDirectory.Length;
foreach (string matchingFile in matchingFiles)
{
directoryName = Path.GetDirectoryName(matchingFile);
if (directoryName is null)
continue;
checkFile = $"{matchingFile[0]}{directoryName.Substring(parentDirectoryLength + numberLength + 1)}";
writeFile = Path.Combine(parentDirectory, $"{directoryName.Substring(parentDirectory.Length + 1, numberLength)}.txt");
if (File.Exists(writeFile))
continue;
File.AppendAllLines(writeFile, new string[] { parentDirectory, matchingFile, directoryName, checkFile });
}
#nullable disable
}
private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results = new(string.Empty, null, null, new List<FileInfo>());
Tuple<string, string[], string[]> pdsf = ProcessDataStandardFormat.GetLogisticsColumnsAndBody(reportFullPath);
_Logistics = new Logistics(reportFullPath, pdsf.Item1);
SetFileParameterLotIDToLogisticsMID();
int numberLength = 2;
long ticks = dateTime.Ticks;
string parentParentDirectory = GetParentParent(reportFullPath);
List<string> searchDirectories = GetSearchDirectories(numberLength, parentParentDirectory);
List<string> matchingFiles = GetMatchingFiles(ticks, reportFullPath, searchDirectories);
if (matchingFiles.Count != searchDirectories.Count)
throw new Exception($"Didn't find all files after {_BreakAfterSeconds} second(s)!");
try
{ CreatePointerFile(numberLength, parentParentDirectory, matchingFiles); }
catch (Exception) { }
List<(string matchingFile, string checkFile)> collection = GetCollection(numberLength, parentParentDirectory, matchingFiles);
MoveCollection(dateTime, collection);
return results;
return results.AsReadOnly();
}
}
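Net behavior of the mapping change, sketched (assumption: ProcessDataStandardFormat.Write re-serializes the remapped columns and body):

// Mapping parse succeeded: the check file is rewritten through the mapping and the source is deleted.
//   ProcessDataStandardFormat.Write(preWith.CheckFile, processDataStandardFormat);
//   File.Delete(preWith.MatchingFile);
// Mapping parse failed: GetExtractResult falls back to the plain parse for _Logistics only,
// nulls processDataStandardFormat, and MoveCollection moves the original file unmodified:
//   File.Move(preWith.MatchingFile, preWith.CheckFile);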

View File

@ -118,9 +118,9 @@ public class FileRead : Shared.FileRead, IFileRead
StringBuilder results = new();
char del = '\t';
QS408M.Description x = descriptions[0];
_ = results.Append(x.UniqueId).Append(del).
_ = results.Append("Bio-Rad ").Append(x.UniqueId).Append(del).
Append(x.Date).Append(del).
Append(x.Employee).Append(del).
Append(x.ThicknessFourteenCriticalPointsAverage).Append(del).
Append(x.Recipe).Append(del).
Append(x.Reactor).Append(del).
Append(x.RDS).Append(del).
@ -136,7 +136,35 @@ public class FileRead : Shared.FileRead, IFileRead
return results.ToString();
}
private void SaveOpenInsightFile(string reportFullPath, DateTime dateTime, string logistics, List<QS408M.Description> descriptions, Test[] tests)
private static string GetJson(int columnsLine, string[] columns, string[] body)
{
#pragma warning disable CA1845, IDE0057
string result = "[\n";
string line;
string value;
string[] segments;
if (columns.Length == 0)
columns = body[columnsLine].Trim().Split('\t');
for (int i = columnsLine + 1; i < body.Length; i++)
{
line = "{";
segments = body[i].Trim().Split('\t');
if (segments.Length != columns.Length)
break;
for (int c = 1; c < segments.Length; c++)
{
value = segments[c].Replace("\\", "\\\\").Replace("\"", "\\\""); // escape backslashes before quotes so quotes are not double-escaped
line += '"' + columns[c].Trim('"') + '"' + ':' + '"' + value + '"' + ',';
}
line = line.Substring(0, line.Length - 1) + '}' + ',' + '\n';
result += line;
}
result = result.Length == 2 ? "[]" : result.Substring(0, result.Length - 2) + '\n' + ']'; // drop the trailing ",\n" so the array is valid JSON
return result;
#pragma warning restore CA1845, IDE0057
}
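A worked example of GetJson with assumed columns and body (body[columnsLine] is treated as the header row, and field 0 of every data row is dropped):

// columns = { "Time", "Lot", "Thickness" }, columnsLine = 0
// body    = { "Time\tLot\tThickness", "t0\tL42\t1.8", "t1\tL43\t2.0" }
// GetJson(0, columns, body) returns:
// [
// {"Lot":"L42","Thickness":"1.8"},
// {"Lot":"L43","Thickness":"2.0"}
// ]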
private void SaveOpenInsightFile(string reportFullPath, DateTime dateTime, ProcessDataStandardFormat processDataStandardFormat, List<QS408M.Description> descriptions, Test[] tests)
{
bool isDummyRun = false;
List<(Shared.Properties.IScopeInfo, string)> collection = new();
@ -160,6 +188,7 @@ public class FileRead : Shared.FileRead, IFileRead
if (!string.IsNullOrEmpty(lines))
{
long? subGroupId;
_ = GetJson(0, processDataStandardFormat.Columns.ToArray(), processDataStandardFormat.Body.ToArray());
long breakAfter = dateTime.AddSeconds(_BreakAfterSeconds).Ticks;
long preWait = _FileConnectorConfiguration?.FileHandleWaitTime is null ? dateTime.AddMilliseconds(1234).Ticks : dateTime.AddMilliseconds(_FileConnectorConfiguration.FileHandleWaitTime.Value).Ticks;
if (string.IsNullOrEmpty(descriptions[0].Reactor) || string.IsNullOrEmpty(descriptions[0].PSN))
@ -177,7 +206,7 @@ public class FileRead : Shared.FileRead, IFileRead
values[0] = $"{values[0]}|{subGroupId}";
}
string weekOfYear = _Calendar.GetWeekOfYear(_Logistics.DateTimeFromSequence, CalendarWeekRule.FirstDay, DayOfWeek.Sunday).ToString("00");
FromIQS.Save(_OpenInsightApiECDirectory, _Logistics, reportFullPath, logistics, descriptions.First(), lines, subGroupId, weekOfYear);
FromIQS.Save(_OpenInsightApiECDirectory, _Logistics, reportFullPath, processDataStandardFormat, descriptions.First(), lines, subGroupId, weekOfYear);
}
}
if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
@ -187,16 +216,16 @@ public class FileRead : Shared.FileRead, IFileRead
private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
Tuple<string, string[], string[]> pdsf = ProcessDataStandardFormat.GetLogisticsColumnsAndBody(reportFullPath);
_Logistics = new Logistics(reportFullPath, pdsf.Item1);
ProcessDataStandardFormat processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath);
_Logistics = new Logistics(reportFullPath, processDataStandardFormat);
SetFileParameterLotIDToLogisticsMID();
JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(pdsf);
JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(processDataStandardFormat);
// List<Shared.Properties.IDescription> descriptions = GetDuplicatorDescriptions(jsonElements);
List<QS408M.Description> descriptions = QS408M.ProcessData.GetDescriptions(jsonElements);
Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
SaveOpenInsightFile(reportFullPath, dateTime, pdsf.Item1, descriptions, tests);
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(pdsf.Item1, tests, jsonElements, new List<FileInfo>());
SaveOpenInsightFile(reportFullPath, dateTime, processDataStandardFormat, descriptions, tests);
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics), tests, jsonElements, new List<FileInfo>());
return results;
}

View File

@ -325,7 +325,7 @@ public class FromIQS
return new(result, count, commandText);
}
private static string GetJson(Logistics logistics, string logisticLines, QS408M.Description description)
private static string GetJson(Logistics logistics, ProcessDataStandardFormat processDataStandardFormat, QS408M.Description description)
{
string result;
StringBuilder stringBuilder = new();
@ -345,7 +345,7 @@ public class FromIQS
string safeValue;
string[] segments;
string serializerValue;
foreach (string line in logisticLines.Split(new string[] { Environment.NewLine }, StringSplitOptions.None))
foreach (string line in processDataStandardFormat.Logistics)
{
segments = line.Split('\t');
if (segments.Length < 2)
@ -376,11 +376,11 @@ public class FromIQS
return result;
}
internal static void Save(string openInsightApiECDirectory, Logistics logistics, string reportFullPath, string logisticLines, QS408M.Description description, string lines, long? subGroupId, string weekOfYear)
internal static void Save(string openInsightApiECDirectory, Logistics logistics, string reportFullPath, ProcessDataStandardFormat processDataStandardFormat, QS408M.Description description, string lines, long? subGroupId, string weekOfYear)
{
string checkFile;
string fileName = Path.GetFileName(reportFullPath);
string json = GetJson(logistics, logisticLines, description);
string json = GetJson(logistics, processDataStandardFormat, description);
string? ecPathRoot = Path.GetPathRoot(openInsightApiECDirectory);
bool ecExists = ecPathRoot is not null && Directory.Exists(ecPathRoot);
string weekYear = $"{logistics.DateTimeFromSequence:yyyy}_Week_{weekOfYear}";
@ -396,6 +396,9 @@ public class FromIQS
checkFile = Path.Combine(ecDirectory, $"{logistics.DateTimeFromSequence.Ticks}.json");
if (ecExists && !File.Exists(checkFile))
File.WriteAllText(checkFile, json);
checkFile = Path.Combine(ecDirectory, $"{logistics.DateTimeFromSequence.Ticks}.lbl");
if (ecExists && !File.Exists(checkFile))
File.WriteAllText(checkFile, processDataStandardFormat.Body[processDataStandardFormat.Body.Count - 1]);
}
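The sidecar files Save now leaves in the EC directory, sketched with an assumed ticks value:

// Path.Combine(ecDirectory, "638802328740000000.json") -> the GetJson(...) payload, written once
// Path.Combine(ecDirectory, "638802328740000000.lbl")  -> new in this commit: the last PDSF body row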
private static string GetCommandText(string[] iqsCopyValues)

View File

@ -139,15 +139,15 @@ public class FileRead : Shared.FileRead, IFileRead
private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
Tuple<string, string[], string[]> pdsf = ProcessDataStandardFormat.GetLogisticsColumnsAndBody(reportFullPath);
_Logistics = new Logistics(reportFullPath, pdsf.Item1);
ProcessDataStandardFormat processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath);
_Logistics = new Logistics(reportFullPath, processDataStandardFormat);
SetFileParameterLotIDToLogisticsMID();
JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(pdsf);
JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(processDataStandardFormat);
List<QS408M.Description> descriptions = QS408M.ProcessData.GetDescriptions(jsonElements);
Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
SendData(reportFullPath, dateTime, descriptions);
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(pdsf.Item1, tests, jsonElements, new List<FileInfo>());
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics), tests, jsonElements, new List<FileInfo>());
return results;
}

View File

@ -80,8 +80,12 @@ public class WSRequest
Details.Add(detail);
}
Date ??= logistics.DateTimeFromSequence.ToString();
if (UniqueId is null && Details.Count != 0)
UniqueId = Details[0].HeaderUniqueId;
UniqueId = $"{logistics.JobID}_{logistics.MID}_{logistics.DateTimeFromSequence:yyyyMMddHHmmssffff}";
for (int i = 0; i < Details.Count; i++)
{
Details[i].HeaderUniqueId = UniqueId;
Details[i].UniqueId = $"{logistics.JobID}_{logistics.MID}_{logistics.DateTimeFromSequence:yyyyMMddHHmmssffff}_Item-{i + 1}";
}
}
internal static long GetHeaderId(IFileRead fileRead, Logistics logistics, string openInsightMetrologyViewerAPI, string openInsightMetrologyViewerFileShare, int weekOfYear, string json, List<QS408M.Description> descriptions)
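The identifiers the rewritten block produces, assuming JobID "BIORAD2", MID "LOT123", and a sequence time of 2025-04-17 15:47:54.0000:

// UniqueId                  = "BIORAD2_LOT123_202504171547540000"
// Details[i].HeaderUniqueId = UniqueId (every detail now shares the header id)
// Details[0].UniqueId       = "BIORAD2_LOT123_202504171547540000_Item-1"
// Details[1].UniqueId       = "BIORAD2_LOT123_202504171547540000_Item-2"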

View File

@ -168,15 +168,15 @@ public class FileRead : Shared.FileRead, IFileRead
if (dateTime == DateTime.MinValue)
throw new ArgumentNullException(nameof(dateTime));
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
Tuple<string, string[], string[]> pdsf = ProcessDataStandardFormat.GetLogisticsColumnsAndBody(reportFullPath);
_Logistics = new Logistics(reportFullPath, pdsf.Item1);
ProcessDataStandardFormat processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath);
_Logistics = new Logistics(reportFullPath, processDataStandardFormat);
SetFileParameterLotIDToLogisticsMID();
JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(pdsf);
JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(processDataStandardFormat);
List<QS408M.Description> descriptions = QS408M.ProcessData.GetDescriptions(jsonElements);
Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
PostOpenInsightMetrologyViewerAttachments(descriptions);
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(pdsf.Item1, tests, jsonElements, new List<FileInfo>());
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics), tests, jsonElements, new List<FileInfo>());
return results;
}

View File

@ -129,6 +129,7 @@ public class FileRead : Shared.FileRead, IFileRead
string destinationJobIdDirectory = Path.Combine(_JobIdProcessParentDirectory, _Logistics.JobID, directoryName);
string sequenceDirectory = Path.Combine(destinationJobIdDirectory, logisticsSequence);
string jsonFileName = Path.Combine(sequenceDirectory, $"{Path.GetFileNameWithoutExtension(reportFullPath)}.json");
MoveMatchingFile(jobIdDirectory, matchDirectories[0]);
Directory.Move(matchDirectories[0], destinationJobIdDirectory);
if (!Directory.Exists(sequenceDirectory))
_ = Directory.CreateDirectory(sequenceDirectory);
@ -136,16 +137,40 @@ public class FileRead : Shared.FileRead, IFileRead
File.WriteAllText(jsonFileName, json);
}
private static void MoveMatchingFile(string jobIdDirectory, string matchDirectory)
{
string checkFile;
string jobIdDirectoryFileName;
string matchDirectoryFileName;
string[] jobIdDirectoryFiles = Directory.GetFiles(jobIdDirectory, "*", SearchOption.TopDirectoryOnly);
string[] matchDirectoryFiles = Directory.GetFiles(matchDirectory, "*", SearchOption.TopDirectoryOnly);
foreach (string jobIdDirectoryFile in jobIdDirectoryFiles)
{
jobIdDirectoryFileName = Path.GetFileName(jobIdDirectoryFile);
foreach (string matchDirectoryFile in matchDirectoryFiles)
{
matchDirectoryFileName = Path.GetFileName(matchDirectoryFile);
if (jobIdDirectoryFileName.StartsWith(matchDirectoryFileName))
{
checkFile = Path.Combine(matchDirectory, jobIdDirectoryFileName);
if (File.Exists(checkFile))
continue;
File.Move(jobIdDirectoryFile, checkFile);
}
}
}
}
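The prefix match in MoveMatchingFile, traced with assumed file names:

// jobIdDirectory contains "run.pdsf.err"; matchDirectory contains "run.pdsf".
// "run.pdsf.err".StartsWith("run.pdsf") is true, so the .err file is moved into
// matchDirectory as "run.pdsf.err", unless a file of that name already exists there.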
private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
Tuple<string, string[], string[]> pdsf = ProcessDataStandardFormat.GetLogisticsColumnsAndBody(reportFullPath);
_Logistics = new Logistics(reportFullPath, pdsf.Item1);
ProcessDataStandardFormat processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath);
_Logistics = new Logistics(reportFullPath, processDataStandardFormat);
SetFileParameterLotIDToLogisticsMID();
JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(pdsf);
JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(processDataStandardFormat);
List<QS408M.Description> descriptions = QS408M.ProcessData.GetDescriptions(jsonElements);
Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(pdsf.Item1, tests, jsonElements, new List<FileInfo>());
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics), tests, jsonElements, new List<FileInfo>());
if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
DirectoryMove(reportFullPath, dateTime, descriptions);
else if (!_IsEAFHosted)

View File

@ -117,15 +117,15 @@ public class FileRead : Shared.FileRead, IFileRead
private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
Tuple<string, string[], string[]> pdsf = ProcessDataStandardFormat.GetLogisticsColumnsAndBody(reportFullPath);
_Logistics = new Logistics(reportFullPath, pdsf.Item1);
ProcessDataStandardFormat processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath);
_Logistics = new Logistics(reportFullPath, processDataStandardFormat);
SetFileParameterLotIDToLogisticsMID();
JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(pdsf);
JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(processDataStandardFormat);
List<Shared.Properties.IDescription> descriptions = GetDuplicatorDescriptions(jsonElements);
Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
FileCopy(reportFullPath, dateTime, descriptions);
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(pdsf.Item1, tests, jsonElements, new List<FileInfo>());
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics), tests, jsonElements, new List<FileInfo>());
return results;
}

View File

@ -25,7 +25,7 @@ stages:
nugetSource: "https://artifactory.intra.infineon.com/artifactory/api/nuget/ngt-fi-package-main-vir/"
jobs:
- job: SetupEnviroment
- job: SetupEnvironment
steps:
- script: |
echo $(Build.BuildId)
@ -51,7 +51,7 @@ stages:
- job: BuildDebug
dependsOn:
- SetupEnviroment
- SetupEnvironment
steps:
- script: |
set configuration=Debug
@ -66,7 +66,7 @@ stages:
- job: BuildRelease
dependsOn:
- SetupEnviroment
- SetupEnvironment
steps:
- script: |
set configuration=Release
@ -98,7 +98,7 @@ stages:
- job: TestDebug
dependsOn:
- SetupEnviroment
- SetupEnvironment
- BuildDebug
- BuildRelease
steps:
@ -168,7 +168,7 @@ stages:
nugetSource: "https://artifactory.intra.infineon.com/artifactory/api/nuget/ngt-fi-package-main-vir/"
jobs:
- job: SetupEnviroment
- job: SetupEnvironment
steps:
- script: |
echo $(Build.BuildId)
@ -194,7 +194,7 @@ stages:
- job: BuildRelease
dependsOn:
- SetupEnviroment
- SetupEnvironment
steps:
- script: |
set configuration=Release
@ -226,7 +226,7 @@ stages:
- job: TestRelease
dependsOn:
- SetupEnviroment
- SetupEnvironment
- BuildRelease
steps:
- script: |

File diff suppressed because it is too large

View File

@ -35,6 +35,9 @@ public class Logistics : ILogistics
public long Sequence => _Sequence;
public double TotalSecondsSinceLastWriteTimeFromSequence => _TotalSecondsSinceLastWriteTimeFromSequence;
private static string DefaultMesEntity(DateTime dateTime) =>
string.Concat(dateTime.Ticks, "_MES_ENTITY");
public Logistics(IFileRead fileRead)
{
DateTime dateTime = DateTime.Now;
@ -84,13 +87,13 @@ public class Logistics : ILogistics
_Logistics2 = new List<Logistics2>();
}
public Logistics(string reportFullPath, string logistics)
internal Logistics(string reportFullPath, ProcessDataStandardFormat processDataStandardFormat)
{
string key;
DateTime dateTime;
string[] segments;
_FileInfo = new(reportFullPath);
_Logistics1 = logistics.Split(new string[] { Environment.NewLine }, StringSplitOptions.RemoveEmptyEntries).ToList();
_Logistics1 = processDataStandardFormat.Logistics.ToList();
if (Logistics1.Count == 0 || !Logistics1[0].StartsWith("LOGISTICS_1"))
{
_NullData = null;
@ -190,8 +193,6 @@ public class Logistics : ILogistics
}
}
private static string DefaultMesEntity(DateTime dateTime) => string.Concat(dateTime.Ticks, "_MES_ENTITY");
internal void Update(string mid, string processJobID)
{
_MID = mid;

View File

@ -0,0 +1 @@


View File

@ -1,18 +1,22 @@
using Adaptation.Shared.Methods;
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Text;
using System.Text.Json;
using System.Text.Json.Serialization;
namespace Adaptation.Shared;
public class ProcessDataStandardFormat
#nullable enable
internal class ProcessDataStandardFormat
{
public enum SearchFor
internal enum SearchFor
{
EquipmentIntegration = 1,
BusinessIntegration = 2,
@ -20,325 +24,41 @@ public class ProcessDataStandardFormat
Archive = 4
}
public static string GetPDSFText(IFileRead fileRead, Logistics logistics, JsonElement[] jsonElements, string logisticsText)
internal long? Sequence { get; private set; }
internal ReadOnlyCollection<string> Body { get; private set; }
internal ReadOnlyCollection<string> Columns { get; private set; }
internal ReadOnlyCollection<string> Logistics { get; private set; }
internal ReadOnlyCollection<string> InputLines { get; private set; }
internal ProcessDataStandardFormat(ReadOnlyCollection<string> body,
ReadOnlyCollection<string> columns,
ReadOnlyCollection<string> inputLines,
ReadOnlyCollection<string> logistics,
long? sequence)
{
string result;
if (jsonElements.Length == 0)
result = string.Empty;
else
{
int columns = 0;
List<string> lines;
string endOffset = "E#######T";
string dataOffset = "D#######T";
string headerOffset = "H#######T";
string format = "MM/dd/yyyy HH:mm:ss";
StringBuilder stringBuilder = new();
lines = new string[] { "HEADER_TAG\tHEADER_VALUE", "FORMAT\t2.00", "NUMBER_PASSES\t0001", string.Concat("HEADER_OFFSET\t", headerOffset), string.Concat("DATA_OFFSET\t", dataOffset), string.Concat("END_OFFSET\t", endOffset) }.ToList();
_ = stringBuilder.Append("\"Time\"").Append('\t');
_ = stringBuilder.Append("\"A_LOGISTICS\"").Append('\t');
_ = stringBuilder.Append("\"B_LOGISTICS\"").Append('\t');
for (int i = 0; i < jsonElements.Length;)
{
foreach (JsonProperty jsonProperty in jsonElements[0].EnumerateObject())
{
columns += 1;
_ = stringBuilder.Append('"').Append(jsonProperty.Name).Append('"').Append('\t');
}
break;
}
_ = stringBuilder.Remove(stringBuilder.Length - 1, 1);
lines.Add(stringBuilder.ToString());
for (int i = 0; i < jsonElements.Length; i++)
{
_ = stringBuilder.Clear();
_ = stringBuilder.Append("0.1").Append('\t');
_ = stringBuilder.Append('1').Append('\t');
_ = stringBuilder.Append('2').Append('\t');
foreach (JsonProperty jsonProperty in jsonElements[i].EnumerateObject())
_ = stringBuilder.Append(jsonProperty.Value).Append('\t');
_ = stringBuilder.Remove(stringBuilder.Length - 1, 1);
lines.Add(stringBuilder.ToString());
}
lines.Add(string.Concat("NUM_DATA_ROWS ", jsonElements.Length.ToString().PadLeft(9, '0')));
lines.Add(string.Concat("NUM_DATA_COLUMNS ", (columns + 3).ToString().PadLeft(9, '0')));
lines.Add("DELIMITER ;");
lines.Add(string.Concat("START_TIME_FORMAT ", format));
lines.Add(string.Concat("START_TIME ", logistics.DateTimeFromSequence.ToString(format))); //12/26/2019 15:22:44
lines.Add(string.Concat("LOGISTICS_COLUMN", '\t', "A_LOGISTICS"));
lines.Add(string.Concat("LOGISTICS_COLUMN", '\t', "B_LOGISTICS"));
if (!string.IsNullOrEmpty(logisticsText))
lines.Add(logisticsText);
else
{
lines.Add(string.Concat("LOGISTICS_1", '\t', "A_CHAMBER=;A_INFO=", fileRead.EventName, ";A_INFO2=", fileRead.EquipmentType, ";A_JOBID=", fileRead.CellInstanceName, ";A_MES_ENTITY=", fileRead.MesEntity, ";A_MID=", logistics.MID, ";A_NULL_DATA=", fileRead.NullData, ";A_PPID=NO_PPID;A_PROCESS_JOBID=", logistics.ProcessJobID, ";A_PRODUCT=;A_SEQUENCE=", logistics.Sequence, ";A_WAFER_ID=;"));
lines.Add(string.Concat("LOGISTICS_2", '\t', "B_CHAMBER=;B_INFO=", fileRead.EventName, ";B_INFO2=", fileRead.EquipmentType, ";B_JOBID=", fileRead.CellInstanceName, ";B_MES_ENTITY=", fileRead.MesEntity, ";B_MID=", logistics.MID, ";B_NULL_DATA=", fileRead.NullData, ";B_PPID=NO_PPID;B_PROCESS_JOBID=", logistics.ProcessJobID, ";B_PRODUCT=;B_SEQUENCE=", logistics.Sequence, ";B_WAFER_ID=;"));
lines.Add("END_HEADER");
}
_ = stringBuilder.Clear();
foreach (string line in lines)
_ = stringBuilder.AppendLine(line);
result = stringBuilder.ToString();
result = result.Replace(headerOffset, result.IndexOf("NUM_DATA_ROWS").ToString().PadLeft(9, '0')).
Replace(dataOffset, result.IndexOf('"').ToString().PadLeft(9, '0')).
Replace(endOffset, result.Length.ToString().PadLeft(9, '0'));
}
return result;
Body = body;
Columns = columns;
InputLines = inputLines;
Logistics = logistics;
Sequence = sequence;
}
public static Tuple<string, string[], string[]> GetLogisticsColumnsAndBody(string reportFullPath, string[] lines = null)
{
string segment;
List<string> body = new();
StringBuilder logistics = new();
lines ??= File.ReadAllLines(reportFullPath);
string[] segments;
if (lines.Length < 7)
segments = Array.Empty<string>();
else
segments = lines[6].Trim().Split('\t');
List<string> columns = new();
for (int c = 0; c < segments.Length; c++)
{
segment = segments[c].Substring(1, segments[c].Length - 2);
if (!columns.Contains(segment))
columns.Add(segment);
else
{
for (short i = 1; i < short.MaxValue; i++)
{
segment = string.Concat(segment, "_", i);
if (!columns.Contains(segment))
{
columns.Add(segment);
break;
}
}
}
}
bool lookForLogistics = false;
for (int r = 7; r < lines.Length; r++)
{
if (lines[r].StartsWith("NUM_DATA_ROWS"))
lookForLogistics = true;
if (!lookForLogistics)
{
body.Add(lines[r]);
continue;
}
if (lines[r].StartsWith("LOGISTICS_1"))
{
for (int i = r; i < lines.Length; i++)
{
if (lines[r].StartsWith("END_HEADER"))
break;
_ = logistics.AppendLine(lines[i]);
}
break;
}
}
return new Tuple<string, string[], string[]>(logistics.ToString(), columns.ToArray(), body.ToArray());
}
internal static string EquipmentIntegration(bool addSpaces = true, char separator = ' ') =>
GetString(SearchFor.EquipmentIntegration, addSpaces, separator);
public static JsonElement[] GetArray(Tuple<string, string[], string[]> pdsf, bool lookForNumbers = false)
{
JsonElement[] results;
string logistics = pdsf.Item1;
string[] columns = pdsf.Item2;
string[] bodyLines = pdsf.Item3;
if (bodyLines.Length == 0 || !bodyLines[0].Contains('\t'))
results = JsonSerializer.Deserialize<JsonElement[]>("[]");
else
{
string value;
string[] segments;
List<string> lines = new();
StringBuilder stringBuilder = new();
foreach (string bodyLine in bodyLines)
{
_ = stringBuilder.Clear();
_ = stringBuilder.Append('{');
segments = bodyLine.Trim().Split('\t');
if (!lookForNumbers)
{
for (int c = 1; c < segments.Length; c++)
{
value = segments[c].Replace("\"", "\\\"").Replace("\\", "\\\\");
_ = stringBuilder.Append('"').Append(columns[c]).Append("\":\"").Append(value).Append("\",");
}
}
else
{
for (int c = 1; c < segments.Length; c++)
{
value = segments[c].Replace("\"", "\\\"").Replace("\\", "\\\\");
if (string.IsNullOrEmpty(value))
_ = stringBuilder.Append('"').Append(columns[c]).Append("\":").Append(value).Append("null,");
else if (value.All(char.IsDigit))
_ = stringBuilder.Append('"').Append(columns[c]).Append("\":").Append(value).Append(',');
else
_ = stringBuilder.Append('"').Append(columns[c]).Append("\":\"").Append(value).Append("\",");
}
}
_ = stringBuilder.Remove(stringBuilder.Length - 1, 1);
_ = stringBuilder.AppendLine("}");
lines.Add(stringBuilder.ToString());
}
string json = $"[{string.Join(",", lines)}]";
results = JsonSerializer.Deserialize<JsonElement[]>(json);
}
return results;
}
internal static string BusinessIntegration(bool addSpaces = true, char separator = ' ') =>
GetString(SearchFor.BusinessIntegration, addSpaces, separator);
public static Dictionary<string, List<string>> GetDictionary(Tuple<string, string[], string[]> pdsf)
{
Dictionary<string, List<string>> results = new();
string[] segments;
string[] columns = pdsf.Item2;
string[] bodyLines = pdsf.Item3;
foreach (string column in columns)
results.Add(column, new List<string>());
foreach (string bodyLine in bodyLines)
{
segments = bodyLine.Split('\t');
for (int c = 1; c < segments.Length; c++)
{
if (c >= columns.Length)
continue;
results[columns[c]].Add(segments[c]);
}
}
return results;
}
internal static string SystemExport(bool addSpaces = true, char separator = ' ') =>
GetString(SearchFor.SystemExport, addSpaces, separator);
public static Tuple<string, Dictionary<Test, Dictionary<string, List<string>>>> GetTestDictionary(Tuple<string, string[], string[]> pdsf)
{
Dictionary<Test, Dictionary<string, List<string>>> results = new();
List<string> collection;
string testColumn = nameof(Test);
Dictionary<string, List<string>> keyValuePairs = GetDictionary(pdsf);
if (!keyValuePairs.TryGetValue(testColumn, out collection))
throw new Exception();
int min;
int max;
Test testKey;
List<string> vs;
string columnKey;
Dictionary<Test, List<int>> tests = new();
for (int i = 0; i < collection.Count; i++)
{
if (Enum.TryParse(collection[i], out Test test))
{
if (!results.ContainsKey(test))
{
tests.Add(test, new List<int>());
results.Add(test, new Dictionary<string, List<string>>());
}
tests[test].Add(i);
}
}
foreach (KeyValuePair<Test, List<int>> testKeyValuePair in tests)
{
testKey = testKeyValuePair.Key;
min = testKeyValuePair.Value.Min();
max = testKeyValuePair.Value.Max() + 1;
foreach (KeyValuePair<string, List<string>> keyValuePair in keyValuePairs)
results[testKey].Add(keyValuePair.Key, new List<string>());
foreach (KeyValuePair<string, List<string>> keyValuePair in keyValuePairs)
{
vs = keyValuePair.Value;
columnKey = keyValuePair.Key;
for (int i = min; i < max; i++)
{
if (vs.Count > i)
results[testKey][columnKey].Add(vs[i]);
else
results[testKey][columnKey].Add(string.Empty);
}
}
}
return new Tuple<string, Dictionary<Test, Dictionary<string, List<string>>>>(pdsf.Item1, results);
}
internal static string Archive(bool addSpaces = true, char separator = ' ') =>
GetString(SearchFor.Archive, addSpaces, separator);
private static string GetString(SearchFor searchFor, bool addSpaces, char separator = ' ')
{
if (!addSpaces)
return string.Concat(((int)searchFor).ToString().PadLeft(2, '0'), searchFor);
else
return string.Concat(((int)searchFor).ToString().PadLeft(2, '0'), separator, searchFor.ToString().Replace("In", string.Concat(separator, "In")).Replace("Ex", string.Concat(separator, "Ex")));
}
internal static ProcessDataStandardFormat GetEmpty() =>
new(new(Array.Empty<string>()), new(Array.Empty<string>()), new(Array.Empty<string>()), new(Array.Empty<string>()), null);
public static string EquipmentIntegration(bool addSpaces = true, char separator = ' ') => GetString(SearchFor.EquipmentIntegration, addSpaces, separator);
public static string BusinessIntegration(bool addSpaces = true, char separator = ' ') => GetString(SearchFor.BusinessIntegration, addSpaces, separator);
public static string SystemExport(bool addSpaces = true, char separator = ' ') => GetString(SearchFor.SystemExport, addSpaces, separator);
public static string Archive(bool addSpaces = true, char separator = ' ') => GetString(SearchFor.Archive, addSpaces, separator);
public static string GetLines(Logistics logistics, Properties.IScopeInfo scopeInfo, List<string> names, Dictionary<string, List<string>> keyValuePairs, string dateFormat, string timeFormat, List<string> pairedParameterNames, bool useDateTimeFromSequence = true, string format = "", List<string> ignoreParameterNames = null)
{
StringBuilder result = new();
ignoreParameterNames ??= new List<string>();
if (useDateTimeFromSequence && !string.IsNullOrEmpty(format))
throw new Exception();
else if (!useDateTimeFromSequence && string.IsNullOrEmpty(format))
throw new Exception();
string nullData;
const string columnDate = "Date";
const string columnTime = "Time";
const string firstDuplicate = "_1";
_ = result.AppendLine(scopeInfo.Header);
StringBuilder line = new();
if (logistics.NullData is null)
nullData = string.Empty;
else
nullData = logistics.NullData.ToString();
int count = (from l in keyValuePairs select l.Value.Count).Min();
for (int r = 0; r < count; r++)
{
_ = line.Clear();
_ = line.Append('!');
foreach (KeyValuePair<string, List<string>> keyValuePair in keyValuePairs)
{
if (!names.Contains(keyValuePair.Key))
continue;
if (ignoreParameterNames.Contains(keyValuePair.Key))
continue;
if (pairedParameterNames.Contains(keyValuePair.Key))
{
if (string.IsNullOrEmpty(keyValuePair.Value[r]) || keyValuePair.Value[r] == nullData)
continue;
else
_ = result.Append(line).Append(keyValuePair.Key).Append(';').AppendLine(keyValuePair.Value[r]);
}
else
{
if (useDateTimeFromSequence && keyValuePair.Key == columnDate)
_ = line.Append(logistics.DateTimeFromSequence.ToString(dateFormat));
else if (useDateTimeFromSequence && keyValuePair.Key == columnTime)
_ = line.Append(logistics.DateTimeFromSequence.ToString(timeFormat));
else if (!useDateTimeFromSequence && keyValuePair.Key == columnDate && keyValuePair.Value[r].Length == format.Length)
_ = line.Append(DateTime.ParseExact(keyValuePair.Value[r], format, CultureInfo.InvariantCulture).ToString(dateFormat));
else if (!useDateTimeFromSequence && keyValuePair.Key == columnTime && keyValuePairs.ContainsKey(string.Concat(keyValuePair.Key, firstDuplicate)) && keyValuePairs[string.Concat(keyValuePair.Key, firstDuplicate)][r].Length == format.Length)
_ = line.Append(DateTime.ParseExact(keyValuePairs[string.Concat(keyValuePair.Key, firstDuplicate)][r], format, CultureInfo.InvariantCulture).ToString(timeFormat));
else if (string.IsNullOrEmpty(keyValuePair.Value[r]) || keyValuePair.Value[r] == nullData)
_ = line.Append(nullData);
else
_ = line.Append(keyValuePair.Value[r]);
_ = line.Append(';');
}
}
if (pairedParameterNames.Count == 0)
{
_ = line.Remove(line.Length - 1, 1);
_ = result.AppendLine(line.ToString());
}
}
return result.ToString();
}
internal static List<string> PDSFToFixedWidth(string reportFullPath)
{
List<string> results = new();
if (!File.Exists(reportFullPath))
@ -407,4 +127,544 @@ public class ProcessDataStandardFormat
return results;
}
internal static ProcessDataStandardFormat GetProcessDataStandardFormat(string reportFullPath, string[]? lines = null)
{
ProcessDataStandardFormat result;
string segment;
List<string> body = new();
List<string> logistics = new();
lines ??= File.ReadAllLines(reportFullPath);
string[] segments;
if (lines.Length < 7)
segments = Array.Empty<string>();
else
segments = lines[6].Trim().Split('\t');
List<string> columns = new();
for (int c = 0; c < segments.Length; c++)
{
segment = segments[c].Substring(1, segments[c].Length - 2);
if (!columns.Contains(segment))
columns.Add(segment);
else
{
for (short i = 1; i < short.MaxValue; i++)
{
    // suffix the original name on each attempt ("Name_1", "Name_2", ...) instead of compounding prior attempts
    string suffixed = string.Concat(segment, "_", i);
    if (!columns.Contains(suffixed))
    {
        columns.Add(suffixed);
        break;
    }
}
}
}
bool lookForLogistics = false;
for (int r = 7; r < lines.Length; r++)
{
if (lines[r].StartsWith("NUM_DATA_ROWS"))
lookForLogistics = true;
if (!lookForLogistics)
{
body.Add(lines[r]);
continue;
}
if (lines[r].StartsWith("LOGISTICS_1"))
{
for (int i = r; i < lines.Length; i++)
{
if (!lines[i].StartsWith("LOGISTICS_") || lines[i].StartsWith("END_HEADER"))
break;
logistics.Add(lines[i]);
}
break;
}
}
if (lines.Length > 1 && body.Count == 0 && columns.Count == 0 && logistics.Count == 0)
    logistics.Add(lines[1]); // requires at least two lines since lines[1] is read
result = new(body: body.AsReadOnly(),
columns: columns.AsReadOnly(),
inputLines: lines.ToList().AsReadOnly(),
logistics: logistics.AsReadOnly(),
sequence: null);
return result;
}
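// A minimal sketch of the layout this parser assumes (values illustrative, not from a real file):
//   lines 0-5 : header tags (HEADER_TAG/FORMAT/NUMBER_PASSES and the three offset tags)
//   line 6    : tab-delimited, quoted column names, e.g. "Time"\t"A_LOGISTICS"\t"B_LOGISTICS"\t...
//   lines 7+  : tab-delimited body rows, collected until a line starts with NUM_DATA_ROWS
//   footer    : NUM_DATA_ROWS/NUM_DATA_COLUMNS/DELIMITER/START_TIME*, then LOGISTICS_1..n, END_HEADER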
internal static ProcessDataStandardFormat? GetProcessDataStandardFormat(string reportFullPath, ProcessDataStandardFormatMapping pdsfMapping)
{
ProcessDataStandardFormat? result;
const int columnsLine = 6;
FileInfo fileInfo = new(reportFullPath);
ProcessDataStandardFormat processDataStandardFormat = GetProcessDataStandardFormat(fileInfo.LastWriteTime, pdsfMapping.NewColumnNames.Count, columnsLine, fileInfo.FullName, lines: null);
JsonElement[]? jsonElements = GetArray(pdsfMapping.NewColumnNames.Count, processDataStandardFormat, lookForNumbers: false);
if (jsonElements is null || pdsfMapping.OldColumnNames.Count != pdsfMapping.ColumnIndices.Count)
result = null;
else
{
result = GetProcessDataStandardFormat(pdsfMapping, jsonElements, processDataStandardFormat);
if (result.Sequence is null || result.Columns.Count == 0 || result.Body.Count == 0 || result.Logistics.Count == 0)
result = null;
}
return result;
}
private static ProcessDataStandardFormat GetProcessDataStandardFormat(DateTime lastWriteTime, int expectedColumns, int columnsLine, string path, string[]? lines)
{
ProcessDataStandardFormat result;
long sequence;
string[] segments;
List<string> body = new();
bool lookForLogistics = false;
List<string> logistics = new();
lines ??= File.ReadAllLines(path);
if (lines.Length <= columnsLine)
segments = Array.Empty<string>();
else
{
segments = lines[columnsLine].Split('\t');
if (segments.Length != expectedColumns)
segments = Array.Empty<string>();
}
string[] columns = segments.Select(l => l.Trim('"')).ToArray();
for (int r = columnsLine + 1; r < lines.Length; r++)
{
if (lines[r].StartsWith("NUM_DATA_ROWS"))
lookForLogistics = true;
if (!lookForLogistics)
{
body.Add(lines[r]);
continue;
}
if (lines[r].StartsWith("LOGISTICS_1"))
{
for (int i = r; i < lines.Length; i++)
{
if (!lines[i].StartsWith("LOGISTICS_") || lines[i].StartsWith("END_HEADER"))
break;
logistics.Add(lines[i]);
}
break;
}
}
if (logistics.Count == 0)
sequence = lastWriteTime.Ticks;
else
{
segments = logistics[0].Split(new string[] { "SEQUENCE=" }, StringSplitOptions.None);
sequence = segments.Length < 2 || !long.TryParse(segments[1].Split(';')[0], out long s) ? lastWriteTime.Ticks : s;
}
result = new(body: body.AsReadOnly(),
columns: new(columns),
inputLines: lines.ToList().AsReadOnly(),
logistics: logistics.AsReadOnly(),
sequence: sequence);
return result;
}
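// Example (hypothetical values): given a first logistics line such as
//   LOGISTICS_1\tA_CHAMBER=;A_JOBID=BIORAD2;A_SEQUENCE=638756490128318288;A_WAFER_ID=;
// the split on "SEQUENCE=" followed by Split(';')[0] yields 638756490128318288 as the sequence;
// when no logistics section exists, the file's LastWriteTime ticks are used instead.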
private static JsonElement[]? GetArray(int expectedColumns, ProcessDataStandardFormat processDataStandardFormat, bool lookForNumbers)
{
JsonElement[]? results;
if (processDataStandardFormat.Body.Count == 0 || !processDataStandardFormat.Body[0].Contains('\t'))
results = JsonSerializer.Deserialize("[]", JsonElementCollectionSourceGenerationContext.Default.JsonElementArray) ?? throw new Exception();
else
{
string value;
string[] segments;
List<string> lines = new();
StringBuilder stringBuilder = new();
foreach (string bodyLine in processDataStandardFormat.Body)
{
_ = stringBuilder.Clear();
_ = stringBuilder.Append('{');
segments = bodyLine.Split('\t');
if (segments.Length != expectedColumns)
continue;
if (!lookForNumbers)
{
for (int c = 0; c < segments.Length; c++)
{
value = segments[c].Replace("\\", "\\\\").Replace("\"", "\\\""); // escape backslashes before quotes so the generated JSON stays valid
_ = stringBuilder.Append('"').Append(processDataStandardFormat.Columns[c]).Append("\":\"").Append(value).Append("\",");
}
}
else
{
for (int c = 0; c < segments.Length; c++)
{
value = segments[c].Replace("\\", "\\\\").Replace("\"", "\\\"");
if (string.IsNullOrEmpty(value))
_ = stringBuilder.Append('"').Append(processDataStandardFormat.Columns[c]).Append("\":").Append(value).Append("null,");
else if (value.All(char.IsDigit))
_ = stringBuilder.Append('"').Append(processDataStandardFormat.Columns[c]).Append("\":").Append(value).Append(',');
else
_ = stringBuilder.Append('"').Append(processDataStandardFormat.Columns[c]).Append("\":\"").Append(value).Append("\",");
}
}
_ = stringBuilder.Remove(stringBuilder.Length - 1, 1);
_ = stringBuilder.AppendLine("}");
lines.Add(stringBuilder.ToString());
}
string json = $"[{string.Join(",", lines)}]";
results = JsonSerializer.Deserialize(json, JsonElementCollectionSourceGenerationContext.Default.JsonElementArray);
}
return results;
}
private static ProcessDataStandardFormat GetProcessDataStandardFormat(ProcessDataStandardFormatMapping processDataStandardFormatMapping, JsonElement[] jsonElements, ProcessDataStandardFormat processDataStandardFormat)
{
ProcessDataStandardFormat result;
int column;
string value;
JsonProperty jsonProperty;
List<string> values = new();
List<string> results = new();
JsonProperty[] jsonProperties;
List<string> unknownColumns = new();
for (int i = 0; i < jsonElements.Length; i++)
{
values.Clear();
if (jsonElements[i].ValueKind != JsonValueKind.Object)
{
unknownColumns.Add(string.Empty);
break;
}
jsonProperties = jsonElements[i].EnumerateObject().ToArray();
if (jsonProperties.Length != processDataStandardFormatMapping.NewColumnNames.Count)
continue;
for (int c = 0; c < processDataStandardFormatMapping.ColumnIndices.Count; c++)
{
column = processDataStandardFormatMapping.ColumnIndices[c];
if (column == -1)
value = processDataStandardFormatMapping.OldColumnNames[c];
else
{
jsonProperty = jsonProperties[column];
value = jsonProperty.Value.ToString();
}
values.Add(value);
}
results.Add(string.Join("\t", values));
}
result = new(body: new(results),
columns: processDataStandardFormatMapping.OldColumnNames,
inputLines: processDataStandardFormat.InputLines,
logistics: processDataStandardFormat.Logistics,
sequence: processDataStandardFormat.Sequence);
return result;
}
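// Worked example (hypothetical mapping): with OldColumnNames = ["Time", "Lot"] and ColumnIndices = [3, -1],
// each output row takes property 3 of the remapped JSON row for "Time", while -1 means "no source column",
// so the literal old column name "Lot" is written as the cell value.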
internal static void Write(string path, ProcessDataStandardFormat processDataStandardFormat)
{
List<string> results = new();
if (processDataStandardFormat.Sequence is null)
throw new NullReferenceException(nameof(processDataStandardFormat.Sequence));
string endOffset = "E#######T";
string dataOffset = "D#######T";
string headerOffset = "H#######T";
string format = "MM/dd/yyyy HH:mm:ss";
string startTime = new DateTime(processDataStandardFormat.Sequence.Value).ToString(format);
results.Add("HEADER_TAG\tHEADER_VALUE");
results.Add("FORMAT\t2.00");
results.Add("NUMBER_PASSES\t0001");
results.Add($"HEADER_OFFSET\t{headerOffset}");
results.Add($"DATA_OFFSET\t{dataOffset}");
results.Add($"END_OFFSET\t{endOffset}");
results.Add($"\"{string.Join("\"\t\"", processDataStandardFormat.Columns)}\"");
results.AddRange(processDataStandardFormat.Body);
results.Add($"NUM_DATA_ROWS\t{processDataStandardFormat.Body.Count.ToString().PadLeft(9, '0')}");
results.Add($"NUM_DATA_COLUMNS\t{processDataStandardFormat.Columns.Count.ToString().PadLeft(9, '0')}");
results.Add("DELIMITER\t;");
results.Add($"START_TIME_FORMAT\t{format}");
results.Add($"START_TIME\t{startTime}");
results.Add("LOGISTICS_COLUMN\tA_LOGISTICS");
results.Add("LOGISTICS_COLUMN\tB_LOGISTICS");
results.AddRange(processDataStandardFormat.Logistics);
results.Add("EOF");
results.AddRange(processDataStandardFormat.InputLines.Select(l => l.Replace('\t', '|')));
File.WriteAllText(path, string.Join(Environment.NewLine, results));
}
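// Sketch of the file Write produces (illustrative; the H/D/E "#######T" offset placeholders are not
// patched here, unlike in GetPDSFText):
//   header tags, quoted column-name row, body rows, NUM_DATA_ROWS/NUM_DATA_COLUMNS/DELIMITER/START_TIME*,
//   LOGISTICS_COLUMN lines, the logistics lines, EOF, and finally every original input line with tabs
//   replaced by '|' so the source PDSF survives after the EOF marker.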
internal static Dictionary<string, List<string>> GetDictionary(ProcessDataStandardFormat processDataStandardFormat)
{
Dictionary<string, List<string>> results = new();
string[] segments;
foreach (string column in processDataStandardFormat.Columns)
results.Add(column, new List<string>());
foreach (string bodyLine in processDataStandardFormat.Body)
{
segments = bodyLine.Split('\t');
for (int c = 1; c < segments.Length; c++)
{
if (c >= processDataStandardFormat.Columns.Count)
continue;
results[processDataStandardFormat.Columns[c]].Add(segments[c]);
}
}
return results;
}
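// Note: results gets a key for every column, but the copy loop starts at c = 1, so the first column
// (Time in this format) keeps an empty list; e.g. columns ["Time", "A"] with body row "0.1\tx"
// yields { "Time": [], "A": ["x"] }.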
internal static JsonElement[] GetArray(ProcessDataStandardFormat processDataStandardFormat, bool lookForNumbers = false)
{
JsonElement[] results;
if (processDataStandardFormat.Body.Count == 0 || !processDataStandardFormat.Body[0].Contains('\t'))
results = JsonSerializer.Deserialize("[]", JsonElementCollectionSourceGenerationContext.Default.JsonElementArray) ?? throw new Exception();
else
{
string value;
string[] segments;
List<string> lines = new();
StringBuilder stringBuilder = new();
foreach (string bodyLine in processDataStandardFormat.Body)
{
_ = stringBuilder.Clear();
_ = stringBuilder.Append('{');
segments = bodyLine.Trim().Split('\t');
if (!lookForNumbers)
{
for (int c = 1; c < segments.Length; c++)
{
value = segments[c].Replace("\\", "\\\\").Replace("\"", "\\\""); // escape backslashes before quotes so the generated JSON stays valid
_ = stringBuilder.Append('"').Append(processDataStandardFormat.Columns[c]).Append("\":\"").Append(value).Append("\",");
}
}
else
{
for (int c = 1; c < segments.Length; c++)
{
value = segments[c].Replace("\\", "\\\\").Replace("\"", "\\\"");
if (string.IsNullOrEmpty(value))
_ = stringBuilder.Append('"').Append(processDataStandardFormat.Columns[c]).Append("\":").Append(value).Append("null,");
else if (value.All(char.IsDigit))
_ = stringBuilder.Append('"').Append(processDataStandardFormat.Columns[c]).Append("\":").Append(value).Append(',');
else
_ = stringBuilder.Append('"').Append(processDataStandardFormat.Columns[c]).Append("\":\"").Append(value).Append("\",");
}
}
_ = stringBuilder.Remove(stringBuilder.Length - 1, 1);
_ = stringBuilder.AppendLine("}");
lines.Add(stringBuilder.ToString());
}
string json = $"[{string.Join(",", lines)}]";
results = JsonSerializer.Deserialize<JsonElement[]>(json) ?? throw new Exception();
}
return results;
}
internal static string GetPDSFText(IFileRead fileRead, Logistics logistics, JsonElement[] jsonElements, string logisticsText)
{
string result;
if (jsonElements.Length == 0)
result = string.Empty;
else
{
int columns = 0;
List<string> lines;
string endOffset = "E#######T";
string dataOffset = "D#######T";
string headerOffset = "H#######T";
string format = "MM/dd/yyyy HH:mm:ss";
StringBuilder stringBuilder = new();
lines = new string[] { "HEADER_TAG\tHEADER_VALUE", "FORMAT\t2.00", "NUMBER_PASSES\t0001", string.Concat("HEADER_OFFSET\t", headerOffset), string.Concat("DATA_OFFSET\t", dataOffset), string.Concat("END_OFFSET\t", endOffset) }.ToList();
_ = stringBuilder.Append("\"Time\"").Append('\t');
_ = stringBuilder.Append("\"A_LOGISTICS\"").Append('\t');
_ = stringBuilder.Append("\"B_LOGISTICS\"").Append('\t');
foreach (JsonProperty jsonProperty in jsonElements[0].EnumerateObject())
{
    columns += 1;
    _ = stringBuilder.Append('"').Append(jsonProperty.Name).Append('"').Append('\t');
}
_ = stringBuilder.Remove(stringBuilder.Length - 1, 1);
lines.Add(stringBuilder.ToString());
for (int i = 0; i < jsonElements.Length; i++)
{
_ = stringBuilder.Clear();
_ = stringBuilder.Append("0.1").Append('\t');
_ = stringBuilder.Append('1').Append('\t');
_ = stringBuilder.Append('2').Append('\t');
foreach (JsonProperty jsonProperty in jsonElements[i].EnumerateObject())
_ = stringBuilder.Append(jsonProperty.Value).Append('\t');
_ = stringBuilder.Remove(stringBuilder.Length - 1, 1);
lines.Add(stringBuilder.ToString());
}
lines.Add(string.Concat("NUM_DATA_ROWS ", jsonElements.Length.ToString().PadLeft(9, '0')));
lines.Add(string.Concat("NUM_DATA_COLUMNS ", (columns + 3).ToString().PadLeft(9, '0')));
lines.Add("DELIMITER ;");
lines.Add(string.Concat("START_TIME_FORMAT ", format));
lines.Add(string.Concat("START_TIME ", logistics.DateTimeFromSequence.ToString(format))); //12/26/2019 15:22:44
lines.Add(string.Concat("LOGISTICS_COLUMN", '\t', "A_LOGISTICS"));
lines.Add(string.Concat("LOGISTICS_COLUMN", '\t', "B_LOGISTICS"));
if (!string.IsNullOrEmpty(logisticsText))
lines.Add(logisticsText);
else
{
lines.Add(string.Concat("LOGISTICS_1", '\t', "A_CHAMBER=;A_INFO=", fileRead.EventName, ";A_INFO2=", fileRead.EquipmentType, ";A_JOBID=", fileRead.CellInstanceName, ";A_MES_ENTITY=", fileRead.MesEntity, ";A_MID=", logistics.MID, ";A_NULL_DATA=", fileRead.NullData, ";A_PPID=NO_PPID;A_PROCESS_JOBID=", logistics.ProcessJobID, ";A_PRODUCT=;A_SEQUENCE=", logistics.Sequence, ";A_WAFER_ID=;"));
lines.Add(string.Concat("LOGISTICS_2", '\t', "B_CHAMBER=;B_INFO=", fileRead.EventName, ";B_INFO2=", fileRead.EquipmentType, ";B_JOBID=", fileRead.CellInstanceName, ";B_MES_ENTITY=", fileRead.MesEntity, ";B_MID=", logistics.MID, ";B_NULL_DATA=", fileRead.NullData, ";B_PPID=NO_PPID;B_PROCESS_JOBID=", logistics.ProcessJobID, ";B_PRODUCT=;B_SEQUENCE=", logistics.Sequence, ";B_WAFER_ID=;"));
lines.Add("END_HEADER");
}
_ = stringBuilder.Clear();
foreach (string line in lines)
_ = stringBuilder.AppendLine(line);
result = stringBuilder.ToString();
result = result.Replace(headerOffset, result.IndexOf("NUM_DATA_ROWS").ToString().PadLeft(9, '0')).
Replace(dataOffset, result.IndexOf('"').ToString().PadLeft(9, '0')).
Replace(endOffset, result.Length.ToString().PadLeft(9, '0'));
}
return result;
}
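// The "#######T" placeholders are patched after the text is built: HEADER_OFFSET becomes the character
// index of "NUM_DATA_ROWS", DATA_OFFSET the index of the first '"' (the column-name row), and
// END_OFFSET the total text length, each left-padded to nine digits.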
internal static Tuple<string, Dictionary<Test, Dictionary<string, List<string>>>> GetTestDictionary(ProcessDataStandardFormat processDataStandardFormat)
{
Dictionary<Test, Dictionary<string, List<string>>> results = new();
List<string>? collection;
string testColumn = nameof(Test);
Dictionary<string, List<string>> keyValuePairs = GetDictionary(processDataStandardFormat);
if (!keyValuePairs.TryGetValue(testColumn, out collection))
throw new Exception();
int min;
int max;
Test testKey;
List<string> vs;
string columnKey;
Dictionary<Test, List<int>> tests = new();
for (int i = 0; i < collection.Count; i++)
{
if (Enum.TryParse(collection[i], out Test test))
{
if (!results.ContainsKey(test))
{
tests.Add(test, new List<int>());
results.Add(test, new Dictionary<string, List<string>>());
}
tests[test].Add(i);
}
}
foreach (KeyValuePair<Test, List<int>> testKeyValuePair in tests)
{
testKey = testKeyValuePair.Key;
min = testKeyValuePair.Value.Min();
max = testKeyValuePair.Value.Max() + 1;
foreach (KeyValuePair<string, List<string>> keyValuePair in keyValuePairs)
results[testKey].Add(keyValuePair.Key, new List<string>());
foreach (KeyValuePair<string, List<string>> keyValuePair in keyValuePairs)
{
vs = keyValuePair.Value;
columnKey = keyValuePair.Key;
for (int i = min; i < max; i++)
{
if (vs.Count > i)
results[testKey][columnKey].Add(vs[i]);
else
results[testKey][columnKey].Add(string.Empty);
}
}
}
return new Tuple<string, Dictionary<Test, Dictionary<string, List<string>>>>(processDataStandardFormat.Logistics[0], results);
}
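// Example (hypothetical Test values): if the Test column parses to [A, A, B], rows 0-1 land under key A
// and row 2 under key B; each test's dictionary is filled from the contiguous row range min..max of its indices.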
internal static string GetLines(Logistics logistics, Properties.IScopeInfo scopeInfo, List<string> names, Dictionary<string, List<string>> keyValuePairs, string dateFormat, string timeFormat, List<string> pairedParameterNames, bool useDateTimeFromSequence = true, string format = "", List<string>? ignoreParameterNames = null)
{
StringBuilder result = new();
ignoreParameterNames ??= new List<string>();
if (useDateTimeFromSequence && !string.IsNullOrEmpty(format))
throw new Exception();
else if (!useDateTimeFromSequence && string.IsNullOrEmpty(format))
throw new Exception();
string? nullData;
const string columnDate = "Date";
const string columnTime = "Time";
const string firstDuplicate = "_1";
_ = result.AppendLine(scopeInfo.Header);
StringBuilder line = new();
if (logistics.NullData is null)
nullData = string.Empty;
else
nullData = logistics.NullData.ToString();
int count = (from l in keyValuePairs select l.Value.Count).Min();
for (int r = 0; r < count; r++)
{
_ = line.Clear();
_ = line.Append('!');
foreach (KeyValuePair<string, List<string>> keyValuePair in keyValuePairs)
{
if (!names.Contains(keyValuePair.Key))
continue;
if (ignoreParameterNames.Contains(keyValuePair.Key))
continue;
if (pairedParameterNames.Contains(keyValuePair.Key))
{
if (string.IsNullOrEmpty(keyValuePair.Value[r]) || keyValuePair.Value[r] == nullData)
continue;
else
_ = result.Append(line).Append(keyValuePair.Key).Append(';').AppendLine(keyValuePair.Value[r]);
}
else
{
if (useDateTimeFromSequence && keyValuePair.Key == columnDate)
_ = line.Append(logistics.DateTimeFromSequence.ToString(dateFormat));
else if (useDateTimeFromSequence && keyValuePair.Key == columnTime)
_ = line.Append(logistics.DateTimeFromSequence.ToString(timeFormat));
else if (!useDateTimeFromSequence && keyValuePair.Key == columnDate && keyValuePair.Value[r].Length == format.Length)
_ = line.Append(DateTime.ParseExact(keyValuePair.Value[r], format, CultureInfo.InvariantCulture).ToString(dateFormat));
else if (!useDateTimeFromSequence && keyValuePair.Key == columnTime && keyValuePairs.ContainsKey(string.Concat(keyValuePair.Key, firstDuplicate)) && keyValuePairs[string.Concat(keyValuePair.Key, firstDuplicate)][r].Length == format.Length)
_ = line.Append(DateTime.ParseExact(keyValuePairs[string.Concat(keyValuePair.Key, firstDuplicate)][r], format, CultureInfo.InvariantCulture).ToString(timeFormat));
else if (string.IsNullOrEmpty(keyValuePair.Value[r]) || keyValuePair.Value[r] == nullData)
_ = line.Append(nullData);
else
_ = line.Append(keyValuePair.Value[r]);
_ = line.Append(';');
}
}
if (pairedParameterNames.Count == 0)
{
_ = line.Remove(line.Length - 1, 1);
_ = result.AppendLine(line.ToString());
}
}
return result.ToString();
}
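// Each emitted row starts with '!' and is ';'-delimited, e.g. (illustrative):
//   !02/20/2025;11:50:00;3.962
// When paired parameter names are present, each non-null paired value is written as its own line
// (the row prefix built so far, then name;value) and the bare row line itself is not emitted.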
private static string GetString(SearchFor searchFor, bool addSpaces, char separator = ' ')
{
if (!addSpaces)
return string.Concat(((int)searchFor).ToString().PadLeft(2, '0'), searchFor);
else
return string.Concat(((int)searchFor).ToString().PadLeft(2, '0'), separator, searchFor.ToString().Replace("In", string.Concat(separator, "In")).Replace("Ex", string.Concat(separator, "Ex")));
}
private static int? TryGetPropertyIndex(JsonProperty[] jsonProperties, string propertyName)
{
int? result = null;
for (int i = 0; i < jsonProperties.Length; i++)
{
if (jsonProperties[i].Name != propertyName)
continue;
result = i;
break;
}
if (result is null)
{
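// Fallback scan: cheap first-character and length checks before the full name comparison;
// the final equality test makes this pass redundant with the exact match above.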
for (int i = 0; i < jsonProperties.Length; i++)
{
if (jsonProperties[i].Name[0] != propertyName[0])
continue;
if (jsonProperties[i].Name.Length != propertyName.Length)
continue;
if (jsonProperties[i].Name != propertyName)
continue;
result = i;
break;
}
}
return result;
}
}
[JsonSourceGenerationOptions(WriteIndented = true)]
[JsonSerializable(typeof(JsonElement[]))]
internal partial class JsonElementCollectionSourceGenerationContext : JsonSerializerContext
{
}

View File

@ -0,0 +1,33 @@
using System.Collections.ObjectModel;
namespace Adaptation.Shared;
public class ProcessDataStandardFormatMapping
{
public ReadOnlyCollection<string> BackfillColumns { get; private set; }
public ReadOnlyCollection<int> ColumnIndices { get; private set; }
public ReadOnlyCollection<string> IgnoreColumns { get; private set; }
public ReadOnlyCollection<string> IndexOnlyColumns { get; private set; }
public ReadOnlyDictionary<string, string> KeyValuePairs { get; private set; }
public ReadOnlyCollection<string> NewColumnNames { get; private set; }
public ReadOnlyCollection<string> OldColumnNames { get; private set; }
public ProcessDataStandardFormatMapping(ReadOnlyCollection<string> backfillColumns,
ReadOnlyCollection<int> columnIndices,
ReadOnlyCollection<string> ignoreColumns,
ReadOnlyCollection<string> indexOnlyColumns,
ReadOnlyDictionary<string, string> keyValuePairs,
ReadOnlyCollection<string> newColumnNames,
ReadOnlyCollection<string> oldColumnNames)
{
BackfillColumns = backfillColumns;
ColumnIndices = columnIndices;
IgnoreColumns = ignoreColumns;
IndexOnlyColumns = indexOnlyColumns;
KeyValuePairs = keyValuePairs;
NewColumnNames = newColumnNames;
OldColumnNames = oldColumnNames;
}
}
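// A minimal usage sketch (all column names hypothetical):
//   ProcessDataStandardFormatMapping mapping = new(
//       backfillColumns: new(Array.Empty<string>()),
//       columnIndices: new(new[] { 0, -1, 2 }),
//       ignoreColumns: new(Array.Empty<string>()),
//       indexOnlyColumns: new(Array.Empty<string>()),
//       keyValuePairs: new(new Dictionary<string, string>()),
//       newColumnNames: new(new[] { "Time", "Lot", "Thickness" }),
//       oldColumnNames: new(new[] { "Time", "Lot", "Thickness" }));
// A -1 in columnIndices marks an old column with no counterpart in the new file.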

View File

@ -59,7 +59,7 @@ public class BIORAD2
NonThrowTryCatch();
}
#if (!DEBUG)
[Ignore]
#endif
[TestMethod]

File diff suppressed because it is too large

View File

@ -0,0 +1,258 @@
"use strict";
// getValue($('gv.thicknessPoints', ''), $('dcp.BIORAD2/csv/Index', '0'));
function getCollectionParseFloat(collection) {
let result = [];
let value;
for (let i = 0; i < collection.length; i++) {
value = parseFloat(collection[i]);
result.push(value);
}
return result;
}
function getSum(collection) {
let result = 0;
if (!collection || collection.length === 0) {
result = 0;
}
else {
for (let i = 0; i < collection.length; i++) {
result += collection[i];
}
}
return result;
}
function getAverage(collection) {
let result = null;
if (collection == null || collection.length === 0)
result = 0;
else {
let sum = getSum(collection);
result = sum / collection.length;
}
return result;
}
function getValue39(thicknessPoints, index) {
let result = null;
if (index === 13) {
if (thicknessPoints != undefined && thicknessPoints.length > 1) {
let collection = thicknessPoints[0] === '|' ? thicknessPoints.substring(1).split('|') : thicknessPoints.split('|');
let collectionParseFloat = getCollectionParseFloat(collection);
// single-level arrays: the extra nesting previously made getSum concatenate rather than add
let thicknessFourteen3mmEdgeMean = getAverage([collectionParseFloat[10], collectionParseFloat[11], collectionParseFloat[12], collectionParseFloat[13]]);
let thicknessFourteenMeanFrom = getAverage([collectionParseFloat[1], collectionParseFloat[2], collectionParseFloat[6], collectionParseFloat[7]]);
result = (thicknessFourteen3mmEdgeMean - thicknessFourteenMeanFrom) / thicknessFourteenMeanFrom * 100;
}
}
return result;
}
function getVariance(collection) {
let result = null;
if (collection == null || collection.length === 0)
result = null;
else {
let variance = 0;
let t = collection[0];
for (let i = 1; i < collection.length; i++) {
t += collection[i];
const diff = ((i + 1) * collection[i]) - t;
variance += diff * diff / ((i + 1.0) * i);
}
result = variance / (collection.length - 1);
}
return result;
}
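// e.g. getVariance([1, 2, 3, 4]) returns 5 / 3 (~1.6667), the sample variance with an (n - 1) denominator,
// computed incrementally in a single pass.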
// $('gv.thicknessPoints', '') + '|' + $('dcp.BIORAD2/csv/Thickness', '')
// $('gv.thicknessPoints', '') + '|' + $('dcp.BIORAD3/csv/Thickness', '')
// $('gv.thicknessPoints', '') + '|' + $('dcp.BIORAD4/csv/Thickness', '')
// $('gv.thicknessPoints', '') + '|' + $('dcp.BIORAD5/b-csv/Thickness', '')
// \\mesfs.infineon.com\EC_Characterization_Si\Archive\BIORAD2\2025_Week_08\2025-02-20\64-659712-4626_2025-02-20_11;50_AM_5144331401\638756490128318288
// 0 1 2 3 4 5 6 7 8 9 10 11 12 13
// 1 2 3 4 5 6 7 8 9 10 11 12 13 14
const thicknessPoints = getCollectionParseFloat('|4.022|3.952|3.936|3.971|3.954|3.976|3.949|3.906|3.967|3.995|3.997|3.932|3.766|3.890'.substring(1).split('|'));
const thicknessTenPoints = thicknessPoints.slice(0, 10);
const thicknessFourteenCriticalPointsAverage = getAverage(thicknessTenPoints); // 15 // *3.962799999999999
const thicknessFourteenCriticalPointsStdDev = Math.sqrt(getVariance(thicknessTenPoints)); // 16 // *0.0318496467798311
const thicknessFourteenCenterMean = thicknessPoints[4]; // 17 // 3.954
const thicknessFourteenMeanFrom = getAverage([thicknessPoints[1], thicknessPoints[2], thicknessPoints[6], thicknessPoints[7]]); // 18 // *3.954
const thicknessFourteen5mmEdgeMean = getAverage([thicknessPoints[0], thicknessPoints[9]]); // 19 // *4.0085
const thicknessFourteen3mmEdgeMean = getAverage([thicknessPoints[10], thicknessPoints[11], thicknessPoints[12], thicknessPoints[13]]); // 20 // *3.89625
const thicknessFourteen5mmEdgePercent = (thicknessFourteen5mmEdgeMean - thicknessFourteenMeanFrom) / thicknessFourteenMeanFrom * 100; // 21 // *1.848440576764267
const thicknessFourteen3mmEdgePercent = (thicknessFourteen3mmEdgeMean - thicknessFourteenMeanFrom) / thicknessFourteenMeanFrom * 100; // 22 // *-1.0036206567998442
console.log(thicknessFourteenCriticalPointsAverage);
// getValue($('dcp.BIORAD2/csv/Batch', ''), $('dcp.BIORAD2/csv/Wafer', ''));
function getSlot92(wafer) {
let result = null;
if (wafer.length !== 1 && wafer.length !== 2)
result = null;
else {
let slot = parseInt(wafer);
if (slot < 1 || slot > 27)
result = null;
else
result = slot < 10 ? '0' + slot : slot;
}
return result;
}
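// e.g. getSlot92('7') -> '07', getSlot92('27') -> 27, getSlot92('28') -> null (outside 1-27),
// getSlot92('27-588493-5008') -> null (not a 1-2 character wafer value).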
function getBatch(batch, jobId, wafer) {
let result = null;
const slot = getSlot92(wafer);
result = slot == null ? jobId : batch;
return result;
}
function getWafer(wafer) {
let result = null;
const slot = getSlot92(wafer);
result = slot == null ? wafer : slot;
return result;
}
function getValue120(batch, wafer) {
let result = null;
const slot = getSlot92(wafer);
const value = slot == null ? wafer : batch;
result = value.replace(/[\{\}\\\/\:\*\?\"\<\>\|]/g, '_');
return result;
}
const values128 = [
{ wafer: '11', jobId: 'BIORAD2', batch: 'O172068.1.60', waferCheck: '11', batchCheck: 'O172068.1.60', mid: 'O172068.1.60' },
{ wafer: '27-588493-5008', jobId: 'BIORAD2', batch: 'BIORAD#2', waferCheck: '27-588493-5008', batchCheck: 'BIORAD2', mid: '27-588493-5008' },
{ wafer: '27-588493-5"008', jobId: 'BIORAD2', batch: 'BIORAD#2', waferCheck: '27-588493-5008', batchCheck: 'BIORAD2', mid: '27-588493-5_008' },
];
const wafer = "11";
const jobId = "BIORAD2";
const batch = "O172068.1.60";
const waferCheck = getWafer(wafer);
const batchCheck = getBatch(batch, jobId, wafer);
const mid = getValue120(batch, wafer);
console.log(mid);
values128.forEach(element => {
let mid = getValue120(element.batch, element.wafer);
if (mid != element.mid)
console.error("MID doesn't match!");
else
console.info('Match');
});
// '{"Area": "' +
// 'Si' +
// '", "EquipmentType": "' +
// 'MET08THFTIRQS408M' +
// '", "MesEntity": "' +
// $('dcp.BIORAD2/csv/MesEntity', '') +
// '", "Sequence": "' +
// $('dcp.BIORAD2/csv/Sequence', '') +
// '", "MID": "' +
// getValue($('dcp.BIORAD2/csv/Batch', ''), $('dcp.BIORAD2/csv/Wafer', '')) +
// '", "Recipe": "' +
// $('dcp.BIORAD2/csv/Recipe', '').split('"')[0].split('\\')[0] +
// '"}';
function getSlot164(wafer) {
let result = null;
if (wafer.length !== 1 && wafer.length !== 2)
result = null;
else {
let slot = parseInt(wafer);
if (slot < 1 || slot > 27)
result = null;
else
result = slot < 10 ? '0' + slot : slot;
}
return result;
}
function getValue178(batch, wafer) {
let result = null;
const slot = getSlot164(wafer);
const value = slot == null ? wafer : batch;
result = value.replace(/[\{\}\\\/\:\*\?\"\<\>\|]/g, '_');
return result;
}
// getValue(self, $('dcp.BIORAD2/csv/Wafer', ''), getContextData('2', 'cds.MID', ''));
function getSlot188(wafer) {
let result = null;
if (wafer.length !== 1 && wafer.length !== 2)
result = null;
else {
let slot = parseInt(wafer);
if (slot < 1 || slot > 27)
result = null;
else
result = slot < 10 ? '0' + slot : slot;
}
return result;
}
function getValue202(batch, wafer, mid) {
let result = null;
const slot = getSlot188(wafer);
result = slot == null ? batch : mid;
return result;
}
const values210 = [
{ wafer: '11', jobId: 'BIORAD2', batch: 'O172068.1.60', mid: '*O172068.1.60', result: '*O172068.1.60' },
{ wafer: '27-588493-5008', jobId: 'BIORAD2', batch: 'BIORAD#2', mid: '*588493', result: 'BIORAD#2' },
{ wafer: '27-588493-5"008', jobId: 'BIORAD2', batch: 'BIORAD#2', mid: '*588493', result: 'BIORAD#2' },
];
values210.forEach(element => {
let result = getValue202(element.batch, element.wafer, element.mid);
if (result != element.result)
console.error("result doesn't match!");
else
console.info('Match');
});
// getValue(self, getContextData('2', 'cds.MID', ''));
function getSlot(wafer) {
let result = null;
if (wafer.length !== 1 && wafer.length !== 2)
result = null;
else {
let slot = parseInt(wafer);
if (slot < 1 || slot > 27)
result = null;
else
result = slot < 10 ? '0' + slot : slot;
}
return result;
}
function getValue(wafer, mid) {
let result = null;
const slot = getSlot(wafer);
result = slot == null ? mid : wafer;
return result;
}
const values246 = [
{ wafer: '11', jobId: 'BIORAD2', batch: 'O172068.1.60', mid: '*O172068.1.60', result: '11' },
{ wafer: '27-588493-5008', jobId: 'BIORAD2', batch: 'BIORAD#2', mid: '*588493', result: '*588493' },
{ wafer: '27-588493-5"008', jobId: 'BIORAD2', batch: 'BIORAD#2', mid: '*588493', result: '*588493' },
];
values246.forEach(element => {
let result = getValue(element.wafer, element.mid);
if (result != element.result)
console.error("result doesn't match!");
else
console.info('Match');
});

View File

@ -160,6 +160,7 @@
<Compile Include="Adaptation\Shared\Metrology\WS.Results.cs" />
<Compile Include="Adaptation\Shared\ParameterType.cs" />
<Compile Include="Adaptation\Shared\ProcessDataStandardFormat.cs" />
<Compile Include="Adaptation\Shared\ProcessDataStandardFormatMapping.cs" />
<Compile Include="Adaptation\Shared\Properties\IDescription.cs" />
<Compile Include="Adaptation\Shared\Properties\IFileRead.cs" />
<Compile Include="Adaptation\Shared\Properties\ILogistics.cs" />

View File

@ -1,20 +1,30 @@
# ReadMe

## Introduction

MET08THFTIRQS408M EAF adaptation

## Getting Started

1. Framework - MET08THFTIRQS408M.csproj
2. dotnet core - Adaptation\MET08THFTIRQS408M.Tests.csproj
3. https://tfs.intra.infineon.com/tfs/ManufacturingIT/_packaging/eaf/nuget/v3/index.json
4. https://packagemanagement.eu.infineon.com:4430/packages

## Build and Test

See Adaptation\_Tests for dotnet core tests

## TFS

```
$/MIT_EAF_Adaptations/Trunk/MET08THFTIRQS408M/06_SourceCode
```

## Last TFS Changeset

303349 Phares Mike (IFAM IT FI MES) 2/1/2022 6:06:25 PM MET08THFTIRQS408M - Move solution for Azure DevOps git

## Git

git@tfs.intra.infineon.com:22/tfs/ManufacturingIT/Mesa_FI/_git/MET08THFTIRQS408M