Preparation to switch to xml for InfinityQS export
@@ -88,9 +88,9 @@ public class FileRead : Shared.FileRead, IFileRead
         string processDataStandardFormatMappingOldColumnNames = GetPropertyValue(cellInstanceConnectionName, modelObjectParameters, "Process.Data.Standard.Format.Mapping.Old.Column.Names");
         string processDataStandardFormatMappingNewColumnNames = GetPropertyValue(cellInstanceConnectionName, modelObjectParameters, "Process.Data.Standard.Format.Mapping.New.Column.Names");
         string processDataStandardFormatMappingColumnIndices = GetPropertyValue(cellInstanceConnectionName, modelObjectParameters, "Process.Data.Standard.Format.Mapping.Column.Indices");
-        _ProcessDataStandardFormatMapping = GetProcessDataStandardFormatMapping(processDataStandardFormatMappingOldColumnNames,
-                                                                                processDataStandardFormatMappingNewColumnNames,
-                                                                                processDataStandardFormatMappingColumnIndices);
+        _ProcessDataStandardFormatMapping = ProcessDataStandardFormatMapping.Get(processDataStandardFormatMappingOldColumnNames,
+                                                                                 processDataStandardFormatMappingNewColumnNames,
+                                                                                 processDataStandardFormatMappingColumnIndices);
     }

     void IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, Exception exception)
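
Not part of the commit, but for orientation: the only change in the hunk above is the call site. Building the mapping moves out of FileRead and into the shared ProcessDataStandardFormatMapping type itself (the private helper it replaces is deleted in the next hunk). A minimal sketch of what the relocated factory could look like, assuming it simply absorbs the removed helper's string parsing; the record shape below is illustrative only and omits the per-instrument ignore/backfill/keyValuePairs constants:

using System.Collections.ObjectModel;
using System.Linq;

// Sketch only (not part of this diff): a stand-in for the shared type, assuming its
// static Get keeps the contract of the removed FileRead.GetProcessDataStandardFormatMapping:
// comma-separated old names, comma-separated new names, comma-separated indices (-1 = unmapped).
public record ProcessDataStandardFormatMapping(ReadOnlyCollection<string> OldColumnNames,
                                               ReadOnlyCollection<string> NewColumnNames,
                                               ReadOnlyCollection<int> ColumnIndices)
{
    public static ProcessDataStandardFormatMapping Get(string oldColumnNames, string newColumnNames, string columnIndices) =>
        new(OldColumnNames: new(oldColumnNames.Split(',')),
            NewColumnNames: new(newColumnNames.Split(',')),
            ColumnIndices: new(columnIndices.Split(',').Select(int.Parse).ToArray()));
}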
@@ -169,46 +169,6 @@ public class FileRead : Shared.FileRead, IFileRead
         return results;
     }

-    private static ProcessDataStandardFormatMapping GetProcessDataStandardFormatMapping(string processDataStandardFormatMappingOldColumnNames, string processDataStandardFormatMappingNewColumnNames, string processDataStandardFormatMappingColumnIndices)
-    {
-        ProcessDataStandardFormatMapping result;
-        string[] segmentsB;
-        List<string> distinct = new();
-        Dictionary<string, string> keyValuePairs = new();
-        string args4 = "Time,HeaderUniqueId,UniqueId,Date,Wafer,Position,BIORAD4";
-        string args5 = ",BIORAD4";
-        string args6 = ",BIORAD4";
-        string args7 = "Test|EventId,Date|DateTime,Position|Slot,DeltaThicknessSlotsOneAndTwentyFive|Actual Delta Thick Pts 1 and 25,PercentDeltaThicknessSlotsOneAndTwentyFive|% Delta Thick Pts 1 and 25,MID|Cassette,Lot|Batch,Title|Batch,Wafer|Text,Thickness|Site,MeanThickness|GradeMean,|BIORAD4";
-        // string args8 = "Time,A_LOGISTICS,B_LOGISTICS,Test,Count,Index,MesEntity,MID,Date,Employee,Lot,PSN,Reactor,Recipe,Cassette,GradeStdDev,HeaderUniqueId,Layer,MeanThickness,PassFail,RDS,Slot,Title,UniqueId,Wafer,Zone,Mean,Position,StdDev,Thickness,ThicknessSlotOne,ThicknessSlotTwentyFive,DeltaThicknessSlotsOneAndTwentyFive,PercentDeltaThicknessSlotsOneAndTwentyFive";
-        // string args9 = "Time,A_LOGISTICS,B_LOGISTICS,Count,Sequence,MesEntity,Index,Batch,Cassette,DateTime,Destination,Mean,PassFail,Recipe,Reference,Site,Slot,Source,StdDev,Text,GradeMean,GradeStdDev,RDS,PSN,Reactor,Layer,Zone,Employee,InferredLot,Actual Delta Thick Pts 1 and 25,% Delta Thick Pts 1 and 25,EventId",
-        // string args10 = "0,1,2,31,3,6,5,8,9,27,7,23,24,13,8,21,-1,25,20,12,22,16,7,-1,19,26,11,16,18,15,-1,-1,29,30";
-        string[] segments = args7.Split(',');
-        ReadOnlyCollection<string> ignoreColumns = new(args4.Split(','));
-        ReadOnlyCollection<string> backfillColumns = new(args5.Split(','));
-        ReadOnlyCollection<string> indexOnlyColumns = new(args6.Split(','));
-        ReadOnlyCollection<string> newColumnNames = new(processDataStandardFormatMappingNewColumnNames.Split(','));
-        ReadOnlyCollection<string> oldColumnNames = new(processDataStandardFormatMappingOldColumnNames.Split(','));
-        ReadOnlyCollection<int> columnIndices = new(processDataStandardFormatMappingColumnIndices.Split(',').Select(int.Parse).ToArray());
-        foreach (string segment in segments)
-        {
-            segmentsB = segment.Split('|');
-            if (segmentsB.Length != 2)
-                continue;
-            if (distinct.Contains(segmentsB[0]))
-                continue;
-            distinct.Add(segmentsB[0]);
-            keyValuePairs.Add(segmentsB[0], segmentsB[1]);
-        }
-        result = new(backfillColumns: backfillColumns,
-                     columnIndices: columnIndices,
-                     newColumnNames: newColumnNames,
-                     ignoreColumns: ignoreColumns,
-                     indexOnlyColumns: indexOnlyColumns,
-                     keyValuePairs: new(keyValuePairs),
-                     oldColumnNames: oldColumnNames);
-        return result;
-    }
-
     private static ReadOnlyCollection<PreWith> GetPreWithCollection(ReadOnlyCollection<Pre> preCollection)
     {
         List<PreWith> results = new();
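
The column-rename table the removed helper builds is a comma-separated list of Old|New segments (args7 above). A tiny standalone sketch of that parse and the pairs it yields; the constant below is abridged from args7, everything else mirrors the removed loop:

using System;
using System.Collections.Generic;

// Sketch only (not part of this diff): how the "Old|New" segments become keyValuePairs
// in the removed helper. Segments without exactly one '|' are skipped, the first
// occurrence of a key wins, and the trailing "|BIORAD4" maps the empty column name.
class MappingSegmentsDemo
{
    static void Main()
    {
        const string args7 = "Test|EventId,Date|DateTime,Position|Slot,MID|Cassette,Lot|Batch,Title|Batch,Wafer|Text,Thickness|Site,MeanThickness|GradeMean,|BIORAD4";
        List<string> distinct = new();
        Dictionary<string, string> keyValuePairs = new();
        foreach (string segment in args7.Split(','))
        {
            string[] segmentsB = segment.Split('|');
            if (segmentsB.Length != 2)
                continue;
            if (distinct.Contains(segmentsB[0]))
                continue;
            distinct.Add(segmentsB[0]);
            keyValuePairs.Add(segmentsB[0], segmentsB[1]);
        }
        foreach (KeyValuePair<string, string> pair in keyValuePairs)
            Console.WriteLine($"\"{pair.Key}\" -> \"{pair.Value}\"");   // e.g. "Test" -> "EventId"
    }
}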
@@ -268,7 +228,7 @@ public class FileRead : Shared.FileRead, IFileRead
         }
     }

-    private static ReadOnlyCollection<Pre> GetPreCollection(int numberLength, string parentDirectory, ReadOnlyCollection<string> matchingFiles, bool mesEntityMatchesProcess)
+    private static ReadOnlyCollection<Pre> GetPreCollection(int numberLength, string parentDirectory, ReadOnlyCollection<string> matchingFiles)
     {
         List<Pre> results = new();
         Pre pre;
@@ -277,8 +237,6 @@ public class FileRead : Shared.FileRead, IFileRead
         foreach (string matchingFile in matchingFiles)
         {
             checkFile = $"{matchingFile[0]}{matchingFile.Substring(parentDirectoryLength + numberLength + 1)}";
-            if (mesEntityMatchesProcess)
-                checkFile = checkFile.Replace("MET08THFTIRSTRATUS", "MET08THFTIRQS408M");
             pre = new(matchingFile, checkFile);
             results.Add(pre);
         }
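
Taken together with the previous hunk, GetPreCollection no longer knows anything about the MES entity; the MET08THFTIRSTRATUS to MET08THFTIRQS408M rename is simply gone. A sketch of the post-change method assembled from the context lines in this diff (the checkFile and parentDirectoryLength declarations sit in elided lines, so their initialization here is an assumption):

// Sketch only (not part of this diff): GetPreCollection after the mesEntityMatchesProcess
// flag is dropped. Assumes parentDirectoryLength is parentDirectory.Length and that
// Pre(matchingFile, checkFile) is the existing record consumed by GetPreWithCollection.
private static ReadOnlyCollection<Pre> GetPreCollection(int numberLength, string parentDirectory, ReadOnlyCollection<string> matchingFiles)
{
    List<Pre> results = new();
    Pre pre;
    string checkFile;
    int parentDirectoryLength = parentDirectory.Length;
    foreach (string matchingFile in matchingFiles)
    {
        // Keep the first character of the path and drop the parent directory plus the
        // numeric segment; no instrument-name rewrite happens here any more.
        checkFile = $"{matchingFile[0]}{matchingFile.Substring(parentDirectoryLength + numberLength + 1)}";
        pre = new(matchingFile, checkFile);
        results.Add(pre);
    }
    return new(results);
}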
@@ -315,8 +273,13 @@ public class FileRead : Shared.FileRead, IFileRead
                 continue;
             if (!_StaticRuns.TryGetValue(_Logistics.Sequence, out List<Shared.Metrology.WS.Results>? wsResults))
                 wsResults = null;
-            ProcessDataStandardFormat.Write(preWith.CheckFile, processDataStandardFormat, wsResults);
-            File.Delete(preWith.MatchingFile);
+            if (processDataStandardFormat.InputPDSF is null)
+                File.Move(preWith.MatchingFile, preWith.CheckFile);
+            else
+            {
+                ProcessDataStandardFormat.Write(preWith.CheckFile, processDataStandardFormat, wsResults);
+                File.Delete(preWith.MatchingFile);
+            }
             if (Directory.Exists(preWith.NoWaitDirectory))
             {
                 post = new(preWith.CheckFile, preWith.ErrFile);
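
The behavioral change above: when the ProcessDataStandardFormat instance has no InputPDSF (presumably meaning it was not built from an existing PDSF file), the matching file is now moved to the check-file path untouched; only when InputPDSF is present is the file re-serialized via Write and the source deleted. A sketch of that decision pulled out into a helper, assuming only the members visible in the hunk:

// Sketch only (not part of this diff): the new branch from MoveCollection, isolated.
// Assumes ProcessDataStandardFormat.InputPDSF and the static Write overload shown above;
// wsResults is null when _StaticRuns holds nothing for the current sequence.
private static void MoveOrWrite(PreWith preWith, ProcessDataStandardFormat processDataStandardFormat, List<Shared.Metrology.WS.Results>? wsResults)
{
    if (processDataStandardFormat.InputPDSF is null)
        File.Move(preWith.MatchingFile, preWith.CheckFile);   // nothing to re-serialize, just relocate
    else
    {
        ProcessDataStandardFormat.Write(preWith.CheckFile, processDataStandardFormat, wsResults);
        File.Delete(preWith.MatchingFile);                    // source replaced by the rewritten file
    }
}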
@@ -368,10 +331,7 @@ public class FileRead : Shared.FileRead, IFileRead
             { CreatePointerFile(numberLength, parentParentDirectory, matchingFiles); }
             catch (Exception) { }
         }
-        JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(processDataStandardFormat);
-        List<Stratus.Description> descriptions = Stratus.ProcessData.GetDescriptions(jsonElements);
-        bool mesEntityMatchesProcess = descriptions.Count > 0 && descriptions[0].MesEntity == descriptions[0].Reactor;
-        ReadOnlyCollection<Pre> preCollection = GetPreCollection(numberLength, parentParentDirectory, matchingFiles, mesEntityMatchesProcess);
+        ReadOnlyCollection<Pre> preCollection = GetPreCollection(numberLength, parentParentDirectory, matchingFiles);
         ReadOnlyCollection<PreWith> preWithCollection = GetPreWithCollection(preCollection);
         MoveCollection(dateTime, processDataStandardFormat, preWithCollection);
         return results;