Compare commits
7 Commits: 4819d17819 ... 40dda0c5fc

Commits in this range (SHA1):
40dda0c5fc
2b7573b33d
954cdf7a26
e2c40bcec4
3e9fd6224e
3966b75da7
1ae00ffd41
@@ -122,6 +122,7 @@ dotnet_diagnostic.IDE0290.severity = none # Use primary constructor
dotnet_diagnostic.IDE0300.severity = none # IDE0300: Collection initialization can be simplified
dotnet_diagnostic.IDE0301.severity = none #IDE0301: Collection initialization can be simplified
dotnet_diagnostic.IDE0305.severity = none # IDE0305: Collection initialization can be simplified
+ dotnet_diagnostic.MSTEST0037.severity = error # MSTEST0037: Use proper 'Assert' methods
dotnet_diagnostic.SYSLIB1045.severity = none # SYSLIB1045: diagnostics for regex source generation
dotnet_naming_rule.abstract_method_should_be_pascal_case.severity = warning
dotnet_naming_rule.abstract_method_should_be_pascal_case.style = pascal_case
20  Adaptation/.vscode/tasks.json (vendored)
@@ -92,6 +92,26 @@
"command": "code ../MET08DDUPSFS6420.csproj",
"problemMatcher": []
},
+ {
+ "label": "Readme",
+ "type": "shell",
+ "command": "code ../README.md",
+ "problemMatcher": []
+ },
+ {
+ "label": "File-Folder-Helper AOT s X Day-Helper-2025-03-20",
+ "type": "shell",
+ "command": "L:/DevOps/Mesa_FI/File-Folder-Helper/bin/Release/net8.0/win-x64/publish/File-Folder-Helper.exe",
+ "args": [
+ "s",
+ "X",
+ "L:/DevOps/EAF-Mesa-Integration/MET08DDUPSFS6420",
+ "Day-Helper-2025-03-20",
+ "false",
+ "4"
+ ],
+ "problemMatcher": []
+ },
{
"label": "Git Config",
"type": "shell",
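The second added task wraps a console helper; assuming the VS Code shell task runner simply launches the command with the listed args (the usual behavior for "type": "shell" tasks), the equivalent command line would be:

L:/DevOps/Mesa_FI/File-Folder-Helper/bin/Release/net8.0/win-x64/publish/File-Folder-Helper.exe s X L:/DevOps/EAF-Mesa-Integration/MET08DDUPSFS6420 Day-Helper-2025-03-20 false 4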
@@ -14,7 +14,7 @@ namespace Adaptation.FileHandlers.APC;
public class FileRead : Shared.FileRead, IFileRead
{

- public FileRead(ISMTP smtp, Dictionary<string, string> fileParameter, string cellInstanceName, int? connectionCount, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList<ModelObjectParameterDefinition> modelObjectParameters, string equipmentDictionaryName, Dictionary<string, List<long>> dummyRuns, Dictionary<long, List<string>> staticRuns, bool useCyclicalForDescription, bool isEAFHosted) :
+ public FileRead(ISMTP smtp, Dictionary<string, string> fileParameter, string cellInstanceName, int? connectionCount, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList<ModelObjectParameterDefinition> modelObjectParameters, string equipmentDictionaryName, Dictionary<string, List<long>> dummyRuns, Dictionary<long, List<Shared.Metrology.WS.Results>> staticRuns, bool useCyclicalForDescription, bool isEAFHosted) :
base(new Description(), false, smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null)
{
_MinFileLength = 10;

@@ -120,15 +120,15 @@ public class FileRead : Shared.FileRead, IFileRead
private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
- Tuple<string, string[], string[]> pdsf = ProcessDataStandardFormat.GetLogisticsColumnsAndBody(reportFullPath);
- _Logistics = new Logistics(reportFullPath, pdsf.Item1);
+ ProcessDataStandardFormat processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath);
+ _Logistics = new Logistics(reportFullPath, processDataStandardFormat);
SetFileParameterLotIDToLogisticsMID();
- JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(pdsf);
+ JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(processDataStandardFormat);
List<Shared.Properties.IDescription> descriptions = GetDuplicatorDescriptions(jsonElements);
Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
FileCopy(reportFullPath, dateTime, descriptions);
- results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(pdsf.Item1, tests, jsonElements, new List<FileInfo>());
+ results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics[0]), tests, jsonElements, new List<FileInfo>());
return results;
}
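The same substitution recurs in the Archive, IQS, OpenInsight and MetrologyViewer handlers further down: the Tuple<string, string[], string[]> returned by GetLogisticsColumnsAndBody gives way to a ProcessDataStandardFormat instance. A minimal sketch of the new call pattern follows; the members used here (Logistics, Body) are inferred only from the calls visible in this diff, not confirmed against the Shared project source:

// Sketch of the repeated refactor; not a verbatim copy of any one handler.
ProcessDataStandardFormat processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath);
_Logistics = new Logistics(reportFullPath, processDataStandardFormat);                      // was: new Logistics(reportFullPath, pdsf.Item1)
JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(processDataStandardFormat); // was: GetArray(pdsf)
string headerText = string.Join(Environment.NewLine, processDataStandardFormat.Logistics[0]); // replaces pdsf.Item1 in the result tuple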
@@ -18,7 +18,7 @@ public class FileRead : Shared.FileRead, IFileRead
private readonly string _JobIdParentDirectory;
private readonly string _JobIdArchiveParentDirectory;

- public FileRead(ISMTP smtp, Dictionary<string, string> fileParameter, string cellInstanceName, int? connectionCount, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList<ModelObjectParameterDefinition> modelObjectParameters, string equipmentDictionaryName, Dictionary<string, List<long>> dummyRuns, Dictionary<long, List<string>> staticRuns, bool useCyclicalForDescription, bool isEAFHosted) :
+ public FileRead(ISMTP smtp, Dictionary<string, string> fileParameter, string cellInstanceName, int? connectionCount, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList<ModelObjectParameterDefinition> modelObjectParameters, string equipmentDictionaryName, Dictionary<string, List<long>> dummyRuns, Dictionary<long, List<Shared.Metrology.WS.Results>> staticRuns, bool useCyclicalForDescription, bool isEAFHosted) :
base(new Description(), false, smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null)
{
_MinFileLength = 10;

@@ -120,9 +120,10 @@ public class FileRead : Shared.FileRead, IFileRead
if (dateTime == DateTime.MinValue)
throw new ArgumentNullException(nameof(dateTime));
string logisticsSequence = _Logistics.Sequence.ToString();
+ string day = $"{_Logistics.DateTimeFromSequence:yyyy-MM-dd}";
string weekOfYear = _Calendar.GetWeekOfYear(_Logistics.DateTimeFromSequence, CalendarWeekRule.FirstDay, DayOfWeek.Sunday).ToString("00");
- string weekDirectory = $"{_Logistics.DateTimeFromSequence:yyyy}_Week_{weekOfYear}{@"\"}{_Logistics.DateTimeFromSequence:yyyy-MM-dd}";
- string destinationArchiveDirectory = Path.Combine(_JobIdArchiveParentDirectory, _Logistics.JobID, weekDirectory);
+ string weekDirectory = $"{_Logistics.DateTimeFromSequence:yyyy}_Week_{weekOfYear}";
+ string destinationArchiveDirectory = Path.Combine(_JobIdArchiveParentDirectory, _Logistics.JobID, weekDirectory, day);
if (!Directory.Exists(destinationArchiveDirectory))
_ = Directory.CreateDirectory(destinationArchiveDirectory);
string jobIdDirectory = Path.Combine(_JobIdParentDirectory, _Logistics.JobID);

@@ -144,15 +145,15 @@ public class FileRead : Shared.FileRead, IFileRead
private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
- Tuple<string, string[], string[]> pdsf = ProcessDataStandardFormat.GetLogisticsColumnsAndBody(reportFullPath);
- _Logistics = new Logistics(reportFullPath, pdsf.Item1);
+ ProcessDataStandardFormat processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath);
+ _Logistics = new Logistics(reportFullPath, processDataStandardFormat);
SetFileParameterLotIDToLogisticsMID();
- JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(pdsf);
+ JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(processDataStandardFormat);
List<Shared.Properties.IDescription> descriptions = GetDuplicatorDescriptions(jsonElements);
Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
MoveArchive(reportFullPath, dateTime);
- results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(pdsf.Item1, tests, jsonElements, new List<FileInfo>());
+ results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics[0]), tests, jsonElements, new List<FileInfo>());
return results;
}
@@ -9,7 +9,7 @@ namespace Adaptation.FileHandlers;
public class CellInstanceConnectionName
{

- internal static IFileRead Get(ISMTP smtp, Dictionary<string, string> fileParameter, string cellInstanceName, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList<ModelObjectParameterDefinition> modelObjectParameters, string equipmentDictionaryName, Dictionary<string, List<long>> dummyRuns, Dictionary<long, List<string>> staticRuns, bool useCyclicalForDescription, int? connectionCount)
+ internal static IFileRead Get(ISMTP smtp, Dictionary<string, string> fileParameter, string cellInstanceName, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList<ModelObjectParameterDefinition> modelObjectParameters, string equipmentDictionaryName, Dictionary<string, List<long>> dummyRuns, Dictionary<long, List<Shared.Metrology.WS.Results>> staticRuns, bool useCyclicalForDescription, int? connectionCount)
{
IFileRead result = cellInstanceConnectionName switch
{
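The staticRuns re-type from Dictionary<long, List<string>> to Dictionary<long, List<Shared.Metrology.WS.Results>> is threaded through Get and every FileRead constructor in this comparison. A short sketch of how the re-typed dictionary is used, mirroring the hunks further down; the local name wsResultsForSequence is illustrative, not from the repository:

// Producer (metrology viewer handler): record the send result for this run's sequence.
lock (_StaticRuns)
{
    if (!_StaticRuns.ContainsKey(_Logistics.Sequence))
        _StaticRuns.Add(_Logistics.Sequence, new());
    _StaticRuns[_Logistics.Sequence].Add(wsResults);
}

// Consumer (OpenInsight handler): attach the subgroup id to the stored result.
if (_StaticRuns.TryGetValue(_Logistics.Sequence, out List<Shared.Metrology.WS.Results> wsResultsForSequence))
{
    lock (_StaticRuns)
        wsResultsForSequence[0] = WS.Results.Get(wsResultsForSequence[0], subgroupId);
}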
@@ -23,7 +23,7 @@ public class FileRead : Shared.FileRead, IFileRead
private int _LastDummyRunIndex;
private readonly string[] _CellNames;

- public FileRead(ISMTP smtp, Dictionary<string, string> fileParameter, string cellInstanceName, int? connectionCount, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList<ModelObjectParameterDefinition> modelObjectParameters, string equipmentDictionaryName, Dictionary<string, List<long>> dummyRuns, Dictionary<long, List<string>> staticRuns, bool useCyclicalForDescription, bool isEAFHosted) :
+ public FileRead(ISMTP smtp, Dictionary<string, string> fileParameter, string cellInstanceName, int? connectionCount, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList<ModelObjectParameterDefinition> modelObjectParameters, string equipmentDictionaryName, Dictionary<string, List<long>> dummyRuns, Dictionary<long, List<Shared.Metrology.WS.Results>> staticRuns, bool useCyclicalForDescription, bool isEAFHosted) :
base(new Description(), false, smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null)
{
_MinFileLength = 10;
@@ -17,7 +17,7 @@ public class FileRead : Shared.FileRead, IFileRead

private readonly string _IQSFile;

- public FileRead(ISMTP smtp, Dictionary<string, string> fileParameter, string cellInstanceName, int? connectionCount, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList<ModelObjectParameterDefinition> modelObjectParameters, string equipmentDictionaryName, Dictionary<string, List<long>> dummyRuns, Dictionary<long, List<string>> staticRuns, bool useCyclicalForDescription, bool isEAFHosted) :
+ public FileRead(ISMTP smtp, Dictionary<string, string> fileParameter, string cellInstanceName, int? connectionCount, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList<ModelObjectParameterDefinition> modelObjectParameters, string equipmentDictionaryName, Dictionary<string, List<long>> dummyRuns, Dictionary<long, List<WS.Results>> staticRuns, bool useCyclicalForDescription, bool isEAFHosted) :
base(new Description(), false, smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null)
{
_MinFileLength = 10;

@@ -154,13 +154,13 @@ public class FileRead : Shared.FileRead, IFileRead
private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
- Tuple<string, string[], string[]> pdsf = ProcessDataStandardFormat.GetLogisticsColumnsAndBody(reportFullPath);
- _Logistics = new Logistics(reportFullPath, pdsf.Item1);
+ ProcessDataStandardFormat processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath);
+ _Logistics = new Logistics(reportFullPath, processDataStandardFormat);
SetFileParameterLotIDToLogisticsMID();
- JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(pdsf);
+ JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(processDataStandardFormat);
List<pcl.Description> descriptions = pcl.ProcessData.GetDescriptions(jsonElements);
Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
- results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(pdsf.Item1, tests, jsonElements, new List<FileInfo>());
+ results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics[0]), tests, jsonElements, new List<FileInfo>());
if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
SaveIQSFile(reportFullPath, dateTime, descriptions, tests);
if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
@@ -5,17 +5,71 @@ using Adaptation.Shared.Duplicator;
using Adaptation.Shared.Methods;
using System;
using System.Collections.Generic;
+ using System.Collections.ObjectModel;
using System.IO;
using System.Linq;
using System.Text;
using System.Text.Json;
using System.Threading;

namespace Adaptation.FileHandlers.MoveMatchingFiles;

+ #nullable enable

public class FileRead : Shared.FileRead, IFileRead
{

- public FileRead(ISMTP smtp, Dictionary<string, string> fileParameter, string cellInstanceName, int? connectionCount, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList<ModelObjectParameterDefinition> modelObjectParameters, string equipmentDictionaryName, Dictionary<string, List<long>> dummyRuns, Dictionary<long, List<string>> staticRuns, bool useCyclicalForDescription, bool isEAFHosted) :
+ internal class PreWith
+ {
+
+ internal string MatchingFile { get; private set; }
+ internal string CheckFile { get; private set; }
+ internal string ErrFile { get; private set; }
+ internal string CheckDirectory { get; private set; }
+ internal string NoWaitDirectory { get; private set; }
+
+ internal PreWith(string matchingFile, string checkFile, string errFile, string checkDirectory, string noWaitDirectory)
+ {
+ MatchingFile = matchingFile;
+ CheckFile = checkFile;
+ ErrFile = errFile;
+ CheckDirectory = checkDirectory;
+ NoWaitDirectory = noWaitDirectory;
+ }
+
+ }
+
+ internal class Pre
+ {
+
+ internal string MatchingFile { get; private set; }
+ internal string CheckFile { get; private set; }
+
+ internal Pre(string matchingFile, string checkFile)
+ {
+ MatchingFile = matchingFile;
+ CheckFile = checkFile;
+ }
+
+ }
+
+ internal class Post
+ {
+
+ internal string ErrFile { get; private set; }
+ internal string CheckFile { get; private set; }
+
+ internal Post(string checkFile, string errFile)
+ {
+ ErrFile = errFile;
+ CheckFile = checkFile;
+ }
+
+ }
+
+ private readonly ProcessDataStandardFormatMapping _ProcessDataStandardFormatMapping;
+
+ public FileRead(ISMTP smtp, Dictionary<string, string> fileParameter, string cellInstanceName, int? connectionCount, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList<ModelObjectParameterDefinition> modelObjectParameters, string equipmentDictionaryName, Dictionary<string, List<long>> dummyRuns, Dictionary<long, List<Shared.Metrology.WS.Results>> staticRuns, bool useCyclicalForDescription, bool isEAFHosted) :
base(new Description(), false, smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null)
{
_MinFileLength = 10;

@@ -27,6 +81,12 @@ public class FileRead : Shared.FileRead, IFileRead
throw new Exception(cellInstanceConnectionName);
if (!_IsDuplicator)
throw new Exception(cellInstanceConnectionName);
+ string processDataStandardFormatMappingOldColumnNames = GetPropertyValue(cellInstanceConnectionName, modelObjectParameters, "Process.Data.Standard.Format.Mapping.Old.Column.Names");
+ string processDataStandardFormatMappingNewColumnNames = GetPropertyValue(cellInstanceConnectionName, modelObjectParameters, "Process.Data.Standard.Format.Mapping.New.Column.Names");
+ string processDataStandardFormatMappingColumnIndices = GetPropertyValue(cellInstanceConnectionName, modelObjectParameters, "Process.Data.Standard.Format.Mapping.Column.Indices");
+ _ProcessDataStandardFormatMapping = GetProcessDataStandardFormatMapping(processDataStandardFormatMappingOldColumnNames,
+ processDataStandardFormatMappingNewColumnNames,
+ processDataStandardFormatMappingColumnIndices);
}

void IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, Exception exception)

@@ -41,7 +101,8 @@ public class FileRead : Shared.FileRead, IFileRead
Move(extractResults);
}

- void IFileRead.WaitForThread() => WaitForThread(thread: null, threadExceptions: null);
+ void IFileRead.WaitForThread() =>
+ WaitForThread(thread: null, threadExceptions: null);

string IFileRead.GetEventDescription()
{

@@ -88,7 +149,7 @@ public class FileRead : Shared.FileRead, IFileRead
DateTime dateTime = DateTime.Now;
results = GetExtractResult(reportFullPath, dateTime);
if (results.Item3 is null)
- results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(results.Item1, Array.Empty<Test>(), JsonSerializer.Deserialize<JsonElement[]>("[]"), results.Item4);
+ results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(results.Item1, Array.Empty<Test>(), JsonSerializer.Deserialize<JsonElement[]>("[]") ?? throw new Exception(), results.Item4);
if (results.Item3.Length > 0 && _IsEAFHosted)
WritePDSF(this, results.Item3);
UpdateLastTicksDuration(DateTime.Now.Ticks - dateTime.Ticks);

@@ -104,7 +165,69 @@ public class FileRead : Shared.FileRead, IFileRead
return results;
}

- private static List<string> GetSearchDirectories(int numberLength, string parentDirectory)
+ private static ProcessDataStandardFormatMapping GetProcessDataStandardFormatMapping(string processDataStandardFormatMappingOldColumnNames, string processDataStandardFormatMappingNewColumnNames, string processDataStandardFormatMappingColumnIndices)
+ {
+ ProcessDataStandardFormatMapping result;
+ string[] segmentsB;
+ List<string> distinct = new();
+ Dictionary<string, string> keyValuePairs = new();
+ string args4 = "Time,HeaderUniqueId,UniqueId,Date";
+ string args5 = "";
+ string args6 = "";
+ string args7 = "Test|EventId,Lot|Id,Slot|WaferId,AreaTotal|WaferAreaTotal,HazeAverage|WaferHazeAverage,HazeRegion|WaferHazeRegion,ScratchTotal|WaferScratchTotal";
+ // string args8 = "Time,A_LOGISTICS,B_LOGISTICS,Test,Count,Index,MesEntity,Date,Employee,Lot,PSN,Reactor,Recipe,Comments,Diameter,Exclusion,Gain,HeaderUniqueId,Laser,ParseErrorText,RDS,Slot,UniqueId,AreaCount,AreaCountAvg,AreaCountMax,AreaCountMin,AreaCountStdDev,AreaTotal,AreaTotalAvg,AreaTotalMax,AreaTotalMin,AreaTotalStdDev,Bin1,Bin2,Bin3,Bin4,Bin5,Bin6,Bin7,Bin8,HazeAverage,HazeAverageAvg,HazeAverageMax,HazeAverageMin,HazeAverageStdDev,HazePeak,HazeRegion,HazeRegionAvg,HazeRegionMax,HazeRegionMin,HazeRegionStdDev,HazeRng,LPDCM2,LPDCM2Avg,LPDCM2Max,LPDCM2Min,LPDCM2StdDev,LPDCount,LPDCountAvg,LPDCountMax,LPDCountMin,LPDCountStdDev,Mean,ScratchCount,ScratchCountAvg,ScratchCountMax,ScratchCountMin,ScratchCountStdDev,ScratchTotal,ScratchTotalAvg,ScratchTotalMax,ScratchTotalMin,ScratchTotalStdDev,Sort,StdDev,SumOfDefects,SumOfDefectsAvg,SumOfDefectsMax,SumOfDefectsMin,SumOfDefectsStdDev,Thresh,Thruput";
+ // string args9 = "Time,A_LOGISTICS,B_LOGISTICS,Count,Sequence,MesEntity,Index,Date,Recipe,Id,WaferId,LPDCount,LPDCM2,AreaCount,AreaTotal,ScratchCount,ScratchTotal,SumOfDefects,HazeRegion,HazeAverage,Grade,LPDCountMin,LPDCM2Min,AreaCountMin,AreaTotalMin,ScratchCountMin,ScratchTotalMin,SumOfDefectsMin,HazeRegionMin,HazeAverageMin,LPDCountMax,LPDCM2Max,AreaCountMax,AreaTotalMax,ScratchCountMax,ScratchTotalMax,SumOfDefectsMax,HazeRegionMax,HazeAverageMax,LPDCountAvg,LPDCM2Avg,AreaCountAvg,AreaTotalAvg,ScratchCountAvg,ScratchTotalAvg,SumOfDefectsAvg,HazeRegionAvg,HazeAverageAvg,LPDCountStdDev,LPDCM2StdDev,AreaCountStdDev,AreaTotalStdDev,ScratchCountStdDev,ScratchTotalStdDev,SumOfDefectsStdDev,HazeRegionStdDev,HazeAverageStdDev,WaferDate,Comments,Sort,WaferLPDCount,WaferLPDCM2,Bin1,Bin2,Bin3,Bin4,Bin5,Bin6,Bin7,Bin8,Mean,StdDev,WaferAreaCount,WaferAreaTotal,WaferScratchCount,WaferScratchTotal,WaferSumOfDefects,WaferHazeRegion,WaferHazeAverage,HazePeak,Laser,Gain,Diameter,Thresh,Exclusion,HazeRng,Thruput,WaferRecipe,RDS,PSN,Reactor,Layer,Zone,Employee,InferredLot,EventId";
+ // string args10 = "0,1,2,95,3,6,5,7,93,9,89,90,8,58,82,84,81,-1,80,-1,88,10,-1,13,41,32,23,50,73,42,33,24,51,62,63,64,65,66,67,68,69,78,47,38,29,56,79,77,46,37,28,55,85,12,40,31,22,49,11,39,30,21,48,70,15,43,34,25,52,75,44,35,26,53,59,71,17,45,36,27,54,83,86";
+ string[] segments = args7.Split(',');
+ ReadOnlyCollection<string> ignoreColumns = new(args4.Split(','));
+ ReadOnlyCollection<string> backfillColumns = new(args5.Split(','));
+ ReadOnlyCollection<string> indexOnlyColumns = new(args6.Split(','));
+ ReadOnlyCollection<string> newColumnNames = new(processDataStandardFormatMappingNewColumnNames.Split(','));
+ ReadOnlyCollection<string> oldColumnNames = new(processDataStandardFormatMappingOldColumnNames.Split(','));
+ ReadOnlyCollection<int> columnIndices = new(processDataStandardFormatMappingColumnIndices.Split(',').Select(int.Parse).ToArray());
+ foreach (string segment in segments)
+ {
+ segmentsB = segment.Split('|');
+ if (segmentsB.Length != 2)
+ continue;
+ if (distinct.Contains(segmentsB[0]))
+ continue;
+ distinct.Add(segmentsB[0]);
+ keyValuePairs.Add(segmentsB[0], segmentsB[1]);
+ }
+ result = new(backfillColumns: backfillColumns,
+ columnIndices: columnIndices,
+ newColumnNames: newColumnNames,
+ ignoreColumns: ignoreColumns,
+ indexOnlyColumns: indexOnlyColumns,
+ keyValuePairs: new(keyValuePairs),
+ oldColumnNames: oldColumnNames);
+ return result;
+ }
+
+ private static ReadOnlyCollection<PreWith> GetPreWithCollection(ReadOnlyCollection<Pre> preCollection)
+ {
+ List<PreWith> results = new();
+ string errFile;
+ PreWith preWith;
+ string? checkDirectory;
+ string noWaitDirectory;
+ foreach (Pre pre in preCollection)
+ {
+ errFile = string.Concat(pre.CheckFile, ".err");
+ checkDirectory = Path.GetDirectoryName(pre.CheckFile);
+ if (string.IsNullOrEmpty(checkDirectory))
+ continue;
+ if (!Directory.Exists(checkDirectory))
+ _ = Directory.CreateDirectory(checkDirectory);
+ noWaitDirectory = Path.Combine(checkDirectory, "NoWaitDirectory");
+ preWith = new(pre.MatchingFile, pre.CheckFile, errFile, checkDirectory, noWaitDirectory);
+ results.Add(preWith);
+ }
+ return results.AsReadOnly();
+ }
+
+ private static ReadOnlyCollection<string> GetSearchDirectories(int numberLength, string parentDirectory)
{
List<string> results = new();
string[] directories = Directory.GetDirectories(parentDirectory, "*", SearchOption.TopDirectoryOnly);

@@ -115,10 +238,142 @@ public class FileRead : Shared.FileRead, IFileRead
results.Add(directory);
}
results.Sort();
return results.AsReadOnly();
}

+ private static void CreatePointerFile(int numberLength, string parentDirectory, ReadOnlyCollection<string> matchingFiles)
+ {
+ string checkFile;
+ string writeFile;
+ string? directoryName;
+ int parentDirectoryLength = parentDirectory.Length;
+ foreach (string matchingFile in matchingFiles)
+ {
+ directoryName = Path.GetDirectoryName(matchingFile);
+ if (directoryName is null)
+ continue;
+ checkFile = $"{matchingFile[0]}{directoryName.Substring(parentDirectoryLength + numberLength + 1)}";
+ writeFile = Path.Combine(parentDirectory, $"{directoryName.Substring(parentDirectory.Length + 1, numberLength)}.txt");
+ if (File.Exists(writeFile))
+ continue;
+ File.AppendAllLines(writeFile, new string[] { parentDirectory, matchingFile, directoryName, checkFile });
+ }
+ }
+
+ private static ReadOnlyCollection<Pre> GetPreCollection(int numberLength, string parentDirectory, ReadOnlyCollection<string> matchingFiles)
+ {
+ List<Pre> results = new();
+ Pre pre;
+ string checkFile;
+ int parentDirectoryLength = parentDirectory.Length;
+ foreach (string matchingFile in matchingFiles)
+ {
+ checkFile = $"{matchingFile[0]}{matchingFile.Substring(parentDirectoryLength + numberLength + 1)}";
+ pre = new(matchingFile, checkFile);
+ results.Add(pre);
+ }
+ return results.AsReadOnly();
+ }
+
+ private void MoveCollection(DateTime dateTime, ProcessDataStandardFormat? processDataStandardFormat, ReadOnlyCollection<PreWith> preWithCollection)
+ {
+ ReadOnlyCollection<Post> postCollection = GetPostCollection(dateTime, processDataStandardFormat, preWithCollection);
+ if (postCollection.Count != 0)
+ {
+ Thread.Sleep(500);
+ StringBuilder stringBuilder = new();
+ foreach (Post post in postCollection)
+ {
+ if (File.Exists(post.ErrFile))
+ _ = stringBuilder.AppendLine(File.ReadAllText(post.ErrFile));
+ if (File.Exists(post.CheckFile))
+ _ = stringBuilder.AppendLine($"<{post.CheckFile}> was not consumed by the end!");
+ }
+ if (stringBuilder.Length > 0)
+ throw new Exception(stringBuilder.ToString());
+ }
+ }
+
+ private ReadOnlyCollection<Post> GetPostCollection(DateTime dateTime, ProcessDataStandardFormat? processDataStandardFormat, ReadOnlyCollection<PreWith> preWithCollection)
+ {
+ List<Post> results = new();
+ Post post;
+ long preWait;
+ foreach (PreWith preWith in preWithCollection)
+ {
+ if (!_IsEAFHosted)
+ continue;
+ if (processDataStandardFormat is null)
+ File.Move(preWith.MatchingFile, preWith.CheckFile);
+ else
+ {
+ if (!_StaticRuns.TryGetValue(_Logistics.Sequence, out List<Shared.Metrology.WS.Results>? wsResults))
+ wsResults = null;
+ ProcessDataStandardFormat.Write(preWith.CheckFile, processDataStandardFormat, wsResults);
+ File.Delete(preWith.MatchingFile);
+ }
+ if (Directory.Exists(preWith.NoWaitDirectory))
+ {
+ post = new(preWith.CheckFile, preWith.ErrFile);
+ results.Add(post);
+ continue;
+ }
+ if (_FileConnectorConfiguration?.FileHandleWaitTime is null)
+ preWait = DateTime.Now.AddMilliseconds(1234).Ticks;
+ else
+ preWait = DateTime.Now.AddMilliseconds(_FileConnectorConfiguration.FileHandleWaitTime.Value).Ticks;
+ for (short i = 0; i < short.MaxValue; i++)
+ {
+ if (DateTime.Now.Ticks > preWait)
+ break;
+ Thread.Sleep(500);
+ }
+ for (int i = 0; i < int.MaxValue; i++)
+ {
+ if (File.Exists(preWith.ErrFile))
+ throw new Exception(File.ReadAllText(preWith.ErrFile));
+ if (!File.Exists(preWith.CheckFile))
+ break;
+ if (new TimeSpan(DateTime.Now.Ticks - dateTime.Ticks).TotalSeconds > _BreakAfterSeconds)
+ throw new Exception($"Not all files were consumed after {_BreakAfterSeconds} second(s)!");
+ Thread.Sleep(500);
+ }
+ }
+ return results.AsReadOnly();
+ }
+
+ private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
+ {
+ Tuple<string, Test[], JsonElement[], List<FileInfo>> results = new(string.Empty, Array.Empty<Test>(), Array.Empty<JsonElement>(), new List<FileInfo>());
+ ProcessDataStandardFormat? processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath, _ProcessDataStandardFormatMapping);
+ if (processDataStandardFormat is not null)
+ _Logistics = new Logistics(reportFullPath, processDataStandardFormat);
+ else
+ {
+ processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath);
+ _Logistics = new Logistics(reportFullPath, processDataStandardFormat);
+ processDataStandardFormat = null;
+ }
+ if (!_IsEAFHosted && processDataStandardFormat is not null)
+ ProcessDataStandardFormat.Write(".pdsf", processDataStandardFormat, wsResults: null);
+ SetFileParameterLotIDToLogisticsMID();
+ int numberLength = 2;
+ long ticks = dateTime.Ticks;
+ string parentParentDirectory = GetParentParent(reportFullPath);
+ ReadOnlyCollection<string> searchDirectories = GetSearchDirectories(numberLength, parentParentDirectory);
+ ReadOnlyCollection<string> matchingFiles = GetMatchingFiles(ticks, reportFullPath, searchDirectories);
+ if (matchingFiles.Count != searchDirectories.Count)
+ throw new Exception($"Didn't find all files after {_BreakAfterSeconds} second(s)!");
+ try
+ { CreatePointerFile(numberLength, parentParentDirectory, matchingFiles); }
+ catch (Exception) { }
+ ReadOnlyCollection<Pre> preCollection = GetPreCollection(numberLength, parentParentDirectory, matchingFiles);
+ ReadOnlyCollection<PreWith> preWithCollection = GetPreWithCollection(preCollection);
+ MoveCollection(dateTime, processDataStandardFormat, preWithCollection);
+ return results;
+ }

- private List<string> GetMatchingFiles(long ticks, string reportFullPath, List<string> searchDirectories)
+ private ReadOnlyCollection<string> GetMatchingFiles(long ticks, string reportFullPath, ReadOnlyCollection<string> searchDirectories)
{
List<string> results = new();
string[] found;

@@ -137,129 +392,7 @@ public class FileRead : Shared.FileRead, IFileRead
break;
}
}
- return results;
- }
-
- private static List<(string matchingFile, string checkFile)> GetCollection(int numberLength, string parentDirectory, List<string> matchingFiles)
- {
- List<(string matchingFile, string checkFile)> results = new();
- string checkFile;
- int parentDirectoryLength = parentDirectory.Length;
- foreach (string matchingFile in matchingFiles)
- {
- checkFile = $"{matchingFile[0]}{matchingFile.Substring(parentDirectoryLength + numberLength + 1)}";
- results.Add(new(matchingFile, checkFile));
- }
- return results;
- }
-
- private static List<(string, string, string, string, string)> GetCollection(List<(string matchingFile, string checkFile)> collection)
- {
- List<(string, string, string, string, string)> results = new();
- string errFile;
- string checkDirectory;
- string noWaitDirectory;
- foreach ((string matchingFile, string checkFile) in collection)
- {
- errFile = string.Concat(checkFile, ".err");
- checkDirectory = Path.GetDirectoryName(checkFile);
- if (!Directory.Exists(checkDirectory))
- _ = Directory.CreateDirectory(checkDirectory);
- noWaitDirectory = Path.Combine(checkDirectory, "NoWaitDirectory");
- results.Add(new(matchingFile, checkFile, errFile, checkDirectory, noWaitDirectory));
- }
- return results;
- }
-
- private void MoveCollection(DateTime dateTime, List<(string matchingFile, string checkFile)> collection)
- {
- long preWait;
- List<(string checkFile, string errFile)> postCollection = new();
- foreach ((string matchingFile, string checkFile, string errFile, string checkDirectory, string noWaitDirectory) in GetCollection(collection))
- {
- File.Move(matchingFile, checkFile);
- if (Directory.Exists(noWaitDirectory))
- {
- postCollection.Add(new(checkFile, errFile));
- continue;
- }
- if (_FileConnectorConfiguration?.FileHandleWaitTime is null)
- preWait = DateTime.Now.AddMilliseconds(1234).Ticks;
- else
- preWait = DateTime.Now.AddMilliseconds(_FileConnectorConfiguration.FileHandleWaitTime.Value).Ticks;
- for (short i = 0; i < short.MaxValue; i++)
- {
- if (DateTime.Now.Ticks > preWait)
- break;
- Thread.Sleep(500);
- }
- for (int i = 0; i < int.MaxValue; i++)
- {
- if (File.Exists(errFile))
- throw new Exception(File.ReadAllText(errFile));
- if (!File.Exists(checkFile))
- break;
- if (new TimeSpan(DateTime.Now.Ticks - dateTime.Ticks).TotalSeconds > _BreakAfterSeconds)
- throw new Exception($"Not all files were consumed after {_BreakAfterSeconds} second(s)!");
- Thread.Sleep(500);
- }
- }
- if (postCollection.Count != 0)
- {
- Thread.Sleep(500);
- StringBuilder stringBuilder = new();
- foreach ((string checkFile, string errFile) in postCollection)
- {
- if (File.Exists(errFile))
- _ = stringBuilder.AppendLine(File.ReadAllText(errFile));
- if (File.Exists(checkFile))
- _ = stringBuilder.AppendLine($"<{checkFile}> was not consumed by the end!");
- }
- if (stringBuilder.Length > 0)
- throw new Exception(stringBuilder.ToString());
- }
- }
-
- private static void CreatePointerFile(int numberLength, string parentDirectory, List<string> matchingFiles)
- {
- #nullable enable
- string checkFile;
- string writeFile;
- string? directoryName;
- int parentDirectoryLength = parentDirectory.Length;
- foreach (string matchingFile in matchingFiles)
- {
- directoryName = Path.GetDirectoryName(matchingFile);
- if (directoryName is null)
- continue;
- checkFile = $"{matchingFile[0]}{directoryName.Substring(parentDirectoryLength + numberLength + 1)}";
- writeFile = Path.Combine(parentDirectory, $"{directoryName.Substring(parentDirectory.Length + 1, numberLength)}.txt");
- if (File.Exists(writeFile))
- continue;
- File.AppendAllLines(writeFile, new string[] { parentDirectory, matchingFile, directoryName, checkFile });
- }
- #nullable disable
- }
-
- private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
- {
- Tuple<string, Test[], JsonElement[], List<FileInfo>> results = new(string.Empty, null, null, new List<FileInfo>());
- Tuple<string, string[], string[]> pdsf = ProcessDataStandardFormat.GetLogisticsColumnsAndBody(reportFullPath);
- _Logistics = new Logistics(reportFullPath, pdsf.Item1);
- SetFileParameterLotIDToLogisticsMID();
- int numberLength = 2;
- long ticks = dateTime.Ticks;
- string parentParentDirectory = GetParentParent(reportFullPath);
- List<string> searchDirectories = GetSearchDirectories(numberLength, parentParentDirectory);
- List<string> matchingFiles = GetMatchingFiles(ticks, reportFullPath, searchDirectories);
- if (matchingFiles.Count != searchDirectories.Count)
- throw new Exception($"Didn't find all files after {_BreakAfterSeconds} second(s)!");
- try
- { CreatePointerFile(numberLength, parentParentDirectory, matchingFiles); }
- catch (Exception) { }
- List<(string matchingFile, string checkFile)> collection = GetCollection(numberLength, parentParentDirectory, matchingFiles);
- MoveCollection(dateTime, collection);
- return results;
+ return results.AsReadOnly();
}

}
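The new GetProcessDataStandardFormatMapping builds its rename map from a comma-separated list of "Old|New" pairs (args7 above). A standalone sketch of that parsing step; the helper name and class are illustrative, only the pair format comes from the diff:

using System.Collections.Generic;

static class ColumnRenameParser
{
    internal static Dictionary<string, string> Parse(string pairs)
    {
        Dictionary<string, string> keyValuePairs = new();
        foreach (string segment in pairs.Split(','))
        {
            string[] parts = segment.Split('|');
            // Skip malformed segments and duplicate keys, as the handler does.
            if (parts.Length != 2 || keyValuePairs.ContainsKey(parts[0]))
                continue;
            keyValuePairs.Add(parts[0], parts[1]);
        }
        return keyValuePairs;
    }
}

// Example: ColumnRenameParser.Parse("Test|EventId,Lot|Id,Slot|WaferId")["Slot"] == "WaferId"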
@@ -24,7 +24,7 @@ public class FileRead : Shared.FileRead, IFileRead
private readonly string _OpenInsightApiECDirectory;
private readonly ReadOnlyCollection<ModelObjectParameterDefinition> _IQSCopyCollection;

- public FileRead(ISMTP smtp, Dictionary<string, string> fileParameter, string cellInstanceName, int? connectionCount, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList<ModelObjectParameterDefinition> modelObjectParameters, string equipmentDictionaryName, Dictionary<string, List<long>> dummyRuns, Dictionary<long, List<string>> staticRuns, bool useCyclicalForDescription, bool isEAFHosted) :
+ public FileRead(ISMTP smtp, Dictionary<string, string> fileParameter, string cellInstanceName, int? connectionCount, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList<ModelObjectParameterDefinition> modelObjectParameters, string equipmentDictionaryName, Dictionary<string, List<long>> dummyRuns, Dictionary<long, List<WS.Results>> staticRuns, bool useCyclicalForDescription, bool isEAFHosted) :
base(new Description(), false, smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null)
{
_LastIndex = -1;

@@ -238,7 +238,7 @@ public class FileRead : Shared.FileRead, IFileRead
return result.ToString();
}

- private void SaveOpenInsightFile(string reportFullPath, DateTime dateTime, string logistics, List<pcl.Description> descriptions, Test[] tests)
+ private void SaveOpenInsightFile(string reportFullPath, DateTime dateTime, ProcessDataStandardFormat processDataStandardFormat, List<pcl.Description> descriptions, Test[] tests)
{
bool isDummyRun = false;
List<(Shared.Properties.IScopeInfo, string)> collection = new();

@@ -262,28 +262,29 @@ public class FileRead : Shared.FileRead, IFileRead
if (!string.IsNullOrEmpty(lines))
{
_LastIndex += 1;
- long? subGroupId;
+ long? subgroupId;
if (_LastIndex >= _IQSCopyCollection.Count)
_LastIndex = 0;
long breakAfter = dateTime.AddSeconds(_BreakAfterSeconds).Ticks;
ModelObjectParameterDefinition modelObjectParameterDefinition = _IQSCopyCollection[_LastIndex];
long preWait = _FileConnectorConfiguration?.FileHandleWaitTime is null ? dateTime.AddMilliseconds(1234).Ticks : dateTime.AddMilliseconds(_FileConnectorConfiguration.FileHandleWaitTime.Value).Ticks;
if (string.IsNullOrEmpty(descriptions[0].Reactor) || string.IsNullOrEmpty(descriptions[0].PSN))
- subGroupId = null;
+ subgroupId = null;
else
- (subGroupId, int? _, string _) = FromIQS.GetCommandText(_IqsConnectionString, _Logistics, descriptions[0], breakAfter, preWait);
- if (subGroupId is null)
+ (subgroupId, int? _, string _) = FromIQS.GetCommandText(_IqsConnectionString, _Logistics, descriptions[0], breakAfter, preWait);
+ if (subgroupId is null)
collection.Add(new(new ScopeInfo(tests[0], _OpenInsightFilePattern), lines));
else
- collection.Add(new(new ScopeInfo(tests[0], $"{subGroupId.Value} {_OpenInsightFilePattern}"), lines));
- if (_StaticRuns.TryGetValue(_Logistics.Sequence, out List<string> values))
+ collection.Add(new(new ScopeInfo(tests[0], $"{subgroupId.Value} {_OpenInsightFilePattern}"), lines));
+ if (_StaticRuns.TryGetValue(_Logistics.Sequence, out List<WS.Results> wsResults))
{
- if (values.Count != 1)
- throw new Exception($"{nameof(_StaticRuns)} {values.Count} != 1 {_Logistics.Sequence}!");
- values[0] = $"{values[0]}|{subGroupId}";
+ if (wsResults is null || wsResults.Count != 1)
+ throw new NullReferenceException($"{nameof(wsResults)} {wsResults?.Count} != 1 {_Logistics.Sequence}!");
+ lock (_StaticRuns)
+ wsResults[0] = WS.Results.Get(wsResults[0], subgroupId);
}
string weekOfYear = _Calendar.GetWeekOfYear(_Logistics.DateTimeFromSequence, CalendarWeekRule.FirstDay, DayOfWeek.Sunday).ToString("00");
- FromIQS.Save(_OpenInsightApiECDirectory, _Logistics, reportFullPath, logistics, descriptions.First(), lines, subGroupId, weekOfYear);
+ FromIQS.Save(_OpenInsightApiECDirectory, _Logistics, reportFullPath, processDataStandardFormat, descriptions.First(), lines, subgroupId, weekOfYear);
try
{ FromIQS.SaveCopy(_FileConnectorConfiguration.SourceFileLocation, _IqsConnectionString, modelObjectParameterDefinition.Name, modelObjectParameterDefinition.Value.Split('|')); }
catch (Exception exception)

@@ -303,15 +304,15 @@ public class FileRead : Shared.FileRead, IFileRead
private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
- Tuple<string, string[], string[]> pdsf = ProcessDataStandardFormat.GetLogisticsColumnsAndBody(reportFullPath);
- _Logistics = new Logistics(reportFullPath, pdsf.Item1);
+ ProcessDataStandardFormat processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath);
+ _Logistics = new Logistics(reportFullPath, processDataStandardFormat);
SetFileParameterLotIDToLogisticsMID();
- JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(pdsf);
+ JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(processDataStandardFormat);
List<pcl.Description> descriptions = pcl.ProcessData.GetDescriptions(jsonElements);
Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
- SaveOpenInsightFile(reportFullPath, dateTime, pdsf.Item1, descriptions, tests);
- results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(pdsf.Item1, tests, jsonElements, new List<FileInfo>());
+ SaveOpenInsightFile(reportFullPath, dateTime, processDataStandardFormat, descriptions, tests);
+ results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics[0]), tests, jsonElements, new List<FileInfo>());
return results;
}
@@ -325,7 +325,7 @@ public class FromIQS
return new(result, count, commandText);
}

- private static string GetJson(Logistics logistics, string logisticLines, pcl.Description description)
+ private static string GetJson(Logistics logistics, ProcessDataStandardFormat processDataStandardFormat, pcl.Description description)
{
string result;
StringBuilder stringBuilder = new();

@@ -345,7 +345,7 @@ public class FromIQS
string safeValue;
string[] segments;
string serializerValue;
- foreach (string line in logisticLines.Split(new string[] { Environment.NewLine }, StringSplitOptions.None))
+ foreach (string line in processDataStandardFormat.Logistics)
{
segments = line.Split('\t');
if (segments.Length < 2)

@@ -376,11 +376,11 @@ public class FromIQS
return result;
}

- internal static void Save(string openInsightApiECDirectory, Logistics logistics, string reportFullPath, string logisticLines, pcl.Description description, string lines, long? subGroupId, string weekOfYear)
+ internal static void Save(string openInsightApiECDirectory, Logistics logistics, string reportFullPath, ProcessDataStandardFormat processDataStandardFormat, pcl.Description description, string lines, long? subGroupId, string weekOfYear)
{
string checkFile;
string fileName = Path.GetFileName(reportFullPath);
- string json = GetJson(logistics, logisticLines, description);
+ string json = GetJson(logistics, processDataStandardFormat, description);
string? ecPathRoot = Path.GetPathRoot(openInsightApiECDirectory);
bool ecExists = ecPathRoot is not null && Directory.Exists(ecPathRoot);
string weekYear = $"{logistics.DateTimeFromSequence:yyyy}_Week_{weekOfYear}";

@@ -396,6 +396,9 @@ public class FromIQS
checkFile = Path.Combine(ecDirectory, $"{logistics.DateTimeFromSequence.Ticks}.json");
if (ecExists && !File.Exists(checkFile))
File.WriteAllText(checkFile, json);
+ checkFile = Path.Combine(ecDirectory, $"{logistics.DateTimeFromSequence.Ticks}.lbl");
+ if (ecExists && !File.Exists(checkFile))
+ File.WriteAllText(checkFile, processDataStandardFormat.Body[processDataStandardFormat.Body.Count - 1]);
}

private static string GetCommandText(string[] iqsCopyValues)
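With the added lines, Save now leaves two sibling artifacts in the EC directory per run, both named by the sequence ticks. A sketch of the pair, with names taken from the hunk above (ecDirectory stands in for the computed directory):

// Sketch only: the two files written per run.
string jsonFile = Path.Combine(ecDirectory, $"{logistics.DateTimeFromSequence.Ticks}.json"); // GetJson payload (logistics plus description fields)
string lblFile = Path.Combine(ecDirectory, $"{logistics.DateTimeFromSequence.Ticks}.lbl");   // last row of processDataStandardFormat.Body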
@@ -19,7 +19,7 @@ public class FileRead : Shared.FileRead, IFileRead
private readonly string _OpenInsightMetrologyViewerAPI;
private readonly string _OpenInsightMetrologyViewerFileShare;

- public FileRead(ISMTP smtp, Dictionary<string, string> fileParameter, string cellInstanceName, int? connectionCount, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList<ModelObjectParameterDefinition> modelObjectParameters, string equipmentDictionaryName, Dictionary<string, List<long>> dummyRuns, Dictionary<long, List<string>> staticRuns, bool useCyclicalForDescription, bool isEAFHosted) :
+ public FileRead(ISMTP smtp, Dictionary<string, string> fileParameter, string cellInstanceName, int? connectionCount, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList<ModelObjectParameterDefinition> modelObjectParameters, string equipmentDictionaryName, Dictionary<string, List<long>> dummyRuns, Dictionary<long, List<WS.Results>> staticRuns, bool useCyclicalForDescription, bool isEAFHosted) :
base(new Description(), false, smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null)
{
_MinFileLength = 10;

@@ -121,16 +121,16 @@ public class FileRead : Shared.FileRead, IFileRead
_ = Directory.CreateDirectory(checkDirectory);
File.Copy(reportFullPath, Path.Combine(checkDirectory, Path.GetFileName(reportFullPath)), overwrite: true);
(string jsonResults, WS.Results wsResults) = WS.SendData(_OpenInsightMetrologyViewerAPI, _Logistics.Sequence, directory, wsRequest);
- if (!wsResults.Success)
- throw new Exception(wsResults.ToString());
- _Log.Debug(wsResults.HeaderID);
+ if (wsResults.Success is null || !wsResults.Success.Value)
+ throw new Exception(jsonResults);
+ _Log.Debug(wsResults.HeaderId);
lock (_StaticRuns)
{
if (!_StaticRuns.ContainsKey(_Logistics.Sequence))
_StaticRuns.Add(_Logistics.Sequence, new());
- _StaticRuns[_Logistics.Sequence].Add(jsonResults);
+ _StaticRuns[_Logistics.Sequence].Add(wsResults);
}
- checkDirectory = Path.Combine(directory, $"-{wsResults.HeaderID}");
+ checkDirectory = Path.Combine(directory, $"-{wsResults.HeaderId}");
if (!Directory.Exists(checkDirectory))
_ = Directory.CreateDirectory(checkDirectory);
File.Copy(reportFullPath, Path.Combine(checkDirectory, Path.GetFileName(reportFullPath)), overwrite: true);

@@ -139,15 +139,15 @@ public class FileRead : Shared.FileRead, IFileRead
private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
- Tuple<string, string[], string[]> pdsf = ProcessDataStandardFormat.GetLogisticsColumnsAndBody(reportFullPath);
- _Logistics = new Logistics(reportFullPath, pdsf.Item1);
+ ProcessDataStandardFormat processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath);
+ _Logistics = new Logistics(reportFullPath, processDataStandardFormat);
SetFileParameterLotIDToLogisticsMID();
- JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(pdsf);
+ JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(processDataStandardFormat);
List<pcl.Description> descriptions = pcl.ProcessData.GetDescriptions(jsonElements);
Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
SendData(reportFullPath, dateTime, descriptions);
- results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(pdsf.Item1, tests, jsonElements, new List<FileInfo>());
+ results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics[0]), tests, jsonElements, new List<FileInfo>());
return results;
}
@@ -5,7 +5,6 @@ using System;
using System.Collections.Generic;
using System.IO;
using System.Text;
using System.Text.Json;

namespace Adaptation.FileHandlers.OpenInsightMetrologyViewer;

@@ -199,6 +198,12 @@ public class WSRequest
if (string.IsNullOrEmpty(Details[i].Bin8))
Details[i].Bin8 = null;
}
+ UniqueId = $"{logistics.JobID}_{logistics.MID}_{logistics.DateTimeFromSequence:yyyyMMddHHmmssffff}";
+ for (int i = 0; i < Details.Count; i++)
+ {
+ Details[i].HeaderUniqueId = UniqueId;
+ Details[i].UniqueId = $"{logistics.JobID}_{logistics.MID}_{logistics.DateTimeFromSequence:yyyyMMddHHmmssffff}_Item-{i + 1}";
+ }
}

private static void UpdateDataPDF(List<pcl.Description> descriptions, string checkFileName)

@@ -285,30 +290,32 @@ public class WSRequest
pdDocument.close();
}

- internal static long GetHeaderId(IFileRead fileRead, Logistics logistics, string openInsightMetrologyViewerAPI, string openInsightMetrologyViewerFileShare, int weekOfYear, string json, List<pcl.Description> descriptions)
+ internal static long GetHeaderId(IFileRead fileRead, Logistics logistics, string openInsightMetrologyViewerAPI, string openInsightMetrologyViewerFileShare, int weekOfYear, WS.Results results, List<pcl.Description> descriptions)
{
long result;
- if (string.IsNullOrEmpty(json))
+ if (results is not null && results.HeaderId is not null)
+ result = results.HeaderId.Value;
+ else
{
WSRequest wsRequest = new(fileRead, logistics, descriptions);
string directory = Path.Combine(openInsightMetrologyViewerFileShare, logistics.DateTimeFromSequence.Year.ToString(), $"WW{weekOfYear:00}");
- (json, WS.Results wsResults) = WS.SendData(openInsightMetrologyViewerAPI, logistics.Sequence, directory, wsRequest);
- if (!wsResults.Success)
+ (_, WS.Results wsResults) = WS.SendData(openInsightMetrologyViewerAPI, logistics.Sequence, directory, wsRequest);
+ if (wsResults.Success is null || !wsResults.Success.Value)
throw new Exception(wsResults.ToString());
+ result = wsResults.HeaderId.Value;
}
- WS.Results metrologyWSRequest = JsonSerializer.Deserialize<WS.Results>(json, new JsonSerializerOptions { PropertyNameCaseInsensitive = true });
- result = metrologyWSRequest.HeaderID;
return result;
}

#pragma warning disable IDE0060
- internal static void PostOpenInsightMetrologyViewerAttachments(IFileRead fileRead, Logistics logistics, string openInsightMetrologyViewerAPI, List<pcl.Description> descriptions, string matchDirectory, string subGroupId, long headerId, string headerIdDirectory)
+ internal static void PostOpenInsightMetrologyViewerAttachments(IFileRead fileRead, Logistics logistics, string openInsightMetrologyViewerAPI, List<pcl.Description> descriptions, string matchDirectory, WS.Results results, string headerIdDirectory)
#pragma warning restore IDE0060
{
string checkFileName;
+ pcl.Description description;
string[] pclFiles = Directory.GetFiles(matchDirectory, "*.pcl", SearchOption.TopDirectoryOnly);
if (pclFiles.Length != 1)
- throw new Exception($"Invalid source file count for <{headerId}>!");
+ throw new Exception($"Invalid source file count for <{results.HeaderId}>!");
string sourceFileNameNoExt = Path.GetFileNameWithoutExtension(pclFiles[0]);
List<WS.Attachment> dataAttachments = new();
List<WS.Attachment> headerAttachments = new();

@@ -318,18 +325,19 @@ public class WSRequest
else
{
UpdateDataPDF(descriptions, checkFileName);
- headerAttachments.Add(new WS.Attachment(subGroupId, headerId, headerIdDirectory, descriptions[0].HeaderUniqueId, "Data.pdf", checkFileName));
+ headerAttachments.Add(new WS.Attachment(results, headerIdDirectory, $"{logistics.JobID}_{logistics.MID}_{logistics.DateTimeFromSequence:yyyyMMddHHmmssffff}", "Data.pdf", checkFileName));
}
- foreach (pcl.Description description in descriptions)
+ for (int i = 0; i < descriptions.Count; i++)
{
- checkFileName = Path.Combine(matchDirectory, $"{sourceFileNameNoExt}_{description.Slot.Replace('*', 's')}_image.pdf");
+ description = descriptions[i];
+ checkFileName = Path.Combine(matchDirectory, $"{sourceFileNameNoExt}_s{description.Slot}_image.pdf");
if (File.Exists(checkFileName))
- dataAttachments.Add(new WS.Attachment(subGroupId, headerId, headerIdDirectory, description.UniqueId, "Image.pdf", checkFileName));
+ dataAttachments.Add(new WS.Attachment(results, headerIdDirectory, $"{logistics.JobID}_{logistics.MID}_{logistics.DateTimeFromSequence:yyyyMMddHHmmssffff}_Item-{i + 1}", "Image.pdf", checkFileName));
else
{
- checkFileName = Path.Combine(matchDirectory, $"{sourceFileNameNoExt}_{description.Slot.Replace('*', 's')}_data.pdf");
+ checkFileName = Path.Combine(matchDirectory, $"{sourceFileNameNoExt}_s{description.Slot}_data.pdf");
if (File.Exists(checkFileName))
- dataAttachments.Add(new WS.Attachment(subGroupId, headerId, headerIdDirectory, description.UniqueId, "Image.pdf", checkFileName));
+ dataAttachments.Add(new WS.Attachment(results, headerIdDirectory, $"{logistics.JobID}_{logistics.MID}_{logistics.DateTimeFromSequence:yyyyMMddHHmmssffff}_Item-{i + 1}", "Image.pdf", checkFileName));
}
}
if (dataAttachments.Count == 0 || dataAttachments.Count != descriptions.Count)
@ -18,7 +18,7 @@ public class FileRead : Shared.FileRead, IFileRead
|
||||
private readonly string _OpenInsightMetrologyViewerAPI;
|
||||
private readonly string _OpenInsightMetrologyViewerFileShare;
|
||||
|
||||
public FileRead(ISMTP smtp, Dictionary<string, string> fileParameter, string cellInstanceName, int? connectionCount, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList<ModelObjectParameterDefinition> modelObjectParameters, string equipmentDictionaryName, Dictionary<string, List<long>> dummyRuns, Dictionary<long, List<string>> staticRuns, bool useCyclicalForDescription, bool isEAFHosted) :
|
||||
public FileRead(ISMTP smtp, Dictionary<string, string> fileParameter, string cellInstanceName, int? connectionCount, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList<ModelObjectParameterDefinition> modelObjectParameters, string equipmentDictionaryName, Dictionary<string, List<long>> dummyRuns, Dictionary<long, List<Shared.Metrology.WS.Results>> staticRuns, bool useCyclicalForDescription, bool isEAFHosted) :
|
||||
base(new Description(), false, smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null)
|
||||
{
|
||||
_MinFileLength = 10;
|
||||
@ -137,30 +137,25 @@ public class FileRead : Shared.FileRead, IFileRead
|
||||
|
||||
private void PostOpenInsightMetrologyViewerAttachments(List<pcl.Description> descriptions)
|
||||
{
|
||||
string? json;
|
||||
string? subGroupId;
|
||||
Shared.Metrology.WS.Results? results;
|
||||
string jobIdDirectory = Path.Combine(Path.GetDirectoryName(_FileConnectorConfiguration.AlternateTargetFolder) ?? throw new Exception(), _Logistics.JobID);
|
||||
if (!Directory.Exists(jobIdDirectory))
|
||||
_ = Directory.CreateDirectory(jobIdDirectory);
|
||||
string[] matchDirectories = GetInProcessDirectory(jobIdDirectory);
|
||||
if (!_StaticRuns.TryGetValue(_Logistics.Sequence, out List<string>? values))
|
||||
(json, subGroupId) = (null, null);
|
||||
if (!_StaticRuns.TryGetValue(_Logistics.Sequence, out List<Shared.Metrology.WS.Results>? wsResults))
|
||||
results = null;
|
||||
else
|
||||
{
|
||||
if (values.Count != 1)
|
||||
throw new Exception($"{nameof(_StaticRuns)} {values.Count} != 1 {_Logistics.Sequence}!");
|
||||
string[] segments = values[0].Split(new string[] { "|" }, StringSplitOptions.None);
|
||||
json = segments[0];
|
||||
subGroupId = segments.Length > 1 ? segments[1] : null;
|
||||
lock (_StaticRuns)
|
||||
_ = _StaticRuns.Remove(_Logistics.Sequence);
|
||||
if (wsResults is null || wsResults.Count != 1)
|
||||
throw new NullReferenceException($"{nameof(wsResults)} {wsResults?.Count} != 1 {_Logistics.Sequence}!");
|
||||
results = wsResults[0];
|
||||
}
|
||||
int weekOfYear = _Calendar.GetWeekOfYear(_Logistics.DateTimeFromSequence, CalendarWeekRule.FirstDay, DayOfWeek.Sunday);
|
||||
long headerId = !_IsEAFHosted ? -1 : OpenInsightMetrologyViewer.WSRequest.GetHeaderId(this, _Logistics, _OpenInsightMetrologyViewerAPI, _OpenInsightMetrologyViewerFileShare, weekOfYear, json, descriptions);
|
||||
long headerId = !_IsEAFHosted ? -1 : OpenInsightMetrologyViewer.WSRequest.GetHeaderId(this, _Logistics, _OpenInsightMetrologyViewerAPI, _OpenInsightMetrologyViewerFileShare, weekOfYear, results, descriptions);
|
||||
string? headerIdDirectory = GetHeaderIdDirectory(headerId);
|
||||
if (string.IsNullOrEmpty(headerIdDirectory))
|
||||
throw new Exception($"Didn't find header id directory <{headerId}>");
|
||||
OpenInsightMetrologyViewer.WSRequest.PostOpenInsightMetrologyViewerAttachments(this, _Logistics, _OpenInsightMetrologyViewerAPI, descriptions, matchDirectories[0], subGroupId, headerId, headerIdDirectory);
|
||||
OpenInsightMetrologyViewer.WSRequest.PostOpenInsightMetrologyViewerAttachments(this, _Logistics, _OpenInsightMetrologyViewerAPI, descriptions, matchDirectories[0], results, headerIdDirectory);
|
||||
}
|
||||
|
||||
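With `_StaticRuns` now typed as `Dictionary<long, List<Shared.Metrology.WS.Results>>`, the first connection can stash the inbound-API response and this connection can reuse it instead of re-splitting a `json|subGroupId` string. A hedged sketch of the hand-off (variable names are illustrative, not taken from the repository):

```csharp
// Producer side (hypothetical): remember the API response for this run's sequence.
lock (staticRuns)
{
    if (!staticRuns.TryGetValue(logistics.Sequence, out List<WS.Results>? list))
    {
        list = new List<WS.Results>();
        staticRuns.Add(logistics.Sequence, list);
    }
    list.Add(wsResults);
}

// Consumer side: exactly one cached result is expected per sequence.
WS.Results? results = staticRuns.TryGetValue(logistics.Sequence, out List<WS.Results>? cached) && cached.Count == 1
    ? cached[0]
    : null;
```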
private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
@ -168,15 +163,15 @@ public class FileRead : Shared.FileRead, IFileRead
if (dateTime == DateTime.MinValue)
throw new ArgumentNullException(nameof(dateTime));
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
Tuple<string, string[], string[]> pdsf = ProcessDataStandardFormat.GetLogisticsColumnsAndBody(reportFullPath);
_Logistics = new Logistics(reportFullPath, pdsf.Item1);
ProcessDataStandardFormat processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath);
_Logistics = new Logistics(reportFullPath, processDataStandardFormat);
SetFileParameterLotIDToLogisticsMID();
JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(pdsf);
JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(processDataStandardFormat);
List<pcl.Description> descriptions = pcl.ProcessData.GetDescriptions(jsonElements);
Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
PostOpenInsightMetrologyViewerAttachments(descriptions);
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(pdsf.Item1, tests, jsonElements, new List<FileInfo>());
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics[0]), tests, jsonElements, new List<FileInfo>());
return results;
}

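The same substitution recurs in every handler touched by this commit: the old `Tuple<string, string[], string[]>` from `GetLogisticsColumnsAndBody` is replaced by the `ProcessDataStandardFormat` object. A hedged sketch of the shared pattern (member names are the ones introduced by this change):

```csharp
// Hedged sketch of the extraction flow after the refactor.
ProcessDataStandardFormat processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath);
_Logistics = new Logistics(reportFullPath, processDataStandardFormat);   // LOGISTICS_* lines now come from processDataStandardFormat.Logistics
JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(processDataStandardFormat); // body rows become JSON objects keyed by column name
List<pcl.Description> descriptions = pcl.ProcessData.GetDescriptions(jsonElements);
```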
@ -17,7 +17,7 @@ public class FileRead : Shared.FileRead, IFileRead
|
||||
private readonly string _JobIdParentDirectory;
|
||||
private readonly string _JobIdProcessParentDirectory;
|
||||
|
||||
public FileRead(ISMTP smtp, Dictionary<string, string> fileParameter, string cellInstanceName, int? connectionCount, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList<ModelObjectParameterDefinition> modelObjectParameters, string equipmentDictionaryName, Dictionary<string, List<long>> dummyRuns, Dictionary<long, List<string>> staticRuns, bool useCyclicalForDescription, bool isEAFHosted) :
|
||||
public FileRead(ISMTP smtp, Dictionary<string, string> fileParameter, string cellInstanceName, int? connectionCount, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList<ModelObjectParameterDefinition> modelObjectParameters, string equipmentDictionaryName, Dictionary<string, List<long>> dummyRuns, Dictionary<long, List<Shared.Metrology.WS.Results>> staticRuns, bool useCyclicalForDescription, bool isEAFHosted) :
|
||||
base(new Description(), false, smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null)
|
||||
{
|
||||
_MinFileLength = 10;
|
||||
@ -129,23 +129,50 @@ public class FileRead : Shared.FileRead, IFileRead
|
||||
string destinationJobIdDirectory = Path.Combine(_JobIdProcessParentDirectory, _Logistics.JobID, directoryName);
|
||||
string sequenceDirectory = Path.Combine(destinationJobIdDirectory, logisticsSequence);
|
||||
string jsonFileName = Path.Combine(sequenceDirectory, $"{Path.GetFileNameWithoutExtension(reportFullPath)}.json");
|
||||
MoveMatchingFile(jobIdDirectory, matchDirectories[0]);
|
||||
Directory.Move(matchDirectories[0], destinationJobIdDirectory);
|
||||
if (!Directory.Exists(sequenceDirectory))
|
||||
_ = Directory.CreateDirectory(sequenceDirectory);
|
||||
File.Copy(reportFullPath, Path.Combine(sequenceDirectory, Path.GetFileName(reportFullPath)), overwrite: true);
|
||||
File.WriteAllText(jsonFileName, json);
|
||||
lock (_StaticRuns)
|
||||
_ = _StaticRuns.Remove(_Logistics.Sequence);
|
||||
}
|
||||
|
||||
private static void MoveMatchingFile(string jobIdDirectory, string matchDirectory)
|
||||
{
|
||||
string checkFile;
|
||||
string jobIdDirectoryFileName;
|
||||
string matchDirectoryFileName;
|
||||
string[] jobIdDirectoryFiles = Directory.GetFiles(jobIdDirectory, "*", SearchOption.TopDirectoryOnly);
|
||||
string[] matchDirectoryFiles = Directory.GetFiles(matchDirectory, "*", SearchOption.TopDirectoryOnly);
|
||||
foreach (string jobIdDirectoryFile in jobIdDirectoryFiles)
|
||||
{
|
||||
jobIdDirectoryFileName = Path.GetFileName(jobIdDirectoryFile);
|
||||
foreach (string matchDirectoryFile in matchDirectoryFiles)
|
||||
{
|
||||
matchDirectoryFileName = Path.GetFileName(matchDirectoryFile);
|
||||
if (jobIdDirectoryFileName.StartsWith(matchDirectoryFileName))
|
||||
{
|
||||
checkFile = Path.Combine(matchDirectory, jobIdDirectoryFileName);
|
||||
if (File.Exists(checkFile))
|
||||
continue;
|
||||
File.Move(jobIdDirectoryFile, checkFile);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
|
||||
{
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
|
||||
Tuple<string, string[], string[]> pdsf = ProcessDataStandardFormat.GetLogisticsColumnsAndBody(reportFullPath);
|
||||
_Logistics = new Logistics(reportFullPath, pdsf.Item1);
|
||||
ProcessDataStandardFormat processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath);
|
||||
_Logistics = new Logistics(reportFullPath, processDataStandardFormat);
|
||||
SetFileParameterLotIDToLogisticsMID();
|
||||
JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(pdsf);
|
||||
JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(processDataStandardFormat);
|
||||
List<pcl.Description> descriptions = pcl.ProcessData.GetDescriptions(jsonElements);
|
||||
Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
|
||||
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(pdsf.Item1, tests, jsonElements, new List<FileInfo>());
|
||||
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics[0]), tests, jsonElements, new List<FileInfo>());
|
||||
if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
|
||||
DirectoryMove(reportFullPath, dateTime, descriptions);
|
||||
else if (!_IsEAFHosted)
|
||||
|
@ -14,7 +14,7 @@ namespace Adaptation.FileHandlers.SPaCe;
|
||||
public class FileRead : Shared.FileRead, IFileRead
|
||||
{
|
||||
|
||||
public FileRead(ISMTP smtp, Dictionary<string, string> fileParameter, string cellInstanceName, int? connectionCount, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList<ModelObjectParameterDefinition> modelObjectParameters, string equipmentDictionaryName, Dictionary<string, List<long>> dummyRuns, Dictionary<long, List<string>> staticRuns, bool useCyclicalForDescription, bool isEAFHosted) :
|
||||
public FileRead(ISMTP smtp, Dictionary<string, string> fileParameter, string cellInstanceName, int? connectionCount, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList<ModelObjectParameterDefinition> modelObjectParameters, string equipmentDictionaryName, Dictionary<string, List<long>> dummyRuns, Dictionary<long, List<Shared.Metrology.WS.Results>> staticRuns, bool useCyclicalForDescription, bool isEAFHosted) :
|
||||
base(new Description(), false, smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null)
|
||||
{
|
||||
_MinFileLength = 10;
|
||||
@ -117,15 +117,15 @@ public class FileRead : Shared.FileRead, IFileRead
|
||||
private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
|
||||
{
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
|
||||
Tuple<string, string[], string[]> pdsf = ProcessDataStandardFormat.GetLogisticsColumnsAndBody(reportFullPath);
|
||||
_Logistics = new Logistics(reportFullPath, pdsf.Item1);
|
||||
ProcessDataStandardFormat processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath);
|
||||
_Logistics = new Logistics(reportFullPath, processDataStandardFormat);
|
||||
SetFileParameterLotIDToLogisticsMID();
|
||||
JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(pdsf);
|
||||
JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(processDataStandardFormat);
|
||||
List<Shared.Properties.IDescription> descriptions = GetDuplicatorDescriptions(jsonElements);
|
||||
Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
|
||||
if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
|
||||
FileCopy(reportFullPath, dateTime, descriptions);
|
||||
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(pdsf.Item1, tests, jsonElements, new List<FileInfo>());
|
||||
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics[0]), tests, jsonElements, new List<FileInfo>());
|
||||
return results;
|
||||
}
|
||||
|
||||
|
@ -4,6 +4,7 @@ using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Diagnostics;
using System.IO;
using System.Linq;

namespace Adaptation.FileHandlers.pcl;

@ -31,7 +32,35 @@ internal class Convert
return result;
}

internal static ReadOnlyDictionary<string, string> PDF(Logistics logistics, string ghostPCLFileName, List<FileInfo> fileInfoCollection)
private static Dictionary<string, string> PortableDocumentFormatSplit(string pdfTextStripperFileName, string sourcePath, string sourceFileNamePdf)
{
Dictionary<string, string> results = new();
ProcessStartInfo processStartInfo = new(pdfTextStripperFileName, $"s \"{sourceFileNamePdf}\"")
{
UseShellExecute = false,
RedirectStandardError = true,
RedirectStandardOutput = true,
};
Process process = Process.Start(processStartInfo);
_ = process.WaitForExit(30000);
string text;
string checkFile;
string[] pdfFiles = Directory.GetFiles(sourcePath, "*.pdf", SearchOption.TopDirectoryOnly);
string[] textFiles = Directory.GetFiles(sourcePath, "*.txt", SearchOption.TopDirectoryOnly);
foreach (string pdfFile in pdfFiles)
{
if (pdfFile == sourceFileNamePdf)
continue;
checkFile = Path.ChangeExtension(pdfFile, ".txt");
if (!textFiles.Contains(checkFile))
continue;
text = File.ReadAllText(checkFile);
results.Add(pdfFile, text);
}
return results;
}

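`PortableDocumentFormatSplit` is the new fallback used further down in `PDF` when the embedded PDFBox call throws `MissingMethodException`: it shells out to the standalone PDF-Text-Stripper executable and pairs every page PDF in the directory with its `.txt` sidecar. A hedged sketch of the fallback wiring, simplified from the actual method bodies (`SplitWithPdfBox` is a hypothetical wrapper around the PDFBox loop shown below):

```csharp
// Hedged sketch: prefer the in-process splitter, fall back to the external tool on MissingMethodException.
Dictionary<string, string> pages = new();
try
{
    pages = SplitWithPdfBox(sourceFileNamePdf); // hypothetical wrapper around the org.apache.pdfbox Splitter loop
}
catch (MissingMethodException)
{
    if (pages.Count == 0)
        pages = PortableDocumentFormatSplit(pdfTextStripperFileName, sourcePath, sourceFileNamePdf);
}
```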
internal static ReadOnlyDictionary<string, string> PDF(Logistics logistics, string ghostPCLFileName, string pdfTextStripperFileName, List<FileInfo> fileInfoCollection)
|
||||
{
|
||||
Dictionary<string, string> results = new();
|
||||
object item;
|
||||
@ -58,51 +87,59 @@ internal class Convert
|
||||
}
|
||||
if (results.Count == 0)
|
||||
{
|
||||
java.io.File file = new(sourceFileNamePdf);
|
||||
org.apache.pdfbox.util.Splitter splitter = new();
|
||||
org.apache.pdfbox.pdmodel.PDDocument pdDocument = org.apache.pdfbox.pdmodel.PDDocument.load(file);
|
||||
java.util.List list = splitter.split(pdDocument);
|
||||
java.util.ListIterator iterator = list.listIterator();
|
||||
org.apache.pdfbox.util.PDFTextStripper dataStripper = new();
|
||||
for (short i = 1; i < short.MaxValue; i++)
|
||||
try
|
||||
{
|
||||
if (!iterator.hasNext())
|
||||
break;
|
||||
item = iterator.next();
|
||||
pagePDFFile = string.Concat(sourcePath, @"\", sourceFileNameWithoutExtension, "_", i, ".pdf");
|
||||
pageTextFile = Path.ChangeExtension(pagePDFFile, ".txt");
|
||||
if (File.Exists(pageTextFile))
|
||||
java.io.File file = new(sourceFileNamePdf);
|
||||
org.apache.pdfbox.util.Splitter splitter = new();
|
||||
org.apache.pdfbox.pdmodel.PDDocument pdDocument = org.apache.pdfbox.pdmodel.PDDocument.load(file);
|
||||
java.util.List list = splitter.split(pdDocument);
|
||||
java.util.ListIterator iterator = list.listIterator();
|
||||
org.apache.pdfbox.util.PDFTextStripper dataStripper = new();
|
||||
for (short i = 1; i < short.MaxValue; i++)
|
||||
{
|
||||
pageText = File.ReadAllText(pageTextFile);
|
||||
sourceFiles.Add(pageTextFile);
|
||||
if (item is not org.apache.pdfbox.pdmodel.PDDocument pd)
|
||||
continue;
|
||||
pd.close();
|
||||
if (!iterator.hasNext())
|
||||
break;
|
||||
item = iterator.next();
|
||||
pagePDFFile = string.Concat(sourcePath, @"\", sourceFileNameWithoutExtension, "_", i, ".pdf");
|
||||
pageTextFile = Path.ChangeExtension(pagePDFFile, ".txt");
|
||||
if (File.Exists(pageTextFile))
|
||||
{
|
||||
pageText = File.ReadAllText(pageTextFile);
|
||||
sourceFiles.Add(pageTextFile);
|
||||
if (item is not org.apache.pdfbox.pdmodel.PDDocument pd)
|
||||
continue;
|
||||
pd.close();
|
||||
}
|
||||
else if (File.Exists(pagePDFFile))
|
||||
{
|
||||
org.apache.pdfbox.pdmodel.PDDocument document = org.apache.pdfbox.pdmodel.PDDocument.load(pagePDFFile);
|
||||
pageText = dataStripper.getText(document);
|
||||
document.close();
|
||||
sourceFiles.Add(pagePDFFile);
|
||||
if (item is not org.apache.pdfbox.pdmodel.PDDocument pd)
|
||||
continue;
|
||||
pd.close();
|
||||
}
|
||||
else
|
||||
{
|
||||
if (item is not org.apache.pdfbox.pdmodel.PDDocument pd)
|
||||
continue;
|
||||
pageText = dataStripper.getText(pd);
|
||||
pd.save(pagePDFFile);
|
||||
sourceFiles.Add(pagePDFFile);
|
||||
pd.close();
|
||||
File.WriteAllText(pageTextFile, pageText);
|
||||
sourceFiles.Add(pageTextFile);
|
||||
}
|
||||
results.Add(pagePDFFile, pageText);
|
||||
}
|
||||
else if (File.Exists(pagePDFFile))
|
||||
{
|
||||
org.apache.pdfbox.pdmodel.PDDocument document = org.apache.pdfbox.pdmodel.PDDocument.load(pagePDFFile);
|
||||
pageText = dataStripper.getText(document);
|
||||
document.close();
|
||||
sourceFiles.Add(pagePDFFile);
|
||||
if (item is not org.apache.pdfbox.pdmodel.PDDocument pd)
|
||||
continue;
|
||||
pd.close();
|
||||
}
|
||||
else
|
||||
{
|
||||
if (item is not org.apache.pdfbox.pdmodel.PDDocument pd)
|
||||
continue;
|
||||
pageText = dataStripper.getText(pd);
|
||||
pd.save(pagePDFFile);
|
||||
sourceFiles.Add(pagePDFFile);
|
||||
pd.close();
|
||||
File.WriteAllText(pageTextFile, pageText);
|
||||
sourceFiles.Add(pageTextFile);
|
||||
}
|
||||
results.Add(pagePDFFile, pageText);
|
||||
pdDocument.close();
|
||||
}
|
||||
catch (MissingMethodException)
|
||||
{
|
||||
if (results.Count == 0)
|
||||
results = PortableDocumentFormatSplit(pdfTextStripperFileName, sourcePath, sourceFileNamePdf);
|
||||
}
|
||||
pdDocument.close();
|
||||
}
|
||||
foreach (string sourceFile in sourceFiles)
|
||||
fileInfoCollection.Add(new FileInfo(sourceFile));
|
||||
|
@ -16,8 +16,9 @@ public class FileRead : Shared.FileRead, IFileRead
|
||||
|
||||
private long? _TickOffset;
|
||||
private readonly string _GhostPCLFileName;
|
||||
private readonly string _PDFTextStripperFileName;
|
||||
|
||||
public FileRead(ISMTP smtp, Dictionary<string, string> fileParameter, string cellInstanceName, int? connectionCount, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList<ModelObjectParameterDefinition> modelObjectParameters, string equipmentDictionaryName, Dictionary<string, List<long>> dummyRuns, Dictionary<long, List<string>> staticRuns, bool useCyclicalForDescription, bool isEAFHosted) :
|
||||
public FileRead(ISMTP smtp, Dictionary<string, string> fileParameter, string cellInstanceName, int? connectionCount, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList<ModelObjectParameterDefinition> modelObjectParameters, string equipmentDictionaryName, Dictionary<string, List<long>> dummyRuns, Dictionary<long, List<Shared.Metrology.WS.Results>> staticRuns, bool useCyclicalForDescription, bool isEAFHosted) :
|
||||
base(new Description(), true, smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null)
|
||||
{
|
||||
_MinFileLength = 15;
|
||||
@ -32,6 +33,9 @@ public class FileRead : Shared.FileRead, IFileRead
|
||||
_GhostPCLFileName = Path.Combine(AppContext.BaseDirectory, "gpcl6win64.exe");
|
||||
if (!File.Exists(_GhostPCLFileName))
|
||||
throw new Exception("Ghost PCL FileName doesn't Exist!");
|
||||
_PDFTextStripperFileName = Path.Combine(AppContext.BaseDirectory, "PDF-Text-Stripper.exe");
|
||||
if (!File.Exists(_PDFTextStripperFileName))
|
||||
throw new Exception("PDF-Text-Stripper FileName doesn't Exist!");
|
||||
if (_IsEAFHosted)
|
||||
NestExistingFiles(_FileConnectorConfiguration);
|
||||
}
|
||||
@ -113,7 +117,7 @@ public class FileRead : Shared.FileRead, IFileRead
|
||||
results.Item4.Add(_Logistics.FileInfo);
|
||||
else
|
||||
{
|
||||
ReadOnlyDictionary<string, string> pages = Convert.PDF(_Logistics, _GhostPCLFileName, results.Item4);
|
||||
ReadOnlyDictionary<string, string> pages = Convert.PDF(_Logistics, _GhostPCLFileName, _PDFTextStripperFileName, results.Item4);
|
||||
Run? run = Run.Get(_Logistics, results.Item4, pages);
|
||||
if (run is null)
|
||||
throw new Exception(string.Concat("A) No Data - ", dateTime.Ticks));
|
||||
@ -137,7 +141,7 @@ public class FileRead : Shared.FileRead, IFileRead
|
||||
if (iProcessData.Details.Count > 0)
|
||||
results = iProcessData.GetResults(this, _Logistics, results.Item4);
|
||||
else
|
||||
results = new(string.Concat("C) No Data - ", dateTime.Ticks), Array.Empty<Test>(), Array.Empty<JsonElement>(), results.Item4);
|
||||
results = new(string.Concat("LOGISTICS_1 - C) No Data - ", dateTime.Ticks), Array.Empty<Test>(), Array.Empty<JsonElement>(), results.Item4);
|
||||
}
|
||||
}
|
||||
return results;
|
||||
|
@ -245,17 +245,17 @@ public class Header
{
segmentsB = segment.Split(split, StringSplitOptions.None);
segmentsC = segmentsB[0].Split(' ');
waferSummary = new(id: segmentsC[0].Trim(),
lPDCount: segmentsC[1].Trim(),
lPDCM2: segmentsC[2].Trim(),
areaCount: segmentsC[3].Trim(),
areaTotal: segmentsC[4].Trim(),
scratchCount: segmentsC[5].Trim(),
scratchTotal: segmentsC[6].Trim(),
sumOfDefects: segmentsC[7].Trim(),
hazeRegion: segmentsC[8].Trim(),
hazeAverage: segmentsC[9].Trim(),
grade: segmentsC[10].Trim());
waferSummary = new(id: segmentsC.Length < 1 ? string.Empty : segmentsC[0].Trim(),
lPDCount: segmentsC.Length < 2 ? string.Empty : segmentsC[1].Trim(),
lPDCM2: segmentsC.Length < 3 ? string.Empty : segmentsC[2].Trim(),
areaCount: segmentsC.Length < 4 ? string.Empty : segmentsC[3].Trim(),
areaTotal: segmentsC.Length < 5 ? string.Empty : segmentsC[4].Trim(),
scratchCount: segmentsC.Length < 6 ? string.Empty : segmentsC[5].Trim(),
scratchTotal: segmentsC.Length < 7 ? string.Empty : segmentsC[6].Trim(),
sumOfDefects: segmentsC.Length < 8 ? string.Empty : segmentsC[7].Trim(),
hazeRegion: segmentsC.Length < 9 ? string.Empty : segmentsC[8].Trim(),
hazeAverage: segmentsC.Length < 10 ? string.Empty : segmentsC[9].Trim(),
grade: segmentsC.Length < 11 ? string.Empty : segmentsC[10].Trim());
collection.Add(waferSummary);
}
ScanPast(text, i, constant.Min);

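The guarded version repeats the same `Length < n ? string.Empty : segmentsC[n - 1].Trim()` shape eleven times; an equivalent, hedged alternative would be a tiny local helper (not what the commit does, shown only to illustrate the bounds check):

```csharp
// Equivalent defensive lookup: an out-of-range index falls back to string.Empty.
static string At(string[] parts, int index) =>
    index < parts.Length ? parts[index].Trim() : string.Empty;

string grade = At(segmentsC, 10); // same value as segmentsC.Length < 11 ? string.Empty : segmentsC[10].Trim()
```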
@ -106,9 +106,13 @@ public class ProcessData : IProcessData
|
||||
if (description.Test != (int)tests[i])
|
||||
throw new Exception();
|
||||
}
|
||||
FileInfo fileInfo = new($"{logistics.ReportFullPath}.descriptions.json");
|
||||
List<Description> fileReadDescriptions = (from l in descriptions select (Description)l).ToList();
|
||||
string json = JsonSerializer.Serialize(fileReadDescriptions, fileReadDescriptions.GetType());
|
||||
JsonElement[] jsonElements = JsonSerializer.Deserialize<JsonElement[]>(json);
|
||||
File.WriteAllText(fileInfo.FullName, json);
|
||||
File.SetLastWriteTime(fileInfo.FullName, logistics.DateTimeFromSequence);
|
||||
fileInfoCollection.Add(fileInfo);
|
||||
JsonElement[] jsonElements = JsonSerializer.Deserialize<JsonElement[]>(json) ?? throw new Exception();
|
||||
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(logistics.Logistics1[0], tests.ToArray(), jsonElements, fileInfoCollection);
|
||||
return results;
|
||||
}
|
||||
|
@ -26,7 +26,7 @@ internal class Run
|
||||
|
||||
private static void WriteJson(Logistics logistics, List<FileInfo> fileInfoCollection, Run result)
|
||||
{
|
||||
FileInfo fileInfo = new($"{logistics.ReportFullPath}.json");
|
||||
FileInfo fileInfo = new($"{logistics.ReportFullPath}.run.json");
|
||||
string json = JsonSerializer.Serialize(result, RunSourceGenerationContext.Default.Run);
|
||||
File.WriteAllText(fileInfo.FullName, json);
|
||||
File.SetLastWriteTime(fileInfo.FullName, logistics.DateTimeFromSequence);
|
||||
|
@ -69,7 +69,7 @@
|
||||
<PackageReference Include="System.Text.Json" Version="9.0.0" />
|
||||
</ItemGroup>
|
||||
<ItemGroup>
|
||||
<PackageReference Include="Infineon.Mesa.PDF.Text.Stripper" Version="4.8.0.1"><NoWarn>NU1701</NoWarn></PackageReference>
|
||||
<PackageReference Include="Infineon.Mesa.PDF.Text.Stripper" Version="4.8.0.2"><NoWarn>NU1701</NoWarn></PackageReference>
|
||||
</ItemGroup>
|
||||
<ItemGroup>
|
||||
<PackageReference Include="Infineon.Yoda.DotNetCore" Version="5.4.3" />
|
||||
|
@ -25,7 +25,7 @@ stages:
|
||||
nugetSource: "https://artifactory.intra.infineon.com/artifactory/api/nuget/ngt-fi-package-main-vir/"
|
||||
|
||||
jobs:
|
||||
- job: SetupEnviroment
|
||||
- job: SetupEnvironment
|
||||
steps:
|
||||
- script: |
|
||||
echo $(Build.BuildId)
|
||||
@ -51,7 +51,7 @@ stages:
|
||||
|
||||
- job: BuildDebug
|
||||
dependsOn:
|
||||
- SetupEnviroment
|
||||
- SetupEnvironment
|
||||
steps:
|
||||
- script: |
|
||||
set configuration=Debug
|
||||
@ -66,7 +66,7 @@ stages:
|
||||
|
||||
- job: BuildRelease
|
||||
dependsOn:
|
||||
- SetupEnviroment
|
||||
- SetupEnvironment
|
||||
steps:
|
||||
- script: |
|
||||
set configuration=Release
|
||||
@ -98,7 +98,7 @@ stages:
|
||||
|
||||
- job: TestDebug
|
||||
dependsOn:
|
||||
- SetupEnviroment
|
||||
- SetupEnvironment
|
||||
- BuildDebug
|
||||
- BuildRelease
|
||||
steps:
|
||||
@ -168,7 +168,7 @@ stages:
|
||||
nugetSource: "https://artifactory.intra.infineon.com/artifactory/api/nuget/ngt-fi-package-main-vir/"
|
||||
|
||||
jobs:
|
||||
- job: SetupEnviroment
|
||||
- job: SetupEnvironment
|
||||
steps:
|
||||
- script: |
|
||||
echo $(Build.BuildId)
|
||||
@ -194,7 +194,7 @@ stages:
|
||||
|
||||
- job: BuildRelease
|
||||
dependsOn:
|
||||
- SetupEnviroment
|
||||
- SetupEnvironment
|
||||
steps:
|
||||
- script: |
|
||||
set configuration=Release
|
||||
@ -226,7 +226,7 @@ stages:
|
||||
|
||||
- job: TestRelease
|
||||
dependsOn:
|
||||
- SetupEnviroment
|
||||
- SetupEnvironment
|
||||
- BuildRelease
|
||||
steps:
|
||||
- script: |
|
||||
|
File diff suppressed because it is too large
@ -35,6 +35,9 @@ public class Logistics : ILogistics
|
||||
public long Sequence => _Sequence;
|
||||
public double TotalSecondsSinceLastWriteTimeFromSequence => _TotalSecondsSinceLastWriteTimeFromSequence;
|
||||
|
||||
private static string DefaultMesEntity(DateTime dateTime) =>
|
||||
string.Concat(dateTime.Ticks, "_MES_ENTITY");
|
||||
|
||||
public Logistics(IFileRead fileRead)
|
||||
{
|
||||
DateTime dateTime = DateTime.Now;
|
||||
@ -84,13 +87,13 @@ public class Logistics : ILogistics
|
||||
_Logistics2 = new List<Logistics2>();
|
||||
}
|
||||
|
||||
public Logistics(string reportFullPath, string logistics)
|
||||
internal Logistics(string reportFullPath, ProcessDataStandardFormat processDataStandardFormat)
|
||||
{
|
||||
string key;
|
||||
DateTime dateTime;
|
||||
string[] segments;
|
||||
_FileInfo = new(reportFullPath);
|
||||
_Logistics1 = logistics.Split(new string[] { Environment.NewLine }, StringSplitOptions.RemoveEmptyEntries).ToList();
|
||||
_Logistics1 = processDataStandardFormat.Logistics.ToList();
|
||||
if (Logistics1.Count == 0 || !Logistics1[0].StartsWith("LOGISTICS_1"))
|
||||
{
|
||||
_NullData = null;
|
||||
@ -190,8 +193,6 @@ public class Logistics : ILogistics
|
||||
}
|
||||
}
|
||||
|
||||
private static string DefaultMesEntity(DateTime dateTime) => string.Concat(dateTime.Ticks, "_MES_ENTITY");
|
||||
|
||||
internal void Update(string mid, string processJobID)
|
||||
{
|
||||
_MID = mid;
|
||||
|
@ -6,23 +6,25 @@ public partial class WS
public class Attachment
{

public string SubGroupId { get; set; }
public long HeaderId { get; set; }
public string HeaderIdDirectory { get; set; }
public string UniqueId { get; set; }
public string DestinationFileName { get; set; }
public string SourceFileName { get; set; }
public string AttachmentId { get; set; }
#nullable enable

public Attachment(string subGroupId, long headerId, string headerIdDirectory, string uniqueId, string destinationFileName, string sourceFileName)
public long HeaderId { get; set; }
public string UniqueId { get; set; }
public string SubGroupId { get; set; }
public string AttachmentId { get; set; }
public string SourceFileName { get; set; }
public string HeaderIdDirectory { get; set; }
public string DestinationFileName { get; set; }

public Attachment(Results? results, string headerIdDirectory, string uniqueId, string destinationFileName, string sourceFileName)
{
SubGroupId = subGroupId;
HeaderId = headerId;
HeaderIdDirectory = headerIdDirectory;
UniqueId = uniqueId;
DestinationFileName = destinationFileName;
SourceFileName = sourceFileName;
HeaderIdDirectory = headerIdDirectory;
DestinationFileName = destinationFileName;
AttachmentId = System.Guid.NewGuid().ToString();
HeaderId = results?.HeaderId is null ? -1 : results.HeaderId.Value;
SubGroupId = results?.SubgroupId is null ? string.Empty : results.SubgroupId.Value.ToString();
}

}

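The `Attachment` constructor now derives `HeaderId` and `SubGroupId` from the inbound-API `Results` and generates its own `AttachmentId`. A hedged construction sketch (the `results`, path, and logistics values are placeholders for what the callers above pass in):

```csharp
// Hedged sketch: build a header attachment from the cached API response.
WS.Attachment attachment = new(
    results,                       // WS.Results? - null yields HeaderId = -1 and an empty SubGroupId
    headerIdDirectory,             // header-id share directory resolved earlier
    $"{logistics.JobID}_{logistics.MID}_{logistics.DateTimeFromSequence:yyyyMMddHHmmssffff}",
    "Data.pdf",                    // destination file name
    checkFileName);                // source file on disk
WS.AttachFiles(openInsightMetrologyViewerAPI, headerAttachments: new() { attachment }, dataAttachments: null);
```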
@ -1,27 +1,75 @@
using System.Collections.Generic;
using System;
using System.Collections.Generic;
using System.Text;
using System.Text.Json;
using System.Text.Json.Serialization;

namespace Adaptation.Shared.Metrology;

public partial class WS
{
// this class represents the response from the Inbound API endpoint

public class Results
{
// true or false if data was written to the database
public bool Success { get; set; }

// if true, contains ID of the Header record in the database
public long HeaderID { get; set; }
#nullable enable

// if false, this collection will contain a list of errors
public List<string> Errors { get; set; }
[JsonConstructor]
public Results(List<string>? errors,
long? headerId,
long? subgroupId,
bool? success,
List<string>? warnings)
{
Errors = errors;
Success = success;
HeaderId = headerId;
Warnings = warnings;
SubgroupId = subgroupId;
}

// this collection will contain a list of warnings, they will not prevent data from being saved
public List<string> Warnings { get; set; }
[JsonPropertyName("errors")] public List<string>? Errors { get; set; }
[JsonPropertyName("headerID")] public long? HeaderId { get; set; }
[JsonPropertyName("subgroupId")] public long? SubgroupId { get; set; }
[JsonPropertyName("success")] public bool? Success { get; set; }
[JsonPropertyName("warnings")] public List<string>? Warnings { get; set; }

public override string ToString()
{
string result = JsonSerializer.Serialize(this, ResultsSourceGenerationContext.Default.Results);
return result;
}

internal static Results Get(Results results, long? subgroupId) =>
new(results.Errors, results.HeaderId, subgroupId, results.Success, results.Warnings);

internal static Results Get(string resultsJson, Exception e)
{
Results results;
Exception? exception = e;
List<string> errors = new();
StringBuilder stringBuilder = new();
while (exception is not null)
{
_ = stringBuilder.AppendLine(exception.Message);
exception = exception.InnerException;
}
errors.Add(resultsJson);
errors.Add(stringBuilder.ToString());
results = new(errors: errors,
headerId: null,
subgroupId: null,
success: false,
warnings: new());
return results;
}

// this is just a helper function to make displaying the results easier
public override string ToString() => JsonSerializer.Serialize(this, GetType());
}

}

[JsonSourceGenerationOptions(WriteIndented = true)]
[JsonSerializable(typeof(WS.Results))]
internal partial class ResultsSourceGenerationContext : JsonSerializerContext
{
}

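`Results` is now a nullable, `[JsonPropertyName]`-annotated record of the inbound-API response with a source-generated serializer context. A hedged sketch of round-tripping a response body (the JSON payload is illustrative):

```csharp
// Hedged sketch: parse an inbound-API response and inspect it through the new nullable members.
string responseJson = "{\"success\":true,\"headerID\":12345,\"errors\":[],\"warnings\":[]}"; // illustrative payload
WS.Results? results = JsonSerializer.Deserialize(responseJson, ResultsSourceGenerationContext.Default.Results);
if (results?.Success is true && results.HeaderId is long headerId)
    Console.WriteLine($"Header {headerId} stored");
else
    Console.WriteLine(results?.ToString() ?? "No response");
```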
@ -10,9 +10,11 @@ namespace Adaptation.Shared.Metrology;
public partial class WS
{

#nullable enable

public static (string, Results) SendData(string url, long sequence, string directory, object payload, int timeoutSeconds = 120)
{
Results results = new();
Results? wsResults = null;
string resultsJson = string.Empty;
try
{
@ -30,29 +32,20 @@ public partial class WS
};
HttpResponseMessage httpResponseMessage = httpClient.SendAsync(httpRequestMessage, HttpCompletionOption.ResponseContentRead).Result;
resultsJson = httpResponseMessage.Content.ReadAsStringAsync().Result;
results = JsonSerializer.Deserialize<Results>(resultsJson, new JsonSerializerOptions { PropertyNameCaseInsensitive = true });
string checkDirectory = Path.Combine(directory, $"-{results.HeaderID}");
wsResults = JsonSerializer.Deserialize(resultsJson, ResultsSourceGenerationContext.Default.Results);
if (wsResults is null)
throw new NullReferenceException(nameof(wsResults));
string checkDirectory = Path.Combine(directory, $"-{wsResults.HeaderId}");
if (!Directory.Exists(checkDirectory))
_ = Directory.CreateDirectory(checkDirectory);
File.WriteAllText(Path.Combine(checkDirectory, $"{sequence}.json"), json);
}
if (!results.Success)
results.Errors.Add(results.ToString());
if (wsResults.Success is null || !wsResults.Success.Value)
wsResults.Errors?.Add(wsResults.ToString());
}
catch (Exception e)
{
Exception exception = e;
StringBuilder stringBuilder = new();
while (exception is not null)
{
_ = stringBuilder.AppendLine(exception.Message);
exception = exception.InnerException;
}
results.Errors ??= new List<string>();
results.Errors.Add(resultsJson);
results.Errors.Add(stringBuilder.ToString());
}
return new(resultsJson, results);
{ wsResults ??= Results.Get(resultsJson, e); }
return new(resultsJson, wsResults);
}

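`SendData` now always returns the raw response body together with a non-null `Results` (falling back to `Results.Get(resultsJson, e)` on failure). A hedged caller sketch mirroring the pattern used in `GetHeaderId` above (the `openInsightMetrologyViewerAPI`, `logistics`, `directory`, and `wsRequest` values are assumed from the surrounding handler):

```csharp
// Hedged sketch: post a header payload and branch on the nullable Success flag.
(string resultsJson, WS.Results wsResults) = WS.SendData(openInsightMetrologyViewerAPI, logistics.Sequence, directory, wsRequest);
if (wsResults.Success is null || !wsResults.Success.Value)
    throw new Exception(wsResults.ToString()); // errors and the raw body are folded into Results
long headerId = wsResults.HeaderId ?? -1;
```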
public static void AttachFile(string url, Attachment attachment, int timeoutSeconds = 60)
|
||||
@ -69,16 +62,20 @@ public partial class WS
|
||||
}
|
||||
}
|
||||
|
||||
public static void AttachFiles(string url, List<Attachment> headerAttachments = null, List<Attachment> dataAttachments = null)
|
||||
public static void AttachFiles(string url, List<Attachment>? headerAttachments = null, List<Attachment>? dataAttachments = null)
|
||||
{
|
||||
string directory;
|
||||
try
|
||||
{
|
||||
string? directoryName;
|
||||
if (headerAttachments is not null)
|
||||
{
|
||||
foreach (Attachment attachment in headerAttachments)
|
||||
{
|
||||
directory = Path.Combine(Path.GetDirectoryName(attachment.HeaderIdDirectory), attachment.AttachmentId) ?? throw new Exception();
|
||||
directoryName = Path.GetDirectoryName(attachment.HeaderIdDirectory);
|
||||
if (string.IsNullOrEmpty(directoryName))
|
||||
continue;
|
||||
directory = Path.Combine(directoryName, attachment.AttachmentId) ?? throw new Exception();
|
||||
if (!Directory.Exists(directory))
|
||||
_ = Directory.CreateDirectory(directory);
|
||||
File.Copy(attachment.SourceFileName, Path.Combine(directory, attachment.DestinationFileName), overwrite: true);
|
||||
@ -88,7 +85,10 @@ public partial class WS
|
||||
{
|
||||
foreach (Attachment attachment in dataAttachments)
|
||||
{
|
||||
directory = Path.Combine(Path.GetDirectoryName(attachment.HeaderIdDirectory.Replace("Header", "Data")), attachment.AttachmentId) ?? throw new Exception();
|
||||
directoryName = Path.GetDirectoryName(attachment.HeaderIdDirectory.Replace("Header", "Data"));
|
||||
if (string.IsNullOrEmpty(directoryName))
|
||||
continue;
|
||||
directory = Path.Combine(directoryName, attachment.AttachmentId) ?? throw new Exception();
|
||||
if (!Directory.Exists(directory))
|
||||
_ = Directory.CreateDirectory(directory);
|
||||
File.Copy(attachment.SourceFileName, Path.Combine(directory, attachment.DestinationFileName), overwrite: true);
|
||||
@ -108,7 +108,7 @@ public partial class WS
|
||||
}
|
||||
catch (Exception e)
|
||||
{
|
||||
Exception exception = e;
|
||||
Exception? exception = e;
|
||||
StringBuilder stringBuilder = new();
|
||||
while (exception is not null)
|
||||
{
|
||||
|
@ -1,18 +1,22 @@
|
||||
using Adaptation.Shared.Methods;
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Collections.ObjectModel;
|
||||
using System.Globalization;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Text;
|
||||
using System.Text.Json;
|
||||
using System.Text.Json.Serialization;
|
||||
|
||||
namespace Adaptation.Shared;
|
||||
|
||||
public class ProcessDataStandardFormat
|
||||
#nullable enable
|
||||
|
||||
internal class ProcessDataStandardFormat
|
||||
{
|
||||
|
||||
public enum SearchFor
|
||||
internal enum SearchFor
|
||||
{
|
||||
EquipmentIntegration = 1,
|
||||
BusinessIntegration = 2,
|
||||
@ -20,322 +24,47 @@ public class ProcessDataStandardFormat
|
||||
Archive = 4
|
||||
}
|
||||
|
||||
public static string GetPDSFText(IFileRead fileRead, Logistics logistics, JsonElement[] jsonElements, string logisticsText)
|
||||
internal long? Sequence { get; private set; }
|
||||
internal ReadOnlyCollection<string> Body { get; private set; }
|
||||
internal ReadOnlyCollection<string> Footer { get; private set; }
|
||||
internal ReadOnlyCollection<string> Header { get; private set; }
|
||||
internal ReadOnlyCollection<string> Columns { get; private set; }
|
||||
internal ProcessDataStandardFormat? InputPDSF { get; private set; }
|
||||
internal ReadOnlyCollection<string> Logistics { get; private set; }
|
||||
|
||||
internal ProcessDataStandardFormat(ReadOnlyCollection<string> body,
|
||||
ReadOnlyCollection<string> columns,
|
||||
ReadOnlyCollection<string> footer,
|
||||
ReadOnlyCollection<string> header,
|
||||
ProcessDataStandardFormat? inputPDSF,
|
||||
ReadOnlyCollection<string> logistics,
|
||||
long? sequence)
|
||||
{
|
||||
string result;
|
||||
if (jsonElements.Length == 0)
|
||||
result = string.Empty;
|
||||
else
|
||||
{
|
||||
int columns = 0;
|
||||
List<string> lines;
|
||||
string endOffset = "E#######T";
|
||||
string dataOffset = "D#######T";
|
||||
string headerOffset = "H#######T";
|
||||
string format = "MM/dd/yyyy HH:mm:ss";
|
||||
StringBuilder stringBuilder = new();
|
||||
lines = new string[] { "HEADER_TAG\tHEADER_VALUE", "FORMAT\t2.00", "NUMBER_PASSES\t0001", string.Concat("HEADER_OFFSET\t", headerOffset), string.Concat("DATA_OFFSET\t", dataOffset), string.Concat("END_OFFSET\t", endOffset) }.ToList();
|
||||
_ = stringBuilder.Append("\"Time\"").Append('\t');
|
||||
_ = stringBuilder.Append("\"A_LOGISTICS\"").Append('\t');
|
||||
_ = stringBuilder.Append("\"B_LOGISTICS\"").Append('\t');
|
||||
for (int i = 0; i < jsonElements.Length;)
|
||||
{
|
||||
foreach (JsonProperty jsonProperty in jsonElements[0].EnumerateObject())
|
||||
{
|
||||
columns += 1;
|
||||
_ = stringBuilder.Append('"').Append(jsonProperty.Name).Append('"').Append('\t');
|
||||
}
|
||||
break;
|
||||
}
|
||||
_ = stringBuilder.Remove(stringBuilder.Length - 1, 1);
|
||||
lines.Add(stringBuilder.ToString());
|
||||
for (int i = 0; i < jsonElements.Length; i++)
|
||||
{
|
||||
_ = stringBuilder.Clear();
|
||||
_ = stringBuilder.Append("0.1").Append('\t');
|
||||
_ = stringBuilder.Append('1').Append('\t');
|
||||
_ = stringBuilder.Append('2').Append('\t');
|
||||
foreach (JsonProperty jsonProperty in jsonElements[i].EnumerateObject())
|
||||
_ = stringBuilder.Append(jsonProperty.Value).Append('\t');
|
||||
_ = stringBuilder.Remove(stringBuilder.Length - 1, 1);
|
||||
lines.Add(stringBuilder.ToString());
|
||||
}
|
||||
lines.Add(string.Concat("NUM_DATA_ROWS ", jsonElements.Length.ToString().PadLeft(9, '0')));
|
||||
lines.Add(string.Concat("NUM_DATA_COLUMNS ", (columns + 3).ToString().PadLeft(9, '0')));
|
||||
lines.Add("DELIMITER ;");
|
||||
lines.Add(string.Concat("START_TIME_FORMAT ", format));
|
||||
lines.Add(string.Concat("START_TIME ", logistics.DateTimeFromSequence.ToString(format))); //12/26/2019 15:22:44
|
||||
lines.Add(string.Concat("LOGISTICS_COLUMN", '\t', "A_LOGISTICS"));
|
||||
lines.Add(string.Concat("LOGISTICS_COLUMN", '\t', "B_LOGISTICS"));
|
||||
if (!string.IsNullOrEmpty(logisticsText))
|
||||
lines.Add(logisticsText);
|
||||
else
|
||||
{
|
||||
lines.Add(string.Concat("LOGISTICS_1", '\t', "A_CHAMBER=;A_INFO=", fileRead.EventName, ";A_INFO2=", fileRead.EquipmentType, ";A_JOBID=", fileRead.CellInstanceName, ";A_MES_ENTITY=", fileRead.MesEntity, ";A_MID=", logistics.MID, ";A_NULL_DATA=", fileRead.NullData, ";A_PPID=NO_PPID;A_PROCESS_JOBID=", logistics.ProcessJobID, ";A_PRODUCT=;A_SEQUENCE=", logistics.Sequence, ";A_WAFER_ID=;"));
|
||||
lines.Add(string.Concat("LOGISTICS_2", '\t', "B_CHAMBER=;B_INFO=", fileRead.EventName, ";B_INFO2=", fileRead.EquipmentType, ";B_JOBID=", fileRead.CellInstanceName, ";B_MES_ENTITY=", fileRead.MesEntity, ";B_MID=", logistics.MID, ";B_NULL_DATA=", fileRead.NullData, ";B_PPID=NO_PPID;B_PROCESS_JOBID=", logistics.ProcessJobID, ";B_PRODUCT=;B_SEQUENCE=", logistics.Sequence, ";B_WAFER_ID=;"));
|
||||
lines.Add("END_HEADER");
|
||||
}
|
||||
_ = stringBuilder.Clear();
|
||||
foreach (string line in lines)
|
||||
_ = stringBuilder.AppendLine(line);
|
||||
result = stringBuilder.ToString();
|
||||
result = result.Replace(headerOffset, result.IndexOf("NUM_DATA_ROWS").ToString().PadLeft(9, '0')).
|
||||
Replace(dataOffset, result.IndexOf('"').ToString().PadLeft(9, '0')).
|
||||
Replace(endOffset, result.Length.ToString().PadLeft(9, '0'));
|
||||
}
|
||||
return result;
|
||||
Body = body;
|
||||
Columns = columns;
|
||||
Footer = footer;
|
||||
Header = header;
|
||||
InputPDSF = inputPDSF;
|
||||
Logistics = logistics;
|
||||
Sequence = sequence;
|
||||
}
|
||||
|
||||
public static Tuple<string, string[], string[]> GetLogisticsColumnsAndBody(string reportFullPath, string[] lines = null)
|
||||
{
|
||||
string segment;
|
||||
List<string> body = new();
|
||||
StringBuilder logistics = new();
|
||||
lines ??= File.ReadAllLines(reportFullPath);
|
||||
string[] segments;
|
||||
if (lines.Length < 7)
|
||||
segments = Array.Empty<string>();
|
||||
else
|
||||
segments = lines[6].Trim().Split('\t');
|
||||
List<string> columns = new();
|
||||
for (int c = 0; c < segments.Length; c++)
|
||||
{
|
||||
segment = segments[c].Substring(1, segments[c].Length - 2);
|
||||
if (!columns.Contains(segment))
|
||||
columns.Add(segment);
|
||||
else
|
||||
{
|
||||
for (short i = 1; i < short.MaxValue; i++)
|
||||
{
|
||||
segment = string.Concat(segment, "_", i);
|
||||
if (!columns.Contains(segment))
|
||||
{
|
||||
columns.Add(segment);
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
bool lookForLogistics = false;
|
||||
for (int r = 7; r < lines.Length; r++)
|
||||
{
|
||||
if (lines[r].StartsWith("NUM_DATA_ROWS"))
|
||||
lookForLogistics = true;
|
||||
if (!lookForLogistics)
|
||||
{
|
||||
body.Add(lines[r]);
|
||||
continue;
|
||||
}
|
||||
if (lines[r].StartsWith("LOGISTICS_1"))
|
||||
{
|
||||
for (int i = r; i < lines.Length; i++)
|
||||
{
|
||||
if (lines[r].StartsWith("END_HEADER"))
|
||||
break;
|
||||
_ = logistics.AppendLine(lines[i]);
|
||||
}
|
||||
break;
|
||||
}
|
||||
}
|
||||
return new Tuple<string, string[], string[]>(logistics.ToString(), columns.ToArray(), body.ToArray());
|
||||
}
|
||||
internal static string EquipmentIntegration(bool addSpaces = true, char separator = ' ') =>
|
||||
GetString(SearchFor.EquipmentIntegration, addSpaces, separator);
|
||||
|
||||
public static JsonElement[] GetArray(Tuple<string, string[], string[]> pdsf, bool lookForNumbers = false)
|
||||
{
|
||||
JsonElement[] results;
|
||||
string logistics = pdsf.Item1;
|
||||
string[] columns = pdsf.Item2;
|
||||
string[] bodyLines = pdsf.Item3;
|
||||
if (bodyLines.Length == 0 || !bodyLines[0].Contains('\t'))
|
||||
results = JsonSerializer.Deserialize<JsonElement[]>("[]");
|
||||
else
|
||||
{
|
||||
string value;
|
||||
string[] segments;
|
||||
StringBuilder stringBuilder = new();
|
||||
foreach (string bodyLine in bodyLines)
|
||||
{
|
||||
_ = stringBuilder.Append('{');
|
||||
segments = bodyLine.Trim().Split('\t');
|
||||
if (!lookForNumbers)
|
||||
{
|
||||
for (int c = 1; c < segments.Length; c++)
|
||||
{
|
||||
value = segments[c].Replace("\"", "\\\"").Replace("\\", "\\\\");
|
||||
_ = stringBuilder.Append('"').Append(columns[c]).Append("\":\"").Append(value).Append("\",");
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
for (int c = 1; c < segments.Length; c++)
|
||||
{
|
||||
value = segments[c].Replace("\"", "\\\"").Replace("\\", "\\\\");
|
||||
if (string.IsNullOrEmpty(value))
|
||||
_ = stringBuilder.Append('"').Append(columns[c]).Append("\":").Append(value).Append("null,");
|
||||
else if (value.All(char.IsDigit))
|
||||
_ = stringBuilder.Append('"').Append(columns[c]).Append("\":").Append(value).Append(',');
|
||||
else
|
||||
_ = stringBuilder.Append('"').Append(columns[c]).Append("\":\"").Append(value).Append("\",");
|
||||
}
|
||||
}
|
||||
_ = stringBuilder.Remove(stringBuilder.Length - 1, 1);
|
||||
_ = stringBuilder.AppendLine("},");
|
||||
}
|
||||
_ = stringBuilder.Remove(stringBuilder.Length - 3, 3);
|
||||
results = JsonSerializer.Deserialize<JsonElement[]>(string.Concat("[", stringBuilder, "]"));
|
||||
}
|
||||
return results;
|
||||
}
|
||||
internal static string BusinessIntegration(bool addSpaces = true, char separator = ' ') =>
|
||||
GetString(SearchFor.BusinessIntegration, addSpaces, separator);
|
||||
|
||||
public static Dictionary<string, List<string>> GetDictionary(Tuple<string, string[], string[]> pdsf)
|
||||
{
|
||||
Dictionary<string, List<string>> results = new();
|
||||
string[] segments;
|
||||
string[] columns = pdsf.Item2;
|
||||
string[] bodyLines = pdsf.Item3;
|
||||
foreach (string column in columns)
|
||||
results.Add(column, new List<string>());
|
||||
foreach (string bodyLine in bodyLines)
|
||||
{
|
||||
segments = bodyLine.Split('\t');
|
||||
for (int c = 1; c < segments.Length; c++)
|
||||
{
|
||||
if (c >= columns.Length)
|
||||
continue;
|
||||
results[columns[c]].Add(segments[c]);
|
||||
}
|
||||
}
|
||||
return results;
|
||||
}
|
||||
internal static string SystemExport(bool addSpaces = true, char separator = ' ') =>
|
||||
GetString(SearchFor.SystemExport, addSpaces, separator);
|
||||
|
||||
public static Tuple<string, Dictionary<Test, Dictionary<string, List<string>>>> GetTestDictionary(Tuple<string, string[], string[]> pdsf)
|
||||
{
|
||||
Dictionary<Test, Dictionary<string, List<string>>> results = new();
|
||||
List<string> collection;
|
||||
string testColumn = nameof(Test);
|
||||
Dictionary<string, List<string>> keyValuePairs = GetDictionary(pdsf);
|
||||
if (!keyValuePairs.TryGetValue(testColumn, out collection))
|
||||
throw new Exception();
|
||||
int min;
|
||||
int max;
|
||||
Test testKey;
|
||||
List<string> vs;
|
||||
string columnKey;
|
||||
Dictionary<Test, List<int>> tests = new();
|
||||
for (int i = 0; i < collection.Count; i++)
|
||||
{
|
||||
if (Enum.TryParse(collection[i], out Test test))
|
||||
{
|
||||
if (!results.ContainsKey(test))
|
||||
{
|
||||
tests.Add(test, new List<int>());
|
||||
results.Add(test, new Dictionary<string, List<string>>());
|
||||
}
|
||||
tests[test].Add(i);
|
||||
}
|
||||
}
|
||||
foreach (KeyValuePair<Test, List<int>> testKeyValuePair in tests)
|
||||
{
|
||||
testKey = testKeyValuePair.Key;
|
||||
min = testKeyValuePair.Value.Min();
|
||||
max = testKeyValuePair.Value.Max() + 1;
|
||||
foreach (KeyValuePair<string, List<string>> keyValuePair in keyValuePairs)
|
||||
results[testKey].Add(keyValuePair.Key, new List<string>());
|
||||
foreach (KeyValuePair<string, List<string>> keyValuePair in keyValuePairs)
|
||||
{
|
||||
vs = keyValuePair.Value;
|
||||
columnKey = keyValuePair.Key;
|
||||
for (int i = min; i < max; i++)
|
||||
{
|
||||
if (vs.Count > i)
|
||||
results[testKey][columnKey].Add(vs[i]);
|
||||
else
|
||||
results[testKey][columnKey].Add(string.Empty);
|
||||
}
|
||||
}
|
||||
}
|
||||
return new Tuple<string, Dictionary<Test, Dictionary<string, List<string>>>>(pdsf.Item1, results);
|
||||
}
|
||||
internal static string Archive(bool addSpaces = true, char separator = ' ') =>
|
||||
GetString(SearchFor.Archive, addSpaces, separator);
|
||||
|
||||
private static string GetString(SearchFor searchFor, bool addSpaces, char separator = ' ')
|
||||
{
|
||||
if (!addSpaces)
|
||||
return string.Concat(((int)searchFor).ToString().PadLeft(2, '0'), searchFor);
|
||||
else
|
||||
return string.Concat(((int)searchFor).ToString().PadLeft(2, '0'), separator, searchFor.ToString().Replace("In", string.Concat(separator, "In")).Replace("Ex", string.Concat(separator, "Ex")));
|
||||
}
|
||||
internal static ProcessDataStandardFormat GetEmpty() =>
|
||||
new(new(Array.Empty<string>()), new(Array.Empty<string>()), new(Array.Empty<string>()), new(Array.Empty<string>()), null, new(new string[] { "LOGISTICS_1" }), null);
|
||||
|
||||
public static string EquipmentIntegration(bool addSpaces = true, char separator = ' ') => GetString(SearchFor.EquipmentIntegration, addSpaces, separator);
|
||||
|
||||
public static string BusinessIntegration(bool addSpaces = true, char separator = ' ') => GetString(SearchFor.BusinessIntegration, addSpaces, separator);
|
||||
|
||||
public static string SystemExport(bool addSpaces = true, char separator = ' ') => GetString(SearchFor.SystemExport, addSpaces, separator);
|
||||
|
||||
public static string Archive(bool addSpaces = true, char separator = ' ') => GetString(SearchFor.Archive, addSpaces, separator);
|
||||
|
||||
public static string GetLines(Logistics logistics, Properties.IScopeInfo scopeInfo, List<string> names, Dictionary<string, List<string>> keyValuePairs, string dateFormat, string timeFormat, List<string> pairedParameterNames, bool useDateTimeFromSequence = true, string format = "", List<string> ignoreParameterNames = null)
|
||||
{
|
||||
StringBuilder result = new();
|
||||
ignoreParameterNames ??= new List<string>();
|
||||
if (useDateTimeFromSequence && !string.IsNullOrEmpty(format))
|
||||
throw new Exception();
|
||||
else if (!useDateTimeFromSequence && string.IsNullOrEmpty(format))
|
||||
throw new Exception();
|
||||
string nullData;
|
||||
const string columnDate = "Date";
|
||||
const string columnTime = "Time";
|
||||
const string firstDuplicate = "_1";
|
||||
_ = result.AppendLine(scopeInfo.Header);
|
||||
StringBuilder line = new();
|
||||
if (logistics.NullData is null)
|
||||
nullData = string.Empty;
|
||||
else
|
||||
nullData = logistics.NullData.ToString();
|
||||
int count = (from l in keyValuePairs select l.Value.Count).Min();
|
||||
for (int r = 0; r < count; r++)
|
||||
{
|
||||
_ = line.Clear();
|
||||
_ = line.Append('!');
|
||||
foreach (KeyValuePair<string, List<string>> keyValuePair in keyValuePairs)
|
||||
{
|
||||
if (!names.Contains(keyValuePair.Key))
|
||||
continue;
|
||||
if (ignoreParameterNames.Contains(keyValuePair.Key))
|
||||
continue;
|
||||
if (pairedParameterNames.Contains(keyValuePair.Key))
|
||||
{
|
||||
if (string.IsNullOrEmpty(keyValuePair.Value[r]) || keyValuePair.Value[r] == nullData)
|
||||
continue;
|
||||
else
|
||||
_ = result.Append(line).Append(keyValuePair.Key).Append(';').AppendLine(keyValuePair.Value[r]);
|
||||
}
|
||||
else
|
||||
{
|
||||
if (useDateTimeFromSequence && keyValuePair.Key == columnDate)
|
||||
_ = line.Append(logistics.DateTimeFromSequence.ToString(dateFormat));
|
||||
else if (useDateTimeFromSequence && keyValuePair.Key == columnTime)
|
||||
_ = line.Append(logistics.DateTimeFromSequence.ToString(timeFormat));
|
||||
else if (!useDateTimeFromSequence && keyValuePair.Key == columnDate && keyValuePair.Value[r].Length == format.Length)
|
||||
_ = line.Append(DateTime.ParseExact(keyValuePair.Value[r], format, CultureInfo.InvariantCulture).ToString(dateFormat));
|
||||
else if (!useDateTimeFromSequence && keyValuePair.Key == columnTime && keyValuePairs.ContainsKey(string.Concat(keyValuePair.Key, firstDuplicate)) && keyValuePairs[string.Concat(keyValuePair.Key, firstDuplicate)][r].Length == format.Length)
|
||||
_ = line.Append(DateTime.ParseExact(keyValuePairs[string.Concat(keyValuePair.Key, firstDuplicate)][r], format, CultureInfo.InvariantCulture).ToString(timeFormat));
|
||||
else if (string.IsNullOrEmpty(keyValuePair.Value[r]) || keyValuePair.Value[r] == nullData)
|
||||
_ = line.Append(nullData);
|
||||
else
|
||||
_ = line.Append(keyValuePair.Value[r]);
|
||||
_ = line.Append(';');
|
||||
}
|
||||
}
|
||||
if (pairedParameterNames.Count == 0)
|
||||
{
|
||||
_ = line.Remove(line.Length - 1, 1);
|
||||
_ = result.AppendLine(line.ToString());
|
||||
}
|
||||
}
|
||||
return result.ToString();
|
||||
}
|
||||
|
||||
public static List<string> PDSFToFixedWidth(string reportFullPath)
|
||||
internal static List<string> PDSFToFixedWidth(string reportFullPath)
|
||||
{
|
||||
List<string> results = new();
|
||||
if (!File.Exists(reportFullPath))
|
||||
@ -404,4 +133,590 @@ public class ProcessDataStandardFormat
|
||||
return results;
|
||||
}
|
||||
|
||||
internal static ProcessDataStandardFormat GetProcessDataStandardFormat(string reportFullPath, string[]? lines = null, int columnsLine = 6)
|
||||
{
|
||||
ProcessDataStandardFormat result;
|
||||
string segment;
|
||||
string[] segments;
|
||||
bool addToFooter = false;
|
||||
List<string> body = new();
|
||||
List<string> header = new();
|
||||
List<string> footer = new();
|
||||
List<string> columns = new();
|
||||
ReadOnlyCollection<string> logistics;
|
||||
lines ??= File.ReadAllLines(reportFullPath);
|
||||
if (lines.Length < columnsLine + 1)
|
||||
segments = Array.Empty<string>();
|
||||
else
|
||||
{
|
||||
segments = lines[columnsLine].Trim().Split('\t');
|
||||
for (int i = 0; i < columnsLine; i++)
|
||||
header.Add(lines[i]);
|
||||
}
|
||||
for (int c = 0; c < segments.Length; c++)
|
||||
{
|
||||
segment = segments[c].Substring(1, segments[c].Length - 2);
|
||||
if (!columns.Contains(segment))
|
||||
columns.Add(segment);
|
||||
else
|
||||
{
|
||||
for (short i = 1; i < short.MaxValue; i++)
|
||||
{
|
||||
segment = string.Concat(segment, "_", i);
|
||||
if (!columns.Contains(segment))
|
||||
{
|
||||
columns.Add(segment);
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
for (int r = columnsLine + 1; r < lines.Length; r++)
|
||||
{
|
||||
if (lines[r].StartsWith("NUM_DATA_ROWS"))
|
||||
addToFooter = true;
|
||||
if (!addToFooter)
|
||||
body.Add(lines[r]);
|
||||
else
|
||||
{
|
||||
footer.Add(lines[r]);
|
||||
if (lines[r].StartsWith("END_HEADER"))
|
||||
break;
|
||||
}
|
||||
}
|
||||
string? linesOne = lines.Length > 0 && body.Count == 0 && columns.Count == 0 ? lines[1] : null;
|
||||
logistics = GetLogistics(footer, linesOne: linesOne);
|
||||
result = new(body: body.AsReadOnly(),
|
||||
columns: columns.AsReadOnly(),
|
||||
footer: footer.AsReadOnly(),
|
||||
header: header.AsReadOnly(),
|
||||
inputPDSF: null,
|
||||
logistics: logistics,
|
||||
sequence: null);
|
||||
return result;
|
||||
}
|
||||
|
||||
private static ReadOnlyCollection<string> GetLogistics(List<string> footer, string? linesOne)
|
||||
{
|
||||
List<string> results = new();
|
||||
bool foundLogistics1 = false;
|
||||
foreach (string line in footer)
|
||||
{
|
||||
if (line.StartsWith("END_HEADER"))
|
||||
break;
|
||||
if (line.StartsWith("LOGISTICS_1"))
|
||||
foundLogistics1 = true;
|
||||
if (foundLogistics1 && line.StartsWith("LOGISTICS_"))
|
||||
results.Add(line);
|
||||
}
|
||||
if (!string.IsNullOrEmpty(linesOne) && results.Count == 0)
|
||||
results.Add(linesOne);
|
||||
return results.AsReadOnly();
|
||||
}
|
||||
|
||||
internal static ProcessDataStandardFormat? GetProcessDataStandardFormat(string reportFullPath, ProcessDataStandardFormatMapping pdsfMapping)
|
||||
{
|
||||
ProcessDataStandardFormat? result;
|
||||
const int columnsLine = 6;
|
||||
FileInfo fileInfo = new(reportFullPath);
|
||||
ProcessDataStandardFormat processDataStandardFormat = GetProcessDataStandardFormat(fileInfo.LastWriteTime, pdsfMapping.NewColumnNames.Count, columnsLine, fileInfo.FullName, lines: null);
|
||||
JsonElement[]? jsonElements = GetArray(pdsfMapping.NewColumnNames.Count, processDataStandardFormat, lookForNumbers: false);
|
||||
if (jsonElements is null || jsonElements.Length == 0 || pdsfMapping.OldColumnNames.Count != pdsfMapping.ColumnIndices.Count)
|
||||
result = null;
|
||||
else
|
||||
{
|
||||
result = GetProcessDataStandardFormat(pdsfMapping, jsonElements, processDataStandardFormat);
|
||||
if (result.Sequence is null || result.Columns.Count == 0 || result.Body.Count == 0 || result.Logistics.Count == 0)
|
||||
result = null;
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
private static ProcessDataStandardFormat GetProcessDataStandardFormat(DateTime lastWriteTime, int expectedColumns, int columnsLine, string path, string[]? lines)
|
||||
{
|
||||
ProcessDataStandardFormat result;
|
||||
long sequence;
|
||||
string[] segments;
|
||||
bool addToFooter = false;
|
||||
List<string> body = new();
|
||||
List<string> header = new();
|
||||
List<string> footer = new();
|
||||
ReadOnlyCollection<string> logistics;
|
||||
lines ??= File.ReadAllLines(path);
|
||||
if (lines.Length <= columnsLine)
|
||||
segments = Array.Empty<string>();
|
||||
else
|
||||
{
|
||||
segments = lines[columnsLine].Split('\t');
|
||||
if (segments.Length != expectedColumns)
|
||||
segments = Array.Empty<string>();
|
||||
for (int i = 0; i < columnsLine; i++)
|
||||
header.Add(lines[i]);
|
||||
}
|
||||
string[] columns = segments.Select(l => l.Trim('"')).ToArray();
|
||||
for (int r = columnsLine + 1; r < lines.Length; r++)
|
||||
{
|
||||
if (lines[r].StartsWith("NUM_DATA_ROWS"))
|
||||
addToFooter = true;
|
||||
if (!addToFooter)
|
||||
body.Add(lines[r]);
|
||||
else
|
||||
{
|
||||
footer.Add(lines[r]);
|
||||
if (lines[r].StartsWith("END_HEADER"))
|
||||
break;
|
||||
}
|
||||
}
|
||||
logistics = GetLogistics(footer, linesOne: null);
|
||||
if (logistics.Count == 0)
|
||||
sequence = lastWriteTime.Ticks;
|
||||
else
|
||||
{
|
||||
segments = logistics[0].Split(new string[] { "SEQUENCE=" }, StringSplitOptions.None);
|
||||
sequence = segments.Length < 2 || !long.TryParse(segments[1].Split(';')[0], out long s) ? lastWriteTime.Ticks : s;
|
||||
}
|
||||
result = new(body: body.AsReadOnly(),
|
||||
columns: new(columns),
|
||||
footer: footer.AsReadOnly(),
|
||||
header: header.AsReadOnly(),
|
||||
inputPDSF: null,
|
||||
logistics: logistics,
|
||||
sequence: sequence);
|
||||
return result;
|
||||
}
|
||||
|
||||
private static JsonElement[]? GetArray(int expectedColumns, ProcessDataStandardFormat processDataStandardFormat, bool lookForNumbers)
|
||||
{
|
||||
JsonElement[]? results;
|
||||
if (processDataStandardFormat.Body.Count == 0 || !processDataStandardFormat.Body[0].Contains('\t'))
|
||||
results = JsonSerializer.Deserialize("[]", JsonElementCollectionSourceGenerationContext.Default.JsonElementArray) ?? throw new Exception();
|
||||
else
|
||||
{
|
||||
string value;
|
||||
string[] segments;
|
||||
List<string> lines = new();
|
||||
StringBuilder stringBuilder = new();
|
||||
foreach (string bodyLine in processDataStandardFormat.Body)
|
||||
{
|
||||
_ = stringBuilder.Clear();
|
||||
_ = stringBuilder.Append('{');
|
||||
segments = bodyLine.Split('\t');
|
||||
if (segments.Length != expectedColumns)
|
||||
continue;
|
||||
if (!lookForNumbers)
|
||||
{
|
||||
for (int c = 0; c < segments.Length; c++)
|
||||
{
|
||||
value = segments[c].Replace("\"", "\\\"").Replace("\\", "\\\\");
|
||||
_ = stringBuilder.Append('"').Append(processDataStandardFormat.Columns[c]).Append("\":\"").Append(value).Append("\",");
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
for (int c = 0; c < segments.Length; c++)
|
||||
{
|
||||
value = segments[c].Replace("\"", "\\\"").Replace("\\", "\\\\");
|
||||
if (string.IsNullOrEmpty(value))
|
||||
_ = stringBuilder.Append('"').Append(processDataStandardFormat.Columns[c]).Append("\":").Append(value).Append("null,");
|
||||
else if (value.All(char.IsDigit))
|
||||
_ = stringBuilder.Append('"').Append(processDataStandardFormat.Columns[c]).Append("\":").Append(value).Append(',');
|
||||
else
|
||||
_ = stringBuilder.Append('"').Append(processDataStandardFormat.Columns[c]).Append("\":\"").Append(value).Append("\",");
|
||||
}
|
||||
}
|
||||
_ = stringBuilder.Remove(stringBuilder.Length - 1, 1);
|
||||
_ = stringBuilder.AppendLine("}");
|
||||
lines.Add(stringBuilder.ToString());
|
||||
}
|
||||
string json = $"[{string.Join(",", lines)}]";
|
||||
results = JsonSerializer.Deserialize(json, JsonElementCollectionSourceGenerationContext.Default.JsonElementArray);
|
||||
}
|
||||
return results;
|
||||
}
|
||||
|
||||
private static ProcessDataStandardFormat GetProcessDataStandardFormat(ProcessDataStandardFormatMapping processDataStandardFormatMapping, JsonElement[] jsonElements, ProcessDataStandardFormat processDataStandardFormat)
|
||||
{
|
||||
ProcessDataStandardFormat result;
|
||||
int column;
|
||||
string value;
|
||||
JsonProperty jsonProperty;
|
||||
List<string> values = new();
|
||||
List<string> results = new();
|
||||
JsonProperty[] jsonProperties;
|
||||
List<string> unknownColumns = new();
|
||||
for (int i = 0; i < jsonElements.Length; i++)
|
||||
{
|
||||
values.Clear();
|
||||
if (jsonElements[i].ValueKind != JsonValueKind.Object)
|
||||
{
|
||||
unknownColumns.Add(string.Empty);
|
||||
break;
|
||||
}
|
||||
jsonProperties = jsonElements[i].EnumerateObject().ToArray();
|
||||
if (jsonProperties.Length != processDataStandardFormatMapping.NewColumnNames.Count)
|
||||
continue;
|
||||
for (int c = 0; c < processDataStandardFormatMapping.ColumnIndices.Count; c++)
|
||||
{
|
||||
column = processDataStandardFormatMapping.ColumnIndices[c];
|
||||
if (column == -1)
|
||||
value = processDataStandardFormatMapping.OldColumnNames[c];
|
||||
else
|
||||
{
|
||||
jsonProperty = jsonProperties[column];
|
||||
value = jsonProperty.Value.ToString();
|
||||
}
|
||||
values.Add(value);
|
||||
}
|
||||
results.Add(string.Join("\t", values));
|
||||
}
|
||||
result = new(body: new(results),
|
||||
columns: processDataStandardFormatMapping.OldColumnNames,
|
||||
footer: processDataStandardFormat.Footer,
|
||||
header: processDataStandardFormat.Header,
|
||||
inputPDSF: processDataStandardFormat,
|
||||
logistics: processDataStandardFormat.Logistics,
|
||||
sequence: processDataStandardFormat.Sequence);
|
||||
return result;
|
||||
}
|
||||
|
||||
internal static void Write(string path, ProcessDataStandardFormat processDataStandardFormat, List<Metrology.WS.Results>? wsResults)
|
||||
{
|
||||
List<string> results = new();
|
||||
if (processDataStandardFormat.Sequence is null)
|
||||
throw new NullReferenceException(nameof(processDataStandardFormat.Sequence));
|
||||
string endOffset = "E#######T";
|
||||
string dataOffset = "D#######T";
|
||||
string headerOffset = "H#######T";
|
||||
string format = "MM/dd/yyyy HH:mm:ss";
|
||||
string startTime = new DateTime(processDataStandardFormat.Sequence.Value).ToString(format);
|
||||
results.Add("HEADER_TAG\tHEADER_VALUE");
|
||||
results.Add("FORMAT\t2.00");
|
||||
results.Add("NUMBER_PASSES\t0001");
|
||||
results.Add($"HEADER_OFFSET\t{headerOffset}");
|
||||
results.Add($"DATA_OFFSET\t{dataOffset}");
|
||||
results.Add($"END_OFFSET\t{endOffset}");
|
||||
results.Add($"\"{string.Join("\"\t\"", processDataStandardFormat.Columns)}\"");
|
||||
results.AddRange(processDataStandardFormat.Body);
|
||||
results.Add($"NUM_DATA_ROWS\t{processDataStandardFormat.Body.Count.ToString().PadLeft(9, '0')}");
|
||||
results.Add($"NUM_DATA_COLUMNS\t{processDataStandardFormat.Columns.Count.ToString().PadLeft(9, '0')}");
|
||||
results.Add("DELIMITER\t;");
|
||||
results.Add($"START_TIME_FORMAT\t{format}");
|
||||
results.Add($"START_TIME\t{startTime}");
|
||||
results.Add("LOGISTICS_COLUMN\tA_LOGISTICS");
|
||||
results.Add("LOGISTICS_COLUMN\tB_LOGISTICS");
|
||||
if (wsResults is null || wsResults.Count != 1)
|
||||
results.AddRange(processDataStandardFormat.Logistics);
|
||||
else
|
||||
{
|
||||
string[] segments;
|
||||
foreach (string logistics in processDataStandardFormat.Logistics)
|
||||
{
|
||||
segments = logistics.Split(new string[] { "\t" }, StringSplitOptions.None);
|
||||
if (segments.Length != 2 || string.IsNullOrEmpty(segments[1]))
|
||||
results.Add(logistics);
|
||||
else
|
||||
results.Add($"{segments[0]}\t{segments[1][0]}_HeaderId={wsResults[0].HeaderId};{segments[1][0]}_SubgroupId={wsResults[0].SubgroupId};{segments[1]}");
|
||||
}
|
||||
}
|
||||
results.Add("EOF");
|
||||
if (processDataStandardFormat.InputPDSF is not null)
|
||||
{
|
||||
List<char> hyphens = new();
|
||||
results.AddRange(processDataStandardFormat.InputPDSF.Header.Select(l => l.Replace('\t', '|')));
|
||||
results.Add(string.Empty);
|
||||
results.Add($"|{string.Join("|", processDataStandardFormat.InputPDSF.Columns)}|");
|
||||
for (int i = 0; i < processDataStandardFormat.InputPDSF.Columns.Count; i++)
|
||||
hyphens.Add('-');
|
||||
results.Add($"|{string.Join("|", hyphens)}|");
|
||||
results.AddRange(processDataStandardFormat.InputPDSF.Body.Select(l => l.Replace('\t', '|')));
|
||||
results.Add(string.Empty);
|
||||
results.AddRange(processDataStandardFormat.InputPDSF.Footer.Select(l => l.Replace('\t', '|')));
|
||||
}
|
||||
File.WriteAllText(path, string.Join(Environment.NewLine, results));
|
||||
}
|
||||
|
||||
internal static Dictionary<string, List<string>> GetDictionary(ProcessDataStandardFormat processDataStandardFormat)
|
||||
{
|
||||
Dictionary<string, List<string>> results = new();
|
||||
string[] segments;
|
||||
foreach (string column in processDataStandardFormat.Columns)
|
||||
results.Add(column, new List<string>());
|
||||
foreach (string bodyLine in processDataStandardFormat.Body)
|
||||
{
|
||||
segments = bodyLine.Split('\t');
|
||||
for (int c = 1; c < segments.Length; c++)
|
||||
{
|
||||
if (c >= processDataStandardFormat.Columns.Count)
|
||||
continue;
|
||||
results[processDataStandardFormat.Columns[c]].Add(segments[c]);
|
||||
}
|
||||
}
|
||||
return results;
|
||||
}
|
||||
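For orientation, a minimal sketch (not part of this diff) of the shape GetDictionary returns above: each column name maps to one value per body row, with column 0 skipped by the loop. The column names and values below are illustrative assumptions only.

// Illustrative sketch only: assumed column names and values, mirroring the
// column-name -> per-row-values shape produced by GetDictionary above.
using System.Collections.Generic;

internal static class PdsfDictionaryShapeExample
{
    internal static Dictionary<string, List<string>> Build() =>
        new()
        {
            { "Date", new List<string> { "03/20/2025", "03/20/2025" } },
            { "Recipe", new List<string> { "STD-6IN", "STD-6IN" } },
            { "Thickness", new List<string> { "1.23", "1.25" } },
        };
}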
|
||||
internal static JsonElement[] GetArray(ProcessDataStandardFormat processDataStandardFormat, bool lookForNumbers = false)
|
||||
{
|
||||
JsonElement[] results;
|
||||
if (processDataStandardFormat.Body.Count == 0 || !processDataStandardFormat.Body[0].Contains('\t'))
|
||||
results = JsonSerializer.Deserialize("[]", JsonElementCollectionSourceGenerationContext.Default.JsonElementArray) ?? throw new Exception();
|
||||
else
|
||||
{
|
||||
string value;
|
||||
string[] segments;
|
||||
List<string> lines = new();
|
||||
StringBuilder stringBuilder = new();
|
||||
foreach (string bodyLine in processDataStandardFormat.Body)
|
||||
{
|
||||
_ = stringBuilder.Clear();
|
||||
_ = stringBuilder.Append('{');
|
||||
segments = bodyLine.Trim().Split('\t');
|
||||
if (!lookForNumbers)
|
||||
{
|
||||
for (int c = 1; c < segments.Length; c++)
|
||||
{
|
||||
value = segments[c].Replace("\"", "\\\"").Replace("\\", "\\\\");
|
||||
_ = stringBuilder.Append('"').Append(processDataStandardFormat.Columns[c]).Append("\":\"").Append(value).Append("\",");
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
for (int c = 1; c < segments.Length; c++)
|
||||
{
|
||||
value = segments[c].Replace("\"", "\\\"").Replace("\\", "\\\\");
|
||||
if (string.IsNullOrEmpty(value))
|
||||
_ = stringBuilder.Append('"').Append(processDataStandardFormat.Columns[c]).Append("\":").Append(value).Append("null,");
|
||||
else if (value.All(char.IsDigit))
|
||||
_ = stringBuilder.Append('"').Append(processDataStandardFormat.Columns[c]).Append("\":").Append(value).Append(',');
|
||||
else
|
||||
_ = stringBuilder.Append('"').Append(processDataStandardFormat.Columns[c]).Append("\":\"").Append(value).Append("\",");
|
||||
}
|
||||
}
|
||||
_ = stringBuilder.Remove(stringBuilder.Length - 1, 1);
|
||||
_ = stringBuilder.AppendLine("}");
|
||||
lines.Add(stringBuilder.ToString());
|
||||
}
|
||||
string json = $"[{string.Join(",", lines)}]";
|
||||
results = JsonSerializer.Deserialize<JsonElement[]>(json) ?? throw new Exception();
|
||||
}
|
||||
return results;
|
||||
}
|
||||
|
||||
internal static string GetPDSFText(IFileRead fileRead, Logistics logistics, JsonElement[] jsonElements, string logisticsText)
|
||||
{
|
||||
string result;
|
||||
if (jsonElements.Length == 0)
|
||||
result = string.Empty;
|
||||
else
|
||||
{
|
||||
int columns = 0;
|
||||
List<string> lines;
|
||||
string endOffset = "E#######T";
|
||||
string dataOffset = "D#######T";
|
||||
string headerOffset = "H#######T";
|
||||
string format = "MM/dd/yyyy HH:mm:ss";
|
||||
StringBuilder stringBuilder = new();
|
||||
lines = new string[] { "HEADER_TAG\tHEADER_VALUE", "FORMAT\t2.00", "NUMBER_PASSES\t0001", string.Concat("HEADER_OFFSET\t", headerOffset), string.Concat("DATA_OFFSET\t", dataOffset), string.Concat("END_OFFSET\t", endOffset) }.ToList();
|
||||
_ = stringBuilder.Append("\"Time\"").Append('\t');
|
||||
_ = stringBuilder.Append("\"A_LOGISTICS\"").Append('\t');
|
||||
_ = stringBuilder.Append("\"B_LOGISTICS\"").Append('\t');
|
||||
for (int i = 0; i < jsonElements.Length;)
|
||||
{
|
||||
foreach (JsonProperty jsonProperty in jsonElements[0].EnumerateObject())
|
||||
{
|
||||
columns += 1;
|
||||
_ = stringBuilder.Append('"').Append(jsonProperty.Name).Append('"').Append('\t');
|
||||
}
|
||||
break;
|
||||
}
|
||||
_ = stringBuilder.Remove(stringBuilder.Length - 1, 1);
|
||||
lines.Add(stringBuilder.ToString());
|
||||
for (int i = 0; i < jsonElements.Length; i++)
|
||||
{
|
||||
_ = stringBuilder.Clear();
|
||||
_ = stringBuilder.Append("0.1").Append('\t');
|
||||
_ = stringBuilder.Append('1').Append('\t');
|
||||
_ = stringBuilder.Append('2').Append('\t');
|
||||
foreach (JsonProperty jsonProperty in jsonElements[i].EnumerateObject())
|
||||
_ = stringBuilder.Append(jsonProperty.Value).Append('\t');
|
||||
_ = stringBuilder.Remove(stringBuilder.Length - 1, 1);
|
||||
lines.Add(stringBuilder.ToString());
|
||||
}
|
||||
lines.Add(string.Concat("NUM_DATA_ROWS ", jsonElements.Length.ToString().PadLeft(9, '0')));
|
||||
lines.Add(string.Concat("NUM_DATA_COLUMNS ", (columns + 3).ToString().PadLeft(9, '0')));
|
||||
lines.Add("DELIMITER ;");
|
||||
lines.Add(string.Concat("START_TIME_FORMAT ", format));
|
||||
lines.Add(string.Concat("START_TIME ", logistics.DateTimeFromSequence.ToString(format))); //12/26/2019 15:22:44
|
||||
lines.Add(string.Concat("LOGISTICS_COLUMN", '\t', "A_LOGISTICS"));
|
||||
lines.Add(string.Concat("LOGISTICS_COLUMN", '\t', "B_LOGISTICS"));
|
||||
if (!string.IsNullOrEmpty(logisticsText))
|
||||
lines.Add(logisticsText);
|
||||
else
|
||||
{
|
||||
lines.Add(string.Concat("LOGISTICS_1", '\t', "A_CHAMBER=;A_INFO=", fileRead.EventName, ";A_INFO2=", fileRead.EquipmentType, ";A_JOBID=", fileRead.CellInstanceName, ";A_MES_ENTITY=", fileRead.MesEntity, ";A_MID=", logistics.MID, ";A_NULL_DATA=", fileRead.NullData, ";A_PPID=NO_PPID;A_PROCESS_JOBID=", logistics.ProcessJobID, ";A_PRODUCT=;A_SEQUENCE=", logistics.Sequence, ";A_WAFER_ID=;"));
|
||||
lines.Add(string.Concat("LOGISTICS_2", '\t', "B_CHAMBER=;B_INFO=", fileRead.EventName, ";B_INFO2=", fileRead.EquipmentType, ";B_JOBID=", fileRead.CellInstanceName, ";B_MES_ENTITY=", fileRead.MesEntity, ";B_MID=", logistics.MID, ";B_NULL_DATA=", fileRead.NullData, ";B_PPID=NO_PPID;B_PROCESS_JOBID=", logistics.ProcessJobID, ";B_PRODUCT=;B_SEQUENCE=", logistics.Sequence, ";B_WAFER_ID=;"));
|
||||
lines.Add("END_HEADER");
|
||||
}
|
||||
_ = stringBuilder.Clear();
|
||||
foreach (string line in lines)
|
||||
_ = stringBuilder.AppendLine(line);
|
||||
result = stringBuilder.ToString();
|
||||
result = result.Replace(headerOffset, result.IndexOf("NUM_DATA_ROWS").ToString().PadLeft(9, '0')).
|
||||
Replace(dataOffset, result.IndexOf('"').ToString().PadLeft(9, '0')).
|
||||
Replace(endOffset, result.Length.ToString().PadLeft(9, '0'));
|
||||
}
|
||||
return result;
|
||||
}
|
||||
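The H#######T, D#######T and E#######T tokens used above are fixed-width placeholders that are patched with real character offsets once the full text is assembled; because each zero-padded replacement has the same length as its placeholder, the other offsets stay valid. A minimal stand-alone sketch of that pattern, with assumed names, not part of this diff:

// Illustrative sketch of the fixed-width offset-placeholder pattern: emit 9-character
// placeholders first, then overwrite them with zero-padded offsets of equal length.
using System;

internal static class OffsetPlaceholderExample
{
    internal static string Build(string columnsLine, string dataLine)
    {
        const string headerOffset = "H#######T";
        const string dataOffset = "D#######T";
        string text = $"HEADER_OFFSET\t{headerOffset}{Environment.NewLine}" +
                      $"DATA_OFFSET\t{dataOffset}{Environment.NewLine}" +
                      $"{columnsLine}{Environment.NewLine}{dataLine}{Environment.NewLine}";
        // Replacements keep the text length unchanged, so both offsets remain correct.
        return text
            .Replace(headerOffset, text.IndexOf(columnsLine, StringComparison.Ordinal).ToString().PadLeft(9, '0'))
            .Replace(dataOffset, text.IndexOf(dataLine, StringComparison.Ordinal).ToString().PadLeft(9, '0'));
    }
}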
|
||||
internal static Tuple<string, Dictionary<Test, Dictionary<string, List<string>>>> GetTestDictionary(ProcessDataStandardFormat processDataStandardFormat)
|
||||
{
|
||||
Dictionary<Test, Dictionary<string, List<string>>> results = new();
|
||||
List<string>? collection;
|
||||
string testColumn = nameof(Test);
|
||||
Dictionary<string, List<string>> keyValuePairs = GetDictionary(processDataStandardFormat);
|
||||
if (!keyValuePairs.TryGetValue(testColumn, out collection))
|
||||
throw new Exception();
|
||||
int min;
|
||||
int max;
|
||||
Test testKey;
|
||||
List<string> vs;
|
||||
string columnKey;
|
||||
Dictionary<Test, List<int>> tests = new();
|
||||
for (int i = 0; i < collection.Count; i++)
|
||||
{
|
||||
if (Enum.TryParse(collection[i], out Test test))
|
||||
{
|
||||
if (!results.ContainsKey(test))
|
||||
{
|
||||
tests.Add(test, new List<int>());
|
||||
results.Add(test, new Dictionary<string, List<string>>());
|
||||
}
|
||||
tests[test].Add(i);
|
||||
}
|
||||
}
|
||||
foreach (KeyValuePair<Test, List<int>> testKeyValuePair in tests)
|
||||
{
|
||||
testKey = testKeyValuePair.Key;
|
||||
min = testKeyValuePair.Value.Min();
|
||||
max = testKeyValuePair.Value.Max() + 1;
|
||||
foreach (KeyValuePair<string, List<string>> keyValuePair in keyValuePairs)
|
||||
results[testKey].Add(keyValuePair.Key, new List<string>());
|
||||
foreach (KeyValuePair<string, List<string>> keyValuePair in keyValuePairs)
|
||||
{
|
||||
vs = keyValuePair.Value;
|
||||
columnKey = keyValuePair.Key;
|
||||
for (int i = min; i < max; i++)
|
||||
{
|
||||
if (vs.Count > i)
|
||||
results[testKey][columnKey].Add(vs[i]);
|
||||
else
|
||||
results[testKey][columnKey].Add(string.Empty);
|
||||
}
|
||||
}
|
||||
}
|
||||
return new Tuple<string, Dictionary<Test, Dictionary<string, List<string>>>>(processDataStandardFormat.Logistics[0], results);
|
||||
}
|
||||
|
||||
internal static string GetLines(Logistics logistics, Properties.IScopeInfo scopeInfo, List<string> names, Dictionary<string, List<string>> keyValuePairs, string dateFormat, string timeFormat, List<string> pairedParameterNames, bool useDateTimeFromSequence = true, string format = "", List<string>? ignoreParameterNames = null)
|
||||
{
|
||||
StringBuilder result = new();
|
||||
ignoreParameterNames ??= new List<string>();
|
||||
if (useDateTimeFromSequence && !string.IsNullOrEmpty(format))
|
||||
throw new Exception();
|
||||
else if (!useDateTimeFromSequence && string.IsNullOrEmpty(format))
|
||||
throw new Exception();
|
||||
string? nullData;
|
||||
const string columnDate = "Date";
|
||||
const string columnTime = "Time";
|
||||
const string firstDuplicate = "_1";
|
||||
_ = result.AppendLine(scopeInfo.Header);
|
||||
StringBuilder line = new();
|
||||
if (logistics.NullData is null)
|
||||
nullData = string.Empty;
|
||||
else
|
||||
nullData = logistics.NullData.ToString();
|
||||
int count = (from l in keyValuePairs select l.Value.Count).Min();
|
||||
for (int r = 0; r < count; r++)
|
||||
{
|
||||
_ = line.Clear();
|
||||
_ = line.Append('!');
|
||||
foreach (KeyValuePair<string, List<string>> keyValuePair in keyValuePairs)
|
||||
{
|
||||
if (!names.Contains(keyValuePair.Key))
|
||||
continue;
|
||||
if (ignoreParameterNames.Contains(keyValuePair.Key))
|
||||
continue;
|
||||
if (pairedParameterNames.Contains(keyValuePair.Key))
|
||||
{
|
||||
if (string.IsNullOrEmpty(keyValuePair.Value[r]) || keyValuePair.Value[r] == nullData)
|
||||
continue;
|
||||
else
|
||||
_ = result.Append(line).Append(keyValuePair.Key).Append(';').AppendLine(keyValuePair.Value[r]);
|
||||
}
|
||||
else
|
||||
{
|
||||
if (useDateTimeFromSequence && keyValuePair.Key == columnDate)
|
||||
_ = line.Append(logistics.DateTimeFromSequence.ToString(dateFormat));
|
||||
else if (useDateTimeFromSequence && keyValuePair.Key == columnTime)
|
||||
_ = line.Append(logistics.DateTimeFromSequence.ToString(timeFormat));
|
||||
else if (!useDateTimeFromSequence && keyValuePair.Key == columnDate && keyValuePair.Value[r].Length == format.Length)
|
||||
_ = line.Append(DateTime.ParseExact(keyValuePair.Value[r], format, CultureInfo.InvariantCulture).ToString(dateFormat));
|
||||
else if (!useDateTimeFromSequence && keyValuePair.Key == columnTime && keyValuePairs.ContainsKey(string.Concat(keyValuePair.Key, firstDuplicate)) && keyValuePairs[string.Concat(keyValuePair.Key, firstDuplicate)][r].Length == format.Length)
|
||||
_ = line.Append(DateTime.ParseExact(keyValuePairs[string.Concat(keyValuePair.Key, firstDuplicate)][r], format, CultureInfo.InvariantCulture).ToString(timeFormat));
|
||||
else if (string.IsNullOrEmpty(keyValuePair.Value[r]) || keyValuePair.Value[r] == nullData)
|
||||
_ = line.Append(nullData);
|
||||
else
|
||||
_ = line.Append(keyValuePair.Value[r]);
|
||||
_ = line.Append(';');
|
||||
}
|
||||
}
|
||||
if (pairedParameterNames.Count == 0)
|
||||
{
|
||||
_ = line.Remove(line.Length - 1, 1);
|
||||
_ = result.AppendLine(line.ToString());
|
||||
}
|
||||
}
|
||||
return result.ToString();
|
||||
}
|
||||
|
||||
private static string GetString(SearchFor searchFor, bool addSpaces, char separator = ' ')
|
||||
{
|
||||
if (!addSpaces)
|
||||
return string.Concat(((int)searchFor).ToString().PadLeft(2, '0'), searchFor);
|
||||
else
|
||||
return string.Concat(((int)searchFor).ToString().PadLeft(2, '0'), separator, searchFor.ToString().Replace("In", string.Concat(separator, "In")).Replace("Ex", string.Concat(separator, "Ex")));
|
||||
}
|
||||
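A small usage sketch for the public wrappers built on GetString above; the numeric prefixes come from the SearchFor enum values, which are not shown in this diff, so the numbers (and the Adaptation.Shared using) below are assumptions.

// Illustrative sketch only: the two-digit prefix is the enum's numeric value and
// addSpaces splits names before "In"/"Ex"; actual numbers depend on SearchFor.
using Adaptation.Shared;

internal static class SearchForLabelExample
{
    internal static void Show()
    {
        string spaced = ProcessDataStandardFormat.SystemExport();              // e.g. "08 System Export"
        string plain = ProcessDataStandardFormat.Archive();                    // e.g. "09 Archive"
        string compact = ProcessDataStandardFormat.Archive(addSpaces: false);  // e.g. "09Archive"
        System.Console.WriteLine($"{spaced} | {plain} | {compact}");
    }
}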
|
||||
private static int? TryGetPropertyIndex(JsonProperty[] jsonProperties, string propertyName)
|
||||
{
|
||||
int? result = null;
|
||||
for (int i = 0; i < jsonProperties.Length; i++)
|
||||
{
|
||||
if (jsonProperties[i].Name != propertyName)
|
||||
continue;
|
||||
result = i;
|
||||
break;
|
||||
}
|
||||
if (result is null)
|
||||
{
|
||||
for (int i = 0; i < jsonProperties.Length; i++)
|
||||
{
|
||||
if (jsonProperties[i].Name[0] != propertyName[0])
|
||||
continue;
|
||||
if (jsonProperties[i].Name.Length != propertyName.Length)
|
||||
continue;
|
||||
if (jsonProperties[i].Name != propertyName)
|
||||
continue;
|
||||
result = i;
|
||||
break;
|
||||
}
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
}

[JsonSourceGenerationOptions(WriteIndented = true)]
[JsonSerializable(typeof(JsonElement[]))]
internal partial class JsonElementCollectionSourceGenerationContext : JsonSerializerContext
{
}
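The source-generated context above backs the reflection-free serialization calls used in GetArray. A minimal usage sketch follows; it is not part of this diff, and the JSON payload and the Adaptation.Shared namespace placement are assumptions.

// Illustrative sketch only: round-trips a JsonElement[] through the generated
// type info, which keeps serialization trim/AOT friendly.
using System;
using System.Text.Json;

namespace Adaptation.Shared;

internal static class SourceGenContextExample
{
    internal static JsonElement[] RoundTrip()
    {
        const string json = "[{\"Reactor\":\"12\",\"RDS\":\"123456\"}]";
        JsonElement[] parsed = JsonSerializer.Deserialize(
            json, JsonElementCollectionSourceGenerationContext.Default.JsonElementArray)
            ?? Array.Empty<JsonElement>();
        // Serializing back uses the same generated type info rather than reflection.
        _ = JsonSerializer.Serialize(parsed, JsonElementCollectionSourceGenerationContext.Default.JsonElementArray);
        return parsed;
    }
}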
33
Adaptation/Shared/ProcessDataStandardFormatMapping.cs
Normal file
@ -0,0 +1,33 @@
using System.Collections.ObjectModel;

namespace Adaptation.Shared;

public class ProcessDataStandardFormatMapping
{

public ReadOnlyCollection<string> BackfillColumns { get; private set; }
public ReadOnlyCollection<int> ColumnIndices { get; private set; }
public ReadOnlyCollection<string> IgnoreColumns { get; private set; }
public ReadOnlyCollection<string> IndexOnlyColumns { get; private set; }
public ReadOnlyDictionary<string, string> KeyValuePairs { get; private set; }
public ReadOnlyCollection<string> NewColumnNames { get; private set; }
public ReadOnlyCollection<string> OldColumnNames { get; private set; }

public ProcessDataStandardFormatMapping(ReadOnlyCollection<string> backfillColumns,
ReadOnlyCollection<int> columnIndices,
ReadOnlyCollection<string> ignoreColumns,
ReadOnlyCollection<string> indexOnlyColumns,
ReadOnlyDictionary<string, string> keyValuePairs,
ReadOnlyCollection<string> newColumnNames,
ReadOnlyCollection<string> oldColumnNames)
{
BackfillColumns = backfillColumns;
ColumnIndices = columnIndices;
IgnoreColumns = ignoreColumns;
IndexOnlyColumns = indexOnlyColumns;
KeyValuePairs = keyValuePairs;
NewColumnNames = newColumnNames;
OldColumnNames = oldColumnNames;
}

}
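A minimal construction sketch for the mapping type above; it is not part of this diff, and the column names and index values are illustrative assumptions, where -1 marks an old column with no matching new column so its value is backfilled with the column name itself.

// Illustrative sketch only: "Date" and "Thickness" map to new columns 0 and 2,
// while "Operator" has no source column (-1) and is emitted as its own name.
using System.Collections.Generic;
using System.Collections.ObjectModel;
using Adaptation.Shared;

internal static class ProcessDataStandardFormatMappingExample
{
    internal static ProcessDataStandardFormatMapping Build() =>
        new(backfillColumns: new ReadOnlyCollection<string>(new[] { "Operator" }),
            columnIndices: new ReadOnlyCollection<int>(new[] { 0, -1, 2 }),
            ignoreColumns: new ReadOnlyCollection<string>(System.Array.Empty<string>()),
            indexOnlyColumns: new ReadOnlyCollection<string>(System.Array.Empty<string>()),
            keyValuePairs: new ReadOnlyDictionary<string, string>(new Dictionary<string, string>()),
            newColumnNames: new ReadOnlyCollection<string>(new[] { "Date", "Recipe", "Thickness" }),
            oldColumnNames: new ReadOnlyCollection<string>(new[] { "Date", "Operator", "Thickness" }));
}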
@ -1,4 +1,4 @@
|
||||
#if true
|
||||
#if v2_58_0
|
||||
using Adaptation._Tests.Shared;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using Microsoft.VisualStudio.TestTools.UnitTesting;
|
||||
|
@ -1,4 +1,4 @@
|
||||
#if true
|
||||
#if v2_58_0
|
||||
using Adaptation._Tests.Shared;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using Microsoft.VisualStudio.TestTools.UnitTesting;
|
||||
|
@ -1,4 +1,4 @@
|
||||
#if true
|
||||
#if v2_58_0
|
||||
using Adaptation._Tests.Shared;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using Microsoft.VisualStudio.TestTools.UnitTesting;
|
||||
|
@ -1,4 +1,4 @@
|
||||
#if true
|
||||
#if v2_58_0
|
||||
using Adaptation._Tests.Shared;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using Microsoft.VisualStudio.TestTools.UnitTesting;
|
||||
|
@ -0,0 +1,182 @@
|
||||
#if true
|
||||
using Adaptation._Tests.Shared;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using Microsoft.VisualStudio.TestTools.UnitTesting;
|
||||
using System;
|
||||
using System.Diagnostics;
|
||||
using System.IO;
|
||||
using System.Reflection;
|
||||
|
||||
namespace Adaptation._Tests.CreateSelfDescription.Production.v2_59_0;
|
||||
|
||||
[TestClass]
|
||||
public class MET08DDUPSFS6420 : EAFLoggingUnitTesting
|
||||
{
|
||||
|
||||
#pragma warning disable CA2254
|
||||
#pragma warning disable IDE0060
|
||||
|
||||
internal static string DummyRoot { get; private set; }
|
||||
internal static MET08DDUPSFS6420 EAFLoggingUnitTesting { get; private set; }
|
||||
|
||||
static MET08DDUPSFS6420() => DummyRoot = @"\\mesfs.infineon.com\EC_Characterization_Si\Dummy";
|
||||
|
||||
public MET08DDUPSFS6420() : base(DummyRoot, testContext: null, declaringType: null, skipEquipmentDictionary: false)
|
||||
{
|
||||
if (EAFLoggingUnitTesting is null)
|
||||
throw new Exception();
|
||||
}
|
||||
|
||||
public MET08DDUPSFS6420(TestContext testContext) : base(DummyRoot, testContext, new StackFrame().GetMethod().DeclaringType, skipEquipmentDictionary: false)
|
||||
{
|
||||
}
|
||||
|
||||
[ClassInitialize]
|
||||
public static void ClassInitialize(TestContext testContext)
|
||||
{
|
||||
EAFLoggingUnitTesting ??= new MET08DDUPSFS6420(testContext);
|
||||
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(testContext.TestName, " - ClassInitialize"));
|
||||
string[] fileNameAndText = EAFLoggingUnitTesting.AdaptationTesting.GetCSharpText(testContext.TestName);
|
||||
File.WriteAllText(fileNameAndText[0], fileNameAndText[1]);
|
||||
File.WriteAllText(fileNameAndText[2], fileNameAndText[3]);
|
||||
}
|
||||
|
||||
[ClassCleanup()]
|
||||
public static void ClassCleanup()
|
||||
{
|
||||
EAFLoggingUnitTesting?.Logger?.LogInformation("Cleanup");
|
||||
EAFLoggingUnitTesting?.Dispose();
|
||||
}
|
||||
|
||||
#if DEBUG
|
||||
[Ignore]
|
||||
#endif
|
||||
[TestMethod]
|
||||
public void Production__v2_59_0__MET08DDUPSFS6420__MoveMatchingFiles()
|
||||
{
|
||||
string check = "*.pdsf";
|
||||
MethodBase methodBase = new StackFrame().GetMethod();
|
||||
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Getting configuration"));
|
||||
_ = AdaptationTesting.GetWriteConfigurationGetFileRead(methodBase, check, EAFLoggingUnitTesting.AdaptationTesting);
|
||||
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Exit"));
|
||||
}
|
||||
|
||||
#if DEBUG
|
||||
[Ignore]
|
||||
#endif
|
||||
[TestMethod]
|
||||
public void Production__v2_59_0__MET08DDUPSFS6420__OpenInsightMetrologyViewer()
|
||||
{
|
||||
string check = "*.pdsf";
|
||||
MethodBase methodBase = new StackFrame().GetMethod();
|
||||
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Getting configuration"));
|
||||
_ = AdaptationTesting.GetWriteConfigurationGetFileRead(methodBase, check, EAFLoggingUnitTesting.AdaptationTesting);
|
||||
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Exit"));
|
||||
}
|
||||
|
||||
#if DEBUG
|
||||
[Ignore]
|
||||
#endif
|
||||
[TestMethod]
|
||||
public void Production__v2_59_0__MET08DDUPSFS6420__IQSSi()
|
||||
{
|
||||
string check = "*.pdsf";
|
||||
MethodBase methodBase = new StackFrame().GetMethod();
|
||||
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Getting configuration"));
|
||||
_ = AdaptationTesting.GetWriteConfigurationGetFileRead(methodBase, check, EAFLoggingUnitTesting.AdaptationTesting);
|
||||
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Exit"));
|
||||
}
|
||||
|
||||
#if DEBUG
|
||||
[Ignore]
|
||||
#endif
|
||||
[TestMethod]
|
||||
public void Production__v2_59_0__MET08DDUPSFS6420__OpenInsight()
|
||||
{
|
||||
string check = "*.pdsf";
|
||||
MethodBase methodBase = new StackFrame().GetMethod();
|
||||
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Getting configuration"));
|
||||
_ = AdaptationTesting.GetWriteConfigurationGetFileRead(methodBase, check, EAFLoggingUnitTesting.AdaptationTesting);
|
||||
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Exit"));
|
||||
}
|
||||
|
||||
#if DEBUG
|
||||
[Ignore]
|
||||
#endif
|
||||
[TestMethod]
|
||||
public void Production__v2_59_0__MET08DDUPSFS6420__OpenInsightMetrologyViewerAttachments()
|
||||
{
|
||||
string check = "*.pdsf";
|
||||
MethodBase methodBase = new StackFrame().GetMethod();
|
||||
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Getting configuration"));
|
||||
_ = AdaptationTesting.GetWriteConfigurationGetFileRead(methodBase, check, EAFLoggingUnitTesting.AdaptationTesting);
|
||||
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Exit"));
|
||||
}
|
||||
|
||||
#if DEBUG
|
||||
[Ignore]
|
||||
#endif
|
||||
[TestMethod]
|
||||
public void Production__v2_59_0__MET08DDUPSFS6420__APC()
|
||||
{
|
||||
string check = "*.pdsf";
|
||||
MethodBase methodBase = new StackFrame().GetMethod();
|
||||
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Getting configuration"));
|
||||
_ = AdaptationTesting.GetWriteConfigurationGetFileRead(methodBase, check, EAFLoggingUnitTesting.AdaptationTesting);
|
||||
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Exit"));
|
||||
}
|
||||
|
||||
#if DEBUG
|
||||
[Ignore]
|
||||
#endif
|
||||
[TestMethod]
|
||||
public void Production__v2_59_0__MET08DDUPSFS6420__SPaCe()
|
||||
{
|
||||
string check = "*.pdsf";
|
||||
MethodBase methodBase = new StackFrame().GetMethod();
|
||||
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Getting configuration"));
|
||||
_ = AdaptationTesting.GetWriteConfigurationGetFileRead(methodBase, check, EAFLoggingUnitTesting.AdaptationTesting);
|
||||
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Exit"));
|
||||
}
|
||||
|
||||
#if DEBUG
|
||||
[Ignore]
|
||||
#endif
|
||||
[TestMethod]
|
||||
public void Production__v2_59_0__MET08DDUPSFS6420__Processed()
|
||||
{
|
||||
string check = "*.pdsf";
|
||||
MethodBase methodBase = new StackFrame().GetMethod();
|
||||
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Getting configuration"));
|
||||
_ = AdaptationTesting.GetWriteConfigurationGetFileRead(methodBase, check, EAFLoggingUnitTesting.AdaptationTesting);
|
||||
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Exit"));
|
||||
}
|
||||
|
||||
#if DEBUG
|
||||
[Ignore]
|
||||
#endif
|
||||
[TestMethod]
|
||||
public void Production__v2_59_0__MET08DDUPSFS6420__Archive()
|
||||
{
|
||||
string check = "*.pdsf";
|
||||
MethodBase methodBase = new StackFrame().GetMethod();
|
||||
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Getting configuration"));
|
||||
_ = AdaptationTesting.GetWriteConfigurationGetFileRead(methodBase, check, EAFLoggingUnitTesting.AdaptationTesting);
|
||||
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Exit"));
|
||||
}
|
||||
|
||||
#if DEBUG
|
||||
[Ignore]
|
||||
#endif
|
||||
[TestMethod]
|
||||
public void Production__v2_59_0__MET08DDUPSFS6420__Dummy()
|
||||
{
|
||||
string check = "637400762709163000.zip";
|
||||
MethodBase methodBase = new StackFrame().GetMethod();
|
||||
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Getting configuration"));
|
||||
_ = AdaptationTesting.GetWriteConfigurationGetFileRead(methodBase, check, EAFLoggingUnitTesting.AdaptationTesting);
|
||||
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Exit"));
|
||||
}
|
||||
|
||||
}
|
||||
#endif
|
@ -0,0 +1,65 @@
|
||||
#if true
|
||||
using Adaptation._Tests.Shared;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using Microsoft.VisualStudio.TestTools.UnitTesting;
|
||||
using System;
|
||||
using System.Diagnostics;
|
||||
using System.IO;
|
||||
using System.Reflection;
|
||||
|
||||
namespace Adaptation._Tests.CreateSelfDescription.Production.v2_59_0;
|
||||
|
||||
[TestClass]
|
||||
public class TENCOR1 : EAFLoggingUnitTesting
|
||||
{
|
||||
|
||||
#pragma warning disable CA2254
|
||||
#pragma warning disable IDE0060
|
||||
|
||||
internal static string DummyRoot { get; private set; }
|
||||
internal static TENCOR1 EAFLoggingUnitTesting { get; private set; }
|
||||
|
||||
static TENCOR1() => DummyRoot = @"\\mesfs.infineon.com\EC_Characterization_Si\Dummy";
|
||||
|
||||
public TENCOR1() : base(DummyRoot, testContext: null, declaringType: null, skipEquipmentDictionary: false)
|
||||
{
|
||||
if (EAFLoggingUnitTesting is null)
|
||||
throw new Exception();
|
||||
}
|
||||
|
||||
public TENCOR1(TestContext testContext) : base(DummyRoot, testContext, new StackFrame().GetMethod().DeclaringType, skipEquipmentDictionary: false)
|
||||
{
|
||||
}
|
||||
|
||||
[ClassInitialize]
|
||||
public static void ClassInitialize(TestContext testContext)
|
||||
{
|
||||
EAFLoggingUnitTesting ??= new TENCOR1(testContext);
|
||||
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(testContext.TestName, " - ClassInitialize"));
|
||||
string[] fileNameAndText = EAFLoggingUnitTesting.AdaptationTesting.GetCSharpText(testContext.TestName);
|
||||
File.WriteAllText(fileNameAndText[0], fileNameAndText[1]);
|
||||
File.WriteAllText(fileNameAndText[2], fileNameAndText[3]);
|
||||
}
|
||||
|
||||
[ClassCleanup()]
|
||||
public static void ClassCleanup()
|
||||
{
|
||||
EAFLoggingUnitTesting?.Logger?.LogInformation("Cleanup");
|
||||
EAFLoggingUnitTesting?.Dispose();
|
||||
}
|
||||
|
||||
#if DEBUG
|
||||
[Ignore]
|
||||
#endif
|
||||
[TestMethod]
|
||||
public void Production__v2_59_0__TENCOR1__pcl()
|
||||
{
|
||||
string check = "*.pcl";
|
||||
MethodBase methodBase = new StackFrame().GetMethod();
|
||||
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Getting configuration"));
|
||||
_ = AdaptationTesting.GetWriteConfigurationGetFileRead(methodBase, check, EAFLoggingUnitTesting.AdaptationTesting);
|
||||
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Exit"));
|
||||
}
|
||||
|
||||
}
|
||||
#endif
|
@ -0,0 +1,65 @@
|
||||
#if true
|
||||
using Adaptation._Tests.Shared;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using Microsoft.VisualStudio.TestTools.UnitTesting;
|
||||
using System;
|
||||
using System.Diagnostics;
|
||||
using System.IO;
|
||||
using System.Reflection;
|
||||
|
||||
namespace Adaptation._Tests.CreateSelfDescription.Production.v2_59_0;
|
||||
|
||||
[TestClass]
|
||||
public class TENCOR2 : EAFLoggingUnitTesting
|
||||
{
|
||||
|
||||
#pragma warning disable CA2254
|
||||
#pragma warning disable IDE0060
|
||||
|
||||
internal static string DummyRoot { get; private set; }
|
||||
internal static TENCOR2 EAFLoggingUnitTesting { get; private set; }
|
||||
|
||||
static TENCOR2() => DummyRoot = @"\\mesfs.infineon.com\EC_Characterization_Si\Dummy";
|
||||
|
||||
public TENCOR2() : base(DummyRoot, testContext: null, declaringType: null, skipEquipmentDictionary: false)
|
||||
{
|
||||
if (EAFLoggingUnitTesting is null)
|
||||
throw new Exception();
|
||||
}
|
||||
|
||||
public TENCOR2(TestContext testContext) : base(DummyRoot, testContext, new StackFrame().GetMethod().DeclaringType, skipEquipmentDictionary: false)
|
||||
{
|
||||
}
|
||||
|
||||
[ClassInitialize]
|
||||
public static void ClassInitialize(TestContext testContext)
|
||||
{
|
||||
EAFLoggingUnitTesting ??= new TENCOR2(testContext);
|
||||
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(testContext.TestName, " - ClassInitialize"));
|
||||
string[] fileNameAndText = EAFLoggingUnitTesting.AdaptationTesting.GetCSharpText(testContext.TestName);
|
||||
File.WriteAllText(fileNameAndText[0], fileNameAndText[1]);
|
||||
File.WriteAllText(fileNameAndText[2], fileNameAndText[3]);
|
||||
}
|
||||
|
||||
[ClassCleanup()]
|
||||
public static void ClassCleanup()
|
||||
{
|
||||
EAFLoggingUnitTesting?.Logger?.LogInformation("Cleanup");
|
||||
EAFLoggingUnitTesting?.Dispose();
|
||||
}
|
||||
|
||||
#if DEBUG
|
||||
[Ignore]
|
||||
#endif
|
||||
[TestMethod]
|
||||
public void Production__v2_59_0__TENCOR2__pcl()
|
||||
{
|
||||
string check = "*.pcl";
|
||||
MethodBase methodBase = new StackFrame().GetMethod();
|
||||
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Getting configuration"));
|
||||
_ = AdaptationTesting.GetWriteConfigurationGetFileRead(methodBase, check, EAFLoggingUnitTesting.AdaptationTesting);
|
||||
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Exit"));
|
||||
}
|
||||
|
||||
}
|
||||
#endif
|
@ -0,0 +1,65 @@
|
||||
#if true
|
||||
using Adaptation._Tests.Shared;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using Microsoft.VisualStudio.TestTools.UnitTesting;
|
||||
using System;
|
||||
using System.Diagnostics;
|
||||
using System.IO;
|
||||
using System.Reflection;
|
||||
|
||||
namespace Adaptation._Tests.CreateSelfDescription.Production.v2_59_0;
|
||||
|
||||
[TestClass]
|
||||
public class TENCOR3 : EAFLoggingUnitTesting
|
||||
{
|
||||
|
||||
#pragma warning disable CA2254
|
||||
#pragma warning disable IDE0060
|
||||
|
||||
internal static string DummyRoot { get; private set; }
|
||||
internal static TENCOR3 EAFLoggingUnitTesting { get; private set; }
|
||||
|
||||
static TENCOR3() => DummyRoot = @"\\mesfs.infineon.com\EC_Characterization_Si\Dummy";
|
||||
|
||||
public TENCOR3() : base(DummyRoot, testContext: null, declaringType: null, skipEquipmentDictionary: false)
|
||||
{
|
||||
if (EAFLoggingUnitTesting is null)
|
||||
throw new Exception();
|
||||
}
|
||||
|
||||
public TENCOR3(TestContext testContext) : base(DummyRoot, testContext, new StackFrame().GetMethod().DeclaringType, skipEquipmentDictionary: false)
|
||||
{
|
||||
}
|
||||
|
||||
[ClassInitialize]
|
||||
public static void ClassInitialize(TestContext testContext)
|
||||
{
|
||||
EAFLoggingUnitTesting ??= new TENCOR3(testContext);
|
||||
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(testContext.TestName, " - ClassInitialize"));
|
||||
string[] fileNameAndText = EAFLoggingUnitTesting.AdaptationTesting.GetCSharpText(testContext.TestName);
|
||||
File.WriteAllText(fileNameAndText[0], fileNameAndText[1]);
|
||||
File.WriteAllText(fileNameAndText[2], fileNameAndText[3]);
|
||||
}
|
||||
|
||||
[ClassCleanup()]
|
||||
public static void ClassCleanup()
|
||||
{
|
||||
EAFLoggingUnitTesting?.Logger?.LogInformation("Cleanup");
|
||||
EAFLoggingUnitTesting?.Dispose();
|
||||
}
|
||||
|
||||
#if DEBUG
|
||||
[Ignore]
|
||||
#endif
|
||||
[TestMethod]
|
||||
public void Production__v2_59_0__TENCOR3__pcl()
|
||||
{
|
||||
string check = "*.pcl";
|
||||
MethodBase methodBase = new StackFrame().GetMethod();
|
||||
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Getting configuration"));
|
||||
_ = AdaptationTesting.GetWriteConfigurationGetFileRead(methodBase, check, EAFLoggingUnitTesting.AdaptationTesting);
|
||||
EAFLoggingUnitTesting.Logger.LogInformation(string.Concat(methodBase.Name, " - Exit"));
|
||||
}
|
||||
|
||||
}
|
||||
#endif
|
@ -1,4 +1,4 @@
|
||||
#if true
|
||||
#if v2_58_0
|
||||
using Adaptation.Shared;
|
||||
using Adaptation.Shared.Methods;
|
||||
using Microsoft.VisualStudio.TestTools.UnitTesting;
|
||||
|
@ -1,4 +1,4 @@
|
||||
#if true
|
||||
#if v2_58_0
|
||||
using Adaptation.Shared;
|
||||
using Adaptation.Shared.Methods;
|
||||
using Microsoft.VisualStudio.TestTools.UnitTesting;
|
||||
|
@ -1,4 +1,4 @@
|
||||
#if true
|
||||
#if v2_58_0
|
||||
using Adaptation.Shared;
|
||||
using Adaptation.Shared.Methods;
|
||||
using Microsoft.VisualStudio.TestTools.UnitTesting;
|
||||
|
@ -1,4 +1,4 @@
|
||||
#if true
|
||||
#if v2_58_0
|
||||
using Adaptation.Shared;
|
||||
using Adaptation.Shared.Methods;
|
||||
using Microsoft.VisualStudio.TestTools.UnitTesting;
|
||||
|
144
Adaptation/_Tests/Extract/Production/v2.59.0/MET08DDUPSFS6420.cs
Normal file
@ -0,0 +1,144 @@
|
||||
#if true
|
||||
using Adaptation.Shared;
|
||||
using Adaptation.Shared.Methods;
|
||||
using Microsoft.VisualStudio.TestTools.UnitTesting;
|
||||
using System;
|
||||
using System.Diagnostics;
|
||||
using System.Reflection;
|
||||
|
||||
namespace Adaptation._Tests.Extract.Production.v2_59_0;
|
||||
|
||||
[TestClass]
|
||||
public class MET08DDUPSFS6420
|
||||
{
|
||||
|
||||
#pragma warning disable CA2254
|
||||
#pragma warning disable IDE0060
|
||||
|
||||
private static CreateSelfDescription.Production.v2_59_0.MET08DDUPSFS6420 _MET08DDUPSFS6420;
|
||||
|
||||
[ClassInitialize]
|
||||
public static void ClassInitialize(TestContext testContext)
|
||||
{
|
||||
CreateSelfDescription.Production.v2_59_0.MET08DDUPSFS6420.ClassInitialize(testContext);
|
||||
_MET08DDUPSFS6420 = CreateSelfDescription.Production.v2_59_0.MET08DDUPSFS6420.EAFLoggingUnitTesting;
|
||||
}
|
||||
|
||||
private static void NonThrowTryCatch()
|
||||
{
|
||||
try
|
||||
{ throw new Exception(); }
|
||||
catch (Exception) { }
|
||||
}
|
||||
|
||||
#if DEBUG
|
||||
[Ignore]
|
||||
#endif
|
||||
[TestMethod]
|
||||
public void Production__v2_59_0__MET08DDUPSFS6420__MoveMatchingFiles() => _MET08DDUPSFS6420.Production__v2_59_0__MET08DDUPSFS6420__MoveMatchingFiles();
|
||||
|
||||
#if DEBUG
|
||||
[Ignore]
|
||||
#endif
|
||||
[TestMethod]
|
||||
public void Production__v2_59_0__MET08DDUPSFS6420__OpenInsightMetrologyViewer() => _MET08DDUPSFS6420.Production__v2_59_0__MET08DDUPSFS6420__OpenInsightMetrologyViewer();
|
||||
|
||||
#if DEBUG
|
||||
[Ignore]
|
||||
#endif
|
||||
[TestMethod]
|
||||
public void Production__v2_59_0__MET08DDUPSFS6420__OpenInsightMetrologyViewer637810124350899080__Normal()
|
||||
{
|
||||
string check = "*.pdsf";
|
||||
bool validatePDSF = false;
|
||||
MethodBase methodBase = new StackFrame().GetMethod();
|
||||
_MET08DDUPSFS6420.Production__v2_59_0__MET08DDUPSFS6420__OpenInsightMetrologyViewer();
|
||||
string[] variables = _MET08DDUPSFS6420.AdaptationTesting.GetVariables(methodBase, check, validatePDSF);
|
||||
IFileRead fileRead = _MET08DDUPSFS6420.AdaptationTesting.Get(methodBase, sourceFileLocation: variables[2], sourceFileFilter: variables[3], useCyclicalForDescription: false);
|
||||
Logistics logistics = new(fileRead);
|
||||
_ = Shared.AdaptationTesting.ReExtractCompareUpdatePassDirectory(variables, fileRead, logistics, validatePDSF);
|
||||
NonThrowTryCatch();
|
||||
}
|
||||
|
||||
#if DEBUG
|
||||
[Ignore]
|
||||
#endif
|
||||
[TestMethod]
|
||||
public void Production__v2_59_0__MET08DDUPSFS6420__IQSSi() => _MET08DDUPSFS6420.Production__v2_59_0__MET08DDUPSFS6420__IQSSi();
|
||||
|
||||
#if DEBUG
|
||||
[Ignore]
|
||||
#endif
|
||||
[TestMethod]
|
||||
public void Production__v2_59_0__MET08DDUPSFS6420__OpenInsight() => _MET08DDUPSFS6420.Production__v2_59_0__MET08DDUPSFS6420__OpenInsight();
|
||||
|
||||
#if DEBUG
|
||||
[Ignore]
|
||||
#endif
|
||||
[TestMethod]
|
||||
public void Production__v2_59_0__MET08DDUPSFS6420__OpenInsight638052163299572098__IqsSql()
|
||||
{
|
||||
string check = "*.pdsf";
|
||||
MethodBase methodBase = new StackFrame().GetMethod();
|
||||
_MET08DDUPSFS6420.Production__v2_59_0__MET08DDUPSFS6420__OpenInsight();
|
||||
string[] variables = _MET08DDUPSFS6420.AdaptationTesting.GetVariables(methodBase, check, validatePDSF: false);
|
||||
IFileRead fileRead = _MET08DDUPSFS6420.AdaptationTesting.Get(methodBase, sourceFileLocation: variables[2], sourceFileFilter: variables[3], useCyclicalForDescription: false);
|
||||
Logistics logistics = new(fileRead);
|
||||
_ = Shared.AdaptationTesting.ReExtractCompareUpdatePassDirectory(variables, fileRead, logistics);
|
||||
NonThrowTryCatch();
|
||||
}
|
||||
|
||||
#if DEBUG
|
||||
[Ignore]
|
||||
#endif
|
||||
[TestMethod]
|
||||
public void Production__v2_59_0__MET08DDUPSFS6420__OpenInsightMetrologyViewerAttachments() => _MET08DDUPSFS6420.Production__v2_59_0__MET08DDUPSFS6420__OpenInsightMetrologyViewerAttachments();
|
||||
|
||||
#if DEBUG
|
||||
[Ignore]
|
||||
#endif
|
||||
[TestMethod]
|
||||
public void Production__v2_59_0__MET08DDUPSFS6420__OpenInsightMetrologyViewerAttachments638519735942138814__HeaderId()
|
||||
{
|
||||
string check = "*.pdsf";
|
||||
MethodBase methodBase = new StackFrame().GetMethod();
|
||||
_MET08DDUPSFS6420.Production__v2_59_0__MET08DDUPSFS6420__OpenInsightMetrologyViewerAttachments();
|
||||
string[] variables = _MET08DDUPSFS6420.AdaptationTesting.GetVariables(methodBase, check, validatePDSF: false);
|
||||
IFileRead fileRead = _MET08DDUPSFS6420.AdaptationTesting.Get(methodBase, sourceFileLocation: variables[2], sourceFileFilter: variables[3], useCyclicalForDescription: false);
|
||||
Logistics logistics = new(fileRead);
|
||||
_ = Shared.AdaptationTesting.ReExtractCompareUpdatePassDirectory(variables, fileRead, logistics);
|
||||
NonThrowTryCatch();
|
||||
}
|
||||
|
||||
#if DEBUG
|
||||
[Ignore]
|
||||
#endif
|
||||
[TestMethod]
|
||||
public void Production__v2_59_0__MET08DDUPSFS6420__APC() => _MET08DDUPSFS6420.Production__v2_59_0__MET08DDUPSFS6420__APC();
|
||||
|
||||
#if DEBUG
|
||||
[Ignore]
|
||||
#endif
|
||||
[TestMethod]
|
||||
public void Production__v2_59_0__MET08DDUPSFS6420__SPaCe() => _MET08DDUPSFS6420.Production__v2_59_0__MET08DDUPSFS6420__SPaCe();
|
||||
|
||||
#if DEBUG
|
||||
[Ignore]
|
||||
#endif
|
||||
[TestMethod]
|
||||
public void Production__v2_59_0__MET08DDUPSFS6420__Processed() => _MET08DDUPSFS6420.Production__v2_59_0__MET08DDUPSFS6420__Processed();
|
||||
|
||||
#if DEBUG
|
||||
[Ignore]
|
||||
#endif
|
||||
[TestMethod]
|
||||
public void Production__v2_59_0__MET08DDUPSFS6420__Archive() => _MET08DDUPSFS6420.Production__v2_59_0__MET08DDUPSFS6420__Archive();
|
||||
|
||||
#if DEBUG
|
||||
[Ignore]
|
||||
#endif
|
||||
[TestMethod]
|
||||
public void Production__v2_59_0__MET08DDUPSFS6420__Dummy() => _MET08DDUPSFS6420.Production__v2_59_0__MET08DDUPSFS6420__Dummy();
|
||||
|
||||
}
|
||||
#endif
|
59
Adaptation/_Tests/Extract/Production/v2.59.0/TENCOR1.cs
Normal file
@ -0,0 +1,59 @@
|
||||
#if true
|
||||
using Adaptation.Shared;
|
||||
using Adaptation.Shared.Methods;
|
||||
using Microsoft.VisualStudio.TestTools.UnitTesting;
|
||||
using System;
|
||||
using System.Diagnostics;
|
||||
using System.Reflection;
|
||||
|
||||
namespace Adaptation._Tests.Extract.Production.v2_59_0;
|
||||
|
||||
[TestClass]
|
||||
public class TENCOR1
|
||||
{
|
||||
|
||||
#pragma warning disable CA2254
|
||||
#pragma warning disable IDE0060
|
||||
|
||||
private static CreateSelfDescription.Production.v2_59_0.TENCOR1 _TENCOR1;
|
||||
|
||||
[ClassInitialize]
|
||||
public static void ClassInitialize(TestContext testContext)
|
||||
{
|
||||
CreateSelfDescription.Production.v2_59_0.TENCOR1.ClassInitialize(testContext);
|
||||
_TENCOR1 = CreateSelfDescription.Production.v2_59_0.TENCOR1.EAFLoggingUnitTesting;
|
||||
}
|
||||
|
||||
private static void NonThrowTryCatch()
|
||||
{
|
||||
try
|
||||
{ throw new Exception(); }
|
||||
catch (Exception) { }
|
||||
}
|
||||
|
||||
#if DEBUG
|
||||
[Ignore]
|
||||
#endif
|
||||
[TestMethod]
|
||||
public void Production__v2_59_0__TENCOR1__pcl() => _TENCOR1.Production__v2_59_0__TENCOR1__pcl();
|
||||
|
||||
#if DEBUG
|
||||
[Ignore]
|
||||
#endif
|
||||
[TestMethod]
|
||||
[ExpectedException(typeof(MissingMethodException))]
|
||||
public void Production__v2_59_0__TENCOR1__pcl637955518212649513__Normal()
|
||||
{
|
||||
string check = "*.pcl";
|
||||
bool validatePDSF = false;
|
||||
_TENCOR1.Production__v2_59_0__TENCOR1__pcl();
|
||||
MethodBase methodBase = new StackFrame().GetMethod();
|
||||
string[] variables = _TENCOR1.AdaptationTesting.GetVariables(methodBase, check, validatePDSF);
|
||||
IFileRead fileRead = _TENCOR1.AdaptationTesting.Get(methodBase, sourceFileLocation: variables[2], sourceFileFilter: variables[3], useCyclicalForDescription: false);
|
||||
Logistics logistics = new(fileRead);
|
||||
_ = Shared.AdaptationTesting.ReExtractCompareUpdatePassDirectory(variables, fileRead, logistics, validatePDSF);
|
||||
NonThrowTryCatch();
|
||||
}
|
||||
|
||||
}
|
||||
#endif
|
59
Adaptation/_Tests/Extract/Production/v2.59.0/TENCOR2.cs
Normal file
@ -0,0 +1,59 @@
|
||||
#if true
|
||||
using Adaptation.Shared;
|
||||
using Adaptation.Shared.Methods;
|
||||
using Microsoft.VisualStudio.TestTools.UnitTesting;
|
||||
using System;
|
||||
using System.Diagnostics;
|
||||
using System.Reflection;
|
||||
|
||||
namespace Adaptation._Tests.Extract.Production.v2_59_0;
|
||||
|
||||
[TestClass]
|
||||
public class TENCOR2
|
||||
{
|
||||
|
||||
#pragma warning disable CA2254
|
||||
#pragma warning disable IDE0060
|
||||
|
||||
private static CreateSelfDescription.Production.v2_59_0.TENCOR2 _TENCOR2;
|
||||
|
||||
[ClassInitialize]
|
||||
public static void ClassInitialize(TestContext testContext)
|
||||
{
|
||||
CreateSelfDescription.Production.v2_59_0.TENCOR2.ClassInitialize(testContext);
|
||||
_TENCOR2 = CreateSelfDescription.Production.v2_59_0.TENCOR2.EAFLoggingUnitTesting;
|
||||
}
|
||||
|
||||
private static void NonThrowTryCatch()
|
||||
{
|
||||
try
|
||||
{ throw new Exception(); }
|
||||
catch (Exception) { }
|
||||
}
|
||||
|
||||
#if DEBUG
|
||||
[Ignore]
|
||||
#endif
|
||||
[TestMethod]
|
||||
public void Production__v2_59_0__TENCOR2__pcl() => _TENCOR2.Production__v2_59_0__TENCOR2__pcl();
|
||||
|
||||
#if DEBUG
|
||||
[Ignore]
|
||||
#endif
|
||||
[TestMethod]
|
||||
[ExpectedException(typeof(MissingMethodException))]
|
||||
public void Production__v2_59_0__TENCOR2__pcl637955534973701250__Normal()
|
||||
{
|
||||
string check = "*.pcl";
|
||||
bool validatePDSF = false;
|
||||
_TENCOR2.Production__v2_59_0__TENCOR2__pcl();
|
||||
MethodBase methodBase = new StackFrame().GetMethod();
|
||||
string[] variables = _TENCOR2.AdaptationTesting.GetVariables(methodBase, check, validatePDSF);
|
||||
IFileRead fileRead = _TENCOR2.AdaptationTesting.Get(methodBase, sourceFileLocation: variables[2], sourceFileFilter: variables[3], useCyclicalForDescription: false);
|
||||
Logistics logistics = new(fileRead);
|
||||
_ = Shared.AdaptationTesting.ReExtractCompareUpdatePassDirectory(variables, fileRead, logistics, validatePDSF);
|
||||
NonThrowTryCatch();
|
||||
}
|
||||
|
||||
}
|
||||
#endif
|
77
Adaptation/_Tests/Extract/Production/v2.59.0/TENCOR3.cs
Normal file
@ -0,0 +1,77 @@
|
||||
#if true
using Adaptation.Shared;
using Adaptation.Shared.Methods;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using System;
using System.Diagnostics;
using System.Reflection;

namespace Adaptation._Tests.Extract.Production.v2_59_0;

[TestClass]
public class TENCOR3
{

#pragma warning disable CA2254
#pragma warning disable IDE0060

    private static CreateSelfDescription.Production.v2_59_0.TENCOR3 _TENCOR3;

    [ClassInitialize]
    public static void ClassInitialize(TestContext testContext)
    {
        CreateSelfDescription.Production.v2_59_0.TENCOR3.ClassInitialize(testContext);
        _TENCOR3 = CreateSelfDescription.Production.v2_59_0.TENCOR3.EAFLoggingUnitTesting;
    }

    private static void NonThrowTryCatch()
    {
        try
        { throw new Exception(); }
        catch (Exception) { }
    }

#if DEBUG
    [Ignore]
#endif
    [TestMethod]
    public void Production__v2_59_0__TENCOR3__pcl() => _TENCOR3.Production__v2_59_0__TENCOR3__pcl();

#if DEBUG
    [Ignore]
#endif
    [TestMethod]
    [ExpectedException(typeof(MissingMethodException))]
    public void Production__v2_59_0__TENCOR3__pcl637955520360305921__Normal()
    {
        string check = "*.pcl";
        bool validatePDSF = false;
        _TENCOR3.Production__v2_59_0__TENCOR3__pcl();
        MethodBase methodBase = new StackFrame().GetMethod();
        string[] variables = _TENCOR3.AdaptationTesting.GetVariables(methodBase, check, validatePDSF);
        IFileRead fileRead = _TENCOR3.AdaptationTesting.Get(methodBase, sourceFileLocation: variables[2], sourceFileFilter: variables[3], useCyclicalForDescription: false);
        Logistics logistics = new(fileRead);
        _ = Shared.AdaptationTesting.ReExtractCompareUpdatePassDirectory(variables, fileRead, logistics, validatePDSF);
        NonThrowTryCatch();
    }

#if DEBUG
    [Ignore]
#endif
    [TestMethod]
    [ExpectedException(typeof(MissingMethodException))]
    public void Production__v2_59_0__TENCOR3__pcl638725158781216195__Parital()
    {
        string check = "*.pcl";
        bool validatePDSF = false;
        _TENCOR3.Production__v2_59_0__TENCOR3__pcl();
        MethodBase methodBase = new StackFrame().GetMethod();
        string[] variables = _TENCOR3.AdaptationTesting.GetVariables(methodBase, check, validatePDSF);
        IFileRead fileRead = _TENCOR3.AdaptationTesting.Get(methodBase, sourceFileLocation: variables[2], sourceFileFilter: variables[3], useCyclicalForDescription: false);
        Logistics logistics = new(fileRead);
        _ = Shared.AdaptationTesting.ReExtractCompareUpdatePassDirectory(variables, fileRead, logistics, validatePDSF);
        NonThrowTryCatch();
    }

}
#endif
File diff suppressed because it is too large
@ -51,7 +51,7 @@ public class MET08DDUPSFS6420 : LoggingUnitTesting, IDisposable
    public void TestDateTime()
    {
        DateTime dateTime = DateTime.Now;
        Assert.IsTrue(dateTime.ToString("M/d/yyyy h:mm:ss tt") == dateTime.ToString());
        Assert.AreEqual(dateTime.ToString(), dateTime.ToString("M/d/yyyy h:mm:ss tt"));
    }

#if DEBUG
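The assertion rewrites in this and the following test hunks replace boolean checks with the dedicated equality asserts, in line with the MSTEST0037 analyzer rule ("Use proper 'Assert' methods"). A minimal sketch of the pattern, assuming MSTest; the class and values below are illustrative and not part of the repository:

```csharp
using Microsoft.VisualStudio.TestTools.UnitTesting;

[TestClass]
public class AssertPatternSketch
{
    [TestMethod]
    public void PreferDedicatedAssertMethods()
    {
        string expected = "2.59.0";
        string actual = "2.59.0";

        // Flagged by MSTEST0037: a failure here only reports "Assert.IsTrue failed".
        Assert.IsTrue(actual == expected);

        // Preferred: a failure here reports both the expected and the actual value.
        Assert.AreEqual(expected, actual);
    }
}
```

The same substitution runs through the PCL descriptor tests further down, for example Assert.IsTrue(descriptor.Reactor is "12") becomes Assert.AreEqual("12", descriptor.Reactor).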
@ -64,7 +64,7 @@ public class MET08DDUPSFS6420 : LoggingUnitTesting, IDisposable
        StringBuilder results = new();
        (string cellInstanceName, string cellInstanceVersionName)[] collection = new (string, string)[]
        {
            new("MET08DDUPSFS6420", "v2.58.0"),
            new("MET08DDUPSFS6420", "v2.59.0"),
        };
        string production = "http://messa08ec.infineon.com:9003/CellInstanceServiceV2";
        Shared.PasteSpecialXml.EAF.XML.API.CellInstance.CellInstanceVersion cellInstanceVersion;
@ -52,7 +52,7 @@ public class PCL : LoggingUnitTesting, IDisposable
    public void TestDateTime()
    {
        DateTime dateTime = DateTime.Now;
        Assert.IsTrue(dateTime.ToString("M/d/yyyy h:mm:ss tt") == dateTime.ToString());
        Assert.AreEqual(dateTime.ToString(), dateTime.ToString("M/d/yyyy h:mm:ss tt"));
    }

    [TestMethod]
@ -69,22 +69,22 @@ public class PCL : LoggingUnitTesting, IDisposable
        Assert.IsTrue(string.IsNullOrEmpty(descriptor.Zone));
        Assert.IsTrue(string.IsNullOrEmpty(descriptor.Employee));
        descriptor = ProcessData.GetDescriptor("12-123456-1234");
        Assert.IsTrue(descriptor.Reactor is "12");
        Assert.IsTrue(descriptor.RDS is "123456");
        Assert.IsTrue(descriptor.PSN is "1234");
        Assert.AreEqual("12", descriptor.Reactor);
        Assert.AreEqual("123456", descriptor.RDS);
        Assert.AreEqual("1234", descriptor.PSN);
        Assert.IsTrue(string.IsNullOrEmpty(descriptor.Layer));
        Assert.IsTrue(string.IsNullOrEmpty(descriptor.Zone));
        Assert.IsTrue(string.IsNullOrEmpty(descriptor.Employee));
        descriptor = ProcessData.GetDescriptor("123456");
        Assert.IsTrue(string.IsNullOrEmpty(descriptor.Reactor));
        Assert.IsTrue(descriptor.RDS is "123456");
        Assert.AreEqual("123456", descriptor.RDS);
        Assert.IsTrue(string.IsNullOrEmpty(descriptor.PSN));
        Assert.IsTrue(string.IsNullOrEmpty(descriptor.Layer));
        Assert.IsTrue(string.IsNullOrEmpty(descriptor.Zone));
        Assert.IsTrue(string.IsNullOrEmpty(descriptor.Employee));
        descriptor = ProcessData.GetDescriptor("1T123456");
        Assert.IsTrue(string.IsNullOrEmpty(descriptor.Reactor));
        Assert.IsTrue(descriptor.RDS is "123456");
        Assert.AreEqual("123456", descriptor.RDS);
        Assert.IsTrue(string.IsNullOrEmpty(descriptor.PSN));
        Assert.IsTrue(string.IsNullOrEmpty(descriptor.Layer));
        Assert.IsTrue(string.IsNullOrEmpty(descriptor.Zone));
@ -93,96 +93,96 @@ public class PCL : LoggingUnitTesting, IDisposable
        Assert.IsTrue(string.IsNullOrEmpty(descriptor.Reactor));
        Assert.IsTrue(string.IsNullOrEmpty(descriptor.RDS));
        Assert.IsTrue(string.IsNullOrEmpty(descriptor.PSN));
        Assert.IsTrue(descriptor.Employee is "MP");
        Assert.AreEqual("MP", descriptor.Employee);
        Assert.IsTrue(string.IsNullOrEmpty(descriptor.Layer));
        Assert.IsTrue(string.IsNullOrEmpty(descriptor.Zone));
        descriptor = ProcessData.GetDescriptor("12-123456-1234.2-1");
        Assert.IsTrue(descriptor.Reactor is "12");
        Assert.IsTrue(descriptor.RDS is "123456");
        Assert.IsTrue(descriptor.PSN is "1234");
        Assert.IsTrue(descriptor.Layer is "2");
        Assert.IsTrue(descriptor.Zone is "1");
        Assert.AreEqual("12", descriptor.Reactor);
        Assert.AreEqual("123456", descriptor.RDS);
        Assert.AreEqual("1234", descriptor.PSN);
        Assert.AreEqual("2", descriptor.Layer);
        Assert.AreEqual("1", descriptor.Zone);
        Assert.IsTrue(string.IsNullOrEmpty(descriptor.Employee));
        descriptor = ProcessData.GetDescriptor("12-123456-1234.02-1");
        Assert.IsTrue(descriptor.Reactor is "12");
        Assert.IsTrue(descriptor.RDS is "123456");
        Assert.IsTrue(descriptor.PSN is "1234");
        Assert.IsTrue(descriptor.Layer is "2");
        Assert.IsTrue(descriptor.Zone is "1");
        Assert.AreEqual("12", descriptor.Reactor);
        Assert.AreEqual("123456", descriptor.RDS);
        Assert.AreEqual("1234", descriptor.PSN);
        Assert.AreEqual("2", descriptor.Layer);
        Assert.AreEqual("1", descriptor.Zone);
        Assert.IsTrue(string.IsNullOrEmpty(descriptor.Employee));
        descriptor = ProcessData.GetDescriptor("20");
        Assert.IsTrue(descriptor.Reactor is "20");
        Assert.AreEqual("20", descriptor.Reactor);
        Assert.IsTrue(string.IsNullOrEmpty(descriptor.RDS));
        Assert.IsTrue(string.IsNullOrEmpty(descriptor.PSN));
        Assert.IsTrue(string.IsNullOrEmpty(descriptor.Layer));
        Assert.IsTrue(string.IsNullOrEmpty(descriptor.Zone));
        Assert.IsTrue(string.IsNullOrEmpty(descriptor.Employee));
        descriptor = ProcessData.GetDescriptor("20.2");
        Assert.IsTrue(descriptor.Reactor is "20");
        Assert.AreEqual("20", descriptor.Reactor);
        Assert.IsTrue(string.IsNullOrEmpty(descriptor.RDS));
        Assert.IsTrue(string.IsNullOrEmpty(descriptor.PSN));
        Assert.IsTrue(descriptor.Layer is "2");
        Assert.AreEqual("2", descriptor.Layer);
        Assert.IsTrue(string.IsNullOrEmpty(descriptor.Zone));
        Assert.IsTrue(string.IsNullOrEmpty(descriptor.Employee));
        descriptor = ProcessData.GetDescriptor("20.2.1");
        Assert.IsTrue(descriptor.Layer is "2");
        Assert.AreEqual("2", descriptor.Layer);
        Assert.IsTrue(string.IsNullOrEmpty(descriptor.PSN));
        Assert.IsTrue(string.IsNullOrEmpty(descriptor.RDS));
        Assert.IsTrue(descriptor.Reactor is "20");
        Assert.IsTrue(descriptor.Zone is "1");
        Assert.AreEqual("20", descriptor.Reactor);
        Assert.AreEqual("1", descriptor.Zone);
        Assert.IsTrue(string.IsNullOrEmpty(descriptor.Employee));
        descriptor = ProcessData.GetDescriptor("20.1.1");
        Assert.IsTrue(descriptor.Layer is "1");
        Assert.AreEqual("1", descriptor.Layer);
        Assert.IsTrue(string.IsNullOrEmpty(descriptor.PSN));
        Assert.IsTrue(string.IsNullOrEmpty(descriptor.RDS));
        Assert.IsTrue(descriptor.Reactor is "20");
        Assert.IsTrue(descriptor.Zone is "1");
        Assert.AreEqual("20", descriptor.Reactor);
        Assert.AreEqual("1", descriptor.Zone);
        Assert.IsTrue(string.IsNullOrEmpty(descriptor.Employee));
        descriptor = ProcessData.GetDescriptor("P2-LOW-RR");
        Assert.IsTrue(string.IsNullOrEmpty(descriptor.Layer));
        Assert.IsTrue(descriptor.PSN is "RR");
        Assert.AreEqual("RR", descriptor.PSN);
        Assert.IsTrue(string.IsNullOrEmpty(descriptor.RDS));
        Assert.IsTrue(descriptor.Reactor is "P2");
        Assert.AreEqual("P2", descriptor.Reactor);
        Assert.IsTrue(string.IsNullOrEmpty(descriptor.Zone));
        Assert.IsTrue(string.IsNullOrEmpty(descriptor.Employee));
        descriptor = ProcessData.GetDescriptor("i171308.1.51");
        Assert.IsTrue(string.IsNullOrEmpty(descriptor.Layer));
        Assert.IsTrue(string.IsNullOrEmpty(descriptor.PSN));
        Assert.IsTrue(descriptor.RDS is "i171308.1.51");
        Assert.AreEqual("i171308.1.51", descriptor.RDS);
        Assert.IsTrue(string.IsNullOrEmpty(descriptor.Reactor));
        Assert.IsTrue(string.IsNullOrEmpty(descriptor.Zone));
        Assert.IsTrue(string.IsNullOrEmpty(descriptor.Employee));
        descriptor = ProcessData.GetDescriptor("o171308.1.51");
        Assert.IsTrue(string.IsNullOrEmpty(descriptor.Layer));
        Assert.IsTrue(string.IsNullOrEmpty(descriptor.PSN));
        Assert.IsTrue(descriptor.RDS is "o171308.1.51");
        Assert.AreEqual("o171308.1.51", descriptor.RDS);
        Assert.IsTrue(string.IsNullOrEmpty(descriptor.Reactor));
        Assert.IsTrue(string.IsNullOrEmpty(descriptor.Zone));
        Assert.IsTrue(string.IsNullOrEmpty(descriptor.Employee));
        descriptor = ProcessData.GetDescriptor("O171308.1.51");
        Assert.IsTrue(string.IsNullOrEmpty(descriptor.Layer));
        Assert.IsTrue(string.IsNullOrEmpty(descriptor.PSN));
        Assert.IsTrue(descriptor.RDS is "O171308.1.51");
        Assert.AreEqual("O171308.1.51", descriptor.RDS);
        Assert.IsTrue(string.IsNullOrEmpty(descriptor.Reactor));
        Assert.IsTrue(string.IsNullOrEmpty(descriptor.Zone));
        Assert.IsTrue(string.IsNullOrEmpty(descriptor.Employee));
        descriptor = ProcessData.GetDescriptor("171308.1.51");
        Assert.IsTrue(string.IsNullOrEmpty(descriptor.Layer));
        Assert.IsTrue(string.IsNullOrEmpty(descriptor.PSN));
        Assert.IsTrue(descriptor.RDS is "171308.1.51");
        Assert.AreEqual("171308.1.51", descriptor.RDS);
        Assert.IsTrue(string.IsNullOrEmpty(descriptor.Reactor));
        Assert.IsTrue(string.IsNullOrEmpty(descriptor.Zone));
        Assert.IsTrue(string.IsNullOrEmpty(descriptor.Employee));
        descriptor = ProcessData.GetDescriptor("75-QP1414-SPLIT4");
        Assert.IsTrue(!string.IsNullOrEmpty(descriptor.Lot));
        Assert.IsFalse(string.IsNullOrEmpty(descriptor.Lot));
        Assert.IsTrue(string.IsNullOrEmpty(descriptor.Layer));
        Assert.IsTrue(descriptor.PSN is "SPLIT4");
        Assert.AreEqual("SPLIT4", descriptor.PSN);
        Assert.IsTrue(string.IsNullOrEmpty(descriptor.RDS));
        Assert.IsTrue(descriptor.Reactor is "75");
        Assert.AreEqual("75", descriptor.Reactor);
        Assert.IsTrue(string.IsNullOrEmpty(descriptor.Zone));
        Assert.IsTrue(string.IsNullOrEmpty(descriptor.Employee));
        descriptor = ProcessData.GetDescriptor("B48");
        Assert.IsTrue(descriptor.Lot == "B48");
        Assert.AreEqual("B48", descriptor.Lot);
        Assert.IsTrue(string.IsNullOrEmpty(descriptor.Layer));
        Assert.IsTrue(string.IsNullOrEmpty(descriptor.PSN));
        Assert.IsTrue(string.IsNullOrEmpty(descriptor.PSN));
@ -203,9 +203,9 @@ public class PCL : LoggingUnitTesting, IDisposable
        StringBuilder results = new();
        (string cellInstanceName, string cellInstanceVersionName)[] collection = new (string, string)[]
        {
            new("TENCOR1", "v2.58.0"),
            new("TENCOR2", "v2.58.0"),
            new("TENCOR3", "v2.58.0"),
            new("TENCOR1", "v2.59.0"),
            new("TENCOR2", "v2.59.0"),
            new("TENCOR3", "v2.59.0"),
            new("TENCOR1-EQPT", "v2.12.3"),
            new("TENCOR2-EQPT", "v2.12.3"),
            new("TENCOR3-EQPT", "v2.12.3"),
@ -37,7 +37,7 @@ public partial class FileRead : FileReaderHandler, ISMTP
    private FilePathGenerator _FilePathGeneratorForTarget;
    private readonly List<EquipmentParameter> _EquipmentParameters;
    private static readonly Dictionary<string, List<long>> _DummyRuns;
    private static readonly Dictionary<long, List<string>> _StaticRuns;
    private static readonly Dictionary<long, List<Adaptation.Shared.Metrology.WS.Results>> _StaticRuns;

    static FileRead()
    {
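The field swap above changes the value type of the static runs cache from raw strings to the typed Adaptation.Shared.Metrology.WS.Results. A minimal sketch of that kind of keyed cache, using a stand-in result type; the real Results class is defined in Adaptation\Shared\Metrology\WS.Results.cs and is not reproduced here:

```csharp
using System.Collections.Generic;

// Stand-in only: the actual type is Adaptation.Shared.Metrology.WS.Results.
public sealed class ResultsStandIn
{
    public bool Success { get; set; }
    public long HeaderId { get; set; }
}

public static class StaticRunsCacheSketch
{
    // Keyed by a long run identifier; holds the typed results captured for that run.
    private static readonly Dictionary<long, List<ResultsStandIn>> _StaticRuns = new();

    public static void Add(long sequence, ResultsStandIn results)
    {
        if (!_StaticRuns.TryGetValue(sequence, out List<ResultsStandIn> list))
        {
            list = new List<ResultsStandIn>();
            _StaticRuns.Add(sequence, list);
        }
        list.Add(results);
    }
}
```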
@ -166,6 +166,7 @@
    <Compile Include="Adaptation\Shared\Metrology\WS.Results.cs" />
    <Compile Include="Adaptation\Shared\ParameterType.cs" />
    <Compile Include="Adaptation\Shared\ProcessDataStandardFormat.cs" />
    <Compile Include="Adaptation\Shared\ProcessDataStandardFormatMapping.cs" />
    <Compile Include="Adaptation\Shared\Properties\IDescription.cs" />
    <Compile Include="Adaptation\Shared\Properties\IFileRead.cs" />
    <Compile Include="Adaptation\Shared\Properties\ILogistics.cs" />
@ -184,7 +185,7 @@
      <Version>7.2.4630.5</Version>
    </PackageReference>
    <PackageReference Include="Infineon.EAF.Runtime">
      <Version>2.58.0</Version>
      <Version>2.59.0</Version>
    </PackageReference>
    <PackageReference Include="Pdfbox">
      <Version>1.1.1</Version>
@ -32,5 +32,5 @@ using System.Runtime.InteropServices;
// You can specify all the values or you can default the Build and Revision Numbers
// by using the '*' as shown below:
// [assembly: AssemblyVersion("1.0.*")]
[assembly: AssemblyVersion("2.58.0.0")]
[assembly: AssemblyFileVersion("2.58.0.0")]
[assembly: AssemblyVersion("2.59.0.0")]
[assembly: AssemblyFileVersion("2.59.0.0")]
@ -35,6 +35,9 @@ public class Logistics : ILogistics
    public long Sequence => _Sequence;
    public double TotalSecondsSinceLastWriteTimeFromSequence => _TotalSecondsSinceLastWriteTimeFromSequence;

    private static string DefaultMesEntity(DateTime dateTime) =>
        string.Concat(dateTime.Ticks, "_MES_ENTITY");

    public Logistics(IFileRead fileRead)
    {
        DateTime dateTime = DateTime.Now;
@ -59,7 +62,7 @@ public class Logistics : ILogistics
            throw new Exception();
        _NullData = fileRead.NullData;
        _FileInfo = new(reportFullPath);
        DateTime dateTime = new (_FileInfo.LastWriteTime.Ticks + tickOffset);
        DateTime dateTime = new(_FileInfo.LastWriteTime.Ticks + tickOffset);
        if (fileInfoLength.HasValue && _FileInfo.Length < fileInfoLength.Value)
            dateTime = dateTime.AddTicks(-1);
        _JobID = fileRead.CellInstanceName;
@ -84,14 +87,14 @@ public class Logistics : ILogistics
        _Logistics2 = new List<Logistics2>();
    }

    public Logistics(string reportFullPath, string logistics)
    internal Logistics(string reportFullPath, ProcessDataStandardFormat processDataStandardFormat)
    {
        string key;
        DateTime dateTime;
        string[] segments;
        _FileInfo = new(reportFullPath);
        _Logistics1 = logistics.Split(new string[] { Environment.NewLine }, StringSplitOptions.RemoveEmptyEntries).ToList();
        if (!Logistics1.Any() || !Logistics1[0].StartsWith("LOGISTICS_1"))
        _Logistics1 = processDataStandardFormat.Logistics.ToList();
        if (Logistics1.Count == 0 || !Logistics1[0].StartsWith("LOGISTICS_1"))
        {
            _NullData = null;
            _JobID = "null";
@ -190,8 +193,6 @@ public class Logistics : ILogistics
        }
    }

    private static string DefaultMesEntity(DateTime dateTime) => string.Concat(dateTime.Ticks, "_MES_ENTITY");

    internal void Update(string mid, string processJobID)
    {
        _MID = mid;
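The two hunks above change how a Logistics instance is rebuilt from an existing report: the constructor becomes internal, accepts a ProcessDataStandardFormat instead of the raw logistics text, and reads the already-split lines from its Logistics collection, while DefaultMesEntity moves up next to the other members. A rough sketch of the old versus new shape, using a stand-in for ProcessDataStandardFormat; only the Logistics property used in the hunk is assumed:

```csharp
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Linq;

// Stand-in only: the real ProcessDataStandardFormat lives in Adaptation\Shared.
public sealed class ProcessDataStandardFormatStandIn
{
    public ReadOnlyCollection<string> Logistics { get; }

    public ProcessDataStandardFormatStandIn(string[] logistics) =>
        Logistics = new ReadOnlyCollection<string>(logistics);
}

public static class LogisticsConstructionSketch
{
    public static void Demonstrate(string rawLogistics, ProcessDataStandardFormatStandIn pdsf)
    {
        // Old shape: the constructor split the raw logistics text block into lines itself.
        List<string> oldLogistics1 = rawLogistics
            .Split(new string[] { Environment.NewLine }, StringSplitOptions.RemoveEmptyEntries)
            .ToList();

        // New shape: the already-parsed lines come from the ProcessDataStandardFormat instance.
        List<string> newLogistics1 = pdsf.Logistics.ToList();

        Console.WriteLine($"old: {oldLogistics1.Count} lines, new: {newLogistics1.Count} lines");
    }
}
```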