4 Commits

Author SHA1 Message Date
b17d2e057e Code alignment 2025-07-23 10:58:20 -07:00
f7e0a34108 Now relying on pipeline to copy files from file shares for ghost-pcl and linc-pdfc; pdftext-stripper logic is already ordered, and now looping existing text files is also ordered 2025-06-27 12:42:44 -07:00
3467fb63a0 Removed save-open-insight-file to use process-data-standard-format instead 2025-06-10 07:37:23 -07:00
0ef44389c6 Updated tests with new runs 2025-06-10 07:37:09 -07:00
18 changed files with 275 additions and 356 deletions

View File

@@ -128,7 +128,7 @@ public class FileRead : Shared.FileRead, IFileRead
Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
FileCopy(reportFullPath, dateTime, descriptions);
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics[0]), tests, jsonElements, new List<FileInfo>());
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics), tests, jsonElements, new List<FileInfo>());
return results;
}
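This one-line change repeats in several of the handlers below: joining Environment.NewLine over Logistics[0] passes a single string through, so only the first logistics line reached the tuple, while joining the whole Logistics collection keeps every line. A minimal sketch of the difference, with hypothetical logistics values:

using System;
using System.Collections.Generic;

// Minimal sketch of the repeated Logistics[0] -> Logistics change; the values are hypothetical.
class LogisticsJoinSketch
{
    static void Main()
    {
        List<string> logistics = new()
        {
            "LOGISTICS_1\tA_JOBID=TENCOR1;SEQUENCE=638851139271252054;",
            "LOGISTICS_2\tA_JOBID=TENCOR1;SEQUENCE=638851139271252055;",
        };
        string before = string.Join(Environment.NewLine, logistics[0]); // old call: only the first entry
        string after = string.Join(Environment.NewLine, logistics);     // new call: both entries, newline-separated
        Console.WriteLine(before);
        Console.WriteLine(after);
    }
}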

View File

@@ -153,7 +153,7 @@ public class FileRead : Shared.FileRead, IFileRead
Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
MoveArchive(reportFullPath, dateTime);
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics[0]), tests, jsonElements, new List<FileInfo>());
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics), tests, jsonElements, new List<FileInfo>());
return results;
}

View File

@@ -8,6 +8,7 @@ using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Text.Json;
namespace Adaptation.FileHandlers.IQSSi;
@@ -107,11 +108,63 @@ public class FileRead : Shared.FileRead, IFileRead
return results;
}
private static string GetLines(Logistics logistics, List<pcl.Description> descriptions)
{
StringBuilder result = new();
pcl.Description x = descriptions[0];
char del = '\t';
_ = result.Append(x.AreaCountAvg).Append(del). // 001 - AreaCountAvg
Append(x.AreaCountMax).Append(del). // 002 - AreaCountMax
Append(x.AreaCountMin).Append(del). // 003 - AreaCountMin
Append(x.AreaCountStdDev).Append(del). // 004 - AreaCountStdDev
Append(x.AreaTotalAvg).Append(del). // 005 - AreaTotalAvg
Append(x.AreaTotalMax).Append(del). // 006 - AreaTotalMax
Append(x.AreaTotalMin).Append(del). // 007 - AreaTotalMin
Append(x.AreaTotalStdDev).Append(del). // 008 - AreaTotalStdDev
Append(x.Date).Append(del). // 009 -
Append(x.HazeAverageAvg).Append(del). // 010 - Haze Average
Append(x.HazeAverageMax).Append(del). // 011 -
Append(x.HazeAverageMin).Append(del). // 012 -
Append(x.HazeAverageStdDev).Append(del). // 013 -
Append(x.HazeRegionAvg).Append(del). // 014 -
Append(x.HazeRegionMax).Append(del). // 015 -
Append(x.HazeRegionMin).Append(del). // 016 -
Append(x.HazeRegionStdDev).Append(del). // 017 -
Append(x.Lot).Append(del). // 018 -
Append(x.LPDCM2Avg).Append(del). // 019 -
Append(x.LPDCM2Max).Append(del). // 020 -
Append(x.LPDCM2Min).Append(del). // 021 -
Append(x.LPDCM2StdDev).Append(del). // 022 -
Append(x.LPDCountAvg).Append(del). // 023 -
Append(x.LPDCountMax).Append(del). // 024 -
Append(x.LPDCM2Min).Append(del). // 025 -
Append(x.LPDCountStdDev).Append(del). // 026 -
Append(x.Employee).Append(del). // 027 -
Append(x.RDS).Append(del). // 028 - Lot
Append(x.Reactor).Append(del). // 029 - Process
Append(x.Recipe.Replace(";", string.Empty)).Append(del). // 030 - Part
Append(x.ScratchCountAvg).Append(del). // 031 - Scratch Count
Append(x.ScratchCountMax).Append(del). // 032 -
Append(x.ScratchCountMin).Append(del). // 033 -
Append(x.ScratchTotalStdDev).Append(del). // 034 -
Append(x.ScratchTotalAvg).Append(del). // 035 - Scratch Length
Append(x.ScratchTotalMax).Append(del). // 036 -
Append(x.ScratchTotalMin).Append(del). // 037 -
Append(x.ScratchTotalStdDev).Append(del). // 038 -
Append(x.SumOfDefectsAvg).Append(del). // 039 - Average Sum of Defects
Append(x.SumOfDefectsMax).Append(del). // 040 - Max Sum of Defects
Append(x.SumOfDefectsMin).Append(del). // 041 - Min Sum of Defects
Append(x.SumOfDefectsStdDev).Append(del). // 042 - SumOfDefectsStdDev
Append(logistics.MesEntity).Append(del). // 043 -
AppendLine();
return result.ToString();
}
private void SaveIQSFile(string reportFullPath, DateTime dateTime, List<pcl.Description> descriptions, Test[] tests)
{
bool isDummyRun = false;
List<(Shared.Properties.IScopeInfo, string)> collection = new();
string lines = OpenInsight.FileRead.GetLines(_Logistics, descriptions);
string lines = GetLines(_Logistics, descriptions);
string successDirectory = _FileConnectorConfiguration.AlternateTargetFolder;
string duplicateDirectory = Path.Combine(_FileConnectorConfiguration.SourceFileLocation, _CellInstanceName);
if (!Directory.Exists(duplicateDirectory))
@@ -160,7 +213,7 @@ public class FileRead : Shared.FileRead, IFileRead
JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(processDataStandardFormat);
List<pcl.Description> descriptions = pcl.ProcessData.GetDescriptions(jsonElements);
Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics[0]), tests, jsonElements, new List<FileInfo>());
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics), tests, jsonElements, new List<FileInfo>());
if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
SaveIQSFile(reportFullPath, dateTime, descriptions, tests);
if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)

View File

@@ -6,11 +6,9 @@ using Adaptation.Shared.Methods;
using Adaptation.Shared.Metrology;
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Text;
using System.Text.Json;
namespace Adaptation.FileHandlers.OpenInsight;
@@ -18,16 +16,12 @@ namespace Adaptation.FileHandlers.OpenInsight;
public class FileRead : Shared.FileRead, IFileRead
{
private int _LastIndex;
private readonly string _IqsConnectionString;
private readonly string _OpenInsightFilePattern;
private readonly string _OpenInsightApiECDirectory;
private readonly ReadOnlyCollection<ModelObjectParameterDefinition> _IQSCopyCollection;
public FileRead(ISMTP smtp, Dictionary<string, string> fileParameter, string cellInstanceName, int? connectionCount, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList<ModelObjectParameterDefinition> modelObjectParameters, string equipmentDictionaryName, Dictionary<string, List<long>> dummyRuns, Dictionary<long, List<WS.Results>> staticRuns, bool useCyclicalForDescription, bool isEAFHosted) :
base(new Description(), false, smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null)
{
_LastIndex = -1;
_MinFileLength = 10;
_Logistics = new(this);
_NullData = string.Empty;
@@ -37,10 +31,8 @@ public class FileRead : Shared.FileRead, IFileRead
throw new Exception(cellInstanceConnectionName);
if (!_IsDuplicator)
throw new Exception(cellInstanceConnectionName);
_IQSCopyCollection = new(GetProperties(cellInstanceConnectionName, modelObjectParameters, "IQS.Copy."));
_IqsConnectionString = GetPropertyValue(cellInstanceConnectionName, modelObjectParameters, "IQS.ConnectionString");
_OpenInsightApiECDirectory = GetPropertyValue(cellInstanceConnectionName, modelObjectParameters, "API.EC.Directory");
_OpenInsightFilePattern = GetPropertyValue(cellInstanceConnectionName, modelObjectParameters, "OpenInsight.FilePattern");
}
void IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, Exception exception)
@@ -118,128 +110,9 @@ public class FileRead : Shared.FileRead, IFileRead
return results;
}
internal static string GetLines(Logistics logistics, List<pcl.Description> descriptions)
{
StringBuilder result = new();
pcl.Description x = descriptions[0];
bool ganPPTST = x.Recipe.Contains("GAN_PPTST");
if (ganPPTST)
{
string slot;
string reactor;
const int eight = 8;
DateTime dateTime = DateTime.Parse(x.Date);
string lot = x.Lot.ToLower().Replace("69-", string.Empty).Replace("71-", string.Empty).Replace("-", string.Empty);
if (string.IsNullOrEmpty(x.Lot) || x.Lot.Length < 2)
reactor = "R";
else
reactor = string.Concat("R", x.Lot.Substring(0, 2));
_ = result.Append(nameof(x.Date)).Append(';').
Append("Part").Append(';').
Append(nameof(x.Reactor)).Append(';').
Append("Lot").Append(';').
Append(nameof(pcl.Detail.Slot)).Append(';').
Append(nameof(pcl.Detail.Bin1)).Append(';').
Append(nameof(pcl.Detail.Bin2)).Append(';').
Append(nameof(pcl.Detail.Bin3)).Append(';').
Append(nameof(pcl.Detail.Bin4)).Append(';').
Append(nameof(pcl.Detail.Bin5)).Append(';').
Append(nameof(pcl.Detail.Bin6)).Append(';').
Append("Bin9").
AppendLine();
foreach (pcl.Description description in descriptions)
{
slot = description.Slot.Replace("*", string.Empty);
_ = result.Append('!').Append(dateTime.ToString("MM/dd/yyyy HH:mm:ss")).Append(';').
Append("Particle Adder;").
Append(reactor).Append(';').
Append(lot).Append(';').
Append(slot).Append(';').
Append(description.Bin1).Append(';').
Append(description.Bin2).Append(';').
Append(description.Bin3).Append(';').
Append(description.Bin4).Append(';').
Append(description.Bin5).Append(';').
Append(description.Bin6).Append(';').
Append(description.AreaCount).
AppendLine();
}
if (descriptions.Count != eight)
{
string negativeTenThousand = "-10000";
for (int i = descriptions.Count; i < eight; i++)
{
_ = result.Append('!').Append(dateTime.ToString("MM/dd/yyyy HH:mm:ss")).Append(';').
Append("Particle Adder;").
Append(reactor).Append(';').
Append(lot).Append(';').
Append(negativeTenThousand).Append(';').
Append(negativeTenThousand).Append(';').
Append(negativeTenThousand).Append(';').
Append(negativeTenThousand).Append(';').
Append(negativeTenThousand).Append(';').
Append(negativeTenThousand).Append(';').
Append(negativeTenThousand).Append(';').
Append(negativeTenThousand).
AppendLine();
}
}
if (result.ToString().Split('\n').Length != (eight + 2))
throw new Exception(string.Concat("Must have ", eight, " samples"));
}
else
{
char del = '\t';
_ = result.Append(x.AreaCountAvg).Append(del). // 001 - AreaCountAvg
Append(x.AreaCountMax).Append(del). // 002 - AreaCountMax
Append(x.AreaCountMin).Append(del). // 003 - AreaCountMin
Append(x.AreaCountStdDev).Append(del). // 004 - AreaCountStdDev
Append(x.AreaTotalAvg).Append(del). // 005 - AreaTotalAvg
Append(x.AreaTotalMax).Append(del). // 006 - AreaTotalMax
Append(x.AreaTotalMin).Append(del). // 007 - AreaTotalMin
Append(x.AreaTotalStdDev).Append(del). // 008 - AreaTotalStdDev
Append(x.Date).Append(del). // 009 -
Append(x.HazeAverageAvg).Append(del). // 010 - Haze Average
Append(x.HazeAverageMax).Append(del). // 011 -
Append(x.HazeAverageMin).Append(del). // 012 -
Append(x.HazeAverageStdDev).Append(del). // 013 -
Append(x.HazeRegionAvg).Append(del). // 014 -
Append(x.HazeRegionMax).Append(del). // 015 -
Append(x.HazeRegionMin).Append(del). // 016 -
Append(x.HazeRegionStdDev).Append(del). // 017 -
Append(x.Lot).Append(del). // 018 -
Append(x.LPDCM2Avg).Append(del). // 019 -
Append(x.LPDCM2Max).Append(del). // 020 -
Append(x.LPDCM2Min).Append(del). // 021 -
Append(x.LPDCM2StdDev).Append(del). // 022 -
Append(x.LPDCountAvg).Append(del). // 023 -
Append(x.LPDCountMax).Append(del). // 024 -
Append(x.LPDCM2Min).Append(del). // 025 -
Append(x.LPDCountStdDev).Append(del). // 026 -
Append(x.Employee).Append(del). // 027 -
Append(x.RDS).Append(del). // 028 - Lot
Append(x.Reactor).Append(del). // 029 - Process
Append(x.Recipe.Replace(";", string.Empty)).Append(del). // 030 - Part
Append(x.ScratchCountAvg).Append(del). // 031 - Scratch Count
Append(x.ScratchCountMax).Append(del). // 032 -
Append(x.ScratchCountMin).Append(del). // 033 -
Append(x.ScratchTotalStdDev).Append(del). // 034 -
Append(x.ScratchTotalAvg).Append(del). // 035 - Scratch Length
Append(x.ScratchTotalMax).Append(del). // 036 -
Append(x.ScratchTotalMin).Append(del). // 037 -
Append(x.ScratchTotalStdDev).Append(del). // 038 -
Append(x.SumOfDefectsAvg).Append(del). // 039 - Average Sum of Defects
Append(x.SumOfDefectsMax).Append(del). // 040 - Max Sum of Defects
Append(x.SumOfDefectsMin).Append(del). // 041 - Min Sum of Defects
Append(x.SumOfDefectsStdDev).Append(del). // 042 - SumOfDefectsStdDev
Append(logistics.MesEntity).Append(del). // 043 -
AppendLine();
}
return result.ToString();
}
private void SaveOpenInsightFile(string reportFullPath, DateTime dateTime, ProcessDataStandardFormat processDataStandardFormat, List<pcl.Description> descriptions, Test[] tests)
{
string duplicateFile;
bool isDummyRun = false;
List<(Shared.Properties.IScopeInfo, string)> collection = new();
string duplicateDirectory = Path.Combine(_FileConnectorConfiguration.SourceFileLocation, _CellInstanceName);
@@ -255,50 +128,37 @@ public class FileRead : Shared.FileRead, IFileRead
if (!Directory.Exists(duplicateDirectory))
_ = Directory.CreateDirectory(duplicateDirectory);
}
string duplicateFile = Path.Combine(duplicateDirectory, Path.GetFileName(reportFullPath));
if (descriptions.Count != 0 && tests.Length != 0)
if (descriptions.Count == 0 || tests.Length == 0)
duplicateFile = Path.Combine(duplicateDirectory, Path.GetFileName(reportFullPath));
else
{
string lines = GetLines(_Logistics, descriptions);
if (!string.IsNullOrEmpty(lines))
long? subgroupId;
string fileName = Path.GetFileName(reportFullPath);
long breakAfter = dateTime.AddSeconds(_BreakAfterSeconds).Ticks;
long preWait = _FileConnectorConfiguration?.FileHandleWaitTime is null ? dateTime.AddMilliseconds(1234).Ticks : dateTime.AddMilliseconds(_FileConnectorConfiguration.FileHandleWaitTime.Value).Ticks;
if (string.IsNullOrEmpty(descriptions[0].Reactor) || string.IsNullOrEmpty(descriptions[0].PSN))
subgroupId = null;
else
(subgroupId, int? _, string _) = FromIQS.GetCommandText(_IqsConnectionString, _Logistics, descriptions[0], breakAfter, preWait);
if (_StaticRuns.TryGetValue(_Logistics.Sequence, out List<WS.Results> wsResults))
{
_LastIndex += 1;
long? subgroupId;
if (_LastIndex >= _IQSCopyCollection.Count)
_LastIndex = 0;
long breakAfter = dateTime.AddSeconds(_BreakAfterSeconds).Ticks;
ModelObjectParameterDefinition modelObjectParameterDefinition = _IQSCopyCollection[_LastIndex];
long preWait = _FileConnectorConfiguration?.FileHandleWaitTime is null ? dateTime.AddMilliseconds(1234).Ticks : dateTime.AddMilliseconds(_FileConnectorConfiguration.FileHandleWaitTime.Value).Ticks;
if (string.IsNullOrEmpty(descriptions[0].Reactor) || string.IsNullOrEmpty(descriptions[0].PSN))
subgroupId = null;
else
(subgroupId, int? _, string _) = FromIQS.GetCommandText(_IqsConnectionString, _Logistics, descriptions[0], breakAfter, preWait);
if (subgroupId is null)
collection.Add(new(new ScopeInfo(tests[0], _OpenInsightFilePattern), lines));
else
collection.Add(new(new ScopeInfo(tests[0], $"{subgroupId.Value} {_OpenInsightFilePattern}"), lines));
if (_StaticRuns.TryGetValue(_Logistics.Sequence, out List<WS.Results> wsResults))
{
if (wsResults is null || wsResults.Count != 1)
throw new NullReferenceException($"{nameof(wsResults)} {wsResults?.Count} != 1 {_Logistics.Sequence}!");
lock (_StaticRuns)
wsResults[0] = WS.Results.Get(wsResults[0], subgroupId);
}
string weekOfYear = _Calendar.GetWeekOfYear(_Logistics.DateTimeFromSequence, CalendarWeekRule.FirstDay, DayOfWeek.Sunday).ToString("00");
FromIQS.Save(_OpenInsightApiECDirectory, _Logistics, reportFullPath, processDataStandardFormat, descriptions.First(), lines, subgroupId, weekOfYear);
try
{ FromIQS.SaveCopy(_FileConnectorConfiguration.SourceFileLocation, _IqsConnectionString, modelObjectParameterDefinition.Name, modelObjectParameterDefinition.Value.Split('|')); }
catch (Exception exception)
{
string subject = string.Concat("Exception:", _CellInstanceConnectionName);
string body = string.Concat(exception.Message, Environment.NewLine, Environment.NewLine, exception.StackTrace);
try
{ _SMTP.SendHighPriorityEmailMessage(subject, body); }
catch (Exception) { }
}
if (wsResults is null || wsResults.Count != 1)
throw new NullReferenceException($"{nameof(wsResults)} {wsResults?.Count} != 1 {_Logistics.Sequence}!");
lock (_StaticRuns)
wsResults[0] = WS.Results.Get(wsResults[0], subgroupId);
}
if (!fileName.StartsWith("Viewer"))
duplicateFile = Path.Combine(duplicateDirectory, $"{subgroupId} {fileName}".TrimStart());
else
duplicateFile = Path.Combine(duplicateDirectory, $"{$"Viewer {subgroupId}".TrimEnd()} {fileName.Replace("Viewer", string.Empty)}");
string weekOfYear = _Calendar.GetWeekOfYear(_Logistics.DateTimeFromSequence, CalendarWeekRule.FirstDay, DayOfWeek.Sunday).ToString("00");
FromIQS.Save(_OpenInsightApiECDirectory, _Logistics, reportFullPath, processDataStandardFormat, descriptions.First(), subgroupId, weekOfYear);
}
if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
{
File.Copy(reportFullPath, duplicateFile, overwrite: true);
WaitForFileConsumption(dateTime, descriptions, isDummyRun, successDirectory, duplicateDirectory, collection, duplicateFile);
}
}
private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
@@ -312,7 +172,7 @@ public class FileRead : Shared.FileRead, IFileRead
Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
SaveOpenInsightFile(reportFullPath, dateTime, processDataStandardFormat, descriptions, tests);
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics[0]), tests, jsonElements, new List<FileInfo>());
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics), tests, jsonElements, new List<FileInfo>());
return results;
}
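For reference, a hedged sketch of the duplicate-file naming used above, which now prefixes the IQS subgroup id when one was resolved; the directory, file names, and ids are hypothetical:

using System;
using System.IO;

// Hedged sketch of the duplicate-file naming rule above; inputs are hypothetical.
class DuplicateNameSketch
{
    static string GetDuplicateFile(string duplicateDirectory, string fileName, long? subgroupId)
    {
        if (!fileName.StartsWith("Viewer"))
            return Path.Combine(duplicateDirectory, $"{subgroupId} {fileName}".TrimStart());
        return Path.Combine(duplicateDirectory, $"{$"Viewer {subgroupId}".TrimEnd()} {fileName.Replace("Viewer", string.Empty)}");
    }

    static void Main()
    {
        Console.WriteLine(GetDuplicateFile(@"D:\duplicate", "Report.pcl", 12345));        // D:\duplicate\12345 Report.pcl
        Console.WriteLine(GetDuplicateFile(@"D:\duplicate", "Report.pcl", null));         // D:\duplicate\Report.pcl
        Console.WriteLine(GetDuplicateFile(@"D:\duplicate", "Viewer Report.pcl", 12345)); // D:\duplicate\Viewer 12345  Report.pcl
    }
}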

View File

@@ -376,7 +376,7 @@ public class FromIQS
return result;
}
internal static void Save(string openInsightApiECDirectory, Logistics logistics, string reportFullPath, ProcessDataStandardFormat processDataStandardFormat, pcl.Description description, string lines, long? subGroupId, string weekOfYear)
internal static void Save(string openInsightApiECDirectory, Logistics logistics, string reportFullPath, ProcessDataStandardFormat processDataStandardFormat, pcl.Description description, long? subGroupId, string weekOfYear)
{
string checkFile;
string fileName = Path.GetFileName(reportFullPath);
@@ -390,109 +390,9 @@ public class FromIQS
checkFile = Path.Combine(ecDirectory, fileName);
if (ecExists && !File.Exists(checkFile))
File.Copy(reportFullPath, checkFile);
checkFile = Path.Combine(ecDirectory, $"{logistics.DateTimeFromSequence.Ticks}.txt");
if (ecExists && !File.Exists(checkFile))
File.WriteAllText(checkFile, lines);
checkFile = Path.Combine(ecDirectory, $"{logistics.DateTimeFromSequence.Ticks}.json");
if (ecExists && !File.Exists(checkFile))
File.WriteAllText(checkFile, json);
checkFile = Path.Combine(ecDirectory, $"{logistics.DateTimeFromSequence.Ticks}.lbl");
if (ecExists && !File.Exists(checkFile))
File.WriteAllText(checkFile, processDataStandardFormat.Body[processDataStandardFormat.Body.Count - 1]);
}
private static string GetCommandText(string[] iqsCopyValues)
{ // cSpell:disable
List<string> results = new();
if (iqsCopyValues.Length != 4)
throw new NotSupportedException();
string find = iqsCopyValues[1];
string replace = iqsCopyValues[3];
results.Add(" select pd.f_name [Part Name], ");
results.Add(" null [Part Revision], ");
results.Add($" '{replace}' [Test Name], ");
results.Add(" null [Description], ");
results.Add(" null [Lot Number], ");
results.Add(" null [Job Name], ");
results.Add(" null [Process Name], ");
results.Add(" case when sl.f_url = 0 then null else sl.f_url end [Reasonable Limit (Upper)], ");
results.Add(" case when sl.f_url = 0 then 0 else 1 end [Alarm Reasonable Limit (Upper)], ");
results.Add(" case when sl.f_usl = 0 then null else sl.f_usl end [Specification Limit (Upper)], ");
results.Add(" case when sl.f_usl = 0 then 0 else 1 end [Alarm Specification Limit (Upper)], ");
results.Add(" case when sl.f_ugb = 0 then null else sl.f_ugb end [Warning Limit (Upper)], ");
results.Add(" case when sl.f_ugb = 0 then 0 else 1 end [Alarm Warning Limit (Upper)], ");
results.Add(" case when sl.f_tar = 0 then null else sl.f_tar end [Specification Limit (Target)], ");
results.Add(" case when sl.f_lgb = 0 then null else sl.f_lgb end [Warning Limit (Lower)], ");
results.Add(" case when sl.f_lgb = 0 then 0 else 1 end [Alarm Warning Limit (Lower)], ");
results.Add(" case when sl.f_lsl = 0 then null else sl.f_lsl end [Specification Limit (Lower)], ");
results.Add(" case when sl.f_lsl = 0 then 0 else 1 end [Alarm Specification Limit (Lower)], ");
results.Add(" case when sl.f_lrl = 0 then null else sl.f_lrl end [Reasonable Limit (Lower)], ");
results.Add(" case when sl.f_lrl = 0 then 0 else 1 end [Alarm Reasonable Limit (Lower)], ");
results.Add(" td.f_name [Original Test Name], ");
results.Add(" td.f_test [Test Id], ");
results.Add(" ( ");
results.Add(" select count(sl_b.f_spec) ");
results.Add(" from [spcepiworld].[dbo].[spec_lim] sl_b ");
results.Add(" join [spcepiworld].[dbo].[part_dat] pd_b ");
results.Add(" on sl_b.f_part = pd_b.f_part ");
results.Add(" join [spcepiworld].[dbo].[test_dat] td_b ");
results.Add(" on sl_b.f_test = td_b.f_test ");
results.Add(" where sl_b.f_prcs = 0 ");
results.Add($" and td_b.f_name = '{replace}' ");
results.Add(" and pd_b.f_name = pd.f_name ");
results.Add(" and sl_b.f_url = sl.f_url ");
results.Add(" and sl_b.f_usl = sl.f_usl ");
results.Add(" and sl_b.f_ugb = sl.f_ugb ");
results.Add(" and sl_b.f_tar = sl.f_tar ");
results.Add(" and sl_b.f_lgb = sl.f_lgb ");
results.Add(" and sl_b.f_lsl = sl.f_lsl ");
results.Add(" and sl_b.f_lrl = sl.f_lrl ");
results.Add(" group by sl_b.f_spec ");
results.Add(" ) count ");
results.Add(" from [spcepiworld].[dbo].[spec_lim] sl ");
results.Add(" join [spcepiworld].[dbo].[part_dat] pd ");
results.Add(" on sl.f_part = pd.f_part ");
results.Add(" join [spcepiworld].[dbo].[test_dat] td ");
results.Add(" on sl.f_test = td.f_test ");
results.Add(" where sl.f_prcs = 0 ");
results.Add($" and td.f_name = '{find}' ");
results.Add(" and isnull(( ");
results.Add(" select count(sl_b.f_spec) ");
results.Add(" from [spcepiworld].[dbo].[spec_lim] sl_b ");
results.Add(" join [spcepiworld].[dbo].[part_dat] pd_b ");
results.Add(" on sl_b.f_part = pd_b.f_part ");
results.Add(" join [spcepiworld].[dbo].[test_dat] td_b ");
results.Add(" on sl_b.f_test = td_b.f_test ");
results.Add(" where sl_b.f_prcs = 0 ");
results.Add($" and td_b.f_name = '{replace}' ");
results.Add(" and pd_b.f_name = pd.f_name ");
results.Add(" and sl_b.f_url = sl.f_url ");
results.Add(" and sl_b.f_usl = sl.f_usl ");
results.Add(" and sl_b.f_ugb = sl.f_ugb ");
results.Add(" and sl_b.f_tar = sl.f_tar ");
results.Add(" and sl_b.f_lgb = sl.f_lgb ");
results.Add(" and sl_b.f_lsl = sl.f_lsl ");
results.Add(" and sl_b.f_lrl = sl.f_lrl ");
results.Add(" group by sl_b.f_spec ");
results.Add(" ), 0) = 0 ");
results.Add(" for json path ");
return string.Join(Environment.NewLine, results);
} // cSpell:restore
internal static void SaveCopy(string fileConnectorConfigurationSourceFileLocation, string connectionString, string name, string[] iqsCopyValues)
{
string checkFile = Path.Combine(fileConnectorConfigurationSourceFileLocation, $"{name}.json");
if (!File.Exists(checkFile))
{
string commandText = GetCommandText(iqsCopyValues);
StringBuilder stringBuilder = GetForJsonPath(connectionString, commandText);
if (stringBuilder.Length != 0)
File.WriteAllText(checkFile, stringBuilder.ToString());
else
File.WriteAllText(Path.Combine(fileConnectorConfigurationSourceFileLocation, $"{name}.sql"), commandText);
}
}
#nullable disable
}

View File

@@ -147,7 +147,7 @@ public class FileRead : Shared.FileRead, IFileRead
Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
SendData(reportFullPath, dateTime, descriptions);
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics[0]), tests, jsonElements, new List<FileInfo>());
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics), tests, jsonElements, new List<FileInfo>());
return results;
}

View File

@@ -171,7 +171,7 @@ public class FileRead : Shared.FileRead, IFileRead
Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
PostOpenInsightMetrologyViewerAttachments(descriptions);
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics[0]), tests, jsonElements, new List<FileInfo>());
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics), tests, jsonElements, new List<FileInfo>());
return results;
}

View File

@@ -172,7 +172,7 @@ public class FileRead : Shared.FileRead, IFileRead
JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(processDataStandardFormat);
List<pcl.Description> descriptions = pcl.ProcessData.GetDescriptions(jsonElements);
Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics[0]), tests, jsonElements, new List<FileInfo>());
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics), tests, jsonElements, new List<FileInfo>());
if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
DirectoryMove(reportFullPath, dateTime, descriptions);
else if (!_IsEAFHosted)

View File

@@ -125,7 +125,7 @@ public class FileRead : Shared.FileRead, IFileRead
Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
FileCopy(reportFullPath, dateTime, descriptions);
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics[0]), tests, jsonElements, new List<FileInfo>());
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics), tests, jsonElements, new List<FileInfo>());
return results;
}

View File

@@ -75,6 +75,7 @@ internal class Convert
string[] txtFiles = Directory.GetFiles(sourcePath, $"{sourceFileNameWithoutExtension}_*.txt", SearchOption.TopDirectoryOnly);
if (txtFiles.Length != 0)
{
txtFiles = (from l in txtFiles orderby l.Length, l select l).ToArray();
foreach (string txtFile in txtFiles)
{
sourceFiles.Add(txtFile);
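The added orderby sorts the companion text files by name length and then by name, so numeric suffixes come out in natural order without zero padding. A small sketch with hypothetical file names:

using System;
using System.Linq;

// Sketch of the length-then-name ordering added above; file names are hypothetical.
class TxtFileOrderSketch
{
    static void Main()
    {
        string[] txtFiles = { "run_10.txt", "run_2.txt", "run_1.txt" };
        txtFiles = (from l in txtFiles orderby l.Length, l select l).ToArray();
        Console.WriteLine(string.Join(", ", txtFiles)); // run_1.txt, run_2.txt, run_10.txt
    }
}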

View File

@@ -87,16 +87,16 @@
</None>
</ItemGroup>
<ItemGroup>
<None Condition="'$(Configuration)' == 'Debug'" Include="\\mestsa003.infineon.com\EC_EAFRepository\Staging\DeploymentStorage\GhostPCL\gpcl6win64\gpcl6dll64.dll">
<None Condition="'$(Configuration)' == 'Debug'" Include="D:\EAF-Mesa-Integration\copy\GhostPCL\gpcl6win64\gpcl6dll64.dll">
<CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
</None>
<None Condition="'$(Configuration)' == 'Debug'" Include="\\mestsa003.infineon.com\EC_EAFRepository\Staging\DeploymentStorage\GhostPCL\gpcl6win64\gpcl6win64.exe">
<None Condition="'$(Configuration)' == 'Debug'" Include="D:\EAF-Mesa-Integration\copy\GhostPCL\gpcl6win64\gpcl6win64.exe">
<CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
</None>
<None Condition="'$(Configuration)' == 'Release'" Include="\\mesfs.infineon.com\EC_EAFRepository\Staging\DeploymentStorage\GhostPCL\gpcl6win64\gpcl6dll64.dll">
<None Condition="'$(Configuration)' == 'Release'" Include="D:\EAF-Mesa-Integration\copy\GhostPCL\gpcl6win64\gpcl6dll64.dll">
<CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
</None>
<None Condition="'$(Configuration)' == 'Release'" Include="\\mesfs.infineon.com\EC_EAFRepository\Staging\DeploymentStorage\GhostPCL\gpcl6win64\gpcl6win64.exe">
<None Condition="'$(Configuration)' == 'Release'" Include="D:\EAF-Mesa-Integration\copy\GhostPCL\gpcl6win64\gpcl6win64.exe">
<CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
</None>
</ItemGroup>

View File

@@ -41,6 +41,24 @@ stages:
displayName: "Nuget Clear"
enabled: false
- task: CopyFiles@2
displayName: 'Copy GhostPCL Files to: D:\EAF-Mesa-Integration\copy'
inputs:
Contents: "*"
SourceFolder: '\\mesfs.infineon.com\EC_EAFRepository\Staging\DeploymentStorage\GhostPCL'
TargetFolder: 'D:\EAF-Mesa-Integration\copy\GhostPCL'
OverWrite: true
enabled: true
- task: CopyFiles@2
displayName: 'Copy LincPDFC Files to: D:\EAF-Mesa-Integration\copy'
inputs:
Contents: "*"
SourceFolder: '\\mesfs.infineon.com\EC_EAFRepository\Staging\DeploymentStorage\LincPDFC'
TargetFolder: 'D:\EAF-Mesa-Integration\copy\LincPDFC'
OverWrite: true
enabled: false
- script: |
"C:\program files\dotnet\dotnet.exe" user-secrets init
"C:\program files\dotnet\dotnet.exe" user-secrets set "BuildNumber" "$(Build.BuildId)"
@@ -184,6 +202,24 @@ stages:
displayName: "Nuget Clear"
enabled: false
- task: CopyFiles@2
displayName: 'Copy GhostPCL Files to: D:\EAF-Mesa-Integration\copy'
inputs:
Contents: "*"
SourceFolder: '\\mestsa003.infineon.com\EC_EAFRepository\Staging\DeploymentStorage\GhostPCL'
TargetFolder: 'D:\EAF-Mesa-Integration\copy\GhostPCL'
OverWrite: true
enabled: true
- task: CopyFiles@2
displayName: 'Copy LincPDFC Files to: D:\EAF-Mesa-Integration\copy'
inputs:
Contents: "*"
SourceFolder: '\\mestsa003.infineon.com\EC_EAFRepository\Staging\DeploymentStorage\LincPDFC'
TargetFolder: 'D:\EAF-Mesa-Integration\copy\LincPDFC'
OverWrite: true
enabled: false
- script: |
"C:\program files\dotnet\dotnet.exe" user-secrets init
"C:\program files\dotnet\dotnet.exe" user-secrets set "BuildNumber" "$(Build.BuildId)"

View File

@@ -478,27 +478,14 @@ public class FileRead : Properties.IFileRead
}
}
protected void WritePDSF(IFileRead fileRead, JsonElement[] jsonElements)
protected static void WritePDSF(IFileRead fileRead, JsonElement[] jsonElements)
{
string directory;
string day = $"{_Logistics.DateTimeFromSequence:yyyy-MM-dd}";
string weekOfYear = _Calendar.GetWeekOfYear(_Logistics.DateTimeFromSequence, CalendarWeekRule.FirstDay, DayOfWeek.Sunday).ToString("00");
string weekDirectory = $"{_Logistics.DateTimeFromSequence:yyyy}_Week_{weekOfYear}";
if (!_CellInstanceConnectionName.StartsWith(_CellInstanceName) && _CellInstanceConnectionNameBase == _EquipmentType)
directory = Path.Combine(_TracePath, _EquipmentType, "Target", weekDirectory, day, _CellInstanceName, _CellInstanceConnectionName);
else
directory = Path.Combine(_TracePath, _EquipmentType, "Source", weekDirectory, day, _CellInstanceName, _CellInstanceConnectionName);
if (!Directory.Exists(directory))
_ = Directory.CreateDirectory(directory);
string file = Path.Combine(directory, string.Concat(_Logistics.MesEntity, "_", _Logistics.Sequence, ".ipdsf"));
string lines = ProcessDataStandardFormat.GetPDSFText(fileRead, _Logistics, jsonElements, logisticsText: string.Empty);
File.WriteAllText(file, lines);
if (_Logistics.TotalSecondsSinceLastWriteTimeFromSequence > 600)
{
try
{ File.SetLastWriteTime(file, _Logistics.DateTimeFromSequence); }
catch (Exception) { }
}
#pragma warning disable CA1510
if (fileRead is null)
throw new ArgumentNullException(nameof(fileRead));
if (jsonElements is null)
throw new ArgumentNullException(nameof(jsonElements));
#pragma warning restore CA1510
}
protected void WaitForThread(Thread thread, List<Exception> threadExceptions)
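WritePDSF is reduced to argument validation here, and the CA1510 pragma keeps the explicit throw pattern. As a hedged alternative, assuming a .NET 6 or newer target (not something this change asserts), the same guards could drop the suppression:

using System;
using System.Text.Json;

// Hedged alternative to the CA1510-suppressed guards above, assuming ArgumentNullException.ThrowIfNull is available (.NET 6+).
static class WritePdsfGuardSketch
{
    internal static void Validate(object fileRead, JsonElement[] jsonElements)
    {
        ArgumentNullException.ThrowIfNull(fileRead);
        ArgumentNullException.ThrowIfNull(jsonElements);
    }
}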

View File

@@ -136,6 +136,7 @@ internal class ProcessDataStandardFormat
internal static ProcessDataStandardFormat GetProcessDataStandardFormat(string reportFullPath, string[]? lines = null, int columnsLine = 6)
{
ProcessDataStandardFormat result;
long? sequence;
string segment;
string[] segments;
bool addToFooter = false;
@@ -186,13 +187,25 @@ internal class ProcessDataStandardFormat
}
string? linesOne = lines.Length > 0 && body.Count == 0 && columns.Count == 0 ? lines[1] : null;
logistics = GetLogistics(footer, linesOne: linesOne);
if (logistics.Count == 0)
sequence = null;
else
{
segments = logistics[0].Split(new string[] { "SEQUENCE=" }, StringSplitOptions.None);
sequence = segments.Length < 2 || !long.TryParse(segments[1].Split(';')[0], out long s) ? null : s;
}
if (sequence is null && !string.IsNullOrEmpty(reportFullPath))
{
FileInfo fileInfo = new(reportFullPath);
sequence = fileInfo.LastWriteTime.Ticks;
}
result = new(body: body.AsReadOnly(),
columns: columns.AsReadOnly(),
footer: footer.AsReadOnly(),
header: header.AsReadOnly(),
inputPDSF: null,
logistics: logistics,
sequence: null);
sequence: sequence);
return result;
}
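The sequence recovery added above pulls SEQUENCE= out of the first logistics line and only falls back to the report file's last write time when parsing fails. A hedged sketch of the parse, with a hypothetical logistics line:

using System;

// Sketch of the SEQUENCE= extraction added above; the logistics line is hypothetical.
class SequenceParseSketch
{
    static void Main()
    {
        string logisticsLine = "LOGISTICS_1\tA_JOBID=TENCOR1;SEQUENCE=638851139271252054;A_MES_ENTITY=TENCOR1;";
        string[] segments = logisticsLine.Split(new string[] { "SEQUENCE=" }, StringSplitOptions.None);
        long? sequence = segments.Length < 2 || !long.TryParse(segments[1].Split(';')[0], out long s) ? null : (long?)s;
        Console.WriteLine(sequence); // 638851139271252054
    }
}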
@@ -236,7 +249,7 @@ internal class ProcessDataStandardFormat
private static ProcessDataStandardFormat GetProcessDataStandardFormat(DateTime lastWriteTime, int columnsLine, string path, string[]? lines)
{
ProcessDataStandardFormat result;
long sequence;
long? sequence;
string[] segments;
bool addToFooter = false;
List<string> body = new();
@@ -268,12 +281,13 @@ internal class ProcessDataStandardFormat
}
logistics = GetLogistics(footer, linesOne: null);
if (logistics.Count == 0)
sequence = lastWriteTime.Ticks;
sequence = null;
else
{
segments = logistics[0].Split(new string[] { "SEQUENCE=" }, StringSplitOptions.None);
sequence = segments.Length < 2 || !long.TryParse(segments[1].Split(';')[0], out long s) ? lastWriteTime.Ticks : s;
sequence = segments.Length < 2 || !long.TryParse(segments[1].Split(';')[0], out long s) ? null : s;
}
sequence ??= lastWriteTime.Ticks;
result = new(body: body.AsReadOnly(),
columns: new(columns),
footer: footer.AsReadOnly(),
@@ -302,7 +316,7 @@ internal class ProcessDataStandardFormat
segments = bodyLine.Split('\t').ToList();
for (int c = 0; c < segments.Count; c++)
{
value = segments[c].Replace("\"", "\\\"").Replace("\\", "\\\\");
value = segments[c].Replace("\\", "\\\\").Replace("\"", "\\\"");
_ = stringBuilder.Append('"').Append(processDataStandardFormat.Columns[c]).Append("\":\"").Append(value).Append("\",");
}
_ = stringBuilder.Remove(stringBuilder.Length - 1, 1);
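Swapping the Replace order fixes the JSON escaping: backslashes must be doubled before quotes are escaped, otherwise the backslash inserted by the quote escape gets doubled as well. A short illustration with a hypothetical cell value:

using System;

// Why the Replace order matters for the JSON escaping above; the value is hypothetical.
class EscapeOrderSketch
{
    static void Main()
    {
        string value = "2\" \\wafer";
        string wrong = value.Replace("\"", "\\\"").Replace("\\", "\\\\"); // 2\\" \\wafer  (escape backslash doubled too)
        string right = value.Replace("\\", "\\\\").Replace("\"", "\\\""); // 2\" \\wafer
        Console.WriteLine(wrong);
        Console.WriteLine(right);
    }
}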
@@ -378,19 +392,27 @@ internal class ProcessDataStandardFormat
break;
for (int c = 0; c < segments.Length; c++)
{
value = segments[c].Replace("\"", "\\\"").Replace("\\", "\\\\");
value = segments[c].Replace("\\", "\\\\").Replace("\"", "\\\"");
line += string.Concat('"', processDataStandardFormat.InputPDSF.Columns[c].Trim('"'), '"', ':', '"', value, '"', ',');
}
line = string.Concat(line.Substring(0, line.Length - 1), '}');
lines.Add(line);
}
string? json = null;
if (processDataStandardFormat.Footer is not null && processDataStandardFormat.Footer.Count > 0)
{
Dictionary<string, string> footerKeyValuePairs = GetFooterKeyValuePairs(processDataStandardFormat.Footer);
Dictionary<string, Dictionary<string, string>> logisticKeyValuePairs = GetLogisticKeyValuePairs(processDataStandardFormat.Footer, footerKeyValuePairs);
json = JsonSerializer.Serialize(logisticKeyValuePairs, DictionaryStringDictionaryStringStringSourceGenerationContext.Default.DictionaryStringDictionaryStringString);
}
string footerText = string.IsNullOrEmpty(json) || json == "{}" ? string.Empty : $",{Environment.NewLine}\"PDSF\":{Environment.NewLine}{json}";
result = string.Concat(
'{',
Environment.NewLine,
'"',
"Count",
'"',
": ",
": ",
processDataStandardFormat.Body.Count,
',',
Environment.NewLine,
@@ -409,12 +431,88 @@ internal class ProcessDataStandardFormat
'"',
"Sequence",
'"',
": ",
": ",
processDataStandardFormat.Sequence,
Environment.NewLine,
footerText,
Environment.NewLine,
'}');
return result;
#pragma warning restore CA1845, IDE0057
}
private static Dictionary<string, string> GetFooterKeyValuePairs(ReadOnlyCollection<string> footerLines)
{
Dictionary<string, string> results = new();
string[] segments;
foreach (string footerLine in footerLines)
{
segments = footerLine.Split('\t');
if (segments.Length != 2 || string.IsNullOrEmpty(segments[1].Trim()))
{
continue;
}
if (segments[1].Contains(';'))
{
continue;
}
else
{
if (results.ContainsKey(segments[0]))
{
continue;
}
results.Add(segments[0], segments[1]);
}
}
return results;
}
private static Dictionary<string, Dictionary<string, string>> GetLogisticKeyValuePairs(ReadOnlyCollection<string> footerLines, Dictionary<string, string> footerKeyValuePairs)
{
Dictionary<string, Dictionary<string, string>> results = new();
string[] segments;
string[] subSegments;
string[] subSubSegments;
Dictionary<string, string>? keyValue;
results.Add("Footer", footerKeyValuePairs);
foreach (string footerLine in footerLines)
{
segments = footerLine.Split('\t');
if (segments.Length != 2 || string.IsNullOrEmpty(segments[1].Trim()))
{
continue;
}
if (!segments[1].Contains(';') || !segments[1].Contains('='))
{
continue;
}
else
{
subSegments = segments[1].Split(';');
if (subSegments.Length < 1)
{
continue;
}
if (!results.TryGetValue(segments[0], out keyValue))
{
results.Add(segments[0], new());
if (!results.TryGetValue(segments[0], out keyValue))
{
throw new Exception();
}
}
foreach (string segment in subSegments)
{
subSubSegments = segment.Split('=');
if (subSubSegments.Length != 2)
{
continue;
}
keyValue.Add(subSubSegments[0], subSubSegments[1]);
}
}
}
return results;
}
internal static void Write(string path, ProcessDataStandardFormat processDataStandardFormat, List<Metrology.WS.Results>? wsResults)
@@ -518,7 +616,7 @@ internal class ProcessDataStandardFormat
{
for (int c = 1; c < segments.Length; c++)
{
value = segments[c].Replace("\"", "\\\"").Replace("\\", "\\\\");
value = segments[c].Replace("\\", "\\\\").Replace("\"", "\\\"");
_ = stringBuilder.Append('"').Append(processDataStandardFormat.Columns[c]).Append("\":\"").Append(value).Append("\",");
}
}
@@ -526,7 +624,7 @@ internal class ProcessDataStandardFormat
{
for (int c = 1; c < segments.Length; c++)
{
value = segments[c].Replace("\"", "\\\"").Replace("\\", "\\\\");
value = segments[c].Replace("\\", "\\\\").Replace("\"", "\\\"");
if (string.IsNullOrEmpty(value))
_ = stringBuilder.Append('"').Append(processDataStandardFormat.Columns[c]).Append("\":").Append(value).Append("null,");
else if (value.All(char.IsDigit))
@@ -763,4 +861,10 @@ internal class ProcessDataStandardFormat
[JsonSerializable(typeof(JsonElement[]))]
internal partial class JsonElementCollectionSourceGenerationContext : JsonSerializerContext
{
}
[JsonSourceGenerationOptions(WriteIndented = true, DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull)]
[JsonSerializable(typeof(Dictionary<string, Dictionary<string, string>>))]
internal partial class DictionaryStringDictionaryStringStringSourceGenerationContext : JsonSerializerContext
{
}
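The footer helpers added above (GetFooterKeyValuePairs and GetLogisticKeyValuePairs) build the "PDSF" object that GetJson now appends: plain tab-separated footer lines go under Footer, and lines whose value is a ;-separated list of name=value pairs become nested dictionaries serialized with the new source-generated context. A hedged, condensed restatement of that parsing (not the repository code), with hypothetical footer lines:

using System;
using System.Collections.Generic;
using System.Text.Json;

// Condensed restatement of the footer parsing above; footer lines are hypothetical.
class FooterParseSketch
{
    static void Main()
    {
        string[] footer =
        {
            "MES_ENTITY\tTENCOR1",
            "LOGISTICS_1\tA_JOBID=TENCOR1;SEQUENCE=638851139271252054;",
        };
        Dictionary<string, Dictionary<string, string>> results = new() { ["Footer"] = new() };
        foreach (string line in footer)
        {
            string[] segments = line.Split('\t');
            if (segments.Length != 2 || string.IsNullOrEmpty(segments[1].Trim()))
                continue;
            if (!segments[1].Contains(';'))
                results["Footer"][segments[0]] = segments[1];             // plain footer key/value
            else if (segments[1].Contains('='))
            {
                Dictionary<string, string> keyValue = results[segments[0]] = new();
                foreach (string segment in segments[1].Split(';'))
                {
                    string[] pair = segment.Split('=');
                    if (pair.Length == 2)
                        keyValue[pair[0]] = pair[1];                      // e.g. SEQUENCE=638851139271252054
                }
            }
        }
        Console.WriteLine(JsonSerializer.Serialize(results, new JsonSerializerOptions { WriteIndented = true }));
    }
}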

View File

@@ -47,7 +47,7 @@ public class MET08DDUPSFS6420
[Ignore]
#endif
[TestMethod]
public void Production__v2_60_0__MET08DDUPSFS6420__OpenInsightMetrologyViewer637810124350899080__Normal()
public void Production__v2_60_0__MET08DDUPSFS6420__OpenInsightMetrologyViewer638851139271252054__Normal()
{
string check = "*.pdsf";
bool validatePDSF = false;
@@ -76,7 +76,7 @@ public class MET08DDUPSFS6420
[Ignore]
#endif
[TestMethod]
public void Production__v2_60_0__MET08DDUPSFS6420__OpenInsight638052163299572098__IqsSql()
public void Production__v2_60_0__MET08DDUPSFS6420__OpenInsight638851304220990490__IqsSql()
{
string check = "*.pdsf";
MethodBase methodBase = new StackFrame().GetMethod();
@@ -98,7 +98,7 @@ public class MET08DDUPSFS6420
[Ignore]
#endif
[TestMethod]
public void Production__v2_60_0__MET08DDUPSFS6420__OpenInsightMetrologyViewerAttachments638519735942138814__HeaderId()
public void Production__v2_60_0__MET08DDUPSFS6420__OpenInsightMetrologyViewerAttachments638851355286349752__HeaderId()
{
string check = "*.pdsf";
MethodBase methodBase = new StackFrame().GetMethod();

View File

@@ -41,26 +41,7 @@ public class TENCOR1
[Ignore]
#endif
[TestMethod]
[ExpectedException(typeof(MissingMethodException))]
public void Production__v2_60_0__TENCOR1__pcl637955518212649513__Normal()
{
string check = "*.pcl";
bool validatePDSF = false;
_TENCOR1.Production__v2_60_0__TENCOR1__pcl();
MethodBase methodBase = new StackFrame().GetMethod();
string[] variables = _TENCOR1.AdaptationTesting.GetVariables(methodBase, check, validatePDSF);
IFileRead fileRead = _TENCOR1.AdaptationTesting.Get(methodBase, sourceFileLocation: variables[2], sourceFileFilter: variables[3], useCyclicalForDescription: false);
Logistics logistics = new(fileRead);
_ = Shared.AdaptationTesting.ReExtractCompareUpdatePassDirectory(variables, fileRead, logistics, validatePDSF);
NonThrowTryCatch();
}
#if DEBUG
[Ignore]
#endif
[TestMethod]
[ExpectedException(typeof(MissingMethodException))]
public void Production__v2_60_0__TENCOR1__pcl638838745567643708__TooMany()
public void Production__v2_60_0__TENCOR1__pcl638851335365053074__Normal()
{
string check = "*.pcl";
bool validatePDSF = false;

View File

@@ -41,8 +41,24 @@ public class TENCOR2
[Ignore]
#endif
[TestMethod]
[ExpectedException(typeof(MissingMethodException))]
public void Production__v2_60_0__TENCOR2__pcl637955534973701250__Normal()
public void Production__v2_60_0__TENCOR2__pcl638851352261289484__Normal()
{
string check = "*.pcl";
bool validatePDSF = false;
_TENCOR2.Production__v2_60_0__TENCOR2__pcl();
MethodBase methodBase = new StackFrame().GetMethod();
string[] variables = _TENCOR2.AdaptationTesting.GetVariables(methodBase, check, validatePDSF);
IFileRead fileRead = _TENCOR2.AdaptationTesting.Get(methodBase, sourceFileLocation: variables[2], sourceFileFilter: variables[3], useCyclicalForDescription: false);
Logistics logistics = new(fileRead);
_ = Shared.AdaptationTesting.ReExtractCompareUpdatePassDirectory(variables, fileRead, logistics, validatePDSF);
NonThrowTryCatch();
}
#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Production__v2_60_0__TENCOR2__pcl638860965797666706__TwoRuns()
{
string check = "*.pcl";
bool validatePDSF = false;

View File

@@ -41,26 +41,7 @@ public class TENCOR3
[Ignore]
#endif
[TestMethod]
[ExpectedException(typeof(MissingMethodException))]
public void Production__v2_60_0__TENCOR3__pcl637955520360305921__Normal()
{
string check = "*.pcl";
bool validatePDSF = false;
_TENCOR3.Production__v2_60_0__TENCOR3__pcl();
MethodBase methodBase = new StackFrame().GetMethod();
string[] variables = _TENCOR3.AdaptationTesting.GetVariables(methodBase, check, validatePDSF);
IFileRead fileRead = _TENCOR3.AdaptationTesting.Get(methodBase, sourceFileLocation: variables[2], sourceFileFilter: variables[3], useCyclicalForDescription: false);
Logistics logistics = new(fileRead);
_ = Shared.AdaptationTesting.ReExtractCompareUpdatePassDirectory(variables, fileRead, logistics, validatePDSF);
NonThrowTryCatch();
}
#if DEBUG
[Ignore]
#endif
[TestMethod]
[ExpectedException(typeof(MissingMethodException))]
public void Production__v2_60_0__TENCOR3__pcl638725158781216195__Parital()
public void Production__v2_60_0__TENCOR3__pcl638851336413561558__Normal()
{
string check = "*.pcl";
bool validatePDSF = false;