2 Commits

6 changed files with 193 additions and 337 deletions

View File

@@ -8,6 +8,7 @@ using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Text.Json;
namespace Adaptation.FileHandlers.IQSSi;
@@ -107,11 +108,63 @@ public class FileRead : Shared.FileRead, IFileRead
return results;
}
private static string GetLines(Logistics logistics, List<pcl.Description> descriptions)
{
StringBuilder result = new();
pcl.Description x = descriptions[0];
char del = '\t';
_ = result.Append(x.AreaCountAvg).Append(del). // 001 - AreaCountAvg
Append(x.AreaCountMax).Append(del). // 002 - AreaCountMax
Append(x.AreaCountMin).Append(del). // 003 - AreaCountMin
Append(x.AreaCountStdDev).Append(del). // 004 - AreaCountStdDev
Append(x.AreaTotalAvg).Append(del). // 005 - AreaTotalAvg
Append(x.AreaTotalMax).Append(del). // 006 - AreaTotalMax
Append(x.AreaTotalMin).Append(del). // 007 - AreaTotalMin
Append(x.AreaTotalStdDev).Append(del). // 008 - AreaTotalStdDev
Append(x.Date).Append(del). // 009 -
Append(x.HazeAverageAvg).Append(del). // 010 - Haze Average
Append(x.HazeAverageMax).Append(del). // 011 -
Append(x.HazeAverageMin).Append(del). // 012 -
Append(x.HazeAverageStdDev).Append(del). // 013 -
Append(x.HazeRegionAvg).Append(del). // 014 -
Append(x.HazeRegionMax).Append(del). // 015 -
Append(x.HazeRegionMin).Append(del). // 016 -
Append(x.HazeRegionStdDev).Append(del). // 017 -
Append(x.Lot).Append(del). // 018 -
Append(x.LPDCM2Avg).Append(del). // 019 -
Append(x.LPDCM2Max).Append(del). // 020 -
Append(x.LPDCM2Min).Append(del). // 021 -
Append(x.LPDCM2StdDev).Append(del). // 022 -
Append(x.LPDCountAvg).Append(del). // 023 -
Append(x.LPDCountMax).Append(del). // 024 -
Append(x.LPDCountMin).Append(del). // 025 -
Append(x.LPDCountStdDev).Append(del). // 026 -
Append(x.Employee).Append(del). // 027 -
Append(x.RDS).Append(del). // 028 - Lot
Append(x.Reactor).Append(del). // 029 - Process
Append(x.Recipe.Replace(";", string.Empty)).Append(del). // 030 - Part
Append(x.ScratchCountAvg).Append(del). // 031 - Scratch Count
Append(x.ScratchCountMax).Append(del). // 032 -
Append(x.ScratchCountMin).Append(del). // 033 -
Append(x.ScratchCountStdDev).Append(del). // 034 -
Append(x.ScratchTotalAvg).Append(del). // 035 - Scratch Length
Append(x.ScratchTotalMax).Append(del). // 036 -
Append(x.ScratchTotalMin).Append(del). // 037 -
Append(x.ScratchTotalStdDev).Append(del). // 038 -
Append(x.SumOfDefectsAvg).Append(del). // 039 - Average Sum of Defects
Append(x.SumOfDefectsMax).Append(del). // 040 - Max Sum of Defects
Append(x.SumOfDefectsMin).Append(del). // 041 - Min Sum of Defects
Append(x.SumOfDefectsStdDev).Append(del). // 042 - SumOfDefectsStdDev
Append(logistics.MesEntity).Append(del). // 043 -
AppendLine();
return result.ToString();
}
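A minimal sketch of how the tab-delimited row from the new GetLines might be consumed; this is illustrative only, the local variable names are hypothetical, and the value count and trailing delimiter follow from the Append chain above.
// Hypothetical consumer inside the same class (GetLines is private static):
string row = GetLines(logistics, descriptions);
// Every value is followed by '\t', so the trimmed row splits into 43 data values plus one empty trailing element.
string[] values = row.TrimEnd('\r', '\n').Split('\t');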
private void SaveIQSFile(string reportFullPath, DateTime dateTime, List<pcl.Description> descriptions, Test[] tests)
{
bool isDummyRun = false;
List<(Shared.Properties.IScopeInfo, string)> collection = new();
string lines = OpenInsight.FileRead.GetLines(_Logistics, descriptions);
string lines = GetLines(_Logistics, descriptions);
string successDirectory = _FileConnectorConfiguration.AlternateTargetFolder;
string duplicateDirectory = Path.Combine(_FileConnectorConfiguration.SourceFileLocation, _CellInstanceName);
if (!Directory.Exists(duplicateDirectory))

View File

@@ -22,17 +22,21 @@ public class FileRead : Shared.FileRead, IFileRead
internal class PreWith
{
internal string MatchingFile { get; private set; }
internal string CheckFile { get; private set; }
internal string ErrFile { get; private set; }
internal string CheckFile { get; private set; }
internal string MatchingFile { get; private set; }
internal string CheckDirectory { get; private set; }
internal string NoWaitDirectory { get; private set; }
internal PreWith(string matchingFile, string checkFile, string errFile, string checkDirectory, string noWaitDirectory)
internal PreWith(string checkDirectory,
string checkFile,
string errFile,
string matchingFile,
string noWaitDirectory)
{
MatchingFile = matchingFile;
CheckFile = checkFile;
ErrFile = errFile;
CheckFile = checkFile;
MatchingFile = matchingFile;
CheckDirectory = checkDirectory;
NoWaitDirectory = noWaitDirectory;
}
@@ -221,7 +225,11 @@ public class FileRead : Shared.FileRead, IFileRead
if (!Directory.Exists(checkDirectory))
_ = Directory.CreateDirectory(checkDirectory);
noWaitDirectory = Path.Combine(checkDirectory, "NoWaitDirectory");
preWith = new(pre.MatchingFile, pre.CheckFile, errFile, checkDirectory, noWaitDirectory);
preWith = new(checkDirectory: checkDirectory,
checkFile: pre.CheckFile,
errFile: errFile,
matchingFile: pre.MatchingFile,
noWaitDirectory: noWaitDirectory);
results.Add(preWith);
}
return results.AsReadOnly();
@@ -275,7 +283,7 @@ public class FileRead : Shared.FileRead, IFileRead
return results.AsReadOnly();
}
private void MoveCollection(DateTime dateTime, ProcessDataStandardFormat? processDataStandardFormat, ReadOnlyCollection<PreWith> preWithCollection)
private void MoveCollection(DateTime dateTime, ProcessDataStandardFormat processDataStandardFormat, ReadOnlyCollection<PreWith> preWithCollection)
{
ReadOnlyCollection<Post> postCollection = GetPostCollection(dateTime, processDataStandardFormat, preWithCollection);
if (postCollection.Count != 0)
@@ -294,7 +302,7 @@ public class FileRead : Shared.FileRead, IFileRead
}
}
private ReadOnlyCollection<Post> GetPostCollection(DateTime dateTime, ProcessDataStandardFormat? processDataStandardFormat, ReadOnlyCollection<PreWith> preWithCollection)
private ReadOnlyCollection<Post> GetPostCollection(DateTime dateTime, ProcessDataStandardFormat processDataStandardFormat, ReadOnlyCollection<PreWith> preWithCollection)
{
List<Post> results = new();
Post post;
@@ -303,15 +311,10 @@ public class FileRead : Shared.FileRead, IFileRead
{
if (!_IsEAFHosted)
continue;
if (processDataStandardFormat is null)
File.Move(preWith.MatchingFile, preWith.CheckFile);
else
{
if (!_StaticRuns.TryGetValue(_Logistics.Sequence, out List<Shared.Metrology.WS.Results>? wsResults))
wsResults = null;
ProcessDataStandardFormat.Write(preWith.CheckFile, processDataStandardFormat, wsResults);
File.Delete(preWith.MatchingFile);
}
if (Directory.Exists(preWith.NoWaitDirectory))
{
post = new(preWith.CheckFile, preWith.ErrFile);
@@ -345,17 +348,10 @@ public class FileRead : Shared.FileRead, IFileRead
private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results = new(string.Empty, Array.Empty<Test>(), Array.Empty<JsonElement>(), new List<FileInfo>());
ProcessDataStandardFormat? processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath, _ProcessDataStandardFormatMapping);
if (processDataStandardFormat is not null)
ProcessDataStandardFormat processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath, _ProcessDataStandardFormatMapping);
_Logistics = new Logistics(reportFullPath, processDataStandardFormat);
else
{
processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath);
_Logistics = new Logistics(reportFullPath, processDataStandardFormat);
processDataStandardFormat = null;
}
if (!_IsEAFHosted && processDataStandardFormat is not null)
ProcessDataStandardFormat.Write(".pdsf", processDataStandardFormat, wsResults: null);
if (!_IsEAFHosted)
ProcessDataStandardFormat.Write("../../.pdsf", processDataStandardFormat, wsResults: null);
SetFileParameterLotIDToLogisticsMID();
int numberLength = 2;
long ticks = dateTime.Ticks;
@@ -364,9 +360,12 @@ public class FileRead : Shared.FileRead, IFileRead
ReadOnlyCollection<string> matchingFiles = GetMatchingFiles(ticks, reportFullPath, searchDirectories);
if (matchingFiles.Count != searchDirectories.Count)
throw new Exception($"Didn't find all files after {_BreakAfterSeconds} second(s)!");
if (_IsEAFHosted)
{
try
{ CreatePointerFile(numberLength, parentParentDirectory, matchingFiles); }
catch (Exception) { }
}
ReadOnlyCollection<Pre> preCollection = GetPreCollection(numberLength, parentParentDirectory, matchingFiles);
ReadOnlyCollection<PreWith> preWithCollection = GetPreWithCollection(preCollection);
MoveCollection(dateTime, processDataStandardFormat, preWithCollection);

View File

@@ -6,11 +6,9 @@ using Adaptation.Shared.Methods;
using Adaptation.Shared.Metrology;
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Text;
using System.Text.Json;
namespace Adaptation.FileHandlers.OpenInsight;
@@ -18,16 +16,12 @@ namespace Adaptation.FileHandlers.OpenInsight;
public class FileRead : Shared.FileRead, IFileRead
{
private int _LastIndex;
private readonly string _IqsConnectionString;
private readonly string _OpenInsightFilePattern;
private readonly string _OpenInsightApiECDirectory;
private readonly ReadOnlyCollection<ModelObjectParameterDefinition> _IQSCopyCollection;
public FileRead(ISMTP smtp, Dictionary<string, string> fileParameter, string cellInstanceName, int? connectionCount, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList<ModelObjectParameterDefinition> modelObjectParameters, string equipmentDictionaryName, Dictionary<string, List<long>> dummyRuns, Dictionary<long, List<WS.Results>> staticRuns, bool useCyclicalForDescription, bool isEAFHosted) :
base(new Description(), false, smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null)
{
_LastIndex = -1;
_MinFileLength = 10;
_Logistics = new(this);
_NullData = string.Empty;
@@ -37,10 +31,8 @@ public class FileRead : Shared.FileRead, IFileRead
throw new Exception(cellInstanceConnectionName);
if (!_IsDuplicator)
throw new Exception(cellInstanceConnectionName);
_IQSCopyCollection = new(GetProperties(cellInstanceConnectionName, modelObjectParameters, "IQS.Copy."));
_IqsConnectionString = GetPropertyValue(cellInstanceConnectionName, modelObjectParameters, "IQS.ConnectionString");
_OpenInsightApiECDirectory = GetPropertyValue(cellInstanceConnectionName, modelObjectParameters, "API.EC.Directory");
_OpenInsightFilePattern = GetPropertyValue(cellInstanceConnectionName, modelObjectParameters, "OpenInsight.FilePattern");
}
void IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, Exception exception)
@@ -118,128 +110,9 @@ public class FileRead : Shared.FileRead, IFileRead
return results;
}
internal static string GetLines(Logistics logistics, List<pcl.Description> descriptions)
{
StringBuilder result = new();
pcl.Description x = descriptions[0];
bool ganPPTST = x.Recipe.Contains("GAN_PPTST");
if (ganPPTST)
{
string slot;
string reactor;
const int eight = 8;
DateTime dateTime = DateTime.Parse(x.Date);
string lot = x.Lot.ToLower().Replace("69-", string.Empty).Replace("71-", string.Empty).Replace("-", string.Empty);
if (string.IsNullOrEmpty(x.Lot) || x.Lot.Length < 2)
reactor = "R";
else
reactor = string.Concat("R", x.Lot.Substring(0, 2));
_ = result.Append(nameof(x.Date)).Append(';').
Append("Part").Append(';').
Append(nameof(x.Reactor)).Append(';').
Append("Lot").Append(';').
Append(nameof(pcl.Detail.Slot)).Append(';').
Append(nameof(pcl.Detail.Bin1)).Append(';').
Append(nameof(pcl.Detail.Bin2)).Append(';').
Append(nameof(pcl.Detail.Bin3)).Append(';').
Append(nameof(pcl.Detail.Bin4)).Append(';').
Append(nameof(pcl.Detail.Bin5)).Append(';').
Append(nameof(pcl.Detail.Bin6)).Append(';').
Append("Bin9").
AppendLine();
foreach (pcl.Description description in descriptions)
{
slot = description.Slot.Replace("*", string.Empty);
_ = result.Append('!').Append(dateTime.ToString("MM/dd/yyyy HH:mm:ss")).Append(';').
Append("Particle Adder;").
Append(reactor).Append(';').
Append(lot).Append(';').
Append(slot).Append(';').
Append(description.Bin1).Append(';').
Append(description.Bin2).Append(';').
Append(description.Bin3).Append(';').
Append(description.Bin4).Append(';').
Append(description.Bin5).Append(';').
Append(description.Bin6).Append(';').
Append(description.AreaCount).
AppendLine();
}
if (descriptions.Count != eight)
{
string negativeTenThousand = "-10000";
for (int i = descriptions.Count; i < eight; i++)
{
_ = result.Append('!').Append(dateTime.ToString("MM/dd/yyyy HH:mm:ss")).Append(';').
Append("Particle Adder;").
Append(reactor).Append(';').
Append(lot).Append(';').
Append(negativeTenThousand).Append(';').
Append(negativeTenThousand).Append(';').
Append(negativeTenThousand).Append(';').
Append(negativeTenThousand).Append(';').
Append(negativeTenThousand).Append(';').
Append(negativeTenThousand).Append(';').
Append(negativeTenThousand).Append(';').
Append(negativeTenThousand).
AppendLine();
}
}
if (result.ToString().Split('\n').Length != (eight + 2))
throw new Exception(string.Concat("Must have ", eight, " samples"));
}
else
{
char del = '\t';
_ = result.Append(x.AreaCountAvg).Append(del). // 001 - AreaCountAvg
Append(x.AreaCountMax).Append(del). // 002 - AreaCountMax
Append(x.AreaCountMin).Append(del). // 003 - AreaCountMin
Append(x.AreaCountStdDev).Append(del). // 004 - AreaCountStdDev
Append(x.AreaTotalAvg).Append(del). // 005 - AreaTotalAvg
Append(x.AreaTotalMax).Append(del). // 006 - AreaTotalMax
Append(x.AreaTotalMin).Append(del). // 007 - AreaTotalMin
Append(x.AreaTotalStdDev).Append(del). // 008 - AreaTotalStdDev
Append(x.Date).Append(del). // 009 -
Append(x.HazeAverageAvg).Append(del). // 010 - Haze Average
Append(x.HazeAverageMax).Append(del). // 011 -
Append(x.HazeAverageMin).Append(del). // 012 -
Append(x.HazeAverageStdDev).Append(del). // 013 -
Append(x.HazeRegionAvg).Append(del). // 014 -
Append(x.HazeRegionMax).Append(del). // 015 -
Append(x.HazeRegionMin).Append(del). // 016 -
Append(x.HazeRegionStdDev).Append(del). // 017 -
Append(x.Lot).Append(del). // 018 -
Append(x.LPDCM2Avg).Append(del). // 019 -
Append(x.LPDCM2Max).Append(del). // 020 -
Append(x.LPDCM2Min).Append(del). // 021 -
Append(x.LPDCM2StdDev).Append(del). // 022 -
Append(x.LPDCountAvg).Append(del). // 023 -
Append(x.LPDCountMax).Append(del). // 024 -
Append(x.LPDCM2Min).Append(del). // 025 -
Append(x.LPDCountStdDev).Append(del). // 026 -
Append(x.Employee).Append(del). // 027 -
Append(x.RDS).Append(del). // 028 - Lot
Append(x.Reactor).Append(del). // 029 - Process
Append(x.Recipe.Replace(";", string.Empty)).Append(del). // 030 - Part
Append(x.ScratchCountAvg).Append(del). // 031 - Scratch Count
Append(x.ScratchCountMax).Append(del). // 032 -
Append(x.ScratchCountMin).Append(del). // 033 -
Append(x.ScratchTotalStdDev).Append(del). // 034 -
Append(x.ScratchTotalAvg).Append(del). // 035 - Scratch Length
Append(x.ScratchTotalMax).Append(del). // 036 -
Append(x.ScratchTotalMin).Append(del). // 037 -
Append(x.ScratchTotalStdDev).Append(del). // 038 -
Append(x.SumOfDefectsAvg).Append(del). // 039 - Average Sum of Defects
Append(x.SumOfDefectsMax).Append(del). // 040 - Max Sum of Defects
Append(x.SumOfDefectsMin).Append(del). // 041 - Min Sum of Defects
Append(x.SumOfDefectsStdDev).Append(del). // 042 - SumOfDefectsStdDev
Append(logistics.MesEntity).Append(del). // 043 -
AppendLine();
}
return result.ToString();
}
private void SaveOpenInsightFile(string reportFullPath, DateTime dateTime, ProcessDataStandardFormat processDataStandardFormat, List<pcl.Description> descriptions, Test[] tests)
{
string duplicateFile;
bool isDummyRun = false;
List<(Shared.Properties.IScopeInfo, string)> collection = new();
string duplicateDirectory = Path.Combine(_FileConnectorConfiguration.SourceFileLocation, _CellInstanceName);
@@ -255,27 +128,18 @@ public class FileRead : Shared.FileRead, IFileRead
if (!Directory.Exists(duplicateDirectory))
_ = Directory.CreateDirectory(duplicateDirectory);
}
string duplicateFile = Path.Combine(duplicateDirectory, Path.GetFileName(reportFullPath));
if (descriptions.Count != 0 && tests.Length != 0)
if (descriptions.Count == 0 || tests.Length == 0)
duplicateFile = Path.Combine(duplicateDirectory, Path.GetFileName(reportFullPath));
else
{
string lines = GetLines(_Logistics, descriptions);
if (!string.IsNullOrEmpty(lines))
{
_LastIndex += 1;
long? subgroupId;
if (_LastIndex >= _IQSCopyCollection.Count)
_LastIndex = 0;
string fileName = Path.GetFileName(reportFullPath);
long breakAfter = dateTime.AddSeconds(_BreakAfterSeconds).Ticks;
ModelObjectParameterDefinition modelObjectParameterDefinition = _IQSCopyCollection[_LastIndex];
long preWait = _FileConnectorConfiguration?.FileHandleWaitTime is null ? dateTime.AddMilliseconds(1234).Ticks : dateTime.AddMilliseconds(_FileConnectorConfiguration.FileHandleWaitTime.Value).Ticks;
if (string.IsNullOrEmpty(descriptions[0].Reactor) || string.IsNullOrEmpty(descriptions[0].PSN))
subgroupId = null;
else
(subgroupId, int? _, string _) = FromIQS.GetCommandText(_IqsConnectionString, _Logistics, descriptions[0], breakAfter, preWait);
if (subgroupId is null)
collection.Add(new(new ScopeInfo(tests[0], _OpenInsightFilePattern), lines));
else
collection.Add(new(new ScopeInfo(tests[0], $"{subgroupId.Value} {_OpenInsightFilePattern}"), lines));
if (_StaticRuns.TryGetValue(_Logistics.Sequence, out List<WS.Results> wsResults))
{
if (wsResults is null || wsResults.Count != 1)
@@ -283,23 +147,19 @@ public class FileRead : Shared.FileRead, IFileRead
lock (_StaticRuns)
wsResults[0] = WS.Results.Get(wsResults[0], subgroupId);
}
if (!fileName.StartsWith("Viewer"))
duplicateFile = Path.Combine(duplicateDirectory, $"{subgroupId} {fileName}".TrimStart());
else
duplicateFile = Path.Combine(duplicateDirectory, $"{$"Viewer {subgroupId}".TrimEnd()} {fileName.Replace("Viewer", string.Empty)}");
string weekOfYear = _Calendar.GetWeekOfYear(_Logistics.DateTimeFromSequence, CalendarWeekRule.FirstDay, DayOfWeek.Sunday).ToString("00");
FromIQS.Save(_OpenInsightApiECDirectory, _Logistics, reportFullPath, processDataStandardFormat, descriptions.First(), lines, subgroupId, weekOfYear);
try
{ FromIQS.SaveCopy(_FileConnectorConfiguration.SourceFileLocation, _IqsConnectionString, modelObjectParameterDefinition.Name, modelObjectParameterDefinition.Value.Split('|')); }
catch (Exception exception)
{
string subject = string.Concat("Exception:", _CellInstanceConnectionName);
string body = string.Concat(exception.Message, Environment.NewLine, Environment.NewLine, exception.StackTrace);
try
{ _SMTP.SendHighPriorityEmailMessage(subject, body); }
catch (Exception) { }
}
}
FromIQS.Save(_OpenInsightApiECDirectory, _Logistics, reportFullPath, processDataStandardFormat, descriptions.First(), subgroupId, weekOfYear);
}
if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
{
File.Copy(reportFullPath, duplicateFile, overwrite: true);
WaitForFileConsumption(dateTime, descriptions, isDummyRun, successDirectory, duplicateDirectory, collection, duplicateFile);
}
}
private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
{

View File

@@ -376,7 +376,7 @@ public class FromIQS
return result;
}
internal static void Save(string openInsightApiECDirectory, Logistics logistics, string reportFullPath, ProcessDataStandardFormat processDataStandardFormat, pcl.Description description, string lines, long? subGroupId, string weekOfYear)
internal static void Save(string openInsightApiECDirectory, Logistics logistics, string reportFullPath, ProcessDataStandardFormat processDataStandardFormat, pcl.Description description, long? subGroupId, string weekOfYear)
{
string checkFile;
string fileName = Path.GetFileName(reportFullPath);
@@ -390,109 +390,9 @@ public class FromIQS
checkFile = Path.Combine(ecDirectory, fileName);
if (ecExists && !File.Exists(checkFile))
File.Copy(reportFullPath, checkFile);
checkFile = Path.Combine(ecDirectory, $"{logistics.DateTimeFromSequence.Ticks}.txt");
if (ecExists && !File.Exists(checkFile))
File.WriteAllText(checkFile, lines);
checkFile = Path.Combine(ecDirectory, $"{logistics.DateTimeFromSequence.Ticks}.json");
if (ecExists && !File.Exists(checkFile))
File.WriteAllText(checkFile, json);
checkFile = Path.Combine(ecDirectory, $"{logistics.DateTimeFromSequence.Ticks}.lbl");
if (ecExists && !File.Exists(checkFile))
File.WriteAllText(checkFile, processDataStandardFormat.Body[processDataStandardFormat.Body.Count - 1]);
}
private static string GetCommandText(string[] iqsCopyValues)
{ // cSpell:disable
List<string> results = new();
if (iqsCopyValues.Length != 4)
throw new NotSupportedException();
string find = iqsCopyValues[1];
string replace = iqsCopyValues[3];
results.Add(" select pd.f_name [Part Name], ");
results.Add(" null [Part Revision], ");
results.Add($" '{replace}' [Test Name], ");
results.Add(" null [Description], ");
results.Add(" null [Lot Number], ");
results.Add(" null [Job Name], ");
results.Add(" null [Process Name], ");
results.Add(" case when sl.f_url = 0 then null else sl.f_url end [Reasonable Limit (Upper)], ");
results.Add(" case when sl.f_url = 0 then 0 else 1 end [Alarm Reasonable Limit (Upper)], ");
results.Add(" case when sl.f_usl = 0 then null else sl.f_usl end [Specification Limit (Upper)], ");
results.Add(" case when sl.f_usl = 0 then 0 else 1 end [Alarm Specification Limit (Upper)], ");
results.Add(" case when sl.f_ugb = 0 then null else sl.f_ugb end [Warning Limit (Upper)], ");
results.Add(" case when sl.f_ugb = 0 then 0 else 1 end [Alarm Warning Limit (Upper)], ");
results.Add(" case when sl.f_tar = 0 then null else sl.f_tar end [Specification Limit (Target)], ");
results.Add(" case when sl.f_lgb = 0 then null else sl.f_lgb end [Warning Limit (Lower)], ");
results.Add(" case when sl.f_lgb = 0 then 0 else 1 end [Alarm Warning Limit (Lower)], ");
results.Add(" case when sl.f_lsl = 0 then null else sl.f_lsl end [Specification Limit (Lower)], ");
results.Add(" case when sl.f_lsl = 0 then 0 else 1 end [Alarm Specification Limit (Lower)], ");
results.Add(" case when sl.f_lrl = 0 then null else sl.f_lrl end [Reasonable Limit (Lower)], ");
results.Add(" case when sl.f_lrl = 0 then 0 else 1 end [Alarm Reasonable Limit (Lower)], ");
results.Add(" td.f_name [Original Test Name], ");
results.Add(" td.f_test [Test Id], ");
results.Add(" ( ");
results.Add(" select count(sl_b.f_spec) ");
results.Add(" from [spcepiworld].[dbo].[spec_lim] sl_b ");
results.Add(" join [spcepiworld].[dbo].[part_dat] pd_b ");
results.Add(" on sl_b.f_part = pd_b.f_part ");
results.Add(" join [spcepiworld].[dbo].[test_dat] td_b ");
results.Add(" on sl_b.f_test = td_b.f_test ");
results.Add(" where sl_b.f_prcs = 0 ");
results.Add($" and td_b.f_name = '{replace}' ");
results.Add(" and pd_b.f_name = pd.f_name ");
results.Add(" and sl_b.f_url = sl.f_url ");
results.Add(" and sl_b.f_usl = sl.f_usl ");
results.Add(" and sl_b.f_ugb = sl.f_ugb ");
results.Add(" and sl_b.f_tar = sl.f_tar ");
results.Add(" and sl_b.f_lgb = sl.f_lgb ");
results.Add(" and sl_b.f_lsl = sl.f_lsl ");
results.Add(" and sl_b.f_lrl = sl.f_lrl ");
results.Add(" group by sl_b.f_spec ");
results.Add(" ) count ");
results.Add(" from [spcepiworld].[dbo].[spec_lim] sl ");
results.Add(" join [spcepiworld].[dbo].[part_dat] pd ");
results.Add(" on sl.f_part = pd.f_part ");
results.Add(" join [spcepiworld].[dbo].[test_dat] td ");
results.Add(" on sl.f_test = td.f_test ");
results.Add(" where sl.f_prcs = 0 ");
results.Add($" and td.f_name = '{find}' ");
results.Add(" and isnull(( ");
results.Add(" select count(sl_b.f_spec) ");
results.Add(" from [spcepiworld].[dbo].[spec_lim] sl_b ");
results.Add(" join [spcepiworld].[dbo].[part_dat] pd_b ");
results.Add(" on sl_b.f_part = pd_b.f_part ");
results.Add(" join [spcepiworld].[dbo].[test_dat] td_b ");
results.Add(" on sl_b.f_test = td_b.f_test ");
results.Add(" where sl_b.f_prcs = 0 ");
results.Add($" and td_b.f_name = '{replace}' ");
results.Add(" and pd_b.f_name = pd.f_name ");
results.Add(" and sl_b.f_url = sl.f_url ");
results.Add(" and sl_b.f_usl = sl.f_usl ");
results.Add(" and sl_b.f_ugb = sl.f_ugb ");
results.Add(" and sl_b.f_tar = sl.f_tar ");
results.Add(" and sl_b.f_lgb = sl.f_lgb ");
results.Add(" and sl_b.f_lsl = sl.f_lsl ");
results.Add(" and sl_b.f_lrl = sl.f_lrl ");
results.Add(" group by sl_b.f_spec ");
results.Add(" ), 0) = 0 ");
results.Add(" for json path ");
return string.Join(Environment.NewLine, results);
} // cSpell:restore
internal static void SaveCopy(string fileConnectorConfigurationSourceFileLocation, string connectionString, string name, string[] iqsCopyValues)
{
string checkFile = Path.Combine(fileConnectorConfigurationSourceFileLocation, $"{name}.json");
if (!File.Exists(checkFile))
{
string commandText = GetCommandText(iqsCopyValues);
StringBuilder stringBuilder = GetForJsonPath(connectionString, commandText);
if (stringBuilder.Length != 0)
File.WriteAllText(checkFile, stringBuilder.ToString());
else
File.WriteAllText(Path.Combine(fileConnectorConfigurationSourceFileLocation, $"{name}.sql"), commandText);
}
}
#nullable disable
}

View File

@@ -61,8 +61,8 @@ internal class ProcessDataStandardFormat
internal static string Archive(bool addSpaces = true, char separator = ' ') =>
GetString(SearchFor.Archive, addSpaces, separator);
internal static ProcessDataStandardFormat GetEmpty() =>
new(new(Array.Empty<string>()), new(Array.Empty<string>()), new(Array.Empty<string>()), new(Array.Empty<string>()), null, new(new string[] { "LOGISTICS_1" }), null);
internal static ProcessDataStandardFormat GetEmpty(Logistics logistics) =>
new(new(Array.Empty<string>()), new(Array.Empty<string>()), new(Array.Empty<string>()), new(Array.Empty<string>()), null, new(logistics.Logistics1), null);
internal static List<string> PDSFToFixedWidth(string reportFullPath)
{
@@ -214,25 +214,26 @@ internal class ProcessDataStandardFormat
return results.AsReadOnly();
}
internal static ProcessDataStandardFormat? GetProcessDataStandardFormat(string reportFullPath, ProcessDataStandardFormatMapping pdsfMapping)
internal static ProcessDataStandardFormat GetProcessDataStandardFormat(string reportFullPath, ProcessDataStandardFormatMapping pdsfMapping)
{
ProcessDataStandardFormat? result;
ProcessDataStandardFormat result;
const int columnsLine = 6;
FileInfo fileInfo = new(reportFullPath);
ProcessDataStandardFormat processDataStandardFormat = GetProcessDataStandardFormat(fileInfo.LastWriteTime, pdsfMapping.NewColumnNames.Count, columnsLine, fileInfo.FullName, lines: null);
JsonElement[]? jsonElements = GetArray(pdsfMapping.NewColumnNames.Count, processDataStandardFormat, lookForNumbers: false);
if (jsonElements is null || jsonElements.Length == 0 || pdsfMapping.OldColumnNames.Count != pdsfMapping.ColumnIndices.Count)
result = null;
ProcessDataStandardFormat processDataStandardFormat = GetProcessDataStandardFormat(fileInfo.LastWriteTime, columnsLine, fileInfo.FullName, lines: null);
JsonElement[]? jsonElements = pdsfMapping.OldColumnNames.Count != pdsfMapping.ColumnIndices.Count ? null : GetFullArray(processDataStandardFormat);
JsonProperty[]? jsonProperties = jsonElements is null || jsonElements.Length == 0 ? null : jsonElements[0].EnumerateObject().ToArray();
if (jsonElements is null || jsonProperties is null || jsonProperties.Length != pdsfMapping.NewColumnNames.Count)
result = processDataStandardFormat;
else
{
result = GetProcessDataStandardFormat(pdsfMapping, jsonElements, processDataStandardFormat);
if (result.Sequence is null || result.Columns.Count == 0 || result.Body.Count == 0 || result.Logistics.Count == 0)
result = null;
result = processDataStandardFormat;
}
return result;
}
private static ProcessDataStandardFormat GetProcessDataStandardFormat(DateTime lastWriteTime, int expectedColumns, int columnsLine, string path, string[]? lines)
private static ProcessDataStandardFormat GetProcessDataStandardFormat(DateTime lastWriteTime, int columnsLine, string path, string[]? lines)
{
ProcessDataStandardFormat result;
long sequence;
@@ -248,8 +249,6 @@ internal class ProcessDataStandardFormat
else
{
segments = lines[columnsLine].Split('\t');
if (segments.Length != expectedColumns)
segments = Array.Empty<string>();
for (int i = 0; i < columnsLine; i++)
header.Add(lines[i]);
}
@@ -285,7 +284,7 @@ internal class ProcessDataStandardFormat
return result;
}
private static JsonElement[]? GetArray(int expectedColumns, ProcessDataStandardFormat processDataStandardFormat, bool lookForNumbers)
private static JsonElement[]? GetFullArray(ProcessDataStandardFormat processDataStandardFormat)
{
JsonElement[]? results;
if (processDataStandardFormat.Body.Count == 0 || !processDataStandardFormat.Body[0].Contains('\t'))
@@ -293,37 +292,19 @@ internal class ProcessDataStandardFormat
else
{
string value;
string[] segments;
List<string> segments;
List<string> lines = new();
StringBuilder stringBuilder = new();
foreach (string bodyLine in processDataStandardFormat.Body)
{
_ = stringBuilder.Clear();
_ = stringBuilder.Append('{');
segments = bodyLine.Split('\t');
if (segments.Length != expectedColumns)
continue;
if (!lookForNumbers)
{
for (int c = 0; c < segments.Length; c++)
segments = bodyLine.Split('\t').ToList();
for (int c = 0; c < segments.Count; c++)
{
value = segments[c].Replace("\"", "\\\"").Replace("\\", "\\\\");
_ = stringBuilder.Append('"').Append(processDataStandardFormat.Columns[c]).Append("\":\"").Append(value).Append("\",");
}
}
else
{
for (int c = 0; c < segments.Length; c++)
{
value = segments[c].Replace("\"", "\\\"").Replace("\\", "\\\\");
if (string.IsNullOrEmpty(value))
_ = stringBuilder.Append('"').Append(processDataStandardFormat.Columns[c]).Append("\":").Append(value).Append("null,");
else if (value.All(char.IsDigit))
_ = stringBuilder.Append('"').Append(processDataStandardFormat.Columns[c]).Append("\":").Append(value).Append(',');
else
_ = stringBuilder.Append('"').Append(processDataStandardFormat.Columns[c]).Append("\":\"").Append(value).Append("\",");
}
}
_ = stringBuilder.Remove(stringBuilder.Length - 1, 1);
_ = stringBuilder.AppendLine("}");
lines.Add(stringBuilder.ToString());
@@ -379,6 +360,63 @@ internal class ProcessDataStandardFormat
return result;
}
private static string GetJson(ProcessDataStandardFormat processDataStandardFormat)
{
if (processDataStandardFormat.InputPDSF is null)
throw new NullReferenceException(nameof(processDataStandardFormat.InputPDSF));
#pragma warning disable CA1845, IDE0057
string result;
string line;
string value;
string[] segments;
List<string> lines = new();
for (int i = 0; i < processDataStandardFormat.InputPDSF.Body.Count; i++)
{
line = "{";
segments = processDataStandardFormat.InputPDSF.Body[i].Trim().Split('\t');
if (segments.Length != processDataStandardFormat.InputPDSF.Columns.Count)
break;
for (int c = 0; c < segments.Length; c++)
{
value = segments[c].Replace("\"", "\\\"").Replace("\\", "\\\\");
line += string.Concat('"', processDataStandardFormat.InputPDSF.Columns[c].Trim('"'), '"', ':', '"', value, '"', ',');
}
line = string.Concat(line.Substring(0, line.Length - 1), '}');
lines.Add(line);
}
result = string.Concat(
'{',
Environment.NewLine,
'"',
"Count",
'"',
": ",
processDataStandardFormat.Body.Count,
',',
Environment.NewLine,
'"',
"Records",
'"',
": ",
Environment.NewLine,
'[',
Environment.NewLine,
string.Join($",{Environment.NewLine}", lines),
Environment.NewLine,
']',
',',
Environment.NewLine,
'"',
"Sequence",
'"',
": ",
processDataStandardFormat.Sequence,
Environment.NewLine,
'}');
return result;
#pragma warning restore CA1845, IDE0057
}
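For orientation, the envelope GetJson assembles should look roughly like the sketch below; this is a hand-written illustration assuming a two-row InputPDSF body, with placeholder column names and values, and with Count and Sequence taken from the outer processDataStandardFormat as in the code above.
// {
// "Count": 2,
// "Records": 
// [
// {"Column_A":"1","Column_B":"x"},
// {"Column_A":"2","Column_B":"y"}
// ],
// "Sequence": 638500000000000000
// }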
internal static void Write(string path, ProcessDataStandardFormat processDataStandardFormat, List<Metrology.WS.Results>? wsResults)
{
List<string> results = new();
@@ -418,9 +456,10 @@ internal class ProcessDataStandardFormat
results.Add($"{segments[0]}\t{segments[1][0]}_HeaderId={wsResults[0].HeaderId};{segments[1][0]}_SubgroupId={wsResults[0].SubgroupId};{segments[1]}");
}
}
results.Add("EOF");
results.Add("END_HEADER");
if (processDataStandardFormat.InputPDSF is not null)
{
results.Add(string.Empty);
List<char> hyphens = new();
results.AddRange(processDataStandardFormat.InputPDSF.Header.Select(l => l.Replace('\t', '|')));
results.Add(string.Empty);
@@ -431,6 +470,11 @@ internal class ProcessDataStandardFormat
results.AddRange(processDataStandardFormat.InputPDSF.Body.Select(l => l.Replace('\t', '|')));
results.Add(string.Empty);
results.AddRange(processDataStandardFormat.InputPDSF.Footer.Select(l => l.Replace('\t', '|')));
results.Add(string.Empty);
results.Add("EOF");
results.Add(string.Empty);
string json = GetJson(processDataStandardFormat);
results.Add(json);
}
File.WriteAllText(path, string.Join(Environment.NewLine, results));
}
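As a rough sketch, the tail of a file written when InputPDSF is present follows the results list built above; the separator and column lines between the header and body sections are not visible in this diff and are elided here, and the pipe-delimited placeholders stand in for the real content.
// ...header records, with optional HeaderId/SubgroupId annotations...
// END_HEADER
//
// <InputPDSF header lines, tabs replaced with '|'>
// ...elided separator/column lines...
// <InputPDSF body lines, tabs replaced with '|'>
//
// <InputPDSF footer lines, tabs replaced with '|'>
//
// EOF
//
// <JSON envelope from GetJson, as sketched above>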

View File

@@ -1182,7 +1182,7 @@ public class AdaptationTesting : ISMTP
Assert.IsNotNull(extractResult.Item3);
Assert.IsNotNull(extractResult.Item4);
if (!validatePDSF)
_ = GetProcessDataStandardFormat(fileRead, logistics, extractResult, ProcessDataStandardFormat.GetEmpty());
_ = GetProcessDataStandardFormat(fileRead, logistics, extractResult, ProcessDataStandardFormat.GetEmpty(logistics));
else
{
Assert.IsTrue(extractResult.Item3.Length > 0, "extractResult Array Length check!");