Ready to test
Adaptation/FileHandlers/MET08DDUPSFS6420/FileRead.cs (Normal file, 519 lines)
@@ -0,0 +1,519 @@
using Adaptation.Eaf.Management.ConfigurationData.CellAutomation;
using Adaptation.Ifx.Eaf.EquipmentConnector.File.Configuration;
using Adaptation.Shared;
using Adaptation.Shared.Duplicator;
using Adaptation.Shared.Methods;
using Adaptation.Shared.Metrology;
using Infineon.Monitoring.MonA;
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Globalization;
using System.IO;
using System.IO.Compression;
using System.Linq;
using System.Text.Json;
using System.Text.Json.Serialization;
using System.Threading;

namespace Adaptation.FileHandlers.MET08DDUPSFS6420;

public class FileRead : Shared.FileRead, IFileRead
{

    private readonly Timer _Timer;
    private int _LastDummyRunIndex;
    private readonly bool _IsDummy;
    private readonly bool _IsNaEDA;
    private readonly bool _IsXToAPC;
    private readonly string _IqsFile;
    private readonly bool _IsXToIQSSi;
    private readonly bool _IsXToSPaCe;
    private readonly bool _IsXToIQSGaN;
    private readonly string _MemoryPath;
    private readonly bool _IsXToOpenInsight;
    private readonly string _OpenInsightFilePattern;
    private readonly bool _IsXToOpenInsightMetrologyViewer;
    private readonly Dictionary<string, string> _CellNames;
    private readonly string _OpenInsightMetrologyViewerAPI;
    private readonly bool _IsXToOpenInsightMetrologyViewerAttachments;

    public FileRead(ISMTP smtp, Dictionary<string, string> fileParameter, string cellInstanceName, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList<ModelObjectParameterDefinition> modelObjectParameters, string equipmentDictionaryName, Dictionary<string, List<long>> dummyRuns, bool useCyclicalForDescription, bool isEAFHosted) :
        base(new Description(), false, smtp, fileParameter, cellInstanceName, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, useCyclicalForDescription, isEAFHosted)
    {
        _MinFileLength = 10;
        _NullData = string.Empty;
        _Logistics = new Logistics(this);
        if (_FileParameter is null)
            throw new Exception(cellInstanceConnectionName);
        if (_ModelObjectParameterDefinitions is null)
            throw new Exception(cellInstanceConnectionName);
        if (!_IsDuplicator)
            throw new Exception(cellInstanceConnectionName);
        _LastDummyRunIndex = -1;
        _IsDummy = _Hyphens == (int)Hyphen.IsDummy;
        _IsNaEDA = _Hyphens == (int)Hyphen.IsNaEDA;
        _IsXToAPC = _Hyphens == (int)Hyphen.IsXToAPC;
        _CellNames = new Dictionary<string, string>();
        _IsXToIQSSi = _Hyphens == (int)Hyphen.IsXToIQSSi;
        _IsXToSPaCe = _Hyphens == (int)Hyphen.IsXToSPaCe;
        _IsXToIQSGaN = _Hyphens == (int)Hyphen.IsXToIQSGaN;
        _IsXToOpenInsight = _Hyphens == (int)Hyphen.IsXToOpenInsight;
        _IsXToOpenInsightMetrologyViewer = _Hyphens == (int)Hyphen.IsXToOpenInsightMetrologyViewer;
        _IqsFile = GetPropertyValue(cellInstanceConnectionName, modelObjectParameters, "IQS.File");
        _MemoryPath = GetPropertyValue(cellInstanceConnectionName, modelObjectParameters, "Path.Memory");
        _IsXToOpenInsightMetrologyViewerAttachments = _Hyphens == (int)Hyphen.IsXToOpenInsightMetrologyViewerAttachments;
        _OpenInsightFilePattern = GetPropertyValue(cellInstanceConnectionName, modelObjectParameters, "OpenInsight.FilePattern");
        _OpenInsightMetrologyViewerAPI = GetPropertyValue(cellInstanceConnectionName, modelObjectParameters, "OpenInsight.MetrologyViewerAPI");
        ModelObjectParameterDefinition[] cellInstanceCollection = GetProperties(cellInstanceConnectionName, modelObjectParameters, "CellInstance.", ".Path");
        foreach (ModelObjectParameterDefinition modelObjectParameterDefinition in cellInstanceCollection)
            _CellNames.Add(modelObjectParameterDefinition.Name.Split('.')[1], modelObjectParameterDefinition.Value);
        if (_IsDummy)
        {
            if (Debugger.IsAttached || fileConnectorConfiguration.PreProcessingMode == FileConnectorConfiguration.PreProcessingModeEnum.Process)
            {
                _Timer = new Timer(Callback, null, Timeout.Infinite, Timeout.Infinite);
                Callback(null);
            }
            else
            {
                int milliSeconds;
                milliSeconds = (int)(fileConnectorConfiguration.FileScanningIntervalInSeconds * 1000 / 2);
                _Timer = new Timer(Callback, null, milliSeconds, Timeout.Infinite);
                milliSeconds += 2000;
            }
        }
    }

    void IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, Exception exception) => Move(extractResults, exception);

    void IFileRead.WaitForThread() => WaitForThread(thread: null, threadExceptions: null);

    string IFileRead.GetEventDescription()
    {
        string result = _Description.GetEventDescription();
        return result;
    }

    List<string> IFileRead.GetHeaderNames()
    {
        List<string> results = _Description.GetHeaderNames();
        return results;
    }

    string[] IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, string to, string from, string resolvedFileLocation, Exception exception)
    {
        string[] results = Move(extractResults, to, from, resolvedFileLocation, exception);
        return results;
    }

    JsonProperty[] IFileRead.GetDefault()
    {
        JsonProperty[] results = _Description.GetDefault(this, _Logistics);
        return results;
    }

    Dictionary<string, string> IFileRead.GetDisplayNamesJsonElement()
    {
        Dictionary<string, string> results = _Description.GetDisplayNamesJsonElement(this);
        return results;
    }

    List<IDescription> IFileRead.GetDescriptions(IFileRead fileRead, List<Test> tests, IProcessData processData)
    {
        List<IDescription> results = _Description.GetDescriptions(fileRead, _Logistics, tests, processData);
        return results;
    }

    Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.GetExtractResult(string reportFullPath, string eventName)
    {
        Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
        if (string.IsNullOrEmpty(eventName))
            throw new Exception();
        _ReportFullPath = reportFullPath;
        DateTime dateTime = DateTime.Now;
        results = GetExtractResult(reportFullPath, dateTime);
        if (results.Item3 is null)
            results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(results.Item1, Array.Empty<Test>(), JsonSerializer.Deserialize<JsonElement[]>("[]"), results.Item4);
        if (results.Item3.Length > 0 && _IsEAFHosted)
            WritePDSF(this, results.Item3);
        UpdateLastTicksDuration(DateTime.Now.Ticks - dateTime.Ticks);
        return results;
    }

    Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.ReExtract()
    {
        Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
        List<string> headerNames = _Description.GetHeaderNames();
        Dictionary<string, string> keyValuePairs = _Description.GetDisplayNamesJsonElement(this);
        results = ReExtract(this, headerNames, keyValuePairs);
        return results;
    }

    void IFileRead.CheckTests(Test[] tests, bool extra)
    {
        if (_Description is not Description)
            throw new Exception();
    }

    void IFileRead.Callback(object state) => Callback(state);

    protected static List<pcl.Description> GetDescriptions(JsonElement[] jsonElements)
    {
        List<pcl.Description> results = new();
        pcl.Description description;
        JsonSerializerOptions jsonSerializerOptions = new() { NumberHandling = JsonNumberHandling.AllowReadingFromString | JsonNumberHandling.WriteAsString };
        foreach (JsonElement jsonElement in jsonElements)
        {
            if (jsonElement.ValueKind != JsonValueKind.Object)
                throw new Exception();
            description = JsonSerializer.Deserialize<pcl.Description>(jsonElement.ToString(), jsonSerializerOptions);
            results.Add(description);
        }
        return results;
    }

    private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
    {
        Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
        string duplicateDirectory;
        Tuple<string, string[], string[]> pdsf = ProcessDataStandardFormat.GetLogisticsColumnsAndBody(reportFullPath);
        _Logistics = new Logistics(reportFullPath, pdsf.Item1);
        SetFileParameterLotIDToLogisticsMID();
        JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(pdsf);
        List<pcl.Description> descriptions = GetDescriptions(jsonElements);
        Tuple<Test[], Dictionary<Test, List<Shared.Properties.IDescription>>> tuple = GetTuple(this, from l in descriptions select (Shared.Properties.IDescription)l, extra: false);
        results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(pdsf.Item1, tuple.Item1, jsonElements, new List<FileInfo>());
        bool isNotUsedInsightMetrologyViewerAttachments = !(_FileConnectorConfiguration.FileScanningIntervalInSeconds > 0) && _IsXToOpenInsightMetrologyViewerAttachments;
        bool isDummyRun = _DummyRuns.Any() && _DummyRuns.ContainsKey(_Logistics.JobID) && _DummyRuns[_Logistics.JobID].Any() && (from l in _DummyRuns[_Logistics.JobID] where l == _Logistics.Sequence select 1).Any();
        if (isDummyRun)
        {
            try
            { File.SetLastWriteTime(reportFullPath, dateTime); }
            catch (Exception) { }
        }
        string[] segments = Path.GetFileNameWithoutExtension(reportFullPath).Split('_');
        if (_IsXToIQSSi)
            duplicateDirectory = string.Concat(_FileConnectorConfiguration.TargetFileLocation, @"\All");
        else if (!_IsXToOpenInsight)
            duplicateDirectory = string.Concat(_FileConnectorConfiguration.TargetFileLocation, @"\", segments[0]);
        else
            duplicateDirectory = string.Concat(Path.GetDirectoryName(Path.GetDirectoryName(_FileConnectorConfiguration.TargetFileLocation)), @"\Data");
        if (segments.Length > 2)
            duplicateDirectory = string.Concat(duplicateDirectory, @"-", segments[2]);
        if (!Directory.Exists(duplicateDirectory))
            _ = Directory.CreateDirectory(duplicateDirectory);
        if (isDummyRun || isNotUsedInsightMetrologyViewerAttachments || _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
        {
            bool ganPPTST = false;
            if (!Directory.Exists(duplicateDirectory))
                _ = Directory.CreateDirectory(duplicateDirectory);
            string successDirectory;
            if (!_IsXToAPC)
                successDirectory = string.Empty;
            else
            {
                successDirectory = string.Concat(Path.GetDirectoryName(_FileConnectorConfiguration.TargetFileLocation), @"\ViewerPath");
                if (!Directory.Exists(successDirectory))
                    _ = Directory.CreateDirectory(successDirectory);
            }
            List<Tuple<Shared.Properties.IScopeInfo, string>> tuples = new();
            string duplicateFile = string.Concat(duplicateDirectory, @"\", Path.GetFileName(reportFullPath));
            string weekOfYear = _Calendar.GetWeekOfYear(_Logistics.DateTimeFromSequence, CalendarWeekRule.FirstDay, DayOfWeek.Sunday).ToString("00");
            string weekDirectory = string.Concat(_Logistics.DateTimeFromSequence.ToString("yyyy"), "_Week_", weekOfYear, @"\", _Logistics.DateTimeFromSequence.ToString("yyyy-MM-dd"));
            string logisticsSequenceMemoryDirectory = string.Concat(_MemoryPath, @"\", _EquipmentType, @"\Source\", weekDirectory, @"\", _Logistics.Sequence);
            if (!Directory.Exists(logisticsSequenceMemoryDirectory))
                _ = Directory.CreateDirectory(logisticsSequenceMemoryDirectory);
            if (_IsXToAPC)
            {
                if (!isDummyRun && _IsEAFHosted)
                    File.Copy(reportFullPath, duplicateFile, overwrite: true);
            }
            else
            {
                if (_IsXToOpenInsightMetrologyViewer)
                {
                    WSRequest wsRequest = new(this, _Logistics, descriptions);
                    if (!isDummyRun && _IsEAFHosted)
                    {
                        Tuple<string, WS.Results> wsResults = WS.SendData(_OpenInsightMetrologyViewerAPI, wsRequest);
                        if (!wsResults.Item2.Success)
                            throw new Exception(wsResults.ToString());
                        _Log.Debug(wsResults.Item2.HeaderID);
                        File.WriteAllText(string.Concat(logisticsSequenceMemoryDirectory, @"\", nameof(WS.Results), ".json"), wsResults.Item1);
                    }
                }
                else
                {
                    Test test;
                    string lines;
                    Shared.Properties.IScopeInfo scopeInfo;
                    foreach (KeyValuePair<Test, List<Shared.Properties.IDescription>> keyValuePair in tuple.Item2)
                    {
                        test = keyValuePair.Key;
                        //scopeInfo = new ScopeInfo(test);
                        if (!_IsXToOpenInsight)
                            scopeInfo = new ScopeInfo(test, _IqsFile);
                        else
                            scopeInfo = new ScopeInfo(test, _OpenInsightFilePattern);
                        //lines = ProcessDataStandardFormat.GetLines(this, scopeInfo, names, values, dateFormat: "M/d/yyyy hh:mm:ss tt", timeFormat: string.Empty, pairedColumns: ExtractResultPairedColumns);
                        ganPPTST = descriptions[0].Recipe.Contains("GAN_PPTST");
                        lines = ProcessData.GetLines(this, _Logistics, descriptions, ganPPTST);
                        tuples.Add(new Tuple<Shared.Properties.IScopeInfo, string>(scopeInfo, lines));
                    }
                }
                if (_IsXToOpenInsightMetrologyViewerAttachments)
                {
                    string[] matchDirectories = Shared1567(reportFullPath, tuples);
                    if (!isDummyRun && _IsEAFHosted && !isNotUsedInsightMetrologyViewerAttachments)
                        ProcessData.PostOpenInsightMetrologyViewerAttachments(this, _Logistics, _OpenInsightMetrologyViewerAPI, dateTime, logisticsSequenceMemoryDirectory, descriptions, matchDirectories[0]);
                }
            }
            if (!_IsXToOpenInsightMetrologyViewer && !_IsXToOpenInsightMetrologyViewerAttachments)
            {
                bool check = false;
                if (!_IsXToIQSSi && !_IsXToIQSGaN)
                    check = true;
                else if (_IsXToIQSSi && !ganPPTST)
                    check = true;
                else if (_IsXToIQSGaN && ganPPTST)
                    check = true;
                //else
                //    Don't write file(s) //throw new Exception();
                if (check)
                    Shared0413(dateTime, isDummyRun, successDirectory, duplicateDirectory, tuples, duplicateFile);
            }
        }
        if (_IsXToOpenInsightMetrologyViewerAttachments)
        {
            string destinationDirectory;
            //string destinationDirectory = WriteScopeInfo(_ProgressPath, _Logistics, dateTime, duplicateDirectory, tuples);
            FileInfo fileInfo = new(reportFullPath);
            string logisticsSequence = _Logistics.Sequence.ToString();
            if (fileInfo.Exists && fileInfo.LastWriteTime < fileInfo.CreationTime)
                File.SetLastWriteTime(reportFullPath, fileInfo.CreationTime);
            string jobIdDirectory = string.Concat(Path.GetDirectoryName(Path.GetDirectoryName(_FileConnectorConfiguration.TargetFileLocation)), @"\", _Logistics.JobID);
            if (!Directory.Exists(jobIdDirectory))
                _ = Directory.CreateDirectory(jobIdDirectory);
            string[] matchDirectories;
            if (!_IsEAFHosted)
                matchDirectories = new string[] { Path.GetDirectoryName(Path.GetDirectoryName(reportFullPath)) };
            else
                matchDirectories = Directory.GetDirectories(jobIdDirectory, string.Concat(_Logistics.MID, '*', logisticsSequence, '*'), SearchOption.TopDirectoryOnly);
            if ((matchDirectories is null) || matchDirectories.Length != 1)
                throw new Exception("Didn't find directory by logistics sequence");
            destinationDirectory = matchDirectories[0];
            if (isDummyRun)
                Shared0607(reportFullPath, duplicateDirectory, logisticsSequence, destinationDirectory);
            else
            {
                WSRequest wsRequest = new(this, _Logistics, descriptions);
                JsonSerializerOptions jsonSerializerOptions = new() { WriteIndented = true };
                string json = JsonSerializer.Serialize(wsRequest, wsRequest.GetType(), jsonSerializerOptions);
                if (_IsEAFHosted)
                    Shared1277(reportFullPath, destinationDirectory, logisticsSequence, jobIdDirectory, json);
                else
                {
                    string jsonFileName = Path.ChangeExtension(reportFullPath, ".json");
                    string historicalText = File.ReadAllText(jsonFileName);
                    if (json != historicalText)
                        throw new Exception("File doesn't match historical!");
                }
            }
        }
        return results;
    }

    private void CallbackIsDummy(string traceDummyFile, List<Tuple<string, string, string, string, int>> tuples, bool fileConnectorConfigurationIncludeSubDirectories, bool includeSubDirectoriesExtra)
    {
        int fileCount;
        string[] files;
        string monARessource;
        string checkDirectory;
        string sourceArchiveFile;
        string inProcessDirectory;
        const string site = "sjc";
        string stateName = string.Concat("Dummy_", _EventName);
        const string monInURL = "http://moninhttp.sjc.infineon.com/input/text";
        MonIn monIn = MonIn.GetInstance(monInURL);
        foreach (Tuple<string, string, string, string, int> item in tuples)
        {
            monARessource = item.Item1;
            sourceArchiveFile = item.Item2;
            inProcessDirectory = item.Item3;
            checkDirectory = item.Item4;
            fileCount = item.Item5;
            try
            {
                if (fileCount > 0 || string.IsNullOrEmpty(checkDirectory))
                {
                    File.AppendAllLines(traceDummyFile, new string[] { site, monARessource, stateName, State.Warning.ToString() });
                    _ = monIn.SendStatus(site, monARessource, stateName, State.Warning);
                    for (int i = 1; i < 12; i++)
                        Thread.Sleep(500);
                }
                else if (inProcessDirectory == checkDirectory)
                    continue;
                if (!_IsEAFHosted)
                    continue;
                if (!File.Exists(sourceArchiveFile))
                    continue;
                if (!long.TryParse(Path.GetFileNameWithoutExtension(sourceArchiveFile).Replace("x", string.Empty), out long sequence))
                    continue;
                ZipFile.ExtractToDirectory(sourceArchiveFile, inProcessDirectory);
                if (fileConnectorConfigurationIncludeSubDirectories && includeSubDirectoriesExtra)
                    checkDirectory = string.Concat(checkDirectory, @"\", sequence);
                if (fileConnectorConfigurationIncludeSubDirectories)
                    files = Directory.GetFiles(inProcessDirectory, "*", SearchOption.AllDirectories);
                else
                    files = Directory.GetFiles(inProcessDirectory, "*", SearchOption.TopDirectoryOnly);
                if (files.Length > 250)
                    throw new Exception("Safety net!");
                foreach (string file in files)
                    File.SetLastWriteTime(file, new DateTime(sequence));
                if (!fileConnectorConfigurationIncludeSubDirectories)
                {
                    foreach (string file in files)
                        File.Move(file, string.Concat(checkDirectory, @"\", Path.GetFileName(file)));
                }
                else
                {
                    string[] directories = Directory.GetDirectories(inProcessDirectory, "*", SearchOption.AllDirectories);
                    foreach (string directory in directories)
                        _ = Directory.CreateDirectory(string.Concat(checkDirectory, directory.Substring(inProcessDirectory.Length)));
                    foreach (string file in files)
                        File.Move(file, string.Concat(checkDirectory, file.Substring(inProcessDirectory.Length)));
                }
                File.AppendAllLines(traceDummyFile, new string[] { site, monARessource, stateName, State.Ok.ToString() });
                _ = monIn.SendStatus(site, monARessource, stateName, State.Ok);
            }
            catch (Exception exception)
            {
                string subject = string.Concat("Exception:", _CellInstanceConnectionName);
                string body = string.Concat(exception.Message, Environment.NewLine, Environment.NewLine, exception.StackTrace);
                try
                { _SMTP.SendHighPriorityEmailMessage(subject, body); }
                catch (Exception) { }
                File.AppendAllLines(traceDummyFile, new string[] { site, monARessource, stateName, State.Critical.ToString(), exception.Message, exception.StackTrace });
                _ = monIn.SendStatus(site, monARessource, stateName, State.Critical);
            }
        }
    }

    private void Callback(object state)
    {
        if (!_IsDummy)
            throw new Exception();
        try
        {
            DateTime dateTime = DateTime.Now;
            bool check = dateTime.Hour > 7 && dateTime.Hour < 18 && dateTime.DayOfWeek != DayOfWeek.Sunday && dateTime.DayOfWeek != DayOfWeek.Saturday;
            if (check)
            {
                int fileCount;
                string[] files;
                string monARessource;
                string checkDirectory;
                string sourceArchiveFile;
                string sourceFileLocation;
                string inProcessDirectory;
                string weekOfYear = _Calendar.GetWeekOfYear(dateTime, CalendarWeekRule.FirstDay, DayOfWeek.Sunday).ToString("00");
                string traceDummyDirectory = string.Concat(Path.GetPathRoot(_TracePath), @"\TracesDummy\", _CellInstanceName, @"\Source\", dateTime.ToString("yyyy"), "___Week_", weekOfYear);
                if (!Directory.Exists(traceDummyDirectory))
                    _ = Directory.CreateDirectory(traceDummyDirectory);
                string traceDummyFile = string.Concat(traceDummyDirectory, @"\", dateTime.Ticks, " - ", _CellInstanceName, ".txt");
                File.AppendAllText(traceDummyFile, string.Empty);
                List<Tuple<string, string, string, string, int>> tuples = new();
                string progressDirectory = Path.GetFullPath(string.Concat(_FileConnectorConfiguration.SourceFileLocation, @"\_ Progress"));
                if (progressDirectory != _ProgressPath || !Directory.Exists(progressDirectory))
                    throw new Exception("Invalid progress path");
                foreach (KeyValuePair<string, string> keyValuePair in _CellNames)
                {
                    monARessource = keyValuePair.Key;
                    if (!keyValuePair.Value.Contains('\\'))
                        continue;
                    foreach (string sourceFileFilter in _FileConnectorConfiguration.SourceFileFilter.Split('|'))
                    {
                        if (sourceFileFilter.ToLower().StartsWith(keyValuePair.Value.Replace(@"\", string.Empty)))
                            sourceFileLocation = Path.GetFullPath(_FileConnectorConfiguration.SourceFileLocation);
                        else if (_FileConnectorConfiguration.SourceFileLocation.ToLower().EndsWith(keyValuePair.Value))
                            sourceFileLocation = Path.GetFullPath(_FileConnectorConfiguration.SourceFileLocation);
                        else
                            sourceFileLocation = Path.GetFullPath(string.Concat(_FileConnectorConfiguration.SourceFileLocation, @"\", keyValuePair.Value));
                        sourceArchiveFile = Path.GetFullPath(string.Concat(sourceFileLocation, @"\", sourceFileFilter));
                        if (!File.Exists(sourceArchiveFile))
                            continue;
                        if (!_DummyRuns.ContainsKey(monARessource))
                            _DummyRuns.Add(monARessource, new List<long>());
                        tuples.Add(new Tuple<string, string, string, string, int>(monARessource, sourceFileFilter, sourceFileLocation, sourceArchiveFile, 0));
                    }
                }
                File.AppendAllLines(traceDummyFile, from l in tuples select l.Item4);
                if (tuples.Any())
                {
                    _LastDummyRunIndex += 1;
                    if (_LastDummyRunIndex >= tuples.Count)
                        _LastDummyRunIndex = 0;
                    monARessource = tuples[_LastDummyRunIndex].Item1;
                    string sourceFileFilter = tuples[_LastDummyRunIndex].Item2;
                    sourceFileLocation = tuples[_LastDummyRunIndex].Item3;
                    sourceArchiveFile = tuples[_LastDummyRunIndex].Item4;
                    //fileCount = tuples[_LastDummyRunIndex].Item5;
                    tuples.Clear();
                    if (long.TryParse(Path.GetFileNameWithoutExtension(sourceArchiveFile).Replace("x", string.Empty), out long sequence))
                    {
                        if (!_DummyRuns[monARessource].Contains(sequence))
                            _DummyRuns[monARessource].Add(sequence);
                        inProcessDirectory = string.Concat(progressDirectory, @"\Dummy_in process\", sequence);
                        checkDirectory = inProcessDirectory;
                        if (!Directory.Exists(checkDirectory))
                            _ = Directory.CreateDirectory(checkDirectory);
                        files = Directory.GetFiles(checkDirectory, "*", SearchOption.AllDirectories);
                        fileCount = files.Length;
                        if (files.Any())
                        {
                            if (files.Length > 250)
                                throw new Exception("Safety net!");
                            try
                            {
                                foreach (string file in files)
                                    File.Delete(file);
                            }
                            catch (Exception) { }
                        }
                        tuples.Add(new Tuple<string, string, string, string, int>(monARessource, sourceArchiveFile, inProcessDirectory, checkDirectory, fileCount));
                        checkDirectory = sourceFileLocation;
                        files = Directory.GetFiles(checkDirectory, string.Concat("*", sequence, "*"), SearchOption.TopDirectoryOnly);
                        fileCount = files.Length;
                        tuples.Add(new Tuple<string, string, string, string, int>(monARessource, sourceArchiveFile, inProcessDirectory, checkDirectory, fileCount));
                    }
                }
                if (tuples.Any())
                    //CallbackIsDummy(traceDummyFile, tuples, FileConnectorConfiguration.IncludeSubDirectories.Value, includeSubDirectoriesExtra: false);
                    CallbackIsDummy(traceDummyFile, tuples, fileConnectorConfigurationIncludeSubDirectories: true, includeSubDirectoriesExtra: true);
            }
        }
        catch (Exception exception)
        {
            string subject = string.Concat("Exception:", _CellInstanceConnectionName);
            string body = string.Concat(exception.Message, Environment.NewLine, Environment.NewLine, exception.StackTrace);
            try
            { _SMTP.SendHighPriorityEmailMessage(subject, body); }
            catch (Exception) { }
        }
        try
        {
            TimeSpan timeSpan = new(DateTime.Now.AddSeconds(_FileConnectorConfiguration.FileScanningIntervalInSeconds.Value).Ticks - DateTime.Now.Ticks);
            _ = _Timer.Change((long)timeSpan.TotalMilliseconds, Timeout.Infinite);
        }
        catch (Exception exception)
        {
            string subject = string.Concat("Exception:", _CellInstanceConnectionName);
            string body = string.Concat(exception.Message, Environment.NewLine, Environment.NewLine, exception.StackTrace);
            try
            { _SMTP.SendHighPriorityEmailMessage(subject, body); }
            catch (Exception) { }
        }
    }

}
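
Note on the dummy-run scheduling above: Callback re-arms _Timer as a one-shot after every pass instead of relying on a periodic timer, so a slow pass can never overlap the next one. A minimal, self-contained sketch of that pattern follows; the class and member names here are illustrative and not part of the handler.

using System;
using System.Threading;

// Sketch of the one-shot, self-rescheduling Timer pattern used by FileRead.Callback:
// create the timer disabled, do the work, then re-arm for half the configured scan interval.
internal sealed class DummyRunScheduler
{
    private readonly Timer _Timer;
    private readonly double _ScanIntervalInSeconds; // stand-in for FileScanningIntervalInSeconds

    public DummyRunScheduler(double scanIntervalInSeconds)
    {
        _ScanIntervalInSeconds = scanIntervalInSeconds;
        // Timeout.Infinite keeps the timer from firing until Change is called.
        _Timer = new Timer(Callback, null, Timeout.Infinite, Timeout.Infinite);
    }

    public void Start() => Callback(null);

    private void Callback(object state)
    {
        try
        {
            Console.WriteLine($"dummy pass at {DateTime.Now:O}");
        }
        finally
        {
            // Re-arm as a one-shot; the period stays Timeout.Infinite so passes never overlap.
            TimeSpan due = TimeSpan.FromSeconds(_ScanIntervalInSeconds / 2);
            _ = _Timer.Change((long)due.TotalMilliseconds, Timeout.Infinite);
        }
    }
}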

Adaptation/FileHandlers/MET08DDUPSFS6420/Hyphen.cs (Normal file, 17 lines)
@@ -0,0 +1,17 @@
namespace Adaptation.FileHandlers.MET08DDUPSFS6420;

public enum Hyphen
{
    IsXToOpenInsightMetrologyViewer, //MetrologyWS.SendData(logic, string.Concat("http://", serverName, "/api/inbound/Tencor"), headerAttachments, detailAttachments);
    IsXToIQSSi, //bool WriteFileSPC(Dictionary
    IsXToIQSGaN, //GAN_PPTST
    IsXToOpenInsight, //bool WriteFileOpenInsight(Dictionary
    IsXToOpenInsightMetrologyViewerAttachments, //Site-Two
    IsXToAPC,
    IsXToSPaCe,
    IsXToArchive,
    IsArchive,
    IsDummy,
    IsManualOIEntry,
    IsNaEDA
}
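
FileRead compares a base-class _Hyphens value against (int)Hyphen.X, so each duplicator connection resolves to exactly one of these routes. A small illustrative sketch of that mapping follows; the assumption that _Hyphens is a hyphen count taken from the cell-instance connection name comes from the shared base class, not from this commit.

using System;

// Illustrative only: with the member ordering declared above, the integer the base class
// supplies selects exactly one route, e.g. 9 maps to Hyphen.IsDummy.
internal static class HyphenRouting
{
    public static Hyphen GetRoute(int hyphens)
    {
        if (!Enum.IsDefined(typeof(Hyphen), hyphens))
            throw new ArgumentOutOfRangeException(nameof(hyphens));
        return (Hyphen)hyphens;
    }
}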

Adaptation/FileHandlers/MET08DDUPSFS6420/ProcessData.cs (Normal file, 278 lines)
@@ -0,0 +1,278 @@
using Adaptation.Shared;
using Adaptation.Shared.Metrology;
using Adaptation.Shared.Properties;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Text.Json;

namespace Adaptation.FileHandlers.MET08DDUPSFS6420;

public class ProcessData
{

    internal static List<Tuple<int, Enum, string>> HyphenTuples => new()
    {
        new Tuple<int, Enum, string>(0, Hyphen.IsNaEDA, @"\EC_EDA\Staging\Traces\~\Source"),
        new Tuple<int, Enum, string>(15, Hyphen.IsXToOpenInsightMetrologyViewer, @"\EC_EAFLog\TracesMES\~\Source"),
        new Tuple<int, Enum, string>(36, Hyphen.IsXToIQSSi, @"\EC_SPC_Si\Traces\~\PollPath"),
        new Tuple<int, Enum, string>(36, Hyphen.IsXToIQSGaN, @"\EC_SPC_GaN\Traces\~\PollPath"),
        new Tuple<int, Enum, string>(36, Hyphen.IsXToOpenInsight, @"\\messa01ec.ec.local\APPS\Metrology\~\Source"),
        new Tuple<int, Enum, string>(36, Hyphen.IsXToOpenInsightMetrologyViewerAttachments, @"\EC_Characterization_Si\In Process\~\Source"),
        new Tuple<int, Enum, string>(360, Hyphen.IsXToAPC, @"\EC_APC\Staging\Traces\~\PollPath"),
        new Tuple<int, Enum, string>(-36, Hyphen.IsXToSPaCe, @"\EC_SPC_Si\Traces\~\Source"),
        new Tuple<int, Enum, string>(180, Hyphen.IsXToArchive, @"\EC_EAFLog\TracesArchive\~\Source"),
        new Tuple<int, Enum, string>(36, Hyphen.IsArchive, @"\EC_Characterization_Si\Processed")
        //new Tuple<int, Enum, string>("IsDummy"
    };

    internal static string GetLines(IFileRead fileRead, Logistics logistics, List<pcl.Description> descriptions, bool ganPPTST)
    {
        if (fileRead is null)
        { }
        StringBuilder result = new();
        pcl.Description x = descriptions[0];
        if (ganPPTST)
        {
            string slot;
            string reactor;
            const int eight = 8;
            DateTime dateTime = DateTime.Parse(x.Date);
            string lot = x.Lot.ToLower().Replace("69-", string.Empty).Replace("71-", string.Empty).Replace("-", string.Empty);
            if (string.IsNullOrEmpty(x.Lot) || x.Lot.Length < 2)
                reactor = "R";
            else
                reactor = string.Concat("R", x.Lot.Substring(0, 2));
            _ = result.Append(nameof(x.Date)).Append(';').
                Append("Part").Append(';').
                Append(nameof(x.Reactor)).Append(';').
                Append("Lot").Append(';').
                Append(nameof(pcl.Detail.Slot)).Append(';').
                Append(nameof(pcl.Detail.Bin1)).Append(';').
                Append(nameof(pcl.Detail.Bin2)).Append(';').
                Append(nameof(pcl.Detail.Bin3)).Append(';').
                Append(nameof(pcl.Detail.Bin4)).Append(';').
                Append(nameof(pcl.Detail.Bin5)).Append(';').
                Append(nameof(pcl.Detail.Bin6)).Append(';').
                Append("Bin9").
                AppendLine();
            foreach (pcl.Description description in descriptions)
            {
                slot = description.Slot.Replace("*", string.Empty);
                _ = result.Append('!').Append(dateTime.ToString("MM/dd/yyyy HH:mm:ss")).Append(';').
                    Append("Particle Adder;").
                    Append(reactor).Append(';').
                    Append(lot).Append(';').
                    Append(slot).Append(';').
                    Append(description.Bin1).Append(';').
                    Append(description.Bin2).Append(';').
                    Append(description.Bin3).Append(';').
                    Append(description.Bin4).Append(';').
                    Append(description.Bin5).Append(';').
                    Append(description.Bin6).Append(';').
                    Append(description.AreaCount).
                    AppendLine();
            }
            if (descriptions.Count != eight)
            {
                string negativeTenThousand = "-10000";
                for (int i = descriptions.Count; i < eight; i++)
                {
                    _ = result.Append('!').Append(dateTime.ToString("MM/dd/yyyy HH:mm:ss")).Append(';').
                        Append("Particle Adder;").
                        Append(reactor).Append(';').
                        Append(lot).Append(';').
                        Append(negativeTenThousand).Append(';').
                        Append(negativeTenThousand).Append(';').
                        Append(negativeTenThousand).Append(';').
                        Append(negativeTenThousand).Append(';').
                        Append(negativeTenThousand).Append(';').
                        Append(negativeTenThousand).Append(';').
                        Append(negativeTenThousand).Append(';').
                        Append(negativeTenThousand).
                        AppendLine();
                }
            }
            if (result.ToString().Split('\n').Length != (eight + 2))
                throw new Exception(string.Concat("Must have ", eight, " samples"));
        }
        else
        {
            char del = '\t';
            _ = result.Append(x.AreaCountAvg).Append(del). // 001 - AreaCountAvg
                Append(x.AreaCountMax).Append(del). // 002 - AreaCountMax
                Append(x.AreaCountMin).Append(del). // 003 - AreaCountMin
                Append(x.AreaCountStdDev).Append(del). // 004 - AreaCountStdDev
                Append(x.AreaTotalAvg).Append(del). // 005 - AreaTotalAvg
                Append(x.AreaTotalMax).Append(del). // 006 - AreaTotalMax
                Append(x.AreaTotalMin).Append(del). // 007 - AreaTotalMin
                Append(x.AreaTotalStdDev).Append(del). // 008 - AreaTotalStdDev
                Append(x.Date).Append(del). // 009 -
                Append(x.HazeAverageAvg).Append(del). // 010 - Haze Average
                Append(x.HazeAverageMax).Append(del). // 011 -
                Append(x.HazeAverageMin).Append(del). // 012 -
                Append(x.HazeAverageStdDev).Append(del). // 013 -
                Append(x.HazeRegionAvg).Append(del). // 014 -
                Append(x.HazeRegionMax).Append(del). // 015 -
                Append(x.HazeRegionMin).Append(del). // 016 -
                Append(x.HazeRegionStdDev).Append(del). // 017 -
                Append(x.Lot).Append(del). // 018 -
                Append(x.LPDCM2Avg).Append(del). // 019 -
                Append(x.LPDCM2Max).Append(del). // 020 -
                Append(x.LPDCM2Min).Append(del). // 021 -
                Append(x.LPDCM2StdDev).Append(del). // 022 -
                Append(x.LPDCountAvg).Append(del). // 023 -
                Append(x.LPDCountMax).Append(del). // 024 -
                Append(x.LPDCM2Min).Append(del). // 025 -
                Append(x.LPDCountStdDev).Append(del). // 026 -
                Append(x.Employee).Append(del). // 027 -
                Append(x.RDS).Append(del). // 028 - Lot
                Append(x.Reactor).Append(del). // 029 - Process
                Append(x.Recipe.Replace(";", string.Empty)).Append(del). // 030 - Part
                Append(x.ScratchCountAvg).Append(del). // 031 - Scratch Count
                Append(x.ScratchCountMax).Append(del). // 032 -
                Append(x.ScratchCountMin).Append(del). // 033 -
                Append(x.ScratchTotalStdDev).Append(del). // 034 -
                Append(x.ScratchTotalAvg).Append(del). // 035 - Scratch Length
                Append(x.ScratchTotalMax).Append(del). // 036 -
                Append(x.ScratchTotalMin).Append(del). // 037 -
                Append(x.ScratchTotalStdDev).Append(del). // 038 -
                Append(x.SumOfDefectsAvg).Append(del). // 039 - Average Sum of Defects
                Append(x.SumOfDefectsMax).Append(del). // 040 - Max Sum of Defects
                Append(x.SumOfDefectsMin).Append(del). // 041 - Min Sum of Defects
                Append(x.SumOfDefectsStdDev).Append(del). // 042 - SumOfDefectsStdDev
                Append(logistics.MesEntity).Append(del). // 043 -
                AppendLine();
        }
        return result.ToString();
    }

    private static void UpdateDataPDF(List<pcl.Description> descriptions, string checkFileName)
    {
        string value;
        object possiblePage;
        object possibleString;
        object possibleCOSArray;
        java.util.List tokenList;
        java.util.List arrayList;
        java.io.OutputStream outputStream;
        List<string> updateValues = new();
        StringBuilder stringBuilder = new();
        java.util.ListIterator tokenIterator;
        java.util.ListIterator arrayIterator;
        java.io.File file = new(checkFileName);
        string reactorLoadLock = descriptions[0].Comments;
        org.apache.pdfbox.pdmodel.common.PDStream pdStream;
        org.apache.pdfbox.pdmodel.common.PDStream updatedStream;
        org.apache.pdfbox.pdfparser.PDFStreamParser pdfStreamParser;
        org.apache.pdfbox.pdfwriter.ContentStreamWriter contentStreamWriter;
        org.apache.pdfbox.pdmodel.PDDocument pdDocument = org.apache.pdfbox.pdmodel.PDDocument.load(file);
        org.apache.pdfbox.pdmodel.PDDocumentCatalog pdDocumentCatalog = pdDocument.getDocumentCatalog();
        java.util.List pagesList = pdDocumentCatalog.getAllPages();
        java.util.ListIterator pageIterator = pagesList.listIterator();
        for (short i = 1; i < short.MaxValue; i++)
        {
            if (!pageIterator.hasNext())
                break;
            possiblePage = pageIterator.next();
            if (possiblePage is not org.apache.pdfbox.pdmodel.PDPage page)
                continue;
            pdStream = page.getContents();
            pdfStreamParser = new org.apache.pdfbox.pdfparser.PDFStreamParser(pdStream);
            pdfStreamParser.parse();
            tokenList = pdfStreamParser.getTokens();
            tokenIterator = tokenList.listIterator();
            for (short t = 1; t < short.MaxValue; t++)
            {
                if (!tokenIterator.hasNext())
                    break;
                possibleCOSArray = tokenIterator.next();
                if (possibleCOSArray is not org.apache.pdfbox.cos.COSArray cossArray)
                    continue;
                _ = stringBuilder.Clear();
                arrayList = cossArray.toList();
                arrayIterator = arrayList.listIterator();
                for (short a = 1; a < short.MaxValue; a++)
                {
                    if (!arrayIterator.hasNext())
                        break;
                    possibleString = arrayIterator.next();
                    if (possibleString is not org.apache.pdfbox.cos.COSString cossString)
                        continue;
                    value = cossString.getString();
                    _ = stringBuilder.Append(value);
                    if (value != "]")
                        continue;
                    updateValues.Add(value);
                    value = stringBuilder.ToString();
                    if (value.Contains("[]"))
                        cossArray.setString(a - 1, string.Concat("*", reactorLoadLock, "]"));
                    else
                        cossArray.setString(a - 1, string.Concat(" {*", reactorLoadLock, "}]"));
                }
            }
            if (updateValues.Any())
            {
                updatedStream = new org.apache.pdfbox.pdmodel.common.PDStream(pdDocument);
                outputStream = updatedStream.createOutputStream();
                contentStreamWriter = new org.apache.pdfbox.pdfwriter.ContentStreamWriter(outputStream);
                contentStreamWriter.writeTokens(tokenList);
                outputStream.close();
                page.setContents(updatedStream);
            }
        }
        if (updateValues.Any())
            pdDocument.save(checkFileName);
        pdDocument.close();
    }

    internal static void PostOpenInsightMetrologyViewerAttachments(IFileRead fileRead, Logistics logistics, string openInsightMetrologyViewerAPI, DateTime dateTime, string logisticsSequenceMemoryDirectory, List<pcl.Description> descriptions, string matchDirectory)
    {
        if (fileRead is null)
        { }
        if (dateTime == DateTime.MinValue)
        { }
        if (logisticsSequenceMemoryDirectory is null)
        { }
        if (descriptions is null)
        { }
        if (matchDirectory is null)
        { }
        string checkFileName;
        string[] pclFiles = Directory.GetFiles(matchDirectory, "*.pcl", SearchOption.TopDirectoryOnly);
        if (pclFiles.Length != 1)
            throw new Exception("Invalid source file count!");
        string sourceFileNameNoExt = Path.GetFileNameWithoutExtension(pclFiles[0]);
        string wsResultsMemoryFile = string.Concat(logisticsSequenceMemoryDirectory, @"\", nameof(WS.Results), ".json");
        if (!File.Exists(wsResultsMemoryFile))
            throw new Exception(string.Concat("Memory file <", wsResultsMemoryFile, "> doesn't exist!"));
        string json = File.ReadAllText(wsResultsMemoryFile);
        WS.Results metrologyWSRequest = JsonSerializer.Deserialize<WS.Results>(json);
        long wsResultsHeaderID = metrologyWSRequest.HeaderID;
        List<WS.Attachment> dataAttachments = new();
        List<WS.Attachment> headerAttachments = new();
        checkFileName = string.Concat(matchDirectory, @"\", sourceFileNameNoExt, "_data.pdf");
        if (!File.Exists(checkFileName))
            throw new Exception("Header file doesn't exist!");
        else
        {
            UpdateDataPDF(descriptions, checkFileName);
            headerAttachments.Add(new WS.Attachment(descriptions[0].HeaderUniqueId, "Data.pdf", checkFileName));
        }
        foreach (pcl.Description description in descriptions)
        {
            checkFileName = string.Concat(matchDirectory, @"\", sourceFileNameNoExt, "_", description.Slot.Replace('*', 's'), "_image.pdf");
            if (File.Exists(checkFileName))
                dataAttachments.Add(new WS.Attachment(description.UniqueId, "Image.pdf", checkFileName));
            checkFileName = string.Concat(matchDirectory, @"\", sourceFileNameNoExt, "_", description.Slot.Replace('*', 's'), "_data.pdf");
            if (File.Exists(checkFileName))
                dataAttachments.Add(new WS.Attachment(description.UniqueId, "Data.pdf", checkFileName));
        }
        if (dataAttachments.Count == 0 || dataAttachments.Count != descriptions.Count)
            throw new Exception("Invalid attachment count!");
        WS.AttachFiles(openInsightMetrologyViewerAPI, wsResultsHeaderID, headerAttachments, dataAttachments);
    }

}
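
PostOpenInsightMetrologyViewerAttachments expects the viewer attachments to sit next to the .pcl file under a fixed naming convention: "<pcl-name>_data.pdf" for the header and "<pcl-name>_<slot>_image.pdf" / "<pcl-name>_<slot>_data.pdf" per slot, with '*' in the slot replaced by 's'. A small illustrative helper that only builds those candidate paths follows; it is a sketch of the convention, not part of the handler.

using System.Collections.Generic;
using System.IO;

// Illustrative sketch: enumerate the attachment file names the method above looks for.
internal static class AttachmentNames
{
    public static IEnumerable<string> GetCandidates(string matchDirectory, string pclFile, IEnumerable<string> slots)
    {
        string sourceFileNameNoExt = Path.GetFileNameWithoutExtension(pclFile);
        // Header attachment ("Data.pdf") for the whole run.
        yield return Path.Combine(matchDirectory, $"{sourceFileNameNoExt}_data.pdf");
        foreach (string slot in slots)
        {
            string s = slot.Replace('*', 's');
            // Per-slot image and data attachments.
            yield return Path.Combine(matchDirectory, $"{sourceFileNameNoExt}_{s}_image.pdf");
            yield return Path.Combine(matchDirectory, $"{sourceFileNameNoExt}_{s}_data.pdf");
        }
    }
}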

Adaptation/FileHandlers/MET08DDUPSFS6420/WSRequest.cs (Normal file, 199 lines)
@@ -0,0 +1,199 @@
using Adaptation.Shared;
using Adaptation.Shared.Properties;
using System;
using System.Collections.Generic;
using System.Linq;

namespace Adaptation.FileHandlers.MET08DDUPSFS6420;

public class WSRequest
{

    public long Id { get; set; }
    public string AreaCountAvg { get; set; }
    public string AreaCountMax { get; set; }
    public string AreaCountMin { get; set; }
    public string AreaCountStdDev { get; set; }
    public string AreaTotalAvg { get; set; }
    public string AreaTotalMax { get; set; }
    public string AreaTotalMin { get; set; }
    public string AreaTotalStdDev { get; set; }
    public string Date { get; set; }
    public string HazeAverageAvg { get; set; }
    public string HazeAverageMax { get; set; }
    public string HazeAverageMin { get; set; }
    public string HazeAverageStdDev { get; set; }
    public string HazeRegionAvg { get; set; }
    public string HazeRegionMax { get; set; }
    public string HazeRegionMin { get; set; }
    public string HazeRegionStdDev { get; set; }
    public string Layer { get; set; }
    public string LotID { get; set; }
    public string LPDCM2Avg { get; set; }
    public string LPDCM2Max { get; set; }
    public string LPDCM2Min { get; set; }
    public string LPDCM2StdDev { get; set; }
    public string LPDCountAvg { get; set; }
    public string LPDCountMax { get; set; }
    public string LPDCountMin { get; set; }
    public string LPDCountStdDev { get; set; }
    public string Operator { get; set; }
    public string ParseErrorText { get; set; }
    public string PSN { get; set; }
    public string RDS { get; set; }
    public string Reactor { get; set; }
    public string Recipe { get; set; }
    public string ScratchCountAvg { get; set; }
    public string ScratchCountMax { get; set; }
    public string ScratchCountMin { get; set; }
    public string ScratchCountStdDev { get; set; }
    public string ScratchTotalAvg { get; set; }
    public string ScratchTotalMax { get; set; }
    public string ScratchTotalMin { get; set; }
    public string ScratchTotalStdDev { get; set; }
    public string SumOfDefectsAvg { get; set; }
    public string SumOfDefectsMax { get; set; }
    public string SumOfDefectsMin { get; set; }
    public string SumOfDefectsStdDev { get; set; }
    public string Title { get; set; }
    public string UniqueId { get; set; }
    public string Zone { get; set; }

    public string CellName { get; set; }
    public string Data { get; set; }
    public int i { get; set; }
    public List<pcl.Detail> Details { get; protected set; }

    [Obsolete("For json")] public WSRequest() { }

    internal WSRequest(IFileRead fileRead, Logistics logistics, List<pcl.Description> descriptions)
    {
        if (fileRead is null)
        { }
        i = -1;
        Id = 0;
        Zone = null;
        Layer = null;
        Title = null;
        Data = "*Data*";
        Details = new List<pcl.Detail>();
        CellName = logistics.MesEntity;
        pcl.Description x = descriptions[0];
        //Header
        {
            AreaCountAvg = x.AreaCountAvg;
            AreaCountMax = x.AreaCountMax;
            AreaCountMin = x.AreaCountMin;
            AreaCountStdDev = x.AreaCountStdDev;
            AreaTotalAvg = x.AreaTotalAvg;
            AreaTotalMax = x.AreaTotalMax;
            AreaTotalMin = x.AreaTotalMin;
            AreaTotalStdDev = x.AreaTotalStdDev;
            Date = x.Date;
            HazeAverageAvg = x.HazeAverageAvg;
            HazeAverageMax = x.HazeAverageMax;
            HazeAverageMin = x.HazeAverageMin;
            HazeAverageStdDev = x.HazeAverageStdDev;
            HazeRegionAvg = x.HazeRegionAvg;
            HazeRegionMax = x.HazeRegionMax;
            HazeRegionMin = x.HazeRegionMin;
            HazeRegionStdDev = x.HazeRegionStdDev;
            LotID = x.Lot;
            LPDCM2Avg = x.LPDCM2Avg;
            LPDCM2Max = x.LPDCM2Max;
            LPDCM2Min = x.LPDCM2Min;
            LPDCM2StdDev = x.LPDCM2StdDev;
            LPDCountAvg = x.LPDCountAvg;
            LPDCountMax = x.LPDCountMax;
            LPDCountMin = x.LPDCountMin;
            LPDCountStdDev = x.LPDCountStdDev;
            ParseErrorText = x.ParseErrorText;
            PSN = x.PSN;
            RDS = x.RDS;
            Reactor = x.Reactor;
            Recipe = x.Recipe;
            ScratchCountAvg = x.ScratchCountAvg;
            ScratchCountMax = x.ScratchCountMax;
            ScratchCountMin = x.ScratchCountMin;
            ScratchCountStdDev = x.ScratchCountStdDev;
            ScratchTotalAvg = x.ScratchTotalAvg;
            ScratchTotalMax = x.ScratchTotalMax;
            ScratchTotalMin = x.ScratchTotalMin;
            ScratchTotalStdDev = x.ScratchTotalStdDev;
            SumOfDefectsAvg = x.SumOfDefectsAvg;
            SumOfDefectsMax = x.SumOfDefectsMax;
            SumOfDefectsMin = x.SumOfDefectsMin;
            SumOfDefectsStdDev = x.SumOfDefectsStdDev;
            UniqueId = x.UniqueId;
        }
        pcl.Detail detail;
        foreach (pcl.Description description in descriptions)
        {
            detail = new pcl.Detail
            {
                Data = "*Data*",
                i = -1,
                Id = 0, //item.Id,
                AreaCount = description.AreaCount,
                AreaTotal = description.AreaTotal,
                Bin1 = description.Bin1,
                Bin2 = description.Bin2,
                Bin3 = description.Bin3,
                Bin4 = description.Bin4,
                Bin5 = description.Bin5,
                Bin6 = description.Bin6,
                Bin7 = description.Bin7,
                Bin8 = description.Bin8,
                Comments = description.Comments,
                Date = description.Date,
                Diameter = description.Diameter,
                Exclusion = description.Exclusion,
                Gain = description.Gain,
                HazeAverage = description.HazeAverage,
                HazePeak = description.HazePeak,
                HazeRegion = description.HazeRegion,
                HazeRng = description.HazeRng,
                HeaderUniqueId = description.HeaderUniqueId,
                LPDCM2 = description.LPDCM2,
                LPDCount = description.LPDCount,
                Laser = description.Laser,
                Mean = description.Mean,
                Recipe = description.Recipe,
                ScratchCount = description.ScratchCount,
                ScratchTotal = description.ScratchTotal,
                Slot = description.Slot,
                Sort = description.Sort,
                StdDev = description.StdDev,
                SumOfDefects = description.SumOfDefects,
                Thresh = description.Thresh,
                Thruput = description.Thruput,
                Title = null,
                UniqueId = description.UniqueId
            };
            Details.Add(detail);
        }
        Date = logistics.DateTimeFromSequence.ToString();
        if (UniqueId is null && Details.Any())
            UniqueId = Details[0].HeaderUniqueId;
        for (int i = 0; i < Details.Count; i++)
        {
            if (string.IsNullOrEmpty(Details[i].Bin1))
                Details[i].Bin1 = null;
            if (string.IsNullOrEmpty(Details[i].Bin2))
                Details[i].Bin2 = null;
            if (string.IsNullOrEmpty(Details[i].Bin3))
                Details[i].Bin3 = null;
            if (string.IsNullOrEmpty(Details[i].Bin4))
                Details[i].Bin4 = null;
            if (string.IsNullOrEmpty(Details[i].Bin5))
                Details[i].Bin5 = null;
            if (string.IsNullOrEmpty(Details[i].Bin6))
                Details[i].Bin6 = null;
            if (string.IsNullOrEmpty(Details[i].Bin7))
                Details[i].Bin7 = null;
            if (string.IsNullOrEmpty(Details[i].Bin8))
                Details[i].Bin8 = null;
        }
    }

}
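
When FileRead is not EAF-hosted it serializes this WSRequest with indented JSON and compares the text against the .json captured next to the original report, throwing when they differ. A minimal sketch of that regression check follows; the object parameter is a stand-in for WSRequest and the helper name is illustrative.

using System;
using System.IO;
using System.Text.Json;

// Sketch of the "compare against historical JSON" check used during re-runs outside EAF hosting.
internal static class HistoricalJsonCheck
{
    public static void Verify(object wsRequest, string reportFullPath)
    {
        JsonSerializerOptions jsonSerializerOptions = new() { WriteIndented = true };
        // Serialize exactly as the handler does: runtime type, indented output.
        string json = JsonSerializer.Serialize(wsRequest, wsRequest.GetType(), jsonSerializerOptions);
        string jsonFileName = Path.ChangeExtension(reportFullPath, ".json");
        string historicalText = File.ReadAllText(jsonFileName);
        if (json != historicalText)
            throw new Exception("File doesn't match historical!");
    }
}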