Ready to test
This commit is contained in:
141
Adaptation/FileHandlers/Archive/FileRead.cs
Normal file
141
Adaptation/FileHandlers/Archive/FileRead.cs
Normal file
@ -0,0 +1,141 @@
|
||||
using Adaptation.Eaf.Management.ConfigurationData.CellAutomation;
|
||||
using Adaptation.Ifx.Eaf.EquipmentConnector.File.Configuration;
|
||||
using Adaptation.Shared;
|
||||
using Adaptation.Shared.Duplicator;
|
||||
using Adaptation.Shared.Methods;
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Globalization;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Text.Json;
|
||||
|
||||
namespace Adaptation.FileHandlers.Archive;
|
||||
|
||||
public class FileRead : Shared.FileRead, IFileRead
{

    /// <summary>
    /// Duplicator handler for the Archive connection: extracts the PDSF content of a
    /// report and moves the matching job directory into the weekly archive tree.
    /// Throws from the constructor (with the connection name for identification) when
    /// the base class did not resolve required state or this is not a duplicator.
    /// </summary>
    public FileRead(ISMTP smtp, Dictionary<string, string> fileParameter, string cellInstanceName, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList<ModelObjectParameterDefinition> modelObjectParameters, string equipmentDictionaryName, Dictionary<string, List<long>> dummyRuns, bool useCyclicalForDescription, bool isEAFHosted) :
        base(new Description(), false, smtp, fileParameter, cellInstanceName, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, useCyclicalForDescription, isEAFHosted)
    {
        _MinFileLength = 10;
        _NullData = string.Empty;
        _Logistics = new Logistics(this);
        // Fail fast if the base constructor left required state unset.
        if (_FileParameter is null)
            throw new Exception(cellInstanceConnectionName);
        if (_ModelObjectParameterDefinitions is null)
            throw new Exception(cellInstanceConnectionName);
        if (!_IsDuplicator)
            throw new Exception(cellInstanceConnectionName);
    }

    void IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, Exception exception) => Move(extractResults, exception);

    void IFileRead.WaitForThread() => WaitForThread(thread: null, threadExceptions: null);

    /// <summary>Delegates to the description's event name.</summary>
    string IFileRead.GetEventDescription()
    {
        string result = _Description.GetEventDescription();
        return result;
    }

    /// <summary>Delegates to the description's header-name list.</summary>
    List<string> IFileRead.GetHeaderNames()
    {
        List<string> results = _Description.GetHeaderNames();
        return results;
    }

    string[] IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, string to, string from, string resolvedFileLocation, Exception exception)
    {
        string[] results = Move(extractResults, to, from, resolvedFileLocation, exception);
        return results;
    }

    JsonProperty[] IFileRead.GetDefault()
    {
        JsonProperty[] results = _Description.GetDefault(this, _Logistics);
        return results;
    }

    Dictionary<string, string> IFileRead.GetDisplayNamesJsonElement()
    {
        Dictionary<string, string> results = _Description.GetDisplayNamesJsonElement(this);
        return results;
    }

    List<IDescription> IFileRead.GetDescriptions(IFileRead fileRead, List<Test> tests, IProcessData processData)
    {
        List<IDescription> results = _Description.GetDescriptions(fileRead, _Logistics, tests, processData);
        return results;
    }

    /// <summary>
    /// Entry point for a single report: requires an event name, records the report
    /// path, extracts, normalizes a null JSON payload to an empty array, writes the
    /// PDSF when hosted, and records the extract duration.
    /// </summary>
    Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.GetExtractResult(string reportFullPath, string eventName)
    {
        Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
        if (string.IsNullOrEmpty(eventName))
            throw new Exception();
        _ReportFullPath = reportFullPath;
        DateTime dateTime = DateTime.Now;
        results = GetExtractResult(reportFullPath, dateTime);
        if (results.Item3 is null)
            results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(results.Item1, Array.Empty<Test>(), JsonSerializer.Deserialize<JsonElement[]>("[]"), results.Item4);
        if (results.Item3.Length > 0 && _IsEAFHosted)
            WritePDSF(this, results.Item3);
        UpdateLastTicksDuration(DateTime.Now.Ticks - dateTime.Ticks);
        return results;
    }

    Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.ReExtract()
    {
        Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
        List<string> headerNames = _Description.GetHeaderNames();
        Dictionary<string, string> keyValuePairs = _Description.GetDisplayNamesJsonElement(this);
        results = ReExtract(this, headerNames, keyValuePairs);
        return results;
    }

    void IFileRead.CheckTests(Test[] tests, bool extra)
    {
        if (_Description is not Description)
            throw new Exception();
    }

    void IFileRead.Callback(object state) => throw new Exception(string.Concat("Not ", nameof(_IsDuplicator)));

    /// <summary>
    /// Moves the job directory matching the current logistics sequence to
    /// &lt;target-parent&gt;\Archive\&lt;JobID&gt;\yyyy_Week_ww\yyyy-MM-dd\&lt;dir&gt;.
    /// </summary>
    private void MoveArchive(DateTime dateTime)
    {
        if (dateTime == DateTime.MinValue)
        { } // parameter currently unused; kept for signature parity with the caller
        string logisticsSequence = _Logistics.Sequence.ToString();
        string weekOfYear = _Calendar.GetWeekOfYear(_Logistics.DateTimeFromSequence, CalendarWeekRule.FirstDay, DayOfWeek.Sunday).ToString("00");
        string weekDirectory = string.Concat(_Logistics.DateTimeFromSequence.ToString("yyyy"), "_Week_", weekOfYear, @"\", _Logistics.DateTimeFromSequence.ToString("yyyy-MM-dd"));
        string jobIdDirectory = string.Concat(_FileConnectorConfiguration.TargetFileLocation, @"\", _Logistics.JobID);
        if (!Directory.Exists(jobIdDirectory))
            _ = Directory.CreateDirectory(jobIdDirectory);
        string destinationArchiveDirectory = string.Concat(Path.GetDirectoryName(_FileConnectorConfiguration.TargetFileLocation), @"\Archive\", _Logistics.JobID, @"\", weekDirectory);
        if (!Directory.Exists(destinationArchiveDirectory))
            _ = Directory.CreateDirectory(destinationArchiveDirectory);
        // BUGFIX: previously the FirstOrDefault() result was wrapped in a freshly
        // constructed one-element array, so the `is null` guard could never fire and
        // a missing match produced a NullReferenceException inside
        // Path.GetDirectoryName instead of the intended exception. Check directly.
        string matchDirectory = GetDirectoriesRecursively(jobIdDirectory, logisticsSequence).FirstOrDefault();
        if (matchDirectory is null)
            throw new Exception("Didn't find directory by logistics sequence");
        string sourceDirectory = Path.GetDirectoryName(matchDirectory);
        destinationArchiveDirectory = string.Concat(destinationArchiveDirectory, @"\", Path.GetFileName(sourceDirectory));
        Directory.Move(sourceDirectory, destinationArchiveDirectory);
    }

    /// <summary>
    /// Parses the PDSF report, refreshes logistics/file parameters, builds the
    /// duplicator descriptions and tests, archives the matching directory, and
    /// returns the combined extract result (no extra files are produced).
    /// </summary>
    private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
    {
        Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
        Tuple<string, string[], string[]> pdsf = ProcessDataStandardFormat.GetLogisticsColumnsAndBody(reportFullPath);
        _Logistics = new Logistics(reportFullPath, pdsf.Item1);
        SetFileParameterLotIDToLogisticsMID();
        JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(pdsf);
        List<Shared.Properties.IDescription> descriptions = GetDuplicatorDescriptions(jsonElements);
        Tuple<Test[], Dictionary<Test, List<Shared.Properties.IDescription>>> tuple = GetTuple(this, descriptions, extra: false);
        MoveArchive(dateTime);
        results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(pdsf.Item1, tuple.Item1, jsonElements, new List<FileInfo>());
        return results;
    }

}
|
40
Adaptation/FileHandlers/CellInstanceConnectionName.cs
Normal file
40
Adaptation/FileHandlers/CellInstanceConnectionName.cs
Normal file
@ -0,0 +1,40 @@
|
||||
using Adaptation.Eaf.Management.ConfigurationData.CellAutomation;
|
||||
using Adaptation.Ifx.Eaf.EquipmentConnector.File.Configuration;
|
||||
using Adaptation.Shared.Methods;
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
|
||||
namespace Adaptation.FileHandlers;
|
||||
|
||||
public class CellInstanceConnectionName
{

    /// <summary>
    /// Factory for the concrete <see cref="IFileRead"/> handler. A connection name
    /// that starts with the cell instance name is a duplicator; its hyphen count
    /// selects the duplicator flavor. Otherwise the connection name is matched
    /// literally (currently only "txt"); anything else is an error.
    /// </summary>
    internal static IFileRead Get(ISMTP smtp, Dictionary<string, string> fileParameter, string cellInstanceName, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList<ModelObjectParameterDefinition> modelObjectParameters, string equipmentDictionaryName, Dictionary<string, List<long>> dummyRuns, bool useCyclicalForDescription, bool isEAFHosted)
    {
        IFileRead fileRead;
        bool isDuplicator = cellInstanceConnectionName.StartsWith(cellInstanceName);
        if (!isDuplicator)
        {
            // Non-duplicator connections are dispatched by their literal name.
            fileRead = cellInstanceConnectionName switch
            {
                nameof(txt) => new txt.FileRead(smtp, fileParameter, cellInstanceName, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, useCyclicalForDescription, isEAFHosted),
                _ => throw new Exception(),
            };
        }
        else
        {
            // The number of '-' characters in the connection name encodes which
            // duplicator variant to construct.
            int hyphenCount = 0;
            foreach (char character in cellInstanceConnectionName)
            {
                if (character == '-')
                    hyphenCount++;
            }
            fileRead = hyphenCount switch
            {
                (int)MET08DDUPSP1TBI.Hyphen.IsArchive => new Archive.FileRead(smtp, fileParameter, cellInstanceName, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, useCyclicalForDescription, isEAFHosted),
                (int)MET08DDUPSP1TBI.Hyphen.IsDummy => new Dummy.FileRead(smtp, fileParameter, cellInstanceName, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, useCyclicalForDescription, isEAFHosted),
                (int)MET08DDUPSP1TBI.Hyphen.IsTIBCO => new TIBCO.FileRead(smtp, fileParameter, cellInstanceName, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, useCyclicalForDescription, isEAFHosted),
                (int)MET08DDUPSP1TBI.Hyphen.IsXToArchive => new ToArchive.FileRead(smtp, fileParameter, cellInstanceName, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, useCyclicalForDescription, isEAFHosted),
                _ => new MET08DDUPSP1TBI.FileRead(smtp, fileParameter, cellInstanceName, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, useCyclicalForDescription, isEAFHosted)
            };
        }
        return fileRead;
    }

}
|
308
Adaptation/FileHandlers/Dummy/FileRead.cs
Normal file
308
Adaptation/FileHandlers/Dummy/FileRead.cs
Normal file
@ -0,0 +1,308 @@
|
||||
using Adaptation.Eaf.Management.ConfigurationData.CellAutomation;
|
||||
using Adaptation.Ifx.Eaf.EquipmentConnector.File.Configuration;
|
||||
using Adaptation.Shared;
|
||||
using Adaptation.Shared.Duplicator;
|
||||
using Adaptation.Shared.Methods;
|
||||
using Infineon.Monitoring.MonA;
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Diagnostics;
|
||||
using System.Globalization;
|
||||
using System.IO;
|
||||
using System.IO.Compression;
|
||||
using System.Linq;
|
||||
using System.Text.Json;
|
||||
using System.Threading;
|
||||
|
||||
namespace Adaptation.FileHandlers.Dummy;
|
||||
|
||||
public class FileRead : Shared.FileRead, IFileRead
{

    // Timer that re-arms itself after each Callback pass.
    private readonly Timer _Timer;
    // Index of the last source-file filter processed, so runs rotate round-robin.
    private int _LastDummyRunIndex;
    // Cell names discovered from "CellInstance.<name>.Alias" model parameters.
    private readonly string[] _CellNames;

    /// <summary>
    /// Duplicator handler for the Dummy connection: on a timer (or immediately when
    /// a debugger is attached or pre-processing mode is Process), replays archived
    /// dummy runs into the target location. Throws from the constructor (with the
    /// connection name) when required base-class state is missing.
    /// </summary>
    public FileRead(ISMTP smtp, Dictionary<string, string> fileParameter, string cellInstanceName, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList<ModelObjectParameterDefinition> modelObjectParameters, string equipmentDictionaryName, Dictionary<string, List<long>> dummyRuns, bool useCyclicalForDescription, bool isEAFHosted) :
        base(new Description(), false, smtp, fileParameter, cellInstanceName, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, useCyclicalForDescription, isEAFHosted)
    {
        _MinFileLength = 10;
        _NullData = string.Empty;
        _Logistics = new Logistics(this);
        if (_FileParameter is null)
            throw new Exception(cellInstanceConnectionName);
        if (_ModelObjectParameterDefinitions is null)
            throw new Exception(cellInstanceConnectionName);
        if (!_IsDuplicator)
            throw new Exception(cellInstanceConnectionName);
        _LastDummyRunIndex = -1;
        List<string> names = new();
        _Timer = new Timer(Callback, null, Timeout.Infinite, Timeout.Infinite);
        ModelObjectParameterDefinition[] aliasDefinitions = GetProperties(cellInstanceConnectionName, modelObjectParameters, "CellInstance.", ".Alias");
        foreach (ModelObjectParameterDefinition aliasDefinition in aliasDefinitions)
            names.Add(aliasDefinition.Name.Split('.')[1]);
        _CellNames = names.ToArray();
        if (Debugger.IsAttached || fileConnectorConfiguration.PreProcessingMode == FileConnectorConfiguration.PreProcessingModeEnum.Process)
            Callback(null);
        else
        {
            TimeSpan dueTime = new(DateTime.Now.AddSeconds(_FileConnectorConfiguration.FileScanningIntervalInSeconds.Value).Ticks - DateTime.Now.Ticks);
            _ = _Timer.Change((long)dueTime.TotalMilliseconds, Timeout.Infinite);
        }
    }

    void IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, Exception exception) => Move(extractResults, exception);

    void IFileRead.WaitForThread() => WaitForThread(thread: null, threadExceptions: null);

    /// <summary>Delegates to the description's event name.</summary>
    string IFileRead.GetEventDescription()
    {
        return _Description.GetEventDescription();
    }

    /// <summary>Delegates to the description's header-name list.</summary>
    List<string> IFileRead.GetHeaderNames()
    {
        return _Description.GetHeaderNames();
    }

    string[] IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, string to, string from, string resolvedFileLocation, Exception exception)
    {
        return Move(extractResults, to, from, resolvedFileLocation, exception);
    }

    JsonProperty[] IFileRead.GetDefault()
    {
        return _Description.GetDefault(this, _Logistics);
    }

    Dictionary<string, string> IFileRead.GetDisplayNamesJsonElement()
    {
        return _Description.GetDisplayNamesJsonElement(this);
    }

    List<IDescription> IFileRead.GetDescriptions(IFileRead fileRead, List<Test> tests, IProcessData processData)
    {
        return _Description.GetDescriptions(fileRead, _Logistics, tests, processData);
    }

    // This handler only runs via the timer callback; direct extraction is an error.
    Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.GetExtractResult(string reportFullPath, string eventName) => throw new Exception(string.Concat("See ", nameof(CallbackFileExists)));

    Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.ReExtract() => throw new Exception(string.Concat("See ", nameof(CallbackFileExists)));

    void IFileRead.CheckTests(Test[] tests, bool extra)
    {
        if (_Description is not Description)
            throw new Exception();
    }

    void IFileRead.Callback(object state) => Callback(state);

    /// <summary>
    /// Extracts the archive into the in-process directory, stamps each file with the
    /// run sequence, moves the files to the target location, and reports status to
    /// MonA. A prior matching run (warning=true) is reported first, with a pause.
    /// Failures are emailed (best effort), traced, and reported as Critical.
    /// </summary>
    private void CallbackInProcessCleared(string sourceArchiveFile, string traceDummyFile, string targetFileLocation, string monARessource, string inProcessDirectory, long sequence, bool warning)
    {
        const string site = "sjc";
        string stateName = string.Concat("Dummy_", _EventName);
        const string monInURL = "http://moninhttp.sjc.infineon.com/input/text";
        MonIn monIn = MonIn.GetInstance(monInURL);
        try
        {
            if (warning)
            {
                File.AppendAllLines(traceDummyFile, new string[] { site, monARessource, stateName, State.Warning.ToString() });
                _ = monIn.SendStatus(site, monARessource, stateName, State.Warning);
                // Pause (~5.5s) before re-running a sequence that already exists.
                for (int i = 1; i < 12; i++)
                    Thread.Sleep(500);
            }
            ZipFile.ExtractToDirectory(sourceArchiveFile, inProcessDirectory);
            string[] extracted = Directory.GetFiles(inProcessDirectory, "*", SearchOption.TopDirectoryOnly);
            if (extracted.Length > 250)
                throw new Exception("Safety net!");
            // Stamp every file so downstream consumers can correlate it to this run.
            foreach (string extractedFile in extracted)
                File.SetLastWriteTime(extractedFile, new DateTime(sequence));
            if (!_FileConnectorConfiguration.IncludeSubDirectories.Value)
            {
                foreach (string extractedFile in extracted)
                    File.Move(extractedFile, Path.Combine(targetFileLocation, Path.GetFileName(extractedFile)));
            }
            else
            {
                // Recreate the archive's directory tree under the target first.
                string[] subDirectories = Directory.GetDirectories(inProcessDirectory, "*", SearchOption.AllDirectories);
                foreach (string subDirectory in subDirectories)
                    _ = Directory.CreateDirectory(string.Concat(targetFileLocation, subDirectory.Substring(inProcessDirectory.Length)));
                foreach (string extractedFile in extracted)
                    File.Move(extractedFile, string.Concat(targetFileLocation, extractedFile.Substring(inProcessDirectory.Length)));
            }
            File.AppendAllLines(traceDummyFile, new string[] { site, monARessource, stateName, State.Ok.ToString() });
            _ = monIn.SendStatus(site, monARessource, stateName, State.Ok);
        }
        catch (Exception exception)
        {
            string subject = string.Concat("Exception:", _CellInstanceConnectionName);
            string body = string.Concat(exception.Message, Environment.NewLine, Environment.NewLine, exception.StackTrace);
            try
            { _SMTP.SendHighPriorityEmailMessage(subject, body); }
            catch (Exception) { } // best effort: notification must not mask the trace
            File.AppendAllLines(traceDummyFile, new string[] { site, monARessource, stateName, State.Critical.ToString(), exception.Message, exception.StackTrace });
            _ = monIn.SendStatus(site, monARessource, stateName, State.Critical);
        }
    }

    /// <summary>
    /// Records the run, clears any leftovers in the in-process directory, determines
    /// whether this sequence was already delivered to the target (warning), then
    /// hands off to <see cref="CallbackInProcessCleared"/>.
    /// </summary>
    private void CallbackFileExists(string sourceArchiveFile, string traceDummyFile, string targetFileLocation, string monARessource, long sequence)
    {
        string[] candidates;
        bool alreadyDelivered = false;
        if (!_DummyRuns.ContainsKey(monARessource))
            _DummyRuns.Add(monARessource, new List<long>());
        if (!_DummyRuns[monARessource].Contains(sequence))
            _DummyRuns[monARessource].Add(sequence);
        File.AppendAllLines(traceDummyFile, new string[] { sourceArchiveFile });
        string inProcessDirectory = Path.Combine(_ProgressPath, "Dummy In-Process", sequence.ToString());
        if (!Directory.Exists(inProcessDirectory))
            _ = Directory.CreateDirectory(inProcessDirectory);
        candidates = Directory.GetFiles(inProcessDirectory, "*", SearchOption.AllDirectories);
        if (candidates.Any())
        {
            if (candidates.Length > 250)
                throw new Exception("Safety net!");
            try
            {
                // Best-effort cleanup of a previously interrupted run.
                foreach (string leftover in candidates)
                    File.Delete(leftover);
            }
            catch (Exception) { }
        }
        if (_FileConnectorConfiguration.IncludeSubDirectories.Value)
            candidates = Directory.GetFiles(targetFileLocation, "*", SearchOption.AllDirectories);
        else
            candidates = Directory.GetFiles(targetFileLocation, "*", SearchOption.TopDirectoryOnly);
        // A file stamped with this sequence means the run was delivered before.
        foreach (string candidate in candidates)
        {
            if (new FileInfo(candidate).LastWriteTime.Ticks == sequence)
            {
                alreadyDelivered = true;
                break;
            }
        }
        CallbackInProcessCleared(sourceArchiveFile, traceDummyFile, targetFileLocation, monARessource, inProcessDirectory, sequence, warning: alreadyDelivered);
    }

    /// <summary>
    /// Maps a path segment to one of the configured cell names: first by
    /// case-insensitive substring match, otherwise by scoring each cell name on how
    /// many of its characters occur in the segment (longest name wins ties).
    /// </summary>
    private string GetCellName(string pathSegment)
    {
        string match = string.Empty;
        foreach (string cellName in _CellNames)
        {
            if (pathSegment.ToLower().Contains(cellName.ToLower()))
            {
                match = cellName;
                break;
            }
        }
        if (string.IsNullOrEmpty(match))
        {
            int hits;
            List<(string CellName, int Count)> tallies = new();
            foreach (string cellName in _CellNames)
            {
                hits = 0;
                // Count occurrences of each character of the cell name in the segment.
                foreach (char @char in cellName.ToLower())
                    hits += pathSegment.Length - pathSegment.ToLower().Replace(@char.ToString(), string.Empty).Length;
                tallies.Add(new(cellName, hits));
            }
            match = (from l in tallies orderby l.CellName.Length, l.Count descending select l.CellName).First();
        }
        return match;
    }

    /// <summary>
    /// Timer body: during day-shift weekday hours (or whenever not EAF-hosted),
    /// rotates through the configured source-file filters, finds the first archive
    /// that exists, resolves its target directory and owning cell, and replays it.
    /// All failures are emailed best-effort; the timer is always re-armed.
    /// </summary>
    private void Callback(object state)
    {
        try
        {
            string pathSegment;
            string monARessource;
            DateTime dateTime = DateTime.Now;
            if (!_FileConnectorConfiguration.TargetFileLocation.Contains(_FileConnectorConfiguration.SourceFileLocation))
                throw new Exception("Target must start with source");
            bool check = dateTime.Hour > 7 && dateTime.Hour < 18 && dateTime.DayOfWeek != DayOfWeek.Sunday && dateTime.DayOfWeek != DayOfWeek.Saturday;
            if (!_IsEAFHosted || check)
            {
                string checkSegment;
                string checkDirectory;
                string sourceFileFilter;
                string sourceArchiveFile;
                string sourceFileLocation;
                string weekOfYear = _Calendar.GetWeekOfYear(dateTime, CalendarWeekRule.FirstDay, DayOfWeek.Sunday).ToString("00");
                string traceDummyDirectory = Path.Combine(Path.GetPathRoot(_TracePath), "TracesDummy", _CellInstanceName, "Source", $"{dateTime:yyyy}___Week_{weekOfYear}");
                if (!Directory.Exists(traceDummyDirectory))
                    _ = Directory.CreateDirectory(traceDummyDirectory);
                string traceDummyFile = Path.Combine(traceDummyDirectory, $"{dateTime.Ticks} - {_CellInstanceName}.txt");
                File.AppendAllText(traceDummyFile, string.Empty);
                if (_FileConnectorConfiguration.SourceFileLocation.EndsWith("\\"))
                    sourceFileLocation = _FileConnectorConfiguration.SourceFileLocation;
                else
                    sourceFileLocation = string.Concat(_FileConnectorConfiguration.SourceFileLocation, '\\');
                for (int filterIndex = 0; filterIndex < _FileConnectorConfiguration.SourceFileFilters.Count; filterIndex++)
                {
                    // Round-robin through the filters across timer ticks.
                    _LastDummyRunIndex += 1;
                    if (_LastDummyRunIndex >= _FileConnectorConfiguration.SourceFileFilters.Count)
                        _LastDummyRunIndex = 0;
                    sourceFileFilter = _FileConnectorConfiguration.SourceFileFilters[_LastDummyRunIndex];
                    sourceArchiveFile = Path.GetFullPath(string.Concat(sourceFileLocation, sourceFileFilter));
                    if (File.Exists(sourceArchiveFile))
                    {
                        checkSegment = _FileConnectorConfiguration.TargetFileLocation.Substring(sourceFileLocation.Length);
                        checkDirectory = Path.GetDirectoryName(sourceArchiveFile);
                        // Walk up from the archive looking for a parent that holds
                        // the target segment, stopping at the source root.
                        for (int depth = 0; depth < int.MaxValue; depth++)
                        {
                            if (checkDirectory.Length < sourceFileLocation.Length || !checkDirectory.StartsWith(sourceFileLocation))
                                break;
                            checkDirectory = Path.GetDirectoryName(checkDirectory);
                            if (Directory.Exists(Path.Combine(checkDirectory, checkSegment)))
                            {
                                checkDirectory = Path.Combine(checkDirectory, checkSegment);
                                break;
                            }
                        }
                        if (!checkDirectory.EndsWith(checkSegment))
                            throw new Exception("Could not determine dummy target directory for extract!");
                        // File names encode the run sequence (ticks), "x" stripped.
                        if (!long.TryParse(Path.GetFileNameWithoutExtension(sourceArchiveFile).Replace("x", string.Empty), out long sequence))
                            throw new Exception("Invalid file name for source archive file!");
                        pathSegment = checkDirectory.Substring(sourceFileLocation.Length);
                        monARessource = GetCellName(pathSegment);
                        if (string.IsNullOrEmpty(monARessource))
                            throw new Exception("Could not determine which cell archive file is associated with!");
                        if (_IsEAFHosted)
                            CallbackFileExists(sourceArchiveFile, traceDummyFile, checkDirectory, monARessource, sequence);
                        break;
                    }
                }
            }
        }
        catch (Exception exception)
        {
            string subject = string.Concat("Exception:", _CellInstanceConnectionName);
            string body = string.Concat(exception.Message, Environment.NewLine, Environment.NewLine, exception.StackTrace);
            try
            { _SMTP.SendHighPriorityEmailMessage(subject, body); }
            catch (Exception) { }
        }
        try
        {
            // Re-arm the timer for the next scan regardless of the outcome above.
            TimeSpan dueTime = new(DateTime.Now.AddSeconds(_FileConnectorConfiguration.FileScanningIntervalInSeconds.Value).Ticks - DateTime.Now.Ticks);
            _ = _Timer.Change((long)dueTime.TotalMilliseconds, Timeout.Infinite);
        }
        catch (Exception exception)
        {
            string subject = string.Concat("Exception:", _CellInstanceConnectionName);
            string body = string.Concat(exception.Message, Environment.NewLine, Environment.NewLine, exception.StackTrace);
            try
            { _SMTP.SendHighPriorityEmailMessage(subject, body); }
            catch (Exception) { }
        }
    }

}
|
@ -1,293 +0,0 @@
|
||||
using Adaptation.Helpers;
|
||||
using Adaptation.Shared;
|
||||
using Adaptation.Shared.Metrology;
|
||||
using log4net;
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Globalization;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Text.Json;
|
||||
using System.Text.RegularExpressions;
|
||||
|
||||
namespace Adaptation.FileHandlers
|
||||
{
|
||||
|
||||
public partial class FileRead : ILogic
|
||||
{
|
||||
|
||||
private string _LastLines;
|
||||
private ConfigData _ConfigData;
|
||||
|
||||
|
||||
public FileRead()
|
||||
{
|
||||
_LastLines = string.Empty;
|
||||
Logistics = new Logistics();
|
||||
_Log = LogManager.GetLogger(typeof(FileRead));
|
||||
}
|
||||
|
||||
public ILogic ShallowCopy()
|
||||
{
|
||||
return (ILogic)MemberwiseClone();
|
||||
}
|
||||
|
||||
public void WaitForThread()
|
||||
{
|
||||
WaitForThread(thread: null, threadExceptions: null);
|
||||
}
|
||||
|
||||
public Tuple<string, ConfigDataBase> GetOpenInsightTuple()
|
||||
{
|
||||
Tuple<string, ConfigDataBase> restuls = new Tuple<string, ConfigDataBase>(_ConfigData.OpenInsightSiViewer, _ConfigData);
|
||||
return restuls;
|
||||
}
|
||||
|
||||
public Tuple<string, JsonElement?, List<FileInfo>> GetExtractResult(string reportFullPath, string eventName)
|
||||
{
|
||||
Tuple<string, JsonElement?, List<FileInfo>> results;
|
||||
_FileParameter.Clear();
|
||||
DateTime dateTime = DateTime.Now;
|
||||
if (_ConfigData.IsEvent && _ConfigData.Duplicator is null)
|
||||
results = GetExtractResult(reportFullPath);
|
||||
else if (_ConfigData.Duplicator.HasValue)
|
||||
results = GetDuplicatorExtractResult(reportFullPath, dateTime);
|
||||
else
|
||||
throw new Exception();
|
||||
if (results.Item2 is null)
|
||||
results = new Tuple<string, JsonElement?, List<FileInfo>>(results.Item1, JsonSerializer.Deserialize<JsonElement>("[]"), results.Item3);
|
||||
int count = results.Item2.Value.GetArrayLength();
|
||||
if (count > 0 && _ConfigData.EafHosted)
|
||||
WritePDSF(results.Item2.Value);
|
||||
UpdateLastTicksDuration(DateTime.Now.Ticks - dateTime.Ticks);
|
||||
return results;
|
||||
}
|
||||
|
||||
private Tuple<string, JsonElement?, List<FileInfo>> GetExtractResult(string reportFullPath)
|
||||
{
|
||||
Tuple<string, JsonElement?, List<FileInfo>> results = new Tuple<string, JsonElement?, List<FileInfo>>(string.Empty, null, new List<FileInfo>());
|
||||
FileInfo fileInfo = new FileInfo(reportFullPath);
|
||||
Logistics = new Logistics(ConfigData.NullData, _ConfigData.CellNames, _ConfigData.MesEntities, fileInfo, useSplitForMID: false);
|
||||
SetFileParameterLotID(Logistics.MID);
|
||||
if (_Configuration.SourceFileFilter == "*")
|
||||
_Log.Debug("Share transfer");
|
||||
else if (fileInfo.Length < ConfigData.MinFileLength)
|
||||
results.Item3.Add(fileInfo);
|
||||
else
|
||||
{
|
||||
ProcessData processData = new ProcessData(this, _ConfigData, results.Item3);
|
||||
if (!(processData.Header is null))
|
||||
{
|
||||
string mid = string.Concat(processData.Header.Reactor, "-", processData.Header.RDS, "-", processData.Header.PSN);
|
||||
mid = Regex.Replace(mid, @"[\\,\/,\:,\*,\?,\"",\<,\>,\|]", "_").Split('\r')[0].Split('\n')[0];
|
||||
Logistics.MID = mid;
|
||||
SetFileParameterLotID(mid);
|
||||
Logistics.ProcessJobID = processData.Header.Reactor;
|
||||
}
|
||||
if (processData.Header is null || !processData.Details.Any())
|
||||
throw new Exception();
|
||||
results = processData.GetResults(this, _ConfigData, results.Item3);
|
||||
}
|
||||
return results;
|
||||
}
|
||||
|
||||
// Duplicator pipeline for one PDSF report: parses the report, rebinds Logistics,
// then routes the data to the destination selected by _ConfigData.Duplicator
// (APC file copy, IQS/OpenInsight line export, Metrology Viewer web service, or
// Metrology Viewer attachments). Returns (logistics text, parsed body, extra files).
private Tuple<string, JsonElement?, List<FileInfo>> GetDuplicatorExtractResult(string reportFullPath, DateTime dateTime)
{
    Tuple<string, JsonElement?, List<FileInfo>> results;
    // Item1 = logistics text, Item2 = column names, Item3 = body rows.
    Tuple<string, string[], string[]> pdsf = ProcessDataStandardFormat.GetLogisticsColumnsAndBody(reportFullPath);
    Logistics = new Logistics(reportFullPath, pdsf.Item1);
    SetFileParameterLotIDToLogisticsMID();
    JsonElement pdsdBodyValues = ProcessDataStandardFormat.GetArray(pdsf);
    results = new Tuple<string, JsonElement?, List<FileInfo>>(pdsf.Item1, pdsdBodyValues, new List<FileInfo>());
    List<Duplicator.Description> processDataDescriptions = _ConfigData.GetProcessDataDescriptions(pdsdBodyValues);
    Dictionary<Test, List<Duplicator.Description>> keyValuePairs = ProcessData.GetKeyValuePairs(_ConfigData, pdsdBodyValues, processDataDescriptions, extra: false);
    // Attachments mode with no scanning interval means the attachment post is skipped below.
    bool isNotUsedInsightMetrologyViewerAttachments = (!(_Configuration.FileScanningIntervalInSeconds > 0) && _ConfigData.Duplicator.Value == ConfigData.Level.IsXToOpenInsightMetrologyViewerAttachments);
    // A dummy run is one whose sequence was pre-registered for this JobID in ConfigData.DummyRuns.
    bool isDummyRun = (ConfigData.DummyRuns.Any() && ConfigData.DummyRuns.ContainsKey(Logistics.JobID) && ConfigData.DummyRuns[Logistics.JobID].Any() && (from l in ConfigData.DummyRuns[Logistics.JobID] where l == Logistics.Sequence select 1).Any());
    if (isDummyRun)
    {
        // Best effort: re-stamp the file for this pass; failures are deliberately ignored.
        try
        { File.SetLastWriteTime(reportFullPath, dateTime); }
        catch (Exception) { }
    }
    string duplicateDirectory;
    // File name convention: segment 0 selects the duplicate folder, segment 2 (if present) is a suffix.
    string[] segments = Path.GetFileNameWithoutExtension(reportFullPath).Split('_');
    if (_ConfigData.Duplicator.Value != ConfigData.Level.IsXToOpenInsight)
        duplicateDirectory = string.Concat(_Configuration.TargetFileLocation, @"\", segments[0]);
    else
        duplicateDirectory = string.Concat(Path.GetDirectoryName(Path.GetDirectoryName(_Configuration.TargetFileLocation)), @"\Data");
    if (segments.Length > 2)
        duplicateDirectory = string.Concat(duplicateDirectory, @"-", segments[2]);
    if (!Directory.Exists(duplicateDirectory))
        Directory.CreateDirectory(duplicateDirectory);
    // NOTE(review): this check-and-create repeats the one directly above verbatim — the
    // second pass is redundant and can be removed.
    if (!Directory.Exists(duplicateDirectory))
        Directory.CreateDirectory(duplicateDirectory);
    // Main dispatch: only for real runs / scanning mode, and never for archive levels.
    if ((isDummyRun || isNotUsedInsightMetrologyViewerAttachments || _Configuration.FileScanningIntervalInSeconds > 0) && _ConfigData.Duplicator.Value != ConfigData.Level.IsXToArchive && _ConfigData.Duplicator.Value != ConfigData.Level.IsArchive)
    {
        // Success folder only applies to the APC flow.
        string successDirectory;
        if (_ConfigData.Duplicator.Value != ConfigData.Level.IsXToAPC)
            successDirectory = string.Empty;
        else
        {
            successDirectory = string.Concat(Path.GetDirectoryName(_Configuration.TargetFileLocation), @"\ViewerPath");
            if (!Directory.Exists(successDirectory))
                Directory.CreateDirectory(successDirectory);
        }
        CultureInfo cultureInfo = new CultureInfo("en-US");
        Calendar calendar = cultureInfo.Calendar;
        List<Tuple<IScopeInfo, string>> tuples = new List<Tuple<IScopeInfo, string>>();
        string duplicateFile = string.Concat(duplicateDirectory, @"\", Path.GetFileName(reportFullPath));
        // Memory layout: <MemoryPath>\<equipment>\Source\yyyy_Week_ww\yyyy-MM-dd\<sequence>
        string weekOfYear = calendar.GetWeekOfYear(Logistics.DateTimeFromSequence, CalendarWeekRule.FirstDay, DayOfWeek.Sunday).ToString("00");
        string weekDirectory = string.Concat(Logistics.DateTimeFromSequence.ToString("yyyy"), "_Week_", weekOfYear, @"\", Logistics.DateTimeFromSequence.ToString("yyyy-MM-dd"));
        string logisticsSequenceMemoryDirectory = string.Concat(_ConfigData.MemoryPath, @"\", _ConfigData.GetEquipmentType(), @"\Source\", weekDirectory, @"\", Logistics.Sequence);
        if (!Directory.Exists(logisticsSequenceMemoryDirectory))
            Directory.CreateDirectory(logisticsSequenceMemoryDirectory);
        if (_ConfigData.Duplicator.Value == ConfigData.Level.IsXToAPC)
        {
            // APC flow: plain copy of the report when actually hosted (not a dummy run).
            if (!isDummyRun && _ConfigData.EafHosted)
                File.Copy(reportFullPath, duplicateFile, overwrite: true);
        }
        else
        {
            if (_ConfigData.Duplicator.Value == ConfigData.Level.IsXToOpenInsightMetrologyViewer)
            {
                // Metrology Viewer flow: push the parsed data to the web service and keep
                // the response JSON next to the memory directory for this sequence.
                List<ProcessData.FileRead.Description> fileReadDescriptions = ProcessData.GetProcessDataFileReadDescriptions(_ConfigData, pdsdBodyValues);
                ProcessData.WSRequest wsRequest = new ProcessData.WSRequest(this, fileReadDescriptions);
                if (!isDummyRun && _ConfigData.EafHosted)
                {
                    Tuple<string, WS.Results> wsResults = WS.SendData(_ConfigData.OpenInsightMetrologyViewerAPI, wsRequest);
                    if (!wsResults.Item2.Success)
                        throw new Exception(wsResults.ToString());
                    _Log.Debug(wsResults.Item2.HeaderID);
                    File.WriteAllText(string.Concat(logisticsSequenceMemoryDirectory, @"\", nameof(WS.Results), ".json"), wsResults.Item1);
                }
            }
            else
            {
                // IQS / OpenInsight flow: build one line set per test; _LastLines suppresses
                // consecutive duplicates (comparison ignores the embedded date via $Date$).
                Test test;
                string check;
                IScopeInfo scopeInfo;
                Tuple<string, string> tuple;
                foreach (KeyValuePair<Test, List<Duplicator.Description>> keyValuePair in keyValuePairs)
                {
                    test = keyValuePair.Key;
                    //scopeInfo = new ScopeInfo(this, _ConfigData, test);
                    if (_ConfigData.Duplicator.Value != ConfigData.Level.IsXToOpenInsight)
                        scopeInfo = new ScopeInfo(this, _ConfigData, test, _ConfigData.IqsFile, _ConfigData.IqsQueryFilter);
                    else
                        scopeInfo = new ScopeInfo(this, _ConfigData, test, _ConfigData.OpenInsightFilePattern, _ConfigData.IqsQueryFilter);
                    //lines = ProcessDataStandardFormat.GetLines(Logistics, scopeInfo, names, values, dateFormat: "M/d/yyyy hh:mm:ss tt", timeFormat: string.Empty, pairedColumns: ExtractResultPairedColumns);
                    // NOTE(review): the two calls below do not use `test`/`scopeInfo`, so they
                    // appear loop-invariant and could be hoisted — confirm before changing.
                    List<ProcessData.FileRead.Description> fileReadDescriptions = ProcessData.GetProcessDataFileReadDescriptions(_ConfigData, pdsdBodyValues);
                    tuple = ProcessData.GetLines(this, fileReadDescriptions);
                    check = tuple.Item1.Replace(tuple.Item2, "$Date$");
                    if (string.IsNullOrEmpty(_LastLines) || check != _LastLines)
                        tuples.Add(new Tuple<IScopeInfo, string>(scopeInfo, tuple.Item1));
                    _LastLines = check;
                }
            }
            if (_ConfigData.Duplicator.Value == ConfigData.Level.IsXToOpenInsightMetrologyViewerAttachments)
            {
                // Attachments flow: locate the matched run directory and post attachments.
                string[] matchDirectories = Shared1567(reportFullPath, tuples);
                if (!isDummyRun && _ConfigData.EafHosted && !isNotUsedInsightMetrologyViewerAttachments)
                {
                    List<ProcessData.FileRead.Description> fileReadDescriptions = ProcessData.GetProcessDataFileReadDescriptions(_ConfigData, pdsdBodyValues);
                    ProcessData.PostOpenInsightMetrologyViewerAttachments(_Log, _ConfigData, Logistics, dateTime, logisticsSequenceMemoryDirectory, fileReadDescriptions, matchDirectories[0]);
                }
            }
        }
        // All non-Metrology-Viewer flows finish through the shared move/write helper.
        if (_ConfigData.Duplicator.Value != ConfigData.Level.IsXToOpenInsightMetrologyViewer && _ConfigData.Duplicator.Value != ConfigData.Level.IsXToOpenInsightMetrologyViewerAttachments)
            Shared0413(dateTime, isDummyRun, successDirectory, duplicateDirectory, tuples, duplicateFile);
    }
    if (_ConfigData.Duplicator.Value == ConfigData.Level.IsXToOpenInsightMetrologyViewerAttachments)
    {
        // Attachments post-processing: match the run directory by MID + sequence and either
        // replay (dummy run) or serialize the WSRequest and compare/forward it.
        string destinationDirectory;
        //string destinationDirectory = WriteScopeInfo(_ConfigData.ProgressPath, Logistics, dateTime, duplicateDirectory, tuples);
        FileInfo fileInfo = new FileInfo(reportFullPath);
        string logisticsSequence = Logistics.Sequence.ToString();
        // Repair a LastWriteTime earlier than CreationTime (seen after copies).
        if (fileInfo.Exists && fileInfo.LastWriteTime < fileInfo.CreationTime)
            File.SetLastWriteTime(reportFullPath, fileInfo.CreationTime);
        string jobIdDirectory = string.Concat(Path.GetDirectoryName(Path.GetDirectoryName(_ConfigData.FileConnectorConfiguration.TargetFileLocation)), @"\", Logistics.JobID);
        if (!Directory.Exists(jobIdDirectory))
            Directory.CreateDirectory(jobIdDirectory);
        string[] matchDirectories;
        if (!_ConfigData.EafHosted)
            matchDirectories = new string[] { Path.GetDirectoryName(Path.GetDirectoryName(reportFullPath)) };
        else
            matchDirectories = Directory.GetDirectories(jobIdDirectory, string.Concat(Logistics.MID, '*', logisticsSequence, '*'), SearchOption.TopDirectoryOnly);
        if ((matchDirectories is null) || matchDirectories.Length != 1)
            throw new Exception("Didn't find directory by logistics sequence");
        destinationDirectory = matchDirectories[0];
        if (isDummyRun)
            Shared0607(reportFullPath, duplicateDirectory, logisticsSequence, destinationDirectory);
        else
        {
            List<ProcessData.FileRead.Description> fileReadDescriptions = ProcessData.GetProcessDataFileReadDescriptions(_ConfigData, pdsdBodyValues);
            ProcessData.WSRequest wsRequest = new ProcessData.WSRequest(this, fileReadDescriptions);
            JsonSerializerOptions jsonSerializerOptions = new JsonSerializerOptions { WriteIndented = true };
            string json = JsonSerializer.Serialize(wsRequest, wsRequest.GetType(), jsonSerializerOptions);
            if (_ConfigData.EafHosted)
                Shared1277(reportFullPath, destinationDirectory, logisticsSequence, jobIdDirectory, json);
            else
            {
                // Offline verification: the freshly serialized request must match the
                // historical .json sibling byte-for-byte.
                string jsonFileName = Path.ChangeExtension(reportFullPath, ".json");
                string historicalText = File.ReadAllText(jsonFileName);
                if (json != historicalText)
                    throw new Exception("File doesn't match historical!");
            }
        }
    }
    return results;
}
|
||||
|
||||
/// <summary>
/// Moves the run directory matched by the current logistics sequence out of the
/// job-id working tree and into the per-week Archive tree
/// (&lt;target parent&gt;\Archive\&lt;JobID&gt;\yyyy_Week_ww\yyyy-MM-dd\&lt;run folder&gt;).
/// </summary>
/// <remarks>
/// Fix: the original guarded <c>matchDirectories is null || matchDirectories.Length != 1</c>
/// on a <c>new string[] { FirstOrDefault() }</c>, which is never null and always length 1,
/// so a missing match slipped past the guard and surfaced later as an
/// <see cref="ArgumentNullException"/> from <c>Path.GetDirectoryName(null)</c>.
/// The guard now checks the <c>FirstOrDefault()</c> result itself.
/// </remarks>
private void MoveArchive()
{
    CultureInfo cultureInfo = new CultureInfo("en-US");
    Calendar calendar = cultureInfo.Calendar;
    string logisticsSequence = Logistics.Sequence.ToString();
    // Week folder layout: yyyy_Week_ww\yyyy-MM-dd (week numbering: FirstDay / Sunday).
    string weekOfYear = calendar.GetWeekOfYear(Logistics.DateTimeFromSequence, CalendarWeekRule.FirstDay, DayOfWeek.Sunday).ToString("00");
    string weekDirectory = string.Concat(Logistics.DateTimeFromSequence.ToString("yyyy"), "_Week_", weekOfYear, @"\", Logistics.DateTimeFromSequence.ToString("yyyy-MM-dd"));
    string jobIdDirectory = string.Concat(_ConfigData.FileConnectorConfiguration.TargetFileLocation, @"\", Logistics.JobID);
    if (!Directory.Exists(jobIdDirectory))
        Directory.CreateDirectory(jobIdDirectory);
    //string destinationArchiveDirectory = string.Concat(jobIdDirectory, @"\!Archive\", weekDirectory);
    string destinationArchiveDirectory = string.Concat(Path.GetDirectoryName(_ConfigData.FileConnectorConfiguration.TargetFileLocation), @"\Archive\", Logistics.JobID, @"\", weekDirectory);
    if (!Directory.Exists(destinationArchiveDirectory))
        Directory.CreateDirectory(destinationArchiveDirectory);
    // Guard the actual match, not a wrapping array (see remarks).
    string matchDirectory = GetDirectoriesRecursively(jobIdDirectory, logisticsSequence).FirstOrDefault();
    if (matchDirectory is null)
        throw new Exception("Didn't find directory by logistics sequence");
    // Archive the run folder (the parent of the matched directory), keeping its name.
    string sourceDirectory = Path.GetDirectoryName(matchDirectory);
    destinationArchiveDirectory = string.Concat(destinationArchiveDirectory, @"\", Path.GetFileName(sourceDirectory));
    Directory.Move(sourceDirectory, destinationArchiveDirectory);
}
|
||||
|
||||
// Post-extract move/cleanup hook: notifies the shared handler, then performs the
// duplicator-level specific move (archive copy, directory archive, progress
// directory) for successful extractions, and — when not EAF-hosted — resolves the
// generated target path and forwards the results there.
public void Move(string reportFullPath, Tuple<string, JsonElement?, List<FileInfo>> extractResults, Exception exception = null)
{
    Shared1872(reportFullPath, exception);
    bool isErrorFile = !(exception is null);
    if (!isErrorFile && _ConfigData.Duplicator.HasValue)
    {
        if (_ConfigData.Duplicator.Value == ConfigData.Level.IsXToArchive)
            Shared0192(reportFullPath);
        else if (_ConfigData.EafHosted && _ConfigData.Duplicator.Value == ConfigData.Level.IsArchive)
            MoveArchive();
        if (_ConfigData.EafHosted && !string.IsNullOrEmpty(_ConfigData.ProgressPath))
            CreateProgressDirectory(_ConfigData.ProgressPath, Logistics, (int?)_ConfigData.Duplicator, exceptionLines: null);
    }
    // No duplicator level configured: plain IO write of the report.
    if (!isErrorFile && _ConfigData.Duplicator is null)
        WriteIO(reportFullPath);
    if (!_ConfigData.EafHosted)
    {
        // NOTE(review): isErrorFile is hard-coded to false here even though a computed
        // isErrorFile local exists above — confirm this is intentional.
        object @object = GetFilePathGeneratorInfo(reportFullPath, isErrorFile: false);
        if (!(@object is null) && @object is string to)
        {
            // Unresolved '%' placeholders can only be expanded by the EAF host.
            if (to.Contains("%"))
                _Log.Debug("Can't debug without EAF Hosting");
            else
                Shared1124(reportFullPath, extractResults, to, _Configuration.SourceFileLocation, resolvedFileLocation: string.Empty, exception: null);
        }
    }
}
|
||||
|
||||
}
|
||||
|
||||
}
|
File diff suppressed because it is too large
Load Diff
512
Adaptation/FileHandlers/MET08DDUPSP1TBI/FileRead.cs
Normal file
512
Adaptation/FileHandlers/MET08DDUPSP1TBI/FileRead.cs
Normal file
@ -0,0 +1,512 @@
|
||||
using Adaptation.Eaf.Management.ConfigurationData.CellAutomation;
|
||||
using Adaptation.Ifx.Eaf.EquipmentConnector.File.Configuration;
|
||||
using Adaptation.Shared;
|
||||
using Adaptation.Shared.Duplicator;
|
||||
using Adaptation.Shared.Methods;
|
||||
using Adaptation.Shared.Metrology;
|
||||
using Infineon.Monitoring.MonA;
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Diagnostics;
|
||||
using System.Globalization;
|
||||
using System.IO;
|
||||
using System.IO.Compression;
|
||||
using System.Linq;
|
||||
using System.Reflection;
|
||||
using System.Text.Json;
|
||||
using System.Text.Json.Serialization;
|
||||
using System.Threading;
|
||||
|
||||
namespace Adaptation.FileHandlers.MET08DDUPSP1TBI;
|
||||
|
||||
public class FileRead : Shared.FileRead, IFileRead
|
||||
{
|
||||
|
||||
// Last exported line set (date replaced by "$Date$") — used to suppress
// consecutive duplicate exports in GetExtractResult.
private string _LastLines;
// Drives the dummy-run Callback loop; only created when _IsDummy.
private readonly Timer _Timer;
private int _LastDummyRunIndex;
// Mode flags decoded from _Hyphens in the constructor; each compares _Hyphens
// against one Hyphen enum value, so at most one is true per instance.
private readonly bool _IsDummy;
private readonly bool _IsNaEDA;
private readonly bool _IsXToAPC;
// IQS export file name/pattern (model object parameter "IQS.File").
private readonly string _IqsFile;
private readonly bool _IsXToIQSSi;
private readonly bool _IsXToSPaCe;
// Root for per-sequence memory directories (model object parameter "Path.Memory").
private readonly string _MemoryPath;
private readonly bool _IsXToOpenInsight;
// Full path to gpcl6win64.exe next to the entry assembly; required when hosted
// in attachments mode (validated in the constructor).
private readonly string _GhostPCLFileName;
// OpenInsight export file pattern (model object parameter "OpenInsight.FilePattern").
private readonly string _OpenInsightFilePattern;
private readonly bool _IsXToOpenInsightMetrologyViewer;
// Cell name -> path fragment, loaded from "CellInstance.*.Path" parameters.
private readonly Dictionary<string, string> _CellNames;
// Endpoint for WS.SendData (model object parameter "OpenInsight.MetrologyViewerAPI").
private readonly string _OpenInsightMetrologyViewerAPI;
private readonly bool _IsXToOpenInsightMetrologyViewerAttachments;
|
||||
|
||||
// Initializes the MET08DDUPSP1TBI duplicator handler: validates base-class state,
// decodes the _Hyphens mode flags, loads model-object parameters (IQS, memory path,
// OpenInsight settings, cell names), resolves the GhostPCL binary, and — in dummy
// mode — starts the dummy-run timer (or fires it immediately under a debugger /
// Process pre-processing mode).
public FileRead(ISMTP smtp, Dictionary<string, string> fileParameter, string cellInstanceName, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList<ModelObjectParameterDefinition> modelObjectParameters, string equipmentDictionaryName, Dictionary<string, List<long>> dummyRuns, bool useCyclicalForDescription, bool isEAFHosted) :
    base(new Description(), false, smtp, fileParameter, cellInstanceName, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, useCyclicalForDescription, isEAFHosted)
{
    _MinFileLength = 10;
    _NullData = string.Empty;
    _Logistics = new Logistics(this);
    // Fail fast when the base class did not populate required state; the connection
    // name is the only context available for the error.
    if (_FileParameter is null)
        throw new Exception(cellInstanceConnectionName);
    if (_ModelObjectParameterDefinitions is null)
        throw new Exception(cellInstanceConnectionName);
    if (!_IsDuplicator)
        throw new Exception(cellInstanceConnectionName);
    _LastDummyRunIndex = -1;
    // Decode the single active mode from _Hyphens.
    _IsDummy = _Hyphens == (int)Hyphen.IsDummy;
    _IsNaEDA = _Hyphens == (int)Hyphen.IsNaEDA;
    _IsXToAPC = _Hyphens == (int)Hyphen.IsXToAPC;
    _CellNames = new Dictionary<string, string>();
    _IsXToIQSSi = _Hyphens == (int)Hyphen.IsXToIQSSi;
    _IsXToSPaCe = _Hyphens == (int)Hyphen.IsXToSPaCe;
    _IsXToOpenInsight = _Hyphens == (int)Hyphen.IsXToOpenInsight;
    _IqsFile = GetPropertyValue(cellInstanceConnectionName, modelObjectParameters, "IQS.File");
    _IsXToOpenInsightMetrologyViewer = _Hyphens == (int)Hyphen.IsXToOpenInsightMetrologyViewer;
    _MemoryPath = GetPropertyValue(cellInstanceConnectionName, modelObjectParameters, "Path.Memory");
    _IsXToOpenInsightMetrologyViewerAttachments = _Hyphens == (int)Hyphen.IsXToOpenInsightMetrologyViewerAttachments;
    _OpenInsightFilePattern = GetPropertyValue(cellInstanceConnectionName, modelObjectParameters, "OpenInsight.FilePattern");
    _OpenInsightMetrologyViewerAPI = GetPropertyValue(cellInstanceConnectionName, modelObjectParameters, "OpenInsight.MetrologyViewerAPI");
    // "CellInstance.<name>.Path" parameters -> _CellNames[<name>] = path value.
    ModelObjectParameterDefinition[] cellInstanceCollection = GetProperties(cellInstanceConnectionName, modelObjectParameters, "CellInstance.", ".Path");
    foreach (ModelObjectParameterDefinition modelObjectParameterDefinition in cellInstanceCollection)
        _CellNames.Add(modelObjectParameterDefinition.Name.Split('.')[1], modelObjectParameterDefinition.Value);
    // GhostPCL is expected beside the entry assembly; required only for hosted attachments mode.
    _GhostPCLFileName = string.Concat(Path.GetDirectoryName(Assembly.GetEntryAssembly().Location), @"\gpcl6win64.exe");
    if (_IsEAFHosted && _IsXToOpenInsightMetrologyViewerAttachments && !File.Exists(_GhostPCLFileName))
        throw new Exception("Ghost PCL FileName doesn't Exist!");
    if (_IsDummy)
    {
        if (Debugger.IsAttached || fileConnectorConfiguration.PreProcessingMode == FileConnectorConfiguration.PreProcessingModeEnum.Process)
        {
            // Debug/Process mode: create an idle timer and run the callback once now.
            _Timer = new Timer(Callback, null, Timeout.Infinite, Timeout.Infinite);
            Callback(null);
        }
        else
        {
            // Normal mode: first fire after half the scanning interval.
            int milliSeconds;
            milliSeconds = (int)(fileConnectorConfiguration.FileScanningIntervalInSeconds * 1000 / 2);
            _Timer = new Timer(Callback, null, milliSeconds, Timeout.Infinite);
            // NOTE(review): dead store — milliSeconds is never read after this point.
            milliSeconds += 2000;
        }
    }
}
|
||||
|
||||
// Forwards the interface-level Move to the shared base-class overload.
void IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, Exception exception)
{
    Move(extractResults, exception);
}
|
||||
|
||||
// This handler has no worker thread; delegate with empty thread/exception state.
void IFileRead.WaitForThread()
{
    WaitForThread(thread: null, threadExceptions: null);
}
|
||||
|
||||
// Event description is supplied by the Description instance.
string IFileRead.GetEventDescription() => _Description.GetEventDescription();
|
||||
|
||||
// Header names come straight from the Description instance.
List<string> IFileRead.GetHeaderNames() => _Description.GetHeaderNames();
|
||||
|
||||
// Forwards to the protected Move overload and returns its result unchanged.
string[] IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, string to, string from, string resolvedFileLocation, Exception exception) =>
    Move(extractResults, to, from, resolvedFileLocation, exception);
|
||||
|
||||
// Default JSON properties are built by the Description instance for this handler.
JsonProperty[] IFileRead.GetDefault() => _Description.GetDefault(this, _Logistics);
|
||||
|
||||
// Display-name mapping is provided by the Description instance.
Dictionary<string, string> IFileRead.GetDisplayNamesJsonElement() => _Description.GetDisplayNamesJsonElement(this);
|
||||
|
||||
// Delegates description building to the Description instance with this handler's logistics.
List<IDescription> IFileRead.GetDescriptions(IFileRead fileRead, List<Test> tests, IProcessData processData) =>
    _Description.GetDescriptions(fileRead, _Logistics, tests, processData);
|
||||
|
||||
// Interface entry point for extraction: validates the event name, runs the private
// GetExtractResult, normalizes a null body to an empty one, writes PDSF output when
// hosted, and records how long the extraction took.
Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.GetExtractResult(string reportFullPath, string eventName)
{
    if (string.IsNullOrEmpty(eventName))
        throw new Exception();
    _ReportFullPath = reportFullPath;
    DateTime started = DateTime.Now;
    Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults = GetExtractResult(reportFullPath, started);
    // Normalize: a null body becomes empty tests/elements so callers never see null.
    if (extractResults.Item3 is null)
        extractResults = new(extractResults.Item1, Array.Empty<Test>(), JsonSerializer.Deserialize<JsonElement[]>("[]"), extractResults.Item4);
    if (extractResults.Item3.Length > 0 && _IsEAFHosted)
        WritePDSF(this, extractResults.Item3);
    UpdateLastTicksDuration(DateTime.Now.Ticks - started.Ticks);
    return extractResults;
}
|
||||
|
||||
// Re-runs extraction using the Description's header names and display-name mapping.
Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.ReExtract()
{
    List<string> headerNames = _Description.GetHeaderNames();
    Dictionary<string, string> displayNames = _Description.GetDisplayNamesJsonElement(this);
    return ReExtract(this, headerNames, displayNames);
}
|
||||
|
||||
// Sanity check: this handler only supports its own Description type; the test
// array itself is not inspected here.
void IFileRead.CheckTests(Test[] tests, bool extra)
{
    if (!(_Description is Description))
        throw new Exception();
}
|
||||
|
||||
// Forwards the interface callback to the private timer callback.
void IFileRead.Callback(object state)
{
    Callback(state);
}
|
||||
|
||||
/// <summary>
/// Deserializes each PDSF body element into a <c>txt.Description</c>.
/// Every element must be a JSON object; otherwise an exception is thrown.
/// Numbers are accepted from (and written as) strings.
/// </summary>
protected static List<txt.Description> GetDescriptions(JsonElement[] jsonElements)
{
    JsonSerializerOptions options = new() { NumberHandling = JsonNumberHandling.AllowReadingFromString | JsonNumberHandling.WriteAsString };
    List<txt.Description> descriptions = new();
    foreach (JsonElement element in jsonElements)
    {
        if (element.ValueKind != JsonValueKind.Object)
            throw new Exception();
        descriptions.Add(JsonSerializer.Deserialize<txt.Description>(element.ToString(), options));
    }
    return descriptions;
}
|
||||
|
||||
// Core extraction for the MET08DDUPSP1TBI duplicator: parses the PDSF report,
// builds descriptions/tests, then routes the data by the mode flags (_IsXToAPC,
// _IsXToIQSSi, _IsXToOpenInsight, _IsXToOpenInsightMetrologyViewer[Attachments]).
// Returns (logistics text, tests, body elements, extra files).
private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
{
    Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
    string duplicateDirectory;
    // Item1 = logistics text, Item2 = column names, Item3 = body rows.
    Tuple<string, string[], string[]> pdsf = ProcessDataStandardFormat.GetLogisticsColumnsAndBody(reportFullPath);
    _Logistics = new Logistics(reportFullPath, pdsf.Item1);
    SetFileParameterLotIDToLogisticsMID();
    JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(pdsf);
    List<txt.Description> descriptions = GetDescriptions(jsonElements);
    Tuple<Test[], Dictionary<Test, List<Shared.Properties.IDescription>>> tuple = GetTuple(this, from l in descriptions select (Shared.Properties.IDescription)l, extra: false);
    results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(pdsf.Item1, tuple.Item1, jsonElements, new List<FileInfo>());
    // Attachments mode with no scanning interval means the attachments step is skipped below.
    bool isNotUsedInsightMetrologyViewerAttachments = !(_FileConnectorConfiguration.FileScanningIntervalInSeconds > 0) && _IsXToOpenInsightMetrologyViewerAttachments;
    // A dummy run is one whose sequence was pre-registered for this JobID in _DummyRuns.
    bool isDummyRun = _DummyRuns.Any() && _DummyRuns.ContainsKey(_Logistics.JobID) && _DummyRuns[_Logistics.JobID].Any() && (from l in _DummyRuns[_Logistics.JobID] where l == _Logistics.Sequence select 1).Any();
    if (isDummyRun)
    {
        // Best effort: re-stamp the file for this pass; failures are deliberately ignored.
        try
        { File.SetLastWriteTime(reportFullPath, dateTime); }
        catch (Exception) { }
    }
    // File name convention: segment 0 selects the duplicate folder, segment 2 (if present) is a suffix.
    string[] segments = Path.GetFileNameWithoutExtension(reportFullPath).Split('_');
    if (_IsXToIQSSi)
        duplicateDirectory = string.Concat(_FileConnectorConfiguration.TargetFileLocation, @"\All");
    else if (!_IsXToOpenInsight)
        duplicateDirectory = string.Concat(_FileConnectorConfiguration.TargetFileLocation, @"\", segments[0]);
    else
        duplicateDirectory = string.Concat(Path.GetDirectoryName(Path.GetDirectoryName(_FileConnectorConfiguration.TargetFileLocation)), @"\Data");
    if (segments.Length > 2)
        duplicateDirectory = string.Concat(duplicateDirectory, @"-", segments[2]);
    if (!Directory.Exists(duplicateDirectory))
        _ = Directory.CreateDirectory(duplicateDirectory);
    if (isDummyRun || isNotUsedInsightMetrologyViewerAttachments || _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
    {
        // NOTE(review): duplicateDirectory was already created just above; this repeat
        // check appears redundant.
        if (!Directory.Exists(duplicateDirectory))
            _ = Directory.CreateDirectory(duplicateDirectory);
        // Success folder only applies to the APC flow.
        string successDirectory;
        if (!_IsXToAPC)
            successDirectory = string.Empty;
        else
        {
            successDirectory = string.Concat(Path.GetDirectoryName(_FileConnectorConfiguration.TargetFileLocation), @"\ViewerPath");
            if (!Directory.Exists(successDirectory))
                _ = Directory.CreateDirectory(successDirectory);
        }
        List<Tuple<Shared.Properties.IScopeInfo, string>> tuples = new();
        string duplicateFile = string.Concat(duplicateDirectory, @"\", Path.GetFileName(reportFullPath));
        // Memory layout: <_MemoryPath>\<equipment type>\Source\yyyy_Week_ww\yyyy-MM-dd\<sequence>
        string weekOfYear = _Calendar.GetWeekOfYear(_Logistics.DateTimeFromSequence, CalendarWeekRule.FirstDay, DayOfWeek.Sunday).ToString("00");
        string weekDirectory = string.Concat(_Logistics.DateTimeFromSequence.ToString("yyyy"), "_Week_", weekOfYear, @"\", _Logistics.DateTimeFromSequence.ToString("yyyy-MM-dd"));
        string logisticsSequenceMemoryDirectory = string.Concat(_MemoryPath, @"\", _EquipmentType, @"\Source\", weekDirectory, @"\", _Logistics.Sequence);
        if (!Directory.Exists(logisticsSequenceMemoryDirectory))
            _ = Directory.CreateDirectory(logisticsSequenceMemoryDirectory);
        if (_IsXToAPC)
        {
            // APC flow: plain copy of the report when actually hosted (not a dummy run).
            if (!isDummyRun && _IsEAFHosted)
                File.Copy(reportFullPath, duplicateFile, overwrite: true);
        }
        else
        {
            if (_IsXToOpenInsightMetrologyViewer)
            {
                // Metrology Viewer flow: push the parsed data to the web service and keep
                // the response JSON in the memory directory for this sequence.
                WSRequest wsRequest = new(this, _Logistics, descriptions);
                if (!isDummyRun && _IsEAFHosted)
                {
                    Tuple<string, WS.Results> wsResults = WS.SendData(_OpenInsightMetrologyViewerAPI, wsRequest);
                    if (!wsResults.Item2.Success)
                        throw new Exception(wsResults.ToString());
                    _Log.Debug(wsResults.Item2.HeaderID);
                    File.WriteAllText(string.Concat(logisticsSequenceMemoryDirectory, @"\", nameof(WS.Results), ".json"), wsResults.Item1);
                }
            }
            else
            {
                // IQS / OpenInsight flow: one line set per test; _LastLines suppresses
                // consecutive duplicates (comparison ignores the embedded date via $Date$).
                Test test;
                string check;
                Tuple<string, string> lines;
                Shared.Properties.IScopeInfo scopeInfo;
                foreach (KeyValuePair<Test, List<Shared.Properties.IDescription>> keyValuePair in tuple.Item2)
                {
                    test = keyValuePair.Key;
                    //scopeInfo = new ScopeInfo(test);
                    if (!_IsXToOpenInsight)
                        scopeInfo = new ScopeInfo(test, _IqsFile);
                    else
                        scopeInfo = new ScopeInfo(test, _OpenInsightFilePattern);
                    // NOTE(review): this call does not use `test`/`scopeInfo`, so it appears
                    // loop-invariant and could be hoisted — confirm before changing.
                    lines = ProcessData.GetLines(this, _Logistics, descriptions);
                    check = lines.Item1.Replace(lines.Item2, "$Date$");
                    if (string.IsNullOrEmpty(_LastLines) || check != _LastLines)
                        tuples.Add(new Tuple<Shared.Properties.IScopeInfo, string>(scopeInfo, lines.Item1));
                    _LastLines = check;
                }
            }
            if (_IsXToOpenInsightMetrologyViewerAttachments)
            {
                // Attachments flow: locate the matched run directory and post attachments.
                string[] matchDirectories = Shared1567(reportFullPath, tuples);
                if (!isDummyRun && _IsEAFHosted && !isNotUsedInsightMetrologyViewerAttachments)
                    ProcessData.PostOpenInsightMetrologyViewerAttachments(this, _Logistics, _OpenInsightMetrologyViewerAPI, _GhostPCLFileName, dateTime, logisticsSequenceMemoryDirectory, descriptions, matchDirectories[0]);
            }
        }
        // All non-Metrology-Viewer flows finish through the shared move/write helper.
        if (!_IsXToOpenInsightMetrologyViewer && !_IsXToOpenInsightMetrologyViewerAttachments)
            Shared0413(dateTime, isDummyRun, successDirectory, duplicateDirectory, tuples, duplicateFile);
    }
    if (_IsXToOpenInsightMetrologyViewerAttachments)
    {
        // Attachments post-processing: match the run directory by MID + sequence and either
        // replay (dummy run) or serialize the WSRequest and compare/forward it.
        string destinationDirectory;
        //string destinationDirectory = WriteScopeInfo(_ProgressPath, _Logistics, dateTime, duplicateDirectory, tuples);
        FileInfo fileInfo = new(reportFullPath);
        string logisticsSequence = _Logistics.Sequence.ToString();
        // Repair a LastWriteTime earlier than CreationTime (seen after copies).
        if (fileInfo.Exists && fileInfo.LastWriteTime < fileInfo.CreationTime)
            File.SetLastWriteTime(reportFullPath, fileInfo.CreationTime);
        string jobIdDirectory = string.Concat(Path.GetDirectoryName(Path.GetDirectoryName(_FileConnectorConfiguration.TargetFileLocation)), @"\", _Logistics.JobID);
        if (!Directory.Exists(jobIdDirectory))
            _ = Directory.CreateDirectory(jobIdDirectory);
        string[] matchDirectories;
        if (!_IsEAFHosted)
            matchDirectories = new string[] { Path.GetDirectoryName(Path.GetDirectoryName(reportFullPath)) };
        else
            matchDirectories = Directory.GetDirectories(jobIdDirectory, string.Concat(_Logistics.MID, '*', logisticsSequence, '*'), SearchOption.TopDirectoryOnly);
        if ((matchDirectories is null) || matchDirectories.Length != 1)
            throw new Exception("Didn't find directory by logistics sequence");
        destinationDirectory = matchDirectories[0];
        if (isDummyRun)
            Shared0607(reportFullPath, duplicateDirectory, logisticsSequence, destinationDirectory);
        else
        {
            WSRequest wsRequest = new(this, _Logistics, descriptions);
            JsonSerializerOptions jsonSerializerOptions = new() { WriteIndented = true };
            string json = JsonSerializer.Serialize(wsRequest, wsRequest.GetType(), jsonSerializerOptions);
            if (_IsEAFHosted)
                Shared1277(reportFullPath, destinationDirectory, logisticsSequence, jobIdDirectory, json);
            else
            {
                // Offline verification: the freshly serialized request must match the
                // historical .json sibling byte-for-byte.
                string jsonFileName = Path.ChangeExtension(reportFullPath, ".json");
                string historicalText = File.ReadAllText(jsonFileName);
                if (json != historicalText)
                    throw new Exception("File doesn't match historical!");
            }
        }
    }
    return results;
}
|
||||
|
||||
// Processes a batch of dummy-run work items. Each tuple is
// (MonA resource, source archive zip, in-process directory, check directory, file count).
// For each item: reports Warning to MonA when files are pending (then waits ~6s),
// extracts the archive, re-stamps extracted files with the sequence-derived time,
// moves them into the check directory, and reports Ok — or Critical (plus a
// high-priority email) on failure. Per-item failures never abort the batch.
private void CallbackIsDummy(string traceDummyFile, List<Tuple<string, string, string, string, int>> tuples, bool fileConnectorConfigurationIncludeSubDirectories, bool includeSubDirectoriesExtra)
{
    int fileCount;
    string[] files;
    string monARessource;
    string checkDirectory;
    string sourceArchiveFile;
    string inProcessDirectory;
    const string site = "sjc";
    string stateName = string.Concat("Dummy_", _EventName);
    const string monInURL = "http://moninhttp.sjc.infineon.com/input/text";
    MonIn monIn = MonIn.GetInstance(monInURL);
    foreach (Tuple<string, string, string, string, int> item in tuples)
    {
        monARessource = item.Item1;
        sourceArchiveFile = item.Item2;
        inProcessDirectory = item.Item3;
        checkDirectory = item.Item4;
        fileCount = item.Item5;
        try
        {
            if (fileCount > 0 || string.IsNullOrEmpty(checkDirectory))
            {
                // Pending files (or no check directory): warn MonA, then pause
                // (11 x 500 ms) before continuing with this item.
                File.AppendAllLines(traceDummyFile, new string[] { site, monARessource, stateName, State.Warning.ToString() });
                _ = monIn.SendStatus(site, monARessource, stateName, State.Warning);
                for (int i = 1; i < 12; i++)
                    Thread.Sleep(500);
            }
            else if (inProcessDirectory == checkDirectory)
                continue;
            if (!_IsEAFHosted)
                continue;
            if (!File.Exists(sourceArchiveFile))
                continue;
            // Archive file name (minus "x" characters) must parse as the run's tick sequence.
            if (!long.TryParse(Path.GetFileNameWithoutExtension(sourceArchiveFile).Replace("x", string.Empty), out long sequence))
                continue;
            ZipFile.ExtractToDirectory(sourceArchiveFile, inProcessDirectory);
            if (fileConnectorConfigurationIncludeSubDirectories && includeSubDirectoriesExtra)
                checkDirectory = string.Concat(checkDirectory, @"\", sequence);
            if (fileConnectorConfigurationIncludeSubDirectories)
                files = Directory.GetFiles(inProcessDirectory, "*", SearchOption.AllDirectories);
            else
                files = Directory.GetFiles(inProcessDirectory, "*", SearchOption.TopDirectoryOnly);
            // Guard against an unexpectedly large archive.
            if (files.Length > 250)
                throw new Exception("Safety net!");
            // Stamp every extracted file with the time encoded in the sequence ticks.
            foreach (string file in files)
                File.SetLastWriteTime(file, new DateTime(sequence));
            if (!fileConnectorConfigurationIncludeSubDirectories)
            {
                foreach (string file in files)
                    File.Move(file, string.Concat(checkDirectory, @"\", Path.GetFileName(file)));
            }
            else
            {
                // Recreate the extracted directory tree under checkDirectory, then move files.
                string[] directories = Directory.GetDirectories(inProcessDirectory, "*", SearchOption.AllDirectories);
                foreach (string directory in directories)
                    _ = Directory.CreateDirectory(string.Concat(checkDirectory, directory.Substring(inProcessDirectory.Length)));
                foreach (string file in files)
                    File.Move(file, string.Concat(checkDirectory, file.Substring(inProcessDirectory.Length)));
            }
            File.AppendAllLines(traceDummyFile, new string[] { site, monARessource, stateName, State.Ok.ToString() });
            _ = monIn.SendStatus(site, monARessource, stateName, State.Ok);
        }
        catch (Exception exception)
        {
            // Report Critical to MonA and email; the email send itself is best effort.
            string subject = string.Concat("Exception:", _CellInstanceConnectionName);
            string body = string.Concat(exception.Message, Environment.NewLine, Environment.NewLine, exception.StackTrace);
            try
            { _SMTP.SendHighPriorityEmailMessage(subject, body); }
            catch (Exception) { }
            File.AppendAllLines(traceDummyFile, new string[] { site, monARessource, stateName, State.Critical.ToString(), exception.Message, exception.StackTrace });
            _ = monIn.SendStatus(site, monARessource, stateName, State.Critical);
        }
    }
}
|
||||
|
||||
/// <summary>
/// Timer callback for dummy-run mode: during working hours it scans the configured
/// cell source locations for archive files whose names encode a tick sequence,
/// stages a "Dummy_in process" directory for the selected run, and hands the
/// resulting tuples to <c>CallbackIsDummy</c>. Always re-arms the timer at the end.
/// </summary>
/// <param name="state">Timer state; unused.</param>
private void Callback(object state)
{
    // This callback must only ever run on a dummy-configured instance.
    if (!_IsDummy)
        throw new Exception();
    try
    {
        DateTime dateTime = DateTime.Now;
        // Only act Monday-Saturday(?) between 08:00 and 17:59 local time.
        // NOTE(review): Hour > 7 means the window opens at 08:00, not 07:00 — confirm intent.
        bool check = dateTime.Hour > 7 && dateTime.Hour < 18 && dateTime.DayOfWeek != DayOfWeek.Sunday && dateTime.DayOfWeek != DayOfWeek.Saturday;
        if (check)
        {
            int fileCount;
            string[] files;
            string monARessource;
            string checkDirectory;
            string sourceArchiveFile;
            string sourceFileLocation;
            string inProcessDirectory;
            // Per-week trace directory, e.g. <root>\TracesDummy\<cell>\Source\2022___Week_07
            string weekOfYear = _Calendar.GetWeekOfYear(dateTime, CalendarWeekRule.FirstDay, DayOfWeek.Sunday).ToString("00");
            string traceDummyDirectory = string.Concat(Path.GetPathRoot(_TracePath), @"\TracesDummy\", _CellInstanceName, @"\Source\", dateTime.ToString("yyyy"), "___Week_", weekOfYear);
            if (!Directory.Exists(traceDummyDirectory))
                _ = Directory.CreateDirectory(traceDummyDirectory);
            string traceDummyFile = string.Concat(traceDummyDirectory, @"\", dateTime.Ticks, " - ", _CellInstanceName, ".txt");
            // Touch the trace file so it exists even when no candidates are found.
            File.AppendAllText(traceDummyFile, string.Empty);
            List<Tuple<string, string, string, string, int>> tuples = new();
            string progressDirectory = Path.GetFullPath(string.Concat(_FileConnectorConfiguration.SourceFileLocation, @"\_ Progress"));
            // Sanity check: the derived progress path must match the configured one.
            if (progressDirectory != _ProgressPath || !Directory.Exists(progressDirectory))
                throw new Exception("Invalid progress path");
            // Collect one candidate tuple per (cell, filter) pair whose archive file exists.
            foreach (KeyValuePair<string, string> keyValuePair in _CellNames)
            {
                monARessource = keyValuePair.Key;
                // Only cell values that look like relative paths (contain a backslash) participate.
                if (!keyValuePair.Value.Contains('\\'))
                    continue;
                foreach (string sourceFileFilter in _FileConnectorConfiguration.SourceFileFilter.Split('|'))
                {
                    // Resolve where this filter's archive file should live relative to the source location.
                    if (sourceFileFilter.ToLower().StartsWith(keyValuePair.Value.Replace(@"\", string.Empty)))
                        sourceFileLocation = Path.GetFullPath(_FileConnectorConfiguration.SourceFileLocation);
                    else if (_FileConnectorConfiguration.SourceFileLocation.ToLower().EndsWith(keyValuePair.Value))
                        sourceFileLocation = Path.GetFullPath(_FileConnectorConfiguration.SourceFileLocation);
                    else
                        sourceFileLocation = Path.GetFullPath(string.Concat(_FileConnectorConfiguration.SourceFileLocation, @"\", keyValuePair.Value));
                    sourceArchiveFile = Path.GetFullPath(string.Concat(sourceFileLocation, @"\", sourceFileFilter));
                    if (!File.Exists(sourceArchiveFile))
                        continue;
                    if (!_DummyRuns.ContainsKey(monARessource))
                        _DummyRuns.Add(monARessource, new List<long>());
                    tuples.Add(new Tuple<string, string, string, string, int>(monARessource, sourceFileFilter, sourceFileLocation, sourceArchiveFile, 0));
                }
            }
            // Trace every candidate archive file found this pass.
            File.AppendAllLines(traceDummyFile, from l in tuples select l.Item4);
            if (tuples.Any())
            {
                // Round-robin over the candidates across callback invocations.
                _LastDummyRunIndex += 1;
                if (_LastDummyRunIndex >= tuples.Count)
                    _LastDummyRunIndex = 0;
                monARessource = tuples[_LastDummyRunIndex].Item1;
                string sourceFileFilter = tuples[_LastDummyRunIndex].Item2;
                sourceFileLocation = tuples[_LastDummyRunIndex].Item3;
                sourceArchiveFile = tuples[_LastDummyRunIndex].Item4;
                //fileCount = tuples[_LastDummyRunIndex].Item5;
                // Reuse the list for the (in-process, source) tuple pair handed to CallbackIsDummy.
                tuples.Clear();
                // The archive file name (minus extension, with any 'x' stripped) is the tick sequence.
                if (long.TryParse(Path.GetFileNameWithoutExtension(sourceArchiveFile).Replace("x", string.Empty), out long sequence))
                {
                    if (!_DummyRuns[monARessource].Contains(sequence))
                        _DummyRuns[monARessource].Add(sequence);
                    inProcessDirectory = string.Concat(progressDirectory, @"\Dummy_in process\", sequence);
                    checkDirectory = inProcessDirectory;
                    if (!Directory.Exists(checkDirectory))
                        _ = Directory.CreateDirectory(checkDirectory);
                    files = Directory.GetFiles(checkDirectory, "*", SearchOption.AllDirectories);
                    fileCount = files.Length;
                    // Clear leftovers from a previous run; deletion is best-effort.
                    if (files.Any())
                    {
                        if (files.Length > 250)
                            throw new Exception("Safety net!");
                        try
                        {
                            foreach (string file in files)
                                File.Delete(file);
                        }
                        catch (Exception) { }
                    }
                    tuples.Add(new Tuple<string, string, string, string, int>(monARessource, sourceArchiveFile, inProcessDirectory, checkDirectory, fileCount));
                    // Second tuple: count of sequence-matching files already in the source location.
                    checkDirectory = sourceFileLocation;
                    files = Directory.GetFiles(checkDirectory, string.Concat("*", sequence, "*"), SearchOption.TopDirectoryOnly);
                    fileCount = files.Length;
                    tuples.Add(new Tuple<string, string, string, string, int>(monARessource, sourceArchiveFile, inProcessDirectory, checkDirectory, fileCount));
                }
            }
            if (tuples.Any())
                //CallbackIsDummy(traceDummyFile, tuples, FileConnectorConfiguration.IncludeSubDirectories.Value, includeSubDirectoriesExtra: false);
                CallbackIsDummy(traceDummyFile, tuples, fileConnectorConfigurationIncludeSubDirectories: true, includeSubDirectoriesExtra: true);
        }
    }
    catch (Exception exception)
    {
        // Report the failure by high-priority mail; the mail itself is best-effort.
        string subject = string.Concat("Exception:", _CellInstanceConnectionName);
        string body = string.Concat(exception.Message, Environment.NewLine, Environment.NewLine, exception.StackTrace);
        try
        { _SMTP.SendHighPriorityEmailMessage(subject, body); }
        catch (Exception) { }
    }
    // Re-arm the one-shot timer for the next scan interval, even after a failure above.
    try
    {
        TimeSpan timeSpan = new(DateTime.Now.AddSeconds(_FileConnectorConfiguration.FileScanningIntervalInSeconds.Value).Ticks - DateTime.Now.Ticks);
        _ = _Timer.Change((long)timeSpan.TotalMilliseconds, Timeout.Infinite);
    }
    catch (Exception exception)
    {
        string subject = string.Concat("Exception:", _CellInstanceConnectionName);
        string body = string.Concat(exception.Message, Environment.NewLine, Environment.NewLine, exception.StackTrace);
        try
        { _SMTP.SendHighPriorityEmailMessage(subject, body); }
        catch (Exception) { }
    }
}
|
||||
|
||||
}
|
16
Adaptation/FileHandlers/MET08DDUPSP1TBI/Hyphen.cs
Normal file
16
Adaptation/FileHandlers/MET08DDUPSP1TBI/Hyphen.cs
Normal file
@ -0,0 +1,16 @@
|
||||
namespace Adaptation.FileHandlers.MET08DDUPSP1TBI;
|
||||
|
||||
/// <summary>
/// Routing destinations for MET08DDUPSP1TBI results. Each member names one
/// downstream system a processed run may be forwarded to; existing trailing
/// comments point at the call that handles that destination.
/// NOTE(review): member order appears significant to persisted/configured values — do not reorder.
/// </summary>
public enum Hyphen
{
    IsXToOpenInsightMetrologyViewer, //MetrologyWS.SendData(logic, string.Concat("http://", serverName, "/api/inbound/TencorSP1"), headerAttachments, detailAttachments);
    IsXToIQSSi, //bool WriteFileSPC(Dictionary
    IsXToOpenInsight, //bool WriteFileOpenInsight(Dictionary
    IsXToOpenInsightMetrologyViewerAttachments, //Site-Two
    IsXToAPC,
    IsXToSPaCe,
    IsXToArchive,
    IsArchive,
    IsDummy,
    IsTIBCO,
    IsNaEDA
}
|
158
Adaptation/FileHandlers/MET08DDUPSP1TBI/ProcessData.cs
Normal file
158
Adaptation/FileHandlers/MET08DDUPSP1TBI/ProcessData.cs
Normal file
@ -0,0 +1,158 @@
|
||||
using Adaptation.Shared;
|
||||
using Adaptation.Shared.Metrology;
|
||||
using Adaptation.Shared.Properties;
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Diagnostics;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Text;
|
||||
using System.Text.Json;
|
||||
|
||||
namespace Adaptation.FileHandlers.MET08DDUPSP1TBI;
|
||||
|
||||
/// <summary>
/// MET08DDUPSP1TBI helpers: builds the tab-delimited export line for a run,
/// converts Tencor PRN wafer maps to PDF via GhostPCL, and posts header/detail
/// attachments to the OpenInsight Metrology Viewer API.
/// </summary>
public class ProcessData
{

    /// <summary>
    /// Per-destination trace directory patterns keyed by <see cref="Hyphen"/>;
    /// Item1 is the expected cadence (hours-ish units; negative presumably means disabled
    /// or reversed — TODO confirm against the consumer of this table).
    /// </summary>
    internal static List<Tuple<int, Enum, string>> HyphenTuples => new()
    {
        new Tuple<int, Enum, string>(0, Hyphen.IsNaEDA, @"\EC_EDA\Staging\Traces\~\Source"),
        new Tuple<int, Enum, string>(15, Hyphen.IsXToOpenInsightMetrologyViewer, @"\EC_EAFLog\TracesMES\~\Source"),
        new Tuple<int, Enum, string>(36, Hyphen.IsXToIQSSi, @"\EC_SPC_Si\Traces\~\PollPath"),
        new Tuple<int, Enum, string>(36, Hyphen.IsXToOpenInsight, @"\\messa01ec.ec.local\APPS\Metrology\~\Source"),
        new Tuple<int, Enum, string>(36, Hyphen.IsXToOpenInsightMetrologyViewerAttachments, @"\EC_Characterization_Si\In Process\~\Source"),
        new Tuple<int, Enum, string>(360, Hyphen.IsXToAPC, @"\EC_APC\Staging\Traces\~\PollPath"),
        new Tuple<int, Enum, string>(-36, Hyphen.IsXToSPaCe, @"\EC_SPC_Si\Traces\~\Source"),
        new Tuple<int, Enum, string>(180, Hyphen.IsXToArchive, @"\EC_EAFLog\TracesArchive\~\Source"),
        new Tuple<int, Enum, string>(36, Hyphen.IsArchive, @"\EC_Characterization_Si\Processed")
        //new Tuple<int, Enum, string>("IsDummy"
    };

    /// <summary>
    /// Builds the fixed 44-column tab-delimited export line from the first description
    /// of a run. Column positions are contractual (consumed by a downstream import) —
    /// do not reorder or remove placeholders.
    /// </summary>
    /// <param name="fileRead">Unused; kept for signature parity with sibling handlers.</param>
    /// <param name="logistics">Supplies the MES entity for column 043.</param>
    /// <param name="descriptions">Run descriptions; only element 0 is read.</param>
    /// <returns>The export line paired with the run date.</returns>
    internal static Tuple<string, string> GetLines(IFileRead fileRead, Logistics logistics, List<txt.Description> descriptions)
    {
        StringBuilder result = new();
        // Empty guards intentionally suppress unused-parameter warnings without changing behavior.
        if (fileRead is null)
        { }
        if (logistics is null)
        { }
        if (descriptions is null)
        { }
        char del = '\t';
        txt.Description x = descriptions[0];
        _ = result.Append(x.DcnLpdMin).Append(del). // 001 -
            Append(x.DcnLpdMax).Append(del). // 002 -
            Append(x.DcnLpdMean).Append(del). // 003 - DCN LPD
            Append(x.DcnAreaCountMin).Append(del). // 004 -
            Append(x.DcnAreaCountMax).Append(del). // 005 -
            Append(x.DcnAreaCountMean).Append(del).// 006 - DCN Area
            Append(x.DcnAreaMin).Append(del). // 007 -
            Append(x.DcnAreaMax).Append(del). // 008 -
            Append(x.Date).Append(del). // 009 -
            Append(x.DcnHazeAvgMean).Append(del). // 010 - Haze Average
            Append(string.Empty).Append(del). // 011 -
            Append(string.Empty).Append(del). // 012 -
            Append(string.Empty).Append(del). // 013 -
            Append(string.Empty).Append(del). // 014 -
            Append(string.Empty).Append(del). // 015 -
            Append(string.Empty).Append(del). // 016 -
            Append(string.Empty).Append(del). // 017 -
            Append(string.Empty).Append(del). // 018 -
            Append(string.Empty).Append(del). // 019 -
            Append(string.Empty).Append(del). // 020 -
            Append(string.Empty).Append(del). // 021 -
            Append(string.Empty).Append(del). // 022 -
            Append(string.Empty).Append(del). // 023 -
            Append(string.Empty).Append(del). // 024 -
            Append(string.Empty).Append(del). // 025 -
            Append(string.Empty).Append(del). // 026 -
            Append(string.Empty).Append(del). // 027 -
            Append(x.RDS).Append(del). // 028 - Lot
            Append(x.Reactor).Append(del). // 029 - Process
            Append(x.Recipe).Append(del). // 030 - Part
            Append(x.DcnScrMean).Append(del). // 031 - Scratch Count
            Append(string.Empty).Append(del). // 032 -
            Append(string.Empty).Append(del). // 033 -
            Append(string.Empty).Append(del). // 034 -
            Append(x.DcnMicroScrMean).Append(del). // 035 - Scratch Length
            Append(string.Empty).Append(del). // 036 -
            Append(string.Empty).Append(del). // 037 -
            Append(string.Empty).Append(del). // 038 -
            Append(x.DcnAllMean).Append(del). // 039 - Average Sum of Defects
            Append(x.DcnAllMax).Append(del). // 040 - Max Sum of defects
            Append(x.DcnAllMin).Append(del). // 041 - Min Sum of Defects
            Append(string.Empty).Append(del). // 042 -
            Append(logistics.MesEntity).Append(del). // 043 -
            Append(x.DcnAreaMean).Append(del). // 044 - DCN MM2
            AppendLine();
        return new Tuple<string, string>(result.ToString(), x.Date);
    }

    /// <summary>
    /// Convert the raw data file to parsable file format - in this case from PRN to PDF
    /// </summary>
    /// <param name="ghostPCLFileName">path to the GhostPCL executable</param>
    /// <param name="sourceFile">source file to be converted to PDF</param>
    /// <returns>path of the PDF alongside the source file</returns>
    private static string ConvertSourceFileToPdf(string ghostPCLFileName, string sourceFile)
    {
        string result = Path.ChangeExtension(sourceFile, ".pdf");
        if (!File.Exists(result))
        {
            //string arguments = string.Concat("-i \"", sourceFile, "\" -o \"", result, "\"");
            string arguments = string.Concat("-dSAFER -dBATCH -dNOPAUSE -sOutputFile=\"", result, "\" -sDEVICE=pdfwrite \"", sourceFile, "\"");
            //Process process = Process.Start(lincPDFCFileName, arguments);
            // FIX: Process is IDisposable; the original leaked the handle. The using
            // declaration disposes it when this block exits (after the existence check).
            using Process process = Process.Start(ghostPCLFileName, arguments);
            // Best-effort wait; the File.Exists check below is the real success signal.
            _ = process.WaitForExit(30000);
            if (!File.Exists(result))
                throw new Exception("PDF file wasn't created");
        }
        return result;
    }

    /// <summary>
    /// Posts the run's summary text file (header attachment) and one PDF wafer map per
    /// description (data attachments) to the Metrology Viewer, keyed by the HeaderID
    /// previously memorized in WS.Results.json.
    /// </summary>
    /// <exception cref="Exception">On any count mismatch or missing memory file.</exception>
    internal static void PostOpenInsightMetrologyViewerAttachments(IFileRead fileRead, Logistics logistics, string openInsightMetrologyViewerAPI, string ghostPCLFileName, DateTime dateTime, string logisticsSequenceMemoryDirectory, List<txt.Description> descriptions, string matchDirectory)
    {
        // Empty guards intentionally suppress unused-parameter warnings without changing behavior.
        if (fileRead is null)
        { }
        if (dateTime == DateTime.MinValue)
        { }
        if (logisticsSequenceMemoryDirectory is null)
        { }
        if (descriptions is null)
        { }
        if (matchDirectory is null)
        { }
        // Exactly one summary *.txt is expected in the match directory.
        string[] summaryFiles = Directory.GetFiles(matchDirectory, "*.txt", SearchOption.TopDirectoryOnly);
        if (summaryFiles.Length != 1)
            throw new Exception("Invalid summary file count!");
        // The HeaderID was persisted by an earlier step; without it we cannot attach.
        string wsResultsMemoryFile = string.Concat(logisticsSequenceMemoryDirectory, @"\", nameof(WS.Results), ".json");
        if (!File.Exists(wsResultsMemoryFile))
            throw new Exception(string.Concat("Memory file <", wsResultsMemoryFile, "> doesn't exist!"));
        string json = File.ReadAllText(wsResultsMemoryFile);
        WS.Results metrologyWSRequest = JsonSerializer.Deserialize<WS.Results>(json);
        long wsResultsHeaderID = metrologyWSRequest.HeaderID;
        // One WaferMap*.prn per description, converted (or already converted) to PDF.
        string[] prnFiles = Directory.GetFiles(matchDirectory, "WaferMap*.prn", SearchOption.TopDirectoryOnly);
        if (prnFiles.Length == 0 || prnFiles.Length != descriptions.Count)
            throw new Exception("Invalid WaferMap*.prn file count!");
        List<string> pdfFiles = new();
        foreach (string prnFile in prnFiles.OrderBy(l => l))
            pdfFiles.Add(ConvertSourceFileToPdf(ghostPCLFileName, prnFile));
        if (pdfFiles.Count == 0 || pdfFiles.Count != descriptions.Count)
            throw new Exception("Invalid *.pdf file count!");
        List<WS.Attachment> dataAttachments = new();
        List<WS.Attachment> headerAttachments = new()
        { new WS.Attachment(descriptions[0].HeaderUniqueId, "Data.txt", summaryFiles[0]) };
        // Pair each description with its PDF; counts were verified equal above, the
        // min() here is defensive only.
        int count;
        if (pdfFiles.Count < descriptions.Count)
            count = pdfFiles.Count;
        else
            count = descriptions.Count;
        for (int i = 0; i < count; i++)
        {
            if (!string.IsNullOrEmpty(pdfFiles[i]))
                dataAttachments.Add(new WS.Attachment(descriptions[i].UniqueId, "Image.pdf", pdfFiles[i]));
        }
        if (dataAttachments.Count == 0 || dataAttachments.Count != descriptions.Count)
            throw new Exception("Invalid attachment count!");
        WS.AttachFiles(openInsightMetrologyViewerAPI, wsResultsHeaderID, headerAttachments, dataAttachments);
    }

}
|
608
Adaptation/FileHandlers/MET08DDUPSP1TBI/WSRequest.cs
Normal file
608
Adaptation/FileHandlers/MET08DDUPSP1TBI/WSRequest.cs
Normal file
@ -0,0 +1,608 @@
|
||||
using Adaptation.Shared;
|
||||
using Adaptation.Shared.Properties;
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
|
||||
namespace Adaptation.FileHandlers.MET08DDUPSP1TBI;
|
||||
|
||||
public class WSRequest
|
||||
{
|
||||
public bool SentToMetrology { get; set; }
|
||||
public bool SentToSPC { get; set; }
|
||||
//
|
||||
|
||||
public string CellName { get; set; }
|
||||
public string Date { get; set; }
|
||||
public string LotID { get; set; }
|
||||
public string Operator { get; set; }
|
||||
public string PSN { get; set; }
|
||||
public string RDS { get; set; }
|
||||
public string Reactor { get; set; }
|
||||
public string Recipe { get; set; }
|
||||
public string Session { get; set; }
|
||||
public string UniqueID { get; set; }
|
||||
public string DcnAllMax { get; set; }
|
||||
public string DcnAllMean { get; set; }
|
||||
public string DcnAllMin { get; set; }
|
||||
public string DcnAllStDev { get; set; }
|
||||
public string DcnAreaCntMax { get; set; }
|
||||
public string DcnAreaCntMean { get; set; }
|
||||
public string DcnAreaCntMin { get; set; }
|
||||
public string DcnAreaCntStDev { get; set; }
|
||||
public string DcnAreaMax { get; set; }
|
||||
public string DcnAreaMean { get; set; }
|
||||
public string DcnAreaMin { get; set; }
|
||||
public string DcnAreaStDev { get; set; }
|
||||
public string DcnBin1Max { get; set; }
|
||||
public string DcnBin1Mean { get; set; }
|
||||
public string DcnBin1Min { get; set; }
|
||||
public string DcnBin1StDev { get; set; }
|
||||
public string DcnBin2Max { get; set; }
|
||||
public string DcnBin2Mean { get; set; }
|
||||
public string DcnBin2Min { get; set; }
|
||||
public string DcnBin2StDev { get; set; }
|
||||
public string DcnBin3Max { get; set; }
|
||||
public string DcnBin3Mean { get; set; }
|
||||
public string DcnBin3Min { get; set; }
|
||||
public string DcnBin3StDev { get; set; }
|
||||
public string DcnBin4Max { get; set; }
|
||||
public string DcnBin4Mean { get; set; }
|
||||
public string DcnBin4Min { get; set; }
|
||||
public string DcnBin4StDev { get; set; }
|
||||
public string DcnBin5Max { get; set; }
|
||||
public string DcnBin5Mean { get; set; }
|
||||
public string DcnBin5Min { get; set; }
|
||||
public string DcnBin5StDev { get; set; }
|
||||
public string DcnBin6Max { get; set; }
|
||||
public string DcnBin6Mean { get; set; }
|
||||
public string DcnBin6Min { get; set; }
|
||||
public string DcnBin6StDev { get; set; }
|
||||
public string DcnBin7Max { get; set; }
|
||||
public string DcnBin7Mean { get; set; }
|
||||
public string DcnBin7Min { get; set; }
|
||||
public string DcnBin7StDev { get; set; }
|
||||
public string DcnBin8Max { get; set; }
|
||||
public string DcnBin8Mean { get; set; }
|
||||
public string DcnBin8Min { get; set; }
|
||||
public string DcnBin8StDev { get; set; }
|
||||
public string DcnHazeAvgMax { get; set; }
|
||||
public string DcnHazeAvgMean { get; set; }
|
||||
public string DcnHazeAvgMin { get; set; }
|
||||
public string DcnHazeAvgStDev { get; set; }
|
||||
public string DcnHazeMedMax { get; set; }
|
||||
public string DcnHazeMedMean { get; set; }
|
||||
public string DcnHazeMedMin { get; set; }
|
||||
public string DcnHazeMedStDev { get; set; }
|
||||
public string DcnHazeStDevMax { get; set; }
|
||||
public string DcnHazeStDevMean { get; set; }
|
||||
public string DcnHazeStDevMin { get; set; }
|
||||
public string DcnHazeStDevStDev { get; set; }
|
||||
public string DcnLpdESMax { get; set; }
|
||||
public string DcnLpdESMean { get; set; }
|
||||
public string DcnLpdESMin { get; set; }
|
||||
public string DcnLpdESStDev { get; set; }
|
||||
public string DcnLpdMax { get; set; }
|
||||
public string DcnLpdMean { get; set; }
|
||||
public string DcnLpdMin { get; set; }
|
||||
public string DcnLpdNMax { get; set; }
|
||||
public string DcnLpdNMean { get; set; }
|
||||
public string DcnLpdNMin { get; set; }
|
||||
public string DcnLpdNStDev { get; set; }
|
||||
public string DcnLpdStDev { get; set; }
|
||||
public string DcnMicroScrMax { get; set; }
|
||||
public string DcnMicroScrMean { get; set; }
|
||||
public string DcnMicroScrMin { get; set; }
|
||||
public string DcnMicroScrStDev { get; set; }
|
||||
public string DcnScrMax { get; set; }
|
||||
public string DcnScrMean { get; set; }
|
||||
public string DcnScrMin { get; set; }
|
||||
public string DcnScrStDev { get; set; }
|
||||
public string DcnSlipMax { get; set; }
|
||||
public string DcnSlipMean { get; set; }
|
||||
public string DcnSlipMin { get; set; }
|
||||
public string DcnSlipStDev { get; set; }
|
||||
public string DnnAllMax { get; set; }
|
||||
public string DnnAllMean { get; set; }
|
||||
public string DnnAllMin { get; set; }
|
||||
public string DnnAllStDev { get; set; }
|
||||
public string DnnAreaCntMax { get; set; }
|
||||
public string DnnAreaCntMean { get; set; }
|
||||
public string DnnAreaCntMin { get; set; }
|
||||
public string DnnAreaCntStDev { get; set; }
|
||||
public string DnnAreaMax { get; set; }
|
||||
public string DnnAreaMean { get; set; }
|
||||
public string DnnAreaMin { get; set; }
|
||||
public string DnnAreaStDev { get; set; }
|
||||
public string DnnBin1Max { get; set; }
|
||||
public string DnnBin1Mean { get; set; }
|
||||
public string DnnBin1Min { get; set; }
|
||||
public string DnnBin1StDev { get; set; }
|
||||
public string DnnBin2Max { get; set; }
|
||||
public string DnnBin2Mean { get; set; }
|
||||
public string DnnBin2Min { get; set; }
|
||||
public string DnnBin2StDev { get; set; }
|
||||
public string DnnBin3Max { get; set; }
|
||||
public string DnnBin3Mean { get; set; }
|
||||
public string DnnBin3Min { get; set; }
|
||||
public string DnnBin3StDev { get; set; }
|
||||
public string DnnBin4Max { get; set; }
|
||||
public string DnnBin4Mean { get; set; }
|
||||
public string DnnBin4Min { get; set; }
|
||||
public string DnnBin4StDev { get; set; }
|
||||
public string DnnBin5Max { get; set; }
|
||||
public string DnnBin5Mean { get; set; }
|
||||
public string DnnBin5Min { get; set; }
|
||||
public string DnnBin5StDev { get; set; }
|
||||
public string DnnBin6Max { get; set; }
|
||||
public string DnnBin6Mean { get; set; }
|
||||
public string DnnBin6Min { get; set; }
|
||||
public string DnnBin6StDev { get; set; }
|
||||
public string DnnBin7Max { get; set; }
|
||||
public string DnnBin7Mean { get; set; }
|
||||
public string DnnBin7Min { get; set; }
|
||||
public string DnnBin7StDev { get; set; }
|
||||
public string DnnBin8Max { get; set; }
|
||||
public string DnnBin8Mean { get; set; }
|
||||
public string DnnBin8Min { get; set; }
|
||||
public string DnnBin8StDev { get; set; }
|
||||
public string DnnHazeAvgMax { get; set; }
|
||||
public string DnnHazeAvgMean { get; set; }
|
||||
public string DnnHazeAvgMin { get; set; }
|
||||
public string DnnHazeAvgStDev { get; set; }
|
||||
public string DnnHazeMedMax { get; set; }
|
||||
public string DnnHazeMedMean { get; set; }
|
||||
public string DnnHazeMedMin { get; set; }
|
||||
public string DnnHazeMedStDev { get; set; }
|
||||
public string DnnHazeStDevMax { get; set; }
|
||||
public string DnnHazeStDevMean { get; set; }
|
||||
public string DnnHazeStDevMin { get; set; }
|
||||
public string DnnHazeStDevStDev { get; set; }
|
||||
public string DnnLpdESMax { get; set; }
|
||||
public string DnnLpdESMean { get; set; }
|
||||
public string DnnLpdESMin { get; set; }
|
||||
public string DnnLpdESStDev { get; set; }
|
||||
public string DnnLpdMax { get; set; }
|
||||
public string DnnLpdMean { get; set; }
|
||||
public string DnnLpdMin { get; set; }
|
||||
public string DnnLpdNMax { get; set; }
|
||||
public string DnnLpdNMean { get; set; }
|
||||
public string DnnLpdNMin { get; set; }
|
||||
public string DnnLpdNStDev { get; set; }
|
||||
public string DnnLpdStDev { get; set; }
|
||||
public string DnnMicroScrMax { get; set; }
|
||||
public string DnnMicroScrMean { get; set; }
|
||||
public string DnnMicroScrMin { get; set; }
|
||||
public string DnnMicroScrStDev { get; set; }
|
||||
public string DnnScrMax { get; set; }
|
||||
public string DnnScrMean { get; set; }
|
||||
public string DnnScrMin { get; set; }
|
||||
public string DnnScrStDev { get; set; }
|
||||
public string DnnSlipMax { get; set; }
|
||||
public string DnnSlipMean { get; set; }
|
||||
public string DnnSlipMin { get; set; }
|
||||
public string DnnSlipStDev { get; set; }
|
||||
public string DwnAllMax { get; set; }
|
||||
public string DwnAllMean { get; set; }
|
||||
public string DwnAllMin { get; set; }
|
||||
public string DwnAllStDev { get; set; }
|
||||
public string DwnAreaCntMax { get; set; }
|
||||
public string DwnAreaCntMean { get; set; }
|
||||
public string DwnAreaCntMin { get; set; }
|
||||
public string DwnAreaCntStDev { get; set; }
|
||||
public string DwnAreaMax { get; set; }
|
||||
public string DwnAreaMean { get; set; }
|
||||
public string DwnAreaMin { get; set; }
|
||||
public string DwnAreaStDev { get; set; }
|
||||
public string DwnBin1Max { get; set; }
|
||||
public string DwnBin1Mean { get; set; }
|
||||
public string DwnBin1Min { get; set; }
|
||||
public string DwnBin1StDev { get; set; }
|
||||
public string DwnBin2Max { get; set; }
|
||||
public string DwnBin2Mean { get; set; }
|
||||
public string DwnBin2Min { get; set; }
|
||||
public string DwnBin2StDev { get; set; }
|
||||
public string DwnBin3Max { get; set; }
|
||||
public string DwnBin3Mean { get; set; }
|
||||
public string DwnBin3Min { get; set; }
|
||||
public string DwnBin3StDev { get; set; }
|
||||
public string DwnBin4Max { get; set; }
|
||||
public string DwnBin4Mean { get; set; }
|
||||
public string DwnBin4Min { get; set; }
|
||||
public string DwnBin4StDev { get; set; }
|
||||
public string DwnBin5Max { get; set; }
|
||||
public string DwnBin5Mean { get; set; }
|
||||
public string DwnBin5Min { get; set; }
|
||||
public string DwnBin5StDev { get; set; }
|
||||
public string DwnBin6Max { get; set; }
|
||||
public string DwnBin6Mean { get; set; }
|
||||
public string DwnBin6Min { get; set; }
|
||||
public string DwnBin6StDev { get; set; }
|
||||
public string DwnBin7Max { get; set; }
|
||||
public string DwnBin7Mean { get; set; }
|
||||
public string DwnBin7Min { get; set; }
|
||||
public string DwnBin7StDev { get; set; }
|
||||
public string DwnBin8Max { get; set; }
|
||||
public string DwnBin8Mean { get; set; }
|
||||
public string DwnBin8Min { get; set; }
|
||||
public string DwnBin8StDev { get; set; }
|
||||
public string DwnHazeAvgMax { get; set; }
|
||||
public string DwnHazeAvgMean { get; set; }
|
||||
public string DwnHazeAvgMin { get; set; }
|
||||
public string DwnHazeAvgStDev { get; set; }
|
||||
public string DwnHazeMedMax { get; set; }
|
||||
public string DwnHazeMedMean { get; set; }
|
||||
public string DwnHazeMedMin { get; set; }
|
||||
public string DwnHazeMedStDev { get; set; }
|
||||
public string DwnHazeStDevMax { get; set; }
|
||||
public string DwnHazeStDevMean { get; set; }
|
||||
public string DwnHazeStDevMin { get; set; }
|
||||
public string DwnHazeStDevStDev { get; set; }
|
||||
public string DwnLpdESMax { get; set; }
|
||||
public string DwnLpdESMean { get; set; }
|
||||
public string DwnLpdESMin { get; set; }
|
||||
public string DwnLpdESStDev { get; set; }
|
||||
public string DwnLpdMax { get; set; }
|
||||
public string DwnLpdMean { get; set; }
|
||||
public string DwnLpdMin { get; set; }
|
||||
public string DwnLpdNMax { get; set; }
|
||||
public string DwnLpdNMean { get; set; }
|
||||
public string DwnLpdNMin { get; set; }
|
||||
public string DwnLpdNStDev { get; set; }
|
||||
public string DwnLpdStDev { get; set; }
|
||||
public string DwnMicroScrMax { get; set; }
|
||||
public string DwnMicroScrMean { get; set; }
|
||||
public string DwnMicroScrMin { get; set; }
|
||||
public string DwnMicroScrStDev { get; set; }
|
||||
public string DwnScrMax { get; set; }
|
||||
public string DwnScrMean { get; set; }
|
||||
public string DwnScrMin { get; set; }
|
||||
public string DwnScrStDev { get; set; }
|
||||
public string DwnSlipMax { get; set; }
|
||||
public string DwnSlipMean { get; set; }
|
||||
public string DwnSlipMin { get; set; }
|
||||
public string DwnSlipStDev { get; set; }
|
||||
public List<txt.Detail> Details { get; protected set; }
|
||||
|
||||
[Obsolete("For json")] public WSRequest() { }
|
||||
|
||||
/// <summary>
/// Builds the web-service request from parsed run data: header fields are copied
/// from the first description, then one <see cref="txt.Detail"/> row is added per
/// description. Note the systematic name translation between source and request
/// fields: Median→Med, Count→Cnt, StdDev→StDev.
/// </summary>
/// <param name="fileRead">Unused here; referenced only by the empty guard below.</param>
/// <param name="logistics">Supplies CellName (MesEntity) and the final Date.</param>
/// <param name="descriptions">Parsed rows; element 0 supplies the header. Assumes non-empty — TODO confirm callers guarantee this.</param>
internal WSRequest(IFileRead fileRead, Logistics logistics, List<txt.Description> descriptions)
{
    // Empty guard appears to exist only to reference the otherwise-unused parameter.
    if (fileRead is null)
    { }
    Details = new List<txt.Detail>();
    CellName = logistics.MesEntity;
    txt.Description x = descriptions[0];
    //Header
    {
        UniqueID = x.UniqueId;
        Date = x.Date;
        Reactor = x.Reactor;
        LotID = x.Lot;
        Session = x.Session;
        // Dcn (dark-compact-narrow?) summary stats — TODO confirm channel naming.
        DcnAllMin = x.DcnAllMin;
        DcnAllMax = x.DcnAllMax;
        DcnAllMean = x.DcnAllMean;
        DcnAllStDev = x.DcnAllStdDev;
        DcnLpdMin = x.DcnLpdMin;
        DcnLpdMax = x.DcnLpdMax;
        DcnLpdMean = x.DcnLpdMean;
        DcnLpdStDev = x.DcnLpdStdDev;
        DcnLpdNMin = x.DcnLpdNMin;
        DcnLpdNMax = x.DcnLpdNMax;
        DcnLpdNMean = x.DcnLpdNMean;
        DcnLpdNStDev = x.DcnLpdNStdDev;
        DcnLpdESMin = x.DcnLpdESMin;
        DcnLpdESMax = x.DcnLpdESMax;
        DcnLpdESMean = x.DcnLpdESMean;
        DcnLpdESStDev = x.DcnLpdESStdDev;
        DcnMicroScrMin = x.DcnMicroScrMin;
        DcnMicroScrMax = x.DcnMicroScrMax;
        DcnMicroScrMean = x.DcnMicroScrMean;
        DcnMicroScrStDev = x.DcnMicroScrStdDev;
        DcnScrMin = x.DcnScrMin;
        DcnScrMax = x.DcnScrMax;
        DcnScrMean = x.DcnScrMean;
        DcnScrStDev = x.DcnScrStdDev;
        DcnSlipMin = x.DcnSlipMin;
        DcnSlipMax = x.DcnSlipMax;
        DcnSlipMean = x.DcnSlipMean;
        DcnSlipStDev = x.DcnSlipStdDev;
        DcnAreaCntMin = x.DcnAreaCountMin;
        DcnAreaCntMax = x.DcnAreaCountMax;
        DcnAreaCntMean = x.DcnAreaCountMean;
        DcnAreaCntStDev = x.DcnAreaCountStdDev;
        DcnAreaMin = x.DcnAreaMin;
        DcnAreaMax = x.DcnAreaMax;
        DcnAreaMean = x.DcnAreaMean;
        DcnAreaStDev = x.DcnAreaStdDev;
        DcnHazeAvgMin = x.DcnHazeAvgMin;
        DcnHazeAvgMax = x.DcnHazeAvgMax;
        DcnHazeAvgMean = x.DcnHazeAvgMean;
        DcnHazeAvgStDev = x.DcnHazeAvgStdDev;
        DcnHazeMedMin = x.DcnHazeMedianMin;
        DcnHazeMedMax = x.DcnHazeMedianMax;
        DcnHazeMedMean = x.DcnHazeMedianMean;
        DcnHazeMedStDev = x.DcnHazeMedianStdDev;
        DcnHazeStDevMin = x.DcnHazeStdDevMin;
        DcnHazeStDevMax = x.DcnHazeStdDevMax;
        DcnHazeStDevMean = x.DcnHazeStdDevMean;
        DcnHazeStDevStDev = x.DcnHazeStdDevStdDev;
        DcnBin1Min = x.DcnBin1Min;
        DcnBin1Max = x.DcnBin1Max;
        DcnBin1Mean = x.DcnBin1Mean;
        DcnBin1StDev = x.DcnBin1StdDev;
        DcnBin2Min = x.DcnBin2Min;
        DcnBin2Max = x.DcnBin2Max;
        DcnBin2Mean = x.DcnBin2Mean;
        DcnBin2StDev = x.DcnBin2StdDev;
        DcnBin3Min = x.DcnBin3Min;
        DcnBin3Max = x.DcnBin3Max;
        DcnBin3Mean = x.DcnBin3Mean;
        DcnBin3StDev = x.DcnBin3StdDev;
        DcnBin4Min = x.DcnBin4Min;
        DcnBin4Max = x.DcnBin4Max;
        DcnBin4Mean = x.DcnBin4Mean;
        DcnBin4StDev = x.DcnBin4StdDev;
        DcnBin5Min = x.DcnBin5Min;
        DcnBin5Max = x.DcnBin5Max;
        DcnBin5Mean = x.DcnBin5Mean;
        DcnBin5StDev = x.DcnBin5StdDev;
        DcnBin6Min = x.DcnBin6Min;
        DcnBin6Max = x.DcnBin6Max;
        DcnBin6Mean = x.DcnBin6Mean;
        DcnBin6StDev = x.DcnBin6StdDev;
        DcnBin7Min = x.DcnBin7Min;
        DcnBin7Max = x.DcnBin7Max;
        DcnBin7Mean = x.DcnBin7Mean;
        DcnBin7StDev = x.DcnBin7StdDev;
        DcnBin8Min = x.DcnBin8Min;
        DcnBin8Max = x.DcnBin8Max;
        DcnBin8Mean = x.DcnBin8Mean;
        DcnBin8StDev = x.DcnBin8StdDev;
        // Dwn summary stats — same shape as the Dcn group above.
        DwnAllMin = x.DwnAllMin;
        DwnAllMax = x.DwnAllMax;
        DwnAllMean = x.DwnAllMean;
        DwnAllStDev = x.DwnAllStdDev;
        DwnLpdMin = x.DwnLpdMin;
        DwnLpdMax = x.DwnLpdMax;
        DwnLpdMean = x.DwnLpdMean;
        DwnLpdStDev = x.DwnLpdStdDev;
        DwnLpdNMin = x.DwnLpdNMin;
        DwnLpdNMax = x.DwnLpdNMax;
        DwnLpdNMean = x.DwnLpdNMean;
        DwnLpdNStDev = x.DwnLpdNStdDev;
        DwnLpdESMin = x.DwnLpdESMin;
        DwnLpdESMax = x.DwnLpdESMax;
        DwnLpdESMean = x.DwnLpdESMean;
        DwnLpdESStDev = x.DwnLpdESStdDev;
        DwnMicroScrMin = x.DwnMicroScrMin;
        DwnMicroScrMax = x.DwnMicroScrMax;
        DwnMicroScrMean = x.DwnMicroScrMean;
        DwnMicroScrStDev = x.DwnMicroScrStdDev;
        DwnScrMin = x.DwnScrMin;
        DwnScrMax = x.DwnScrMax;
        DwnScrMean = x.DwnScrMean;
        DwnScrStDev = x.DwnScrStdDev;
        DwnSlipMin = x.DwnSlipMin;
        DwnSlipMax = x.DwnSlipMax;
        DwnSlipMean = x.DwnSlipMean;
        DwnSlipStDev = x.DwnSlipStdDev;
        DwnAreaCntMin = x.DwnAreaCountMin;
        DwnAreaCntMax = x.DwnAreaCountMax;
        DwnAreaCntMean = x.DwnAreaCountMean;
        DwnAreaCntStDev = x.DwnAreaCountStdDev;
        DwnAreaMin = x.DwnAreaMin;
        DwnAreaMax = x.DwnAreaMax;
        DwnAreaMean = x.DwnAreaMean;
        DwnAreaStDev = x.DwnAreaStdDev;
        DwnHazeAvgMin = x.DwnHazeAvgMin;
        DwnHazeAvgMax = x.DwnHazeAvgMax;
        DwnHazeAvgMean = x.DwnHazeAvgMean;
        DwnHazeAvgStDev = x.DwnHazeAvgStdDev;
        DwnHazeMedMin = x.DwnHazeMedianMin;
        DwnHazeMedMax = x.DwnHazeMedianMax;
        DwnHazeMedMean = x.DwnHazeMedianMean;
        DwnHazeMedStDev = x.DwnHazeMedianStdDev;
        DwnHazeStDevMin = x.DwnHazeStdDevMin;
        DwnHazeStDevMax = x.DwnHazeStdDevMax;
        DwnHazeStDevMean = x.DwnHazeStdDevMean;
        DwnHazeStDevStDev = x.DwnHazeStdDevStdDev;
        DwnBin1Min = x.DwnBin1Min;
        DwnBin1Max = x.DwnBin1Max;
        DwnBin1Mean = x.DwnBin1Mean;
        DwnBin1StDev = x.DwnBin1StdDev;
        DwnBin2Min = x.DwnBin2Min;
        DwnBin2Max = x.DwnBin2Max;
        DwnBin2Mean = x.DwnBin2Mean;
        DwnBin2StDev = x.DwnBin2StdDev;
        DwnBin3Min = x.DwnBin3Min;
        DwnBin3Max = x.DwnBin3Max;
        DwnBin3Mean = x.DwnBin3Mean;
        DwnBin3StDev = x.DwnBin3StdDev;
        DwnBin4Min = x.DwnBin4Min;
        DwnBin4Max = x.DwnBin4Max;
        DwnBin4Mean = x.DwnBin4Mean;
        DwnBin4StDev = x.DwnBin4StdDev;
        DwnBin5Min = x.DwnBin5Min;
        DwnBin5Max = x.DwnBin5Max;
        DwnBin5Mean = x.DwnBin5Mean;
        DwnBin5StDev = x.DwnBin5StdDev;
        DwnBin6Min = x.DwnBin6Min;
        DwnBin6Max = x.DwnBin6Max;
        DwnBin6Mean = x.DwnBin6Mean;
        DwnBin6StDev = x.DwnBin6StdDev;
        DwnBin7Min = x.DwnBin7Min;
        DwnBin7Max = x.DwnBin7Max;
        DwnBin7Mean = x.DwnBin7Mean;
        DwnBin7StDev = x.DwnBin7StdDev;
        DwnBin8Min = x.DwnBin8Min;
        DwnBin8Max = x.DwnBin8Max;
        DwnBin8Mean = x.DwnBin8Mean;
        DwnBin8StDev = x.DwnBin8StdDev;
        // Dnn summary stats — same shape as the two groups above.
        DnnAllMin = x.DnnAllMin;
        DnnAllMax = x.DnnAllMax;
        DnnAllMean = x.DnnAllMean;
        DnnAllStDev = x.DnnAllStdDev;
        DnnLpdMin = x.DnnLpdMin;
        DnnLpdMax = x.DnnLpdMax;
        DnnLpdMean = x.DnnLpdMean;
        DnnLpdStDev = x.DnnLpdStdDev;
        DnnLpdNMin = x.DnnLpdNMin;
        DnnLpdNMax = x.DnnLpdNMax;
        DnnLpdNMean = x.DnnLpdNMean;
        DnnLpdNStDev = x.DnnLpdNStdDev;
        DnnLpdESMin = x.DnnLpdESMin;
        DnnLpdESMax = x.DnnLpdESMax;
        DnnLpdESMean = x.DnnLpdESMean;
        DnnLpdESStDev = x.DnnLpdESStdDev;
        DnnMicroScrMin = x.DnnMicroScrMin;
        DnnMicroScrMax = x.DnnMicroScrMax;
        DnnMicroScrMean = x.DnnMicroScrMean;
        DnnMicroScrStDev = x.DnnMicroScrStdDev;
        DnnScrMin = x.DnnScrMin;
        DnnScrMax = x.DnnScrMax;
        DnnScrMean = x.DnnScrMean;
        DnnScrStDev = x.DnnScrStdDev;
        DnnSlipMin = x.DnnSlipMin;
        DnnSlipMax = x.DnnSlipMax;
        DnnSlipMean = x.DnnSlipMean;
        DnnSlipStDev = x.DnnSlipStdDev;
        DnnAreaCntMin = x.DnnAreaCountMin;
        DnnAreaCntMax = x.DnnAreaCountMax;
        DnnAreaCntMean = x.DnnAreaCountMean;
        DnnAreaCntStDev = x.DnnAreaCountStdDev;
        DnnAreaMin = x.DnnAreaMin;
        DnnAreaMax = x.DnnAreaMax;
        DnnAreaMean = x.DnnAreaMean;
        DnnAreaStDev = x.DnnAreaStdDev;
        DnnHazeAvgMin = x.DnnHazeAvgMin;
        DnnHazeAvgMax = x.DnnHazeAvgMax;
        DnnHazeAvgMean = x.DnnHazeAvgMean;
        DnnHazeAvgStDev = x.DnnHazeAvgStdDev;
        DnnHazeMedMin = x.DnnHazeMedianMin;
        DnnHazeMedMax = x.DnnHazeMedianMax;
        DnnHazeMedMean = x.DnnHazeMedianMean;
        DnnHazeMedStDev = x.DnnHazeMedianStdDev;
        DnnHazeStDevMin = x.DnnHazeStdDevMin;
        DnnHazeStDevMax = x.DnnHazeStdDevMax;
        DnnHazeStDevMean = x.DnnHazeStdDevMean;
        DnnHazeStDevStDev = x.DnnHazeStdDevStdDev;
        DnnBin1Min = x.DnnBin1Min;
        DnnBin1Max = x.DnnBin1Max;
        DnnBin1Mean = x.DnnBin1Mean;
        DnnBin1StDev = x.DnnBin1StdDev;
        DnnBin2Min = x.DnnBin2Min;
        DnnBin2Max = x.DnnBin2Max;
        DnnBin2Mean = x.DnnBin2Mean;
        DnnBin2StDev = x.DnnBin2StdDev;
        DnnBin3Min = x.DnnBin3Min;
        DnnBin3Max = x.DnnBin3Max;
        DnnBin3Mean = x.DnnBin3Mean;
        DnnBin3StDev = x.DnnBin3StdDev;
        DnnBin4Min = x.DnnBin4Min;
        DnnBin4Max = x.DnnBin4Max;
        DnnBin4Mean = x.DnnBin4Mean;
        DnnBin4StDev = x.DnnBin4StdDev;
        DnnBin5Min = x.DnnBin5Min;
        DnnBin5Max = x.DnnBin5Max;
        DnnBin5Mean = x.DnnBin5Mean;
        DnnBin5StDev = x.DnnBin5StdDev;
        DnnBin6Min = x.DnnBin6Min;
        DnnBin6Max = x.DnnBin6Max;
        DnnBin6Mean = x.DnnBin6Mean;
        DnnBin6StDev = x.DnnBin6StdDev;
        DnnBin7Min = x.DnnBin7Min;
        DnnBin7Max = x.DnnBin7Max;
        DnnBin7Mean = x.DnnBin7Mean;
        DnnBin7StDev = x.DnnBin7StdDev;
        DnnBin8Min = x.DnnBin8Min;
        DnnBin8Max = x.DnnBin8Max;
        DnnBin8Mean = x.DnnBin8Mean;
        DnnBin8StDev = x.DnnBin8StdDev;
        RDS = x.RDS;
        PSN = x.PSN;
        Recipe = x.Recipe;
        // Operator field is populated from the Employee column of the source row.
        Operator = x.Employee;
    }
    // One Detail row per description (including descriptions[0], which also fed the header).
    txt.Detail detail;
    foreach (txt.Description description in descriptions)
    {
        detail = new txt.Detail
        {
            Grade = description.Grade,
            HeaderUniqueID = description.HeaderUniqueId,
            Side = description.Side,
            SrcDest = description.SrcDest,
            UniqueID = description.UniqueId,
            WaferID = description.WaferID,
            // Literal placeholder; presumably replaced downstream — TODO confirm.
            Data = "*Data*",
            DcnAll = description.DcnAll,
            DcnArea = description.DcnArea,
            DcnAreaCount = description.DcnAreaCount,
            DcnBin1 = description.DcnBin1,
            DcnBin2 = description.DcnBin2,
            DcnBin3 = description.DcnBin3,
            DcnBin4 = description.DcnBin4,
            DcnBin5 = description.DcnBin5,
            DcnBin6 = description.DcnBin6,
            DcnBin7 = description.DcnBin7,
            DcnBin8 = description.DcnBin8,
            DcnHazeAvg = description.DcnHazeAvg,
            DcnHazeMedian = description.DcnHazeMedian,
            DcnHazeStdDev = description.DcnHazeStdDev,
            DcnLpd = description.DcnLpd,
            DcnLpdES = description.DcnLpdES,
            DcnLpdN = description.DcnLpdN,
            DcnMicroScr = description.DcnMicroScr,
            DcnScr = description.DcnScr,
            DcnSlip = description.DcnSlip,
            DnnAll = description.DnnAll,
            DnnArea = description.DnnArea,
            DnnAreaCount = description.DnnAreaCount,
            DnnBin1 = description.DnnBin1,
            DnnBin2 = description.DnnBin2,
            DnnBin3 = description.DnnBin3,
            DnnBin4 = description.DnnBin4,
            DnnBin5 = description.DnnBin5,
            DnnBin6 = description.DnnBin6,
            DnnBin7 = description.DnnBin7,
            DnnBin8 = description.DnnBin8,
            DnnHazeAvg = description.DnnHazeAvg,
            DnnHazeMedian = description.DnnHazeMedian,
            DnnHazeStdDev = description.DnnHazeStdDev,
            DnnLpd = description.DnnLpd,
            DnnLpdES = description.DnnLpdES,
            DnnLpdN = description.DnnLpdN,
            DnnMicroScr = description.DnnMicroScr,
            DnnScr = description.DnnScr,
            DnnSlip = description.DnnSlip,
            DwnAll = description.DwnAll,
            DwnArea = description.DwnArea,
            DwnAreaCount = description.DwnAreaCount,
            DwnBin1 = description.DwnBin1,
            DwnBin2 = description.DwnBin2,
            DwnBin3 = description.DwnBin3,
            DwnBin4 = description.DwnBin4,
            DwnBin5 = description.DwnBin5,
            DwnBin6 = description.DwnBin6,
            DwnBin7 = description.DwnBin7,
            DwnBin8 = description.DwnBin8,
            DwnHazeAvg = description.DwnHazeAvg,
            DwnHazeMedian = description.DwnHazeMedian,
            DwnHazeStdDev = description.DwnHazeStdDev,
            DwnLpd = description.DwnLpd,
            DwnLpdES = description.DwnLpdES,
            DwnLpdN = description.DwnLpdN,
            DwnMicroScr = description.DwnMicroScr,
            DwnScr = description.DwnScr,
            DwnSlip = description.DwnSlip
        };
        Details.Add(detail);
    }
    // Date from the header row above is overwritten with the logistics sequence time.
    Date = logistics.DateTimeFromSequence.ToString();
    if (UniqueID is null && Details.Any())
        UniqueID = Details[0].HeaderUniqueID;
}
|
||||
|
||||
}
|
150
Adaptation/FileHandlers/TIBCO/FileRead.cs
Normal file
150
Adaptation/FileHandlers/TIBCO/FileRead.cs
Normal file
@ -0,0 +1,150 @@
|
||||
using Adaptation.Eaf.Management.ConfigurationData.CellAutomation;
|
||||
using Adaptation.Ifx.Eaf.EquipmentConnector.File.Configuration;
|
||||
using Adaptation.Shared;
|
||||
using Adaptation.Shared.Duplicator;
|
||||
using Adaptation.Shared.Methods;
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Text.Json;
|
||||
|
||||
namespace Adaptation.FileHandlers.TIBCO;
|
||||
|
||||
/// <summary>
/// Duplicator file handler for the TIBCO connection. The constructor reads
/// "Path.*", "ConnectionString.LSL2SQL" and "TIBCO.*" model-object parameters
/// and, when EAF-hosted, initializes the static <c>Transport.Main</c> bridge.
/// </summary>
public class FileRead : Shared.FileRead, IFileRead
{

    public FileRead(ISMTP smtp, Dictionary<string, string> fileParameter, string cellInstanceName, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList<ModelObjectParameterDefinition> modelObjectParameters, string equipmentDictionaryName, Dictionary<string, List<long>> dummyRuns, bool useCyclicalForDescription, bool isEAFHosted) :
        base(new Description(), false, smtp, fileParameter, cellInstanceName, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, useCyclicalForDescription, isEAFHosted)
    {
        _MinFileLength = 10;
        _NullData = string.Empty;
        _Logistics = new Logistics(this);
        // Fail fast with the connection name if base wiring did not populate state,
        // or if this handler is not configured as a duplicator.
        if (_FileParameter is null)
            throw new Exception(cellInstanceConnectionName);
        if (_ModelObjectParameterDefinitions is null)
            throw new Exception(cellInstanceConnectionName);
        if (!_IsDuplicator)
            throw new Exception(cellInstanceConnectionName);
        // Resolve configuration values by exact key; keys must match the cell-instance model.
        ModelObjectParameterDefinition[] pathParameters = GetProperties(cellInstanceConnectionName, modelObjectParameters, "Path.");
        string oiContextDataPendingPath = GetPropertyValue(cellInstanceConnectionName, pathParameters, "Path.OIContextDataPending");
        string oiContextDataResultsPath = GetPropertyValue(cellInstanceConnectionName, pathParameters, "Path.OIContextDataResults");
        string oiContextDataSearchPath = GetPropertyValue(cellInstanceConnectionName, pathParameters, "Path.OIContextDataSearch");
        string lsl2SQLConnectionString = GetPropertyValue(cellInstanceConnectionName, modelObjectParameters, "ConnectionString.LSL2SQL");
        ModelObjectParameterDefinition[] tibcoParameters = GetProperties(cellInstanceConnectionName, modelObjectParameters, "TIBCO.");
        string tibcoParameterChannel = GetPropertyValue(cellInstanceConnectionName, tibcoParameters, "TIBCO.IFX_CHANNEL");
        string tibcoParameterSubject = GetPropertyValue(cellInstanceConnectionName, tibcoParameters, "TIBCO.IFX_SUBJECT");
        string tibcoParameterSubjectPrefix = GetPropertyValue(cellInstanceConnectionName, tibcoParameters, "TIBCO.IFX_SUBJECT_PREFIX");
        string tibcoParameterConfigurationLocation = GetPropertyValue(cellInstanceConnectionName, tibcoParameters, "TIBCO.IFX_CONFIGURATION_LOCATION");
        string tibcoParameterConfigurationLocationCopy = GetPropertyValue(cellInstanceConnectionName, tibcoParameters, "TIBCO.IFX_CONFIGURATION_LOCATION_LOCAL_COPY");
        if (_IsEAFHosted)
        {
            // Order matters: Initialize must run before Setup so the static fields are populated.
            Transport.Main.Initialize(smtp, cellInstanceName, fileConnectorConfiguration, oiContextDataPendingPath, oiContextDataResultsPath, oiContextDataSearchPath, lsl2SQLConnectionString);
            // A configured source-file location selects the live (sleep + real IFX transport) mode.
            if (!string.IsNullOrEmpty(fileConnectorConfiguration.SourceFileLocation))
                _ = Transport.Main.Setup(useSleep: true, setIfxTransport: true, tibcoParameterChannel, tibcoParameterSubjectPrefix, tibcoParameterConfigurationLocation, tibcoParameterConfigurationLocationCopy, tibcoParameterSubject);
            else
                _ = Transport.Main.Setup(useSleep: false, setIfxTransport: false, tibcoParameterChannel, tibcoParameterSubjectPrefix, tibcoParameterConfigurationLocation, tibcoParameterConfigurationLocationCopy, tibcoParameterSubject);
        }
    }

    // Restores the source file's LastWriteTime from CreationTime before delegating the move;
    // only for successful extractions with a known report path.
    void IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, Exception exception)
    {
        bool isErrorFile = exception is not null;
        if (!isErrorFile && !string.IsNullOrEmpty(_Logistics.ReportFullPath))
        {
            FileInfo fileInfo = new(_Logistics.ReportFullPath);
            if (fileInfo.Exists && fileInfo.LastWriteTime < fileInfo.CreationTime)
                File.SetLastWriteTime(_Logistics.ReportFullPath, fileInfo.CreationTime);
        }
        Move(extractResults, exception);
    }

    // No background thread in this handler; delegate with nulls.
    void IFileRead.WaitForThread() => WaitForThread(thread: null, threadExceptions: null);

    // Thin delegation to the shared Description instance.
    string IFileRead.GetEventDescription()
    {
        string result = _Description.GetEventDescription();
        return result;
    }

    // Thin delegation to the shared Description instance.
    List<string> IFileRead.GetHeaderNames()
    {
        List<string> results = _Description.GetHeaderNames();
        return results;
    }

    // Thin delegation to the base Move overload.
    string[] IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, string to, string from, string resolvedFileLocation, Exception exception)
    {
        string[] results = Move(extractResults, to, from, resolvedFileLocation, exception);
        return results;
    }

    // Thin delegation to the shared Description instance.
    JsonProperty[] IFileRead.GetDefault()
    {
        JsonProperty[] results = _Description.GetDefault(this, _Logistics);
        return results;
    }

    // Thin delegation to the shared Description instance.
    Dictionary<string, string> IFileRead.GetDisplayNamesJsonElement()
    {
        Dictionary<string, string> results = _Description.GetDisplayNamesJsonElement(this);
        return results;
    }

    // Thin delegation to the shared Description instance.
    List<IDescription> IFileRead.GetDescriptions(IFileRead fileRead, List<Test> tests, IProcessData processData)
    {
        List<IDescription> results = _Description.GetDescriptions(fileRead, _Logistics, tests, processData);
        return results;
    }

    // Standard extract entry point: requires an event name, times the extraction,
    // normalizes a null JSON payload to an empty one, and writes PDSF when hosted.
    Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.GetExtractResult(string reportFullPath, string eventName)
    {
        Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
        if (string.IsNullOrEmpty(eventName))
            throw new Exception();
        _ReportFullPath = reportFullPath;
        DateTime dateTime = DateTime.Now;
        results = GetExtractResult(reportFullPath, dateTime);
        if (results.Item3 is null)
            results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(results.Item1, Array.Empty<Test>(), JsonSerializer.Deserialize<JsonElement[]>("[]"), results.Item4);
        if (results.Item3.Length > 0 && _IsEAFHosted)
            WritePDSF(this, results.Item3);
        UpdateLastTicksDuration(DateTime.Now.Ticks - dateTime.Ticks);
        return results;
    }

    // Re-extraction via the shared helper, driven by header names and display-name map.
    Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.ReExtract()
    {
        Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
        List<string> headerNames = _Description.GetHeaderNames();
        Dictionary<string, string> keyValuePairs = _Description.GetDisplayNamesJsonElement(this);
        results = ReExtract(this, headerNames, keyValuePairs);
        return results;
    }

    // Sanity check only: the configured description must be this handler's Description type.
    void IFileRead.CheckTests(Test[] tests, bool extra)
    {
        if (_Description is not Description)
            throw new Exception();
    }

    // Duplicators do not use the timer callback.
    void IFileRead.Callback(object state) => throw new Exception(string.Concat("Not ", nameof(_IsDuplicator)));

    // Creates the per-run duplicate directory ("<target>\<seg0>[-<seg2>]") derived from the
    // report file name; returns an empty result tuple (no tests/JSON produced here).
    private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
    {
        // Empty guard appears to exist only to reference the otherwise-unused parameter.
        if (dateTime == DateTime.MinValue)
        { }
        Tuple<string, Test[], JsonElement[], List<FileInfo>> results = new(string.Empty, null, null, new List<FileInfo>());
        _Logistics = new Logistics(this, reportFullPath, useSplitForMID: true);
        SetFileParameterLotIDToLogisticsMID();

        string[] segments = Path.GetFileNameWithoutExtension(reportFullPath).Split('_');
        string duplicateDirectory = string.Concat(_FileConnectorConfiguration.TargetFileLocation, @"\", segments[0]);
        if (segments.Length > 2)
            duplicateDirectory = string.Concat(duplicateDirectory, @"-", segments[2]);
        if (!Directory.Exists(duplicateDirectory))
            _ = Directory.CreateDirectory(duplicateDirectory);
        return results;
    }

}
|
13
Adaptation/FileHandlers/TIBCO/Transport/Input.cs
Normal file
13
Adaptation/FileHandlers/TIBCO/Transport/Input.cs
Normal file
@ -0,0 +1,13 @@
|
||||
namespace Adaptation.FileHandlers.TIBCO.Transport;
|
||||
|
||||
/// <summary>
/// Deserialization target for the JSON MID payload (see
/// <c>JsonSerializer.Deserialize&lt;Input&gt;(mid)</c> in <c>Job</c>).
/// All values arrive as strings; <see cref="Sequence"/> is parsed to ticks by the consumer.
/// </summary>
public class Input
{

    public string Sequence { get; set; }      // equipment sequence; parsed with long.TryParse into DateTime ticks
    public string Area { get; set; }          // "Si" marks jobs handled by this pipeline
    public string EquipmentType { get; set; } // becomes Job.StateModel
    public string MesEntity { get; set; }     // equipment name; becomes Job.Equipment
    public string MID { get; set; }           // lot identifier, e.g. "xx-RRRRRRR-rest" — split on '-'
    public string Recipe { get; set; }        // becomes Job.RecipeName

}
|
13
Adaptation/FileHandlers/TIBCO/Transport/Item.cs
Normal file
13
Adaptation/FileHandlers/TIBCO/Transport/Item.cs
Normal file
@ -0,0 +1,13 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
|
||||
namespace Adaptation.FileHandlers.TIBCO.Transport;
|
||||
|
||||
/// <summary>
/// One carrier/wafer entry attached to a <c>Job</c>. The trailing comments record
/// the source fields this maps from in the two upstream naming schemes.
/// </summary>
public class Item
{
    public string Name { get; set; } //WaferLot //UniqueID
    public string Type { get; set; } //SatelliteGroup //Sort
    public string Number { get; set; } //PocketNumber //Slot
    public string Qty { get; set; } //1
    public string CarrierName { get; set; } //PROCESS_GROUP
}
|
187
Adaptation/FileHandlers/TIBCO/Transport/Job.cs
Normal file
187
Adaptation/FileHandlers/TIBCO/Transport/Job.cs
Normal file
@ -0,0 +1,187 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Data.SqlClient;
|
||||
using System.Globalization;
|
||||
using System.IO;
|
||||
using System.Text;
|
||||
using System.Text.Json;
|
||||
|
||||
namespace Adaptation.FileHandlers.TIBCO.Transport;
|
||||
|
||||
/// <summary>
/// Immutable description of one equipment job, built from a MID payload.
/// Non-Si payloads (anything that is not a JSON object mentioning "Si") produce an
/// inert instance with <see cref="IsAreaSi"/> == false and no other state.
/// </summary>
public class Job
{

    public string AutomationMode { get; }
    public string BasicType { get; }
    public string Equipment { get; }
    public string JobName { get; }
    public string LotName { get; }
    public string PackageName { get; }
    public string ProcessSpecName { get; }
    public string ProcessType { get; }
    public string ProductName { get; }
    public string Qty { get; }
    public string RecipeName { get; }
    public string StateModel { get; }
    //
    public bool IsAreaSi { get; }
    public DateTime DateTime { get; }
    public List<Item> Items { get; }

    /// <summary>
    /// Parses <paramref name="mid"/> (JSON) into job fields; the three OI context-data
    /// paths are only used to archive stale files via <see cref="MoveOldFiles"/>.
    /// </summary>
    /// <param name="mid">Raw MID; treated as JSON only when wrapped in braces and containing "Si".</param>
    public Job(string oiContextDataPendingPath, string oiContextDataResultsPath, string oiContextDataSearchPath, string lsl2SQLConnectionString, string mid)
    {
        Items = new List<Item>();
        // Guard against null/empty mid (original indexed mid[0] unconditionally and threw);
        // non-JSON or non-"Si" payloads short-circuit to an inert job.
        if (string.IsNullOrEmpty(mid) || mid[0] != '{' || mid[mid.Length - 1] != '}' || !mid.Contains("\"Si\""))
            IsAreaSi = false;
        else
        {
            string[] segments;
            const string hyphen = "-";
            Input input = JsonSerializer.Deserialize<Input>(mid);
            IsAreaSi = input.Area == "Si";
            // Sequence is equipment ticks; fall back to wall clock when unparsable.
            if (!long.TryParse(input.Sequence, out long sequence))
                DateTime = DateTime.Now;
            else
                DateTime = new DateTime(sequence);
            // MID of the form "xx-RRRRRRR-..." (hyphens at fixed positions 2 and 9) is split;
            // otherwise all segments default to "-".
            if (!string.IsNullOrEmpty(input.MID) && input.MID.Length > 9 && input.MID[2] == hyphen[0] && input.MID[9] == hyphen[0])
                segments = input.MID.Split(hyphen[0]);
            else
                segments = new string[] { hyphen, hyphen, hyphen };
            //
            AutomationMode = string.Concat(DateTime.Ticks, ".", input.MesEntity);
            // segments[1] is the RDS number; resolve its load-lock description when present.
            if (segments[1] == hyphen)
                BasicType = hyphen;
            else
                BasicType = GetBasicType(lsl2SQLConnectionString, hyphen, segments[1]);
            Equipment = input.MesEntity;
            JobName = DateTime.Ticks.ToString();
            // NOTE(review): the condition tests segments[0] but the else-branch uses
            // segments[1] — preserved as-is; confirm this asymmetry is intentional.
            if (segments[0] == hyphen)
                LotName = input.MID;
            else
                LotName = segments[1];
            PackageName = hyphen; //WAFER_ID WaferLot
            ProcessSpecName = hyphen; //WAFER_POS PocketNumber
            ProcessType = segments[0];
            ProductName = segments[2].Split('.')[0];
            Qty = "1";
            RecipeName = input.Recipe;
            StateModel = input.EquipmentType;
            Items.Add(new Item { Name = "0", Type = "NA", Number = "1", Qty = "1", CarrierName = hyphen });
            MoveOldFiles(oiContextDataSearchPath, oiContextDataPendingPath, oiContextDataResultsPath);
        }
    }

    /// <summary>
    /// Looks up the load-lock description ("Left - &lt;type&gt;" / "Right - &lt;type&gt;")
    /// for an RDS number in LSL2SQL; returns <paramref name="hyphen"/> when the query
    /// fails or yields no row. The query is parameterized — <paramref name="rds"/> comes
    /// from an external MID payload and must never be concatenated into the SQL text.
    /// </summary>
    public string GetBasicType(string lsl2SQLConnectionString, string hyphen, string rds)
    {
        string result;
        object scalar = null;
        StringBuilder sql = new();
        _ = sql.Append(" SELECT ").
            Append("     CASE ").
            Append("         WHEN LOAD_LOCK_SIDE = 'L' THEN 'Left - ' ").
            Append("         WHEN LOAD_LOCK_SIDE = 'R' THEN 'Right - ' ").
            Append("         ELSE LOAD_LOCK_SIDE ").
            Append("     END + REACTOR_TYPE AS LOAD_LOCK ").
            Append(" FROM [LSL2SQL].[dbo].[REACT_RUN] ").
            Append(" WHERE RDS_NO = @rds ");
        try
        {
            using SqlConnection sqlConnection = new(lsl2SQLConnectionString);
            sqlConnection.Open();
            using (SqlCommand sqlCommand = new(sql.ToString(), sqlConnection))
            {
                _ = sqlCommand.Parameters.AddWithValue("@rds", rds);
                scalar = sqlCommand.ExecuteScalar();
            }
            sqlConnection.Close();
        }
        catch (Exception)
        {
            // Best-effort lookup: any database failure falls through to the hyphen default.
        }
        result = scalar is null ? hyphen : scalar.ToString();
        return result;
    }

    /// <summary>
    /// Archives files older than two days in each of the three OI context-data
    /// directories into a "yyyy___Week_ww" subfolder (week keyed to the file's
    /// LastWriteTime). All failures are swallowed: archiving must never break a job.
    /// Parameter names follow the constructor's argument order (search, pending,
    /// results); the original declaration listed them in a different order than the
    /// call site, which was harmless (all three are processed identically) but misleading.
    /// </summary>
    private static void MoveOldFiles(string oiContextDataSearchPath, string oiContextDataPendingPath, string oiContextDataResultsPath)
    {
        string yearWeek;
        string[] oldFiles;
        FileInfo fileInfo;
        string weekOfYear;
        string moveDirectory;
        DateTime daysOld = DateTime.Now.AddDays(-2);
        CultureInfo cultureInfo = new("en-US");
        Calendar calendar = cultureInfo.Calendar;
        string[] directories = new string[] { oiContextDataSearchPath, oiContextDataPendingPath, oiContextDataResultsPath };
        foreach (string directory in directories)
        {
            try
            {
                oldFiles = Directory.GetFiles(directory, "*", SearchOption.TopDirectoryOnly);
                foreach (string oldFile in oldFiles)
                {
                    fileInfo = new FileInfo(oldFile);
                    if (!fileInfo.Exists || fileInfo.LastWriteTime > daysOld)
                        continue;
                    weekOfYear = calendar.GetWeekOfYear(fileInfo.LastWriteTime, CalendarWeekRule.FirstDay, DayOfWeek.Sunday).ToString("00");
                    yearWeek = string.Concat(fileInfo.LastWriteTime.ToString("yyyy"), "___Week_", weekOfYear);
                    moveDirectory = Path.Combine(fileInfo.DirectoryName, yearWeek);
                    if (!Directory.Exists(moveDirectory))
                        _ = Directory.CreateDirectory(moveDirectory);
                    try
                    { File.Move(oldFile, Path.Combine(moveDirectory, fileInfo.Name)); }
                    catch (Exception) { /* file may be locked; skip */ }
                }
            }
            catch (Exception)
            {
                // Directory may not exist or be inaccessible; archiving is best-effort.
            }
        }
    }

}
|
70
Adaptation/FileHandlers/TIBCO/Transport/Logistics.cs
Normal file
70
Adaptation/FileHandlers/TIBCO/Transport/Logistics.cs
Normal file
@ -0,0 +1,70 @@
|
||||
namespace Adaptation.FileHandlers.TIBCO.Transport;
|
||||
|
||||
/// <summary>
|
||||
/// EDA-Configurator.pdf
|
||||
/// CDS Namespace Reply Variables
|
||||
/// </summary>
|
||||
/// <summary>
/// EDA-Configurator.pdf
/// CDS Namespace Reply Variables
/// </summary>
public class Logistics
{

    /// <summary>
    /// Basic Type
    /// </summary>
    public string BASIC_TYPE { get; set; }

    /// <summary>
    /// Text for additional information
    /// </summary>
    public string INFO { get; set; }

    /// <summary>
    /// Unique assignment of lot processing to the basic cell (Dresden)
    /// </summary>
    public string JOBID { get; set; }

    /// <summary>
    /// Equipment name used in MES
    /// </summary>
    public string MES_ENTITY { get; set; }

    /// <summary>
    /// Lot number, name for a lot
    /// </summary>
    public string MID { get; set; }

    /// <summary>
    /// Recipe (Process Program ID)
    /// </summary>
    public string PPID { get; set; }

    /// <summary>
    /// Process group (e.g. C5PR)
    /// </summary>
    public string PROCESS_GROUP { get; set; }

    /// <summary>
    /// Product name
    /// </summary>
    public string PRODUCT { get; set; }

    /// <summary>
    /// Total number of wafers in lot
    /// </summary>
    public string TOTAL_NUMBER_OF_WAFERS { get; set; }

    /// <summary>
    /// Equipment sequence number
    /// </summary>
    public string SEQUENCE { get; set; }

    /// <summary>
    /// Unique wafer number (barcode, OCR)
    /// </summary>
    public string WAFER_ID { get; set; }

    /// <summary>
    /// Wafer position in a tube (Furnace)
    /// </summary>
    public string WAFER_POS { get; set; }

    // NOTE: property names are intentionally SCREAMING_CASE to match the CDS
    // namespace reply-variable names they are serialized against.

}
|
228
Adaptation/FileHandlers/TIBCO/Transport/Main.cs
Normal file
228
Adaptation/FileHandlers/TIBCO/Transport/Main.cs
Normal file
@ -0,0 +1,228 @@
|
||||
using Adaptation.Ifx.Eaf.EquipmentConnector.File.Configuration;
|
||||
using Adaptation.Shared.Methods;
|
||||
using Infineon.Yoda;
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Globalization;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Text;
|
||||
using System.Text.Json;
|
||||
using System.Threading;
|
||||
|
||||
namespace Adaptation.FileHandlers.TIBCO.Transport;
|
||||
|
||||
internal partial class Main
|
||||
{
|
||||
|
||||
private static ISMTP _SMTP;
|
||||
private static object _IfxTransport;
|
||||
private static string _CellInstanceName;
|
||||
private static string _LSL2SQLConnectionString;
|
||||
private static string _OIContextDataSearchPath;
|
||||
private static string _OIContextDataPendingPath;
|
||||
private static string _OIContextDataResultsPath;
|
||||
private static FileConnectorConfiguration _FileConnectorConfiguration;
|
||||
|
||||
internal static void Initialize(ISMTP smtp, string cellInstanceName, FileConnectorConfiguration fileConnectorConfiguration, string oiContextDataPendingPath, string oiContextDataResultsPath, string oiContextDataSearchPath, string lsl2SQLConnectionString)
|
||||
{
|
||||
_SMTP = smtp;
|
||||
_IfxTransport = null;
|
||||
_CellInstanceName = cellInstanceName;
|
||||
_LSL2SQLConnectionString = lsl2SQLConnectionString;
|
||||
_OIContextDataSearchPath = oiContextDataSearchPath;
|
||||
_OIContextDataPendingPath = oiContextDataPendingPath;
|
||||
_OIContextDataResultsPath = oiContextDataResultsPath;
|
||||
_FileConnectorConfiguration = fileConnectorConfiguration;
|
||||
}
|
||||
|
||||
internal static List<string> Setup(bool useSleep, bool setIfxTransport, string tibcoParameterChannel, string tibcoParameterSubjectPrefix, string tibcoParameterConfigurationLocation, string tibcoParameterConfigurationLocationCopy, string tibcoParameterSubject)
|
||||
{
|
||||
List<string> results = new();
|
||||
if (useSleep)
|
||||
{
|
||||
for (int i = 1; i < 4; i++)
|
||||
Thread.Sleep(500);
|
||||
}
|
||||
if (setIfxTransport)
|
||||
{
|
||||
results.Add(string.Concat("IfxTransport Subject: ", tibcoParameterSubject));
|
||||
IfxDoc ifxDoc = new();
|
||||
ifxDoc.Add(IfxConst.SUBJECT_PREFIX, tibcoParameterSubjectPrefix);
|
||||
ifxDoc.Add(IfxConst.IFX_CHANNEL, tibcoParameterChannel);
|
||||
ifxDoc.Add(IfxConst.IFX_CONFIGURATION_LOCATION, tibcoParameterConfigurationLocation);
|
||||
ifxDoc.Add(IfxConst.IFX_CONFIGURATION_LOCATION_LOCAL_COPY, tibcoParameterConfigurationLocationCopy);
|
||||
results.Add(string.Concat("IfxTransport Config: ", ifxDoc));
|
||||
_IfxTransport = new IfxTransport();
|
||||
IfxTransport ifxTransport = (IfxTransport)_IfxTransport;
|
||||
ifxTransport.Create(ifxDoc);
|
||||
if (useSleep)
|
||||
{
|
||||
for (int i = 1; i < 10; i++)
|
||||
Thread.Sleep(500);
|
||||
}
|
||||
results.Add(string.Concat("IfxTransport Current Daemon: ", ifxTransport.CurrentDaemon));
|
||||
results.Add(string.Concat("IfxTransport Current Network: ", ifxTransport.CurrentNetwork));
|
||||
results.Add(string.Concat("IfxTransport Current Service: ", ifxTransport.CurrentService));
|
||||
results.Add(string.Concat("IfxTransport Current PoolName: ", ifxTransport.CurrentPoolName));
|
||||
}
|
||||
for (int i = 1; i < 3; i++)
|
||||
Thread.Sleep(500);
|
||||
if (_IfxTransport is null)
|
||||
throw new Exception();
|
||||
else
|
||||
{
|
||||
IfxTransport ifxTransport = (IfxTransport)_IfxTransport;
|
||||
string[] subjects = tibcoParameterSubject.Split('|');
|
||||
foreach (string subject in subjects)
|
||||
ifxTransport.Subscribe(string.Concat(tibcoParameterSubjectPrefix, ".", subject));
|
||||
ifxTransport.ReliableMessage += MainTransport_ReliableMessage;
|
||||
for (int i = 1; i < 3; i++)
|
||||
Thread.Sleep(500);
|
||||
}
|
||||
return results;
|
||||
}
|
||||
|
||||
private static void MoveSourceFiles(string[] sourceFiles, string pdsfFileLogistics, Calendar calendar)
|
||||
{
|
||||
DateTime dateTime;
|
||||
string weekOfYear;
|
||||
string checkDirectory;
|
||||
foreach (string pdsfFile in sourceFiles)
|
||||
{
|
||||
if (pdsfFile == pdsfFileLogistics)
|
||||
continue;
|
||||
dateTime = new FileInfo(pdsfFile).LastWriteTime;
|
||||
weekOfYear = calendar.GetWeekOfYear(dateTime, CalendarWeekRule.FirstDay, DayOfWeek.Sunday).ToString("00");
|
||||
checkDirectory = string.Concat(Path.GetDirectoryName(pdsfFile), @"\_ Logistics Archive\", dateTime.ToString("yyyy"), "_Week_", weekOfYear);
|
||||
if (!Directory.Exists(checkDirectory))
|
||||
_ = Directory.CreateDirectory(checkDirectory);
|
||||
try
|
||||
{ File.Move(pdsfFile, string.Concat(checkDirectory, @"\", Path.GetFileName(pdsfFile))); }
|
||||
catch (Exception) { }
|
||||
}
|
||||
}
|
||||
|
||||
private static string GetJobsMID(IfxDoc envelopeDocument)
|
||||
{
|
||||
string mid;
|
||||
if (envelopeDocument is null || !envelopeDocument.FieldExists("LotName"))
|
||||
mid = string.Empty;
|
||||
else
|
||||
mid = envelopeDocument.GetFieldByName("LotName").ToString();
|
||||
return mid;
|
||||
}
|
||||
|
||||
private static IfxDoc GetJobsReply(Job job)
|
||||
{
|
||||
IfxDoc result = new();
|
||||
IfxDoc itemDoc;
|
||||
IfxDoc jobDoc = new();
|
||||
IfxDoc lotDoc = new();
|
||||
IfxDoc recipeDoc = new();
|
||||
List<IfxDoc> itemDocs = new();
|
||||
jobDoc.Add("AutomationMode", job.AutomationMode);
|
||||
jobDoc.Add("CreationTimestamp", job.DateTime);
|
||||
jobDoc.Add("CreationUser", "-");
|
||||
jobDoc.Add("CurrentState", true);
|
||||
jobDoc.Add("Equipment", job.Equipment);
|
||||
jobDoc.Add("JobName", job.JobName);
|
||||
jobDoc.Add("LastUpdateTimestamp", job.DateTime);
|
||||
jobDoc.Add("LastUpdateUser", "-");
|
||||
jobDoc.Add("ProcessType", job.ProcessType);
|
||||
jobDoc.Add("StateModel", job.StateModel);
|
||||
jobDoc.Add("Status", "-");
|
||||
lotDoc.Add("BasicType", job.BasicType);
|
||||
lotDoc.Add("IsActive", true);
|
||||
lotDoc.Add("LotName", job.LotName);
|
||||
lotDoc.Add("LotState", "-");
|
||||
lotDoc.Add("PackageName", job.PackageName);
|
||||
lotDoc.Add("ProcessSpecName", job.ProcessSpecName);
|
||||
lotDoc.Add("ProductName", job.ProductName);
|
||||
lotDoc.Add("Qty", job.Qty);
|
||||
lotDoc.Add("Qty2", "-");
|
||||
recipeDoc.Add("RecipeName", job.RecipeName);
|
||||
lotDoc.Add("SpecName", "-");
|
||||
foreach (Item item in job.Items)
|
||||
{
|
||||
itemDoc = new IfxDoc();
|
||||
itemDoc.Add("Name", item.Name);
|
||||
itemDoc.Add("Type", item.Type);
|
||||
itemDoc.Add("Number", item.Number);
|
||||
itemDoc.Add("Qty", item.Qty);
|
||||
itemDoc.Add("CarrierName", item.CarrierName);
|
||||
itemDocs.Add(itemDoc);
|
||||
}
|
||||
jobDoc.Add("Recipe", recipeDoc);
|
||||
lotDoc.Add("Items", itemDocs.ToArray());
|
||||
jobDoc.Add("Lots", new IfxDoc[] { lotDoc });
|
||||
result.Add("FAJobs", new IfxDoc[] { jobDoc });
|
||||
result.Add("IFX_ECD", "0");
|
||||
result.Add("IFX_ETX", 0);
|
||||
return result;
|
||||
}
|
||||
|
||||
private static void MainTransport_ReliableMessage(string subject, string replySubject, IfxEnvelope ifxEnvelope)
|
||||
{
|
||||
try
|
||||
{
|
||||
string mid = string.Empty;
|
||||
string[] sourceFiles = null;
|
||||
DateTime dateTime = DateTime.Now;
|
||||
string pdsfFileLogistics = string.Empty;
|
||||
IfxDoc envelopeDocument = ifxEnvelope.ExtractDocument();
|
||||
CultureInfo cultureInfo = new("en-US");
|
||||
Calendar calendar = cultureInfo.Calendar;
|
||||
string weekOfYear = calendar.GetWeekOfYear(dateTime, CalendarWeekRule.FirstDay, DayOfWeek.Sunday).ToString("00");
|
||||
string weekOfYearSegment = string.Concat(@"\", dateTime.ToString("yyyy"), "_Week_", weekOfYear, @"\", dateTime.ToString("yyyy-MM-dd"));
|
||||
if (!string.IsNullOrEmpty(_FileConnectorConfiguration.SourceFileLocation))
|
||||
{
|
||||
string directory = string.Concat(_FileConnectorConfiguration.SourceFileLocation, weekOfYearSegment);
|
||||
if (!Directory.Exists(directory))
|
||||
_ = Directory.CreateDirectory(directory);
|
||||
string fileName = string.Concat(directory, @"\", subject.Replace(".", "~"), " - ", DateTime.Now.Ticks, ".xml");
|
||||
try
|
||||
{ envelopeDocument.SaveAsXml(fileName); }
|
||||
catch (Exception) { }
|
||||
}
|
||||
if (!subject.EndsWith("GETJOBS"))
|
||||
throw new Exception();
|
||||
mid = GetJobsMID(envelopeDocument);
|
||||
Job job = new(_OIContextDataPendingPath, _OIContextDataResultsPath, _OIContextDataSearchPath, _LSL2SQLConnectionString, mid);
|
||||
if (job.IsAreaSi)
|
||||
{
|
||||
IfxDoc sendReply = GetJobsReply(job);
|
||||
ifxEnvelope.Transport.SendReply(ifxEnvelope, sendReply);
|
||||
if (!string.IsNullOrEmpty(_FileConnectorConfiguration.TargetFileLocation))
|
||||
{
|
||||
string directory = string.Concat(_FileConnectorConfiguration.TargetFileLocation, weekOfYearSegment);
|
||||
if (!Directory.Exists(directory))
|
||||
_ = Directory.CreateDirectory(directory);
|
||||
string fileName = string.Concat(directory, @"\", subject.Replace(".", "~"), " - ", DateTime.Now.Ticks, ".xml");
|
||||
try
|
||||
{ sendReply.SaveAsXml(fileName); }
|
||||
catch (Exception) { }
|
||||
}
|
||||
}
|
||||
if (sourceFiles is not null && !string.IsNullOrEmpty(pdsfFileLogistics))
|
||||
MoveSourceFiles(sourceFiles, pdsfFileLogistics, calendar);
|
||||
}
|
||||
catch (Exception exception)
|
||||
{
|
||||
subject = string.Concat("Exception:", _CellInstanceName, ":MainTransport_ReliableMessage");
|
||||
string body = string.Concat(exception.Message, Environment.NewLine, Environment.NewLine, exception.StackTrace);
|
||||
try
|
||||
{ _SMTP.SendHighPriorityEmailMessage(subject, body); }
|
||||
catch (Exception) { }
|
||||
string directory = _FileConnectorConfiguration.ErrorTargetFileLocation;
|
||||
if (!string.IsNullOrEmpty(directory) && Directory.Exists(directory))
|
||||
{
|
||||
string fileName = string.Concat(directory, @"\", subject.Replace(".", "~"), " - ", DateTime.Now.Ticks, ".txt");
|
||||
try
|
||||
{ File.WriteAllLines(fileName, new string[] { exception.Message, string.Empty, string.Empty, exception.StackTrace }); }
|
||||
catch (Exception) { }
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
}
|
140
Adaptation/FileHandlers/ToArchive/FileRead.cs
Normal file
140
Adaptation/FileHandlers/ToArchive/FileRead.cs
Normal file
@ -0,0 +1,140 @@
|
||||
using Adaptation.Eaf.Management.ConfigurationData.CellAutomation;
|
||||
using Adaptation.Ifx.Eaf.EquipmentConnector.File.Configuration;
|
||||
using Adaptation.Shared;
|
||||
using Adaptation.Shared.Duplicator;
|
||||
using Adaptation.Shared.Methods;
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Text.Json;
|
||||
|
||||
namespace Adaptation.FileHandlers.ToArchive;
|
||||
|
||||
public class FileRead : Shared.FileRead, IFileRead
|
||||
{
|
||||
|
||||
public FileRead(ISMTP smtp, Dictionary<string, string> fileParameter, string cellInstanceName, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList<ModelObjectParameterDefinition> modelObjectParameters, string equipmentDictionaryName, Dictionary<string, List<long>> dummyRuns, bool useCyclicalForDescription, bool isEAFHosted) :
|
||||
base(new Description(), false, smtp, fileParameter, cellInstanceName, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, useCyclicalForDescription, isEAFHosted)
|
||||
{
|
||||
_MinFileLength = 10;
|
||||
_NullData = string.Empty;
|
||||
_Logistics = new Logistics(this);
|
||||
if (_FileParameter is null)
|
||||
throw new Exception(cellInstanceConnectionName);
|
||||
if (_ModelObjectParameterDefinitions is null)
|
||||
throw new Exception(cellInstanceConnectionName);
|
||||
if (!_IsDuplicator)
|
||||
throw new Exception(cellInstanceConnectionName);
|
||||
}
|
||||
|
||||
void IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, Exception exception)
|
||||
{
|
||||
bool isErrorFile = exception is not null;
|
||||
if (!isErrorFile && !string.IsNullOrEmpty(_Logistics.ReportFullPath))
|
||||
{
|
||||
FileInfo fileInfo = new(_Logistics.ReportFullPath);
|
||||
if (fileInfo.Exists && fileInfo.LastWriteTime < fileInfo.CreationTime)
|
||||
File.SetLastWriteTime(_Logistics.ReportFullPath, fileInfo.CreationTime);
|
||||
}
|
||||
Move(extractResults, exception);
|
||||
}
|
||||
|
||||
void IFileRead.WaitForThread() => WaitForThread(thread: null, threadExceptions: null);
|
||||
|
||||
string IFileRead.GetEventDescription()
|
||||
{
|
||||
string result = _Description.GetEventDescription();
|
||||
return result;
|
||||
}
|
||||
|
||||
List<string> IFileRead.GetHeaderNames()
|
||||
{
|
||||
List<string> results = _Description.GetHeaderNames();
|
||||
return results;
|
||||
}
|
||||
|
||||
string[] IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, string to, string from, string resolvedFileLocation, Exception exception)
|
||||
{
|
||||
string[] results = Move(extractResults, to, from, resolvedFileLocation, exception);
|
||||
return results;
|
||||
}
|
||||
|
||||
JsonProperty[] IFileRead.GetDefault()
|
||||
{
|
||||
JsonProperty[] results = _Description.GetDefault(this, _Logistics);
|
||||
return results;
|
||||
}
|
||||
|
||||
Dictionary<string, string> IFileRead.GetDisplayNamesJsonElement()
|
||||
{
|
||||
Dictionary<string, string> results = _Description.GetDisplayNamesJsonElement(this);
|
||||
return results;
|
||||
}
|
||||
|
||||
List<IDescription> IFileRead.GetDescriptions(IFileRead fileRead, List<Test> tests, IProcessData processData)
|
||||
{
|
||||
List<IDescription> results = _Description.GetDescriptions(fileRead, _Logistics, tests, processData);
|
||||
return results;
|
||||
}
|
||||
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.GetExtractResult(string reportFullPath, string eventName)
|
||||
{
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
|
||||
if (string.IsNullOrEmpty(eventName))
|
||||
throw new Exception();
|
||||
_ReportFullPath = reportFullPath;
|
||||
DateTime dateTime = DateTime.Now;
|
||||
results = GetExtractResult(reportFullPath, dateTime);
|
||||
if (results.Item3 is null)
|
||||
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(results.Item1, Array.Empty<Test>(), JsonSerializer.Deserialize<JsonElement[]>("[]"), results.Item4);
|
||||
if (results.Item3.Length > 0 && _IsEAFHosted)
|
||||
WritePDSF(this, results.Item3);
|
||||
UpdateLastTicksDuration(DateTime.Now.Ticks - dateTime.Ticks);
|
||||
return results;
|
||||
}
|
||||
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.ReExtract()
|
||||
{
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
|
||||
List<string> headerNames = _Description.GetHeaderNames();
|
||||
Dictionary<string, string> keyValuePairs = _Description.GetDisplayNamesJsonElement(this);
|
||||
results = ReExtract(this, headerNames, keyValuePairs);
|
||||
return results;
|
||||
}
|
||||
|
||||
void IFileRead.CheckTests(Test[] tests, bool extra)
|
||||
{
|
||||
if (_Description is not Description)
|
||||
throw new Exception();
|
||||
}
|
||||
|
||||
void IFileRead.Callback(object state) => throw new Exception(string.Concat("Not ", nameof(_IsDuplicator)));
|
||||
|
||||
private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
|
||||
{
|
||||
if (dateTime == DateTime.MinValue)
|
||||
{ }
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> results = new(string.Empty, null, null, new List<FileInfo>());
|
||||
_Logistics = new Logistics(this, reportFullPath, useSplitForMID: true);
|
||||
SetFileParameterLotIDToLogisticsMID();
|
||||
|
||||
string[] segments = Path.GetFileNameWithoutExtension(reportFullPath).Split('_');
|
||||
string duplicateDirectory = string.Concat(_FileConnectorConfiguration.TargetFileLocation, @"\", segments[0]);
|
||||
if (segments.Length > 2)
|
||||
duplicateDirectory = string.Concat(duplicateDirectory, @"-", segments[2]);
|
||||
if (!Directory.Exists(duplicateDirectory))
|
||||
_ = Directory.CreateDirectory(duplicateDirectory);
|
||||
|
||||
string logisticsSequence = _Logistics.Sequence.ToString();
|
||||
bool isDummyRun = _DummyRuns.Any() && _DummyRuns.ContainsKey(_Logistics.JobID) && _DummyRuns[_Logistics.JobID].Any() && (from l in _DummyRuns[_Logistics.JobID] where l == _Logistics.Sequence select 1).Any();
|
||||
|
||||
List<Tuple<Shared.Properties.IScopeInfo, string>> tuples = new();
|
||||
|
||||
string destinationDirectory = WriteScopeInfo(_ProgressPath, _Logistics, dateTime, duplicateDirectory, tuples);
|
||||
if (isDummyRun)
|
||||
Shared0607(reportFullPath, duplicateDirectory, logisticsSequence, destinationDirectory);
|
||||
return results;
|
||||
}
|
||||
|
||||
}
|
1445
Adaptation/FileHandlers/txt/Description.cs
Normal file
1445
Adaptation/FileHandlers/txt/Description.cs
Normal file
File diff suppressed because it is too large
Load Diff
76
Adaptation/FileHandlers/txt/Detail.cs
Normal file
76
Adaptation/FileHandlers/txt/Detail.cs
Normal file
@ -0,0 +1,76 @@
|
||||
namespace Adaptation.FileHandlers.txt;
|
||||
|
||||
public class Detail
|
||||
{
|
||||
|
||||
public string Grade { get; set; }
|
||||
public string HeaderUniqueID { get; set; }
|
||||
public string Side { get; set; }
|
||||
public string SrcDest { get; set; }
|
||||
public string UniqueID { get; set; }
|
||||
public string WaferID { get; set; }
|
||||
public string Data { get; set; }
|
||||
public string DcnAll { get; set; }
|
||||
public string DcnArea { get; set; }
|
||||
public string DcnAreaCount { get; set; }
|
||||
public string DcnBin1 { get; set; }
|
||||
public string DcnBin2 { get; set; }
|
||||
public string DcnBin3 { get; set; }
|
||||
public string DcnBin4 { get; set; }
|
||||
public string DcnBin5 { get; set; }
|
||||
public string DcnBin6 { get; set; }
|
||||
public string DcnBin7 { get; set; }
|
||||
public string DcnBin8 { get; set; }
|
||||
public string DcnHazeAvg { get; set; }
|
||||
public string DcnHazeMedian { get; set; }
|
||||
public string DcnHazeStdDev { get; set; }
|
||||
public string DcnLpd { get; set; }
|
||||
public string DcnLpdES { get; set; }
|
||||
public string DcnLpdN { get; set; }
|
||||
public string DcnMicroScr { get; set; }
|
||||
public string DcnScr { get; set; }
|
||||
public string DcnSlip { get; set; }
|
||||
public string DnnAll { get; set; }
|
||||
public string DnnArea { get; set; }
|
||||
public string DnnAreaCount { get; set; }
|
||||
public string DnnBin1 { get; set; }
|
||||
public string DnnBin2 { get; set; }
|
||||
public string DnnBin3 { get; set; }
|
||||
public string DnnBin4 { get; set; }
|
||||
public string DnnBin5 { get; set; }
|
||||
public string DnnBin6 { get; set; }
|
||||
public string DnnBin7 { get; set; }
|
||||
public string DnnBin8 { get; set; }
|
||||
public string DnnHazeAvg { get; set; }
|
||||
public string DnnHazeMedian { get; set; }
|
||||
public string DnnHazeStdDev { get; set; }
|
||||
public string DnnLpd { get; set; }
|
||||
public string DnnLpdES { get; set; }
|
||||
public string DnnLpdN { get; set; }
|
||||
public string DnnMicroScr { get; set; }
|
||||
public string DnnScr { get; set; }
|
||||
public string DnnSlip { get; set; }
|
||||
public string DwnAll { get; set; }
|
||||
public string DwnArea { get; set; }
|
||||
public string DwnAreaCount { get; set; }
|
||||
public string DwnBin1 { get; set; }
|
||||
public string DwnBin2 { get; set; }
|
||||
public string DwnBin3 { get; set; }
|
||||
public string DwnBin4 { get; set; }
|
||||
public string DwnBin5 { get; set; }
|
||||
public string DwnBin6 { get; set; }
|
||||
public string DwnBin7 { get; set; }
|
||||
public string DwnBin8 { get; set; }
|
||||
public string DwnHazeAvg { get; set; }
|
||||
public string DwnHazeMedian { get; set; }
|
||||
public string DwnHazeStdDev { get; set; }
|
||||
public string DwnLpd { get; set; }
|
||||
public string DwnLpdES { get; set; }
|
||||
public string DwnLpdN { get; set; }
|
||||
public string DwnMicroScr { get; set; }
|
||||
public string DwnScr { get; set; }
|
||||
public string DwnSlip { get; set; }
|
||||
|
||||
public Detail() => Data = "*Data*";
|
||||
|
||||
}
|
125
Adaptation/FileHandlers/txt/FileRead.cs
Normal file
125
Adaptation/FileHandlers/txt/FileRead.cs
Normal file
@ -0,0 +1,125 @@
|
||||
using Adaptation.Eaf.Management.ConfigurationData.CellAutomation;
|
||||
using Adaptation.Ifx.Eaf.EquipmentConnector.File.Configuration;
|
||||
using Adaptation.Shared;
|
||||
using Adaptation.Shared.Methods;
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Text.Json;
|
||||
using System.Text.RegularExpressions;
|
||||
|
||||
namespace Adaptation.FileHandlers.txt;
|
||||
|
||||
public class FileRead : Shared.FileRead, IFileRead
|
||||
{
|
||||
|
||||
public FileRead(ISMTP smtp, Dictionary<string, string> fileParameter, string cellInstanceName, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList<ModelObjectParameterDefinition> modelObjectParameters, string equipmentDictionaryName, Dictionary<string, List<long>> dummyRuns, bool useCyclicalForDescription, bool isEAFHosted) :
|
||||
base(new Description(), true, smtp, fileParameter, cellInstanceName, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, useCyclicalForDescription, isEAFHosted)
|
||||
{
|
||||
_MinFileLength = 10;
|
||||
_NullData = string.Empty;
|
||||
_Logistics = new Logistics(this);
|
||||
if (_FileParameter is null)
|
||||
throw new Exception(cellInstanceConnectionName);
|
||||
if (_ModelObjectParameterDefinitions is null)
|
||||
throw new Exception(cellInstanceConnectionName);
|
||||
if (_IsDuplicator)
|
||||
throw new Exception(cellInstanceConnectionName);
|
||||
}
|
||||
|
||||
void IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, Exception exception) => Move(extractResults, exception);
|
||||
|
||||
void IFileRead.WaitForThread() => WaitForThread(thread: null, threadExceptions: null);
|
||||
|
||||
string IFileRead.GetEventDescription()
|
||||
{
|
||||
string result = _Description.GetEventDescription();
|
||||
return result;
|
||||
}
|
||||
|
||||
List<string> IFileRead.GetHeaderNames()
|
||||
{
|
||||
List<string> results = _Description.GetHeaderNames();
|
||||
return results;
|
||||
}
|
||||
|
||||
string[] IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, string to, string from, string resolvedFileLocation, Exception exception)
|
||||
{
|
||||
string[] results = Move(extractResults, to, from, resolvedFileLocation, exception);
|
||||
return results;
|
||||
}
|
||||
|
||||
JsonProperty[] IFileRead.GetDefault()
|
||||
{
|
||||
JsonProperty[] results = _Description.GetDefault(this, _Logistics);
|
||||
return results;
|
||||
}
|
||||
|
||||
Dictionary<string, string> IFileRead.GetDisplayNamesJsonElement()
|
||||
{
|
||||
Dictionary<string, string> results = _Description.GetDisplayNamesJsonElement(this);
|
||||
return results;
|
||||
}
|
||||
|
||||
List<IDescription> IFileRead.GetDescriptions(IFileRead fileRead, List<Test> tests, IProcessData processData)
|
||||
{
|
||||
List<IDescription> results = _Description.GetDescriptions(fileRead, _Logistics, tests, processData);
|
||||
return results;
|
||||
}
|
||||
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.GetExtractResult(string reportFullPath, string eventName)
|
||||
{
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
|
||||
if (string.IsNullOrEmpty(eventName))
|
||||
throw new Exception();
|
||||
_ReportFullPath = reportFullPath;
|
||||
DateTime dateTime = DateTime.Now;
|
||||
results = GetExtractResult(reportFullPath, dateTime);
|
||||
if (results.Item3 is null)
|
||||
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(results.Item1, Array.Empty<Test>(), JsonSerializer.Deserialize<JsonElement[]>("[]"), results.Item4);
|
||||
if (results.Item3.Length > 0 && _IsEAFHosted)
|
||||
WritePDSF(this, results.Item3);
|
||||
UpdateLastTicksDuration(DateTime.Now.Ticks - dateTime.Ticks);
|
||||
return results;
|
||||
}
|
||||
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.ReExtract()
|
||||
{
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
|
||||
List<string> headerNames = _Description.GetHeaderNames();
|
||||
Dictionary<string, string> keyValuePairs = _Description.GetDisplayNamesJsonElement(this);
|
||||
results = ReExtract(this, headerNames, keyValuePairs);
|
||||
return results;
|
||||
}
|
||||
|
||||
void IFileRead.CheckTests(Test[] tests, bool extra) => throw new Exception(string.Concat("Not ", nameof(_IsDuplicator)));
|
||||
|
||||
void IFileRead.Callback(object state) => throw new Exception(string.Concat("Not ", nameof(_IsDuplicator)));
|
||||
|
||||
private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
|
||||
{
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> results = new(string.Empty, null, null, new List<FileInfo>());
|
||||
_Logistics = new Logistics(this, reportFullPath, useSplitForMID: true);
|
||||
SetFileParameterLotIDToLogisticsMID();
|
||||
if (reportFullPath.Length < _MinFileLength)
|
||||
results.Item4.Add(new FileInfo(reportFullPath));
|
||||
else
|
||||
{
|
||||
IProcessData iProcessData = new ProcessData(this, _Logistics, results.Item4);
|
||||
if (iProcessData is ProcessData processData)
|
||||
{
|
||||
string mid = string.Concat(processData.Reactor, "-", processData.RDS, "-", processData.PSN);
|
||||
mid = Regex.Replace(mid, @"[\\,\/,\:,\*,\?,\"",\<,\>,\|]", "_").Split('\r')[0].Split('\n')[0];
|
||||
_Logistics.MID = mid;
|
||||
SetFileParameterLotID(mid);
|
||||
_Logistics.ProcessJobID = processData.Reactor;
|
||||
}
|
||||
if (!iProcessData.Details.Any())
|
||||
throw new Exception(string.Concat("No Data - ", dateTime.Ticks));
|
||||
results = iProcessData.GetResults(this, _Logistics, results.Item4);
|
||||
}
|
||||
return results;
|
||||
}
|
||||
|
||||
}
|
906
Adaptation/FileHandlers/txt/ProcessData.cs
Normal file
906
Adaptation/FileHandlers/txt/ProcessData.cs
Normal file
@ -0,0 +1,906 @@
|
||||
using Adaptation.Shared;
|
||||
using Adaptation.Shared.Methods;
|
||||
using log4net;
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Data;
|
||||
using System.Globalization;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Text;
|
||||
using System.Text.Json;
|
||||
using System.Text.RegularExpressions;
|
||||
|
||||
namespace Adaptation.FileHandlers.txt;
|
||||
|
||||
public class ProcessData : IProcessData
|
||||
{
|
||||
|
||||
private int _I;
|
||||
private string _Data;
|
||||
|
||||
private readonly ILog _Log;
|
||||
private readonly List<object> _Details;
|
||||
|
||||
public string JobID { get; set; }
|
||||
public string MesEntity { get; set; }
|
||||
public string Date { get; set; }
|
||||
public string Lot { get; set; }
|
||||
public string Operator { get; set; }
|
||||
public string PSN { get; set; }
|
||||
public string RDS { get; set; }
|
||||
public string Reactor { get; set; }
|
||||
public string Recipe { get; set; }
|
||||
public string Session { get; set; }
|
||||
public string UniqueID { get; set; }
|
||||
public string DcnAllMax { get; set; }
|
||||
public string DcnAllMean { get; set; }
|
||||
public string DcnAllMin { get; set; }
|
||||
public string DcnAllStdDev { get; set; }
|
||||
public string DcnAreaCountMax { get; set; }
|
||||
public string DcnAreaCountMean { get; set; }
|
||||
public string DcnAreaCountMin { get; set; }
|
||||
public string DcnAreaCountStdDev { get; set; }
|
||||
public string DcnAreaMax { get; set; }
|
||||
public string DcnAreaMean { get; set; }
|
||||
public string DcnAreaMin { get; set; }
|
||||
public string DcnAreaStdDev { get; set; }
|
||||
public string DcnBin1Max { get; set; }
|
||||
public string DcnBin1Mean { get; set; }
|
||||
public string DcnBin1Min { get; set; }
|
||||
public string DcnBin1StdDev { get; set; }
|
||||
public string DcnBin2Max { get; set; }
|
||||
public string DcnBin2Mean { get; set; }
|
||||
public string DcnBin2Min { get; set; }
|
||||
public string DcnBin2StdDev { get; set; }
|
||||
public string DcnBin3Max { get; set; }
|
||||
public string DcnBin3Mean { get; set; }
|
||||
public string DcnBin3Min { get; set; }
|
||||
public string DcnBin3StdDev { get; set; }
|
||||
public string DcnBin4Max { get; set; }
|
||||
public string DcnBin4Mean { get; set; }
|
||||
public string DcnBin4Min { get; set; }
|
||||
public string DcnBin4StdDev { get; set; }
|
||||
public string DcnBin5Max { get; set; }
|
||||
public string DcnBin5Mean { get; set; }
|
||||
public string DcnBin5Min { get; set; }
|
||||
public string DcnBin5StdDev { get; set; }
|
||||
public string DcnBin6Max { get; set; }
|
||||
public string DcnBin6Mean { get; set; }
|
||||
public string DcnBin6Min { get; set; }
|
||||
public string DcnBin6StdDev { get; set; }
|
||||
public string DcnBin7Max { get; set; }
|
||||
public string DcnBin7Mean { get; set; }
|
||||
public string DcnBin7Min { get; set; }
|
||||
public string DcnBin7StdDev { get; set; }
|
||||
public string DcnBin8Max { get; set; }
|
||||
public string DcnBin8Mean { get; set; }
|
||||
public string DcnBin8Min { get; set; }
|
||||
public string DcnBin8StdDev { get; set; }
|
||||
public string DcnHazeAvgMax { get; set; }
|
||||
public string DcnHazeAvgMean { get; set; }
|
||||
public string DcnHazeAvgMin { get; set; }
|
||||
public string DcnHazeAvgStdDev { get; set; }
|
||||
public string DcnHazeMedianMax { get; set; }
|
||||
public string DcnHazeMedianMean { get; set; }
|
||||
public string DcnHazeMedianMin { get; set; }
|
||||
public string DcnHazeMedianStdDev { get; set; }
|
||||
public string DcnHazeStdDevMax { get; set; }
|
||||
public string DcnHazeStdDevMean { get; set; }
|
||||
public string DcnHazeStdDevMin { get; set; }
|
||||
public string DcnHazeStdDevStdDev { get; set; }
|
||||
public string DcnLpdESMax { get; set; }
|
||||
public string DcnLpdESMean { get; set; }
|
||||
public string DcnLpdESMin { get; set; }
|
||||
public string DcnLpdESStdDev { get; set; }
|
||||
public string DcnLpdMax { get; set; }
|
||||
public string DcnLpdMean { get; set; }
|
||||
public string DcnLpdMin { get; set; }
|
||||
public string DcnLpdNMax { get; set; }
|
||||
public string DcnLpdNMean { get; set; }
|
||||
public string DcnLpdNMin { get; set; }
|
||||
public string DcnLpdNStdDev { get; set; }
|
||||
public string DcnLpdStdDev { get; set; }
|
||||
public string DcnMicroScrMax { get; set; }
|
||||
public string DcnMicroScrMean { get; set; }
|
||||
public string DcnMicroScrMin { get; set; }
|
||||
public string DcnMicroScrStdDev { get; set; }
|
||||
public string DcnScrMax { get; set; }
|
||||
public string DcnScrMean { get; set; }
|
||||
public string DcnScrMin { get; set; }
|
||||
public string DcnScrStdDev { get; set; }
|
||||
public string DcnSlipMax { get; set; }
|
||||
public string DcnSlipMean { get; set; }
|
||||
public string DcnSlipMin { get; set; }
|
||||
public string DcnSlipStdDev { get; set; }
|
||||
public string DnnAllMax { get; set; }
|
||||
public string DnnAllMean { get; set; }
|
||||
public string DnnAllMin { get; set; }
|
||||
public string DnnAllStdDev { get; set; }
|
||||
public string DnnAreaCountMax { get; set; }
|
||||
public string DnnAreaCountMean { get; set; }
|
||||
public string DnnAreaCountMin { get; set; }
|
||||
public string DnnAreaCountStdDev { get; set; }
|
||||
public string DnnAreaMax { get; set; }
|
||||
public string DnnAreaMean { get; set; }
|
||||
public string DnnAreaMin { get; set; }
|
||||
public string DnnAreaStdDev { get; set; }
|
||||
public string DnnBin1Max { get; set; }
|
||||
public string DnnBin1Mean { get; set; }
|
||||
public string DnnBin1Min { get; set; }
|
||||
public string DnnBin1StdDev { get; set; }
|
||||
public string DnnBin2Max { get; set; }
|
||||
public string DnnBin2Mean { get; set; }
|
||||
public string DnnBin2Min { get; set; }
|
||||
public string DnnBin2StdDev { get; set; }
|
||||
public string DnnBin3Max { get; set; }
|
||||
public string DnnBin3Mean { get; set; }
|
||||
public string DnnBin3Min { get; set; }
|
||||
public string DnnBin3StdDev { get; set; }
|
||||
public string DnnBin4Max { get; set; }
|
||||
public string DnnBin4Mean { get; set; }
|
||||
public string DnnBin4Min { get; set; }
|
||||
public string DnnBin4StdDev { get; set; }
|
||||
public string DnnBin5Max { get; set; }
|
||||
public string DnnBin5Mean { get; set; }
|
||||
public string DnnBin5Min { get; set; }
|
||||
public string DnnBin5StdDev { get; set; }
|
||||
public string DnnBin6Max { get; set; }
|
||||
public string DnnBin6Mean { get; set; }
|
||||
public string DnnBin6Min { get; set; }
|
||||
public string DnnBin6StdDev { get; set; }
|
||||
public string DnnBin7Max { get; set; }
|
||||
public string DnnBin7Mean { get; set; }
|
||||
public string DnnBin7Min { get; set; }
|
||||
public string DnnBin7StdDev { get; set; }
|
||||
public string DnnBin8Max { get; set; }
|
||||
public string DnnBin8Mean { get; set; }
|
||||
public string DnnBin8Min { get; set; }
|
||||
public string DnnBin8StdDev { get; set; }
|
||||
public string DnnHazeAvgMax { get; set; }
|
||||
public string DnnHazeAvgMean { get; set; }
|
||||
public string DnnHazeAvgMin { get; set; }
|
||||
public string DnnHazeAvgStdDev { get; set; }
|
||||
public string DnnHazeMedianMax { get; set; }
|
||||
public string DnnHazeMedianMean { get; set; }
|
||||
public string DnnHazeMedianMin { get; set; }
|
||||
public string DnnHazeMedianStdDev { get; set; }
|
||||
public string DnnHazeStdDevMax { get; set; }
|
||||
public string DnnHazeStdDevMean { get; set; }
|
||||
public string DnnHazeStdDevMin { get; set; }
|
||||
public string DnnHazeStdDevStdDev { get; set; }
|
||||
public string DnnLpdESMax { get; set; }
|
||||
public string DnnLpdESMean { get; set; }
|
||||
public string DnnLpdESMin { get; set; }
|
||||
public string DnnLpdESStdDev { get; set; }
|
||||
public string DnnLpdMax { get; set; }
|
||||
public string DnnLpdMean { get; set; }
|
||||
public string DnnLpdMin { get; set; }
|
||||
public string DnnLpdNMax { get; set; }
|
||||
public string DnnLpdNMean { get; set; }
|
||||
public string DnnLpdNMin { get; set; }
|
||||
public string DnnLpdNStdDev { get; set; }
|
||||
public string DnnLpdStdDev { get; set; }
|
||||
public string DnnMicroScrMax { get; set; }
|
||||
public string DnnMicroScrMean { get; set; }
|
||||
public string DnnMicroScrMin { get; set; }
|
||||
public string DnnMicroScrStdDev { get; set; }
|
||||
public string DnnScrMax { get; set; }
|
||||
public string DnnScrMean { get; set; }
|
||||
public string DnnScrMin { get; set; }
|
||||
public string DnnScrStdDev { get; set; }
|
||||
public string DnnSlipMax { get; set; }
|
||||
public string DnnSlipMean { get; set; }
|
||||
public string DnnSlipMin { get; set; }
|
||||
public string DnnSlipStdDev { get; set; }
|
||||
public string DwnAllMax { get; set; }
|
||||
public string DwnAllMean { get; set; }
|
||||
public string DwnAllMin { get; set; }
|
||||
public string DwnAllStdDev { get; set; }
|
||||
public string DwnAreaCountMax { get; set; }
|
||||
public string DwnAreaCountMean { get; set; }
|
||||
public string DwnAreaCountMin { get; set; }
|
||||
public string DwnAreaCountStdDev { get; set; }
|
||||
public string DwnAreaMax { get; set; }
|
||||
public string DwnAreaMean { get; set; }
|
||||
public string DwnAreaMin { get; set; }
|
||||
public string DwnAreaStdDev { get; set; }
|
||||
public string DwnBin1Max { get; set; }
|
||||
public string DwnBin1Mean { get; set; }
|
||||
public string DwnBin1Min { get; set; }
|
||||
public string DwnBin1StdDev { get; set; }
|
||||
public string DwnBin2Max { get; set; }
|
||||
public string DwnBin2Mean { get; set; }
|
||||
public string DwnBin2Min { get; set; }
|
||||
public string DwnBin2StdDev { get; set; }
|
||||
public string DwnBin3Max { get; set; }
|
||||
public string DwnBin3Mean { get; set; }
|
||||
public string DwnBin3Min { get; set; }
|
||||
public string DwnBin3StdDev { get; set; }
|
||||
public string DwnBin4Max { get; set; }
|
||||
public string DwnBin4Mean { get; set; }
|
||||
public string DwnBin4Min { get; set; }
|
||||
public string DwnBin4StdDev { get; set; }
|
||||
public string DwnBin5Max { get; set; }
|
||||
public string DwnBin5Mean { get; set; }
|
||||
public string DwnBin5Min { get; set; }
|
||||
public string DwnBin5StdDev { get; set; }
|
||||
public string DwnBin6Max { get; set; }
|
||||
public string DwnBin6Mean { get; set; }
|
||||
public string DwnBin6Min { get; set; }
|
||||
public string DwnBin6StdDev { get; set; }
|
||||
public string DwnBin7Max { get; set; }
|
||||
public string DwnBin7Mean { get; set; }
|
||||
public string DwnBin7Min { get; set; }
|
||||
public string DwnBin7StdDev { get; set; }
|
||||
public string DwnBin8Max { get; set; }
|
||||
public string DwnBin8Mean { get; set; }
|
||||
public string DwnBin8Min { get; set; }
|
||||
public string DwnBin8StdDev { get; set; }
|
||||
public string DwnHazeAvgMax { get; set; }
|
||||
public string DwnHazeAvgMean { get; set; }
|
||||
public string DwnHazeAvgMin { get; set; }
|
||||
public string DwnHazeAvgStdDev { get; set; }
|
||||
public string DwnHazeMedianMax { get; set; }
|
||||
public string DwnHazeMedianMean { get; set; }
|
||||
public string DwnHazeMedianMin { get; set; }
|
||||
public string DwnHazeMedianStdDev { get; set; }
|
||||
public string DwnHazeStdDevMax { get; set; }
|
||||
public string DwnHazeStdDevMean { get; set; }
|
||||
public string DwnHazeStdDevMin { get; set; }
|
||||
public string DwnHazeStdDevStdDev { get; set; }
|
||||
public string DwnLpdESMax { get; set; }
|
||||
public string DwnLpdESMean { get; set; }
|
||||
public string DwnLpdESMin { get; set; }
|
||||
public string DwnLpdESStdDev { get; set; }
|
||||
public string DwnLpdMax { get; set; }
|
||||
public string DwnLpdMean { get; set; }
|
||||
public string DwnLpdMin { get; set; }
|
||||
public string DwnLpdNMax { get; set; }
|
||||
public string DwnLpdNMean { get; set; }
|
||||
public string DwnLpdNMin { get; set; }
|
||||
public string DwnLpdNStdDev { get; set; }
|
||||
public string DwnLpdStdDev { get; set; }
|
||||
public string DwnMicroScrMax { get; set; }
|
||||
public string DwnMicroScrMean { get; set; }
|
||||
public string DwnMicroScrMin { get; set; }
|
||||
public string DwnMicroScrStdDev { get; set; }
|
||||
public string DwnScrMax { get; set; }
|
||||
public string DwnScrMean { get; set; }
|
||||
public string DwnScrMin { get; set; }
|
||||
public string DwnScrStdDev { get; set; }
|
||||
public string DwnSlipMax { get; set; }
|
||||
public string DwnSlipMean { get; set; }
|
||||
public string DwnSlipMin { get; set; }
|
||||
public string DwnSlipStdDev { get; set; }
|
||||
|
||||
List<object> Shared.Properties.IProcessData.Details => _Details;
|
||||
|
||||
public ProcessData(IFileRead fileRead, Logistics logistics, List<FileInfo> fileInfoCollection)
|
||||
{
|
||||
fileInfoCollection.Clear();
|
||||
_Details = new List<object>();
|
||||
_I = 0;
|
||||
_Data = string.Empty;
|
||||
JobID = logistics.JobID;
|
||||
MesEntity = logistics.MesEntity;
|
||||
Date = DateTime.Now.ToString();
|
||||
_Log = LogManager.GetLogger(typeof(ProcessData));
|
||||
Parse(fileRead, logistics, fileInfoCollection);
|
||||
}
|
||||
|
||||
string IProcessData.GetCurrentReactor(IFileRead fileRead, Logistics logistics, Dictionary<string, string> reactors) => throw new Exception(string.Concat("See ", nameof(Parse)));
|
||||
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> IProcessData.GetResults(IFileRead fileRead, Logistics logistics, List<FileInfo> fileInfoCollection)
|
||||
{
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
|
||||
List<Test> tests = new();
|
||||
foreach (object item in _Details)
|
||||
tests.Add(Test.SP1);
|
||||
List<IDescription> descriptions = fileRead.GetDescriptions(fileRead, tests, this);
|
||||
if (tests.Count != descriptions.Count)
|
||||
throw new Exception();
|
||||
for (int i = 0; i < tests.Count; i++)
|
||||
{
|
||||
if (descriptions[i] is not Description description)
|
||||
throw new Exception();
|
||||
if (description.Test != (int)tests[i])
|
||||
throw new Exception();
|
||||
}
|
||||
List<Description> fileReadDescriptions = (from l in descriptions select (Description)l).ToList();
|
||||
string json = JsonSerializer.Serialize(fileReadDescriptions, fileReadDescriptions.GetType());
|
||||
JsonElement[] jsonElements = JsonSerializer.Deserialize<JsonElement[]>(json);
|
||||
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(logistics.Logistics1[0], tests.ToArray(), jsonElements, fileInfoCollection);
|
||||
return results;
|
||||
}
|
||||
|
||||
private void ScanPast(string text)
|
||||
{
|
||||
int num = _Data.IndexOf(text, _I);
|
||||
if (num > -1)
|
||||
_I = num + text.Length;
|
||||
else
|
||||
_I = _Data.Length;
|
||||
}
|
||||
|
||||
private string GetBefore(string text)
|
||||
{
|
||||
int num = _Data.IndexOf(text, _I);
|
||||
if (num > -1)
|
||||
{
|
||||
string str = _Data.Substring(_I, num - _I);
|
||||
_I = num + text.Length;
|
||||
return str.Trim();
|
||||
}
|
||||
string str1 = _Data.Substring(_I);
|
||||
_I = _Data.Length;
|
||||
return str1.Trim();
|
||||
}
|
||||
|
||||
private string GetBefore(string text, bool trim)
|
||||
{
|
||||
if (trim)
|
||||
return GetBefore(text);
|
||||
int num = _Data.IndexOf(text, _I);
|
||||
if (num > -1)
|
||||
{
|
||||
string str = _Data.Substring(_I, num - _I);
|
||||
_I = num + text.Length;
|
||||
return str;
|
||||
}
|
||||
string str1 = _Data.Substring(_I);
|
||||
_I = _Data.Length;
|
||||
return str1;
|
||||
}
|
||||
|
||||
private static bool IsNullOrWhiteSpace(string text)
|
||||
{
|
||||
for (int index = 0; index < text.Length; ++index)
|
||||
{
|
||||
if (!char.IsWhiteSpace(text[index]))
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
private bool IsBlankLine()
|
||||
{
|
||||
int num = _Data.IndexOf("\n", _I);
|
||||
return IsNullOrWhiteSpace(num > -1 ? _Data.Substring(_I, num - _I) : _Data.Substring(_I));
|
||||
}
|
||||
|
||||
private string GetToEOL() => GetBefore("\n");
|
||||
|
||||
private string GetToEOL(bool trim)
|
||||
{
|
||||
if (trim)
|
||||
return GetToEOL();
|
||||
return GetBefore("\n", false);
|
||||
}
|
||||
|
||||
private string GetToText(string text) => _Data.Substring(_I, _Data.IndexOf(text, _I) - _I).Trim();
|
||||
|
||||
private string GetToken()
|
||||
{
|
||||
while (_I < _Data.Length && IsNullOrWhiteSpace(_Data.Substring(_I, 1)))
|
||||
++_I;
|
||||
int j = _I;
|
||||
while (j < _Data.Length && !IsNullOrWhiteSpace(_Data.Substring(j, 1)))
|
||||
++j;
|
||||
string str = _Data.Substring(_I, j - _I);
|
||||
_I = j;
|
||||
return str.Trim();
|
||||
}
|
||||
|
||||
private string PeekNextLine()
|
||||
{
|
||||
int j = _I;
|
||||
string toEol = GetToEOL();
|
||||
_I = j;
|
||||
return toEol;
|
||||
}
|
||||
|
||||
private void GetWaferSummaryInfo(List<WaferSummaryInfo> waferSummaryInfos, string whichInfo)
|
||||
{
|
||||
ScanPast(whichInfo);
|
||||
_ = GetToEOL();
|
||||
_ = GetToEOL();
|
||||
_ = GetToEOL();
|
||||
_ = GetToEOL();
|
||||
string[] segments;
|
||||
WaferSummaryInfo waferSummaryInfo;
|
||||
const string grade = "F Grade";
|
||||
const string reject = "F Reject";
|
||||
const string overLoad = "F OverLoad";
|
||||
for (string line = PeekNextLine(); line[0] != '-'; line = PeekNextLine())
|
||||
{
|
||||
line = GetToEOL();
|
||||
waferSummaryInfo = new WaferSummaryInfo();
|
||||
if (line.StartsWith(grade))
|
||||
line = line.Replace(grade, string.Concat("F -1", grade.Substring(4)));
|
||||
else if (line.StartsWith(reject))
|
||||
line = line.Replace(reject, string.Concat("F -1", reject.Substring(4)));
|
||||
else if (line.StartsWith(overLoad))
|
||||
line = line.Replace(overLoad, string.Concat("F -1", overLoad.Substring(4)));
|
||||
segments = line.Split(new char[] { ' ' }, StringSplitOptions.RemoveEmptyEntries);
|
||||
waferSummaryInfo.Side = segments[0];
|
||||
waferSummaryInfo.WaferID = segments[1];
|
||||
waferSummaryInfo.Grade = segments[2];
|
||||
waferSummaryInfo.SrcDest = segments[3];
|
||||
if (segments.Length > 4)
|
||||
{
|
||||
waferSummaryInfo.Lpd = segments[5];
|
||||
waferSummaryInfo.LpdN = segments[6];
|
||||
waferSummaryInfo.LpdES = segments[7];
|
||||
waferSummaryInfo.MicroScr = segments[8];
|
||||
waferSummaryInfo.Scr = segments[9];
|
||||
waferSummaryInfo.Slip = segments[10];
|
||||
waferSummaryInfo.AreaNum = segments[11];
|
||||
waferSummaryInfo.Area = segments[12];
|
||||
waferSummaryInfo.HazeAvg = segments[13];
|
||||
waferSummaryInfo.HazeMedian = segments[14];
|
||||
waferSummaryInfo.HazeStdDev = segments[15];
|
||||
waferSummaryInfo.Bin1 = segments[16];
|
||||
waferSummaryInfo.Bin2 = segments[17];
|
||||
waferSummaryInfo.Bin3 = segments[18];
|
||||
waferSummaryInfo.Bin4 = segments[19];
|
||||
waferSummaryInfo.Bin5 = segments[20];
|
||||
waferSummaryInfo.Bin6 = segments[21];
|
||||
waferSummaryInfo.Bin7 = segments[22];
|
||||
waferSummaryInfo.Bin8 = segments[23];
|
||||
}
|
||||
if (waferSummaryInfo.WaferID == "-1")
|
||||
{
|
||||
segments = waferSummaryInfo.SrcDest.Split('-')[0].Split('/');
|
||||
waferSummaryInfo.WaferID = segments[segments.Length - 1];
|
||||
}
|
||||
waferSummaryInfos.Add(waferSummaryInfo);
|
||||
}
|
||||
}
|
||||
|
||||
private void ParseHeader(ILogistics logistics, List<WaferSummaryInfo> dcnTotals, List<WaferSummaryInfo> dwnTotals, List<WaferSummaryInfo> dnnTotals)
|
||||
{
|
||||
_I = 0;
|
||||
_Data = string.Empty;
|
||||
string summaryReportText = File.ReadAllText(logistics.ReportFullPath);
|
||||
if (!string.IsNullOrEmpty(summaryReportText))
|
||||
{
|
||||
_Log.Debug("HeaderFile() - Beginning");
|
||||
_I = 0;
|
||||
_Data = summaryReportText;
|
||||
ScanPast("Long Wafer Summary");
|
||||
_ = GetToEOL();
|
||||
ScanPast("Session:");
|
||||
string toEOL = GetToEOL(true);
|
||||
string str = toEOL;
|
||||
Recipe = toEOL;
|
||||
Session = str;
|
||||
ScanPast("Lot ID:");
|
||||
Lot = GetToEOL(true);
|
||||
// Remove illegal characters \/:*?"<>| found in the Lot.
|
||||
Lot = Regex.Replace(Lot, @"[\\,\/,\:,\*,\?,\"",\<,\>,\|]", "_").Split('\r')[0].Split('\n')[0];
|
||||
string[] segments = Lot.Split(new char[] { '-' });
|
||||
_Log.Debug("HeaderFile() - Debug A");
|
||||
if (segments.Length > 1)
|
||||
{
|
||||
Reactor = segments[0];
|
||||
RDS = segments[1];
|
||||
if (segments.Length > 2)
|
||||
{
|
||||
PSN = segments[2];
|
||||
if (segments.Length > 3)
|
||||
Operator = segments[3];
|
||||
}
|
||||
}
|
||||
_Log.Debug("HeaderFile() - Debug B");
|
||||
_I = 0;
|
||||
_Data = summaryReportText;
|
||||
GetWaferSummaryInfo(dcnTotals, "DCN Totals");
|
||||
ScanPast("Min");
|
||||
segments = GetToEOL().Split(new char[] { ' ' }, StringSplitOptions.RemoveEmptyEntries);
|
||||
DcnAllMin = segments[0];
|
||||
DcnLpdMin = segments[1];
|
||||
DcnLpdNMin = segments[2];
|
||||
DcnLpdESMin = segments[3];
|
||||
DcnMicroScrMin = segments[4];
|
||||
DcnScrMin = segments[5];
|
||||
DcnSlipMin = segments[6];
|
||||
DcnAreaCountMin = segments[7];
|
||||
DcnAreaMin = segments[8];
|
||||
DcnHazeAvgMin = segments[9];
|
||||
DcnHazeMedianMin = segments[10];
|
||||
DcnHazeStdDevMin = segments[11];
|
||||
DcnBin1Min = segments[12];
|
||||
DcnBin2Min = segments[13];
|
||||
DcnBin3Min = segments[14];
|
||||
DcnBin4Min = segments[15];
|
||||
DcnBin5Min = segments[16];
|
||||
DcnBin6Min = segments[17];
|
||||
DcnBin7Min = segments[18];
|
||||
DcnBin8Min = segments[19];
|
||||
ScanPast("Max");
|
||||
segments = GetToEOL().Split(new char[] { ' ' }, StringSplitOptions.RemoveEmptyEntries);
|
||||
DcnAllMax = segments[0];
|
||||
DcnLpdMax = segments[1];
|
||||
DcnLpdNMax = segments[2];
|
||||
DcnLpdESMax = segments[3];
|
||||
DcnMicroScrMax = segments[4];
|
||||
DcnScrMax = segments[5];
|
||||
DcnSlipMax = segments[6];
|
||||
DcnAreaCountMax = segments[7];
|
||||
DcnAreaMax = segments[8];
|
||||
DcnHazeAvgMax = segments[9];
|
||||
DcnHazeMedianMax = segments[10];
|
||||
DcnHazeStdDevMax = segments[11];
|
||||
DcnBin1Max = segments[12];
|
||||
DcnBin2Max = segments[13];
|
||||
DcnBin3Max = segments[14];
|
||||
DcnBin4Max = segments[15];
|
||||
DcnBin5Max = segments[16];
|
||||
DcnBin6Max = segments[17];
|
||||
DcnBin7Max = segments[18];
|
||||
DcnBin8Max = segments[19];
|
||||
ScanPast("Mean");
|
||||
segments = GetToEOL().Split(new char[] { ' ' }, StringSplitOptions.RemoveEmptyEntries);
|
||||
DcnAllMean = segments[0];
|
||||
DcnLpdMean = segments[1];
|
||||
DcnLpdNMean = segments[2];
|
||||
DcnLpdESMean = segments[3];
|
||||
DcnMicroScrMean = segments[4];
|
||||
DcnScrMean = segments[5];
|
||||
DcnSlipMean = segments[6];
|
||||
DcnAreaCountMean = segments[7];
|
||||
DcnAreaMean = segments[8];
|
||||
DcnHazeAvgMean = segments[9];
|
||||
DcnHazeMedianMean = segments[10];
|
||||
DcnHazeStdDevMean = segments[11];
|
||||
DcnBin1Mean = segments[12];
|
||||
DcnBin2Mean = segments[13];
|
||||
DcnBin3Mean = segments[14];
|
||||
DcnBin4Mean = segments[15];
|
||||
DcnBin5Mean = segments[16];
|
||||
DcnBin6Mean = segments[17];
|
||||
DcnBin7Mean = segments[18];
|
||||
DcnBin8Mean = segments[19];
|
||||
ScanPast("Std. Dev.");
|
||||
segments = GetToEOL().Split(new char[] { ' ' }, StringSplitOptions.RemoveEmptyEntries);
|
||||
DcnAllStdDev = segments[0];
|
||||
DcnLpdStdDev = segments[1];
|
||||
DcnLpdNStdDev = segments[2];
|
||||
DcnLpdESStdDev = segments[3];
|
||||
DcnMicroScrStdDev = segments[4];
|
||||
DcnScrStdDev = segments[5];
|
||||
DcnSlipStdDev = segments[6];
|
||||
DcnAreaCountStdDev = segments[7];
|
||||
DcnAreaStdDev = segments[8];
|
||||
DcnHazeAvgStdDev = segments[9];
|
||||
DcnHazeMedianStdDev = segments[10];
|
||||
DcnHazeStdDevStdDev = segments[11];
|
||||
DcnBin1StdDev = segments[12];
|
||||
DcnBin2StdDev = segments[13];
|
||||
DcnBin3StdDev = segments[14];
|
||||
DcnBin4StdDev = segments[15];
|
||||
DcnBin5StdDev = segments[16];
|
||||
DcnBin6StdDev = segments[17];
|
||||
DcnBin7StdDev = segments[18];
|
||||
DcnBin8StdDev = segments[19];
|
||||
_I = 0;
|
||||
_Data = summaryReportText;
|
||||
_Log.Debug("HeaderFile() - Debug C");
|
||||
if (!_Data.Contains("DWN Totals"))
|
||||
{
|
||||
for (int i = 0; i < dcnTotals.Count; i++)
|
||||
dwnTotals.Add(new WaferSummaryInfo());
|
||||
}
|
||||
else
|
||||
{
|
||||
GetWaferSummaryInfo(dwnTotals, "DWN Totals");
|
||||
ScanPast("Min");
|
||||
segments = GetToEOL().Split(new char[] { ' ' }, StringSplitOptions.RemoveEmptyEntries);
|
||||
DwnAllMin = segments[0];
|
||||
DwnLpdMin = segments[1];
|
||||
DwnLpdNMin = segments[2];
|
||||
DwnLpdESMin = segments[3];
|
||||
DwnMicroScrMin = segments[4];
|
||||
DwnScrMin = segments[5];
|
||||
DwnSlipMin = segments[6];
|
||||
DwnAreaCountMin = segments[7];
|
||||
DwnAreaMin = segments[8];
|
||||
DwnHazeAvgMin = segments[9];
|
||||
DwnHazeMedianMin = segments[10];
|
||||
DwnHazeStdDevMin = segments[11];
|
||||
DwnBin1Min = segments[12];
|
||||
DwnBin2Min = segments[13];
|
||||
DwnBin3Min = segments[14];
|
||||
DwnBin4Min = segments[15];
|
||||
DwnBin5Min = segments[16];
|
||||
DwnBin6Min = segments[17];
|
||||
DwnBin7Min = segments[18];
|
||||
DwnBin8Min = segments[19];
|
||||
ScanPast("Max");
|
||||
segments = GetToEOL().Split(new char[] { ' ' }, StringSplitOptions.RemoveEmptyEntries);
|
||||
DwnAllMax = segments[0];
|
||||
DwnLpdMax = segments[1];
|
||||
DwnLpdNMax = segments[2];
|
||||
DwnLpdESMax = segments[3];
|
||||
DwnMicroScrMax = segments[4];
|
||||
DwnScrMax = segments[5];
|
||||
DwnSlipMax = segments[6];
|
||||
DwnAreaCountMax = segments[7];
|
||||
DwnAreaMax = segments[8];
|
||||
DwnHazeAvgMax = segments[9];
|
||||
DwnHazeMedianMax = segments[10];
|
||||
DwnHazeStdDevMax = segments[11];
|
||||
DwnBin1Max = segments[12];
|
||||
DwnBin2Max = segments[13];
|
||||
DwnBin3Max = segments[14];
|
||||
DwnBin4Max = segments[15];
|
||||
DwnBin5Max = segments[16];
|
||||
DwnBin6Max = segments[17];
|
||||
DwnBin7Max = segments[18];
|
||||
DwnBin8Max = segments[19];
|
||||
ScanPast("Mean");
|
||||
segments = GetToEOL().Split(new char[] { ' ' }, StringSplitOptions.RemoveEmptyEntries);
|
||||
DwnAllMean = segments[0];
|
||||
DwnLpdMean = segments[1];
|
||||
DwnLpdNMean = segments[2];
|
||||
DwnLpdESMean = segments[3];
|
||||
DwnMicroScrMean = segments[4];
|
||||
DwnScrMean = segments[5];
|
||||
DwnSlipMean = segments[6];
|
||||
DwnAreaCountMean = segments[7];
|
||||
DwnAreaMean = segments[8];
|
||||
DwnHazeAvgMean = segments[9];
|
||||
DwnHazeMedianMean = segments[10];
|
||||
DwnHazeStdDevMean = segments[11];
|
||||
DwnBin1Mean = segments[12];
|
||||
DwnBin2Mean = segments[13];
|
||||
DwnBin3Mean = segments[14];
|
||||
DwnBin4Mean = segments[15];
|
||||
DwnBin5Mean = segments[16];
|
||||
DwnBin6Mean = segments[17];
|
||||
DwnBin7Mean = segments[18];
|
||||
DwnBin8Mean = segments[19];
|
||||
ScanPast("Std. Dev.");
|
||||
segments = GetToEOL().Split(new char[] { ' ' }, StringSplitOptions.RemoveEmptyEntries);
|
||||
DwnAllStdDev = segments[0];
|
||||
DwnLpdStdDev = segments[1];
|
||||
DwnLpdNStdDev = segments[2];
|
||||
DwnLpdESStdDev = segments[3];
|
||||
DwnMicroScrStdDev = segments[4];
|
||||
DwnScrStdDev = segments[5];
|
||||
DwnSlipStdDev = segments[6];
|
||||
DwnAreaCountStdDev = segments[7];
|
||||
DwnAreaStdDev = segments[8];
|
||||
DwnHazeAvgStdDev = segments[9];
|
||||
DwnHazeMedianStdDev = segments[10];
|
||||
DwnHazeStdDevStdDev = segments[11];
|
||||
DwnBin1StdDev = segments[12];
|
||||
DwnBin2StdDev = segments[13];
|
||||
DwnBin3StdDev = segments[14];
|
||||
DwnBin4StdDev = segments[15];
|
||||
DwnBin5StdDev = segments[16];
|
||||
DwnBin6StdDev = segments[17];
|
||||
DwnBin7StdDev = segments[18];
|
||||
DwnBin8StdDev = segments[19];
|
||||
}
|
||||
_I = 0;
|
||||
_Data = summaryReportText;
|
||||
_Log.Debug("HeaderFile() - Debug D");
|
||||
if (!_Data.Contains("DNN Totals"))
|
||||
{
|
||||
for (int i = 0; i < dcnTotals.Count; i++)
|
||||
dnnTotals.Add(new WaferSummaryInfo());
|
||||
}
|
||||
else
|
||||
{
|
||||
GetWaferSummaryInfo(dnnTotals, "DNN Totals");
|
||||
ScanPast("Min");
|
||||
segments = GetToEOL().Split(new char[] { ' ' }, StringSplitOptions.RemoveEmptyEntries);
|
||||
DnnAllMin = segments[0];
|
||||
DnnLpdMin = segments[1];
|
||||
DnnLpdNMin = segments[2];
|
||||
DnnLpdESMin = segments[3];
|
||||
DnnMicroScrMin = segments[4];
|
||||
DnnScrMin = segments[5];
|
||||
DnnSlipMin = segments[6];
|
||||
DnnAreaCountMin = segments[7];
|
||||
DnnAreaMin = segments[8];
|
||||
DnnHazeAvgMin = segments[9];
|
||||
DnnHazeMedianMin = segments[10];
|
||||
DnnHazeStdDevMin = segments[11];
|
||||
DnnBin1Min = segments[12];
|
||||
DnnBin2Min = segments[13];
|
||||
DnnBin3Min = segments[14];
|
||||
DnnBin4Min = segments[15];
|
||||
DnnBin5Min = segments[16];
|
||||
DnnBin6Min = segments[17];
|
||||
DnnBin7Min = segments[18];
|
||||
DnnBin8Min = segments[19];
|
||||
ScanPast("Max");
|
||||
segments = GetToEOL().Split(new char[] { ' ' }, StringSplitOptions.RemoveEmptyEntries);
|
||||
DnnAllMax = segments[0];
|
||||
DnnLpdMax = segments[1];
|
||||
DnnLpdNMax = segments[2];
|
||||
DnnLpdESMax = segments[3];
|
||||
DnnMicroScrMax = segments[4];
|
||||
DnnScrMax = segments[5];
|
||||
DnnSlipMax = segments[6];
|
||||
DnnAreaCountMax = segments[7];
|
||||
DnnAreaMax = segments[8];
|
||||
DnnHazeAvgMax = segments[9];
|
||||
DnnHazeMedianMax = segments[10];
|
||||
DnnHazeStdDevMax = segments[11];
|
||||
DnnBin1Max = segments[12];
|
||||
DnnBin2Max = segments[13];
|
||||
DnnBin3Max = segments[14];
|
||||
DnnBin4Max = segments[15];
|
||||
DnnBin5Max = segments[16];
|
||||
DnnBin6Max = segments[17];
|
||||
DnnBin7Max = segments[18];
|
||||
DnnBin8Max = segments[19];
|
||||
ScanPast("Mean");
|
||||
segments = GetToEOL().Split(new char[] { ' ' }, StringSplitOptions.RemoveEmptyEntries);
|
||||
DnnAllMean = segments[0];
|
||||
DnnLpdMean = segments[1];
|
||||
DnnLpdNMean = segments[2];
|
||||
DnnLpdESMean = segments[3];
|
||||
DnnMicroScrMean = segments[4];
|
||||
DnnScrMean = segments[5];
|
||||
DnnSlipMean = segments[6];
|
||||
DnnAreaCountMean = segments[7];
|
||||
DnnAreaMean = segments[8];
|
||||
DnnHazeAvgMean = segments[9];
|
||||
DnnHazeMedianMean = segments[10];
|
||||
DnnHazeStdDevMean = segments[11];
|
||||
DnnBin1Mean = segments[12];
|
||||
DnnBin2Mean = segments[13];
|
||||
DnnBin3Mean = segments[14];
|
||||
DnnBin4Mean = segments[15];
|
||||
DnnBin5Mean = segments[16];
|
||||
DnnBin6Mean = segments[17];
|
||||
DnnBin7Mean = segments[18];
|
||||
DnnBin8Mean = segments[19];
|
||||
ScanPast("Std. Dev.");
|
||||
segments = GetToEOL().Split(new char[] { ' ' }, StringSplitOptions.RemoveEmptyEntries);
|
||||
DnnAllStdDev = segments[0];
|
||||
DnnLpdStdDev = segments[1];
|
||||
DnnLpdNStdDev = segments[2];
|
||||
DnnLpdESStdDev = segments[3];
|
||||
DnnMicroScrStdDev = segments[4];
|
||||
DnnScrStdDev = segments[5];
|
||||
DnnSlipStdDev = segments[6];
|
||||
DnnAreaCountStdDev = segments[7];
|
||||
DnnAreaStdDev = segments[8];
|
||||
DnnHazeAvgStdDev = segments[9];
|
||||
DnnHazeMedianStdDev = segments[10];
|
||||
DnnHazeStdDevStdDev = segments[11];
|
||||
DnnBin1StdDev = segments[12];
|
||||
DnnBin2StdDev = segments[13];
|
||||
DnnBin3StdDev = segments[14];
|
||||
DnnBin4StdDev = segments[15];
|
||||
DnnBin5StdDev = segments[16];
|
||||
DnnBin6StdDev = segments[17];
|
||||
DnnBin7StdDev = segments[18];
|
||||
DnnBin8StdDev = segments[19];
|
||||
}
|
||||
}
|
||||
//UniqueID = string.Format("{0}_{1}_Summary_{2}", logistics.JobID, Lot, Date);
|
||||
UniqueID = string.Format("{0}_{1}_{2}", logistics.JobID, Lot, Path.GetFileNameWithoutExtension(logistics.ReportFullPath));
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Parse the wafer summary data
|
||||
/// </summary>
|
||||
/// <param name="headerFile">wafer data</param>
|
||||
/// <param name="i">wafer index</param>
|
||||
/// <returns></returns>
|
||||
private Detail ParseWaferSummary(int i, List<WaferSummaryInfo> dcnTotals, List<WaferSummaryInfo> dwnTotals, List<WaferSummaryInfo> dnnTotals)
|
||||
{
|
||||
Detail result = new()
|
||||
{
|
||||
// NOTE: get the UniqueID during DataCalculation in order to retrieve the SPCToolID from the cell component Tag field.
|
||||
// id
|
||||
//runData.UniqueID = string.Format("{0}_{1}", sp1Header.UniqueId, i + 1);
|
||||
//runData.HeaderUniqueID = sp1Header.UniqueId;
|
||||
|
||||
// General Wafer Info
|
||||
Side = dcnTotals[i].Side,
|
||||
WaferID = dcnTotals[i].WaferID,
|
||||
Grade = dcnTotals[i].Grade,
|
||||
SrcDest = dcnTotals[i].SrcDest,
|
||||
|
||||
// Dcn Info
|
||||
DcnAll = dcnTotals[i].All ?? string.Empty,
|
||||
DcnLpd = dcnTotals[i].Lpd ?? string.Empty,
|
||||
DcnLpdN = dcnTotals[i].LpdN ?? string.Empty,
|
||||
DcnLpdES = dcnTotals[i].LpdES ?? string.Empty,
|
||||
DcnMicroScr = dcnTotals[i].MicroScr ?? string.Empty,
|
||||
DcnScr = dcnTotals[i].Scr ?? string.Empty,
|
||||
DcnSlip = dcnTotals[i].Slip ?? string.Empty,
|
||||
DcnAreaCount = dcnTotals[i].AreaNum ?? string.Empty,
|
||||
DcnArea = dcnTotals[i].Area ?? string.Empty,
|
||||
DcnHazeAvg = dcnTotals[i].HazeAvg ?? string.Empty,
|
||||
DcnHazeMedian = dcnTotals[i].HazeMedian ?? string.Empty,
|
||||
DcnHazeStdDev = dcnTotals[i].HazeStdDev ?? string.Empty,
|
||||
DcnBin1 = dcnTotals[i].Bin1 ?? string.Empty,
|
||||
DcnBin2 = dcnTotals[i].Bin2 ?? string.Empty,
|
||||
DcnBin3 = dcnTotals[i].Bin3 ?? string.Empty,
|
||||
DcnBin4 = dcnTotals[i].Bin4 ?? string.Empty,
|
||||
DcnBin5 = dcnTotals[i].Bin5 ?? string.Empty,
|
||||
DcnBin6 = dcnTotals[i].Bin6 ?? string.Empty,
|
||||
DcnBin7 = dcnTotals[i].Bin7 ?? string.Empty,
|
||||
DcnBin8 = dcnTotals[i].Bin8 ?? string.Empty,
|
||||
|
||||
// Dwn Info
|
||||
DwnAll = dwnTotals[i].All ?? string.Empty,
|
||||
DwnLpd = dwnTotals[i].Lpd ?? string.Empty,
|
||||
DwnLpdN = dwnTotals[i].LpdN ?? string.Empty,
|
||||
DwnLpdES = dwnTotals[i].LpdES ?? string.Empty,
|
||||
DwnMicroScr = dwnTotals[i].MicroScr ?? string.Empty,
|
||||
DwnScr = dwnTotals[i].Scr ?? string.Empty,
|
||||
DwnSlip = dwnTotals[i].Slip ?? string.Empty,
|
||||
DwnAreaCount = dwnTotals[i].AreaNum ?? string.Empty,
|
||||
DwnArea = dwnTotals[i].Area ?? string.Empty,
|
||||
DwnHazeAvg = dwnTotals[i].HazeAvg ?? string.Empty,
|
||||
DwnHazeMedian = dwnTotals[i].HazeMedian ?? string.Empty,
|
||||
DwnHazeStdDev = dwnTotals[i].HazeStdDev ?? string.Empty,
|
||||
DwnBin1 = dwnTotals[i].Bin1 ?? string.Empty,
|
||||
DwnBin2 = dwnTotals[i].Bin2 ?? string.Empty,
|
||||
DwnBin3 = dwnTotals[i].Bin3 ?? string.Empty,
|
||||
DwnBin4 = dwnTotals[i].Bin4 ?? string.Empty,
|
||||
DwnBin5 = dwnTotals[i].Bin5 ?? string.Empty,
|
||||
DwnBin6 = dwnTotals[i].Bin6 ?? string.Empty,
|
||||
DwnBin7 = dwnTotals[i].Bin7 ?? string.Empty,
|
||||
DwnBin8 = dwnTotals[i].Bin8 ?? string.Empty,
|
||||
|
||||
// Dnn Info
|
||||
DnnAll = dnnTotals[i].All ?? string.Empty,
|
||||
DnnLpd = dnnTotals[i].Lpd ?? string.Empty,
|
||||
DnnLpdN = dnnTotals[i].LpdN ?? string.Empty,
|
||||
DnnLpdES = dnnTotals[i].LpdES ?? string.Empty,
|
||||
DnnMicroScr = dnnTotals[i].MicroScr ?? string.Empty,
|
||||
DnnScr = dnnTotals[i].Scr ?? string.Empty,
|
||||
DnnSlip = dnnTotals[i].Slip ?? string.Empty,
|
||||
DnnAreaCount = dnnTotals[i].AreaNum ?? string.Empty,
|
||||
DnnArea = dnnTotals[i].Area ?? string.Empty,
|
||||
DnnHazeAvg = dnnTotals[i].HazeAvg ?? string.Empty,
|
||||
DnnHazeMedian = dnnTotals[i].HazeMedian ?? string.Empty,
|
||||
DnnHazeStdDev = dnnTotals[i].HazeStdDev ?? string.Empty,
|
||||
DnnBin1 = dnnTotals[i].Bin1 ?? string.Empty,
|
||||
DnnBin2 = dnnTotals[i].Bin2 ?? string.Empty,
|
||||
DnnBin3 = dnnTotals[i].Bin3 ?? string.Empty,
|
||||
DnnBin4 = dnnTotals[i].Bin4 ?? string.Empty,
|
||||
DnnBin5 = dnnTotals[i].Bin5 ?? string.Empty,
|
||||
DnnBin6 = dnnTotals[i].Bin6 ?? string.Empty,
|
||||
DnnBin7 = dnnTotals[i].Bin7 ?? string.Empty,
|
||||
DnnBin8 = dnnTotals[i].Bin8 ?? string.Empty
|
||||
};
|
||||
//result.HeaderUniqueID = string.Concat(MesEntity, "_", Lot, "_Summary_", Date);
|
||||
result.HeaderUniqueID = UniqueID;
|
||||
result.UniqueID = string.Concat(result.HeaderUniqueID, "_", result.WaferID.PadLeft(2, '0'));
|
||||
return result;
|
||||
}
|
||||
|
||||
private void Parse(IFileRead fileRead, Logistics logistics, List<FileInfo> fileInfoCollection)
|
||||
{
|
||||
if (fileRead is null)
|
||||
{ }
|
||||
Detail dataFile;
|
||||
foreach (string file in Directory.GetFiles(Path.GetDirectoryName(logistics.ReportFullPath), "WaferMap*.prn", SearchOption.TopDirectoryOnly))
|
||||
fileInfoCollection.Add(new FileInfo(file));
|
||||
List<WaferSummaryInfo> dcnTotals = new();
|
||||
List<WaferSummaryInfo> dwnTotals = new();
|
||||
List<WaferSummaryInfo> dnnTotals = new();
|
||||
ParseHeader(logistics, dcnTotals, dwnTotals, dnnTotals);
|
||||
_Log.Debug($"Number of wafers: {dcnTotals.Count}");
|
||||
for (int i = 0; i < dcnTotals.Count; i++)
|
||||
{
|
||||
_Log.Debug($"****ParseData - Parsing wafer summary: {i}");
|
||||
dataFile = ParseWaferSummary(i, dcnTotals, dwnTotals, dnnTotals);
|
||||
_Details.Add(dataFile);
|
||||
}
|
||||
fileInfoCollection.Add(new FileInfo(logistics.ReportFullPath));
|
||||
}
|
||||
|
||||
}
|
31
Adaptation/FileHandlers/txt/WaferSummaryInfo.cs
Normal file
31
Adaptation/FileHandlers/txt/WaferSummaryInfo.cs
Normal file
@ -0,0 +1,31 @@
|
||||
namespace Adaptation.FileHandlers.txt;
|
||||
|
||||
public class WaferSummaryInfo
|
||||
{
|
||||
|
||||
public string All { get; set; }
|
||||
public string Area { get; set; }
|
||||
public string AreaNum { get; set; }
|
||||
public string Bin1 { get; set; }
|
||||
public string Bin2 { get; set; }
|
||||
public string Bin3 { get; set; }
|
||||
public string Bin4 { get; set; }
|
||||
public string Bin5 { get; set; }
|
||||
public string Bin6 { get; set; }
|
||||
public string Bin7 { get; set; }
|
||||
public string Bin8 { get; set; }
|
||||
public string Grade { get; set; }
|
||||
public string HazeAvg { get; set; }
|
||||
public string HazeMedian { get; set; }
|
||||
public string HazeStdDev { get; set; }
|
||||
public string Lpd { get; set; }
|
||||
public string LpdES { get; set; }
|
||||
public string LpdN { get; set; }
|
||||
public string MicroScr { get; set; }
|
||||
public string Scr { get; set; }
|
||||
public string Side { get; set; }
|
||||
public string Slip { get; set; }
|
||||
public string SrcDest { get; set; }
|
||||
public string WaferID { get; set; }
|
||||
|
||||
}
|
Reference in New Issue
Block a user