Ready to test v2.39.0
This commit is contained in:
141
Adaptation/FileHandlers/Archive/FileRead.cs
Normal file
141
Adaptation/FileHandlers/Archive/FileRead.cs
Normal file
@ -0,0 +1,141 @@
|
||||
using Adaptation.Eaf.Management.ConfigurationData.CellAutomation;
|
||||
using Adaptation.Ifx.Eaf.EquipmentConnector.File.Configuration;
|
||||
using Adaptation.Shared;
|
||||
using Adaptation.Shared.Duplicator;
|
||||
using Adaptation.Shared.Methods;
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Globalization;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Text.Json;
|
||||
|
||||
namespace Adaptation.FileHandlers.Archive;
|
||||
|
||||
public class FileRead : Shared.FileRead, IFileRead
{

    /// <summary>
    /// Duplicator handler for the Archive connection: after the standard extract it moves the
    /// matched run directory into a week-partitioned archive tree that lives beside the
    /// configured target file location.
    /// </summary>
    /// <exception cref="Exception">
    /// Thrown from the constructor when the base wiring is incomplete or the connection is not a duplicator.
    /// </exception>
    public FileRead(ISMTP smtp, Dictionary<string, string> fileParameter, string cellInstanceName, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList<ModelObjectParameterDefinition> modelObjectParameters, string equipmentDictionaryName, Dictionary<string, List<long>> dummyRuns, bool useCyclicalForDescription, bool isEAFHosted) :
        base(new Description(), false, smtp, fileParameter, cellInstanceName, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, useCyclicalForDescription, isEAFHosted)
    {
        _MinFileLength = 10;
        _NullData = string.Empty;
        _Logistics = new Logistics(this);
        // Fail fast when the base class was not wired up as expected for a duplicator connection.
        if (_FileParameter is null)
            throw new Exception(cellInstanceConnectionName);
        if (_ModelObjectParameterDefinitions is null)
            throw new Exception(cellInstanceConnectionName);
        if (!_IsDuplicator)
            throw new Exception(cellInstanceConnectionName);
    }

    void IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, Exception exception) => Move(extractResults, exception);

    void IFileRead.WaitForThread() => WaitForThread(thread: null, threadExceptions: null);

    string IFileRead.GetEventDescription()
    {
        string result = _Description.GetEventDescription();
        return result;
    }

    List<string> IFileRead.GetHeaderNames()
    {
        List<string> results = _Description.GetHeaderNames();
        return results;
    }

    string[] IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, string to, string from, string resolvedFileLocation, Exception exception)
    {
        string[] results = Move(extractResults, to, from, resolvedFileLocation, exception);
        return results;
    }

    JsonProperty[] IFileRead.GetDefault()
    {
        JsonProperty[] results = _Description.GetDefault(this, _Logistics);
        return results;
    }

    Dictionary<string, string> IFileRead.GetDisplayNamesJsonElement()
    {
        Dictionary<string, string> results = _Description.GetDisplayNamesJsonElement(this);
        return results;
    }

    List<IDescription> IFileRead.GetDescriptions(IFileRead fileRead, List<Test> tests, IProcessData processData)
    {
        List<IDescription> results = _Description.GetDescriptions(fileRead, _Logistics, tests, processData);
        return results;
    }

    Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.GetExtractResult(string reportFullPath, string eventName)
    {
        Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
        if (string.IsNullOrEmpty(eventName))
            throw new Exception();
        _ReportFullPath = reportFullPath;
        DateTime dateTime = DateTime.Now;
        results = GetExtractResult(reportFullPath, dateTime);
        // Normalize a null JSON payload to an empty array so downstream consumers never see null.
        if (results.Item3 is null)
            results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(results.Item1, Array.Empty<Test>(), JsonSerializer.Deserialize<JsonElement[]>("[]"), results.Item4);
        if (results.Item3.Length > 0 && _IsEAFHosted)
            WritePDSF(this, results.Item3);
        UpdateLastTicksDuration(DateTime.Now.Ticks - dateTime.Ticks);
        return results;
    }

    Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.ReExtract()
    {
        Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
        List<string> headerNames = _Description.GetHeaderNames();
        Dictionary<string, string> keyValuePairs = _Description.GetDisplayNamesJsonElement(this);
        results = ReExtract(this, headerNames, keyValuePairs);
        return results;
    }

    void IFileRead.CheckTests(Test[] tests, bool extra)
    {
        if (_Description is not Description)
            throw new Exception();
    }

    void IFileRead.Callback(object state) => throw new Exception(string.Concat("Not ", nameof(_IsDuplicator)));

    /// <summary>
    /// Moves the run directory matched by the current logistics sequence from the job-id tree into
    /// &lt;target parent&gt;\Archive\&lt;JobID&gt;\&lt;year&gt;_Week_&lt;ww&gt;\&lt;yyyy-MM-dd&gt;\&lt;run folder&gt;.
    /// </summary>
    /// <param name="dateTime">Accepted for signature parity with the extract pipeline; not used here.</param>
    /// <exception cref="Exception">No directory matching the logistics sequence was found.</exception>
    private void MoveArchive(DateTime dateTime)
    {
        // Intentional no-op: keeps the unused-parameter analyzer quiet without changing the signature.
        if (dateTime == DateTime.MinValue)
        { }
        string logisticsSequence = _Logistics.Sequence.ToString();
        string weekOfYear = _Calendar.GetWeekOfYear(_Logistics.DateTimeFromSequence, CalendarWeekRule.FirstDay, DayOfWeek.Sunday).ToString("00");
        string weekDirectory = string.Concat(_Logistics.DateTimeFromSequence.ToString("yyyy"), "_Week_", weekOfYear, @"\", _Logistics.DateTimeFromSequence.ToString("yyyy-MM-dd"));
        string jobIdDirectory = string.Concat(_FileConnectorConfiguration.TargetFileLocation, @"\", _Logistics.JobID);
        if (!Directory.Exists(jobIdDirectory))
            _ = Directory.CreateDirectory(jobIdDirectory);
        string destinationArchiveDirectory = string.Concat(Path.GetDirectoryName(_FileConnectorConfiguration.TargetFileLocation), @"\Archive\", _Logistics.JobID, @"\", weekDirectory);
        if (!Directory.Exists(destinationArchiveDirectory))
            _ = Directory.CreateDirectory(destinationArchiveDirectory);
        // BUG FIX: the original wrapped FirstOrDefault() in a new single-element array and then
        // tested `matchDirectories is null`, which is never true for a freshly constructed array.
        // A missing match therefore slipped past the guard and failed later inside
        // Path.GetDirectoryName(null). Check the matched element itself instead.
        string matchDirectory = GetDirectoriesRecursively(jobIdDirectory, logisticsSequence).FirstOrDefault();
        if (matchDirectory is null)
            throw new Exception("Didn't find directory by logistics sequence");
        string sourceDirectory = Path.GetDirectoryName(matchDirectory);
        destinationArchiveDirectory = string.Concat(destinationArchiveDirectory, @"\", Path.GetFileName(sourceDirectory));
        Directory.Move(sourceDirectory, destinationArchiveDirectory);
    }

    /// <summary>
    /// Standard duplicator extract: parses the PDSF report, rebuilds logistics, archives the matched
    /// run directory, and returns the header/tests/JSON tuple.
    /// </summary>
    private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
    {
        Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
        Tuple<string, string[], string[]> pdsf = ProcessDataStandardFormat.GetLogisticsColumnsAndBody(reportFullPath);
        _Logistics = new Logistics(reportFullPath, pdsf.Item1);
        SetFileParameterLotIDToLogisticsMID();
        JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(pdsf);
        List<Shared.Properties.IDescription> descriptions = GetDuplicatorDescriptions(jsonElements);
        Tuple<Test[], Dictionary<Test, List<Shared.Properties.IDescription>>> tuple = GetTuple(this, descriptions, extra: false);
        MoveArchive(dateTime);
        results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(pdsf.Item1, tuple.Item1, jsonElements, new List<FileInfo>());
        return results;
    }

}
|
@ -4,33 +4,38 @@ using Adaptation.Shared.Methods;
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
|
||||
namespace Adaptation.FileHandlers
|
||||
namespace Adaptation.FileHandlers;
|
||||
|
||||
public class CellInstanceConnectionName
|
||||
{
|
||||
|
||||
public class CellInstanceConnectionName
|
||||
internal static IFileRead Get(ISMTP smtp, Dictionary<string, string> fileParameter, string cellInstanceName, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList<ModelObjectParameterDefinition> modelObjectParameters, string equipmentDictionaryName, Dictionary<string, List<long>> dummyRuns, bool useCyclicalForDescription, bool isEAFHosted)
|
||||
{
|
||||
|
||||
internal static IFileRead Get(ISMTP smtp, Dictionary<string, string> fileParameter, string cellInstanceName, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList<ModelObjectParameterDefinition> modelObjectParameters, string equipmentDictionaryName, Dictionary<string, List<long>> dummyRuns, bool useCyclicalForDescription, bool isEAFHosted)
|
||||
IFileRead result;
|
||||
bool isDuplicator = cellInstanceConnectionName.StartsWith(cellInstanceName);
|
||||
if (isDuplicator)
|
||||
{
|
||||
IFileRead result;
|
||||
int levelIsArchive = 7;
|
||||
int levelIsXToArchive = 6;
|
||||
bool isDuplicator = cellInstanceConnectionName.StartsWith(cellInstanceName);
|
||||
if (isDuplicator)
|
||||
result = new MET08THFTIRQS408M.FileRead(smtp, fileParameter, cellInstanceName, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, useCyclicalForDescription, isEAFHosted, levelIsXToArchive, levelIsArchive);
|
||||
else
|
||||
string cellInstanceConnectionNameBase = cellInstanceConnectionName.Replace("-", string.Empty);
|
||||
int hyphens = cellInstanceConnectionName.Length - cellInstanceConnectionNameBase.Length;
|
||||
result = hyphens switch
|
||||
{
|
||||
result = cellInstanceConnectionName switch
|
||||
{
|
||||
nameof(txt) => new txt.FileRead(smtp, fileParameter, cellInstanceName, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, useCyclicalForDescription, isEAFHosted, levelIsXToArchive, levelIsArchive),
|
||||
nameof(Stratus) => new Stratus.FileRead(smtp, fileParameter, cellInstanceName, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, useCyclicalForDescription, isEAFHosted, levelIsXToArchive, levelIsArchive),
|
||||
//"QS408M" => new Stratus.FileRead(smtp, fileParameter, cellInstanceName, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, useCyclicalForDescription, isEAFHosted, levelIsXToArchive, levelIsArchive),
|
||||
_ => throw new Exception(),
|
||||
};
|
||||
}
|
||||
return result;
|
||||
(int)MET08THFTIRQS408M.Hyphen.IsArchive => new Archive.FileRead(smtp, fileParameter, cellInstanceName, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, useCyclicalForDescription, isEAFHosted),
|
||||
(int)MET08THFTIRQS408M.Hyphen.IsDummy => new Dummy.FileRead(smtp, fileParameter, cellInstanceName, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, useCyclicalForDescription, isEAFHosted),
|
||||
(int)MET08THFTIRQS408M.Hyphen.IsXToArchive => new ToArchive.FileRead(smtp, fileParameter, cellInstanceName, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, useCyclicalForDescription, isEAFHosted),
|
||||
_ => new MET08THFTIRQS408M.FileRead(smtp, fileParameter, cellInstanceName, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, useCyclicalForDescription, isEAFHosted)
|
||||
};
|
||||
}
|
||||
|
||||
else
|
||||
{
|
||||
result = cellInstanceConnectionName switch
|
||||
{
|
||||
nameof(txt) => new txt.FileRead(smtp, fileParameter, cellInstanceName, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, useCyclicalForDescription, isEAFHosted),
|
||||
nameof(Stratus) => new Stratus.FileRead(smtp, fileParameter, cellInstanceName, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, useCyclicalForDescription, isEAFHosted),
|
||||
//"QS408M" => new Stratus.FileRead(smtp, fileParameter, cellInstanceName, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, useCyclicalForDescription, isEAFHosted),
|
||||
_ => throw new Exception(),
|
||||
};
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
}
|
308
Adaptation/FileHandlers/Dummy/FileRead.cs
Normal file
308
Adaptation/FileHandlers/Dummy/FileRead.cs
Normal file
@ -0,0 +1,308 @@
|
||||
using Adaptation.Eaf.Management.ConfigurationData.CellAutomation;
|
||||
using Adaptation.Ifx.Eaf.EquipmentConnector.File.Configuration;
|
||||
using Adaptation.Shared;
|
||||
using Adaptation.Shared.Duplicator;
|
||||
using Adaptation.Shared.Methods;
|
||||
using Infineon.Monitoring.MonA;
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Diagnostics;
|
||||
using System.Globalization;
|
||||
using System.IO;
|
||||
using System.IO.Compression;
|
||||
using System.Linq;
|
||||
using System.Text.Json;
|
||||
using System.Threading;
|
||||
|
||||
namespace Adaptation.FileHandlers.Dummy;
|
||||
|
||||
public class FileRead : Shared.FileRead, IFileRead
{

    // Timer that periodically drives Callback when the connector is not in Process mode.
    private readonly Timer _Timer;
    // Index of the last source-file filter used, so dummy runs rotate round-robin through the filters.
    private int _LastDummyRunIndex;
    // Cell names harvested from the "CellInstance.<name>.Alias" model-object parameters.
    private readonly string[] _CellNames;

    /// <summary>
    /// Duplicator handler that replays archived "dummy" run zip files into a cell's target
    /// directory, reporting progress to MonIn and a per-week trace file.
    /// </summary>
    /// <exception cref="Exception">
    /// Thrown from the constructor when the base wiring is incomplete or the connection is not a duplicator.
    /// </exception>
    public FileRead(ISMTP smtp, Dictionary<string, string> fileParameter, string cellInstanceName, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList<ModelObjectParameterDefinition> modelObjectParameters, string equipmentDictionaryName, Dictionary<string, List<long>> dummyRuns, bool useCyclicalForDescription, bool isEAFHosted) :
        base(new Description(), false, smtp, fileParameter, cellInstanceName, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, useCyclicalForDescription, isEAFHosted)
    {
        _MinFileLength = 10;
        _NullData = string.Empty;
        _Logistics = new Logistics(this);
        if (_FileParameter is null)
            throw new Exception(cellInstanceConnectionName);
        if (_ModelObjectParameterDefinitions is null)
            throw new Exception(cellInstanceConnectionName);
        if (!_IsDuplicator)
            throw new Exception(cellInstanceConnectionName);
        _LastDummyRunIndex = -1;
        List<string> cellNames = new();
        _Timer = new Timer(Callback, null, Timeout.Infinite, Timeout.Infinite);
        ModelObjectParameterDefinition[] cellInstanceCollection = GetProperties(cellInstanceConnectionName, modelObjectParameters, "CellInstance.", ".Alias");
        foreach (ModelObjectParameterDefinition modelObjectParameterDefinition in cellInstanceCollection)
            cellNames.Add(modelObjectParameterDefinition.Name.Split('.')[1]);
        _CellNames = cellNames.ToArray();
        // Run immediately under a debugger or in Process mode; otherwise arm the timer for one scan interval.
        if (Debugger.IsAttached || fileConnectorConfiguration.PreProcessingMode == FileConnectorConfiguration.PreProcessingModeEnum.Process)
            Callback(null);
        else
        {
            TimeSpan timeSpan = new(DateTime.Now.AddSeconds(_FileConnectorConfiguration.FileScanningIntervalInSeconds.Value).Ticks - DateTime.Now.Ticks);
            _ = _Timer.Change((long)timeSpan.TotalMilliseconds, Timeout.Infinite);
        }
    }

    void IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, Exception exception) => Move(extractResults, exception);

    void IFileRead.WaitForThread() => WaitForThread(thread: null, threadExceptions: null);

    string IFileRead.GetEventDescription()
    {
        string result = _Description.GetEventDescription();
        return result;
    }

    List<string> IFileRead.GetHeaderNames()
    {
        List<string> results = _Description.GetHeaderNames();
        return results;
    }

    string[] IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, string to, string from, string resolvedFileLocation, Exception exception)
    {
        string[] results = Move(extractResults, to, from, resolvedFileLocation, exception);
        return results;
    }

    JsonProperty[] IFileRead.GetDefault()
    {
        JsonProperty[] results = _Description.GetDefault(this, _Logistics);
        return results;
    }

    Dictionary<string, string> IFileRead.GetDisplayNamesJsonElement()
    {
        Dictionary<string, string> results = _Description.GetDisplayNamesJsonElement(this);
        return results;
    }

    List<IDescription> IFileRead.GetDescriptions(IFileRead fileRead, List<Test> tests, IProcessData processData)
    {
        List<IDescription> results = _Description.GetDescriptions(fileRead, _Logistics, tests, processData);
        return results;
    }

    Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.GetExtractResult(string reportFullPath, string eventName) => throw new Exception(string.Concat("See ", nameof(CallbackFileExists)));

    Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.ReExtract() => throw new Exception(string.Concat("See ", nameof(CallbackFileExists)));

    void IFileRead.CheckTests(Test[] tests, bool extra)
    {
        if (_Description is not Description)
            throw new Exception();
    }

    void IFileRead.Callback(object state) => Callback(state);

    /// <summary>
    /// Extracts the dummy archive into <paramref name="inProcessDirectory"/>, stamps each file's
    /// last-write time with <paramref name="sequence"/>, moves the files into
    /// <paramref name="targetFileLocation"/>, and reports Ok/Warning/Critical to MonIn and the trace file.
    /// </summary>
    private void CallbackInProcessCleared(string sourceArchiveFile, string traceDummyFile, string targetFileLocation, string monARessource, string inProcessDirectory, long sequence, bool warning)
    {
        const string site = "sjc";
        string stateName = string.Concat("Dummy_", _EventName);
        const string monInURL = "http://moninhttp.sjc.infineon.com/input/text";
        MonIn monIn = MonIn.GetInstance(monInURL);
        try
        {
            if (warning)
            {
                // A target file already carries this sequence: warn and pause (~5.5s) before replaying.
                File.AppendAllLines(traceDummyFile, new string[] { site, monARessource, stateName, State.Warning.ToString() });
                _ = monIn.SendStatus(site, monARessource, stateName, State.Warning);
                for (int i = 1; i < 12; i++)
                    Thread.Sleep(500);
            }
            ZipFile.ExtractToDirectory(sourceArchiveFile, inProcessDirectory);
            string[] files = Directory.GetFiles(inProcessDirectory, "*", SearchOption.TopDirectoryOnly);
            // Safety net: a dummy archive should never contain this many files.
            if (files.Length > 250)
                throw new Exception("Safety net!");
            // The sequence doubles as the files' last-write ticks so replays are recognizable.
            foreach (string file in files)
                File.SetLastWriteTime(file, new DateTime(sequence));
            if (!_FileConnectorConfiguration.IncludeSubDirectories.Value)
            {
                foreach (string file in files)
                    File.Move(file, Path.Combine(targetFileLocation, Path.GetFileName(file)));
            }
            else
            {
                // Recreate the extracted directory tree under the target before moving files across.
                string[] directories = Directory.GetDirectories(inProcessDirectory, "*", SearchOption.AllDirectories);
                foreach (string directory in directories)
                    _ = Directory.CreateDirectory(string.Concat(targetFileLocation, directory.Substring(inProcessDirectory.Length)));
                foreach (string file in files)
                    File.Move(file, string.Concat(targetFileLocation, file.Substring(inProcessDirectory.Length)));
            }
            File.AppendAllLines(traceDummyFile, new string[] { site, monARessource, stateName, State.Ok.ToString() });
            _ = monIn.SendStatus(site, monARessource, stateName, State.Ok);
        }
        catch (Exception exception)
        {
            string subject = string.Concat("Exception:", _CellInstanceConnectionName);
            string body = string.Concat(exception.Message, Environment.NewLine, Environment.NewLine, exception.StackTrace);
            // Best-effort notification: a mail failure must not mask the original exception handling.
            try
            { _SMTP.SendHighPriorityEmailMessage(subject, body); }
            catch (Exception) { }
            File.AppendAllLines(traceDummyFile, new string[] { site, monARessource, stateName, State.Critical.ToString(), exception.Message, exception.StackTrace });
            _ = monIn.SendStatus(site, monARessource, stateName, State.Critical);
        }
    }

    /// <summary>
    /// Prepares (and clears) the in-process staging directory for <paramref name="sequence"/>,
    /// detects whether the target already contains a file stamped with that sequence, and hands
    /// off to <see cref="CallbackInProcessCleared"/>.
    /// </summary>
    private void CallbackFileExists(string sourceArchiveFile, string traceDummyFile, string targetFileLocation, string monARessource, long sequence)
    {
        string[] files;
        bool warning = false;
        // Record this sequence against the cell so repeated runs are tracked.
        if (!_DummyRuns.TryGetValue(monARessource, out List<long> sequences))
        {
            sequences = new List<long>();
            _DummyRuns.Add(monARessource, sequences);
        }
        if (!sequences.Contains(sequence))
            sequences.Add(sequence);
        File.AppendAllLines(traceDummyFile, new string[] { sourceArchiveFile });
        string inProcessDirectory = Path.Combine(_ProgressPath, "Dummy In-Process", sequence.ToString());
        if (!Directory.Exists(inProcessDirectory))
            _ = Directory.CreateDirectory(inProcessDirectory);
        files = Directory.GetFiles(inProcessDirectory, "*", SearchOption.AllDirectories);
        if (files.Length > 0)
        {
            if (files.Length > 250)
                throw new Exception("Safety net!");
            // Best-effort cleanup of leftovers from a previous, interrupted run.
            try
            {
                foreach (string file in files)
                    File.Delete(file);
            }
            catch (Exception) { }
        }
        if (_FileConnectorConfiguration.IncludeSubDirectories.Value)
            files = Directory.GetFiles(targetFileLocation, "*", SearchOption.AllDirectories);
        else
            files = Directory.GetFiles(targetFileLocation, "*", SearchOption.TopDirectoryOnly);
        // A target file whose last-write ticks equal the sequence means this dummy was already replayed.
        foreach (string file in files)
        {
            if (new FileInfo(file).LastWriteTime.Ticks == sequence)
            {
                warning = true;
                break;
            }
        }
        CallbackInProcessCleared(sourceArchiveFile, traceDummyFile, targetFileLocation, monARessource, inProcessDirectory, sequence, warning);
    }

    /// <summary>
    /// Maps a path segment to one of the configured cell names: first by case-insensitive
    /// substring match, then by a best-effort character-frequency score as a fallback.
    /// </summary>
    private string GetCellName(string pathSegment)
    {
        string result = string.Empty;
        foreach (string cellName in _CellNames)
        {
            // BUG FIX: the original compared via culture-sensitive ToLower().Contains(), which
            // misbehaves under some cultures (e.g. Turkish 'I') and re-lowered the segment per
            // iteration; an ordinal case-insensitive search is both correct and cheaper.
            if (pathSegment.IndexOf(cellName, StringComparison.OrdinalIgnoreCase) >= 0)
            {
                result = cellName;
                break;
            }
        }
        if (string.IsNullOrEmpty(result))
        {
            // Fallback: score each cell name by how many of its characters occur in the segment,
            // preferring longer names, then higher counts.
            int count;
            string pathSegmentLower = pathSegment.ToLowerInvariant();
            List<(string CellName, int Count)> cellNames = new();
            foreach (string cellName in _CellNames)
            {
                count = 0;
                foreach (char @char in cellName.ToLowerInvariant())
                    count += pathSegmentLower.Length - pathSegmentLower.Replace(@char.ToString(), string.Empty).Length;
                cellNames.Add(new(cellName, count));
            }
            result = (from l in cellNames orderby l.CellName.Length, l.Count descending select l.CellName).First();
        }
        return result;
    }

    /// <summary>
    /// Timer body: during staffed hours (or when not EAF-hosted) it rotates through the source
    /// file filters, finds the next dummy archive, resolves its target directory and cell, and
    /// replays it via <see cref="CallbackFileExists"/>. Always re-arms the timer at the end.
    /// </summary>
    private void Callback(object state)
    {
        try
        {
            string pathSegment;
            string monARessource;
            DateTime dateTime = DateTime.Now;
            if (!_FileConnectorConfiguration.TargetFileLocation.Contains(_FileConnectorConfiguration.SourceFileLocation))
                throw new Exception("Target must start with source");
            // Only replay dummies during weekday working hours (08:00-17:59).
            bool check = (dateTime.Hour > 7 && dateTime.Hour < 18 && dateTime.DayOfWeek != DayOfWeek.Sunday && dateTime.DayOfWeek != DayOfWeek.Saturday);
            if (!_IsEAFHosted || check)
            {
                string checkSegment;
                string checkDirectory;
                string sourceFileFilter;
                string sourceArchiveFile;
                string sourceFileLocation;
                string weekOfYear = _Calendar.GetWeekOfYear(dateTime, CalendarWeekRule.FirstDay, DayOfWeek.Sunday).ToString("00");
                string traceDummyDirectory = Path.Combine(Path.GetPathRoot(_TracePath), "TracesDummy", _CellInstanceName, "Source", $"{dateTime:yyyy}___Week_{weekOfYear}");
                if (!Directory.Exists(traceDummyDirectory))
                    _ = Directory.CreateDirectory(traceDummyDirectory);
                string traceDummyFile = Path.Combine(traceDummyDirectory, $"{dateTime.Ticks} - {_CellInstanceName}.txt");
                // Touch the trace file so each invocation is observable even when no archive is found.
                File.AppendAllText(traceDummyFile, string.Empty);
                if (_FileConnectorConfiguration.SourceFileLocation.EndsWith("\\"))
                    sourceFileLocation = _FileConnectorConfiguration.SourceFileLocation;
                else
                    sourceFileLocation = string.Concat(_FileConnectorConfiguration.SourceFileLocation, '\\');
                for (int i = 0; i < _FileConnectorConfiguration.SourceFileFilters.Count; i++)
                {
                    // Round-robin through the filters, remembering the position across invocations.
                    _LastDummyRunIndex += 1;
                    if (_LastDummyRunIndex >= _FileConnectorConfiguration.SourceFileFilters.Count)
                        _LastDummyRunIndex = 0;
                    sourceFileFilter = _FileConnectorConfiguration.SourceFileFilters[_LastDummyRunIndex];
                    sourceArchiveFile = Path.GetFullPath(string.Concat(sourceFileLocation, sourceFileFilter));
                    if (File.Exists(sourceArchiveFile))
                    {
                        // Walk up from the archive file until a parent containing the target segment is found.
                        checkSegment = _FileConnectorConfiguration.TargetFileLocation.Substring(sourceFileLocation.Length);
                        checkDirectory = Path.GetDirectoryName(sourceArchiveFile);
                        for (int z = 0; z < int.MaxValue; z++)
                        {
                            if (checkDirectory.Length < sourceFileLocation.Length || !checkDirectory.StartsWith(sourceFileLocation))
                                break;
                            checkDirectory = Path.GetDirectoryName(checkDirectory);
                            if (Directory.Exists(Path.Combine(checkDirectory, checkSegment)))
                            {
                                checkDirectory = Path.Combine(checkDirectory, checkSegment);
                                break;
                            }
                        }
                        if (!checkDirectory.EndsWith(checkSegment))
                            throw new Exception("Could not determine dummy target directory for extract!");
                        // The archive file name (minus any 'x' characters) encodes the sequence ticks.
                        if (!long.TryParse(Path.GetFileNameWithoutExtension(sourceArchiveFile).Replace("x", string.Empty), out long sequence))
                            throw new Exception("Invalid file name for source archive file!");
                        pathSegment = checkDirectory.Substring(sourceFileLocation.Length);
                        monARessource = GetCellName(pathSegment);
                        if (string.IsNullOrEmpty(monARessource))
                            throw new Exception("Could not determine which cell archive file is associated with!");
                        if (_IsEAFHosted)
                            CallbackFileExists(sourceArchiveFile, traceDummyFile, checkDirectory, monARessource, sequence);
                        break;
                    }
                }
            }
        }
        catch (Exception exception)
        {
            string subject = string.Concat("Exception:", _CellInstanceConnectionName);
            string body = string.Concat(exception.Message, Environment.NewLine, Environment.NewLine, exception.StackTrace);
            try
            { _SMTP.SendHighPriorityEmailMessage(subject, body); }
            catch (Exception) { }
        }
        // Re-arm the timer regardless of the outcome above so the dummy loop never stalls.
        try
        {
            TimeSpan timeSpan = new(DateTime.Now.AddSeconds(_FileConnectorConfiguration.FileScanningIntervalInSeconds.Value).Ticks - DateTime.Now.Ticks);
            _ = _Timer.Change((long)timeSpan.TotalMilliseconds, Timeout.Infinite);
        }
        catch (Exception exception)
        {
            string subject = string.Concat("Exception:", _CellInstanceConnectionName);
            string body = string.Concat(exception.Message, Environment.NewLine, Environment.NewLine, exception.StackTrace);
            try
            { _SMTP.SendHighPriorityEmailMessage(subject, body); }
            catch (Exception) { }
        }
    }

}
|
@ -16,529 +16,375 @@ using System.Text.Json;
|
||||
using System.Text.Json.Serialization;
|
||||
using System.Threading;
|
||||
|
||||
namespace Adaptation.FileHandlers.MET08THFTIRQS408M
|
||||
namespace Adaptation.FileHandlers.MET08THFTIRQS408M;
|
||||
|
||||
public class FileRead : Shared.FileRead, IFileRead
|
||||
{
|
||||
|
||||
private readonly Timer _Timer;
|
||||
private int _LastDummyRunIndex;
|
||||
private readonly string _IqsFile;
|
||||
private readonly int _HyphenIsDummy;
|
||||
private readonly int _HyphenIsNaEDA;
|
||||
private readonly string _MemoryPath;
|
||||
private readonly int _HyphenIsXToAPC;
|
||||
private readonly int _HyphenIsXToIQSSi;
|
||||
private readonly int _HyphenIsXToSPaCe;
|
||||
private readonly string _OriginalDataBioRad;
|
||||
private readonly int _HyphenIsXToOpenInsight;
|
||||
private readonly string _EventNameFileReadDaily;
|
||||
private readonly string _OpenInsightFilePattern;
|
||||
private readonly string _OpenInsightMetrologyViewerAPI;
|
||||
private readonly Dictionary<string, string> _CellNames;
|
||||
private readonly int _HyphenIsXToOpenInsightMetrologyViewer;
|
||||
private readonly int _HyphenIsXToOpenInsightMetrologyViewerAttachments;
|
||||
|
||||
public class FileRead : Shared.FileRead, IFileRead
|
||||
public FileRead(ISMTP smtp, Dictionary<string, string> fileParameter, string cellInstanceName, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList<ModelObjectParameterDefinition> modelObjectParameters, string equipmentDictionaryName, Dictionary<string, List<long>> dummyRuns, bool useCyclicalForDescription, bool isEAFHosted) :
|
||||
base(new Description(), false, smtp, fileParameter, cellInstanceName, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, useCyclicalForDescription, isEAFHosted)
|
||||
{
|
||||
|
||||
private readonly Timer _Timer;
|
||||
private int _LastDummyRunIndex;
|
||||
private readonly string _IqsFile;
|
||||
private readonly int _HyphenIsDummy;
|
||||
private readonly int _HyphenIsNaEDA;
|
||||
private readonly string _MemoryPath;
|
||||
private readonly int _HyphenIsXToAPC;
|
||||
private readonly int _HyphenIsXToIQSSi;
|
||||
private readonly int _HyphenIsXToSPaCe;
|
||||
private readonly string _OriginalDataBioRad;
|
||||
private readonly int _HyphenIsXToOpenInsight;
|
||||
private readonly string _EventNameFileReadDaily;
|
||||
private readonly string _OpenInsightFilePattern;
|
||||
private readonly string _OpenInsightMetrologyViewerAPI;
|
||||
private readonly Dictionary<string, string> _CellNames;
|
||||
private readonly int _HyphenIsXToOpenInsightMetrologyViewer;
|
||||
private readonly int _HyphenIsXToOpenInsightMetrologyViewerAttachments;
|
||||
|
||||
public FileRead(ISMTP smtp, Dictionary<string, string> fileParameter, string cellInstanceName, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList<ModelObjectParameterDefinition> modelObjectParameters, string equipmentDictionaryName, Dictionary<string, List<long>> dummyRuns, bool useCyclicalForDescription, bool isEAFHosted, int hyphenXToArchive, int hyphenIsArchive) :
|
||||
base(new Description(), false, smtp, fileParameter, cellInstanceName, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, useCyclicalForDescription, isEAFHosted, hyphenXToArchive, hyphenIsArchive)
|
||||
_MinFileLength = 10;
|
||||
_NullData = string.Empty;
|
||||
_Logistics = new Logistics(this);
|
||||
if (_FileParameter is null)
|
||||
throw new Exception(cellInstanceConnectionName);
|
||||
if (_ModelObjectParameterDefinitions is null)
|
||||
throw new Exception(cellInstanceConnectionName);
|
||||
if (!_IsDuplicator)
|
||||
throw new Exception(cellInstanceConnectionName);
|
||||
_OriginalDataBioRad = "OriginalDataBioRad_";
|
||||
_LastDummyRunIndex = -1;
|
||||
if (_HyphenIsNaEDA == 0)
|
||||
{ }
|
||||
if (_HyphenIsXToSPaCe == 0)
|
||||
{ }
|
||||
if (_HyphenIsXToIQSSi == 0)
|
||||
{ }
|
||||
_CellNames = new Dictionary<string, string>();
|
||||
_HyphenIsNaEDA = (int)Hyphen.IsNaEDA;
|
||||
_HyphenIsDummy = (int)Hyphen.IsDummy;
|
||||
_HyphenIsXToAPC = (int)Hyphen.IsXToAPC;
|
||||
_HyphenIsXToIQSSi = (int)Hyphen.IsXToIQSSi;
|
||||
_HyphenIsXToSPaCe = (int)Hyphen.IsXToSPaCe;
|
||||
_HyphenIsXToOpenInsight = (int)Hyphen.IsXToOpenInsight;
|
||||
_EventNameFileReadDaily = string.Concat(_EventNameFileRead, "Daily");
|
||||
_IqsFile = GetPropertyValue(cellInstanceConnectionName, modelObjectParameters, "IQS.File");
|
||||
_MemoryPath = GetPropertyValue(cellInstanceConnectionName, modelObjectParameters, "Path.Memory");
|
||||
_HyphenIsXToOpenInsightMetrologyViewer = (int)Hyphen.IsXToOpenInsightMetrologyViewer;
|
||||
_HyphenIsXToOpenInsightMetrologyViewerAttachments = (int)Hyphen.IsXToOpenInsightMetrologyViewerAttachments;
|
||||
_OpenInsightFilePattern = GetPropertyValue(cellInstanceConnectionName, modelObjectParameters, "OpenInsight.FilePattern");
|
||||
_OpenInsightMetrologyViewerAPI = GetPropertyValue(cellInstanceConnectionName, modelObjectParameters, "OpenInsight.MetrologyViewerAPI");
|
||||
ModelObjectParameterDefinition[] cellInstanceCollection = GetProperties(cellInstanceConnectionName, modelObjectParameters, "CellInstance.", ".Path");
|
||||
foreach (ModelObjectParameterDefinition modelObjectParameterDefinition in cellInstanceCollection)
|
||||
_CellNames.Add(modelObjectParameterDefinition.Name.Split('.')[1], modelObjectParameterDefinition.Value);
|
||||
if (_Hyphens == _HyphenIsDummy)
|
||||
{
|
||||
_MinFileLength = 10;
|
||||
_NullData = string.Empty;
|
||||
_Logistics = new Logistics(this);
|
||||
if (_FileParameter is null)
|
||||
throw new Exception(cellInstanceConnectionName);
|
||||
if (_ModelObjectParameterDefinitions is null)
|
||||
throw new Exception(cellInstanceConnectionName);
|
||||
if (!_IsDuplicator)
|
||||
throw new Exception(cellInstanceConnectionName);
|
||||
if (hyphenIsArchive != (int)Hyphen.IsArchive)
|
||||
throw new Exception(cellInstanceConnectionName);
|
||||
if (hyphenXToArchive != (int)Hyphen.IsXToArchive)
|
||||
throw new Exception(cellInstanceConnectionName);
|
||||
_OriginalDataBioRad = "OriginalDataBioRad_";
|
||||
_LastDummyRunIndex = -1;
|
||||
if (_HyphenIsNaEDA == 0)
|
||||
{ }
|
||||
if (_HyphenIsXToSPaCe == 0)
|
||||
{ }
|
||||
if (_HyphenIsXToIQSSi == 0)
|
||||
{ }
|
||||
_CellNames = new Dictionary<string, string>();
|
||||
_HyphenIsNaEDA = (int)Hyphen.IsNaEDA;
|
||||
_HyphenIsDummy = (int)Hyphen.IsDummy;
|
||||
_HyphenIsXToAPC = (int)Hyphen.IsXToAPC;
|
||||
_HyphenIsXToIQSSi = (int)Hyphen.IsXToIQSSi;
|
||||
_HyphenIsXToSPaCe = (int)Hyphen.IsXToSPaCe;
|
||||
_HyphenIsXToOpenInsight = (int)Hyphen.IsXToOpenInsight;
|
||||
_EventNameFileReadDaily = string.Concat(_EventNameFileRead, "Daily");
|
||||
_IqsFile = GetPropertyValue(cellInstanceConnectionName, modelObjectParameters, "IQS.File");
|
||||
_MemoryPath = GetPropertyValue(cellInstanceConnectionName, modelObjectParameters, "Path.Memory");
|
||||
_HyphenIsXToOpenInsightMetrologyViewer = (int)Hyphen.IsXToOpenInsightMetrologyViewer;
|
||||
_HyphenIsXToOpenInsightMetrologyViewerAttachments = (int)Hyphen.IsXToOpenInsightMetrologyViewerAttachments;
|
||||
_OpenInsightFilePattern = GetPropertyValue(cellInstanceConnectionName, modelObjectParameters, "OpenInsight.FilePattern");
|
||||
_OpenInsightMetrologyViewerAPI = GetPropertyValue(cellInstanceConnectionName, modelObjectParameters, "OpenInsight.MetrologyViewerAPI");
|
||||
ModelObjectParameterDefinition[] cellInstanceCollection = GetProperties(cellInstanceConnectionName, modelObjectParameters, "CellInstance.", ".Path");
|
||||
foreach (ModelObjectParameterDefinition modelObjectParameterDefinition in cellInstanceCollection)
|
||||
_CellNames.Add(modelObjectParameterDefinition.Name.Split('.')[1], modelObjectParameterDefinition.Value);
|
||||
if (_Hyphens == _HyphenIsDummy)
|
||||
if (Debugger.IsAttached || fileConnectorConfiguration.PreProcessingMode == FileConnectorConfiguration.PreProcessingModeEnum.Process)
|
||||
{
|
||||
if (Debugger.IsAttached || fileConnectorConfiguration.PreProcessingMode == FileConnectorConfiguration.PreProcessingModeEnum.Process)
|
||||
{
|
||||
_Timer = new Timer(Callback, null, Timeout.Infinite, Timeout.Infinite);
|
||||
Callback(null);
|
||||
}
|
||||
else
|
||||
{
|
||||
int milliSeconds;
|
||||
milliSeconds = (int)((fileConnectorConfiguration.FileScanningIntervalInSeconds * 1000) / 2);
|
||||
_Timer = new Timer(Callback, null, milliSeconds, Timeout.Infinite);
|
||||
milliSeconds += 2000;
|
||||
}
|
||||
_Timer = new Timer(Callback, null, Timeout.Infinite, Timeout.Infinite);
|
||||
Callback(null);
|
||||
}
|
||||
else
|
||||
{
|
||||
int milliSeconds;
|
||||
milliSeconds = (int)((fileConnectorConfiguration.FileScanningIntervalInSeconds * 1000) / 2);
|
||||
_Timer = new Timer(Callback, null, milliSeconds, Timeout.Infinite);
|
||||
milliSeconds += 2000;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
void IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, Exception exception)
|
||||
{
|
||||
Move(this, extractResults, exception);
|
||||
}
|
||||
void IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, Exception exception) => Move(extractResults, exception);
|
||||
|
||||
void IFileRead.WaitForThread()
|
||||
{
|
||||
WaitForThread(thread: null, threadExceptions: null);
|
||||
}
|
||||
void IFileRead.WaitForThread() => WaitForThread(thread: null, threadExceptions: null);
|
||||
|
||||
string IFileRead.GetEventDescription()
|
||||
{
|
||||
string result = _Description.GetEventDescription();
|
||||
return result;
|
||||
}
|
||||
string IFileRead.GetEventDescription()
|
||||
{
|
||||
string result = _Description.GetEventDescription();
|
||||
return result;
|
||||
}
|
||||
|
||||
List<string> IFileRead.GetHeaderNames()
|
||||
{
|
||||
List<string> results = _Description.GetHeaderNames();
|
||||
return results;
|
||||
}
|
||||
List<string> IFileRead.GetHeaderNames()
|
||||
{
|
||||
List<string> results = _Description.GetHeaderNames();
|
||||
return results;
|
||||
}
|
||||
|
||||
string[] IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, string to, string from, string resolvedFileLocation, Exception exception)
|
||||
{
|
||||
string[] results = Move(extractResults, to, from, resolvedFileLocation, exception);
|
||||
return results;
|
||||
}
|
||||
string[] IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, string to, string from, string resolvedFileLocation, Exception exception)
|
||||
{
|
||||
string[] results = Move(extractResults, to, from, resolvedFileLocation, exception);
|
||||
return results;
|
||||
}
|
||||
|
||||
JsonProperty[] IFileRead.GetDefault()
|
||||
{
|
||||
JsonProperty[] results = _Description.GetDefault(this, _Logistics);
|
||||
return results;
|
||||
}
|
||||
JsonProperty[] IFileRead.GetDefault()
|
||||
{
|
||||
JsonProperty[] results = _Description.GetDefault(this, _Logistics);
|
||||
return results;
|
||||
}
|
||||
|
||||
Dictionary<string, string> IFileRead.GetDisplayNamesJsonElement()
|
||||
{
|
||||
Dictionary<string, string> results = _Description.GetDisplayNamesJsonElement(this);
|
||||
return results;
|
||||
}
|
||||
Dictionary<string, string> IFileRead.GetDisplayNamesJsonElement()
|
||||
{
|
||||
Dictionary<string, string> results = _Description.GetDisplayNamesJsonElement(this);
|
||||
return results;
|
||||
}
|
||||
|
||||
List<IDescription> IFileRead.GetDescriptions(IFileRead fileRead, List<Test> tests, IProcessData processData)
|
||||
{
|
||||
List<IDescription> results = _Description.GetDescriptions(fileRead, _Logistics, tests, processData);
|
||||
return results;
|
||||
}
|
||||
List<IDescription> IFileRead.GetDescriptions(IFileRead fileRead, List<Test> tests, IProcessData processData)
|
||||
{
|
||||
List<IDescription> results = _Description.GetDescriptions(fileRead, _Logistics, tests, processData);
|
||||
return results;
|
||||
}
|
||||
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.GetExtractResult(string reportFullPath, string eventName)
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.GetExtractResult(string reportFullPath, string eventName)
|
||||
{
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
|
||||
if (string.IsNullOrEmpty(eventName))
|
||||
throw new Exception();
|
||||
_ReportFullPath = reportFullPath;
|
||||
DateTime dateTime = DateTime.Now;
|
||||
results = GetExtractResult(reportFullPath, dateTime);
|
||||
if (results.Item3 is null)
|
||||
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(results.Item1, Array.Empty<Test>(), JsonSerializer.Deserialize<JsonElement[]>("[]"), results.Item4);
|
||||
if (results.Item3.Length > 0 && _IsEAFHosted)
|
||||
WritePDSF(this, results.Item3);
|
||||
UpdateLastTicksDuration(DateTime.Now.Ticks - dateTime.Ticks);
|
||||
return results;
|
||||
}
|
||||
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.ReExtract()
|
||||
{
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
|
||||
List<string> headerNames = _Description.GetHeaderNames();
|
||||
Dictionary<string, string> keyValuePairs = _Description.GetDisplayNamesJsonElement(this);
|
||||
results = ReExtract(this, headerNames, keyValuePairs);
|
||||
return results;
|
||||
}
|
||||
|
||||
void IFileRead.CheckTests(Test[] tests, bool extra)
|
||||
{
|
||||
if (_Description is not Description)
|
||||
throw new Exception();
|
||||
}
|
||||
|
||||
void IFileRead.Callback(object state) => Callback(state);
|
||||
|
||||
protected List<Stratus.Description> GetDescriptions(JsonElement[] jsonElements)
|
||||
{
|
||||
List<Stratus.Description> results = new();
|
||||
Stratus.Description description;
|
||||
JsonSerializerOptions jsonSerializerOptions = new() { NumberHandling = JsonNumberHandling.AllowReadingFromString | JsonNumberHandling.WriteAsString };
|
||||
foreach (JsonElement jsonElement in jsonElements)
|
||||
{
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
|
||||
if (string.IsNullOrEmpty(eventName))
|
||||
if (jsonElement.ValueKind != JsonValueKind.Object)
|
||||
throw new Exception();
|
||||
_ReportFullPath = reportFullPath;
|
||||
DateTime dateTime = DateTime.Now;
|
||||
results = GetExtractResult(reportFullPath, dateTime);
|
||||
if (results.Item3 is null)
|
||||
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(results.Item1, new Test[] { }, JsonSerializer.Deserialize<JsonElement[]>("[]"), results.Item4);
|
||||
if (results.Item3.Length > 0 && _IsEAFHosted)
|
||||
WritePDSF(this, results.Item3);
|
||||
UpdateLastTicksDuration(DateTime.Now.Ticks - dateTime.Ticks);
|
||||
return results;
|
||||
description = JsonSerializer.Deserialize<Stratus.Description>(jsonElement.ToString(), jsonSerializerOptions);
|
||||
results.Add(description);
|
||||
}
|
||||
return results;
|
||||
}
|
||||
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.ReExtract()
|
||||
private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
|
||||
{
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
|
||||
string duplicateDirectory;
|
||||
Tuple<string, string[], string[]> pdsf = ProcessDataStandardFormat.GetLogisticsColumnsAndBody(reportFullPath);
|
||||
_Logistics = new Logistics(reportFullPath, pdsf.Item1);
|
||||
SetFileParameterLotIDToLogisticsMID();
|
||||
JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(pdsf);
|
||||
List<Stratus.Description> descriptions = GetDescriptions(jsonElements);
|
||||
Tuple<Test[], Dictionary<Test, List<Shared.Properties.IDescription>>> tuple = GetTuple(this, from l in descriptions select (Shared.Properties.IDescription)l, extra: false);
|
||||
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(pdsf.Item1, tuple.Item1, jsonElements, new List<FileInfo>());
|
||||
bool isNotUsedInsightMetrologyViewerAttachments = (!(_FileConnectorConfiguration.FileScanningIntervalInSeconds > 0) && _Hyphens == _HyphenIsXToOpenInsightMetrologyViewerAttachments);
|
||||
bool isDummyRun = (_DummyRuns.Any() && _DummyRuns.ContainsKey(_Logistics.JobID) && _DummyRuns[_Logistics.JobID].Any() && (from l in _DummyRuns[_Logistics.JobID] where l == _Logistics.Sequence select 1).Any());
|
||||
if (isDummyRun)
|
||||
{
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
|
||||
List<string> headerNames = _Description.GetHeaderNames();
|
||||
Dictionary<string, string> keyValuePairs = _Description.GetDisplayNamesJsonElement(this);
|
||||
results = ReExtract(this, headerNames, keyValuePairs);
|
||||
return results;
|
||||
try
|
||||
{ File.SetLastWriteTime(reportFullPath, dateTime); }
|
||||
catch (Exception) { }
|
||||
}
|
||||
|
||||
void IFileRead.CheckTests(Test[] tests, bool extra)
|
||||
string[] segments = Path.GetFileNameWithoutExtension(reportFullPath).Split('_');
|
||||
if (_Hyphens != _HyphenIsXToOpenInsight)
|
||||
duplicateDirectory = string.Concat(_FileConnectorConfiguration.TargetFileLocation, @"\", segments[0]);
|
||||
else
|
||||
duplicateDirectory = string.Concat(Path.GetDirectoryName(Path.GetDirectoryName(_FileConnectorConfiguration.TargetFileLocation)), @"\Data");
|
||||
if (segments.Length > 2)
|
||||
duplicateDirectory = string.Concat(duplicateDirectory, @"-", segments[2]);
|
||||
if (!Directory.Exists(duplicateDirectory))
|
||||
_ = Directory.CreateDirectory(duplicateDirectory);
|
||||
if ((isDummyRun || isNotUsedInsightMetrologyViewerAttachments || _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0))
|
||||
{
|
||||
if (!(_Description is Description))
|
||||
throw new Exception();
|
||||
}
|
||||
|
||||
void IFileRead.Callback(object state)
|
||||
{
|
||||
Callback(state);
|
||||
}
|
||||
|
||||
void IFileRead.MoveArchive()
|
||||
{
|
||||
string logisticsSequence = _Logistics.Sequence.ToString();
|
||||
if (!Directory.Exists(duplicateDirectory))
|
||||
_ = Directory.CreateDirectory(duplicateDirectory);
|
||||
string successDirectory;
|
||||
if (_Hyphens != _HyphenIsXToAPC)
|
||||
successDirectory = string.Empty;
|
||||
else
|
||||
{
|
||||
successDirectory = string.Concat(Path.GetDirectoryName(_FileConnectorConfiguration.TargetFileLocation), @"\ViewerPath");
|
||||
if (!Directory.Exists(successDirectory))
|
||||
_ = Directory.CreateDirectory(successDirectory);
|
||||
}
|
||||
List<Tuple<Shared.Properties.IScopeInfo, string>> tuples = new();
|
||||
string duplicateFile = string.Concat(duplicateDirectory, @"\", Path.GetFileName(reportFullPath));
|
||||
string weekOfYear = _Calendar.GetWeekOfYear(_Logistics.DateTimeFromSequence, CalendarWeekRule.FirstDay, DayOfWeek.Sunday).ToString("00");
|
||||
string weekDirectory = string.Concat(_Logistics.DateTimeFromSequence.ToString("yyyy"), "_Week_", weekOfYear, @"\", _Logistics.DateTimeFromSequence.ToString("yyyy-MM-dd"));
|
||||
string jobIdDirectory = string.Concat(_FileConnectorConfiguration.TargetFileLocation, @"\", _Logistics.JobID);
|
||||
if (!Directory.Exists(jobIdDirectory))
|
||||
Directory.CreateDirectory(jobIdDirectory);
|
||||
//string destinationArchiveDirectory = string.Concat(jobIdDirectory, @"\!Archive\", weekDirectory);
|
||||
string destinationArchiveDirectory = string.Concat(Path.GetDirectoryName(_FileConnectorConfiguration.TargetFileLocation), @"\Archive\", _Logistics.JobID, @"\", weekDirectory);
|
||||
if (!Directory.Exists(destinationArchiveDirectory))
|
||||
Directory.CreateDirectory(destinationArchiveDirectory);
|
||||
string[] matchDirectories = new string[] { GetDirectoriesRecursively(jobIdDirectory, logisticsSequence).FirstOrDefault() };
|
||||
if ((matchDirectories is null) || matchDirectories.Length != 1)
|
||||
throw new Exception("Didn't find directory by logistics sequence");
|
||||
string sourceDirectory = Path.GetDirectoryName(matchDirectories[0]);
|
||||
destinationArchiveDirectory = string.Concat(destinationArchiveDirectory, @"\", Path.GetFileName(sourceDirectory));
|
||||
Directory.Move(sourceDirectory, destinationArchiveDirectory);
|
||||
}
|
||||
|
||||
protected List<Stratus.Description> GetDescriptions(JsonElement[] jsonElements)
|
||||
{
|
||||
List<Stratus.Description> results = new();
|
||||
Stratus.Description description;
|
||||
JsonSerializerOptions jsonSerializerOptions = new() { NumberHandling = JsonNumberHandling.AllowReadingFromString | JsonNumberHandling.WriteAsString };
|
||||
foreach (JsonElement jsonElement in jsonElements)
|
||||
string logisticsSequenceMemoryDirectory = string.Concat(_MemoryPath, @"\", _EquipmentType, @"\Source\", weekDirectory, @"\", _Logistics.Sequence);
|
||||
if (!Directory.Exists(logisticsSequenceMemoryDirectory))
|
||||
_ = Directory.CreateDirectory(logisticsSequenceMemoryDirectory);
|
||||
if (_Hyphens == _HyphenIsXToAPC)
|
||||
{
|
||||
if (jsonElement.ValueKind != JsonValueKind.Object)
|
||||
throw new Exception();
|
||||
description = JsonSerializer.Deserialize<Stratus.Description>(jsonElement.ToString(), jsonSerializerOptions);
|
||||
results.Add(description);
|
||||
if (!isDummyRun && _IsEAFHosted)
|
||||
File.Copy(reportFullPath, duplicateFile, overwrite: true);
|
||||
}
|
||||
return results;
|
||||
}
|
||||
|
||||
private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
|
||||
{
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
|
||||
string duplicateDirectory;
|
||||
Tuple<string, string[], string[]> pdsf = ProcessDataStandardFormat.GetLogisticsColumnsAndBody(reportFullPath);
|
||||
_Logistics = new Logistics(reportFullPath, pdsf.Item1);
|
||||
SetFileParameterLotIDToLogisticsMID();
|
||||
JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(pdsf);
|
||||
List<Stratus.Description> descriptions = GetDescriptions(jsonElements);
|
||||
Tuple<Test[], Dictionary<Test, List<Shared.Properties.IDescription>>> tuple = GetTuple(this, from l in descriptions select (Shared.Properties.IDescription)l, extra: false);
|
||||
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(pdsf.Item1, tuple.Item1, jsonElements, new List<FileInfo>());
|
||||
bool isNotUsedInsightMetrologyViewerAttachments = (!(_FileConnectorConfiguration.FileScanningIntervalInSeconds > 0) && _Hyphens == _HyphenIsXToOpenInsightMetrologyViewerAttachments);
|
||||
bool isDummyRun = (_DummyRuns.Any() && _DummyRuns.ContainsKey(_Logistics.JobID) && _DummyRuns[_Logistics.JobID].Any() && (from l in _DummyRuns[_Logistics.JobID] where l == _Logistics.Sequence select 1).Any());
|
||||
if (isDummyRun)
|
||||
{
|
||||
try
|
||||
{ File.SetLastWriteTime(reportFullPath, dateTime); }
|
||||
catch (Exception) { }
|
||||
}
|
||||
string[] segments = Path.GetFileNameWithoutExtension(reportFullPath).Split('_');
|
||||
if (_Hyphens != _HyphenIsXToOpenInsight)
|
||||
duplicateDirectory = string.Concat(_FileConnectorConfiguration.TargetFileLocation, @"\", segments[0]);
|
||||
else
|
||||
duplicateDirectory = string.Concat(Path.GetDirectoryName(Path.GetDirectoryName(_FileConnectorConfiguration.TargetFileLocation)), @"\Data");
|
||||
if (segments.Length > 2)
|
||||
duplicateDirectory = string.Concat(duplicateDirectory, @"-", segments[2]);
|
||||
if (!Directory.Exists(duplicateDirectory))
|
||||
Directory.CreateDirectory(duplicateDirectory);
|
||||
if ((isDummyRun || isNotUsedInsightMetrologyViewerAttachments || _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0) && _Hyphens != _HyphenIsXToArchive && _Hyphens != _HyphenIsArchive)
|
||||
{
|
||||
if (!Directory.Exists(duplicateDirectory))
|
||||
Directory.CreateDirectory(duplicateDirectory);
|
||||
string successDirectory;
|
||||
if (_Hyphens != _HyphenIsXToAPC)
|
||||
successDirectory = string.Empty;
|
||||
else
|
||||
{
|
||||
successDirectory = string.Concat(Path.GetDirectoryName(_FileConnectorConfiguration.TargetFileLocation), @"\ViewerPath");
|
||||
if (!Directory.Exists(successDirectory))
|
||||
Directory.CreateDirectory(successDirectory);
|
||||
}
|
||||
List<Tuple<Shared.Properties.IScopeInfo, string>> tuples = new();
|
||||
string duplicateFile = string.Concat(duplicateDirectory, @"\", Path.GetFileName(reportFullPath));
|
||||
string weekOfYear = _Calendar.GetWeekOfYear(_Logistics.DateTimeFromSequence, CalendarWeekRule.FirstDay, DayOfWeek.Sunday).ToString("00");
|
||||
string weekDirectory = string.Concat(_Logistics.DateTimeFromSequence.ToString("yyyy"), "_Week_", weekOfYear, @"\", _Logistics.DateTimeFromSequence.ToString("yyyy-MM-dd"));
|
||||
string logisticsSequenceMemoryDirectory = string.Concat(_MemoryPath, @"\", _EquipmentType, @"\Source\", weekDirectory, @"\", _Logistics.Sequence);
|
||||
if (!Directory.Exists(logisticsSequenceMemoryDirectory))
|
||||
Directory.CreateDirectory(logisticsSequenceMemoryDirectory);
|
||||
if (_Hyphens == _HyphenIsXToAPC)
|
||||
{
|
||||
if (!isDummyRun && _IsEAFHosted)
|
||||
File.Copy(reportFullPath, duplicateFile, overwrite: true);
|
||||
}
|
||||
else
|
||||
{
|
||||
if (_Hyphens == _HyphenIsXToOpenInsightMetrologyViewer)
|
||||
{
|
||||
WSRequest wsRequest = new(this, _Logistics, descriptions);
|
||||
if (!isDummyRun && _IsEAFHosted)
|
||||
{
|
||||
Tuple<string, WS.Results> wsResults = WS.SendData(_OpenInsightMetrologyViewerAPI, wsRequest);
|
||||
if (!wsResults.Item2.Success)
|
||||
throw new Exception(wsResults.ToString());
|
||||
_Log.Debug(wsResults.Item2.HeaderID);
|
||||
File.WriteAllText(string.Concat(logisticsSequenceMemoryDirectory, @"\", nameof(WS.Results), ".json"), wsResults.Item1);
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
Test test;
|
||||
string lines;
|
||||
Shared.Properties.IScopeInfo scopeInfo;
|
||||
foreach (KeyValuePair<Test, List<Shared.Properties.IDescription>> keyValuePair in tuple.Item2)
|
||||
{
|
||||
test = keyValuePair.Key;
|
||||
//scopeInfo = new ScopeInfo(test);
|
||||
if (_Hyphens != _HyphenIsXToOpenInsight)
|
||||
scopeInfo = new ScopeInfo(test, _IqsFile);
|
||||
else
|
||||
scopeInfo = new ScopeInfo(test, _OpenInsightFilePattern);
|
||||
//lines = ProcessDataStandardFormat.GetLines(this, scopeInfo, names, values, dateFormat: "M/d/yyyy hh:mm:ss tt", timeFormat: string.Empty, pairedColumns: ExtractResultPairedColumns);
|
||||
lines = ProcessData.GetLines(this, _Logistics, descriptions);
|
||||
tuples.Add(new Tuple<Shared.Properties.IScopeInfo, string>(scopeInfo, lines));
|
||||
}
|
||||
}
|
||||
if (_Hyphens == _HyphenIsXToOpenInsightMetrologyViewerAttachments)
|
||||
{
|
||||
string[] matchDirectories = Shared1567(reportFullPath, tuples);
|
||||
if (!isDummyRun && _IsEAFHosted && !isNotUsedInsightMetrologyViewerAttachments)
|
||||
ProcessData.PostOpenInsightMetrologyViewerAttachments(this, _Logistics, _OpenInsightMetrologyViewerAPI, _OriginalDataBioRad, dateTime, logisticsSequenceMemoryDirectory, descriptions, matchDirectories[0]);
|
||||
}
|
||||
}
|
||||
if (_Hyphens != _HyphenIsXToOpenInsightMetrologyViewer && _Hyphens != _HyphenIsXToOpenInsightMetrologyViewerAttachments)
|
||||
Shared0413(dateTime, isDummyRun, successDirectory, duplicateDirectory, tuples, duplicateFile);
|
||||
}
|
||||
if (_Hyphens == _HyphenIsXToOpenInsightMetrologyViewerAttachments)
|
||||
{
|
||||
string destinationDirectory;
|
||||
//string destinationDirectory = WriteScopeInfo(_ProgressPath, _Logistics, dateTime, duplicateDirectory, tuples);
|
||||
FileInfo fileInfo = new(reportFullPath);
|
||||
string logisticsSequence = _Logistics.Sequence.ToString();
|
||||
if (fileInfo.Exists && fileInfo.LastWriteTime < fileInfo.CreationTime)
|
||||
File.SetLastWriteTime(reportFullPath, fileInfo.CreationTime);
|
||||
string jobIdDirectory = string.Concat(Path.GetDirectoryName(Path.GetDirectoryName(_FileConnectorConfiguration.TargetFileLocation)), @"\", _Logistics.JobID);
|
||||
if (!Directory.Exists(jobIdDirectory))
|
||||
Directory.CreateDirectory(jobIdDirectory);
|
||||
string[] matchDirectories;
|
||||
if (!_IsEAFHosted)
|
||||
matchDirectories = new string[] { Path.GetDirectoryName(Path.GetDirectoryName(reportFullPath)) };
|
||||
else
|
||||
matchDirectories = Directory.GetDirectories(jobIdDirectory, string.Concat(_Logistics.MID, '*', logisticsSequence, '*'), SearchOption.TopDirectoryOnly);
|
||||
if ((matchDirectories is null) || matchDirectories.Length != 1)
|
||||
throw new Exception("Didn't find directory by logistics sequence");
|
||||
destinationDirectory = matchDirectories[0];
|
||||
if (isDummyRun)
|
||||
Shared0607(reportFullPath, duplicateDirectory, logisticsSequence, destinationDirectory);
|
||||
else
|
||||
if (_Hyphens == _HyphenIsXToOpenInsightMetrologyViewer)
|
||||
{
|
||||
WSRequest wsRequest = new(this, _Logistics, descriptions);
|
||||
JsonSerializerOptions jsonSerializerOptions = new() { WriteIndented = true };
|
||||
string json = JsonSerializer.Serialize(wsRequest, wsRequest.GetType(), jsonSerializerOptions);
|
||||
if (_IsEAFHosted)
|
||||
Shared1277(reportFullPath, destinationDirectory, logisticsSequence, jobIdDirectory, json);
|
||||
else
|
||||
if (!isDummyRun && _IsEAFHosted)
|
||||
{
|
||||
string jsonFileName = Path.ChangeExtension(reportFullPath, ".json");
|
||||
string historicalText = File.ReadAllText(jsonFileName);
|
||||
if (json != historicalText)
|
||||
throw new Exception("File doesn't match historical!");
|
||||
Tuple<string, WS.Results> wsResults = WS.SendData(_OpenInsightMetrologyViewerAPI, wsRequest);
|
||||
if (!wsResults.Item2.Success)
|
||||
throw new Exception(wsResults.ToString());
|
||||
_Log.Debug(wsResults.Item2.HeaderID);
|
||||
File.WriteAllText(string.Concat(logisticsSequenceMemoryDirectory, @"\", nameof(WS.Results), ".json"), wsResults.Item1);
|
||||
}
|
||||
}
|
||||
}
|
||||
return results;
|
||||
}
|
||||
|
||||
private void CallbackIsDummy(string traceDummyFile, List<Tuple<string, string, string, string, int>> tuples, bool fileConnectorConfigurationIncludeSubDirectories, bool includeSubDirectoriesExtra)
|
||||
{
|
||||
int fileCount;
|
||||
string[] files;
|
||||
string monARessource;
|
||||
string checkDirectory;
|
||||
string sourceArchiveFile;
|
||||
string inProcessDirectory;
|
||||
const string site = "sjc";
|
||||
string stateName = string.Concat("Dummy_", _EventName);
|
||||
const string monInURL = "http://moninhttp.sjc.infineon.com/input/text";
|
||||
MonIn monIn = MonIn.GetInstance(monInURL);
|
||||
foreach (Tuple<string, string, string, string, int> item in tuples)
|
||||
{
|
||||
monARessource = item.Item1;
|
||||
sourceArchiveFile = item.Item2;
|
||||
inProcessDirectory = item.Item3;
|
||||
checkDirectory = item.Item4;
|
||||
fileCount = item.Item5;
|
||||
try
|
||||
else
|
||||
{
|
||||
if (fileCount > 0 || string.IsNullOrEmpty(checkDirectory))
|
||||
Test test;
|
||||
string lines;
|
||||
Shared.Properties.IScopeInfo scopeInfo;
|
||||
foreach (KeyValuePair<Test, List<Shared.Properties.IDescription>> keyValuePair in tuple.Item2)
|
||||
{
|
||||
File.AppendAllLines(traceDummyFile, new string[] { site, monARessource, stateName, State.Warning.ToString() });
|
||||
monIn.SendStatus(site, monARessource, stateName, State.Warning);
|
||||
for (int i = 1; i < 12; i++)
|
||||
Thread.Sleep(500);
|
||||
}
|
||||
else if (inProcessDirectory == checkDirectory)
|
||||
continue;
|
||||
if (!_IsEAFHosted)
|
||||
continue;
|
||||
if (!File.Exists(sourceArchiveFile))
|
||||
continue;
|
||||
if (!long.TryParse(Path.GetFileNameWithoutExtension(sourceArchiveFile).Replace("x", string.Empty), out long sequence))
|
||||
continue;
|
||||
ZipFile.ExtractToDirectory(sourceArchiveFile, inProcessDirectory);
|
||||
if (fileConnectorConfigurationIncludeSubDirectories && includeSubDirectoriesExtra)
|
||||
{
|
||||
if (_EventName == _EventNameFileRead)
|
||||
checkDirectory = string.Concat(checkDirectory, @"\", sequence);
|
||||
else if (_EventName == _EventNameFileReadDaily)
|
||||
checkDirectory = string.Concat(checkDirectory, @"\Source\", sequence);
|
||||
test = keyValuePair.Key;
|
||||
//scopeInfo = new ScopeInfo(test);
|
||||
if (_Hyphens != _HyphenIsXToOpenInsight)
|
||||
scopeInfo = new ScopeInfo(test, _IqsFile);
|
||||
else
|
||||
throw new Exception();
|
||||
scopeInfo = new ScopeInfo(test, _OpenInsightFilePattern);
|
||||
//lines = ProcessDataStandardFormat.GetLines(this, scopeInfo, names, values, dateFormat: "M/d/yyyy hh:mm:ss tt", timeFormat: string.Empty, pairedColumns: ExtractResultPairedColumns);
|
||||
lines = ProcessData.GetLines(this, _Logistics, descriptions);
|
||||
tuples.Add(new Tuple<Shared.Properties.IScopeInfo, string>(scopeInfo, lines));
|
||||
}
|
||||
if (fileConnectorConfigurationIncludeSubDirectories)
|
||||
files = Directory.GetFiles(inProcessDirectory, "*", SearchOption.AllDirectories);
|
||||
else
|
||||
files = Directory.GetFiles(inProcessDirectory, "*", SearchOption.TopDirectoryOnly);
|
||||
if (files.Length > 250)
|
||||
throw new Exception("Safety net!");
|
||||
foreach (string file in files)
|
||||
File.SetLastWriteTime(file, new DateTime(sequence));
|
||||
if (!fileConnectorConfigurationIncludeSubDirectories)
|
||||
{
|
||||
foreach (string file in files)
|
||||
File.Move(file, string.Concat(checkDirectory, @"\", Path.GetFileName(file)));
|
||||
}
|
||||
else
|
||||
{
|
||||
string[] directories = Directory.GetDirectories(inProcessDirectory, "*", SearchOption.AllDirectories);
|
||||
foreach (string directory in directories)
|
||||
Directory.CreateDirectory(string.Concat(checkDirectory, directory.Substring(inProcessDirectory.Length)));
|
||||
foreach (string file in files)
|
||||
File.Move(file, string.Concat(checkDirectory, file.Substring(inProcessDirectory.Length)));
|
||||
}
|
||||
File.AppendAllLines(traceDummyFile, new string[] { site, monARessource, stateName, State.Ok.ToString() });
|
||||
monIn.SendStatus(site, monARessource, stateName, State.Ok);
|
||||
}
|
||||
catch (Exception exception)
|
||||
if (_Hyphens == _HyphenIsXToOpenInsightMetrologyViewerAttachments)
|
||||
{
|
||||
string subject = string.Concat("Exception:", _CellInstanceConnectionName);
|
||||
string body = string.Concat(exception.Message, Environment.NewLine, Environment.NewLine, exception.StackTrace);
|
||||
try
|
||||
{ _SMTP.SendHighPriorityEmailMessage(subject, body); }
|
||||
catch (Exception) { }
|
||||
File.AppendAllLines(traceDummyFile, new string[] { site, monARessource, stateName, State.Critical.ToString(), exception.Message, exception.StackTrace });
|
||||
monIn.SendStatus(site, monARessource, stateName, State.Critical);
|
||||
string[] matchDirectories = Shared1567(reportFullPath, tuples);
|
||||
if (!isDummyRun && _IsEAFHosted && !isNotUsedInsightMetrologyViewerAttachments)
|
||||
ProcessData.PostOpenInsightMetrologyViewerAttachments(this, _Logistics, _OpenInsightMetrologyViewerAPI, _OriginalDataBioRad, dateTime, logisticsSequenceMemoryDirectory, descriptions, matchDirectories[0]);
|
||||
}
|
||||
}
|
||||
if (_Hyphens != _HyphenIsXToOpenInsightMetrologyViewer && _Hyphens != _HyphenIsXToOpenInsightMetrologyViewerAttachments)
|
||||
Shared0413(dateTime, isDummyRun, successDirectory, duplicateDirectory, tuples, duplicateFile);
|
||||
}
|
||||
|
||||
private void Callback(object state)
|
||||
if (_Hyphens == _HyphenIsXToOpenInsightMetrologyViewerAttachments)
|
||||
{
|
||||
if (_Hyphens != _HyphenIsDummy)
|
||||
throw new Exception();
|
||||
try
|
||||
string destinationDirectory;
|
||||
//string destinationDirectory = WriteScopeInfo(_ProgressPath, _Logistics, dateTime, duplicateDirectory, tuples);
|
||||
FileInfo fileInfo = new(reportFullPath);
|
||||
string logisticsSequence = _Logistics.Sequence.ToString();
|
||||
if (fileInfo.Exists && fileInfo.LastWriteTime < fileInfo.CreationTime)
|
||||
File.SetLastWriteTime(reportFullPath, fileInfo.CreationTime);
|
||||
string jobIdDirectory = string.Concat(Path.GetDirectoryName(Path.GetDirectoryName(_FileConnectorConfiguration.TargetFileLocation)), @"\", _Logistics.JobID);
|
||||
if (!Directory.Exists(jobIdDirectory))
|
||||
_ = Directory.CreateDirectory(jobIdDirectory);
|
||||
string[] matchDirectories;
|
||||
if (!_IsEAFHosted)
|
||||
matchDirectories = new string[] { Path.GetDirectoryName(Path.GetDirectoryName(reportFullPath)) };
|
||||
else
|
||||
matchDirectories = Directory.GetDirectories(jobIdDirectory, string.Concat(_Logistics.MID, '*', logisticsSequence, '*'), SearchOption.TopDirectoryOnly);
|
||||
if ((matchDirectories is null) || matchDirectories.Length != 1)
|
||||
throw new Exception("Didn't find directory by logistics sequence");
|
||||
destinationDirectory = matchDirectories[0];
|
||||
if (isDummyRun)
|
||||
Shared0607(reportFullPath, duplicateDirectory, logisticsSequence, destinationDirectory);
|
||||
else
|
||||
{
|
||||
DateTime dateTime = DateTime.Now;
|
||||
bool check = (dateTime.Hour > 7 && dateTime.Hour < 18 && dateTime.DayOfWeek != DayOfWeek.Sunday && dateTime.DayOfWeek != DayOfWeek.Saturday);
|
||||
if (check)
|
||||
WSRequest wsRequest = new(this, _Logistics, descriptions);
|
||||
JsonSerializerOptions jsonSerializerOptions = new() { WriteIndented = true };
|
||||
string json = JsonSerializer.Serialize(wsRequest, wsRequest.GetType(), jsonSerializerOptions);
|
||||
if (_IsEAFHosted)
|
||||
Shared1277(reportFullPath, destinationDirectory, logisticsSequence, jobIdDirectory, json);
|
||||
else
|
||||
{
|
||||
int fileCount;
|
||||
string[] files;
|
||||
string monARessource;
|
||||
string checkDirectory;
|
||||
string sourceArchiveFile;
|
||||
string sourceFileLocation;
|
||||
string inProcessDirectory;
|
||||
string weekOfYear = _Calendar.GetWeekOfYear(dateTime, CalendarWeekRule.FirstDay, DayOfWeek.Sunday).ToString("00");
|
||||
string traceDummyDirectory = string.Concat(Path.GetPathRoot(_TracePath), @"\TracesDummy\", _CellInstanceName, @"\Source\", dateTime.ToString("yyyy"), "___Week_", weekOfYear);
|
||||
if (!Directory.Exists(traceDummyDirectory))
|
||||
Directory.CreateDirectory(traceDummyDirectory);
|
||||
string traceDummyFile = string.Concat(traceDummyDirectory, @"\", dateTime.Ticks, " - ", _CellInstanceName, ".txt");
|
||||
File.AppendAllText(traceDummyFile, string.Empty);
|
||||
List<Tuple<string, string, string, string, int>> tuples = new();
|
||||
string progressDirectory = Path.GetFullPath(string.Concat(_FileConnectorConfiguration.SourceFileLocation, @"\_ Progress"));
|
||||
if (progressDirectory != _ProgressPath || !Directory.Exists(progressDirectory))
|
||||
throw new Exception("Invalid progress path");
|
||||
foreach (KeyValuePair<string, string> keyValuePair in _CellNames)
|
||||
{
|
||||
monARessource = keyValuePair.Key;
|
||||
if (!keyValuePair.Value.Contains(@"\"))
|
||||
continue;
|
||||
foreach (string sourceFileFilter in _FileConnectorConfiguration.SourceFileFilter.Split('|'))
|
||||
{
|
||||
if (sourceFileFilter.ToLower().StartsWith(keyValuePair.Value.Replace(@"\", string.Empty)))
|
||||
sourceFileLocation = Path.GetFullPath(_FileConnectorConfiguration.SourceFileLocation);
|
||||
else if (_FileConnectorConfiguration.SourceFileLocation.ToLower().EndsWith(keyValuePair.Value))
|
||||
sourceFileLocation = Path.GetFullPath(_FileConnectorConfiguration.SourceFileLocation);
|
||||
else
|
||||
sourceFileLocation = Path.GetFullPath(string.Concat(_FileConnectorConfiguration.SourceFileLocation, @"\", keyValuePair.Value));
|
||||
sourceArchiveFile = Path.GetFullPath(string.Concat(sourceFileLocation, @"\", sourceFileFilter));
|
||||
if (!File.Exists(sourceArchiveFile))
|
||||
continue;
|
||||
if (!_DummyRuns.ContainsKey(monARessource))
|
||||
_DummyRuns.Add(monARessource, new List<long>());
|
||||
tuples.Add(new Tuple<string, string, string, string, int>(monARessource, sourceFileFilter, sourceFileLocation, sourceArchiveFile, 0));
|
||||
}
|
||||
}
|
||||
File.AppendAllLines(traceDummyFile, from l in tuples select l.Item4);
|
||||
if (tuples.Any())
|
||||
{
|
||||
_LastDummyRunIndex += 1;
|
||||
if (_LastDummyRunIndex >= tuples.Count)
|
||||
_LastDummyRunIndex = 0;
|
||||
monARessource = tuples[_LastDummyRunIndex].Item1;
|
||||
string sourceFileFilter = tuples[_LastDummyRunIndex].Item2;
|
||||
sourceFileLocation = tuples[_LastDummyRunIndex].Item3;
|
||||
sourceArchiveFile = tuples[_LastDummyRunIndex].Item4;
|
||||
//fileCount = tuples[_LastDummyRunIndex].Item5;
|
||||
tuples.Clear();
|
||||
if (long.TryParse(Path.GetFileNameWithoutExtension(sourceArchiveFile).Replace("x", string.Empty), out long sequence))
|
||||
{
|
||||
if (!_DummyRuns[monARessource].Contains(sequence))
|
||||
_DummyRuns[monARessource].Add(sequence);
|
||||
inProcessDirectory = string.Concat(progressDirectory, @"\Dummy_in process\", sequence);
|
||||
checkDirectory = inProcessDirectory;
|
||||
if (!Directory.Exists(checkDirectory))
|
||||
Directory.CreateDirectory(checkDirectory);
|
||||
files = Directory.GetFiles(checkDirectory, "*", SearchOption.AllDirectories);
|
||||
fileCount = files.Length;
|
||||
if (files.Any())
|
||||
{
|
||||
if (files.Length > 250)
|
||||
throw new Exception("Safety net!");
|
||||
try
|
||||
{
|
||||
foreach (string file in files)
|
||||
File.Delete(file);
|
||||
}
|
||||
catch (Exception) { }
|
||||
}
|
||||
tuples.Add(new Tuple<string, string, string, string, int>(monARessource, sourceArchiveFile, inProcessDirectory, checkDirectory, fileCount));
|
||||
checkDirectory = sourceFileLocation;
|
||||
files = Directory.GetFiles(checkDirectory, string.Concat("*", sequence, "*"), SearchOption.TopDirectoryOnly);
|
||||
fileCount = files.Length;
|
||||
tuples.Add(new Tuple<string, string, string, string, int>(monARessource, sourceArchiveFile, inProcessDirectory, checkDirectory, fileCount));
|
||||
}
|
||||
}
|
||||
if (tuples.Any())
|
||||
//CallbackIsDummy(traceDummyFile, tuples, FileConnectorConfiguration.IncludeSubDirectories.Value, includeSubDirectoriesExtra: false);
|
||||
CallbackIsDummy(traceDummyFile, tuples, fileConnectorConfigurationIncludeSubDirectories: true, includeSubDirectoriesExtra: true);
|
||||
string jsonFileName = Path.ChangeExtension(reportFullPath, ".json");
|
||||
string historicalText = File.ReadAllText(jsonFileName);
|
||||
if (json != historicalText)
|
||||
throw new Exception("File doesn't match historical!");
|
||||
}
|
||||
}
|
||||
catch (Exception exception)
|
||||
{
|
||||
string subject = string.Concat("Exception:", _CellInstanceConnectionName);
|
||||
string body = string.Concat(exception.Message, Environment.NewLine, Environment.NewLine, exception.StackTrace);
|
||||
try
|
||||
{ _SMTP.SendHighPriorityEmailMessage(subject, body); }
|
||||
catch (Exception) { }
|
||||
}
|
||||
}
|
||||
return results;
|
||||
}
|
||||
|
||||
private void CallbackIsDummy(string traceDummyFile, List<Tuple<string, string, string, string, int>> tuples, bool fileConnectorConfigurationIncludeSubDirectories, bool includeSubDirectoriesExtra)
|
||||
{
|
||||
int fileCount;
|
||||
string[] files;
|
||||
string monARessource;
|
||||
string checkDirectory;
|
||||
string sourceArchiveFile;
|
||||
string inProcessDirectory;
|
||||
const string site = "sjc";
|
||||
string stateName = string.Concat("Dummy_", _EventName);
|
||||
const string monInURL = "http://moninhttp.sjc.infineon.com/input/text";
|
||||
MonIn monIn = MonIn.GetInstance(monInURL);
|
||||
foreach (Tuple<string, string, string, string, int> item in tuples)
|
||||
{
|
||||
monARessource = item.Item1;
|
||||
sourceArchiveFile = item.Item2;
|
||||
inProcessDirectory = item.Item3;
|
||||
checkDirectory = item.Item4;
|
||||
fileCount = item.Item5;
|
||||
try
|
||||
{
|
||||
TimeSpan timeSpan = new(DateTime.Now.AddSeconds(_FileConnectorConfiguration.FileScanningIntervalInSeconds.Value).Ticks - DateTime.Now.Ticks);
|
||||
_Timer.Change((long)timeSpan.TotalMilliseconds, Timeout.Infinite);
|
||||
if (fileCount > 0 || string.IsNullOrEmpty(checkDirectory))
|
||||
{
|
||||
File.AppendAllLines(traceDummyFile, new string[] { site, monARessource, stateName, State.Warning.ToString() });
|
||||
_ = monIn.SendStatus(site, monARessource, stateName, State.Warning);
|
||||
for (int i = 1; i < 12; i++)
|
||||
Thread.Sleep(500);
|
||||
}
|
||||
else if (inProcessDirectory == checkDirectory)
|
||||
continue;
|
||||
if (!_IsEAFHosted)
|
||||
continue;
|
||||
if (!File.Exists(sourceArchiveFile))
|
||||
continue;
|
||||
if (!long.TryParse(Path.GetFileNameWithoutExtension(sourceArchiveFile).Replace("x", string.Empty), out long sequence))
|
||||
continue;
|
||||
ZipFile.ExtractToDirectory(sourceArchiveFile, inProcessDirectory);
|
||||
if (fileConnectorConfigurationIncludeSubDirectories && includeSubDirectoriesExtra)
|
||||
{
|
||||
if (_EventName == _EventNameFileRead)
|
||||
checkDirectory = string.Concat(checkDirectory, @"\", sequence);
|
||||
else if (_EventName == _EventNameFileReadDaily)
|
||||
checkDirectory = string.Concat(checkDirectory, @"\Source\", sequence);
|
||||
else
|
||||
throw new Exception();
|
||||
}
|
||||
if (fileConnectorConfigurationIncludeSubDirectories)
|
||||
files = Directory.GetFiles(inProcessDirectory, "*", SearchOption.AllDirectories);
|
||||
else
|
||||
files = Directory.GetFiles(inProcessDirectory, "*", SearchOption.TopDirectoryOnly);
|
||||
if (files.Length > 250)
|
||||
throw new Exception("Safety net!");
|
||||
foreach (string file in files)
|
||||
File.SetLastWriteTime(file, new DateTime(sequence));
|
||||
if (!fileConnectorConfigurationIncludeSubDirectories)
|
||||
{
|
||||
foreach (string file in files)
|
||||
File.Move(file, string.Concat(checkDirectory, @"\", Path.GetFileName(file)));
|
||||
}
|
||||
else
|
||||
{
|
||||
string[] directories = Directory.GetDirectories(inProcessDirectory, "*", SearchOption.AllDirectories);
|
||||
foreach (string directory in directories)
|
||||
_ = Directory.CreateDirectory(string.Concat(checkDirectory, directory.Substring(inProcessDirectory.Length)));
|
||||
foreach (string file in files)
|
||||
File.Move(file, string.Concat(checkDirectory, file.Substring(inProcessDirectory.Length)));
|
||||
}
|
||||
File.AppendAllLines(traceDummyFile, new string[] { site, monARessource, stateName, State.Ok.ToString() });
|
||||
_ = monIn.SendStatus(site, monARessource, stateName, State.Ok);
|
||||
}
|
||||
catch (Exception exception)
|
||||
{
|
||||
@ -547,9 +393,126 @@ namespace Adaptation.FileHandlers.MET08THFTIRQS408M
|
||||
try
|
||||
{ _SMTP.SendHighPriorityEmailMessage(subject, body); }
|
||||
catch (Exception) { }
|
||||
File.AppendAllLines(traceDummyFile, new string[] { site, monARessource, stateName, State.Critical.ToString(), exception.Message, exception.StackTrace });
|
||||
_ = monIn.SendStatus(site, monARessource, stateName, State.Critical);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private void Callback(object state)
|
||||
{
|
||||
if (_Hyphens != _HyphenIsDummy)
|
||||
throw new Exception();
|
||||
try
|
||||
{
|
||||
DateTime dateTime = DateTime.Now;
|
||||
bool check = (dateTime.Hour > 7 && dateTime.Hour < 18 && dateTime.DayOfWeek != DayOfWeek.Sunday && dateTime.DayOfWeek != DayOfWeek.Saturday);
|
||||
if (check)
|
||||
{
|
||||
int fileCount;
|
||||
string[] files;
|
||||
string monARessource;
|
||||
string checkDirectory;
|
||||
string sourceArchiveFile;
|
||||
string sourceFileLocation;
|
||||
string inProcessDirectory;
|
||||
string weekOfYear = _Calendar.GetWeekOfYear(dateTime, CalendarWeekRule.FirstDay, DayOfWeek.Sunday).ToString("00");
|
||||
string traceDummyDirectory = string.Concat(Path.GetPathRoot(_TracePath), @"\TracesDummy\", _CellInstanceName, @"\Source\", dateTime.ToString("yyyy"), "___Week_", weekOfYear);
|
||||
if (!Directory.Exists(traceDummyDirectory))
|
||||
_ = Directory.CreateDirectory(traceDummyDirectory);
|
||||
string traceDummyFile = string.Concat(traceDummyDirectory, @"\", dateTime.Ticks, " - ", _CellInstanceName, ".txt");
|
||||
File.AppendAllText(traceDummyFile, string.Empty);
|
||||
List<Tuple<string, string, string, string, int>> tuples = new();
|
||||
string progressDirectory = Path.GetFullPath(string.Concat(_FileConnectorConfiguration.SourceFileLocation, @"\_ Progress"));
|
||||
if (progressDirectory != _ProgressPath || !Directory.Exists(progressDirectory))
|
||||
throw new Exception("Invalid progress path");
|
||||
foreach (KeyValuePair<string, string> keyValuePair in _CellNames)
|
||||
{
|
||||
monARessource = keyValuePair.Key;
|
||||
if (!keyValuePair.Value.Contains(@"\"))
|
||||
continue;
|
||||
foreach (string sourceFileFilter in _FileConnectorConfiguration.SourceFileFilter.Split('|'))
|
||||
{
|
||||
if (sourceFileFilter.ToLower().StartsWith(keyValuePair.Value.Replace(@"\", string.Empty)))
|
||||
sourceFileLocation = Path.GetFullPath(_FileConnectorConfiguration.SourceFileLocation);
|
||||
else if (_FileConnectorConfiguration.SourceFileLocation.ToLower().EndsWith(keyValuePair.Value))
|
||||
sourceFileLocation = Path.GetFullPath(_FileConnectorConfiguration.SourceFileLocation);
|
||||
else
|
||||
sourceFileLocation = Path.GetFullPath(string.Concat(_FileConnectorConfiguration.SourceFileLocation, @"\", keyValuePair.Value));
|
||||
sourceArchiveFile = Path.GetFullPath(string.Concat(sourceFileLocation, @"\", sourceFileFilter));
|
||||
if (!File.Exists(sourceArchiveFile))
|
||||
continue;
|
||||
if (!_DummyRuns.ContainsKey(monARessource))
|
||||
_DummyRuns.Add(monARessource, new List<long>());
|
||||
tuples.Add(new Tuple<string, string, string, string, int>(monARessource, sourceFileFilter, sourceFileLocation, sourceArchiveFile, 0));
|
||||
}
|
||||
}
|
||||
File.AppendAllLines(traceDummyFile, from l in tuples select l.Item4);
|
||||
if (tuples.Any())
|
||||
{
|
||||
_LastDummyRunIndex += 1;
|
||||
if (_LastDummyRunIndex >= tuples.Count)
|
||||
_LastDummyRunIndex = 0;
|
||||
monARessource = tuples[_LastDummyRunIndex].Item1;
|
||||
string sourceFileFilter = tuples[_LastDummyRunIndex].Item2;
|
||||
sourceFileLocation = tuples[_LastDummyRunIndex].Item3;
|
||||
sourceArchiveFile = tuples[_LastDummyRunIndex].Item4;
|
||||
//fileCount = tuples[_LastDummyRunIndex].Item5;
|
||||
tuples.Clear();
|
||||
if (long.TryParse(Path.GetFileNameWithoutExtension(sourceArchiveFile).Replace("x", string.Empty), out long sequence))
|
||||
{
|
||||
if (!_DummyRuns[monARessource].Contains(sequence))
|
||||
_DummyRuns[monARessource].Add(sequence);
|
||||
inProcessDirectory = string.Concat(progressDirectory, @"\Dummy_in process\", sequence);
|
||||
checkDirectory = inProcessDirectory;
|
||||
if (!Directory.Exists(checkDirectory))
|
||||
_ = Directory.CreateDirectory(checkDirectory);
|
||||
files = Directory.GetFiles(checkDirectory, "*", SearchOption.AllDirectories);
|
||||
fileCount = files.Length;
|
||||
if (files.Any())
|
||||
{
|
||||
if (files.Length > 250)
|
||||
throw new Exception("Safety net!");
|
||||
try
|
||||
{
|
||||
foreach (string file in files)
|
||||
File.Delete(file);
|
||||
}
|
||||
catch (Exception) { }
|
||||
}
|
||||
tuples.Add(new Tuple<string, string, string, string, int>(monARessource, sourceArchiveFile, inProcessDirectory, checkDirectory, fileCount));
|
||||
checkDirectory = sourceFileLocation;
|
||||
files = Directory.GetFiles(checkDirectory, string.Concat("*", sequence, "*"), SearchOption.TopDirectoryOnly);
|
||||
fileCount = files.Length;
|
||||
tuples.Add(new Tuple<string, string, string, string, int>(monARessource, sourceArchiveFile, inProcessDirectory, checkDirectory, fileCount));
|
||||
}
|
||||
}
|
||||
if (tuples.Any())
|
||||
//CallbackIsDummy(traceDummyFile, tuples, FileConnectorConfiguration.IncludeSubDirectories.Value, includeSubDirectoriesExtra: false);
|
||||
CallbackIsDummy(traceDummyFile, tuples, fileConnectorConfigurationIncludeSubDirectories: true, includeSubDirectoriesExtra: true);
|
||||
}
|
||||
}
|
||||
catch (Exception exception)
|
||||
{
|
||||
string subject = string.Concat("Exception:", _CellInstanceConnectionName);
|
||||
string body = string.Concat(exception.Message, Environment.NewLine, Environment.NewLine, exception.StackTrace);
|
||||
try
|
||||
{ _SMTP.SendHighPriorityEmailMessage(subject, body); }
|
||||
catch (Exception) { }
|
||||
}
|
||||
try
|
||||
{
|
||||
TimeSpan timeSpan = new(DateTime.Now.AddSeconds(_FileConnectorConfiguration.FileScanningIntervalInSeconds.Value).Ticks - DateTime.Now.Ticks);
|
||||
_ = _Timer.Change((long)timeSpan.TotalMilliseconds, Timeout.Infinite);
|
||||
}
|
||||
catch (Exception exception)
|
||||
{
|
||||
string subject = string.Concat("Exception:", _CellInstanceConnectionName);
|
||||
string body = string.Concat(exception.Message, Environment.NewLine, Environment.NewLine, exception.StackTrace);
|
||||
try
|
||||
{ _SMTP.SendHighPriorityEmailMessage(subject, body); }
|
||||
catch (Exception) { }
|
||||
}
|
||||
}
|
||||
|
||||
}
|
@ -1,18 +1,15 @@
|
||||
namespace Adaptation.FileHandlers.MET08THFTIRQS408M
|
||||
namespace Adaptation.FileHandlers.MET08THFTIRQS408M;
|
||||
|
||||
public enum Hyphen
|
||||
{
|
||||
|
||||
public enum Hyphen
|
||||
{
|
||||
IsXToOpenInsightMetrologyViewer, //MetrologyWS.SendData(file, string.Concat("http://", serverName, "/api/inbound/StratusBioRad"), headerAttachments);
|
||||
IsXToIQSSi, //NA <d7p1:FileScanningIntervalInSeconds>-361</d7p1:FileScanningIntervalInSeconds>
|
||||
IsXToOpenInsight, //bool WriteFileOpenInsight(StratusBioRadFile
|
||||
IsXToOpenInsightMetrologyViewerAttachments, //Site-One
|
||||
IsXToAPC,
|
||||
IsXToSPaCe,
|
||||
IsXToArchive,
|
||||
IsArchive,
|
||||
IsDummy,
|
||||
IsNaEDA
|
||||
}
|
||||
|
||||
IsXToOpenInsightMetrologyViewer, //MetrologyWS.SendData(file, string.Concat("http://", serverName, "/api/inbound/StratusBioRad"), headerAttachments);
|
||||
IsXToIQSSi, //NA <d7p1:FileScanningIntervalInSeconds>-361</d7p1:FileScanningIntervalInSeconds>
|
||||
IsXToOpenInsight, //bool WriteFileOpenInsight(StratusBioRadFile
|
||||
IsXToOpenInsightMetrologyViewerAttachments, //Site-One
|
||||
IsXToAPC,
|
||||
IsXToSPaCe,
|
||||
IsXToArchive,
|
||||
IsArchive,
|
||||
IsDummy,
|
||||
IsNaEDA
|
||||
}
|
@ -8,90 +8,87 @@ using System.Linq;
|
||||
using System.Text;
|
||||
using System.Text.Json;
|
||||
|
||||
namespace Adaptation.FileHandlers.MET08THFTIRQS408M
|
||||
namespace Adaptation.FileHandlers.MET08THFTIRQS408M;
|
||||
|
||||
public class ProcessData
|
||||
{
|
||||
|
||||
public class ProcessData
|
||||
internal static List<Tuple<int, Enum, string>> HyphenTuples => new()
|
||||
{
|
||||
new Tuple<int, Enum, string>(0, Hyphen.IsNaEDA, @"\EC_EDA\Staging\Traces\~\Source"),
|
||||
new Tuple<int, Enum, string>(15, Hyphen.IsXToOpenInsightMetrologyViewer, @"\EC_EAFLog\TracesMES\~\Source"),
|
||||
new Tuple<int, Enum, string>(-36, Hyphen.IsXToIQSSi, @"\EC_SPC_Si\Traces\~\PollPath"),
|
||||
new Tuple<int, Enum, string>(36, Hyphen.IsXToOpenInsight, @"\\messa01ec.ec.local\APPS\Metrology\~\Source"),
|
||||
new Tuple<int, Enum, string>(36, Hyphen.IsXToOpenInsightMetrologyViewerAttachments, @"\EC_Characterization_Si\In Process\~\Source"),
|
||||
new Tuple<int, Enum, string>(360, Hyphen.IsXToAPC, @"\EC_APC\Staging\Traces\~\PollPath"),
|
||||
new Tuple<int, Enum, string>(-36, Hyphen.IsXToSPaCe, @"\EC_SPC_Si\Traces\~\Source"),
|
||||
new Tuple<int, Enum, string>(180, Hyphen.IsXToArchive, @"\EC_EAFLog\TracesArchive\~\Source"),
|
||||
new Tuple<int, Enum, string>(36, Hyphen.IsArchive, @"\EC_Characterization_Si\Processed")
|
||||
//new Tuple<int, Enum, string>("IsDummy"
|
||||
};
|
||||
|
||||
internal static List<Tuple<int, Enum, string>> HyphenTuples => new()
|
||||
{
|
||||
new Tuple<int, Enum, string>(0, Hyphen.IsNaEDA, @"\EC_EDA\Staging\Traces\~\Source"),
|
||||
new Tuple<int, Enum, string>(15, Hyphen.IsXToOpenInsightMetrologyViewer, @"\EC_EAFLog\TracesMES\~\Source"),
|
||||
new Tuple<int, Enum, string>(-36, Hyphen.IsXToIQSSi, @"\EC_SPC_Si\Traces\~\PollPath"),
|
||||
new Tuple<int, Enum, string>(36, Hyphen.IsXToOpenInsight, @"\\messa01ec.ec.local\APPS\Metrology\~\Source"),
|
||||
new Tuple<int, Enum, string>(36, Hyphen.IsXToOpenInsightMetrologyViewerAttachments, @"\EC_Characterization_Si\In Process\~\Source"),
|
||||
new Tuple<int, Enum, string>(360, Hyphen.IsXToAPC, @"\EC_APC\Staging\Traces\~\PollPath"),
|
||||
new Tuple<int, Enum, string>(-36, Hyphen.IsXToSPaCe, @"\EC_SPC_Si\Traces\~\Source"),
|
||||
new Tuple<int, Enum, string>(180, Hyphen.IsXToArchive, @"\EC_EAFLog\TracesArchive\~\Source"),
|
||||
new Tuple<int, Enum, string>(36, Hyphen.IsArchive, @"\EC_Characterization_Si\Processed")
|
||||
//new Tuple<int, Enum, string>("IsDummy"
|
||||
};
|
||||
internal static string GetLines(IFileRead fileRead, Logistics logistics, List<Stratus.Description> descriptions)
|
||||
{
|
||||
StringBuilder results = new();
|
||||
if (fileRead is null)
|
||||
{ }
|
||||
Stratus.Description x = descriptions[0];
|
||||
_ = results.Append("Stratus_").Append(logistics.MID).Append('_').Append(logistics.DateTimeFromSequence.ToString("yyyyMMddhhmmssfff")).Append('\t').
|
||||
Append(x.Date).Append('\t').
|
||||
Append(logistics.JobID).Append('\t').
|
||||
Append("FQA Thickness").Append('\t').
|
||||
Append(x.Employee).Append('\t').
|
||||
Append(x.Recipe).Append('\t').
|
||||
Append(x.Reactor).Append('\t').
|
||||
Append(x.RDS).Append('\t').
|
||||
Append(x.PSN).Append('\t').
|
||||
Append(x.Lot).Append('\t').
|
||||
Append(x.Cassette).Append('\t').
|
||||
Append(x.MeanThickness);
|
||||
for (int i = 0; i < descriptions.Count; i++)
|
||||
_ = results.Append('\t').Append(descriptions[i].Slot).Append('\t').Append(descriptions[i].Mean);
|
||||
return results.ToString();
|
||||
}
|
||||
|
||||
internal static string GetLines(IFileRead fileRead, Logistics logistics, List<Stratus.Description> descriptions)
|
||||
internal static void PostOpenInsightMetrologyViewerAttachments(IFileRead fileRead, Logistics logistics, string openInsightMetrologyViewerAPI, string originalDataBioRad, DateTime dateTime, string logisticsSequenceMemoryDirectory, List<Stratus.Description> descriptions, string matchDirectory)
|
||||
{
|
||||
if (fileRead is null)
|
||||
{ }
|
||||
if (dateTime == DateTime.MinValue)
|
||||
{ }
|
||||
string wsResultsMemoryFile = string.Concat(logisticsSequenceMemoryDirectory, @"\", nameof(WS.Results), ".json");
|
||||
if (!File.Exists(wsResultsMemoryFile))
|
||||
throw new Exception(string.Concat("Memory file <", wsResultsMemoryFile, "> doesn't exist!"));
|
||||
string json = File.ReadAllText(wsResultsMemoryFile);
|
||||
WS.Results metrologyWSRequest = JsonSerializer.Deserialize<WS.Results>(json);
|
||||
long wsResultsHeaderID = metrologyWSRequest.HeaderID;
|
||||
string dataPDFFile = Path.Combine(matchDirectory, $"{wsResultsHeaderID}.pdf");
|
||||
string[] txtFiles = Directory.GetFiles(matchDirectory, string.Concat(originalDataBioRad, "*.txt"), SearchOption.TopDirectoryOnly);
|
||||
if (txtFiles.Length != 1)
|
||||
throw new Exception("Invalid source file count!");
|
||||
string[] lines = File.ReadAllLines(txtFiles[0]);
|
||||
lines = (from l in lines where !string.IsNullOrEmpty(l) select l).ToArray();
|
||||
if (lines.Length > 1)
|
||||
{
|
||||
StringBuilder results = new();
|
||||
if (fileRead is null)
|
||||
{ }
|
||||
Stratus.Description x = descriptions[0];
|
||||
results.Append("Stratus_").Append(logistics.MID).Append('_').Append(logistics.DateTimeFromSequence.ToString("yyyyMMddhhmmssfff")).Append('\t').
|
||||
Append(x.Date).Append('\t').
|
||||
Append(logistics.JobID).Append('\t').
|
||||
Append("FQA Thickness").Append('\t').
|
||||
Append(x.Employee).Append('\t').
|
||||
Append(x.Recipe).Append('\t').
|
||||
Append(x.Reactor).Append('\t').
|
||||
Append(x.RDS).Append('\t').
|
||||
Append(x.PSN).Append('\t').
|
||||
Append(x.Lot).Append('\t').
|
||||
Append(x.Cassette).Append('\t').
|
||||
Append(x.MeanThickness);
|
||||
for (int i = 0; i < descriptions.Count; i++)
|
||||
results.Append('\t').Append(descriptions[i].Slot).Append('\t').Append(descriptions[i].Mean);
|
||||
return results.ToString();
|
||||
}
|
||||
|
||||
internal static void PostOpenInsightMetrologyViewerAttachments(IFileRead fileRead, Logistics logistics, string openInsightMetrologyViewerAPI, string originalDataBioRad, DateTime dateTime, string logisticsSequenceMemoryDirectory, List<Stratus.Description> descriptions, string matchDirectory)
|
||||
{
|
||||
if (fileRead is null)
|
||||
{ }
|
||||
if (dateTime == DateTime.MinValue)
|
||||
{ }
|
||||
string wsResultsMemoryFile = string.Concat(logisticsSequenceMemoryDirectory, @"\", nameof(WS.Results), ".json");
|
||||
if (!File.Exists(wsResultsMemoryFile))
|
||||
throw new Exception(string.Concat("Memory file <", wsResultsMemoryFile, "> doesn't exist!"));
|
||||
string json = File.ReadAllText(wsResultsMemoryFile);
|
||||
WS.Results metrologyWSRequest = JsonSerializer.Deserialize<WS.Results>(json);
|
||||
long wsResultsHeaderID = metrologyWSRequest.HeaderID;
|
||||
string dataPDFFile = Path.Combine(matchDirectory, $"{wsResultsHeaderID}.pdf");
|
||||
string[] txtFiles = Directory.GetFiles(matchDirectory, string.Concat(originalDataBioRad, "*.txt"), SearchOption.TopDirectoryOnly);
|
||||
if (txtFiles.Length != 1)
|
||||
throw new Exception("Invalid source file count!");
|
||||
string[] lines = File.ReadAllLines(txtFiles[0]);
|
||||
lines = (from l in lines where !string.IsNullOrEmpty(l) select l).ToArray();
|
||||
if (lines.Length > 1)
|
||||
org.apache.pdfbox.pdmodel.PDDocument pdDocument = new();
|
||||
org.apache.pdfbox.pdmodel.PDPage pdPage = new();
|
||||
pdDocument.addPage(pdPage);
|
||||
org.apache.pdfbox.pdmodel.edit.PDPageContentStream pdPageContentStream = new(pdDocument, pdPage);
|
||||
org.apache.pdfbox.pdmodel.font.PDFont pdFont = org.apache.pdfbox.pdmodel.font.PDType1Font.HELVETICA;
|
||||
pdPageContentStream.setFont(pdFont, 16);
|
||||
for (int i = 1; i < lines.Length; i++)
|
||||
{
|
||||
org.apache.pdfbox.pdmodel.PDDocument pdDocument = new();
|
||||
org.apache.pdfbox.pdmodel.PDPage pdPage = new();
|
||||
pdDocument.addPage(pdPage);
|
||||
org.apache.pdfbox.pdmodel.edit.PDPageContentStream pdPageContentStream = new(pdDocument, pdPage);
|
||||
org.apache.pdfbox.pdmodel.font.PDFont pdFont = org.apache.pdfbox.pdmodel.font.PDType1Font.HELVETICA;
|
||||
pdPageContentStream.setFont(pdFont, 16);
|
||||
for (int i = 1; i < lines.Length; i++)
|
||||
{
|
||||
pdPageContentStream.beginText();
|
||||
pdPageContentStream.moveTextPositionByAmount(16, 750 - (i * 16));
|
||||
pdPageContentStream.drawString(lines[i]);
|
||||
pdPageContentStream.endText();
|
||||
}
|
||||
pdPageContentStream.close();
|
||||
pdDocument.save(dataPDFFile);
|
||||
pdDocument.close();
|
||||
List<WS.Attachment> headerAttachments = new() { new WS.Attachment(descriptions[0].HeaderUniqueId, "Data.pdf", dataPDFFile) };
|
||||
WS.AttachFiles(openInsightMetrologyViewerAPI, wsResultsHeaderID, headerAttachments, dataAttachments: null);
|
||||
pdPageContentStream.beginText();
|
||||
pdPageContentStream.moveTextPositionByAmount(16, 750 - (i * 16));
|
||||
pdPageContentStream.drawString(lines[i]);
|
||||
pdPageContentStream.endText();
|
||||
}
|
||||
pdPageContentStream.close();
|
||||
pdDocument.save(dataPDFFile);
|
||||
pdDocument.close();
|
||||
List<WS.Attachment> headerAttachments = new() { new WS.Attachment(descriptions[0].HeaderUniqueId, "Data.pdf", dataPDFFile) };
|
||||
WS.AttachFiles(openInsightMetrologyViewerAPI, wsResultsHeaderID, headerAttachments, dataAttachments: null);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
}
|
@ -4,90 +4,87 @@ using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
|
||||
namespace Adaptation.FileHandlers.MET08THFTIRQS408M
|
||||
namespace Adaptation.FileHandlers.MET08THFTIRQS408M;
|
||||
|
||||
public class WSRequest
|
||||
{
|
||||
|
||||
public class WSRequest
|
||||
public long Id { get; set; }
|
||||
public string Batch { get; set; }
|
||||
public string Cassette { get; set; }
|
||||
public string CellName { get; set; }
|
||||
public string Date { get; set; }
|
||||
public string FilePath { get; set; }
|
||||
public string MeanThickness { get; set; }
|
||||
public string Operator { get; set; }
|
||||
public string PSN { get; set; }
|
||||
public string RDS { get; set; }
|
||||
public string Reactor { get; set; }
|
||||
public string Recipe { get; set; }
|
||||
public string StdDev { get; set; }
|
||||
public string Title { get; set; }
|
||||
public string UniqueId { get; set; }
|
||||
public List<Stratus.Detail> Details { get; protected set; }
|
||||
|
||||
[Obsolete("For json")] public WSRequest() { }
|
||||
|
||||
internal WSRequest(IFileRead fileRead, Logistics logistics, List<Stratus.Description> descriptions)
|
||||
{
|
||||
|
||||
public long Id { get; set; }
|
||||
public string Batch { get; set; }
|
||||
public string Cassette { get; set; }
|
||||
public string CellName { get; set; }
|
||||
public string Date { get; set; }
|
||||
public string FilePath { get; set; }
|
||||
public string MeanThickness { get; set; }
|
||||
public string Operator { get; set; }
|
||||
public string PSN { get; set; }
|
||||
public string RDS { get; set; }
|
||||
public string Reactor { get; set; }
|
||||
public string Recipe { get; set; }
|
||||
public string StdDev { get; set; }
|
||||
public string Title { get; set; }
|
||||
public string UniqueId { get; set; }
|
||||
public List<Stratus.Detail> Details { get; protected set; }
|
||||
|
||||
[Obsolete("For json")] public WSRequest() { }
|
||||
|
||||
internal WSRequest(IFileRead fileRead, Logistics logistics, List<Stratus.Description> descriptions)
|
||||
Id = 0;
|
||||
FilePath = string.Empty;
|
||||
if (fileRead is null)
|
||||
{ }
|
||||
CellName = logistics.MesEntity;
|
||||
if (descriptions[0] is not Stratus.Description x)
|
||||
throw new Exception();
|
||||
Details = new List<Stratus.Detail>();
|
||||
//Header
|
||||
{
|
||||
Id = 0;
|
||||
FilePath = string.Empty;
|
||||
if (fileRead is null)
|
||||
{ }
|
||||
CellName = logistics.MesEntity;
|
||||
if (descriptions[0] is not Stratus.Description x)
|
||||
throw new Exception();
|
||||
Details = new List<Stratus.Detail>();
|
||||
//Header
|
||||
{
|
||||
Batch = x.Lot;
|
||||
Cassette = x.Cassette;
|
||||
Date = x.Date;
|
||||
MeanThickness = x.MeanThickness;
|
||||
Operator = x.Employee;
|
||||
PSN = x.PSN;
|
||||
RDS = x.RDS;
|
||||
Reactor = x.Reactor;
|
||||
Recipe = x.Recipe;
|
||||
StdDev = x.GradeStdDev;
|
||||
Title = x.Title;
|
||||
UniqueId = x.UniqueId;
|
||||
}
|
||||
string[] segments;
|
||||
Stratus.Detail detail;
|
||||
foreach (Stratus.Description description in descriptions)
|
||||
{
|
||||
detail = new Stratus.Detail
|
||||
{
|
||||
HeaderUniqueId = description.HeaderUniqueId,
|
||||
Mean = description.Mean,
|
||||
PassFail = description.PassFail,
|
||||
Position = description.Position,
|
||||
Recipe = description.Recipe,
|
||||
Slot = description.Slot,
|
||||
StdDev = description.StdDev,
|
||||
Thickness = description.Thickness,
|
||||
UniqueId = description.UniqueId,
|
||||
Wafer = description.Wafer,
|
||||
};
|
||||
detail.Points = new();
|
||||
segments = description.Position.Split(',');
|
||||
foreach (string segment in segments)
|
||||
detail.Points.Add(new Stratus.Point { HeaderUniqueId = description.HeaderUniqueId, UniqueId = description.UniqueId, Position = segment });
|
||||
segments = description.Thickness.Split(',');
|
||||
if (detail.Points.Count != segments.Length)
|
||||
throw new Exception();
|
||||
for (int i = 0; i < detail.Points.Count; i++)
|
||||
detail.Points[i].Thickness = segments[i];
|
||||
Details.Add(detail);
|
||||
}
|
||||
if (Date is null)
|
||||
Date = logistics.DateTimeFromSequence.ToString();
|
||||
if (UniqueId is null && Details.Any())
|
||||
UniqueId = Details[0].HeaderUniqueId;
|
||||
Batch = x.Lot;
|
||||
Cassette = x.Cassette;
|
||||
Date = x.Date;
|
||||
MeanThickness = x.MeanThickness;
|
||||
Operator = x.Employee;
|
||||
PSN = x.PSN;
|
||||
RDS = x.RDS;
|
||||
Reactor = x.Reactor;
|
||||
Recipe = x.Recipe;
|
||||
StdDev = x.GradeStdDev;
|
||||
Title = x.Title;
|
||||
UniqueId = x.UniqueId;
|
||||
}
|
||||
|
||||
string[] segments;
|
||||
Stratus.Detail detail;
|
||||
foreach (Stratus.Description description in descriptions)
|
||||
{
|
||||
detail = new Stratus.Detail
|
||||
{
|
||||
HeaderUniqueId = description.HeaderUniqueId,
|
||||
Mean = description.Mean,
|
||||
PassFail = description.PassFail,
|
||||
Position = description.Position,
|
||||
Recipe = description.Recipe,
|
||||
Slot = description.Slot,
|
||||
StdDev = description.StdDev,
|
||||
Thickness = description.Thickness,
|
||||
UniqueId = description.UniqueId,
|
||||
Wafer = description.Wafer,
|
||||
};
|
||||
detail.Points = new();
|
||||
segments = description.Position.Split(',');
|
||||
foreach (string segment in segments)
|
||||
detail.Points.Add(new Stratus.Point { HeaderUniqueId = description.HeaderUniqueId, UniqueId = description.UniqueId, Position = segment });
|
||||
segments = description.Thickness.Split(',');
|
||||
if (detail.Points.Count != segments.Length)
|
||||
throw new Exception();
|
||||
for (int i = 0; i < detail.Points.Count; i++)
|
||||
detail.Points[i].Thickness = segments[i];
|
||||
Details.Add(detail);
|
||||
}
|
||||
if (Date is null)
|
||||
Date = logistics.DateTimeFromSequence.ToString();
|
||||
if (UniqueId is null && Details.Any())
|
||||
UniqueId = Details[0].HeaderUniqueId;
|
||||
}
|
||||
|
||||
}
|
@ -5,272 +5,265 @@ using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using System.Text.Json;
|
||||
|
||||
namespace Adaptation.FileHandlers.Stratus
|
||||
namespace Adaptation.FileHandlers.Stratus;
|
||||
|
||||
public class Description : IDescription, Shared.Properties.IDescription
|
||||
{
|
||||
|
||||
public class Description : IDescription, Shared.Properties.IDescription
|
||||
public int Test { get; set; }
|
||||
public int Count { get; set; }
|
||||
public int Index { get; set; }
|
||||
//
|
||||
public string EventName { get; set; }
|
||||
public string NullData { get; set; }
|
||||
public string JobID { get; set; }
|
||||
public string Sequence { get; set; }
|
||||
public string MesEntity { get; set; }
|
||||
public string ReportFullPath { get; set; }
|
||||
public string ProcessJobID { get; set; }
|
||||
public string MID { get; set; }
|
||||
//
|
||||
public string Date { get; set; }
|
||||
public string Employee { get; set; }
|
||||
public string Lot { get; set; }
|
||||
public string PSN { get; set; }
|
||||
public string Reactor { get; set; }
|
||||
public string Recipe { get; set; }
|
||||
//
|
||||
public string Cassette { get; set; }
|
||||
public string GradeStdDev { get; set; }
|
||||
public string HeaderUniqueId { get; set; }
|
||||
public string MeanThickness { get; set; }
|
||||
public string PassFail { get; set; }
|
||||
public string RDS { get; set; }
|
||||
public string Slot { get; set; }
|
||||
public string Title { get; set; }
|
||||
public string UniqueId { get; set; }
|
||||
public string Wafer { get; set; }
|
||||
//
|
||||
public string Mean { get; set; }
|
||||
public string Position { get; set; }
|
||||
public string StdDev { get; set; }
|
||||
public string Thickness { get; set; }
|
||||
|
||||
string IDescription.GetEventDescription() => "File Has been read and parsed";
|
||||
|
||||
List<string> IDescription.GetNames(IFileRead fileRead, Logistics logistics)
|
||||
{
|
||||
List<string> results = new();
|
||||
IDescription description = GetDefault(fileRead, logistics);
|
||||
string json = JsonSerializer.Serialize(description, description.GetType());
|
||||
object @object = JsonSerializer.Deserialize<object>(json);
|
||||
if (@object is not JsonElement jsonElement)
|
||||
throw new Exception();
|
||||
foreach (JsonProperty jsonProperty in jsonElement.EnumerateObject())
|
||||
results.Add(jsonProperty.Name);
|
||||
return results;
|
||||
}
|
||||
|
||||
|
||||
public int Test { get; set; }
|
||||
public int Count { get; set; }
|
||||
public int Index { get; set; }
|
||||
//
|
||||
public string EventName { get; set; }
|
||||
public string NullData { get; set; }
|
||||
public string JobID { get; set; }
|
||||
public string Sequence { get; set; }
|
||||
public string MesEntity { get; set; }
|
||||
public string ReportFullPath { get; set; }
|
||||
public string ProcessJobID { get; set; }
|
||||
public string MID { get; set; }
|
||||
//
|
||||
public string Date { get; set; }
|
||||
public string Employee { get; set; }
|
||||
public string Lot { get; set; }
|
||||
public string PSN { get; set; }
|
||||
public string Reactor { get; set; }
|
||||
public string Recipe { get; set; }
|
||||
//
|
||||
public string Cassette { get; set; }
|
||||
public string GradeStdDev { get; set; }
|
||||
public string HeaderUniqueId { get; set; }
|
||||
public string MeanThickness { get; set; }
|
||||
public string PassFail { get; set; }
|
||||
public string RDS { get; set; }
|
||||
public string Slot { get; set; }
|
||||
public string Title { get; set; }
|
||||
public string UniqueId { get; set; }
|
||||
public string Wafer { get; set; }
|
||||
//
|
||||
public string Mean { get; set; }
|
||||
public string Position { get; set; }
|
||||
public string StdDev { get; set; }
|
||||
public string Thickness { get; set; }
|
||||
|
||||
string IDescription.GetEventDescription()
|
||||
List<string> IDescription.GetDetailNames()
|
||||
{
|
||||
List<string> results = new()
|
||||
{
|
||||
return "File Has been read and parsed";
|
||||
nameof(Cassette),
|
||||
nameof(GradeStdDev),
|
||||
nameof(HeaderUniqueId),
|
||||
nameof(MeanThickness),
|
||||
nameof(PassFail),
|
||||
nameof(RDS),
|
||||
nameof(Slot),
|
||||
nameof(Title),
|
||||
nameof(UniqueId),
|
||||
nameof(Wafer)
|
||||
};
|
||||
return results;
|
||||
}
|
||||
|
||||
List<string> IDescription.GetHeaderNames()
|
||||
{
|
||||
List<string> results = new()
|
||||
{
|
||||
nameof(Date),
|
||||
nameof(Employee),
|
||||
nameof(Lot),
|
||||
nameof(PSN),
|
||||
nameof(Reactor),
|
||||
nameof(Recipe)
|
||||
};
|
||||
return results;
|
||||
}
|
||||
|
||||
IDescription IDescription.GetDisplayNames()
|
||||
{
|
||||
Description result = GetDisplayNames();
|
||||
return result;
|
||||
}
|
||||
|
||||
List<string> IDescription.GetParameterNames()
|
||||
{
|
||||
List<string> results = new()
|
||||
{
|
||||
nameof(Mean),
|
||||
nameof(Position),
|
||||
nameof(StdDev),
|
||||
nameof(Thickness)
|
||||
};
|
||||
return results;
|
||||
}
|
||||
|
||||
JsonProperty[] IDescription.GetDefault(IFileRead fileRead, Logistics logistics)
|
||||
{
|
||||
JsonProperty[] results;
|
||||
IDescription description = GetDefault(fileRead, logistics);
|
||||
string json = JsonSerializer.Serialize(description, description.GetType());
|
||||
object @object = JsonSerializer.Deserialize<object>(json);
|
||||
results = ((JsonElement)@object).EnumerateObject().ToArray();
|
||||
return results;
|
||||
}
|
||||
|
||||
List<string> IDescription.GetPairedParameterNames()
|
||||
{
|
||||
List<string> results = new();
|
||||
return results;
|
||||
}
|
||||
|
||||
List<string> IDescription.GetIgnoreParameterNames(Test test)
|
||||
{
|
||||
List<string> results = new();
|
||||
return results;
|
||||
}
|
||||
|
||||
IDescription IDescription.GetDefaultDescription(IFileRead fileRead, Logistics logistics)
|
||||
{
|
||||
Description result = GetDefault(fileRead, logistics);
|
||||
return result;
|
||||
}
|
||||
|
||||
Dictionary<string, string> IDescription.GetDisplayNamesJsonElement(IFileRead fileRead)
|
||||
{
|
||||
Dictionary<string, string> results = new();
|
||||
IDescription description = GetDisplayNames();
|
||||
string json = JsonSerializer.Serialize(description, description.GetType());
|
||||
JsonElement jsonElement = JsonSerializer.Deserialize<JsonElement>(json);
|
||||
foreach (JsonProperty jsonProperty in jsonElement.EnumerateObject())
|
||||
{
|
||||
if (!results.ContainsKey(jsonProperty.Name))
|
||||
results.Add(jsonProperty.Name, string.Empty);
|
||||
if (jsonProperty.Value is JsonElement jsonPropertyValue)
|
||||
results[jsonProperty.Name] = jsonPropertyValue.ToString();
|
||||
}
|
||||
return results;
|
||||
}
|
||||
|
||||
List<string> IDescription.GetNames(IFileRead fileRead, Logistics logistics)
|
||||
List<IDescription> IDescription.GetDescriptions(IFileRead fileRead, Logistics logistics, List<Test> tests, IProcessData iProcessData)
|
||||
{
|
||||
List<IDescription> results = new();
|
||||
if (iProcessData is null || !iProcessData.Details.Any() || iProcessData is not ProcessData processData)
|
||||
results.Add(GetDefault(fileRead, logistics));
|
||||
else
|
||||
{
|
||||
List<string> results = new();
|
||||
IDescription description = GetDefault(fileRead, logistics);
|
||||
string json = JsonSerializer.Serialize(description, description.GetType());
|
||||
object @object = JsonSerializer.Deserialize<object>(json);
|
||||
if (@object is not JsonElement jsonElement)
|
||||
throw new Exception();
|
||||
foreach (JsonProperty jsonProperty in jsonElement.EnumerateObject())
|
||||
results.Add(jsonProperty.Name);
|
||||
return results;
|
||||
}
|
||||
|
||||
List<string> IDescription.GetDetailNames()
|
||||
{
|
||||
List<string> results = new()
|
||||
{
|
||||
nameof(Cassette),
|
||||
nameof(GradeStdDev),
|
||||
nameof(HeaderUniqueId),
|
||||
nameof(MeanThickness),
|
||||
nameof(PassFail),
|
||||
nameof(RDS),
|
||||
nameof(Slot),
|
||||
nameof(Title),
|
||||
nameof(UniqueId),
|
||||
nameof(Wafer)
|
||||
};
|
||||
return results;
|
||||
}
|
||||
|
||||
List<string> IDescription.GetHeaderNames()
|
||||
{
|
||||
List<string> results = new()
|
||||
{
|
||||
nameof(Date),
|
||||
nameof(Employee),
|
||||
nameof(Lot),
|
||||
nameof(PSN),
|
||||
nameof(Reactor),
|
||||
nameof(Recipe)
|
||||
};
|
||||
return results;
|
||||
}
|
||||
|
||||
IDescription IDescription.GetDisplayNames()
|
||||
{
|
||||
Description result = GetDisplayNames();
|
||||
return result;
|
||||
}
|
||||
|
||||
List<string> IDescription.GetParameterNames()
|
||||
{
|
||||
List<string> results = new()
|
||||
{
|
||||
nameof(Mean),
|
||||
nameof(Position),
|
||||
nameof(StdDev),
|
||||
nameof(Thickness)
|
||||
};
|
||||
return results;
|
||||
}
|
||||
|
||||
JsonProperty[] IDescription.GetDefault(IFileRead fileRead, Logistics logistics)
|
||||
{
|
||||
JsonProperty[] results;
|
||||
IDescription description = GetDefault(fileRead, logistics);
|
||||
string json = JsonSerializer.Serialize(description, description.GetType());
|
||||
object @object = JsonSerializer.Deserialize<object>(json);
|
||||
results = ((JsonElement)@object).EnumerateObject().ToArray();
|
||||
return results;
|
||||
}
|
||||
|
||||
List<string> IDescription.GetPairedParameterNames()
|
||||
{
|
||||
List<string> results = new();
|
||||
return results;
|
||||
}
|
||||
|
||||
List<string> IDescription.GetIgnoreParameterNames(Test test)
|
||||
{
|
||||
List<string> results = new();
|
||||
return results;
|
||||
}
|
||||
|
||||
IDescription IDescription.GetDefaultDescription(IFileRead fileRead, Logistics logistics)
|
||||
{
|
||||
Description result = GetDefault(fileRead, logistics);
|
||||
return result;
|
||||
}
|
||||
|
||||
Dictionary<string, string> IDescription.GetDisplayNamesJsonElement(IFileRead fileRead)
|
||||
{
|
||||
Dictionary<string, string> results = new();
|
||||
IDescription description = GetDisplayNames();
|
||||
string json = JsonSerializer.Serialize(description, description.GetType());
|
||||
JsonElement jsonElement = JsonSerializer.Deserialize<JsonElement>(json);
|
||||
foreach (JsonProperty jsonProperty in jsonElement.EnumerateObject())
|
||||
{
|
||||
if (!results.ContainsKey(jsonProperty.Name))
|
||||
results.Add(jsonProperty.Name, string.Empty);
|
||||
if (jsonProperty.Value is JsonElement jsonPropertyValue)
|
||||
results[jsonProperty.Name] = jsonPropertyValue.ToString();
|
||||
}
|
||||
return results;
|
||||
}
|
||||
|
||||
List<IDescription> IDescription.GetDescriptions(IFileRead fileRead, Logistics logistics, List<Test> tests, IProcessData iProcessData)
|
||||
{
|
||||
List<IDescription> results = new();
|
||||
if (iProcessData is null || !iProcessData.Details.Any() || iProcessData is not ProcessData processData)
|
||||
results.Add(GetDefault(fileRead, logistics));
|
||||
string nullData;
|
||||
Description description;
|
||||
object configDataNullData = fileRead.NullData;
|
||||
if (configDataNullData is null)
|
||||
nullData = string.Empty;
|
||||
else
|
||||
nullData = configDataNullData.ToString();
|
||||
for (int i = 0; i < iProcessData.Details.Count; i++)
|
||||
{
|
||||
string nullData;
|
||||
Description description;
|
||||
object configDataNullData = fileRead.NullData;
|
||||
if (configDataNullData is null)
|
||||
nullData = string.Empty;
|
||||
else
|
||||
nullData = configDataNullData.ToString();
|
||||
for (int i = 0; i < iProcessData.Details.Count; i++)
|
||||
if (iProcessData.Details[i] is not Detail detail)
|
||||
continue;
|
||||
description = new Description
|
||||
{
|
||||
if (iProcessData.Details[i] is not Detail detail)
|
||||
continue;
|
||||
description = new Description
|
||||
{
|
||||
Test = (int)tests[i],
|
||||
Count = tests.Count,
|
||||
Index = i,
|
||||
//
|
||||
EventName = fileRead.EventName,
|
||||
NullData = nullData,
|
||||
JobID = fileRead.CellInstanceName,
|
||||
Sequence = logistics.Sequence.ToString(),
|
||||
MesEntity = logistics.MesEntity,
|
||||
ReportFullPath = logistics.ReportFullPath,
|
||||
ProcessJobID = logistics.ProcessJobID,
|
||||
MID = logistics.MID,
|
||||
//
|
||||
Date = processData.Date,
|
||||
Employee = processData.Employee,
|
||||
Lot = processData.Batch,
|
||||
PSN = processData.PSN,
|
||||
Reactor = processData.Reactor,
|
||||
Recipe = processData.Recipe,
|
||||
//
|
||||
Cassette = processData.Cassette,
|
||||
GradeStdDev = processData.StdDev,
|
||||
HeaderUniqueId = detail.HeaderUniqueId,
|
||||
MeanThickness = processData.MeanThickness,
|
||||
PassFail = detail.PassFail,
|
||||
RDS = processData.RDS,
|
||||
Slot = detail.Slot,
|
||||
Title = processData.Title,
|
||||
UniqueId = detail.UniqueId,
|
||||
Wafer = detail.Wafer,
|
||||
//
|
||||
Mean = detail.Mean,
|
||||
Position = detail.Position,
|
||||
StdDev = detail.StdDev,
|
||||
Thickness = detail.Thickness
|
||||
};
|
||||
results.Add(description);
|
||||
}
|
||||
Test = (int)tests[i],
|
||||
Count = tests.Count,
|
||||
Index = i,
|
||||
//
|
||||
EventName = fileRead.EventName,
|
||||
NullData = nullData,
|
||||
JobID = fileRead.CellInstanceName,
|
||||
Sequence = logistics.Sequence.ToString(),
|
||||
MesEntity = logistics.MesEntity,
|
||||
ReportFullPath = logistics.ReportFullPath,
|
||||
ProcessJobID = logistics.ProcessJobID,
|
||||
MID = logistics.MID,
|
||||
//
|
||||
Date = processData.Date,
|
||||
Employee = processData.Employee,
|
||||
Lot = processData.Batch,
|
||||
PSN = processData.PSN,
|
||||
Reactor = processData.Reactor,
|
||||
Recipe = processData.Recipe,
|
||||
//
|
||||
Cassette = processData.Cassette,
|
||||
GradeStdDev = processData.StdDev,
|
||||
HeaderUniqueId = detail.HeaderUniqueId,
|
||||
MeanThickness = processData.MeanThickness,
|
||||
PassFail = detail.PassFail,
|
||||
RDS = processData.RDS,
|
||||
Slot = detail.Slot,
|
||||
Title = processData.Title,
|
||||
UniqueId = detail.UniqueId,
|
||||
Wafer = detail.Wafer,
|
||||
//
|
||||
Mean = detail.Mean,
|
||||
Position = detail.Position,
|
||||
StdDev = detail.StdDev,
|
||||
Thickness = detail.Thickness
|
||||
};
|
||||
results.Add(description);
|
||||
}
|
||||
return results;
|
||||
}
|
||||
return results;
|
||||
}
|
||||
|
||||
private Description GetDisplayNames()
|
||||
private Description GetDisplayNames()
|
||||
{
|
||||
Description result = new();
|
||||
return result;
|
||||
}
|
||||
|
||||
private Description GetDefault(IFileRead fileRead, Logistics logistics)
|
||||
{
|
||||
Description result = new()
|
||||
{
|
||||
Description result = new();
|
||||
return result;
|
||||
}
|
||||
|
||||
private Description GetDefault(IFileRead fileRead, Logistics logistics)
|
||||
{
|
||||
Description result = new()
|
||||
{
|
||||
Test = -1,
|
||||
Count = 0,
|
||||
Index = -1,
|
||||
//
|
||||
EventName = fileRead.EventName,
|
||||
NullData = fileRead.NullData,
|
||||
JobID = fileRead.CellInstanceName,
|
||||
Sequence = logistics.Sequence.ToString(),
|
||||
MesEntity = fileRead.MesEntity,
|
||||
ReportFullPath = logistics.ReportFullPath,
|
||||
ProcessJobID = logistics.ProcessJobID,
|
||||
MID = logistics.MID,
|
||||
//
|
||||
Date = nameof(Date),
|
||||
Employee = nameof(Employee),
|
||||
Lot = nameof(Lot),
|
||||
PSN = nameof(PSN),
|
||||
Reactor = nameof(Reactor),
|
||||
Recipe = nameof(Recipe),
|
||||
//
|
||||
Cassette = nameof(Cassette),
|
||||
GradeStdDev = nameof(GradeStdDev),
|
||||
HeaderUniqueId = nameof(HeaderUniqueId),
|
||||
MeanThickness = nameof(MeanThickness),
|
||||
PassFail = nameof(PassFail),
|
||||
RDS = nameof(RDS),
|
||||
Slot = nameof(Slot),
|
||||
Title = nameof(Title),
|
||||
UniqueId = nameof(UniqueId),
|
||||
Wafer = nameof(Wafer),
|
||||
//
|
||||
Mean = nameof(Mean),
|
||||
Position = nameof(Position),
|
||||
StdDev = nameof(StdDev),
|
||||
Thickness = nameof(Thickness)
|
||||
};
|
||||
return result;
|
||||
}
|
||||
|
||||
Test = -1,
|
||||
Count = 0,
|
||||
Index = -1,
|
||||
//
|
||||
EventName = fileRead.EventName,
|
||||
NullData = fileRead.NullData,
|
||||
JobID = fileRead.CellInstanceName,
|
||||
Sequence = logistics.Sequence.ToString(),
|
||||
MesEntity = fileRead.MesEntity,
|
||||
ReportFullPath = logistics.ReportFullPath,
|
||||
ProcessJobID = logistics.ProcessJobID,
|
||||
MID = logistics.MID,
|
||||
//
|
||||
Date = nameof(Date),
|
||||
Employee = nameof(Employee),
|
||||
Lot = nameof(Lot),
|
||||
PSN = nameof(PSN),
|
||||
Reactor = nameof(Reactor),
|
||||
Recipe = nameof(Recipe),
|
||||
//
|
||||
Cassette = nameof(Cassette),
|
||||
GradeStdDev = nameof(GradeStdDev),
|
||||
HeaderUniqueId = nameof(HeaderUniqueId),
|
||||
MeanThickness = nameof(MeanThickness),
|
||||
PassFail = nameof(PassFail),
|
||||
RDS = nameof(RDS),
|
||||
Slot = nameof(Slot),
|
||||
Title = nameof(Title),
|
||||
UniqueId = nameof(UniqueId),
|
||||
Wafer = nameof(Wafer),
|
||||
//
|
||||
Mean = nameof(Mean),
|
||||
Position = nameof(Position),
|
||||
StdDev = nameof(StdDev),
|
||||
Thickness = nameof(Thickness)
|
||||
};
|
||||
return result;
|
||||
}
|
||||
|
||||
}
|
@ -1,23 +1,20 @@
|
||||
using System.Collections.Generic;
|
||||
|
||||
namespace Adaptation.FileHandlers.Stratus
|
||||
namespace Adaptation.FileHandlers.Stratus;
|
||||
|
||||
public class Detail
|
||||
{
|
||||
|
||||
public class Detail
|
||||
{
|
||||
|
||||
public string HeaderUniqueId { get; set; }
|
||||
public string Mean { get; set; }
|
||||
public string PassFail { get; set; }
|
||||
public string Position { get; set; }
|
||||
public string Recipe { get; set; }
|
||||
public string Slot { get; set; }
|
||||
public string StdDev { get; set; }
|
||||
public string UniqueId { get; set; }
|
||||
public string Thickness { get; set; }
|
||||
public string Wafer { get; set; }
|
||||
public List<Point> Points { get; set; }
|
||||
|
||||
}
|
||||
public string HeaderUniqueId { get; set; }
|
||||
public string Mean { get; set; }
|
||||
public string PassFail { get; set; }
|
||||
public string Position { get; set; }
|
||||
public string Recipe { get; set; }
|
||||
public string Slot { get; set; }
|
||||
public string StdDev { get; set; }
|
||||
public string UniqueId { get; set; }
|
||||
public string Thickness { get; set; }
|
||||
public string Wafer { get; set; }
|
||||
public List<Point> Points { get; set; }
|
||||
|
||||
}
|
@ -9,140 +9,120 @@ using System.Linq;
|
||||
using System.Text.Json;
|
||||
using System.Text.RegularExpressions;
|
||||
|
||||
namespace Adaptation.FileHandlers.Stratus
|
||||
namespace Adaptation.FileHandlers.Stratus;
|
||||
|
||||
public class FileRead : Shared.FileRead, IFileRead
|
||||
{
|
||||
|
||||
public class FileRead : Shared.FileRead, IFileRead
|
||||
private readonly string _OriginalDataBioRad;
|
||||
|
||||
public FileRead(ISMTP smtp, Dictionary<string, string> fileParameter, string cellInstanceName, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList<ModelObjectParameterDefinition> modelObjectParameters, string equipmentDictionaryName, Dictionary<string, List<long>> dummyRuns, bool useCyclicalForDescription, bool isEAFHosted) :
|
||||
base(new Description(), true, smtp, fileParameter, cellInstanceName, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, useCyclicalForDescription, isEAFHosted)
|
||||
{
|
||||
_MinFileLength = 10;
|
||||
_NullData = string.Empty;
|
||||
_Logistics = new Logistics(this);
|
||||
if (_FileParameter is null)
|
||||
throw new Exception(cellInstanceConnectionName);
|
||||
if (_ModelObjectParameterDefinitions is null)
|
||||
throw new Exception(cellInstanceConnectionName);
|
||||
if (_IsDuplicator)
|
||||
throw new Exception(cellInstanceConnectionName);
|
||||
_OriginalDataBioRad = "OriginalDataBioRad_";
|
||||
}
|
||||
|
||||
private readonly string _OriginalDataBioRad;
|
||||
void IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, Exception exception) => Move(extractResults, exception);
|
||||
|
||||
public FileRead(ISMTP smtp, Dictionary<string, string> fileParameter, string cellInstanceName, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList<ModelObjectParameterDefinition> modelObjectParameters, string equipmentDictionaryName, Dictionary<string, List<long>> dummyRuns, bool useCyclicalForDescription, bool isEAFHosted, int hyphenXToArchive, int hyphenIsArchive) :
|
||||
base(new Description(), true, smtp, fileParameter, cellInstanceName, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, useCyclicalForDescription, isEAFHosted, hyphenXToArchive, hyphenIsArchive)
|
||||
void IFileRead.WaitForThread() => WaitForThread(thread: null, threadExceptions: null);
|
||||
|
||||
string IFileRead.GetEventDescription()
|
||||
{
|
||||
string result = _Description.GetEventDescription();
|
||||
return result;
|
||||
}
|
||||
|
||||
List<string> IFileRead.GetHeaderNames()
|
||||
{
|
||||
List<string> results = _Description.GetHeaderNames();
|
||||
return results;
|
||||
}
|
||||
|
||||
string[] IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, string to, string from, string resolvedFileLocation, Exception exception)
|
||||
{
|
||||
string[] results = Move(extractResults, to, from, resolvedFileLocation, exception);
|
||||
return results;
|
||||
}
|
||||
|
||||
JsonProperty[] IFileRead.GetDefault()
|
||||
{
|
||||
JsonProperty[] results = _Description.GetDefault(this, _Logistics);
|
||||
return results;
|
||||
}
|
||||
|
||||
Dictionary<string, string> IFileRead.GetDisplayNamesJsonElement()
|
||||
{
|
||||
Dictionary<string, string> results = _Description.GetDisplayNamesJsonElement(this);
|
||||
return results;
|
||||
}
|
||||
|
||||
List<IDescription> IFileRead.GetDescriptions(IFileRead fileRead, List<Test> tests, IProcessData processData)
|
||||
{
|
||||
List<IDescription> results = _Description.GetDescriptions(fileRead, _Logistics, tests, processData);
|
||||
return results;
|
||||
}
|
||||
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.GetExtractResult(string reportFullPath, string eventName)
|
||||
{
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
|
||||
if (string.IsNullOrEmpty(eventName))
|
||||
throw new Exception();
|
||||
_ReportFullPath = reportFullPath;
|
||||
DateTime dateTime = DateTime.Now;
|
||||
results = GetExtractResult(reportFullPath, dateTime);
|
||||
if (results.Item3 is null)
|
||||
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(results.Item1, Array.Empty<Test>(), JsonSerializer.Deserialize<JsonElement[]>("[]"), results.Item4);
|
||||
if (results.Item3.Length > 0 && _IsEAFHosted)
|
||||
WritePDSF(this, results.Item3);
|
||||
UpdateLastTicksDuration(DateTime.Now.Ticks - dateTime.Ticks);
|
||||
return results;
|
||||
}
|
||||
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.ReExtract()
|
||||
{
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
|
||||
List<string> headerNames = _Description.GetHeaderNames();
|
||||
Dictionary<string, string> keyValuePairs = _Description.GetDisplayNamesJsonElement(this);
|
||||
results = ReExtract(this, headerNames, keyValuePairs);
|
||||
return results;
|
||||
}
|
||||
|
||||
void IFileRead.CheckTests(Test[] tests, bool extra) => throw new Exception(string.Concat("Not ", nameof(_IsDuplicator)));
|
||||
|
||||
void IFileRead.Callback(object state) => throw new Exception(string.Concat("Not ", nameof(_IsDuplicator)));
|
||||
|
||||
private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
|
||||
{
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> results = new(string.Empty, null, null, new List<FileInfo>());
|
||||
_Logistics = new Logistics(this, reportFullPath, useSplitForMID: true);
|
||||
SetFileParameterLotIDToLogisticsMID();
|
||||
if (reportFullPath.Length < _MinFileLength)
|
||||
results.Item4.Add(new FileInfo(reportFullPath));
|
||||
else
|
||||
{
|
||||
_MinFileLength = 10;
|
||||
_NullData = string.Empty;
|
||||
_Logistics = new Logistics(this);
|
||||
if (_FileParameter is null)
|
||||
throw new Exception(cellInstanceConnectionName);
|
||||
if (_ModelObjectParameterDefinitions is null)
|
||||
throw new Exception(cellInstanceConnectionName);
|
||||
if (_IsDuplicator)
|
||||
throw new Exception(cellInstanceConnectionName);
|
||||
_OriginalDataBioRad = "OriginalDataBioRad_";
|
||||
}
|
||||
|
||||
void IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, Exception exception)
|
||||
{
|
||||
Move(this, extractResults, exception);
|
||||
}
|
||||
|
||||
void IFileRead.WaitForThread()
|
||||
{
|
||||
WaitForThread(thread: null, threadExceptions: null);
|
||||
}
|
||||
|
||||
string IFileRead.GetEventDescription()
|
||||
{
|
||||
string result = _Description.GetEventDescription();
|
||||
return result;
|
||||
}
|
||||
|
||||
List<string> IFileRead.GetHeaderNames()
|
||||
{
|
||||
List<string> results = _Description.GetHeaderNames();
|
||||
return results;
|
||||
}
|
||||
|
||||
string[] IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, string to, string from, string resolvedFileLocation, Exception exception)
|
||||
{
|
||||
string[] results = Move(extractResults, to, from, resolvedFileLocation, exception);
|
||||
return results;
|
||||
}
|
||||
|
||||
JsonProperty[] IFileRead.GetDefault()
|
||||
{
|
||||
JsonProperty[] results = _Description.GetDefault(this, _Logistics);
|
||||
return results;
|
||||
}
|
||||
|
||||
Dictionary<string, string> IFileRead.GetDisplayNamesJsonElement()
|
||||
{
|
||||
Dictionary<string, string> results = _Description.GetDisplayNamesJsonElement(this);
|
||||
return results;
|
||||
}
|
||||
|
||||
List<IDescription> IFileRead.GetDescriptions(IFileRead fileRead, List<Test> tests, IProcessData processData)
|
||||
{
|
||||
List<IDescription> results = _Description.GetDescriptions(fileRead, _Logistics, tests, processData);
|
||||
return results;
|
||||
}
|
||||
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.GetExtractResult(string reportFullPath, string eventName)
|
||||
{
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
|
||||
if (string.IsNullOrEmpty(eventName))
|
||||
throw new Exception();
|
||||
_ReportFullPath = reportFullPath;
|
||||
DateTime dateTime = DateTime.Now;
|
||||
results = GetExtractResult(reportFullPath, dateTime);
|
||||
if (results.Item3 is null)
|
||||
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(results.Item1, new Test[] { }, JsonSerializer.Deserialize<JsonElement[]>("[]"), results.Item4);
|
||||
if (results.Item3.Length > 0 && _IsEAFHosted)
|
||||
WritePDSF(this, results.Item3);
|
||||
UpdateLastTicksDuration(DateTime.Now.Ticks - dateTime.Ticks);
|
||||
return results;
|
||||
}
|
||||
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.ReExtract()
|
||||
{
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
|
||||
List<string> headerNames = _Description.GetHeaderNames();
|
||||
Dictionary<string, string> keyValuePairs = _Description.GetDisplayNamesJsonElement(this);
|
||||
results = ReExtract(this, headerNames, keyValuePairs);
|
||||
return results;
|
||||
}
|
||||
|
||||
void IFileRead.CheckTests(Test[] tests, bool extra)
|
||||
{
|
||||
throw new Exception(string.Concat("Not ", nameof(_IsDuplicator)));
|
||||
}
|
||||
|
||||
void IFileRead.MoveArchive()
|
||||
{
|
||||
throw new Exception(string.Concat("Not ", nameof(_IsDuplicator)));
|
||||
}
|
||||
|
||||
void IFileRead.Callback(object state)
|
||||
{
|
||||
throw new Exception(string.Concat("Not ", nameof(_IsDuplicator)));
|
||||
}
|
||||
|
||||
private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
|
||||
{
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> results = new(string.Empty, null, null, new List<FileInfo>());
|
||||
_Logistics = new Logistics(this, reportFullPath, useSplitForMID: true);
|
||||
SetFileParameterLotIDToLogisticsMID();
|
||||
if (reportFullPath.Length < _MinFileLength)
|
||||
results.Item4.Add(new FileInfo(reportFullPath));
|
||||
else
|
||||
IProcessData iProcessData = new ProcessData(this, _Logistics, results.Item4, _OriginalDataBioRad, dataText: string.Empty);
|
||||
if (iProcessData is ProcessData processData)
|
||||
{
|
||||
IProcessData iProcessData = new ProcessData(this, _Logistics, results.Item4, _OriginalDataBioRad, dataText: string.Empty);
|
||||
if (iProcessData is ProcessData processData)
|
||||
{
|
||||
string mid = string.Concat(processData.Reactor, "-", processData.RDS, "-", processData.PSN);
|
||||
mid = Regex.Replace(mid, @"[\\,\/,\:,\*,\?,\"",\<,\>,\|]", "_").Split('\r')[0].Split('\n')[0];
|
||||
_Logistics.MID = mid;
|
||||
SetFileParameterLotID(mid);
|
||||
_Logistics.ProcessJobID = processData.Reactor;
|
||||
}
|
||||
if (!iProcessData.Details.Any())
|
||||
throw new Exception(string.Concat("No Data - ", dateTime.Ticks));
|
||||
results = iProcessData.GetResults(this, _Logistics, results.Item4);
|
||||
string mid = string.Concat(processData.Reactor, "-", processData.RDS, "-", processData.PSN);
|
||||
mid = Regex.Replace(mid, @"[\\,\/,\:,\*,\?,\"",\<,\>,\|]", "_").Split('\r')[0].Split('\n')[0];
|
||||
_Logistics.MID = mid;
|
||||
SetFileParameterLotID(mid);
|
||||
_Logistics.ProcessJobID = processData.Reactor;
|
||||
}
|
||||
return results;
|
||||
if (!iProcessData.Details.Any())
|
||||
throw new Exception(string.Concat("No Data - ", dateTime.Ticks));
|
||||
results = iProcessData.GetResults(this, _Logistics, results.Item4);
|
||||
}
|
||||
|
||||
return results;
|
||||
}
|
||||
|
||||
}
|
@ -1,19 +1,13 @@
|
||||
namespace Adaptation.FileHandlers.Stratus
|
||||
namespace Adaptation.FileHandlers.Stratus;
|
||||
|
||||
public class Point
|
||||
{
|
||||
|
||||
public class Point
|
||||
{
|
||||
public string HeaderUniqueId { get; set; }
|
||||
public string Position { get; set; }
|
||||
public string Thickness { get; set; }
|
||||
public string UniqueId { get; set; }
|
||||
|
||||
public string HeaderUniqueId { get; set; }
|
||||
public string Position { get; set; }
|
||||
public string Thickness { get; set; }
|
||||
public string UniqueId { get; set; }
|
||||
|
||||
public override string ToString()
|
||||
{
|
||||
return string.Concat(Position, ";", Thickness, ";");
|
||||
}
|
||||
|
||||
}
|
||||
public override string ToString() => string.Concat(Position, ";", Thickness, ";");
|
||||
|
||||
}
|
@ -11,457 +11,451 @@ using System.Text;
|
||||
using System.Text.Json;
|
||||
using System.Text.RegularExpressions;
|
||||
|
||||
namespace Adaptation.FileHandlers.Stratus
|
||||
namespace Adaptation.FileHandlers.Stratus;
|
||||
|
||||
public partial class ProcessData : IProcessData
|
||||
{
|
||||
|
||||
public partial class ProcessData : IProcessData
|
||||
private readonly List<object> _Details;
|
||||
|
||||
public string JobID { get; set; }
|
||||
public string MesEntity { get; set; }
|
||||
public string Batch { get; set; }
|
||||
public string Cassette { get; set; }
|
||||
public string Date { get; set; }
|
||||
public string FilePath { get; set; }
|
||||
public string MeanThickness { get; set; }
|
||||
public string Employee { get; set; }
|
||||
public string PSN { get; set; }
|
||||
public string RDS { get; set; }
|
||||
public string Reactor { get; set; }
|
||||
public string Recipe { get; set; }
|
||||
public string StdDev { get; set; }
|
||||
public string Title { get; set; }
|
||||
public string UniqueId { get; set; }
|
||||
|
||||
List<object> Shared.Properties.IProcessData.Details => _Details;
|
||||
|
||||
private int _I;
|
||||
private string _Data;
|
||||
private readonly ILog _Log;
|
||||
|
||||
public ProcessData(IFileRead fileRead, Logistics logistics, List<FileInfo> fileInfoCollection, string originalDataBioRad, string dataText)
|
||||
{
|
||||
JobID = logistics.JobID;
|
||||
fileInfoCollection.Clear();
|
||||
_Details = new List<object>();
|
||||
MesEntity = logistics.MesEntity;
|
||||
_Log = LogManager.GetLogger(typeof(ProcessData));
|
||||
Parse(fileRead, logistics, fileInfoCollection, originalDataBioRad, dataText);
|
||||
}
|
||||
|
||||
private readonly List<object> _Details;
|
||||
string IProcessData.GetCurrentReactor(IFileRead fileRead, Logistics logistics, Dictionary<string, string> reactors) => throw new Exception(string.Concat("See ", nameof(Parse)));
|
||||
|
||||
public string JobID { get; set; }
|
||||
public string MesEntity { get; set; }
|
||||
public string Batch { get; set; }
|
||||
public string Cassette { get; set; }
|
||||
public string Date { get; set; }
|
||||
public string FilePath { get; set; }
|
||||
public string MeanThickness { get; set; }
|
||||
public string Employee { get; set; }
|
||||
public string PSN { get; set; }
|
||||
public string RDS { get; set; }
|
||||
public string Reactor { get; set; }
|
||||
public string Recipe { get; set; }
|
||||
public string StdDev { get; set; }
|
||||
public string Title { get; set; }
|
||||
public string UniqueId { get; set; }
|
||||
|
||||
List<object> Shared.Properties.IProcessData.Details => _Details;
|
||||
|
||||
private int _I;
|
||||
private string _Data;
|
||||
private readonly ILog _Log;
|
||||
|
||||
public ProcessData(IFileRead fileRead, Logistics logistics, List<FileInfo> fileInfoCollection, string originalDataBioRad, string dataText)
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> IProcessData.GetResults(IFileRead fileRead, Logistics logistics, List<FileInfo> fileInfoCollection)
|
||||
{
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
|
||||
List<Test> tests = new();
|
||||
foreach (object item in _Details)
|
||||
tests.Add(Test.BioRadStratus);
|
||||
List<IDescription> descriptions = fileRead.GetDescriptions(fileRead, tests, this);
|
||||
if (tests.Count != descriptions.Count)
|
||||
throw new Exception();
|
||||
for (int i = 0; i < tests.Count; i++)
|
||||
{
|
||||
JobID = logistics.JobID;
|
||||
fileInfoCollection.Clear();
|
||||
_Details = new List<object>();
|
||||
MesEntity = logistics.MesEntity;
|
||||
_Log = LogManager.GetLogger(typeof(ProcessData));
|
||||
Parse(fileRead, logistics, fileInfoCollection, originalDataBioRad, dataText);
|
||||
}
|
||||
|
||||
string IProcessData.GetCurrentReactor(IFileRead fileRead, Logistics logistics, Dictionary<string, string> reactors)
|
||||
{
|
||||
throw new Exception(string.Concat("See ", nameof(Parse)));
|
||||
}
|
||||
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> IProcessData.GetResults(IFileRead fileRead, Logistics logistics, List<FileInfo> fileInfoCollection)
|
||||
{
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
|
||||
List<Test> tests = new();
|
||||
foreach (object item in _Details)
|
||||
tests.Add(Test.BioRadStratus);
|
||||
List<IDescription> descriptions = fileRead.GetDescriptions(fileRead, tests, this);
|
||||
if (tests.Count != descriptions.Count)
|
||||
if (descriptions[i] is not Description description)
|
||||
throw new Exception();
|
||||
if (description.Test != (int)tests[i])
|
||||
throw new Exception();
|
||||
for (int i = 0; i < tests.Count; i++)
|
||||
{
|
||||
if (descriptions[i] is not Description description)
|
||||
throw new Exception();
|
||||
if (description.Test != (int)tests[i])
|
||||
throw new Exception();
|
||||
}
|
||||
List<Description> fileReadDescriptions = (from l in descriptions select (Description)l).ToList();
|
||||
string json = JsonSerializer.Serialize(fileReadDescriptions, fileReadDescriptions.GetType());
|
||||
JsonElement[] jsonElements = JsonSerializer.Deserialize<JsonElement[]>(json);
|
||||
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(logistics.Logistics1[0], tests.ToArray(), jsonElements, fileInfoCollection);
|
||||
return results;
|
||||
}
|
||||
List<Description> fileReadDescriptions = (from l in descriptions select (Description)l).ToList();
|
||||
string json = JsonSerializer.Serialize(fileReadDescriptions, fileReadDescriptions.GetType());
|
||||
JsonElement[] jsonElements = JsonSerializer.Deserialize<JsonElement[]>(json);
|
||||
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(logistics.Logistics1[0], tests.ToArray(), jsonElements, fileInfoCollection);
|
||||
return results;
|
||||
}
|
||||
|
||||
private string GetBefore(string text)
|
||||
private string GetBefore(string text)
|
||||
{
|
||||
string str;
|
||||
string str1;
|
||||
int num = _Data.IndexOf(text, _I);
|
||||
if (num <= -1)
|
||||
{
|
||||
str = _Data.Substring(_I);
|
||||
_I = _Data.Length;
|
||||
str1 = str.Trim();
|
||||
}
|
||||
else
|
||||
{
|
||||
str = _Data.Substring(_I, num - _I);
|
||||
_I = num + text.Length;
|
||||
str1 = str.Trim();
|
||||
}
|
||||
return str1;
|
||||
}
|
||||
|
||||
private string GetBefore(string text, bool trim)
|
||||
{
|
||||
string str;
|
||||
string before;
|
||||
if (!trim)
|
||||
{
|
||||
string str;
|
||||
string str1;
|
||||
int num = _Data.IndexOf(text, _I);
|
||||
if (num <= -1)
|
||||
{
|
||||
str = _Data.Substring(_I);
|
||||
_I = _Data.Length;
|
||||
str1 = str.Trim();
|
||||
before = str;
|
||||
}
|
||||
else
|
||||
{
|
||||
str = _Data.Substring(_I, num - _I);
|
||||
_I = num + text.Length;
|
||||
str1 = str.Trim();
|
||||
before = str;
|
||||
}
|
||||
return str1;
|
||||
}
|
||||
|
||||
private string GetBefore(string text, bool trim)
|
||||
else
|
||||
{
|
||||
string str;
|
||||
string before;
|
||||
if (!trim)
|
||||
before = GetBefore(text);
|
||||
}
|
||||
return before;
|
||||
}
|
||||
|
||||
private string GetToEOL()
|
||||
{
|
||||
string result;
|
||||
if (_Data.IndexOf("\n", _I) > -1)
|
||||
result = GetBefore("\n");
|
||||
else
|
||||
result = GetBefore(Environment.NewLine);
|
||||
return result;
|
||||
}
|
||||
|
||||
private string GetToEOL(bool trim)
|
||||
{
|
||||
string str;
|
||||
if (_Data.IndexOf("\n", _I) > -1)
|
||||
str = (!trim ? GetBefore("\n", false) : GetToEOL());
|
||||
else
|
||||
str = (!trim ? GetBefore(Environment.NewLine, false) : GetToEOL());
|
||||
return str;
|
||||
}
|
||||
|
||||
private string GetToken()
|
||||
{
|
||||
while (true)
|
||||
{
|
||||
if ((_I >= _Data.Length || !IsNullOrWhiteSpace(_Data.Substring(_I, 1))))
|
||||
{
|
||||
int num = _Data.IndexOf(text, _I);
|
||||
if (num <= -1)
|
||||
{
|
||||
str = _Data.Substring(_I);
|
||||
_I = _Data.Length;
|
||||
before = str;
|
||||
}
|
||||
else
|
||||
{
|
||||
str = _Data.Substring(_I, num - _I);
|
||||
_I = num + text.Length;
|
||||
before = str;
|
||||
}
|
||||
break;
|
||||
}
|
||||
else
|
||||
_I++;
|
||||
}
|
||||
int num = _I;
|
||||
while (true)
|
||||
{
|
||||
if (num >= _Data.Length || IsNullOrWhiteSpace(_Data.Substring(num, 1)))
|
||||
{
|
||||
before = GetBefore(text);
|
||||
break;
|
||||
}
|
||||
return before;
|
||||
num++;
|
||||
}
|
||||
string str = _Data.Substring(_I, num - _I);
|
||||
_I = num;
|
||||
return str.Trim();
|
||||
}
|
||||
|
||||
private string GetToEOL()
|
||||
{
|
||||
string result;
|
||||
if (_Data.IndexOf("\n", _I) > -1)
|
||||
result = GetBefore("\n");
|
||||
else
|
||||
result = GetBefore(Environment.NewLine);
|
||||
return result;
|
||||
}
|
||||
private string GetToText(string text)
|
||||
{
|
||||
string str = _Data.Substring(_I, _Data.IndexOf(text, _I) - _I).Trim();
|
||||
return str;
|
||||
}
|
||||
|
||||
private string GetToEOL(bool trim)
|
||||
{
|
||||
string str;
|
||||
if (_Data.IndexOf("\n", _I) > -1)
|
||||
str = (!trim ? GetBefore("\n", false) : GetToEOL());
|
||||
else
|
||||
str = (!trim ? GetBefore(Environment.NewLine, false) : GetToEOL());
|
||||
return str;
|
||||
}
|
||||
private bool IsBlankLine()
|
||||
{
|
||||
int num = _Data.IndexOf("\n", _I);
|
||||
return IsNullOrWhiteSpace((num > -1 ? _Data.Substring(_I, num - _I) : _Data.Substring(_I)));
|
||||
}
|
||||
|
||||
private string GetToken()
|
||||
private bool IsNullOrWhiteSpace(string text)
|
||||
{
|
||||
bool flag;
|
||||
int num = 0;
|
||||
while (true)
|
||||
{
|
||||
while (true)
|
||||
if (num >= text.Length)
|
||||
{
|
||||
if ((_I >= _Data.Length || !IsNullOrWhiteSpace(_Data.Substring(_I, 1))))
|
||||
{
|
||||
break;
|
||||
}
|
||||
_I++;
|
||||
flag = true;
|
||||
break;
|
||||
}
|
||||
int num = _I;
|
||||
while (true)
|
||||
else if (char.IsWhiteSpace(text[num]))
|
||||
{
|
||||
if (num >= _Data.Length || IsNullOrWhiteSpace(_Data.Substring(num, 1)))
|
||||
{
|
||||
break;
|
||||
}
|
||||
num++;
|
||||
}
|
||||
string str = _Data.Substring(_I, num - _I);
|
||||
_I = num;
|
||||
return str.Trim();
|
||||
}
|
||||
|
||||
private string GetToText(string text)
|
||||
{
|
||||
string str = _Data.Substring(_I, _Data.IndexOf(text, _I) - _I).Trim();
|
||||
return str;
|
||||
}
|
||||
|
||||
private bool IsBlankLine()
|
||||
{
|
||||
int num = _Data.IndexOf("\n", _I);
|
||||
return IsNullOrWhiteSpace((num > -1 ? _Data.Substring(_I, num - _I) : _Data.Substring(_I)));
|
||||
}
|
||||
|
||||
private bool IsNullOrWhiteSpace(string text)
|
||||
{
|
||||
bool flag;
|
||||
int num = 0;
|
||||
while (true)
|
||||
{
|
||||
if (num >= text.Length)
|
||||
{
|
||||
flag = true;
|
||||
break;
|
||||
}
|
||||
else if (char.IsWhiteSpace(text[num]))
|
||||
{
|
||||
num++;
|
||||
}
|
||||
else
|
||||
{
|
||||
flag = false;
|
||||
break;
|
||||
}
|
||||
}
|
||||
return flag;
|
||||
}
|
||||
|
||||
private string PeekNextLine()
|
||||
{
|
||||
int num = _I;
|
||||
string toEOL = GetToEOL();
|
||||
_I = num;
|
||||
return toEOL;
|
||||
}
|
||||
|
||||
private void ScanPast(string text)
|
||||
{
|
||||
int num = _Data.IndexOf(text, _I);
|
||||
if (num <= -1)
|
||||
{
|
||||
_I = _Data.Length;
|
||||
}
|
||||
else
|
||||
{
|
||||
_I = num + text.Length;
|
||||
flag = false;
|
||||
break;
|
||||
}
|
||||
}
|
||||
return flag;
|
||||
}
|
||||
|
||||
internal static DateTime GetDateTime(Logistics logistics, string dateTimeText)
|
||||
private string PeekNextLine()
|
||||
{
|
||||
int num = _I;
|
||||
string toEOL = GetToEOL();
|
||||
_I = num;
|
||||
return toEOL;
|
||||
}
|
||||
|
||||
private void ScanPast(string text)
|
||||
{
|
||||
int num = _Data.IndexOf(text, _I);
|
||||
if (num <= -1)
|
||||
{
|
||||
DateTime result;
|
||||
string inputDateFormat = "MM/dd/yy HH:mm";
|
||||
if (dateTimeText.Length != inputDateFormat.Length)
|
||||
_I = _Data.Length;
|
||||
}
|
||||
else
|
||||
{
|
||||
_I = num + text.Length;
|
||||
}
|
||||
}
|
||||
|
||||
internal static DateTime GetDateTime(Logistics logistics, string dateTimeText)
|
||||
{
|
||||
DateTime result;
|
||||
string inputDateFormat = "MM/dd/yy HH:mm";
|
||||
if (dateTimeText.Length != inputDateFormat.Length)
|
||||
result = logistics.DateTimeFromSequence;
|
||||
else
|
||||
{
|
||||
if (!DateTime.TryParseExact(dateTimeText, inputDateFormat, CultureInfo.InvariantCulture, DateTimeStyles.None, out DateTime dateTimeParsed))
|
||||
result = logistics.DateTimeFromSequence;
|
||||
else
|
||||
{
|
||||
if (!DateTime.TryParseExact(dateTimeText, inputDateFormat, CultureInfo.InvariantCulture, DateTimeStyles.None, out DateTime dateTimeParsed))
|
||||
if (dateTimeParsed < logistics.DateTimeFromSequence.AddDays(1) && dateTimeParsed > logistics.DateTimeFromSequence.AddDays(-1))
|
||||
result = dateTimeParsed;
|
||||
else
|
||||
result = logistics.DateTimeFromSequence;
|
||||
else
|
||||
{
|
||||
if (dateTimeParsed < logistics.DateTimeFromSequence.AddDays(1) && dateTimeParsed > logistics.DateTimeFromSequence.AddDays(-1))
|
||||
result = dateTimeParsed;
|
||||
else
|
||||
result = logistics.DateTimeFromSequence;
|
||||
}
|
||||
}
|
||||
return result;
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
private void Parse(IFileRead fileRead, Logistics logistics, List<FileInfo> fileInfoCollection, string originalDataBioRad, string receivedData)
|
||||
private void Parse(IFileRead fileRead, Logistics logistics, List<FileInfo> fileInfoCollection, string originalDataBioRad, string receivedData)
|
||||
{
|
||||
if (fileRead is null)
|
||||
{ }
|
||||
_I = 0;
|
||||
_Data = string.Empty;
|
||||
List<Detail> details = new();
|
||||
if (string.IsNullOrEmpty(receivedData))
|
||||
receivedData = File.ReadAllText(logistics.ReportFullPath);
|
||||
_Log.Debug($"****ParseData - Source file contents:");
|
||||
_Log.Debug(receivedData);
|
||||
string[] files = Directory.GetFiles(Path.GetDirectoryName(logistics.ReportFullPath), string.Concat(originalDataBioRad, logistics.Sequence, "*"), SearchOption.TopDirectoryOnly);
|
||||
foreach (string file in files)
|
||||
fileInfoCollection.Add(new FileInfo(file));
|
||||
if (!string.IsNullOrEmpty(receivedData))
|
||||
{
|
||||
if (fileRead is null)
|
||||
{ }
|
||||
int i;
|
||||
int num;
|
||||
int num1;
|
||||
int num2;
|
||||
Point point;
|
||||
Detail detail;
|
||||
string[] segments;
|
||||
string batch = "Batch";
|
||||
string started = "started";
|
||||
string cassette = "Cassette";
|
||||
string startedAt = "started at";
|
||||
_I = 0;
|
||||
_Data = string.Empty;
|
||||
List<Detail> details = new();
|
||||
if (string.IsNullOrEmpty(receivedData))
|
||||
receivedData = File.ReadAllText(logistics.ReportFullPath);
|
||||
_Log.Debug($"****ParseData - Source file contents:");
|
||||
_Log.Debug(receivedData);
|
||||
string[] files = Directory.GetFiles(Path.GetDirectoryName(logistics.ReportFullPath), string.Concat(originalDataBioRad, logistics.Sequence, "*"), SearchOption.TopDirectoryOnly);
|
||||
foreach (string file in files)
|
||||
fileInfoCollection.Add(new FileInfo(file));
|
||||
if (!string.IsNullOrEmpty(receivedData))
|
||||
_Data = receivedData;
|
||||
if (!_Data.Contains(batch) || !_Data.Contains(started))
|
||||
Batch = string.Empty;
|
||||
else
|
||||
{
|
||||
int i;
|
||||
int num;
|
||||
int num1;
|
||||
int num2;
|
||||
Point point;
|
||||
Detail detail;
|
||||
string[] segments;
|
||||
string batch = "Batch";
|
||||
string started = "started";
|
||||
string cassette = "Cassette";
|
||||
string startedAt = "started at";
|
||||
_I = 0;
|
||||
_Data = receivedData;
|
||||
if (!_Data.Contains(batch) || !_Data.Contains(started))
|
||||
Batch = string.Empty;
|
||||
else
|
||||
for (int z = 0; z < int.MaxValue; z++)
|
||||
{
|
||||
for (int z = 0; z < int.MaxValue; z++)
|
||||
{
|
||||
ScanPast(batch);
|
||||
if (!_Data.Substring(_I).Contains(batch))
|
||||
break;
|
||||
}
|
||||
Batch = GetToText(started);
|
||||
ScanPast(startedAt);
|
||||
ScanPast(batch);
|
||||
if (!_Data.Substring(_I).Contains(batch))
|
||||
break;
|
||||
}
|
||||
ScanPast(cassette);
|
||||
if (!_Data.Substring(_I).Contains(started))
|
||||
Cassette = string.Empty;
|
||||
else
|
||||
Cassette = GetToText(started);
|
||||
// Remove illegal characters \/:*?"<>| found in the Cassette.
|
||||
Cassette = Regex.Replace(Cassette, @"[\\,\/,\:,\*,\?,\"",\<,\>,\|]", "_").Split('\r')[0].Split('\n')[0];
|
||||
if (Cassette.StartsWith("1T") || Cassette.StartsWith("1t"))
|
||||
Cassette = Cassette.Substring(2);
|
||||
Title = (!string.IsNullOrEmpty(Batch) ? Batch : Cassette);
|
||||
Batch = GetToText(started);
|
||||
ScanPast(startedAt);
|
||||
string dateTimeText = GetToEOL();
|
||||
if (dateTimeText.EndsWith("."))
|
||||
dateTimeText = dateTimeText.Remove(dateTimeText.Length - 1, 1);
|
||||
DateTime dateTime = GetDateTime(logistics, dateTimeText);
|
||||
Date = dateTime.ToString();
|
||||
if (Cassette.Contains('.'))
|
||||
segments = Cassette.Split(new char[] { '.' });
|
||||
else if (Cassette.Contains('-'))
|
||||
segments = Cassette.Split(new char[] { '-' });
|
||||
else if (!Cassette.Contains('\u005F'))
|
||||
segments = Cassette.Split(new char[] { ' ' });
|
||||
else
|
||||
segments = Cassette.Split(new char[] { '\u005F' });
|
||||
if (segments.Length >= 1)
|
||||
Reactor = segments[0];
|
||||
if (segments.Length >= 2)
|
||||
RDS = segments[1];
|
||||
if (segments.Length >= 3)
|
||||
PSN = segments[2];
|
||||
if (segments.Length >= 4)
|
||||
Employee = segments[3];
|
||||
if (Reactor.Length > 3)
|
||||
{
|
||||
RDS = Reactor;
|
||||
Reactor = string.Empty;
|
||||
}
|
||||
num1 = 0;
|
||||
if (PeekNextLine().Contains("Wafer"))
|
||||
{
|
||||
_Log.Debug("****ProcessData Contains Wafer");
|
||||
while (!PeekNextLine().Contains(cassette))
|
||||
{
|
||||
num2 = num1;
|
||||
num1 = num2 + 1;
|
||||
if (num2 > 25)
|
||||
break;
|
||||
else
|
||||
{
|
||||
_Log.Debug("****ProcessData new stratusBioRadWaferDetail");
|
||||
detail = new Detail();
|
||||
ScanPast("Wafer");
|
||||
detail.Wafer = GetToEOL();
|
||||
if (detail.Wafer.EndsWith("."))
|
||||
{
|
||||
_Log.Debug("****ProcessData Removing Wafer");
|
||||
detail.Wafer = detail.Wafer.Remove(detail.Wafer.Length - 1, 1);
|
||||
}
|
||||
ScanPast("Slot");
|
||||
detail.Slot = GetToEOL();
|
||||
ScanPast("Recipe");
|
||||
Recipe = GetToEOL();
|
||||
if (Recipe.EndsWith("."))
|
||||
{
|
||||
_Log.Debug("****ProcessData Removing Recipe");
|
||||
Recipe = Recipe.Remove(Recipe.Length - 1, 1);
|
||||
}
|
||||
detail.Recipe = Recipe;
|
||||
GetToEOL();
|
||||
if (PeekNextLine().Contains("Thickness"))
|
||||
{
|
||||
ScanPast("1 - ");
|
||||
num = Convert.ToInt32(GetToken());
|
||||
_Log.Debug(string.Concat("****ProcessData Thickness =", num));
|
||||
detail.Points = new();
|
||||
for (i = 0; i < num; i++)
|
||||
{
|
||||
point = new() { Thickness = GetToken() };
|
||||
detail.Points.Add(point);
|
||||
point.Position = Convert.ToString(detail.Points.Count);
|
||||
}
|
||||
}
|
||||
GetToEOL();
|
||||
if (PeekNextLine().Contains("Thickness"))
|
||||
{
|
||||
ScanPast("11 - ");
|
||||
num = Convert.ToInt32(GetToken());
|
||||
for (i = detail.Points.Count; i < num; i++)
|
||||
{
|
||||
point = new() { Thickness = GetToken() };
|
||||
detail.Points.Add(point);
|
||||
point.Position = Convert.ToString(detail.Points.Count);
|
||||
}
|
||||
}
|
||||
ScanPast("Slot");
|
||||
GetToken();
|
||||
detail.PassFail = GetToken();
|
||||
if (detail.PassFail.EndsWith("."))
|
||||
{
|
||||
_Log.Debug("****ProcessData Removing PassFail");
|
||||
detail.PassFail = detail.PassFail.Remove(detail.PassFail.Length - 1, 1);
|
||||
}
|
||||
ScanPast("Mean");
|
||||
detail.Mean = GetToken();
|
||||
if (detail.Mean.EndsWith(","))
|
||||
{
|
||||
_Log.Debug("****ProcessData Removing Mean");
|
||||
detail.Mean = detail.Mean.Remove(detail.Mean.Length - 1, 1);
|
||||
}
|
||||
ScanPast("STDD");
|
||||
detail.StdDev = GetToEOL();
|
||||
if (detail.StdDev.EndsWith("."))
|
||||
{
|
||||
_Log.Debug("****ProcessData Removing stdDev");
|
||||
detail.StdDev = detail.StdDev.Remove(detail.StdDev.Length - 1, 1);
|
||||
}
|
||||
detail.UniqueId = string.Concat("_Wafer-", detail.Wafer, "_Slot-", detail.Slot, "_Point-", detail.Position);
|
||||
details.Add(detail);
|
||||
if (PeekNextLine().Contains(cassette))
|
||||
GetToEOL();
|
||||
if (PeekNextLine().Contains(cassette))
|
||||
GetToEOL();
|
||||
if (PeekNextLine().Contains("Process failed"))
|
||||
GetToEOL();
|
||||
}
|
||||
}
|
||||
ScanPast("Mean");
|
||||
MeanThickness = GetToken();
|
||||
if (MeanThickness.EndsWith(","))
|
||||
{
|
||||
_Log.Debug("****ProcessData Removing MeanThickness");
|
||||
MeanThickness = MeanThickness.Remove(MeanThickness.Length - 1, 1);
|
||||
}
|
||||
ScanPast("STDD");
|
||||
StdDev = GetToken();
|
||||
if (StdDev.EndsWith(","))
|
||||
{
|
||||
_Log.Debug("****ProcessData Removing thi.StdDev");
|
||||
StdDev = StdDev.Remove(StdDev.Length - 1, 1);
|
||||
}
|
||||
}
|
||||
}
|
||||
StringBuilder stringBuilder = new();
|
||||
UniqueId = string.Concat("StratusBioRad_", Reactor, "_", RDS, "_", PSN, "_", logistics.DateTimeFromSequence.ToString("yyyyMMddHHmmssffff"));
|
||||
foreach (Detail detail in details)
|
||||
ScanPast(cassette);
|
||||
if (!_Data.Substring(_I).Contains(started))
|
||||
Cassette = string.Empty;
|
||||
else
|
||||
Cassette = GetToText(started);
|
||||
// Remove illegal characters \/:*?"<>| found in the Cassette.
|
||||
Cassette = Regex.Replace(Cassette, @"[\\,\/,\:,\*,\?,\"",\<,\>,\|]", "_").Split('\r')[0].Split('\n')[0];
|
||||
if (Cassette.StartsWith("1T") || Cassette.StartsWith("1t"))
|
||||
Cassette = Cassette.Substring(2);
|
||||
Title = (!string.IsNullOrEmpty(Batch) ? Batch : Cassette);
|
||||
ScanPast(startedAt);
|
||||
string dateTimeText = GetToEOL();
|
||||
if (dateTimeText.EndsWith("."))
|
||||
dateTimeText = dateTimeText.Remove(dateTimeText.Length - 1, 1);
|
||||
DateTime dateTime = GetDateTime(logistics, dateTimeText);
|
||||
Date = dateTime.ToString();
|
||||
if (Cassette.Contains('.'))
|
||||
segments = Cassette.Split(new char[] { '.' });
|
||||
else if (Cassette.Contains('-'))
|
||||
segments = Cassette.Split(new char[] { '-' });
|
||||
else if (!Cassette.Contains('\u005F'))
|
||||
segments = Cassette.Split(new char[] { ' ' });
|
||||
else
|
||||
segments = Cassette.Split(new char[] { '\u005F' });
|
||||
if (segments.Length >= 1)
|
||||
Reactor = segments[0];
|
||||
if (segments.Length >= 2)
|
||||
RDS = segments[1];
|
||||
if (segments.Length >= 3)
|
||||
PSN = segments[2];
|
||||
if (segments.Length >= 4)
|
||||
Employee = segments[3];
|
||||
if (Reactor.Length > 3)
|
||||
{
|
||||
detail.HeaderUniqueId = UniqueId;
|
||||
detail.UniqueId = string.Concat(UniqueId, detail.UniqueId);
|
||||
if (detail.Points is null)
|
||||
detail.Points = new List<Point>();
|
||||
foreach (Point bioRadDetail in detail.Points)
|
||||
{
|
||||
bioRadDetail.HeaderUniqueId = detail.HeaderUniqueId;
|
||||
bioRadDetail.UniqueId = detail.UniqueId;
|
||||
}
|
||||
stringBuilder.Clear();
|
||||
foreach (Point point in detail.Points)
|
||||
stringBuilder.Append(point.Thickness).Append(',');
|
||||
if (stringBuilder.Length > 0)
|
||||
stringBuilder.Remove(stringBuilder.Length - 1, 1);
|
||||
detail.Thickness = stringBuilder.ToString();
|
||||
stringBuilder.Clear();
|
||||
foreach (Point point in detail.Points)
|
||||
stringBuilder.Append(point.Position).Append(',');
|
||||
if (stringBuilder.Length > 0)
|
||||
stringBuilder.Remove(stringBuilder.Length - 1, 1);
|
||||
detail.Position = stringBuilder.ToString();
|
||||
RDS = Reactor;
|
||||
Reactor = string.Empty;
|
||||
}
|
||||
num1 = 0;
|
||||
if (PeekNextLine().Contains("Wafer"))
|
||||
{
|
||||
_Log.Debug("****ProcessData Contains Wafer");
|
||||
while (!PeekNextLine().Contains(cassette))
|
||||
{
|
||||
num2 = num1;
|
||||
num1 = num2 + 1;
|
||||
if (num2 > 25)
|
||||
break;
|
||||
else
|
||||
{
|
||||
_Log.Debug("****ProcessData new stratusBioRadWaferDetail");
|
||||
detail = new Detail();
|
||||
ScanPast("Wafer");
|
||||
detail.Wafer = GetToEOL();
|
||||
if (detail.Wafer.EndsWith("."))
|
||||
{
|
||||
_Log.Debug("****ProcessData Removing Wafer");
|
||||
detail.Wafer = detail.Wafer.Remove(detail.Wafer.Length - 1, 1);
|
||||
}
|
||||
ScanPast("Slot");
|
||||
detail.Slot = GetToEOL();
|
||||
ScanPast("Recipe");
|
||||
Recipe = GetToEOL();
|
||||
if (Recipe.EndsWith("."))
|
||||
{
|
||||
_Log.Debug("****ProcessData Removing Recipe");
|
||||
Recipe = Recipe.Remove(Recipe.Length - 1, 1);
|
||||
}
|
||||
detail.Recipe = Recipe;
|
||||
_ = GetToEOL();
|
||||
if (PeekNextLine().Contains("Thickness"))
|
||||
{
|
||||
ScanPast("1 - ");
|
||||
num = Convert.ToInt32(GetToken());
|
||||
_Log.Debug(string.Concat("****ProcessData Thickness =", num));
|
||||
detail.Points = new();
|
||||
for (i = 0; i < num; i++)
|
||||
{
|
||||
point = new() { Thickness = GetToken() };
|
||||
detail.Points.Add(point);
|
||||
point.Position = Convert.ToString(detail.Points.Count);
|
||||
}
|
||||
}
|
||||
_ = GetToEOL();
|
||||
if (PeekNextLine().Contains("Thickness"))
|
||||
{
|
||||
ScanPast("11 - ");
|
||||
num = Convert.ToInt32(GetToken());
|
||||
for (i = detail.Points.Count; i < num; i++)
|
||||
{
|
||||
point = new() { Thickness = GetToken() };
|
||||
detail.Points.Add(point);
|
||||
point.Position = Convert.ToString(detail.Points.Count);
|
||||
}
|
||||
}
|
||||
ScanPast("Slot");
|
||||
_ = GetToken();
|
||||
detail.PassFail = GetToken();
|
||||
if (detail.PassFail.EndsWith("."))
|
||||
{
|
||||
_Log.Debug("****ProcessData Removing PassFail");
|
||||
detail.PassFail = detail.PassFail.Remove(detail.PassFail.Length - 1, 1);
|
||||
}
|
||||
ScanPast("Mean");
|
||||
detail.Mean = GetToken();
|
||||
if (detail.Mean.EndsWith(","))
|
||||
{
|
||||
_Log.Debug("****ProcessData Removing Mean");
|
||||
detail.Mean = detail.Mean.Remove(detail.Mean.Length - 1, 1);
|
||||
}
|
||||
ScanPast("STDD");
|
||||
detail.StdDev = GetToEOL();
|
||||
if (detail.StdDev.EndsWith("."))
|
||||
{
|
||||
_Log.Debug("****ProcessData Removing stdDev");
|
||||
detail.StdDev = detail.StdDev.Remove(detail.StdDev.Length - 1, 1);
|
||||
}
|
||||
detail.UniqueId = string.Concat("_Wafer-", detail.Wafer, "_Slot-", detail.Slot, "_Point-", detail.Position);
|
||||
details.Add(detail);
|
||||
if (PeekNextLine().Contains(cassette))
|
||||
_ = GetToEOL();
|
||||
if (PeekNextLine().Contains(cassette))
|
||||
_ = GetToEOL();
|
||||
if (PeekNextLine().Contains("Process failed"))
|
||||
_ = GetToEOL();
|
||||
}
|
||||
}
|
||||
ScanPast("Mean");
|
||||
MeanThickness = GetToken();
|
||||
if (MeanThickness.EndsWith(","))
|
||||
{
|
||||
_Log.Debug("****ProcessData Removing MeanThickness");
|
||||
MeanThickness = MeanThickness.Remove(MeanThickness.Length - 1, 1);
|
||||
}
|
||||
ScanPast("STDD");
|
||||
StdDev = GetToken();
|
||||
if (StdDev.EndsWith(","))
|
||||
{
|
||||
_Log.Debug("****ProcessData Removing thi.StdDev");
|
||||
StdDev = StdDev.Remove(StdDev.Length - 1, 1);
|
||||
}
|
||||
}
|
||||
fileInfoCollection.Add(new FileInfo(logistics.ReportFullPath));
|
||||
_Details.AddRange(details);
|
||||
}
|
||||
|
||||
StringBuilder stringBuilder = new();
|
||||
UniqueId = string.Concat("StratusBioRad_", Reactor, "_", RDS, "_", PSN, "_", logistics.DateTimeFromSequence.ToString("yyyyMMddHHmmssffff"));
|
||||
foreach (Detail detail in details)
|
||||
{
|
||||
detail.HeaderUniqueId = UniqueId;
|
||||
detail.UniqueId = string.Concat(UniqueId, detail.UniqueId);
|
||||
if (detail.Points is null)
|
||||
detail.Points = new List<Point>();
|
||||
foreach (Point bioRadDetail in detail.Points)
|
||||
{
|
||||
bioRadDetail.HeaderUniqueId = detail.HeaderUniqueId;
|
||||
bioRadDetail.UniqueId = detail.UniqueId;
|
||||
}
|
||||
_ = stringBuilder.Clear();
|
||||
foreach (Point point in detail.Points)
|
||||
_ = stringBuilder.Append(point.Thickness).Append(',');
|
||||
if (stringBuilder.Length > 0)
|
||||
_ = stringBuilder.Remove(stringBuilder.Length - 1, 1);
|
||||
detail.Thickness = stringBuilder.ToString();
|
||||
_ = stringBuilder.Clear();
|
||||
foreach (Point point in detail.Points)
|
||||
_ = stringBuilder.Append(point.Position).Append(',');
|
||||
if (stringBuilder.Length > 0)
|
||||
_ = stringBuilder.Remove(stringBuilder.Length - 1, 1);
|
||||
detail.Position = stringBuilder.ToString();
|
||||
}
|
||||
fileInfoCollection.Add(new FileInfo(logistics.ReportFullPath));
|
||||
_Details.AddRange(details);
|
||||
}
|
||||
|
||||
}
|
140
Adaptation/FileHandlers/ToArchive/FileRead.cs
Normal file
140
Adaptation/FileHandlers/ToArchive/FileRead.cs
Normal file
@ -0,0 +1,140 @@
|
||||
using Adaptation.Eaf.Management.ConfigurationData.CellAutomation;
|
||||
using Adaptation.Ifx.Eaf.EquipmentConnector.File.Configuration;
|
||||
using Adaptation.Shared;
|
||||
using Adaptation.Shared.Duplicator;
|
||||
using Adaptation.Shared.Methods;
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Text.Json;
|
||||
|
||||
namespace Adaptation.FileHandlers.ToArchive;
|
||||
|
||||
public class FileRead : Shared.FileRead, IFileRead
|
||||
{
|
||||
|
||||
public FileRead(ISMTP smtp, Dictionary<string, string> fileParameter, string cellInstanceName, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList<ModelObjectParameterDefinition> modelObjectParameters, string equipmentDictionaryName, Dictionary<string, List<long>> dummyRuns, bool useCyclicalForDescription, bool isEAFHosted) :
|
||||
base(new Description(), false, smtp, fileParameter, cellInstanceName, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, useCyclicalForDescription, isEAFHosted)
|
||||
{
|
||||
_MinFileLength = 10;
|
||||
_NullData = string.Empty;
|
||||
_Logistics = new Logistics(this);
|
||||
if (_FileParameter is null)
|
||||
throw new Exception(cellInstanceConnectionName);
|
||||
if (_ModelObjectParameterDefinitions is null)
|
||||
throw new Exception(cellInstanceConnectionName);
|
||||
if (!_IsDuplicator)
|
||||
throw new Exception(cellInstanceConnectionName);
|
||||
}
|
||||
|
||||
void IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, Exception exception)
|
||||
{
|
||||
bool isErrorFile = exception is not null;
|
||||
if (!isErrorFile && !string.IsNullOrEmpty(_Logistics.ReportFullPath))
|
||||
{
|
||||
FileInfo fileInfo = new(_Logistics.ReportFullPath);
|
||||
if (fileInfo.Exists && fileInfo.LastWriteTime < fileInfo.CreationTime)
|
||||
File.SetLastWriteTime(_Logistics.ReportFullPath, fileInfo.CreationTime);
|
||||
}
|
||||
Move(extractResults, exception);
|
||||
}
|
||||
|
||||
void IFileRead.WaitForThread() => WaitForThread(thread: null, threadExceptions: null);
|
||||
|
||||
string IFileRead.GetEventDescription()
|
||||
{
|
||||
string result = _Description.GetEventDescription();
|
||||
return result;
|
||||
}
|
||||
|
||||
List<string> IFileRead.GetHeaderNames()
|
||||
{
|
||||
List<string> results = _Description.GetHeaderNames();
|
||||
return results;
|
||||
}
|
||||
|
||||
string[] IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, string to, string from, string resolvedFileLocation, Exception exception)
|
||||
{
|
||||
string[] results = Move(extractResults, to, from, resolvedFileLocation, exception);
|
||||
return results;
|
||||
}
|
||||
|
||||
JsonProperty[] IFileRead.GetDefault()
|
||||
{
|
||||
JsonProperty[] results = _Description.GetDefault(this, _Logistics);
|
||||
return results;
|
||||
}
|
||||
|
||||
Dictionary<string, string> IFileRead.GetDisplayNamesJsonElement()
|
||||
{
|
||||
Dictionary<string, string> results = _Description.GetDisplayNamesJsonElement(this);
|
||||
return results;
|
||||
}
|
||||
|
||||
List<IDescription> IFileRead.GetDescriptions(IFileRead fileRead, List<Test> tests, IProcessData processData)
|
||||
{
|
||||
List<IDescription> results = _Description.GetDescriptions(fileRead, _Logistics, tests, processData);
|
||||
return results;
|
||||
}
|
||||
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.GetExtractResult(string reportFullPath, string eventName)
|
||||
{
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
|
||||
if (string.IsNullOrEmpty(eventName))
|
||||
throw new Exception();
|
||||
_ReportFullPath = reportFullPath;
|
||||
DateTime dateTime = DateTime.Now;
|
||||
results = GetExtractResult(reportFullPath, dateTime);
|
||||
if (results.Item3 is null)
|
||||
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(results.Item1, Array.Empty<Test>(), JsonSerializer.Deserialize<JsonElement[]>("[]"), results.Item4);
|
||||
if (results.Item3.Length > 0 && _IsEAFHosted)
|
||||
WritePDSF(this, results.Item3);
|
||||
UpdateLastTicksDuration(DateTime.Now.Ticks - dateTime.Ticks);
|
||||
return results;
|
||||
}
|
||||
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.ReExtract()
|
||||
{
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
|
||||
List<string> headerNames = _Description.GetHeaderNames();
|
||||
Dictionary<string, string> keyValuePairs = _Description.GetDisplayNamesJsonElement(this);
|
||||
results = ReExtract(this, headerNames, keyValuePairs);
|
||||
return results;
|
||||
}
|
||||
|
||||
void IFileRead.CheckTests(Test[] tests, bool extra)
|
||||
{
|
||||
if (_Description is not Description)
|
||||
throw new Exception();
|
||||
}
|
||||
|
||||
void IFileRead.Callback(object state) => throw new Exception(string.Concat("Not ", nameof(_IsDuplicator)));
|
||||
|
||||
private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
|
||||
{
|
||||
if (dateTime == DateTime.MinValue)
|
||||
{ }
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> results = new(string.Empty, null, null, new List<FileInfo>());
|
||||
_Logistics = new Logistics(this, reportFullPath, useSplitForMID: true);
|
||||
SetFileParameterLotIDToLogisticsMID();
|
||||
|
||||
string[] segments = Path.GetFileNameWithoutExtension(reportFullPath).Split('_');
|
||||
string duplicateDirectory = string.Concat(_FileConnectorConfiguration.TargetFileLocation, @"\", segments[0]);
|
||||
if (segments.Length > 2)
|
||||
duplicateDirectory = string.Concat(duplicateDirectory, @"-", segments[2]);
|
||||
if (!Directory.Exists(duplicateDirectory))
|
||||
_ = Directory.CreateDirectory(duplicateDirectory);
|
||||
|
||||
string logisticsSequence = _Logistics.Sequence.ToString();
|
||||
bool isDummyRun = _DummyRuns.Any() && _DummyRuns.ContainsKey(_Logistics.JobID) && _DummyRuns[_Logistics.JobID].Any() && (from l in _DummyRuns[_Logistics.JobID] where l == _Logistics.Sequence select 1).Any();
|
||||
|
||||
List<Tuple<Shared.Properties.IScopeInfo, string>> tuples = new();
|
||||
|
||||
string destinationDirectory = WriteScopeInfo(_ProgressPath, _Logistics, dateTime, duplicateDirectory, tuples);
|
||||
if (isDummyRun)
|
||||
Shared0607(reportFullPath, duplicateDirectory, logisticsSequence, destinationDirectory);
|
||||
return results;
|
||||
}
|
||||
|
||||
}
|
@ -9,167 +9,147 @@ using System.IO;
|
||||
using System.Linq;
|
||||
using System.Text.Json;
|
||||
|
||||
namespace Adaptation.FileHandlers.txt
|
||||
namespace Adaptation.FileHandlers.txt;
|
||||
|
||||
public class FileRead : Shared.FileRead, IFileRead
|
||||
{
|
||||
|
||||
public class FileRead : Shared.FileRead, IFileRead
|
||||
private readonly string _OriginalDataBioRad;
|
||||
|
||||
public FileRead(ISMTP smtp, Dictionary<string, string> fileParameter, string cellInstanceName, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList<ModelObjectParameterDefinition> modelObjectParameters, string equipmentDictionaryName, Dictionary<string, List<long>> dummyRuns, bool useCyclicalForDescription, bool isEAFHosted) :
|
||||
base(new Description(), true, smtp, fileParameter, cellInstanceName, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, useCyclicalForDescription, isEAFHosted)
|
||||
{
|
||||
_MinFileLength = 10;
|
||||
_NullData = string.Empty;
|
||||
_Logistics = new Logistics(this);
|
||||
if (_FileParameter is null)
|
||||
throw new Exception(cellInstanceConnectionName);
|
||||
if (_ModelObjectParameterDefinitions is null)
|
||||
throw new Exception(cellInstanceConnectionName);
|
||||
if (_IsDuplicator)
|
||||
throw new Exception(cellInstanceConnectionName);
|
||||
_OriginalDataBioRad = "OriginalDataBioRad_";
|
||||
}
|
||||
|
||||
private readonly string _OriginalDataBioRad;
|
||||
void IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, Exception exception) => Move(extractResults, exception);
|
||||
|
||||
public FileRead(ISMTP smtp, Dictionary<string, string> fileParameter, string cellInstanceName, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList<ModelObjectParameterDefinition> modelObjectParameters, string equipmentDictionaryName, Dictionary<string, List<long>> dummyRuns, bool useCyclicalForDescription, bool isEAFHosted, int hyphenXToArchive, int hyphenIsArchive) :
|
||||
base(new Description(), true, smtp, fileParameter, cellInstanceName, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, useCyclicalForDescription, isEAFHosted, hyphenXToArchive, hyphenIsArchive)
|
||||
void IFileRead.WaitForThread() => WaitForThread(thread: null, threadExceptions: null);
|
||||
|
||||
string IFileRead.GetEventDescription()
|
||||
{
|
||||
string result = _Description.GetEventDescription();
|
||||
return result;
|
||||
}
|
||||
|
||||
List<string> IFileRead.GetHeaderNames()
|
||||
{
|
||||
List<string> results = _Description.GetHeaderNames();
|
||||
return results;
|
||||
}
|
||||
|
||||
string[] IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, string to, string from, string resolvedFileLocation, Exception exception)
|
||||
{
|
||||
string[] results = Move(extractResults, to, from, resolvedFileLocation, exception);
|
||||
return results;
|
||||
}
|
||||
|
||||
JsonProperty[] IFileRead.GetDefault()
|
||||
{
|
||||
JsonProperty[] results = _Description.GetDefault(this, _Logistics);
|
||||
return results;
|
||||
}
|
||||
|
||||
Dictionary<string, string> IFileRead.GetDisplayNamesJsonElement()
|
||||
{
|
||||
Dictionary<string, string> results = _Description.GetDisplayNamesJsonElement(this);
|
||||
return results;
|
||||
}
|
||||
|
||||
List<IDescription> IFileRead.GetDescriptions(IFileRead fileRead, List<Test> tests, IProcessData processData)
|
||||
{
|
||||
List<IDescription> results = _Description.GetDescriptions(fileRead, _Logistics, tests, processData);
|
||||
return results;
|
||||
}
|
||||
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.GetExtractResult(string reportFullPath, string eventName)
|
||||
{
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
|
||||
if (string.IsNullOrEmpty(eventName))
|
||||
throw new Exception();
|
||||
_ReportFullPath = reportFullPath;
|
||||
DateTime dateTime = DateTime.Now;
|
||||
results = GetExtractResult(reportFullPath, dateTime);
|
||||
if (results.Item3 is null)
|
||||
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(results.Item1, Array.Empty<Test>(), JsonSerializer.Deserialize<JsonElement[]>("[]"), results.Item4);
|
||||
if (results.Item3.Length > 0 && _IsEAFHosted)
|
||||
WritePDSF(this, results.Item3);
|
||||
UpdateLastTicksDuration(DateTime.Now.Ticks - dateTime.Ticks);
|
||||
return results;
|
||||
}
|
||||
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.ReExtract()
|
||||
{
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
|
||||
List<string> headerNames = _Description.GetHeaderNames();
|
||||
Dictionary<string, string> keyValuePairs = _Description.GetDisplayNamesJsonElement(this);
|
||||
results = ReExtract(this, headerNames, keyValuePairs);
|
||||
return results;
|
||||
}
|
||||
|
||||
void IFileRead.CheckTests(Test[] tests, bool extra) => throw new Exception(string.Concat("Not ", nameof(_IsDuplicator)));
|
||||
|
||||
void IFileRead.Callback(object state) => throw new Exception(string.Concat("Not ", nameof(_IsDuplicator)));
|
||||
|
||||
private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
|
||||
{
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> results = new(string.Empty, null, null, new List<FileInfo>());
|
||||
_Logistics = new Logistics(this, reportFullPath, useSplitForMID: false);
|
||||
SetFileParameterLotID(_Logistics.MID);
|
||||
if (reportFullPath.Length < _MinFileLength)
|
||||
results.Item4.Add(new FileInfo(reportFullPath));
|
||||
else
|
||||
{
|
||||
_MinFileLength = 10;
|
||||
_NullData = string.Empty;
|
||||
_Logistics = new Logistics(this);
|
||||
if (_FileParameter is null)
|
||||
throw new Exception(cellInstanceConnectionName);
|
||||
if (_ModelObjectParameterDefinitions is null)
|
||||
throw new Exception(cellInstanceConnectionName);
|
||||
if (_IsDuplicator)
|
||||
throw new Exception(cellInstanceConnectionName);
|
||||
_OriginalDataBioRad = "OriginalDataBioRad_";
|
||||
}
|
||||
|
||||
void IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, Exception exception)
|
||||
{
|
||||
Move(this, extractResults, exception);
|
||||
}
|
||||
|
||||
void IFileRead.WaitForThread()
|
||||
{
|
||||
WaitForThread(thread: null, threadExceptions: null);
|
||||
}
|
||||
|
||||
string IFileRead.GetEventDescription()
|
||||
{
|
||||
string result = _Description.GetEventDescription();
|
||||
return result;
|
||||
}
|
||||
|
||||
List<string> IFileRead.GetHeaderNames()
|
||||
{
|
||||
List<string> results = _Description.GetHeaderNames();
|
||||
return results;
|
||||
}
|
||||
|
||||
string[] IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, string to, string from, string resolvedFileLocation, Exception exception)
|
||||
{
|
||||
string[] results = Move(extractResults, to, from, resolvedFileLocation, exception);
|
||||
return results;
|
||||
}
|
||||
|
||||
JsonProperty[] IFileRead.GetDefault()
|
||||
{
|
||||
JsonProperty[] results = _Description.GetDefault(this, _Logistics);
|
||||
return results;
|
||||
}
|
||||
|
||||
Dictionary<string, string> IFileRead.GetDisplayNamesJsonElement()
|
||||
{
|
||||
Dictionary<string, string> results = _Description.GetDisplayNamesJsonElement(this);
|
||||
return results;
|
||||
}
|
||||
|
||||
List<IDescription> IFileRead.GetDescriptions(IFileRead fileRead, List<Test> tests, IProcessData processData)
|
||||
{
|
||||
List<IDescription> results = _Description.GetDescriptions(fileRead, _Logistics, tests, processData);
|
||||
return results;
|
||||
}
|
||||
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.GetExtractResult(string reportFullPath, string eventName)
|
||||
{
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
|
||||
if (string.IsNullOrEmpty(eventName))
|
||||
throw new Exception();
|
||||
_ReportFullPath = reportFullPath;
|
||||
DateTime dateTime = DateTime.Now;
|
||||
results = GetExtractResult(reportFullPath, dateTime);
|
||||
if (results.Item3 is null)
|
||||
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(results.Item1, new Test[] { }, JsonSerializer.Deserialize<JsonElement[]>("[]"), results.Item4);
|
||||
if (results.Item3.Length > 0 && _IsEAFHosted)
|
||||
WritePDSF(this, results.Item3);
|
||||
UpdateLastTicksDuration(DateTime.Now.Ticks - dateTime.Ticks);
|
||||
return results;
|
||||
}
|
||||
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.ReExtract()
|
||||
{
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
|
||||
List<string> headerNames = _Description.GetHeaderNames();
|
||||
Dictionary<string, string> keyValuePairs = _Description.GetDisplayNamesJsonElement(this);
|
||||
results = ReExtract(this, headerNames, keyValuePairs);
|
||||
return results;
|
||||
}
|
||||
|
||||
void IFileRead.CheckTests(Test[] tests, bool extra)
|
||||
{
|
||||
throw new Exception(string.Concat("Not ", nameof(_IsDuplicator)));
|
||||
}
|
||||
|
||||
void IFileRead.MoveArchive()
|
||||
{
|
||||
throw new Exception(string.Concat("Not ", nameof(_IsDuplicator)));
|
||||
}
|
||||
|
||||
void IFileRead.Callback(object state)
|
||||
{
|
||||
throw new Exception(string.Concat("Not ", nameof(_IsDuplicator)));
|
||||
}
|
||||
|
||||
private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
|
||||
{
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> results = new(string.Empty, null, null, new List<FileInfo>());
|
||||
_Logistics = new Logistics(this, reportFullPath, useSplitForMID: false);
|
||||
SetFileParameterLotID(_Logistics.MID);
|
||||
if (reportFullPath.Length < _MinFileLength)
|
||||
results.Item4.Add(new FileInfo(reportFullPath));
|
||||
else
|
||||
bool isBioRad;
|
||||
string dataText;
|
||||
string cassetteID;
|
||||
string fileNameTemp;
|
||||
string tupleFileName;
|
||||
DateTime cassetteTicks;
|
||||
string directoryName = Path.GetDirectoryName(reportFullPath);
|
||||
string sequenceDirectoryName = string.Concat(Path.GetDirectoryName(reportFullPath), @"\", _Logistics.Sequence);
|
||||
string originalDataBioRad = string.Concat(Path.GetDirectoryName(reportFullPath), @"\", _OriginalDataBioRad, _Logistics.Sequence, ".txt");
|
||||
List<Tuple<string, bool, DateTime, string>> tuples = ProcessData.GetTuples(this, _Logistics, dateTime, results.Item4, _OriginalDataBioRad);
|
||||
if (_IsEAFHosted)
|
||||
{
|
||||
bool isBioRad;
|
||||
string dataText;
|
||||
string cassetteID;
|
||||
string fileNameTemp;
|
||||
string tupleFileName;
|
||||
DateTime cassetteTicks;
|
||||
string directoryName = Path.GetDirectoryName(reportFullPath);
|
||||
string sequenceDirectoryName = string.Concat(Path.GetDirectoryName(reportFullPath), @"\", _Logistics.Sequence);
|
||||
string originalDataBioRad = string.Concat(Path.GetDirectoryName(reportFullPath), @"\", _OriginalDataBioRad, _Logistics.Sequence, ".txt");
|
||||
List<Tuple<string, bool, DateTime, string>> tuples = ProcessData.GetTuples(this, _Logistics, dateTime, results.Item4, _OriginalDataBioRad);
|
||||
if (_IsEAFHosted)
|
||||
if (tuples.Any())
|
||||
{
|
||||
if (tuples.Any())
|
||||
{
|
||||
if (!Directory.Exists(sequenceDirectoryName))
|
||||
Directory.CreateDirectory(sequenceDirectoryName);
|
||||
File.Move(reportFullPath, originalDataBioRad);
|
||||
_Log.Debug(string.Concat("****Extract() - Renamed [", reportFullPath, "] to [", originalDataBioRad, "]"));
|
||||
}
|
||||
foreach (Tuple<string, bool, DateTime, string> tuple in tuples)
|
||||
{
|
||||
isBioRad = tuple.Item2;
|
||||
dataText = tuple.Item4;
|
||||
cassetteID = tuple.Item1;
|
||||
cassetteTicks = tuple.Item3;
|
||||
if (isBioRad)
|
||||
tupleFileName = string.Concat("DetailDataBioRad_", cassetteID, "_", cassetteTicks.Ticks, ".txt");
|
||||
else
|
||||
tupleFileName = string.Concat("CassetteDataBioRad_", cassetteID, "_", cassetteTicks.Ticks, ".txt");
|
||||
fileNameTemp = string.Concat(sequenceDirectoryName, @"\", tupleFileName);
|
||||
File.WriteAllText(fileNameTemp, dataText);
|
||||
File.SetLastWriteTime(fileNameTemp, cassetteTicks);
|
||||
if (_Logistics.Sequence != cassetteTicks.Ticks && File.Exists(originalDataBioRad))
|
||||
File.Copy(originalDataBioRad, string.Concat(Path.GetDirectoryName(reportFullPath), @"\", _OriginalDataBioRad, cassetteTicks.Ticks, ".txt"));
|
||||
File.Move(fileNameTemp, string.Concat(directoryName, @"\", tupleFileName));
|
||||
}
|
||||
if (Directory.Exists(sequenceDirectoryName))
|
||||
Directory.Delete(sequenceDirectoryName);
|
||||
if (!Directory.Exists(sequenceDirectoryName))
|
||||
_ = Directory.CreateDirectory(sequenceDirectoryName);
|
||||
File.Move(reportFullPath, originalDataBioRad);
|
||||
_Log.Debug(string.Concat("****Extract() - Renamed [", reportFullPath, "] to [", originalDataBioRad, "]"));
|
||||
}
|
||||
foreach (Tuple<string, bool, DateTime, string> tuple in tuples)
|
||||
{
|
||||
isBioRad = tuple.Item2;
|
||||
dataText = tuple.Item4;
|
||||
cassetteID = tuple.Item1;
|
||||
cassetteTicks = tuple.Item3;
|
||||
if (isBioRad)
|
||||
tupleFileName = string.Concat("DetailDataBioRad_", cassetteID, "_", cassetteTicks.Ticks, ".txt");
|
||||
else
|
||||
tupleFileName = string.Concat("CassetteDataBioRad_", cassetteID, "_", cassetteTicks.Ticks, ".txt");
|
||||
fileNameTemp = string.Concat(sequenceDirectoryName, @"\", tupleFileName);
|
||||
File.WriteAllText(fileNameTemp, dataText);
|
||||
File.SetLastWriteTime(fileNameTemp, cassetteTicks);
|
||||
if (_Logistics.Sequence != cassetteTicks.Ticks && File.Exists(originalDataBioRad))
|
||||
File.Copy(originalDataBioRad, string.Concat(Path.GetDirectoryName(reportFullPath), @"\", _OriginalDataBioRad, cassetteTicks.Ticks, ".txt"));
|
||||
File.Move(fileNameTemp, string.Concat(directoryName, @"\", tupleFileName));
|
||||
}
|
||||
if (Directory.Exists(sequenceDirectoryName))
|
||||
Directory.Delete(sequenceDirectoryName);
|
||||
}
|
||||
return results;
|
||||
}
|
||||
|
||||
return results;
|
||||
}
|
||||
|
||||
}
|
@ -8,503 +8,500 @@ using System.Linq;
|
||||
using System.Text;
|
||||
using System.Text.RegularExpressions;
|
||||
|
||||
namespace Adaptation.FileHandlers.txt
|
||||
namespace Adaptation.FileHandlers.txt;
|
||||
|
||||
public partial class ProcessData
|
||||
{
|
||||
|
||||
public partial class ProcessData
|
||||
internal static List<Tuple<string, bool, DateTime, string>> GetTuples(FileRead fileRead, Logistics logistics, DateTime dateTime, List<FileInfo> fileInfoCollection, string originalDataBioRad)
|
||||
{
|
||||
|
||||
internal static List<Tuple<string, bool, DateTime, string>> GetTuples(FileRead fileRead, Logistics logistics, DateTime dateTime, List<FileInfo> fileInfoCollection, string originalDataBioRad)
|
||||
List<Tuple<string, bool, DateTime, string>> results = new();
|
||||
ILog log = LogManager.GetLogger(typeof(ProcessData));
|
||||
// ***********************************************************************************
|
||||
// * Step #2 - Verify completeness of each cassette scan in the raw data source file *
|
||||
// ***********************************************************************************
|
||||
string line;
|
||||
StreamReader rawDataFilePtr;
|
||||
bool? cassetteScanCompleted = null;
|
||||
// Scrub the source file to verify that for each cassette, present in the file, there is a complete
|
||||
// data set (i.e., that is there is a start and finished statement).
|
||||
//
|
||||
// Scenario #1 - Normal
|
||||
// For every cassette "started" there must be a matching cassette "finished".
|
||||
// Scenario #2 - Only Cassette "finished" (with or witout additional cassette complete data sets)
|
||||
// Incomplete data file. File will be process and generate error for the incomplete portion.
|
||||
// Scenario #3 - Only Cassette "Started"
|
||||
// Bail out of the solution. Source data file not ready to be processed.
|
||||
using (rawDataFilePtr = new StreamReader(logistics.ReportFullPath))
|
||||
{
|
||||
List<Tuple<string, bool, DateTime, string>> results = new();
|
||||
ILog log = LogManager.GetLogger(typeof(ProcessData));
|
||||
// ***********************************************************************************
|
||||
// * Step #2 - Verify completeness of each cassette scan in the raw data source file *
|
||||
// ***********************************************************************************
|
||||
string line;
|
||||
StreamReader rawDataFilePtr;
|
||||
bool? cassetteScanCompleted = null;
|
||||
// Scrub the source file to verify that for each cassette, present in the file, there is a complete
|
||||
// data set (i.e., that is there is a start and finished statement).
|
||||
//
|
||||
// Scenario #1 - Normal
|
||||
// For every cassette "started" there must be a matching cassette "finished".
|
||||
// Scenario #2 - Only Cassette "finished" (with or witout additional cassette complete data sets)
|
||||
// Incomplete data file. File will be process and generate error for the incomplete portion.
|
||||
// Scenario #3 - Only Cassette "Started"
|
||||
// Bail out of the solution. Source data file not ready to be processed.
|
||||
using (rawDataFilePtr = new StreamReader(logistics.ReportFullPath))
|
||||
for (short i = 0; i < short.MaxValue; i++)
|
||||
{
|
||||
for (short i = 0; i < short.MaxValue; i++)
|
||||
line = rawDataFilePtr.ReadLine();
|
||||
if (line is null)
|
||||
break;
|
||||
if (line.Contains("Cassette") && line.Contains("started") && (cassetteScanCompleted is null || cassetteScanCompleted.Value))
|
||||
{
|
||||
line = rawDataFilePtr.ReadLine();
|
||||
if (line is null)
|
||||
break;
|
||||
if (line.Contains("Cassette") && line.Contains("started") && (cassetteScanCompleted is null || cassetteScanCompleted.Value))
|
||||
{
|
||||
cassetteScanCompleted = false;
|
||||
log.Debug("****Extract() - CassetteScanCompleted = FALSE");
|
||||
}
|
||||
else if (line.Contains("Cassette") && line.Contains("finished") && (cassetteScanCompleted is null || !cassetteScanCompleted.Value))
|
||||
{
|
||||
cassetteScanCompleted = true;
|
||||
log.Debug("****Extract() - CassetteScanCompleted = TRUE");
|
||||
}
|
||||
cassetteScanCompleted = false;
|
||||
log.Debug("****Extract() - CassetteScanCompleted = FALSE");
|
||||
}
|
||||
// Making sure that the file has been released
|
||||
rawDataFilePtr.Close();
|
||||
if (!(rawDataFilePtr is null))
|
||||
rawDataFilePtr.Dispose();
|
||||
}
|
||||
if (cassetteScanCompleted is null || !cassetteScanCompleted.Value)
|
||||
// Raw source file has an incomplete data set or it only contains a "Process failed" and should not be
|
||||
// processed /split yet. Simply get out of this routine until enough data has been appended to the file.
|
||||
log.Debug($"****Extract() - Raw source file has an incomplete data set and should not be processed yet.");
|
||||
else
|
||||
{
|
||||
Dictionary<string, List<string>> cassetteIDAndDataSets = new();
|
||||
if (!string.IsNullOrEmpty(logistics.ReportFullPath))
|
||||
else if (line.Contains("Cassette") && line.Contains("finished") && (cassetteScanCompleted is null || !cassetteScanCompleted.Value))
|
||||
{
|
||||
string[] segments;
|
||||
int cassetteEndIndex;
|
||||
int thicknessCounter;
|
||||
string thicknessHead;
|
||||
string thicknessInfo;
|
||||
string thicknessTail;
|
||||
int cassetteStartIndex;
|
||||
StringBuilder lines = new();
|
||||
string slotID = string.Empty;
|
||||
string cassetteID = string.Empty;
|
||||
string batchHeader = string.Empty;
|
||||
bool finishedReadingThicknessInfo;
|
||||
bool slotInformationCaptured = false;
|
||||
bool pointsInformationCaptured = false;
|
||||
bool sourceInformationCaptured = false;
|
||||
bool waferWaferInformationCaptured = false;
|
||||
bool destinationInformationCaptured = false;
|
||||
string[] reportFullPathlines = File.ReadAllLines(logistics.ReportFullPath);
|
||||
List<Tuple<string, int, int>> cassetteStartAndEnds = new();
|
||||
for (int i = 0; i < reportFullPathlines.Length; i++)
|
||||
cassetteScanCompleted = true;
|
||||
log.Debug("****Extract() - CassetteScanCompleted = TRUE");
|
||||
}
|
||||
}
|
||||
// Making sure that the file has been released
|
||||
rawDataFilePtr.Close();
|
||||
if (rawDataFilePtr is not null)
|
||||
rawDataFilePtr.Dispose();
|
||||
}
|
||||
if (cassetteScanCompleted is null || !cassetteScanCompleted.Value)
|
||||
// Raw source file has an incomplete data set or it only contains a "Process failed" and should not be
|
||||
// processed /split yet. Simply get out of this routine until enough data has been appended to the file.
|
||||
log.Debug($"****Extract() - Raw source file has an incomplete data set and should not be processed yet.");
|
||||
else
|
||||
{
|
||||
Dictionary<string, List<string>> cassetteIDAndDataSets = new();
|
||||
if (!string.IsNullOrEmpty(logistics.ReportFullPath))
|
||||
{
|
||||
string[] segments;
|
||||
int cassetteEndIndex;
|
||||
int thicknessCounter;
|
||||
string thicknessHead;
|
||||
string thicknessInfo;
|
||||
string thicknessTail;
|
||||
int cassetteStartIndex;
|
||||
StringBuilder lines = new();
|
||||
string slotID = string.Empty;
|
||||
string cassetteID = string.Empty;
|
||||
string batchHeader = string.Empty;
|
||||
bool finishedReadingThicknessInfo;
|
||||
bool slotInformationCaptured = false;
|
||||
bool pointsInformationCaptured = false;
|
||||
bool sourceInformationCaptured = false;
|
||||
bool waferWaferInformationCaptured = false;
|
||||
bool destinationInformationCaptured = false;
|
||||
string[] reportFullPathlines = File.ReadAllLines(logistics.ReportFullPath);
|
||||
List<Tuple<string, int, int>> cassetteStartAndEnds = new();
|
||||
for (int i = 0; i < reportFullPathlines.Length; i++)
|
||||
{
|
||||
line = reportFullPathlines[i].Trim();
|
||||
if (string.IsNullOrEmpty(line))
|
||||
continue;
|
||||
if (line.StartsWith("Batch") && line.Contains("started"))
|
||||
batchHeader = line;
|
||||
if (i + 1 == reportFullPathlines.Length)
|
||||
continue;
|
||||
if (line.StartsWith("Cassette") && line.Contains("started"))
|
||||
{
|
||||
line = reportFullPathlines[i].Trim();
|
||||
if (string.IsNullOrEmpty(line))
|
||||
continue;
|
||||
if (line.StartsWith("Batch") && line.Contains("started"))
|
||||
batchHeader = line;
|
||||
if (i + 1 == reportFullPathlines.Length)
|
||||
continue;
|
||||
if (line.StartsWith("Cassette") && line.Contains("started"))
|
||||
for (int j = i + 1; j < reportFullPathlines.Length; j++)
|
||||
{
|
||||
for (int j = i + 1; j < reportFullPathlines.Length; j++)
|
||||
if (j + 1 == reportFullPathlines.Length)
|
||||
cassetteStartAndEnds.Add(new Tuple<string, int, int>(batchHeader, i, j));
|
||||
else
|
||||
{
|
||||
if (j + 1 == reportFullPathlines.Length)
|
||||
cassetteStartAndEnds.Add(new Tuple<string, int, int>(batchHeader, i, j));
|
||||
else
|
||||
line = reportFullPathlines[j].Trim();
|
||||
if (line.StartsWith("Cassette") && line.Contains("started"))
|
||||
{
|
||||
line = reportFullPathlines[j].Trim();
|
||||
if (line.StartsWith("Cassette") && line.Contains("started"))
|
||||
{
|
||||
cassetteStartAndEnds.Add(new Tuple<string, int, int>(batchHeader, i, j - 1));
|
||||
break;
|
||||
}
|
||||
cassetteStartAndEnds.Add(new Tuple<string, int, int>(batchHeader, i, j - 1));
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
foreach (Tuple<string, int, int> tuple in cassetteStartAndEnds)
|
||||
}
|
||||
foreach (Tuple<string, int, int> tuple in cassetteStartAndEnds)
|
||||
{
|
||||
_ = lines.Clear();
|
||||
batchHeader = tuple.Item1;
|
||||
cassetteEndIndex = tuple.Item3;
|
||||
cassetteStartIndex = tuple.Item2;
|
||||
for (int l = cassetteStartIndex; l <= cassetteEndIndex; l++)
|
||||
{
|
||||
lines.Clear();
|
||||
batchHeader = tuple.Item1;
|
||||
cassetteEndIndex = tuple.Item3;
|
||||
cassetteStartIndex = tuple.Item2;
|
||||
for (int l = cassetteStartIndex; l <= cassetteEndIndex; l++)
|
||||
line = reportFullPathlines[l].Trim();
|
||||
if (string.IsNullOrEmpty(line))
|
||||
continue;
|
||||
if (l == cassetteStartIndex)
|
||||
{
|
||||
line = reportFullPathlines[l].Trim();
|
||||
if (string.IsNullOrEmpty(line))
|
||||
continue;
|
||||
if (l == cassetteStartIndex)
|
||||
// Save the previously saved "Batch Header"
|
||||
_ = lines.AppendLine(batchHeader);
|
||||
// Save the first line of the cassette scan information
|
||||
_ = lines.AppendLine(line);
|
||||
// Each new cassette initialize the WaferWafer information flag
|
||||
waferWaferInformationCaptured = false;
|
||||
slotInformationCaptured = false;
|
||||
if (line.Length > 9)
|
||||
{
|
||||
// Save the previously saved "Batch Header"
|
||||
lines.AppendLine(batchHeader);
|
||||
// Save the first line of the cassette scan information
|
||||
lines.AppendLine(line);
|
||||
// Each new cassette initialize the WaferWafer information flag
|
||||
waferWaferInformationCaptured = false;
|
||||
slotInformationCaptured = false;
|
||||
if (line.Length > 9)
|
||||
// Detected a new cassette data scan. Extract the cassette ID.
|
||||
// Example: "Cassette 47-241330-4238 started."
|
||||
segments = line.Substring(9).Split(new string[] { "started" }, StringSplitOptions.RemoveEmptyEntries);
|
||||
if (segments.Any())
|
||||
{
|
||||
// Detected a new cassette data scan. Extract the cassette ID.
|
||||
// Example: "Cassette 47-241330-4238 started."
|
||||
segments = line.Substring(9).Split(new string[] { "started" }, StringSplitOptions.RemoveEmptyEntries);
|
||||
if (segments.Any())
|
||||
{
|
||||
// Detected a new cassette scan in the raw source file
|
||||
cassetteID = segments[0].Trim();
|
||||
cassetteID = cassetteID.Replace(":", string.Empty);
|
||||
cassetteID = cassetteID.Replace("*", string.Empty);
|
||||
cassetteID = cassetteID.Replace("\\", string.Empty);
|
||||
}
|
||||
// Detected a new cassette scan in the raw source file
|
||||
cassetteID = segments[0].Trim();
|
||||
cassetteID = cassetteID.Replace(":", string.Empty);
|
||||
cassetteID = cassetteID.Replace("*", string.Empty);
|
||||
cassetteID = cassetteID.Replace("\\", string.Empty);
|
||||
}
|
||||
}
|
||||
// Continue reading and saving the cassette scan information, into the cassette
|
||||
// scan output file, until the end of the cassette scan "Finished" statement has
|
||||
// been detected.
|
||||
// Maintain standard for mat between various BioRad tools. The "Points" and "Thickness"
|
||||
// values between vaious BioRad tools might be spread over multiple lines. The following
|
||||
// is simply to regroup the "Points" and "Thickness" information on the same line accordingly.
|
||||
if (line.StartsWith("Wafer Wafer"))
|
||||
}
|
||||
// Continue reading and saving the cassette scan information, into the cassette
|
||||
// scan output file, until the end of the cassette scan "Finished" statement has
|
||||
// been detected.
|
||||
// Maintain standard for mat between various BioRad tools. The "Points" and "Thickness"
|
||||
// values between various BioRad tools might be spread over multiple lines. The following
|
||||
// is simply to regroup the "Points" and "Thickness" information on the same line accordingly.
|
||||
if (line.StartsWith("Wafer Wafer"))
|
||||
{
|
||||
_ = lines.AppendLine(line);
|
||||
slotInformationCaptured = false;
|
||||
waferWaferInformationCaptured = true;
|
||||
}
|
||||
else if (line.StartsWith("Slot"))
|
||||
{
|
||||
slotID = string.Empty;
|
||||
segments = line.Split(' ');
|
||||
if (segments.Length > 1)
|
||||
slotID = segments[1];
|
||||
// There are cases where the WaferWafer information is missing. Create a
|
||||
// WaferWafer entry based off the slot number.
|
||||
if (!waferWaferInformationCaptured)
|
||||
{
|
||||
lines.AppendLine(line);
|
||||
slotInformationCaptured = false;
|
||||
waferWaferInformationCaptured = true;
|
||||
_ = lines.AppendLine("Wafer Wafer " + slotID + ".");
|
||||
}
|
||||
else if (line.StartsWith("Slot"))
|
||||
_ = lines.AppendLine(line);
|
||||
slotInformationCaptured = true;
|
||||
}
|
||||
else if (line.StartsWith("Recipe"))
|
||||
{
|
||||
_ = lines.AppendLine(line);
|
||||
pointsInformationCaptured = false;
|
||||
}
|
||||
else if (line.StartsWith("Points"))
|
||||
{
|
||||
_ = lines.AppendLine(line);
|
||||
pointsInformationCaptured = true;
|
||||
}
|
||||
else if (line.Contains("Thickness"))
|
||||
{
|
||||
// Before addressing the "Thickness" section, ensure that the "Points" section
|
||||
// has been found. Otherwise, we need to write out a default value.
|
||||
if (!pointsInformationCaptured)
|
||||
{
|
||||
slotID = string.Empty;
|
||||
segments = line.Split(' ');
|
||||
if (segments.Length > 1)
|
||||
slotID = segments[1];
|
||||
// There are cases where the WaferWafer information is missing. Create a
|
||||
// WaferWafer entry based off the slot number.
|
||||
if (!waferWaferInformationCaptured)
|
||||
{
|
||||
waferWaferInformationCaptured = true;
|
||||
lines.AppendLine("Wafer Wafer " + slotID + ".");
|
||||
}
|
||||
lines.AppendLine(line);
|
||||
slotInformationCaptured = true;
|
||||
}
|
||||
else if (line.StartsWith("Recipe"))
|
||||
{
|
||||
lines.AppendLine(line);
|
||||
pointsInformationCaptured = false;
|
||||
}
|
||||
else if (line.StartsWith("Points"))
|
||||
{
|
||||
lines.AppendLine(line);
|
||||
// No "Points" information has been capture. Default to "Points : 0 0"
|
||||
_ = lines.AppendLine("Points : 0 0");
|
||||
pointsInformationCaptured = true;
|
||||
}
|
||||
else if (line.Contains("Thickness"))
|
||||
// The "Thickness" output section comes out differently between various Stratus tools. In some
|
||||
// cases, the thickness values are either empty (no values), on the same line or on different lines.
|
||||
// Below are examples of how the data needs to be formatted after being parsed:
|
||||
// Thickness, um 1 - 1 0
|
||||
// Thickness, um 1 - 1 13.630
|
||||
// Thickness, um 1 - 9 1.197 1.231 1.248 1.235 1.199 1.202 1.236 1.242 1.212
|
||||
thicknessCounter = 0;
|
||||
thicknessHead = line;
|
||||
thicknessInfo = "";
|
||||
thicknessTail = "";
|
||||
finishedReadingThicknessInfo = false;
|
||||
for (int t = l + 1; t <= cassetteEndIndex; t++)
|
||||
{
|
||||
// Before addressing the "Thickness" section, ensure that the "Points" section
|
||||
// has been found. Otherwise, we need to write out a default value.
|
||||
if (!pointsInformationCaptured)
|
||||
l = t;
|
||||
line = reportFullPathlines[l].Trim();
|
||||
if (string.IsNullOrEmpty(line))
|
||||
continue;
|
||||
if (!line.StartsWith("Slot"))
|
||||
{
|
||||
// No "Points" information has been capture. Default to "Points : 0 0"
|
||||
lines.AppendLine("Points : 0 0");
|
||||
pointsInformationCaptured = true;
|
||||
thicknessCounter++;
|
||||
thicknessTail = string.Concat(thicknessTail, " ", line);
|
||||
}
|
||||
// The "Thickness" output section comes out differently between various Stratus tools. In some
|
||||
// cases, the thickness values are either empty (no values), on the same line or on different lines.
|
||||
// Below are examples of how the data needs to be formatted after being parsed:
|
||||
// Thickness, um 1 - 1 0
|
||||
// Thickness, um 1 - 1 13.630
|
||||
// Thickness, um 1 - 9 1.197 1.231 1.248 1.235 1.199 1.202 1.236 1.242 1.212
|
||||
thicknessCounter = 0;
|
||||
thicknessHead = line;
|
||||
thicknessInfo = "";
|
||||
thicknessTail = "";
|
||||
finishedReadingThicknessInfo = false;
|
||||
for (int t = l + 1; t <= cassetteEndIndex; t++)
|
||||
else
|
||||
{
|
||||
l = t;
|
||||
line = reportFullPathlines[l].Trim();
|
||||
if (string.IsNullOrEmpty(line))
|
||||
continue;
|
||||
if (!line.StartsWith("Slot"))
|
||||
{
|
||||
thicknessCounter++;
|
||||
thicknessTail = string.Concat(thicknessTail, " ", line);
|
||||
}
|
||||
finishedReadingThicknessInfo = true;
|
||||
if (thicknessCounter != 0)
|
||||
thicknessInfo = string.Concat(" 1 - ", thicknessCounter);
|
||||
else
|
||||
{
|
||||
finishedReadingThicknessInfo = true;
|
||||
if (thicknessCounter != 0)
|
||||
thicknessInfo = string.Concat(" 1 - ", thicknessCounter);
|
||||
// Two possible formatting scenarios at this point. Either the data was already
|
||||
// formatted properly on one line. Or the Thickness value was missing, in which
|
||||
// case we need to default the thickness value to zero (0).
|
||||
segments = thicknessHead.Split(' ');
|
||||
if (segments.Length > 2)
|
||||
{
|
||||
// The "Thickness" raw data if formatted as a normal single line format and
|
||||
// already include the Header + Info + Tail
|
||||
}
|
||||
else
|
||||
{
|
||||
// Two possible formatting scenarios at this point. Either the data was already
|
||||
// formatted properly on one line. Or the Thickness value was missing, in which
|
||||
// case we need to default the thickness value to zero (0).
|
||||
segments = thicknessHead.Split(' ');
|
||||
if (segments.Length > 2)
|
||||
{
|
||||
// The "Thickness" raw data if formatted as a normal single line format and
|
||||
// already include the Header + Info + Tail
|
||||
}
|
||||
else
|
||||
{
|
||||
// The "Thikness raw data has no values. Formatting the output with zero.
|
||||
thicknessInfo = " 1 - 1";
|
||||
thicknessTail = " 0";
|
||||
}
|
||||
// The "Thickness raw data has no values. Formatting the output with zero.
|
||||
thicknessInfo = " 1 - 1";
|
||||
thicknessTail = " 0";
|
||||
}
|
||||
lines.AppendLine(string.Concat(thicknessHead, thicknessInfo, thicknessTail));
|
||||
// The "Slot" keyword is the tag that determines the end of the Thickness section. The "Slot"
|
||||
// information has already been ready. Simply write it back.
|
||||
lines.AppendLine(line);
|
||||
}
|
||||
if (finishedReadingThicknessInfo)
|
||||
break;
|
||||
_ = lines.AppendLine(string.Concat(thicknessHead, thicknessInfo, thicknessTail));
|
||||
// The "Slot" keyword is the tag that determines the end of the Thickness section. The "Slot"
|
||||
// information has already been ready. Simply write it back.
|
||||
_ = lines.AppendLine(line);
|
||||
}
|
||||
if (finishedReadingThicknessInfo)
|
||||
break;
|
||||
}
|
||||
else if (line.StartsWith("Mean"))
|
||||
}
|
||||
else if (line.StartsWith("Mean"))
|
||||
{
|
||||
_ = lines.AppendLine(line);
|
||||
sourceInformationCaptured = false;
|
||||
destinationInformationCaptured = false;
|
||||
}
|
||||
else if (line.StartsWith("Source:") && slotInformationCaptured)
|
||||
{
|
||||
_ = lines.AppendLine(line);
|
||||
sourceInformationCaptured = true;
|
||||
}
|
||||
else if (line.StartsWith("Destination:") && slotInformationCaptured)
|
||||
{
|
||||
if (!sourceInformationCaptured)
|
||||
{
|
||||
lines.AppendLine(line);
|
||||
sourceInformationCaptured = false;
|
||||
destinationInformationCaptured = false;
|
||||
}
|
||||
else if (line.StartsWith("Source:") && slotInformationCaptured)
|
||||
{
|
||||
lines.AppendLine(line);
|
||||
sourceInformationCaptured = true;
|
||||
_ = lines.AppendLine(string.Concat("Source: Slot ", slotID, ", Cassette"));
|
||||
}
|
||||
else if (line.StartsWith("Destination:") && slotInformationCaptured)
|
||||
_ = lines.AppendLine(line);
|
||||
destinationInformationCaptured = true;
|
||||
// Each time a cassette slot section has been completed, we must reinitialize
|
||||
// the "Wafer Wafer" information flag in case there are multiple slots in the
|
||||
// same cassette
|
||||
slotInformationCaptured = false;
|
||||
waferWaferInformationCaptured = false;
|
||||
}
|
||||
else if (line.StartsWith("Cassette") && line.Contains("finished."))
|
||||
{
|
||||
// Reach the end of the cassette data set information
|
||||
if (!sourceInformationCaptured)
|
||||
{
|
||||
sourceInformationCaptured = true;
|
||||
_ = lines.AppendLine(string.Concat("Source: Slot ", slotID, ", Cassette"));
|
||||
}
|
||||
if (!destinationInformationCaptured)
|
||||
{
|
||||
if (!sourceInformationCaptured)
|
||||
{
|
||||
sourceInformationCaptured = true;
|
||||
lines.AppendLine(string.Concat("Source: Slot ", slotID, ", Cassette"));
|
||||
}
|
||||
lines.AppendLine(line);
|
||||
destinationInformationCaptured = true;
|
||||
_ = lines.AppendLine(string.Concat("Destination: Slot ", slotID, ", Cassette"));
|
||||
// Each time a cassette slot section has been completed, we must reinitialize
|
||||
// the "Wafer Wafer" information flag in case there are multiple slots in the
|
||||
// same cassette
|
||||
slotInformationCaptured = false;
|
||||
waferWaferInformationCaptured = false;
|
||||
}
|
||||
else if (line.StartsWith("Cassette") && line.Contains("finished."))
|
||||
// Write the end of cassette statement to the output file
|
||||
_ = lines.AppendLine(line);
|
||||
// Read the Mean-Average line information, post the cassette "Finished" statement
|
||||
for (int a = l + 1; a <= cassetteEndIndex; a++)
|
||||
{
|
||||
// Reach the end of the cassette data set information
|
||||
if (!sourceInformationCaptured)
|
||||
l = a;
|
||||
line = reportFullPathlines[l].Trim();
|
||||
if (string.IsNullOrEmpty(line))
|
||||
continue;
|
||||
// There are many blank lines in the source file. Search for the first
|
||||
// occurrence of the string "Mean".
|
||||
if (line.StartsWith("Mean"))
|
||||
{
|
||||
sourceInformationCaptured = true;
|
||||
lines.AppendLine(string.Concat("Source: Slot ", slotID, ", Cassette"));
|
||||
_ = lines.AppendLine(line);
|
||||
break;
|
||||
}
|
||||
if (!destinationInformationCaptured)
|
||||
{
|
||||
destinationInformationCaptured = true;
|
||||
lines.AppendLine(string.Concat("Destination: Slot ", slotID, ", Cassette"));
|
||||
// Each time a cassette slot section has been completed, we must reinitialize
|
||||
// the "Wafer Wafer" information flag in case there are multiple slots in the
|
||||
// same cassette
|
||||
slotInformationCaptured = false;
|
||||
waferWaferInformationCaptured = false;
|
||||
}
|
||||
// Write the end of cassette statement to the output file
|
||||
lines.AppendLine(line);
|
||||
// Read the Mean-Average line information, post the cassette "Finished" statement
|
||||
for (int a = l + 1; a <= cassetteEndIndex; a++)
|
||||
{
|
||||
l = a;
|
||||
line = reportFullPathlines[l].Trim();
|
||||
if (string.IsNullOrEmpty(line))
|
||||
continue;
|
||||
// There are many blank lines in the source file. Search for the first
|
||||
// occurence of the string "Mean".
|
||||
if (line.StartsWith("Mean"))
|
||||
{
|
||||
lines.AppendLine(line);
|
||||
break;
|
||||
}
|
||||
// The mean Average information is missing. We are done reading the cassette information.
|
||||
if (line.StartsWith("Batch"))
|
||||
break;
|
||||
}
|
||||
if (!cassetteIDAndDataSets.ContainsKey(cassetteID))
|
||||
cassetteIDAndDataSets.Add(cassetteID, new List<string>());
|
||||
cassetteIDAndDataSets[cassetteID].Add(lines.ToString());
|
||||
// The mean Average information is missing. We are done reading the cassette information.
|
||||
if (line.StartsWith("Batch"))
|
||||
break;
|
||||
}
|
||||
if (!cassetteIDAndDataSets.ContainsKey(cassetteID))
|
||||
cassetteIDAndDataSets.Add(cassetteID, new List<string>());
|
||||
cassetteIDAndDataSets[cassetteID].Add(lines.ToString());
|
||||
}
|
||||
}
|
||||
if (cassetteStartAndEnds is null)
|
||||
{ }
|
||||
}
|
||||
if (cassetteIDAndDataSets.Any())
|
||||
if (cassetteStartAndEnds is null)
|
||||
{ }
|
||||
}
|
||||
if (cassetteIDAndDataSets.Any())
|
||||
{
|
||||
int wafer;
|
||||
string user;
|
||||
string runID;
|
||||
bool isBioRad;
|
||||
string recipe;
|
||||
int count = -1;
|
||||
int stringIndex;
|
||||
string dataText;
|
||||
string dataType;
|
||||
string[] segments;
|
||||
string cassetteID;
|
||||
string recipeName;
|
||||
IProcessData iProcessData;
|
||||
DateTime cassetteDateTime;
|
||||
string recipeSearch = "Recipe";
|
||||
string toolType = string.Empty;
|
||||
StringBuilder contents = new();
|
||||
Stratus.ProcessData processData;
|
||||
foreach (KeyValuePair<string, List<string>> keyValuePair in cassetteIDAndDataSets)
|
||||
{
|
||||
int wafer;
|
||||
string user;
|
||||
string runID;
|
||||
bool isBioRad;
|
||||
string recipe;
|
||||
int count = -1;
|
||||
int stringIndex;
|
||||
string dataText;
|
||||
string dataType;
|
||||
string[] segments;
|
||||
string cassetteID;
|
||||
string recipeName;
|
||||
IProcessData iProcessData;
|
||||
DateTime cassetteDateTime;
|
||||
string recipeSearch = "Recipe";
|
||||
string toolType = string.Empty;
|
||||
StringBuilder contents = new();
|
||||
Stratus.ProcessData processData;
|
||||
foreach (KeyValuePair<string, List<string>> keyValuePair in cassetteIDAndDataSets)
|
||||
isBioRad = false;
|
||||
dataType = string.Empty;
|
||||
cassetteID = keyValuePair.Key;
|
||||
for (int i = 0; i < keyValuePair.Value.Count; i++)
|
||||
{
|
||||
isBioRad = false;
|
||||
dataType = string.Empty;
|
||||
cassetteID = keyValuePair.Key;
|
||||
for (int i = 0; i < keyValuePair.Value.Count; i++)
|
||||
dataText = keyValuePair.Value[i];
|
||||
// Finished capturing the complete cassette scan data information. Release the cassette file.
|
||||
if (dataText.Contains("Cassette") &&
|
||||
dataText.Contains("Wafer") &&
|
||||
dataText.Contains("Slot") &&
|
||||
dataText.Contains("Recipe") &&
|
||||
dataText.Contains("Points") &&
|
||||
dataText.Contains("Thickness") &&
|
||||
dataText.Contains("Mean") &&
|
||||
dataText.Contains("Source:") &&
|
||||
dataText.Contains("Destination:"))
|
||||
{
|
||||
dataText = keyValuePair.Value[i];
|
||||
// Finished capturing the complete cassette scan data information. Release the cassette file.
|
||||
if (dataText.Contains("Cassette") &&
|
||||
dataText.Contains("Wafer") &&
|
||||
dataText.Contains("Slot") &&
|
||||
dataText.Contains("Recipe") &&
|
||||
dataText.Contains("Points") &&
|
||||
dataText.Contains("Thickness") &&
|
||||
dataText.Contains("Mean") &&
|
||||
dataText.Contains("Source:") &&
|
||||
dataText.Contains("Destination:"))
|
||||
// Extract the recipe name
|
||||
runID = string.Empty;
|
||||
recipeName = string.Empty;
|
||||
stringIndex = dataText.IndexOf(recipeSearch);
|
||||
recipeName = dataText.Substring(stringIndex + recipeSearch.Length);
|
||||
log.Debug($"****Extract(FDR): recipeName = {recipeName}");
|
||||
if (!(string.IsNullOrEmpty(recipeName)) && (recipeName.IndexOf("center", StringComparison.CurrentCultureIgnoreCase) >= 0))
|
||||
{
|
||||
// Extract the recipe name
|
||||
runID = string.Empty;
|
||||
recipeName = string.Empty;
|
||||
stringIndex = dataText.IndexOf(recipeSearch);
|
||||
recipeName = dataText.Substring(stringIndex + recipeSearch.Length);
|
||||
log.Debug($"****Extract(FDR): recipeName = {recipeName}");
|
||||
if (!(string.IsNullOrEmpty(recipeName)) && (recipeName.IndexOf("center", StringComparison.CurrentCultureIgnoreCase) >= 0))
|
||||
{
|
||||
/***************************************/
|
||||
/* STRATUS Measurement = FQA Thickness */
|
||||
/***************************************/
|
||||
// Recipes that contains the substring "Center" are STRATUS centerpoint recipes. They are used for Inspection and FQA measurements.
|
||||
// measurement. The data from thise scans should be uploaded to the Metrology Viewer database as STRATUS and uploaded to the
|
||||
// OpenInsight [FQA Thickness - Post Epi - QA Metrology / Thk/RHO Value for each slotID] automatically.
|
||||
isBioRad = false;
|
||||
toolType = "STRATUS";
|
||||
dataType = "FQA Thickness";
|
||||
}
|
||||
else if (!(string.IsNullOrEmpty(recipeName)) && (recipeName.IndexOf("prod_", StringComparison.CurrentCultureIgnoreCase) >= 0))
|
||||
{
|
||||
/******************************************/
|
||||
/* BIORAD Measurement = Product Thickness */
|
||||
/******************************************/
|
||||
// Recipes that contains the substring "Center" are STRATUS centerpoint recipes. They are used for Inspection and FQA measurements.
|
||||
// measurement. The data from thise scans should be uploaded to the Metrology Viewer database as STRATUS and uploaded to the
|
||||
// OpenInsight [FQA Thickness - Post Epi - QA Metrology / Thk/RHO Value for each slotID] automatically.
|
||||
isBioRad = true;
|
||||
toolType = "BIORAD";
|
||||
dataType = "Product Thickness";
|
||||
}
|
||||
else if (!(string.IsNullOrEmpty(recipeName)) &&
|
||||
((recipeName.IndexOf("T-Low", StringComparison.CurrentCultureIgnoreCase) >= 0) ||
|
||||
(recipeName.IndexOf("T_Low", StringComparison.CurrentCultureIgnoreCase) >= 0) ||
|
||||
(recipeName.IndexOf("T-Mid", StringComparison.CurrentCultureIgnoreCase) >= 0) ||
|
||||
(recipeName.IndexOf("T_Mid", StringComparison.CurrentCultureIgnoreCase) >= 0) ||
|
||||
(recipeName.IndexOf("T-High", StringComparison.CurrentCultureIgnoreCase) >= 0) ||
|
||||
(recipeName.IndexOf("T_High", StringComparison.CurrentCultureIgnoreCase) >= 0)))
|
||||
{
|
||||
/*************************************/
|
||||
/* BIORAD Measurement = No Uploading */
|
||||
/*************************************/
|
||||
// Recipes that contains the substring "T-Low, T_Low, T-Mid, T_Mid and T-High, T_High" are BIORAD verification recipe. The information
|
||||
// should be uploaded to the Metrology Viewer database as BIORAD. No OpenInsight.
|
||||
isBioRad = true;
|
||||
toolType = "BIORAD";
|
||||
dataType = "Verification";
|
||||
}
|
||||
else
|
||||
{
|
||||
// Count the number of wafers (ref. "Source: Slot") in the cassette
|
||||
int waferCount = Regex.Matches(dataText, "Source: Slot").Count;
|
||||
if (waferCount == 1)
|
||||
{
|
||||
// Metrology Thickness. Upload to OpenInsight same as BR2 and BR3
|
||||
isBioRad = true;
|
||||
toolType = "BIORAD";
|
||||
dataType = "Metrology Thickness";
|
||||
}
|
||||
else if (waferCount > 1)
|
||||
{
|
||||
// Inspection Measurement. Do not upload to OpenInsight.
|
||||
isBioRad = true;
|
||||
toolType = "BIORAD";
|
||||
dataType = "Inspection";
|
||||
}
|
||||
}
|
||||
/***************************************/
|
||||
/* STRATUS Measurement = FQA Thickness */
|
||||
/***************************************/
|
||||
// Recipes that contains the substring "Center" are STRATUS centerpoint recipes. They are used for Inspection and FQA measurements.
|
||||
// measurement. The data from these scans should be uploaded to the Metrology Viewer database as STRATUS and uploaded to the
|
||||
// OpenInsight [FQA Thickness - Post Epi - QA Metrology / Thk/RHO Value for each slotID] automatically.
|
||||
isBioRad = false;
|
||||
toolType = "STRATUS";
|
||||
dataType = "FQA Thickness";
|
||||
}
|
||||
log.Debug($"****Extract(FDR): ToolType = {toolType}");
|
||||
log.Debug($"****Extract(FDR): DataType = {dataType}");
|
||||
if (!isBioRad)
|
||||
else if (!(string.IsNullOrEmpty(recipeName)) && (recipeName.IndexOf("prod_", StringComparison.CurrentCultureIgnoreCase) >= 0))
|
||||
{
|
||||
cassetteDateTime = logistics.DateTimeFromSequence.AddTicks(i * -1);
|
||||
results.Add(new Tuple<string, bool, DateTime, string>(cassetteID, isBioRad, cassetteDateTime, dataText));
|
||||
/******************************************/
|
||||
/* BIORAD Measurement = Product Thickness */
|
||||
/******************************************/
|
||||
// Recipes that contains the substring "Center" are STRATUS centerpoint recipes. They are used for Inspection and FQA measurements.
|
||||
// measurement. The data from these scans should be uploaded to the Metrology Viewer database as STRATUS and uploaded to the
|
||||
// OpenInsight [FQA Thickness - Post Epi - QA Metrology / Thk/RHO Value for each slotID] automatically.
|
||||
isBioRad = true;
|
||||
toolType = "BIORAD";
|
||||
dataType = "Product Thickness";
|
||||
}
|
||||
else if (!(string.IsNullOrEmpty(recipeName)) &&
|
||||
((recipeName.IndexOf("T-Low", StringComparison.CurrentCultureIgnoreCase) >= 0) ||
|
||||
(recipeName.IndexOf("T_Low", StringComparison.CurrentCultureIgnoreCase) >= 0) ||
|
||||
(recipeName.IndexOf("T-Mid", StringComparison.CurrentCultureIgnoreCase) >= 0) ||
|
||||
(recipeName.IndexOf("T_Mid", StringComparison.CurrentCultureIgnoreCase) >= 0) ||
|
||||
(recipeName.IndexOf("T-High", StringComparison.CurrentCultureIgnoreCase) >= 0) ||
|
||||
(recipeName.IndexOf("T_High", StringComparison.CurrentCultureIgnoreCase) >= 0)))
|
||||
{
|
||||
/*************************************/
|
||||
/* BIORAD Measurement = No Uploading */
|
||||
/*************************************/
|
||||
// Recipes that contains the substring "T-Low, T_Low, T-Mid, T_Mid and T-High, T_High" are BIORAD verification recipe. The information
|
||||
// should be uploaded to the Metrology Viewer database as BIORAD. No OpenInsight.
|
||||
isBioRad = true;
|
||||
toolType = "BIORAD";
|
||||
dataType = "Verification";
|
||||
}
|
||||
else
|
||||
{
|
||||
processData = new Stratus.ProcessData(fileRead, logistics, fileInfoCollection, originalDataBioRad, dataText: dataText);
|
||||
iProcessData = processData;
|
||||
if (!iProcessData.Details.Any())
|
||||
log.Warn("No Details!");
|
||||
else
|
||||
// Count the number of wafers (ref. "Source: Slot") in the cassette
|
||||
int waferCount = Regex.Matches(dataText, "Source: Slot").Count;
|
||||
if (waferCount == 1)
|
||||
{
|
||||
foreach (object item in iProcessData.Details)
|
||||
// Metrology Thickness. Upload to OpenInsight same as BR2 and BR3
|
||||
isBioRad = true;
|
||||
toolType = "BIORAD";
|
||||
dataType = "Metrology Thickness";
|
||||
}
|
||||
else if (waferCount > 1)
|
||||
{
|
||||
// Inspection Measurement. Do not upload to OpenInsight.
|
||||
isBioRad = true;
|
||||
toolType = "BIORAD";
|
||||
dataType = "Inspection";
|
||||
}
|
||||
}
|
||||
}
|
||||
log.Debug($"****Extract(FDR): ToolType = {toolType}");
|
||||
log.Debug($"****Extract(FDR): DataType = {dataType}");
|
||||
if (!isBioRad)
|
||||
{
|
||||
cassetteDateTime = logistics.DateTimeFromSequence.AddTicks(i * -1);
|
||||
results.Add(new Tuple<string, bool, DateTime, string>(cassetteID, isBioRad, cassetteDateTime, dataText));
|
||||
}
|
||||
else
|
||||
{
|
||||
processData = new Stratus.ProcessData(fileRead, logistics, fileInfoCollection, originalDataBioRad, dataText: dataText);
|
||||
iProcessData = processData;
|
||||
if (!iProcessData.Details.Any())
|
||||
log.Warn("No Details!");
|
||||
else
|
||||
{
|
||||
foreach (object item in iProcessData.Details)
|
||||
{
|
||||
if (item is not Stratus.Detail detail)
|
||||
throw new Exception();
|
||||
count += 1;
|
||||
_ = contents.Clear();
|
||||
cassetteDateTime = logistics.DateTimeFromSequence.AddTicks(count * -1);
|
||||
user = processData.Employee?.ToString() ?? "";
|
||||
recipe = detail.Recipe?.ToString() ?? "";
|
||||
_ = contents.Append("Bio-Rad ").Append("QS400MEPI".PadRight(17)).Append("Recipe: ").Append(recipe.PadRight(25)).AppendLine(processData.Date);
|
||||
_ = contents.Append("operator: ").Append(user.PadRight(22)).Append("batch: BIORAD #").AppendLine(logistics.JobID.Substring(6, 1));
|
||||
_ = contents.Append("cassette: ").Append("".PadRight(22)).Append("wafer: ").AppendLine(processData.Cassette);
|
||||
_ = contents.AppendLine("--------------------------------------------------------------------------------");
|
||||
_ = contents.AppendLine(" position thickness position thickness position thickness");
|
||||
segments = detail.Thickness.Split(',');
|
||||
for (int j = 0; j < segments.Length; j++)
|
||||
{
|
||||
if (item is not Stratus.Detail detail)
|
||||
throw new Exception();
|
||||
count += 1;
|
||||
contents.Clear();
|
||||
cassetteDateTime = logistics.DateTimeFromSequence.AddTicks(count * -1);
|
||||
user = processData.Employee?.ToString() ?? "";
|
||||
recipe = detail.Recipe?.ToString() ?? "";
|
||||
contents.Append("Bio-Rad ").Append("QS400MEPI".PadRight(17)).Append("Recipe: ").Append(recipe.PadRight(25)).AppendLine(processData.Date);
|
||||
contents.Append("operator: ").Append(user.PadRight(22)).Append("batch: BIORAD #").AppendLine(logistics.JobID.Substring(6, 1));
|
||||
contents.Append("cassette: ").Append("".PadRight(22)).Append("wafer: ").AppendLine(processData.Cassette);
|
||||
contents.AppendLine("--------------------------------------------------------------------------------");
|
||||
contents.AppendLine(" position thickness position thickness position thickness");
|
||||
segments = detail.Thickness.Split(',');
|
||||
for (int j = 0; j < segments.Length; j++)
|
||||
{
|
||||
wafer = j + 1;
|
||||
contents.Append(wafer.ToString().PadLeft(11));
|
||||
if ((wafer % 3) > 0)
|
||||
contents.Append(segments[j].PadLeft(10));
|
||||
else
|
||||
contents.AppendLine(segments[j].PadLeft(10));
|
||||
}
|
||||
if ((segments.Length % 3) > 0)
|
||||
contents.AppendLine();
|
||||
contents.Append(" wafer mean thickness = ").Append(detail.Mean).Append(", std. dev = ").Append(detail.StdDev).Append(" ").AppendLine(detail.PassFail);
|
||||
contents.AppendLine("================================================================================");
|
||||
contents.AppendLine("");
|
||||
contents.AppendLine("Radial variation (computation B) PASS:");
|
||||
contents.AppendLine("");
|
||||
contents.AppendLine(" thickness 0.0000");
|
||||
results.Add(new Tuple<string, bool, DateTime, string>(cassetteID, isBioRad, cassetteDateTime, contents.ToString()));
|
||||
wafer = j + 1;
|
||||
_ = contents.Append(wafer.ToString().PadLeft(11));
|
||||
if ((wafer % 3) > 0)
|
||||
_ = contents.Append(segments[j].PadLeft(10));
|
||||
else
|
||||
_ = contents.AppendLine(segments[j].PadLeft(10));
|
||||
}
|
||||
if ((segments.Length % 3) > 0)
|
||||
_ = contents.AppendLine();
|
||||
_ = contents.Append(" wafer mean thickness = ").Append(detail.Mean).Append(", std. dev = ").Append(detail.StdDev).Append(' ').AppendLine(detail.PassFail);
|
||||
_ = contents.AppendLine("================================================================================");
|
||||
_ = contents.AppendLine("");
|
||||
_ = contents.AppendLine("Radial variation (computation B) PASS:");
|
||||
_ = contents.AppendLine("");
|
||||
_ = contents.AppendLine(" thickness 0.0000");
|
||||
results.Add(new Tuple<string, bool, DateTime, string>(cassetteID, isBioRad, cassetteDateTime, contents.ToString()));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
// **********************************************
|
||||
// * Step #3 - Protect the raw data source file *
|
||||
// **********************************************
|
||||
// The multi-cassettes raw source file is ready to be splitted. Each cassette scan set has
|
||||
// been determined to be complete (i.e., has the started & finished statements). At this point
|
||||
// it is important to rename the multi-cassette raw data source file, located in the RawData
|
||||
// folder, to a different name so that the tool does not attempt to update the file while being
|
||||
// processed by the EAF cell instance.
|
||||
// Get the last date/time the DataBioRad.txt file was updated
|
||||
DateTime afterCheck = File.GetLastWriteTime(logistics.ReportFullPath);
|
||||
// Ensure that the DataBioRad.txt file has not been updated since the FileReader began the healthcheck
|
||||
// If the date/time values are different between the "Before" and "After" checks then let it go. The
|
||||
// tool is still busy trying to update the file. The FileReader will try to catch the data on the
|
||||
// next udpate.
|
||||
if (logistics.DateTimeFromSequence != afterCheck)
|
||||
{
|
||||
results.Clear();
|
||||
log.Debug($"****Extract() - DataBioRad.txt file is getting updated fast");
|
||||
log.Debug($"****Extract() - DataBioRadDateTime_AfterCheck = {afterCheck.Ticks}");
|
||||
log.Debug($"****Extract() - DataBioRadDateTime_BeforeCheck = {logistics.Sequence}");
|
||||
}
|
||||
return results;
|
||||
}
|
||||
// **********************************************
|
||||
// * Step #3 - Protect the raw data source file *
|
||||
// **********************************************
|
||||
// The multi-cassettes raw source file is ready to be splitted. Each cassette scan set has
|
||||
// been determined to be complete (i.e., has the started & finished statements). At this point
|
||||
// it is important to rename the multi-cassette raw data source file, located in the RawData
|
||||
// folder, to a different name so that the tool does not attempt to update the file while being
|
||||
// processed by the EAF cell instance.
|
||||
// Get the last date/time the DataBioRad.txt file was updated
|
||||
DateTime afterCheck = File.GetLastWriteTime(logistics.ReportFullPath);
|
||||
// Ensure that the DataBioRad.txt file has not been updated since the FileReader began the healthcheck
|
||||
// If the date/time values are different between the "Before" and "After" checks then let it go. The
|
||||
// tool is still busy trying to update the file. The FileReader will try to catch the data on the
|
||||
// next update.
|
||||
if (logistics.DateTimeFromSequence != afterCheck)
|
||||
{
|
||||
results.Clear();
|
||||
log.Debug($"****Extract() - DataBioRad.txt file is getting updated fast");
|
||||
log.Debug($"****Extract() - DataBioRadDateTime_AfterCheck = {afterCheck.Ticks}");
|
||||
log.Debug($"****Extract() - DataBioRadDateTime_BeforeCheck = {logistics.Sequence}");
|
||||
}
|
||||
return results;
|
||||
}
|
||||
|
||||
}
|
Reference in New Issue
Block a user