Ready to test v2.39.0

2022-02-14 18:41:51 -07:00
parent 199eb8567a
commit b97d7ebdb3
106 changed files with 11567 additions and 13653 deletions


@@ -5,272 +5,265 @@ using System.Collections.Generic;
using System.Linq;
using System.Text.Json;

namespace Adaptation.FileHandlers.QS408M;

public class Description : IDescription, Shared.Properties.IDescription
{

    public int Test { get; set; }
    public int Count { get; set; }
    public int Index { get; set; }
    //
    public string EventName { get; set; }
    public string NullData { get; set; }
    public string JobID { get; set; }
    public string Sequence { get; set; }
    public string MesEntity { get; set; }
    public string ReportFullPath { get; set; }
    public string ProcessJobID { get; set; }
    public string MID { get; set; }
    //
    public string Date { get; set; }
    public string Employee { get; set; }
    public string Lot { get; set; }
    public string PSN { get; set; }
    public string Reactor { get; set; }
    public string Recipe { get; set; }
    //
    public string Cassette { get; set; }
    public string HeaderUniqueId { get; set; }
    public string Layer { get; set; }
    public string PassFail { get; set; }
    public string Position { get; set; }
    public string RDS { get; set; }
    public string Title { get; set; }
    public string UniqueId { get; set; }
    public string Wafer { get; set; }
    public string Zone { get; set; }
    //
    public string MeanThickness { get; set; }
    public string RVThickness { get; set; }
    public string StdDev { get; set; }
    public string Thickness { get; set; }

    string IDescription.GetEventDescription() => "File Has been read and parsed";

    List<string> IDescription.GetNames(IFileRead fileRead, Logistics logistics)
    {
        List<string> results = new();
        IDescription description = GetDefault(fileRead, logistics);
        string json = JsonSerializer.Serialize(description, description.GetType());
        object @object = JsonSerializer.Deserialize<object>(json);
        if (@object is not JsonElement jsonElement)
            throw new Exception();
        foreach (JsonProperty jsonProperty in jsonElement.EnumerateObject())
            results.Add(jsonProperty.Name);
        return results;
    }

    List<string> IDescription.GetDetailNames()
    {
        List<string> results = new()
        {
            nameof(Cassette),
            nameof(HeaderUniqueId),
            nameof(Layer),
            nameof(PassFail),
            nameof(Position),
            nameof(RDS),
            nameof(Title),
            nameof(UniqueId),
            nameof(Wafer),
            nameof(Zone)
        };
        return results;
    }

    List<string> IDescription.GetHeaderNames()
    {
        List<string> results = new()
        {
            nameof(Date),
            nameof(Employee),
            nameof(Lot),
            nameof(PSN),
            nameof(Reactor),
            nameof(Recipe)
        };
        return results;
    }

    IDescription IDescription.GetDisplayNames()
    {
        Description result = GetDisplayNames();
        return result;
    }

    List<string> IDescription.GetParameterNames()
    {
        List<string> results = new()
        {
            nameof(MeanThickness),
            nameof(RVThickness),
            nameof(StdDev),
            nameof(Thickness)
        };
        return results;
    }

    JsonProperty[] IDescription.GetDefault(IFileRead fileRead, Logistics logistics)
    {
        JsonProperty[] results;
        IDescription description = GetDefault(fileRead, logistics);
        string json = JsonSerializer.Serialize(description, description.GetType());
        object @object = JsonSerializer.Deserialize<object>(json);
        results = ((JsonElement)@object).EnumerateObject().ToArray();
        return results;
    }

    List<string> IDescription.GetPairedParameterNames()
    {
        List<string> results = new();
        return results;
    }

    List<string> IDescription.GetIgnoreParameterNames(Test test)
    {
        List<string> results = new();
        return results;
    }

    IDescription IDescription.GetDefaultDescription(IFileRead fileRead, Logistics logistics)
    {
        Description result = GetDefault(fileRead, logistics);
        return result;
    }

    Dictionary<string, string> IDescription.GetDisplayNamesJsonElement(IFileRead fileRead)
    {
        Dictionary<string, string> results = new();
        IDescription description = GetDisplayNames();
        string json = JsonSerializer.Serialize(description, description.GetType());
        JsonElement jsonElement = JsonSerializer.Deserialize<JsonElement>(json);
        foreach (JsonProperty jsonProperty in jsonElement.EnumerateObject())
        {
            if (!results.ContainsKey(jsonProperty.Name))
                results.Add(jsonProperty.Name, string.Empty);
            if (jsonProperty.Value is JsonElement jsonPropertyValue)
                results[jsonProperty.Name] = jsonPropertyValue.ToString();
        }
        return results;
    }

    List<IDescription> IDescription.GetDescriptions(IFileRead fileRead, Logistics logistics, List<Test> tests, IProcessData iProcessData)
    {
        List<IDescription> results = new();
        if (iProcessData is null || !iProcessData.Details.Any() || iProcessData is not ProcessData processData)
            results.Add(GetDefault(fileRead, logistics));
        else
        {
            string nullData;
            Description description;
            object configDataNullData = fileRead.NullData;
            if (configDataNullData is null)
                nullData = string.Empty;
            else
                nullData = configDataNullData.ToString();
            for (int i = 0; i < iProcessData.Details.Count; i++)
            {
                if (iProcessData.Details[i] is not Detail detail)
                    continue;
                description = new Description
                {
                    Test = (int)tests[i],
                    Count = tests.Count,
                    Index = i,
                    //
                    EventName = fileRead.EventName,
                    NullData = nullData,
                    JobID = fileRead.CellInstanceName,
                    Sequence = logistics.Sequence.ToString(),
                    MesEntity = logistics.MesEntity,
                    ReportFullPath = logistics.ReportFullPath,
                    ProcessJobID = logistics.ProcessJobID,
                    MID = logistics.MID,
                    //
                    Date = processData.Date,
                    Employee = processData.Employee,
                    Lot = processData.Batch,
                    PSN = processData.PSN,
                    Reactor = processData.Reactor,
                    Recipe = processData.Recipe,
                    //
                    Cassette = processData.Cassette,
                    HeaderUniqueId = detail.HeaderUniqueId,
                    Layer = processData.Layer,
                    PassFail = processData.PassFail,
                    Position = detail.Position,
                    RDS = processData.RDS,
                    Title = processData.Title,
                    UniqueId = detail.UniqueId,
                    Wafer = processData.Wafer,
                    Zone = processData.Zone,
                    //
                    MeanThickness = processData.MeanThickness,
                    RVThickness = processData.RVThickness,
                    StdDev = processData.StdDev,
                    Thickness = detail.Thickness
                };
                results.Add(description);
            }
        }
        return results;
    }

    private Description GetDisplayNames()
    {
        Description result = new();
        return result;
    }

    private Description GetDefault(IFileRead fileRead, Logistics logistics)
    {
        Description result = new()
        {
            Test = -1,
            Count = 0,
            Index = -1,
            //
            EventName = fileRead.EventName,
            NullData = fileRead.NullData,
            JobID = fileRead.CellInstanceName,
            Sequence = logistics.Sequence.ToString(),
            MesEntity = fileRead.MesEntity,
            ReportFullPath = logistics.ReportFullPath,
            ProcessJobID = logistics.ProcessJobID,
            MID = logistics.MID,
            //
            Date = nameof(Date),
            Employee = nameof(Employee),
            Lot = nameof(Lot),
            PSN = nameof(PSN),
            Reactor = nameof(Reactor),
            Recipe = nameof(Recipe),
            //
            Cassette = nameof(Cassette),
            HeaderUniqueId = nameof(HeaderUniqueId),
            Layer = nameof(Layer),
            PassFail = nameof(PassFail),
            Position = nameof(Position),
            RDS = nameof(RDS),
            Title = nameof(Title),
            UniqueId = nameof(UniqueId),
            Wafer = nameof(Wafer),
            Zone = nameof(Zone),
            //
            MeanThickness = nameof(MeanThickness),
            RVThickness = nameof(RVThickness),
            StdDev = nameof(StdDev),
            Thickness = nameof(Thickness)
        };
        return result;
    }

}
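
The name/display-name plumbing above all leans on one System.Text.Json round trip: serialize a populated Description, then enumerate the resulting JsonElement to recover property names and values. A minimal standalone sketch of that pattern (the anonymous row type and its values are illustrative, not taken from this repository):

using System;
using System.Text.Json;

// Illustrative stand-in for a Description instance; any POCO round-trips the same way.
var row = new { Reactor = "Reactor", RDS = "RDS", PSN = "PSN" };

// Serialize, then walk the JSON object, as GetNames / GetDisplayNamesJsonElement do above.
string json = JsonSerializer.Serialize(row, row.GetType());
JsonElement jsonElement = JsonSerializer.Deserialize<JsonElement>(json);
foreach (JsonProperty jsonProperty in jsonElement.EnumerateObject())
    Console.WriteLine($"{jsonProperty.Name} = {jsonProperty.Value}");
// Prints: Reactor = Reactor, RDS = RDS, PSN = PSN (one per line)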


@@ -1,19 +1,13 @@
namespace Adaptation.FileHandlers.QS408M;

public class Detail
{

    public string HeaderUniqueId { get; set; }
    public string Position { get; set; }
    public string Thickness { get; set; }
    public string UniqueId { get; set; }

    public override string ToString() => string.Concat(Position, ";", Thickness, ";");

}


@@ -9,142 +9,122 @@ using System.Linq;
using System.Text.Json;
using System.Text.RegularExpressions;

namespace Adaptation.FileHandlers.QS408M;

public class FileRead : Shared.FileRead, IFileRead
{

    private readonly string _OriginalDataBioRad;
    private readonly ProcessData _LastProcessData;

    public FileRead(ISMTP smtp, Dictionary<string, string> fileParameter, string cellInstanceName, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList<ModelObjectParameterDefinition> modelObjectParameters, string equipmentDictionaryName, Dictionary<string, List<long>> dummyRuns, bool useCyclicalForDescription, bool isEAFHosted) :
        base(new Description(), true, smtp, fileParameter, cellInstanceName, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, useCyclicalForDescription, isEAFHosted)
    {
        _MinFileLength = 10;
        _NullData = string.Empty;
        _Logistics = new Logistics(this);
        if (_FileParameter is null)
            throw new Exception(cellInstanceConnectionName);
        if (_ModelObjectParameterDefinitions is null)
            throw new Exception(cellInstanceConnectionName);
        if (_IsDuplicator)
            throw new Exception(cellInstanceConnectionName);
        _OriginalDataBioRad = "OriginalDataBioRad_";
        _LastProcessData = new();
    }

    void IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, Exception exception) => Move(extractResults, exception);

    void IFileRead.WaitForThread() => WaitForThread(thread: null, threadExceptions: null);

    string IFileRead.GetEventDescription()
    {
        string result = _Description.GetEventDescription();
        return result;
    }

    List<string> IFileRead.GetHeaderNames()
    {
        List<string> results = _Description.GetHeaderNames();
        return results;
    }

    string[] IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, string to, string from, string resolvedFileLocation, Exception exception)
    {
        string[] results = Move(extractResults, to, from, resolvedFileLocation, exception);
        return results;
    }

    JsonProperty[] IFileRead.GetDefault()
    {
        JsonProperty[] results = _Description.GetDefault(this, _Logistics);
        return results;
    }

    Dictionary<string, string> IFileRead.GetDisplayNamesJsonElement()
    {
        Dictionary<string, string> results = _Description.GetDisplayNamesJsonElement(this);
        return results;
    }

    List<IDescription> IFileRead.GetDescriptions(IFileRead fileRead, List<Test> tests, IProcessData processData)
    {
        List<IDescription> results = _Description.GetDescriptions(fileRead, _Logistics, tests, processData);
        return results;
    }

    Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.GetExtractResult(string reportFullPath, string eventName)
    {
        Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
        if (string.IsNullOrEmpty(eventName))
            throw new Exception();
        _ReportFullPath = reportFullPath;
        DateTime dateTime = DateTime.Now;
        results = GetExtractResult(reportFullPath, dateTime);
        if (results.Item3 is null)
            results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(results.Item1, Array.Empty<Test>(), JsonSerializer.Deserialize<JsonElement[]>("[]"), results.Item4);
        if (results.Item3.Length > 0 && _IsEAFHosted)
            WritePDSF(this, results.Item3);
        UpdateLastTicksDuration(DateTime.Now.Ticks - dateTime.Ticks);
        return results;
    }

    Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.ReExtract()
    {
        Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
        List<string> headerNames = _Description.GetHeaderNames();
        Dictionary<string, string> keyValuePairs = _Description.GetDisplayNamesJsonElement(this);
        results = ReExtract(this, headerNames, keyValuePairs);
        return results;
    }

    void IFileRead.CheckTests(Test[] tests, bool extra) => throw new Exception(string.Concat("Not ", nameof(_IsDuplicator)));

    void IFileRead.Callback(object state) => throw new Exception(string.Concat("Not ", nameof(_IsDuplicator)));

    private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
    {
        Tuple<string, Test[], JsonElement[], List<FileInfo>> results = new(string.Empty, null, null, new List<FileInfo>());
        _Logistics = new Logistics(this, reportFullPath, useSplitForMID: true);
        SetFileParameterLotIDToLogisticsMID();
        if (reportFullPath.Length < _MinFileLength)
            results.Item4.Add(new FileInfo(reportFullPath));
        else
        {
            IProcessData iProcessData = new ProcessData(this, _Logistics, results.Item4, _OriginalDataBioRad, lastProcessData: _LastProcessData);
            if (iProcessData is ProcessData processData)
            {
                string mid = string.Concat(processData.Reactor, "-", processData.RDS, "-", processData.PSN);
                mid = Regex.Replace(mid, @"[\\,\/,\:,\*,\?,\"",\<,\>,\|]", "_").Split('\r')[0].Split('\n')[0];
                _Logistics.MID = mid;
                SetFileParameterLotID(mid);
                _Logistics.ProcessJobID = processData.Reactor;
            }
            if (!iProcessData.Details.Any())
                throw new Exception(string.Concat("No Data - ", dateTime.Ticks));
            results = iProcessData.GetResults(this, _Logistics, results.Item4);
        }
        return results;
    }

}
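
For reference, the MID built in GetExtractResult is just Reactor-RDS-PSN pushed through the same character sanitizer used on Batch in ProcessData. A small sketch with hypothetical values (the regex is copied from the handler; the commas inside the character class are literal, so a comma would also become an underscore):

using System;
using System.Text.RegularExpressions;

// Hypothetical values; the real ones come from ProcessData during an extract.
string reactor = "73", rds = "544481", psn = "4680";
string mid = string.Concat(reactor, "-", rds, "-", psn);
// Replaces \ / : * ? " < > | (and commas) with "_", then drops anything after a line break.
mid = Regex.Replace(mid, @"[\\,\/,\:,\*,\?,\"",\<,\>,\|]", "_").Split('\r')[0].Split('\n')[0];
Console.WriteLine(mid); // 73-544481-4680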


@@ -11,425 +11,419 @@ using System.Text;
using System.Text.Json;
using System.Text.RegularExpressions;

namespace Adaptation.FileHandlers.QS408M;

public partial class ProcessData : IProcessData
{

    private readonly List<object> _Details;

    public string JobID { get; set; }
    public string MesEntity { get; set; }
    public string Batch { get; set; }
    public string Cassette { get; set; }
    public string Date { get; set; }
    public string Employee { get; set; }
    public string Layer { get; set; }
    public string MeanThickness { get; set; }
    public string PSN { get; set; }
    public string PassFail { get; set; }
    public string RDS { get; set; }
    public string RVThickness { get; set; }
    public string Reactor { get; set; }
    public string Recipe { get; set; }
    public string StdDev { get; set; }
    public string Title { get; set; }
    public string UniqueId { get; set; }
    public string Wafer { get; set; }
    public string Zone { get; set; }

    List<object> Shared.Properties.IProcessData.Details => _Details;

    private int _I;
    private string _Data;
    private readonly ILog _Log;

    public ProcessData()
    {
    }

    public ProcessData(IFileRead fileRead, Logistics logistics, List<FileInfo> fileInfoCollection, string originalDataBioRad, ProcessData lastProcessData)
    {
        JobID = logistics.JobID;
        fileInfoCollection.Clear();
        _Details = new List<object>();
        MesEntity = logistics.MesEntity;
        _Log = LogManager.GetLogger(typeof(ProcessData));
        Parse(fileRead, logistics, fileInfoCollection, originalDataBioRad, lastProcessData);
    }

    string IProcessData.GetCurrentReactor(IFileRead fileRead, Logistics logistics, Dictionary<string, string> reactors) => throw new Exception(string.Concat("See ", nameof(Parse)));

    Tuple<string, Test[], JsonElement[], List<FileInfo>> IProcessData.GetResults(IFileRead fileRead, Logistics logistics, List<FileInfo> fileInfoCollection)
    {
        Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
        List<Test> tests = new();
        foreach (object item in _Details)
            tests.Add(Test.BioRadQS408M);
        List<IDescription> descriptions = fileRead.GetDescriptions(fileRead, tests, this);
        if (tests.Count != descriptions.Count)
            throw new Exception();
        for (int i = 0; i < tests.Count; i++)
        {
            if (descriptions[i] is not Description description)
                throw new Exception();
            if (description.Test != (int)tests[i])
                throw new Exception();
        }
        List<Description> fileReadDescriptions = (from l in descriptions select (Description)l).ToList();
        string json = JsonSerializer.Serialize(fileReadDescriptions, fileReadDescriptions.GetType());
        JsonElement[] jsonElements = JsonSerializer.Deserialize<JsonElement[]>(json);
        results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(logistics.Logistics1[0], tests.ToArray(), jsonElements, fileInfoCollection);
        return results;
    }

    private string GetBefore(string text)
    {
        string str;
        string str1;
        int num = _Data.IndexOf(text, _I);
        if (num <= -1)
        {
            str = _Data.Substring(_I);
            _I = _Data.Length;
            str1 = str.Trim();
        }
        else
        {
            str = _Data.Substring(_I, num - _I);
            _I = num + text.Length;
            str1 = str.Trim();
        }
        return str1;
    }

    private string GetBefore(string text, bool trim)
    {
        string str;
        string before;
        if (!trim)
        {
            int num = _Data.IndexOf(text, _I);
            if (num <= -1)
            {
                str = _Data.Substring(_I);
                _I = _Data.Length;
                before = str;
            }
            else
            {
                str = _Data.Substring(_I, num - _I);
                _I = num + text.Length;
                before = str;
            }
        }
        else
            before = GetBefore(text);
        return before;
    }

    private string GetToEOL()
    {
        string result;
        if (_Data.IndexOf("\n", _I) > -1)
            result = GetBefore("\n");
        else
            result = GetBefore(Environment.NewLine);
        return result;
    }

    private string GetToEOL(bool trim)
    {
        string str;
        if (_Data.IndexOf("\n", _I) > -1)
            str = (!trim ? GetBefore("\n", false) : GetToEOL());
        else
            str = (!trim ? GetBefore(Environment.NewLine, false) : GetToEOL());
        return str;
    }

    private string GetToken()
    {
        while (true)
        {
            if (_I >= _Data.Length || !IsNullOrWhiteSpace(_Data.Substring(_I, 1)))
            {
                break;
            }
            else
                _I++;
        }
        int num = _I;
        while (true)
        {
            if (num >= _Data.Length || IsNullOrWhiteSpace(_Data.Substring(num, 1)))
            {
                break;
            }
            num++;
        }
        string str = _Data.Substring(_I, num - _I);
        _I = num;
        return str.Trim();
    }

    private string GetToText(string text)
    {
        string str = _Data.Substring(_I, _Data.IndexOf(text, _I) - _I).Trim();
        return str;
    }

    private bool IsBlankLine()
    {
        int num = _Data.IndexOf("\n", _I);
        return IsNullOrWhiteSpace((num > -1 ? _Data.Substring(_I, num - _I) : _Data.Substring(_I)));
    }

    private bool IsNullOrWhiteSpace(string text)
    {
        bool flag;
        int num = 0;
        while (true)
        {
            if (num >= text.Length)
            {
                flag = true;
                break;
            }
            else if (char.IsWhiteSpace(text[num]))
            {
                num++;
            }
            else
            {
                flag = false;
                break;
            }
        }
        return flag;
    }

    private string PeekNextLine()
    {
        int num = _I;
        string toEOL = GetToEOL();
        _I = num;
        return toEOL;
    }

    private void ScanPast(string text)
    {
        int num = _Data.IndexOf(text, _I);
        if (num <= -1)
        {
            _I = _Data.Length;
        }
        else
        {
            _I = num + text.Length;
        }
    }

    internal static DateTime GetDateTime(Logistics logistics, string dateTimeText)
    {
        DateTime result;
        string inputDateFormat = "ddd mmm dd HH:mm:ss yyyy";
        if (dateTimeText.Length != inputDateFormat.Length)
            result = logistics.DateTimeFromSequence;
        else
        {
            if (!DateTime.TryParseExact(dateTimeText, inputDateFormat, CultureInfo.InvariantCulture, DateTimeStyles.None, out DateTime dateTimeParsed))
                result = logistics.DateTimeFromSequence;
            else
            {
                if (dateTimeParsed < logistics.DateTimeFromSequence.AddDays(1) && dateTimeParsed > logistics.DateTimeFromSequence.AddDays(-1))
                    result = dateTimeParsed;
                else
                    result = logistics.DateTimeFromSequence;
            }
        }
        return result;
    }

    private void Parse(IFileRead fileRead, Logistics logistics, List<FileInfo> fileInfoCollection, string originalDataBioRad, ProcessData lastProcessData)
    {
        if (fileRead is null)
        { }
        _I = 0;
        _Data = string.Empty;
        List<Detail> details = new();
        string receivedData = File.ReadAllText(logistics.ReportFullPath);
        _Log.Debug($"****ParseData - Source file contents:");
        _Log.Debug(receivedData);
        string[] files = Directory.GetFiles(Path.GetDirectoryName(logistics.ReportFullPath), string.Concat(originalDataBioRad, logistics.Sequence, "*"), SearchOption.TopDirectoryOnly);
        foreach (string file in files)
            fileInfoCollection.Add(new FileInfo(file));
        // occasionally there are multiple blocks of results, get the last one as earlier ones may be aborted runs.
        int index = receivedData.LastIndexOf("Bio-Rad");
        if (index > -1)
            receivedData = receivedData.Substring(index);
        _Log.Debug($"****ParseData - Source file contents to be parsed:");
        _Log.Debug(receivedData);
        if (!string.IsNullOrEmpty(receivedData))
        {
            _I = 0;
            _Data = receivedData;
            Title = GetBefore("Recipe:");
            Recipe = GetToken();
            string dateTimeText = GetToEOL();
            if (dateTimeText.EndsWith("."))
                dateTimeText = dateTimeText.Remove(dateTimeText.Length - 1, 1);
            DateTime dateTime = GetDateTime(logistics, dateTimeText);
            Date = dateTime.ToString();
            ScanPast("operator:");
            Employee = GetBefore("batch:");
            Batch = GetToEOL();
            // Remove illegal characters \/:*?"<>| found in the Batch
            Batch = Regex.Replace(Batch, @"[\\,\/,\:,\*,\?,\"",\<,\>,\|]", "_").Split('\r')[0].Split('\n')[0];
            ScanPast("cassette:");
            Cassette = GetBefore("wafer:");
            if (string.IsNullOrEmpty(Batch))
            {
                _I = 0;
                _Data = receivedData;
                ScanPast("wafer:");
            }
            Wafer = GetToEOL();
            _ = GetToEOL();
            _ = GetToEOL();
            string token = GetToken();
            int counter = 1;
            while (true)
            {
                if ((string.IsNullOrEmpty(token) || !char.IsDigit(token[0])))
                    break;
                Detail detail = new()
                {
                    Position = token,
                    Thickness = GetToken(),
                    UniqueId = string.Concat("_Point-", counter)
                };
                details.Add(detail);
                token = GetToken();
                counter++;
            }
            ScanPast("mean thickness =");
            MeanThickness = GetBefore(", std. dev =");
            StdDev = GetToken();
            PassFail = GetToEOL();
            ScanPast("thickness");
            RVThickness = GetToEOL();
        }
        if (string.IsNullOrEmpty(Wafer))
            throw new Exception("Wafer field is missing.");
        //parse out batch and validate
        string[] parsedBatch = Wafer.Split('-');
        if (parsedBatch.Length >= 1)
            Reactor = parsedBatch[0];
        if (parsedBatch.Length >= 2)
            RDS = parsedBatch[1];
        if (parsedBatch.Length >= 3)
        {
            string[] parsedPSN = parsedBatch[2].Split('.');
            if (parsedPSN.Length >= 1)
                PSN = parsedPSN[0];
            if (parsedPSN.Length >= 2)
                Layer = parsedPSN[1];
        }
        if (parsedBatch.Length >= 4)
            Zone = parsedBatch[3];
        JobID = logistics.JobID;
        if (logistics.DateTimeFromSequence > DateTime.Now.AddHours(-24))
        {
            if (string.IsNullOrEmpty(lastProcessData.Wafer))
            {
                lastProcessData.Batch = JobID;
                lastProcessData.Cassette = JobID;
                lastProcessData.Employee = JobID;
                lastProcessData.Recipe = JobID;
                lastProcessData.Title = JobID;
            }
            lastProcessData.Wafer = Wafer;
            lastProcessData.Reactor = Reactor;
            lastProcessData.RDS = RDS;
            string check = "--------";
            if (string.IsNullOrEmpty(Batch) || Batch.Contains(check))
                Batch = lastProcessData.Batch;
            else
                lastProcessData.Batch = Batch;
            if (string.IsNullOrEmpty(Cassette) || Cassette.Contains(check))
                Cassette = lastProcessData.Cassette;
            else
                lastProcessData.Cassette = Cassette;
            if (string.IsNullOrEmpty(Employee) || Employee.Contains(check))
                Employee = lastProcessData.Employee;
            else
                lastProcessData.Employee = Employee;
            if (string.IsNullOrEmpty(Recipe) || Recipe.Contains(check))
                Recipe = lastProcessData.Recipe;
            else
                lastProcessData.Recipe = Recipe;
            if (string.IsNullOrEmpty(Title) || Title.Contains(check))
                Title = lastProcessData.Title;
            else
                lastProcessData.Title = Title;
        }
        //fix title
        StringBuilder titleFixed = new();
        foreach (char c in Title)
        {
            if (char.IsLetterOrDigit(c) || c == '-' || c == '.')
                _ = titleFixed.Append(c);
        }
        Title = titleFixed.ToString();
        //fix wafer
        StringBuilder waferFixed = new();
        foreach (char c in Wafer)
        {
            if (char.IsLetterOrDigit(c) || c == '-' || c == '.')
                _ = waferFixed.Append(c);
        }
        Wafer = waferFixed.ToString();
        //create filename / unique id
        UniqueId = string.Concat(Title, "_", Wafer, "_", logistics.DateTimeFromSequence.ToString("yyyyMMddHHmmssffff"));
        foreach (Detail detail in details)
        {
            detail.HeaderUniqueId = UniqueId;
            detail.UniqueId = string.Concat(UniqueId, detail.UniqueId);
        }
        //trace datatype
        _Log.Debug("BioRad parsed infomation:");
        _Log.Debug(string.Format("Batch: {0}", Batch));
        _Log.Debug(string.Format("Cassette: {0}", Cassette));
        _Log.Debug(string.Format("Date: {0}", Date));
        foreach (Detail bioRadDetail in details)
            _Log.Debug(string.Format("Details: {0} - {1}", bioRadDetail.Position, bioRadDetail.Thickness));
        _Log.Debug(string.Format("Mean Thickness: {0}", MeanThickness));
        _Log.Debug(string.Format("Operator: {0}", Employee));
        _Log.Debug(string.Format("Pass/Fail: {0}", PassFail));
        _Log.Debug(string.Format("Recipe: {0}", Recipe));
        _Log.Debug(string.Format("RV Thickness: {0}", RVThickness));
        _Log.Debug(string.Format("Std Dev: {0}", StdDev));
        _Log.Debug(string.Format("Title: {0}", Title));
        _Log.Debug(string.Format("Wafer: {0}", Wafer));
        fileInfoCollection.Add(new FileInfo(logistics.ReportFullPath));
        _Details.AddRange(details);
    }

}
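
The Reactor, RDS, PSN, Layer, and Zone fields are all carved out of the Wafer token by the Split calls in Parse. A worked example with a hypothetical wafer value; the "Reactor-RDS-PSN.Layer-Zone" shape is inferred from that splitting logic, not documented in the source:

using System;

// Hypothetical wafer text; the real value is read from the QS408M report.
string wafer = "73-544481-4680.2-Top";
string[] parsedBatch = wafer.Split('-');            // ["73", "544481", "4680.2", "Top"]
string reactor = parsedBatch[0];                    // "73"
string rds = parsedBatch.Length >= 2 ? parsedBatch[1] : string.Empty;   // "544481"
string psn = string.Empty, layer = string.Empty, zone = string.Empty;
if (parsedBatch.Length >= 3)
{
    string[] parsedPSN = parsedBatch[2].Split('.'); // ["4680", "2"]
    psn = parsedPSN[0];                             // "4680"
    if (parsedPSN.Length >= 2)
        layer = parsedPSN[1];                       // "2"
}
if (parsedBatch.Length >= 4)
    zone = parsedBatch[3];                          // "Top"
Console.WriteLine($"{reactor} / {rds} / {psn} / {layer} / {zone}"); // 73 / 544481 / 4680 / 2 / Top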