using Adaptation.Shared;
using Adaptation.Shared.Methods;
using log4net;
using System;
using System.Collections.Generic;
using System.Data;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Text;
using System.Text.Json;
using System.Text.RegularExpressions;
namespace Adaptation.FileHandlers.Stratus
{
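// Parses Stratus BioRad report text into header values (Batch, Cassette, Reactor, RDS, PSN, etc.)
// and a collection of per-wafer Detail records exposed through IProcessData.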
public partial class ProcessData : IProcessData
{
private readonly List<object> _Details;
public string JobID { get; set; }
public string MesEntity { get; set; }
public string Batch { get; set; }
public string Cassette { get; set; }
public string Date { get; set; }
public string FilePath { get; set; }
public string MeanThickness { get; set; }
public string Employee { get; set; }
public string PSN { get; set; }
public string RDS { get; set; }
public string Reactor { get; set; }
public string Recipe { get; set; }
public string StdDev { get; set; }
public string Title { get; set; }
public string UniqueId { get; set; }
List<object> Shared.Properties.IProcessData.Details => _Details;
private int _I;
private string _Data;
private readonly ILog _Log;
public ProcessData(IFileRead fileRead, Logistics logistics, List<FileInfo> fileInfoCollection, string originalDataBioRad, string dataText)
{
JobID = logistics.JobID;
fileInfoCollection.Clear();
_Details = new List<object>();
MesEntity = logistics.MesEntity;
_Log = LogManager.GetLogger(typeof(ProcessData));
Parse(fileRead, logistics, fileInfoCollection, originalDataBioRad, dataText);
}
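// Not used by this handler; Parse derives the reactor from the Cassette name instead.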
string IProcessData.GetCurrentReactor(IFileRead fileRead, Logistics logistics, Dictionary<string, string> reactors)
{
throw new Exception(string.Concat("See ", nameof(Parse)));
}
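// Builds one Test entry per parsed detail, pairs them with descriptions from fileRead, and returns
// the first logistics line, the tests, the serialized descriptions, and the collected report files.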
Tuple<string, Test[], JsonElement[], List<FileInfo>> IProcessData.GetResults(IFileRead fileRead, Logistics logistics, List<FileInfo> fileInfoCollection)
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
List<Test> tests = new();
foreach (object item in _Details)
tests.Add(Test.BioRadStratus);
List<IDescription> descriptions = fileRead.GetDescriptions(fileRead, tests, this);
if (tests.Count != descriptions.Count)
throw new Exception();
for (int i = 0; i < tests.Count; i++)
{
if (descriptions[i] is not Description description)
throw new Exception();
if (description.Test != (int)tests[i])
throw new Exception();
}
List<Description> fileReadDescriptions = (from l in descriptions select (Description)l).ToList();
string json = JsonSerializer.Serialize(fileReadDescriptions, fileReadDescriptions.GetType());
JsonElement[] jsonElements = JsonSerializer.Deserialize<JsonElement[]>(json);
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(logistics.Logistics1[0], tests.ToArray(), jsonElements, fileInfoCollection);
return results;
}
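// Returns the trimmed text between the current position and the next occurrence of text,
// advancing the position past the delimiter (or to the end of the data when not found).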
private string GetBefore(string text)
{
string str;
string str1;
int num = _Data.IndexOf(text, _I);
if (num <= -1)
{
str = _Data.Substring(_I);
_I = _Data.Length;
str1 = str.Trim();
}
else
{
str = _Data.Substring(_I, num - _I);
_I = num + text.Length;
str1 = str.Trim();
}
return str1;
}
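// Same as GetBefore(text) but leaves the result untrimmed when trim is false.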
private string GetBefore(string text, bool trim)
{
string str;
string before;
if (!trim)
{
int num = _Data.IndexOf(text, _I);
if (num <= -1)
{
str = _Data.Substring(_I);
_I = _Data.Length;
before = str;
}
else
{
str = _Data.Substring(_I, num - _I);
_I = num + text.Length;
before = str;
}
}
else
{
before = GetBefore(text);
}
return before;
}
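// Returns the trimmed remainder of the current line and advances past the line break.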
private string GetToEOL()
{
string result;
if (_Data.IndexOf("\n", _I) > -1)
result = GetBefore("\n");
else
result = GetBefore(Environment.NewLine);
return result;
}
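// Same as GetToEOL() but leaves the result untrimmed when trim is false.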
private string GetToEOL(bool trim)
{
string str;
if (_Data.IndexOf("\n", _I) > -1)
str = (!trim ? GetBefore("\n", false) : GetToEOL());
else
str = (!trim ? GetBefore(Environment.NewLine, false) : GetToEOL());
return str;
}
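// Skips leading whitespace and returns the next whitespace-delimited token.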
private string GetToken()
{
while (_I < _Data.Length && IsNullOrWhiteSpace(_Data.Substring(_I, 1)))
_I++;
int num = _I;
while (num < _Data.Length && !IsNullOrWhiteSpace(_Data.Substring(num, 1)))
num++;
string str = _Data.Substring(_I, num - _I);
_I = num;
return str.Trim();
}
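// Returns the trimmed text between the current position and the next occurrence of text
// without advancing the position; assumes text is present in the remaining data.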
private string GetToText(string text)
{
string str = _Data.Substring(_I, _Data.IndexOf(text, _I) - _I).Trim();
return str;
}
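// Returns true when the remainder of the current line is empty or whitespace.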
private bool IsBlankLine()
{
int num = _Data.IndexOf("\n", _I);
return IsNullOrWhiteSpace((num > -1 ? _Data.Substring(_I, num - _I) : _Data.Substring(_I)));
}
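// Returns true when every character in text is whitespace; text is expected to be non-null.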
private bool IsNullOrWhiteSpace(string text)
{
for (int i = 0; i < text.Length; i++)
{
if (!char.IsWhiteSpace(text[i]))
return false;
}
return true;
}
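// Returns the next line without advancing the current position.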
private string PeekNextLine()
{
int num = _I;
string toEOL = GetToEOL();
_I = num;
return toEOL;
}
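// Advances the current position past the next occurrence of text (or to the end of the data when not found).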
private void ScanPast(string text)
{
int num = _Data.IndexOf(text, _I);
if (num <= -1)
{
_I = _Data.Length;
}
else
{
_I = num + text.Length;
}
}
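// Parses a "MM/dd/yy HH:mm" timestamp; falls back to logistics.DateTimeFromSequence when the text
// does not parse or is more than a day away from the sequence date-time.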
internal static DateTime GetDateTime(Logistics logistics, string dateTimeText)
{
DateTime result;
string inputDateFormat = "MM/dd/yy HH:mm";
if (dateTimeText.Length != inputDateFormat.Length)
result = logistics.DateTimeFromSequence;
else
{
if (!DateTime.TryParseExact(dateTimeText, inputDateFormat, CultureInfo.InvariantCulture, DateTimeStyles.None, out DateTime dateTimeParsed))
result = logistics.DateTimeFromSequence;
else
{
if (dateTimeParsed < logistics.DateTimeFromSequence.AddDays(1) && dateTimeParsed > logistics.DateTimeFromSequence.AddDays(-1))
result = dateTimeParsed;
else
result = logistics.DateTimeFromSequence;
}
}
return result;
}
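// Parses the received report text. The expected layout, inferred from the scans below, is a
// "Cassette <name> started at <MM/dd/yy HH:mm>." header (optionally preceded by a Batch line),
// followed by per-wafer blocks with Wafer, Slot, Recipe, thickness point lists, Mean and STDD
// values, and a trailing cassette-level Mean/STDD summary.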
private void Parse(IFileRead fileRead, Logistics logistics, List<FileInfo> fileInfoCollection, string originalDataBioRad, string receivedData)
{
if (fileRead is null)
throw new ArgumentNullException(nameof(fileRead));
_I = 0;
_Data = string.Empty;
List<Detail> details = new();
if (string.IsNullOrEmpty(receivedData))
receivedData = File.ReadAllText(logistics.ReportFullPath);
_Log.Debug("****ProcessData - Source file contents:");
_Log.Debug(receivedData);
string[] files = Directory.GetFiles(Path.GetDirectoryName(logistics.ReportFullPath), string.Concat(originalDataBioRad, logistics.Sequence, "*"), SearchOption.TopDirectoryOnly);
foreach (string file in files)
fileInfoCollection.Add(new FileInfo(file));
if (!string.IsNullOrEmpty(receivedData))
{
int i;
int num;
int num1;
Point point;
Detail detail;
string[] segments;
string batch = "Batch";
string started = "started";
string cassette = "Cassette";
string startedAt = "started at";
_I = 0;
_Data = receivedData;
if (!_Data.Contains(batch) || !_Data.Contains(started))
Batch = string.Empty;
else
{
while (true)
{
ScanPast(batch);
if (!_Data.Substring(_I).Contains(batch))
break;
}
Batch = GetToText(started);
ScanPast(startedAt);
}
ScanPast(cassette);
if (!_Data.Substring(_I).Contains(started))
Cassette = string.Empty;
else
Cassette = GetToText(started);
// Replace the illegal characters \/:*?"<>| found in the Cassette and keep only the first line.
Cassette = Regex.Replace(Cassette, @"[\\/:*?""<>|]", "_").Split('\r')[0].Split('\n')[0];
if (Cassette.StartsWith("1T") || Cassette.StartsWith("1t"))
Cassette = Cassette.Substring(2);
Title = (!string.IsNullOrEmpty(Batch) ? Batch : Cassette);
ScanPast(startedAt);
string dateTimeText = GetToEOL();
if (dateTimeText.EndsWith("."))
dateTimeText = dateTimeText.Remove(dateTimeText.Length - 1, 1);
DateTime dateTime = GetDateTime(logistics, dateTimeText);
Date = dateTime.ToString();
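// Derive Reactor, RDS, PSN, and Employee from the Cassette name, preferring '.' then '-' then '_' as the delimiter and falling back to spaces.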
if (Cassette.Contains('.'))
segments = Cassette.Split('.');
else if (Cassette.Contains('-'))
segments = Cassette.Split('-');
else if (!Cassette.Contains('_'))
segments = Cassette.Split(' ');
else
segments = Cassette.Split('_');
if (segments.Length >= 1)
Reactor = segments[0];
if (segments.Length >= 2)
RDS = segments[1];
if (segments.Length >= 3)
PSN = segments[2];
if (segments.Length >= 4)
Employee = segments[3];
if (Reactor.Length > 3)
{
RDS = Reactor;
Reactor = string.Empty;
}
num1 = 0;
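// Each "Wafer" block becomes one Detail with its slot, recipe, thickness points, and summary statistics.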
if (PeekNextLine().Contains("Wafer"))
{
_Log.Debug("****ProcessData Contains Wafer");
while (!PeekNextLine().Contains(cassette))
{
if (num1++ > 25)
break;
else
{
_Log.Debug("****ProcessData new stratusBioRadWaferDetail");
detail = new Detail();
ScanPast("Wafer");
detail.Wafer = GetToEOL();
if (detail.Wafer.EndsWith("."))
{
_Log.Debug("****ProcessData Removing Wafer");
detail.Wafer = detail.Wafer.Remove(detail.Wafer.Length - 1, 1);
}
ScanPast("Slot");
detail.Slot = GetToEOL();
ScanPast("Recipe");
Recipe = GetToEOL();
if (Recipe.EndsWith("."))
{
_Log.Debug("****ProcessData Removing Recipe");
Recipe = Recipe.Remove(Recipe.Length - 1, 1);
}
detail.Recipe = Recipe;
GetToEOL();
if (PeekNextLine().Contains("Thickness"))
{
ScanPast("1 - ");
num = Convert.ToInt32(GetToken());
_Log.Debug(string.Concat("****ProcessData Thickness =", num));
detail.Points = new();
for (i = 0; i < num; i++)
{
point = new() { Thickness = GetToken() };
detail.Points.Add(point);
point.Position = Convert.ToString(detail.Points.Count);
}
}
GetToEOL();
if (PeekNextLine().Contains("Thickness"))
{
ScanPast("11 - ");
num = Convert.ToInt32(GetToken());
detail.Points ??= new(); // guard in case the "1 - " thickness block was absent
for (i = detail.Points.Count; i < num; i++)
{
point = new() { Thickness = GetToken() };
detail.Points.Add(point);
point.Position = Convert.ToString(detail.Points.Count);
}
}
ScanPast("Slot");
GetToken();
detail.PassFail = GetToken();
if (detail.PassFail.EndsWith("."))
{
_Log.Debug("****ProcessData Removing PassFail");
detail.PassFail = detail.PassFail.Remove(detail.PassFail.Length - 1, 1);
}
ScanPast("Mean");
detail.Mean = GetToken();
if (detail.Mean.EndsWith(","))
{
_Log.Debug("****ProcessData Removing Mean");
detail.Mean = detail.Mean.Remove(detail.Mean.Length - 1, 1);
}
ScanPast("STDD");
detail.StdDev = GetToEOL();
if (detail.StdDev.EndsWith("."))
{
_Log.Debug("****ProcessData Removing stdDev");
detail.StdDev = detail.StdDev.Remove(detail.StdDev.Length - 1, 1);
}
detail.UniqueId = string.Concat("_Wafer-", detail.Wafer, "_Slot-", detail.Slot, "_Point-", detail.Position);
details.Add(detail);
if (PeekNextLine().Contains(cassette))
GetToEOL();
if (PeekNextLine().Contains(cassette))
GetToEOL();
if (PeekNextLine().Contains("Process failed"))
GetToEOL();
}
}
ScanPast("Mean");
MeanThickness = GetToken();
if (MeanThickness.EndsWith(","))
{
_Log.Debug("****ProcessData Removing MeanThickness");
MeanThickness = MeanThickness.Remove(MeanThickness.Length - 1, 1);
}
ScanPast("STDD");
StdDev = GetToken();
if (StdDev.EndsWith(","))
{
_Log.Debug("****ProcessData Removing thi.StdDev");
StdDev = StdDev.Remove(StdDev.Length - 1, 1);
}
}
}
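// Build the header UniqueId, stamp it onto each detail and point, and flatten the per-point
// thickness and position values into comma-separated strings.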
StringBuilder stringBuilder = new();
UniqueId = string.Concat("StratusBioRad_", Reactor, "_", RDS, "_", PSN, "_", logistics.DateTimeFromSequence.ToString("yyyyMMddHHmmssffff"));
foreach (Detail detail in details)
{
detail.HeaderUniqueId = UniqueId;
detail.UniqueId = string.Concat(UniqueId, detail.UniqueId);
if (detail.Points is null)
detail.Points = new List<Point>();
foreach (Point bioRadDetail in detail.Points)
{
bioRadDetail.HeaderUniqueId = detail.HeaderUniqueId;
bioRadDetail.UniqueId = detail.UniqueId;
}
stringBuilder.Clear();
foreach (Point point in detail.Points)
stringBuilder.Append(point.Thickness).Append(',');
if (stringBuilder.Length > 0)
stringBuilder.Remove(stringBuilder.Length - 1, 1);
detail.Thickness = stringBuilder.ToString();
stringBuilder.Clear();
foreach (Point point in detail.Points)
stringBuilder.Append(point.Position).Append(',');
if (stringBuilder.Length > 0)
stringBuilder.Remove(stringBuilder.Length - 1, 1);
detail.Position = stringBuilder.ToString();
}
fileInfoCollection.Add(new FileInfo(logistics.ReportFullPath));
_Details.AddRange(details);
}
}
}