Extra Thickness, um line
Nuget Bump del ProcessData : IProcessData IDescription.GetDescriptions with body
parent f66393e27e
commit 0f95c45638
@@ -118,21 +118,22 @@ public class FileRead : Shared.FileRead, IFileRead
private string GetLines(List<Stratus.Description> descriptions)
{
StringBuilder results = new();
char del = '\t';
Stratus.Description x = descriptions[0];
_ = results.Append("Stratus_").Append(_Logistics.MID).Append('_').Append(_Logistics.DateTimeFromSequence.ToString("yyyyMMddhhmmssfff")).Append('\t').
Append(x.Date).Append('\t').
Append(_Logistics.JobID).Append('\t').
Append("FQA Thickness").Append('\t').
Append(x.Employee).Append('\t').
Append(x.Recipe).Append('\t').
Append(x.Reactor).Append('\t').
Append(x.RDS).Append('\t').
Append(x.PSN).Append('\t').
Append(x.Lot).Append('\t').
Append(x.Cassette).Append('\t').
_ = results.Append("Stratus_").Append(_Logistics.MID).Append('_').Append(_Logistics.DateTimeFromSequence.ToString("yyyyMMddhhmmssfff")).Append(del).
Append(x.Date).Append(del).
Append(_Logistics.JobID).Append(del).
Append("FQA Thickness").Append(del).
Append(x.Employee).Append(del).
Append(x.Recipe).Append(del).
Append(x.Reactor).Append(del).
Append(x.RDS).Append(del).
Append(x.PSN).Append(del).
Append(x.Lot).Append(del).
Append(x.Cassette).Append(del).
Append(x.MeanThickness);
for (int i = 0; i < descriptions.Count; i++)
_ = results.Append('\t').Append(descriptions[i].Slot).Append('\t').Append(descriptions[i].Mean);
_ = results.Append(del).Append(descriptions[i].Slot).Append(del).Append(descriptions[i].Mean);
return results.ToString();
}
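Aside: the hunk above replaces the repeated '\t' literals with a single del delimiter variable. A minimal, self-contained C# sketch of that pattern, using hypothetical local values in place of the real Stratus.Description fields and _Logistics data:

using System;
using System.Text;

// Sketch only: builds one delimiter-separated line the way GetLines does,
// but from plain local values instead of Stratus.Description fields.
class DelimiterSketch
{
    static string GetLine(string mid, string jobId, string recipe, double meanThickness)
    {
        StringBuilder results = new();
        char del = '\t'; // one delimiter variable instead of repeated '\t' literals
        _ = results.Append("Stratus_").Append(mid).Append(del).
            Append(jobId).Append(del).
            Append("FQA Thickness").Append(del).
            Append(recipe).Append(del).
            Append(meanThickness);
        return results.ToString();
    }

    static void Main() => Console.WriteLine(GetLine("MID-01", "JOB-02", "Recipe-A", 1.234));
}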
@@ -516,6 +516,11 @@ public partial class ProcessData : IProcessData
for (i = 0; i < num; i++)
{
point = new() { Thickness = GetToken() };
if (point.Thickness == "Thickness,")
{
ScanPast("um");
continue;
}
detail.Points.Add(point);
point.Position = Convert.ToString(detail.Points.Count);
}
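Aside: a rough, self-contained sketch of the token-skip guard added above, which ignores a stray "Thickness," token and its "um" unit before collecting point values. The Queue-based reader, ReadPoints, and the sample tokens are hypothetical stand-ins for the real GetToken/ScanPast scanner:

using System;
using System.Collections.Generic;

// Sketch only: skip a "Thickness," header token and its unit, keep real values.
class ThicknessSkipSketch
{
    static List<string> ReadPoints(Queue<string> tokens, int num)
    {
        List<string> points = new();
        for (int i = 0; i < num && tokens.Count > 0; i++)
        {
            string thickness = tokens.Dequeue();
            if (thickness == "Thickness,")
            {
                if (tokens.Count > 0 && tokens.Peek() == "um")
                    _ = tokens.Dequeue(); // analogous to ScanPast("um")
                continue;
            }
            points.Add(thickness);
        }
        return points;
    }

    static void Main()
    {
        Queue<string> tokens = new(new[] { "1.01", "Thickness,", "um", "1.02", "1.03" });
        Console.WriteLine(string.Join(", ", ReadPoints(tokens, 5))); // 1.01, 1.02, 1.03
    }
}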
@@ -117,18 +117,18 @@ public class FileRead : Shared.FileRead, IFileRead
string directoryName = Path.GetDirectoryName(reportFullPath);
string sequenceDirectoryName = Path.Combine(directoryName, _Logistics.Sequence.ToString());
string originalDataBioRad = Path.Combine(directoryName, $"{_OriginalDataBioRad}{_Logistics.Sequence}.txt");
List<Tuple<string, bool, DateTime, string>> tuples = ProcessData.GetTuples(this, _Logistics, _TickOffset.Value, results.Item4, _OriginalDataBioRad);
if (_IsEAFHosted)
{
if (tuples.Any())
IProcessData iProcessData = new ProcessData(this, _Logistics, _TickOffset.Value, results.Item4, _OriginalDataBioRad);
if (_IsEAFHosted && iProcessData.Details.Any())
{
if (!Directory.Exists(sequenceDirectoryName))
_ = Directory.CreateDirectory(sequenceDirectoryName);
File.Move(reportFullPath, originalDataBioRad);
_Log.Debug(string.Concat("****Extract() - Renamed [", reportFullPath, "] to [", originalDataBioRad, "]"));
}
foreach (Tuple<string, bool, DateTime, string> tuple in tuples)
for (int i = 0; i < iProcessData.Details.Count; i++)
{
if (iProcessData.Details[i] is not Tuple<string, bool, DateTime, string> tuple)
continue;
isBioRad = tuple.Item2;
dataText = tuple.Item4;
cassetteID = tuple.Item1;

@@ -138,12 +138,18 @@ public class FileRead : Shared.FileRead, IFileRead
else
tupleFileName = string.Concat("CassetteDataBioRad_", cassetteID, "_", cassetteDateTime.Ticks, ".txt");
fileNameTemp = Path.Combine(sequenceDirectoryName, tupleFileName);
if (!_IsEAFHosted)
continue;
File.WriteAllText(fileNameTemp, dataText);
File.SetLastWriteTime(fileNameTemp, cassetteDateTime);
if (_Logistics.Sequence != cassetteDateTime.Ticks && File.Exists(originalDataBioRad))
File.Copy(originalDataBioRad, Path.Combine(directoryName, $"{_OriginalDataBioRad}{cassetteDateTime.Ticks}.txt"));
File.Move(fileNameTemp, Path.Combine(directoryName, tupleFileName));
}
if (!_IsEAFHosted)
results = iProcessData.GetResults(this, _Logistics, results.Item4);
else
{
if (Directory.Exists(sequenceDirectoryName))
Directory.Delete(sequenceDirectoryName);
}
@@ -1,4 +1,5 @@
using Adaptation.Shared;
using Adaptation.Shared.Duplicator;
using Adaptation.Shared.Methods;
using log4net;
using System;

@@ -6,17 +7,61 @@ using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Text.Json;
using System.Text.RegularExpressions;

namespace Adaptation.FileHandlers.txt;

public partial class ProcessData
public partial class ProcessData : IProcessData
{

internal static List<Tuple<string, bool, DateTime, string>> GetTuples(FileRead fileRead, Logistics logistics, long tickOffset, List<FileInfo> fileInfoCollection, string originalDataBioRad)
private readonly ILog _Log;
private readonly List<object> _Details;

public string JobID { get; set; }
public string MesEntity { get; set; }

List<object> Shared.Properties.IProcessData.Details => _Details;

public ProcessData(IFileRead fileRead, Logistics logistics, long tickOffset, List<FileInfo> fileInfoCollection, string originalDataBioRad)
{
JobID = logistics.JobID;
fileInfoCollection.Clear();
_Details = new List<object>();
MesEntity = logistics.MesEntity;
_Log = LogManager.GetLogger(typeof(ProcessData));
List<Tuple<string, bool, DateTime, string>> tuples = Parse(fileRead, logistics, tickOffset, fileInfoCollection, originalDataBioRad);
_Details.AddRange(tuples);
}

string IProcessData.GetCurrentReactor(IFileRead fileRead, Logistics logistics, Dictionary<string, string> reactors) => throw new Exception(string.Concat("See ", nameof(Parse)));

Tuple<string, Test[], JsonElement[], List<FileInfo>> IProcessData.GetResults(IFileRead fileRead, Logistics logistics, List<FileInfo> fileInfoCollection)
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
List<Test> tests = new();
foreach (object item in _Details)
tests.Add(Test.BioRadStratus);
List<IDescription> descriptions = fileRead.GetDescriptions(fileRead, tests, this);
if (tests.Count != descriptions.Count)
throw new Exception();
for (int i = 0; i < tests.Count; i++)
{
if (descriptions[i] is not Description description)
throw new Exception();
if (description.Test != (int)tests[i])
throw new Exception();
}
List<Description> fileReadDescriptions = (from l in descriptions select (Description)l).ToList();
string json = JsonSerializer.Serialize(fileReadDescriptions, fileReadDescriptions.GetType());
JsonElement[] jsonElements = JsonSerializer.Deserialize<JsonElement[]>(json);
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(logistics.Logistics1[0], tests.ToArray(), jsonElements, fileInfoCollection);
return results;
}

private List<Tuple<string, bool, DateTime, string>> Parse(IFileRead fileRead, Logistics logistics, long tickOffset, List<FileInfo> fileInfoCollection, string originalDataBioRad)
{
List<Tuple<string, bool, DateTime, string>> results = new();
ILog log = LogManager.GetLogger(typeof(ProcessData));
string[] reportFullPathlines = File.ReadAllLines(logistics.ReportFullPath);
// ***********************************************************************************
// * Step #2 - Verify completeness of each cassette scan in the raw data source file *
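Aside: a small, self-contained sketch of the JSON round-trip that GetResults above performs (serialize the typed description list, then reread it as JsonElement[]). SketchDescription is a hypothetical record standing in for the real Description type:

using System;
using System.Collections.Generic;
using System.Text.Json;

// Sketch only: typed list -> JSON string -> JsonElement[] payload.
record SketchDescription(int Test, int Index, string RDS);

class JsonRoundTripSketch
{
    static void Main()
    {
        List<SketchDescription> descriptions = new()
        {
            new SketchDescription(1, 0, "RDS-A"),
            new SketchDescription(1, 1, "RDS-B"),
        };
        string json = JsonSerializer.Serialize(descriptions, descriptions.GetType());
        JsonElement[] jsonElements = JsonSerializer.Deserialize<JsonElement[]>(json);
        Console.WriteLine(jsonElements.Length);                              // 2
        Console.WriteLine(jsonElements[0].GetProperty("Test").GetInt32());   // 1
    }
}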
@@ -38,12 +83,12 @@ public partial class ProcessData
if (line.Contains("Cassette") && line.Contains("started") && (cassetteScanCompleted is null || cassetteScanCompleted.Value))
{
cassetteScanCompleted = false;
log.Debug("****Extract() - CassetteScanCompleted = FALSE");
_Log.Debug("****Extract() - CassetteScanCompleted = FALSE");
}
else if (line.Contains("Cassette") && line.Contains("finished") && (cassetteScanCompleted is null || !cassetteScanCompleted.Value))
{
cassetteScanCompleted = true;
log.Debug("****Extract() - CassetteScanCompleted = TRUE");
_Log.Debug("****Extract() - CassetteScanCompleted = TRUE");
}
}
Dictionary<string, List<string>> cassetteIDAndDataSets;

@@ -54,7 +99,7 @@ public partial class ProcessData
cassetteIDAndDataSets = new();
// Raw source file has an incomplete data set or it only contains a "Process failed" and should not be
// processed /split yet. Simply get out of this routine until enough data has been appended to the file.
log.Debug($"****Extract() - Raw source file has an incomplete data set and should not be processed yet.");
_Log.Debug($"****Extract() - Raw source file has an incomplete data set and should not be processed yet.");
}
else
cassetteIDAndDataSets = GetCassetteIDAndDataSets(reportFullPathlines);

@@ -102,7 +147,7 @@ public partial class ProcessData
recipeName = string.Empty;
stringIndex = dataText.IndexOf(recipeSearch);
recipeName = dataText.Substring(stringIndex + recipeSearch.Length);
log.Debug($"****Extract(FDR): recipeName = {recipeName}");
_Log.Debug($"****Extract(FDR): recipeName = {recipeName}");
#pragma warning disable CA2249
if (!string.IsNullOrEmpty(recipeName) && (recipeName.IndexOf("center", StringComparison.CurrentCultureIgnoreCase) >= 0))
#pragma warning restore CA2249

@@ -170,8 +215,8 @@ public partial class ProcessData
}
}
}
log.Debug($"****Extract(FDR): ToolType = {toolType}");
log.Debug($"****Extract(FDR): DataType = {dataType}");
_Log.Debug($"****Extract(FDR): ToolType = {toolType}");
_Log.Debug($"****Extract(FDR): DataType = {dataType}");
if (!isBioRad)
{
cassetteDateTime = logistics.DateTimeFromSequence.AddTicks(i * -1);

@@ -182,7 +227,7 @@ public partial class ProcessData
processData = new Stratus.ProcessData(fileRead, logistics, fileInfoCollection, originalDataBioRad, dataText: dataText);
iProcessData = processData;
if (!iProcessData.Details.Any())
log.Warn("No Details!");
_Log.Warn("No Details!");
else
{
foreach (object item in iProcessData.Details)

@@ -243,9 +288,9 @@ public partial class ProcessData
if (logistics.DateTimeFromSequence != afterCheck)
{
results.Clear();
log.Debug($"****Extract() - DataBioRad.txt file is getting updated fast");
log.Debug($"****Extract() - DataBioRadDateTime_AfterCheck = {afterCheck.Ticks}");
log.Debug($"****Extract() - DataBioRadDateTime_BeforeCheck = {logistics.Sequence}");
_Log.Debug($"****Extract() - DataBioRad.txt file is getting updated fast");
_Log.Debug($"****Extract() - DataBioRadDateTime_AfterCheck = {afterCheck.Ticks}");
_Log.Debug($"****Extract() - DataBioRadDateTime_BeforeCheck = {logistics.Sequence}");
}
return results;
}

@@ -503,4 +548,5 @@ public partial class ProcessData
}
return results;
}

}
@@ -33,7 +33,7 @@
<DefineConstants>Linux</DefineConstants>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="coverlet.collector" Version="3.2.0" />
<PackageReference Include="coverlet.collector" Version="6.0.0" />
<PackageReference Include="FFMpegCore" Version="5.1.0" />
<PackageReference Include="IKVM.AWT.WinForms" Version="7.2.4630.5"><NoWarn>NU1701</NoWarn></PackageReference>
<PackageReference Include="IKVM.OpenJDK.Core" Version="7.2.4630.5"><NoWarn>NU1701</NoWarn></PackageReference>

@@ -43,6 +43,8 @@
<PackageReference Include="IKVM.OpenJDK.XML.API" Version="7.2.4630.5"><NoWarn>NU1701</NoWarn></PackageReference>
<PackageReference Include="IKVM.Runtime" Version="7.2.4630.5"><NoWarn>NU1701</NoWarn></PackageReference>
<PackageReference Include="Instances" Version="3.0.0" />
<PackageReference Include="Microsoft.AspNet.WebApi.SelfHost" Version="5.2.9" />
<PackageReference Include="Microsoft.AspNetCore.SignalR.Client" Version="7.0.5" />
<PackageReference Include="Microsoft.Extensions.Configuration.Binder" Version="7.0.4" />
<PackageReference Include="Microsoft.Extensions.Configuration.CommandLine" Version="7.0.0" />
<PackageReference Include="Microsoft.Extensions.Configuration.EnvironmentVariables" Version="7.0.0" />

@@ -53,10 +55,10 @@
<PackageReference Include="Microsoft.Extensions.Logging.Console" Version="7.0.0" />
<PackageReference Include="Microsoft.Extensions.Logging.Debug" Version="7.0.0" />
<PackageReference Include="Microsoft.Extensions.Logging" Version="7.0.0" />
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.5.0" />
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.6.1" />
<PackageReference Include="Microsoft.Win32.SystemEvents" Version="7.0.0" />
<PackageReference Include="MSTest.TestAdapter" Version="3.0.2" />
<PackageReference Include="MSTest.TestFramework" Version="3.0.2" />
<PackageReference Include="MSTest.TestAdapter" Version="3.0.4" />
<PackageReference Include="MSTest.TestFramework" Version="3.0.4" />
<PackageReference Include="Pdfbox" Version="1.1.1"><NoWarn>NU1701</NoWarn></PackageReference>
<PackageReference Include="RoboSharp" Version="1.2.8" />
<PackageReference Include="System.Configuration.ConfigurationManager" Version="7.0.0" />

@@ -67,8 +69,8 @@
<PackageReference Include="Tesseract" Version="5.2.0" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="Tibco.Rendezvous" Version="8.5.0" />
<PackageReference Include="Infineon.Yoda" Version="5.4.1" />
<PackageReference Include="Tibco.Rendezvous.DotNetCore" Version="8.5.0" />
<PackageReference Include="Infineon.Yoda.DotNetCore" Version="5.4.1" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="Infineon.Mesa.PDF.Text.Stripper" Version="4.8.0.1"><NoWarn>NU1701</NoWarn></PackageReference>
@@ -110,6 +110,42 @@ public class Description : IDescription, Properties.IDescription
List<IDescription> IDescription.GetDescriptions(IFileRead fileRead, Logistics logistics, List<Test> tests, IProcessData iProcessData)
{
List<IDescription> results = new();
if (iProcessData is null || !iProcessData.Details.Any())
results.Add(GetDefault(fileRead, logistics));
else
{
string nullData;
Description description;
object configDataNullData = fileRead.NullData;
if (configDataNullData is null)
nullData = string.Empty;
else
nullData = configDataNullData.ToString();
for (int i = 0; i < iProcessData.Details.Count; i++)
{
if (iProcessData.Details[i] is null)
continue;
description = new Description
{
Test = (int)tests[i],
Count = tests.Count,
Index = i,
//
EventName = fileRead.EventName,
NullData = nullData,
JobID = fileRead.CellInstanceName,
Sequence = logistics.Sequence.ToString(),
MesEntity = logistics.MesEntity,
ReportFullPath = logistics.ReportFullPath,
ProcessJobID = logistics.ProcessJobID,
MID = logistics.MID,
//
Date = DateTime.Now.ToString(GetDateFormat()),
RDS = string.Empty,
};
results.Add(description);
}
}
return results;
}
@@ -23,12 +23,36 @@ public class BIORAD5
_BIORAD5 = CreateSelfDescription.Staging.v2_49_2.BIORAD5.EAFLoggingUnitTesting;
}

private static void NonThrowTryCatch()
{
try
{ throw new Exception(); }
catch (Exception) { }
}

#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Staging__v2_49_2__BIORAD5__txt() => _BIORAD5.Staging__v2_49_2__BIORAD5__txt();

#if DEBUG
[Ignore]
#endif
[TestMethod]
public void Staging__v2_49_2__BIORAD5__txt638221788953480284__MorePoints()
{
bool validatePDSF = false;
string check = "*DataBioRad.txt";
_BIORAD5.Staging__v2_49_2__BIORAD5__txt();
MethodBase methodBase = new StackFrame().GetMethod();
string[] variables = _BIORAD5.AdaptationTesting.GetVariables(methodBase, check, validatePDSF);
IFileRead fileRead = _BIORAD5.AdaptationTesting.Get(methodBase, sourceFileLocation: variables[2], sourceFileFilter: variables[3], useCyclicalForDescription: false);
Logistics logistics = new(fileRead);
_ = Shared.AdaptationTesting.ReExtractCompareUpdatePassDirectory(variables, fileRead, logistics, validatePDSF);
NonThrowTryCatch();
}

#if DEBUG
[Ignore]
#endif

@@ -54,6 +78,7 @@ public class BIORAD5
Assert.IsTrue(dateTime == logistics.DateTimeFromSequence);
dateTime = FileHandlers.Stratus.ProcessData.GetDateTime(logistics, "11/24/21 08:39");
Assert.IsTrue(dateTime == logistics.DateTimeFromSequence);
NonThrowTryCatch();
}

#if DEBUG

@@ -75,6 +100,7 @@ public class BIORAD5
Assert.IsTrue(dateTime == logistics.DateTimeFromSequence);
dateTime = FileHandlers.Stratus.ProcessData.GetDateTime(logistics, "11/24/21 08:39");
Assert.IsTrue(dateTime == logistics.DateTimeFromSequence);
NonThrowTryCatch();
}

#if DEBUG