Extra Thickness, um line

Nuget Bump
del
ProcessData : IProcessData
IDescription.GetDescriptions with body
2023-06-19 10:32:15 -07:00
parent f66393e27e
commit 0f95c45638
7 changed files with 179 additions and 57 deletions
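
The diff below moves ProcessData from a static GetTuples(...) helper to an instance that implements IProcessData and exposes its parsed tuples through a Details collection, which FileRead then iterates. A minimal sketch of that shape (the *Sketch types and Demo entry point are illustrative stand-ins, not the repository's actual classes):

using System;
using System.Collections.Generic;

public interface IProcessDataSketch
{
    List<object> Details { get; }
}

public class ProcessDataSketch : IProcessDataSketch
{
    private readonly List<object> _Details = new();

    List<object> IProcessDataSketch.Details => _Details;

    public ProcessDataSketch(IEnumerable<string> rawLines)
    {
        // In the real class, Parse(...) fills _Details with
        // Tuple<string, bool, DateTime, string> entries
        // (cassette id, isBioRad flag, timestamp, data text).
        foreach (string line in rawLines)
            _Details.Add(Tuple.Create(line, false, DateTime.UtcNow, line));
    }
}

public static class Demo
{
    public static void Main()
    {
        IProcessDataSketch iProcessData = new ProcessDataSketch(
            new[] { "Cassette 1 started", "Cassette 1 finished" });
        // Callers (FileRead.Extract in the diff below) now iterate Details
        // instead of a List<Tuple<...>> returned by a static GetTuples helper.
        foreach (object item in iProcessData.Details)
        {
            if (item is not Tuple<string, bool, DateTime, string> tuple)
                continue;
            Console.WriteLine($"{tuple.Item1} @ {tuple.Item3.Ticks}");
        }
    }
}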


@@ -117,33 +117,39 @@ public class FileRead : Shared.FileRead, IFileRead
string directoryName = Path.GetDirectoryName(reportFullPath);
string sequenceDirectoryName = Path.Combine(directoryName, _Logistics.Sequence.ToString());
string originalDataBioRad = Path.Combine(directoryName, $"{_OriginalDataBioRad}{_Logistics.Sequence}.txt");
List<Tuple<string, bool, DateTime, string>> tuples = ProcessData.GetTuples(this, _Logistics, _TickOffset.Value, results.Item4, _OriginalDataBioRad);
if (_IsEAFHosted)
IProcessData iProcessData = new ProcessData(this, _Logistics, _TickOffset.Value, results.Item4, _OriginalDataBioRad);
if (_IsEAFHosted && iProcessData.Details.Any())
{
if (!Directory.Exists(sequenceDirectoryName))
_ = Directory.CreateDirectory(sequenceDirectoryName);
File.Move(reportFullPath, originalDataBioRad);
_Log.Debug(string.Concat("****Extract() - Renamed [", reportFullPath, "] to [", originalDataBioRad, "]"));
}
for (int i = 0; i < iProcessData.Details.Count; i++)
{
if (iProcessData.Details[i] is not Tuple<string, bool, DateTime, string> tuple)
continue;
isBioRad = tuple.Item2;
dataText = tuple.Item4;
cassetteID = tuple.Item1;
cassetteDateTime = tuple.Item3;
if (isBioRad)
tupleFileName = string.Concat("DetailDataBioRad_", cassetteID, "_", cassetteDateTime.Ticks, ".txt");
else
tupleFileName = string.Concat("CassetteDataBioRad_", cassetteID, "_", cassetteDateTime.Ticks, ".txt");
fileNameTemp = Path.Combine(sequenceDirectoryName, tupleFileName);
if (!_IsEAFHosted)
continue;
File.WriteAllText(fileNameTemp, dataText);
File.SetLastWriteTime(fileNameTemp, cassetteDateTime);
if (_Logistics.Sequence != cassetteDateTime.Ticks && File.Exists(originalDataBioRad))
File.Copy(originalDataBioRad, Path.Combine(directoryName, $"{_OriginalDataBioRad}{cassetteDateTime.Ticks}.txt"));
File.Move(fileNameTemp, Path.Combine(directoryName, tupleFileName));
}
if (!_IsEAFHosted)
results = iProcessData.GetResults(this, _Logistics, results.Item4);
else
{
if (tuples.Any())
{
if (!Directory.Exists(sequenceDirectoryName))
_ = Directory.CreateDirectory(sequenceDirectoryName);
File.Move(reportFullPath, originalDataBioRad);
_Log.Debug(string.Concat("****Extract() - Renamed [", reportFullPath, "] to [", originalDataBioRad, "]"));
}
foreach (Tuple<string, bool, DateTime, string> tuple in tuples)
{
isBioRad = tuple.Item2;
dataText = tuple.Item4;
cassetteID = tuple.Item1;
cassetteDateTime = tuple.Item3;
if (isBioRad)
tupleFileName = string.Concat("DetailDataBioRad_", cassetteID, "_", cassetteDateTime.Ticks, ".txt");
else
tupleFileName = string.Concat("CassetteDataBioRad_", cassetteID, "_", cassetteDateTime.Ticks, ".txt");
fileNameTemp = Path.Combine(sequenceDirectoryName, tupleFileName);
File.WriteAllText(fileNameTemp, dataText);
File.SetLastWriteTime(fileNameTemp, cassetteDateTime);
if (_Logistics.Sequence != cassetteDateTime.Ticks && File.Exists(originalDataBioRad))
File.Copy(originalDataBioRad, Path.Combine(directoryName, $"{_OriginalDataBioRad}{cassetteDateTime.Ticks}.txt"));
File.Move(fileNameTemp, Path.Combine(directoryName, tupleFileName));
}
if (Directory.Exists(sequenceDirectoryName))
Directory.Delete(sequenceDirectoryName);
}


@@ -1,4 +1,5 @@
using Adaptation.Shared;
using Adaptation.Shared.Duplicator;
using Adaptation.Shared.Methods;
using log4net;
using System;
@@ -6,17 +7,61 @@ using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Text.Json;
using System.Text.RegularExpressions;
namespace Adaptation.FileHandlers.txt;
public partial class ProcessData
public partial class ProcessData : IProcessData
{
internal static List<Tuple<string, bool, DateTime, string>> GetTuples(FileRead fileRead, Logistics logistics, long tickOffset, List<FileInfo> fileInfoCollection, string originalDataBioRad)
private readonly ILog _Log;
private readonly List<object> _Details;
public string JobID { get; set; }
public string MesEntity { get; set; }
List<object> Shared.Properties.IProcessData.Details => _Details;
public ProcessData(IFileRead fileRead, Logistics logistics, long tickOffset, List<FileInfo> fileInfoCollection, string originalDataBioRad)
{
JobID = logistics.JobID;
fileInfoCollection.Clear();
_Details = new List<object>();
MesEntity = logistics.MesEntity;
_Log = LogManager.GetLogger(typeof(ProcessData));
List<Tuple<string, bool, DateTime, string>> tuples = Parse(fileRead, logistics, tickOffset, fileInfoCollection, originalDataBioRad);
_Details.AddRange(tuples);
}
string IProcessData.GetCurrentReactor(IFileRead fileRead, Logistics logistics, Dictionary<string, string> reactors) => throw new Exception(string.Concat("See ", nameof(Parse)));
Tuple<string, Test[], JsonElement[], List<FileInfo>> IProcessData.GetResults(IFileRead fileRead, Logistics logistics, List<FileInfo> fileInfoCollection)
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
List<Test> tests = new();
foreach (object item in _Details)
tests.Add(Test.BioRadStratus);
List<IDescription> descriptions = fileRead.GetDescriptions(fileRead, tests, this);
if (tests.Count != descriptions.Count)
throw new Exception();
for (int i = 0; i < tests.Count; i++)
{
if (descriptions[i] is not Description description)
throw new Exception();
if (description.Test != (int)tests[i])
throw new Exception();
}
List<Description> fileReadDescriptions = (from l in descriptions select (Description)l).ToList();
string json = JsonSerializer.Serialize(fileReadDescriptions, fileReadDescriptions.GetType());
JsonElement[] jsonElements = JsonSerializer.Deserialize<JsonElement[]>(json);
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(logistics.Logistics1[0], tests.ToArray(), jsonElements, fileInfoCollection);
return results;
}
private List<Tuple<string, bool, DateTime, string>> Parse(IFileRead fileRead, Logistics logistics, long tickOffset, List<FileInfo> fileInfoCollection, string originalDataBioRad)
{
List<Tuple<string, bool, DateTime, string>> results = new();
ILog log = LogManager.GetLogger(typeof(ProcessData));
string[] reportFullPathlines = File.ReadAllLines(logistics.ReportFullPath);
// ***********************************************************************************
// * Step #2 - Verify completeness of each cassette scan in the raw data source file *
@@ -38,12 +83,12 @@ public partial class ProcessData
if (line.Contains("Cassette") && line.Contains("started") && (cassetteScanCompleted is null || cassetteScanCompleted.Value))
{
cassetteScanCompleted = false;
log.Debug("****Extract() - CassetteScanCompleted = FALSE");
_Log.Debug("****Extract() - CassetteScanCompleted = FALSE");
}
else if (line.Contains("Cassette") && line.Contains("finished") && (cassetteScanCompleted is null || !cassetteScanCompleted.Value))
{
cassetteScanCompleted = true;
log.Debug("****Extract() - CassetteScanCompleted = TRUE");
_Log.Debug("****Extract() - CassetteScanCompleted = TRUE");
}
}
Dictionary<string, List<string>> cassetteIDAndDataSets;
@@ -54,7 +99,7 @@ public partial class ProcessData
cassetteIDAndDataSets = new();
// Raw source file has an incomplete data set or it only contains a "Process failed" and should not be
// processed /split yet. Simply get out of this routine until enough data has been appended to the file.
log.Debug($"****Extract() - Raw source file has an incomplete data set and should not be processed yet.");
_Log.Debug($"****Extract() - Raw source file has an incomplete data set and should not be processed yet.");
}
else
cassetteIDAndDataSets = GetCassetteIDAndDataSets(reportFullPathlines);
@@ -102,7 +147,7 @@ public partial class ProcessData
recipeName = string.Empty;
stringIndex = dataText.IndexOf(recipeSearch);
recipeName = dataText.Substring(stringIndex + recipeSearch.Length);
log.Debug($"****Extract(FDR): recipeName = {recipeName}");
_Log.Debug($"****Extract(FDR): recipeName = {recipeName}");
#pragma warning disable CA2249
if (!string.IsNullOrEmpty(recipeName) && (recipeName.IndexOf("center", StringComparison.CurrentCultureIgnoreCase) >= 0))
#pragma warning restore CA2249
@@ -170,8 +215,8 @@ public partial class ProcessData
}
}
}
log.Debug($"****Extract(FDR): ToolType = {toolType}");
log.Debug($"****Extract(FDR): DataType = {dataType}");
_Log.Debug($"****Extract(FDR): ToolType = {toolType}");
_Log.Debug($"****Extract(FDR): DataType = {dataType}");
if (!isBioRad)
{
cassetteDateTime = logistics.DateTimeFromSequence.AddTicks(i * -1);
@@ -182,7 +227,7 @@ public partial class ProcessData
processData = new Stratus.ProcessData(fileRead, logistics, fileInfoCollection, originalDataBioRad, dataText: dataText);
iProcessData = processData;
if (!iProcessData.Details.Any())
log.Warn("No Details!");
_Log.Warn("No Details!");
else
{
foreach (object item in iProcessData.Details)
@@ -243,9 +288,9 @@ public partial class ProcessData
if (logistics.DateTimeFromSequence != afterCheck)
{
results.Clear();
log.Debug($"****Extract() - DataBioRad.txt file is getting updated fast");
log.Debug($"****Extract() - DataBioRadDateTime_AfterCheck = {afterCheck.Ticks}");
log.Debug($"****Extract() - DataBioRadDateTime_BeforeCheck = {logistics.Sequence}");
_Log.Debug($"****Extract() - DataBioRad.txt file is getting updated fast");
_Log.Debug($"****Extract() - DataBioRadDateTime_AfterCheck = {afterCheck.Ticks}");
_Log.Debug($"****Extract() - DataBioRadDateTime_BeforeCheck = {logistics.Sequence}");
}
return results;
}
@@ -503,4 +548,5 @@ public partial class ProcessData
}
return results;
}
}