Initial Commit
135
Adaptation/FileHandlers/APC/FileRead.cs
Normal file
@@ -0,0 +1,135 @@
using Adaptation.Eaf.Management.ConfigurationData.CellAutomation;
using Adaptation.Ifx.Eaf.EquipmentConnector.File.Configuration;
using Adaptation.Shared;
using Adaptation.Shared.Duplicator;
using Adaptation.Shared.Methods;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text.Json;

namespace Adaptation.FileHandlers.APC;

public class FileRead : Shared.FileRead, IFileRead
{

    public FileRead(ISMTP smtp, Dictionary<string, string> fileParameter, string cellInstanceName, int? connectionCount, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList<ModelObjectParameterDefinition> modelObjectParameters, string equipmentDictionaryName, Dictionary<string, List<long>> dummyRuns, Dictionary<long, List<string>> staticRuns, bool useCyclicalForDescription, bool isEAFHosted) :
        base(new Description(), false, smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null)
    {
        _MinFileLength = 10;
        _NullData = string.Empty;
        _Logistics = new(this);
        if (_FileParameter is null)
            throw new Exception(cellInstanceConnectionName);
        if (_ModelObjectParameterDefinitions is null)
            throw new Exception(cellInstanceConnectionName);
        if (!_IsDuplicator)
            throw new Exception(cellInstanceConnectionName);
    }

    void IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, Exception exception)
    {
        bool isErrorFile = exception is not null;
        if (!isErrorFile && !string.IsNullOrEmpty(_Logistics.ReportFullPath))
        {
            FileInfo fileInfo = new(_Logistics.ReportFullPath);
            if (fileInfo.Exists && fileInfo.LastWriteTime < fileInfo.CreationTime)
                File.SetLastWriteTime(_Logistics.ReportFullPath, fileInfo.CreationTime);
        }
        Move(extractResults);
    }

    void IFileRead.WaitForThread() => WaitForThread(thread: null, threadExceptions: null);

    string IFileRead.GetEventDescription()
    {
        string result = _Description.GetEventDescription();
        return result;
    }

    List<string> IFileRead.GetHeaderNames()
    {
        List<string> results = _Description.GetHeaderNames();
        return results;
    }

    string[] IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, string to, string from, string resolvedFileLocation, Exception exception)
    {
        string[] results = Move(extractResults, to, from, resolvedFileLocation, exception);
        return results;
    }

    JsonProperty[] IFileRead.GetDefault()
    {
        JsonProperty[] results = _Description.GetDefault(this, _Logistics);
        return results;
    }

    Dictionary<string, string> IFileRead.GetDisplayNamesJsonElement()
    {
        Dictionary<string, string> results = _Description.GetDisplayNamesJsonElement(this);
        return results;
    }

    List<IDescription> IFileRead.GetDescriptions(IFileRead fileRead, List<Test> tests, IProcessData processData)
    {
        List<IDescription> results = _Description.GetDescriptions(fileRead, _Logistics, tests, processData);
        return results;
    }

    Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.GetExtractResult(string reportFullPath, string eventName)
    {
        Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
        if (string.IsNullOrEmpty(eventName))
            throw new Exception();
        _ReportFullPath = reportFullPath;
        DateTime dateTime = DateTime.Now;
        results = GetExtractResult(reportFullPath, dateTime);
        if (results.Item3 is null)
            results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(results.Item1, Array.Empty<Test>(), JsonSerializer.Deserialize<JsonElement[]>("[]"), results.Item4);
        if (results.Item3.Length > 0 && _IsEAFHosted)
            WritePDSF(this, results.Item3);
        UpdateLastTicksDuration(DateTime.Now.Ticks - dateTime.Ticks);
        return results;
    }

    Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.ReExtract()
    {
        Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
        List<string> headerNames = _Description.GetHeaderNames();
        Dictionary<string, string> keyValuePairs = _Description.GetDisplayNamesJsonElement(this);
        results = ReExtract(this, headerNames, keyValuePairs);
        return results;
    }

    private void FileCopy<T>(string reportFullPath, DateTime dateTime, List<T> descriptions) where T : Shared.Properties.IDescription
    {
        bool isDummyRun = false;
        List<(Shared.Properties.IScopeInfo, string)> collection = new();
        string successDirectory = _FileConnectorConfiguration.AlternateTargetFolder;
        string fileNameAfterUnderscoreSplit = GetFileNameAfterUnderscoreSplit(reportFullPath);
        string duplicateDirectory = Path.Combine(_FileConnectorConfiguration.TargetFileLocation, fileNameAfterUnderscoreSplit);
        if (!Directory.Exists(duplicateDirectory))
            _ = Directory.CreateDirectory(duplicateDirectory);
        string duplicateFile = Path.Combine(duplicateDirectory, Path.GetFileName(reportFullPath));
        File.Copy(reportFullPath, duplicateFile, overwrite: true);
        WaitForFileConsumption(dateTime, descriptions, isDummyRun, successDirectory, duplicateDirectory, collection, duplicateFile);
    }

    private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
    {
        Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
        Tuple<string, string[], string[]> pdsf = ProcessDataStandardFormat.GetLogisticsColumnsAndBody(reportFullPath);
        _Logistics = new Logistics(reportFullPath, pdsf.Item1);
        SetFileParameterLotIDToLogisticsMID();
        JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(pdsf);
        List<Shared.Properties.IDescription> descriptions = GetDuplicatorDescriptions(jsonElements);
        Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
        if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
            FileCopy(reportFullPath, dateTime, descriptions);
        results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(pdsf.Item1, tests, jsonElements, new List<FileInfo>());
        return results;
    }

}
159
Adaptation/FileHandlers/Archive/FileRead.cs
Normal file
@@ -0,0 +1,159 @@
using Adaptation.Eaf.Management.ConfigurationData.CellAutomation;
using Adaptation.Ifx.Eaf.EquipmentConnector.File.Configuration;
using Adaptation.Shared;
using Adaptation.Shared.Duplicator;
using Adaptation.Shared.Methods;
using System;
using System.Collections.Generic;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Text.Json;

namespace Adaptation.FileHandlers.Archive;

public class FileRead : Shared.FileRead, IFileRead
{

    private readonly string _JobIdParentDirectory;
    private readonly string _JobIdArchiveParentDirectory;

    public FileRead(ISMTP smtp, Dictionary<string, string> fileParameter, string cellInstanceName, int? connectionCount, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList<ModelObjectParameterDefinition> modelObjectParameters, string equipmentDictionaryName, Dictionary<string, List<long>> dummyRuns, Dictionary<long, List<string>> staticRuns, bool useCyclicalForDescription, bool isEAFHosted) :
        base(new Description(), false, smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null)
    {
        _MinFileLength = 10;
        _NullData = string.Empty;
        _Logistics = new(this);
        if (_FileParameter is null)
            throw new Exception(cellInstanceConnectionName);
        if (_ModelObjectParameterDefinitions is null)
            throw new Exception(cellInstanceConnectionName);
        if (!_IsDuplicator)
            throw new Exception(cellInstanceConnectionName);
        _JobIdParentDirectory = GetJobIdParentDirectory(_FileConnectorConfiguration.SourceFileLocation);
        _JobIdArchiveParentDirectory = GetJobIdParentDirectory(_FileConnectorConfiguration.TargetFileLocation);
    }

    void IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, Exception exception) => Move(extractResults);

    void IFileRead.WaitForThread() => WaitForThread(thread: null, threadExceptions: null);

    string IFileRead.GetEventDescription()
    {
        string result = _Description.GetEventDescription();
        return result;
    }

    List<string> IFileRead.GetHeaderNames()
    {
        List<string> results = _Description.GetHeaderNames();
        return results;
    }

    string[] IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, string to, string from, string resolvedFileLocation, Exception exception)
    {
        string[] results = Move(extractResults, to, from, resolvedFileLocation, exception);
        return results;
    }

    JsonProperty[] IFileRead.GetDefault()
    {
        JsonProperty[] results = _Description.GetDefault(this, _Logistics);
        return results;
    }

    Dictionary<string, string> IFileRead.GetDisplayNamesJsonElement()
    {
        Dictionary<string, string> results = _Description.GetDisplayNamesJsonElement(this);
        return results;
    }

    List<IDescription> IFileRead.GetDescriptions(IFileRead fileRead, List<Test> tests, IProcessData processData)
    {
        List<IDescription> results = _Description.GetDescriptions(fileRead, _Logistics, tests, processData);
        return results;
    }

    Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.GetExtractResult(string reportFullPath, string eventName)
    {
        Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
        if (string.IsNullOrEmpty(eventName))
            throw new Exception();
        _ReportFullPath = reportFullPath;
        DateTime dateTime = DateTime.Now;
        results = GetExtractResult(reportFullPath, dateTime);
        if (results.Item3 is null)
            results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(results.Item1, Array.Empty<Test>(), JsonSerializer.Deserialize<JsonElement[]>("[]"), results.Item4);
        if (results.Item3.Length > 0 && _IsEAFHosted)
            WritePDSF(this, results.Item3);
        UpdateLastTicksDuration(DateTime.Now.Ticks - dateTime.Ticks);
        return results;
    }

    Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.ReExtract()
    {
        Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
        List<string> headerNames = _Description.GetHeaderNames();
        Dictionary<string, string> keyValuePairs = _Description.GetDisplayNamesJsonElement(this);
        results = ReExtract(this, headerNames, keyValuePairs);
        return results;
    }

    private static IEnumerable<string> GetDirectoriesRecursively(string path, string directoryNameSegment = null)
    {
        Queue<string> queue = new();
        queue.Enqueue(path);
        while (queue.Count > 0)
        {
            path = queue.Dequeue();
            foreach (string subDirectory in Directory.GetDirectories(path))
            {
                queue.Enqueue(subDirectory);
                if (string.IsNullOrEmpty(directoryNameSegment) || Path.GetFileName(subDirectory).Contains(directoryNameSegment))
                    yield return subDirectory;
            }
        }
    }

#pragma warning disable IDE0060
    private void MoveArchive(string reportFullPath, DateTime dateTime)
#pragma warning restore IDE0060
    {
        string logisticsSequence = _Logistics.Sequence.ToString();
        string weekOfYear = _Calendar.GetWeekOfYear(_Logistics.DateTimeFromSequence, CalendarWeekRule.FirstDay, DayOfWeek.Sunday).ToString("00");
        string weekDirectory = $"{_Logistics.DateTimeFromSequence:yyyy}_Week_{weekOfYear}{@"\"}{_Logistics.DateTimeFromSequence:yyyy-MM-dd}";
        string destinationArchiveDirectory = Path.Combine(_JobIdArchiveParentDirectory, _Logistics.JobID, weekDirectory);
        if (!Directory.Exists(destinationArchiveDirectory))
            _ = Directory.CreateDirectory(destinationArchiveDirectory);
        string jobIdDirectory = Path.Combine(_JobIdParentDirectory, _Logistics.JobID);
        if (!Directory.Exists(jobIdDirectory))
            _ = Directory.CreateDirectory(jobIdDirectory);
        if (!Directory.GetDirectories(jobIdDirectory).Any())
            File.Copy(reportFullPath, Path.Combine(destinationArchiveDirectory, Path.GetFileName(reportFullPath)));
        else
        {
            string[] matchDirectories = GetDirectoriesRecursively(jobIdDirectory, logisticsSequence).ToArray();
            if (matchDirectories.Length != 1)
                throw new Exception("Didn't find directory by logistics sequence");
            string sourceDirectory = Path.GetDirectoryName(matchDirectories[0]);
            destinationArchiveDirectory = Path.Combine(destinationArchiveDirectory, Path.GetFileName(sourceDirectory));
            Directory.Move(sourceDirectory, destinationArchiveDirectory);
        }
    }

    private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
    {
        Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
        Tuple<string, string[], string[]> pdsf = ProcessDataStandardFormat.GetLogisticsColumnsAndBody(reportFullPath);
        _Logistics = new Logistics(reportFullPath, pdsf.Item1);
        SetFileParameterLotIDToLogisticsMID();
        JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(pdsf);
        List<Shared.Properties.IDescription> descriptions = GetDuplicatorDescriptions(jsonElements);
        Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
        if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
            MoveArchive(reportFullPath, dateTime);
        results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(pdsf.Item1, tests, jsonElements, new List<FileInfo>());
        return results;
    }

}
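MoveArchive above files each run under <JobID>\<yyyy>_Week_<ww>\<yyyy-MM-dd> relative to the archive parent directory. A minimal standalone sketch of the week-directory computation, assuming a Gregorian calendar comparable to the shared _Calendar instance (the date value here is only illustrative):

// Sketch only: how the weekly archive sub-path is derived from the run's timestamp.
Calendar calendar = new CultureInfo("en-US").Calendar; // assumption: _Calendar behaves like this
DateTime dateTimeFromSequence = new(2022, 8, 1);
string weekOfYear = calendar.GetWeekOfYear(dateTimeFromSequence, CalendarWeekRule.FirstDay, DayOfWeek.Sunday).ToString("00");
string weekDirectory = $"{dateTimeFromSequence:yyyy}_Week_{weekOfYear}\\{dateTimeFromSequence:yyyy-MM-dd}";
// weekDirectory => "2022_Week_32\2022-08-01"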
34
Adaptation/FileHandlers/CellInstanceConnectionName.cs
Normal file
@@ -0,0 +1,34 @@
using Adaptation.Eaf.Management.ConfigurationData.CellAutomation;
using Adaptation.Ifx.Eaf.EquipmentConnector.File.Configuration;
using Adaptation.Shared.Methods;
using System;
using System.Collections.Generic;

namespace Adaptation.FileHandlers;

public class CellInstanceConnectionName
{

    internal static IFileRead Get(ISMTP smtp, Dictionary<string, string> fileParameter, string cellInstanceName, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList<ModelObjectParameterDefinition> modelObjectParameters, string equipmentDictionaryName, Dictionary<string, List<long>> dummyRuns, Dictionary<long, List<string>> staticRuns, bool useCyclicalForDescription, int? connectionCount)
    {
        IFileRead result = cellInstanceConnectionName switch
        {
            nameof(APC) => new APC.FileRead(smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null),
            nameof(Archive) => new Archive.FileRead(smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null),
            nameof(DownloadSRPxFile) => new DownloadSRPxFile.FileRead(smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null),
            nameof(Dummy) => new Dummy.FileRead(smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null),
            nameof(IQSSi) => new IQSSi.FileRead(smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null),
            nameof(MoveMatchingFiles) => new MoveMatchingFiles.FileRead(smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null),
            nameof(OpenInsight) => new OpenInsight.FileRead(smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null),
            nameof(OpenInsightMetrologyViewer) => new OpenInsightMetrologyViewer.FileRead(smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null),
            nameof(OpenInsightMetrologyViewerAttachments) => new OpenInsightMetrologyViewerAttachments.FileRead(smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null),
            nameof(Processed) => new Processed.FileRead(smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null),
            nameof(SPaCe) => new SPaCe.FileRead(smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null),
            nameof(csv) => new csv.FileRead(smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null),
            nameof(json) => new json.FileRead(smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null),
            _ => throw new Exception($"\"{cellInstanceConnectionName}\" not mapped")
        };
        return result;
    }

}
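CellInstanceConnectionName.Get is the single factory that maps a cell instance connection name onto the matching handler. A minimal sketch of a call site, assuming the surrounding configuration objects are supplied by the EAF host (the local variable names here are hypothetical, not part of this commit):

// Hypothetical call site inside the same assembly (Get is internal).
IFileRead fileRead = CellInstanceConnectionName.Get(
    smtp, fileParameter, cellInstanceName, "Archive", fileConnectorConfiguration,
    equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters,
    equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription: false,
    connectionCount: null); // handlers pass isEAFHosted: connectionCount is null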
273
Adaptation/FileHandlers/DownloadSRPxFile/FileRead.cs
Normal file
@@ -0,0 +1,273 @@
using Adaptation.Eaf.Management.ConfigurationData.CellAutomation;
using Adaptation.Ifx.Eaf.EquipmentConnector.File.Configuration;
using Adaptation.Shared;
using Adaptation.Shared.Duplicator;
using Adaptation.Shared.Methods;
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Net.Http;
using System.Text.Json;
using System.Threading;

namespace Adaptation.FileHandlers.DownloadSRPxFile;

public class FileRead : Shared.FileRead, IFileRead
{

    private readonly Timer _Timer;
    private readonly HttpClient _HttpClient;
    private readonly string _StaticFileServer;

    public FileRead(ISMTP smtp, Dictionary<string, string> fileParameter, string cellInstanceName, int? connectionCount, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList<ModelObjectParameterDefinition> modelObjectParameters, string equipmentDictionaryName, Dictionary<string, List<long>> dummyRuns, Dictionary<long, List<string>> staticRuns, bool useCyclicalForDescription, bool isEAFHosted) :
        base(new Description(), false, smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null)
    {
        _MinFileLength = 10;
        _NullData = string.Empty;
        _Logistics = new(this);
        if (_FileParameter is null)
            throw new Exception(cellInstanceConnectionName);
        if (_ModelObjectParameterDefinitions is null)
            throw new Exception(cellInstanceConnectionName);
        if (_IsDuplicator)
            throw new Exception(cellInstanceConnectionName);
        _HttpClient = new();
        _StaticFileServer = GetPropertyValue(cellInstanceConnectionName, modelObjectParameters, string.Concat("CellInstance.", cellInstanceName, ".StaticFileServer"));
        if (!Debugger.IsAttached && fileConnectorConfiguration.PreProcessingMode != FileConnectorConfiguration.PreProcessingModeEnum.Process)
            _Timer = new Timer(Callback, null, (int)(fileConnectorConfiguration.FileScanningIntervalInSeconds * 1000), Timeout.Infinite);
        else
        {
            _Timer = new Timer(Callback, null, Timeout.Infinite, Timeout.Infinite);
            Callback(null);
        }
    }

    void IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, Exception exception) => Move(extractResults);

    void IFileRead.WaitForThread() => WaitForThread(thread: null, threadExceptions: null);

    string IFileRead.GetEventDescription()
    {
        string result = _Description.GetEventDescription();
        return result;
    }

    List<string> IFileRead.GetHeaderNames()
    {
        List<string> results = _Description.GetHeaderNames();
        return results;
    }

    string[] IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, string to, string from, string resolvedFileLocation, Exception exception)
    {
        string[] results = Move(extractResults, to, from, resolvedFileLocation, exception);
        return results;
    }

    JsonProperty[] IFileRead.GetDefault()
    {
        JsonProperty[] results = _Description.GetDefault(this, _Logistics);
        return results;
    }

    Dictionary<string, string> IFileRead.GetDisplayNamesJsonElement()
    {
        Dictionary<string, string> results = _Description.GetDisplayNamesJsonElement(this);
        return results;
    }

    List<IDescription> IFileRead.GetDescriptions(IFileRead fileRead, List<Test> tests, IProcessData processData)
    {
        List<IDescription> results = _Description.GetDescriptions(fileRead, _Logistics, tests, processData);
        return results;
    }

    Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.GetExtractResult(string reportFullPath, string eventName)
    {
        Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
        if (string.IsNullOrEmpty(eventName))
            throw new Exception();
        _ReportFullPath = reportFullPath;
        DateTime dateTime = DateTime.Now;
        results = GetExtractResult(reportFullPath, dateTime);
        if (results.Item3 is null)
            results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(results.Item1, Array.Empty<Test>(), JsonSerializer.Deserialize<JsonElement[]>("[]"), results.Item4);
        if (results.Item3.Length > 0 && _IsEAFHosted)
            WritePDSF(this, results.Item3);
        UpdateLastTicksDuration(DateTime.Now.Ticks - dateTime.Ticks);
        return results;
    }

    Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.ReExtract()
    {
        Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
        List<string> headerNames = _Description.GetHeaderNames();
        Dictionary<string, string> keyValuePairs = _Description.GetDisplayNamesJsonElement(this);
        results = ReExtract(this, headerNames, keyValuePairs);
        return results;
    }

    private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime) => throw new Exception(string.Concat("See ", nameof(Callback)));

    private void DownloadSRPxFileAsync()
    {
        if (_HttpClient is null)
            throw new Exception();
        if (string.IsNullOrEmpty(_StaticFileServer))
            throw new Exception();
        string logUrl;
        string logText;
        string runJson;
        string rootJson;
        string targetJson;
        string[] logLines;
        string runFileName;
        string[] logSegments;
        string targetFileName;
        string runFullFileName;
        FileInfo targetFileInfo;
        FileInfo alternateFileInfo;
        List<string> runFullFileNameSegments;
        string dateTimeFormat = "yy/MM/dd HH:mm:ss";
        NginxFileSystem[] runNginxFileSystemCollection;
        NginxFileSystem[] rootNginxFileSystemCollection;
        NginxFileSystem[] innerNginxFileSystemCollection;
        DateTime fileAgeThresholdDateTime = DateTime.Now;
        string nginxFormat = "ddd, dd MMM yyyy HH:mm:ss zzz";
        List<Tuple<DateTime, FileInfo, FileInfo, string>> possibleDownload = new();
        string[] segments = _FileConnectorConfiguration.FileAgeThreshold.Split(':');
        JsonSerializerOptions propertyNameCaseInsensitiveJsonSerializerOptions = new() { PropertyNameCaseInsensitive = true };
        for (int i = 0; i < segments.Length; i++)
        {
            fileAgeThresholdDateTime = i switch
            {
                0 => fileAgeThresholdDateTime.AddDays(double.Parse(segments[i]) * -1),
                1 => fileAgeThresholdDateTime.AddHours(double.Parse(segments[i]) * -1),
                2 => fileAgeThresholdDateTime.AddMinutes(double.Parse(segments[i]) * -1),
                3 => fileAgeThresholdDateTime.AddSeconds(double.Parse(segments[i]) * -1),
                _ => throw new Exception(),
            };
        }
        rootJson = _HttpClient.GetStringAsync(string.Concat("http://", _StaticFileServer)).Result;
        rootNginxFileSystemCollection = JsonSerializer.Deserialize<NginxFileSystem[]>(rootJson, propertyNameCaseInsensitiveJsonSerializerOptions);
        foreach (NginxFileSystem rootNginxFileSystem in rootNginxFileSystemCollection)
        {
            if (!(from l in _FileConnectorConfiguration.SourceFileFilters where rootNginxFileSystem.Name == l select false).Any())
                continue;
            logUrl = string.Empty;
            if (rootNginxFileSystem.Name.Contains("."))
                logUrl = string.Concat("http://", _StaticFileServer, '/', rootNginxFileSystem.Name);
            else
            {
                rootJson = _HttpClient.GetStringAsync(string.Concat("http://", _StaticFileServer, '/', rootNginxFileSystem.Name)).Result;
                innerNginxFileSystemCollection = JsonSerializer.Deserialize<NginxFileSystem[]>(rootJson, propertyNameCaseInsensitiveJsonSerializerOptions);
                foreach (NginxFileSystem innerNginxFileSystem in innerNginxFileSystemCollection)
                {
                    if (!(from l in _FileConnectorConfiguration.SourceFileFilters where innerNginxFileSystem.Name == l select false).Any())
                        continue;
                    logUrl = string.Concat("http://", _StaticFileServer, '/', rootNginxFileSystem.Name, '/', innerNginxFileSystem.Name);
                    break;
                }
            }
            if (string.IsNullOrEmpty(logUrl))
                continue;
            logText = _HttpClient.GetStringAsync(logUrl).Result;
            logLines = logText.Split(new string[] { Environment.NewLine }, StringSplitOptions.None);
            foreach (string logLine in logLines)
            {
                if (string.IsNullOrEmpty(logLine))
                    continue;
                logSegments = logLine.Split('<');
                if (logSegments.Length < 1 || !DateTime.TryParseExact(logSegments[0].Trim(), dateTimeFormat, CultureInfo.InvariantCulture, DateTimeStyles.None, out DateTime tvsDateTime))
                    continue;
                if (tvsDateTime < fileAgeThresholdDateTime)
                    continue;
                if (logSegments.Length < 2)
                    continue;
                runFullFileName = logSegments[1].Split('>')[0];
                if (!(from l in _FileConnectorConfiguration.SourceFileFilters where runFullFileName.EndsWith(l) select false).Any())
                    continue;
                runFullFileNameSegments = runFullFileName.Split('\\').ToList();
                runFileName = runFullFileNameSegments[runFullFileNameSegments.Count - 1];
                runFullFileNameSegments.RemoveAt(runFullFileNameSegments.Count - 1);
                runJson = _HttpClient.GetStringAsync(string.Concat("http://", _StaticFileServer, '/', string.Join("/", runFullFileNameSegments))).Result;
                runFullFileNameSegments.Add(runFileName);
                runNginxFileSystemCollection = JsonSerializer.Deserialize<NginxFileSystem[]>(runJson, propertyNameCaseInsensitiveJsonSerializerOptions);
                foreach (NginxFileSystem matchNginxFileSystem in runNginxFileSystemCollection)
                {
                    if (matchNginxFileSystem.Name != runFileName)
                        continue;
                    if (!(from l in _FileConnectorConfiguration.SourceFileFilters where matchNginxFileSystem.Name.EndsWith(l) select false).Any())
                        continue;
                    if (!DateTime.TryParseExact(matchNginxFileSystem.MTime.Replace("GMT", "+00:00"), nginxFormat, CultureInfo.InvariantCulture, DateTimeStyles.None, out DateTime matchNginxFileSystemDateTime))
                        continue;
                    if (matchNginxFileSystemDateTime < fileAgeThresholdDateTime)
                        continue;
                    targetFileInfo = new FileInfo(Path.Combine(_FileConnectorConfiguration.TargetFileLocation, runFullFileName));
                    if (!Directory.Exists(targetFileInfo.Directory.FullName))
                        _ = Directory.CreateDirectory(targetFileInfo.Directory.FullName);
                    if (targetFileInfo.Exists && targetFileInfo.LastWriteTime == matchNginxFileSystemDateTime)
                        continue;
                    alternateFileInfo = new(Path.Combine(_FileConnectorConfiguration.AlternateTargetFolder, matchNginxFileSystem.Name));
                    targetFileName = string.Concat("http://", _StaticFileServer, '/', string.Join("/", runFullFileNameSegments));
                    possibleDownload.Add(new(matchNginxFileSystemDateTime, targetFileInfo, alternateFileInfo, targetFileName));
                    break;
                }
                if (possibleDownload.Any())
                    break;
            }
            if (possibleDownload.Any())
                break;
        }
        if (possibleDownload.Any())
        {
            possibleDownload = (from l in possibleDownload orderby l.Item1 select l).ToList();
            alternateFileInfo = possibleDownload[0].Item3;
            targetFileName = possibleDownload[0].Item4;
            targetFileInfo = possibleDownload[0].Item2;
            DateTime matchNginxFileSystemDateTime = possibleDownload[0].Item1;
            if (alternateFileInfo.Exists)
                File.Delete(alternateFileInfo.FullName);
            if (targetFileInfo.Exists)
                File.Delete(targetFileInfo.FullName);
            targetJson = _HttpClient.GetStringAsync(targetFileName).Result;
            File.WriteAllText(targetFileInfo.FullName, targetJson);
            targetFileInfo.LastWriteTime = matchNginxFileSystemDateTime;
            File.Copy(targetFileInfo.FullName, alternateFileInfo.FullName);
        }
    }

    private void Callback(object state)
    {
        try
        {
            if (_IsEAFHosted)
                DownloadSRPxFileAsync();
        }
        catch (Exception exception)
        {
            string subject = string.Concat("Exception:", _CellInstanceConnectionName);
            string body = string.Concat(exception.Message, Environment.NewLine, Environment.NewLine, exception.StackTrace);
            try
            { _SMTP.SendHighPriorityEmailMessage(subject, body); }
            catch (Exception) { }
        }
        try
        {
            TimeSpan timeSpan = new(DateTime.Now.AddSeconds(_FileConnectorConfiguration.FileScanningIntervalInSeconds.Value).Ticks - DateTime.Now.Ticks);
            _ = _Timer.Change((long)timeSpan.TotalMilliseconds, Timeout.Infinite);
        }
        catch (Exception exception)
        {
            string subject = string.Concat("Exception:", _CellInstanceConnectionName);
            string body = string.Concat(exception.Message, Environment.NewLine, Environment.NewLine, exception.StackTrace);
            try
            { _SMTP.SendHighPriorityEmailMessage(subject, body); }
            catch (Exception) { }
        }
    }

}
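DownloadSRPxFileAsync converts the connector's FileAgeThreshold string (colon-separated segments interpreted positionally as days:hours:minutes:seconds) into a cutoff DateTime before scanning the static file server. A standalone sketch of that calculation, assuming the same positional convention (the helper name and sample value are illustrative only):

// Sketch only: mirrors the days:hours:minutes:seconds convention used above.
static DateTime GetFileAgeThreshold(string fileAgeThreshold, DateTime now)
{
    DateTime result = now;
    string[] segments = fileAgeThreshold.Split(':');
    for (int i = 0; i < segments.Length; i++)
        result = i switch
        {
            0 => result.AddDays(double.Parse(segments[i]) * -1),
            1 => result.AddHours(double.Parse(segments[i]) * -1),
            2 => result.AddMinutes(double.Parse(segments[i]) * -1),
            3 => result.AddSeconds(double.Parse(segments[i]) * -1),
            _ => throw new Exception(),
        };
    return result;
}
// GetFileAgeThreshold("0:6:0:0", DateTime.Now) yields a cutoff six hours in the past.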
11
Adaptation/FileHandlers/DownloadSRPxFile/NginxFileSystem.cs
Normal file
@@ -0,0 +1,11 @@
namespace Adaptation.FileHandlers.DownloadSRPxFile;

internal class NginxFileSystem
{

    public string Name { get; set; }
    public string Type { get; set; }
    public string MTime { get; set; }
    public float Size { get; set; }

}
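NginxFileSystem mirrors one entry of an nginx autoindex listing served in JSON format (name, type, mtime, size). A minimal deserialization sketch, assuming System.Text.Json is in scope and using the same case-insensitive options as DownloadSRPxFile.FileRead (the sample payload is illustrative only):

// Sketch: shape of one autoindex entry and how the handler reads it.
string json = "[{\"name\":\"run.json\",\"type\":\"file\",\"mtime\":\"Mon, 01 Aug 2022 12:00:00 GMT\",\"size\":1024}]";
JsonSerializerOptions options = new() { PropertyNameCaseInsensitive = true };
NginxFileSystem[] entries = JsonSerializer.Deserialize<NginxFileSystem[]>(json, options);
// entries[0].MTime is later parsed with "ddd, dd MMM yyyy HH:mm:ss zzz" after replacing "GMT" with "+00:00".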
286
Adaptation/FileHandlers/Dummy/FileRead.cs
Normal file
@@ -0,0 +1,286 @@
using Adaptation.Eaf.Management.ConfigurationData.CellAutomation;
using Adaptation.Ifx.Eaf.EquipmentConnector.File.Configuration;
using Adaptation.Shared;
using Adaptation.Shared.Duplicator;
using Adaptation.Shared.Methods;
using Infineon.Monitoring.MonA;
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Globalization;
using System.IO;
using System.IO.Compression;
using System.Linq;
using System.Text.Json;
using System.Threading;

namespace Adaptation.FileHandlers.Dummy;

public class FileRead : Shared.FileRead, IFileRead
{

    private readonly Timer _Timer;
    private int _LastDummyRunIndex;
    private readonly string[] _CellNames;

    public FileRead(ISMTP smtp, Dictionary<string, string> fileParameter, string cellInstanceName, int? connectionCount, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList<ModelObjectParameterDefinition> modelObjectParameters, string equipmentDictionaryName, Dictionary<string, List<long>> dummyRuns, Dictionary<long, List<string>> staticRuns, bool useCyclicalForDescription, bool isEAFHosted) :
        base(new Description(), false, smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null)
    {
        _MinFileLength = 10;
        _NullData = string.Empty;
        _Logistics = new(this);
        if (_FileParameter is null)
            throw new Exception(cellInstanceConnectionName);
        if (_ModelObjectParameterDefinitions is null)
            throw new Exception(cellInstanceConnectionName);
        if (!_IsDuplicator)
            throw new Exception(cellInstanceConnectionName);
        _LastDummyRunIndex = -1;
        List<string> cellNames = new();
        _Timer = new Timer(Callback, null, Timeout.Infinite, Timeout.Infinite);
        ModelObjectParameterDefinition[] cellInstanceCollection = GetProperties(cellInstanceConnectionName, modelObjectParameters, "CellInstance.", ".Alias");
        foreach (ModelObjectParameterDefinition modelObjectParameterDefinition in cellInstanceCollection)
            cellNames.Add(modelObjectParameterDefinition.Name.Split('.')[1]);
        _CellNames = cellNames.ToArray();
        if (Debugger.IsAttached || fileConnectorConfiguration.PreProcessingMode == FileConnectorConfiguration.PreProcessingModeEnum.Process)
            Callback(null);
        else
        {
            TimeSpan timeSpan = new(DateTime.Now.AddSeconds(_FileConnectorConfiguration.FileScanningIntervalInSeconds.Value).Ticks - DateTime.Now.Ticks);
            _ = _Timer.Change((long)timeSpan.TotalMilliseconds, Timeout.Infinite);
        }
    }

    void IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, Exception exception) => Move(extractResults);

    void IFileRead.WaitForThread() => WaitForThread(thread: null, threadExceptions: null);

    string IFileRead.GetEventDescription()
    {
        string result = _Description.GetEventDescription();
        return result;
    }

    List<string> IFileRead.GetHeaderNames()
    {
        List<string> results = _Description.GetHeaderNames();
        return results;
    }

    string[] IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, string to, string from, string resolvedFileLocation, Exception exception)
    {
        string[] results = Move(extractResults, to, from, resolvedFileLocation, exception);
        return results;
    }

    JsonProperty[] IFileRead.GetDefault()
    {
        JsonProperty[] results = _Description.GetDefault(this, _Logistics);
        return results;
    }

    Dictionary<string, string> IFileRead.GetDisplayNamesJsonElement()
    {
        Dictionary<string, string> results = _Description.GetDisplayNamesJsonElement(this);
        return results;
    }

    List<IDescription> IFileRead.GetDescriptions(IFileRead fileRead, List<Test> tests, IProcessData processData)
    {
        List<IDescription> results = _Description.GetDescriptions(fileRead, _Logistics, tests, processData);
        return results;
    }

    Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.GetExtractResult(string reportFullPath, string eventName) => throw new Exception(string.Concat("See ", nameof(CallbackFileExists)));

    Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.ReExtract() => throw new Exception(string.Concat("See ", nameof(CallbackFileExists)));

    private void CallbackInProcessCleared(string sourceArchiveFile, string traceDummyFile, string targetFileLocation, string monARessource, string inProcessDirectory, long sequence, bool warning)
    {
        const string site = "sjc";
        string stateName = string.Concat("Dummy_", _EventName);
        const string monInURL = "http://moninhttp.sjc.infineon.com/input/text";
        MonIn monIn = MonIn.GetInstance(monInURL);
        try
        {
            if (warning)
            {
                File.AppendAllLines(traceDummyFile, new string[] { site, monARessource, stateName, State.Warning.ToString() });
                _ = monIn.SendStatus(site, monARessource, stateName, State.Warning);
                for (int i = 1; i < 12; i++)
                    Thread.Sleep(500);
            }
            ZipFile.ExtractToDirectory(sourceArchiveFile, inProcessDirectory);
            string[] files = Directory.GetFiles(inProcessDirectory, "*", SearchOption.TopDirectoryOnly);
            if (files.Length > 250)
                throw new Exception("Safety net!");
            foreach (string file in files)
                File.SetLastWriteTime(file, new DateTime(sequence));
            if (!_FileConnectorConfiguration.IncludeSubDirectories.Value)
            {
                foreach (string file in files)
                    File.Move(file, Path.Combine(targetFileLocation, Path.GetFileName(file)));
            }
            else
            {
                string[] directories = Directory.GetDirectories(inProcessDirectory, "*", SearchOption.AllDirectories);
                foreach (string directory in directories)
                    _ = Directory.CreateDirectory(string.Concat(targetFileLocation, directory.Substring(inProcessDirectory.Length)));
                foreach (string file in files)
                    File.Move(file, string.Concat(targetFileLocation, file.Substring(inProcessDirectory.Length)));
            }
            File.AppendAllLines(traceDummyFile, new string[] { site, monARessource, stateName, State.Ok.ToString() });
            _ = monIn.SendStatus(site, monARessource, stateName, State.Ok);
        }
        catch (Exception exception)
        {
            string subject = string.Concat("Exception:", _CellInstanceConnectionName);
            string body = string.Concat(exception.Message, Environment.NewLine, Environment.NewLine, exception.StackTrace);
            try
            { _SMTP.SendHighPriorityEmailMessage(subject, body); }
            catch (Exception) { }
            File.AppendAllLines(traceDummyFile, new string[] { site, monARessource, stateName, State.Critical.ToString(), exception.Message, exception.StackTrace });
            _ = monIn.SendStatus(site, monARessource, stateName, State.Critical);
        }
    }

    private void CallbackFileExists(string sourceArchiveFile, string traceDummyFile, string targetFileLocation, string monARessource, long sequence)
    {
        string[] files;
        bool warning = false;
        if (!_DummyRuns.ContainsKey(monARessource))
            _DummyRuns.Add(monARessource, new List<long>());
        if (!_DummyRuns[monARessource].Contains(sequence))
            _DummyRuns[monARessource].Add(sequence);
        File.AppendAllLines(traceDummyFile, new string[] { sourceArchiveFile });
        string inProcessDirectory = Path.Combine("_ProgressPath", "Dummy In-Process", sequence.ToString());
        if (!Directory.Exists(inProcessDirectory))
            _ = Directory.CreateDirectory(inProcessDirectory);
        files = Directory.GetFiles(inProcessDirectory, "*", SearchOption.AllDirectories);
        if (files.Any())
        {
            if (files.Length > 250)
                throw new Exception("Safety net!");
            try
            {
                foreach (string file in files)
                    File.Delete(file);
            }
            catch (Exception) { }
        }
        if (_FileConnectorConfiguration.IncludeSubDirectories.Value)
            files = Directory.GetFiles(targetFileLocation, "*", SearchOption.AllDirectories);
        else
            files = Directory.GetFiles(targetFileLocation, "*", SearchOption.TopDirectoryOnly);
        foreach (string file in files)
        {
            if (new FileInfo(file).LastWriteTime.Ticks == sequence)
            {
                warning = true;
                break;
            }
        }
        CallbackInProcessCleared(sourceArchiveFile, traceDummyFile, targetFileLocation, monARessource, inProcessDirectory, sequence, warning);
    }

    private string GetCellName(string pathSegment)
    {
        string result = string.Empty;
        foreach (string cellName in _CellNames)
        {
            if (pathSegment.ToLower().Contains(cellName.ToLower()))
            {
                result = cellName;
                break;
            }
        }
        if (string.IsNullOrEmpty(result))
        {
            int count;
            List<(string CellName, int Count)> cellNames = new();
            foreach (string cellName in _CellNames)
            {
                count = 0;
                foreach (char @char in cellName.ToLower())
                    count += pathSegment.Length - pathSegment.ToLower().Replace(@char.ToString(), string.Empty).Length;
                cellNames.Add(new(cellName, count));
            }
            result = (from l in cellNames orderby l.CellName.Length, l.Count descending select l.CellName).First();
        }
        return result;
    }

    private void Callback(object state)
    {
        try
        {
            string sourceParentDirectory;
            string targetParentDirectory;
            DateTime dateTime = DateTime.Now;
            if (!string.IsNullOrEmpty(Path.GetFileName(_FileConnectorConfiguration.SourceFileLocation)))
                sourceParentDirectory = Path.GetDirectoryName(_FileConnectorConfiguration.SourceFileLocation);
            else
                sourceParentDirectory = GetParentParent(_FileConnectorConfiguration.SourceFileLocation);
            if (!string.IsNullOrEmpty(Path.GetFileName(_FileConnectorConfiguration.TargetFileLocation)))
                targetParentDirectory = Path.GetDirectoryName(_FileConnectorConfiguration.TargetFileLocation);
            else
                targetParentDirectory = GetParentParent(_FileConnectorConfiguration.TargetFileLocation);
            if (sourceParentDirectory != targetParentDirectory)
                throw new Exception("Target and source must have the same parent for Si Dummy FileConnectorConfiguration!");
            bool check = dateTime.Hour > 7 && dateTime.Hour < 18 && dateTime.DayOfWeek != DayOfWeek.Sunday && dateTime.DayOfWeek != DayOfWeek.Saturday;
            if (!_IsEAFHosted || check)
            {
                string monARessource;
                string sourceFileFilter;
                string sourceArchiveFile;
                string weekOfYear = _Calendar.GetWeekOfYear(dateTime, CalendarWeekRule.FirstDay, DayOfWeek.Sunday).ToString("00");
                string traceDummyDirectory = Path.Combine(Path.GetPathRoot(_TracePath), "TracesDummy", _CellInstanceName, "Source", $"{dateTime:yyyy}___Week_{weekOfYear}");
                if (!Directory.Exists(traceDummyDirectory))
                    _ = Directory.CreateDirectory(traceDummyDirectory);
                string traceDummyFile = Path.Combine(traceDummyDirectory, $"{dateTime.Ticks} - {_CellInstanceName}.txt");
                File.AppendAllText(traceDummyFile, string.Empty);
                for (int i = 0; i < _FileConnectorConfiguration.SourceFileFilters.Count; i++)
                {
                    _LastDummyRunIndex += 1;
                    if (_LastDummyRunIndex >= _FileConnectorConfiguration.SourceFileFilters.Count)
                        _LastDummyRunIndex = 0;
                    sourceFileFilter = _FileConnectorConfiguration.SourceFileFilters[_LastDummyRunIndex];
                    sourceArchiveFile = Path.Combine(_FileConnectorConfiguration.SourceFileLocation, sourceFileFilter);
                    if (File.Exists(sourceArchiveFile))
                    {
                        if (!long.TryParse(Path.GetFileNameWithoutExtension(sourceArchiveFile).Replace("x", string.Empty), out long sequence))
                            throw new Exception("Invalid file name for source archive file!");
                        monARessource = GetCellName(sourceArchiveFile);
                        if (string.IsNullOrEmpty(monARessource))
                            throw new Exception("Could not determine which cell archive file is associated with!");
                        if (_IsEAFHosted)
                            CallbackFileExists(sourceArchiveFile, traceDummyFile, _FileConnectorConfiguration.TargetFileLocation, monARessource, sequence);
                        break;
                    }
                }
            }
        }
        catch (Exception exception)
        {
            string subject = string.Concat("Exception:", _CellInstanceConnectionName);
            string body = string.Concat(exception.Message, Environment.NewLine, Environment.NewLine, exception.StackTrace);
            try
            { _SMTP.SendHighPriorityEmailMessage(subject, body); }
            catch (Exception) { }
        }
        try
        {
            TimeSpan timeSpan = new(DateTime.Now.AddSeconds(_FileConnectorConfiguration.FileScanningIntervalInSeconds.Value).Ticks - DateTime.Now.Ticks);
            _ = _Timer.Change((long)timeSpan.TotalMilliseconds, Timeout.Infinite);
        }
        catch (Exception exception)
        {
            string subject = string.Concat("Exception:", _CellInstanceConnectionName);
            string body = string.Concat(exception.Message, Environment.NewLine, Environment.NewLine, exception.StackTrace);
            try
            { _SMTP.SendHighPriorityEmailMessage(subject, body); }
            catch (Exception) { }
        }
    }

}
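When no cell alias appears verbatim in the archive file path, GetCellName falls back to scoring each alias by how often its characters occur in the path, then picks the best candidate (shortest alias first, highest count). A small sketch of that per-alias score, with hypothetical inputs (the real aliases come from the CellInstance.<name>.Alias model object parameters):

// Sketch of the fallback scoring used by GetCellName; inputs are hypothetical.
static int Score(string cellName, string pathSegment)
{
    int count = 0;
    foreach (char c in cellName.ToLower())
        count += pathSegment.Length - pathSegment.ToLower().Replace(c.ToString(), string.Empty).Length;
    return count; // total occurrences of the alias's characters in the path
}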
134
Adaptation/FileHandlers/IQSSi/FileRead.cs
Normal file
@@ -0,0 +1,134 @@
using Adaptation.Eaf.Management.ConfigurationData.CellAutomation;
using Adaptation.Ifx.Eaf.EquipmentConnector.File.Configuration;
using Adaptation.Shared;
using Adaptation.Shared.Duplicator;
using Adaptation.Shared.Methods;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text.Json;

namespace Adaptation.FileHandlers.IQSSi;

public class FileRead : Shared.FileRead, IFileRead
{

    public FileRead(ISMTP smtp, Dictionary<string, string> fileParameter, string cellInstanceName, int? connectionCount, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList<ModelObjectParameterDefinition> modelObjectParameters, string equipmentDictionaryName, Dictionary<string, List<long>> dummyRuns, Dictionary<long, List<string>> staticRuns, bool useCyclicalForDescription, bool isEAFHosted) :
        base(new Description(), false, smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null)
    {
        _MinFileLength = 10;
        _NullData = string.Empty;
        _Logistics = new(this);
        if (_FileParameter is null)
            throw new Exception(cellInstanceConnectionName);
        if (_ModelObjectParameterDefinitions is null)
            throw new Exception(cellInstanceConnectionName);
        if (!_IsDuplicator)
            throw new Exception(cellInstanceConnectionName);
    }

    void IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, Exception exception)
    {
        bool isErrorFile = exception is not null;
        if (!isErrorFile && !string.IsNullOrEmpty(_Logistics.ReportFullPath))
        {
            FileInfo fileInfo = new(_Logistics.ReportFullPath);
            if (fileInfo.Exists && fileInfo.LastWriteTime < fileInfo.CreationTime)
                File.SetLastWriteTime(_Logistics.ReportFullPath, fileInfo.CreationTime);
        }
        Move(extractResults);
    }

    void IFileRead.WaitForThread() => WaitForThread(thread: null, threadExceptions: null);

    string IFileRead.GetEventDescription()
    {
        string result = _Description.GetEventDescription();
        return result;
    }

    List<string> IFileRead.GetHeaderNames()
    {
        List<string> results = _Description.GetHeaderNames();
        return results;
    }

    string[] IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, string to, string from, string resolvedFileLocation, Exception exception)
    {
        string[] results = Move(extractResults, to, from, resolvedFileLocation, exception);
        return results;
    }

    JsonProperty[] IFileRead.GetDefault()
    {
        JsonProperty[] results = _Description.GetDefault(this, _Logistics);
        return results;
    }

    Dictionary<string, string> IFileRead.GetDisplayNamesJsonElement()
    {
        Dictionary<string, string> results = _Description.GetDisplayNamesJsonElement(this);
        return results;
    }

    List<IDescription> IFileRead.GetDescriptions(IFileRead fileRead, List<Test> tests, IProcessData processData)
    {
        List<IDescription> results = _Description.GetDescriptions(fileRead, _Logistics, tests, processData);
        return results;
    }

    Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.GetExtractResult(string reportFullPath, string eventName)
    {
        Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
        if (string.IsNullOrEmpty(eventName))
            throw new Exception();
        _ReportFullPath = reportFullPath;
        DateTime dateTime = DateTime.Now;
        results = GetExtractResult(reportFullPath, dateTime);
        if (results.Item3 is null)
            results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(results.Item1, Array.Empty<Test>(), JsonSerializer.Deserialize<JsonElement[]>("[]"), results.Item4);
        if (results.Item3.Length > 0 && _IsEAFHosted)
            WritePDSF(this, results.Item3);
        UpdateLastTicksDuration(DateTime.Now.Ticks - dateTime.Ticks);
        return results;
    }

    Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.ReExtract()
    {
        Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
        List<string> headerNames = _Description.GetHeaderNames();
        Dictionary<string, string> keyValuePairs = _Description.GetDisplayNamesJsonElement(this);
        results = ReExtract(this, headerNames, keyValuePairs);
        return results;
    }

    private void FileCopy<T>(string reportFullPath, DateTime dateTime, List<T> descriptions) where T : Shared.Properties.IDescription
    {
        bool isDummyRun = false;
        string successDirectory = string.Empty;
        List<(Shared.Properties.IScopeInfo, string)> collection = new();
        string duplicateDirectory = Path.Combine(_FileConnectorConfiguration.SourceFileLocation, _CellInstanceName);
        if (!Directory.Exists(duplicateDirectory))
            _ = Directory.CreateDirectory(duplicateDirectory);
        string duplicateFile = Path.Combine(duplicateDirectory, Path.GetFileName(reportFullPath));
        File.Copy(reportFullPath, duplicateFile, overwrite: true);
        WaitForFileConsumption(dateTime, descriptions, isDummyRun, successDirectory, duplicateDirectory, collection, duplicateFile);
    }

    private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
    {
        Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
        Tuple<string, string[], string[]> pdsf = ProcessDataStandardFormat.GetLogisticsColumnsAndBody(reportFullPath);
        _Logistics = new Logistics(reportFullPath, pdsf.Item1);
        SetFileParameterLotIDToLogisticsMID();
        JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(pdsf);
        List<Shared.Properties.IDescription> descriptions = GetDuplicatorDescriptions(jsonElements);
        Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
        if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
            FileCopy(reportFullPath, dateTime, descriptions);
        results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(pdsf.Item1, tests, jsonElements, new List<FileInfo>());
        return results;
    }

}
266
Adaptation/FileHandlers/MoveMatchingFiles/FileRead.cs
Normal file
@@ -0,0 +1,266 @@
using Adaptation.Eaf.Management.ConfigurationData.CellAutomation;
using Adaptation.Ifx.Eaf.EquipmentConnector.File.Configuration;
using Adaptation.Shared;
using Adaptation.Shared.Duplicator;
using Adaptation.Shared.Methods;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Text.Json;
using System.Threading;

namespace Adaptation.FileHandlers.MoveMatchingFiles;

public class FileRead : Shared.FileRead, IFileRead
{

    public FileRead(ISMTP smtp, Dictionary<string, string> fileParameter, string cellInstanceName, int? connectionCount, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList<ModelObjectParameterDefinition> modelObjectParameters, string equipmentDictionaryName, Dictionary<string, List<long>> dummyRuns, Dictionary<long, List<string>> staticRuns, bool useCyclicalForDescription, bool isEAFHosted) :
        base(new Description(), false, smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null)
    {
        _MinFileLength = 10;
        _NullData = string.Empty;
        _Logistics = new(this);
        if (_FileParameter is null)
            throw new Exception(cellInstanceConnectionName);
        if (_ModelObjectParameterDefinitions is null)
            throw new Exception(cellInstanceConnectionName);
        if (!_IsDuplicator)
            throw new Exception(cellInstanceConnectionName);
    }

    void IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, Exception exception)
    {
        bool isErrorFile = exception is not null;
        if (!isErrorFile && !string.IsNullOrEmpty(_Logistics.ReportFullPath))
        {
            FileInfo fileInfo = new(_Logistics.ReportFullPath);
            if (fileInfo.Exists && fileInfo.LastWriteTime < fileInfo.CreationTime)
                File.SetLastWriteTime(_Logistics.ReportFullPath, fileInfo.CreationTime);
        }
        Move(extractResults);
    }

    void IFileRead.WaitForThread() => WaitForThread(thread: null, threadExceptions: null);

    string IFileRead.GetEventDescription()
    {
        string result = _Description.GetEventDescription();
        return result;
    }

    List<string> IFileRead.GetHeaderNames()
    {
        List<string> results = _Description.GetHeaderNames();
        return results;
    }

    string[] IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, string to, string from, string resolvedFileLocation, Exception exception)
    {
        string[] results = Move(extractResults, to, from, resolvedFileLocation, exception);
        return results;
    }

    JsonProperty[] IFileRead.GetDefault()
    {
        JsonProperty[] results = _Description.GetDefault(this, _Logistics);
        return results;
    }

    Dictionary<string, string> IFileRead.GetDisplayNamesJsonElement()
    {
        Dictionary<string, string> results = _Description.GetDisplayNamesJsonElement(this);
        return results;
    }

    List<IDescription> IFileRead.GetDescriptions(IFileRead fileRead, List<Test> tests, IProcessData processData)
    {
        List<IDescription> results = _Description.GetDescriptions(fileRead, _Logistics, tests, processData);
|
||||
return results;
|
||||
}
|
||||
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.GetExtractResult(string reportFullPath, string eventName)
|
||||
{
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
|
||||
if (string.IsNullOrEmpty(eventName))
|
||||
throw new Exception();
|
||||
_ReportFullPath = reportFullPath;
|
||||
DateTime dateTime = DateTime.Now;
|
||||
results = GetExtractResult(reportFullPath, dateTime);
|
||||
if (results.Item3 is null)
|
||||
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(results.Item1, Array.Empty<Test>(), JsonSerializer.Deserialize<JsonElement[]>("[]"), results.Item4);
|
||||
if (results.Item3.Length > 0 && _IsEAFHosted)
|
||||
WritePDSF(this, results.Item3);
|
||||
UpdateLastTicksDuration(DateTime.Now.Ticks - dateTime.Ticks);
|
||||
return results;
|
||||
}
|
||||
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.ReExtract()
|
||||
{
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
|
||||
List<string> headerNames = _Description.GetHeaderNames();
|
||||
Dictionary<string, string> keyValuePairs = _Description.GetDisplayNamesJsonElement(this);
|
||||
results = ReExtract(this, headerNames, keyValuePairs);
|
||||
return results;
|
||||
}
|
||||
|
||||
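// Returns the sorted top-level sub-directories of parentDirectory whose names are exactly numberLength characters long.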
private static List<string> GetSearchDirectories(int numberLength, string parentDirectory)
|
||||
{
|
||||
List<string> results = new();
|
||||
string[] directories = Directory.GetDirectories(parentDirectory, "*", SearchOption.TopDirectoryOnly);
|
||||
foreach (string directory in directories)
|
||||
{
|
||||
if (Path.GetFileName(directory).Length != numberLength)
|
||||
continue;
|
||||
results.Add(directory);
|
||||
}
|
||||
results.Sort();
|
||||
return results;
|
||||
}
|
||||
|
||||
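// Polls each search directory for files named like the report until a match is found or _BreakAfterSeconds elapses.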
private List<string> GetMatchingFiles(long ticks, string reportFullPath, List<string> searchDirectories)
|
||||
{
|
||||
List<string> results = new();
|
||||
string[] found;
|
||||
string fileName = Path.GetFileName(reportFullPath);
|
||||
foreach (string searchDirectory in searchDirectories)
|
||||
{
|
||||
for (int i = 0; i < int.MaxValue; i++)
|
||||
{
|
||||
found = Directory.GetFiles(searchDirectory, fileName, SearchOption.AllDirectories);
|
||||
if (found.Any())
|
||||
{
|
||||
results.AddRange(found);
|
||||
break;
|
||||
}
|
||||
if (new TimeSpan(DateTime.Now.Ticks - ticks).TotalSeconds > _BreakAfterSeconds)
|
||||
break;
|
||||
}
|
||||
}
|
||||
return results;
|
||||
}
|
||||
|
||||
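// Pairs each matching file with a check-file path built from its first character plus the path tail after the numbered directory segment.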
private static List<(string matchingFile, string checkFile)> GetCollection(int numberLength, string parentDirectory, List<string> matchingFiles)
|
||||
{
|
||||
List<(string matchingFile, string checkFile)> results = new();
|
||||
string checkFile;
|
||||
int parentDirectoryLength = parentDirectory.Length;
|
||||
foreach (string matchingFile in matchingFiles)
|
||||
{
|
||||
checkFile = $"{matchingFile[0]}{matchingFile.Substring(parentDirectoryLength + numberLength + 1)}";
|
||||
results.Add(new(matchingFile, checkFile));
|
||||
}
|
||||
return results;
|
||||
}
|
||||
|
||||
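// Expands each pair with its .err file, its check directory (created when missing), and the NoWaitDirectory path used to skip consumption waits.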
private static List<(string, string, string, string, string)> GetCollection(List<(string matchingFile, string checkFile)> collection)
|
||||
{
|
||||
List<(string, string, string, string, string)> results = new();
|
||||
string errFile;
|
||||
string checkDirectory;
|
||||
string noWaitDirectory;
|
||||
foreach ((string matchingFile, string checkFile) in collection)
|
||||
{
|
||||
errFile = string.Concat(checkFile, ".err");
|
||||
checkDirectory = Path.GetDirectoryName(checkFile);
|
||||
if (!Directory.Exists(checkDirectory))
|
||||
_ = Directory.CreateDirectory(checkDirectory);
|
||||
noWaitDirectory = Path.Combine(checkDirectory, "NoWaitDirectory");
|
||||
results.Add(new(matchingFile, checkFile, errFile, checkDirectory, noWaitDirectory));
|
||||
}
|
||||
return results;
|
||||
}
|
||||
|
||||
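// Moves each matching file to its check location; files under a NoWaitDirectory are verified in a single pass afterwards,
// all others are polled until consumed, an .err file appears, or _BreakAfterSeconds elapses.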
private void MoveCollection(DateTime dateTime, List<(string matchingFile, string checkFile)> collection)
|
||||
{
|
||||
long preWait;
|
||||
List<(string checkFile, string errFile)> postCollection = new();
|
||||
foreach ((string matchingFile, string checkFile, string errFile, string checkDirectory, string noWaitDirectory) in GetCollection(collection))
|
||||
{
|
||||
File.Move(matchingFile, checkFile);
|
||||
if (Directory.Exists(noWaitDirectory))
|
||||
{
|
||||
postCollection.Add(new(checkFile, errFile));
|
||||
continue;
|
||||
}
|
||||
if (_FileConnectorConfiguration?.FileHandleWaitTime is null)
|
||||
preWait = DateTime.Now.AddMilliseconds(1234).Ticks;
|
||||
else
|
||||
preWait = DateTime.Now.AddMilliseconds(_FileConnectorConfiguration.FileHandleWaitTime.Value).Ticks;
|
||||
for (short i = 0; i < short.MaxValue; i++)
|
||||
{
|
||||
if (DateTime.Now.Ticks > preWait)
|
||||
break;
|
||||
Thread.Sleep(500);
|
||||
}
|
||||
for (int i = 0; i < int.MaxValue; i++)
|
||||
{
|
||||
if (File.Exists(errFile))
|
||||
throw new Exception(File.ReadAllText(errFile));
|
||||
if (!File.Exists(checkFile))
|
||||
break;
|
||||
if (new TimeSpan(DateTime.Now.Ticks - dateTime.Ticks).TotalSeconds > _BreakAfterSeconds)
|
||||
throw new Exception($"Not all files were consumed after {_BreakAfterSeconds} second(s)!");
|
||||
Thread.Sleep(500);
|
||||
}
|
||||
}
|
||||
if (postCollection.Any())
|
||||
{
|
||||
Thread.Sleep(500);
|
||||
StringBuilder stringBuilder = new();
|
||||
foreach ((string checkFile, string errFile) in postCollection)
|
||||
{
|
||||
if (File.Exists(errFile))
|
||||
_ = stringBuilder.AppendLine(File.ReadAllText(errFile));
|
||||
if (File.Exists(checkFile))
|
||||
_ = stringBuilder.AppendLine($"<{checkFile}> was not consumed before the timeout!");
|
||||
}
|
||||
if (stringBuilder.Length > 0)
|
||||
throw new Exception(stringBuilder.ToString());
|
||||
}
|
||||
}
|
||||
|
||||
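// Writes a <number>.txt pointer file in the parent directory (when not already present) recording where each matching file was found.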
private static void CreatePointerFile(int numberLength, string parentDirectory, List<string> matchingFiles)
|
||||
{
|
||||
#nullable enable
|
||||
string checkFile;
|
||||
string writeFile;
|
||||
string? directoryName;
|
||||
int parentDirectoryLength = parentDirectory.Length;
|
||||
foreach (string matchingFile in matchingFiles)
|
||||
{
|
||||
directoryName = Path.GetDirectoryName(matchingFile);
|
||||
if (directoryName is null)
|
||||
continue;
|
||||
checkFile = $"{matchingFile[0]}{directoryName.Substring(parentDirectoryLength + numberLength + 1)}";
|
||||
writeFile = Path.Combine(parentDirectory, $"{directoryName.Substring(parentDirectory.Length + 1, numberLength)}.txt");
|
||||
if (File.Exists(writeFile))
|
||||
continue;
|
||||
File.AppendAllLines(writeFile, new string[] { parentDirectory, matchingFile, directoryName, checkFile });
|
||||
}
|
||||
#nullable disable
|
||||
}
|
||||
|
||||
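// Locates the per-tool copies of the report in the sibling numbered directories and moves them to their check locations; returns an empty extract result.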
private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
|
||||
{
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> results = new(string.Empty, null, null, new List<FileInfo>());
|
||||
Tuple<string, string[], string[]> pdsf = ProcessDataStandardFormat.GetLogisticsColumnsAndBody(reportFullPath);
|
||||
_Logistics = new Logistics(reportFullPath, pdsf.Item1);
|
||||
SetFileParameterLotIDToLogisticsMID();
|
||||
int numberLength = 2;
|
||||
long ticks = dateTime.Ticks;
|
||||
string parentParentDirectory = GetParentParent(reportFullPath);
|
||||
List<string> searchDirectories = GetSearchDirectories(numberLength, parentParentDirectory);
|
||||
List<string> matchingFiles = GetMatchingFiles(ticks, reportFullPath, searchDirectories);
|
||||
if (matchingFiles.Count != searchDirectories.Count)
|
||||
throw new Exception($"Didn't find all files after {_BreakAfterSeconds} second(s)!");
|
||||
try
|
||||
{ CreatePointerFile(numberLength, parentParentDirectory, matchingFiles); }
|
||||
catch (Exception) { }
|
||||
List<(string matchingFile, string checkFile)> collection = GetCollection(numberLength, parentParentDirectory, matchingFiles);
|
||||
MoveCollection(dateTime, collection);
|
||||
return results;
|
||||
}
|
||||
|
||||
}
|
186
Adaptation/FileHandlers/OpenInsight/FileRead.cs
Normal file
@ -0,0 +1,186 @@
|
||||
using Adaptation.Eaf.Management.ConfigurationData.CellAutomation;
|
||||
using Adaptation.Ifx.Eaf.EquipmentConnector.File.Configuration;
|
||||
using Adaptation.Shared;
|
||||
using Adaptation.Shared.Duplicator;
|
||||
using Adaptation.Shared.Methods;
|
||||
using Adaptation.Shared.Metrology;
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Globalization;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Text;
|
||||
using System.Text.Json;
|
||||
|
||||
namespace Adaptation.FileHandlers.OpenInsight;
|
||||
|
||||
public class FileRead : Shared.FileRead, IFileRead
|
||||
{
|
||||
|
||||
private readonly string _IqsConnectionString;
|
||||
private readonly string _OpenInsightFilePattern;
|
||||
private readonly string _OpenInsightApiECDirectory;
|
||||
private readonly string _OpenInsightApiIFXDirectory;
|
||||
|
||||
public FileRead(ISMTP smtp, Dictionary<string, string> fileParameter, string cellInstanceName, int? connectionCount, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList<ModelObjectParameterDefinition> modelObjectParameters, string equipmentDictionaryName, Dictionary<string, List<long>> dummyRuns, Dictionary<long, List<string>> staticRuns, bool useCyclicalForDescription, bool isEAFHosted) :
|
||||
base(new Description(), false, smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null)
|
||||
{
|
||||
_MinFileLength = 10;
|
||||
_NullData = string.Empty;
|
||||
_Logistics = new(this);
|
||||
if (_FileParameter is null)
|
||||
throw new Exception(cellInstanceConnectionName);
|
||||
if (_ModelObjectParameterDefinitions is null)
|
||||
throw new Exception(cellInstanceConnectionName);
|
||||
if (!_IsDuplicator)
|
||||
throw new Exception(cellInstanceConnectionName);
|
||||
_OpenInsightApiIFXDirectory = @"\\messdv002.na.infineon.com\Candela\Archive\API";
|
||||
_OpenInsightApiECDirectory = @"\\messv02ecc1.ec.local\EC_Metrology_Si\Archive\API";
|
||||
_IqsConnectionString = GetPropertyValue(cellInstanceConnectionName, modelObjectParameters, "IQS.ConnectionString");
|
||||
_OpenInsightFilePattern = GetPropertyValue(cellInstanceConnectionName, modelObjectParameters, "OpenInsight.FilePattern");
|
||||
}
|
||||
|
||||
void IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, Exception exception)
|
||||
{
|
||||
bool isErrorFile = exception is not null;
|
||||
if (!isErrorFile && !string.IsNullOrEmpty(_Logistics.ReportFullPath))
|
||||
{
|
||||
FileInfo fileInfo = new(_Logistics.ReportFullPath);
|
||||
if (fileInfo.Exists && fileInfo.LastWriteTime < fileInfo.CreationTime)
|
||||
File.SetLastWriteTime(_Logistics.ReportFullPath, fileInfo.CreationTime);
|
||||
}
|
||||
Move(extractResults);
|
||||
}
|
||||
|
||||
void IFileRead.WaitForThread() => WaitForThread(thread: null, threadExceptions: null);
|
||||
|
||||
string IFileRead.GetEventDescription()
|
||||
{
|
||||
string result = _Description.GetEventDescription();
|
||||
return result;
|
||||
}
|
||||
|
||||
List<string> IFileRead.GetHeaderNames()
|
||||
{
|
||||
List<string> results = _Description.GetHeaderNames();
|
||||
return results;
|
||||
}
|
||||
|
||||
string[] IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, string to, string from, string resolvedFileLocation, Exception exception)
|
||||
{
|
||||
string[] results = Move(extractResults, to, from, resolvedFileLocation, exception);
|
||||
return results;
|
||||
}
|
||||
|
||||
JsonProperty[] IFileRead.GetDefault()
|
||||
{
|
||||
JsonProperty[] results = _Description.GetDefault(this, _Logistics);
|
||||
return results;
|
||||
}
|
||||
|
||||
Dictionary<string, string> IFileRead.GetDisplayNamesJsonElement()
|
||||
{
|
||||
Dictionary<string, string> results = _Description.GetDisplayNamesJsonElement(this);
|
||||
return results;
|
||||
}
|
||||
|
||||
List<IDescription> IFileRead.GetDescriptions(IFileRead fileRead, List<Test> tests, IProcessData processData)
|
||||
{
|
||||
List<IDescription> results = _Description.GetDescriptions(fileRead, _Logistics, tests, processData);
|
||||
return results;
|
||||
}
|
||||
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.GetExtractResult(string reportFullPath, string eventName)
|
||||
{
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
|
||||
if (string.IsNullOrEmpty(eventName))
|
||||
throw new Exception();
|
||||
_ReportFullPath = reportFullPath;
|
||||
DateTime dateTime = DateTime.Now;
|
||||
results = GetExtractResult(reportFullPath, dateTime);
|
||||
if (results.Item3 is null)
|
||||
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(results.Item1, Array.Empty<Test>(), JsonSerializer.Deserialize<JsonElement[]>("[]"), results.Item4);
|
||||
if (results.Item3.Length > 0 && _IsEAFHosted)
|
||||
WritePDSF(this, results.Item3);
|
||||
UpdateLastTicksDuration(DateTime.Now.Ticks - dateTime.Ticks);
|
||||
return results;
|
||||
}
|
||||
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.ReExtract()
|
||||
{
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
|
||||
List<string> headerNames = _Description.GetHeaderNames();
|
||||
Dictionary<string, string> keyValuePairs = _Description.GetDisplayNamesJsonElement(this);
|
||||
results = ReExtract(this, headerNames, keyValuePairs);
|
||||
return results;
|
||||
}
|
||||
|
||||
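// Flattens the first description into one delimited line (tab when MesEntity equals Reactor, '~' otherwise).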
private static string GetLines(List<json.Description> descriptions)
|
||||
{
|
||||
StringBuilder results = new();
|
||||
json.Description x = descriptions[0];
|
||||
char del = x.MesEntity == x.Reactor ? '\t' : '~';
|
||||
_ = results.Append(x.Date).Append(del).
|
||||
Append(x.Employee).Append(del).
|
||||
Append(x.Recipe).Append(del).
|
||||
Append(x.Reactor).Append(del).
|
||||
Append(x.RDS).Append(del).
|
||||
Append(x.PSN).Append(del).
|
||||
Append(x.Layer).Append(del).
|
||||
Append(x.Zone);
|
||||
return results.ToString();
|
||||
}
|
||||
|
||||
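// Resolves the IQS sub-group id, queues the OpenInsight export line under the configured file pattern,
// archives the run via FromIQS.Save, and waits for file consumption when EAF-hosted.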
private void SaveOpenInsightFile(string reportFullPath, DateTime dateTime, string logistics, List<json.Description> descriptions, Test[] tests)
|
||||
{
|
||||
bool isDummyRun = false;
|
||||
List<(Shared.Properties.IScopeInfo, string)> collection = new();
|
||||
string successDirectory = _FileConnectorConfiguration.AlternateTargetFolder;
|
||||
string parentParent = GetParentParent(_FileConnectorConfiguration.SourceFileLocation);
|
||||
if (parentParent.Contains(_CellInstanceName))
|
||||
parentParent = Path.GetDirectoryName(parentParent);
|
||||
string duplicateDirectory = Path.Combine(parentParent, "Data");
|
||||
if (!Directory.Exists(duplicateDirectory))
|
||||
_ = Directory.CreateDirectory(duplicateDirectory);
|
||||
string duplicateFile = Path.Combine(duplicateDirectory, Path.GetFileName(reportFullPath));
|
||||
if (descriptions.Any() && tests.Any())
|
||||
{
|
||||
string lines = GetLines(descriptions);
|
||||
if (!string.IsNullOrEmpty(lines))
|
||||
{
|
||||
long? subGroupId;
|
||||
long breakAfter = dateTime.AddSeconds(_BreakAfterSeconds).Ticks;
|
||||
long preWait = _FileConnectorConfiguration?.FileHandleWaitTime is null ? dateTime.AddMilliseconds(1234).Ticks : dateTime.AddMilliseconds(_FileConnectorConfiguration.FileHandleWaitTime.Value).Ticks;
|
||||
if (string.IsNullOrEmpty(descriptions[0].Reactor) || string.IsNullOrEmpty(descriptions[0].PSN))
|
||||
subGroupId = null;
|
||||
else
|
||||
(subGroupId, int? _, string _) = FromIQS.GetCommandText(_IqsConnectionString, _Logistics, descriptions[0], breakAfter, preWait);
|
||||
if (subGroupId is null)
|
||||
collection.Add(new(new ScopeInfo(tests[0], _OpenInsightFilePattern), lines));
|
||||
else
|
||||
collection.Add(new(new ScopeInfo(tests[0], $"{subGroupId.Value} {_OpenInsightFilePattern}"), lines));
|
||||
string weekOfYear = _Calendar.GetWeekOfYear(_Logistics.DateTimeFromSequence, CalendarWeekRule.FirstDay, DayOfWeek.Sunday).ToString("00");
|
||||
FromIQS.Save(_OpenInsightApiECDirectory, _OpenInsightApiIFXDirectory, _Logistics, reportFullPath, logistics, descriptions.First(), lines, subGroupId, weekOfYear);
|
||||
}
|
||||
}
|
||||
if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
|
||||
WaitForFileConsumption(dateTime, descriptions, isDummyRun, successDirectory, duplicateDirectory, collection, duplicateFile);
|
||||
}
|
||||
|
||||
private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
|
||||
{
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
|
||||
Tuple<string, string[], string[]> pdsf = ProcessDataStandardFormat.GetLogisticsColumnsAndBody(reportFullPath);
|
||||
_Logistics = new Logistics(reportFullPath, pdsf.Item1);
|
||||
SetFileParameterLotIDToLogisticsMID();
|
||||
JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(pdsf);
|
||||
// List<Shared.Properties.IDescription> descriptions = GetDuplicatorDescriptions(jsonElements);
|
||||
List<json.Description> descriptions = json.ProcessData.GetDescriptions(jsonElements);
|
||||
Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
|
||||
if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
|
||||
SaveOpenInsightFile(reportFullPath, dateTime, pdsf.Item1, descriptions, tests);
|
||||
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(pdsf.Item1, tests, jsonElements, new List<FileInfo>());
|
||||
return results;
|
||||
}
|
||||
|
||||
}
|
245
Adaptation/FileHandlers/OpenInsight/FromIQS.cs
Normal file
@ -0,0 +1,245 @@
|
||||
using Adaptation.Shared;
|
||||
using System;
|
||||
using System.Data;
|
||||
using System.Data.SqlClient;
|
||||
using System.Globalization;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Text;
|
||||
using System.Text.Json;
|
||||
using System.Threading;
|
||||
|
||||
namespace Adaptation.FileHandlers.OpenInsight;
|
||||
|
||||
public class FromIQS
|
||||
{
|
||||
|
||||
#nullable enable
|
||||
|
||||
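// Builds the spcepiworld query that finds the sub-group row for this run (by reactor, PSN, optional RDS, MES entity, and timestamp), returned as JSON via FOR JSON PATH.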
private static string GetCommandText(Logistics logistics, json.Description description, string dateTime, long? subGroupId)
|
||||
{
|
||||
StringBuilder result = new();
|
||||
_ = result
|
||||
.AppendLine(" select iq.ev_count, iq.cl_count, iq.sl_count, iq.se_sgrp, iq.se_sgtm, iq.se_tsno, iq.td_test, iq.pr_name, iq.jd_name, iq.pl_name, iq.pd_name, iq.td_name, iq.se_val ")
|
||||
.AppendLine(" from ( ")
|
||||
.AppendLine(" select ")
|
||||
.AppendLine(" se.f_sgrp se_sgrp, ")
|
||||
.AppendLine(" se.f_sgtm se_sgtm, ")
|
||||
.AppendLine(" se.f_tsno se_tsno, ")
|
||||
.AppendLine(" se.f_val se_val, ")
|
||||
.AppendLine(" pr.f_name pr_name, ")
|
||||
.AppendLine(" jd.f_name jd_name, ")
|
||||
.AppendLine(" pl.f_name pl_name, ")
|
||||
.AppendLine(" pd.f_name pd_name, ")
|
||||
.AppendLine(" td.f_test td_test, ")
|
||||
.AppendLine(" td.f_name td_name, ")
|
||||
.AppendLine(" (select count(cl.f_part) ")
|
||||
.AppendLine(" from [spcepiworld].[dbo].[ctrl_lim] cl ")
|
||||
.AppendLine(" where cl.f_part = pd.f_part ")
|
||||
.AppendLine(" and cl.f_test = td.f_test ")
|
||||
.AppendLine(" ) cl_count, ")
|
||||
.AppendLine(" (select count(sl.f_part) ")
|
||||
.AppendLine(" from [spcepiworld].[dbo].[spec_lim] sl ")
|
||||
.AppendLine(" where sl.f_part = pd.f_part ")
|
||||
.AppendLine(" and sl.f_test = td.f_test ")
|
||||
.AppendLine(" ) sl_count, ")
|
||||
.AppendLine(" (select count(ev.f_evnt) ")
|
||||
.AppendLine(" from [spcepiworld].[dbo].[evnt_inf] ev ")
|
||||
.AppendLine(" where ev.f_prcs = pr.f_prcs ")
|
||||
.AppendLine(" and ev.f_part = pd.f_part ")
|
||||
.AppendLine(" and ev.f_sgtm = se.f_sgtm ")
|
||||
.AppendLine(" ) ev_count ")
|
||||
.AppendLine(" from [spcepiworld].[dbo].[sgrp_ext] se ")
|
||||
.AppendLine(" join [spcepiworld].[dbo].[prcs_dat] pr ")
|
||||
.AppendLine(" on se.f_prcs = pr.f_prcs ")
|
||||
.AppendLine(" join [spcepiworld].[dbo].[job_dat] jd ")
|
||||
.AppendLine(" on se.f_job = jd.f_job ")
|
||||
.AppendLine(" join [spcepiworld].[dbo].[part_lot] pl ")
|
||||
.AppendLine(" on se.f_lot = pl.f_lot ")
|
||||
.AppendLine(" join [spcepiworld].[dbo].[part_dat] pd ")
|
||||
.AppendLine(" on se.f_part = pd.f_part ")
|
||||
.AppendLine(" join [spcepiworld].[dbo].[test_dat] td ")
|
||||
.AppendLine(" on se.f_test = td.f_test ")
|
||||
.AppendLine(" where se.f_flag = 0 ");
|
||||
if (subGroupId is not null)
|
||||
_ = result.Append(" and se.f_sgrp = ").Append(subGroupId).AppendLine(" ");
|
||||
if (!string.IsNullOrEmpty(description.RDS))
|
||||
_ = result.Append(" and pl.f_name = '").Append(description.RDS).AppendLine("' ");
|
||||
_ = result
|
||||
.Append(" and pr.f_name = '").Append(description.Reactor).AppendLine("' ")
|
||||
.Append(" and pd.f_name = '").Append(description.PSN).AppendLine("' ")
|
||||
.AppendLine(" and jd.f_name in ('SRP2100') ")
|
||||
.Append(" and jd.f_name = '").Append(logistics.MesEntity).AppendLine("' ")
|
||||
.Append(" and dateadd(HH, -7, (dateadd(SS, convert(bigint, se.f_sgtm), '19700101'))) = '").Append(dateTime).AppendLine("' ")
|
||||
.AppendLine(" ) as iq ")
|
||||
.AppendLine(" order by iq.ev_count desc, iq.cl_count desc, iq.sl_count desc, iq.se_sgrp, iq.se_tsno, iq.td_test ")
|
||||
.AppendLine(" for json path ");
|
||||
return result.ToString();
|
||||
}
|
||||
|
||||
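// Executes the query and concatenates the streamed FOR JSON PATH result into a single string.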
private static StringBuilder GetForJsonPath(string connectionString, string commandText)
|
||||
{
|
||||
StringBuilder stringBuilder = new();
|
||||
using SqlConnection sqlConnection = new(connectionString);
|
||||
sqlConnection.Open();
|
||||
using SqlCommand sqlCommand = new(commandText, sqlConnection);
|
||||
SqlDataReader sqlDataReader = sqlCommand.ExecuteReader(CommandBehavior.SequentialAccess);
|
||||
while (sqlDataReader.Read())
|
||||
_ = stringBuilder.Append(sqlDataReader.GetString(0));
|
||||
return stringBuilder;
|
||||
}
|
||||
|
||||
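// Waits out the pre-wait window, then polls IQS until a sub-group row is returned or breakAfter passes;
// returns the parsed sub-group id and event count (when available) plus the command text or raw response.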
internal static (long?, int?, string) GetCommandText(string connectionString, Logistics logistics, json.Description description, long breakAfter, long preWait)
|
||||
{
|
||||
string dateTime;
|
||||
int? count = null;
|
||||
string commandText;
|
||||
long? result = null;
|
||||
string dateFormat = json.Description.GetDateFormat();
|
||||
if (DateTime.TryParseExact(description.Date, dateFormat, CultureInfo.InvariantCulture, DateTimeStyles.None, out DateTime dateTimeParsed))
|
||||
dateTime = dateTimeParsed.ToString("yyyy-MM-dd HH:mm:ss");
|
||||
else if (DateTime.TryParse(description.Date, CultureInfo.InvariantCulture, DateTimeStyles.None, out dateTimeParsed))
|
||||
dateTime = dateTimeParsed.ToString("yyyy-MM-dd HH:mm:ss");
|
||||
else
|
||||
dateTime = logistics.DateTimeFromSequence.ToString("yyyy-MM-dd HH:mm:ss");
|
||||
commandText = GetCommandText(logistics, description, dateTime, subGroupId: null);
|
||||
for (short i = 0; i < short.MaxValue; i++)
|
||||
{
|
||||
if (DateTime.Now.Ticks > preWait)
|
||||
break;
|
||||
Thread.Sleep(100);
|
||||
}
|
||||
StringBuilder stringBuilder = new();
|
||||
for (short z = 0; z < short.MaxValue; z++)
|
||||
{
|
||||
stringBuilder = GetForJsonPath(connectionString, commandText);
|
||||
if (stringBuilder.Length > 0)
|
||||
{
|
||||
long postBreakAfter = DateTime.Now.AddSeconds(5).Ticks;
|
||||
for (short y = 0; y < short.MaxValue; y++)
|
||||
{
|
||||
if (DateTime.Now.Ticks > postBreakAfter)
|
||||
break;
|
||||
Thread.Sleep(250);
|
||||
}
|
||||
stringBuilder = GetForJsonPath(connectionString, commandText);
|
||||
break;
|
||||
}
|
||||
if (DateTime.Now.Ticks > breakAfter)
|
||||
// throw new Exception($"After {breakAfterSeconds} seconds, didn't find sub group id!");
|
||||
break;
|
||||
Thread.Sleep(250);
|
||||
}
|
||||
if (stringBuilder.Length == 0)
|
||||
commandText = stringBuilder.ToString();
|
||||
else
|
||||
{
|
||||
JsonElement[]? jsonElements = JsonSerializer.Deserialize<JsonElement[]>(stringBuilder.ToString());
|
||||
if (jsonElements is null || !jsonElements.Any() || jsonElements[0].ValueKind != JsonValueKind.Object)
|
||||
commandText = stringBuilder.ToString();
|
||||
else
|
||||
{
|
||||
JsonProperty[] jsonProperties = jsonElements[0].EnumerateObject().ToArray();
|
||||
if (!jsonProperties.Any() || jsonProperties[3].Name != "se_sgrp" || !long.TryParse(jsonProperties[3].Value.ToString(), out long subGroupId))
|
||||
commandText = stringBuilder.ToString();
|
||||
else
|
||||
{
|
||||
result = subGroupId;
|
||||
if (jsonProperties.Any() && jsonProperties[0].Name == "ev_count" && int.TryParse(jsonProperties[0].Value.ToString(), out int evCount))
|
||||
count = evCount;
|
||||
}
|
||||
}
|
||||
}
|
||||
return new(result, count, commandText);
|
||||
}
|
||||
|
||||
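// Merges selected description fields with the tab/semicolon-delimited logistics lines into one indented JSON document with a "Logistics" array.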
private static string GetJson(Logistics logistics, string logisticLines, json.Description description)
|
||||
{
|
||||
string result;
|
||||
StringBuilder stringBuilder = new();
|
||||
var @object = new
|
||||
{
|
||||
description.MesEntity,
|
||||
description.Employee,
|
||||
description.Layer,
|
||||
description.PSN,
|
||||
description.RDS,
|
||||
description.Reactor,
|
||||
description.Recipe,
|
||||
description.Zone,
|
||||
logistics.DateTimeFromSequence.Ticks
|
||||
};
|
||||
string[] pair;
|
||||
string safeValue;
|
||||
string[] segments;
|
||||
string serializerValue;
|
||||
foreach (string line in logisticLines.Split(new string[] { Environment.NewLine }, StringSplitOptions.None))
|
||||
{
|
||||
segments = line.Split('\t');
|
||||
if (segments.Length < 2)
|
||||
continue;
|
||||
segments = segments[1].Split(';');
|
||||
_ = stringBuilder.Append('{');
|
||||
foreach (string segment in segments)
|
||||
{
|
||||
pair = segment.Split('=');
|
||||
if (pair.Length != 2 || pair[0].Length < 3)
|
||||
continue;
|
||||
serializerValue = JsonSerializer.Serialize(pair[1]);
|
||||
safeValue = serializerValue.Substring(1, serializerValue.Length - 2);
|
||||
_ = stringBuilder.Append('"').Append(pair[0].Substring(2)).Append('"').Append(':').Append('"').Append(safeValue).Append('"').Append(',');
|
||||
}
|
||||
if (stringBuilder.Length > 0)
|
||||
_ = stringBuilder.Remove(stringBuilder.Length - 1, 1);
|
||||
_ = stringBuilder.Append('}').Append(',');
|
||||
}
|
||||
if (stringBuilder.Length > 0)
|
||||
_ = stringBuilder.Remove(stringBuilder.Length - 1, 1);
|
||||
_ = stringBuilder.Append(']').Append('}');
|
||||
_ = stringBuilder.Insert(0, ",\"Logistics\":[");
|
||||
string json = JsonSerializer.Serialize(@object);
|
||||
_ = stringBuilder.Insert(0, json.Substring(0, json.Length - 1));
|
||||
JsonElement? jsonElement = JsonSerializer.Deserialize<JsonElement>(stringBuilder.ToString());
|
||||
result = jsonElement is null ? "{}" : JsonSerializer.Serialize(jsonElement, new JsonSerializerOptions { WriteIndented = true });
|
||||
return result;
|
||||
}
|
||||
|
||||
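// Mirrors the report, the export line (<ticks>.txt), and the JSON summary (<ticks>.json) into the EC and IFX archive trees when their roots are reachable.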
internal static void Save(string openInsightApiECDirectory, string openInsightApiIFXDirectory, Logistics logistics, string reportFullPath, string logisticLines, json.Description description, string lines, long? subGroupId, string weekOfYear)
|
||||
{
|
||||
string checkFile;
|
||||
string fileName = Path.GetFileName(reportFullPath);
|
||||
string json = GetJson(logistics, logisticLines, description);
|
||||
string? ecPathRoot = Path.GetPathRoot(openInsightApiECDirectory);
|
||||
string? ifxPathRoot = Path.GetPathRoot(openInsightApiIFXDirectory);
|
||||
bool ecExists = ecPathRoot is not null && Directory.Exists(ecPathRoot);
|
||||
bool ifxExists = ifxPathRoot is not null && Directory.Exists(ifxPathRoot);
|
||||
string weekYear = $"{logistics.DateTimeFromSequence:yyyy}_Week_{weekOfYear}";
|
||||
string ecDirectory = Path.Combine(openInsightApiECDirectory, weekYear, $"-{description.PSN}", $"-{description.Reactor}", $"-{description.RDS}", $"-{subGroupId}");
|
||||
string ifxDirectory = Path.Combine(openInsightApiIFXDirectory, weekYear, $"-{description.PSN}", $"-{description.Reactor}", $"-{description.RDS}", $"-{subGroupId}");
|
||||
if (ecExists && !Directory.Exists(ecDirectory))
|
||||
_ = Directory.CreateDirectory(ecDirectory);
|
||||
if (ifxExists && !Directory.Exists(ifxDirectory))
|
||||
_ = Directory.CreateDirectory(ifxDirectory);
|
||||
checkFile = Path.Combine(ecDirectory, fileName);
|
||||
if (ecExists && !File.Exists(checkFile))
|
||||
File.Copy(reportFullPath, checkFile);
|
||||
checkFile = Path.Combine(ifxDirectory, fileName);
|
||||
if (ifxExists && !File.Exists(checkFile))
|
||||
File.Copy(reportFullPath, checkFile);
|
||||
checkFile = Path.Combine(ecDirectory, $"{logistics.DateTimeFromSequence.Ticks}.txt");
|
||||
if (ecExists && !File.Exists(checkFile))
|
||||
File.WriteAllText(checkFile, lines);
|
||||
checkFile = Path.Combine(ifxDirectory, $"{logistics.DateTimeFromSequence.Ticks}.txt");
|
||||
if (ifxExists && !File.Exists(checkFile))
|
||||
File.WriteAllText(checkFile, lines);
|
||||
checkFile = Path.Combine(ecDirectory, $"{logistics.DateTimeFromSequence.Ticks}.json");
|
||||
if (ecExists && !File.Exists(checkFile))
|
||||
File.WriteAllText(checkFile, json);
|
||||
checkFile = Path.Combine(ifxDirectory, $"{logistics.DateTimeFromSequence.Ticks}.json");
|
||||
if (ifxExists && !File.Exists(checkFile))
|
||||
File.WriteAllText(checkFile, json);
|
||||
}
|
||||
|
||||
#nullable disable
|
||||
|
||||
}
|
142
Adaptation/FileHandlers/OpenInsightMetrologyViewer/FileRead.cs
Normal file
@ -0,0 +1,142 @@
|
||||
using Adaptation.Eaf.Management.ConfigurationData.CellAutomation;
|
||||
using Adaptation.Ifx.Eaf.EquipmentConnector.File.Configuration;
|
||||
using Adaptation.Shared;
|
||||
using Adaptation.Shared.Duplicator;
|
||||
using Adaptation.Shared.Methods;
|
||||
using Adaptation.Shared.Metrology;
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Text.Json;
|
||||
|
||||
namespace Adaptation.FileHandlers.OpenInsightMetrologyViewer;
|
||||
|
||||
public class FileRead : Shared.FileRead, IFileRead
|
||||
{
|
||||
|
||||
private readonly string _OpenInsightMetrologyViewerAPI;
|
||||
|
||||
public FileRead(ISMTP smtp, Dictionary<string, string> fileParameter, string cellInstanceName, int? connectionCount, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList<ModelObjectParameterDefinition> modelObjectParameters, string equipmentDictionaryName, Dictionary<string, List<long>> dummyRuns, Dictionary<long, List<string>> staticRuns, bool useCyclicalForDescription, bool isEAFHosted) :
|
||||
base(new Description(), false, smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null)
|
||||
{
|
||||
_MinFileLength = 10;
|
||||
_NullData = string.Empty;
|
||||
_Logistics = new(this);
|
||||
if (_FileParameter is null)
|
||||
throw new Exception(cellInstanceConnectionName);
|
||||
if (_ModelObjectParameterDefinitions is null)
|
||||
throw new Exception(cellInstanceConnectionName);
|
||||
if (!_IsDuplicator)
|
||||
throw new Exception(cellInstanceConnectionName);
|
||||
_OpenInsightMetrologyViewerAPI = GetPropertyValue(cellInstanceConnectionName, modelObjectParameters, "OpenInsight.MetrologyViewerAPI");
|
||||
}
|
||||
|
||||
void IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, Exception exception)
|
||||
{
|
||||
bool isErrorFile = exception is not null;
|
||||
if (!isErrorFile && !string.IsNullOrEmpty(_Logistics.ReportFullPath))
|
||||
{
|
||||
FileInfo fileInfo = new(_Logistics.ReportFullPath);
|
||||
if (fileInfo.Exists && fileInfo.LastWriteTime < fileInfo.CreationTime)
|
||||
File.SetLastWriteTime(_Logistics.ReportFullPath, fileInfo.CreationTime);
|
||||
}
|
||||
Move(extractResults);
|
||||
}
|
||||
|
||||
void IFileRead.WaitForThread() => WaitForThread(thread: null, threadExceptions: null);
|
||||
|
||||
string IFileRead.GetEventDescription()
|
||||
{
|
||||
string result = _Description.GetEventDescription();
|
||||
return result;
|
||||
}
|
||||
|
||||
List<string> IFileRead.GetHeaderNames()
|
||||
{
|
||||
List<string> results = _Description.GetHeaderNames();
|
||||
return results;
|
||||
}
|
||||
|
||||
string[] IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, string to, string from, string resolvedFileLocation, Exception exception)
|
||||
{
|
||||
string[] results = Move(extractResults, to, from, resolvedFileLocation, exception);
|
||||
return results;
|
||||
}
|
||||
|
||||
JsonProperty[] IFileRead.GetDefault()
|
||||
{
|
||||
JsonProperty[] results = _Description.GetDefault(this, _Logistics);
|
||||
return results;
|
||||
}
|
||||
|
||||
Dictionary<string, string> IFileRead.GetDisplayNamesJsonElement()
|
||||
{
|
||||
Dictionary<string, string> results = _Description.GetDisplayNamesJsonElement(this);
|
||||
return results;
|
||||
}
|
||||
|
||||
List<IDescription> IFileRead.GetDescriptions(IFileRead fileRead, List<Test> tests, IProcessData processData)
|
||||
{
|
||||
List<IDescription> results = _Description.GetDescriptions(fileRead, _Logistics, tests, processData);
|
||||
return results;
|
||||
}
|
||||
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.GetExtractResult(string reportFullPath, string eventName)
|
||||
{
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
|
||||
if (string.IsNullOrEmpty(eventName))
|
||||
throw new Exception();
|
||||
_ReportFullPath = reportFullPath;
|
||||
DateTime dateTime = DateTime.Now;
|
||||
results = GetExtractResult(reportFullPath, dateTime);
|
||||
if (results.Item3 is null)
|
||||
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(results.Item1, Array.Empty<Test>(), JsonSerializer.Deserialize<JsonElement[]>("[]"), results.Item4);
|
||||
if (results.Item3.Length > 0 && _IsEAFHosted)
|
||||
WritePDSF(this, results.Item3);
|
||||
UpdateLastTicksDuration(DateTime.Now.Ticks - dateTime.Ticks);
|
||||
return results;
|
||||
}
|
||||
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.ReExtract()
|
||||
{
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
|
||||
List<string> headerNames = _Description.GetHeaderNames();
|
||||
Dictionary<string, string> keyValuePairs = _Description.GetDisplayNamesJsonElement(this);
|
||||
results = ReExtract(this, headerNames, keyValuePairs);
|
||||
return results;
|
||||
}
|
||||
|
||||
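// Posts the run to the OpenInsight Metrology Viewer API and caches the response JSON in _StaticRuns, keyed by sequence, for the attachments handler.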
#pragma warning disable IDE0060
|
||||
private void SendData(DateTime dateTime, List<json.Description> descriptions)
|
||||
#pragma warning restore IDE0060
|
||||
{
|
||||
WSRequest wsRequest = new(this, _Logistics, descriptions);
|
||||
(string json, WS.Results wsResults) = WS.SendData(_OpenInsightMetrologyViewerAPI, wsRequest);
|
||||
if (!wsResults.Success)
|
||||
throw new Exception(wsResults.ToString());
|
||||
_Log.Debug(wsResults.HeaderID);
|
||||
lock (_StaticRuns)
|
||||
{
|
||||
if (!_StaticRuns.ContainsKey(_Logistics.Sequence))
|
||||
_StaticRuns.Add(_Logistics.Sequence, new());
|
||||
_StaticRuns[_Logistics.Sequence].Add(json);
|
||||
}
|
||||
}
|
||||
|
||||
private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
|
||||
{
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
|
||||
Tuple<string, string[], string[]> pdsf = ProcessDataStandardFormat.GetLogisticsColumnsAndBody(reportFullPath);
|
||||
_Logistics = new Logistics(reportFullPath, pdsf.Item1);
|
||||
SetFileParameterLotIDToLogisticsMID();
|
||||
JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(pdsf);
|
||||
List<json.Description> descriptions = json.ProcessData.GetDescriptions(jsonElements);
|
||||
Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
|
||||
if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
|
||||
SendData(dateTime, descriptions);
|
||||
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(pdsf.Item1, tests, jsonElements, new List<FileInfo>());
|
||||
return results;
|
||||
}
|
||||
|
||||
}
|
@ -0,0 +1,96 @@
|
||||
using Adaptation.Shared;
|
||||
using Adaptation.Shared.Metrology;
|
||||
using Adaptation.Shared.Properties;
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Text.Json;
|
||||
|
||||
namespace Adaptation.FileHandlers.OpenInsightMetrologyViewer;
|
||||
|
||||
public class WSRequest
|
||||
{
|
||||
public bool SentToMetrology { get; set; }
|
||||
public bool SentToSPC { get; set; }
|
||||
//
|
||||
public long Id { get; set; }
|
||||
public string CellName { get; set; }
|
||||
public string Date { get; set; }
|
||||
public string FilePath { get; set; }
|
||||
public string Layer { get; set; }
|
||||
public string Operator { get; set; }
|
||||
public string PSN { get; set; }
|
||||
public string RDS { get; set; }
|
||||
public string Reactor { get; set; }
|
||||
public string Recipe { get; set; }
|
||||
public string Title { get; set; }
|
||||
public string UniqueId { get; set; }
|
||||
public string Zone { get; set; }
|
||||
public List<json.Detail> Details { get; protected set; }
|
||||
|
||||
[Obsolete("For json")] public WSRequest() { }
|
||||
|
||||
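// Copies the header fields from the first description and adds one Detail per description; falls back to the sequence date and requires a UniqueId when details exist.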
#pragma warning disable IDE0060
|
||||
internal WSRequest(IFileRead fileRead, Logistics logistics, List<json.Description> descriptions)
|
||||
#pragma warning restore IDE0060
|
||||
{
|
||||
Id = 0;
|
||||
FilePath = string.Empty;
|
||||
CellName = logistics.MesEntity;
|
||||
if (descriptions[0] is not json.Description x)
|
||||
throw new Exception();
|
||||
Details = new List<json.Detail>();
|
||||
//Header
|
||||
{
|
||||
Date = x.Date;
|
||||
Layer = x.Layer;
|
||||
Operator = x.Employee;
|
||||
PSN = x.PSN;
|
||||
RDS = x.RDS;
|
||||
Reactor = x.Reactor;
|
||||
Recipe = x.Recipe;
|
||||
UniqueId = x.UniqueId;
|
||||
Zone = x.Zone;
|
||||
}
|
||||
json.Detail detail;
|
||||
foreach (json.Description description in descriptions)
|
||||
{
|
||||
detail = new json.Detail
|
||||
{
|
||||
HeaderUniqueId = description.HeaderUniqueId,
|
||||
UniqueId = description.UniqueId,
|
||||
Depth = description.Depth,
|
||||
Raw = description.Raw,
|
||||
Edited = description.Edited,
|
||||
Resistivity = description.Resistivity,
|
||||
CD = description.CD,
|
||||
};
|
||||
Details.Add(detail);
|
||||
}
|
||||
Date ??= logistics.DateTimeFromSequence.ToString();
|
||||
if (UniqueId is null && Details.Any())
|
||||
throw new Exception();
|
||||
}
|
||||
|
||||
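// Re-posts the request when no cached JSON is supplied, then attaches the single *.data.json file from the match directory to the returned header record.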
#pragma warning disable IDE0060
|
||||
internal static void PostOpenInsightMetrologyViewerAttachments(IFileRead fileRead, Logistics logistics, string openInsightMetrologyViewerAPI, DateTime dateTime, string json, List<json.Description> descriptions, string matchDirectory)
|
||||
#pragma warning restore IDE0060
|
||||
{
|
||||
if (string.IsNullOrEmpty(json))
|
||||
{
|
||||
WSRequest wsRequest = new(fileRead, logistics, descriptions);
|
||||
(json, WS.Results wsResults) = WS.SendData(openInsightMetrologyViewerAPI, wsRequest);
|
||||
if (!wsResults.Success)
|
||||
throw new Exception(wsResults.ToString());
|
||||
}
|
||||
WS.Results metrologyWSRequest = JsonSerializer.Deserialize<WS.Results>(json, new JsonSerializerOptions { PropertyNameCaseInsensitive = true });
|
||||
long wsResultsHeaderID = metrologyWSRequest.HeaderID;
|
||||
string[] jsonFiles = Directory.GetFiles(matchDirectory, "*.data.json", SearchOption.TopDirectoryOnly);
|
||||
if (jsonFiles.Length != 1)
|
||||
throw new Exception($"Invalid source file count for <{wsResultsHeaderID}>!{Environment.NewLine}{json}");
|
||||
List<WS.Attachment> headerAttachments = new() { new WS.Attachment(descriptions[0].HeaderUniqueId, "Data.json", jsonFiles.First()) };
|
||||
WS.AttachFiles(openInsightMetrologyViewerAPI, wsResultsHeaderID, headerAttachments, dataAttachments: null);
|
||||
}
|
||||
|
||||
}
|
@ -0,0 +1,149 @@
|
||||
using Adaptation.Eaf.Management.ConfigurationData.CellAutomation;
|
||||
using Adaptation.Ifx.Eaf.EquipmentConnector.File.Configuration;
|
||||
using Adaptation.Shared;
|
||||
using Adaptation.Shared.Duplicator;
|
||||
using Adaptation.Shared.Methods;
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Text.Json;
|
||||
|
||||
namespace Adaptation.FileHandlers.OpenInsightMetrologyViewerAttachments;
|
||||
|
||||
public class FileRead : Shared.FileRead, IFileRead
|
||||
{
|
||||
|
||||
private readonly string _JobIdParentDirectory;
|
||||
|
||||
private readonly string _OpenInsightMetrologyViewerAPI;
|
||||
|
||||
public FileRead(ISMTP smtp, Dictionary<string, string> fileParameter, string cellInstanceName, int? connectionCount, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList<ModelObjectParameterDefinition> modelObjectParameters, string equipmentDictionaryName, Dictionary<string, List<long>> dummyRuns, Dictionary<long, List<string>> staticRuns, bool useCyclicalForDescription, bool isEAFHosted) :
|
||||
base(new Description(), false, smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null)
|
||||
{
|
||||
_MinFileLength = 10;
|
||||
_NullData = string.Empty;
|
||||
_Logistics = new(this);
|
||||
if (_FileParameter is null)
|
||||
throw new Exception(cellInstanceConnectionName);
|
||||
if (_ModelObjectParameterDefinitions is null)
|
||||
throw new Exception(cellInstanceConnectionName);
|
||||
if (!_IsDuplicator)
|
||||
throw new Exception(cellInstanceConnectionName);
|
||||
_JobIdParentDirectory = GetJobIdParentDirectory(_FileConnectorConfiguration.SourceFileLocation);
|
||||
_OpenInsightMetrologyViewerAPI = GetPropertyValue(cellInstanceConnectionName, modelObjectParameters, "OpenInsight.MetrologyViewerAPI");
|
||||
}
|
||||
|
||||
void IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, Exception exception)
|
||||
{
|
||||
bool isErrorFile = exception is not null;
|
||||
if (!isErrorFile && !string.IsNullOrEmpty(_Logistics.ReportFullPath))
|
||||
{
|
||||
FileInfo fileInfo = new(_Logistics.ReportFullPath);
|
||||
if (fileInfo.Exists && fileInfo.LastWriteTime < fileInfo.CreationTime)
|
||||
File.SetLastWriteTime(_Logistics.ReportFullPath, fileInfo.CreationTime);
|
||||
}
|
||||
Move(extractResults);
|
||||
}
|
||||
|
||||
void IFileRead.WaitForThread() => WaitForThread(thread: null, threadExceptions: null);
|
||||
|
||||
string IFileRead.GetEventDescription()
|
||||
{
|
||||
string result = _Description.GetEventDescription();
|
||||
return result;
|
||||
}
|
||||
|
||||
List<string> IFileRead.GetHeaderNames()
|
||||
{
|
||||
List<string> results = _Description.GetHeaderNames();
|
||||
return results;
|
||||
}
|
||||
|
||||
string[] IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, string to, string from, string resolvedFileLocation, Exception exception)
|
||||
{
|
||||
string[] results = Move(extractResults, to, from, resolvedFileLocation, exception);
|
||||
return results;
|
||||
}
|
||||
|
||||
JsonProperty[] IFileRead.GetDefault()
|
||||
{
|
||||
JsonProperty[] results = _Description.GetDefault(this, _Logistics);
|
||||
return results;
|
||||
}
|
||||
|
||||
Dictionary<string, string> IFileRead.GetDisplayNamesJsonElement()
|
||||
{
|
||||
Dictionary<string, string> results = _Description.GetDisplayNamesJsonElement(this);
|
||||
return results;
|
||||
}
|
||||
|
||||
List<IDescription> IFileRead.GetDescriptions(IFileRead fileRead, List<Test> tests, IProcessData processData)
|
||||
{
|
||||
List<IDescription> results = _Description.GetDescriptions(fileRead, _Logistics, tests, processData);
|
||||
return results;
|
||||
}
|
||||
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.GetExtractResult(string reportFullPath, string eventName)
|
||||
{
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
|
||||
if (string.IsNullOrEmpty(eventName))
|
||||
throw new Exception();
|
||||
_ReportFullPath = reportFullPath;
|
||||
DateTime dateTime = DateTime.Now;
|
||||
results = GetExtractResult(reportFullPath, dateTime);
|
||||
if (results.Item3 is null)
|
||||
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(results.Item1, Array.Empty<Test>(), JsonSerializer.Deserialize<JsonElement[]>("[]"), results.Item4);
|
||||
if (results.Item3.Length > 0 && _IsEAFHosted)
|
||||
WritePDSF(this, results.Item3);
|
||||
UpdateLastTicksDuration(DateTime.Now.Ticks - dateTime.Ticks);
|
||||
return results;
|
||||
}
|
||||
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.ReExtract()
|
||||
{
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
|
||||
List<string> headerNames = _Description.GetHeaderNames();
|
||||
Dictionary<string, string> keyValuePairs = _Description.GetDisplayNamesJsonElement(this);
|
||||
results = ReExtract(this, headerNames, keyValuePairs);
|
||||
return results;
|
||||
}
|
||||
|
||||
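// Finds the in-process match directory for this job, pulls the cached response JSON for the sequence out of _StaticRuns (empty when absent), and forwards it to the attachment helper.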
#pragma warning disable IDE0060
|
||||
private void PostOpenInsightMetrologyViewerAttachments(string reportFullPath, DateTime dateTime, List<json.Description> descriptions)
|
||||
#pragma warning restore IDE0060
|
||||
{
|
||||
string jobIdDirectory = Path.Combine(_JobIdParentDirectory, _Logistics.JobID);
|
||||
if (!Directory.Exists(jobIdDirectory))
|
||||
_ = Directory.CreateDirectory(jobIdDirectory);
|
||||
string json;
|
||||
string[] matchDirectories = GetInProcessDirectory(jobIdDirectory);
|
||||
if (!_StaticRuns.ContainsKey(_Logistics.Sequence))
|
||||
json = string.Empty;
|
||||
else
|
||||
{
|
||||
if (_StaticRuns[_Logistics.Sequence].Count != 1)
|
||||
throw new Exception($"{nameof(_StaticRuns)} has too many values for {_Logistics.Sequence}!");
|
||||
json = _StaticRuns[_Logistics.Sequence][0];
|
||||
lock (_StaticRuns)
|
||||
_ = _StaticRuns.Remove(_Logistics.Sequence);
|
||||
}
|
||||
OpenInsightMetrologyViewer.WSRequest.PostOpenInsightMetrologyViewerAttachments(this, _Logistics, _OpenInsightMetrologyViewerAPI, dateTime, json, descriptions, matchDirectories[0]);
|
||||
}
|
||||
|
||||
private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
|
||||
{
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
|
||||
Tuple<string, string[], string[]> pdsf = ProcessDataStandardFormat.GetLogisticsColumnsAndBody(reportFullPath);
|
||||
_Logistics = new Logistics(reportFullPath, pdsf.Item1);
|
||||
SetFileParameterLotIDToLogisticsMID();
|
||||
JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(pdsf);
|
||||
List<json.Description> descriptions = json.ProcessData.GetDescriptions(jsonElements);
|
||||
Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
|
||||
if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
|
||||
PostOpenInsightMetrologyViewerAttachments(reportFullPath, dateTime, descriptions);
|
||||
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(pdsf.Item1, tests, jsonElements, new List<FileInfo>());
|
||||
return results;
|
||||
}
|
||||
|
||||
}
|
164
Adaptation/FileHandlers/Processed/FileRead.cs
Normal file
@ -0,0 +1,164 @@
|
||||
using Adaptation.Eaf.Management.ConfigurationData.CellAutomation;
|
||||
using Adaptation.Ifx.Eaf.EquipmentConnector.File.Configuration;
|
||||
using Adaptation.Shared;
|
||||
using Adaptation.Shared.Duplicator;
|
||||
using Adaptation.Shared.Methods;
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Text.Json;
|
||||
|
||||
namespace Adaptation.FileHandlers.Processed;
|
||||
|
||||
public class FileRead : Shared.FileRead, IFileRead
|
||||
{
|
||||
|
||||
private readonly string _JobIdParentDirectory;
|
||||
private readonly string _JobIdProcessParentDirectory;
|
||||
|
||||
public FileRead(ISMTP smtp, Dictionary<string, string> fileParameter, string cellInstanceName, int? connectionCount, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList<ModelObjectParameterDefinition> modelObjectParameters, string equipmentDictionaryName, Dictionary<string, List<long>> dummyRuns, Dictionary<long, List<string>> staticRuns, bool useCyclicalForDescription, bool isEAFHosted) :
|
||||
base(new Description(), false, smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null)
|
||||
{
|
||||
_MinFileLength = 10;
|
||||
_NullData = string.Empty;
|
||||
_Logistics = new(this);
|
||||
if (_FileParameter is null)
|
||||
throw new Exception(cellInstanceConnectionName);
|
||||
if (_ModelObjectParameterDefinitions is null)
|
||||
throw new Exception(cellInstanceConnectionName);
|
||||
if (!_IsDuplicator)
|
||||
throw new Exception(cellInstanceConnectionName);
|
||||
_JobIdParentDirectory = GetJobIdParentDirectory(_FileConnectorConfiguration.SourceFileLocation);
|
||||
_JobIdProcessParentDirectory = GetJobIdParentDirectory(_FileConnectorConfiguration.TargetFileLocation);
|
||||
}
|
||||
|
||||
void IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, Exception exception)
|
||||
{
|
||||
bool isErrorFile = exception is not null;
|
||||
if (!isErrorFile && !string.IsNullOrEmpty(_Logistics.ReportFullPath))
|
||||
{
|
||||
FileInfo fileInfo = new(_Logistics.ReportFullPath);
|
||||
if (fileInfo.Exists && fileInfo.LastWriteTime < fileInfo.CreationTime)
|
||||
File.SetLastWriteTime(_Logistics.ReportFullPath, fileInfo.CreationTime);
|
||||
}
|
||||
Move(extractResults);
|
||||
}
|
||||
|
||||
void IFileRead.WaitForThread() => WaitForThread(thread: null, threadExceptions: null);
|
||||
|
||||
string IFileRead.GetEventDescription()
|
||||
{
|
||||
string result = _Description.GetEventDescription();
|
||||
return result;
|
||||
}
|
||||
|
||||
List<string> IFileRead.GetHeaderNames()
|
||||
{
|
||||
List<string> results = _Description.GetHeaderNames();
|
||||
return results;
|
||||
}
|
||||
|
||||
string[] IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, string to, string from, string resolvedFileLocation, Exception exception)
|
||||
{
|
||||
string[] results = Move(extractResults, to, from, resolvedFileLocation, exception);
|
||||
return results;
|
||||
}
|
||||
|
||||
JsonProperty[] IFileRead.GetDefault()
|
||||
{
|
||||
JsonProperty[] results = _Description.GetDefault(this, _Logistics);
|
||||
return results;
|
||||
}
|
||||
|
||||
Dictionary<string, string> IFileRead.GetDisplayNamesJsonElement()
|
||||
{
|
||||
Dictionary<string, string> results = _Description.GetDisplayNamesJsonElement(this);
|
||||
return results;
|
||||
}
|
||||
|
||||
List<IDescription> IFileRead.GetDescriptions(IFileRead fileRead, List<Test> tests, IProcessData processData)
|
||||
{
|
||||
List<IDescription> results = _Description.GetDescriptions(fileRead, _Logistics, tests, processData);
|
||||
return results;
|
||||
}
|
||||
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.GetExtractResult(string reportFullPath, string eventName)
|
||||
{
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
|
||||
if (string.IsNullOrEmpty(eventName))
|
||||
throw new Exception();
|
||||
_ReportFullPath = reportFullPath;
|
||||
DateTime dateTime = DateTime.Now;
|
||||
results = GetExtractResult(reportFullPath, dateTime);
|
||||
if (results.Item3 is null)
|
||||
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(results.Item1, Array.Empty<Test>(), JsonSerializer.Deserialize<JsonElement[]>("[]"), results.Item4);
|
||||
if (results.Item3.Length > 0 && _IsEAFHosted)
|
||||
WritePDSF(this, results.Item3);
|
||||
UpdateLastTicksDuration(DateTime.Now.Ticks - dateTime.Ticks);
|
||||
return results;
|
||||
}
|
||||
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.ReExtract()
|
||||
{
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
|
||||
List<string> headerNames = _Description.GetHeaderNames();
|
||||
Dictionary<string, string> keyValuePairs = _Description.GetDisplayNamesJsonElement(this);
|
||||
results = ReExtract(this, headerNames, keyValuePairs);
|
||||
return results;
|
||||
}
|
||||
|
||||
#pragma warning disable IDE0060
|
||||
private void DirectoryMove(string reportFullPath, DateTime dateTime, List<json.Description> descriptions)
|
||||
#pragma warning restore IDE0060
|
||||
{
|
||||
FileInfo fileInfo = new(reportFullPath);
|
||||
string logisticsSequence = _Logistics.Sequence.ToString();
|
||||
string jobIdDirectory = Path.Combine(_JobIdParentDirectory, _Logistics.JobID);
|
||||
if (!Directory.Exists(jobIdDirectory))
|
||||
_ = Directory.CreateDirectory(jobIdDirectory);
|
||||
string[] matchDirectories = GetInProcessDirectory(jobIdDirectory);
|
||||
if (matchDirectories.Length != 1)
|
||||
throw new Exception("Didn't find directory by logistics sequence");
|
||||
if (fileInfo.Exists && fileInfo.LastWriteTime < fileInfo.CreationTime)
|
||||
File.SetLastWriteTime(reportFullPath, fileInfo.CreationTime);
|
||||
OpenInsightMetrologyViewer.WSRequest wsRequest = new(this, _Logistics, descriptions);
|
||||
JsonSerializerOptions jsonSerializerOptions = new() { WriteIndented = true };
|
||||
string json = JsonSerializer.Serialize(wsRequest, wsRequest.GetType(), jsonSerializerOptions);
|
||||
string directoryName = $"{Path.GetFileName(matchDirectories[0]).Split(new string[] { logisticsSequence }, StringSplitOptions.None)[0]}{_Logistics.DateTimeFromSequence:yyyy-MM-dd_hh;mm_tt_}{DateTime.Now.Ticks - _Logistics.Sequence}";
|
||||
string destinationJobIdDirectory = Path.Combine(_JobIdProcessParentDirectory, _Logistics.JobID, directoryName);
|
||||
string sequenceDirectory = Path.Combine(destinationJobIdDirectory, logisticsSequence);
|
||||
string jsonFileName = Path.Combine(sequenceDirectory, $"{Path.GetFileNameWithoutExtension(reportFullPath)}.json");
|
||||
Directory.Move(matchDirectories[0], destinationJobIdDirectory);
|
||||
if (!Directory.Exists(sequenceDirectory))
|
||||
_ = Directory.CreateDirectory(sequenceDirectory);
|
||||
File.Copy(reportFullPath, Path.Combine(sequenceDirectory, Path.GetFileName(reportFullPath)), overwrite: true);
|
||||
File.WriteAllText(jsonFileName, json);
|
||||
}
|
||||
|
||||
private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
|
||||
{
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
|
||||
Tuple<string, string[], string[]> pdsf = ProcessDataStandardFormat.GetLogisticsColumnsAndBody(reportFullPath);
|
||||
_Logistics = new Logistics(reportFullPath, pdsf.Item1);
|
||||
SetFileParameterLotIDToLogisticsMID();
|
||||
JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(pdsf);
|
||||
List<json.Description> descriptions = json.ProcessData.GetDescriptions(jsonElements);
|
||||
Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
|
||||
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(pdsf.Item1, tests, jsonElements, new List<FileInfo>());
|
||||
if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
|
||||
DirectoryMove(reportFullPath, dateTime, descriptions);
|
||||
else if (!_IsEAFHosted)
|
||||
{
|
||||
OpenInsightMetrologyViewer.WSRequest wsRequest = new(this, _Logistics, descriptions);
|
||||
JsonSerializerOptions jsonSerializerOptions = new() { WriteIndented = true };
|
||||
string json = JsonSerializer.Serialize(wsRequest, wsRequest.GetType(), jsonSerializerOptions);
|
||||
string jsonFileName = Path.ChangeExtension(reportFullPath, ".json");
|
||||
string historicalText = File.ReadAllText(jsonFileName);
|
||||
if (json != historicalText)
|
||||
throw new Exception("File doesn't match historical!");
|
||||
}
|
||||
return results;
|
||||
}
|
||||
|
||||
}
|
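
For reference, a minimal sketch of how DirectoryMove above composes the destination directory name. It assumes Logistics.Sequence holds a DateTime.Ticks value (so DateTimeFromSequence is simply new DateTime(sequence)); the format string and the trailing tick delta are taken from the method, while the helper name is hypothetical:

using System;

internal static class DirectoryNameSketch
{
    internal static string GetDirectoryName(string prefix, long sequence)
    {
        // Assumption: Sequence encodes a DateTime as ticks.
        DateTime dateTimeFromSequence = new(sequence);
        // Matches DirectoryMove above: the suffix is the age of the run in ticks.
        long ageInTicks = DateTime.Now.Ticks - sequence;
        return $"{prefix}{dateTimeFromSequence:yyyy-MM-dd_hh;mm_tt_}{ageInTicks}";
    }
}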
132
Adaptation/FileHandlers/SPaCe/FileRead.cs
Normal file
132
Adaptation/FileHandlers/SPaCe/FileRead.cs
Normal file
@ -0,0 +1,132 @@
|
||||
using Adaptation.Eaf.Management.ConfigurationData.CellAutomation;
|
||||
using Adaptation.Ifx.Eaf.EquipmentConnector.File.Configuration;
|
||||
using Adaptation.Shared;
|
||||
using Adaptation.Shared.Duplicator;
|
||||
using Adaptation.Shared.Methods;
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Text.Json;
|
||||
|
||||
namespace Adaptation.FileHandlers.SPaCe;
|
||||
|
||||
public class FileRead : Shared.FileRead, IFileRead
|
||||
{
|
||||
|
||||
public FileRead(ISMTP smtp, Dictionary<string, string> fileParameter, string cellInstanceName, int? connectionCount, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList<ModelObjectParameterDefinition> modelObjectParameters, string equipmentDictionaryName, Dictionary<string, List<long>> dummyRuns, Dictionary<long, List<string>> staticRuns, bool useCyclicalForDescription, bool isEAFHosted) :
|
||||
base(new Description(), false, smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null)
|
||||
{
|
||||
_MinFileLength = 10;
|
||||
_NullData = string.Empty;
|
||||
_Logistics = new(this);
|
||||
if (_FileParameter is null)
|
||||
throw new Exception(cellInstanceConnectionName);
|
||||
if (_ModelObjectParameterDefinitions is null)
|
||||
throw new Exception(cellInstanceConnectionName);
|
||||
if (!_IsDuplicator)
|
||||
throw new Exception(cellInstanceConnectionName);
|
||||
}
|
||||
|
||||
void IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, Exception exception)
|
||||
{
|
||||
bool isErrorFile = exception is not null;
|
||||
if (!isErrorFile && !string.IsNullOrEmpty(_Logistics.ReportFullPath))
|
||||
{
|
||||
FileInfo fileInfo = new(_Logistics.ReportFullPath);
|
||||
if (fileInfo.Exists && fileInfo.LastWriteTime < fileInfo.CreationTime)
|
||||
File.SetLastWriteTime(_Logistics.ReportFullPath, fileInfo.CreationTime);
|
||||
}
|
||||
Move(extractResults);
|
||||
}
|
||||
|
||||
void IFileRead.WaitForThread() => WaitForThread(thread: null, threadExceptions: null);
|
||||
|
||||
string IFileRead.GetEventDescription()
|
||||
{
|
||||
string result = _Description.GetEventDescription();
|
||||
return result;
|
||||
}
|
||||
|
||||
List<string> IFileRead.GetHeaderNames()
|
||||
{
|
||||
List<string> results = _Description.GetHeaderNames();
|
||||
return results;
|
||||
}
|
||||
|
||||
string[] IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, string to, string from, string resolvedFileLocation, Exception exception)
|
||||
{
|
||||
string[] results = Move(extractResults, to, from, resolvedFileLocation, exception);
|
||||
return results;
|
||||
}
|
||||
|
||||
JsonProperty[] IFileRead.GetDefault()
|
||||
{
|
||||
JsonProperty[] results = _Description.GetDefault(this, _Logistics);
|
||||
return results;
|
||||
}
|
||||
|
||||
Dictionary<string, string> IFileRead.GetDisplayNamesJsonElement()
|
||||
{
|
||||
Dictionary<string, string> results = _Description.GetDisplayNamesJsonElement(this);
|
||||
return results;
|
||||
}
|
||||
|
||||
List<IDescription> IFileRead.GetDescriptions(IFileRead fileRead, List<Test> tests, IProcessData processData)
|
||||
{
|
||||
List<IDescription> results = _Description.GetDescriptions(fileRead, _Logistics, tests, processData);
|
||||
return results;
|
||||
}
|
||||
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.GetExtractResult(string reportFullPath, string eventName)
|
||||
{
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
|
||||
if (string.IsNullOrEmpty(eventName))
|
||||
throw new Exception();
|
||||
_ReportFullPath = reportFullPath;
|
||||
DateTime dateTime = DateTime.Now;
|
||||
results = GetExtractResult(reportFullPath, dateTime);
|
||||
if (results.Item3 is null)
|
||||
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(results.Item1, Array.Empty<Test>(), JsonSerializer.Deserialize<JsonElement[]>("[]"), results.Item4);
|
||||
if (results.Item3.Length > 0 && _IsEAFHosted)
|
||||
WritePDSF(this, results.Item3);
|
||||
UpdateLastTicksDuration(DateTime.Now.Ticks - dateTime.Ticks);
|
||||
return results;
|
||||
}
|
||||
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.ReExtract()
|
||||
{
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
|
||||
List<string> headerNames = _Description.GetHeaderNames();
|
||||
Dictionary<string, string> keyValuePairs = _Description.GetDisplayNamesJsonElement(this);
|
||||
results = ReExtract(this, headerNames, keyValuePairs);
|
||||
return results;
|
||||
}
|
||||
|
||||
private void FileCopy<T>(string reportFullPath, DateTime dateTime, List<T> descriptions) where T : Shared.Properties.IDescription
|
||||
{
|
||||
bool isDummyRun = false;
|
||||
string successDirectory = string.Empty;
|
||||
List<(Shared.Properties.IScopeInfo, string)> collection = new();
|
||||
string duplicateDirectory = _FileConnectorConfiguration.TargetFileLocation;
|
||||
string duplicateFile = Path.Combine(duplicateDirectory, Path.GetFileName(reportFullPath));
|
||||
File.Copy(reportFullPath, duplicateFile, overwrite: true);
|
||||
WaitForFileConsumption(dateTime, descriptions, isDummyRun, successDirectory, duplicateDirectory, collection, duplicateFile);
|
||||
}
|
||||
|
||||
private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
|
||||
{
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
|
||||
Tuple<string, string[], string[]> pdsf = ProcessDataStandardFormat.GetLogisticsColumnsAndBody(reportFullPath);
|
||||
_Logistics = new Logistics(reportFullPath, pdsf.Item1);
|
||||
SetFileParameterLotIDToLogisticsMID();
|
||||
JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(pdsf);
|
||||
List<Shared.Properties.IDescription> descriptions = GetDuplicatorDescriptions(jsonElements);
|
||||
Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
|
||||
if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
|
||||
FileCopy(reportFullPath, dateTime, descriptions);
|
||||
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(pdsf.Item1, tests, jsonElements, new List<FileInfo>());
|
||||
return results;
|
||||
}
|
||||
|
||||
}
|
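
A minimal sketch of the duplicate-then-wait step performed by FileCopy above, reduced to the file-system part (the WaitForFileConsumption call and the IScopeInfo collection are omitted; the helper name is illustrative):

using System.IO;

internal static class DuplicateSketch
{
    internal static string CopyToTarget(string reportFullPath, string targetFileLocation)
    {
        // Mirror of FileCopy: the report is copied into the duplicator target
        // location under its original file name before consumers are awaited.
        string duplicateFile = Path.Combine(targetFileLocation, Path.GetFileName(reportFullPath));
        File.Copy(reportFullPath, duplicateFile, overwrite: true);
        return duplicateFile;
    }
}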
86
Adaptation/FileHandlers/csv/CSV.cs
Normal file
86
Adaptation/FileHandlers/csv/CSV.cs
Normal file
@ -0,0 +1,86 @@
using System.IO;
using System.Linq;

namespace Adaptation.FileHandlers.csv;

public class CSV
{

#nullable enable
#pragma warning disable CA1834

    public string? FileVersion { get; set; }
    public Info? Info { get; set; }
    public Setup? Setup { get; set; }
    public LayerHeader? LayerHeader { get; set; }
    public ProfileHeader? ProfileHeader { get; set; }
    public Calibration? Calibration { get; set; }
    public RawData? RawData { get; set; }

    internal static CSV GetCSV(string path)
    {
        CSV result;
        int? endInfo = null;
        int? endSetup = null;
        int? endLayers = null;
        int? startInfo = null;
        int? startSetup = null;
        int? endProfiles = null;
        int? startLayers = null;
        int? startRawData = null;
        int? startProfiles = null;
        int? endCalibration = null;
        int? startCalibration = null;
#if NET
        string[] lines = File.ReadAllLines(path, System.Text.Encoding.Latin1); // µ³®
#else
        string[] lines = File.ReadAllLines(path, System.Text.Encoding.GetEncoding("ISO-8859-1")); // µ³®
#endif
        string? fileVersion = !lines.Any() ? null : GetFileVersion(lines.First());
        for (int i = 1; i < lines.Length; i++)
        {
            if (lines[i].StartsWith("--INFO--"))
                startInfo = i + 1;
            else if (lines[i].StartsWith("--SETUP--"))
                (endInfo, startSetup) = (i, i + 1);
            else if (lines[i].StartsWith("--LAYERS--"))
                (endSetup, startLayers) = (i, i + 1);
            else if (lines[i].StartsWith("--PROFILES--"))
                (endLayers, startProfiles) = (i, i + 1);
            else if (lines[i].StartsWith("--CALIBRATION--"))
                (endProfiles, startCalibration) = (i, i + 1);
            else if (lines[i].StartsWith("--RAWDATA--"))
                (endCalibration, startRawData) = (i, i + 1);
        }
        RawData? rawData = startRawData is null ? null : RawData.GetRawData(lines, startRawData.Value, lines.Length);
        Info? info = startInfo is null || endInfo is null ? null : Info.GetInfo(lines, startInfo.Value, endInfo.Value);
        Setup? setup = startSetup is null || endSetup is null ? null : Setup.GetSetup(lines, startSetup.Value, endSetup.Value);
        LayerHeader? layerHeader = startLayers is null || endLayers is null ? null : LayerHeader.GetLayerHeader(lines, startLayers.Value, endLayers.Value);
        ProfileHeader? profileHeader = startProfiles is null || endProfiles is null ? null : ProfileHeader.GetProfileHeader(lines, startProfiles.Value, endProfiles.Value);
        Calibration? calibration = startCalibration is null || endCalibration is null ? null : Calibration.GetCalibration(lines, startCalibration.Value, endCalibration.Value);
        result = new()
        {
            FileVersion = fileVersion,
            Info = info,
            Setup = setup,
            LayerHeader = layerHeader,
            ProfileHeader = profileHeader,
            Calibration = calibration,
            RawData = rawData,
        };
        return result;
    }

    private static string GetFileVersion(string line)
    {
        string result;
        string[] segments = line.Split(',');
        if (segments.Length < 2)
            result = string.Empty;
        else
            result = segments.Last();
        return result;
    }

}
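
A short usage sketch for GetCSV above, mirroring what ProcessData.Parse does with the result (the sample path is a placeholder, and GetCSV is internal, so the call site must live in the same assembly):

using System.Text.Json;
using Adaptation.FileHandlers.csv;

internal static class CsvUsageSketch
{
    internal static void Run()
    {
        // Placeholder path; any export in the sectioned layout parsed above will do.
        CSV csv = CSV.GetCSV(@"D:\Temp\example.csv");
        // Sections that were not found in the file come back as null.
        string json = JsonSerializer.Serialize(csv, new JsonSerializerOptions { WriteIndented = true });
        System.Console.WriteLine(json);
    }
}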
133
Adaptation/FileHandlers/csv/Calibration.cs
Normal file
133
Adaptation/FileHandlers/csv/Calibration.cs
Normal file
@ -0,0 +1,133 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using System.Text;
|
||||
|
||||
namespace Adaptation.FileHandlers.csv;
|
||||
|
||||
public class Calibration
|
||||
{
|
||||
|
||||
public string NumberOfCalibrationSets { get; set; }
|
||||
public List<DataSet> DataSets { get; set; }
|
||||
|
||||
#nullable enable
|
||||
#pragma warning disable CA1834
|
||||
|
||||
internal static Calibration? GetCalibration(string[] lines, int start, int stop)
|
||||
{
|
||||
Calibration? result;
|
||||
string first;
|
||||
DataSet dataSet;
|
||||
Position position;
|
||||
string[] segments;
|
||||
int? thirdStart = null;
|
||||
int? secondStart = null;
|
||||
List<Position> positions;
|
||||
List<string> values = new();
|
||||
List<DataSet> dataSets = new();
|
||||
StringBuilder stringBuilder = new();
|
||||
for (int i = start; i < stop; i++)
|
||||
{
|
||||
segments = lines[i].Split(new string[] { "," }, StringSplitOptions.None);
|
||||
first = segments.First();
|
||||
if (first == "DataSet:")
|
||||
{
|
||||
secondStart = i + 1;
|
||||
break;
|
||||
}
|
||||
_ = stringBuilder.Append(first).Append(",");
|
||||
if (segments.Length > 1)
|
||||
values.Add(segments[1]);
|
||||
else
|
||||
values.Add(string.Empty);
|
||||
}
|
||||
string header = "Number of Calibration Sets,";
|
||||
if (secondStart is null || stringBuilder.Length != header.Length || stringBuilder.ToString() != header)
|
||||
result = null;
|
||||
else
|
||||
{
|
||||
result = new()
|
||||
{
|
||||
NumberOfCalibrationSets = values[0],
|
||||
DataSets = dataSets,
|
||||
};
|
||||
for (int x = 0; x < int.MaxValue; x++)
|
||||
{
|
||||
values.Clear();
|
||||
_ = stringBuilder.Clear();
|
||||
if (secondStart is null)
|
||||
break;
|
||||
for (int i = secondStart.Value; i < stop; i++)
|
||||
{
|
||||
segments = lines[i].Split(new string[] { "," }, StringSplitOptions.None);
|
||||
first = segments.First();
|
||||
if (first == "Resistivity(ohm-cm)")
|
||||
{
|
||||
thirdStart = i + 1;
|
||||
break;
|
||||
}
|
||||
if (first == " Sample Set:")
|
||||
continue;
|
||||
_ = stringBuilder.Append(first).Append(",");
|
||||
if (segments.Length > 1)
|
||||
values.Add(segments[1]);
|
||||
else
|
||||
values.Add(string.Empty);
|
||||
}
|
||||
secondStart = null;
|
||||
header = "Operator,Date & Time,Finish,Orientation,North Probe ID,South Probe ID,Polarity,Contact Radius (µm),Probe Spacing (µm),Load (gm),X Step (µm),Name,Plate ID,Type,Points per Sample,Number of Pairs,";
|
||||
if (thirdStart is null || stringBuilder.Length != header.Length || stringBuilder.ToString() != header)
|
||||
result = null;
|
||||
else
|
||||
{
|
||||
positions = new();
|
||||
dataSet = new()
|
||||
{
|
||||
Operator = values[0],
|
||||
DateTime = values[1],
|
||||
Finish = values[2],
|
||||
Orientation = values[3],
|
||||
NorthProbeID = values[4],
|
||||
SouthProbeID = values[5],
|
||||
Polarity = values[6],
|
||||
ContactRadius = values[7],
|
||||
ProbeSpacing = values[8],
|
||||
Load = values[9],
|
||||
XStep = values[10],
|
||||
Name = values[11],
|
||||
PlateId = values[12],
|
||||
Type = values[13],
|
||||
PointsPerSample = values[14],
|
||||
NumberOfPairs = values[15],
|
||||
Positions = positions,
|
||||
};
|
||||
dataSets.Add(dataSet);
|
||||
for (int i = thirdStart.Value; i < stop; i++)
|
||||
{
|
||||
segments = lines[i].Split(new string[] { "," }, StringSplitOptions.None);
|
||||
first = segments.First();
|
||||
if (first == "DataSet:")
|
||||
{
|
||||
secondStart = i + 1;
|
||||
break;
|
||||
}
|
||||
if (segments.Length < 5)
|
||||
continue;
|
||||
position = new()
|
||||
{
|
||||
Resistivity = segments[0],
|
||||
Resistance = segments[1],
|
||||
PercentStandardDeviation = segments[2],
|
||||
Number = segments[3],
|
||||
Name = segments[4],
|
||||
};
|
||||
positions.Add(position);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
}
|
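
The same key/value scan appears in Calibration above and again in Info, Setup, LayerHeader, ProfileHeader and RawData below: accumulate the first column of each row until a sentinel row, compare the accumulated keys against an expected header, and return null on mismatch. A condensed sketch of that pattern, with the sentinel and expected header passed in as parameters (the names here are illustrative, not part of the handlers):

#nullable enable
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;

internal static class SectionScanSketch
{
    // Walks lines[start..stop), collecting the value column until the sentinel row,
    // and returns null when the accumulated keys do not match the expected header.
    internal static List<string>? Scan(string[] lines, int start, int stop, string sentinel, string expectedHeader, out int? nextStart)
    {
        nextStart = null;
        List<string> values = new();
        StringBuilder keys = new();
        for (int i = start; i < stop; i++)
        {
            string[] segments = lines[i].Split(new string[] { "," }, StringSplitOptions.None);
            string first = segments.First();
            if (first == sentinel)
            {
                nextStart = i + 1;
                break;
            }
            _ = keys.Append(first).Append(',');
            values.Add(segments.Length > 1 ? segments[1] : string.Empty);
        }
        return nextStart is null || keys.ToString() != expectedHeader ? null : values;
    }
}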
27
Adaptation/FileHandlers/csv/DataSet.cs
Normal file
27
Adaptation/FileHandlers/csv/DataSet.cs
Normal file
@ -0,0 +1,27 @@
using System.Collections.Generic;

namespace Adaptation.FileHandlers.csv;

public class DataSet
{

    public string Operator { get; set; }
    public string DateTime { get; set; }
    public string Finish { get; set; }
    public string Orientation { get; set; }
    public string NorthProbeID { get; set; }
    public string SouthProbeID { get; set; }
    public string Polarity { get; set; }
    public string ContactRadius { get; set; }
    public string ProbeSpacing { get; set; }
    public string Load { get; set; }
    public string XStep { get; set; }
    // public string SampleSet { get; set; }
    public string Name { get; set; }
    public string PlateId { get; set; }
    public string Type { get; set; }
    public string PointsPerSample { get; set; }
    public string NumberOfPairs { get; set; }
    public List<Position> Positions { get; set; }

}
194
Adaptation/FileHandlers/csv/Description.cs
Normal file
194
Adaptation/FileHandlers/csv/Description.cs
Normal file
@ -0,0 +1,194 @@
|
||||
using Adaptation.Shared;
|
||||
using Adaptation.Shared.Methods;
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using System.Text.Json;
|
||||
|
||||
namespace Adaptation.FileHandlers.csv;
|
||||
|
||||
public class Description : IDescription, Shared.Properties.IDescription
|
||||
{
|
||||
|
||||
public int Test { get; set; }
|
||||
public int Count { get; set; }
|
||||
public int Index { get; set; }
|
||||
//
|
||||
public string EventName { get; set; }
|
||||
public string NullData { get; set; }
|
||||
public string JobID { get; set; }
|
||||
public string Sequence { get; set; }
|
||||
public string MesEntity { get; set; }
|
||||
public string ReportFullPath { get; set; }
|
||||
public string ProcessJobID { get; set; }
|
||||
public string MID { get; set; }
|
||||
//
|
||||
public string RDS { get; set; }
|
||||
//
|
||||
public string Path { get; set; }
|
||||
|
||||
string IDescription.GetEventDescription() => "File Has been read and parsed";
|
||||
|
||||
List<string> IDescription.GetNames(IFileRead fileRead, Logistics logistics)
|
||||
{
|
||||
List<string> results = new();
|
||||
IDescription description = GetDefault(fileRead, logistics);
|
||||
string json = JsonSerializer.Serialize(description, description.GetType());
|
||||
object @object = JsonSerializer.Deserialize<object>(json);
|
||||
if (@object is not JsonElement jsonElement)
|
||||
throw new Exception();
|
||||
foreach (JsonProperty jsonProperty in jsonElement.EnumerateObject())
|
||||
results.Add(jsonProperty.Name);
|
||||
return results;
|
||||
}
|
||||
|
||||
List<string> IDescription.GetDetailNames()
|
||||
{
|
||||
List<string> results = new()
|
||||
{
|
||||
nameof(Layer),
|
||||
nameof(RDS),
|
||||
};
|
||||
return results;
|
||||
}
|
||||
|
||||
List<string> IDescription.GetHeaderNames()
|
||||
{
|
||||
List<string> results = new()
|
||||
{
|
||||
nameof(RDS),
|
||||
};
|
||||
return results;
|
||||
}
|
||||
|
||||
IDescription IDescription.GetDisplayNames()
|
||||
{
|
||||
Description result = GetDisplayNames();
|
||||
return result;
|
||||
}
|
||||
|
||||
List<string> IDescription.GetParameterNames()
|
||||
{
|
||||
List<string> results = new()
|
||||
{ };
|
||||
return results;
|
||||
}
|
||||
|
||||
JsonProperty[] IDescription.GetDefault(IFileRead fileRead, Logistics logistics)
|
||||
{
|
||||
JsonProperty[] results;
|
||||
IDescription description = GetDefault(fileRead, logistics);
|
||||
string json = JsonSerializer.Serialize(description, description.GetType());
|
||||
object @object = JsonSerializer.Deserialize<object>(json);
|
||||
results = ((JsonElement)@object).EnumerateObject().ToArray();
|
||||
return results;
|
||||
}
|
||||
|
||||
List<string> IDescription.GetPairedParameterNames()
|
||||
{
|
||||
List<string> results = new();
|
||||
return results;
|
||||
}
|
||||
|
||||
List<string> IDescription.GetIgnoreParameterNames(Test test)
|
||||
{
|
||||
List<string> results = new();
|
||||
return results;
|
||||
}
|
||||
|
||||
IDescription IDescription.GetDefaultDescription(IFileRead fileRead, Logistics logistics)
|
||||
{
|
||||
Description result = GetDefault(fileRead, logistics);
|
||||
return result;
|
||||
}
|
||||
|
||||
Dictionary<string, string> IDescription.GetDisplayNamesJsonElement(IFileRead fileRead)
|
||||
{
|
||||
Dictionary<string, string> results = new();
|
||||
IDescription description = GetDisplayNames();
|
||||
string json = JsonSerializer.Serialize(description, description.GetType());
|
||||
JsonElement jsonElement = JsonSerializer.Deserialize<JsonElement>(json);
|
||||
foreach (JsonProperty jsonProperty in jsonElement.EnumerateObject())
|
||||
{
|
||||
if (!results.ContainsKey(jsonProperty.Name))
|
||||
results.Add(jsonProperty.Name, string.Empty);
|
||||
if (jsonProperty.Value is JsonElement jsonPropertyValue)
|
||||
results[jsonProperty.Name] = jsonPropertyValue.ToString();
|
||||
}
|
||||
return results;
|
||||
}
|
||||
|
||||
List<IDescription> IDescription.GetDescriptions(IFileRead fileRead, Logistics logistics, List<Test> tests, IProcessData iProcessData)
|
||||
{
|
||||
List<IDescription> results = new();
|
||||
if (iProcessData is null || !iProcessData.Details.Any() || iProcessData is not ProcessData _)
|
||||
results.Add(GetDefault(fileRead, logistics));
|
||||
else
|
||||
{
|
||||
string nullData;
|
||||
Description description;
|
||||
object configDataNullData = fileRead.NullData;
|
||||
if (configDataNullData is null)
|
||||
nullData = string.Empty;
|
||||
else
|
||||
nullData = configDataNullData.ToString();
|
||||
for (int i = 0; i < iProcessData.Details.Count; i++)
|
||||
{
|
||||
if (iProcessData.Details[i] is not string path)
|
||||
continue;
|
||||
description = new Description
|
||||
{
|
||||
Test = (int)tests[i],
|
||||
Count = tests.Count,
|
||||
Index = i,
|
||||
//
|
||||
EventName = fileRead.EventName,
|
||||
NullData = nullData,
|
||||
JobID = fileRead.CellInstanceName,
|
||||
Sequence = logistics.Sequence.ToString(),
|
||||
MesEntity = logistics.MesEntity,
|
||||
ReportFullPath = logistics.ReportFullPath,
|
||||
ProcessJobID = logistics.ProcessJobID,
|
||||
MID = logistics.MID,
|
||||
//
|
||||
Path = path,
|
||||
};
|
||||
results.Add(description);
|
||||
}
|
||||
}
|
||||
return results;
|
||||
}
|
||||
|
||||
private static Description GetDisplayNames()
|
||||
{
|
||||
Description result = new();
|
||||
return result;
|
||||
}
|
||||
|
||||
private Description GetDefault(IFileRead fileRead, Logistics logistics)
|
||||
{
|
||||
Description result = new()
|
||||
{
|
||||
Test = -1,
|
||||
Count = 0,
|
||||
Index = -1,
|
||||
//
|
||||
EventName = fileRead.EventName,
|
||||
NullData = fileRead.NullData,
|
||||
JobID = fileRead.CellInstanceName,
|
||||
Sequence = logistics.Sequence.ToString(),
|
||||
MesEntity = fileRead.MesEntity,
|
||||
ReportFullPath = logistics.ReportFullPath,
|
||||
ProcessJobID = logistics.ProcessJobID,
|
||||
MID = logistics.MID,
|
||||
//
|
||||
RDS = nameof(RDS),
|
||||
//
|
||||
Path = nameof(Path),
|
||||
};
|
||||
return result;
|
||||
}
|
||||
|
||||
internal static string GetDateFormat() => "MM/dd/yyyy hh:mm:ss tt";
|
||||
|
||||
}
|
25
Adaptation/FileHandlers/csv/Descriptor.cs
Normal file
25
Adaptation/FileHandlers/csv/Descriptor.cs
Normal file
@ -0,0 +1,25 @@
namespace Adaptation.FileHandlers.csv;

public class Descriptor
{

    public string Employee { get; private set; }
    public string Layer { get; private set; }
    public string PSN { get; private set; }
    public string RDS { get; private set; }
    public string Reactor { get; private set; }
    public string Wafer { get; private set; }
    public string Zone { get; private set; }

    public Descriptor(string employee, string layer, string psn, string rds, string reactor, string wafer, string zone)
    {
        Employee = employee;
        Layer = layer;
        PSN = psn;
        RDS = rds;
        Reactor = reactor;
        Wafer = wafer;
        Zone = zone;
    }

}
115
Adaptation/FileHandlers/csv/FileRead.cs
Normal file
115
Adaptation/FileHandlers/csv/FileRead.cs
Normal file
@ -0,0 +1,115 @@
|
||||
using Adaptation.Eaf.Management.ConfigurationData.CellAutomation;
|
||||
using Adaptation.Ifx.Eaf.EquipmentConnector.File.Configuration;
|
||||
using Adaptation.Shared;
|
||||
using Adaptation.Shared.Methods;
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Text.Json;
|
||||
|
||||
namespace Adaptation.FileHandlers.csv;
|
||||
|
||||
public class FileRead : Shared.FileRead, IFileRead
|
||||
{
|
||||
|
||||
private long? _TickOffset;
|
||||
|
||||
public FileRead(ISMTP smtp, Dictionary<string, string> fileParameter, string cellInstanceName, int? connectionCount, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList<ModelObjectParameterDefinition> modelObjectParameters, string equipmentDictionaryName, Dictionary<string, List<long>> dummyRuns, Dictionary<long, List<string>> staticRuns, bool useCyclicalForDescription, bool isEAFHosted) :
|
||||
base(new Description(), true, smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null)
|
||||
{
|
||||
_MinFileLength = 10;
|
||||
_NullData = string.Empty;
|
||||
_Logistics = new(this);
|
||||
if (_FileParameter is null)
|
||||
throw new Exception(cellInstanceConnectionName);
|
||||
if (_ModelObjectParameterDefinitions is null)
|
||||
throw new Exception(cellInstanceConnectionName);
|
||||
if (_IsDuplicator)
|
||||
throw new Exception(cellInstanceConnectionName);
|
||||
}
|
||||
|
||||
void IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, Exception exception) => Move(extractResults);
|
||||
|
||||
void IFileRead.WaitForThread() => WaitForThread(thread: null, threadExceptions: null);
|
||||
|
||||
string IFileRead.GetEventDescription()
|
||||
{
|
||||
string result = _Description.GetEventDescription();
|
||||
return result;
|
||||
}
|
||||
|
||||
List<string> IFileRead.GetHeaderNames()
|
||||
{
|
||||
List<string> results = _Description.GetHeaderNames();
|
||||
return results;
|
||||
}
|
||||
|
||||
string[] IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, string to, string from, string resolvedFileLocation, Exception exception)
|
||||
{
|
||||
string[] results = Move(extractResults, to, from, resolvedFileLocation, exception);
|
||||
return results;
|
||||
}
|
||||
|
||||
JsonProperty[] IFileRead.GetDefault()
|
||||
{
|
||||
JsonProperty[] results = _Description.GetDefault(this, _Logistics);
|
||||
return results;
|
||||
}
|
||||
|
||||
Dictionary<string, string> IFileRead.GetDisplayNamesJsonElement()
|
||||
{
|
||||
Dictionary<string, string> results = _Description.GetDisplayNamesJsonElement(this);
|
||||
return results;
|
||||
}
|
||||
|
||||
List<IDescription> IFileRead.GetDescriptions(IFileRead fileRead, List<Test> tests, IProcessData processData)
|
||||
{
|
||||
List<IDescription> results = _Description.GetDescriptions(fileRead, _Logistics, tests, processData);
|
||||
return results;
|
||||
}
|
||||
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.GetExtractResult(string reportFullPath, string eventName)
|
||||
{
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
|
||||
if (string.IsNullOrEmpty(eventName))
|
||||
throw new Exception();
|
||||
_ReportFullPath = reportFullPath;
|
||||
DateTime dateTime = DateTime.Now;
|
||||
results = GetExtractResult(reportFullPath, dateTime);
|
||||
if (results.Item3 is null)
|
||||
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(results.Item1, Array.Empty<Test>(), JsonSerializer.Deserialize<JsonElement[]>("[]"), results.Item4);
|
||||
if (results.Item3.Length > 0 && _IsEAFHosted)
|
||||
WritePDSF(this, results.Item3);
|
||||
UpdateLastTicksDuration(DateTime.Now.Ticks - dateTime.Ticks);
|
||||
return results;
|
||||
}
|
||||
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.ReExtract()
|
||||
{
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
|
||||
List<string> headerNames = _Description.GetHeaderNames();
|
||||
Dictionary<string, string> keyValuePairs = _Description.GetDisplayNamesJsonElement(this);
|
||||
results = ReExtract(this, headerNames, keyValuePairs);
|
||||
return results;
|
||||
}
|
||||
|
||||
private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
|
||||
{
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> results = new(string.Empty, null, null, new List<FileInfo>());
|
||||
_TickOffset ??= new FileInfo(reportFullPath).LastWriteTime.Ticks - dateTime.Ticks;
|
||||
_Logistics = new Logistics(this, _TickOffset.Value, reportFullPath, useSplitForMID: true);
|
||||
SetFileParameterLotIDToLogisticsMID();
|
||||
if (_Logistics.FileInfo.Length < _MinFileLength)
|
||||
results.Item4.Add(_Logistics.FileInfo);
|
||||
else
|
||||
{
|
||||
IProcessData iProcessData = new ProcessData(this, _Logistics, results.Item4, tickOffset: _TickOffset.Value);
|
||||
if (!iProcessData.Details.Any())
|
||||
throw new Exception(string.Concat("B) No Data - ", dateTime.Ticks));
|
||||
results = iProcessData.GetResults(this, _Logistics, results.Item4);
|
||||
}
|
||||
return results;
|
||||
}
|
||||
|
||||
}
|
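
A small sketch of the tick offset captured by GetExtractResult above: the first report seen fixes the difference between the file's LastWriteTime and the local clock, and that offset is reused for every later report (the field and method names here are illustrative):

using System;
using System.IO;

internal sealed class TickOffsetSketch
{
    private long? _TickOffset;

    internal long GetOffset(string reportFullPath, DateTime now)
    {
        // Captured once, on the first report, exactly as in GetExtractResult above.
        _TickOffset ??= new FileInfo(reportFullPath).LastWriteTime.Ticks - now.Ticks;
        return _TickOffset.Value;
    }
}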
73
Adaptation/FileHandlers/csv/Info.cs
Normal file
73
Adaptation/FileHandlers/csv/Info.cs
Normal file
@ -0,0 +1,73 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using System.Text;
|
||||
|
||||
namespace Adaptation.FileHandlers.csv;
|
||||
|
||||
public class Info
|
||||
{
|
||||
|
||||
public string Operator { get; set; }
|
||||
public string SampleName { get; set; }
|
||||
public string SoftwareVersion { get; set; }
|
||||
public string DateTime { get; set; }
|
||||
public string SystemId { get; set; }
|
||||
public string SystemSite { get; set; }
|
||||
public string SamplePosition { get; set; }
|
||||
public string Units { get; set; }
|
||||
public string CommentLength { get; set; }
|
||||
public List<string> Comments { get; set; }
|
||||
|
||||
#nullable enable
|
||||
#pragma warning disable CA1834
|
||||
|
||||
internal static Info? GetInfo(string[] lines, int start, int stop)
|
||||
{
|
||||
Info? result;
|
||||
string first;
|
||||
string[] segments;
|
||||
int? secondStart = null;
|
||||
List<string> values = new();
|
||||
List<string> comments = new();
|
||||
StringBuilder stringBuilder = new();
|
||||
for (int i = start; i < stop; i++)
|
||||
{
|
||||
segments = lines[i].Split(new string[] { "," }, StringSplitOptions.None);
|
||||
first = segments.First();
|
||||
if (first == "Comment:")
|
||||
{
|
||||
secondStart = i + 1;
|
||||
break;
|
||||
}
|
||||
_ = stringBuilder.Append(first).Append(",");
|
||||
if (segments.Length > 1)
|
||||
values.Add(segments[1]);
|
||||
else
|
||||
values.Add(string.Empty);
|
||||
}
|
||||
string header = "Operator,Sample Name,Software Version,Date & Time,System ID,System Site,Sample Position,Units,Comment Length,";
|
||||
if (secondStart is null || stringBuilder.Length != header.Length || stringBuilder.ToString() != header)
|
||||
result = null;
|
||||
else
|
||||
{
|
||||
result = new()
|
||||
{
|
||||
Operator = values[0],
|
||||
SampleName = values[1],
|
||||
SoftwareVersion = values[2],
|
||||
DateTime = values[3],
|
||||
SystemId = values[4],
|
||||
SystemSite = values[5],
|
||||
SamplePosition = values[6],
|
||||
Units = values[7],
|
||||
CommentLength = values[8],
|
||||
Comments = comments,
|
||||
};
|
||||
for (int i = secondStart.Value; i < stop; i++)
|
||||
comments.Add(lines[i]);
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
}
|
22
Adaptation/FileHandlers/csv/Layer.cs
Normal file
22
Adaptation/FileHandlers/csv/Layer.cs
Normal file
@ -0,0 +1,22 @@
namespace Adaptation.FileHandlers.csv;

public class Layer
{

    public string FirstPoint { get; set; }
    public string LastPoint { get; set; }
    public string Type { get; set; }
    public string Smoothing { get; set; }
    public string Apply { get; set; }
    public string SOrder { get; set; }
    public string GOrder { get; set; }
    public string Correction { get; set; }
    public string Conversion { get; set; }
    public string JunctionOption { get; set; }
    public string JunctionConstant { get; set; }
    public string CurrentDensity { get; set; }
    public string M1M2Tolerance { get; set; }
    public string Sheet { get; set; }
    public string Dose { get; set; }

}
81
Adaptation/FileHandlers/csv/LayerHeader.cs
Normal file
81
Adaptation/FileHandlers/csv/LayerHeader.cs
Normal file
@ -0,0 +1,81 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using System.Text;
|
||||
|
||||
namespace Adaptation.FileHandlers.csv;
|
||||
|
||||
public class LayerHeader
|
||||
{
|
||||
|
||||
public string NumberOfLayers { get; set; }
|
||||
public List<Layer> Layers { get; set; }
|
||||
|
||||
#nullable enable
|
||||
#pragma warning disable CA1834
|
||||
|
||||
internal static LayerHeader? GetLayerHeader(string[] lines, int start, int stop)
|
||||
{
|
||||
LayerHeader? result;
|
||||
Layer layer;
|
||||
string first;
|
||||
string[] segments;
|
||||
int? secondStart = null;
|
||||
List<Layer> layerCollection = new();
|
||||
List<string> values = new();
|
||||
StringBuilder stringBuilder = new();
|
||||
for (int i = start; i < stop; i++)
|
||||
{
|
||||
segments = lines[i].Split(new string[] { "," }, StringSplitOptions.None);
|
||||
first = segments.First();
|
||||
if (first == "Layer")
|
||||
{
|
||||
secondStart = i + 1;
|
||||
break;
|
||||
}
|
||||
_ = stringBuilder.Append(first).Append(",");
|
||||
if (segments.Length > 1)
|
||||
values.Add(segments[1]);
|
||||
else
|
||||
values.Add(string.Empty);
|
||||
}
|
||||
string header = "Number of Layers,";
|
||||
if (secondStart is null || stringBuilder.Length != header.Length || stringBuilder.ToString() != header)
|
||||
result = null;
|
||||
else
|
||||
{
|
||||
result = new()
|
||||
{
|
||||
NumberOfLayers = values[0],
|
||||
Layers = layerCollection,
|
||||
};
|
||||
for (int i = secondStart.Value; i < stop; i++)
|
||||
{
|
||||
segments = lines[i].Split(new string[] { "," }, StringSplitOptions.None);
|
||||
if (segments.Length < 15)
|
||||
continue;
|
||||
layer = new()
|
||||
{
|
||||
FirstPoint = segments[0],
|
||||
LastPoint = segments[1],
|
||||
Type = segments[2],
|
||||
Smoothing = segments[3],
|
||||
Apply = segments[4],
|
||||
SOrder = segments[5],
|
||||
GOrder = segments[6],
|
||||
Correction = segments[7],
|
||||
Conversion = segments[8],
|
||||
JunctionOption = segments[9],
|
||||
JunctionConstant = segments[10],
|
||||
CurrentDensity = segments[11],
|
||||
M1M2Tolerance = segments[12],
|
||||
Sheet = segments[13],
|
||||
Dose = segments[14],
|
||||
};
|
||||
layerCollection.Add(layer);
|
||||
}
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
}
|
14
Adaptation/FileHandlers/csv/Point.cs
Normal file
14
Adaptation/FileHandlers/csv/Point.cs
Normal file
@ -0,0 +1,14 @@
namespace Adaptation.FileHandlers.csv;

public class Point
{

    public string Number { get; set; }
    public string Depth { get; set; }
    public string Resistance { get; set; }
    public string StageX { get; set; }
    public string StageY { get; set; }
    public string StageZ { get; set; }
    public string StageT { get; set; }

}
12
Adaptation/FileHandlers/csv/Position.cs
Normal file
12
Adaptation/FileHandlers/csv/Position.cs
Normal file
@ -0,0 +1,12 @@
namespace Adaptation.FileHandlers.csv;

public class Position
{

    public string Resistivity { get; set; }
    public string Resistance { get; set; }
    public string PercentStandardDeviation { get; set; }
    public string Number { get; set; }
    public string Name { get; set; }

}
135
Adaptation/FileHandlers/csv/ProcessData.cs
Normal file
135
Adaptation/FileHandlers/csv/ProcessData.cs
Normal file
@ -0,0 +1,135 @@
|
||||
using Adaptation.Shared;
|
||||
using Adaptation.Shared.Methods;
|
||||
using log4net;
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Data;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Text.Json;
|
||||
using System.Text.Json.Serialization;
|
||||
|
||||
namespace Adaptation.FileHandlers.csv;
|
||||
|
||||
public partial class ProcessData : IProcessData
|
||||
{
|
||||
|
||||
private readonly List<object> _Details;
|
||||
|
||||
List<object> Shared.Properties.IProcessData.Details => _Details;
|
||||
|
||||
private readonly ILog _Log;
|
||||
|
||||
public ProcessData()
|
||||
{
|
||||
}
|
||||
|
||||
public ProcessData(IFileRead fileRead, Logistics logistics, List<FileInfo> fileInfoCollection, long tickOffset)
|
||||
{
|
||||
fileInfoCollection.Clear();
|
||||
_Details = new List<object>();
|
||||
_Log = LogManager.GetLogger(typeof(ProcessData));
|
||||
_ = Parse(fileRead, logistics, fileInfoCollection);
|
||||
}
|
||||
|
||||
string IProcessData.GetCurrentReactor(IFileRead fileRead, Logistics logistics, Dictionary<string, string> reactors) => throw new Exception(string.Concat("See ", nameof(Parse)));
|
||||
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> IProcessData.GetResults(IFileRead fileRead, Logistics logistics, List<FileInfo> fileInfoCollection)
|
||||
{
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
|
||||
List<Test> tests = new();
|
||||
foreach (object item in _Details)
|
||||
tests.Add(Test.SRP2100);
|
||||
List<IDescription> descriptions = fileRead.GetDescriptions(fileRead, tests, this);
|
||||
if (tests.Count != descriptions.Count)
|
||||
throw new Exception();
|
||||
for (int i = 0; i < tests.Count; i++)
|
||||
{
|
||||
if (descriptions[i] is not Description description)
|
||||
throw new Exception();
|
||||
if (description.Test != (int)tests[i])
|
||||
throw new Exception();
|
||||
}
|
||||
List<Description> fileReadDescriptions = (from l in descriptions select (Description)l).ToList();
|
||||
string json = JsonSerializer.Serialize(fileReadDescriptions, fileReadDescriptions.GetType());
|
||||
JsonElement[] jsonElements = JsonSerializer.Deserialize<JsonElement[]>(json);
|
||||
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(logistics.Logistics1[0], tests.ToArray(), jsonElements, fileInfoCollection);
|
||||
return results;
|
||||
}
|
||||
|
||||
private static void OnlyOnce(string directory)
|
||||
{
|
||||
CSV csv;
|
||||
string json;
|
||||
string fileName;
|
||||
string innerDirectory;
|
||||
string fileNameWithoutExtension;
|
||||
foreach (string path in Directory.GetFiles(directory, "*.csv", SearchOption.TopDirectoryOnly))
|
||||
{
|
||||
csv = CSV.GetCSV(path);
|
||||
fileNameWithoutExtension = Path.GetFileNameWithoutExtension(path);
|
||||
innerDirectory = Path.Combine(directory, fileNameWithoutExtension);
|
||||
_ = Directory.CreateDirectory(innerDirectory);
|
||||
fileName = Path.Combine(innerDirectory, $"{fileNameWithoutExtension}.csv");
|
||||
File.Move(path, fileName);
|
||||
fileName = Path.Combine(innerDirectory, $"{fileNameWithoutExtension}.json");
|
||||
json = JsonSerializer.Serialize(csv, new JsonSerializerOptions { WriteIndented = true });
|
||||
File.WriteAllText(fileName, json);
|
||||
}
|
||||
foreach (string path in Directory.GetFiles(directory, "*", SearchOption.AllDirectories))
|
||||
{
|
||||
innerDirectory = Path.GetDirectoryName(path);
|
||||
fileName = Path.Combine(innerDirectory, $"{Path.GetFileName(innerDirectory)}{Path.GetExtension(path)}");
|
||||
File.Move(path, fileName);
|
||||
}
|
||||
foreach (string path in Directory.GetFiles(directory, "*.csv", SearchOption.AllDirectories))
|
||||
{
|
||||
innerDirectory = Path.GetDirectoryName(path);
|
||||
Directory.SetLastWriteTime(innerDirectory, new FileInfo(path).LastWriteTime);
|
||||
}
|
||||
}
|
||||
|
||||
#pragma warning disable IDE0060
|
||||
private CSV Parse(IFileRead fileRead, Logistics logistics, List<FileInfo> fileInfoCollection)
|
||||
#pragma warning restore IDE0060
|
||||
{
|
||||
CSV result;
|
||||
result = CSV.GetCSV(logistics.ReportFullPath);
|
||||
string directory = Path.GetDirectoryName(logistics.ReportFullPath);
|
||||
#if OnlyOnce
|
||||
OnlyOnce(directory);
|
||||
#endif
|
||||
string fileName = Path.Combine(directory, $"{Path.GetFileNameWithoutExtension(logistics.ReportFullPath)}.json");
|
||||
string json = JsonSerializer.Serialize(result, new JsonSerializerOptions { WriteIndented = true });
|
||||
result = JsonSerializer.Deserialize<CSV>(json);
|
||||
if (result is null)
|
||||
throw new NullReferenceException(nameof(result));
|
||||
File.WriteAllText(fileName, json);
|
||||
#if NotSure
|
||||
fileInfoCollection.Add(new(fileName));
|
||||
#endif
|
||||
fileInfoCollection.Add(logistics.FileInfo);
|
||||
_Details.Add(fileName);
|
||||
return result;
|
||||
}
|
||||
|
||||
#nullable enable
|
||||
|
||||
internal static List<Description> GetDescriptions(JsonElement[] jsonElements)
|
||||
{
|
||||
List<Description> results = new();
|
||||
Description? description;
|
||||
JsonSerializerOptions jsonSerializerOptions = new() { NumberHandling = JsonNumberHandling.AllowReadingFromString | JsonNumberHandling.WriteAsString };
|
||||
foreach (JsonElement jsonElement in jsonElements)
|
||||
{
|
||||
if (jsonElement.ValueKind != JsonValueKind.Object)
|
||||
throw new Exception();
|
||||
description = JsonSerializer.Deserialize<Description>(jsonElement.ToString(), jsonSerializerOptions);
|
||||
if (description is null)
|
||||
continue;
|
||||
results.Add(description);
|
||||
}
|
||||
return results;
|
||||
}
|
||||
|
||||
}
|
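
A short usage sketch for ProcessData.GetDescriptions above, starting from JSON in the shape that GetResults produces (the array literal is a stand-in for real extract output):

using System;
using System.Collections.Generic;
using System.Text.Json;
using Adaptation.FileHandlers.csv;

internal static class DescriptionsSketch
{
    internal static void Run()
    {
        // Stand-in for the JsonElement[] carried in Item3 of GetResults.
        JsonElement[] jsonElements = JsonSerializer.Deserialize<JsonElement[]>("[{\"Test\":0,\"Count\":1,\"Index\":0}]") ?? Array.Empty<JsonElement>();
        List<Description> descriptions = ProcessData.GetDescriptions(jsonElements);
        Console.WriteLine(descriptions.Count);
    }
}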
13
Adaptation/FileHandlers/csv/Profile.cs
Normal file
13
Adaptation/FileHandlers/csv/Profile.cs
Normal file
@ -0,0 +1,13 @@
namespace Adaptation.FileHandlers.csv;

public class Profile
{

    public string Id { get; set; }
    public string Name { get; set; }
    public string Label { get; set; }
    public string Allocated { get; set; }
    public string Used { get; set; }
    public string List { get; set; }

}
102
Adaptation/FileHandlers/csv/ProfileHeader.cs
Normal file
102
Adaptation/FileHandlers/csv/ProfileHeader.cs
Normal file
@ -0,0 +1,102 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using System.Text;
|
||||
|
||||
namespace Adaptation.FileHandlers.csv;
|
||||
|
||||
public class ProfileHeader
|
||||
{
|
||||
|
||||
public string NumberOfProfiles { get; set; }
|
||||
public List<Profile> Profiles { get; set; }
|
||||
public List<ProfilePoint> ProfilePoints { get; set; }
|
||||
|
||||
#nullable enable
|
||||
#pragma warning disable CA1834
|
||||
|
||||
internal static ProfileHeader? GetProfileHeader(string[] lines, int start, int stop)
|
||||
{
|
||||
ProfileHeader? result;
|
||||
string first;
|
||||
string[] segments;
|
||||
Profile profileInfo;
|
||||
ProfilePoint profile;
|
||||
int? thirdStart = null;
|
||||
int? secondStart = null;
|
||||
List<string> values = new();
|
||||
List<Profile> profiles = new();
|
||||
StringBuilder stringBuilder = new();
|
||||
List<ProfilePoint> profilePoints = new();
|
||||
for (int i = start; i < stop; i++)
|
||||
{
|
||||
segments = lines[i].Split(new string[] { "," }, StringSplitOptions.None);
|
||||
first = segments.First();
|
||||
if (first == "ID")
|
||||
{
|
||||
secondStart = i + 1;
|
||||
break;
|
||||
}
|
||||
_ = stringBuilder.Append(first).Append(",");
|
||||
if (segments.Length > 1)
|
||||
values.Add(segments[1]);
|
||||
else
|
||||
values.Add(string.Empty);
|
||||
}
|
||||
string header = "Number of Profiles,";
|
||||
if (secondStart is null || stringBuilder.Length != header.Length || stringBuilder.ToString() != header)
|
||||
result = null;
|
||||
else
|
||||
{
|
||||
result = new()
|
||||
{
|
||||
NumberOfProfiles = values[0],
|
||||
Profiles = profiles,
|
||||
ProfilePoints = profilePoints,
|
||||
};
|
||||
for (int i = secondStart.Value; i < stop; i++)
|
||||
{
|
||||
segments = lines[i].Split(new string[] { "," }, StringSplitOptions.None);
|
||||
first = segments.First();
|
||||
if (first == "Point")
|
||||
{
|
||||
thirdStart = i + 1;
|
||||
break;
|
||||
}
|
||||
if (segments.Length < 6)
|
||||
continue;
|
||||
profileInfo = new()
|
||||
{
|
||||
Id = segments[0],
|
||||
Name = segments[1],
|
||||
Label = segments[2],
|
||||
Allocated = segments[3],
|
||||
Used = segments[4],
|
||||
List = segments[5],
|
||||
};
|
||||
profiles.Add(profileInfo);
|
||||
}
|
||||
if (thirdStart is not null)
|
||||
{
|
||||
for (int i = thirdStart.Value; i < stop; i++)
|
||||
{
|
||||
segments = lines[i].Split(new string[] { "," }, StringSplitOptions.None);
|
||||
if (segments.Length < 6)
|
||||
continue;
|
||||
profile = new()
|
||||
{
|
||||
Number = segments[0],
|
||||
Depth = segments[1],
|
||||
Raw = segments[2],
|
||||
Edited = segments[3],
|
||||
Resistivity = segments[4],
|
||||
CD = segments[5],
|
||||
};
|
||||
profilePoints.Add(profile);
|
||||
}
|
||||
}
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
}
|
13
Adaptation/FileHandlers/csv/ProfilePoint.cs
Normal file
13
Adaptation/FileHandlers/csv/ProfilePoint.cs
Normal file
@ -0,0 +1,13 @@
namespace Adaptation.FileHandlers.csv;

public class ProfilePoint
{

    public string Number { get; set; }
    public string Depth { get; set; }
    public string Raw { get; set; }
    public string Edited { get; set; }
    public string Resistivity { get; set; }
    public string CD { get; set; }

}
73
Adaptation/FileHandlers/csv/RawData.cs
Normal file
73
Adaptation/FileHandlers/csv/RawData.cs
Normal file
@ -0,0 +1,73 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using System.Text;
|
||||
|
||||
namespace Adaptation.FileHandlers.csv;
|
||||
|
||||
public class RawData
|
||||
{
|
||||
|
||||
public string TotalPoints { get; set; }
|
||||
public List<Point> Points { get; set; }
|
||||
|
||||
#nullable enable
|
||||
#pragma warning disable CA1834
|
||||
|
||||
internal static RawData? GetRawData(string[] lines, int start, int stop)
|
||||
{
|
||||
RawData? result;
|
||||
Point point;
|
||||
string first;
|
||||
string[] segments;
|
||||
int? secondStart = null;
|
||||
List<Point> points = new();
|
||||
List<string> values = new();
|
||||
StringBuilder stringBuilder = new();
|
||||
for (int i = start; i < stop; i++)
|
||||
{
|
||||
segments = lines[i].Split(new string[] { "," }, StringSplitOptions.None);
|
||||
first = segments.First();
|
||||
if (first == "Point")
|
||||
{
|
||||
secondStart = i + 1;
|
||||
break;
|
||||
}
|
||||
_ = stringBuilder.Append(first).Append(",");
|
||||
if (segments.Length > 1)
|
||||
values.Add(segments[1]);
|
||||
else
|
||||
values.Add(string.Empty);
|
||||
}
|
||||
string header = "Total Points,";
|
||||
if (secondStart is null || stringBuilder.Length != header.Length || stringBuilder.ToString() != header)
|
||||
result = null;
|
||||
else
|
||||
{
|
||||
result = new()
|
||||
{
|
||||
TotalPoints = values[0],
|
||||
Points = points,
|
||||
};
|
||||
for (int i = secondStart.Value; i < stop; i++)
|
||||
{
|
||||
segments = lines[i].Split(new string[] { "," }, StringSplitOptions.None);
|
||||
if (segments.Length < 7)
|
||||
continue;
|
||||
point = new()
|
||||
{
|
||||
Number = segments[0],
|
||||
Depth = segments[1],
|
||||
Resistance = segments[2],
|
||||
StageX = segments[3],
|
||||
StageY = segments[4],
|
||||
StageZ = segments[5],
|
||||
StageT = segments[6],
|
||||
};
|
||||
points.Add(point);
|
||||
}
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
}
|
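
For reference, a minimal sketch of the per-row mapping GetRawData above performs; a RAWDATA row carries the seven columns Number, Depth, Resistance, StageX, StageY, StageZ and StageT, which is why rows with fewer than seven segments are skipped (the helper name is illustrative):

using System;
using Adaptation.FileHandlers.csv;

internal static class RawDataRowSketch
{
    internal static Point Map(string line)
    {
        string[] segments = line.Split(new string[] { "," }, StringSplitOptions.None);
        if (segments.Length < 7)
            throw new ArgumentException("A RAWDATA row needs seven comma-separated columns.", nameof(line));
        return new Point
        {
            Number = segments[0],
            Depth = segments[1],
            Resistance = segments[2],
            StageX = segments[3],
            StageY = segments[4],
            StageZ = segments[5],
            StageT = segments[6],
        };
    }
}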
88
Adaptation/FileHandlers/csv/Setup.cs
Normal file
88
Adaptation/FileHandlers/csv/Setup.cs
Normal file
@ -0,0 +1,88 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using System.Text;
|
||||
|
||||
namespace Adaptation.FileHandlers.csv;
|
||||
|
||||
public class Setup
|
||||
{
|
||||
|
||||
public string Finish { get; set; }
|
||||
public string NorthProbeID { get; set; }
|
||||
public string SouthProbeID { get; set; }
|
||||
public string MeasurementPolarity { get; set; }
|
||||
public string SineBevelAngle { get; set; }
|
||||
public string ContactRadius { get; set; }
|
||||
public string ProbeSpacing { get; set; }
|
||||
public string ProbeLoad { get; set; }
|
||||
public string Orientation { get; set; }
|
||||
public string NumberOfStepSizes { get; set; }
|
||||
public List<Step> Steps { get; set; }
|
||||
|
||||
#nullable enable
|
||||
#pragma warning disable CA1834
|
||||
|
||||
internal static Setup? GetSetup(string[] lines, int start, int stop)
|
||||
{
|
||||
Setup? result;
|
||||
Step step;
|
||||
string first;
|
||||
string[] segments;
|
||||
int? secondStart = null;
|
||||
List<Step> steps = new();
|
||||
List<string> values = new();
|
||||
StringBuilder stringBuilder = new();
|
||||
for (int i = start; i < stop; i++)
|
||||
{
|
||||
segments = lines[i].Split(new string[] { "," }, StringSplitOptions.None);
|
||||
first = segments.First();
|
||||
if (first == "Step")
|
||||
{
|
||||
secondStart = i + 1;
|
||||
break;
|
||||
}
|
||||
_ = stringBuilder.Append(first).Append(",");
|
||||
if (segments.Length > 1)
|
||||
values.Add(segments[1]);
|
||||
else
|
||||
values.Add(string.Empty);
|
||||
}
|
||||
string header = "Finish,North Probe ID,South Probe ID,Measurement Polarity,Sine Bevel Angle,Contact Radius (µm),Probe Spacing (µm),Probe Load (gm),Orientation,Number of Step Sizes,";
|
||||
if (secondStart is null || stringBuilder.Length != header.Length || stringBuilder.ToString() != header)
|
||||
result = null;
|
||||
else
|
||||
{
|
||||
result = new()
|
||||
{
|
||||
Finish = values[0],
|
||||
NorthProbeID = values[1],
|
||||
SouthProbeID = values[2],
|
||||
MeasurementPolarity = values[3],
|
||||
SineBevelAngle = values[4],
|
||||
ContactRadius = values[5],
|
||||
ProbeSpacing = values[6],
|
||||
ProbeLoad = values[7],
|
||||
Orientation = values[8],
|
||||
NumberOfStepSizes = values[9],
|
||||
Steps = steps,
|
||||
};
|
||||
for (int i = secondStart.Value; i < stop; i++)
|
||||
{
|
||||
segments = lines[i].Split(new string[] { "," }, StringSplitOptions.None);
|
||||
if (segments.Length < 4)
|
||||
continue;
|
||||
step = new()
|
||||
{
|
||||
Number = segments[0],
|
||||
Points = segments[1],
|
||||
X = segments[2],
|
||||
Y = segments[3],
|
||||
};
|
||||
steps.Add(step);
|
||||
}
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
}
|
11
Adaptation/FileHandlers/csv/Step.cs
Normal file
11
Adaptation/FileHandlers/csv/Step.cs
Normal file
@ -0,0 +1,11 @@
namespace Adaptation.FileHandlers.csv;

public class Step
{

    public string Number { get; set; }
    public string Points { get; set; }
    public string X { get; set; }
    public string Y { get; set; }

}
27
Adaptation/FileHandlers/json/CSV.cs
Normal file
27
Adaptation/FileHandlers/json/CSV.cs
Normal file
@ -0,0 +1,27 @@
namespace Adaptation.FileHandlers.json;

public class CSV
{

#nullable enable

    public string? FileVersion { get; }
    public Info? Info { get; }
    public Setup? Setup { get; }
    public LayerHeader? LayerHeader { get; }
    public ProfileHeader? ProfileHeader { get; }
    public Calibration? Calibration { get; }
    public RawData? RawData { get; }

    internal CSV(csv.CSV csv)
    {
        FileVersion = csv.FileVersion;
        Info = csv.Info is null ? null : new(csv.Info);
        Setup = csv.Setup is null ? null : new(csv.Setup);
        RawData = csv.RawData is null ? null : new(csv.RawData);
        Calibration = csv.Calibration is null ? null : new(csv.Calibration);
        LayerHeader = csv.LayerHeader is null ? null : new(csv.LayerHeader);
        ProfileHeader = csv.ProfileHeader is null ? null : new(csv.ProfileHeader);
    }

}
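
A short usage sketch of the wrapping constructor above, which turns the string-typed csv.CSV into the strongly typed json.CSV (the path parameter is a placeholder; both GetCSV and the constructor are internal, so this belongs in the same assembly):

using System.Text.Json;

internal static class JsonCsvSketch
{
    internal static string Run(string path)
    {
        // Parse the raw export first, then wrap it into the typed model.
        Adaptation.FileHandlers.csv.CSV raw = Adaptation.FileHandlers.csv.CSV.GetCSV(path);
        Adaptation.FileHandlers.json.CSV typed = new(raw);
        return JsonSerializer.Serialize(typed, new JsonSerializerOptions { WriteIndented = true });
    }
}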
59
Adaptation/FileHandlers/json/Calibration.cs
Normal file
@ -0,0 +1,59 @@
using System;
using System.Collections.Generic;

namespace Adaptation.FileHandlers.json;

public class Calibration
{

public int NumberOfCalibrationSets { get; }
public List<DataSet> DataSets { get; }

internal Calibration(csv.Calibration calibration)
{
DataSet dataSet;
Position position;
List<Position> positions;
List<DataSet> dataSets = new();
NumberOfCalibrationSets = int.Parse(calibration.NumberOfCalibrationSets);
DataSets = dataSets;
foreach (csv.DataSet csvDataSet in calibration.DataSets)
{
positions = new();
foreach (csv.Position csvPosition in csvDataSet.Positions)
{
position = new
(
resistivity: double.Parse(csvPosition.Resistivity),
resistance: double.Parse(csvPosition.Resistance),
percentStandardDeviation: double.Parse(csvPosition.PercentStandardDeviation),
number: int.Parse(csvPosition.Number),
name: csvPosition.Name
);
positions.Add(position);
}
dataSet = new
(
@operator: csvDataSet.Operator,
dateTime: DateTime.Parse(csvDataSet.DateTime),
finish: csvDataSet.Finish,
orientation: csvDataSet.Orientation,
northProbeID: csvDataSet.NorthProbeID,
southProbeID: csvDataSet.SouthProbeID,
polarity: csvDataSet.Polarity,
contactRadius: double.Parse(csvDataSet.ContactRadius),
probeSpacing: double.Parse(csvDataSet.ProbeSpacing),
load: double.Parse(csvDataSet.Load),
xStep: double.Parse(csvDataSet.XStep),
name: csvDataSet.Name,
plateId: csvDataSet.PlateId,
type: csvDataSet.Type,
pointsPerSample: int.Parse(csvDataSet.PointsPerSample),
numberOfPairs: int.Parse(csvDataSet.NumberOfPairs),
positions: positions
);
dataSets.Add(dataSet);
}
}

}
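Editor note: the constructor above converts the string-typed csv DTO with int.Parse, double.Parse and DateTime.Parse, all of which honor the host machine's current culture. If the instrument export always uses '.' as the decimal separator, an invariant-culture parse is one way to make the conversion machine-independent. The sketch below is a suggestion only; ParseHelper and ParseDouble are hypothetical names and are not part of this commit.

using System.Globalization;

internal static class ParseHelper
{
    // Parses "1.234" the same way regardless of the machine's regional settings.
    internal static double ParseDouble(string value) =>
        double.Parse(value, NumberStyles.Float, CultureInfo.InvariantCulture);
}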
65
Adaptation/FileHandlers/json/DataSet.cs
Normal file
@ -0,0 +1,65 @@
using System;
using System.Collections.Generic;

namespace Adaptation.FileHandlers.json;

public class DataSet
{

public string Operator { get; }
public DateTime DateTime { get; }
public string Finish { get; }
public string Orientation { get; }
public string NorthProbeID { get; }
public string SouthProbeID { get; }
public string Polarity { get; }
public double ContactRadius { get; }
public double ProbeSpacing { get; }
public double Load { get; }
public double XStep { get; }
// public string SampleSet { get; }
public string Name { get; }
public string PlateId { get; }
public string Type { get; }
public int PointsPerSample { get; }
public int NumberOfPairs { get; }
public List<Position> Positions { get; }

public DataSet(string @operator,
DateTime dateTime,
string finish,
string orientation,
string northProbeID,
string southProbeID,
string polarity,
double contactRadius,
double probeSpacing,
double load,
double xStep,
string name,
string plateId,
string type,
int pointsPerSample,
int numberOfPairs,
List<Position> positions)
{
Operator = @operator;
DateTime = dateTime;
Finish = finish;
Orientation = orientation;
NorthProbeID = northProbeID;
SouthProbeID = southProbeID;
Polarity = polarity;
ContactRadius = contactRadius;
ProbeSpacing = probeSpacing;
Load = load;
XStep = xStep;
Name = name;
PlateId = plateId;
Type = type;
PointsPerSample = pointsPerSample;
NumberOfPairs = numberOfPairs;
Positions = positions;
}

}
245
Adaptation/FileHandlers/json/Description.cs
Normal file
@ -0,0 +1,245 @@
using Adaptation.Shared;
using Adaptation.Shared.Methods;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text.Json;

namespace Adaptation.FileHandlers.json;

public class Description : IDescription, Shared.Properties.IDescription
{

public int Test { get; set; }
public int Count { get; set; }
public int Index { get; set; }
//
public string EventName { get; set; }
public string NullData { get; set; }
public string JobID { get; set; }
public string Sequence { get; set; }
public string MesEntity { get; set; }
public string ReportFullPath { get; set; }
public string ProcessJobID { get; set; }
public string MID { get; set; }
//
public string Date { get; set; }
public string Employee { get; set; }
public string HeaderUniqueId { get; set; }
public string Layer { get; set; }
public string PSN { get; set; }
public string RDS { get; set; }
public string Reactor { get; set; }
public string Recipe { get; set; }
public string UniqueId { get; set; }
public string Zone { get; set; }
//
public string Number { get; set; }
public string Depth { get; set; }
public string Raw { get; set; }
public string Edited { get; set; }
public string Resistivity { get; set; }
public string CD { get; set; }

string IDescription.GetEventDescription() => "File Has been read and parsed";

List<string> IDescription.GetNames(IFileRead fileRead, Logistics logistics)
{
List<string> results = new();
IDescription description = GetDefault(fileRead, logistics);
string json = JsonSerializer.Serialize(description, description.GetType());
object @object = JsonSerializer.Deserialize<object>(json);
if (@object is not JsonElement jsonElement)
throw new Exception();
foreach (JsonProperty jsonProperty in jsonElement.EnumerateObject())
results.Add(jsonProperty.Name);
return results;
}

List<string> IDescription.GetDetailNames()
{
List<string> results = new()
{
nameof(HeaderUniqueId),
nameof(Layer),
nameof(RDS),
nameof(UniqueId),
nameof(Zone)
};
return results;
}

List<string> IDescription.GetHeaderNames()
{
List<string> results = new()
{
nameof(Date),
nameof(Employee),
nameof(PSN),
nameof(RDS),
nameof(Reactor),
nameof(Recipe)
};
return results;
}

IDescription IDescription.GetDisplayNames()
{
Description result = GetDisplayNames();
return result;
}

List<string> IDescription.GetParameterNames()
{
List<string> results = new()
{ };
return results;
}

JsonProperty[] IDescription.GetDefault(IFileRead fileRead, Logistics logistics)
{
JsonProperty[] results;
IDescription description = GetDefault(fileRead, logistics);
string json = JsonSerializer.Serialize(description, description.GetType());
object @object = JsonSerializer.Deserialize<object>(json);
results = ((JsonElement)@object).EnumerateObject().ToArray();
return results;
}

List<string> IDescription.GetPairedParameterNames()
{
List<string> results = new();
return results;
}

List<string> IDescription.GetIgnoreParameterNames(Test test)
{
List<string> results = new();
return results;
}

IDescription IDescription.GetDefaultDescription(IFileRead fileRead, Logistics logistics)
{
Description result = GetDefault(fileRead, logistics);
return result;
}

Dictionary<string, string> IDescription.GetDisplayNamesJsonElement(IFileRead fileRead)
{
Dictionary<string, string> results = new();
IDescription description = GetDisplayNames();
string json = JsonSerializer.Serialize(description, description.GetType());
JsonElement jsonElement = JsonSerializer.Deserialize<JsonElement>(json);
foreach (JsonProperty jsonProperty in jsonElement.EnumerateObject())
{
if (!results.ContainsKey(jsonProperty.Name))
results.Add(jsonProperty.Name, string.Empty);
if (jsonProperty.Value is JsonElement jsonPropertyValue)
results[jsonProperty.Name] = jsonPropertyValue.ToString();
}
return results;
}

List<IDescription> IDescription.GetDescriptions(IFileRead fileRead, Logistics logistics, List<Test> tests, IProcessData iProcessData)
{
List<IDescription> results = new();
if (iProcessData is null || !iProcessData.Details.Any() || iProcessData is not ProcessData processData)
results.Add(GetDefault(fileRead, logistics));
else
{
string nullData;
Description description;
object configDataNullData = fileRead.NullData;
if (configDataNullData is null)
nullData = string.Empty;
else
nullData = configDataNullData.ToString();
for (int i = 0; i < iProcessData.Details.Count; i++)
{
if (iProcessData.Details[i] is not Detail detail)
continue;
description = new Description
{
Test = (int)tests[i],
Count = tests.Count,
Index = i,
//
EventName = fileRead.EventName,
NullData = nullData,
JobID = fileRead.CellInstanceName,
Sequence = logistics.Sequence.ToString(),
MesEntity = logistics.MesEntity,
ReportFullPath = logistics.ReportFullPath,
ProcessJobID = logistics.ProcessJobID,
MID = logistics.MID,
//
Date = processData.Date.ToString(GetDateFormat()),
Employee = processData.Employee,
Layer = processData.Layer,
PSN = processData.PSN,
RDS = processData.RDS,
Reactor = processData.Reactor,
Recipe = processData.Recipe,
Zone = processData.Zone,
//
UniqueId = detail.UniqueId,
HeaderUniqueId = detail.HeaderUniqueId,
Depth = detail.Depth,
Raw = detail.Raw,
Edited = detail.Edited,
Resistivity = detail.Resistivity,
CD = detail.CD,
};
results.Add(description);
}
}
return results;
}

private static Description GetDisplayNames()
{
Description result = new();
return result;
}

private Description GetDefault(IFileRead fileRead, Logistics logistics)
{
Description result = new()
{
Test = -1,
Count = 0,
Index = -1,
//
EventName = fileRead.EventName,
NullData = fileRead.NullData,
JobID = fileRead.CellInstanceName,
Sequence = logistics.Sequence.ToString(),
MesEntity = fileRead.MesEntity,
ReportFullPath = logistics.ReportFullPath,
ProcessJobID = logistics.ProcessJobID,
MID = logistics.MID,
//
Date = nameof(Date),
Employee = nameof(Employee),
HeaderUniqueId = nameof(HeaderUniqueId),
Layer = nameof(Layer),
PSN = nameof(PSN),
RDS = nameof(RDS),
Reactor = nameof(Reactor),
Recipe = nameof(Recipe),
UniqueId = nameof(UniqueId),
Zone = nameof(Zone),
//
Number = nameof(Number),
Depth = nameof(Depth),
Raw = nameof(Raw),
Edited = nameof(Edited),
Resistivity = nameof(Resistivity),
CD = nameof(CD),
};
return result;
}

internal static string GetDateFormat() => "MM/dd/yyyy hh:mm:ss tt";

}
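Editor note: the Date field travels as a string formatted with GetDateFormat() ("MM/dd/yyyy hh:mm:ss tt"). A minimal sketch of how a downstream consumer could round-trip that value; the sample timestamp is illustrative only and not taken from real data.

using System.Globalization;

DateTime parsed = DateTime.ParseExact(
    "10/31/2022 01:23:45 PM",
    "MM/dd/yyyy hh:mm:ss tt",
    CultureInfo.InvariantCulture);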
25
Adaptation/FileHandlers/json/Descriptor.cs
Normal file
@ -0,0 +1,25 @@
namespace Adaptation.FileHandlers.json;

public class Descriptor
{

public string Employee { get; private set; }
public string Layer { get; private set; }
public string PSN { get; private set; }
public string RDS { get; private set; }
public string Reactor { get; private set; }
public string Wafer { get; private set; }
public string Zone { get; private set; }

public Descriptor(string employee, string layer, string psn, string rds, string reactor, string wafer, string zone)
{
Employee = employee;
Layer = layer;
PSN = psn;
RDS = rds;
Reactor = reactor;
Wafer = wafer;
Zone = zone;
}

}
14
Adaptation/FileHandlers/json/Detail.cs
Normal file
@ -0,0 +1,14 @@
namespace Adaptation.FileHandlers.json;

public class Detail
{

public string HeaderUniqueId { get; set; }
public string UniqueId { get; set; }
public string Depth { get; set; }
public string Raw { get; set; }
public string Edited { get; set; }
public string Resistivity { get; set; }
public string CD { get; set; }

}
130
Adaptation/FileHandlers/json/FileRead.cs
Normal file
@ -0,0 +1,130 @@
using Adaptation.Eaf.Management.ConfigurationData.CellAutomation;
using Adaptation.Ifx.Eaf.EquipmentConnector.File.Configuration;
using Adaptation.Shared;
using Adaptation.Shared.Methods;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text.Json;
using System.Text.RegularExpressions;

namespace Adaptation.FileHandlers.json;

public class FileRead : Shared.FileRead, IFileRead
{

private long? _TickOffset;

public FileRead(ISMTP smtp, Dictionary<string, string> fileParameter, string cellInstanceName, int? connectionCount, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList<ModelObjectParameterDefinition> modelObjectParameters, string equipmentDictionaryName, Dictionary<string, List<long>> dummyRuns, Dictionary<long, List<string>> staticRuns, bool useCyclicalForDescription, bool isEAFHosted) :
base(new Description(), true, smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null)
{
_MinFileLength = 10;
_NullData = string.Empty;
_Logistics = new(this);
if (_FileParameter is null)
throw new Exception(cellInstanceConnectionName);
if (_ModelObjectParameterDefinitions is null)
throw new Exception(cellInstanceConnectionName);
if (_IsDuplicator)
throw new Exception(cellInstanceConnectionName);
}

void IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, Exception exception) => Move(extractResults);

void IFileRead.WaitForThread() => WaitForThread(thread: null, threadExceptions: null);

string IFileRead.GetEventDescription()
{
string result = _Description.GetEventDescription();
return result;
}

List<string> IFileRead.GetHeaderNames()
{
List<string> results = _Description.GetHeaderNames();
return results;
}

string[] IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, string to, string from, string resolvedFileLocation, Exception exception)
{
string[] results = Move(extractResults, to, from, resolvedFileLocation, exception);
return results;
}

JsonProperty[] IFileRead.GetDefault()
{
JsonProperty[] results = _Description.GetDefault(this, _Logistics);
return results;
}

Dictionary<string, string> IFileRead.GetDisplayNamesJsonElement()
{
Dictionary<string, string> results = _Description.GetDisplayNamesJsonElement(this);
return results;
}

List<IDescription> IFileRead.GetDescriptions(IFileRead fileRead, List<Test> tests, IProcessData processData)
{
List<IDescription> results = _Description.GetDescriptions(fileRead, _Logistics, tests, processData);
return results;
}

Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.GetExtractResult(string reportFullPath, string eventName)
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
if (string.IsNullOrEmpty(eventName))
throw new Exception();
_ReportFullPath = reportFullPath;
DateTime dateTime = DateTime.Now;
results = GetExtractResult(reportFullPath, dateTime);
if (results.Item3 is null)
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(results.Item1, Array.Empty<Test>(), JsonSerializer.Deserialize<JsonElement[]>("[]"), results.Item4);
if (results.Item3.Length > 0 && _IsEAFHosted)
WritePDSF(this, results.Item3);
UpdateLastTicksDuration(DateTime.Now.Ticks - dateTime.Ticks);
return results;
}

Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.ReExtract()
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
List<string> headerNames = _Description.GetHeaderNames();
Dictionary<string, string> keyValuePairs = _Description.GetDisplayNamesJsonElement(this);
results = ReExtract(this, headerNames, keyValuePairs);
return results;
}

private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results = new(string.Empty, null, null, new List<FileInfo>());
_TickOffset ??= new FileInfo(reportFullPath).LastWriteTime.Ticks - dateTime.Ticks;
_Logistics = new Logistics(this, _TickOffset.Value, reportFullPath, useSplitForMID: true);
SetFileParameterLotIDToLogisticsMID();
if (_Logistics.FileInfo.Length < _MinFileLength)
results.Item4.Add(_Logistics.FileInfo);
else
{
IProcessData iProcessData = new ProcessData(this, _Logistics, results.Item4, tickOffset: _TickOffset.Value);
if (iProcessData is not ProcessData processData)
throw new Exception(string.Concat("A) No Data - ", dateTime.Ticks));
string mid;
if (!string.IsNullOrEmpty(processData.Zone) && string.IsNullOrEmpty(processData.Reactor) && string.IsNullOrEmpty(processData.RDS) && string.IsNullOrEmpty(processData.PSN))
mid = processData.Zone;
else if (!string.IsNullOrEmpty(processData.Employee) && string.IsNullOrEmpty(processData.Reactor) && string.IsNullOrEmpty(processData.RDS) && string.IsNullOrEmpty(processData.PSN))
mid = processData.Employee;
else
{
mid = string.Concat(processData.Reactor, "-", processData.RDS, "-", processData.PSN);
mid = Regex.Replace(mid, @"[\\,\/,\:,\*,\?,\"",\<,\>,\|]", "_").Split('\r')[0].Split('\n')[0];
}
SetFileParameterLotID(mid);
_Logistics.Update(mid, processData.Reactor);
if (!iProcessData.Details.Any())
throw new Exception(string.Concat("B) No Data - ", dateTime.Ticks));
results = iProcessData.GetResults(this, _Logistics, results.Item4);
}
return results;
}

}
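Editor note: GetExtractResult above builds the MID from Reactor-RDS-PSN and strips characters that are illegal in file names with Regex.Replace. The sanitation can be exercised in isolation; the pattern is the one used in the method, while the input values are illustrative only.

using System.Text.RegularExpressions;

string mid = "42/123456:4588.1";
// Replaces \ / : * ? " < > | (and commas) with '_' and drops anything after a line break.
string sanitized = Regex.Replace(mid, @"[\\,\/,\:,\*,\?,\"",\<,\>,\|]", "_")
    .Split('\r')[0]
    .Split('\n')[0];
// sanitized == "42_123456_4588.1"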
34
Adaptation/FileHandlers/json/Info.cs
Normal file
@ -0,0 +1,34 @@
using System;
using System.Collections.Generic;

namespace Adaptation.FileHandlers.json;

public class Info
{

public string Operator { get; }
public string SampleName { get; }
public string SoftwareVersion { get; }
public DateTime DateTime { get; }
public string SystemId { get; }
public string SystemSite { get; }
public int SamplePosition { get; }
public int Units { get; }
public int CommentLength { get; }
public List<string> Comments { get; }

internal Info(csv.Info info)
{
Operator = info.Operator;
SampleName = info.SampleName;
SoftwareVersion = info.SoftwareVersion;
DateTime = DateTime.Parse(info.DateTime);
SystemId = info.SystemId;
SystemSite = info.SystemSite;
SamplePosition = int.Parse(info.SamplePosition);
Units = int.Parse(info.Units);
CommentLength = int.Parse(info.CommentLength);
Comments = info.Comments;
}

}
55
Adaptation/FileHandlers/json/Layer.cs
Normal file
@ -0,0 +1,55 @@
namespace Adaptation.FileHandlers.json;

public class Layer
{

public int FirstPoint { get; }
public int? LastPoint { get; }
public string Type { get; }
public string Smoothing { get; }
public string Apply { get; }
public int SOrder { get; }
public int GOrder { get; }
public string Correction { get; }
public string Conversion { get; }
public string JunctionOption { get; }
public int JunctionConstant { get; }
public double CurrentDensity { get; }
public string M1M2Tolerance { get; }
public string Sheet { get; }
public string Dose { get; }

public Layer(int firstPoint,
int? lastPoint,
string type,
string smoothing,
string apply,
int sOrder,
int gOrder,
string correction,
string conversion,
string junctionOption,
int junctionConstant,
double currentDensity,
string m1M2Tolerance,
string sheet,
string dose)
{
FirstPoint = firstPoint;
LastPoint = lastPoint;
Type = type;
Smoothing = smoothing;
Apply = apply;
SOrder = sOrder;
GOrder = gOrder;
Correction = correction;
Conversion = conversion;
JunctionOption = junctionOption;
JunctionConstant = junctionConstant;
CurrentDensity = currentDensity;
M1M2Tolerance = m1M2Tolerance;
Sheet = sheet;
Dose = dose;
}

}
41
Adaptation/FileHandlers/json/LayerHeader.cs
Normal file
@ -0,0 +1,41 @@
using System.Collections.Generic;

namespace Adaptation.FileHandlers.json;

public class LayerHeader
{

public int NumberOfLayers { get; }
public List<Layer> Layers { get; }

internal LayerHeader(csv.LayerHeader layerHeader)
{
Layer layer;
List<Layer> layers = new();
NumberOfLayers = int.Parse(layerHeader.NumberOfLayers);
Layers = layers;
foreach (csv.Layer csvLayer in layerHeader.Layers)
{
layer = new
(
firstPoint: int.Parse(csvLayer.FirstPoint),
lastPoint: string.IsNullOrEmpty(csvLayer.LastPoint) ? null : int.Parse(csvLayer.LastPoint),
type: csvLayer.Type,
smoothing: csvLayer.Smoothing,
apply: csvLayer.Apply,
sOrder: int.Parse(csvLayer.SOrder),
gOrder: int.Parse(csvLayer.GOrder),
correction: csvLayer.Correction,
conversion: csvLayer.Conversion,
junctionOption: csvLayer.JunctionOption,
junctionConstant: int.Parse(csvLayer.JunctionConstant),
currentDensity: double.Parse(csvLayer.CurrentDensity),
m1M2Tolerance: csvLayer.M1M2Tolerance,
sheet: csvLayer.Sheet,
dose: csvLayer.Dose
);
layers.Add(layer);
}
}

}
31
Adaptation/FileHandlers/json/Point.cs
Normal file
@ -0,0 +1,31 @@
namespace Adaptation.FileHandlers.json;

public class Point
{

public double Number { get; }
public double Depth { get; }
public double Resistance { get; }
public double StageX { get; }
public double StageY { get; }
public double StageZ { get; }
public double StageT { get; }

public Point(double number,
double depth,
double resistance,
double stageX,
double stageY,
double stageZ,
double stageT)
{
Number = number;
Depth = depth;
Resistance = resistance;
StageX = stageX;
StageY = stageY;
StageZ = stageZ;
StageT = stageT;
}

}
25
Adaptation/FileHandlers/json/Position.cs
Normal file
@ -0,0 +1,25 @@
namespace Adaptation.FileHandlers.json;

public class Position
{

public double Resistivity { get; }
public double Resistance { get; }
public double PercentStandardDeviation { get; }
public int Number { get; }
public string Name { get; }

public Position(double resistivity,
double resistance,
double percentStandardDeviation,
int number,
string name)
{
Resistivity = resistivity;
Resistance = resistance;
PercentStandardDeviation = percentStandardDeviation;
Number = number;
Name = name;
}

}
558
Adaptation/FileHandlers/json/ProcessData.cs
Normal file
@ -0,0 +1,558 @@
using Adaptation.Shared;
using Adaptation.Shared.Methods;
using log4net;
using System;
using System.Collections.Generic;
using System.Data;
using System.Drawing;
using System.IO;
using System.Linq;
using System.Text;
using System.Text.Json;
using System.Text.Json.Serialization;
using System.Text.RegularExpressions;

namespace Adaptation.FileHandlers.json;

public partial class ProcessData : IProcessData
{

private readonly List<object> _Details;

public DateTime Date { get; set; }
public string Employee { get; set; }
public string JobID { get; set; }
public string Layer { get; set; }
public string MesEntity { get; set; }
public string PSN { get; set; }
public string RDS { get; set; }
public string Reactor { get; set; }
public string Recipe { get; set; }
public string Zone { get; set; }

List<object> Shared.Properties.IProcessData.Details => _Details;

private readonly ILog _Log;

public ProcessData()
{
}

public ProcessData(IFileRead fileRead, Logistics logistics, List<FileInfo> fileInfoCollection, long tickOffset)
{
JobID = logistics.JobID;
fileInfoCollection.Clear();
_Details = new List<object>();
MesEntity = logistics.MesEntity;
_Log = LogManager.GetLogger(typeof(ProcessData));
CSV csv = Parse(fileRead, logistics, fileInfoCollection);
_ = GetImageBytes(csv);
string directory = Path.GetDirectoryName(logistics.ReportFullPath);
string fileName = Path.Combine(directory, $"{Path.GetFileNameWithoutExtension(logistics.ReportFullPath)}.data.json");
string json = JsonSerializer.Serialize(csv, new JsonSerializerOptions { WriteIndented = true });
File.WriteAllText(fileName, json);
if (!string.IsNullOrEmpty(csv.Info?.SampleName) && csv.ProfileHeader.ProfilePoints.Any())
SetValues(logistics, tickOffset, csv);
}

string IProcessData.GetCurrentReactor(IFileRead fileRead, Logistics logistics, Dictionary<string, string> reactors) => throw new Exception(string.Concat("See ", nameof(Parse)));

Tuple<string, Test[], JsonElement[], List<FileInfo>> IProcessData.GetResults(IFileRead fileRead, Logistics logistics, List<FileInfo> fileInfoCollection)
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
List<Test> tests = new();
foreach (object item in _Details)
tests.Add(Test.SRP2100);
List<IDescription> descriptions = fileRead.GetDescriptions(fileRead, tests, this);
if (tests.Count != descriptions.Count)
throw new Exception();
for (int i = 0; i < tests.Count; i++)
{
if (descriptions[i] is not Description description)
throw new Exception();
if (description.Test != (int)tests[i])
throw new Exception();
}
List<Description> fileReadDescriptions = (from l in descriptions select (Description)l).ToList();
string json = JsonSerializer.Serialize(fileReadDescriptions, fileReadDescriptions.GetType());
JsonElement[] jsonElements = JsonSerializer.Deserialize<JsonElement[]>(json);
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(logistics.Logistics1[0], tests.ToArray(), jsonElements, fileInfoCollection);
return results;
}

internal static DateTime GetDateTime(Logistics logistics, long tickOffset, DateTime dateTime)
{
DateTime result;
if (dateTime < logistics.DateTimeFromSequence.AddDays(1) && dateTime > logistics.DateTimeFromSequence.AddDays(-1))
result = new(dateTime.Ticks + tickOffset);
else
result = logistics.DateTimeFromSequence;
return result;
}

private static (string, string) GetReactorAndRDS(string defaultReactor, string defaultRDS, string text, string formattedText, string[] segments)
{
string rds;
string reactor;
if (string.IsNullOrEmpty(text) || segments.Length == 0 || string.IsNullOrEmpty(formattedText))
reactor = defaultReactor;
else
reactor = segments[0];
if (segments.Length <= 1 || !int.TryParse(segments[1], out int rdsValue) || rdsValue < 99)
rds = defaultRDS;
else
rds = segments[1];
if (reactor.Length > 3)
{
rds = reactor;
reactor = defaultReactor;
}
return new(reactor, rds);
}

private static (string, string) GetLayerAndPSN(string defaultLayer, string defaultPSN, string[] segments)
{
string psn;
string layer;
if (segments.Length <= 2)
{
psn = defaultPSN;
layer = defaultLayer;
}
else
{
string[] segmentsB = segments[2].Split('.');
psn = segmentsB[0];
if (segmentsB.Length <= 1)
layer = defaultLayer;
else
{
layer = segmentsB[1];
if (layer.Length > 1 && layer[0] == '0')
layer = layer.Substring(1);
}
}
return (layer, psn);
}

private static string GetZone(string[] segments)
{
string result;
if (segments.Length <= 3)
result = string.Empty;
else
{
result = segments[3];
if (result.Length > 1 && result[0] == '0')
result = result.Substring(1);
}
return result;
}

public static Descriptor GetDescriptor(string text)
{
Descriptor result;
string psn;
string rds;
string zone;
string layer;
string wafer;
string reactor;
string employee;
string defaultPSN = string.Empty;
string defaultRDS = string.Empty;
string defaultZone = string.Empty;
string defaultLayer = string.Empty;
string defaultReactor = string.Empty;
string defaultEmployee = string.Empty;
if (Regex.IsMatch(text, @"^[a-zA-z][0-9]{2,4}$"))
{
wafer = text.ToUpper();
psn = defaultPSN;
rds = defaultRDS;
zone = defaultZone;
layer = defaultLayer;
reactor = defaultReactor;
employee = defaultEmployee;
}
else if (string.IsNullOrEmpty(text) || (text.Length is 2 or 3 && Regex.IsMatch(text, "^[a-zA-z]{2,3}")))
{
wafer = text;
employee = text;
psn = defaultPSN;
rds = defaultRDS;
zone = defaultZone;
layer = defaultLayer;
reactor = defaultReactor;
}
else if (Regex.IsMatch(text, @"^[0-9]{2}[.][0-9]{1}[.]?[0-9]{0,1}"))
{
string[] segments = text.Split('.');
wafer = text;
psn = defaultPSN;
rds = defaultRDS;
layer = segments[1];
reactor = segments[0];
employee = defaultEmployee;
if (segments.Length <= 2)
zone = defaultZone;
else
zone = segments[2];
}
else
{
// Remove illegal characters \/:*?"<>| found in the Batch
wafer = Regex.Replace(text, @"[\\,\/,\:,\*,\?,\"",\<,\>,\|]", "_").Split('\r')[0].Split('\n')[0];
if (wafer.Length > 2 && wafer[0] == '1' && (wafer[1] == 'T' || wafer[1] == 't'))
wafer = wafer.Substring(2);
string[] segments = wafer.Split('-');
// bool hasRDS = Regex.IsMatch(wafer, "[-]?([QP][0-9]{4,}|[0-9]{5,})[-]?");
(reactor, rds) = GetReactorAndRDS(defaultReactor, defaultRDS, text, wafer, segments);
(layer, psn) = GetLayerAndPSN(defaultLayer, defaultPSN, segments);
zone = GetZone(segments);
if (segments.Length <= 4)
employee = defaultEmployee;
else
employee = segments[4];
}
result = new(employee, layer, psn, rds, reactor, wafer, zone);
return result;
}

#pragma warning disable IDE0060
private static CSV Parse(IFileRead fileRead, Logistics logistics, List<FileInfo> fileInfoCollection)
#pragma warning restore IDE0060
{
CSV result;
string json = File.ReadAllText(logistics.ReportFullPath);
string directory = Path.GetDirectoryName(logistics.ReportFullPath);
string fileName = Path.Combine(directory, $"{Path.GetFileNameWithoutExtension(logistics.ReportFullPath)}.csv");
csv.CSV csv = JsonSerializer.Deserialize<csv.CSV>(json);
if (csv is null)
throw new NullReferenceException(nameof(csv));
result = new(csv);
if (File.Exists(fileName))
fileInfoCollection.Add(new(fileName));
fileInfoCollection.Add(logistics.FileInfo);
return result;
}

private void SetValues(Logistics logistics, long tickOffset, CSV csv)
{
string psn;
string rds;
string zone;
string layer;
Detail detail;
string reactor;
List<Detail> details = new();
string recipe = csv.Info.SampleName;
string employee = csv.Info.Operator;
string timeFormat = "yyyyMMddHHmmss";
DateTime dateTime = GetDateTime(logistics, tickOffset, csv.Info.DateTime);
string uniqueId = string.Concat(logistics.MesEntity, "_", logistics.DateTimeFromSequence.ToString(timeFormat));
// Remove illegal characters \/:*?"<>| found in the Batch
Descriptor descriptor = GetDescriptor(csv.Info.SampleName);
psn = descriptor.PSN;
rds = descriptor.RDS;
zone = descriptor.Zone;
layer = descriptor.Layer;
reactor = descriptor.Reactor;
if (string.IsNullOrEmpty(employee))
employee = descriptor.Employee;
PSN = psn;
RDS = rds;
Date = dateTime;
Zone = zone;
Layer = layer;
Recipe = recipe;
Reactor = reactor;
Employee = employee;
JobID = logistics.JobID;
foreach (ProfilePoint profilePoint in csv.ProfileHeader.ProfilePoints)
{
detail = new()
{
HeaderUniqueId = uniqueId,
UniqueId = string.Concat(uniqueId, "_Point-", profilePoint.Number),
Depth = profilePoint.Depth.ToString(),
Raw = profilePoint.Raw.ToString(),
Edited = string.Concat(profilePoint.Edited),
Resistivity = string.Concat(profilePoint.Resistivity),
CD = string.Concat(profilePoint.CD),
};
details.Add(detail);
}
_Details.AddRange(details);
}

#nullable enable
#pragma warning disable CA1416

internal static List<Description> GetDescriptions(JsonElement[] jsonElements)
{
List<Description> results = new();
Description? description;
JsonSerializerOptions jsonSerializerOptions = new() { NumberHandling = JsonNumberHandling.AllowReadingFromString | JsonNumberHandling.WriteAsString };
foreach (JsonElement jsonElement in jsonElements)
{
if (jsonElement.ValueKind != JsonValueKind.Object)
throw new Exception();
description = JsonSerializer.Deserialize<Description>(jsonElement.ToString(), jsonSerializerOptions);
if (description is null)
continue;
results.Add(description);
}
return results;
}

private static void GetMinMax(List<ProfilePoint> profilePoints, out double maxDepth, out int concentrationMaxD, out int concentrationMinD, out int resistanceEditedMaxD, out int resistanceEditedMinD, out int resistivityMaxD, out int resistivityMinD)
{
maxDepth = 30;
concentrationMaxD = -99;
concentrationMinD = 99;
resistanceEditedMaxD = -99;
resistanceEditedMinD = 99;
resistivityMaxD = -99;
resistivityMinD = 99;
foreach (ProfilePoint profilePoint in profilePoints)
{
if (profilePoint.Depth > 0 && profilePoint.Edited is not null && profilePoint.Resistivity is not null && profilePoint.CD is not null)
{
maxDepth = profilePoint.Depth;
if (Math.Log10(profilePoint.Resistivity.Value) < resistivityMinD)
resistivityMinD = (int)Math.Log10(profilePoint.Resistivity.Value);
if (Math.Ceiling(Math.Log10(profilePoint.Resistivity.Value)) > resistivityMaxD)
resistivityMaxD = (int)Math.Ceiling(Math.Log10(profilePoint.Resistivity.Value));
if (Math.Log10(profilePoint.Edited.Value) < resistanceEditedMinD)
resistanceEditedMinD = (int)Math.Log10(profilePoint.Edited.Value);
if (Math.Ceiling(Math.Log10(profilePoint.Edited.Value)) > resistanceEditedMaxD)
resistanceEditedMaxD = (int)Math.Ceiling(Math.Log10(profilePoint.Edited.Value));
if (Math.Log10(profilePoint.CD.Value) < concentrationMinD)
concentrationMinD = (int)Math.Log10(profilePoint.CD.Value);
if (Math.Ceiling(Math.Log10(profilePoint.CD.Value)) > concentrationMaxD)
concentrationMaxD = (int)Math.Ceiling(Math.Log10(profilePoint.CD.Value));
}
}
}

private static PointF GetPointF(double x, double y) =>
new((float)x, (float)y);

private static RectangleF GetRectangleF(double left, double top, double width, double height) =>
new((float)left, (float)top, (float)width, (float)height);

private static void DrawLine(Graphics graphics, Pen pen, double x1, double y1, double x2, double y2) =>
graphics.DrawLine(pen, (float)x1, (float)y1, (float)x2, (float)y2);

private static void DrawString(Graphics graphics, string s, Font font, Brush brush, double x, double y) =>
graphics.DrawString(s, font, brush, (float)x, (float)y);

private static void FillEllipse(Graphics graphics, Brush brush, double x, double y, double width, double height) =>
graphics.FillEllipse(brush, (float)x, (float)y, (float)width, (float)height);

private static void DrawString(Graphics graphics, string s, Font font, Brush brush, double x, double y, StringFormat stringFormat) =>
graphics.DrawString(s, font, brush, (float)x, (float)y, stringFormat);

internal static byte[] GetImageBytes(CSV csv)
{
if (csv.Info is null)
throw new NullReferenceException(nameof(csv.Info));
if (csv.Setup is null)
throw new NullReferenceException(nameof(csv.Setup));
if (csv.ProfileHeader is null)
throw new NullReferenceException(nameof(csv.ProfileHeader));
if (csv.LayerHeader is null)
throw new NullReferenceException(nameof(csv.LayerHeader));

double maxDepth;
int concentrationMaxD, concentrationMinD, resistanceEditedMaxD, resistanceEditedMinD, resistivityMaxD, resistivityMinD;
GetMinMax(csv.ProfileHeader.ProfilePoints, out maxDepth, out concentrationMaxD, out concentrationMinD, out resistanceEditedMaxD, out resistanceEditedMinD, out resistivityMaxD, out resistivityMinD);

int decades;
byte[] bytes;
double point;
int width = 694;
int height = 714;
int position = -1;
Pen pen = Pens.Black;
int blocksOfDepth = 6;
RectangleF[] rectangles;
double topChartArea = 90;
double widthOfDepthBlock;
double leftChartArea = 60;
double widthOfBlacks = 120;
Brush brush = Brushes.Black;
double widthChartArea = 500;
double heightChartArea = 600;
StringBuilder layers = new();
Font consolas = new("Consolas", 9);
PointF[]? resistivityPoints = null;
StringFormat sfFormatRight = new();
Color backgroundColor = Color.White;
PointF[]? concentrationPoints = null;
Brush resistivityBrush = Brushes.Green;
Brush concentrationBrush = Brushes.Blue;
Brush resistanceRawBrush = Brushes.Black;
Pen resistivityPen = new(Color.Green, 2);
Brush resistanceEditedBrush = Brushes.Red;
double sizeOfBlock = heightChartArea / 18;
Pen concentrationPen = new(Color.Blue, 2);
Brush backgroundBrush = Brushes.WhiteSmoke;
Pen resistanceRawPen = new(Color.Black, 2);
Pen transitionWidthPen = new(Color.Orange);
Pen resistanceEditedPen = new(Color.Red, 2);
Font consolasBold = new("Consolas", 9); // , FontStyle.Bold)

concentrationPen.DashStyle = System.Drawing.Drawing2D.DashStyle.Dash;
concentrationPen.DashPattern = new float[] { 5, 4 };
resistanceEditedPen.DashStyle = System.Drawing.Drawing2D.DashStyle.Custom;
resistanceEditedPen.DashPattern = new float[] { 1, 3 };
resistanceRawPen.DashStyle = System.Drawing.Drawing2D.DashStyle.Custom;
resistanceRawPen.DashPattern = new float[] { 1, 3 };
transitionWidthPen.DashStyle = System.Drawing.Drawing2D.DashStyle.Dash;
transitionWidthPen.DashPattern = new float[] { 5, 4 };
sfFormatRight.Alignment = StringAlignment.Far;

Bitmap bitmap = new(width, height, System.Drawing.Imaging.PixelFormat.Format32bppArgb);
Graphics graphics = Graphics.FromImage(bitmap);
graphics.Clear(backgroundColor);

widthOfDepthBlock = Math.Ceiling(maxDepth / 3) * 3 / 6;
// maxDepth = widthOfDepthBlock * blocksOfDepth;

decades = resistivityMaxD - resistivityMinD;
if (resistanceEditedMaxD - resistanceEditedMinD > decades)
decades = resistanceEditedMaxD - resistanceEditedMinD;
if (concentrationMaxD - concentrationMinD > decades)
decades = concentrationMaxD - concentrationMinD;

rectangles = new RectangleF[1];
rectangles[0] = GetRectangleF(leftChartArea, topChartArea, widthChartArea, heightChartArea);
graphics.FillRectangles(backgroundBrush, rectangles);
rectangles = new RectangleF[18];
rectangles[0] = GetRectangleF(leftChartArea, 10, widthChartArea, 65);
// rectangles[1] = GetRectangleF(leftChartArea + widthChartArea, topChartArea, widthOfBlacks, sizeOfBlock * 5);
rectangles[1] = GetRectangleF(leftChartArea + widthChartArea, topChartArea + sizeOfBlock * 0, widthOfBlacks, sizeOfBlock);
rectangles[2] = GetRectangleF(leftChartArea + widthChartArea, topChartArea + sizeOfBlock * 1, widthOfBlacks, sizeOfBlock);
rectangles[3] = GetRectangleF(leftChartArea + widthChartArea, topChartArea + sizeOfBlock * 2, widthOfBlacks, sizeOfBlock);
rectangles[4] = GetRectangleF(leftChartArea + widthChartArea, topChartArea + sizeOfBlock * 3, widthOfBlacks, sizeOfBlock);
rectangles[5] = GetRectangleF(leftChartArea + widthChartArea, topChartArea + sizeOfBlock * 4, widthOfBlacks, sizeOfBlock);
rectangles[6] = GetRectangleF(leftChartArea + widthChartArea, topChartArea + sizeOfBlock * 5, widthOfBlacks, sizeOfBlock);
rectangles[7] = GetRectangleF(leftChartArea + widthChartArea, topChartArea + sizeOfBlock * 6, widthOfBlacks, sizeOfBlock);
rectangles[8] = GetRectangleF(leftChartArea + widthChartArea, topChartArea + sizeOfBlock * 7, widthOfBlacks, sizeOfBlock);
rectangles[9] = GetRectangleF(leftChartArea + widthChartArea, topChartArea + sizeOfBlock * 8, widthOfBlacks, sizeOfBlock);
rectangles[10] = GetRectangleF(leftChartArea + widthChartArea, topChartArea + sizeOfBlock * 9, widthOfBlacks, sizeOfBlock);
rectangles[11] = GetRectangleF(leftChartArea + widthChartArea, topChartArea + sizeOfBlock * 10, widthOfBlacks, sizeOfBlock);
rectangles[12] = GetRectangleF(leftChartArea + widthChartArea, topChartArea + sizeOfBlock * 11, widthOfBlacks, sizeOfBlock);
rectangles[13] = GetRectangleF(leftChartArea + widthChartArea, topChartArea + sizeOfBlock * 12, widthOfBlacks, sizeOfBlock);
rectangles[14] = GetRectangleF(leftChartArea + widthChartArea, topChartArea + sizeOfBlock * 13, widthOfBlacks, sizeOfBlock);
rectangles[15] = GetRectangleF(leftChartArea + widthChartArea, topChartArea + sizeOfBlock * 14, widthOfBlacks, sizeOfBlock);
rectangles[16] = GetRectangleF(leftChartArea + widthChartArea, topChartArea + sizeOfBlock * 15, widthOfBlacks, sizeOfBlock);
rectangles[17] = GetRectangleF(leftChartArea + widthChartArea, topChartArea + sizeOfBlock * 16, widthOfBlacks, sizeOfBlock * 2);

graphics.FillRectangles(Brushes.White, rectangles);
graphics.DrawRectangles(pen, rectangles);

foreach (Layer layer in csv.LayerHeader.Layers)
_ = layers.AppendLine(string.Concat("First Pt. ", layer.FirstPoint, " Last Pt. ", layer.LastPoint, " Type ", layer.Type, " Smoothing ", layer.Smoothing, " Correction ", layer.Correction));
_ = layers.AppendLine(string.Join(" ", csv.Info.Comments));

graphics.DrawString(layers.ToString(), consolas, brush, rectangles[0]);
graphics.DrawString(string.Concat(csv.Info.SystemId, Environment.NewLine, csv.Info.SoftwareVersion), consolas, brush, rectangles[9]);
graphics.DrawString(string.Concat("SURFACE FINISH", Environment.NewLine, csv.Setup.Finish), consolas, brush, rectangles[10]);
graphics.DrawString(string.Concat("ORIENTATION", Environment.NewLine, csv.Setup.Orientation), consolas, brush, rectangles[11]);
graphics.DrawString(string.Concat("BEVEL ANGLE", Environment.NewLine, csv.Setup.SineBevelAngle), consolas, brush, rectangles[12]);
graphics.DrawString(string.Concat("X-STEP (um)", Environment.NewLine, csv.Setup.Steps.First().X), consolas, brush, rectangles[13]);
graphics.DrawString(string.Concat("PROBE LOAD (gm)", Environment.NewLine, csv.Setup.ProbeLoad), consolas, brush, rectangles[14]);
graphics.DrawString(string.Concat("SPACING (um)", Environment.NewLine, csv.Setup.ProbeSpacing), consolas, brush, rectangles[15]);
graphics.DrawString(string.Concat("OPERATOR", Environment.NewLine, csv.Info.Operator), consolas, brush, rectangles[16]);
graphics.DrawString(string.Concat("DATE", Environment.NewLine, csv.Info.DateTime.ToString("dd MMM yy"), Environment.NewLine, "TIME", Environment.NewLine, csv.Info.DateTime.ToString("HH:mm:ss tt")), consolas, brush, rectangles[17]);

DrawLine(graphics, concentrationPen, 13, 6, 13, 40);
graphics.DrawString("C", consolasBold, concentrationBrush, 8, 41);
graphics.DrawString("D", consolasBold, concentrationBrush, 8, 53);
DrawLine(graphics, resistivityPen, 28, 6, 28, 40);
graphics.DrawString("ρ", consolasBold, resistivityBrush, 21, 41);
graphics.DrawString("c", consolasBold, resistivityBrush, 21, 53);
graphics.DrawString("m", consolasBold, resistivityBrush, 21, 62);
DrawLine(graphics, resistanceRawPen, 39, 7, 39, 41);
graphics.DrawString("Ω", consolasBold, resistanceRawBrush, 34, 41);
DrawLine(graphics, resistanceEditedPen, 51, 7, 51, 41);
graphics.DrawString("Ω", consolasBold, resistanceEditedBrush, 46, 41);
graphics.DrawString("E", consolasBold, resistanceEditedBrush, 46, 53);

for (int iLoop = decades - 1; iLoop >= 0; iLoop += -1)
{
for (int iLoop_Sub = 1; iLoop_Sub <= 10; iLoop_Sub++)
DrawLine(graphics, Pens.LightGray, leftChartArea, topChartArea + heightChartArea - (iLoop * heightChartArea / decades) + heightChartArea / decades * Math.Log10(iLoop_Sub), leftChartArea + widthChartArea, topChartArea + heightChartArea - (iLoop * heightChartArea / decades) + heightChartArea / decades * Math.Log10(iLoop_Sub));
}

DrawString(graphics, "0", consolas, brush, leftChartArea - 5, topChartArea + heightChartArea + 5);

for (int iLoop = 0; iLoop <= blocksOfDepth - 1; iLoop++)
{
for (int iLoop_Sub = 1; iLoop_Sub <= 10; iLoop_Sub++)
DrawLine(graphics, Pens.LightGray, leftChartArea + (iLoop * widthChartArea / blocksOfDepth) + iLoop_Sub * widthChartArea / blocksOfDepth / 10, topChartArea, leftChartArea + (iLoop * widthChartArea / blocksOfDepth) + (iLoop_Sub * widthChartArea / blocksOfDepth / 10), topChartArea + heightChartArea);
DrawString(graphics, ((iLoop + 1) * widthOfDepthBlock).ToString("0.0"), consolas, brush, leftChartArea + 13 + (iLoop + 1) * widthChartArea / blocksOfDepth, topChartArea + heightChartArea + 5, sfFormatRight);
}
DrawString(graphics, "(um)", consolas, brush, leftChartArea + widthChartArea + 12, topChartArea + heightChartArea + 5);

for (int iLoop = 0; iLoop <= decades; iLoop++)
{
DrawLine(graphics, pen, leftChartArea, topChartArea + (iLoop * heightChartArea / decades), leftChartArea + widthChartArea, topChartArea + (iLoop * heightChartArea / decades));
DrawString(graphics, (decades - iLoop + concentrationMinD).ToString("0"), consolasBold, concentrationBrush, 20, topChartArea - 10 + (iLoop * heightChartArea / decades), sfFormatRight);
DrawString(graphics, (decades - iLoop + resistivityMinD).ToString("0"), consolasBold, resistivityBrush, 33, topChartArea - 10 + (iLoop * heightChartArea / decades), sfFormatRight);
DrawString(graphics, (decades - iLoop + resistanceEditedMinD).ToString("0"), consolasBold, resistanceRawBrush, 45, topChartArea - 10 + (iLoop * heightChartArea / decades), sfFormatRight);
DrawString(graphics, (decades - iLoop + resistanceEditedMinD).ToString("0"), consolasBold, resistanceEditedBrush, 58, topChartArea - 10 + (iLoop * heightChartArea / decades), sfFormatRight);
}
for (int iLoop = 0; iLoop <= blocksOfDepth; iLoop++)
DrawLine(graphics, pen, leftChartArea + (iLoop * widthChartArea / blocksOfDepth), topChartArea, leftChartArea + (iLoop * widthChartArea / blocksOfDepth), topChartArea + heightChartArea);

foreach (ProfilePoint profilePoint in csv.ProfileHeader.ProfilePoints)
{
if (profilePoint.Depth < 0 || profilePoint.Edited is null || profilePoint.Resistivity is null || profilePoint.CD is null)
continue;
// Rho (Resistivity || Resistivity_ohm_cm) = Green
if (profilePoint.Depth < 0 || profilePoint.Edited.Value == 0 || profilePoint.Resistivity.Value == 0 || profilePoint.CD.Value == 0)
continue;
point = topChartArea + heightChartArea - ((Math.Log10(profilePoint.Resistivity.Value) - resistivityMinD) / decades * heightChartArea);
if (point < (90 - 2))
continue;
if (point > (topChartArea + heightChartArea + 2))
continue;
position += 1;
PointF[]? resistivityOldPoints = resistivityPoints;
resistivityPoints = new PointF[position + 1];
if (resistivityOldPoints != null)
Array.Copy(resistivityOldPoints, resistivityPoints, Math.Min(position + 1, resistivityOldPoints.Length));
PointF[]? concentrationPointsOld = concentrationPoints;
concentrationPoints = new PointF[position + 1];
if (concentrationPointsOld != null)
Array.Copy(concentrationPointsOld, concentrationPoints, Math.Min(position + 1, concentrationPointsOld.Length));
resistivityPoints[position] = GetPointF(leftChartArea + profilePoint.Depth / maxDepth * widthChartArea, topChartArea + heightChartArea - ((Math.Log10(profilePoint.Resistivity.Value) - resistivityMinD) / decades * heightChartArea));
concentrationPoints[position] = GetPointF(leftChartArea + profilePoint.Depth / maxDepth * widthChartArea, topChartArea + heightChartArea - ((Math.Log10(profilePoint.CD.Value) - concentrationMinD) / decades * heightChartArea));
graphics.SmoothingMode = System.Drawing.Drawing2D.SmoothingMode.HighQuality;
FillEllipse(graphics, resistanceRawBrush, leftChartArea + profilePoint.Depth / maxDepth * widthChartArea - 2, topChartArea + heightChartArea - ((Math.Log10(profilePoint.Raw) - resistanceEditedMinD) / decades * heightChartArea) - 2, 3, 3);
FillEllipse(graphics, resistanceEditedBrush, leftChartArea + profilePoint.Depth / maxDepth * widthChartArea - 2, topChartArea + heightChartArea - ((Math.Log10(profilePoint.Edited.Value) - resistanceEditedMinD) / decades * heightChartArea) - 2, 3, 3);
graphics.SmoothingMode = System.Drawing.Drawing2D.SmoothingMode.Default;

}
graphics.SmoothingMode = System.Drawing.Drawing2D.SmoothingMode.HighQuality;
if (resistivityPoints is not null)
graphics.DrawLines(resistivityPen, resistivityPoints);
graphics.SmoothingMode = System.Drawing.Drawing2D.SmoothingMode.Default;
graphics.SmoothingMode = System.Drawing.Drawing2D.SmoothingMode.HighQuality;
if (concentrationPoints is not null)
graphics.DrawLines(concentrationPen, concentrationPoints);
graphics.SmoothingMode = System.Drawing.Drawing2D.SmoothingMode.Default;

using MemoryStream msMemoryStream = new();
bitmap.Save(msMemoryStream, System.Drawing.Imaging.ImageFormat.Png);
// MemoryStream.ToArray returns the full PNG contents regardless of the stream position,
// so the original oversized buffer allocation and Read call were redundant and are dropped.
bytes = msMemoryStream.ToArray();
return bytes;
}

}
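Editor note: GetDescriptor is the public entry point for decoding an operator-entered sample name into reactor, RDS, PSN, layer, zone and employee via the private parsing helpers above. A short usage sketch; the input value is made up for illustration and follows the reactor-RDS-PSN.layer-zone-employee pattern the helpers expect.

Descriptor descriptor = ProcessData.GetDescriptor("42-123456-4588.1-2-OP");
// descriptor.Reactor  == "42"
// descriptor.RDS      == "123456"
// descriptor.PSN      == "4588"
// descriptor.Layer    == "1"
// descriptor.Zone     == "2"
// descriptor.Employee == "OP"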
28
Adaptation/FileHandlers/json/Profile.cs
Normal file
@ -0,0 +1,28 @@
namespace Adaptation.FileHandlers.json;

public class Profile
{

public string Id { get; }
public string Name { get; }
public string Label { get; }
public int Allocated { get; }
public int Used { get; }
public string List { get; }

public Profile(string id,
string name,
string label,
int allocated,
int used,
string list)
{
Id = id;
Name = name;
Label = label;
Allocated = allocated;
Used = used;
List = list;
}

}
49
Adaptation/FileHandlers/json/ProfileHeader.cs
Normal file
@ -0,0 +1,49 @@
using System.Collections.Generic;

namespace Adaptation.FileHandlers.json;

public class ProfileHeader
{

public int NumberOfProfiles { get; }
public List<Profile> Profiles { get; }
public List<ProfilePoint> ProfilePoints { get; }

internal ProfileHeader(csv.ProfileHeader profileHeader)
{
Profile profile;
ProfilePoint profilePoint;
List<Profile> profiles = new();
List<ProfilePoint> profilePoints = new();
NumberOfProfiles = int.Parse(profileHeader.NumberOfProfiles);
Profiles = profiles;
ProfilePoints = profilePoints;
foreach (csv.Profile csvProfile in profileHeader.Profiles)
{
profile = new
(
id: csvProfile.Id,
name: csvProfile.Name,
label: csvProfile.Label,
allocated: int.Parse(csvProfile.Allocated),
used: int.Parse(csvProfile.Used),
list: csvProfile.List
);
profiles.Add(profile);
}
foreach (csv.ProfilePoint csvProfilePoint in profileHeader.ProfilePoints)
{
profilePoint = new
(
number: int.Parse(csvProfilePoint.Number),
depth: double.Parse(csvProfilePoint.Depth),
raw: double.Parse(csvProfilePoint.Raw),
edited: double.Parse(csvProfilePoint.Edited),
resistivity: double.Parse(csvProfilePoint.Resistivity),
cd: double.Parse(csvProfilePoint.CD)
);
profilePoints.Add(profilePoint);
}
}

}
28
Adaptation/FileHandlers/json/ProfilePoint.cs
Normal file
@ -0,0 +1,28 @@
namespace Adaptation.FileHandlers.json;

public class ProfilePoint
{

public int Number { get; }
public double Depth { get; }
public double Raw { get; }
public double? Edited { get; }
public double? Resistivity { get; }
public double? CD { get; }

public ProfilePoint(int number,
double depth,
double raw,
double? edited,
double? resistivity,
double? cd)
{
Number = number;
Depth = depth;
Raw = raw;
Edited = edited;
Resistivity = resistivity;
CD = cd;
}

}
33
Adaptation/FileHandlers/json/RawData.cs
Normal file
@ -0,0 +1,33 @@
using System.Collections.Generic;

namespace Adaptation.FileHandlers.json;

public class RawData
{

public int TotalPoints { get; }
public List<Point> Points { get; }

internal RawData(csv.RawData rawData)
{
Point point;
List<Point> points = new();
TotalPoints = int.Parse(rawData.TotalPoints);
Points = points;
foreach (csv.Point csvPoint in rawData.Points)
{
point = new
(
number: double.Parse(csvPoint.Number),
depth: double.Parse(csvPoint.Depth),
resistance: double.Parse(csvPoint.Resistance),
stageX: double.Parse(csvPoint.StageX),
stageY: double.Parse(csvPoint.StageY),
stageZ: double.Parse(csvPoint.StageZ),
stageT: double.Parse(csvPoint.StageT)
);
points.Add(point);
}
}

}
49
Adaptation/FileHandlers/json/Setup.cs
Normal file
@ -0,0 +1,49 @@
using System.Collections.Generic;

namespace Adaptation.FileHandlers.json;

public class Setup
{

public string Finish { get; }
public string NorthProbeID { get; }
public string SouthProbeID { get; }
public string MeasurementPolarity { get; }
public double SineBevelAngle { get; }
public double ContactRadius { get; }
public double ProbeSpacing { get; }
public double ProbeLoad { get; }
public string Orientation { get; }
public int NumberOfStepSizes { get; }
public List<Step> Steps { get; }

internal Setup(csv.Setup setup)
{
Step step;
List<Step> steps = new();
Finish = setup.Finish;
NorthProbeID = setup.NorthProbeID;
SouthProbeID = setup.SouthProbeID;
MeasurementPolarity = setup.MeasurementPolarity;
SineBevelAngle = double.Parse(setup.SineBevelAngle);
ContactRadius = double.Parse(setup.ContactRadius);
ProbeSpacing = double.Parse(setup.ProbeSpacing);
ProbeLoad = double.Parse(setup.ProbeLoad);
Orientation = setup.Orientation;
NumberOfStepSizes = int.Parse(setup.NumberOfStepSizes);
Steps = steps;
foreach (csv.Step csvStep in setup.Steps)
{
Step step1 = new
(
number: int.Parse(csvStep.Number),
points: int.Parse(csvStep.Points),
x: double.Parse(csvStep.X),
y: double.Parse(csvStep.Y)
);
step = step1;
steps.Add(step);
}
}

}
22
Adaptation/FileHandlers/json/Step.cs
Normal file
@ -0,0 +1,22 @@
namespace Adaptation.FileHandlers.json;

public class Step
{

public int Number { get; }
public int Points { get; }
public double X { get; }
public double Y { get; }

public Step(int number,
int points,
double x,
double y)
{
Number = number;
Points = points;
X = x;
Y = y;
}

}