Refactor and clean up file handling and processing logic
- Removed the FileRead class from Adaptation.FileHandlers.SPaCe as it was no longer needed. - Updated TypeScript and JavaScript files to improve the handling of work item updates, including new checks for WeightedShortestJobFirstFibonacci. - Modified the MESAFIBACKLOG.yml to remove unnecessary file copy tasks. - Enhanced the FileRead class in Adaptation.Shared to improve directory searching logic. - Added XML generation functionality to ProcessDataStandardFormat, allowing for better data representation. - Updated ProcessDataStandardFormatMapping to streamline the creation of mapping instances. - Cleaned up the project file by removing references to obsolete file handlers.
This commit is contained in:
37
Adaptation/.vscode/launch.json
vendored
37
Adaptation/.vscode/launch.json
vendored
@ -1,10 +1,43 @@
|
||||
{
|
||||
"configurations": [
|
||||
{
|
||||
"mode": "debug",
|
||||
"name": "Go launch file",
|
||||
"program": "${file}",
|
||||
"request": "launch",
|
||||
"type": "go"
|
||||
},
|
||||
{
|
||||
"name": "node Launch Current Opened File",
|
||||
"program": "${file}",
|
||||
"request": "launch",
|
||||
"type": "node"
|
||||
},
|
||||
{
|
||||
"cwd": "${workspaceFolder}",
|
||||
"internalConsoleOptions": "neverOpen",
|
||||
"name": "Debug File",
|
||||
"program": "${file}",
|
||||
"request": "launch",
|
||||
"stopOnEntry": false,
|
||||
"type": "bun",
|
||||
"watchMode": false
|
||||
},
|
||||
{
|
||||
"cwd": "${workspaceFolder}",
|
||||
"internalConsoleOptions": "neverOpen",
|
||||
"name": "Run File",
|
||||
"noDebug": true,
|
||||
"program": "${file}",
|
||||
"request": "launch",
|
||||
"type": "bun",
|
||||
"watchMode": false
|
||||
},
|
||||
{
|
||||
"name": ".NET Core Attach",
|
||||
"type": "coreclr",
|
||||
"processId": 32760,
|
||||
"request": "attach",
|
||||
"processId": 20292
|
||||
"type": "coreclr"
|
||||
}
|
||||
]
|
||||
}
|
1
Adaptation/.vscode/settings.json
vendored
1
Adaptation/.vscode/settings.json
vendored
@ -26,6 +26,7 @@
|
||||
"Smpl",
|
||||
"Villach",
|
||||
"Vrng",
|
||||
"VSTS",
|
||||
"Weightest",
|
||||
"WSJF"
|
||||
],
|
||||
|
@ -1,135 +0,0 @@
|
||||
using Adaptation.Eaf.Management.ConfigurationData.CellAutomation;
|
||||
using Adaptation.Ifx.Eaf.EquipmentConnector.File.Configuration;
|
||||
using Adaptation.Shared;
|
||||
using Adaptation.Shared.Duplicator;
|
||||
using Adaptation.Shared.Methods;
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Text.Json;
|
||||
|
||||
namespace Adaptation.FileHandlers.APC;
|
||||
|
||||
public class FileRead : Shared.FileRead, IFileRead
|
||||
{
|
||||
|
||||
public FileRead(ISMTP smtp, Dictionary<string, string> fileParameter, string cellInstanceName, int? connectionCount, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList<ModelObjectParameterDefinition> modelObjectParameters, string equipmentDictionaryName, Dictionary<string, List<long>> dummyRuns, Dictionary<long, List<Shared.Metrology.WS.Results>> staticRuns, bool useCyclicalForDescription, bool isEAFHosted) :
|
||||
base(new Description(), false, smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null)
|
||||
{
|
||||
_MinFileLength = 10;
|
||||
_NullData = string.Empty;
|
||||
_Logistics = new(this);
|
||||
if (_FileParameter is null)
|
||||
throw new Exception(cellInstanceConnectionName);
|
||||
if (_ModelObjectParameterDefinitions is null)
|
||||
throw new Exception(cellInstanceConnectionName);
|
||||
if (!_IsDuplicator)
|
||||
throw new Exception(cellInstanceConnectionName);
|
||||
}
|
||||
|
||||
void IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, Exception exception)
|
||||
{
|
||||
bool isErrorFile = exception is not null;
|
||||
if (!isErrorFile && !string.IsNullOrEmpty(_Logistics.ReportFullPath))
|
||||
{
|
||||
FileInfo fileInfo = new(_Logistics.ReportFullPath);
|
||||
if (fileInfo.Exists && fileInfo.LastWriteTime < fileInfo.CreationTime)
|
||||
File.SetLastWriteTime(_Logistics.ReportFullPath, fileInfo.CreationTime);
|
||||
}
|
||||
Move(extractResults);
|
||||
}
|
||||
|
||||
void IFileRead.WaitForThread() => WaitForThread(thread: null, threadExceptions: null);
|
||||
|
||||
string IFileRead.GetEventDescription()
|
||||
{
|
||||
string result = _Description.GetEventDescription();
|
||||
return result;
|
||||
}
|
||||
|
||||
List<string> IFileRead.GetHeaderNames()
|
||||
{
|
||||
List<string> results = _Description.GetHeaderNames();
|
||||
return results;
|
||||
}
|
||||
|
||||
string[] IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, string to, string from, string resolvedFileLocation, Exception exception)
|
||||
{
|
||||
string[] results = Move(extractResults, to, from, resolvedFileLocation, exception);
|
||||
return results;
|
||||
}
|
||||
|
||||
JsonProperty[] IFileRead.GetDefault()
|
||||
{
|
||||
JsonProperty[] results = _Description.GetDefault(this, _Logistics);
|
||||
return results;
|
||||
}
|
||||
|
||||
Dictionary<string, string> IFileRead.GetDisplayNamesJsonElement()
|
||||
{
|
||||
Dictionary<string, string> results = _Description.GetDisplayNamesJsonElement(this);
|
||||
return results;
|
||||
}
|
||||
|
||||
List<IDescription> IFileRead.GetDescriptions(IFileRead fileRead, List<Test> tests, IProcessData processData)
|
||||
{
|
||||
List<IDescription> results = _Description.GetDescriptions(fileRead, _Logistics, tests, processData);
|
||||
return results;
|
||||
}
|
||||
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.GetExtractResult(string reportFullPath, string eventName)
|
||||
{
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
|
||||
if (string.IsNullOrEmpty(eventName))
|
||||
throw new Exception();
|
||||
_ReportFullPath = reportFullPath;
|
||||
DateTime dateTime = DateTime.Now;
|
||||
results = GetExtractResult(reportFullPath, dateTime);
|
||||
if (results.Item3 is null)
|
||||
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(results.Item1, Array.Empty<Test>(), JsonSerializer.Deserialize<JsonElement[]>("[]"), results.Item4);
|
||||
if (results.Item3.Length > 0 && _IsEAFHosted)
|
||||
WritePDSF(this, results.Item3);
|
||||
UpdateLastTicksDuration(DateTime.Now.Ticks - dateTime.Ticks);
|
||||
return results;
|
||||
}
|
||||
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.ReExtract()
|
||||
{
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
|
||||
List<string> headerNames = _Description.GetHeaderNames();
|
||||
Dictionary<string, string> keyValuePairs = _Description.GetDisplayNamesJsonElement(this);
|
||||
results = ReExtract(this, headerNames, keyValuePairs);
|
||||
return results;
|
||||
}
|
||||
|
||||
private void FileCopy<T>(string reportFullPath, DateTime dateTime, List<T> descriptions) where T : Shared.Properties.IDescription
|
||||
{
|
||||
bool isDummyRun = false;
|
||||
List<(Shared.Properties.IScopeInfo, string)> collection = new();
|
||||
string successDirectory = _FileConnectorConfiguration.AlternateTargetFolder;
|
||||
string fileNameAfterUnderscoreSplit = GetFileNameAfterUnderscoreSplit(reportFullPath);
|
||||
string duplicateDirectory = Path.Combine(_FileConnectorConfiguration.TargetFileLocation, fileNameAfterUnderscoreSplit);
|
||||
if (!Directory.Exists(duplicateDirectory))
|
||||
_ = Directory.CreateDirectory(duplicateDirectory);
|
||||
string duplicateFile = Path.Combine(duplicateDirectory, Path.GetFileName(reportFullPath));
|
||||
File.Copy(reportFullPath, duplicateFile, overwrite: true);
|
||||
WaitForFileConsumption(dateTime, descriptions, isDummyRun, successDirectory, duplicateDirectory, collection, duplicateFile);
|
||||
}
|
||||
|
||||
private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
|
||||
{
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
|
||||
ProcessDataStandardFormat processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath);
|
||||
_Logistics = new Logistics(reportFullPath, processDataStandardFormat);
|
||||
SetFileParameterLotIDToLogisticsMID();
|
||||
JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(processDataStandardFormat);
|
||||
List<Shared.Properties.IDescription> descriptions = GetDuplicatorDescriptions(jsonElements);
|
||||
Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
|
||||
if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
|
||||
FileCopy(reportFullPath, dateTime, descriptions);
|
||||
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics), tests, jsonElements, new List<FileInfo>());
|
||||
return results;
|
||||
}
|
||||
|
||||
}
|
@ -1,160 +0,0 @@
|
||||
using Adaptation.Eaf.Management.ConfigurationData.CellAutomation;
|
||||
using Adaptation.Ifx.Eaf.EquipmentConnector.File.Configuration;
|
||||
using Adaptation.Shared;
|
||||
using Adaptation.Shared.Duplicator;
|
||||
using Adaptation.Shared.Methods;
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Globalization;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Text.Json;
|
||||
|
||||
namespace Adaptation.FileHandlers.Archive;
|
||||
|
||||
public class FileRead : Shared.FileRead, IFileRead
|
||||
{
|
||||
|
||||
private readonly string _JobIdParentDirectory;
|
||||
private readonly string _JobIdArchiveParentDirectory;
|
||||
|
||||
public FileRead(ISMTP smtp, Dictionary<string, string> fileParameter, string cellInstanceName, int? connectionCount, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList<ModelObjectParameterDefinition> modelObjectParameters, string equipmentDictionaryName, Dictionary<string, List<long>> dummyRuns, Dictionary<long, List<Shared.Metrology.WS.Results>> staticRuns, bool useCyclicalForDescription, bool isEAFHosted) :
|
||||
base(new Description(), false, smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null)
|
||||
{
|
||||
_MinFileLength = 10;
|
||||
_NullData = string.Empty;
|
||||
_Logistics = new(this);
|
||||
if (_FileParameter is null)
|
||||
throw new Exception(cellInstanceConnectionName);
|
||||
if (_ModelObjectParameterDefinitions is null)
|
||||
throw new Exception(cellInstanceConnectionName);
|
||||
if (!_IsDuplicator)
|
||||
throw new Exception(cellInstanceConnectionName);
|
||||
_JobIdParentDirectory = GetJobIdParentDirectory(_FileConnectorConfiguration.SourceFileLocation);
|
||||
_JobIdArchiveParentDirectory = GetJobIdParentDirectory(_FileConnectorConfiguration.TargetFileLocation);
|
||||
}
|
||||
|
||||
void IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, Exception exception) => Move(extractResults);
|
||||
|
||||
void IFileRead.WaitForThread() => WaitForThread(thread: null, threadExceptions: null);
|
||||
|
||||
string IFileRead.GetEventDescription()
|
||||
{
|
||||
string result = _Description.GetEventDescription();
|
||||
return result;
|
||||
}
|
||||
|
||||
List<string> IFileRead.GetHeaderNames()
|
||||
{
|
||||
List<string> results = _Description.GetHeaderNames();
|
||||
return results;
|
||||
}
|
||||
|
||||
string[] IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, string to, string from, string resolvedFileLocation, Exception exception)
|
||||
{
|
||||
string[] results = Move(extractResults, to, from, resolvedFileLocation, exception);
|
||||
return results;
|
||||
}
|
||||
|
||||
JsonProperty[] IFileRead.GetDefault()
|
||||
{
|
||||
JsonProperty[] results = _Description.GetDefault(this, _Logistics);
|
||||
return results;
|
||||
}
|
||||
|
||||
Dictionary<string, string> IFileRead.GetDisplayNamesJsonElement()
|
||||
{
|
||||
Dictionary<string, string> results = _Description.GetDisplayNamesJsonElement(this);
|
||||
return results;
|
||||
}
|
||||
|
||||
List<IDescription> IFileRead.GetDescriptions(IFileRead fileRead, List<Test> tests, IProcessData processData)
|
||||
{
|
||||
List<IDescription> results = _Description.GetDescriptions(fileRead, _Logistics, tests, processData);
|
||||
return results;
|
||||
}
|
||||
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.GetExtractResult(string reportFullPath, string eventName)
|
||||
{
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
|
||||
if (string.IsNullOrEmpty(eventName))
|
||||
throw new Exception();
|
||||
_ReportFullPath = reportFullPath;
|
||||
DateTime dateTime = DateTime.Now;
|
||||
results = GetExtractResult(reportFullPath, dateTime);
|
||||
if (results.Item3 is null)
|
||||
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(results.Item1, Array.Empty<Test>(), JsonSerializer.Deserialize<JsonElement[]>("[]"), results.Item4);
|
||||
if (results.Item3.Length > 0 && _IsEAFHosted)
|
||||
WritePDSF(this, results.Item3);
|
||||
UpdateLastTicksDuration(DateTime.Now.Ticks - dateTime.Ticks);
|
||||
return results;
|
||||
}
|
||||
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.ReExtract()
|
||||
{
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
|
||||
List<string> headerNames = _Description.GetHeaderNames();
|
||||
Dictionary<string, string> keyValuePairs = _Description.GetDisplayNamesJsonElement(this);
|
||||
results = ReExtract(this, headerNames, keyValuePairs);
|
||||
return results;
|
||||
}
|
||||
|
||||
private static IEnumerable<string> GetDirectoriesRecursively(string path, string directoryNameSegment = null)
|
||||
{
|
||||
Queue<string> queue = new();
|
||||
queue.Enqueue(path);
|
||||
while (queue.Count > 0)
|
||||
{
|
||||
path = queue.Dequeue();
|
||||
foreach (string subDirectory in Directory.GetDirectories(path))
|
||||
{
|
||||
queue.Enqueue(subDirectory);
|
||||
if (string.IsNullOrEmpty(directoryNameSegment) || Path.GetFileName(subDirectory).Contains(directoryNameSegment))
|
||||
yield return subDirectory;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private void MoveArchive(string reportFullPath, DateTime dateTime)
|
||||
{
|
||||
if (dateTime == DateTime.MinValue)
|
||||
throw new ArgumentNullException(nameof(dateTime));
|
||||
string logisticsSequence = _Logistics.Sequence.ToString();
|
||||
string day = $"{_Logistics.DateTimeFromSequence:yyyy-MM-dd}";
|
||||
string weekOfYear = _Calendar.GetWeekOfYear(_Logistics.DateTimeFromSequence, CalendarWeekRule.FirstDay, DayOfWeek.Sunday).ToString("00");
|
||||
string weekDirectory = $"{_Logistics.DateTimeFromSequence:yyyy}_Week_{weekOfYear}";
|
||||
string destinationArchiveDirectory = Path.Combine(_JobIdArchiveParentDirectory, _Logistics.JobID, weekDirectory, day);
|
||||
if (!Directory.Exists(destinationArchiveDirectory))
|
||||
_ = Directory.CreateDirectory(destinationArchiveDirectory);
|
||||
string jobIdDirectory = Path.Combine(_JobIdParentDirectory, _Logistics.JobID);
|
||||
if (!Directory.Exists(jobIdDirectory))
|
||||
_ = Directory.CreateDirectory(jobIdDirectory);
|
||||
if (Directory.GetDirectories(jobIdDirectory).Length == 0)
|
||||
File.Copy(reportFullPath, Path.Combine(destinationArchiveDirectory, Path.GetFileName(reportFullPath)));
|
||||
else
|
||||
{
|
||||
string[] matchDirectories = GetDirectoriesRecursively(jobIdDirectory, logisticsSequence).ToArray();
|
||||
if (matchDirectories.Length != 1)
|
||||
throw new Exception("Didn't find directory by logistics sequence");
|
||||
string sourceDirectory = Path.GetDirectoryName(matchDirectories[0]);
|
||||
destinationArchiveDirectory = Path.Combine(destinationArchiveDirectory, Path.GetFileName(sourceDirectory));
|
||||
Directory.Move(sourceDirectory, destinationArchiveDirectory);
|
||||
}
|
||||
}
|
||||
|
||||
private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
|
||||
{
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
|
||||
ProcessDataStandardFormat processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath);
|
||||
_Logistics = new Logistics(reportFullPath, processDataStandardFormat);
|
||||
SetFileParameterLotIDToLogisticsMID();
|
||||
JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(processDataStandardFormat);
|
||||
List<Shared.Properties.IDescription> descriptions = GetDuplicatorDescriptions(jsonElements);
|
||||
Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
|
||||
if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
|
||||
MoveArchive(reportFullPath, dateTime);
|
||||
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics), tests, jsonElements, new List<FileInfo>());
|
||||
return results;
|
||||
}
|
||||
|
||||
}
|
@ -14,21 +14,11 @@ public class CellInstanceConnectionName
|
||||
IFileRead result = cellInstanceConnectionName switch
|
||||
{
|
||||
nameof(ADO) => new ADO.FileRead(smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null),
|
||||
nameof(APC) => new APC.FileRead(smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null),
|
||||
nameof(Archive) => new Archive.FileRead(smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null),
|
||||
nameof(DownloadWorkItems) => new DownloadWorkItems.FileRead(smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null),
|
||||
nameof(Dummy) => new Dummy.FileRead(smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null),
|
||||
nameof(IQSSi) => new IQSSi.FileRead(smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null),
|
||||
nameof(json) => new json.FileRead(smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null),
|
||||
nameof(Kanban) => new Kanban.FileRead(smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null),
|
||||
nameof(Markdown) => new Markdown.FileRead(smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null),
|
||||
nameof(MoveMatchingFiles) => new MoveMatchingFiles.FileRead(smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null),
|
||||
nameof(OpenInsight) => new OpenInsight.FileRead(smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null),
|
||||
nameof(OpenInsightMetrologyViewer) => new OpenInsightMetrologyViewer.FileRead(smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null),
|
||||
nameof(OpenInsightMetrologyViewerAttachments) => new OpenInsightMetrologyViewerAttachments.FileRead(smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null),
|
||||
nameof(Priority) => new Priority.FileRead(smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null),
|
||||
nameof(Processed) => new Processed.FileRead(smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null),
|
||||
nameof(SPaCe) => new SPaCe.FileRead(smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null),
|
||||
nameof(Violation) => new Violation.FileRead(smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null),
|
||||
_ => throw new Exception($"\"{cellInstanceConnectionName}\" not mapped")
|
||||
};
|
||||
|
@ -1,295 +0,0 @@
|
||||
using Adaptation.Eaf.Management.ConfigurationData.CellAutomation;
|
||||
using Adaptation.Ifx.Eaf.EquipmentConnector.File.Configuration;
|
||||
using Adaptation.Shared;
|
||||
using Adaptation.Shared.Duplicator;
|
||||
using Adaptation.Shared.Methods;
|
||||
using Infineon.Monitoring.MonA;
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Diagnostics;
|
||||
using System.Globalization;
|
||||
using System.IO;
|
||||
using System.IO.Compression;
|
||||
using System.Linq;
|
||||
using System.Text.Json;
|
||||
using System.Threading;
|
||||
|
||||
namespace Adaptation.FileHandlers.Dummy;
|
||||
|
||||
public class FileRead : Shared.FileRead, IFileRead
|
||||
{
|
||||
|
||||
private readonly Timer _Timer;
|
||||
private int _LastDummyRunIndex;
|
||||
private readonly string[] _CellNames;
|
||||
|
||||
public FileRead(ISMTP smtp, Dictionary<string, string> fileParameter, string cellInstanceName, int? connectionCount, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList<ModelObjectParameterDefinition> modelObjectParameters, string equipmentDictionaryName, Dictionary<string, List<long>> dummyRuns, Dictionary<long, List<Shared.Metrology.WS.Results>> staticRuns, bool useCyclicalForDescription, bool isEAFHosted) :
|
||||
base(new Description(), false, smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null)
|
||||
{
|
||||
_MinFileLength = 10;
|
||||
_NullData = string.Empty;
|
||||
_Logistics = new(this);
|
||||
if (_FileParameter is null)
|
||||
throw new Exception(cellInstanceConnectionName);
|
||||
if (_ModelObjectParameterDefinitions is null)
|
||||
throw new Exception(cellInstanceConnectionName);
|
||||
if (!_IsDuplicator)
|
||||
throw new Exception(cellInstanceConnectionName);
|
||||
_LastDummyRunIndex = -1;
|
||||
List<string> cellNames = new();
|
||||
_Timer = new Timer(Callback, null, Timeout.Infinite, Timeout.Infinite);
|
||||
ModelObjectParameterDefinition[] cellInstanceCollection = GetProperties(cellInstanceConnectionName, modelObjectParameters, "CellInstance.", ".Alias");
|
||||
foreach (ModelObjectParameterDefinition modelObjectParameterDefinition in cellInstanceCollection)
|
||||
cellNames.Add(modelObjectParameterDefinition.Name.Split('.')[1]);
|
||||
_CellNames = cellNames.ToArray();
|
||||
if (Debugger.IsAttached || fileConnectorConfiguration.PreProcessingMode == FileConnectorConfiguration.PreProcessingModeEnum.Process)
|
||||
Callback(null);
|
||||
else
|
||||
{
|
||||
TimeSpan timeSpan = new(DateTime.Now.AddSeconds(_FileConnectorConfiguration.FileScanningIntervalInSeconds.Value).Ticks - DateTime.Now.Ticks);
|
||||
_ = _Timer.Change((long)timeSpan.TotalMilliseconds, Timeout.Infinite);
|
||||
}
|
||||
}
|
||||
|
||||
void IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, Exception exception) => Move(extractResults);
|
||||
|
||||
void IFileRead.WaitForThread() => WaitForThread(thread: null, threadExceptions: null);
|
||||
|
||||
string IFileRead.GetEventDescription()
|
||||
{
|
||||
string result = _Description.GetEventDescription();
|
||||
return result;
|
||||
}
|
||||
|
||||
List<string> IFileRead.GetHeaderNames()
|
||||
{
|
||||
List<string> results = _Description.GetHeaderNames();
|
||||
return results;
|
||||
}
|
||||
|
||||
string[] IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, string to, string from, string resolvedFileLocation, Exception exception)
|
||||
{
|
||||
string[] results = Move(extractResults, to, from, resolvedFileLocation, exception);
|
||||
return results;
|
||||
}
|
||||
|
||||
JsonProperty[] IFileRead.GetDefault()
|
||||
{
|
||||
JsonProperty[] results = _Description.GetDefault(this, _Logistics);
|
||||
return results;
|
||||
}
|
||||
|
||||
Dictionary<string, string> IFileRead.GetDisplayNamesJsonElement()
|
||||
{
|
||||
Dictionary<string, string> results = _Description.GetDisplayNamesJsonElement(this);
|
||||
return results;
|
||||
}
|
||||
|
||||
List<IDescription> IFileRead.GetDescriptions(IFileRead fileRead, List<Test> tests, IProcessData processData)
|
||||
{
|
||||
List<IDescription> results = _Description.GetDescriptions(fileRead, _Logistics, tests, processData);
|
||||
return results;
|
||||
}
|
||||
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.GetExtractResult(string reportFullPath, string eventName) => throw new Exception(string.Concat("See ", nameof(CallbackFileExists)));
|
||||
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.ReExtract() => throw new Exception(string.Concat("See ", nameof(CallbackFileExists)));
|
||||
|
||||
private void CallbackInProcessCleared(string sourceArchiveFile, string traceDummyFile, string targetFileLocation, string monARessource, string inProcessDirectory, long sequence, bool warning)
|
||||
{
|
||||
const string site = "sjc";
|
||||
string stateName = string.Concat("Dummy_", _EventName);
|
||||
const string monInURL = "http://moninhttp.sjc.infineon.com/input/text";
|
||||
MonIn monIn = MonIn.GetInstance(monInURL);
|
||||
try
|
||||
{
|
||||
if (warning)
|
||||
{
|
||||
File.AppendAllLines(traceDummyFile, new string[] { site, monARessource, stateName, State.Warning.ToString() });
|
||||
_ = monIn.SendStatus(site, monARessource, stateName, State.Warning);
|
||||
for (int i = 1; i < 12; i++)
|
||||
Thread.Sleep(500);
|
||||
}
|
||||
ZipFile.ExtractToDirectory(sourceArchiveFile, inProcessDirectory);
|
||||
string[] files = Directory.GetFiles(inProcessDirectory, "*", SearchOption.TopDirectoryOnly);
|
||||
if (files.Length > 250)
|
||||
throw new Exception("Safety net!");
|
||||
foreach (string file in files)
|
||||
File.SetLastWriteTime(file, new DateTime(sequence));
|
||||
if (!_FileConnectorConfiguration.IncludeSubDirectories.Value)
|
||||
{
|
||||
foreach (string file in files)
|
||||
File.Move(file, Path.Combine(targetFileLocation, Path.GetFileName(file)));
|
||||
}
|
||||
else
|
||||
{
|
||||
string[] directories = Directory.GetDirectories(inProcessDirectory, "*", SearchOption.AllDirectories);
|
||||
foreach (string directory in directories)
|
||||
_ = Directory.CreateDirectory(string.Concat(targetFileLocation, directory.Substring(inProcessDirectory.Length)));
|
||||
foreach (string file in files)
|
||||
File.Move(file, string.Concat(targetFileLocation, file.Substring(inProcessDirectory.Length)));
|
||||
}
|
||||
File.AppendAllLines(traceDummyFile, new string[] { site, monARessource, stateName, State.Ok.ToString() });
|
||||
_ = monIn.SendStatus(site, monARessource, stateName, State.Ok);
|
||||
}
|
||||
catch (Exception exception)
|
||||
{
|
||||
string subject = string.Concat("Exception:", _CellInstanceConnectionName);
|
||||
string body = string.Concat(exception.Message, Environment.NewLine, Environment.NewLine, exception.StackTrace);
|
||||
try
|
||||
{
|
||||
_SMTP.SendHighPriorityEmailMessage(subject, body);
|
||||
File.WriteAllText(".email", body);
|
||||
}
|
||||
catch (Exception) { }
|
||||
File.AppendAllLines(traceDummyFile, new string[] { site, monARessource, stateName, State.Critical.ToString(), exception.Message, exception.StackTrace });
|
||||
_ = monIn.SendStatus(site, monARessource, stateName, State.Critical);
|
||||
}
|
||||
}
|
||||
|
||||
private void CallbackFileExists(string sourceArchiveFile, string traceDummyFile, string targetFileLocation, string monARessource, long sequence)
|
||||
{
|
||||
string[] files;
|
||||
bool warning = false;
|
||||
if (!_DummyRuns.ContainsKey(monARessource))
|
||||
_DummyRuns.Add(monARessource, new List<long>());
|
||||
if (!_DummyRuns[monARessource].Contains(sequence))
|
||||
_DummyRuns[monARessource].Add(sequence);
|
||||
File.AppendAllLines(traceDummyFile, new string[] { sourceArchiveFile });
|
||||
string inProcessDirectory = Path.Combine("_ProgressPath", "Dummy In-Process", sequence.ToString());
|
||||
if (!Directory.Exists(inProcessDirectory))
|
||||
_ = Directory.CreateDirectory(inProcessDirectory);
|
||||
files = Directory.GetFiles(inProcessDirectory, "*", SearchOption.AllDirectories);
|
||||
if (files.Length != 0)
|
||||
{
|
||||
if (files.Length > 250)
|
||||
throw new Exception("Safety net!");
|
||||
try
|
||||
{
|
||||
foreach (string file in files)
|
||||
File.Delete(file);
|
||||
}
|
||||
catch (Exception) { }
|
||||
}
|
||||
if (_FileConnectorConfiguration.IncludeSubDirectories.Value)
|
||||
files = Directory.GetFiles(targetFileLocation, "*", SearchOption.AllDirectories);
|
||||
else
|
||||
files = Directory.GetFiles(targetFileLocation, "*", SearchOption.TopDirectoryOnly);
|
||||
foreach (string file in files)
|
||||
{
|
||||
if (new FileInfo(file).LastWriteTime.Ticks == sequence)
|
||||
{
|
||||
warning = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
CallbackInProcessCleared(sourceArchiveFile, traceDummyFile, targetFileLocation, monARessource, inProcessDirectory, sequence, warning);
|
||||
}
|
||||
|
||||
private string GetCellName(string pathSegment)
|
||||
{
|
||||
string result = string.Empty;
|
||||
foreach (string cellName in _CellNames)
|
||||
{
|
||||
if (pathSegment.ToLower().Contains(cellName.ToLower()))
|
||||
{
|
||||
result = cellName;
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (string.IsNullOrEmpty(result))
|
||||
{
|
||||
int count;
|
||||
List<(string CellName, int Count)> cellNames = new();
|
||||
foreach (string cellName in _CellNames)
|
||||
{
|
||||
count = 0;
|
||||
foreach (char @char in cellName.ToLower())
|
||||
count += pathSegment.Length - pathSegment.ToLower().Replace(@char.ToString(), string.Empty).Length;
|
||||
cellNames.Add(new(cellName, count));
|
||||
}
|
||||
result = (from l in cellNames orderby l.CellName.Length, l.Count descending select l.CellName).First();
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
private void Callback(object state)
|
||||
{
|
||||
try
|
||||
{
|
||||
string sourceParentDirectory;
|
||||
string targetParentDirectory;
|
||||
DateTime dateTime = DateTime.Now;
|
||||
if (!string.IsNullOrEmpty(Path.GetFileName(_FileConnectorConfiguration.SourceFileLocation)))
|
||||
sourceParentDirectory = Path.GetDirectoryName(_FileConnectorConfiguration.SourceFileLocation);
|
||||
else
|
||||
sourceParentDirectory = GetParentParent(_FileConnectorConfiguration.SourceFileLocation);
|
||||
if (!string.IsNullOrEmpty(Path.GetFileName(_FileConnectorConfiguration.TargetFileLocation)))
|
||||
targetParentDirectory = Path.GetDirectoryName(_FileConnectorConfiguration.TargetFileLocation);
|
||||
else
|
||||
targetParentDirectory = GetParentParent(_FileConnectorConfiguration.TargetFileLocation);
|
||||
if (sourceParentDirectory != targetParentDirectory)
|
||||
throw new Exception("Target and source must have the same parent for Si Dummy FileConnectorConfiguration!");
|
||||
bool check = dateTime.Hour > 7 && dateTime.Hour < 18 && dateTime.DayOfWeek != DayOfWeek.Sunday && dateTime.DayOfWeek != DayOfWeek.Saturday;
|
||||
if (!_IsEAFHosted || check)
|
||||
{
|
||||
string monARessource;
|
||||
string sourceFileFilter;
|
||||
string sourceArchiveFile;
|
||||
string weekOfYear = _Calendar.GetWeekOfYear(dateTime, CalendarWeekRule.FirstDay, DayOfWeek.Sunday).ToString("00");
|
||||
string traceDummyDirectory = Path.Combine(Path.GetPathRoot(_TracePath), "TracesDummy", _CellInstanceName, "Source", $"{dateTime:yyyy}___Week_{weekOfYear}");
|
||||
if (!Directory.Exists(traceDummyDirectory))
|
||||
_ = Directory.CreateDirectory(traceDummyDirectory);
|
||||
string traceDummyFile = Path.Combine(traceDummyDirectory, $"{dateTime.Ticks} - {_CellInstanceName}.txt");
|
||||
File.AppendAllText(traceDummyFile, string.Empty);
|
||||
for (int i = 0; i < _FileConnectorConfiguration.SourceFileFilters.Count; i++)
|
||||
{
|
||||
_LastDummyRunIndex += 1;
|
||||
if (_LastDummyRunIndex >= _FileConnectorConfiguration.SourceFileFilters.Count)
|
||||
_LastDummyRunIndex = 0;
|
||||
sourceFileFilter = _FileConnectorConfiguration.SourceFileFilters[_LastDummyRunIndex];
|
||||
sourceArchiveFile = Path.Combine(_FileConnectorConfiguration.SourceFileLocation, sourceFileFilter);
|
||||
if (File.Exists(sourceArchiveFile))
|
||||
{
|
||||
if (!long.TryParse(Path.GetFileNameWithoutExtension(sourceArchiveFile).Replace("x", string.Empty), out long sequence))
|
||||
throw new Exception("Invalid file name for source archive file!");
|
||||
monARessource = GetCellName(sourceArchiveFile);
|
||||
if (string.IsNullOrEmpty(monARessource))
|
||||
throw new Exception("Could not determine which cell archive file is associated with!");
|
||||
if (_IsEAFHosted)
|
||||
CallbackFileExists(sourceArchiveFile, traceDummyFile, _FileConnectorConfiguration.TargetFileLocation, monARessource, sequence);
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
catch (Exception exception)
|
||||
{
|
||||
string subject = string.Concat("Exception:", _CellInstanceConnectionName);
|
||||
string body = string.Concat(exception.Message, Environment.NewLine, Environment.NewLine, exception.StackTrace);
|
||||
try
|
||||
{
|
||||
_SMTP.SendHighPriorityEmailMessage(subject, body);
|
||||
File.WriteAllText(".email", body);
|
||||
}
|
||||
catch (Exception) { }
|
||||
}
|
||||
try
|
||||
{
|
||||
TimeSpan timeSpan = new(DateTime.Now.AddSeconds(_FileConnectorConfiguration.FileScanningIntervalInSeconds.Value).Ticks - DateTime.Now.Ticks);
|
||||
_ = _Timer.Change((long)timeSpan.TotalMilliseconds, Timeout.Infinite);
|
||||
}
|
||||
catch (Exception exception)
|
||||
{
|
||||
string subject = string.Concat("Exception:", _CellInstanceConnectionName);
|
||||
string body = string.Concat(exception.Message, Environment.NewLine, Environment.NewLine, exception.StackTrace);
|
||||
try
|
||||
{
|
||||
_SMTP.SendHighPriorityEmailMessage(subject, body);
|
||||
File.WriteAllText(".email", body);
|
||||
}
|
||||
catch (Exception) { }
|
||||
}
|
||||
}
|
||||
|
||||
}
|
@ -1,134 +0,0 @@
|
||||
using Adaptation.Eaf.Management.ConfigurationData.CellAutomation;
|
||||
using Adaptation.Ifx.Eaf.EquipmentConnector.File.Configuration;
|
||||
using Adaptation.Shared;
|
||||
using Adaptation.Shared.Duplicator;
|
||||
using Adaptation.Shared.Methods;
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Text.Json;
|
||||
|
||||
namespace Adaptation.FileHandlers.IQSSi;
|
||||
|
||||
public class FileRead : Shared.FileRead, IFileRead
|
||||
{
|
||||
|
||||
public FileRead(ISMTP smtp, Dictionary<string, string> fileParameter, string cellInstanceName, int? connectionCount, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList<ModelObjectParameterDefinition> modelObjectParameters, string equipmentDictionaryName, Dictionary<string, List<long>> dummyRuns, Dictionary<long, List<Shared.Metrology.WS.Results>> staticRuns, bool useCyclicalForDescription, bool isEAFHosted) :
|
||||
base(new Description(), false, smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null)
|
||||
{
|
||||
_MinFileLength = 10;
|
||||
_NullData = string.Empty;
|
||||
_Logistics = new(this);
|
||||
if (_FileParameter is null)
|
||||
throw new Exception(cellInstanceConnectionName);
|
||||
if (_ModelObjectParameterDefinitions is null)
|
||||
throw new Exception(cellInstanceConnectionName);
|
||||
if (!_IsDuplicator)
|
||||
throw new Exception(cellInstanceConnectionName);
|
||||
}
|
||||
|
||||
void IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, Exception exception)
|
||||
{
|
||||
bool isErrorFile = exception is not null;
|
||||
if (!isErrorFile && !string.IsNullOrEmpty(_Logistics.ReportFullPath))
|
||||
{
|
||||
FileInfo fileInfo = new(_Logistics.ReportFullPath);
|
||||
if (fileInfo.Exists && fileInfo.LastWriteTime < fileInfo.CreationTime)
|
||||
File.SetLastWriteTime(_Logistics.ReportFullPath, fileInfo.CreationTime);
|
||||
}
|
||||
Move(extractResults);
|
||||
}
|
||||
|
||||
void IFileRead.WaitForThread() => WaitForThread(thread: null, threadExceptions: null);
|
||||
|
||||
string IFileRead.GetEventDescription()
|
||||
{
|
||||
string result = _Description.GetEventDescription();
|
||||
return result;
|
||||
}
|
||||
|
||||
List<string> IFileRead.GetHeaderNames()
|
||||
{
|
||||
List<string> results = _Description.GetHeaderNames();
|
||||
return results;
|
||||
}
|
||||
|
||||
string[] IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, string to, string from, string resolvedFileLocation, Exception exception)
|
||||
{
|
||||
string[] results = Move(extractResults, to, from, resolvedFileLocation, exception);
|
||||
return results;
|
||||
}
|
||||
|
||||
JsonProperty[] IFileRead.GetDefault()
|
||||
{
|
||||
JsonProperty[] results = _Description.GetDefault(this, _Logistics);
|
||||
return results;
|
||||
}
|
||||
|
||||
Dictionary<string, string> IFileRead.GetDisplayNamesJsonElement()
|
||||
{
|
||||
Dictionary<string, string> results = _Description.GetDisplayNamesJsonElement(this);
|
||||
return results;
|
||||
}
|
||||
|
||||
List<IDescription> IFileRead.GetDescriptions(IFileRead fileRead, List<Test> tests, IProcessData processData)
|
||||
{
|
||||
List<IDescription> results = _Description.GetDescriptions(fileRead, _Logistics, tests, processData);
|
||||
return results;
|
||||
}
|
||||
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.GetExtractResult(string reportFullPath, string eventName)
|
||||
{
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
|
||||
if (string.IsNullOrEmpty(eventName))
|
||||
throw new Exception();
|
||||
_ReportFullPath = reportFullPath;
|
||||
DateTime dateTime = DateTime.Now;
|
||||
results = GetExtractResult(reportFullPath, dateTime);
|
||||
if (results.Item3 is null)
|
||||
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(results.Item1, Array.Empty<Test>(), JsonSerializer.Deserialize<JsonElement[]>("[]"), results.Item4);
|
||||
if (results.Item3.Length > 0 && _IsEAFHosted)
|
||||
WritePDSF(this, results.Item3);
|
||||
UpdateLastTicksDuration(DateTime.Now.Ticks - dateTime.Ticks);
|
||||
return results;
|
||||
}
|
||||
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.ReExtract()
|
||||
{
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
|
||||
List<string> headerNames = _Description.GetHeaderNames();
|
||||
Dictionary<string, string> keyValuePairs = _Description.GetDisplayNamesJsonElement(this);
|
||||
results = ReExtract(this, headerNames, keyValuePairs);
|
||||
return results;
|
||||
}
|
||||
|
||||
private void FileCopy<T>(string reportFullPath, DateTime dateTime, List<T> descriptions) where T : Shared.Properties.IDescription
|
||||
{
|
||||
bool isDummyRun = false;
|
||||
string successDirectory = string.Empty;
|
||||
List<(Shared.Properties.IScopeInfo, string)> collection = new();
|
||||
string duplicateDirectory = Path.Combine(_FileConnectorConfiguration.SourceFileLocation, _CellInstanceName);
|
||||
if (!Directory.Exists(duplicateDirectory))
|
||||
_ = Directory.CreateDirectory(duplicateDirectory);
|
||||
string duplicateFile = Path.Combine(duplicateDirectory, Path.GetFileName(reportFullPath));
|
||||
File.Copy(reportFullPath, duplicateFile, overwrite: true);
|
||||
WaitForFileConsumption(dateTime, descriptions, isDummyRun, successDirectory, duplicateDirectory, collection, duplicateFile);
|
||||
}
|
||||
|
||||
private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
|
||||
{
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
|
||||
ProcessDataStandardFormat processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath);
|
||||
_Logistics = new Logistics(reportFullPath, processDataStandardFormat);
|
||||
SetFileParameterLotIDToLogisticsMID();
|
||||
JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(processDataStandardFormat);
|
||||
List<Shared.Properties.IDescription> descriptions = GetDuplicatorDescriptions(jsonElements);
|
||||
Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
|
||||
if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
|
||||
FileCopy(reportFullPath, dateTime, descriptions);
|
||||
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics), tests, jsonElements, new List<FileInfo>());
|
||||
return results;
|
||||
}
|
||||
|
||||
}
|
@ -1,397 +0,0 @@
|
||||
using Adaptation.Eaf.Management.ConfigurationData.CellAutomation;
|
||||
using Adaptation.Ifx.Eaf.EquipmentConnector.File.Configuration;
|
||||
using Adaptation.Shared;
|
||||
using Adaptation.Shared.Duplicator;
|
||||
using Adaptation.Shared.Methods;
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Collections.ObjectModel;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Text;
|
||||
using System.Text.Json;
|
||||
using System.Threading;
|
||||
|
||||
namespace Adaptation.FileHandlers.MoveMatchingFiles;
|
||||
|
||||
#nullable enable
|
||||
|
||||
public class FileRead : Shared.FileRead, IFileRead
|
||||
{
|
||||
|
||||
internal class PreWith
|
||||
{
|
||||
|
||||
internal string ErrFile { get; private set; }
|
||||
internal string CheckFile { get; private set; }
|
||||
internal string MatchingFile { get; private set; }
|
||||
internal string CheckDirectory { get; private set; }
|
||||
internal string NoWaitDirectory { get; private set; }
|
||||
|
||||
internal PreWith(string checkDirectory,
|
||||
string checkFile,
|
||||
string errFile,
|
||||
string matchingFile,
|
||||
string noWaitDirectory)
|
||||
{
|
||||
ErrFile = errFile;
|
||||
CheckFile = checkFile;
|
||||
MatchingFile = matchingFile;
|
||||
CheckDirectory = checkDirectory;
|
||||
NoWaitDirectory = noWaitDirectory;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
internal class Pre
|
||||
{
|
||||
|
||||
internal string MatchingFile { get; private set; }
|
||||
internal string CheckFile { get; private set; }
|
||||
|
||||
internal Pre(string matchingFile, string checkFile)
|
||||
{
|
||||
MatchingFile = matchingFile;
|
||||
CheckFile = checkFile;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
internal class Post
|
||||
{
|
||||
|
||||
internal string ErrFile { get; private set; }
|
||||
internal string CheckFile { get; private set; }
|
||||
|
||||
internal Post(string checkFile, string errFile)
|
||||
{
|
||||
ErrFile = errFile;
|
||||
CheckFile = checkFile;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
private readonly ProcessDataStandardFormatMapping _ProcessDataStandardFormatMapping;
|
||||
|
||||
public FileRead(ISMTP smtp, Dictionary<string, string> fileParameter, string cellInstanceName, int? connectionCount, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList<ModelObjectParameterDefinition> modelObjectParameters, string equipmentDictionaryName, Dictionary<string, List<long>> dummyRuns, Dictionary<long, List<Shared.Metrology.WS.Results>> staticRuns, bool useCyclicalForDescription, bool isEAFHosted) :
|
||||
base(new Description(), false, smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null)
|
||||
{
|
||||
_MinFileLength = 10;
|
||||
_NullData = string.Empty;
|
||||
_Logistics = new(this);
|
||||
if (_FileParameter is null)
|
||||
throw new Exception(cellInstanceConnectionName);
|
||||
if (_ModelObjectParameterDefinitions is null)
|
||||
throw new Exception(cellInstanceConnectionName);
|
||||
if (!_IsDuplicator)
|
||||
throw new Exception(cellInstanceConnectionName);
|
||||
string processDataStandardFormatMappingOldColumnNames = GetPropertyValue(cellInstanceConnectionName, modelObjectParameters, "Process.Data.Standard.Format.Mapping.Old.Column.Names");
|
||||
string processDataStandardFormatMappingNewColumnNames = GetPropertyValue(cellInstanceConnectionName, modelObjectParameters, "Process.Data.Standard.Format.Mapping.New.Column.Names");
|
||||
string processDataStandardFormatMappingColumnIndices = GetPropertyValue(cellInstanceConnectionName, modelObjectParameters, "Process.Data.Standard.Format.Mapping.Column.Indices");
|
||||
_ProcessDataStandardFormatMapping = GetProcessDataStandardFormatMapping(processDataStandardFormatMappingOldColumnNames,
|
||||
processDataStandardFormatMappingNewColumnNames,
|
||||
processDataStandardFormatMappingColumnIndices);
|
||||
}
|
||||
|
||||
void IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, Exception exception)
|
||||
{
|
||||
bool isErrorFile = exception is not null;
|
||||
if (!isErrorFile && !string.IsNullOrEmpty(_Logistics.ReportFullPath))
|
||||
{
|
||||
FileInfo fileInfo = new(_Logistics.ReportFullPath);
|
||||
if (fileInfo.Exists && fileInfo.LastWriteTime < fileInfo.CreationTime)
|
||||
File.SetLastWriteTime(_Logistics.ReportFullPath, fileInfo.CreationTime);
|
||||
}
|
||||
Move(extractResults);
|
||||
}
|
||||
|
||||
void IFileRead.WaitForThread() =>
|
||||
WaitForThread(thread: null, threadExceptions: null);
|
||||
|
||||
string IFileRead.GetEventDescription()
|
||||
{
|
||||
string result = _Description.GetEventDescription();
|
||||
return result;
|
||||
}
|
||||
|
||||
List<string> IFileRead.GetHeaderNames()
|
||||
{
|
||||
List<string> results = _Description.GetHeaderNames();
|
||||
return results;
|
||||
}
|
||||
|
||||
string[] IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, string to, string from, string resolvedFileLocation, Exception exception)
|
||||
{
|
||||
string[] results = Move(extractResults, to, from, resolvedFileLocation, exception);
|
||||
return results;
|
||||
}
|
||||
|
||||
JsonProperty[] IFileRead.GetDefault()
|
||||
{
|
||||
JsonProperty[] results = _Description.GetDefault(this, _Logistics);
|
||||
return results;
|
||||
}
|
||||
|
||||
Dictionary<string, string> IFileRead.GetDisplayNamesJsonElement()
|
||||
{
|
||||
Dictionary<string, string> results = _Description.GetDisplayNamesJsonElement(this);
|
||||
return results;
|
||||
}
|
||||
|
||||
List<IDescription> IFileRead.GetDescriptions(IFileRead fileRead, List<Test> tests, IProcessData processData)
|
||||
{
|
||||
List<IDescription> results = _Description.GetDescriptions(fileRead, _Logistics, tests, processData);
|
||||
return results;
|
||||
}
|
||||
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.GetExtractResult(string reportFullPath, string eventName)
|
||||
{
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
|
||||
if (string.IsNullOrEmpty(eventName))
|
||||
throw new Exception();
|
||||
_ReportFullPath = reportFullPath;
|
||||
DateTime dateTime = DateTime.Now;
|
||||
results = GetExtractResult(reportFullPath, dateTime);
|
||||
if (results.Item3 is null)
|
||||
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(results.Item1, Array.Empty<Test>(), JsonSerializer.Deserialize<JsonElement[]>("[]") ?? throw new Exception(), results.Item4);
|
||||
if (results.Item3.Length > 0 && _IsEAFHosted)
|
||||
WritePDSF(this, results.Item3);
|
||||
UpdateLastTicksDuration(DateTime.Now.Ticks - dateTime.Ticks);
|
||||
return results;
|
||||
}
|
||||
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.ReExtract()
|
||||
{
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
|
||||
List<string> headerNames = _Description.GetHeaderNames();
|
||||
Dictionary<string, string> keyValuePairs = _Description.GetDisplayNamesJsonElement(this);
|
||||
results = ReExtract(this, headerNames, keyValuePairs);
|
||||
return results;
|
||||
}
|
||||
|
||||
private static ProcessDataStandardFormatMapping GetProcessDataStandardFormatMapping(string processDataStandardFormatMappingOldColumnNames, string processDataStandardFormatMappingNewColumnNames, string processDataStandardFormatMappingColumnIndices)
|
||||
{
|
||||
ProcessDataStandardFormatMapping result;
|
||||
string[] segmentsB;
|
||||
List<string> distinct = new();
|
||||
Dictionary<string, string> keyValuePairs = new();
|
||||
string args4 = "Time,Test,Count,MesEntity,HeaderUniqueId,UniqueId,Id,Recipe,Date,AreaDeltaFromLastRun,GLimit,HGCV1";
|
||||
string args5 = "Nine10mmEdgeMean,Nine4mmEdgeMean,NineCriticalPointsAverage,NineCriticalPointsPhaseAngleAverage,NineCriticalPointsStdDev,NineEdgeMeanDelta,NineMean,NineResRangePercent,AreaDeltaFromLastRun,Variation,Percentage HgCV 4PP Delta,HGCV1";
|
||||
string args6 = "RhoAvg01,RhoAvg02,RhoAvg03,RhoAvg04,RhoAvg05,RhoAvg06,RhoAvg07,RhoAvg08,RhoAvg09,HGCV1";
|
||||
string args7 = "FlatZMean|MeanFlatZ,GradeMean|MeanGrade,NAvgMean|MeanNAvg,NslMean|MeanNsl,PhaseMean|MeanPhase,RhoAvgMean|MeanRhoAvg,RhoslMean|MeanRhosl,RsMean|MeanRs,VdMean|MeanVd,FlatZRadialGradient|RadialGradientFlatZ,GradeRadialGradient|RadialGradientGrade,NAvgRadialGradient|RadialGradientNAvg,NslRadialGradient|RadialGradientNsl,PhaseRadialGradient|RadialGradientPhase,RhoAvgRadialGradient|RadialGradientRhoAvg,RhoslRadialGradient|RadialGradientRhosl,RsRadialGradient|RadialGradientRs,VdRadialGradient|RadialGradientVd,FlatZStdDev|StandardDeviationPercentageFlatZ,GradeStdDev|StandardDeviationPercentageGrade,NAvgStdDev|StandardDeviationPercentageNAvg,NslStdDev|StandardDeviationPercentageNsl,PhaseStdDev|StandardDeviationPercentagePhase,RhoAvgStdDev|StandardDeviationPercentageRhoAvg,RhoslStdDev|StandardDeviationPercentageRhosl,RsStdDev|StandardDeviationPercentageRs,VdStdDev|StandardDeviationPercentageVd,|HGCV1";
|
||||
// string args8 = "Time,A_LOGISTICS,B_LOGISTICS,Test,Count,Index,MesEntity,Date,Employee,Lot,PSN,Reactor,Recipe,Area,Folder,HeaderUniqueId,Id,Layer,Model,Pattern,Phase,Plan,RampRate,RDS,SetupFile,StartVoltage,StopVoltage,UniqueId,Wafer,WaferSize,Zone,Ccomp,CondType,FlatZ,FlatZMean,FlatZRadialGradient,FlatZStdDev,GLimit,Grade,GradeMean,GradeRadialGradient,GradeStdDev,NAvg,NAvgMean,NAvgRadialGradient,NAvgStdDev,Nsl,NslMean,NslRadialGradient,NslStdDev,PhaseMean,PhaseRadialGradient,PhaseStdDev,RhoAvg,RhoAvgMean,RhoAvgRadialGradient,RhoAvgStdDev,RhoMethod,Rhosl,RhoslMean,RhoslRadialGradient,RhoslStdDev,RsMean,RsRadialGradient,RsStdDev,Vd,VdMean,VdRadialGradient,VdStdDev,Variation,AreaDeltaFromLastRun,Nine10mmEdgeMean,Nine4mmEdgeMean,NineCriticalPointsAverage,NineCriticalPointsPhaseAngleAverage,NineCriticalPointsStdDev,NineEdgeMeanDelta,NineMean,NineResRangePercent,RhoAvg01,RhoAvg02,RhoAvg03,RhoAvg04,RhoAvg05,RhoAvg06,RhoAvg07,RhoAvg08,RhoAvg09";
|
||||
// string args9 = "Time,A_LOGISTICS,B_LOGISTICS,Index,Operator,StartVoltage,Wafer,StopVoltage,Lot,RampRate,Plan,GLimit,Date,Time,SetupFile,WaferSize,Folder,Ccomp,Pattern,Area,CondType,RhoMethod,Model,MeanNAvg,MeanNsl,MeanVd,MeanFlatZ,MeanRhoAvg,MeanRhosl,MeanPhase,MeanGrade,MeanRs,StandardDeviationPercentageNAvg,StandardDeviationPercentageNsl,StandardDeviationPercentageVd,StandardDeviationPercentageFlatZ,StandardDeviationPercentageRhoAvg,StandardDeviationPercentageRhosl,StandardDeviationPercentagePhase,StandardDeviationPercentageGrade,StandardDeviationPercentageRs,RadialGradientNAvg,RadialGradientNsl,RadialGradientVd,RadialGradientFlatZ,RadialGradientRhoAvg,RadialGradientRhosl,RadialGradientPhase,RadialGradientGrade,RadialGradientRs,Site,X,Y,NAvg,RhoAvg,Nsl,Rhosl,Vd,Phase,FlatZ,Grade,XLeft,XRight,BottomY,TopY,RDS,PSN,Reactor,Layer,Zone,Employee,InferredLot,Nine10mmEdgeMean,Nine4mmEdgeMean,NineCriticalPointsAverage,NineCriticalPointsPhaseAngleAverage,NineCriticalPointsStdDev,NineEdgeMeanDelta,NineMean,NineResRangePercent,AreaDeltaFromLastRun,Variation,Percentage HgCV 4PP Delta,RhoAvg01,RhoAvg02,RhoAvg03,RhoAvg04,RhoAvg05,RhoAvg06,RhoAvg07,RhoAvg08,RhoAvg09";
|
||||
// string args10 = "0,1,2,-1,-1,3,-1,12,70,8,66,67,-1,19,16,-1,-1,68,22,18,58,10,9,65,14,5,7,-1,6,15,69,17,20,59,26,44,35,11,60,30,48,39,53,23,41,32,55,24,42,33,29,47,38,54,27,45,36,21,56,28,46,37,31,49,40,57,25,43,34,81,80,72,73,74,75,76,77,78,79,83,84,85,86,87,88,89,90,91";
|
||||
string[] segments = args7.Split(',');
ReadOnlyCollection<string> ignoreColumns = new(args4.Split(','));
ReadOnlyCollection<string> backfillColumns = new(args5.Split(','));
ReadOnlyCollection<string> indexOnlyColumns = new(args6.Split(','));
ReadOnlyCollection<string> newColumnNames = new(processDataStandardFormatMappingNewColumnNames.Split(','));
ReadOnlyCollection<string> oldColumnNames = new(processDataStandardFormatMappingOldColumnNames.Split(','));
ReadOnlyCollection<int> columnIndices = new(processDataStandardFormatMappingColumnIndices.Split(',').Select(int.Parse).ToArray());
foreach (string segment in segments)
{
    segmentsB = segment.Split('|');
    if (segmentsB.Length != 2)
        continue;
    if (distinct.Contains(segmentsB[0]))
        continue;
    distinct.Add(segmentsB[0]);
    keyValuePairs.Add(segmentsB[0], segmentsB[1]);
}
result = new(backfillColumns: backfillColumns,
             columnIndices: columnIndices,
             newColumnNames: newColumnNames,
             ignoreColumns: ignoreColumns,
             indexOnlyColumns: indexOnlyColumns,
             keyValuePairs: new(keyValuePairs),
             oldColumnNames: oldColumnNames);
return result;
}
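// Illustrative sketch: each comma-separated args7 entry is an "old|new" column-name pair,
// parsed exactly as the loop above does, e.g.:
//
//     Dictionary<string, string> map = new();
//     foreach (string segment in "FlatZMean|MeanFlatZ,GradeMean|MeanGrade".Split(','))
//     {
//         string[] parts = segment.Split('|');
//         if (parts.Length != 2 || map.ContainsKey(parts[0]))
//             continue;
//         map.Add(parts[0], parts[1]);
//     }
//     // map["FlatZMean"] == "MeanFlatZ"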

private static ReadOnlyCollection<PreWith> GetPreWithCollection(ReadOnlyCollection<Pre> preCollection)
{
    List<PreWith> results = new();
    string errFile;
    PreWith preWith;
    string? checkDirectory;
    string noWaitDirectory;
    foreach (Pre pre in preCollection)
    {
        errFile = string.Concat(pre.CheckFile, ".err");
        checkDirectory = Path.GetDirectoryName(pre.CheckFile);
        if (string.IsNullOrEmpty(checkDirectory))
            continue;
        if (!Directory.Exists(checkDirectory))
            _ = Directory.CreateDirectory(checkDirectory);
        noWaitDirectory = Path.Combine(checkDirectory, "NoWaitDirectory");
        preWith = new(checkDirectory: checkDirectory,
                      checkFile: pre.CheckFile,
                      errFile: errFile,
                      matchingFile: pre.MatchingFile,
                      noWaitDirectory: noWaitDirectory);
        results.Add(preWith);
    }
    return results.AsReadOnly();
}
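// Returns only the immediate subdirectories whose folder-name length equals numberLength
// (2 where GetExtractResult below calls this), sorted by name.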

private static ReadOnlyCollection<string> GetSearchDirectories(int numberLength, string parentDirectory)
{
    List<string> results = new();
    string[] directories = Directory.GetDirectories(parentDirectory, "*", SearchOption.TopDirectoryOnly);
    foreach (string directory in directories)
    {
        if (Path.GetFileName(directory).Length != numberLength)
            continue;
        results.Add(directory);
    }
    results.Sort();
    return results.AsReadOnly();
}
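// Creates a "<run-number>.txt" pointer file in the parent directory for each run that does not
// already have one; the file holds four lines: the parent directory, the matching file, its
// directory, and the derived check-file name.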

private static void CreatePointerFile(int numberLength, string parentDirectory, ReadOnlyCollection<string> matchingFiles)
{
    string checkFile;
    string writeFile;
    string? directoryName;
    int parentDirectoryLength = parentDirectory.Length;
    foreach (string matchingFile in matchingFiles)
    {
        directoryName = Path.GetDirectoryName(matchingFile);
        if (directoryName is null)
            continue;
        checkFile = $"{matchingFile[0]}{directoryName.Substring(parentDirectoryLength + numberLength + 1)}";
        writeFile = Path.Combine(parentDirectory, $"{directoryName.Substring(parentDirectory.Length + 1, numberLength)}.txt");
        if (File.Exists(writeFile))
            continue;
        File.AppendAllLines(writeFile, new string[] { parentDirectory, matchingFile, directoryName, checkFile });
    }
}

private static ReadOnlyCollection<Pre> GetPreCollection(int numberLength, string parentDirectory, ReadOnlyCollection<string> matchingFiles)
{
    List<Pre> results = new();
    Pre pre;
    string checkFile;
    int parentDirectoryLength = parentDirectory.Length;
    foreach (string matchingFile in matchingFiles)
    {
        checkFile = $"{matchingFile[0]}{matchingFile.Substring(parentDirectoryLength + numberLength + 1)}";
        pre = new(matchingFile, checkFile);
        results.Add(pre);
    }
    return results.AsReadOnly();
}

private void MoveCollection(DateTime dateTime, ProcessDataStandardFormat processDataStandardFormat, ReadOnlyCollection<PreWith> preWithCollection)
{
    ReadOnlyCollection<Post> postCollection = GetPostCollection(dateTime, processDataStandardFormat, preWithCollection);
    if (postCollection.Count != 0)
    {
        Thread.Sleep(500);
        StringBuilder stringBuilder = new();
        foreach (Post post in postCollection)
        {
            if (File.Exists(post.ErrFile))
                _ = stringBuilder.AppendLine(File.ReadAllText(post.ErrFile));
            if (File.Exists(post.CheckFile))
                _ = stringBuilder.AppendLine($"<{post.CheckFile}> was not consumed by the end!");
        }
        if (stringBuilder.Length > 0)
            throw new Exception(stringBuilder.ToString());
    }
}
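// For each staged pre/check pair: writes the PDSF to the check file, deletes the matching file,
// then either defers the final check to MoveCollection (when a NoWaitDirectory exists) or waits
// FileHandleWaitTime (1234 ms when not configured) and polls every 500 ms until the check file
// is consumed, surfacing any ".err" content or a _BreakAfterSeconds timeout as an exception.
// The loop body is skipped entirely when not EAF hosted.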

private ReadOnlyCollection<Post> GetPostCollection(DateTime dateTime, ProcessDataStandardFormat processDataStandardFormat, ReadOnlyCollection<PreWith> preWithCollection)
{
    List<Post> results = new();
    Post post;
    long preWait;
    foreach (PreWith preWith in preWithCollection)
    {
        if (!_IsEAFHosted)
            continue;
        if (!_StaticRuns.TryGetValue(_Logistics.Sequence, out List<Shared.Metrology.WS.Results>? wsResults))
            wsResults = null;
        ProcessDataStandardFormat.Write(preWith.CheckFile, processDataStandardFormat, wsResults);
        File.Delete(preWith.MatchingFile);
        if (Directory.Exists(preWith.NoWaitDirectory))
        {
            post = new(preWith.CheckFile, preWith.ErrFile);
            results.Add(post);
            continue;
        }
        if (_FileConnectorConfiguration?.FileHandleWaitTime is null)
            preWait = DateTime.Now.AddMilliseconds(1234).Ticks;
        else
            preWait = DateTime.Now.AddMilliseconds(_FileConnectorConfiguration.FileHandleWaitTime.Value).Ticks;
        for (short i = 0; i < short.MaxValue; i++)
        {
            if (DateTime.Now.Ticks > preWait)
                break;
            Thread.Sleep(500);
        }
        for (int i = 0; i < int.MaxValue; i++)
        {
            if (File.Exists(preWith.ErrFile))
                throw new Exception(File.ReadAllText(preWith.ErrFile));
            if (!File.Exists(preWith.CheckFile))
                break;
            if (new TimeSpan(DateTime.Now.Ticks - dateTime.Ticks).TotalSeconds > _BreakAfterSeconds)
                throw new Exception($"Not all files were consumed after {_BreakAfterSeconds} second(s)!");
            Thread.Sleep(500);
        }
    }
    return results.AsReadOnly();
}

private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
{
    Tuple<string, Test[], JsonElement[], List<FileInfo>> results = new(string.Empty, Array.Empty<Test>(), Array.Empty<JsonElement>(), new List<FileInfo>());
    ProcessDataStandardFormat processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath, _ProcessDataStandardFormatMapping);
    _Logistics = new Logistics(reportFullPath, processDataStandardFormat);
    if (!_IsEAFHosted)
        ProcessDataStandardFormat.Write("../../.pdsf", processDataStandardFormat, wsResults: null);
    SetFileParameterLotIDToLogisticsMID();
    int numberLength = 2;
    long ticks = dateTime.Ticks;
    string parentParentDirectory = GetParentParent(reportFullPath);
    ReadOnlyCollection<string> searchDirectories = GetSearchDirectories(numberLength, parentParentDirectory);
    ReadOnlyCollection<string> matchingFiles = GetMatchingFiles(ticks, reportFullPath, searchDirectories);
    if (matchingFiles.Count != searchDirectories.Count)
        throw new Exception($"Didn't find all files after {_BreakAfterSeconds} second(s)!");
    if (_IsEAFHosted)
    {
        try
        { CreatePointerFile(numberLength, parentParentDirectory, matchingFiles); }
        catch (Exception) { }
    }
    ReadOnlyCollection<Pre> preCollection = GetPreCollection(numberLength, parentParentDirectory, matchingFiles);
    ReadOnlyCollection<PreWith> preWithCollection = GetPreWithCollection(preCollection);
    MoveCollection(dateTime, processDataStandardFormat, preWithCollection);
    return results;
}
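// Looks for a file with the same name as the incoming report under each search directory
// (recursively), retrying until something is found or _BreakAfterSeconds elapses for that directory.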

private ReadOnlyCollection<string> GetMatchingFiles(long ticks, string reportFullPath, ReadOnlyCollection<string> searchDirectories)
{
    List<string> results = new();
    string[] found;
    string fileName = Path.GetFileName(reportFullPath);
    foreach (string searchDirectory in searchDirectories)
    {
        for (int i = 0; i < int.MaxValue; i++)
        {
            found = Directory.GetFiles(searchDirectory, fileName, SearchOption.AllDirectories);
            if (found.Length != 0)
            {
                results.AddRange(found);
                break;
            }
            if (new TimeSpan(DateTime.Now.Ticks - ticks).TotalSeconds > _BreakAfterSeconds)
                break;
        }
    }
    return results.AsReadOnly();
}

}
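For reference, a minimal, self-contained sketch of the directory search these helpers perform: pick the two-character run folders directly under the parent, then look for the report file underneath each one. The paths and file name below are hypothetical, and the retry/timeout handling of GetMatchingFiles is omitted.

using System;
using System.IO;

internal static class RunDirectorySearchSketch
{
    private static void Main()
    {
        int numberLength = 2;                     // run folders such as "66" or "79"
        string parentDirectory = @"C:\Temp\Runs"; // hypothetical parent location
        string fileName = "Report.pdsf";          // hypothetical report name
        foreach (string directory in Directory.GetDirectories(parentDirectory, "*", SearchOption.TopDirectoryOnly))
        {
            if (Path.GetFileName(directory).Length != numberLength)
                continue; // same filter as GetSearchDirectories
            foreach (string match in Directory.GetFiles(directory, fileName, SearchOption.AllDirectories))
                Console.WriteLine(match); // same per-directory lookup GetMatchingFiles performs
        }
    }
}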
|
@ -1,133 +0,0 @@
|
||||
using Adaptation.Eaf.Management.ConfigurationData.CellAutomation;
|
||||
using Adaptation.Ifx.Eaf.EquipmentConnector.File.Configuration;
|
||||
using Adaptation.Shared;
|
||||
using Adaptation.Shared.Duplicator;
|
||||
using Adaptation.Shared.Methods;
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Text.Json;
|
||||
|
||||
namespace Adaptation.FileHandlers.OpenInsight;
|
||||
|
||||
public class FileRead : Shared.FileRead, IFileRead
|
||||
{
|
||||
|
||||
public FileRead(ISMTP smtp, Dictionary<string, string> fileParameter, string cellInstanceName, int? connectionCount, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList<ModelObjectParameterDefinition> modelObjectParameters, string equipmentDictionaryName, Dictionary<string, List<long>> dummyRuns, Dictionary<long, List<Shared.Metrology.WS.Results>> staticRuns, bool useCyclicalForDescription, bool isEAFHosted) :
|
||||
base(new Description(), false, smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null)
|
||||
{
|
||||
_MinFileLength = 10;
|
||||
_NullData = string.Empty;
|
||||
_Logistics = new(this);
|
||||
if (_FileParameter is null)
|
||||
throw new Exception(cellInstanceConnectionName);
|
||||
if (_ModelObjectParameterDefinitions is null)
|
||||
throw new Exception(cellInstanceConnectionName);
|
||||
if (!_IsDuplicator)
|
||||
throw new Exception(cellInstanceConnectionName);
|
||||
}
|
||||
|
||||
void IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, Exception exception)
|
||||
{
|
||||
bool isErrorFile = exception is not null;
|
||||
if (!isErrorFile && !string.IsNullOrEmpty(_Logistics.ReportFullPath))
|
||||
{
|
||||
FileInfo fileInfo = new(_Logistics.ReportFullPath);
|
||||
if (fileInfo.Exists && fileInfo.LastWriteTime < fileInfo.CreationTime)
|
||||
File.SetLastWriteTime(_Logistics.ReportFullPath, fileInfo.CreationTime);
|
||||
}
|
||||
Move(extractResults);
|
||||
}
|
||||
|
||||
void IFileRead.WaitForThread() => WaitForThread(thread: null, threadExceptions: null);
|
||||
|
||||
string IFileRead.GetEventDescription()
|
||||
{
|
||||
string result = _Description.GetEventDescription();
|
||||
return result;
|
||||
}
|
||||
|
||||
List<string> IFileRead.GetHeaderNames()
|
||||
{
|
||||
List<string> results = _Description.GetHeaderNames();
|
||||
return results;
|
||||
}
|
||||
|
||||
string[] IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, string to, string from, string resolvedFileLocation, Exception exception)
|
||||
{
|
||||
string[] results = Move(extractResults, to, from, resolvedFileLocation, exception);
|
||||
return results;
|
||||
}
|
||||
|
||||
JsonProperty[] IFileRead.GetDefault()
|
||||
{
|
||||
JsonProperty[] results = _Description.GetDefault(this, _Logistics);
|
||||
return results;
|
||||
}
|
||||
|
||||
Dictionary<string, string> IFileRead.GetDisplayNamesJsonElement()
|
||||
{
|
||||
Dictionary<string, string> results = _Description.GetDisplayNamesJsonElement(this);
|
||||
return results;
|
||||
}
|
||||
|
||||
List<IDescription> IFileRead.GetDescriptions(IFileRead fileRead, List<Test> tests, IProcessData processData)
|
||||
{
|
||||
List<IDescription> results = _Description.GetDescriptions(fileRead, _Logistics, tests, processData);
|
||||
return results;
|
||||
}
|
||||
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.GetExtractResult(string reportFullPath, string eventName)
|
||||
{
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
|
||||
if (string.IsNullOrEmpty(eventName))
|
||||
throw new Exception();
|
||||
_ReportFullPath = reportFullPath;
|
||||
DateTime dateTime = DateTime.Now;
|
||||
results = GetExtractResult(reportFullPath, dateTime);
|
||||
if (results.Item3 is null)
|
||||
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(results.Item1, Array.Empty<Test>(), JsonSerializer.Deserialize<JsonElement[]>("[]"), results.Item4);
|
||||
if (results.Item3.Length > 0 && _IsEAFHosted)
|
||||
WritePDSF(this, results.Item3);
|
||||
UpdateLastTicksDuration(DateTime.Now.Ticks - dateTime.Ticks);
|
||||
return results;
|
||||
}
|
||||
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.ReExtract()
|
||||
{
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
|
||||
List<string> headerNames = _Description.GetHeaderNames();
|
||||
Dictionary<string, string> keyValuePairs = _Description.GetDisplayNamesJsonElement(this);
|
||||
results = ReExtract(this, headerNames, keyValuePairs);
|
||||
return results;
|
||||
}
|
||||
|
||||
private static void SaveOpenInsightFile(string reportFullPath, DateTime dateTime, List<Description> descriptions, Test[] tests)
|
||||
{
|
||||
if (string.IsNullOrEmpty(reportFullPath))
|
||||
throw new ArgumentException($"'{nameof(reportFullPath)}' cannot be null or empty.", nameof(reportFullPath));
|
||||
if (dateTime == DateTime.MinValue)
|
||||
throw new ArgumentNullException(nameof(dateTime));
|
||||
if (descriptions is null)
|
||||
throw new ArgumentNullException(nameof(descriptions));
|
||||
if (tests is null)
|
||||
throw new ArgumentNullException(nameof(tests));
|
||||
}
|
||||
|
||||
private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
|
||||
{
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
|
||||
ProcessDataStandardFormat processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath);
|
||||
_Logistics = new Logistics(reportFullPath, processDataStandardFormat);
|
||||
SetFileParameterLotIDToLogisticsMID();
|
||||
JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(processDataStandardFormat);
|
||||
List<Description> descriptions = json.ProcessData.GetDescriptions(jsonElements);
|
||||
Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
|
||||
if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
|
||||
SaveOpenInsightFile(reportFullPath, dateTime, descriptions, tests);
|
||||
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics), tests, jsonElements, new List<FileInfo>());
|
||||
return results;
|
||||
}
|
||||
|
||||
}
|
@ -1,144 +0,0 @@
|
||||
using Adaptation.Eaf.Management.ConfigurationData.CellAutomation;
|
||||
using Adaptation.Ifx.Eaf.EquipmentConnector.File.Configuration;
|
||||
using Adaptation.Shared;
|
||||
using Adaptation.Shared.Duplicator;
|
||||
using Adaptation.Shared.Methods;
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Text.Json;
|
||||
|
||||
namespace Adaptation.FileHandlers.OpenInsightMetrologyViewer;
|
||||
|
||||
public class FileRead : Shared.FileRead, IFileRead
|
||||
{
|
||||
|
||||
public FileRead(ISMTP smtp, Dictionary<string, string> fileParameter, string cellInstanceName, int? connectionCount, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList<ModelObjectParameterDefinition> modelObjectParameters, string equipmentDictionaryName, Dictionary<string, List<long>> dummyRuns, Dictionary<long, List<Shared.Metrology.WS.Results>> staticRuns, bool useCyclicalForDescription, bool isEAFHosted) :
|
||||
base(new Description(), false, smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null)
|
||||
{
|
||||
_MinFileLength = 10;
|
||||
_NullData = string.Empty;
|
||||
_Logistics = new(this);
|
||||
if (_FileParameter is null)
|
||||
throw new Exception(cellInstanceConnectionName);
|
||||
if (_ModelObjectParameterDefinitions is null)
|
||||
throw new Exception(cellInstanceConnectionName);
|
||||
if (!_IsDuplicator)
|
||||
throw new Exception(cellInstanceConnectionName);
|
||||
}
|
||||
|
||||
void IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, Exception exception)
|
||||
{
|
||||
bool isErrorFile = exception is not null;
|
||||
if (!isErrorFile && !string.IsNullOrEmpty(_Logistics.ReportFullPath))
|
||||
{
|
||||
FileInfo fileInfo = new(_Logistics.ReportFullPath);
|
||||
if (fileInfo.Exists && fileInfo.LastWriteTime < fileInfo.CreationTime)
|
||||
File.SetLastWriteTime(_Logistics.ReportFullPath, fileInfo.CreationTime);
|
||||
}
|
||||
Move(extractResults);
|
||||
}
|
||||
|
||||
void IFileRead.WaitForThread() => WaitForThread(thread: null, threadExceptions: null);
|
||||
|
||||
string IFileRead.GetEventDescription()
|
||||
{
|
||||
string result = _Description.GetEventDescription();
|
||||
return result;
|
||||
}
|
||||
|
||||
List<string> IFileRead.GetHeaderNames()
|
||||
{
|
||||
List<string> results = _Description.GetHeaderNames();
|
||||
return results;
|
||||
}
|
||||
|
||||
string[] IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, string to, string from, string resolvedFileLocation, Exception exception)
|
||||
{
|
||||
string[] results = Move(extractResults, to, from, resolvedFileLocation, exception);
|
||||
return results;
|
||||
}
|
||||
|
||||
JsonProperty[] IFileRead.GetDefault()
|
||||
{
|
||||
JsonProperty[] results = _Description.GetDefault(this, _Logistics);
|
||||
return results;
|
||||
}
|
||||
|
||||
Dictionary<string, string> IFileRead.GetDisplayNamesJsonElement()
|
||||
{
|
||||
Dictionary<string, string> results = _Description.GetDisplayNamesJsonElement(this);
|
||||
return results;
|
||||
}
|
||||
|
||||
List<IDescription> IFileRead.GetDescriptions(IFileRead fileRead, List<Test> tests, IProcessData processData)
|
||||
{
|
||||
List<IDescription> results = _Description.GetDescriptions(fileRead, _Logistics, tests, processData);
|
||||
return results;
|
||||
}
|
||||
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.GetExtractResult(string reportFullPath, string eventName)
|
||||
{
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
|
||||
if (string.IsNullOrEmpty(eventName))
|
||||
throw new Exception();
|
||||
_ReportFullPath = reportFullPath;
|
||||
DateTime dateTime = DateTime.Now;
|
||||
results = GetExtractResult(reportFullPath, dateTime);
|
||||
if (results.Item3 is null)
|
||||
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(results.Item1, Array.Empty<Test>(), JsonSerializer.Deserialize<JsonElement[]>("[]"), results.Item4);
|
||||
if (results.Item3.Length > 0 && _IsEAFHosted)
|
||||
WritePDSF(this, results.Item3);
|
||||
UpdateLastTicksDuration(DateTime.Now.Ticks - dateTime.Ticks);
|
||||
return results;
|
||||
}
|
||||
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.ReExtract()
|
||||
{
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
|
||||
List<string> headerNames = _Description.GetHeaderNames();
|
||||
Dictionary<string, string> keyValuePairs = _Description.GetDisplayNamesJsonElement(this);
|
||||
results = ReExtract(this, headerNames, keyValuePairs);
|
||||
return results;
|
||||
}
|
||||
|
||||
#pragma warning disable IDE0060, CA1822
|
||||
private void SendData(string reportFullPath, DateTime dateTime, List<Description> descriptions)
|
||||
#pragma warning restore IDE0060, CA1822
|
||||
{
|
||||
// WSRequest wsRequest = new(this, _Logistics, descriptions);
|
||||
// int weekOfYear = _Calendar.GetWeekOfYear(dateTime, CalendarWeekRule.FirstDay, DayOfWeek.Sunday);
|
||||
// string directory = Path.Combine(_OpenInsightMetrologyViewerFileShare, dateTime.Year.ToString(), $"WW{weekOfYear:00}");
|
||||
// (string jsonResults, WS.Results wsResults) = WS.SendData(_OpenInsightMetrologyViewerAPI, _Logistics.Sequence, directory, wsRequest);
|
||||
// if (!wsResults.Success)
|
||||
// throw new Exception(wsResults.ToString());
|
||||
// _Log.Debug(wsResults.HeaderId);
|
||||
// lock (_StaticRuns)
|
||||
// {
|
||||
// if (!_StaticRuns.ContainsKey(_Logistics.Sequence))
|
||||
// _StaticRuns.Add(_Logistics.Sequence, new());
|
||||
// _StaticRuns[_Logistics.Sequence].Add(jsonResults);
|
||||
// }
|
||||
// string checkDirectory = Path.Combine(directory, wsResults.HeaderId.ToString());
|
||||
// if (!Directory.Exists(checkDirectory))
|
||||
// _ = Directory.CreateDirectory(checkDirectory);
|
||||
// File.Copy(reportFullPath, Path.Combine(checkDirectory, Path.GetFileName(reportFullPath)), overwrite: true);
|
||||
}
|
||||
|
||||
private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
|
||||
{
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
|
||||
ProcessDataStandardFormat processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath);
|
||||
_Logistics = new Logistics(reportFullPath, processDataStandardFormat);
|
||||
SetFileParameterLotIDToLogisticsMID();
|
||||
JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(processDataStandardFormat);
|
||||
List<Description> descriptions = json.ProcessData.GetDescriptions(jsonElements);
|
||||
Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
|
||||
if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
|
||||
SendData(reportFullPath, dateTime, descriptions);
|
||||
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics), tests, jsonElements, new List<FileInfo>());
|
||||
return results;
|
||||
}
|
||||
|
||||
}
|
@ -1,161 +0,0 @@
|
||||
using Adaptation.Eaf.Management.ConfigurationData.CellAutomation;
|
||||
using Adaptation.Ifx.Eaf.EquipmentConnector.File.Configuration;
|
||||
using Adaptation.Shared;
|
||||
using Adaptation.Shared.Duplicator;
|
||||
using Adaptation.Shared.Methods;
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Globalization;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Text.Json;
|
||||
|
||||
namespace Adaptation.FileHandlers.OpenInsightMetrologyViewerAttachments;
|
||||
|
||||
public class FileRead : Shared.FileRead, IFileRead
|
||||
{
|
||||
|
||||
private readonly string _OpenInsightMetrologyViewerAPI;
|
||||
private readonly string _OpenInsightMetrologyViewerFileShare;
|
||||
|
||||
public FileRead(ISMTP smtp, Dictionary<string, string> fileParameter, string cellInstanceName, int? connectionCount, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList<ModelObjectParameterDefinition> modelObjectParameters, string equipmentDictionaryName, Dictionary<string, List<long>> dummyRuns, Dictionary<long, List<Shared.Metrology.WS.Results>> staticRuns, bool useCyclicalForDescription, bool isEAFHosted) :
|
||||
base(new Description(), false, smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null)
|
||||
{
|
||||
_MinFileLength = 10;
|
||||
_NullData = string.Empty;
|
||||
_Logistics = new(this);
|
||||
if (_FileParameter is null)
|
||||
throw new Exception(cellInstanceConnectionName);
|
||||
if (_ModelObjectParameterDefinitions is null)
|
||||
throw new Exception(cellInstanceConnectionName);
|
||||
if (!_IsDuplicator)
|
||||
throw new Exception(cellInstanceConnectionName);
|
||||
_OpenInsightMetrologyViewerAPI = GetPropertyValue(cellInstanceConnectionName, modelObjectParameters, "OpenInsight.MetrologyViewerAPI");
|
||||
_OpenInsightMetrologyViewerFileShare = GetPropertyValue(cellInstanceConnectionName, modelObjectParameters, "OpenInsight.MetrologyViewerFileShare");
|
||||
}
|
||||
|
||||
void IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, Exception exception)
|
||||
{
|
||||
bool isErrorFile = exception is not null;
|
||||
if (!isErrorFile && !string.IsNullOrEmpty(_Logistics.ReportFullPath))
|
||||
{
|
||||
FileInfo fileInfo = new(_Logistics.ReportFullPath);
|
||||
if (fileInfo.Exists && fileInfo.LastWriteTime < fileInfo.CreationTime)
|
||||
File.SetLastWriteTime(_Logistics.ReportFullPath, fileInfo.CreationTime);
|
||||
}
|
||||
Move(extractResults);
|
||||
}
|
||||
|
||||
void IFileRead.WaitForThread() => WaitForThread(thread: null, threadExceptions: null);
|
||||
|
||||
string IFileRead.GetEventDescription()
|
||||
{
|
||||
string result = _Description.GetEventDescription();
|
||||
return result;
|
||||
}
|
||||
|
||||
List<string> IFileRead.GetHeaderNames()
|
||||
{
|
||||
List<string> results = _Description.GetHeaderNames();
|
||||
return results;
|
||||
}
|
||||
|
||||
string[] IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, string to, string from, string resolvedFileLocation, Exception exception)
|
||||
{
|
||||
string[] results = Move(extractResults, to, from, resolvedFileLocation, exception);
|
||||
return results;
|
||||
}
|
||||
|
||||
JsonProperty[] IFileRead.GetDefault()
|
||||
{
|
||||
JsonProperty[] results = _Description.GetDefault(this, _Logistics);
|
||||
return results;
|
||||
}
|
||||
|
||||
Dictionary<string, string> IFileRead.GetDisplayNamesJsonElement()
|
||||
{
|
||||
Dictionary<string, string> results = _Description.GetDisplayNamesJsonElement(this);
|
||||
return results;
|
||||
}
|
||||
|
||||
List<IDescription> IFileRead.GetDescriptions(IFileRead fileRead, List<Test> tests, IProcessData processData)
|
||||
{
|
||||
List<IDescription> results = _Description.GetDescriptions(fileRead, _Logistics, tests, processData);
|
||||
return results;
|
||||
}
|
||||
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.GetExtractResult(string reportFullPath, string eventName)
|
||||
{
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
|
||||
if (string.IsNullOrEmpty(eventName))
|
||||
throw new Exception();
|
||||
_ReportFullPath = reportFullPath;
|
||||
DateTime dateTime = DateTime.Now;
|
||||
results = GetExtractResult(reportFullPath, dateTime);
|
||||
if (results.Item3 is null)
|
||||
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(results.Item1, Array.Empty<Test>(), JsonSerializer.Deserialize<JsonElement[]>("[]"), results.Item4);
|
||||
if (results.Item3.Length > 0 && _IsEAFHosted)
|
||||
WritePDSF(this, results.Item3);
|
||||
UpdateLastTicksDuration(DateTime.Now.Ticks - dateTime.Ticks);
|
||||
return results;
|
||||
}
|
||||
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.ReExtract()
|
||||
{
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
|
||||
List<string> headerNames = _Description.GetHeaderNames();
|
||||
Dictionary<string, string> keyValuePairs = _Description.GetDisplayNamesJsonElement(this);
|
||||
results = ReExtract(this, headerNames, keyValuePairs);
|
||||
return results;
|
||||
}
|
||||
|
||||
#nullable enable
|
||||
|
||||
private string? GetHeaderIdDirectory(long headerId)
|
||||
{
|
||||
string? result = null;
|
||||
int weekNum;
|
||||
string year;
|
||||
string weekDirectory;
|
||||
string checkDirectory;
|
||||
DateTime[] dateTimes = new DateTime[] { _Logistics.DateTimeFromSequence, _Logistics.DateTimeFromSequence.AddDays(-6.66) };
|
||||
foreach (DateTime dateTime in dateTimes)
|
||||
{
|
||||
year = dateTime.Year.ToString();
|
||||
weekNum = _Calendar.GetWeekOfYear(dateTime, CalendarWeekRule.FirstDay, DayOfWeek.Sunday);
|
||||
weekDirectory = Path.Combine(_OpenInsightMetrologyViewerFileShare, dateTime.Year.ToString(), $"WW{weekNum:00}");
|
||||
if (!Directory.Exists(weekDirectory))
|
||||
_ = Directory.CreateDirectory(weekDirectory);
|
||||
checkDirectory = Path.Combine(weekDirectory, $"-{headerId}");
|
||||
if (!Directory.Exists(checkDirectory))
|
||||
continue;
|
||||
result = checkDirectory;
|
||||
break;
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
private static void PostOpenInsightMetrologyViewerAttachments(List<Description> descriptions)
|
||||
{
|
||||
if (descriptions is null)
|
||||
throw new ArgumentNullException(nameof(descriptions));
|
||||
}
|
||||
|
||||
private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
|
||||
{
|
||||
if (dateTime == DateTime.MinValue)
|
||||
throw new ArgumentNullException(nameof(dateTime));
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
|
||||
ProcessDataStandardFormat processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath);
|
||||
_Logistics = new Logistics(reportFullPath, processDataStandardFormat);
|
||||
SetFileParameterLotIDToLogisticsMID();
|
||||
JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(processDataStandardFormat);
|
||||
List<Description> descriptions = json.ProcessData.GetDescriptions(jsonElements);
|
||||
Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
|
||||
if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
|
||||
PostOpenInsightMetrologyViewerAttachments(descriptions);
|
||||
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics), tests, jsonElements, new List<FileInfo>());
|
||||
return results;
|
||||
}
|
||||
|
||||
}
|
@ -1,192 +0,0 @@
|
||||
using Adaptation.Eaf.Management.ConfigurationData.CellAutomation;
|
||||
using Adaptation.Ifx.Eaf.EquipmentConnector.File.Configuration;
|
||||
using Adaptation.Shared;
|
||||
using Adaptation.Shared.Duplicator;
|
||||
using Adaptation.Shared.Methods;
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Text.Json;
|
||||
|
||||
namespace Adaptation.FileHandlers.Processed;
|
||||
|
||||
public class FileRead : Shared.FileRead, IFileRead
|
||||
{
|
||||
|
||||
private readonly string _JobIdParentDirectory;
|
||||
private readonly string _JobIdProcessParentDirectory;
|
||||
|
||||
public FileRead(ISMTP smtp, Dictionary<string, string> fileParameter, string cellInstanceName, int? connectionCount, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList<ModelObjectParameterDefinition> modelObjectParameters, string equipmentDictionaryName, Dictionary<string, List<long>> dummyRuns, Dictionary<long, List<Shared.Metrology.WS.Results>> staticRuns, bool useCyclicalForDescription, bool isEAFHosted) :
|
||||
base(new Description(), false, smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null)
|
||||
{
|
||||
_MinFileLength = 10;
|
||||
_NullData = string.Empty;
|
||||
_Logistics = new(this);
|
||||
if (_FileParameter is null)
|
||||
throw new Exception(cellInstanceConnectionName);
|
||||
if (_ModelObjectParameterDefinitions is null)
|
||||
throw new Exception(cellInstanceConnectionName);
|
||||
if (!_IsDuplicator)
|
||||
throw new Exception(cellInstanceConnectionName);
|
||||
_JobIdParentDirectory = GetJobIdParentDirectory(_FileConnectorConfiguration.SourceFileLocation);
|
||||
_JobIdProcessParentDirectory = GetJobIdParentDirectory(_FileConnectorConfiguration.TargetFileLocation);
|
||||
}
|
||||
|
||||
void IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, Exception exception)
|
||||
{
|
||||
bool isErrorFile = exception is not null;
|
||||
if (!isErrorFile && !string.IsNullOrEmpty(_Logistics.ReportFullPath))
|
||||
{
|
||||
FileInfo fileInfo = new(_Logistics.ReportFullPath);
|
||||
if (fileInfo.Exists && fileInfo.LastWriteTime < fileInfo.CreationTime)
|
||||
File.SetLastWriteTime(_Logistics.ReportFullPath, fileInfo.CreationTime);
|
||||
}
|
||||
Move(extractResults);
|
||||
}
|
||||
|
||||
void IFileRead.WaitForThread() => WaitForThread(thread: null, threadExceptions: null);
|
||||
|
||||
string IFileRead.GetEventDescription()
|
||||
{
|
||||
string result = _Description.GetEventDescription();
|
||||
return result;
|
||||
}
|
||||
|
||||
List<string> IFileRead.GetHeaderNames()
|
||||
{
|
||||
List<string> results = _Description.GetHeaderNames();
|
||||
return results;
|
||||
}
|
||||
|
||||
string[] IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, string to, string from, string resolvedFileLocation, Exception exception)
|
||||
{
|
||||
string[] results = Move(extractResults, to, from, resolvedFileLocation, exception);
|
||||
return results;
|
||||
}
|
||||
|
||||
JsonProperty[] IFileRead.GetDefault()
|
||||
{
|
||||
JsonProperty[] results = _Description.GetDefault(this, _Logistics);
|
||||
return results;
|
||||
}
|
||||
|
||||
Dictionary<string, string> IFileRead.GetDisplayNamesJsonElement()
|
||||
{
|
||||
Dictionary<string, string> results = _Description.GetDisplayNamesJsonElement(this);
|
||||
return results;
|
||||
}
|
||||
|
||||
List<IDescription> IFileRead.GetDescriptions(IFileRead fileRead, List<Test> tests, IProcessData processData)
|
||||
{
|
||||
List<IDescription> results = _Description.GetDescriptions(fileRead, _Logistics, tests, processData);
|
||||
return results;
|
||||
}
|
||||
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.GetExtractResult(string reportFullPath, string eventName)
|
||||
{
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
|
||||
if (string.IsNullOrEmpty(eventName))
|
||||
throw new Exception();
|
||||
_ReportFullPath = reportFullPath;
|
||||
DateTime dateTime = DateTime.Now;
|
||||
results = GetExtractResult(reportFullPath, dateTime);
|
||||
if (results.Item3 is null)
|
||||
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(results.Item1, Array.Empty<Test>(), JsonSerializer.Deserialize<JsonElement[]>("[]"), results.Item4);
|
||||
if (results.Item3.Length > 0 && _IsEAFHosted)
|
||||
WritePDSF(this, results.Item3);
|
||||
UpdateLastTicksDuration(DateTime.Now.Ticks - dateTime.Ticks);
|
||||
return results;
|
||||
}
|
||||
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.ReExtract()
|
||||
{
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
|
||||
List<string> headerNames = _Description.GetHeaderNames();
|
||||
Dictionary<string, string> keyValuePairs = _Description.GetDisplayNamesJsonElement(this);
|
||||
results = ReExtract(this, headerNames, keyValuePairs);
|
||||
return results;
|
||||
}
|
||||
|
||||
private void DirectoryMove(string reportFullPath, DateTime dateTime, List<Description> descriptions)
|
||||
{
|
||||
if (dateTime == DateTime.MinValue)
|
||||
throw new ArgumentNullException(nameof(dateTime));
|
||||
if (descriptions is null)
|
||||
throw new ArgumentNullException(nameof(descriptions));
|
||||
FileInfo fileInfo = new(reportFullPath);
|
||||
_ = _Logistics.Sequence.ToString();
|
||||
string jobIdDirectory = Path.Combine(_JobIdParentDirectory, _Logistics.JobID);
|
||||
if (!Directory.Exists(jobIdDirectory))
|
||||
_ = Directory.CreateDirectory(jobIdDirectory);
|
||||
string[] matchDirectories = GetInProcessDirectory(jobIdDirectory);
|
||||
if (matchDirectories.Length != 1)
|
||||
throw new Exception("Didn't find directory by logistics sequence");
|
||||
if (fileInfo.Exists && fileInfo.LastWriteTime < fileInfo.CreationTime)
|
||||
File.SetLastWriteTime(reportFullPath, fileInfo.CreationTime);
|
||||
if (_JobIdProcessParentDirectory is null)
|
||||
{ }
|
||||
// OpenInsightMetrologyViewer.WSRequest wsRequest = new(this, _Logistics, descriptions);
|
||||
// JsonSerializerOptions jsonSerializerOptions = new() { WriteIndented = true };
|
||||
// string json = JsonSerializer.Serialize(wsRequest, wsRequest.GetType(), jsonSerializerOptions);
|
||||
// string directoryName = $"{Path.GetFileName(matchDirectories[0]).Split(new string[] { logisticsSequence }, StringSplitOptions.None)[0]}{_Logistics.DateTimeFromSequence:yyyy-MM-dd_hh;mm_tt_}{DateTime.Now.Ticks - _Logistics.Sequence}";
|
||||
// string destinationJobIdDirectory = Path.Combine(_JobIdProcessParentDirectory, _Logistics.JobID, directoryName);
|
||||
// string sequenceDirectory = Path.Combine(destinationJobIdDirectory, logisticsSequence);
|
||||
// string jsonFileName = Path.Combine(sequenceDirectory, $"{Path.GetFileNameWithoutExtension(reportFullPath)}.json");
|
||||
// Directory.Move(matchDirectories[0], destinationJobIdDirectory);
|
||||
// if (!Directory.Exists(sequenceDirectory))
|
||||
// _ = Directory.CreateDirectory(sequenceDirectory);
|
||||
// File.Copy(reportFullPath, Path.Combine(sequenceDirectory, Path.GetFileName(reportFullPath)), overwrite: true);
|
||||
// File.WriteAllText(jsonFileName, json);
|
||||
}
|
||||
|
||||
private static void MoveMatchingFile(string jobIdDirectory, string matchDirectory)
|
||||
{
|
||||
string checkFile;
|
||||
string jobIdDirectoryFileName;
|
||||
string matchDirectoryFileName;
|
||||
string[] jobIdDirectoryFiles = Directory.GetFiles(jobIdDirectory, "*", SearchOption.TopDirectoryOnly);
|
||||
string[] matchDirectoryFiles = Directory.GetFiles(matchDirectory, "*", SearchOption.TopDirectoryOnly);
|
||||
foreach (string jobIdDirectoryFile in jobIdDirectoryFiles)
|
||||
{
|
||||
jobIdDirectoryFileName = Path.GetFileName(jobIdDirectoryFile);
|
||||
foreach (string matchDirectoryFile in matchDirectoryFiles)
|
||||
{
|
||||
matchDirectoryFileName = Path.GetFileName(matchDirectoryFile);
|
||||
if (jobIdDirectoryFileName.StartsWith(matchDirectoryFileName))
|
||||
{
|
||||
checkFile = Path.Combine(matchDirectory, jobIdDirectoryFileName);
|
||||
if (File.Exists(checkFile))
|
||||
continue;
|
||||
File.Move(jobIdDirectoryFile, checkFile);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
|
||||
{
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
|
||||
ProcessDataStandardFormat processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath);
|
||||
_Logistics = new Logistics(reportFullPath, processDataStandardFormat);
|
||||
SetFileParameterLotIDToLogisticsMID();
|
||||
JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(processDataStandardFormat);
|
||||
List<Description> descriptions = json.ProcessData.GetDescriptions(jsonElements);
|
||||
Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
|
||||
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics), tests, jsonElements, new List<FileInfo>());
|
||||
if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
|
||||
DirectoryMove(reportFullPath, dateTime, descriptions);
|
||||
else if (!_IsEAFHosted)
|
||||
{
|
||||
// OpenInsightMetrologyViewer.WSRequest wsRequest = new(this, _Logistics, descriptions);
|
||||
// JsonSerializerOptions jsonSerializerOptions = new() { WriteIndented = true };
|
||||
// string json = JsonSerializer.Serialize(wsRequest, wsRequest.GetType(), jsonSerializerOptions);
|
||||
// string jsonFileName = Path.ChangeExtension(reportFullPath, ".json");
|
||||
// string historicalText = File.ReadAllText(jsonFileName);
|
||||
// if (json != historicalText)
|
||||
// throw new Exception("File doesn't match historical!");
|
||||
}
|
||||
return results;
|
||||
}
|
||||
|
||||
}
|
@ -1,132 +0,0 @@
|
||||
using Adaptation.Eaf.Management.ConfigurationData.CellAutomation;
|
||||
using Adaptation.Ifx.Eaf.EquipmentConnector.File.Configuration;
|
||||
using Adaptation.Shared;
|
||||
using Adaptation.Shared.Duplicator;
|
||||
using Adaptation.Shared.Methods;
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Text.Json;
|
||||
|
||||
namespace Adaptation.FileHandlers.SPaCe;
|
||||
|
||||
public class FileRead : Shared.FileRead, IFileRead
|
||||
{
|
||||
|
||||
public FileRead(ISMTP smtp, Dictionary<string, string> fileParameter, string cellInstanceName, int? connectionCount, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList<ModelObjectParameterDefinition> modelObjectParameters, string equipmentDictionaryName, Dictionary<string, List<long>> dummyRuns, Dictionary<long, List<Shared.Metrology.WS.Results>> staticRuns, bool useCyclicalForDescription, bool isEAFHosted) :
|
||||
base(new Description(), false, smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null)
|
||||
{
|
||||
_MinFileLength = 10;
|
||||
_NullData = string.Empty;
|
||||
_Logistics = new(this);
|
||||
if (_FileParameter is null)
|
||||
throw new Exception(cellInstanceConnectionName);
|
||||
if (_ModelObjectParameterDefinitions is null)
|
||||
throw new Exception(cellInstanceConnectionName);
|
||||
if (!_IsDuplicator)
|
||||
throw new Exception(cellInstanceConnectionName);
|
||||
}
|
||||
|
||||
void IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, Exception exception)
|
||||
{
|
||||
bool isErrorFile = exception is not null;
|
||||
if (!isErrorFile && !string.IsNullOrEmpty(_Logistics.ReportFullPath))
|
||||
{
|
||||
FileInfo fileInfo = new(_Logistics.ReportFullPath);
|
||||
if (fileInfo.Exists && fileInfo.LastWriteTime < fileInfo.CreationTime)
|
||||
File.SetLastWriteTime(_Logistics.ReportFullPath, fileInfo.CreationTime);
|
||||
}
|
||||
Move(extractResults);
|
||||
}
|
||||
|
||||
void IFileRead.WaitForThread() => WaitForThread(thread: null, threadExceptions: null);
|
||||
|
||||
string IFileRead.GetEventDescription()
|
||||
{
|
||||
string result = _Description.GetEventDescription();
|
||||
return result;
|
||||
}
|
||||
|
||||
List<string> IFileRead.GetHeaderNames()
|
||||
{
|
||||
List<string> results = _Description.GetHeaderNames();
|
||||
return results;
|
||||
}
|
||||
|
||||
string[] IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, string to, string from, string resolvedFileLocation, Exception exception)
|
||||
{
|
||||
string[] results = Move(extractResults, to, from, resolvedFileLocation, exception);
|
||||
return results;
|
||||
}
|
||||
|
||||
JsonProperty[] IFileRead.GetDefault()
|
||||
{
|
||||
JsonProperty[] results = _Description.GetDefault(this, _Logistics);
|
||||
return results;
|
||||
}
|
||||
|
||||
Dictionary<string, string> IFileRead.GetDisplayNamesJsonElement()
|
||||
{
|
||||
Dictionary<string, string> results = _Description.GetDisplayNamesJsonElement(this);
|
||||
return results;
|
||||
}
|
||||
|
||||
List<IDescription> IFileRead.GetDescriptions(IFileRead fileRead, List<Test> tests, IProcessData processData)
|
||||
{
|
||||
List<IDescription> results = _Description.GetDescriptions(fileRead, _Logistics, tests, processData);
|
||||
return results;
|
||||
}
|
||||
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.GetExtractResult(string reportFullPath, string eventName)
|
||||
{
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
|
||||
if (string.IsNullOrEmpty(eventName))
|
||||
throw new Exception();
|
||||
_ReportFullPath = reportFullPath;
|
||||
DateTime dateTime = DateTime.Now;
|
||||
results = GetExtractResult(reportFullPath, dateTime);
|
||||
if (results.Item3 is null)
|
||||
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(results.Item1, Array.Empty<Test>(), JsonSerializer.Deserialize<JsonElement[]>("[]"), results.Item4);
|
||||
if (results.Item3.Length > 0 && _IsEAFHosted)
|
||||
WritePDSF(this, results.Item3);
|
||||
UpdateLastTicksDuration(DateTime.Now.Ticks - dateTime.Ticks);
|
||||
return results;
|
||||
}
|
||||
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.ReExtract()
|
||||
{
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
|
||||
List<string> headerNames = _Description.GetHeaderNames();
|
||||
Dictionary<string, string> keyValuePairs = _Description.GetDisplayNamesJsonElement(this);
|
||||
results = ReExtract(this, headerNames, keyValuePairs);
|
||||
return results;
|
||||
}
|
||||
|
||||
private void FileCopy<T>(string reportFullPath, DateTime dateTime, List<T> descriptions) where T : Shared.Properties.IDescription
|
||||
{
|
||||
bool isDummyRun = false;
|
||||
string successDirectory = string.Empty;
|
||||
List<(Shared.Properties.IScopeInfo, string)> collection = new();
|
||||
string duplicateDirectory = _FileConnectorConfiguration.TargetFileLocation;
|
||||
string duplicateFile = Path.Combine(duplicateDirectory, Path.GetFileName(reportFullPath));
|
||||
File.Copy(reportFullPath, duplicateFile, overwrite: true);
|
||||
WaitForFileConsumption(dateTime, descriptions, isDummyRun, successDirectory, duplicateDirectory, collection, duplicateFile);
|
||||
}
|
||||
|
||||
private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
|
||||
{
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
|
||||
ProcessDataStandardFormat processDataStandardFormat = ProcessDataStandardFormat.GetProcessDataStandardFormat(reportFullPath);
|
||||
_Logistics = new Logistics(reportFullPath, processDataStandardFormat);
|
||||
SetFileParameterLotIDToLogisticsMID();
|
||||
JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(processDataStandardFormat);
|
||||
List<Shared.Properties.IDescription> descriptions = GetDuplicatorDescriptions(jsonElements);
|
||||
Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
|
||||
if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
|
||||
FileCopy(reportFullPath, dateTime, descriptions);
|
||||
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(string.Join(Environment.NewLine, processDataStandardFormat.Logistics), tests, jsonElements, new List<FileInfo>());
|
||||
return results;
|
||||
}
|
||||
|
||||
}
|
@ -144,6 +144,12 @@ function updateRecordCoD(b: any, r: any, t: any, c: any, e: any, w: any, workIte
let timeCriticality = workItem.TimeCriticality;
let riskReductionMinusOpportunityEnablement = workItem.RiskReductionMinusOpportunityEnablement;
let weightedShortestJobFirst = workItem.WeightedShortestJobFirst == undefined ? null : workItem.WeightedShortestJobFirst.toFixed(2);
let weightedShortestJobFirstFibonacci = workItem.WeightedShortestJobFirstFibonacci == undefined ? null : workItem.WeightedShortestJobFirstFibonacci.toFixed(2);
if (weightedShortestJobFirst == null || weightedShortestJobFirstFibonacci == null || weightedShortestJobFirst != weightedShortestJobFirstFibonacci) {
    if (weightedShortestJobFirst != weightedShortestJobFirstFibonacci) {

    }
}
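// Note: both values are compared after rounding with toFixed(2); the inner branch above is currently a no-op.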
workItem.CumulativeStoryPoints = ' ';
workItem.TotalStoryPoints = totalStoryPoints + ' User Story Point(s)';
workItem.AbsoluteDelta = data.Effort == undefined || data.Effort.FibonacciAverage == undefined || totalStoryPoints == undefined || totalStoryPoints === 0 ? ' ' : round(Math.abs(data.Effort.FibonacciAverage - ((totalStoryPoints / highestTotalStoryPoints) * 5)), 1);
@ -167,44 +173,55 @@ function updateRecordCoD(b: any, r: any, t: any, c: any, e: any, w: any, workIte
|
||||
workItem.api = '';
|
||||
} else {
|
||||
workItem.api = `
|
||||
### Work Item Patch ${check} WSJF ${workItem.Id} ${weightedShortestJobFirst} != ${workItem.WeightedShortestJobFirst}
|
||||
### Work Item Patch ${check} WSJF ${workItem.Id} ${weightedShortestJobFirst} != ${workItem.WeightedShortestJobFirst}
|
||||
|
||||
patch {{Factory-Integration}}/_apis/wit/workitems/${workItem.Id}?api-version=7.0
|
||||
Authorization: Basic {{PAT}}
|
||||
Content-Type: application/json-patch+json
|
||||
patch {{Factory-Integration}}/_apis/wit/workitems/${workItem.Id}?api-version=7.0
|
||||
Authorization: Basic {{PAT}}
|
||||
Content-Type: application/json-patch+json
|
||||
|
||||
[
|
||||
{
|
||||
"op": "replace",
|
||||
"path": "/fields/Microsoft.VSTS.Common.BusinessValue",
|
||||
"value": "${workItem.BusinessValue}"
|
||||
},
|
||||
{
|
||||
"op": "replace",
|
||||
"path": "/fields/Microsoft.VSTS.Scheduling.Effort",
|
||||
"value": "${workItem.Effort}"
|
||||
},
|
||||
{
|
||||
"op": "replace",
|
||||
"path": "/fields/Custom.RRminusOE",
|
||||
"value": "${workItem.RiskReductionMinusOpportunityEnablement}"
|
||||
},
|
||||
{
|
||||
"op": "replace",
|
||||
"path": "/fields/Microsoft.VSTS.Common.TimeCriticality",
|
||||
"value": "${workItem.TimeCriticality}"
|
||||
},
|
||||
{
|
||||
"op": "replace",
|
||||
"path": "/fields/Custom.WSJF",
|
||||
"value": "${workItem.WeightedShortestJobFirst}"
|
||||
},
|
||||
{
|
||||
"op": "replace",
|
||||
"path": "/fields/Custom.WSJFFib",
|
||||
"value": "${workItem.WeightedShortestJobFirst}"
|
||||
}
|
||||
]`;
|
||||
[
|
||||
{
|
||||
"op": "replace",
|
||||
"path": "/fields/Microsoft.VSTS.Common.BusinessValue",
|
||||
"value": "${workItem.BusinessValue}"
|
||||
},
|
||||
{
|
||||
"op": "replace",
|
||||
"path": "/fields/Microsoft.VSTS.Scheduling.Effort",
|
||||
"value": "${workItem.Effort}"
|
||||
},
|
||||
{
|
||||
"op": "replace",
|
||||
"path": "/fields/Custom.RRminusOE",
|
||||
"value": "${workItem.RiskReductionMinusOpportunityEnablement}"
|
||||
},
|
||||
{
|
||||
"op": "replace",
|
||||
"path": "/fields/Microsoft.VSTS.Common.TimeCriticality",
|
||||
"value": "${workItem.TimeCriticality}"
|
||||
},
|
||||
{
|
||||
"op": "replace",
|
||||
"path": "/fields/Custom.WSJF",
|
||||
"value": "${workItem.WeightedShortestJobFirst}"
|
||||
},
|
||||
{
|
||||
"op": "replace",
|
||||
"path": "/fields/Custom.WSJFFib",
|
||||
"value": "${workItem.WeightedShortestJobFirst}"
|
||||
}
|
||||
]
|
||||
|
||||
HTTP 200
|
||||
[Asserts]
|
||||
header "Content-Type" == "application/json; charset=utf-8; api-version=7.0"
|
||||
jsonpath "$.id" == ${workItem.Id}
|
||||
jsonpath "$.fields['Microsoft.VSTS.Common.BusinessValue']" == ${workItem.BusinessValue}
|
||||
jsonpath "$.fields['Microsoft.VSTS.Scheduling.Effort']" == ${workItem.Effort}
|
||||
jsonpath "$.fields['Custom.RRminusOE']" == ${workItem.RiskReductionMinusOpportunityEnablement}
|
||||
jsonpath "$.fields['Microsoft.VSTS.Common.TimeCriticality']" == ${workItem.TimeCriticality}
|
||||
jsonpath "$.fields['Custom.WSJF']" == ${workItem.WeightedShortestJobFirst}
|
||||
jsonpath "$.fields['Custom.WSJFFib']" == ${workItem.WeightedShortestJobFirst}`;
|
||||
}
|
||||
}
|
||||
}
|
||||
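The workItem.api value assembled above appears to follow Hurl's plain-text request format: a PATCH against the Azure DevOps work-item REST endpoint with a JSON Patch body, followed by an expected HTTP 200 and jsonpath assertions on the updated fields.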
@ -276,9 +293,9 @@ function getRecords(b: any, r: any, t: any, c: any, e: any, w: any, data: any, d
return records;
}

function getHtmlTextAndHttp(fromHtml: any, b: any, r: any, t: any, c: any, e: any, w: any, records: any) {
function getHtmlTextAndApi(fromHtml: any, b: any, r: any, t: any, c: any, e: any, w: any, records: any) {
    let record;
    let http = '';
    let api = '';
    let lineA = '';
    let lineB = '';
    let lineC = '';
@ -287,7 +304,7 @@ function getHtmlTextAndHttp(fromHtml: any, b: any, r: any, t: any, c: any, e: an
    for (let i = 0; i < records.length; i++) {
        record = records[i];
        if (record.api !== '') {
            http += record.api + '\r\n';
            api += record.api + '\r\n';
        }
        text += record.Id + '\t' +
            record.RiskReductionMinusOpportunityEnablement + '\t' +
@ -362,7 +379,7 @@ function getHtmlTextAndHttp(fromHtml: any, b: any, r: any, t: any, c: any, e: an
        console.log(text);
        html += lineA + lineB + lineC;
    }
    return { html, text, http };
    return { html, text, http: api };
}

const username = '';
@ -425,7 +442,7 @@ fetch(workItems.b)
        if (dataA.length > 0)
            console.log(dataA[0]);
        const records = getRecords(b, r, t, c, e, w, dataA, dataB, workItems);
        let result = getHtmlTextAndHttp(fromHtml, b, r, t, c, e, w, records);
        let result = getHtmlTextAndApi(fromHtml, b, r, t, c, e, w, records);
        if (result == undefined) { }
    })
    .catch((e) => console.error(e));
|
@@ -167,44 +167,55 @@ function updateRecordCoD(b, r, t, c, e, w, workItem, highestTotalStoryPoints, da
workItem.api = '';
} else {
workItem.api = `
### Work Item Patch ${check} WSJF ${workItem.Id} ${weightedShortestJobFirst} != ${workItem.WeightedShortestJobFirst}
### Work Item Patch ${check} WSJF ${workItem.Id} ${weightedShortestJobFirst} != ${workItem.WeightedShortestJobFirst}

patch {{Factory-Integration}}/_apis/wit/workitems/${workItem.Id}?api-version=7.0
Authorization: Basic {{PAT}}
Content-Type: application/json-patch+json
patch {{Factory-Integration}}/_apis/wit/workitems/${workItem.Id}?api-version=7.0
Authorization: Basic {{PAT}}
Content-Type: application/json-patch+json

[
{
"op": "replace",
"path": "/fields/Microsoft.VSTS.Common.BusinessValue",
"value": "${workItem.BusinessValue}"
},
{
"op": "replace",
"path": "/fields/Microsoft.VSTS.Scheduling.Effort",
"value": "${workItem.Effort}"
},
{
"op": "replace",
"path": "/fields/Custom.RRminusOE",
"value": "${workItem.RiskReductionMinusOpportunityEnablement}"
},
{
"op": "replace",
"path": "/fields/Microsoft.VSTS.Common.TimeCriticality",
"value": "${workItem.TimeCriticality}"
},
{
"op": "replace",
"path": "/fields/Custom.WSJF",
"value": "${workItem.WeightedShortestJobFirst}"
},
{
"op": "replace",
"path": "/fields/Custom.WSJFFib",
"value": "${workItem.WeightedShortestJobFirst}"
}
]`;
[
{
"op": "replace",
"path": "/fields/Microsoft.VSTS.Common.BusinessValue",
"value": "${workItem.BusinessValue}"
},
{
"op": "replace",
"path": "/fields/Microsoft.VSTS.Scheduling.Effort",
"value": "${workItem.Effort}"
},
{
"op": "replace",
"path": "/fields/Custom.RRminusOE",
"value": "${workItem.RiskReductionMinusOpportunityEnablement}"
},
{
"op": "replace",
"path": "/fields/Microsoft.VSTS.Common.TimeCriticality",
"value": "${workItem.TimeCriticality}"
},
{
"op": "replace",
"path": "/fields/Custom.WSJF",
"value": "${workItem.WeightedShortestJobFirst}"
},
{
"op": "replace",
"path": "/fields/Custom.WSJFFib",
"value": "${workItem.WeightedShortestJobFirst}"
}
]

HTTP 200
[Asserts]
header "Content-Type" == "application/json; charset=utf-8; api-version=7.0"
jsonpath "$.id" == ${workItem.Id}
jsonpath "$.fields['Microsoft.VSTS.Common.BusinessValue']" == ${workItem.BusinessValue}
jsonpath "$.fields['Microsoft.VSTS.Scheduling.Effort']" == ${workItem.Effort}
jsonpath "$.fields['Custom.RRminusOE']" == ${workItem.RiskReductionMinusOpportunityEnablement}
jsonpath "$.fields['Microsoft.VSTS.Common.TimeCriticality']" == ${workItem.TimeCriticality}
jsonpath "$.fields['Custom.WSJF']" == ${workItem.WeightedShortestJobFirst}
jsonpath "$.fields['Custom.WSJFFib']" == ${workItem.WeightedShortestJobFirst}`;
}
}
}
@@ -306,9 +317,9 @@ function sendValue(fromHtml, element, page, id) {
}
}

function getHtmlTextAndHttp(fromHtml, b, r, t, c, e, w, records) {
function getHtmlTextAndApi(fromHtml, b, r, t, c, e, w, records) {
let record;
let http = '';
let api = '';
let lineA = '';
let lineB = '';
let lineC = '';
@@ -317,7 +328,7 @@ function getHtmlTextAndHttp(fromHtml, b, r, t, c, e, w, records) {
for (let i = 0; i < records.length; i++) {
record = records[i];
if (record.api !== '') {
http += record.api + '\r\n';
api += record.api + '\r\n';
}
text += record.Id + '\t' +
record.RiskReductionMinusOpportunityEnablement + '\t' +
@@ -392,7 +403,7 @@ function getHtmlTextAndHttp(fromHtml, b, r, t, c, e, w, records) {
console.log(text);
html += lineA + lineB + lineC;
}
return { html, text, http };
return { html, text, http: api };
}

function updateSite(c, w) {
@@ -430,7 +441,7 @@ function setDocument(fromHtml, b, r, t, c, e, w, dataA, dataB, workItems) {
console.log(dataA.length);
if (dataA.length > 0)
console.log(dataA[0]);
let result = getHtmlTextAndHttp(fromHtml, b, r, t, c, e, w, records);
let result = getHtmlTextAndApi(fromHtml, b, r, t, c, e, w, records);
if (fromHtml) {
document.getElementById('HeaderGrid').innerHTML = result.html.replaceAll('>null<', '> <');
if (_windowLocationHRef.indexOf('=WSJF') === -1) {
@@ -659,7 +670,7 @@ if (typeof document == 'undefined') {
if (dataA.length > 0)
console.log(dataA[0]);
const records = getRecords(b, r, t, c, e, w, dataA, dataB, workItems);
let result = getHtmlTextAndHttp(fromHtml, b, r, t, c, e, w, records);
let result = getHtmlTextAndApi(fromHtml, b, r, t, c, e, w, records);
if (result == undefined) { }
initIndex(fromHtml, username, machineId, windowLocationHRef, workItems, b, r, t, c, e, w, apiUrl, signalRUrl);
})
@@ -41,24 +41,6 @@ stages:
displayName: "Nuget Clear"
enabled: false

- task: CopyFiles@2
displayName: 'Copy GhostPCL Files to: D:\EAF-Mesa-Integration\copy'
inputs:
Contents: "*"
SourceFolder: '\\mesfs.infineon.com\EC_EAFRepository\Staging\DeploymentStorage\GhostPCL'
TargetFolder: 'D:\EAF-Mesa-Integration\copy\GhostPCL'
OverWrite: true
enabled: true

- task: CopyFiles@2
displayName: 'Copy LincPDFC Files to: D:\EAF-Mesa-Integration\copy'
inputs:
Contents: "*"
SourceFolder: '\\mesfs.infineon.com\EC_EAFRepository\Staging\DeploymentStorage\LincPDFC'
TargetFolder: 'D:\EAF-Mesa-Integration\copy\LincPDFC'
OverWrite: true
enabled: true

- script: |
"C:\program files\dotnet\dotnet.exe" user-secrets init
"C:\program files\dotnet\dotnet.exe" user-secrets set "BuildNumber" "$(Build.BuildId)"
@@ -202,24 +184,6 @@ stages:
displayName: "Nuget Clear"
enabled: false

- task: CopyFiles@2
displayName: 'Copy GhostPCL Files to: D:\EAF-Mesa-Integration\copy'
inputs:
Contents: "*"
SourceFolder: '\\mestsa003.infineon.com\EC_EAFRepository\Staging\DeploymentStorage\GhostPCL'
TargetFolder: 'D:\EAF-Mesa-Integration\copy\GhostPCL'
OverWrite: true
enabled: true

- task: CopyFiles@2
displayName: 'Copy LincPDFC Files to: D:\EAF-Mesa-Integration\copy'
inputs:
Contents: "*"
SourceFolder: '\\mestsa003.infineon.com\EC_EAFRepository\Staging\DeploymentStorage\LincPDFC'
TargetFolder: 'D:\EAF-Mesa-Integration\copy\LincPDFC'
OverWrite: true
enabled: true

- script: |
"C:\program files\dotnet\dotnet.exe" user-secrets init
"C:\program files\dotnet\dotnet.exe" user-secrets set "BuildNumber" "$(Build.BuildId)"
@@ -383,17 +383,24 @@ public class FileRead : Properties.IFileRead
else
{
string[] files;
string logisticsSequence = _Logistics.Sequence.ToString();
string[] directories = Directory.GetDirectories(jobIdDirectory, $"*{logisticsSequence}*", SearchOption.TopDirectoryOnly);
foreach (string directory in directories)
string[] directories;
string logisticsSequence;
for (int i = 0; i < 10; i++)
{
files = Directory.GetFiles(directory, "*", SearchOption.TopDirectoryOnly);
if (files.Length == 0)
continue;
results.Add(directory);
logisticsSequence = (_Logistics.Sequence + -i).ToString();
directories = Directory.GetDirectories(jobIdDirectory, $"*{logisticsSequence}*", SearchOption.TopDirectoryOnly);
foreach (string directory in directories)
{
files = Directory.GetFiles(directory, "*", SearchOption.TopDirectoryOnly);
if (files.Length == 0)
continue;
results.Add(directory);
}
if (results.Count == 1)
break;
}
}
if ((results is null) || results.Count != 1)
if (results.Count != 1)
throw new Exception("Didn't find directory by logistics sequence");
return results.ToArray();
}
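The widened search now retries over up to ten decremented logistics-sequence values until exactly one matching, non-empty directory is found. A minimal standalone sketch of that retry loop, assuming a jobIdDirectory path and a numeric sequence value (the names here are illustrative placeholders, not the production members):

using System;
using System.Collections.Generic;
using System.IO;

internal static class LogisticsSequenceSearchSketch
{
    // Try the exact sequence first, then up to nine earlier values,
    // keeping only directories that actually contain files.
    internal static string[] FindByLogisticsSequence(string jobIdDirectory, long sequence)
    {
        List<string> results = new();
        for (int i = 0; i < 10; i++)
        {
            string logisticsSequence = (sequence - i).ToString();
            string[] directories = Directory.GetDirectories(jobIdDirectory, $"*{logisticsSequence}*", SearchOption.TopDirectoryOnly);
            foreach (string directory in directories)
            {
                string[] files = Directory.GetFiles(directory, "*", SearchOption.TopDirectoryOnly);
                if (files.Length == 0)
                    continue;
                results.Add(directory);
            }
            if (results.Count == 1)
                break;
        }
        if (results.Count != 1)
            throw new Exception("Didn't find directory by logistics sequence");
        return results.ToArray();
    }
}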
@@ -2,12 +2,14 @@ using Adaptation.Shared.Methods;
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Diagnostics;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Text;
using System.Text.Json;
using System.Text.Json.Serialization;
using System.Text.RegularExpressions;

namespace Adaptation.Shared;

@@ -227,19 +229,19 @@ internal class ProcessDataStandardFormat
return results.AsReadOnly();
}

internal static ProcessDataStandardFormat GetProcessDataStandardFormat(string reportFullPath, ProcessDataStandardFormatMapping pdsfMapping)
internal static ProcessDataStandardFormat GetProcessDataStandardFormat(string reportFullPath, ProcessDataStandardFormatMapping processDataStandardFormatMapping)
{
ProcessDataStandardFormat result;
const int columnsLine = 6;
FileInfo fileInfo = new(reportFullPath);
ProcessDataStandardFormat processDataStandardFormat = GetProcessDataStandardFormat(fileInfo.LastWriteTime, columnsLine, fileInfo.FullName, lines: null);
JsonElement[]? jsonElements = pdsfMapping.OldColumnNames.Count != pdsfMapping.ColumnIndices.Count ? null : GetFullArray(processDataStandardFormat);
JsonElement[]? jsonElements = processDataStandardFormatMapping.OldColumnNames.Count != processDataStandardFormatMapping.ColumnIndices.Count ? null : GetFullArray(processDataStandardFormat);
JsonProperty[]? jsonProperties = jsonElements is null || jsonElements.Length == 0 ? null : jsonElements[0].EnumerateObject().ToArray();
if (jsonElements is null || jsonProperties is null || jsonProperties.Length != pdsfMapping.NewColumnNames.Count)
if (jsonElements is null || jsonProperties is null || jsonProperties.Length != processDataStandardFormatMapping.NewColumnNames.Count)
result = processDataStandardFormat;
else
{
result = GetProcessDataStandardFormat(pdsfMapping, jsonElements, processDataStandardFormat);
result = GetProcessDataStandardFormat(processDataStandardFormatMapping, jsonElements, processDataStandardFormat);
if (result.Sequence is null || result.Columns.Count == 0 || result.Body.Count == 0 || result.Logistics.Count == 0)
result = processDataStandardFormat;
}
@@ -335,12 +337,14 @@ internal class ProcessDataStandardFormat
int column;
string value;
JsonProperty jsonProperty;
List<string> debug = new();
List<string> values = new();
List<string> results = new();
JsonProperty[] jsonProperties;
List<string> unknownColumns = new();
for (int i = 0; i < jsonElements.Length; i++)
{
debug.Clear();
values.Clear();
if (jsonElements[i].ValueKind != JsonValueKind.Object)
{
@@ -354,16 +358,22 @@ internal class ProcessDataStandardFormat
{
column = processDataStandardFormatMapping.ColumnIndices[c];
if (column == -1)
{
value = processDataStandardFormatMapping.OldColumnNames[c];
debug.Add($"<Item C=-01 Name=\"{value}\" DataType=\"8\" XmlType=\"1\" XPath=\"//records/record/{value}\" />");
}
else
{
jsonProperty = jsonProperties[column];
value = jsonProperty.Value.ToString();
debug.Add($"<Item C={column + 2:000} Name=\"{processDataStandardFormatMapping.OldColumnNames[c]}\" DataType=\"8\" XmlType=\"1\" XPath=\"//records/record/{jsonProperty.Name}\" />");
}
values.Add(value);
}
results.Add(string.Join("\t", values));
}
if (Debugger.IsAttached)
File.WriteAllText("../../.txt", string.Join(Environment.NewLine, debug.OrderBy(l => l)));
result = new(body: new(results),
columns: processDataStandardFormatMapping.OldColumnNames,
footer: processDataStandardFormat.Footer,
@@ -378,7 +388,6 @@ internal class ProcessDataStandardFormat
{
if (processDataStandardFormat.InputPDSF is null)
throw new NullReferenceException(nameof(processDataStandardFormat.InputPDSF));
#pragma warning disable CA1845, IDE0057
string result;
string line;
string value;
@@ -518,6 +527,8 @@ internal class ProcessDataStandardFormat
internal static void Write(string path, ProcessDataStandardFormat processDataStandardFormat, List<Metrology.WS.Results>? wsResults)
{
List<string> results = new();
if (processDataStandardFormat.InputPDSF is null)
throw new NullReferenceException(nameof(processDataStandardFormat.InputPDSF));
if (processDataStandardFormat.Sequence is null)
throw new NullReferenceException(nameof(processDataStandardFormat.Sequence));
string endOffset = "E#######T";
@@ -555,25 +566,25 @@ internal class ProcessDataStandardFormat
}
}
results.Add("END_HEADER");
if (processDataStandardFormat.InputPDSF is not null)
{
results.Add(string.Empty);
List<char> hyphens = new();
results.AddRange(processDataStandardFormat.InputPDSF.Header.Select(l => l.Replace('\t', '|')));
results.Add(string.Empty);
results.Add($"|{string.Join("|", processDataStandardFormat.InputPDSF.Columns)}|");
for (int i = 0; i < processDataStandardFormat.InputPDSF.Columns.Count; i++)
hyphens.Add('-');
results.Add($"|{string.Join("|", hyphens)}|");
results.AddRange(processDataStandardFormat.InputPDSF.Body.Select(l => l.Replace('\t', '|')));
results.Add(string.Empty);
results.AddRange(processDataStandardFormat.InputPDSF.Footer.Select(l => l.Replace('\t', '|')));
results.Add(string.Empty);
results.Add("EOF");
results.Add(string.Empty);
string json = GetJson(processDataStandardFormat);
results.Add(json);
}
results.Add(string.Empty);
List<char> hyphens = new();
results.AddRange(processDataStandardFormat.InputPDSF.Header.Select(l => $"|{l.Replace('\t', '|')}|"));
results.Add(string.Empty);
results.Add($"|{string.Join("|", processDataStandardFormat.InputPDSF.Columns)}|");
for (int i = 0; i < processDataStandardFormat.InputPDSF.Columns.Count; i++)
hyphens.Add('-');
results.Add($"|{string.Join("|", hyphens)}|");
results.AddRange(processDataStandardFormat.InputPDSF.Body.Select(l => $"|{l.Replace('\t', '|')}|"));
results.Add(string.Empty);
results.AddRange(processDataStandardFormat.InputPDSF.Footer.Select(l => $"|{l.Replace('\t', '|')}|"));
results.Add(string.Empty);
string xml = GetXml(processDataStandardFormat);
results.Add(xml);
results.Add(string.Empty);
results.Add("EOF");
results.Add(string.Empty);
string json = GetJson(processDataStandardFormat);
results.Add(json);
File.WriteAllText(path, string.Join(Environment.NewLine, results));
}
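The reworked Write renders the InputPDSF section as a pipe-delimited table (each header, body, and footer line wrapped in '|'), then appends the XML from GetXml ahead of the EOF marker and the JSON block. A small sketch of just the pipe-table construction, using hypothetical sample data rather than a real ProcessDataStandardFormat instance:

using System;
using System.Collections.Generic;
using System.Linq;

internal static class PipeTableSketch
{
    // Wraps tab-delimited lines in '|' and inserts a hyphen separator row,
    // mirroring how Write lays out the InputPDSF section.
    internal static string Build(IReadOnlyList<string> columns, IReadOnlyList<string> body)
    {
        List<string> results = new();
        List<char> hyphens = new();
        results.Add($"|{string.Join("|", columns)}|");
        for (int i = 0; i < columns.Count; i++)
            hyphens.Add('-');
        results.Add($"|{string.Join("|", hyphens)}|");
        results.AddRange(body.Select(l => $"|{l.Replace('\t', '|')}|"));
        return string.Join(Environment.NewLine, results);
    }
}

// Example: Build(new[] { "Id", "Value" }, new[] { "1\tA", "2\tB" }) yields
// |Id|Value|
// |-|-|
// |1|A|
// |2|B|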
@@ -855,6 +866,96 @@ internal class ProcessDataStandardFormat
return result;
}

internal static string GetXml(ProcessDataStandardFormat processDataStandardFormat)
{
string result;
string tag;
string value;
string[] segments;
List<string> values;
Dictionary<string, List<string>> results = new();
ReadOnlyCollection<string> body = processDataStandardFormat.InputPDSF is null ?
processDataStandardFormat.Body : processDataStandardFormat.InputPDSF.Body;
ReadOnlyCollection<string> columns = processDataStandardFormat.InputPDSF is null ?
processDataStandardFormat.Columns : processDataStandardFormat.InputPDSF.Columns;
List<string> lines = new() { "<?xml version=\"1.0\" encoding=\"UTF-8\"?>", "<records>" };
for (int i = 0; i < body.Count; i++)
{
segments = body[i].Trim().Split('\t');
if (segments.Length != columns.Count)
break;
for (int c = 0; c < segments.Length; c++)
{
value = segments[c].Replace("&", "&amp;")
.Replace("<", "&lt;")
.Replace(">", "&gt;")
.Replace("\"", "&quot;")
.Replace("'", "&apos;");
tag = Regex.Replace(columns[c].Trim('"'), @"[^a-zA-Z0-9]", "_").Split('\r')[0].Split('\n')[0];
if (i == 0)
{
if (results.ContainsKey(tag))
continue;
results.Add(tag, new List<string>());
}
results[tag].Add(value);
}
}
foreach (KeyValuePair<string, List<string>> keyValuePair in results)
{
if (body.Count < 3)
break;
if (keyValuePair.Value.Count != body.Count)
continue;
values = keyValuePair.Value.Distinct().ToList();
if (values.Count == 2 && (string.IsNullOrEmpty(values[0]) || string.IsNullOrEmpty(values[1])))
{
for (int i = 0; i < body.Count; i++)
keyValuePair.Value[i] = string.Empty;
foreach (string v in values)
{
if (string.IsNullOrEmpty(v))
continue;
keyValuePair.Value[0] = v;
}
}
}
for (int i = 0; i < body.Count; i++)
{
lines.Add("    <record>");
foreach (KeyValuePair<string, List<string>> keyValuePair in results)
{
if (keyValuePair.Value.Count != body.Count)
continue;
lines.Add(string.Concat("        <", keyValuePair.Key, '>', keyValuePair.Value[i], "</", keyValuePair.Key, '>'));
}
lines.Add("    </record>");
}
lines.Add("</records>");
result = string.Join(Environment.NewLine, lines);
return result;
}

internal static string GetXml(string reportFullPath, string[]? lines = null)
{
string result;
bool foundXml = false;
List<string> results = new();
lines ??= File.ReadAllLines(reportFullPath);
foreach (string line in lines)
{
if (line.StartsWith("<?xml"))
foundXml = true;
if (!foundXml)
continue;
if (line.StartsWith("EOF"))
break;
results.Add(line);
}
result = string.Join(Environment.NewLine, results);
return result;
}

}

[JsonSourceGenerationOptions(WriteIndented = true)]
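The per-record XML emitted by the new GetXml escapes reserved characters and sanitizes column names into element tags. A condensed sketch of that transformation for a single tab-delimited row (the sample columns and values are made up for illustration):

using System;
using System.Collections.Generic;
using System.Text.RegularExpressions;

internal static class RecordXmlSketch
{
    // Converts one tab-delimited body row into a <record> element,
    // escaping XML special characters and normalizing column names to valid tags.
    internal static string ToRecord(IReadOnlyList<string> columns, string bodyLine)
    {
        List<string> lines = new() { "    <record>" };
        string[] segments = bodyLine.Trim().Split('\t');
        for (int c = 0; c < segments.Length && c < columns.Count; c++)
        {
            string value = segments[c].Replace("&", "&amp;")
                                      .Replace("<", "&lt;")
                                      .Replace(">", "&gt;");
            string tag = Regex.Replace(columns[c].Trim('"'), @"[^a-zA-Z0-9]", "_");
            lines.Add(string.Concat("        <", tag, '>', value, "</", tag, '>'));
        }
        lines.Add("    </record>");
        return string.Join(Environment.NewLine, lines);
    }
}

// Example: ToRecord(new[] { "Lot ID", "Count" }, "ABC<1>\t42") yields a record
// whose tags are Lot_ID and Count and whose first value is ABC&lt;1&gt;.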
@@ -1,33 +1,34 @@
using System.Collections.ObjectModel;
using System.Linq;

namespace Adaptation.Shared;

public class ProcessDataStandardFormatMapping
{

public ReadOnlyCollection<string> BackfillColumns { get; private set; }
public ReadOnlyCollection<int> ColumnIndices { get; private set; }
public ReadOnlyCollection<string> IgnoreColumns { get; private set; }
public ReadOnlyCollection<string> IndexOnlyColumns { get; private set; }
public ReadOnlyDictionary<string, string> KeyValuePairs { get; private set; }
public ReadOnlyCollection<string> NewColumnNames { get; private set; }
public ReadOnlyCollection<string> OldColumnNames { get; private set; }

public ProcessDataStandardFormatMapping(ReadOnlyCollection<string> backfillColumns,
ReadOnlyCollection<int> columnIndices,
ReadOnlyCollection<string> ignoreColumns,
ReadOnlyCollection<string> indexOnlyColumns,
ReadOnlyDictionary<string, string> keyValuePairs,
public ProcessDataStandardFormatMapping(ReadOnlyCollection<int> columnIndices,
ReadOnlyCollection<string> newColumnNames,
ReadOnlyCollection<string> oldColumnNames)
{
BackfillColumns = backfillColumns;
ColumnIndices = columnIndices;
IgnoreColumns = ignoreColumns;
IndexOnlyColumns = indexOnlyColumns;
KeyValuePairs = keyValuePairs;
NewColumnNames = newColumnNames;
OldColumnNames = oldColumnNames;
}

internal static ProcessDataStandardFormatMapping Get(string processDataStandardFormatMappingOldColumnNames, string processDataStandardFormatMappingNewColumnNames, string processDataStandardFormatMappingColumnIndices)
{
ProcessDataStandardFormatMapping result;
ReadOnlyCollection<string> newColumnNames = new(processDataStandardFormatMappingNewColumnNames.Split(','));
ReadOnlyCollection<string> oldColumnNames = new(processDataStandardFormatMappingOldColumnNames.Split(','));
ReadOnlyCollection<int> columnIndices = new(processDataStandardFormatMappingColumnIndices.Split(',').Select(int.Parse).ToArray());
result = new(columnIndices: columnIndices,
newColumnNames: newColumnNames,
oldColumnNames: oldColumnNames);
return result;
}

}
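With the constructor trimmed to the three surviving collections, Get builds a mapping straight from the comma-separated configuration strings. A hedged usage sketch with made-up column names (the real names come from cell-instance configuration, not shown here):

ProcessDataStandardFormatMapping mapping = ProcessDataStandardFormatMapping.Get(
    processDataStandardFormatMappingOldColumnNames: "Time,A_LOGISTICS,B_LOGISTICS",
    processDataStandardFormatMappingNewColumnNames: "Time,A_LOGISTICS,B_LOGISTICS",
    processDataStandardFormatMappingColumnIndices: "0,1,2");
// mapping.OldColumnNames, mapping.NewColumnNames, and mapping.ColumnIndices now
// each hold three entries; a -1 index marks a column with no source match.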
@@ -106,8 +106,6 @@
<Compile Include="Adaptation\Eaf\Management\ConfigurationData\Semiconductor\CellInstances\SecsConnectionConfiguration.cs" />
<Compile Include="Adaptation\FileHandlers\ADO\FileRead.cs" />
<Compile Include="Adaptation\FileHandlers\ADO\ProcessData.cs" />
<Compile Include="Adaptation\FileHandlers\APC\FileRead.cs" />
<Compile Include="Adaptation\FileHandlers\Archive\FileRead.cs" />
<Compile Include="Adaptation\FileHandlers\CellInstanceConnectionName.cs" />
<Compile Include="Adaptation\FileHandlers\ConvertExcelToJson\FIBacklogMesa.cs" />
<Compile Include="Adaptation\FileHandlers\ConvertExcelToJson\FileRead.cs" />
@@ -115,8 +113,6 @@
<Compile Include="Adaptation\FileHandlers\CopyToPaths\FileRead.cs" />
<Compile Include="Adaptation\FileHandlers\DownloadExcelFile\FileRead.cs" />
<Compile Include="Adaptation\FileHandlers\DownloadWorkItems\FileRead.cs" />
<Compile Include="Adaptation\FileHandlers\Dummy\FileRead.cs" />
<Compile Include="Adaptation\FileHandlers\IQSSi\FileRead.cs" />
<Compile Include="Adaptation\FileHandlers\json\FileRead.cs" />
<Compile Include="Adaptation\FileHandlers\json\ProcessData.cs" />
<Compile Include="Adaptation\FileHandlers\json\WIQL\Attribute.cs" />
@@ -144,10 +140,6 @@
<Compile Include="Adaptation\FileHandlers\Kanban\ProcessData.cs" />
<Compile Include="Adaptation\FileHandlers\Markdown\FileRead.cs" />
<Compile Include="Adaptation\FileHandlers\Markdown\ProcessData.cs" />
<Compile Include="Adaptation\FileHandlers\MoveMatchingFiles\FileRead.cs" />
<Compile Include="Adaptation\FileHandlers\OpenInsight\FileRead.cs" />
<Compile Include="Adaptation\FileHandlers\OpenInsightMetrologyViewer\FileRead.cs" />
<Compile Include="Adaptation\FileHandlers\OpenInsightMetrologyViewerAttachments\FileRead.cs" />
<Compile Include="Adaptation\FileHandlers\Priority\Aggregation.cs" />
<Compile Include="Adaptation\FileHandlers\Priority\FileRead.cs" />
<Compile Include="Adaptation\FileHandlers\Priority\Notification.cs" />
@@ -155,8 +147,6 @@
<Compile Include="Adaptation\FileHandlers\Priority\Startup.cs" />
<Compile Include="Adaptation\FileHandlers\Priority\WeightedShortestJobFirstModule.cs" />
<Compile Include="Adaptation\FileHandlers\Priority\WorkItem.cs" />
<Compile Include="Adaptation\FileHandlers\Processed\FileRead.cs" />
<Compile Include="Adaptation\FileHandlers\SPaCe\FileRead.cs" />
<Compile Include="Adaptation\FileHandlers\Violation\FileRead.cs" />
<Compile Include="Adaptation\FileHandlers\Violation\ProcessData.cs" />
<Compile Include="Adaptation\Ifx\Eaf\Common\Configuration\ConnectionSetting.cs" />