Match TFS Changeset 303348

2022-02-01 18:02:51 -07:00
parent d9057d2071
commit 0cfcb46ee7
118 changed files with 17934 additions and 13 deletions


@ -0,0 +1,35 @@
using Adaptation.Eaf.Management.ConfigurationData.CellAutomation;
using Adaptation.Ifx.Eaf.EquipmentConnector.File.Configuration;
using Adaptation.Shared.Methods;
using System;
using System.Collections.Generic;
namespace Adaptation.FileHandlers
{
public class CellInstanceConnectionName
{
internal static IFileRead Get(ISMTP smtp, Dictionary<string, string> fileParameter, string cellInstanceName, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList<ModelObjectParameterDefinition> modelObjectParameters, string equipmentDictionaryName, Dictionary<string, List<long>> dummyRuns, bool useCyclicalForDescription, bool isEAFHosted)
{
IFileRead result;
int levelIsArchive = 7;
int levelIsXToArchive = 6;
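// Connection names that start with the cell instance name are duplicator connections and route to the MET08RESIMAPCDE handler; all others dispatch by connection name below.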
bool isDuplicator = cellInstanceConnectionName.StartsWith(cellInstanceName);
if (isDuplicator)
result = new MET08RESIMAPCDE.FileRead(smtp, fileParameter, cellInstanceName, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, useCyclicalForDescription, isEAFHosted, levelIsXToArchive, levelIsArchive);
else
{
result = cellInstanceConnectionName switch
{
nameof(DownloadRsMFile) => new DownloadRsMFile.FileRead(smtp, fileParameter, cellInstanceName, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, useCyclicalForDescription, isEAFHosted, levelIsXToArchive, levelIsArchive),
nameof(RsM) => new RsM.FileRead(smtp, fileParameter, cellInstanceName, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, useCyclicalForDescription, isEAFHosted, levelIsXToArchive, levelIsArchive),
_ => throw new Exception(),
};
}
return result;
}
}
}

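For orientation, a minimal dispatch sketch follows; the variable values and the cell instance name are illustrative and not taken from this changeset. Passing "DownloadRsMFile" or "RsM" as the connection name selects the matching handler, while a connection name prefixed with the cell instance name selects the duplicator handler.

// Hypothetical call site; smtp, fileParameter, fileConnectorConfiguration, etc. are assumed to be supplied by the hosting EAF runtime.
IFileRead fileRead = CellInstanceConnectionName.Get(
    smtp, fileParameter, "MET08RESIMAPCDE", "RsM", fileConnectorConfiguration,
    equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters,
    equipmentDictionaryName, dummyRuns, useCyclicalForDescription: true, isEAFHosted: true);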

@ -0,0 +1,283 @@
using Adaptation.Eaf.Management.ConfigurationData.CellAutomation;
using Adaptation.Ifx.Eaf.EquipmentConnector.File.Configuration;
using Adaptation.Shared;
using Adaptation.Shared.Duplicator;
using Adaptation.Shared.Methods;
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Net;
using System.Text.Json;
using System.Threading;
namespace Adaptation.FileHandlers.DownloadRsMFile
{
public class FileRead : Shared.FileRead, IFileRead
{
private readonly Timer _Timer;
private readonly WebClient _WebClient;
private readonly string _StaticFileServer;
public FileRead(ISMTP smtp, Dictionary<string, string> fileParameter, string cellInstanceName, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList<ModelObjectParameterDefinition> modelObjectParameters, string equipmentDictionaryName, Dictionary<string, List<long>> dummyRuns, bool useCyclicalForDescription, bool isEAFHosted, int hyphenXToArchive, int hyphenIsArchive) :
base(new Description(), false, smtp, fileParameter, cellInstanceName, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, useCyclicalForDescription, isEAFHosted, hyphenXToArchive, hyphenIsArchive)
{
_MinFileLength = 10;
_NullData = string.Empty;
_Logistics = new Logistics(this);
if (_FileParameter is null)
throw new Exception(cellInstanceConnectionName);
if (_ModelObjectParameterDefinitions is null)
throw new Exception(cellInstanceConnectionName);
if (_IsDuplicator)
throw new Exception(cellInstanceConnectionName);
_WebClient = new WebClient();
_StaticFileServer = GetPropertyValue(cellInstanceConnectionName, modelObjectParameters, string.Concat("CellInstance.", cellInstanceName, ".StaticFileServer"));
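// Outside the debugger, and unless pre-processing mode is Process, poll on the configured scanning interval; otherwise run the callback once immediately.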
if (!Debugger.IsAttached && fileConnectorConfiguration.PreProcessingMode != FileConnectorConfiguration.PreProcessingModeEnum.Process)
_Timer = new Timer(Callback, null, (int)(fileConnectorConfiguration.FileScanningIntervalInSeconds * 1000), Timeout.Infinite);
else
{
_Timer = new Timer(Callback, null, Timeout.Infinite, Timeout.Infinite);
Callback(null);
}
}
void IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, Exception exception)
{
Move(this, extractResults, exception);
}
void IFileRead.WaitForThread()
{
WaitForThread(thread: null, threadExceptions: null);
}
string IFileRead.GetEventDescription()
{
string result = _Description.GetEventDescription();
return result;
}
List<string> IFileRead.GetHeaderNames()
{
List<string> results = _Description.GetHeaderNames();
return results;
}
string[] IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, string to, string from, string resolvedFileLocation, Exception exception)
{
string[] results = Move(extractResults, to, from, resolvedFileLocation, exception);
return results;
}
JsonProperty[] IFileRead.GetDefault()
{
JsonProperty[] results = _Description.GetDefault(this, _Logistics);
return results;
}
Dictionary<string, string> IFileRead.GetDisplayNamesJsonElement()
{
Dictionary<string, string> results = _Description.GetDisplayNamesJsonElement(this);
return results;
}
List<IDescription> IFileRead.GetDescriptions(IFileRead fileRead, List<Test> tests, IProcessData processData)
{
List<IDescription> results = _Description.GetDescriptions(fileRead, _Logistics, tests, processData);
return results;
}
Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.GetExtractResult(string reportFullPath, string eventName)
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
if (string.IsNullOrEmpty(eventName))
throw new Exception();
_ReportFullPath = reportFullPath;
DateTime dateTime = DateTime.Now;
results = GetExtractResult(reportFullPath, dateTime);
if (results.Item3 is null)
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(results.Item1, new Test[] { }, JsonSerializer.Deserialize<JsonElement[]>("[]"), results.Item4);
if (results.Item3.Length > 0 && _IsEAFHosted)
WritePDSF(this, results.Item3);
UpdateLastTicksDuration(DateTime.Now.Ticks - dateTime.Ticks);
return results;
}
Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.ReExtract()
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
List<string> headerNames = _Description.GetHeaderNames();
Dictionary<string, string> keyValuePairs = _Description.GetDisplayNamesJsonElement(this);
results = ReExtract(this, headerNames, keyValuePairs);
return results;
}
void IFileRead.CheckTests(Test[] tests, bool extra)
{
throw new Exception(string.Concat("Not ", nameof(_IsDuplicator)));
}
void IFileRead.MoveArchive()
{
throw new Exception(string.Concat("Not ", nameof(_IsDuplicator)));
}
void IFileRead.Callback(object state)
{
Callback(state);
}
private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
{
if (reportFullPath is null)
{ }
if (dateTime == DateTime.MinValue)
{ }
throw new Exception(string.Concat("See ", nameof(Callback)));
}
private void DownloadRsMFile()
{
if (_WebClient is null)
throw new Exception();
if (string.IsNullOrEmpty(_StaticFileServer))
throw new Exception();
string logText;
string runJson;
string rootJson;
string[] logLines;
string runFileName;
string[] logSegments;
string targetFileName;
string runFullFileName;
FileInfo targetFileInfo;
FileInfo alternateFileInfo;
List<string> runFullFileNameSegments;
string dateTimeFormat = "yy/MM/dd HH:mm:ss";
NginxFileSystem[] runNginxFileSystemCollection;
NginxFileSystem[] rootNginxFileSystemCollection;
DateTime fileAgeThresholdDateTime = DateTime.Now;
string nginxFormat = "ddd, dd MMM yyyy HH:mm:ss zzz";
List<Tuple<DateTime, FileInfo, FileInfo, string>> possibleDownload = new();
string[] segments = _FileConnectorConfiguration.FileAgeThreshold.Split(':');
JsonSerializerOptions propertyNameCaseInsensitiveJsonSerializerOptions = new() { PropertyNameCaseInsensitive = true };
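// FileAgeThreshold is colon-separated; its segments map to days, hours, minutes, and seconds and are subtracted from now to form the age cutoff.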
for (int i = 0; i < segments.Length; i++)
{
fileAgeThresholdDateTime = i switch
{
0 => fileAgeThresholdDateTime.AddDays(double.Parse(segments[i]) * -1),
1 => fileAgeThresholdDateTime.AddHours(double.Parse(segments[i]) * -1),
2 => fileAgeThresholdDateTime.AddMinutes(double.Parse(segments[i]) * -1),
3 => fileAgeThresholdDateTime.AddSeconds(double.Parse(segments[i]) * -1),
_ => throw new Exception(),
};
}
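// Fetch the static file server's JSON directory listing at the root, then download each entry named in the source-file filters and scan its lines for runs newer than the cutoff.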
rootJson = _WebClient.DownloadString(string.Concat("http://", _StaticFileServer));
rootNginxFileSystemCollection = JsonSerializer.Deserialize<NginxFileSystem[]>(rootJson, propertyNameCaseInsensitiveJsonSerializerOptions);
foreach (NginxFileSystem rootNginxFileSystem in rootNginxFileSystemCollection)
{
if (!(from l in _FileConnectorConfiguration.SourceFileFilters where rootNginxFileSystem.Name == l select false).Any())
continue;
logText = _WebClient.DownloadString(string.Concat("http://", _StaticFileServer, '/', rootNginxFileSystem.Name));
logLines = logText.Split(new string[] { Environment.NewLine }, StringSplitOptions.None);
foreach (string logLine in logLines)
{
if (string.IsNullOrEmpty(logLine))
continue;
logSegments = logLine.Split('<');
if (logSegments.Length < 1 || !DateTime.TryParseExact(logSegments[0].Trim(), dateTimeFormat, CultureInfo.InvariantCulture, DateTimeStyles.None, out DateTime tvsDateTime))
continue;
if (tvsDateTime < fileAgeThresholdDateTime)
continue;
if (logSegments.Length < 2)
continue;
runFullFileName = logSegments[1].Split('>')[0];
if (!(from l in _FileConnectorConfiguration.SourceFileFilters where runFullFileName.EndsWith(l) select false).Any())
continue;
runFullFileNameSegments = runFullFileName.Split('\\').ToList();
runFileName = runFullFileNameSegments[runFullFileNameSegments.Count - 1];
runFullFileNameSegments.RemoveAt(runFullFileNameSegments.Count - 1);
runJson = _WebClient.DownloadString(string.Concat("http://", _StaticFileServer, '/', string.Join("/", runFullFileNameSegments)));
runFullFileNameSegments.Add(runFileName);
runNginxFileSystemCollection = JsonSerializer.Deserialize<NginxFileSystem[]>(runJson, propertyNameCaseInsensitiveJsonSerializerOptions);
foreach (NginxFileSystem matchNginxFileSystem in runNginxFileSystemCollection)
{
if (matchNginxFileSystem.Name != runFileName)
continue;
if (!(from l in _FileConnectorConfiguration.SourceFileFilters where matchNginxFileSystem.Name.EndsWith(l) select false).Any())
continue;
if (!DateTime.TryParseExact(matchNginxFileSystem.MTime.Replace("GMT", "+00:00"), nginxFormat, CultureInfo.InvariantCulture, DateTimeStyles.None, out DateTime matchNginxFileSystemDateTime))
continue;
if (matchNginxFileSystemDateTime < fileAgeThresholdDateTime)
continue;
targetFileInfo = new FileInfo(Path.Combine(_FileConnectorConfiguration.TargetFileLocation, runFullFileName));
if (!Directory.Exists(targetFileInfo.Directory.FullName))
Directory.CreateDirectory(targetFileInfo.Directory.FullName);
if (targetFileInfo.Exists && targetFileInfo.LastWriteTime == matchNginxFileSystemDateTime)
continue;
alternateFileInfo = new(Path.Combine(_FileConnectorConfiguration.AlternateTargetFolder, matchNginxFileSystem.Name));
targetFileName = string.Concat("http://", _StaticFileServer, '/', string.Join("/", runFullFileNameSegments));
possibleDownload.Add(new(matchNginxFileSystemDateTime, targetFileInfo, alternateFileInfo, targetFileName));
break;
}
if (possibleDownload.Any())
break;
}
if (possibleDownload.Any())
break;
}
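// At most one candidate is queued per pass; it is downloaded, stamped with the server's modification time, and mirrored to the alternate target folder.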
if (possibleDownload.Any())
{
possibleDownload = (from l in possibleDownload orderby l.Item1 select l).ToList();
alternateFileInfo = possibleDownload[0].Item3;
targetFileName = possibleDownload[0].Item4;
targetFileInfo = possibleDownload[0].Item2;
DateTime matchNginxFileSystemDateTime = possibleDownload[0].Item1;
if (alternateFileInfo.Exists)
File.Delete(alternateFileInfo.FullName);
if (targetFileInfo.Exists)
File.Delete(targetFileInfo.FullName);
_WebClient.DownloadFile(targetFileName, targetFileInfo.FullName);
targetFileInfo.LastWriteTime = matchNginxFileSystemDateTime;
File.Copy(targetFileInfo.FullName, alternateFileInfo.FullName);
}
}
private void Callback(object state)
{
try
{
if (_IsEAFHosted)
DownloadRsMFile();
}
catch (Exception exception)
{
string subject = string.Concat("Exception:", _CellInstanceConnectionName);
string body = string.Concat(exception.Message, Environment.NewLine, Environment.NewLine, exception.StackTrace);
try
{ _SMTP.SendHighPriorityEmailMessage(subject, body); }
catch (Exception) { }
}
try
{
TimeSpan timeSpan = new(DateTime.Now.AddSeconds(_FileConnectorConfiguration.FileScanningIntervalInSeconds.Value).Ticks - DateTime.Now.Ticks);
_Timer.Change((long)timeSpan.TotalMilliseconds, Timeout.Infinite);
}
catch (Exception exception)
{
string subject = string.Concat("Exception:", _CellInstanceConnectionName);
string body = string.Concat(exception.Message, Environment.NewLine, Environment.NewLine, exception.StackTrace);
try
{ _SMTP.SendHighPriorityEmailMessage(subject, body); }
catch (Exception) { }
}
}
}
}


@ -0,0 +1,12 @@
namespace Adaptation.FileHandlers.DownloadRsMFile
{
internal class NginxFileSystem
{
public string Name { get; set; }
public string Type { get; set; }
public string MTime { get; set; }
public float Size { get; set; }
}
}

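For reference, a minimal sketch of how DownloadRsMFile.FileRead consumes this shape; the listing entry below is an assumed sample, not data from the changeset. Case-insensitive options are needed because the server emits lower-case property names, and MTime is later parsed with the "ddd, dd MMM yyyy HH:mm:ss zzz" format after replacing "GMT" with "+00:00".

// Hypothetical sample data; only the property shape mirrors the real listing.
using Adaptation.FileHandlers.DownloadRsMFile;
using System.Text.Json;

const string sampleJson =
    "[{\"name\":\"72-123456-7890.RsM\",\"type\":\"file\",\"mtime\":\"Tue, 01 Feb 2022 18:02:51 GMT\",\"size\":1234}]";
JsonSerializerOptions options = new() { PropertyNameCaseInsensitive = true };
NginxFileSystem[] listing = JsonSerializer.Deserialize<NginxFileSystem[]>(sampleJson, options);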

@ -0,0 +1,552 @@
using Adaptation.Eaf.Management.ConfigurationData.CellAutomation;
using Adaptation.Ifx.Eaf.EquipmentConnector.File.Configuration;
using Adaptation.Shared;
using Adaptation.Shared.Duplicator;
using Adaptation.Shared.Methods;
using Adaptation.Shared.Metrology;
using Infineon.Monitoring.MonA;
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Globalization;
using System.IO;
using System.IO.Compression;
using System.Linq;
using System.Text.Json;
using System.Text.Json.Serialization;
using System.Threading;
namespace Adaptation.FileHandlers.MET08RESIMAPCDE
{
public class FileRead : Shared.FileRead, IFileRead
{
private readonly Timer _Timer;
private int _LastDummyRunIndex;
private readonly string _IqsFile;
private readonly int _HyphenIsDummy;
private readonly int _HyphenIsNaEDA;
private readonly string _MemoryPath;
private readonly int _HyphenIsXToAPC;
private readonly int _HyphenIsXToIQSSi;
private readonly int _HyphenIsXToSPaCe;
private readonly int _HyphenIsXToOpenInsight;
private readonly string _EventNameFileReadDaily;
private readonly string _OpenInsightFilePattern;
private readonly string _OpenInsightMetrologyViewerAPI;
private readonly Dictionary<string, string> _CellNames;
private readonly int _HyphenIsXToOpenInsightMetrologyViewer;
private readonly int _HyphenIsXToOpenInsightMetrologyViewerAttachments;
public FileRead(ISMTP smtp, Dictionary<string, string> fileParameter, string cellInstanceName, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList<ModelObjectParameterDefinition> modelObjectParameters, string equipmentDictionaryName, Dictionary<string, List<long>> dummyRuns, bool useCyclicalForDescription, bool isEAFHosted, int hyphenXToArchive, int hyphenIsArchive) :
base(new Description(), false, smtp, fileParameter, cellInstanceName, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, useCyclicalForDescription, isEAFHosted, hyphenXToArchive, hyphenIsArchive)
{
_MinFileLength = 10;
_NullData = string.Empty;
_Logistics = new Logistics(this);
if (_FileParameter is null)
throw new Exception(cellInstanceConnectionName);
if (_ModelObjectParameterDefinitions is null)
throw new Exception(cellInstanceConnectionName);
if (!_IsDuplicator)
throw new Exception(cellInstanceConnectionName);
if (hyphenIsArchive != (int)Hyphen.IsArchive)
throw new Exception(cellInstanceConnectionName);
if (hyphenXToArchive != (int)Hyphen.IsXToArchive)
throw new Exception(cellInstanceConnectionName);
_LastDummyRunIndex = -1;
if (_HyphenIsNaEDA == 0)
{ }
if (_HyphenIsXToSPaCe == 0)
{ }
if (_HyphenIsXToIQSSi == 0)
{ }
_CellNames = new Dictionary<string, string>();
_HyphenIsNaEDA = (int)Hyphen.IsNaEDA;
_HyphenIsDummy = (int)Hyphen.IsDummy;
_HyphenIsXToAPC = (int)Hyphen.IsXToAPC;
_HyphenIsXToIQSSi = (int)Hyphen.IsXToIQSSi;
_HyphenIsXToSPaCe = (int)Hyphen.IsXToSPaCe;
_HyphenIsXToOpenInsight = (int)Hyphen.IsXToOpenInsight;
_EventNameFileReadDaily = string.Concat(_EventNameFileRead, "Daily");
_IqsFile = GetPropertyValue(cellInstanceConnectionName, modelObjectParameters, "IQS.File");
_MemoryPath = GetPropertyValue(cellInstanceConnectionName, modelObjectParameters, "Path.Memory");
_HyphenIsXToOpenInsightMetrologyViewer = (int)Hyphen.IsXToOpenInsightMetrologyViewer;
_HyphenIsXToOpenInsightMetrologyViewerAttachments = (int)Hyphen.IsXToOpenInsightMetrologyViewerAttachments;
_OpenInsightFilePattern = GetPropertyValue(cellInstanceConnectionName, modelObjectParameters, "OpenInsight.FilePattern");
_OpenInsightMetrologyViewerAPI = GetPropertyValue(cellInstanceConnectionName, modelObjectParameters, "OpenInsight.MetrologyViewerAPI");
ModelObjectParameterDefinition[] cellInstanceCollection = GetProperties(cellInstanceConnectionName, modelObjectParameters, "CellInstance.", ".Path");
foreach (ModelObjectParameterDefinition modelObjectParameterDefinition in cellInstanceCollection)
_CellNames.Add(modelObjectParameterDefinition.Name.Split('.')[1], modelObjectParameterDefinition.Value);
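// Only the dummy connection (Hyphen.IsDummy) starts the staging timer.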
if (_Hyphens == _HyphenIsDummy)
{
if (Debugger.IsAttached || fileConnectorConfiguration.PreProcessingMode == FileConnectorConfiguration.PreProcessingModeEnum.Process)
{
_Timer = new Timer(Callback, null, Timeout.Infinite, Timeout.Infinite);
Callback(null);
}
else
{
int milliSeconds;
milliSeconds = (int)((fileConnectorConfiguration.FileScanningIntervalInSeconds * 1000) / 2);
_Timer = new Timer(Callback, null, milliSeconds, Timeout.Infinite);
milliSeconds += 2000;
}
}
}
void IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, Exception exception)
{
Move(this, extractResults, exception);
}
void IFileRead.WaitForThread()
{
WaitForThread(thread: null, threadExceptions: null);
}
string IFileRead.GetEventDescription()
{
string result = _Description.GetEventDescription();
return result;
}
List<string> IFileRead.GetHeaderNames()
{
List<string> results = _Description.GetHeaderNames();
return results;
}
string[] IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, string to, string from, string resolvedFileLocation, Exception exception)
{
string[] results = Move(extractResults, to, from, resolvedFileLocation, exception);
return results;
}
JsonProperty[] IFileRead.GetDefault()
{
JsonProperty[] results = _Description.GetDefault(this, _Logistics);
return results;
}
Dictionary<string, string> IFileRead.GetDisplayNamesJsonElement()
{
Dictionary<string, string> results = _Description.GetDisplayNamesJsonElement(this);
return results;
}
List<IDescription> IFileRead.GetDescriptions(IFileRead fileRead, List<Test> tests, IProcessData processData)
{
List<IDescription> results = _Description.GetDescriptions(fileRead, _Logistics, tests, processData);
return results;
}
Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.GetExtractResult(string reportFullPath, string eventName)
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
if (string.IsNullOrEmpty(eventName))
throw new Exception();
_ReportFullPath = reportFullPath;
DateTime dateTime = DateTime.Now;
results = GetExtractResult(reportFullPath, dateTime);
if (results.Item3 is null)
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(results.Item1, new Test[] { }, JsonSerializer.Deserialize<JsonElement[]>("[]"), results.Item4);
if (results.Item3.Length > 0 && _IsEAFHosted)
WritePDSF(this, results.Item3);
UpdateLastTicksDuration(DateTime.Now.Ticks - dateTime.Ticks);
return results;
}
Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.ReExtract()
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
List<string> headerNames = _Description.GetHeaderNames();
Dictionary<string, string> keyValuePairs = _Description.GetDisplayNamesJsonElement(this);
results = ReExtract(this, headerNames, keyValuePairs);
return results;
}
void IFileRead.CheckTests(Test[] tests, bool extra)
{
if (!(_Description is Description))
throw new Exception();
}
void IFileRead.Callback(object state)
{
Callback(state);
}
void IFileRead.MoveArchive()
{
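// Relocate the run directory, located by its logistics sequence, into a year/week archive tree beside the target file location.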
string logisticsSequence = _Logistics.Sequence.ToString();
string weekOfYear = _Calendar.GetWeekOfYear(_Logistics.DateTimeFromSequence, CalendarWeekRule.FirstDay, DayOfWeek.Sunday).ToString("00");
string weekDirectory = string.Concat(_Logistics.DateTimeFromSequence.ToString("yyyy"), "_Week_", weekOfYear, @"\", _Logistics.DateTimeFromSequence.ToString("yyyy-MM-dd"));
string jobIdDirectory = string.Concat(_FileConnectorConfiguration.TargetFileLocation, @"\", _Logistics.JobID);
if (!Directory.Exists(jobIdDirectory))
Directory.CreateDirectory(jobIdDirectory);
//string destinationArchiveDirectory = string.Concat(jobIdDirectory, @"\!Archive\", weekDirectory);
string destinationArchiveDirectory = string.Concat(Path.GetDirectoryName(_FileConnectorConfiguration.TargetFileLocation), @"\Archive\", _Logistics.JobID, @"\", weekDirectory);
if (!Directory.Exists(destinationArchiveDirectory))
Directory.CreateDirectory(destinationArchiveDirectory);
string[] matchDirectories = new string[] { GetDirectoriesRecursively(jobIdDirectory, logisticsSequence).FirstOrDefault() };
if ((matchDirectories is null) || matchDirectories.Length != 1)
throw new Exception("Didn't find directory by logistics sequence");
string sourceDirectory = Path.GetDirectoryName(matchDirectories[0]);
destinationArchiveDirectory = string.Concat(destinationArchiveDirectory, @"\", Path.GetFileName(sourceDirectory));
Directory.Move(sourceDirectory, destinationArchiveDirectory);
}
protected List<pcl.Description> GetDescriptions(JsonElement[] jsonElements)
{
List<pcl.Description> results = new();
pcl.Description description;
JsonSerializerOptions jsonSerializerOptions = new() { NumberHandling = JsonNumberHandling.AllowReadingFromString | JsonNumberHandling.WriteAsString };
foreach (JsonElement jsonElement in jsonElements)
{
if (jsonElement.ValueKind != JsonValueKind.Object)
throw new Exception();
description = JsonSerializer.Deserialize<pcl.Description>(jsonElement.ToString(), jsonSerializerOptions);
results.Add(description);
}
return results;
}
private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
string duplicateDirectory;
Tuple<string, string[], string[]> pdsf = ProcessDataStandardFormat.GetLogisticsColumnsAndBody(reportFullPath);
_Logistics = new Logistics(reportFullPath, pdsf.Item1);
SetFileParameterLotIDToLogisticsMID();
JsonElement[] jsonElements = ProcessDataStandardFormat.GetArray(pdsf);
List<pcl.Description> descriptions = GetDescriptions(jsonElements);
Tuple<Test[], Dictionary<Test, List<Shared.Properties.IDescription>>> tuple = GetTuple(this, from l in descriptions select (Shared.Properties.IDescription)l, extra: false);
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(pdsf.Item1, tuple.Item1, jsonElements, new List<FileInfo>());
bool isNotUsedInsightMetrologyViewerAttachments = (!(_FileConnectorConfiguration.FileScanningIntervalInSeconds > 0) && _Hyphens == _HyphenIsXToOpenInsightMetrologyViewerAttachments);
bool isDummyRun = (_DummyRuns.Any() && _DummyRuns.ContainsKey(_Logistics.JobID) && _DummyRuns[_Logistics.JobID].Any() && (from l in _DummyRuns[_Logistics.JobID] where l == _Logistics.Sequence select 1).Any());
if (isDummyRun)
{
try
{ File.SetLastWriteTime(reportFullPath, dateTime); }
catch (Exception) { }
}
string[] segments = Path.GetFileNameWithoutExtension(reportFullPath).Split('_');
if (_Hyphens != _HyphenIsXToOpenInsight)
duplicateDirectory = string.Concat(_FileConnectorConfiguration.TargetFileLocation, @"\", segments[0]);
else
duplicateDirectory = string.Concat(Path.GetDirectoryName(Path.GetDirectoryName(_FileConnectorConfiguration.TargetFileLocation)), @"\Data");
if (segments.Length > 2)
duplicateDirectory = string.Concat(duplicateDirectory, @"-", segments[2]);
if (!Directory.Exists(duplicateDirectory))
Directory.CreateDirectory(duplicateDirectory);
if ((isDummyRun || isNotUsedInsightMetrologyViewerAttachments || _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0) && _Hyphens != _HyphenIsXToArchive && _Hyphens != _HyphenIsArchive)
{
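// Fan out per Hyphen: copy straight through for APC, post to the OpenInsight MetrologyViewer API, or build IQS/OpenInsight report lines, then hand off via Shared0413.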
if (!Directory.Exists(duplicateDirectory))
Directory.CreateDirectory(duplicateDirectory);
string successDirectory;
if (_Hyphens != _HyphenIsXToAPC)
successDirectory = string.Empty;
else
{
successDirectory = string.Concat(Path.GetDirectoryName(_FileConnectorConfiguration.TargetFileLocation), @"\ViewerPath");
if (!Directory.Exists(successDirectory))
Directory.CreateDirectory(successDirectory);
}
List<Tuple<Shared.Properties.IScopeInfo, string>> tuples = new();
string duplicateFile = string.Concat(duplicateDirectory, @"\", Path.GetFileName(reportFullPath));
string weekOfYear = _Calendar.GetWeekOfYear(_Logistics.DateTimeFromSequence, CalendarWeekRule.FirstDay, DayOfWeek.Sunday).ToString("00");
string weekDirectory = string.Concat(_Logistics.DateTimeFromSequence.ToString("yyyy"), "_Week_", weekOfYear, @"\", _Logistics.DateTimeFromSequence.ToString("yyyy-MM-dd"));
string logisticsSequenceMemoryDirectory = string.Concat(_MemoryPath, @"\", _EquipmentType, @"\Source\", weekDirectory, @"\", _Logistics.Sequence);
if (!Directory.Exists(logisticsSequenceMemoryDirectory))
Directory.CreateDirectory(logisticsSequenceMemoryDirectory);
if (_Hyphens == _HyphenIsXToAPC)
{
if (!isDummyRun && _IsEAFHosted)
File.Copy(reportFullPath, duplicateFile, overwrite: true);
}
else
{
if (_Hyphens == _HyphenIsXToOpenInsightMetrologyViewer)
{
WSRequest wsRequest = new(this, _Logistics, descriptions);
if (!isDummyRun && _IsEAFHosted)
{
Tuple<string, WS.Results> wsResults = WS.SendData(_OpenInsightMetrologyViewerAPI, wsRequest);
if (!wsResults.Item2.Success)
throw new Exception(wsResults.ToString());
_Log.Debug(wsResults.Item2.HeaderID);
File.WriteAllText(string.Concat(logisticsSequenceMemoryDirectory, @"\", nameof(WS.Results), ".json"), wsResults.Item1);
}
}
else
{
Test test;
string lines;
Shared.Properties.IScopeInfo scopeInfo;
foreach (KeyValuePair<Test, List<Shared.Properties.IDescription>> keyValuePair in tuple.Item2)
{
test = keyValuePair.Key;
//scopeInfo = new ScopeInfo(test);
if (_Hyphens != _HyphenIsXToOpenInsight)
scopeInfo = new ScopeInfo(test, _IqsFile);
else
scopeInfo = new ScopeInfo(test, _OpenInsightFilePattern);
//lines = ProcessDataStandardFormat.GetLines(this, scopeInfo, names, values, dateFormat: "M/d/yyyy hh:mm:ss tt", timeFormat: string.Empty, pairedColumns: ExtractResultPairedColumns);
lines = ProcessData.GetLines(this, _Logistics, descriptions);
tuples.Add(new Tuple<Shared.Properties.IScopeInfo, string>(scopeInfo, lines));
}
}
if (_Hyphens == _HyphenIsXToOpenInsightMetrologyViewerAttachments)
{
string[] matchDirectories = Shared1567(reportFullPath, tuples);
if (!isDummyRun && _IsEAFHosted && !isNotUsedInsightMetrologyViewerAttachments)
ProcessData.PostOpenInsightMetrologyViewerAttachments(this, dateTime, logisticsSequenceMemoryDirectory, descriptions, matchDirectories[0]);
}
}
if (_Hyphens != _HyphenIsXToOpenInsightMetrologyViewer && _Hyphens != _HyphenIsXToOpenInsightMetrologyViewerAttachments)
Shared0413(dateTime, isDummyRun, successDirectory, duplicateDirectory, tuples, duplicateFile);
}
if (_Hyphens == _HyphenIsXToOpenInsightMetrologyViewerAttachments)
{
string destinationDirectory;
//string destinationDirectory = WriteScopeInfo(_ProgressPath, _Logistics, dateTime, duplicateDirectory, tuples);
FileInfo fileInfo = new(reportFullPath);
string logisticsSequence = _Logistics.Sequence.ToString();
if (fileInfo.Exists && fileInfo.LastWriteTime < fileInfo.CreationTime)
File.SetLastWriteTime(reportFullPath, fileInfo.CreationTime);
string jobIdDirectory = string.Concat(Path.GetDirectoryName(Path.GetDirectoryName(_FileConnectorConfiguration.TargetFileLocation)), @"\", _Logistics.JobID);
if (!Directory.Exists(jobIdDirectory))
Directory.CreateDirectory(jobIdDirectory);
string[] matchDirectories;
if (!_IsEAFHosted)
matchDirectories = new string[] { Path.GetDirectoryName(Path.GetDirectoryName(reportFullPath)) };
else
matchDirectories = Directory.GetDirectories(jobIdDirectory, string.Concat(_Logistics.MID, '*', logisticsSequence, '*'), SearchOption.TopDirectoryOnly);
if ((matchDirectories is null) || matchDirectories.Length != 1)
throw new Exception("Didn't find directory by logistics sequence");
destinationDirectory = matchDirectories[0];
if (isDummyRun)
Shared0607(reportFullPath, duplicateDirectory, logisticsSequence, destinationDirectory);
else
{
WSRequest wsRequest = new(this, _Logistics, descriptions);
JsonSerializerOptions jsonSerializerOptions = new() { WriteIndented = true };
string json = JsonSerializer.Serialize(wsRequest, wsRequest.GetType(), jsonSerializerOptions);
if (_IsEAFHosted)
Shared1277(reportFullPath, destinationDirectory, logisticsSequence, jobIdDirectory, json);
else
{
string jsonFileName = Path.ChangeExtension(reportFullPath, ".json");
string historicalText = File.ReadAllText(jsonFileName);
if (json != historicalText)
throw new Exception("File doesn't match historical!");
}
}
}
return results;
}
private void CallbackIsDummy(string traceDummyFile, List<Tuple<string, string, string, string, int>> tuples, bool fileConnectorConfigurationIncludeSubDirectories, bool includeSubDirectoriesExtra)
{
int fileCount;
string[] files;
string monARessource;
string checkDirectory;
string sourceArchiveFile;
string inProcessDirectory;
const string site = "sjc";
string stateName = string.Concat("Dummy_", _EventName);
const string monInURL = "http://moninhttp.sjc.infineon.com/input/text";
MonIn monIn = MonIn.GetInstance(monInURL);
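// For each staged dummy run: report status to MonA, extract the archived run, backdate the extracted files to the run's sequence ticks, and move them into the watched directory.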
foreach (Tuple<string, string, string, string, int> item in tuples)
{
monARessource = item.Item1;
sourceArchiveFile = item.Item2;
inProcessDirectory = item.Item3;
checkDirectory = item.Item4;
fileCount = item.Item5;
try
{
if (fileCount > 0 || string.IsNullOrEmpty(checkDirectory))
{
File.AppendAllLines(traceDummyFile, new string[] { site, monARessource, stateName, State.Warning.ToString() });
monIn.SendStatus(site, monARessource, stateName, State.Warning);
for (int i = 1; i < 12; i++)
Thread.Sleep(500);
}
else if (inProcessDirectory == checkDirectory)
continue;
if (!_IsEAFHosted)
continue;
if (!File.Exists(sourceArchiveFile))
continue;
if (!long.TryParse(Path.GetFileNameWithoutExtension(sourceArchiveFile).Replace("x", string.Empty), out long sequence))
continue;
ZipFile.ExtractToDirectory(sourceArchiveFile, inProcessDirectory);
if (fileConnectorConfigurationIncludeSubDirectories && includeSubDirectoriesExtra)
{
if (_EventName == _EventNameFileRead)
checkDirectory = string.Concat(checkDirectory, @"\", sequence);
else if (_EventName == _EventNameFileReadDaily)
checkDirectory = string.Concat(checkDirectory, @"\Source\", sequence);
else
throw new Exception();
}
if (fileConnectorConfigurationIncludeSubDirectories)
files = Directory.GetFiles(inProcessDirectory, "*", SearchOption.AllDirectories);
else
files = Directory.GetFiles(inProcessDirectory, "*", SearchOption.TopDirectoryOnly);
if (files.Length > 250)
throw new Exception("Safety net!");
foreach (string file in files)
File.SetLastWriteTime(file, new DateTime(sequence));
if (!fileConnectorConfigurationIncludeSubDirectories)
{
foreach (string file in files)
File.Move(file, string.Concat(checkDirectory, @"\", Path.GetFileName(file)));
}
else
{
string[] directories = Directory.GetDirectories(inProcessDirectory, "*", SearchOption.AllDirectories);
foreach (string directory in directories)
Directory.CreateDirectory(string.Concat(checkDirectory, directory.Substring(inProcessDirectory.Length)));
foreach (string file in files)
File.Move(file, string.Concat(checkDirectory, file.Substring(inProcessDirectory.Length)));
}
File.AppendAllLines(traceDummyFile, new string[] { site, monARessource, stateName, State.Ok.ToString() });
monIn.SendStatus(site, monARessource, stateName, State.Ok);
}
catch (Exception exception)
{
string subject = string.Concat("Exception:", _CellInstanceConnectionName);
string body = string.Concat(exception.Message, Environment.NewLine, Environment.NewLine, exception.StackTrace);
try
{ _SMTP.SendHighPriorityEmailMessage(subject, body); }
catch (Exception) { }
File.AppendAllLines(traceDummyFile, new string[] { site, monARessource, stateName, State.Critical.ToString(), exception.Message, exception.StackTrace });
monIn.SendStatus(site, monARessource, stateName, State.Critical);
}
}
}
private void Callback(object state)
{
if (_Hyphens != _HyphenIsDummy)
throw new Exception();
try
{
DateTime dateTime = DateTime.Now;
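// Stage dummy runs only during weekday working hours.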
bool check = (dateTime.Hour > 7 && dateTime.Hour < 18 && dateTime.DayOfWeek != DayOfWeek.Sunday && dateTime.DayOfWeek != DayOfWeek.Saturday);
if (check)
{
int fileCount;
string[] files;
string monARessource;
string checkDirectory;
string sourceArchiveFile;
string sourceFileLocation;
string inProcessDirectory;
string weekOfYear = _Calendar.GetWeekOfYear(dateTime, CalendarWeekRule.FirstDay, DayOfWeek.Sunday).ToString("00");
string traceDummyDirectory = string.Concat(Path.GetPathRoot(_TracePath), @"\TracesDummy\", _CellInstanceName, @"\Source\", dateTime.ToString("yyyy"), "___Week_", weekOfYear);
if (!Directory.Exists(traceDummyDirectory))
Directory.CreateDirectory(traceDummyDirectory);
string traceDummyFile = string.Concat(traceDummyDirectory, @"\", dateTime.Ticks, " - ", _CellInstanceName, ".txt");
File.AppendAllText(traceDummyFile, string.Empty);
List<Tuple<string, string, string, string, int>> tuples = new();
string progressDirectory = Path.GetFullPath(string.Concat(_FileConnectorConfiguration.SourceFileLocation, @"\_ Progress"));
if (progressDirectory != _ProgressPath || !Directory.Exists(progressDirectory))
throw new Exception("Invalid progress path");
foreach (KeyValuePair<string, string> keyValuePair in _CellNames)
{
monARessource = keyValuePair.Key;
if (!keyValuePair.Value.Contains(@"\"))
continue;
foreach (string sourceFileFilter in _FileConnectorConfiguration.SourceFileFilter.Split('|'))
{
if (sourceFileFilter.ToLower().StartsWith(keyValuePair.Value.Replace(@"\", string.Empty)))
sourceFileLocation = Path.GetFullPath(_FileConnectorConfiguration.SourceFileLocation);
else if (_FileConnectorConfiguration.SourceFileLocation.ToLower().EndsWith(keyValuePair.Value))
sourceFileLocation = Path.GetFullPath(_FileConnectorConfiguration.SourceFileLocation);
else
sourceFileLocation = Path.GetFullPath(string.Concat(_FileConnectorConfiguration.SourceFileLocation, @"\", keyValuePair.Value));
sourceArchiveFile = Path.GetFullPath(string.Concat(sourceFileLocation, @"\", sourceFileFilter));
if (!File.Exists(sourceArchiveFile))
continue;
if (!_DummyRuns.ContainsKey(monARessource))
_DummyRuns.Add(monARessource, new List<long>());
tuples.Add(new Tuple<string, string, string, string, int>(monARessource, sourceFileFilter, sourceFileLocation, sourceArchiveFile, 0));
}
}
File.AppendAllLines(traceDummyFile, from l in tuples select l.Item4);
if (tuples.Any())
{
_LastDummyRunIndex += 1;
if (_LastDummyRunIndex >= tuples.Count)
_LastDummyRunIndex = 0;
monARessource = tuples[_LastDummyRunIndex].Item1;
string sourceFileFilter = tuples[_LastDummyRunIndex].Item2;
sourceFileLocation = tuples[_LastDummyRunIndex].Item3;
sourceArchiveFile = tuples[_LastDummyRunIndex].Item4;
//fileCount = tuples[_LastDummyRunIndex].Item5;
tuples.Clear();
if (long.TryParse(Path.GetFileNameWithoutExtension(sourceArchiveFile).Replace("x", string.Empty), out long sequence))
{
if (!_DummyRuns[monARessource].Contains(sequence))
_DummyRuns[monARessource].Add(sequence);
inProcessDirectory = string.Concat(progressDirectory, @"\Dummy_in process\", sequence);
checkDirectory = inProcessDirectory;
if (!Directory.Exists(checkDirectory))
Directory.CreateDirectory(checkDirectory);
files = Directory.GetFiles(checkDirectory, "*", SearchOption.AllDirectories);
fileCount = files.Length;
if (files.Any())
{
if (files.Length > 250)
throw new Exception("Safety net!");
try
{
foreach (string file in files)
File.Delete(file);
}
catch (Exception) { }
}
tuples.Add(new Tuple<string, string, string, string, int>(monARessource, sourceArchiveFile, inProcessDirectory, checkDirectory, fileCount));
checkDirectory = sourceFileLocation;
files = Directory.GetFiles(checkDirectory, string.Concat("*", sequence, "*"), SearchOption.TopDirectoryOnly);
fileCount = files.Length;
tuples.Add(new Tuple<string, string, string, string, int>(monARessource, sourceArchiveFile, inProcessDirectory, checkDirectory, fileCount));
}
}
if (tuples.Any())
//CallbackIsDummy(traceDummyFile, tuples, FileConnectorConfiguration.IncludeSubDirectories.Value, includeSubDirectoriesExtra: false);
CallbackIsDummy(traceDummyFile, tuples, fileConnectorConfigurationIncludeSubDirectories: true, includeSubDirectoriesExtra: true);
}
}
catch (Exception exception)
{
string subject = string.Concat("Exception:", _CellInstanceConnectionName);
string body = string.Concat(exception.Message, Environment.NewLine, Environment.NewLine, exception.StackTrace);
try
{ _SMTP.SendHighPriorityEmailMessage(subject, body); }
catch (Exception) { }
}
try
{
TimeSpan timeSpan = new(DateTime.Now.AddSeconds(_FileConnectorConfiguration.FileScanningIntervalInSeconds.Value).Ticks - DateTime.Now.Ticks);
_Timer.Change((long)timeSpan.TotalMilliseconds, Timeout.Infinite);
}
catch (Exception exception)
{
string subject = string.Concat("Exception:", _CellInstanceConnectionName);
string body = string.Concat(exception.Message, Environment.NewLine, Environment.NewLine, exception.StackTrace);
try
{ _SMTP.SendHighPriorityEmailMessage(subject, body); }
catch (Exception) { }
}
}
}
}


@ -0,0 +1,18 @@
namespace Adaptation.FileHandlers.MET08RESIMAPCDE
{
public enum Hyphen
{
IsXToOpenInsightMetrologyViewer, //MetrologyWS.SendData(file, string.Concat("http://", serverName, "/api/inbound/CDE"));
IsXToIQSSi, //NA <d7p1:FileScanningIntervalInSeconds>-361</d7p1:FileScanningIntervalInSeconds>
IsXToOpenInsight, //NA <d7p1:FileScanningIntervalInSeconds>-363</d7p1:FileScanningIntervalInSeconds>
IsXToOpenInsightMetrologyViewerAttachments, //Site-None <d7p1:FileScanningIntervalInSeconds>-362</d7p1:FileScanningIntervalInSeconds>
IsXToAPC,
IsXToSPaCe,
IsXToArchive,
IsArchive,
IsDummy,
IsNaEDA
}
}


@ -0,0 +1,56 @@
using Adaptation.Shared;
using Adaptation.Shared.Properties;
using System;
using System.Collections.Generic;
using System.Text;
namespace Adaptation.FileHandlers.MET08RESIMAPCDE
{
public class ProcessData
{
internal static List<Tuple<int, Enum, string>> HyphenTuples => new()
{
new Tuple<int, Enum, string>(0, Hyphen.IsNaEDA, @"\EC_EDA\Staging\Traces\~\Source"),
new Tuple<int, Enum, string>(15, Hyphen.IsXToOpenInsightMetrologyViewer, @"\EC_EAFLog\TracesMES\~\Source"),
new Tuple<int, Enum, string>(-36, Hyphen.IsXToIQSSi, @"\EC_SPC_Si\Traces\~\PollPath"),
new Tuple<int, Enum, string>(-36, Hyphen.IsXToOpenInsight, @"\\messa01ec.ec.local\APPS\Metrology\~\Source"),
new Tuple<int, Enum, string>(-36, Hyphen.IsXToOpenInsightMetrologyViewerAttachments, @"\EC_Characterization_Si\In Process\~\Source"),
new Tuple<int, Enum, string>(360, Hyphen.IsXToAPC, @"\EC_APC\Staging\Traces\~\PollPath"),
new Tuple<int, Enum, string>(-36, Hyphen.IsXToSPaCe, @"\EC_SPC_Si\Traces\~\Source"),
new Tuple<int, Enum, string>(180, Hyphen.IsXToArchive, @"\EC_EAFLog\TracesArchive\~\Source"),
new Tuple<int, Enum, string>(36, Hyphen.IsArchive, @"\EC_Characterization_Si\Processed")
//new Tuple<int, Enum, string>("IsDummy"
};
internal static string GetLines(IFileRead fileRead, Logistics logistics, List<pcl.Description> descriptions)
{
StringBuilder result = new();
if (fileRead is null)
{ }
if (logistics is null)
{ }
if (descriptions is null)
{ }
return result.ToString();
}
internal static void PostOpenInsightMetrologyViewerAttachments(IFileRead fileRead, DateTime dateTime, string logisticsSequenceMemoryDirectory, List<pcl.Description> descriptions, string matchDirectory)
{
if (fileRead is null)
{ }
if (dateTime == DateTime.MinValue)
{ }
if (logisticsSequenceMemoryDirectory is null)
{ }
if (descriptions is null)
{ }
if (matchDirectory is null)
{ }
//Not used
}
}
}


@ -0,0 +1,107 @@
using Adaptation.Shared;
using Adaptation.Shared.Properties;
using System;
using System.Collections.Generic;
using System.Linq;
namespace Adaptation.FileHandlers.MET08RESIMAPCDE
{
public class WSRequest
{
public bool SentToMetrology { get; set; }
public bool SentToSPC { get; set; }
//
public string AutoOptimizeGain { get; set; }
public string AutoProbeHeightSet { get; set; }
public string Avg { get; set; }
public string CellName { get; set; }
public string DLRatio { get; set; }
public string DataReject { get; set; }
public string Date { get; set; }
public string Engineer { get; set; }
public string EquipId { get; set; }
public string FileName { get; set; }
public string FilePath { get; set; }
public string Id { get; set; }
public string Layer { get; set; }
public string LotId { get; set; }
public string Op { get; set; }
public string PSN { get; set; }
public string RDS { get; set; }
public string Reactor { get; set; }
public string Recipe { get; set; }
public string ResistivitySpec { get; set; }
public string Run { get; set; }
public string SemiRadial { get; set; }
public string StDev { get; set; }
public string Temp { get; set; }
public string UniqueId { get; set; }
public string Zone { get; set; }
public List<pcl.Detail> Details { get; protected set; }
[Obsolete("For json")] public WSRequest() { }
internal WSRequest(IFileRead fileRead, Logistics logistics, List<pcl.Description> descriptions)
{
Id = "-1";
if (fileRead is null)
{ }
CellName = logistics.MesEntity;
Details = new List<pcl.Detail>();
if (descriptions[0] is not pcl.Description x)
throw new Exception();
//Header
{
AutoOptimizeGain = x.AutoOptimizeGain;
AutoProbeHeightSet = x.AutoProbeHeightSet;
Avg = x.Avg;
DLRatio = x.DLRatio;
DataReject = x.DataReject;
Date = x.Date;
Op = x.Employee;
Engineer = x.Engineer;
EquipId = x.EquipId;
FileName = x.FileName;
Layer = x.Layer;
LotId = x.Lot;
PSN = x.PSN;
RDS = x.RDS;
Reactor = x.Reactor;
Recipe = x.Recipe;
ResistivitySpec = x.ResistivitySpec;
Run = x.Run;
SemiRadial = x.SemiRadial;
StDev = x.StdDev;
Temp = x.Temp;
UniqueId = x.UniqueId;
Zone = x.Zone;
}
pcl.Detail detail;
foreach (pcl.Description description in descriptions)
{
detail = new pcl.Detail
{
HeaderUniqueId = description.HeaderUniqueId,
Merit = description.Merit,
Pt = description.Pt,
R = description.R,
Rs = description.Rs,
T = description.T,
UniqueId = description.UniqueId
};
Details.Add(detail);
}
if (Date is null)
Date = logistics.DateTimeFromSequence.ToString();
if (UniqueId is null && Details.Any())
UniqueId = Details[0].HeaderUniqueId;
string onlyWSRequest = string.Empty;
FilePath = onlyWSRequest;
}
}
}


@ -0,0 +1,316 @@
using Adaptation.Shared;
using Adaptation.Shared.Methods;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text.Json;
namespace Adaptation.FileHandlers.RsM
{
public class Description : IDescription
{
public int Test { get; set; }
public int Count { get; set; }
public int Index { get; set; }
//
public string EventName { get; set; }
public string NullData { get; set; }
public string JobID { get; set; }
public string Sequence { get; set; }
public string MesEntity { get; set; }
public string ReportFullPath { get; set; }
public string ProcessJobID { get; set; }
public string MID { get; set; }
//
public string Date { get; set; }
public string Employee { get; set; }
public string Lot { get; set; }
public string PSN { get; set; }
public string Reactor { get; set; }
public string Recipe { get; set; }
//
public string AutoOptimizeGain { get; set; }
public string AutoProbeHeightSet { get; set; }
public string Avg { get; set; }
public string DataReject { get; set; }
public string DLRatio { get; set; }
public string Merit { get; set; }
public string Pt { get; set; }
public string R { get; set; }
public string ResistivitySpec { get; set; }
public string Rs { get; set; }
public string SemiRadial { get; set; }
public string StdDev { get; set; }
public string T { get; set; }
public string Temp { get; set; }
//
public string Engineer { get; set; }
public string EquipId { get; set; }
public string FileName { get; set; }
public string HeaderUniqueId { get; set; }
public string Id { get; set; }
public string Layer { get; set; }
public string RDS { get; set; }
public string Run { get; set; }
public string UniqueId { get; set; }
public string Zone { get; set; }
string IDescription.GetEventDescription()
{
return "File Has been read and parsed";
}
List<string> IDescription.GetNames(IFileRead fileRead, Logistics logistics)
{
List<string> results = new();
IDescription description = GetDefault(fileRead, logistics);
string json = JsonSerializer.Serialize(description, description.GetType());
object @object = JsonSerializer.Deserialize<object>(json);
if (@object is not JsonElement jsonElement)
throw new Exception();
foreach (JsonProperty jsonProperty in jsonElement.EnumerateObject())
results.Add(jsonProperty.Name);
return results;
}
List<string> IDescription.GetDetailNames()
{
List<string> results = new()
{
nameof(AutoOptimizeGain),
nameof(AutoProbeHeightSet),
nameof(Avg),
nameof(DataReject),
nameof(DLRatio),
nameof(Merit),
nameof(Pt),
nameof(R),
nameof(ResistivitySpec),
nameof(Rs),
nameof(SemiRadial),
nameof(StdDev),
nameof(T),
nameof(Temp)
};
return results;
}
List<string> IDescription.GetHeaderNames()
{
List<string> results = new()
{
nameof(Date),
nameof(Employee),
nameof(Lot),
nameof(PSN),
nameof(Reactor),
nameof(Recipe)
};
return results;
}
IDescription IDescription.GetDisplayNames()
{
Description result = GetDisplayNames();
return result;
}
List<string> IDescription.GetParameterNames()
{
List<string> results = new()
{
nameof(Engineer),
nameof(EquipId),
nameof(FileName),
nameof(HeaderUniqueId),
nameof(Id),
nameof(Layer),
nameof(RDS),
nameof(Run),
nameof(UniqueId),
nameof(Zone)
};
return results;
}
JsonProperty[] IDescription.GetDefault(IFileRead fileRead, Logistics logistics)
{
JsonProperty[] results;
IDescription description = GetDefault(fileRead, logistics);
string json = JsonSerializer.Serialize(description, description.GetType());
object @object = JsonSerializer.Deserialize<object>(json);
results = ((JsonElement)@object).EnumerateObject().ToArray();
return results;
}
List<string> IDescription.GetPairedParameterNames()
{
List<string> results = new();
return results;
}
List<string> IDescription.GetIgnoreParameterNames(Test test)
{
List<string> results = new();
return results;
}
IDescription IDescription.GetDefaultDescription(IFileRead fileRead, Logistics logistics)
{
Description result = GetDefault(fileRead, logistics);
return result;
}
Dictionary<string, string> IDescription.GetDisplayNamesJsonElement(IFileRead fileRead)
{
Dictionary<string, string> results = new();
IDescription description = GetDisplayNames();
string json = JsonSerializer.Serialize(description, description.GetType());
JsonElement jsonElement = JsonSerializer.Deserialize<JsonElement>(json);
foreach (JsonProperty jsonProperty in jsonElement.EnumerateObject())
{
if (!results.ContainsKey(jsonProperty.Name))
results.Add(jsonProperty.Name, string.Empty);
if (jsonProperty.Value is JsonElement jsonPropertyValue)
results[jsonProperty.Name] = jsonPropertyValue.ToString();
}
return results;
}
List<IDescription> IDescription.GetDescriptions(IFileRead fileRead, Logistics logistics, List<Test> tests, IProcessData iProcessData)
{
List<IDescription> results = new();
if (iProcessData is null || !iProcessData.Details.Any() || iProcessData is not ProcessData processData)
results.Add(GetDefault(fileRead, logistics));
else
{
string nullData;
Description description;
object configDataNullData = fileRead.NullData;
if (configDataNullData is null)
nullData = string.Empty;
else
nullData = configDataNullData.ToString();
for (int i = 0; i < iProcessData.Details.Count; i++)
{
if (iProcessData.Details[i] is not Detail detail)
continue;
description = new Description
{
Test = (int)tests[i],
Count = tests.Count,
Index = i,
//
EventName = fileRead.EventName,
NullData = nullData,
JobID = fileRead.CellInstanceName,
Sequence = logistics.Sequence.ToString(),
MesEntity = logistics.MesEntity,
ReportFullPath = logistics.ReportFullPath,
ProcessJobID = logistics.ProcessJobID,
MID = logistics.MID,
//
Date = processData.Date,
Employee = processData.Engineer,
Lot = processData.Lot,
PSN = processData.PSN,
Reactor = processData.Reactor,
Recipe = processData.Recipe,
//
AutoOptimizeGain = processData.AutoOptimizeGain,
AutoProbeHeightSet = processData.AutoProbeHeightSet,
Avg = processData.Avg,
DataReject = processData.DataReject,
DLRatio = processData.DLRatio,
Merit = detail.Merit,
Pt = detail.Pt,
R = detail.R,
ResistivitySpec = processData.ResistivitySpec,
Rs = detail.Rs,
SemiRadial = processData.SemiRadial,
StdDev = processData.StandardDeviationPercentage,
T = detail.T,
Temp = processData.Temp,
//
Engineer = processData.Engineer,
EquipId = processData.EquipId,
FileName = processData.FileName,
HeaderUniqueId = detail.HeaderUniqueId,
Id = processData.UniqueId,
Layer = processData.Layer,
RDS = processData.RDS,
Run = processData.Run,
UniqueId = detail.UniqueId,
Zone = processData.Zone
};
results.Add(description);
}
}
return results;
}
private Description GetDisplayNames()
{
Description result = new();
return result;
}
private Description GetDefault(IFileRead fileRead, Logistics logistics)
{
Description result = new()
{
Test = -1,
Count = 0,
Index = -1,
//
EventName = fileRead.EventName,
NullData = fileRead.NullData,
JobID = fileRead.CellInstanceName,
Sequence = logistics.Sequence.ToString(),
MesEntity = fileRead.MesEntity,
ReportFullPath = logistics.ReportFullPath,
ProcessJobID = logistics.ProcessJobID,
MID = logistics.MID,
//
Date = nameof(Date),
Employee = nameof(Employee),
Lot = nameof(Lot),
PSN = nameof(PSN),
Reactor = nameof(Reactor),
Recipe = nameof(Recipe),
//
AutoOptimizeGain = nameof(AutoOptimizeGain),
AutoProbeHeightSet = nameof(AutoProbeHeightSet),
Avg = nameof(Avg),
DataReject = nameof(DataReject),
DLRatio = nameof(DLRatio),
Merit = nameof(Merit),
Pt = nameof(Pt),
R = nameof(R),
ResistivitySpec = nameof(ResistivitySpec),
Rs = nameof(Rs),
SemiRadial = nameof(SemiRadial),
StdDev = nameof(StdDev),
T = nameof(T),
Temp = nameof(Temp),
//
Engineer = nameof(Engineer),
EquipId = nameof(EquipId),
FileName = nameof(FileName),
HeaderUniqueId = nameof(HeaderUniqueId),
Id = nameof(Id),
Layer = nameof(Layer),
RDS = nameof(RDS),
Run = nameof(Run),
UniqueId = nameof(UniqueId),
Zone = nameof(Zone),
};
return result;
}
}
}


@ -0,0 +1,22 @@
namespace Adaptation.FileHandlers.RsM
{
public class Detail
{
public string HeaderUniqueId { get; set; }
public string Merit { get; set; }
public string Pt { get; set; }
public string R { get; set; }
public string Rs { get; set; }
public string T { get; set; }
public string UniqueId { get; set; }
public override string ToString()
{
return string.Concat(Merit, ";", Pt, ";", R, ";", Rs, ";", T);
}
}
}


@ -0,0 +1,149 @@
using Adaptation.Eaf.Management.ConfigurationData.CellAutomation;
using Adaptation.Ifx.Eaf.EquipmentConnector.File.Configuration;
using Adaptation.Shared;
using Adaptation.Shared.Methods;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text.Json;
using System.Text.RegularExpressions;
namespace Adaptation.FileHandlers.RsM
{
public class FileRead : Shared.FileRead, IFileRead
{
public FileRead(ISMTP smtp, Dictionary<string, string> fileParameter, string cellInstanceName, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList<ModelObjectParameterDefinition> modelObjectParameters, string equipmentDictionaryName, Dictionary<string, List<long>> dummyRuns, bool useCyclicalForDescription, bool isEAFHosted, int hyphenXToArchive, int hyphenIsArchive) :
base(new Description(), true, smtp, fileParameter, cellInstanceName, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, useCyclicalForDescription, isEAFHosted, hyphenXToArchive, hyphenIsArchive)
{
_MinFileLength = 10;
_NullData = string.Empty;
_Logistics = new Logistics(this);
if (_FileParameter is null)
throw new Exception(cellInstanceConnectionName);
if (_ModelObjectParameterDefinitions is null)
throw new Exception(cellInstanceConnectionName);
if (_IsDuplicator)
throw new Exception(cellInstanceConnectionName);
}
void IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, Exception exception)
{
Move(this, extractResults, exception);
}
void IFileRead.WaitForThread()
{
WaitForThread(thread: null, threadExceptions: null);
}
string IFileRead.GetEventDescription()
{
string result = _Description.GetEventDescription();
return result;
}
List<string> IFileRead.GetHeaderNames()
{
List<string> results = _Description.GetHeaderNames();
return results;
}
string[] IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, string to, string from, string resolvedFileLocation, Exception exception)
{
string[] results = Move(extractResults, to, from, resolvedFileLocation, exception);
return results;
}
JsonProperty[] IFileRead.GetDefault()
{
JsonProperty[] results = _Description.GetDefault(this, _Logistics);
return results;
}
Dictionary<string, string> IFileRead.GetDisplayNamesJsonElement()
{
Dictionary<string, string> results = _Description.GetDisplayNamesJsonElement(this);
return results;
}
List<IDescription> IFileRead.GetDescriptions(IFileRead fileRead, List<Test> tests, IProcessData processData)
{
List<IDescription> results = _Description.GetDescriptions(fileRead, _Logistics, tests, processData);
return results;
}
Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.GetExtractResult(string reportFullPath, string eventName)
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
if (string.IsNullOrEmpty(eventName))
throw new Exception();
_ReportFullPath = reportFullPath;
DateTime dateTime = DateTime.Now;
results = GetExtractResult(reportFullPath, dateTime);
if (results.Item3 is null)
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(results.Item1, new Test[] { }, JsonSerializer.Deserialize<JsonElement[]>("[]"), results.Item4);
if (results.Item3.Length > 0 && _IsEAFHosted)
WritePDSF(this, results.Item3);
UpdateLastTicksDuration(DateTime.Now.Ticks - dateTime.Ticks);
return results;
}
Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.ReExtract()
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
List<string> headerNames = _Description.GetHeaderNames();
Dictionary<string, string> keyValuePairs = _Description.GetDisplayNamesJsonElement(this);
results = ReExtract(this, headerNames, keyValuePairs);
return results;
}
void IFileRead.CheckTests(Test[] tests, bool extra)
{
throw new Exception(string.Concat("Not ", nameof(_IsDuplicator)));
}
void IFileRead.MoveArchive()
{
throw new Exception(string.Concat("Not ", nameof(_IsDuplicator)));
}
void IFileRead.Callback(object state)
{
throw new Exception(string.Concat("Not ", nameof(_IsDuplicator)));
}
private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results = new(string.Empty, null, null, new List<FileInfo>());
_Logistics = new Logistics(this, reportFullPath, useSplitForMID: true);
SetFileParameterLotIDToLogisticsMID();
if (reportFullPath.Length < _MinFileLength)
results.Item4.Add(new FileInfo(reportFullPath));
else
{
string logBody = string.Empty;
IProcessData iProcessData = new ProcessData(this, _Logistics, results.Item4);
if (iProcessData is ProcessData processData)
{
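// Compose the MID as Reactor-RDS-PSN and strip characters that are illegal in file names before using it as the lot ID.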
string mid = string.Concat(processData.Reactor, "-", processData.RDS, "-", processData.PSN);
mid = Regex.Replace(mid, @"[\\,\/,\:,\*,\?,\"",\<,\>,\|]", "_").Split('\r')[0].Split('\n')[0];
_Logistics.MID = mid;
SetFileParameterLotID(mid);
_Logistics.ProcessJobID = processData.Reactor;
logBody = processData.LogBody;
}
if (!iProcessData.Details.Any())
throw new Exception(string.Concat("No Data - ", dateTime.Ticks));
results = iProcessData.GetResults(this, _Logistics, results.Item4);
if (!_IsEAFHosted)
results = new(logBody, results.Item2, results.Item3, results.Item4);
}
return results;
}
}
}

View File

@ -0,0 +1,279 @@
using Adaptation.Shared;
using Adaptation.Shared.Methods;
using System;
using System.Collections.Generic;
using System.Data;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Text;
using System.Text.Json;
using System.Text.RegularExpressions;
namespace Adaptation.FileHandlers.RsM
{
public class ProcessData : IProcessData
{
private readonly List<object> _Details;
public string JobID { get; set; }
public string MesEntity { get; set; }
public string AutoOptimizeGain { get; set; }
public string AutoProbeHeightSet { get; set; }
public string Avg { get; set; }
public string DLRatio { get; set; }
public string DataReject { get; set; }
public string Date { get; set; }
public DateTime DateTime { get; set; }
public string Employee { get; set; }
public string EquipId { get; set; }
public string Engineer { get; set; }
public string FileName { get; set; }
public string Layer { get; set; }
public string Lot { get; set; }
public string LogBody { get; set; }
public string PSN { get; set; }
public string Project { get; set; }
public string RDS { get; set; }
public string Reactor { get; set; }
public string Recipe { get; set; }
public string RecipeName { get; set; }
public string ResistivitySpec { get; set; }
public string Run { get; set; }
public string SemiRadial { get; set; }
public string StandardDeviation { get; set; }
public string StandardDeviationPercentage { get; set; }
public string Temp { get; set; }
public string Title { get; set; }
public string UniqueId { get; set; }
public string Zone { get; set; }
List<object> Shared.Properties.IProcessData.Details => _Details;
public ProcessData(IFileRead fileRead, Logistics logistics, List<FileInfo> fileInfoCollection)
{
JobID = logistics.JobID;
fileInfoCollection.Clear();
_Details = new List<object>();
MesEntity = logistics.MesEntity;
Parse(fileRead, logistics, fileInfoCollection);
}
string IProcessData.GetCurrentReactor(IFileRead fileRead, Logistics logistics, Dictionary<string, string> reactors)
{
throw new Exception(string.Concat("See ", nameof(Parse)));
}
Tuple<string, Test[], JsonElement[], List<FileInfo>> IProcessData.GetResults(IFileRead fileRead, Logistics logistics, List<FileInfo> fileInfoCollection)
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
List<Test> tests = new();
foreach (object item in _Details)
tests.Add(Test.CDE);
List<IDescription> descriptions = fileRead.GetDescriptions(fileRead, tests, this);
if (tests.Count != descriptions.Count)
throw new Exception();
for (int i = 0; i < tests.Count; i++)
{
if (descriptions[i] is not Description description)
throw new Exception();
if (description.Test != (int)tests[i])
throw new Exception();
}
List<Description> fileReadDescriptions = (from l in descriptions select (Description)l).ToList();
string json = JsonSerializer.Serialize(fileReadDescriptions, fileReadDescriptions.GetType());
JsonElement[] jsonElements = JsonSerializer.Deserialize<JsonElement[]>(json);
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(logistics.Logistics1[0], tests.ToArray(), jsonElements, fileInfoCollection);
return results;
}
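// The <Title> segment carries the run name as Reactor-RDS-PSN.Layer-Zone; split it into the individual fields.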
private void SetTitleData(string[] segments)
{
if (segments.Length > 0)
{
Title = segments[0];
// Remove illegal characters \/:*?"<>| found in the Run.
Run = Regex.Replace(segments[0], @"[\\,\/,\:,\*,\?,\"",\<,\>,\|]", "_").Split('\r')[0].Split('\n')[0];
string[] parsedBatch = segments[0].Split('-');
if (parsedBatch.Length > 0)
Reactor = parsedBatch[0];
if (parsedBatch.Length > 1)
RDS = parsedBatch[1];
if (parsedBatch.Length > 2)
{
string[] parsedPSN = parsedBatch[2].Split('.');
if (parsedPSN.Length > 0)
PSN = parsedPSN[0];
if (parsedPSN.Length > 1)
Layer = parsedPSN[1];
}
if (parsedBatch.Length > 3)
Zone = parsedBatch[3];
}
}
private void SetFileNameData(string[] segments)
{
if (segments.Length > 1)
FileName = segments[0];
if (segments.Length > 2)
{
Project = segments[1];
RecipeName = segments[2];
Recipe = string.Concat(segments[1], " \\ ", segments[2]);
}
}
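// Prefer the "HH:mm MM/dd/yy" timestamp embedded in the report, but only when it falls within a day of the file's sequence time; otherwise fall back to the sequence time.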
internal static DateTime GetDateTime(Logistics logistics, string dateTimeText)
{
DateTime result;
string inputDateFormat = "HH:mm MM/dd/yy";
if (dateTimeText.Length != inputDateFormat.Length)
result = logistics.DateTimeFromSequence;
else
{
if (!DateTime.TryParseExact(dateTimeText, inputDateFormat, CultureInfo.InvariantCulture, DateTimeStyles.None, out DateTime dateTimeParsed))
result = logistics.DateTimeFromSequence;
else
{
if (dateTimeParsed < logistics.DateTimeFromSequence.AddDays(1) && dateTimeParsed > logistics.DateTimeFromSequence.AddDays(-1))
result = dateTimeParsed;
else
result = logistics.DateTimeFromSequence;
}
}
return result;
}
private void SetDateTimeData(Logistics logistics, string[] segments)
{
DateTime dateTime;
if (segments.Length < 2)
dateTime = logistics.DateTimeFromSequence;
else
{
string dateTimeText = string.Concat(segments[0], ' ', segments[1]);
dateTime = GetDateTime(logistics, dateTimeText);
}
DateTime = dateTime;
Date = dateTime.ToString();
if (segments.Length > 3 && float.TryParse(segments[2], out float temp))
Temp = temp.ToString("0.0");
if (segments.Length > 7 && segments[6] == "Avg=")
Avg = segments[7];
if (segments.Length > 7 && segments[8] == "Dev=")
StandardDeviation = segments[9];
if (!string.IsNullOrEmpty(Avg) && !string.IsNullOrEmpty(StandardDeviation) && float.TryParse(Avg, out float average) && float.TryParse(StandardDeviation, out float standardDeviation))
StandardDeviationPercentage = Math.Round(standardDeviation / average, 4).ToString("0.00%");
}
private void SetOperatorData(string[] segments)
{
if (segments.Length > 1)
Employee = segments[0];
if (segments.Length > 2)
EquipId = segments[1];
}
private void SetEngineerData(string[] segments)
{
if (segments.Length > 1)
Engineer = segments[0];
}
private void SetNumProbePointsData(string[] segments)
{
if (segments.Length > 6)
DataReject = segments[6];
}
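// Map a data row onto a Detail: segment 0 = R, 1 = T, 2 = Rs, 12 = merit; Pt and UniqueId are filled in later by Parse.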
private Detail GetRData(string[] segments)
{
Detail result = new();
if (segments.Length > 0 && float.TryParse(segments[0], out float r))
result.R = r.ToString("0.0");
if (segments.Length > 1 && float.TryParse(segments[1], out float t))
result.T = t.ToString("0.0");
if (segments.Length > 2 && float.TryParse(segments[2], out float rs))
result.Rs = rs.ToString("0.0000");
if (segments.Length > 12 && float.TryParse(segments[12], out float merit))
result.Merit = merit.ToString("0.00");
result.Pt = "-1";
result.UniqueId = string.Empty;
return result;
}
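// Scan the report line by line; each section is identified by its header tag (for example ",<Title>") and handed to the matching setter. The <R,Th,...> block is read row by row into _Details.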
private void Parse(IFileRead fileRead, Logistics logistics, List<FileInfo> fileInfoCollection)
{
if (fileRead is null)
{ }
Lot = "LotID";
Detail detail;
if (fileInfoCollection is null)
{ }
string timeFormat = "yyyyMMddHHmmss";
string[] separator = new string[] { " " };
string[] lines = File.ReadAllLines(logistics.ReportFullPath);
for (int i = 0; i < lines.Length; i++)
{
if (lines[i].Contains(",<Title>"))
SetTitleData(lines[i].Split(separator, StringSplitOptions.RemoveEmptyEntries));
else if (lines[i].Contains(",<FileName, Proj,Rcpe, LotID,WfrID>"))
SetFileNameData(lines[i].Split(separator, StringSplitOptions.RemoveEmptyEntries));
else if (lines[i].Contains(",<DateTime,Temp,TCR%,N|P>"))
SetDateTimeData(logistics, lines[i].Split(separator, StringSplitOptions.RemoveEmptyEntries));
else if (lines[i].Contains(",<Operator, Epuipment>"))
SetOperatorData(lines[i].Split(separator, StringSplitOptions.RemoveEmptyEntries));
else if (lines[i].Contains(",<Engineer>"))
SetEngineerData(lines[i].Split(separator, StringSplitOptions.RemoveEmptyEntries));
else if (lines[i].Contains(",<NumProbePoints, SingleOrDualProbeConfig, #ActPrbPts, Rsens,IdrvMx,VinGain, DataRejectSigma, MeritThreshold>"))
SetNumProbePointsData(lines[i].Split(separator, StringSplitOptions.RemoveEmptyEntries));
else if (lines[i].Contains(",<R,Th,Data, Rs,RsA,RsB, #Smpl, x,y, Irng,Vrng,ChiSq,merit,DataIntegrity>"))
{
for (int z = i; z < lines.Length; z++)
{
i = z;
if (string.IsNullOrEmpty(lines[z]))
continue;
detail = GetRData(lines[z].Split(separator, StringSplitOptions.RemoveEmptyEntries));
_Details.Add(detail);
}
}
}
UniqueId = string.Concat(EquipId, "_", Run, "_", logistics.DateTimeFromSequence.ToString(timeFormat));
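// Stamp each detail with the header UniqueId and a 1-based point number; the detail's own UniqueId is derived from its values plus the point number.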
for (int i = 0; i < _Details.Count; i++)
{
if (_Details[i] is not Detail item)
continue;
item.HeaderUniqueId = UniqueId;
item.Pt = (i + 1).ToString();
item.UniqueId = string.Concat(item, "_Point-", item.Pt);
}
// Remove illegal characters \/:*?"<>| found in the Lot.
Lot = Regex.Replace(Lot, @"[\\,\/,\:,\*,\?,\"",\<,\>,\|]", "_").Split('\r')[0].Split('\n')[0];
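// Rebuild a human-readable log body (RUN, Recipe, EQUIP#, LotID, OPERATOR, ...) from the parsed values; "####"-style placeholders stand in for fields this format does not supply.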
StringBuilder stringBuilder = new();
string reportFileName = Path.GetFileName(logistics.ReportFullPath);
stringBuilder.AppendLine($"RUN [{Title}]");
stringBuilder.AppendLine($"Recipe {Project} \\ {RecipeName} RESISTIVITY {"####"}");
stringBuilder.AppendLine($"EQUIP# {EquipId} Engineer {Engineer}");
stringBuilder.AppendLine($"LotID {Lot} D.L.RATIO {"#.####"}");
stringBuilder.AppendLine($"OPERATOR {Employee} TEMP {Temp} {DateTime:HH:mm MM/dd/yy}");
stringBuilder.AppendLine($"AutoOptimizeGain = {"###"} AutoProbeHeightSet = {"##"}");
stringBuilder.AppendLine($"DataReject > {"#.#"}Sigma");
stringBuilder.AppendLine($"0 ..\\{Project}.prj\\{RecipeName}.rcp\\{reportFileName} {DateTime:HH:mm MM/dd/yy}");
stringBuilder.AppendLine($"pt# R Th Rs[Ohm/sq@T] Merit");
for (int i = 0; i < _Details.Count; i++)
{
if (_Details[i] is not Detail item)
continue;
stringBuilder.AppendLine($"{item.Pt} {item.R} {item.T} {item.Rs} {item.Merit}");
}
stringBuilder.AppendLine($"Avg = {Avg} {StandardDeviationPercentage} SEMI Radial= {"#.##%"}");
LogBody = stringBuilder.ToString();
}
}
}

View File

@ -0,0 +1,315 @@
using Adaptation.Shared;
using Adaptation.Shared.Methods;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text.Json;
namespace Adaptation.FileHandlers.pcl
{
public class Description : IDescription, Shared.Properties.IDescription
{
public int Test { get; set; }
public int Count { get; set; }
public int Index { get; set; }
//
public string EventName { get; set; }
public string NullData { get; set; }
public string JobID { get; set; }
public string Sequence { get; set; }
public string MesEntity { get; set; }
public string ReportFullPath { get; set; }
public string ProcessJobID { get; set; }
public string MID { get; set; }
//
public string Date { get; set; }
public string Employee { get; set; }
public string Lot { get; set; }
public string PSN { get; set; }
public string Reactor { get; set; }
public string Recipe { get; set; }
//
public string AutoOptimizeGain { get; set; }
public string AutoProbeHeightSet { get; set; }
public string Avg { get; set; }
public string DataReject { get; set; }
public string DLRatio { get; set; }
public string Merit { get; set; }
public string Pt { get; set; }
public string R { get; set; }
public string ResistivitySpec { get; set; }
public string Rs { get; set; }
public string SemiRadial { get; set; }
public string StdDev { get; set; }
public string T { get; set; }
public string Temp { get; set; }
//
public string Engineer { get; set; }
public string EquipId { get; set; }
public string FileName { get; set; }
public string HeaderUniqueId { get; set; }
public string Id { get; set; }
public string Layer { get; set; }
public string RDS { get; set; }
public string Run { get; set; }
public string UniqueId { get; set; }
public string Zone { get; set; }
string IDescription.GetEventDescription()
{
return "File Has been read and parsed";
}
List<string> IDescription.GetNames(IFileRead fileRead, Logistics logistics)
{
List<string> results = new();
IDescription description = GetDefault(fileRead, logistics);
string json = JsonSerializer.Serialize(description, description.GetType());
object @object = JsonSerializer.Deserialize<object>(json);
if (@object is not JsonElement jsonElement)
throw new Exception();
foreach (JsonProperty jsonProperty in jsonElement.EnumerateObject())
results.Add(jsonProperty.Name);
return results;
}
List<string> IDescription.GetDetailNames()
{
List<string> results = new()
{
nameof(AutoOptimizeGain),
nameof(AutoProbeHeightSet),
nameof(Avg),
nameof(DataReject),
nameof(DLRatio),
nameof(Merit),
nameof(Pt),
nameof(R),
nameof(ResistivitySpec),
nameof(Rs),
nameof(SemiRadial),
nameof(StdDev),
nameof(T),
nameof(Temp)
};
return results;
}
List<string> IDescription.GetHeaderNames()
{
List<string> results = new()
{
nameof(Date),
nameof(Employee),
nameof(Lot),
nameof(PSN),
nameof(Reactor),
nameof(Recipe)
};
return results;
}
IDescription IDescription.GetDisplayNames()
{
Description result = GetDisplayNames();
return result;
}
List<string> IDescription.GetParameterNames()
{
List<string> results = new()
{
nameof(Engineer),
nameof(EquipId),
nameof(FileName),
nameof(HeaderUniqueId),
nameof(Id),
nameof(Layer),
nameof(RDS),
nameof(Run),
nameof(UniqueId),
nameof(Zone)
};
return results;
}
JsonProperty[] IDescription.GetDefault(IFileRead fileRead, Logistics logistics)
{
JsonProperty[] results;
IDescription description = GetDefault(fileRead, logistics);
string json = JsonSerializer.Serialize(description, description.GetType());
object @object = JsonSerializer.Deserialize<object>(json);
results = ((JsonElement)@object).EnumerateObject().ToArray();
return results;
}
List<string> IDescription.GetPairedParameterNames()
{
List<string> results = new();
return results;
}
List<string> IDescription.GetIgnoreParameterNames(Test test)
{
List<string> results = new();
return results;
}
IDescription IDescription.GetDefaultDescription(IFileRead fileRead, Logistics logistics)
{
Description result = GetDefault(fileRead, logistics);
return result;
}
Dictionary<string, string> IDescription.GetDisplayNamesJsonElement(IFileRead fileRead)
{
Dictionary<string, string> results = new();
IDescription description = GetDisplayNames();
string json = JsonSerializer.Serialize(description, description.GetType());
JsonElement jsonElement = JsonSerializer.Deserialize<JsonElement>(json);
foreach (JsonProperty jsonProperty in jsonElement.EnumerateObject())
{
if (!results.ContainsKey(jsonProperty.Name))
results.Add(jsonProperty.Name, string.Empty);
if (jsonProperty.Value is JsonElement jsonPropertyValue)
results[jsonProperty.Name] = jsonPropertyValue.ToString();
}
return results;
}
List<IDescription> IDescription.GetDescriptions(IFileRead fileRead, Logistics logistics, List<Test> tests, IProcessData iProcessData)
{
List<IDescription> results = new();
if (iProcessData is null || !iProcessData.Details.Any() || iProcessData is not ProcessData processData)
results.Add(GetDefault(fileRead, logistics));
else
{
string nullData;
Description description;
object configDataNullData = fileRead.NullData;
if (configDataNullData is null)
nullData = string.Empty;
else
nullData = configDataNullData.ToString();
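// Emit one Description per Detail: header-level values come from the ProcessData, point-level values (Merit, Pt, R, Rs, T, UniqueId) from the Detail.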
for (int i = 0; i < iProcessData.Details.Count; i++)
{
if (iProcessData.Details[i] is not Detail detail)
continue;
description = new Description
{
Test = (int)tests[i],
Count = tests.Count,
Index = i,
//
EventName = fileRead.EventName,
NullData = nullData,
JobID = fileRead.CellInstanceName,
Sequence = logistics.Sequence.ToString(),
MesEntity = logistics.MesEntity,
ReportFullPath = logistics.ReportFullPath,
ProcessJobID = logistics.ProcessJobID,
MID = logistics.MID,
//
Date = processData.Date,
Employee = processData.Employee,
Lot = processData.Lot,
PSN = processData.PSN,
Reactor = processData.Reactor,
Recipe = processData.Recipe,
//
AutoOptimizeGain = processData.AutoOptimizeGain,
AutoProbeHeightSet = processData.AutoProbeHeightSet,
Avg = processData.Avg,
DataReject = processData.DataReject,
DLRatio = processData.DLRatio,
Merit = detail.Merit,
Pt = detail.Pt,
R = detail.R,
ResistivitySpec = processData.ResistivitySpec,
Rs = detail.Rs,
SemiRadial = processData.SemiRadial,
StdDev = processData.StdDev,
T = detail.T,
Temp = processData.Temp,
//
Engineer = processData.Engineer,
EquipId = processData.EquipId,
FileName = processData.FileName,
HeaderUniqueId = detail.HeaderUniqueId,
Id = processData.UniqueId,
Layer = processData.Layer,
RDS = processData.RDS,
Run = processData.Run,
UniqueId = detail.UniqueId,
Zone = processData.Zone
};
results.Add(description);
}
}
return results;
}
private Description GetDisplayNames()
{
Description result = new();
return result;
}
private Description GetDefault(IFileRead fileRead, Logistics logistics)
{
Description result = new()
{
Test = -1,
Count = 0,
Index = -1,
//
EventName = fileRead.EventName,
NullData = fileRead.NullData,
JobID = fileRead.CellInstanceName,
Sequence = logistics.Sequence.ToString(),
MesEntity = fileRead.MesEntity,
ReportFullPath = logistics.ReportFullPath,
ProcessJobID = logistics.ProcessJobID,
MID = logistics.MID,
//
Date = nameof(Date),
Employee = nameof(Employee),
Lot = nameof(Lot),
PSN = nameof(PSN),
Reactor = nameof(Reactor),
Recipe = nameof(Recipe),
//
AutoOptimizeGain = nameof(AutoOptimizeGain),
AutoProbeHeightSet = nameof(AutoProbeHeightSet),
Avg = nameof(Avg),
DataReject = nameof(DataReject),
DLRatio = nameof(DLRatio),
Merit = nameof(Merit),
Pt = nameof(Pt),
R = nameof(R),
ResistivitySpec = nameof(ResistivitySpec),
Rs = nameof(Rs),
SemiRadial = nameof(SemiRadial),
StdDev = nameof(StdDev),
T = nameof(T),
Temp = nameof(Temp),
//
Engineer = nameof(Engineer),
EquipId = nameof(EquipId),
FileName = nameof(FileName),
HeaderUniqueId = nameof(HeaderUniqueId),
Id = nameof(Id),
Layer = nameof(Layer),
RDS = nameof(RDS),
Run = nameof(Run),
UniqueId = nameof(UniqueId),
Zone = nameof(Zone),
};
return result;
}
}
}

View File

@ -0,0 +1,22 @@
namespace Adaptation.FileHandlers.pcl
{
public class Detail
{
public string HeaderUniqueId { get; set; }
public string Merit { get; set; }
public string Pt { get; set; }
public string R { get; set; }
public string Rs { get; set; }
public string T { get; set; }
public string UniqueId { get; set; }
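// ToString is concatenated into UniqueId by ProcessData, so it joins the point values with ';'.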
public override string ToString()
{
return string.Concat(Merit, ";", Pt, ";", R, ";", Rs, ";", T);
}
}
}

View File

@ -0,0 +1,145 @@
using Adaptation.Eaf.Management.ConfigurationData.CellAutomation;
using Adaptation.Ifx.Eaf.EquipmentConnector.File.Configuration;
using Adaptation.Shared;
using Adaptation.Shared.Methods;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text.Json;
using System.Text.RegularExpressions;
namespace Adaptation.FileHandlers.pcl
{
public class FileRead : Shared.FileRead, IFileRead
{
public FileRead(ISMTP smtp, Dictionary<string, string> fileParameter, string cellInstanceName, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList<ModelObjectParameterDefinition> modelObjectParameters, string equipmentDictionaryName, Dictionary<string, List<long>> dummyRuns, bool useCyclicalForDescription, bool isEAFHosted, int hyphenXToArchive, int hyphenIsArchive) :
base(new Description(), true, smtp, fileParameter, cellInstanceName, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, useCyclicalForDescription, isEAFHosted, hyphenXToArchive, hyphenIsArchive)
{
_MinFileLength = 10;
_NullData = string.Empty;
_Logistics = new Logistics(this);
if (_FileParameter is null)
throw new Exception(cellInstanceConnectionName);
if (_ModelObjectParameterDefinitions is null)
throw new Exception(cellInstanceConnectionName);
if (_IsDuplicator)
throw new Exception(cellInstanceConnectionName);
}
void IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, Exception exception)
{
Move(this, extractResults, exception);
}
void IFileRead.WaitForThread()
{
WaitForThread(thread: null, threadExceptions: null);
}
string IFileRead.GetEventDescription()
{
string result = _Description.GetEventDescription();
return result;
}
List<string> IFileRead.GetHeaderNames()
{
List<string> results = _Description.GetHeaderNames();
return results;
}
string[] IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, string to, string from, string resolvedFileLocation, Exception exception)
{
string[] results = Move(extractResults, to, from, resolvedFileLocation, exception);
return results;
}
JsonProperty[] IFileRead.GetDefault()
{
JsonProperty[] results = _Description.GetDefault(this, _Logistics);
return results;
}
Dictionary<string, string> IFileRead.GetDisplayNamesJsonElement()
{
Dictionary<string, string> results = _Description.GetDisplayNamesJsonElement(this);
return results;
}
List<IDescription> IFileRead.GetDescriptions(IFileRead fileRead, List<Test> tests, IProcessData processData)
{
List<IDescription> results = _Description.GetDescriptions(fileRead, _Logistics, tests, processData);
return results;
}
Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.GetExtractResult(string reportFullPath, string eventName)
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
if (string.IsNullOrEmpty(eventName))
throw new Exception();
_ReportFullPath = reportFullPath;
DateTime dateTime = DateTime.Now;
results = GetExtractResult(reportFullPath, dateTime);
if (results.Item3 is null)
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(results.Item1, new Test[] { }, JsonSerializer.Deserialize<JsonElement[]>("[]"), results.Item4);
if (results.Item3.Length > 0 && _IsEAFHosted)
WritePDSF(this, results.Item3);
UpdateLastTicksDuration(DateTime.Now.Ticks - dateTime.Ticks);
return results;
}
Tuple<string, Test[], JsonElement[], List<FileInfo>> IFileRead.ReExtract()
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
List<string> headerNames = _Description.GetHeaderNames();
Dictionary<string, string> keyValuePairs = _Description.GetDisplayNamesJsonElement(this);
results = ReExtract(this, headerNames, keyValuePairs);
return results;
}
void IFileRead.CheckTests(Test[] tests, bool extra)
{
throw new Exception(string.Concat("Not ", nameof(_IsDuplicator)));
}
void IFileRead.MoveArchive()
{
throw new Exception(string.Concat("Not ", nameof(_IsDuplicator)));
}
void IFileRead.Callback(object state)
{
throw new Exception(string.Concat("Not ", nameof(_IsDuplicator)));
}
private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results = new(string.Empty, null, null, new List<FileInfo>());
_Logistics = new Logistics(this, reportFullPath, useSplitForMID: true);
SetFileParameterLotIDToLogisticsMID();
if (reportFullPath.Length < _MinFileLength)
results.Item4.Add(new FileInfo(reportFullPath));
else
{
IProcessData iProcessData = new ProcessData(this, _Logistics, results.Item4);
if (iProcessData is ProcessData processData)
{
string mid = string.Concat(processData.Reactor, "-", processData.RDS, "-", processData.PSN);
mid = Regex.Replace(mid, @"[\\,\/,\:,\*,\?,\"",\<,\>,\|]", "_").Split('\r')[0].Split('\n')[0];
_Logistics.MID = mid;
SetFileParameterLotID(mid);
_Logistics.ProcessJobID = processData.Reactor;
}
if (!iProcessData.Details.Any())
throw new Exception(string.Concat("No Data - ", dateTime.Ticks));
results = iProcessData.GetResults(this, _Logistics, results.Item4);
}
return results;
}
}
}

View File

@ -0,0 +1,452 @@
using Adaptation.Shared;
using Adaptation.Shared.Methods;
using log4net;
using System;
using System.Collections.Generic;
using System.Data;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Text;
using System.Text.Json;
using System.Text.RegularExpressions;
namespace Adaptation.FileHandlers.pcl
{
public class ProcessData : IProcessData
{
private int _I;
private string _Data;
private readonly ILog _Log;
private readonly List<object> _Details;
public string JobID { get; set; }
public string MesEntity { get; set; }
public string AutoOptimizeGain { get; set; }
public string AutoProbeHeightSet { get; set; }
public string Avg { get; set; }
public string DLRatio { get; set; }
public string DataReject { get; set; }
public string Date { get; set; }
public string Employee { get; set; }
public string EquipId { get; set; }
public string Engineer { get; set; }
public string FileName { get; set; }
public string Layer { get; set; }
public string Lot { get; set; }
public string PSN { get; set; }
public string RDS { get; set; }
public string Reactor { get; set; }
public string Recipe { get; set; }
public string ResistivitySpec { get; set; }
public string Run { get; set; }
public string SemiRadial { get; set; }
public string StdDev { get; set; }
public string Temp { get; set; }
public string UniqueId { get; set; }
public string Zone { get; set; }
List<object> Shared.Properties.IProcessData.Details => _Details;
public ProcessData(IFileRead fileRead, Logistics logistics, List<FileInfo> fileInfoCollection)
{
fileInfoCollection.Clear();
_Details = new List<object>();
_I = 0;
_Data = string.Empty;
JobID = logistics.JobID;
MesEntity = logistics.MesEntity;
_Log = LogManager.GetLogger(typeof(ProcessData));
Parse(fileRead, logistics, fileInfoCollection);
}
string IProcessData.GetCurrentReactor(IFileRead fileRead, Logistics logistics, Dictionary<string, string> reactors)
{
throw new Exception(string.Concat("See ", nameof(Parse)));
}
Tuple<string, Test[], JsonElement[], List<FileInfo>> IProcessData.GetResults(IFileRead fileRead, Logistics logistics, List<FileInfo> fileInfoCollection)
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
List<Test> tests = new();
foreach (object item in _Details)
tests.Add(Test.CDE);
List<IDescription> descriptions = fileRead.GetDescriptions(fileRead, tests, this);
if (tests.Count != descriptions.Count)
throw new Exception();
for (int i = 0; i < tests.Count; i++)
{
if (descriptions[i] is not Description description)
throw new Exception();
if (description.Test != (int)tests[i])
throw new Exception();
}
List<Description> fileReadDescriptions = (from l in descriptions select (Description)l).ToList();
string json = JsonSerializer.Serialize(fileReadDescriptions, fileReadDescriptions.GetType());
JsonElement[] jsonElements = JsonSerializer.Deserialize<JsonElement[]>(json);
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(logistics.Logistics1[0], tests.ToArray(), jsonElements, fileInfoCollection);
return results;
}
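// Cursor-based scanning helpers: _Data holds the raw report text and _I is the current read position.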
private string GetBefore(string text)
{
string str;
string str1;
int num = _Data.IndexOf(text, _I);
if (num <= -1)
{
str = _Data.Substring(_I);
_I = _Data.Length;
str1 = str.Trim();
}
else
{
str = _Data.Substring(_I, num - _I);
_I = num + text.Length;
str1 = str.Trim();
}
return str1;
}
private string GetBefore(string text, bool trim)
{
string str;
string before;
if (!trim)
{
int num = _Data.IndexOf(text, _I);
if (num <= -1)
{
str = _Data.Substring(_I);
_I = _Data.Length;
before = str;
}
else
{
str = _Data.Substring(_I, num - _I);
_I = num + text.Length;
before = str;
}
}
else
{
before = GetBefore(text);
}
return before;
}
private string GetToEOL()
{
return GetBefore("\n");
}
private string GetToEOL(bool trim)
{
string str;
str = (!trim ? GetBefore("\n", false) : GetToEOL());
return str;
}
private string GetToken()
{
while (true)
{
if ((_I >= _Data.Length || !IsNullOrWhiteSpace(_Data.Substring(_I, 1))))
break;
_I++;
}
int num = _I;
while (true)
{
if ((num >= _Data.Length || IsNullOrWhiteSpace(_Data.Substring(num, 1))))
break;
num++;
}
string str = _Data.Substring(_I, num - _I);
_I = num;
return str.Trim();
}
private string GetToText(string text)
{
string str = _Data.Substring(_I, _Data.IndexOf(text, _I) - _I).Trim();
return str;
}
private bool IsBlankLine()
{
int num = _Data.IndexOf("\n", _I);
return IsNullOrWhiteSpace((num > -1 ? _Data.Substring(_I, num - _I) : _Data.Substring(_I)));
}
private bool IsNullOrWhiteSpace(string text)
{
bool flag;
int num = 0;
while (true)
{
if (num >= text.Length)
{
flag = true;
break;
}
else if (char.IsWhiteSpace(text[num]))
{
num++;
}
else
{
flag = false;
break;
}
}
return flag;
}
private string PeekNextLine()
{
int num = _I;
string toEOL = GetToEOL();
_I = num;
return toEOL;
}
private void ScanPast(string text)
{
int num = _Data.IndexOf(text, _I);
if (num <= -1)
{
_I = _Data.Length;
}
else
{
_I = num + text.Length;
}
}
internal static DateTime GetDateTime(Logistics logistics, string dateTimeText)
{
DateTime result;
string inputDateFormat = "HH:mm MM/dd/yy";
if (dateTimeText.Length != inputDateFormat.Length)
result = logistics.DateTimeFromSequence;
else
{
if (!DateTime.TryParseExact(dateTimeText, inputDateFormat, CultureInfo.InvariantCulture, DateTimeStyles.None, out DateTime dateTimeParsed))
result = logistics.DateTimeFromSequence;
else
{
if (dateTimeParsed < logistics.DateTimeFromSequence.AddDays(1) && dateTimeParsed > logistics.DateTimeFromSequence.AddDays(-1))
result = dateTimeParsed;
else
result = logistics.DateTimeFromSequence;
}
}
return result;
}
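// Parse flow: normalize the raw nPort capture, write a tab-delimited .log copy, then walk the header fields and the point table with the scanning helpers above.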
private void Parse(IFileRead fileRead, Logistics logistics, List<FileInfo> fileInfoCollection)
{
if (fileRead is null)
{ }
// Convert the source file to UTF-8 bytes and back to a string for processing. Together with the
// control-character replacement below, this conversion eliminates special characters such as 0x18 (CANCEL) captured via nPort.
string rawText = File.ReadAllText(logistics.ReportFullPath);
UTF8Encoding utf8Encoding = new();
byte[] bytes = utf8Encoding.GetBytes(rawText);
string convertedText = utf8Encoding.GetString(bytes);
// Replace every character outside TAB (0x09), LF (0x0A), CR (0x0D), and printable ASCII (0x20-0x7E)
// with a space; only these characters are valid for SharePoint.
string receivedData = Regex.Replace(convertedText, @"[^\u0009\u000A\u000D\u0020-\u007E]", " ");
string log = receivedData;
for (short i = 0; i < short.MaxValue; i++)
{
if (!log.Contains(" "))
break;
log = log.Replace(" ", " ");
}
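// With the spaces collapsed, convert the remaining separators to tabs and write the result next to the report as a .log file.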
log = log.Replace(" ", "\t").Replace(": ", "\t").Replace(":\t", "\t");
IEnumerable<string> lines = (from l in log.Split('\r') select l.Trim());
string logFile = Path.ChangeExtension(logistics.ReportFullPath, ".log");
File.WriteAllLines(logFile, lines);
fileInfoCollection.Add(new FileInfo(logFile));
//parse file
string h = string.Empty;
receivedData = receivedData.Replace("\r", "\n").Trim();
_I = 0;
_Data = string.Empty;
if (string.IsNullOrEmpty(receivedData))
throw new Exception("No data!");
Detail detail;
_I = 0;
_Data = receivedData;
ScanPast("RUN:");
Run = GetToEOL();
ScanPast("Recipe:");
Recipe = GetBefore("RESISTIVITY SPEC:");
if (string.IsNullOrEmpty(Recipe))
{
_I = 0;
_Data = receivedData;
ScanPast("RUN:");
Run = GetToEOL();
ScanPast("DEVICE:");
Recipe = GetBefore("RESISTIVITY SPEC:");
}
ResistivitySpec = GetToEOL();
ScanPast("EQUIP#:");
EquipId = GetBefore("Engineer:");
Engineer = GetToEOL();
ScanPast("LotID:");
Lot = GetBefore("D.L.RATIO:");
// Remove illegal characters \/:*?"<>| found in the Lot.
Lot = Regex.Replace(Lot, @"[\\,\/,\:,\*,\?,\"",\<,\>,\|]", "_").Split('\r')[0].Split('\n')[0];
DLRatio = GetToEOL();
ScanPast("OPERATOR:");
Employee = GetBefore("TEMP:");
Temp = GetToken();
string dateTimeText = GetToEOL();
DateTime dateTime = GetDateTime(logistics, dateTimeText);
Date = dateTime.ToString();
ScanPast("AutoOptimizeGain =");
AutoOptimizeGain = GetBefore("AutoProbeHeightSet =");
AutoProbeHeightSet = GetToEOL();
ScanPast("DataReject");
DataReject = GetToEOL();
GetToEOL();
FileName = GetToEOL();
GetToEOL();
GetToEOL();
bool check = false;
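// Point table: each row reads as pt, R, T, Rs, merit. An "Avg" token ends the table; a token containing ':' switches to the alternate RsAv summary format handled below.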
while (!IsBlankLine())
{
detail = new Detail() { Pt = GetToken() };
if (detail.Pt.Contains("Avg"))
break;
else if (!detail.Pt.Contains(":"))
{
detail.R = GetToken();
detail.T = GetToken();
detail.Rs = GetToken();
detail.Merit = GetToken();
detail.UniqueId = string.Concat("_Point-", _Details.Count + 1);
GetToEOL();
_Details.Add(detail);
}
else
{
check = true;
break;
}
}
_I = 0;
_Data = receivedData;
if (!check)
{
ScanPast("Avg =");
Avg = GetToken();
StdDev = GetToken();
ScanPast("SEMI Radial=");
SemiRadial = GetToEOL();
}
else
{
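// Alternate summary format: "RsAv <avg> +/- <stddev>", a "(Mx+Mn)" radial value, then Rs/merit pairs for each point.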
ScanPast("RsAv ");
Avg = GetBefore("+/-");
StdDev = GetToken();
_Log.Debug($"****ProcessData - RsAv StDev={StdDev}");
ScanPast("(Mx+Mn)");
SemiRadial = GetToken();
_Log.Debug($"****ProcessData - RsAv SemiRadial={SemiRadial}");
GetToEOL();
int num = 0;
GetBefore(": ");
for (string i = GetToken(); !string.IsNullOrEmpty(i); i = GetToken())
{
if (!i.Contains(":"))
{
detail = new Detail();
int num1 = num + 1;
num = num1;
_Log.Debug($"****ProcessData - RsAv Point={num1}");
detail.Pt = num1.ToString();
detail.Rs = i;
detail.Merit = GetToken().Replace("|", "");
detail.UniqueId = string.Concat("_Point-", _Details.Count + 1);
_Details.Add(detail);
}
}
}
//Id = -1;
Run = Run.Trim();
if (!Run.StartsWith("[") && !Run.EndsWith("]"))
throw new Exception("Lot summary data is invalid or missing.");
Run = Run.Replace("[", "");
Run = Run.Replace("]", "");
Run = Regex.Replace(Run, @"[\\,\/,\:,\*,\?,\"",\<,\>,\|]", "_").Split('\r')[0].Split('\n')[0];
_Log.Debug($"****ParseData - cde.Run:'{Run}'");
if (string.IsNullOrEmpty(Run))
throw new Exception("Batch (Run) information does not exist");
//parse out batch and validate
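// Run is expected as Reactor-RDS-PSN.Layer-Zone; missing trailing pieces simply leave those fields empty.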
string[] parsedBatch = Run.Split('-');
if (parsedBatch.Length >= 1)
Reactor = parsedBatch[0];
if (parsedBatch.Length >= 2)
RDS = parsedBatch[1];
if (parsedBatch.Length >= 3)
{
string[] parsedPSN = parsedBatch[2].Split('.');
if (parsedPSN.Length >= 1)
PSN = parsedPSN[0];
if (parsedPSN.Length >= 2)
Layer = parsedPSN[1];
}
if (parsedBatch.Length >= 4)
Zone = parsedBatch[3];
//create filename / unique id
string timeFormat = "yyyyMMddHHmmss";
//fix equip
StringBuilder equipFixed = new();
foreach (char c in EquipId)
{
if (char.IsLetterOrDigit(c) || c == '-' || c == '.')
{
equipFixed.Append(c);
}
}
EquipId = equipFixed.ToString();
_Log.Debug($"****ParseData - cde.EquipId:'{EquipId}'");
// The "cde.Run" string is used as part of the SharePoint header unique ID. The "cde.Run" ID is typed
// at the tool by the users. The characters are not controlled and the user can type any characters like
// "\", "*", ".", " ", etc. Some of these characters are not valid and thus can't be used for the
// SharePoint header unique ID. Therefore, we need to filter out invalid characters and only keep the
// important ones.
StringBuilder runFixed = new();
foreach (char c in Run)
{
if (char.IsLetterOrDigit(c) || c == '-' || c == '.')
runFixed.Append(c);
}
Run = runFixed.ToString();
UniqueId = string.Concat(EquipId, "_", Run, "_", logistics.DateTimeFromSequence.ToString(timeFormat));
foreach (Detail item in _Details)
{
item.HeaderUniqueId = UniqueId;
item.UniqueId = string.Concat(item, item.UniqueId);
}
fileInfoCollection.Add(new FileInfo(logistics.ReportFullPath));
}
}
}