Match TFS Changeset 303349
This commit is contained in:
148
Adaptation/Shared/Duplicator/Description.cs
Normal file
148
Adaptation/Shared/Duplicator/Description.cs
Normal file
@ -0,0 +1,148 @@
|
||||
using Adaptation.Shared.Methods;
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using System.Text.Json;
|
||||
|
||||
namespace Adaptation.Shared.Duplicator
{

    /// <summary>
    /// Duplicator flavor of a file description: a flat bag of metadata about one
    /// processed report file. Most <see cref="IDescription"/> members return empty
    /// collections because the duplicator does not extract per-test detail data.
    /// </summary>
    public class Description : IDescription, Properties.IDescription
    {

        public int Test { get; set; }
        public int Count { get; set; }
        public int Index { get; set; }
        //
        public string EventName { get; set; }
        public string NullData { get; set; }
        public string JobID { get; set; }
        public string Sequence { get; set; }
        public string MesEntity { get; set; }
        public string ReportFullPath { get; set; }
        public string ProcessJobID { get; set; }
        public string MID { get; set; }
        public string Date { get; set; } //2021-10-23

        string IDescription.GetEventDescription()
        {
            return "File Has been read and parsed";
        }

        // Discovers the property names of a default-populated description by
        // round-tripping it through System.Text.Json.
        List<string> IDescription.GetNames(IFileRead fileRead, Logistics logistics)
        {
            List<string> results = new();
            IDescription description = GetDefault(fileRead, logistics);
            string json = JsonSerializer.Serialize(description, description.GetType());
            object @object = JsonSerializer.Deserialize<object>(json);
            if (@object is not JsonElement jsonElement)
                throw new Exception();
            foreach (JsonProperty jsonProperty in jsonElement.EnumerateObject())
                results.Add(jsonProperty.Name);
            return results;
        }

        // Duplicator has no detail rows.
        List<string> IDescription.GetDetailNames()
        {
            List<string> results = new();
            return results;
        }

        // Duplicator has no header rows.
        List<string> IDescription.GetHeaderNames()
        {
            List<string> results = new();
            return results;
        }

        IDescription IDescription.GetDisplayNames()
        {
            Description result = GetDisplayNames();
            return result;
        }

        // Duplicator has no parameters.
        List<string> IDescription.GetParameterNames()
        {
            List<string> results = new();
            return results;
        }

        // Returns the JSON properties of a default-populated description.
        JsonProperty[] IDescription.GetDefault(IFileRead fileRead, Logistics logistics)
        {
            JsonProperty[] results;
            IDescription description = GetDefault(fileRead, logistics);
            string json = JsonSerializer.Serialize(description, description.GetType());
            object @object = JsonSerializer.Deserialize<object>(json);
            // Pattern-match instead of a hard cast so an unexpected payload fails the
            // same way GetNames does (Exception) rather than InvalidCastException.
            if (@object is not JsonElement jsonElement)
                throw new Exception();
            results = jsonElement.EnumerateObject().ToArray();
            return results;
        }

        // Duplicator has no paired parameters.
        List<string> IDescription.GetPairedParameterNames()
        {
            List<string> results = new();
            return results;
        }

        // Nothing is ignored because nothing is extracted.
        List<string> IDescription.GetIgnoreParameterNames(Test test)
        {
            List<string> results = new();
            return results;
        }

        IDescription IDescription.GetDefaultDescription(IFileRead fileRead, Logistics logistics)
        {
            Description result = GetDefault(fileRead, logistics);
            return result;
        }

        // Maps each serialized property name to its display value (string form).
        Dictionary<string, string> IDescription.GetDisplayNamesJsonElement(IFileRead fileRead)
        {
            Dictionary<string, string> results = new();
            IDescription description = GetDisplayNames();
            string json = JsonSerializer.Serialize(description, description.GetType());
            JsonElement jsonElement = JsonSerializer.Deserialize<JsonElement>(json);
            foreach (JsonProperty jsonProperty in jsonElement.EnumerateObject())
            {
                if (!results.ContainsKey(jsonProperty.Name))
                    results.Add(jsonProperty.Name, string.Empty);
                if (jsonProperty.Value is JsonElement jsonPropertyValue)
                    results[jsonProperty.Name] = jsonPropertyValue.ToString();
            }
            return results;
        }

        // Duplicator emits no descriptions.
        List<IDescription> IDescription.GetDescriptions(IFileRead fileRead, Logistics logistics, List<Test> tests, IProcessData iProcessData)
        {
            List<IDescription> results = new();
            return results;
        }

        private Description GetDisplayNames()
        {
            Description result = new();
            return result;
        }

        // Builds a description populated from the current file-read and logistics state.
        private Description GetDefault(IFileRead fileRead, Logistics logistics)
        {
            Description result = new()
            {
                Test = -1,
                Count = 0,
                Index = -1,
                //
                EventName = fileRead.EventName,
                NullData = fileRead.NullData,
                JobID = fileRead.CellInstanceName,
                Sequence = logistics.Sequence.ToString(),
                MesEntity = fileRead.MesEntity,
                ReportFullPath = logistics.ReportFullPath,
                ProcessJobID = logistics.ProcessJobID,
                MID = logistics.MID,
                Date = logistics.DateTimeFromSequence.ToUniversalTime().ToString("MM/dd/yyyy HH:mm:ss")
            };
            return result;
        }

    }

}
|
834
Adaptation/Shared/FileRead.cs
Normal file
834
Adaptation/Shared/FileRead.cs
Normal file
@ -0,0 +1,834 @@
|
||||
using Adaptation.Eaf.Management.ConfigurationData.CellAutomation;
|
||||
using Adaptation.Ifx.Eaf.EquipmentConnector.File.Configuration;
|
||||
using Adaptation.Shared.Methods;
|
||||
using log4net;
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Globalization;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Text;
|
||||
using System.Text.Json;
|
||||
using System.Text.Json.Serialization;
|
||||
using System.Threading;
|
||||
|
||||
namespace Adaptation.Shared
|
||||
{
|
||||
|
||||
public class FileRead : Properties.IFileRead
|
||||
{
|
||||
|
||||
protected string _NullData;
|
||||
protected readonly ILog _Log;
|
||||
protected long _MinFileLength;
|
||||
protected Logistics _Logistics;
|
||||
protected readonly ISMTP _SMTP;
|
||||
protected readonly int _Hyphens;
|
||||
protected readonly bool _IsEvent;
|
||||
protected string _ReportFullPath;
|
||||
protected long _LastTicksDuration;
|
||||
protected readonly bool _IsEAFHosted;
|
||||
protected readonly string _EventName;
|
||||
protected readonly string _MesEntity;
|
||||
protected readonly string _TracePath;
|
||||
protected readonly bool _IsDuplicator;
|
||||
protected readonly Calendar _Calendar;
|
||||
protected readonly bool _IsSourceTimer;
|
||||
protected readonly string _VillachPath;
|
||||
protected readonly int _HyphenIsArchive;
|
||||
protected readonly string _ProgressPath;
|
||||
protected readonly string _EquipmentType;
|
||||
protected readonly int _HyphenIsXToArchive;
|
||||
protected readonly long _BreakAfterSeconds;
|
||||
protected readonly string _ExceptionSubject;
|
||||
protected readonly string _CellInstanceName;
|
||||
protected readonly string _EventNameFileRead;
|
||||
protected readonly IDescription _Description;
|
||||
protected readonly bool _UseCyclicalForDescription;
|
||||
protected readonly string _CellInstanceConnectionName;
|
||||
protected readonly string _CellInstanceConnectionNameBase;
|
||||
protected readonly Dictionary<string, List<long>> _DummyRuns;
|
||||
protected readonly Dictionary<string, string> _FileParameter;
|
||||
protected readonly string _ParameterizedModelObjectDefinitionType;
|
||||
protected readonly FileConnectorConfiguration _FileConnectorConfiguration;
|
||||
protected readonly IList<ModelObjectParameterDefinition> _ModelObjectParameterDefinitions;
|
||||
|
||||
bool Properties.IFileRead.IsEvent => _IsEvent;
|
||||
string Properties.IFileRead.NullData => _NullData;
|
||||
string Properties.IFileRead.EventName => _EventName;
|
||||
string Properties.IFileRead.MesEntity => _MesEntity;
|
||||
bool Properties.IFileRead.IsEAFHosted => _IsEAFHosted;
|
||||
string Properties.IFileRead.EquipmentType => _EquipmentType;
|
||||
string Properties.IFileRead.ReportFullPath => _ReportFullPath;
|
||||
string Properties.IFileRead.CellInstanceName => _CellInstanceName;
|
||||
string Properties.IFileRead.ExceptionSubject => _ExceptionSubject;
|
||||
bool Properties.IFileRead.UseCyclicalForDescription => _UseCyclicalForDescription;
|
||||
string Properties.IFileRead.CellInstanceConnectionName => _CellInstanceConnectionName;
|
||||
string Properties.IFileRead.ParameterizedModelObjectDefinitionType => _ParameterizedModelObjectDefinitionType;
|
||||
|
||||
/// <summary>
/// Wires up a file-read instance from EAF cell-instance configuration.
/// Derives the equipment type, event name, and duplicator flag from the
/// parameterized model object definition type, resolves configured paths,
/// and validates duplicator-specific connector settings.
/// </summary>
public FileRead(IDescription description, bool isEvent, ISMTP smtp, Dictionary<string, string> fileParameter, string cellInstanceName, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList<ModelObjectParameterDefinition> modelObjectParameters, string equipmentDictionaryName, Dictionary<string, List<long>> dummyRuns, bool useCyclicalForDescription, bool isEAFHosted, int hyphenXToArchive, int hyphenIsArchive)
{
    _SMTP = smtp;
    _IsEvent = isEvent;
    _DummyRuns = dummyRuns;
    _LastTicksDuration = 0;
    _IsEAFHosted = isEAFHosted;
    _Description = description;
    _FileParameter = fileParameter;
    _ReportFullPath = string.Empty;
    _HyphenIsArchive = hyphenIsArchive;
    _CellInstanceName = cellInstanceName;
    _HyphenIsXToArchive = hyphenXToArchive;
    _Calendar = new CultureInfo("en-US").Calendar;
    _Log = LogManager.GetLogger(typeof(FileRead));
    _UseCyclicalForDescription = useCyclicalForDescription;
    _CellInstanceConnectionName = cellInstanceConnectionName;
    _ModelObjectParameterDefinitions = modelObjectParameters;
    _FileConnectorConfiguration = fileConnectorConfiguration;
    _ParameterizedModelObjectDefinitionType = parameterizedModelObjectDefinitionType;
    _IsSourceTimer = (fileConnectorConfiguration.SourceFileFilter.StartsWith("*Timer.txt"));
    // The hyphen count in the connection name encodes the duplicator level.
    string cellInstanceConnectionNameBase = cellInstanceConnectionName.Replace("-", string.Empty);
    _Hyphens = (cellInstanceConnectionName.Length - cellInstanceConnectionNameBase.Length);
    _ExceptionSubject = string.Concat("Exception:", _CellInstanceConnectionName, _FileConnectorConfiguration?.SourceDirectoryCloaking);
    string suffix;
    // Last segment of the type name is the event name; first segment is the namespace.
    string[] segments = _ParameterizedModelObjectDefinitionType.Split('.');
    string @namespace = segments[0];
    string eventNameFileRead = "FileRead";
    string eventName = segments[segments.Length - 1];
    // A duplicator is recognized when the type's namespace equals the cell-instance name.
    bool isDuplicator = segments[0] == cellInstanceName;
    _IsDuplicator = isDuplicator;
    _CellInstanceConnectionNameBase = cellInstanceConnectionNameBase;
    // Equipment type is "<namespace>" for plain FileRead events, otherwise
    // "<namespace>_<event suffix>" (e.g. "FileReadDaily" -> "_Daily").
    if (eventName == eventNameFileRead)
        suffix = string.Empty;
    else
        suffix = string.Concat('_', eventName.Split(new string[] { eventNameFileRead }, StringSplitOptions.RemoveEmptyEntries)[1]);
    string parameterizedModelObjectDefinitionTypeAppended = string.Concat(@namespace, suffix);
    if (!isEAFHosted)
    {
        // Outside EAF hosting, fail fast on any configuration mismatch.
        if (string.IsNullOrEmpty(equipmentTypeName) || equipmentTypeName != parameterizedModelObjectDefinitionTypeAppended)
            throw new Exception(cellInstanceConnectionName);
        if (string.IsNullOrEmpty(equipmentDictionaryName) && isEvent)
            throw new Exception(cellInstanceConnectionName);
        if (!string.IsNullOrEmpty(equipmentDictionaryName) && !isEvent)
            throw new Exception(cellInstanceConnectionName);
        // if (string.IsNullOrEmpty(equipmentDictionaryName) && !isEvent)
        //     throw new Exception(cellInstanceConnectionName);
        // if (!string.IsNullOrEmpty(equipmentDictionaryName) && isEvent)
        //     throw new Exception(cellInstanceConnectionName);
    }
    // At least four "Path." parameters (Trace, Villach, Progress, ...) must be configured.
    ModelObjectParameterDefinition[] paths = GetProperties(cellInstanceConnectionName, modelObjectParameters, "Path.");
    if (paths.Length < 4)
        throw new Exception(cellInstanceConnectionName);
    if (isDuplicator)
        _MesEntity = string.Empty;
    else
        _MesEntity = GetPropertyValue(cellInstanceConnectionName, modelObjectParameters, string.Concat("CellInstance.", cellInstanceName, ".Alias"));
    _TracePath = (from l in paths where l.Name.EndsWith("Trace") select l.Value).FirstOrDefault();
    _VillachPath = (from l in paths where l.Name.EndsWith("Villach") select l.Value).FirstOrDefault();
    _ProgressPath = (from l in paths where l.Name.EndsWith("Progress") select l.Value).FirstOrDefault();
    _EventName = eventName;
    _EventNameFileRead = eventNameFileRead;
    _EquipmentType = parameterizedModelObjectDefinitionTypeAppended;
    // Timeout budget: 360 s fallback for missing/time-based configurations,
    // otherwise the configured scanning interval.
    long breakAfterSeconds;
    if (_FileConnectorConfiguration is null)
        breakAfterSeconds = 360;
    else
    {
        if (_FileConnectorConfiguration.FileScanningOption == FileConnectorConfiguration.FileScanningOptionEnum.TimeBased)
            breakAfterSeconds = 360;
        else
            breakAfterSeconds = Math.Abs(_FileConnectorConfiguration.FileScanningIntervalInSeconds.Value);
    }
    _BreakAfterSeconds = breakAfterSeconds;
    UpdateLastTicksDuration(breakAfterSeconds * 10000000);
    if (_IsDuplicator)
    {
        // Duplicators require literal (placeholder-free) target locations and a
        // "~"-prefixed source directory cloaking value.
        if (string.IsNullOrEmpty(_FileConnectorConfiguration.TargetFileLocation) || string.IsNullOrEmpty(_FileConnectorConfiguration.ErrorTargetFileLocation))
            throw new Exception("_Configuration is empty?");
        if (_FileConnectorConfiguration.TargetFileLocation.Contains('%') || _FileConnectorConfiguration.ErrorTargetFileLocation.Contains('%'))
            throw new Exception("_Configuration is incorrect for a duplicator!");
        if (!(_FileConnectorConfiguration is null))
        {
            if (string.IsNullOrEmpty(_FileConnectorConfiguration.SourceDirectoryCloaking))
                throw new Exception("SourceDirectoryCloaking is empty?");
            if (!_FileConnectorConfiguration.SourceDirectoryCloaking.StartsWith("~"))
                throw new Exception("SourceDirectoryCloaking is incorrect for a duplicator!");
        }
    }
}
|
||||
|
||||
/// <summary>
/// Returns the value of the single model-object parameter whose name equals
/// <paramref name="propertyName"/>; throws (with the connection name as the
/// message) when zero or multiple parameters match.
/// </summary>
protected string GetPropertyValue(string cellInstanceConnectionName, IList<ModelObjectParameterDefinition> modelObjectParameters, string propertyName)
{
    List<string> matches = modelObjectParameters
        .Where(definition => definition.Name == propertyName)
        .Select(definition => definition.Value)
        .ToList();
    if (matches.Count != 1)
        throw new Exception(cellInstanceConnectionName);
    return matches[0];
}
|
||||
|
||||
/// <summary>
/// Returns every model-object parameter whose name starts with
/// <paramref name="propertyNamePrefix"/>; throws (with the connection name as
/// the message) when none match.
/// </summary>
protected ModelObjectParameterDefinition[] GetProperties(string cellInstanceConnectionName, IList<ModelObjectParameterDefinition> modelObjectParameters, string propertyNamePrefix)
{
    ModelObjectParameterDefinition[] matches = modelObjectParameters
        .Where(definition => definition.Name.StartsWith(propertyNamePrefix))
        .ToArray();
    if (matches.Length == 0)
        throw new Exception(cellInstanceConnectionName);
    return matches;
}
|
||||
|
||||
/// <summary>
/// Returns every model-object parameter whose name starts with
/// <paramref name="propertyNamePrefix"/> and ends with
/// <paramref name="propertyNameSuffix"/>; throws (with the connection name as
/// the message) when none match.
/// </summary>
protected ModelObjectParameterDefinition[] GetProperties(string cellInstanceConnectionName, IList<ModelObjectParameterDefinition> modelObjectParameters, string propertyNamePrefix, string propertyNameSuffix)
{
    ModelObjectParameterDefinition[] matches = modelObjectParameters
        .Where(definition => definition.Name.StartsWith(propertyNamePrefix) && definition.Name.EndsWith(propertyNameSuffix))
        .ToArray();
    if (matches.Length == 0)
        throw new Exception(cellInstanceConnectionName);
    return matches;
}
|
||||
|
||||
/// <summary>
/// Records roughly two thirds of the supplied duration as the last-tick
/// duration, clamped to a floor of 50,000,000 ticks (5 seconds at 100 ns/tick).
/// </summary>
protected void UpdateLastTicksDuration(long ticksDuration)
{
    long clamped = Math.Max(ticksDuration, 50000000);
    _LastTicksDuration = (long)Math.Ceiling(clamped * .667);
}
|
||||
|
||||
/// <summary>
/// Blocks until <paramref name="thread"/> stops running (polling its state
/// every 500 ms), then rethrows the first exception collected by that thread,
/// after logging all of them. Clears the exception list before throwing.
/// </summary>
protected void WaitForThread(Thread thread, List<Exception> threadExceptions)
{
    if (!(thread is null))
    {
        ThreadState threadState;
        // Poll until the thread is neither Running nor WaitSleepJoin;
        // short.MaxValue iterations x 500 ms bounds the wait (~4.5 hours).
        for (short i = 0; i < short.MaxValue; i++)
        {
            if (thread is null)
                break;
            else
            {
                threadState = thread.ThreadState;
                if (threadState != ThreadState.Running && threadState != ThreadState.WaitSleepJoin)
                    break;
            }
            Thread.Sleep(500);
        }
        // The worker thread appends to threadExceptions, hence the lock.
        lock (threadExceptions)
        {
            if (threadExceptions.Any())
            {
                foreach (Exception item in threadExceptions)
                    _Log.Error(string.Concat(item.Message, Environment.NewLine, Environment.NewLine, item.StackTrace));
                // Surface the first failure to the caller; the rest are logged above.
                Exception exception = threadExceptions[0];
                threadExceptions.Clear();
                throw exception;
            }
        }
    }
}
|
||||
|
||||
/// <summary>
/// Creates a per-run progress directory beneath <paramref name="progressPath"/>
/// (layout: &lt;base&gt;\&lt;yyyy&gt;_Week_&lt;ww&gt;\&lt;MID&gt;_&lt;sequence&gt;_&lt;ticks delta&gt;) and,
/// when exception lines are supplied, best-effort writes them to a readme.txt there.
/// </summary>
protected void CreateProgressDirectory(string progressPath, Logistics logistics, int? duplicator, string[] exceptionLines)
{
    string progressDirectory;
    StringBuilder stringBuilder = new();
    if (duplicator is null || duplicator.Value == 0)
        progressDirectory = string.Concat(progressPath, @"\EquipmentIntegration");
    else
    {
        // Build a dash marker with one '-' per duplicator level and a space
        // inserted before every odd-indexed pair, e.g. 5 -> "-- -- -".
        stringBuilder.Clear();
        for (int i = 0; i < duplicator.Value; i++)
        {
            if (i > 0 && (i % 2) == 0)
                stringBuilder.Append(' ');
            stringBuilder.Append('-');
        }
        progressDirectory = string.Concat(progressPath, @"\", (duplicator.Value + 1).ToString().PadLeft(2, '0'), " ", stringBuilder).Trim();
    }
    DateTime dateTime = DateTime.Now;
    // Week-of-year per en-US calendar, zero-padded to two digits.
    string weekOfYear = _Calendar.GetWeekOfYear(dateTime, CalendarWeekRule.FirstDay, DayOfWeek.Sunday).ToString("00");
    progressDirectory = string.Concat(progressDirectory, @"\", dateTime.ToString("yyyy"), "_Week_", weekOfYear, @"\", logistics.MID, "_", logistics.Sequence, "_", DateTime.Now.Ticks - logistics.Sequence);
    if (!Directory.Exists(progressDirectory))
        Directory.CreateDirectory(progressDirectory);
    if (!(exceptionLines is null))
    {
        string fileName = string.Concat(progressDirectory, @"\readme.txt");
        // Best effort only; a failed readme write is deliberately swallowed.
        try
        { File.WriteAllLines(fileName, exceptionLines); }
        catch (Exception) { }
    }
}
|
||||
|
||||
/// <summary>
/// Moves the extract-result files from <paramref name="from"/> to
/// <paramref name="to"/>, delegating per-file handling to the Shared* helpers.
/// On error (non-null <paramref name="exception"/>) builds and records an
/// error-report line set, which is also returned; otherwise returns empty.
/// </summary>
protected string[] Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, string to, string from, string resolvedFileLocation, Exception exception)
{
    string[] results;
    bool isErrorFile = !(exception is null);
    // BUG FIX: the original computed string.Concat(to, @"\") and discarded the
    // result; the trailing separator must be assigned back to `to` so the
    // Replace/Shared* calls below see a directory path.
    if (!to.EndsWith(@"\"))
        to = string.Concat(to, @"\");
    if (!isErrorFile)
        results = Array.Empty<string>();
    else
    {
        results = new string[] { _Logistics.Sequence.ToString(), _Logistics.ReportFullPath, from, resolvedFileLocation, to, string.Empty, string.Empty, exception.Message, string.Empty, string.Empty, exception.StackTrace };
        Shared0449(to, results);
    }
    if (!(extractResults is null) && !(extractResults.Item4 is null) && extractResults.Item4.Any())
    {
        string itemFile;
        List<string> directories = new();
        foreach (FileInfo sourceFile in extractResults.Item4)
        {
            if (sourceFile.FullName != _Logistics.ReportFullPath)
            {
                // Re-root the file path from the source to the target location.
                itemFile = sourceFile.FullName.Replace(from, to);
                Shared1880(itemFile, directories, sourceFile, isErrorFile);
            }
            else if (!isErrorFile && !(_Logistics is null))
                Shared1811(to, sourceFile);
        }
        Shared0231(directories);
    }
    return results;
}
|
||||
|
||||
/// <summary>
/// Lazily enumerates all directories beneath <paramref name="path"/>
/// (breadth-first), yielding those whose leaf name contains
/// <paramref name="directoryNameSegment"/> — or every directory when the
/// segment is null or empty.
/// </summary>
protected IEnumerable<string> GetDirectoriesRecursively(string path, string directoryNameSegment = null)
{
    Queue<string> pending = new();
    pending.Enqueue(path);
    while (pending.Count != 0)
    {
        string current = pending.Dequeue();
        foreach (string child in Directory.GetDirectories(current))
        {
            pending.Enqueue(child);
            bool matches = string.IsNullOrEmpty(directoryNameSegment) || Path.GetFileName(child).Contains(directoryNameSegment);
            if (matches)
                yield return child;
        }
    }
}
|
||||
|
||||
/// <summary>
/// Locates the run directory matching the logistics sequence and, if it has
/// not already been renamed with a "_processed" suffix, renames it and creates
/// a per-sequence subdirectory inside it. Returns the resulting directory
/// (or <paramref name="duplicateDirectory"/> when already processed).
/// </summary>
protected string GetProcessedDirectory(string progressPath, Logistics logistics, DateTime dateTime, string duplicateDirectory)
{
    string result = duplicateDirectory;
    string logisticsSequence = logistics.Sequence.ToString();
    string[] matchDirectories;
    // Hosted: search the progress tree for a directory named after the sequence;
    // unhosted: assume the report's grandparent directory.
    if (!_IsEAFHosted)
        matchDirectories = new string[] { Path.GetDirectoryName(Path.GetDirectoryName(logistics.ReportFullPath)) };
    else
        matchDirectories = new string[] { GetDirectoriesRecursively(Path.GetDirectoryName(progressPath), logisticsSequence).FirstOrDefault() };
    // Fallback: scan the duplicate directory tree for "*<sequence>*".
    if (matchDirectories.Length == 0 || string.IsNullOrEmpty(matchDirectories[0]))
        matchDirectories = Directory.GetDirectories(duplicateDirectory, string.Concat('*', logisticsSequence, '*'), SearchOption.AllDirectories);
    if ((matchDirectories is null) || matchDirectories.Length != 1)
        throw new Exception("Didn't find directory by logistics sequence");
    if (!matchDirectories[0].Contains("_processed"))
    {
        // Rename to "<prefix><date>_<ticks delta>_processed", then create a
        // per-sequence subdirectory inside the renamed directory.
        result = string.Concat(matchDirectories[0].Split(new string[] { logisticsSequence }, StringSplitOptions.None)[0], logistics.DateTimeFromSequence.ToString("yyyy-MM-dd_hh;mm_tt_"), dateTime.Ticks - logistics.Sequence, "_processed");
        Directory.Move(matchDirectories[0], result);
        result = string.Concat(result, @"\", logistics.Sequence);
        if (!Directory.Exists(result))
            Directory.CreateDirectory(result);
    }
    return result;
}
|
||||
|
||||
/// <summary>
/// Writes each scope-info payload to a ".pdsfc" companion file in the
/// processed directory (or to the scope's absolute path when its FileName
/// starts with "\"), then copies the original report there as "&lt;name&gt;.pdsf".
/// Returns the processed directory.
/// </summary>
protected string WriteScopeInfo(string progressPath, Logistics logistics, DateTime dateTime, string duplicateDirectory, List<Tuple<Properties.IScopeInfo, string>> tuples)
{
    string result = GetProcessedDirectory(progressPath, logistics, dateTime, duplicateDirectory);
    string tupleFile;
    string fileName = Path.GetFileNameWithoutExtension(logistics.ReportFullPath);
    string duplicateFile = string.Concat(result, @"\", fileName, ".pdsf");
    foreach (Tuple<Properties.IScopeInfo, string> tuple in tuples)
    {
        // A leading "\" marks an already-absolute target path.
        if (tuple.Item1.FileName.StartsWith(@"\"))
            tupleFile = tuple.Item1.FileName;
        else
            tupleFile = string.Concat(result, @"\", fileName, "_", tuple.Item1.FileNameWithoutExtension, ".pdsfc");
        File.WriteAllText(tupleFile, tuple.Item2);
    }
    File.Copy(logistics.ReportFullPath, duplicateFile, overwrite: true);
    return result;
}
|
||||
|
||||
/// <summary>
/// Resolves a scope-info file name into a concrete path under
/// <paramref name="duplicateDirectory"/>, substituting the %RDS% placeholder
/// (second '-'-separated segment of the MID) and any %DateTime:&lt;format&gt;%
/// placeholder (formatted from the logistics sequence timestamp).
/// Throws when any placeholder remains unresolved.
/// </summary>
protected string GetTupleFile(Logistics logistics, Properties.IScopeInfo scopeInfo, string duplicateDirectory)
{
    string result;
    string rds;
    string dateValue;
    string datePlaceholder;
    string[] segments = logistics.MID.Split('-');
    // RDS is the second MID segment; keep the literal placeholder when absent
    // so the final Contains('%') check can flag it.
    if (segments.Length < 2)
        rds = "%RDS%";
    else
        rds = segments[1];
    segments = scopeInfo.FileName.Split(new string[] { "DateTime:" }, StringSplitOptions.RemoveEmptyEntries);
    // NOTE(review): with RemoveEmptyEntries a FileName without "DateTime:" splits
    // to length 1, not 0, so segments[1] below would throw — presumably callers
    // always include a DateTime: placeholder here; confirm.
    if (segments.Length == 0)
        result = string.Concat(duplicateDirectory, @"\", scopeInfo.FileNameWithoutExtension.Replace("%RDS%", rds));
    else
    {
        datePlaceholder = "%DateTime%";
        // segments[0] after this split is the date format string, e.g. "yyyyMMdd".
        segments = segments[1].Split('%');
        dateValue = logistics.DateTimeFromSequence.ToString(segments[0]);
        // Recover the full "%...DateTime:<format>...%" placeholder text so it can
        // be replaced verbatim.
        foreach (string segment in scopeInfo.FileName.Split('%'))
        {
            if (!segment.Contains(segments[0]))
                continue;
            datePlaceholder = string.Concat('%', segment, '%');
        }
        result = string.Concat(duplicateDirectory, @"\", scopeInfo.FileName.Replace("%RDS%", rds).Replace(datePlaceholder, dateValue));
    }
    if (result.Contains('%'))
        throw new Exception("Placeholder exists!");
    return result;
}
|
||||
|
||||
/// <summary>
/// Writes the scope-info companion files (or just tracks
/// <paramref name="duplicateFile"/> when there are none), then waits for the
/// downstream consumer to delete them. After a pre-wait, polls every 500 ms;
/// if the consumer has not removed every file before the configured timeout,
/// deletes the leftovers and throws. Timeouts above an hour skip the polling
/// phase entirely (fire-and-forget after a 30 s pre-wait).
/// </summary>
protected void WaitForFileConsumption(string sourceDirectoryCloaking, Logistics logistics, DateTime dateTime, string successDirectory, string duplicateDirectory, string duplicateFile, List<Tuple<Properties.IScopeInfo, string>> tuples)
{
    bool check;
    long preWait;
    string tupleFile;
    List<int> consumedFileIndices = new();
    List<string> duplicateFiles = new();
    bool moreThanAnHour = (_BreakAfterSeconds > 3600);
    StringBuilder stringBuilder = new();
    long breakAfter = dateTime.AddSeconds(_BreakAfterSeconds).Ticks;
    // Pre-wait gives the consumer time to pick files up before we start polling.
    if (moreThanAnHour)
        preWait = dateTime.AddSeconds(30).Ticks;
    else
        preWait = dateTime.AddTicks(_LastTicksDuration).Ticks;
    if (!tuples.Any())
        duplicateFiles.Add(duplicateFile);
    string fileName = Path.GetFileNameWithoutExtension(logistics.ReportFullPath);
    string successFile = string.Concat(successDirectory, @"\", Path.GetFileName(logistics.ReportFullPath));
    foreach (Tuple<Properties.IScopeInfo, string> tuple in tuples)
    {
        // Absolute path, plain name, or a placeholder name needing resolution.
        if (tuple.Item1.FileName.StartsWith(@"\"))
            tupleFile = tuple.Item1.FileName;
        else if (!tuple.Item1.FileName.Contains('%'))
            tupleFile = string.Concat(duplicateDirectory, @"\", fileName, "_", tuple.Item1.FileNameWithoutExtension, ".pdsfc");
        else
            tupleFile = GetTupleFile(logistics, tuple.Item1, duplicateDirectory);
        duplicateFiles.Add(tupleFile);
        File.WriteAllText(tupleFile, tuple.Item2);
    }
    // Sleep out the pre-wait window in 500 ms slices.
    for (short i = 0; i < short.MaxValue; i++)
    {
        if (DateTime.Now.Ticks > preWait)
            break;
        Thread.Sleep(500);
    }
    if (!moreThanAnHour)
    {
        for (short z = 0; z < short.MaxValue; z++)
        {
            try
            {
                // Consumption is confirmed by the success file appearing (when a
                // success directory is configured) and every duplicate file vanishing.
                check = (string.IsNullOrEmpty(successDirectory) || File.Exists(successFile));
                if (check)
                {
                    consumedFileIndices.Clear();
                    for (int i = 0; i < duplicateFiles.Count; i++)
                    {
                        if (!File.Exists(duplicateFiles[i]))
                            consumedFileIndices.Add(i);
                    }
                    if (consumedFileIndices.Count == duplicateFiles.Count)
                        break;
                }
            }
            catch (Exception) { }
            if (DateTime.Now.Ticks > breakAfter)
            {
                // Timeout: best-effort clean-up of leftovers, then report which
                // files were never consumed.
                for (int i = 0; i < duplicateFiles.Count; i++)
                {
                    if (File.Exists(duplicateFiles[i]))
                    {
                        try
                        { File.Delete(duplicateFiles[i]); }
                        catch (Exception) { }
                        stringBuilder.Append("<").Append(duplicateFiles[i]).Append("> ");
                    }
                }
                throw new Exception(string.Concat("After {", _BreakAfterSeconds, "} seconds, right side of {", sourceDirectoryCloaking, "} didn't consume file(s) ", stringBuilder));
            }
            Thread.Sleep(500);
        }
    }
}
|
||||
|
||||
/// <summary>
/// Stores a %key% replacement value, but only when that placeholder actually
/// appears in one of the connector's target locations/names (or when there is
/// no connector configuration at all).
/// </summary>
protected void SetFileParameter(string key, string value)
{
    // Build the "%key%" token once instead of four times.
    string placeholder = string.Concat("%", key, "%");
    if (_FileConnectorConfiguration is null
        || _FileConnectorConfiguration.TargetFileLocation.Contains(placeholder)
        || _FileConnectorConfiguration.ErrorTargetFileLocation.Contains(placeholder)
        || _FileConnectorConfiguration.TargetFileName.Contains(placeholder)
        || _FileConnectorConfiguration.ErrorTargetFileName.Contains(placeholder))
    {
        // Indexer assignment adds or overwrites in a single lookup
        // (was ContainsKey + branch, a double lookup).
        _FileParameter[key] = value;
    }
}
|
||||
|
||||
/// <summary>
/// Publishes the logistics MID as the LotID file parameter; when
/// <paramref name="includeLogisticsSequence"/> is set, the key becomes
/// "LotIDWithLogisticsSequence" and the value is suffixed with the sequence
/// and the tick delta since that sequence.
/// </summary>
protected void SetFileParameterLotIDToLogisticsMID(bool includeLogisticsSequence = true)
{
    string key = includeLogisticsSequence ? "LotIDWithLogisticsSequence" : "LotID";
    string value = string.Concat(_Logistics.MID, "_", _Logistics.Sequence, "_", DateTime.Now.Ticks - _Logistics.Sequence);
    SetFileParameter(key, value);
}
|
||||
|
||||
/// <summary>
/// Publishes <paramref name="value"/> as the LotID file parameter; when
/// <paramref name="includeLogisticsSequence"/> is set, the key becomes
/// "LotIDWithLogisticsSequence" and the value is suffixed with the sequence
/// and the tick delta since that sequence.
/// </summary>
protected void SetFileParameterLotID(string value, bool includeLogisticsSequence = true)
{
    string key;
    if (includeLogisticsSequence)
    {
        key = "LotIDWithLogisticsSequence";
        value = string.Concat(value, "_", _Logistics.Sequence, "_", DateTime.Now.Ticks - _Logistics.Sequence);
    }
    else
    {
        key = "LotID";
    }
    SetFileParameter(key, value);
}
|
||||
|
||||
/// <summary>
/// Serializes the JSON elements to PDSF text and writes them to a
/// "&lt;MesEntity&gt;_&lt;sequence&gt;.ipdsf" file under either the Villach target tree
/// or the trace source tree, creating the directory as needed. For stale runs
/// (more than 600 s since the sequence timestamp) best-effort back-dates the
/// file's last-write time to the sequence timestamp.
/// </summary>
protected void WritePDSF(IFileRead fileRead, JsonElement[] jsonElements)
{
    string directory;
    // Villach target applies when the connection name is not rooted in the cell
    // instance but its hyphen-stripped base equals the equipment type.
    if (!_CellInstanceConnectionName.StartsWith(_CellInstanceName) && _CellInstanceConnectionNameBase == _EquipmentType)
        directory = Path.Combine(_VillachPath, _EquipmentType, "Target");
    else
        directory = Path.Combine(_TracePath, _EquipmentType, "Source", _CellInstanceName, _CellInstanceConnectionName);
    if (!Directory.Exists(directory))
        Directory.CreateDirectory(directory);
    string file = Path.Combine(directory, string.Concat(_Logistics.MesEntity, "_", _Logistics.Sequence, ".ipdsf"));
    string lines = ProcessDataStandardFormat.GetPDSFText(fileRead, _Logistics, jsonElements, logisticsText: string.Empty);
    File.WriteAllText(file, lines);
    if (_Logistics.TotalSecondsSinceLastWriteTimeFromSequence > 600)
    {
        // Best effort; some filesystems/permissions may refuse the timestamp change.
        try
        { File.SetLastWriteTime(file, _Logistics.DateTimeFromSequence); }
        catch (Exception) { }
    }
}
|
||||
|
||||
/// <summary>
/// Post-extract move step. For successful duplicator runs, triggers the
/// archive helpers keyed off the hyphen level and creates the progress
/// directory. When not EAF-hosted, resolves %placeholders% in the target
/// location from the file parameters and performs the actual file move.
/// </summary>
protected void Move(IFileRead fileRead, Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, Exception exception)
{
    bool isErrorFile = !(exception is null);
    if (!isErrorFile && _IsDuplicator)
    {
        // Hyphen count selects the archive behavior for this duplicator level.
        if (_Hyphens == _HyphenIsXToArchive)
            Shared0192();
        else if (_IsEAFHosted && _Hyphens == _HyphenIsArchive)
            fileRead.MoveArchive();
        if (_IsEAFHosted && !string.IsNullOrEmpty(_ProgressPath))
            CreateProgressDirectory(_ProgressPath, _Logistics, _Hyphens, exceptionLines: null);
    }
    if (!_IsEAFHosted)
    {
        string to;
        // Trim a trailing separator off the configured target location.
        if (!_FileConnectorConfiguration.TargetFileLocation.EndsWith(Path.DirectorySeparatorChar.ToString()))
            to = _FileConnectorConfiguration.TargetFileLocation;
        else
            to = Path.GetDirectoryName(_FileConnectorConfiguration.TargetFileLocation);
        foreach (KeyValuePair<string, string> keyValuePair in _FileParameter)
            to = to.Replace(string.Concat('%', keyValuePair.Key, '%'), keyValuePair.Value);
        // Unresolved placeholders can only be filled when EAF hosting supplies them.
        if (to.Contains("%"))
            _Log.Debug("Can't debug without EAF Hosting");
        else
            Move(extractResults, to, _FileConnectorConfiguration.SourceFileLocation, resolvedFileLocation: string.Empty, exception: null);
    }
}
|
||||
|
||||
/// <summary>
/// Walks every JSON property of every extract result, computing the display
/// description and the event value (scalar for header names, a cyclical
/// index/value list otherwise). Processes only the first result when cyclical
/// descriptions are enabled.
/// </summary>
protected void TriggerEvents(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, List<string> headerNames, Dictionary<string, string> keyValuePairs)
{
    object value;
    string description;
    List<object[]> list;
    for (int i = 0; i < extractResults.Item3.Length; i++)
    {
        _Log.Debug(string.Concat("TriggerEvent - {", _Logistics.ReportFullPath, "} ", i, " of ", extractResults.Item3.Length));
        foreach (JsonProperty jsonProperty in extractResults.Item3[i].EnumerateObject())
        {
            // Description comes from the part of the mapped value before the first '|'.
            if (jsonProperty.Value.ValueKind != JsonValueKind.String || !keyValuePairs.ContainsKey(jsonProperty.Name))
                description = string.Empty;
            else
                description = keyValuePairs[jsonProperty.Name].Split('|')[0];
            if (!_UseCyclicalForDescription || headerNames.Contains(jsonProperty.Name))
                value = jsonProperty.Value.ToString();
            else
            {
                // Cyclical value: (index, value) pairs gathered across all results.
                list = new List<object[]>();
                for (int z = 0; z < extractResults.Item3.Length; z++)
                    list.Add(new object[] { z, extractResults.Item3[z].GetProperty(jsonProperty.Name).ToString() });
                value = list;
            }
        }
        // NOTE(review): `description` and `value` are computed but never consumed in
        // this method — presumably placeholders for the hosted event-trigger call;
        // confirm before removing.
        if (_UseCyclicalForDescription)
            break;
    }
}
|
||||
|
||||
protected Tuple<string, Test[], JsonElement[], List<FileInfo>> ReExtract(IFileRead fileRead, List<string> headerNames, Dictionary<string, string> keyValuePairs)
|
||||
{
|
||||
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
|
||||
if (!Directory.Exists(_FileConnectorConfiguration.SourceFileLocation))
|
||||
results = null;
|
||||
else
|
||||
{
|
||||
string[] segments;
|
||||
string[] matches = null;
|
||||
foreach (string subSourceFileFilter in _FileConnectorConfiguration.SourceFileFilters)
|
||||
{
|
||||
segments = subSourceFileFilter.Split('\\');
|
||||
if (_FileConnectorConfiguration.IncludeSubDirectories.Value)
|
||||
matches = Directory.GetFiles(_FileConnectorConfiguration.SourceFileLocation, segments.Last(), SearchOption.AllDirectories);
|
||||
else
|
||||
matches = Directory.GetFiles(_FileConnectorConfiguration.SourceFileLocation, segments.Last(), SearchOption.TopDirectoryOnly);
|
||||
if (matches.Any())
|
||||
break;
|
||||
}
|
||||
if (matches is null || !matches.Any())
|
||||
results = null;
|
||||
else
|
||||
{
|
||||
_ReportFullPath = matches[0];
|
||||
results = fileRead.GetExtractResult(_ReportFullPath, _EventName);
|
||||
if (!_IsEAFHosted)
|
||||
TriggerEvents(results, headerNames, keyValuePairs);
|
||||
}
|
||||
}
|
||||
return results;
|
||||
}
|
||||
|
||||
protected Dictionary<Test, List<Properties.IDescription>> GetKeyValuePairs(List<Properties.IDescription> descriptions)
|
||||
{
|
||||
Dictionary<Test, List<Properties.IDescription>> results = new();
|
||||
Test testKey;
|
||||
for (int i = 0; i < descriptions.Count; i++)
|
||||
{
|
||||
testKey = (Test)descriptions[i].Test;
|
||||
if (!results.ContainsKey(testKey))
|
||||
results.Add(testKey, new List<Properties.IDescription>());
|
||||
results[testKey].Add(descriptions[i]);
|
||||
}
|
||||
return results;
|
||||
}
|
||||
|
||||
protected List<Properties.IDescription> GetDuplicatorDescriptions(JsonElement[] jsonElements)
|
||||
{
|
||||
List<Properties.IDescription> results = new();
|
||||
Duplicator.Description description;
|
||||
JsonSerializerOptions jsonSerializerOptions = new() { NumberHandling = JsonNumberHandling.AllowReadingFromString | JsonNumberHandling.WriteAsString };
|
||||
foreach (JsonElement jsonElement in jsonElements)
|
||||
{
|
||||
if (jsonElement.ValueKind != JsonValueKind.Object)
|
||||
throw new Exception();
|
||||
description = JsonSerializer.Deserialize<Duplicator.Description>(jsonElement.ToString(), jsonSerializerOptions);
|
||||
results.Add(description);
|
||||
}
|
||||
return results;
|
||||
}
|
||||
|
||||
protected Tuple<Test[], Dictionary<Test, List<Properties.IDescription>>> GetTuple(IFileRead fileRead, IEnumerable<Properties.IDescription> descriptions, bool extra = false)
|
||||
{
|
||||
Tuple<Test[], Dictionary<Test, List<Properties.IDescription>>> result;
|
||||
Dictionary<Test, List<Properties.IDescription>> keyValuePairs = GetKeyValuePairs(descriptions.ToList());
|
||||
Test[] tests = (from l in keyValuePairs select l.Key).ToArray();
|
||||
fileRead.CheckTests(tests, extra);
|
||||
result = new Tuple<Test[], Dictionary<Test, List<Properties.IDescription>>>(tests, keyValuePairs);
|
||||
return result;
|
||||
}
|
||||
|
||||
protected void Shared0449(string to, string[] exceptionLines)
|
||||
{
|
||||
if (_IsDuplicator)
|
||||
CreateProgressDirectory(_ProgressPath, _Logistics, _Hyphens, exceptionLines);
|
||||
else
|
||||
{
|
||||
string fileName = string.Concat(to, @"\readme.txt");
|
||||
try
|
||||
{
|
||||
if (!Directory.Exists(to))
|
||||
Directory.CreateDirectory(to);
|
||||
File.WriteAllLines(fileName, exceptionLines);
|
||||
}
|
||||
catch (Exception ex) { _Log.Error(ex.Message); }
|
||||
}
|
||||
}
|
||||
|
||||
protected void Shared1880(string itemFile, List<string> directories, FileInfo sourceFile, bool isErrorFile)
|
||||
{
|
||||
string itemDirectory;
|
||||
directories.Add(Path.GetDirectoryName(sourceFile.FullName));
|
||||
itemDirectory = Path.GetDirectoryName(itemFile);
|
||||
FileConnectorConfiguration.PostProcessingModeEnum processingModeEnum;
|
||||
if (!isErrorFile)
|
||||
processingModeEnum = _FileConnectorConfiguration.PostProcessingMode.Value;
|
||||
else
|
||||
processingModeEnum = _FileConnectorConfiguration.ErrorPostProcessingMode.Value;
|
||||
if (processingModeEnum != FileConnectorConfiguration.PostProcessingModeEnum.Delete && !Directory.Exists(itemDirectory))
|
||||
{
|
||||
Directory.CreateDirectory(itemDirectory);
|
||||
FileInfo fileInfo = new(_Logistics.ReportFullPath);
|
||||
Directory.SetCreationTime(itemDirectory, fileInfo.LastWriteTime);
|
||||
}
|
||||
if (_IsEAFHosted)
|
||||
{
|
||||
switch (processingModeEnum)
|
||||
{
|
||||
case FileConnectorConfiguration.PostProcessingModeEnum.Move:
|
||||
File.Move(sourceFile.FullName, itemFile);
|
||||
break;
|
||||
case FileConnectorConfiguration.PostProcessingModeEnum.Copy:
|
||||
File.Copy(sourceFile.FullName, itemFile);
|
||||
break;
|
||||
case FileConnectorConfiguration.PostProcessingModeEnum.Delete:
|
||||
File.Delete(sourceFile.FullName);
|
||||
break;
|
||||
default:
|
||||
throw new Exception();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
protected void Shared1811(string to, FileInfo sourceFile)
|
||||
{
|
||||
if (!_IsDuplicator && _FileConnectorConfiguration.SourceFileFilter != "*" && sourceFile.Exists && sourceFile.Length < _MinFileLength)
|
||||
{
|
||||
string directoryName = Path.GetFileName(to);
|
||||
string jobIdDirectory = Path.GetDirectoryName(to);
|
||||
DateTime dateTime = DateTime.Now.AddMinutes(-15);
|
||||
string weekOfYear = _Calendar.GetWeekOfYear(_Logistics.DateTimeFromSequence, CalendarWeekRule.FirstDay, DayOfWeek.Sunday).ToString("00");
|
||||
string weekDirectory = string.Concat(_Logistics.DateTimeFromSequence.ToString("yyyy"), "_Week_", weekOfYear, @"\", _Logistics.DateTimeFromSequence.ToString("yyyy-MM-dd"));
|
||||
string destinationDirectory = string.Concat(jobIdDirectory, @"\_ Ignore 100 bytes\", weekDirectory, @"\", directoryName);
|
||||
if (!Directory.Exists(destinationDirectory))
|
||||
Directory.CreateDirectory(destinationDirectory);
|
||||
File.Move(sourceFile.FullName, string.Concat(destinationDirectory, @"\", sourceFile.Name));
|
||||
try
|
||||
{
|
||||
string[] checkDirectories = Directory.GetDirectories(jobIdDirectory, "*", SearchOption.TopDirectoryOnly);
|
||||
foreach (string checkDirectory in checkDirectories)
|
||||
{
|
||||
if (!checkDirectory.Contains("_"))
|
||||
continue;
|
||||
if (Directory.GetDirectories(checkDirectory, "*", SearchOption.TopDirectoryOnly).Any())
|
||||
continue;
|
||||
if (Directory.GetFiles(checkDirectory, "*", SearchOption.TopDirectoryOnly).Any())
|
||||
continue;
|
||||
if (Directory.GetDirectories(checkDirectory, "*", SearchOption.AllDirectories).Any())
|
||||
continue;
|
||||
if (Directory.GetFiles(checkDirectory, "*", SearchOption.AllDirectories).Any())
|
||||
continue;
|
||||
if (new DirectoryInfo(checkDirectory).CreationTime > dateTime)
|
||||
continue;
|
||||
Directory.Delete(checkDirectory, recursive: false);
|
||||
}
|
||||
}
|
||||
catch (Exception) { throw; }
|
||||
}
|
||||
}
|
||||
|
||||
protected void Shared0231(List<string> directories)
|
||||
{
|
||||
if (_FileConnectorConfiguration.PostProcessingMode != FileConnectorConfiguration.PostProcessingModeEnum.Copy)
|
||||
{
|
||||
foreach (string directory in (from l in directories orderby l.Split('\\').Length descending select l).Distinct())
|
||||
{
|
||||
if (Directory.Exists(directory) && !Directory.GetFiles(directory).Any())
|
||||
Directory.Delete(directory);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
protected void Shared0413(DateTime dateTime, bool isDummyRun, string successDirectory, string duplicateDirectory, List<Tuple<Properties.IScopeInfo, string>> tuples, string duplicateFile)
|
||||
{
|
||||
if (!isDummyRun && _IsEAFHosted)
|
||||
WaitForFileConsumption(_FileConnectorConfiguration.SourceDirectoryCloaking, _Logistics, dateTime, successDirectory, duplicateDirectory, duplicateFile, tuples);
|
||||
else
|
||||
{
|
||||
long breakAfter = DateTime.Now.AddSeconds(_FileConnectorConfiguration.ConnectionRetryInterval.Value).Ticks;
|
||||
for (short i = 0; i < short.MaxValue; i++)
|
||||
{
|
||||
if (!_IsEAFHosted || DateTime.Now.Ticks > breakAfter)
|
||||
break;
|
||||
Thread.Sleep(500);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
protected void Shared0607(string reportFullPath, string duplicateDirectory, string logisticsSequence, string destinationDirectory)
|
||||
{
|
||||
if (destinationDirectory == duplicateDirectory)
|
||||
throw new Exception("Check Target File Folder for %LotIDWithLogisticsSequence%_in process on CI (not Duplicator)");
|
||||
if (destinationDirectory.EndsWith(logisticsSequence))
|
||||
destinationDirectory = Path.GetDirectoryName(destinationDirectory);
|
||||
string[] deleteFiles = Directory.GetFiles(destinationDirectory, "*", SearchOption.AllDirectories);
|
||||
if (deleteFiles.Length > 250)
|
||||
throw new Exception("Safety net!");
|
||||
foreach (string file in deleteFiles)
|
||||
File.Delete(file);
|
||||
Directory.Delete(destinationDirectory, recursive: true);
|
||||
File.Delete(reportFullPath);
|
||||
}
|
||||
|
||||
protected void Shared0192()
|
||||
{
|
||||
if (!string.IsNullOrEmpty(_Logistics.ReportFullPath))
|
||||
{
|
||||
FileInfo fileInfo = new(_Logistics.ReportFullPath);
|
||||
if (fileInfo.Exists && fileInfo.LastWriteTime < fileInfo.CreationTime)
|
||||
File.SetLastWriteTime(_Logistics.ReportFullPath, fileInfo.CreationTime);
|
||||
}
|
||||
}
|
||||
|
||||
protected string[] Shared1567(string reportFullPath, List<Tuple<Properties.IScopeInfo, string>> tuples)
|
||||
{
|
||||
string[] results;
|
||||
string historicalText;
|
||||
string logisticsSequence = _Logistics.Sequence.ToString();
|
||||
string jobIdDirectory = string.Concat(Path.GetDirectoryName(Path.GetDirectoryName(_FileConnectorConfiguration.TargetFileLocation)), @"\", _Logistics.JobID);
|
||||
if (!Directory.Exists(jobIdDirectory))
|
||||
Directory.CreateDirectory(jobIdDirectory);
|
||||
string[] matchDirectories;
|
||||
if (!_IsEAFHosted)
|
||||
matchDirectories = new string[] { Path.GetDirectoryName(Path.GetDirectoryName(reportFullPath)) };
|
||||
else
|
||||
matchDirectories = Directory.GetDirectories(jobIdDirectory, string.Concat(_Logistics.MID, '*', logisticsSequence, '*'), SearchOption.TopDirectoryOnly);
|
||||
if ((matchDirectories is null) || matchDirectories.Length != 1)
|
||||
throw new Exception("Didn't find directory by logistics sequence");
|
||||
string fileName = Path.GetFileNameWithoutExtension(reportFullPath);
|
||||
string sequenceDirectory = string.Concat(matchDirectories[0], @"\", logisticsSequence);
|
||||
if (!Directory.Exists(sequenceDirectory))
|
||||
Directory.CreateDirectory(sequenceDirectory);
|
||||
foreach (Tuple<Properties.IScopeInfo, string> tuple in tuples)
|
||||
{
|
||||
fileName = string.Concat(sequenceDirectory, @"\", fileName, "_", tuple.Item1.FileNameWithoutExtension, ".pdsfc");
|
||||
if (_IsEAFHosted)
|
||||
File.WriteAllText(fileName, tuple.Item2);
|
||||
else
|
||||
{
|
||||
if (File.Exists(fileName))
|
||||
{
|
||||
historicalText = File.ReadAllText(fileName);
|
||||
if (tuple.Item2 != historicalText)
|
||||
throw new Exception("File doesn't match historical!");
|
||||
}
|
||||
}
|
||||
}
|
||||
results = matchDirectories;
|
||||
return results;
|
||||
}
|
||||
|
||||
        /// <summary>
        /// Archives a completed destination directory under "&lt;root&gt;\Processed\&lt;JobID&gt;",
        /// suffixing the new name with a timestamp and a ticks delta so repeated runs never
        /// collide, then copies the source report and writes its JSON next to it.
        /// NOTE(review): statement order matters — the whole directory is moved first, and
        /// only then is the sequence subdirectory ensured before the copy/write.
        /// </summary>
        protected void Shared1277(string reportFullPath, string destinationDirectory, string logisticsSequence, string jobIdDirectory, string json)
        {
            string ecCharacterizationSi = Path.GetDirectoryName(Path.GetDirectoryName(jobIdDirectory));
            string destinationJobIdDirectory = string.Concat(ecCharacterizationSi, @"\Processed\", _Logistics.JobID);
            if (!Directory.Exists(destinationJobIdDirectory))
                Directory.CreateDirectory(destinationJobIdDirectory);
            // Unique final name: prefix before the logistics sequence + sequence timestamp + elapsed ticks.
            destinationJobIdDirectory = string.Concat(destinationJobIdDirectory, @"\", Path.GetFileName(destinationDirectory).Split(new string[] { logisticsSequence }, StringSplitOptions.None)[0], _Logistics.DateTimeFromSequence.ToString("yyyy-MM-dd_hh;mm_tt_"), DateTime.Now.Ticks - _Logistics.Sequence);
            string sequenceDirectory = string.Concat(destinationJobIdDirectory, @"\", logisticsSequence);
            string jsonFileName = string.Concat(sequenceDirectory, @"\", Path.GetFileNameWithoutExtension(reportFullPath), ".json");
            Directory.Move(destinationDirectory, destinationJobIdDirectory);
            if (!Directory.Exists(sequenceDirectory))
                Directory.CreateDirectory(sequenceDirectory);
            File.Copy(reportFullPath, string.Concat(sequenceDirectory, @"\", Path.GetFileName(reportFullPath)), overwrite: true);
            File.WriteAllText(jsonFileName, json);
        }
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
// 2021-12-17 -> Shared - FileRead
|
223
Adaptation/Shared/Logistics.cs
Normal file
223
Adaptation/Shared/Logistics.cs
Normal file
@ -0,0 +1,223 @@
|
||||
using Adaptation.Shared.Methods;
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
|
||||
namespace Adaptation.Shared
|
||||
{
|
||||
|
||||
public class Logistics : ILogistics
|
||||
{
|
||||
|
||||
public object NullData { get; private set; }
|
||||
public string JobID { get; private set; } //CellName
|
||||
public long Sequence { get; private set; } //Ticks
|
||||
public DateTime DateTimeFromSequence { get; private set; }
|
||||
public double TotalSecondsSinceLastWriteTimeFromSequence { get; private set; }
|
||||
public string MesEntity { get; private set; } //SPC
|
||||
public string ReportFullPath { get; private set; } //Extract file
|
||||
public string ProcessJobID { get; set; } //Reactor (duplicate but I want it in the logistics)
|
||||
public string MID { get; set; } //Lot & Pocket || Lot
|
||||
public List<string> Tags { get; set; }
|
||||
public List<string> Logistics1 { get; set; }
|
||||
public List<Logistics2> Logistics2 { get; set; }
|
||||
|
||||
public Logistics(IFileRead fileRead)
|
||||
{
|
||||
DateTime dateTime = DateTime.Now;
|
||||
NullData = null;
|
||||
Sequence = dateTime.Ticks;
|
||||
DateTimeFromSequence = dateTime;
|
||||
JobID = fileRead.CellInstanceName;
|
||||
TotalSecondsSinceLastWriteTimeFromSequence = (DateTime.Now - dateTime).TotalSeconds;
|
||||
MesEntity = DefaultMesEntity(dateTime);
|
||||
ReportFullPath = string.Empty;
|
||||
ProcessJobID = nameof(ProcessJobID);
|
||||
MID = nameof(MID);
|
||||
Tags = new List<string>();
|
||||
Logistics1 = new string[] { string.Concat("LOGISTICS_1", '\t', "A_JOBID=", JobID, ";A_MES_ENTITY=", MesEntity, ";") }.ToList();
|
||||
Logistics2 = new List<Logistics2>();
|
||||
}
|
||||
|
||||
public Logistics(IFileRead fileRead, string reportFullPath, bool useSplitForMID, int? fileInfoLength = null)
|
||||
{
|
||||
if (string.IsNullOrEmpty(fileRead.CellInstanceName))
|
||||
throw new Exception();
|
||||
if (string.IsNullOrEmpty(fileRead.MesEntity))
|
||||
throw new Exception();
|
||||
NullData = fileRead.NullData;
|
||||
FileInfo fileInfo = new(reportFullPath);
|
||||
DateTime dateTime = fileInfo.LastWriteTime;
|
||||
if (fileInfoLength.HasValue && fileInfo.Length < fileInfoLength.Value)
|
||||
dateTime = dateTime.AddTicks(-1);
|
||||
JobID = fileRead.CellInstanceName;
|
||||
Sequence = dateTime.Ticks;
|
||||
DateTimeFromSequence = dateTime;
|
||||
TotalSecondsSinceLastWriteTimeFromSequence = (DateTime.Now - dateTime).TotalSeconds;
|
||||
MesEntity = fileRead.MesEntity;
|
||||
ReportFullPath = fileInfo.FullName;
|
||||
ProcessJobID = nameof(ProcessJobID);
|
||||
string fileNameWithoutExtension = Path.GetFileNameWithoutExtension(fileInfo.FullName);
|
||||
if (useSplitForMID)
|
||||
{
|
||||
if (fileNameWithoutExtension.IndexOf(".") > -1)
|
||||
fileNameWithoutExtension = fileNameWithoutExtension.Split('.')[0].Trim();
|
||||
if (fileNameWithoutExtension.IndexOf("_") > -1)
|
||||
fileNameWithoutExtension = fileNameWithoutExtension.Split('_')[0].Trim();
|
||||
if (fileNameWithoutExtension.IndexOf("-") > -1)
|
||||
fileNameWithoutExtension = fileNameWithoutExtension.Split('-')[0].Trim();
|
||||
}
|
||||
MID = string.Concat(fileNameWithoutExtension.Substring(0, 1).ToUpper(), fileNameWithoutExtension.Substring(1).ToLower());
|
||||
Tags = new List<string>();
|
||||
Logistics1 = new string[] { string.Concat("LOGISTICS_1", '\t', "A_JOBID=", JobID, ";A_MES_ENTITY=", MesEntity, ";") }.ToList();
|
||||
Logistics2 = new List<Logistics2>();
|
||||
}
|
||||
|
||||
public Logistics(string reportFullPath, string logistics)
|
||||
{
|
||||
string key;
|
||||
DateTime dateTime;
|
||||
string[] segments;
|
||||
Logistics1 = logistics.Split(new string[] { Environment.NewLine }, StringSplitOptions.RemoveEmptyEntries).ToList();
|
||||
if (!Logistics1.Any() || !Logistics1[0].StartsWith("LOGISTICS_1"))
|
||||
{
|
||||
NullData = null;
|
||||
JobID = "null";
|
||||
dateTime = new FileInfo(reportFullPath).LastWriteTime;
|
||||
Sequence = dateTime.Ticks;
|
||||
DateTimeFromSequence = dateTime;
|
||||
TotalSecondsSinceLastWriteTimeFromSequence = (DateTime.Now - dateTime).TotalSeconds;
|
||||
MesEntity = DefaultMesEntity(dateTime);
|
||||
ReportFullPath = reportFullPath;
|
||||
ProcessJobID = "R##";
|
||||
MID = "null";
|
||||
Tags = new List<string>();
|
||||
Logistics1 = new string[] { string.Concat("LOGISTICS_1", '\t', "A_JOBID=", JobID, ";A_MES_ENTITY=", MesEntity, ";") }.ToList();
|
||||
Logistics2 = new List<Logistics2>();
|
||||
}
|
||||
else
|
||||
{
|
||||
string logistics1Line1 = Logistics1[0];
|
||||
key = "NULL_DATA=";
|
||||
if (!logistics1Line1.Contains(key))
|
||||
NullData = null;
|
||||
else
|
||||
{
|
||||
segments = logistics1Line1.Split(new string[] { key }, StringSplitOptions.RemoveEmptyEntries);
|
||||
NullData = segments[1].Split(';')[0];
|
||||
}
|
||||
key = "JOBID=";
|
||||
if (!logistics1Line1.Contains(key))
|
||||
JobID = "null";
|
||||
else
|
||||
{
|
||||
segments = logistics1Line1.Split(new string[] { key }, StringSplitOptions.RemoveEmptyEntries);
|
||||
JobID = segments[1].Split(';')[0];
|
||||
}
|
||||
key = "SEQUENCE=";
|
||||
if (!logistics1Line1.Contains(key))
|
||||
dateTime = new FileInfo(reportFullPath).LastWriteTime;
|
||||
else
|
||||
{
|
||||
segments = logistics1Line1.Split(new string[] { key }, StringSplitOptions.RemoveEmptyEntries);
|
||||
if (!long.TryParse(segments[1].Split(';')[0].Split('.')[0], out long sequence) || sequence < new DateTime(1999, 1, 1).Ticks)
|
||||
dateTime = new FileInfo(reportFullPath).LastWriteTime;
|
||||
else
|
||||
dateTime = new DateTime(sequence);
|
||||
}
|
||||
Sequence = dateTime.Ticks;
|
||||
DateTimeFromSequence = dateTime;
|
||||
TotalSecondsSinceLastWriteTimeFromSequence = (DateTime.Now - dateTime).TotalSeconds;
|
||||
DateTime lastWriteTime = new FileInfo(reportFullPath).LastWriteTime;
|
||||
if (TotalSecondsSinceLastWriteTimeFromSequence > 600)
|
||||
{
|
||||
if (lastWriteTime != dateTime)
|
||||
try
|
||||
{ File.SetLastWriteTime(reportFullPath, dateTime); }
|
||||
catch (Exception) { }
|
||||
}
|
||||
key = "MES_ENTITY=";
|
||||
if (!logistics1Line1.Contains(key))
|
||||
MesEntity = DefaultMesEntity(dateTime);
|
||||
else
|
||||
{
|
||||
segments = logistics1Line1.Split(new string[] { key }, StringSplitOptions.RemoveEmptyEntries);
|
||||
MesEntity = segments[1].Split(';')[0];
|
||||
}
|
||||
ReportFullPath = reportFullPath;
|
||||
key = "PROCESS_JOBID=";
|
||||
if (!logistics1Line1.Contains(key))
|
||||
ProcessJobID = "R##";
|
||||
else
|
||||
{
|
||||
segments = logistics1Line1.Split(new string[] { key }, StringSplitOptions.RemoveEmptyEntries);
|
||||
ProcessJobID = segments[1].Split(';')[0];
|
||||
}
|
||||
key = "MID=";
|
||||
if (!logistics1Line1.Contains(key))
|
||||
MID = "null";
|
||||
else
|
||||
{
|
||||
segments = logistics1Line1.Split(new string[] { key }, StringSplitOptions.RemoveEmptyEntries);
|
||||
MID = segments[1].Split(';')[0];
|
||||
}
|
||||
}
|
||||
Logistics2 logistics2;
|
||||
Tags = new List<string>();
|
||||
Logistics2 = new List<Logistics2>();
|
||||
for (int i = 1; i < Logistics1.Count(); i++)
|
||||
{
|
||||
if (Logistics1[i].StartsWith("LOGISTICS_2"))
|
||||
{
|
||||
logistics2 = new Logistics2(Logistics1[i]);
|
||||
Logistics2.Add(logistics2);
|
||||
}
|
||||
}
|
||||
for (int i = Logistics1.Count() - 1; i > -1; i--)
|
||||
{
|
||||
if (Logistics1[i].StartsWith("LOGISTICS_2"))
|
||||
Logistics1.RemoveAt(i);
|
||||
}
|
||||
}
|
||||
|
||||
public Logistics ShallowCopy()
|
||||
{
|
||||
return (Logistics)MemberwiseClone();
|
||||
}
|
||||
|
||||
private string DefaultMesEntity(DateTime dateTime)
|
||||
{
|
||||
return string.Concat(dateTime.Ticks, "_MES_ENTITY");
|
||||
}
|
||||
|
||||
internal string GetLotViaMostCommonMethod()
|
||||
{
|
||||
return MID.Substring(0, MID.Length - 2);
|
||||
}
|
||||
|
||||
internal string GetPocketNumberViaMostCommonMethod()
|
||||
{
|
||||
return MID.Substring(MID.Length - 2);
|
||||
}
|
||||
|
||||
internal void Update(string dateTime, string processJobID, string mid)
|
||||
{
|
||||
if (!DateTime.TryParse(dateTime, out DateTime dateTimeCasted))
|
||||
dateTimeCasted = DateTime.Now;
|
||||
NullData = null;
|
||||
//JobID = Description.GetCellName();
|
||||
Sequence = dateTimeCasted.Ticks;
|
||||
DateTimeFromSequence = dateTimeCasted;
|
||||
TotalSecondsSinceLastWriteTimeFromSequence = (DateTime.Now - dateTimeCasted).TotalSeconds;
|
||||
//MesEntity = DefaultMesEntity(dateTime);
|
||||
//ReportFullPath = string.Empty;
|
||||
ProcessJobID = processJobID;
|
||||
MID = mid;
|
||||
Tags = new List<string>();
|
||||
Logistics1 = new string[] { string.Concat("LOGISTICS_1", '\t', "A_JOBID=", JobID, ";A_MES_ENTITY=", MesEntity, ";") }.ToList();
|
||||
Logistics2 = new List<Logistics2>();
|
||||
}
|
||||
}
|
||||
|
||||
}
|
81
Adaptation/Shared/Logistics2.cs
Normal file
81
Adaptation/Shared/Logistics2.cs
Normal file
@ -0,0 +1,81 @@
|
||||
using System;
|
||||
|
||||
namespace Adaptation.Shared
|
||||
{
|
||||
|
||||
public class Logistics2 : Methods.ILogistics2
|
||||
{
|
||||
|
||||
public string MID { get; private set; }
|
||||
public string RunNumber { get; private set; }
|
||||
public string SatelliteGroup { get; private set; }
|
||||
public string PartNumber { get; private set; }
|
||||
public string PocketNumber { get; private set; }
|
||||
public string WaferLot { get; private set; }
|
||||
public string Recipe { get; private set; }
|
||||
|
||||
public Logistics2(string logistics2)
|
||||
{
|
||||
string key;
|
||||
string[] segments;
|
||||
key = "JOBID=";
|
||||
if (!logistics2.Contains(key))
|
||||
MID = "null";
|
||||
else
|
||||
{
|
||||
segments = logistics2.Split(new string[] { key }, StringSplitOptions.RemoveEmptyEntries);
|
||||
MID = segments[1].Split(';')[0];
|
||||
}
|
||||
key = "MID=";
|
||||
if (!logistics2.Contains(key))
|
||||
RunNumber = "null";
|
||||
else
|
||||
{
|
||||
segments = logistics2.Split(new string[] { key }, StringSplitOptions.RemoveEmptyEntries);
|
||||
RunNumber = segments[1].Split(';')[0];
|
||||
}
|
||||
key = "INFO=";
|
||||
if (!logistics2.Contains(key))
|
||||
SatelliteGroup = "null";
|
||||
else
|
||||
{
|
||||
segments = logistics2.Split(new string[] { key }, StringSplitOptions.RemoveEmptyEntries);
|
||||
SatelliteGroup = segments[1].Split(';')[0];
|
||||
}
|
||||
key = "PRODUCT=";
|
||||
if (!logistics2.Contains(key))
|
||||
PartNumber = "null";
|
||||
else
|
||||
{
|
||||
segments = logistics2.Split(new string[] { key }, StringSplitOptions.RemoveEmptyEntries);
|
||||
PartNumber = segments[1].Split(';')[0];
|
||||
}
|
||||
key = "CHAMBER=";
|
||||
if (!logistics2.Contains(key))
|
||||
PocketNumber = "null";
|
||||
else
|
||||
{
|
||||
segments = logistics2.Split(new string[] { key }, StringSplitOptions.RemoveEmptyEntries);
|
||||
PocketNumber = segments[1].Split(';')[0];
|
||||
}
|
||||
key = "WAFER_ID=";
|
||||
if (!logistics2.Contains(key))
|
||||
WaferLot = "null";
|
||||
else
|
||||
{
|
||||
segments = logistics2.Split(new string[] { key }, StringSplitOptions.RemoveEmptyEntries);
|
||||
WaferLot = segments[1].Split(';')[0];
|
||||
}
|
||||
key = "PPID=";
|
||||
if (!logistics2.Contains(key))
|
||||
Recipe = "null";
|
||||
else
|
||||
{
|
||||
segments = logistics2.Split(new string[] { key }, StringSplitOptions.RemoveEmptyEntries);
|
||||
Recipe = segments[1].Split(';')[0];
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
}
|
25
Adaptation/Shared/Methods/IDescription.cs
Normal file
25
Adaptation/Shared/Methods/IDescription.cs
Normal file
@ -0,0 +1,25 @@
|
||||
using System.Collections.Generic;
|
||||
using System.Text.Json;
|
||||
|
||||
namespace Adaptation.Shared.Methods
|
||||
{
|
||||
|
||||
    /// <summary>
    /// Contract for a file-type-specific description: naming, display metadata, defaults,
    /// and conversion of process data into serializable description records.
    /// </summary>
    public interface IDescription
    {

        /// <summary>Human-readable text for the extraction event.</summary>
        string GetEventDescription();
        /// <summary>Names of detail-level fields (may be empty).</summary>
        List<string> GetDetailNames();
        /// <summary>Names of header-level fields (may be empty).</summary>
        List<string> GetHeaderNames();
        /// <summary>Description instance whose property values are the display names.</summary>
        IDescription GetDisplayNames();
        /// <summary>Names of parameter fields.</summary>
        List<string> GetParameterNames();
        /// <summary>Names of paired parameter fields.</summary>
        List<string> GetPairedParameterNames();
        /// <summary>Field names to ignore for the given test.</summary>
        List<string> GetIgnoreParameterNames(Test test);
        /// <summary>All property names of the default description (via JSON round-trip).</summary>
        List<string> GetNames(IFileRead fileRead, Logistics logistics);
        /// <summary>Default description serialized to JSON properties.</summary>
        JsonProperty[] GetDefault(IFileRead fileRead, Logistics logistics);
        /// <summary>Property-name to display-name mapping.</summary>
        Dictionary<string, string> GetDisplayNamesJsonElement(IFileRead fileRead);
        /// <summary>Default description instance for the given read/logistics context.</summary>
        IDescription GetDefaultDescription(IFileRead fileRead, Logistics logistics);
        /// <summary>Description records produced from extracted process data.</summary>
        List<IDescription> GetDescriptions(IFileRead fileRead, Logistics logistics, List<Test> tests, IProcessData iProcessData);

    }
|
||||
|
||||
}
|
26
Adaptation/Shared/Methods/IFileRead.cs
Normal file
26
Adaptation/Shared/Methods/IFileRead.cs
Normal file
@ -0,0 +1,26 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Text.Json;
|
||||
|
||||
namespace Adaptation.Shared.Methods
|
||||
{
|
||||
|
||||
    /// <summary>
    /// Behavioral contract of a file reader: extraction, re-extraction, event metadata,
    /// and post-extraction file movement.
    /// </summary>
    public interface IFileRead : Properties.IFileRead
    {
        /// <summary>Moves processed output to the archive location.</summary>
        void MoveArchive();
        /// <summary>Blocks until the reader's worker thread finishes.</summary>
        void WaitForThread();
        /// <summary>Default description serialized to JSON properties.</summary>
        JsonProperty[] GetDefault();
        /// <summary>Timer/thread-pool callback entry point.</summary>
        void Callback(object state);
        /// <summary>Human-readable text for the extraction event.</summary>
        string GetEventDescription();
        /// <summary>Header-level field names.</summary>
        List<string> GetHeaderNames();
        /// <summary>Validates the extracted test set; <paramref name="extra"/> relaxes/extends the check.</summary>
        void CheckTests(Test[] tests, bool extra);
        /// <summary>Property-name to display-name mapping.</summary>
        Dictionary<string, string> GetDisplayNamesJsonElement();
        /// <summary>Re-runs extraction against the configured source location.</summary>
        Tuple<string, Test[], JsonElement[], List<FileInfo>> ReExtract();
        /// <summary>Description records produced from extracted process data.</summary>
        List<IDescription> GetDescriptions(IFileRead fileRead, List<Test> tests, IProcessData processData);
        /// <summary>Moves extraction results to the configured target; optional triggering exception.</summary>
        void Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, Exception exception = null);
        /// <summary>Runs extraction for one report file under the given event name.</summary>
        Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, string eventName);
        /// <summary>Moves extraction results between explicit locations; returns diagnostic lines.</summary>
        string[] Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, string to, string from, string resolvedFileLocation, Exception exception);
    }
|
||||
|
||||
}
|
8
Adaptation/Shared/Methods/ILogistics.cs
Normal file
8
Adaptation/Shared/Methods/ILogistics.cs
Normal file
@ -0,0 +1,8 @@
|
||||
namespace Adaptation.Shared.Methods
|
||||
{
|
||||
|
||||
    /// <summary>Marker interface combining the logistics property contract with shared behavior.</summary>
    public interface ILogistics : Properties.ILogistics
    {
    }
|
||||
|
||||
}
|
8
Adaptation/Shared/Methods/ILogistics2.cs
Normal file
8
Adaptation/Shared/Methods/ILogistics2.cs
Normal file
@ -0,0 +1,8 @@
|
||||
namespace Adaptation.Shared.Methods
|
||||
{
|
||||
|
||||
    /// <summary>Marker interface combining the LOGISTICS_2 property contract with shared behavior.</summary>
    public interface ILogistics2 : Properties.ILogistics2
    {
    }
|
||||
|
||||
}
|
17
Adaptation/Shared/Methods/IProcessData.cs
Normal file
17
Adaptation/Shared/Methods/IProcessData.cs
Normal file
@ -0,0 +1,17 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Text.Json;
|
||||
|
||||
namespace Adaptation.Shared.Methods
|
||||
{
|
||||
|
||||
    /// <summary>
    /// Contract for turning a parsed report into extraction results.
    /// </summary>
    public interface IProcessData : Properties.IProcessData
    {

        /// <summary>Resolves the current reactor identifier from the known reactor map.</summary>
        string GetCurrentReactor(IFileRead fileRead, Logistics logistics, Dictionary<string, string> reactors);
        /// <summary>Produces the extraction result tuple (text, tests, JSON rows, files).</summary>
        Tuple<string, Test[], JsonElement[], List<FileInfo>> GetResults(IFileRead fileRead, Logistics logistics, List<FileInfo> fileInfoCollection);

    }
|
||||
|
||||
}
|
9
Adaptation/Shared/Methods/ISMTP.cs
Normal file
9
Adaptation/Shared/Methods/ISMTP.cs
Normal file
@ -0,0 +1,9 @@
|
||||
namespace Adaptation.Shared.Methods
|
||||
{
|
||||
    /// <summary>Outbound e-mail notification abstraction with three priority levels.</summary>
    public interface ISMTP
    {
        /// <summary>Sends a low-priority notification e-mail.</summary>
        void SendLowPriorityEmailMessage(string subject, string body);
        /// <summary>Sends a high-priority notification e-mail.</summary>
        void SendHighPriorityEmailMessage(string subject, string body);
        /// <summary>Sends a normal-priority notification e-mail.</summary>
        void SendNormalPriorityEmailMessage(string subject, string body);
    }
|
||||
}
|
306
Adaptation/Shared/Metrology/ScopeInfo.cs
Normal file
306
Adaptation/Shared/Metrology/ScopeInfo.cs
Normal file
@ -0,0 +1,306 @@
|
||||
using System;
|
||||
using System.IO;
|
||||
|
||||
namespace Adaptation.Shared.Metrology
|
||||
{
|
||||
|
||||
public class ScopeInfo : Properties.IScopeInfo
|
||||
{
|
||||
|
||||
public Test Test { get; private set; }
|
||||
public Enum Enum { get; private set; }
|
||||
public string HTML { get; private set; }
|
||||
public string Title { get; private set; }
|
||||
public string FileName { get; private set; }
|
||||
public int TestValue { get; private set; }
|
||||
public string Header { get; private set; }
|
||||
public string QueryFilter { get; private set; }
|
||||
public string FileNameWithoutExtension { get; private set; }
|
||||
|
||||
        /// <summary>
        /// Builds a scope with explicit file/query/display metadata; <see cref="Header"/> is
        /// always empty in this form and <see cref="FileNameWithoutExtension"/> is derived
        /// from <paramref name="fileName"/>.
        /// </summary>
        public ScopeInfo(Test test, string fileName, string queryFilter = "", string title = "", string html = "")
        {
            Enum = test;
            Test = test;
            HTML = html;
            Title = title;
            FileName = fileName;
            TestValue = (int)test;
            Header = string.Empty;
            QueryFilter = queryFilter;
            FileNameWithoutExtension = Path.GetFileNameWithoutExtension(fileName);
        }
|
||||
|
||||
public ScopeInfo(Test test)
|
||||
{
|
||||
Enum = test;
|
||||
Test = test;
|
||||
TestValue = (int)test;
|
||||
switch (Test)
|
||||
{
|
||||
case Test.AFMRoughness:
|
||||
FileNameWithoutExtension = "afm_iqs_01";
|
||||
Header = string.Empty;
|
||||
QueryFilter = "AFM Roughness";
|
||||
Title = "AFM";
|
||||
HTML = @"GaN Epi Data\10 - afm.html";
|
||||
break;
|
||||
case Test.BreakdownVoltageCenter:
|
||||
FileNameWithoutExtension = "bv_iqs_01";
|
||||
Header = "Reactor;fDate;fRecipeName;Lot;fPocketNumber;g4Scribe;BV Position;BV Value;Tool";
|
||||
QueryFilter = "Breakdown Voltage";
|
||||
Title = "Breakdown Voltage-Center";
|
||||
HTML = @"GaN Epi Data\03 - bv-production.html";
|
||||
break;
|
||||
case Test.BreakdownVoltageEdge:
|
||||
FileNameWithoutExtension = "bv_iqs_01_Edge";
|
||||
Header = "Reactor;fDate;fRecipeName;Lot;fPocketNumber;g4Scribe;BV Position;BV Value;Tool";
|
||||
QueryFilter = "Breakdown Voltage - Edge";
|
||||
Title = "Breakdown Voltage-Edge";
|
||||
HTML = @"GaN Epi Data\03 - bv-production.html";
|
||||
break;
|
||||
case Test.BreakdownVoltageMiddle8in:
|
||||
FileNameWithoutExtension = "bv_iqs_01_Middle";
|
||||
Header = "Reactor;fDate;fRecipeName;Lot;fPocketNumber;g4Scribe;BV Position;BV Value;Tool";
|
||||
QueryFilter = "Breakdown Voltage - Middle";
|
||||
Title = "Breakdown Voltage-Middle (8 in)";
|
||||
HTML = @"GaN Epi Data\03 - bv-production.html";
|
||||
break;
|
||||
case Test.CV:
|
||||
FileNameWithoutExtension = "cv_iqs_01";
|
||||
Header = "Reactor;fDate;fPart;Lot;pocketNumber;g4Scribe;Position;Vp;NdMin;Tool ID;CV Ns;CV Cap";
|
||||
QueryFilter = "CV_Ns";
|
||||
Title = "CV";
|
||||
HTML = @"GaN Epi Data\05 - cv.html";
|
||||
break;
|
||||
case Test.MonthlyCV:
|
||||
FileNameWithoutExtension = "cv_iqs_01";
|
||||
Header = "Reactor;fDate;fPart;Lot;pocketNumber;g4Scribe;Position;Vp;NdMin;Tool ID;CV Ns;CV Cap";
|
||||
QueryFilter = "CV_Ns";
|
||||
Title = "CV Monthly Verification";
|
||||
HTML = @"Metrology\07 - cv_verif_monthly.html";
|
||||
break;
|
||||
case Test.WeeklyCV:
|
||||
FileNameWithoutExtension = "cv_iqs_01";
|
||||
Header = "Reactor;fDate;fPart;Lot;pocketNumber;g4Scribe;Position;Vp;NdMin;Tool ID;CV Ns;CV Cap";
|
||||
QueryFilter = "CV_Ns";
|
||||
Title = "CV Weekly Verification";
|
||||
HTML = @"Metrology\16 - cv_verif_weekly.html";
|
||||
break;
|
||||
case Test.CandelaKlarfDC:
|
||||
FileNameWithoutExtension = "candela_iqs_01";
|
||||
Header = "LotID;OperatorID;RecipeName;CandelaRecipe;WaferID;PocketNumber;RunDate;Epi;SlipLines;Cracks;EpiDef;HazeSpot;SmallLpd;MediumLpd;LargeLpd;Cracks_A;Spirals;Craters;8620 Small;Pits;Tool ID;Defect Count";
|
||||
QueryFilter = "Candela Cracking";
|
||||
Title = "Candela";
|
||||
HTML = @"GaN Epi Data\12 - candela.html";
|
||||
break;
|
||||
case Test.CandelaLaser:
|
||||
FileNameWithoutExtension = "candela_iqs_01";
|
||||
Header = "LotID;OperatorID;RecipeName;CandelaRecipe;WaferID;PocketNumber;RunDate;Epi;SlipLines;Cracks;EpiDef;HazeSpot;SmallLpd;MediumLpd;LargeLpd;Cracks_A;Spirals;Craters;Pits;Tool ID;Defect Count";
|
||||
QueryFilter = "Candela Cracking";
|
||||
Title = "Candela";
|
||||
HTML = @"GaN Epi Data\12 - candela.html";
|
||||
break;
|
||||
case Test.CandelaVerify:
|
||||
FileNameWithoutExtension = "candela_iqs_01";
|
||||
Header = string.Concat("LotID;OperatorID;RecipeName;CandelaRecipe;WaferID;PocketNumber;RunDate;RunID;Reactor;", "Slip Lines;Cracks;Epi Def;Haze Spot;Small LPD;Medium LPD;Large LPD;Cracks_A;Spirals;Craters;8620 Small;Pits;Tool ID;Defect Count");
|
||||
QueryFilter = "Candela Cracking";
|
||||
Title = "Candela";
|
||||
HTML = @"GaN Epi Data\12 - candela.html";
|
||||
break;
|
||||
case Test.CandelaPSL:
|
||||
FileNameWithoutExtension = "candela_iqs_01";
|
||||
Header = string.Empty;
|
||||
QueryFilter = "102-83nm";
|
||||
Title = "Candela";
|
||||
HTML = @"GaN Epi Data\12 - candela.html";
|
||||
break;
|
||||
case Test.CandelaProdU:
|
||||
FileNameWithoutExtension = "candela_iqs_01";
|
||||
Header = string.Empty;
|
||||
QueryFilter = "SPE verification";
|
||||
Title = "Candela";
|
||||
HTML = @"GaN Epi Data\12 - candela.html";
|
||||
break;
|
||||
case Test.Denton:
|
||||
FileNameWithoutExtension = "denton_iqs_01";
|
||||
Header = "Tool;fDate;Run;Recipe;Operator;Name;Value";
|
||||
QueryFilter = "Denton_Voltage_AVG";
|
||||
Title = "Denton Data";
|
||||
HTML = @"Support Process\03 - ebeam02_denton_v1.html";
|
||||
break;
|
||||
case Test.Hall:
|
||||
FileNameWithoutExtension = "hall_iqs_01";
|
||||
Header = "Lot;Tool;TimeDate;RunDate;RunID;Part;Reactor;Scribe;PocketNumber;Tool ID;Name;Value";
|
||||
QueryFilter = "Hall Rs";
|
||||
Title = "Hall Data";
|
||||
HTML = @"GaN Epi Data\04 - hall.html";
|
||||
break;
|
||||
case Test.MonthlyHall:
|
||||
FileNameWithoutExtension = "hall_iqs_01";
|
||||
Header = "Lot;Tool;TimeDate;RunDate;RunID;Part;Reactor;Scribe;PocketNumber;Tool ID;Name;Value";
|
||||
QueryFilter = "Hall Rs";
|
||||
Title = "Hall Monthly Verification";
|
||||
HTML = @"Metrology\06 - hall_verif_monthly.html";
|
||||
break;
|
||||
case Test.WeeklyHall:
|
||||
FileNameWithoutExtension = "hall_iqs_01";
|
||||
Header = "Lot;Tool;TimeDate;RunDate;RunID;Part;Reactor;Scribe;PocketNumber;Tool ID;Name;Value";
|
||||
QueryFilter = "Hall Rs";
|
||||
Title = "Hall Weekly Verification";
|
||||
HTML = @"Metrology\15 - hall_verif_weekly.html";
|
||||
break;
|
||||
case Test.Lehighton:
|
||||
FileNameWithoutExtension = "lei_iqs_01";
|
||||
Header = "Reactor;Date;Recipe;Lot;Pocket;Scribe;Tool;Name;Value";
|
||||
QueryFilter = "LEI RS Average value";
|
||||
Title = "Lehighton";
|
||||
HTML = @"GaN Epi Data\13 - lehighton.html";
|
||||
break;
|
||||
case Test.VerificationLehighton:
|
||||
FileNameWithoutExtension = "___";
|
||||
Header = "Reactor;Date;Recipe;Lot;Pocket;Scribe;Tool;Name;Value";
|
||||
QueryFilter = "___";
|
||||
Title = "LEI Weekly Verification 2 Ohm cm";
|
||||
HTML = @"Metrology\14 - lei_verif_weekly.html.html";
|
||||
break;
|
||||
case Test.Microscope:
|
||||
FileNameWithoutExtension = string.Empty;
|
||||
Header = string.Empty;
|
||||
QueryFilter = "Microscope Center 5x";
|
||||
Title = "Total Microscope Defects";
|
||||
HTML = string.Empty;
|
||||
break;
|
||||
case Test.RPMXY:
|
||||
FileNameWithoutExtension = "RPM_Data";
|
||||
Header = "Lot;Date;Recipe;Reactor;Scribe;Pocket;Tool;Name;Value";
|
||||
QueryFilter = "Barrier_Composition_RPM_XY";
|
||||
Title = "RPM XY Data ***&*** View Data";
|
||||
HTML = @"GaN Epi Data\09 - rpm --- 08 - photoluminescence.html";
|
||||
break;
|
||||
case Test.RPMAverage:
|
||||
FileNameWithoutExtension = "RPMdata-short";
|
||||
Header = "fProductId;fDate;average;stdDev;fRecipeName;Reactor;g4Scribe;Pocket Number;Tool ID;Recipe From Rpm File";
|
||||
QueryFilter = "Epi Thickness Mean";
|
||||
Title = "RPM Average Data";
|
||||
HTML = @"GaN Epi Data\09 - rpm.html";
|
||||
break;
|
||||
case Test.RPMPLRatio:
|
||||
FileNameWithoutExtension = "PHOTOLUMINESCENCE_data-short";
|
||||
Header = "fProductId;fDate;g4Scribe;fRecipeName;bandEdge_nm;bandEdge_V;yellowBand_Pmw;yellowBand_nm;yellowBand_V;Reactor;Pocket Number;Tool ID";
|
||||
QueryFilter = "PL Ratio";
|
||||
Title = "Photoluminescence: PL Ratio";
|
||||
HTML = @"GaN Epi Data\08 - photoluminescence.html";
|
||||
break;
|
||||
case Test.DailyRPMXY:
|
||||
FileNameWithoutExtension = "RPM_Data";
|
||||
Header = "Lot;Date;Recipe;Reactor;Scribe;Pocket;Tool;Name;Value";
|
||||
QueryFilter = "Barrier_Composition_RPM_XY";
|
||||
Title = "";
|
||||
HTML = @"Metrology\?";
|
||||
break;
|
||||
case Test.DailyRPMAverage:
|
||||
FileNameWithoutExtension = "RPMdata-short";
|
||||
Header = "fProductId;fDate;average;stdDev;fRecipeName;Reactor;g4Scribe;Pocket Number;Tool ID;Recipe From Rpm File";
|
||||
QueryFilter = "Epi Thickness Mean";
|
||||
Title = "";
|
||||
HTML = @"Metrology\?";
|
||||
break;
|
||||
case Test.DailyRPMPLRatio:
|
||||
FileNameWithoutExtension = "PHOTOLUMINESCENCE_data-short";
|
||||
Header = "fProductId;fDate;g4Scribe;fRecipeName;bandEdge_nm;bandEdge_V;yellowBand_Pmw;yellowBand_nm;yellowBand_V;Reactor;Pocket Number;Tool ID";
|
||||
QueryFilter = "PL Ratio";
|
||||
Title = "RPM Daily Verification";
|
||||
HTML = @"Metrology\17 - rpm_verif_daily.html";
|
||||
break;
|
||||
case Test.VerificationRPM:
|
||||
FileNameWithoutExtension = "PhotoLuminescence_Ver";
|
||||
Header = "Part;Process;Date;Test;Value";
|
||||
QueryFilter = "PL Edge Wavelength";
|
||||
Title = "PL Daily Verification - [PL Edge Wavelength]";
|
||||
HTML = @"Metrology\18 - photoluminescence_verif_daily.html";
|
||||
break;
|
||||
case Test.Photoreflectance:
|
||||
FileNameWithoutExtension = "photoreflect_iqs_01";
|
||||
Header = "Lot;Date;Part;Reactor;Scribe;Pocket;Tool;Point;WaferPosition_PR;PR_Peak";
|
||||
QueryFilter = "PR Barrier Composition";
|
||||
Title = "Photoreflectance 6 in, Photoreflectance 8 in";
|
||||
HTML = @"GaN Epi Data\07 - photoreflectance.html";
|
||||
break;
|
||||
case Test.UV:
|
||||
FileNameWithoutExtension = "uv_iqs_01";
|
||||
Header = string.Empty;
|
||||
QueryFilter = "UV Broken";
|
||||
Title = "UV";
|
||||
HTML = @"GaN Epi Data\15 - uv 2.1.html";
|
||||
break;
|
||||
case Test.VpdIcpmsAnalyte:
|
||||
FileNameWithoutExtension = "VPD_iqs_01";
|
||||
Header = "Reactor;RunID;RunDate;PartNumber;PocketNumber;WaferScribe;Analyte;Value";
|
||||
QueryFilter = "Mg";
|
||||
Title = "VpdIcpmsAnalyteData";
|
||||
HTML = @"";
|
||||
break;
|
||||
case Test.WarpAndBow:
|
||||
FileNameWithoutExtension = "warp_iqs_01";
|
||||
Header = "fDate;fRecipeName;fProductId;g4Scribe;warp;bow;tool;Reactor;Pocket ID;bow_range;BowX;BowY;CenterBow";
|
||||
QueryFilter = "BowCenter";
|
||||
Title = "Warp and Bow";
|
||||
HTML = @"GaN Epi Data\14 - warp.html";
|
||||
break;
|
||||
case Test.VerificationWarpAndBow:
|
||||
FileNameWithoutExtension = "warp_ver_iqs_01";
|
||||
Header = "Part;Process;Date;WaferScribe;totWarp;bow";
|
||||
QueryFilter = "Bow Calibration";
|
||||
Title = "6 Inch Warp/Bow Daily Verification, 8 Inch Warp/Bow Daily Verification";
|
||||
HTML = @"Metrology\19 - warp_cal_daily.html";
|
||||
break;
|
||||
case Test.XRDXY:
|
||||
FileNameWithoutExtension = "xrd_iqs_NEW_01";
|
||||
Header = "Reactor;fDate;fRecipeName;Lot;pocketNumber;g4Scribe;ToolID;Name;Value;Group";
|
||||
QueryFilter = "SL Period";
|
||||
Title = "XRD XY Raw Data Viewer";
|
||||
HTML = @"GaN Epi Data\11 - xrd.html";
|
||||
break;
|
||||
case Test.XRDWeightedAverage:
|
||||
FileNameWithoutExtension = "xrd_iqs_NEW_01_WtAVG";
|
||||
Header = "Reactor;fDate;fRecipeName;Lot;pocketNumber;g4Scribe;Name;Value;Group";
|
||||
//QueryFilter = "Al% Barrier WTAVG";
|
||||
QueryFilter = "SL Period WTAVG";
|
||||
Title = "XRD Weighted Average Data";
|
||||
HTML = @"GaN Epi Data\11 - xrd.html";
|
||||
break;
|
||||
case Test.MonthlyXRD:
|
||||
FileNameWithoutExtension = "xrd_monthly_ver_iqs_01";
|
||||
Header = "Part;Process;Date;TestName;Value";
|
||||
QueryFilter = "XRD 2-Theta Position";
|
||||
Title = "XRD Monthly Verification";
|
||||
HTML = @"Metrology\03 - xrd_verif_monthly.html";
|
||||
break;
|
||||
case Test.WeeklyXRD:
|
||||
FileNameWithoutExtension = "xrd_weekly_ver_iqs_01";
|
||||
Header = "Part;Process;Lot;Date;TestName;Value";
|
||||
QueryFilter = "XRD Weekly AL% Center";
|
||||
Title = "XRD Weekly Verification";
|
||||
HTML = @"Metrology\12 - xrd_verif_weekly.html";
|
||||
break;
|
||||
case Test.JVXRD:
|
||||
FileNameWithoutExtension = "xrd_iqs_NEW_01";
|
||||
Header = "Reactor;fDate;fRecipeName;Lot;pocketNumber;g4Scribe;ToolID;Name;Value;Group";
|
||||
QueryFilter = "SL Period";
|
||||
Title = "XRD XY Raw Data Viewer";
|
||||
HTML = @"GaN Epi Data\11 - xrd.html";
|
||||
break;
|
||||
default:
|
||||
throw new Exception();
|
||||
}
|
||||
FileName = string.Concat(FileNameWithoutExtension, ".txt");
|
||||
}
|
||||
|
||||
public ScopeInfo ShallowCopy()
|
||||
{
|
||||
return (ScopeInfo)MemberwiseClone();
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
}
|
24
Adaptation/Shared/Metrology/WS.Attachment.cs
Normal file
24
Adaptation/Shared/Metrology/WS.Attachment.cs
Normal file
@ -0,0 +1,24 @@
|
||||
namespace Adaptation.Shared.Metrology
{

    public partial class WS
    {

        /// <summary>
        /// Describes one file to upload via <see cref="WS.AttachFile"/>: the local
        /// source path, the name to store it under, and the unique ID of the data
        /// record it attaches to (empty string for header-level attachments).
        /// </summary>
        public class Attachment
        {

            /// <summary>Unique ID of the data record this file attaches to.</summary>
            public string UniqueId { get; set; }

            /// <summary>File name the attachment is stored under on the server.</summary>
            public string DestinationFileName { get; set; }

            /// <summary>Local path of the file whose contents are uploaded.</summary>
            public string SourceFileName { get; set; }

            public Attachment(string uniqueId, string destinationFileName, string sourceFileName)
            {
                SourceFileName = sourceFileName;
                DestinationFileName = destinationFileName;
                UniqueId = uniqueId;
            }

        }

    }

}
|
33
Adaptation/Shared/Metrology/WS.Results.cs
Normal file
33
Adaptation/Shared/Metrology/WS.Results.cs
Normal file
@ -0,0 +1,33 @@
|
||||
using System.Collections.Generic;
|
||||
using System.Text.Json;
|
||||
|
||||
namespace Adaptation.Shared.Metrology
{

    public partial class WS
    {

        /// <summary>
        /// Response payload returned by the inbound API endpoint.
        /// Property declaration order is preserved because it drives JSON output order.
        /// </summary>
        public class Results
        {

            /// <summary>True when the data was written to the database.</summary>
            public bool Success { get; set; }

            /// <summary>When <see cref="Success"/> is true, the ID of the header record.</summary>
            public long HeaderID { get; set; }

            /// <summary>When <see cref="Success"/> is false, the list of errors.</summary>
            public List<string> Errors { get; set; }

            /// <summary>Warnings; these do not prevent data from being saved.</summary>
            public List<string> Warnings { get; set; }

            /// <summary>JSON rendering of this instance, for display/logging.</summary>
            public override string ToString() => JsonSerializer.Serialize(this, GetType());

        }

    }

}
|
127
Adaptation/Shared/Metrology/WS.cs
Normal file
127
Adaptation/Shared/Metrology/WS.cs
Normal file
@ -0,0 +1,127 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Net.Http;
|
||||
using System.Text;
|
||||
using System.Text.Json;
|
||||
|
||||
namespace Adaptation.Shared.Metrology
{

    /// <summary>
    /// Helper for posting metrology results and file attachments to the inbound web API.
    /// All calls are deliberately synchronous.
    /// </summary>
    public partial class WS
    {

        /// <summary>
        /// Serializes <paramref name="payload"/> to JSON and POSTs it to <paramref name="url"/>.
        /// Never throws: any failure (bad URL, transport error, bad response) is folded
        /// into the returned <see cref="Results"/> error collection.
        /// </summary>
        /// <returns>Item1 = raw response body (empty on transport failure); Item2 = parsed results.</returns>
        public static Tuple<string, Results> SendData(string url, object payload, int timeoutSeconds = 120)
        {
            Results results = new Results();
            string resultsJson = string.Empty;
            try
            {
                string json = JsonSerializer.Serialize(payload, payload.GetType());
                if (string.IsNullOrEmpty(url) || !url.Contains(":") || !url.Contains("."))
                    throw new Exception("Invalid URL");
                // A new HttpClient per call is kept here because the timeout can differ
                // between calls and HttpClient.Timeout cannot change after first use.
                using (HttpClient httpClient = new HttpClient())
                {
                    httpClient.Timeout = new TimeSpan(0, 0, 0, timeoutSeconds, 0);
                    HttpRequestMessage httpRequestMessage = new HttpRequestMessage
                    {
                        RequestUri = new Uri(url),
                        Method = HttpMethod.Post,
                        Content = new StringContent(json, Encoding.UTF8, "application/json")
                    };
                    HttpResponseMessage httpResponseMessage = httpClient.SendAsync(httpRequestMessage, HttpCompletionOption.ResponseContentRead).Result;
                    resultsJson = httpResponseMessage.Content.ReadAsStringAsync().Result;
                    results = JsonSerializer.Deserialize<Results>(resultsJson);
                }
                // Guard: a literal "null" body deserializes to null (previously caused
                // an unhandled NullReferenceException on the Success check below).
                if (results is null)
                    results = new Results();
                if (!results.Success)
                {
                    // Guard: the server may omit the Errors collection entirely
                    // (previously caused a NullReferenceException on Add).
                    if (results.Errors is null)
                        results.Errors = new List<string>();
                    results.Errors.Add(results.ToString());
                }
            }
            catch (Exception e)
            {
                // Flatten the exception chain into one error entry.
                Exception exception = e;
                StringBuilder stringBuilder = new StringBuilder();
                while (!(exception is null))
                {
                    stringBuilder.AppendLine(exception.Message);
                    exception = exception.InnerException;
                }
                if (results is null)
                    results = new Results();
                if (results.Errors is null)
                    results.Errors = new List<string>();
                results.Errors.Add(stringBuilder.ToString());
            }
            return new Tuple<string, Results>(resultsJson, results);
        }

        /// <summary>
        /// Attaches a file to either a header or a data record.
        /// <paramref name="url"/> is the same URL used for <see cref="SendData"/>;
        /// <paramref name="attachToHeaderId"/> is the ID returned by SendData;
        /// <paramref name="attachToDataUniqueId"/> selects the data record (empty/null
        /// attaches at header level). Synchronous; succeeds or throws.
        /// </summary>
        public static void AttachFile(string url, long attachToHeaderId, string attachToDataUniqueId, byte[] fileContents, string fileName, int timeoutSeconds = 60)
        {
            using (HttpClient httpClient = new HttpClient())
            {
                string requestUrl = url + "/attachment?headerid=" + attachToHeaderId.ToString();
                if (!string.IsNullOrWhiteSpace(attachToDataUniqueId))
                {
                    requestUrl += "&datauniqueid=";
                    requestUrl += System.Net.WebUtility.UrlEncode(attachToDataUniqueId);
                }
                requestUrl += "&filename="; // this is just so the web server log shows the filename
                requestUrl += System.Net.WebUtility.UrlEncode(fileName);

                httpClient.Timeout = new TimeSpan(0, 0, 0, timeoutSeconds, 0);

                MultipartFormDataContent multipartFormDataContent = new MultipartFormDataContent();
                ByteArrayContent byteArrayContent = new ByteArrayContent(fileContents);
                byteArrayContent.Headers.ContentType = new System.Net.Http.Headers.MediaTypeHeaderValue("application/octet-stream");

                multipartFormDataContent.Add(byteArrayContent, "attachment", fileName);

                HttpResponseMessage httpResponseMessage = httpClient.PostAsync(requestUrl, multipartFormDataContent).Result;

                if (httpResponseMessage.IsSuccessStatusCode)
                    return;

                string resultBody = httpResponseMessage.Content.ReadAsStringAsync().Result;

                throw new Exception("Attachment failed: " + resultBody);
            }
        }

        /// <summary>
        /// Uploads the header-level and data-level attachment lists for one header
        /// record. Wraps any failure in a single Exception whose message is the
        /// flattened chain of inner-exception messages.
        /// </summary>
        public static void AttachFiles(string url, long headerID, List<Attachment> headerAttachments = null, List<Attachment> dataAttachments = null)
        {
            try
            {
                if (!(headerAttachments is null))
                {
                    foreach (Attachment attachment in headerAttachments)
                        AttachFile(url, headerID, "", System.IO.File.ReadAllBytes(attachment.SourceFileName), attachment.DestinationFileName);
                }
                if (!(dataAttachments is null))
                {
                    foreach (Attachment attachment in dataAttachments)
                        AttachFile(url, headerID, attachment.UniqueId, System.IO.File.ReadAllBytes(attachment.SourceFileName), attachment.DestinationFileName);
                }
            }
            catch (Exception e)
            {
                Exception exception = e;
                StringBuilder stringBuilder = new StringBuilder();
                while (!(exception is null))
                {
                    stringBuilder.AppendLine(exception.Message);
                    exception = exception.InnerException;
                }
                // NOTE: rethrowing as a new Exception loses the original stack trace;
                // kept for backward compatibility with callers that parse the message.
                throw new Exception(stringBuilder.ToString());
            }
        }

    }

}
|
13
Adaptation/Shared/ParameterType.cs
Normal file
13
Adaptation/Shared/ParameterType.cs
Normal file
@ -0,0 +1,13 @@
|
||||
namespace Adaptation.Shared
{

    /// <summary>
    /// Value types a parameter may take. Numeric values are explicit and match an
    /// external contract; value 1 is skipped — presumably reserved — TODO confirm.
    /// </summary>
    public enum ParameterType
    {
        String = 0,
        Integer = 2,
        Double = 3,
        Boolean = 4,
        StructuredType = 5
    }

}
|
426
Adaptation/Shared/ProcessDataStandardFormat.cs
Normal file
426
Adaptation/Shared/ProcessDataStandardFormat.cs
Normal file
@ -0,0 +1,426 @@
|
||||
using Adaptation.Shared.Methods;
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Globalization;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Text;
|
||||
using System.Text.Json;
|
||||
|
||||
namespace Adaptation.Shared
|
||||
{
|
||||
|
||||
public class ProcessDataStandardFormat
|
||||
{
|
||||
|
||||
// Marker token used in PDSF-related text (consumers not visible in this file).
public const string RecordStart = "RECORD_START";

/// <summary>
/// Target category used when composing numbered display names; consumed by
/// GetString and the public wrappers (EquipmentIntegration, BusinessIntegration,
/// SystemExport, Archive).
/// </summary>
public enum SearchFor
{
    EquipmentIntegration = 1,
    BusinessIntegration = 2,
    SystemExport = 3,
    Archive = 4
}
|
||||
|
||||
/// <summary>
/// Renders rows (as JSON objects) into Process Data Standard Format (PDSF) text:
/// a header with offset placeholders, a tab-separated column line, one data row
/// per element, then footer metadata and LOGISTICS lines. Returns string.Empty
/// when there are no rows.
/// </summary>
public static string GetPDSFText(IFileRead fileRead, Logistics logistics, JsonElement[] jsonElements, string logisticsText)
{
    string result;
    if (!jsonElements.Any())
        result = string.Empty;
    else
    {
        int columns = 0;
        List<string> lines;
        // Fixed-width placeholders; patched with real character offsets at the end.
        string endOffset = "E#######T";
        string dataOffset = "D#######T";
        string headerOffset = "H#######T";
        string format = "MM/dd/yyyy HH:mm:ss";
        StringBuilder stringBuilder = new();
        lines = new string[] { "HEADER_TAG\tHEADER_VALUE", "FORMAT\t2.00", "NUMBER_PASSES\t0001", string.Concat("HEADER_OFFSET\t", headerOffset), string.Concat("DATA_OFFSET\t", dataOffset), string.Concat("END_OFFSET\t", endOffset) }.ToList();
    stringBuilder.Append("\"Time\"").Append('\t');
    stringBuilder.Append("\"A_LOGISTICS\"").Append('\t');
    stringBuilder.Append("\"B_LOGISTICS\"").Append('\t');
    // Column names are taken from the FIRST element only (loop body breaks after one pass).
    for (int i = 0; i < jsonElements.Length;)
    {
        foreach (JsonProperty jsonProperty in jsonElements[0].EnumerateObject())
        {
            columns += 1;
            stringBuilder.Append("\"").Append(jsonProperty.Name).Append("\"").Append('\t');
        }
        break;
    }
    stringBuilder.Remove(stringBuilder.Length - 1, 1); // drop trailing tab
    lines.Add(stringBuilder.ToString());
    // One data row per element; fixed "0.1 / 1 / 2" prefix fills Time/A_LOGISTICS/B_LOGISTICS.
    for (int i = 0; i < jsonElements.Length; i++)
    {
        stringBuilder.Clear();
        stringBuilder.Append("0.1").Append('\t');
        stringBuilder.Append("1").Append('\t');
        stringBuilder.Append("2").Append('\t');
        foreach (JsonProperty jsonProperty in jsonElements[i].EnumerateObject())
            stringBuilder.Append(jsonProperty.Value).Append('\t');
        stringBuilder.Remove(stringBuilder.Length - 1, 1);
        lines.Add(stringBuilder.ToString());
    }
    lines.Add(string.Concat("NUM_DATA_ROWS ", jsonElements.Length.ToString().PadLeft(9, '0')));
    lines.Add(string.Concat("NUM_DATA_COLUMNS ", (columns + 3).ToString().PadLeft(9, '0')));
    lines.Add("DELIMITER ;");
    lines.Add(string.Concat("START_TIME_FORMAT ", format));
    lines.Add(string.Concat("START_TIME ", logistics.DateTimeFromSequence.ToString(format))); //12/26/2019 15:22:44
    lines.Add(string.Concat("LOGISTICS_COLUMN", '\t', "A_LOGISTICS"));
    lines.Add(string.Concat("LOGISTICS_COLUMN", '\t', "B_LOGISTICS"));
    // Caller-supplied logistics text wins; otherwise synthesize LOGISTICS_1/2 + END_HEADER.
    if (!string.IsNullOrEmpty(logisticsText))
        lines.Add(logisticsText);
    else
    {
        lines.Add(string.Concat("LOGISTICS_1", '\t', "A_CHAMBER=;A_INFO=", fileRead.EventName, ";A_INFO2=", fileRead.EquipmentType, ";A_JOBID=", fileRead.CellInstanceName, ";A_MES_ENTITY=", fileRead.MesEntity, ";A_MID=", logistics.MID, ";A_NULL_DATA=", fileRead.NullData, ";A_PPID=NO_PPID;A_PROCESS_JOBID=", logistics.ProcessJobID, ";A_PRODUCT=;A_SEQUENCE=", logistics.Sequence, ";A_WAFER_ID=;"));
        lines.Add(string.Concat("LOGISTICS_2", '\t', "B_CHAMBER=;B_INFO=", fileRead.EventName, ";B_INFO2=", fileRead.EquipmentType, ";B_JOBID=", fileRead.CellInstanceName, ";B_MES_ENTITY=", fileRead.MesEntity, ";B_MID=", logistics.MID, ";B_NULL_DATA=", fileRead.NullData, ";B_PPID=NO_PPID;B_PROCESS_JOBID=", logistics.ProcessJobID, ";B_PRODUCT=;B_SEQUENCE=", logistics.Sequence, ";B_WAFER_ID=;"));
        lines.Add("END_HEADER");
    }
    stringBuilder.Clear();
    foreach (string line in lines)
        stringBuilder.AppendLine(line);
    result = stringBuilder.ToString();
    // Patch placeholders with actual character offsets: header = position of
    // NUM_DATA_ROWS, data = position of the first quote (column line), end = length.
    result = result.Replace(headerOffset, result.IndexOf("NUM_DATA_ROWS").ToString().PadLeft(9, '0')).
        Replace(dataOffset, result.IndexOf('"').ToString().PadLeft(9, '0')).
        Replace(endOffset, result.Length.ToString().PadLeft(9, '0'));
    }
    return result;
}
|
||||
|
||||
/// <summary>
/// Splits a PDSF report into its three parts: Item1 = the LOGISTICS_* header text
/// (up to but excluding END_HEADER), Item2 = column names taken from line 7
/// (tab-separated, quoted cells, duplicates disambiguated with numeric suffixes),
/// Item3 = the data body rows. Reads <paramref name="reportFullPath"/> only when
/// <paramref name="lines"/> is null.
/// </summary>
public static Tuple<string, string[], string[]> GetLogisticsColumnsAndBody(string reportFullPath, string[] lines = null)
{
    List<string> body = new();
    StringBuilder logistics = new();
    if (lines is null)
        lines = File.ReadAllLines(reportFullPath);
    string[] segments;
    if (lines.Length < 7)
        segments = new string[] { };
    else
        segments = lines[6].Trim().Split('\t');
    List<string> columns = new();
    for (int c = 0; c < segments.Length; c++)
    {
        // Strip the surrounding double quotes (assumes every header cell is quoted — TODO confirm).
        string segment = segments[c].Substring(1, segments[c].Length - 2);
        if (!columns.Contains(segment))
            columns.Add(segment);
        else
        {
            // Duplicate column name: append the first free numeric suffix to the
            // ORIGINAL name. (Fixed: the previous code compounded suffixes, so a
            // third duplicate became "name_1_2" instead of "name_2".)
            for (short i = 1; i < short.MaxValue; i++)
            {
                string candidate = string.Concat(segment, "_", i);
                if (!columns.Contains(candidate))
                {
                    columns.Add(candidate);
                    break;
                }
            }
        }
    }
    bool lookForLogistics = false;
    for (int r = 7; r < lines.Length; r++)
    {
        // Body rows run until the NUM_DATA_ROWS footer; after that we only look
        // for the LOGISTICS_1 section.
        if (lines[r].StartsWith("NUM_DATA_ROWS"))
            lookForLogistics = true;
        if (!lookForLogistics)
        {
            body.Add(lines[r]);
            continue;
        }
        if (lines[r].StartsWith("LOGISTICS_1"))
        {
            for (int i = r; i < lines.Length; i++)
            {
                // Fixed: the previous code tested lines[r] here, so END_HEADER was
                // never detected and everything after it leaked into the logistics text.
                if (lines[i].StartsWith("END_HEADER"))
                    break;
                logistics.AppendLine(lines[i]);
            }
            break;
        }
    }
    return new Tuple<string, string[], string[]>(logistics.ToString(), columns.ToArray(), body.ToArray());
}
|
||||
|
||||
/// <summary>
/// Converts PDSF body rows into an array of JSON objects keyed by column name.
/// Column 0 (the timestamp cell) is intentionally skipped. When
/// <paramref name="lookForNumbers"/> is true, all-digit values are emitted as JSON
/// numbers and empty values as JSON null; otherwise every value is a string.
/// Returns an empty array when there are no tab-delimited rows.
/// </summary>
public static JsonElement[] GetArray(Tuple<string, string[], string[]> pdsf, bool lookForNumbers = false)
{
    JsonElement[] results;
    string[] columns = pdsf.Item2;
    string[] bodyLines = pdsf.Item3;
    if (!bodyLines.Any() || !bodyLines[0].Contains('\t'))
        results = JsonSerializer.Deserialize<JsonElement[]>("[]");
    else
    {
        string value;
        string[] segments;
        StringBuilder stringBuilder = new();
        foreach (string bodyLine in bodyLines)
        {
            stringBuilder.Append('{');
            // Assumes every row has at least two tab-separated cells, like row 0 — TODO confirm.
            segments = bodyLine.Trim().Split('\t');
            for (int c = 1; c < segments.Length; c++)
            {
                // Escape backslashes BEFORE quotes. (Fixed: the previous order turned
                // an escaped quote \" into \\" and produced invalid JSON.)
                value = segments[c].Replace("\\", "\\\\").Replace("\"", "\\\"");
                if (!lookForNumbers)
                    stringBuilder.Append('"').Append(columns[c]).Append("\":\"").Append(value).Append("\",");
                else if (string.IsNullOrEmpty(value))
                    stringBuilder.Append('"').Append(columns[c]).Append("\":null,");
                else if (value.All(char.IsDigit) && (value.Length == 1 || value[0] != '0'))
                    // All-digit value becomes a JSON number; values with a leading zero
                    // (e.g. "007") stay strings because bare 007 is not valid JSON.
                    stringBuilder.Append('"').Append(columns[c]).Append("\":").Append(value).Append(",");
                else
                    stringBuilder.Append('"').Append(columns[c]).Append("\":\"").Append(value).Append("\",");
            }
            stringBuilder.Remove(stringBuilder.Length - 1, 1); // trailing comma after last pair
            stringBuilder.Append("},");
        }
        // Drop the comma after the last object. (Fixed: the previous code removed the
        // last 3 characters, which assumed a 2-character CRLF newline and corrupted
        // the JSON on platforms where Environment.NewLine is "\n".)
        stringBuilder.Remove(stringBuilder.Length - 1, 1);
        results = JsonSerializer.Deserialize<JsonElement[]>(string.Concat("[", stringBuilder, "]"));
    }
    return results;
}
|
||||
|
||||
/// <summary>
/// Builds a column-name → cell-values map from PDSF body rows. Every column gets
/// an entry; column 0 (the timestamp cell) is never populated, and cells beyond
/// the known columns are ignored.
/// </summary>
public static Dictionary<string, List<string>> GetDictionary(Tuple<string, string[], string[]> pdsf)
{
    string[] columns = pdsf.Item2;
    string[] bodyLines = pdsf.Item3;
    Dictionary<string, List<string>> results = new();
    foreach (string column in columns)
        results.Add(column, new List<string>());
    foreach (string bodyLine in bodyLines)
    {
        string[] cells = bodyLine.Split('\t');
        int limit = Math.Min(cells.Length, columns.Length);
        for (int index = 1; index < limit; index++)
            results[columns[index]].Add(cells[index]);
    }
    return results;
}
|
||||
|
||||
/// <summary>
/// Groups PDSF rows by the value of their "Test" column: for each parseable Test
/// value, builds a per-column dictionary holding only the rows in that test's
/// contiguous index range [min..max] (padded with empty strings where a column is
/// short). Item1 passes through the logistics text. Throws when the "Test" column
/// is missing. Rows whose Test value does not parse are silently dropped.
/// </summary>
public static Tuple<string, Dictionary<Test, Dictionary<string, List<string>>>> GetTestDictionary(Tuple<string, string[], string[]> pdsf)
{
    Dictionary<Test, Dictionary<string, List<string>>> results = new();
    string testColumn = nameof(Test);
    Dictionary<string, List<string>> keyValuePairs = GetDictionary(pdsf);
    if (!keyValuePairs.ContainsKey(testColumn))
        throw new Exception();
    int min;
    int max;
    Test testKey;
    List<string> vs;
    string columnKey;
    // Per test: the row indices where that test value appeared.
    Dictionary<Test, List<int>> tests = new();
    for (int i = 0; i < keyValuePairs[testColumn].Count; i++)
    {
        if (Enum.TryParse(keyValuePairs[testColumn][i], out Test test))
        {
            if (!results.ContainsKey(test))
            {
                tests.Add(test, new List<int>());
                results.Add(test, new Dictionary<string, List<string>>());
            }
            tests[test].Add(i);
        }
    }
    foreach (KeyValuePair<Test, List<int>> testKeyValuePair in tests)
    {
        testKey = testKeyValuePair.Key;
        // NOTE: uses the min..max index SPAN, so rows of a different test that fall
        // between two occurrences of this test are included — presumably tests are
        // contiguous in the report; TODO confirm.
        min = testKeyValuePair.Value.Min();
        max = testKeyValuePair.Value.Max() + 1;
        foreach (KeyValuePair<string, List<string>> keyValuePair in keyValuePairs)
            results[testKey].Add(keyValuePair.Key, new List<string>());
        foreach (KeyValuePair<string, List<string>> keyValuePair in keyValuePairs)
        {
            vs = keyValuePair.Value;
            columnKey = keyValuePair.Key;
            for (int i = min; i < max; i++)
            {
                // Pad with empty strings when a column has fewer rows than the span.
                if (vs.Count > i)
                    results[testKey][columnKey].Add(vs[i]);
                else
                    results[testKey][columnKey].Add(string.Empty);
            }
        }
    }
    return new Tuple<string, Dictionary<Test, Dictionary<string, List<string>>>>(pdsf.Item1, results);
}
|
||||
|
||||
/// <summary>
/// Formats a <see cref="SearchFor"/> value as "NN" + name, e.g. "03SystemExport",
/// or — when <paramref name="addSpaces"/> is true — with the separator inserted
/// after the number and before "In"/"Ex", e.g. "03 System Export".
/// </summary>
private static string GetString(SearchFor searchFor, bool addSpaces, char separator = ' ')
{
    string prefix = ((int)searchFor).ToString().PadLeft(2, '0');
    if (!addSpaces)
        return string.Concat(prefix, searchFor);
    string spaced = searchFor.ToString()
        .Replace("In", string.Concat(separator, "In"))
        .Replace("Ex", string.Concat(separator, "Ex"));
    return string.Concat(prefix, separator, spaced);
}
|
||||
|
||||
/// <summary>"01 Equipment Integration" (no spaces when <paramref name="addSpaces"/> is false).</summary>
public static string EquipmentIntegration(bool addSpaces = true, char separator = ' ') =>
    GetString(SearchFor.EquipmentIntegration, addSpaces, separator);

/// <summary>"02 Business Integration" (no spaces when <paramref name="addSpaces"/> is false).</summary>
public static string BusinessIntegration(bool addSpaces = true, char separator = ' ') =>
    GetString(SearchFor.BusinessIntegration, addSpaces, separator);

/// <summary>"03 System Export" (no spaces when <paramref name="addSpaces"/> is false).</summary>
public static string SystemExport(bool addSpaces = true, char separator = ' ') =>
    GetString(SearchFor.SystemExport, addSpaces, separator);

/// <summary>"04 Archive" (no spaces when <paramref name="addSpaces"/> is false).</summary>
public static string Archive(bool addSpaces = true, char separator = ' ') =>
    GetString(SearchFor.Archive, addSpaces, separator);
|
||||
|
||||
/// <summary>
/// Builds a text report from parallel parameter columns: one header line (from
/// <paramref name="scopeInfo"/>.Header) followed by one "!"-prefixed, ';'-separated line per row.
/// Columns named "Date"/"Time" are re-formatted using <paramref name="dateFormat"/> /
/// <paramref name="timeFormat"/>, taken either from <paramref name="logistics"/>.DateTimeFromSequence
/// (when <paramref name="useDateTimeFromSequence"/>) or parsed from the cell text via
/// <paramref name="format"/>. Parameters listed in <paramref name="pairedParameterNames"/> are
/// emitted as separate "&lt;row-prefix&gt;&lt;name&gt;;&lt;value&gt;" lines instead of inline cells,
/// and suppress the per-row line output.
/// </summary>
/// <exception cref="Exception">
/// Thrown when <paramref name="useDateTimeFromSequence"/> and <paramref name="format"/> are both
/// supplied, or neither is.
/// </exception>
public static string GetLines(Logistics logistics, Properties.IScopeInfo scopeInfo, List<string> names, Dictionary<string, List<string>> keyValuePairs, string dateFormat, string timeFormat, List<string> pairedParameterNames, bool useDateTimeFromSequence = true, string format = "", List<string> ignoreParameterNames = null)
{
    StringBuilder result = new();
    ignoreParameterNames ??= new List<string>();
    // Exactly one of the two date sources must be selected.
    if (useDateTimeFromSequence && !string.IsNullOrEmpty(format))
        throw new Exception();
    if (!useDateTimeFromSequence && string.IsNullOrEmpty(format))
        throw new Exception();
    const string columnDate = "Date";
    const string columnTime = "Time";
    const string firstDuplicate = "_1";
    string nullData = logistics.NullData is null ? string.Empty : logistics.NullData.ToString();
    result.AppendLine(scopeInfo.Header);
    StringBuilder row = new();
    // Only iterate rows that every column actually has.
    int rowCount = keyValuePairs.Min(kvp => kvp.Value.Count);
    for (int r = 0; r < rowCount; r++)
    {
        row.Clear();
        row.Append('!');
        foreach (KeyValuePair<string, List<string>> pair in keyValuePairs)
        {
            if (!names.Contains(pair.Key))
                continue;
            if (ignoreParameterNames.Contains(pair.Key))
                continue;
            string value = pair.Value[r];
            if (pairedParameterNames.Contains(pair.Key))
            {
                // Paired parameters get their own line (row prefix + name + value); empty/null cells are skipped.
                if (!string.IsNullOrEmpty(value) && value != nullData)
                    result.Append(row).Append(pair.Key).Append(';').AppendLine(value);
            }
            else
            {
                string duplicateKey = string.Concat(pair.Key, firstDuplicate);
                if (useDateTimeFromSequence && pair.Key == columnDate)
                    row.Append(logistics.DateTimeFromSequence.ToString(dateFormat));
                else if (useDateTimeFromSequence && pair.Key == columnTime)
                    row.Append(logistics.DateTimeFromSequence.ToString(timeFormat));
                else if (!useDateTimeFromSequence && pair.Key == columnDate && value.Length == format.Length)
                    row.Append(DateTime.ParseExact(value, format, CultureInfo.InvariantCulture).ToString(dateFormat));
                else if (!useDateTimeFromSequence && pair.Key == columnTime && keyValuePairs.ContainsKey(duplicateKey) && keyValuePairs[duplicateKey][r].Length == format.Length)
                    // Time is parsed from the "_1" duplicate column when one exists.
                    row.Append(DateTime.ParseExact(keyValuePairs[duplicateKey][r], format, CultureInfo.InvariantCulture).ToString(timeFormat));
                else if (string.IsNullOrEmpty(value) || value == nullData)
                    row.Append(nullData);
                else
                    row.Append(value);
                row.Append(';');
            }
        }
        if (pairedParameterNames.Count == 0)
        {
            // Drop the trailing ';' and emit the assembled row.
            row.Remove(row.Length - 1, 1);
            result.AppendLine(row.ToString());
        }
    }
    return result.ToString();
}
|
||||
|
||||
/// <summary>
/// Reads a tab-separated PDSF file and renders each run of lines with a constant column count
/// ("group") as fixed-width text: a right-padded 1-based column-number header, the padded data
/// lines, then one empty line. Group boundaries are detected where the tab-split segment count
/// changes between consecutive non-empty lines.
/// NOTE(review): the run after the last boundary is only emitted when it is a single final line
/// whose count differs from line 0's — a uniform file therefore yields an empty result; confirm
/// this is intended.
/// </summary>
/// <exception cref="Exception">Thrown when <paramref name="reportFullPath"/> does not exist.</exception>
public static List<string> PDSFToFixedWidth(string reportFullPath)
{
    List<string> results = new();
    if (!File.Exists(reportFullPath))
        throw new Exception();
    const char inputSeparator = '\t';
    const char outputSeparator = '\t';
    int startsAt = 0;
    string[] segments;
    int? currentGroup = null;
    List<int> widths = new();
    List<int[]> groups = new();
    StringBuilder stringBuilder = new();
    string[] lines = File.ReadAllLines(reportFullPath);
    // Pass 1: find [first, last] line-index ranges where the column count stays constant.
    for (int i = 0; i < lines.Length; i++)
    {
        if (string.IsNullOrEmpty(lines[i]))
            continue;
        segments = lines[i].Split(inputSeparator);
        currentGroup ??= segments.Length;
        if (segments.Length != currentGroup)
        {
            currentGroup = segments.Length;
            groups.Add(new int[] { startsAt, i - 1 });
            startsAt = i;
        }
    }
    if (startsAt == lines.Length - 1 && lines[0].Split(inputSeparator).Length != currentGroup)
        groups.Add(new int[] { lines.Length - 1, lines.Length - 1 });
    foreach (int[] group in groups)
    {
        // Seed column widths from the group's first line, then widen to the max over the group.
        widths.Clear();
        segments = lines[group[0]].Split(inputSeparator);
        foreach (string segment in segments)
            widths.Add(segment.Length);
        for (int i = group[0]; i <= group[1]; i++)
        {
            segments = lines[i].Split(inputSeparator);
            for (int s = 0; s < segments.Length; s++)
            {
                if (widths[s] < segments[s].Length)
                    widths[s] = segments[s].Length;
            }
        }
        // Header row of 1-based column numbers (column count taken from the group's last split).
        stringBuilder.Clear();
        for (int s = 0; s < segments.Length; s++)
            stringBuilder.Append((s + 1).ToString().PadLeft(widths[s], ' ')).Append(outputSeparator);
        stringBuilder.Remove(stringBuilder.Length - 1, 1);
        results.Add(stringBuilder.ToString());
        // Data rows, each cell right-aligned to its column width.
        for (int i = group[0]; i <= group[1]; i++)
        {
            stringBuilder.Clear();
            segments = lines[i].Split(inputSeparator);
            for (int s = 0; s < segments.Length; s++)
                stringBuilder.Append(segments[s].PadLeft(widths[s], ' ')).Append(outputSeparator);
            stringBuilder.Remove(stringBuilder.Length - 1, 1);
            results.Add(stringBuilder.ToString());
        }
        results.Add(string.Empty);
    }
    return results;
}
|
||||
|
||||
}
|
||||
|
||||
}
|
13
Adaptation/Shared/Properties/IDescription.cs
Normal file
13
Adaptation/Shared/Properties/IDescription.cs
Normal file
@ -0,0 +1,13 @@
|
||||
namespace Adaptation.Shared.Properties
|
||||
{
|
||||
|
||||
/// <summary>
/// Read-only position metadata carried by a description record.
/// </summary>
public interface IDescription
{

    /// <summary>Test identifier this description belongs to.</summary>
    int Test { get; }

    /// <summary>Total number of entries in the owning collection.</summary>
    int Count { get; }

    /// <summary>Zero-based position of this entry within the collection.</summary>
    int Index { get; }

}
|
||||
|
||||
}
|
20
Adaptation/Shared/Properties/IFileRead.cs
Normal file
20
Adaptation/Shared/Properties/IFileRead.cs
Normal file
@ -0,0 +1,20 @@
|
||||
namespace Adaptation.Shared.Properties
|
||||
{
|
||||
|
||||
/// <summary>
/// Read-only configuration and identity of a file-read handler.
/// </summary>
public interface IFileRead
{

    // Flags
    /// <summary>True when this read raises an event.</summary>
    bool IsEvent { get; }
    /// <summary>True when running hosted inside EAF.</summary>
    bool IsEAFHosted { get; }
    /// <summary>True when descriptions are produced cyclically.</summary>
    bool UseCyclicalForDescription { get; }

    // Identity / configuration values
    /// <summary>Placeholder text used for null data values.</summary>
    string NullData { get; }
    /// <summary>MES entity name.</summary>
    string MesEntity { get; }
    /// <summary>Name of the event this handler reports.</summary>
    string EventName { get; }
    /// <summary>Equipment type identifier.</summary>
    string EquipmentType { get; }
    /// <summary>Full path of the report file being read.</summary>
    string ReportFullPath { get; }
    /// <summary>Cell instance name.</summary>
    string CellInstanceName { get; }
    /// <summary>Subject line used when reporting exceptions.</summary>
    string ExceptionSubject { get; }
    /// <summary>Cell instance connection name.</summary>
    string CellInstanceConnectionName { get; }
    /// <summary>Parameterized model object definition type.</summary>
    string ParameterizedModelObjectDefinitionType { get; }

}
|
||||
|
||||
}
|
25
Adaptation/Shared/Properties/ILogistics.cs
Normal file
25
Adaptation/Shared/Properties/ILogistics.cs
Normal file
@ -0,0 +1,25 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
|
||||
namespace Adaptation.Shared.Properties
|
||||
{
|
||||
|
||||
/// <summary>
/// Logistics information for a processed file: identity, sequence timing, and parsed payload.
/// </summary>
public interface ILogistics
{

    /// <summary>Raw null-data placeholder value.</summary>
    object NullData { get; }
    /// <summary>Job identifier (cell name).</summary>
    string JobID { get; }
    /// <summary>Sequence value (ticks).</summary>
    long Sequence { get; }
    /// <summary>Date/time derived from <see cref="Sequence"/>.</summary>
    DateTime DateTimeFromSequence { get; }
    /// <summary>Seconds elapsed since the last write time implied by <see cref="Sequence"/>.</summary>
    double TotalSecondsSinceLastWriteTimeFromSequence { get; }
    /// <summary>MES entity (SPC).</summary>
    string MesEntity { get; }
    /// <summary>Full path of the extract file.</summary>
    string ReportFullPath { get; }
    /// <summary>Process job identifier (reactor; duplicated here intentionally).</summary>
    string ProcessJobID { get; set; }
    /// <summary>Material identifier (lot, or lot &amp; pocket).</summary>
    string MID { get; set; }
    /// <summary>Free-form tags associated with this record.</summary>
    List<string> Tags { get; set; }
    /// <summary>Raw LOGISTICS_1 lines.</summary>
    List<string> Logistics1 { get; set; }
    /// <summary>Parsed LOGISTICS_2 records.</summary>
    List<Logistics2> Logistics2 { get; set; }

}
|
||||
|
||||
}
|
17
Adaptation/Shared/Properties/ILogistics2.cs
Normal file
17
Adaptation/Shared/Properties/ILogistics2.cs
Normal file
@ -0,0 +1,17 @@
|
||||
namespace Adaptation.Shared.Properties
|
||||
{
|
||||
|
||||
/// <summary>
/// Read-only view of a single LOGISTICS_2 record.
/// </summary>
public interface ILogistics2
{

    /// <summary>Material identifier.</summary>
    string MID { get; }
    /// <summary>Run number.</summary>
    string RunNumber { get; }
    /// <summary>Satellite group.</summary>
    string SatelliteGroup { get; }
    /// <summary>Part number.</summary>
    string PartNumber { get; }
    /// <summary>Pocket number.</summary>
    string PocketNumber { get; }
    /// <summary>Wafer lot.</summary>
    string WaferLot { get; }
    /// <summary>Recipe name.</summary>
    string Recipe { get; }

}
|
||||
|
||||
}
|
13
Adaptation/Shared/Properties/IProcessData.cs
Normal file
13
Adaptation/Shared/Properties/IProcessData.cs
Normal file
@ -0,0 +1,13 @@
|
||||
using System.Collections.Generic;
|
||||
|
||||
namespace Adaptation.Shared.Properties
|
||||
{
|
||||
|
||||
/// <summary>
/// Exposes the detail records produced by parsing a process-data file.
/// </summary>
public interface IProcessData
{

    /// <summary>Parsed detail records (element type is implementation-specific).</summary>
    List<object> Details { get; }

}
|
||||
|
||||
}
|
20
Adaptation/Shared/Properties/IScopeInfo.cs
Normal file
20
Adaptation/Shared/Properties/IScopeInfo.cs
Normal file
@ -0,0 +1,20 @@
|
||||
using System;
|
||||
|
||||
namespace Adaptation.Shared.Properties
|
||||
{
|
||||
|
||||
/// <summary>
/// Read-only metadata describing a reporting scope (test, output naming, and query filter).
/// </summary>
public interface IScopeInfo
{

    /// <summary>Enum value identifying the scope.</summary>
    Enum Enum { get; }
    /// <summary>HTML fragment/name for the scope.</summary>
    string HTML { get; }
    /// <summary>Display title.</summary>
    string Title { get; }
    /// <summary>Output file name.</summary>
    string FileName { get; }
    /// <summary>Numeric test value for the scope.</summary>
    int TestValue { get; }
    /// <summary>Header line used when composing report output.</summary>
    string Header { get; }
    /// <summary>Query filter expression.</summary>
    string QueryFilter { get; }
    /// <summary>Output file name without its extension.</summary>
    string FileNameWithoutExtension { get; }

}
|
||||
|
||||
}
|
57
Adaptation/Shared/Test.cs
Normal file
57
Adaptation/Shared/Test.cs
Normal file
@ -0,0 +1,57 @@
|
||||
namespace Adaptation.Shared
|
||||
{
|
||||
|
||||
/// <summary>
/// Known test types, keyed by their stable numeric identifiers (0–48).
/// Members are listed in ascending value order; values must not be renumbered.
/// </summary>
public enum Test
{
    BreakdownVoltageCenter = 0,
    BreakdownVoltageEdge = 1,
    BreakdownVoltageMiddle8in = 2,
    CV = 3,
    MonthlyCV = 4,
    WeeklyCV = 5,
    CandelaKlarfDC = 6,
    Tencor = 7,
    SP1 = 8,
    Denton = 9,
    Hall = 10,
    MonthlyHall = 11,
    WeeklyHall = 12,
    Lehighton = 13,
    VerificationLehighton = 14,
    RPMXY = 15,
    RPMAverage = 16,
    RPMPLRatio = 17,
    DailyRPMXY = 18,
    DailyRPMAverage = 19,
    DailyRPMPLRatio = 20,
    VerificationRPM = 21,
    Photoreflectance = 22,
    HgCV = 23,
    CDE = 24,
    BioRadQS408M = 25,
    BioRadStratus = 26,
    VpdIcpmsAnalyte = 27,
    WarpAndBow = 28,
    VerificationWarpAndBow = 29,
    XRDXY = 30,
    XRDWeightedAverage = 31,
    MonthlyXRD = 32,
    WeeklyXRD = 33,
    AFMRoughness = 34,
    UV = 35,
    CandelaLaser = 36,
    CandelaVerify = 37,
    CandelaPSL = 38,
    CandelaProdU = 39,
    WeeklyXRDAIcomp = 40,
    WeeklyXRDFWHM002 = 41,
    WeeklyXRDFWHM105 = 42,
    WeeklyXRDSLStks = 43,
    WeeklyXRDXRR = 44,
    DiffusionLength = 45,
    Microscope = 46,
    JVXRD = 47,
    PlatoA = 48 // largest value
}
|
||||
|
||||
}
|
Reference in New Issue
Block a user