DEP08SIASM - v2.43.0 - Running but with nuget_System.Text.Json v5.0.1
2022-06-28 11:57:49 -07:00
parent c7884587bb
commit 1a152fafe0
75 changed files with 3067 additions and 2859 deletions

View File

@ -1,33 +0,0 @@
using Adaptation.Shared.Properties;
using System;
using System.IO;
namespace Adaptation.Shared.Deposition;
public class ScopeInfo : IScopeInfo
{
public Enum Enum { get; private set; }
public Test Test { get; private set; }
public string HTML { get; private set; }
public string Title { get; private set; }
public int TestValue { get; private set; }
public string Header { get; private set; }
public string FileName { get; private set; }
public string QueryFilter { get; private set; }
public string FileNameWithoutExtension { get; private set; }
public ScopeInfo(IFileRead fileRead, Test test, string extra)
{
Enum = test;
Test = test;
HTML = string.Empty;
Title = string.Empty;
TestValue = (int)test;
Header = string.Empty;
QueryFilter = string.Empty;
FileName = Path.GetFileName(fileRead.ReportFullPath);
FileNameWithoutExtension = extra;
}
}

View File

@ -6,7 +6,7 @@ using System.Text.Json;
namespace Adaptation.Shared.Duplicator;
public class Description : IDescription, Shared.Properties.IDescription
public class Description : IDescription, Properties.IDescription
{
public int Test { get; set; }

View File

@ -33,8 +33,6 @@ public class FileRead : Properties.IFileRead
protected readonly bool _IsDuplicator;
protected readonly Calendar _Calendar;
protected readonly bool _IsSourceTimer;
protected readonly string _VillachPath;
protected readonly string _ProgressPath;
protected readonly string _EquipmentType;
protected readonly long _BreakAfterSeconds;
protected readonly string _ExceptionSubject;
@ -46,6 +44,7 @@ public class FileRead : Properties.IFileRead
protected readonly string _CellInstanceConnectionNameBase;
protected readonly Dictionary<string, List<long>> _DummyRuns;
protected readonly Dictionary<string, string> _FileParameter;
protected readonly Dictionary<long, List<string>> _StaticRuns;
protected readonly string _ParameterizedModelObjectDefinitionType;
protected readonly FileConnectorConfiguration _FileConnectorConfiguration;
protected readonly IList<ModelObjectParameterDefinition> _ModelObjectParameterDefinitions;
@ -63,12 +62,13 @@ public class FileRead : Properties.IFileRead
string Properties.IFileRead.CellInstanceConnectionName => _CellInstanceConnectionName;
string Properties.IFileRead.ParameterizedModelObjectDefinitionType => _ParameterizedModelObjectDefinitionType;
public FileRead(IDescription description, bool isEvent, ISMTP smtp, Dictionary<string, string> fileParameter, string cellInstanceName, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList<ModelObjectParameterDefinition> modelObjectParameters, string equipmentDictionaryName, Dictionary<string, List<long>> dummyRuns, bool useCyclicalForDescription, bool isEAFHosted)
public FileRead(IDescription description, bool isEvent, ISMTP smtp, Dictionary<string, string> fileParameter, string cellInstanceName, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList<ModelObjectParameterDefinition> modelObjectParameters, string equipmentDictionaryName, Dictionary<string, List<long>> dummyRuns, Dictionary<long, List<string>> staticRuns, bool useCyclicalForDescription, bool isEAFHosted)
{
_SMTP = smtp;
_IsEvent = isEvent;
_DummyRuns = dummyRuns;
_LastTicksDuration = 0;
_StaticRuns = staticRuns;
_IsEAFHosted = isEAFHosted;
_Description = description;
_FileParameter = fileParameter;
@ -84,6 +84,7 @@ public class FileRead : Properties.IFileRead
_IsSourceTimer = fileConnectorConfiguration.SourceFileFilter.StartsWith("*Timer.txt");
string cellInstanceConnectionNameBase = cellInstanceConnectionName.Replace("-", string.Empty);
_Hyphens = cellInstanceConnectionName.Length - cellInstanceConnectionNameBase.Length;
_TracePath = GetPropertyValue(cellInstanceConnectionName, modelObjectParameters, "Path.Trace");
_ExceptionSubject = string.Concat("Exception:", _CellInstanceConnectionName, _FileConnectorConfiguration?.SourceDirectoryCloaking);
string suffix;
string[] segments = _ParameterizedModelObjectDefinitionType.Split('.');
@ -111,16 +112,10 @@ public class FileRead : Properties.IFileRead
// if (!string.IsNullOrEmpty(equipmentDictionaryName) && isEvent)
// throw new Exception(cellInstanceConnectionName);
}
ModelObjectParameterDefinition[] paths = GetProperties(cellInstanceConnectionName, modelObjectParameters, "Path.");
if (paths.Length < 3)
throw new Exception(cellInstanceConnectionName);
if (isDuplicator)
_MesEntity = string.Empty;
else
_MesEntity = GetPropertyValue(cellInstanceConnectionName, modelObjectParameters, string.Concat("CellInstance.", cellInstanceName, ".Alias"));
_TracePath = (from l in paths where l.Name.EndsWith("Trace") select l.Value).FirstOrDefault();
_VillachPath = (from l in paths where l.Name.EndsWith("Villach") select l.Value).FirstOrDefault();
_ProgressPath = (from l in paths where l.Name.EndsWith("Progress") select l.Value).FirstOrDefault();
_EventName = eventName;
_EventNameFileRead = eventNameFileRead;
_EquipmentType = parameterizedModelObjectDefinitionTypeAppended;
@ -130,9 +125,11 @@ public class FileRead : Properties.IFileRead
else
{
if (_FileConnectorConfiguration.FileScanningOption == FileConnectorConfiguration.FileScanningOptionEnum.TimeBased)
breakAfterSeconds = 360;
else
breakAfterSeconds = Math.Abs(_FileConnectorConfiguration.FileHandleTimeout.Value);
else if (_FileConnectorConfiguration.FileScanningOption == FileConnectorConfiguration.FileScanningOptionEnum.FileWatcher)
breakAfterSeconds = Math.Abs(_FileConnectorConfiguration.FileScanningIntervalInSeconds.Value);
else
throw new Exception();
}
_BreakAfterSeconds = breakAfterSeconds;
UpdateLastTicksDuration(breakAfterSeconds * 10000000);
@ -142,13 +139,13 @@ public class FileRead : Properties.IFileRead
throw new Exception("_Configuration is empty?");
if (_FileConnectorConfiguration.TargetFileLocation.Contains('%') || _FileConnectorConfiguration.ErrorTargetFileLocation.Contains('%'))
throw new Exception("_Configuration is incorrect for a duplicator!");
if (_FileConnectorConfiguration is not null)
{
if (string.IsNullOrEmpty(_FileConnectorConfiguration.SourceDirectoryCloaking))
throw new Exception("SourceDirectoryCloaking is empty?");
if (!_FileConnectorConfiguration.SourceDirectoryCloaking.StartsWith("~"))
throw new Exception("SourceDirectoryCloaking is incorrect for a duplicator!");
}
// if (_FileConnectorConfiguration is not null)
// {
// if (string.IsNullOrEmpty(_FileConnectorConfiguration.SourceDirectoryCloaking))
// throw new Exception("SourceDirectoryCloaking is empty?");
// if (!_FileConnectorConfiguration.SourceDirectoryCloaking.StartsWith("~"))
// throw new Exception("SourceDirectoryCloaking is incorrect for a duplicator!");
// }
}
}
@ -183,6 +180,7 @@ public class FileRead : Properties.IFileRead
if (ticksDuration < 50000000)
ticksDuration = 50000000;
_LastTicksDuration = (long)Math.Ceiling(ticksDuration * .667);
_Log.Info($"{new TimeSpan(ticksDuration).TotalMilliseconds} TotalMillisecond(s) to process {Environment.NewLine}{_CellInstanceConnectionName}{Environment.NewLine}<{_ReportFullPath}>");
}
protected void WaitForThread(Thread thread, List<Exception> threadExceptions)
@ -216,35 +214,16 @@ public class FileRead : Properties.IFileRead
}
}
protected void CreateProgressDirectory(string[] exceptionLines)
private void WriteAllLines(string to, string[] exceptionLines)
{
string progressDirectory;
StringBuilder stringBuilder = new();
if (_Hyphens == 0)
progressDirectory = Path.Combine(_ProgressPath, _CellInstanceConnectionName);
else
string fileName = string.Concat(to, @"\readme.txt");
try
{
_ = stringBuilder.Clear();
for (int i = 0; i < _Hyphens; i++)
{
if (i > 0 && (i % 2) == 0)
_ = stringBuilder.Append(' ');
_ = stringBuilder.Append('-');
}
progressDirectory = string.Concat(_ProgressPath, @"\", (_Hyphens + 1).ToString().PadLeft(2, '0'), " ", stringBuilder).Trim();
}
DateTime dateTime = DateTime.Now;
string weekOfYear = _Calendar.GetWeekOfYear(dateTime, CalendarWeekRule.FirstDay, DayOfWeek.Sunday).ToString("00");
progressDirectory = string.Concat(progressDirectory, @"\", dateTime.ToString("yyyy"), "_Week_", weekOfYear, @"\", _Logistics.MID, "_", _Logistics.Sequence, "_", DateTime.Now.Ticks - _Logistics.Sequence);
if (!Directory.Exists(progressDirectory))
_ = Directory.CreateDirectory(progressDirectory);
if (exceptionLines is not null)
{
string fileName = string.Concat(progressDirectory, @"\readme.txt");
try
{ File.WriteAllLines(fileName, exceptionLines); }
catch (Exception) { }
if (!Directory.Exists(to))
_ = Directory.CreateDirectory(to);
File.WriteAllLines(fileName, exceptionLines);
}
catch (Exception ex) { _Log.Error(ex.Message); }
}
protected string[] Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, string to, string from, string resolvedFileLocation, Exception exception)
@ -258,7 +237,8 @@ public class FileRead : Properties.IFileRead
else
{
results = new string[] { _Logistics.Sequence.ToString(), _Logistics.ReportFullPath, from, resolvedFileLocation, to, string.Empty, string.Empty, exception.Message, string.Empty, string.Empty, exception.StackTrace };
Shared0449(to, results);
if (!_IsDuplicator)
WriteAllLines(to, results);
}
if (extractResults is not null && extractResults.Item4 is not null && extractResults.Item4.Any())
{
@ -279,64 +259,6 @@ public class FileRead : Properties.IFileRead
return results;
}
protected static IEnumerable<string> GetDirectoriesRecursively(string path, string directoryNameSegment = null)
{
Queue<string> queue = new();
queue.Enqueue(path);
while (queue.Count > 0)
{
path = queue.Dequeue();
foreach (string subDirectory in Directory.GetDirectories(path))
{
queue.Enqueue(subDirectory);
if (string.IsNullOrEmpty(directoryNameSegment) || Path.GetFileName(subDirectory).Contains(directoryNameSegment))
yield return subDirectory;
}
}
}
protected string GetProcessedDirectory(string progressPath, Logistics logistics, DateTime dateTime, string duplicateDirectory)
{
string result = duplicateDirectory;
string logisticsSequence = logistics.Sequence.ToString();
string[] matchDirectories;
if (!_IsEAFHosted)
matchDirectories = new string[] { Path.GetDirectoryName(Path.GetDirectoryName(logistics.ReportFullPath)) };
else
matchDirectories = new string[] { GetDirectoriesRecursively(Path.GetDirectoryName(progressPath), logisticsSequence).FirstOrDefault() };
if (matchDirectories.Length == 0 || string.IsNullOrEmpty(matchDirectories[0]))
matchDirectories = Directory.GetDirectories(duplicateDirectory, string.Concat('*', logisticsSequence, '*'), SearchOption.AllDirectories);
if ((matchDirectories is null) || matchDirectories.Length != 1)
throw new Exception("Didn't find directory by logistics sequence");
if (!matchDirectories[0].Contains("_processed"))
{
result = string.Concat(matchDirectories[0].Split(new string[] { logisticsSequence }, StringSplitOptions.None)[0], logistics.DateTimeFromSequence.ToString("yyyy-MM-dd_hh;mm_tt_"), dateTime.Ticks - logistics.Sequence, "_processed");
Directory.Move(matchDirectories[0], result);
result = string.Concat(result, @"\", logistics.Sequence);
if (!Directory.Exists(result))
_ = Directory.CreateDirectory(result);
}
return result;
}
protected string WriteScopeInfo(string progressPath, Logistics logistics, DateTime dateTime, string duplicateDirectory, List<Tuple<Properties.IScopeInfo, string>> tuples)
{
string result = GetProcessedDirectory(progressPath, logistics, dateTime, duplicateDirectory);
string tupleFile;
string fileName = Path.GetFileNameWithoutExtension(logistics.ReportFullPath);
string duplicateFile = string.Concat(result, @"\", fileName, ".pdsf");
foreach (Tuple<Properties.IScopeInfo, string> tuple in tuples)
{
if (tuple.Item1.FileName.StartsWith(@"\"))
tupleFile = tuple.Item1.FileName;
else
tupleFile = string.Concat(result, @"\", fileName, "_", tuple.Item1.FileNameWithoutExtension, ".pdsfc");
File.WriteAllText(tupleFile, tuple.Item2);
}
File.Copy(logistics.ReportFullPath, duplicateFile, overwrite: true);
return result;
}
protected static string GetTupleFile(Logistics logistics, Properties.IScopeInfo scopeInfo, string duplicateDirectory)
{
string result;
@ -369,34 +291,34 @@ public class FileRead : Properties.IFileRead
return result;
}
protected void WaitForFileConsumption(string sourceDirectoryCloaking, Logistics logistics, DateTime dateTime, string successDirectory, string duplicateDirectory, string duplicateFile, List<Tuple<Properties.IScopeInfo, string>> tuples)
protected void WaitForFileConsumption(string sourceDirectoryCloaking, Logistics logistics, DateTime dateTime, string successDirectory, string duplicateDirectory, string duplicateFile, List<(Properties.IScopeInfo, string)> tuples)
{
bool check;
long preWait;
string tupleFile;
List<int> consumedFileIndices = new();
List<string> duplicateFiles = new();
bool moreThanAnHour = (_BreakAfterSeconds > 3600);
StringBuilder stringBuilder = new();
List<int> consumedFileIndices = new();
bool moreThanAnHour = _BreakAfterSeconds > 3600;
long breakAfter = dateTime.AddSeconds(_BreakAfterSeconds).Ticks;
if (moreThanAnHour)
preWait = dateTime.AddSeconds(30).Ticks;
if (_FileConnectorConfiguration?.FileHandleWaitTime is null)
preWait = dateTime.AddMilliseconds(1234).Ticks;
else
preWait = dateTime.AddTicks(_LastTicksDuration).Ticks;
preWait = dateTime.AddMilliseconds(_FileConnectorConfiguration.FileHandleWaitTime.Value).Ticks;
if (!tuples.Any())
duplicateFiles.Add(duplicateFile);
string fileName = Path.GetFileNameWithoutExtension(logistics.ReportFullPath);
string successFile = string.Concat(successDirectory, @"\", Path.GetFileName(logistics.ReportFullPath));
foreach (Tuple<Properties.IScopeInfo, string> tuple in tuples)
foreach ((Properties.IScopeInfo scopeInfo, string text) in tuples)
{
if (tuple.Item1.FileName.StartsWith(@"\"))
tupleFile = tuple.Item1.FileName;
else if (!tuple.Item1.FileName.Contains('%'))
tupleFile = string.Concat(duplicateDirectory, @"\", fileName, "_", tuple.Item1.FileNameWithoutExtension, ".pdsfc");
if (scopeInfo.FileName.StartsWith(@"\"))
tupleFile = scopeInfo.FileName;
else if (!scopeInfo.FileName.Contains('%'))
tupleFile = string.Concat(duplicateDirectory, @"\", fileName, "_", scopeInfo.FileNameWithoutExtension, ".pdsfc");
else
tupleFile = GetTupleFile(logistics, tuple.Item1, duplicateDirectory);
tupleFile = GetTupleFile(logistics, scopeInfo, duplicateDirectory);
duplicateFiles.Add(tupleFile);
File.WriteAllText(tupleFile, tuple.Item2);
File.WriteAllText(tupleFile, text);
}
for (short i = 0; i < short.MaxValue; i++)
{
@ -410,7 +332,7 @@ public class FileRead : Properties.IFileRead
{
try
{
check = (string.IsNullOrEmpty(successDirectory) || File.Exists(successFile));
check = string.IsNullOrEmpty(successDirectory) || File.Exists(successFile);
if (check)
{
consumedFileIndices.Clear();
@ -482,7 +404,7 @@ public class FileRead : Properties.IFileRead
{
string directory;
if (!_CellInstanceConnectionName.StartsWith(_CellInstanceName) && _CellInstanceConnectionNameBase == _EquipmentType)
directory = Path.Combine(_VillachPath, _EquipmentType, "Target");
directory = Path.Combine(_TracePath, _EquipmentType, "Target", _CellInstanceName, _CellInstanceConnectionName);
else
directory = Path.Combine(_TracePath, _EquipmentType, "Source", _CellInstanceName, _CellInstanceConnectionName);
if (!Directory.Exists(directory))
@ -498,14 +420,8 @@ public class FileRead : Properties.IFileRead
}
}
protected void Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, Exception exception)
protected void Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults)
{
bool isErrorFile = exception is not null;
if (!isErrorFile && _IsDuplicator)
{
if (_IsEAFHosted && !string.IsNullOrEmpty(_ProgressPath))
CreateProgressDirectory(exceptionLines: null);
}
if (!_IsEAFHosted)
{
string to;
@ -583,20 +499,6 @@ public class FileRead : Properties.IFileRead
return results;
}
protected static Dictionary<Test, List<Properties.IDescription>> GetKeyValuePairs(List<Properties.IDescription> descriptions)
{
Dictionary<Test, List<Properties.IDescription>> results = new();
Test testKey;
for (int i = 0; i < descriptions.Count; i++)
{
testKey = (Test)descriptions[i].Test;
if (!results.ContainsKey(testKey))
results.Add(testKey, new List<Properties.IDescription>());
results[testKey].Add(descriptions[i]);
}
return results;
}
protected static List<Properties.IDescription> GetDuplicatorDescriptions(JsonElement[] jsonElements)
{
List<Properties.IDescription> results = new();
@ -612,34 +514,7 @@ public class FileRead : Properties.IFileRead
return results;
}
protected static Tuple<Test[], Dictionary<Test, List<Properties.IDescription>>> GetTuple(IFileRead fileRead, IEnumerable<Properties.IDescription> descriptions, bool extra = false)
{
Tuple<Test[], Dictionary<Test, List<Properties.IDescription>>> result;
Dictionary<Test, List<Properties.IDescription>> keyValuePairs = GetKeyValuePairs(descriptions.ToList());
Test[] tests = (from l in keyValuePairs select l.Key).ToArray();
fileRead.CheckTests(tests, extra);
result = new Tuple<Test[], Dictionary<Test, List<Properties.IDescription>>>(tests, keyValuePairs);
return result;
}
protected void Shared0449(string to, string[] exceptionLines)
{
if (_IsDuplicator)
CreateProgressDirectory(exceptionLines: null);
else
{
string fileName = string.Concat(to, @"\readme.txt");
try
{
if (!Directory.Exists(to))
_ = Directory.CreateDirectory(to);
File.WriteAllLines(fileName, exceptionLines);
}
catch (Exception ex) { _Log.Error(ex.Message); }
}
}
protected void Shared1880(string itemFile, List<string> directories, FileInfo sourceFile, bool isErrorFile)
private void Shared1880(string itemFile, List<string> directories, FileInfo sourceFile, bool isErrorFile)
{
string itemDirectory;
directories.Add(Path.GetDirectoryName(sourceFile.FullName));
@ -674,7 +549,7 @@ public class FileRead : Properties.IFileRead
}
}
protected void Shared1811(string to, FileInfo sourceFile)
private void Shared1811(string to, FileInfo sourceFile)
{
if (!_IsDuplicator && _FileConnectorConfiguration.SourceFileFilter != "*" && sourceFile.Exists && sourceFile.Length < _MinFileLength)
{
@ -682,7 +557,7 @@ public class FileRead : Properties.IFileRead
string jobIdDirectory = Path.GetDirectoryName(to);
DateTime dateTime = DateTime.Now.AddMinutes(-15);
string weekOfYear = _Calendar.GetWeekOfYear(_Logistics.DateTimeFromSequence, CalendarWeekRule.FirstDay, DayOfWeek.Sunday).ToString("00");
string weekDirectory = string.Concat(_Logistics.DateTimeFromSequence.ToString("yyyy"), "_Week_", weekOfYear, @"\", _Logistics.DateTimeFromSequence.ToString("yyyy-MM-dd"));
string weekDirectory = $"{_Logistics.DateTimeFromSequence:yyyy}_Week_{weekOfYear}{@"\"}{_Logistics.DateTimeFromSequence:yyyy-MM-dd}";
string destinationDirectory = string.Concat(jobIdDirectory, @"\_ Ignore 100 bytes\", weekDirectory, @"\", directoryName);
if (!Directory.Exists(destinationDirectory))
_ = Directory.CreateDirectory(destinationDirectory);
@ -711,7 +586,7 @@ public class FileRead : Properties.IFileRead
}
}
protected void Shared0231(List<string> directories)
private void Shared0231(List<string> directories)
{
if (_FileConnectorConfiguration.PostProcessingMode != FileConnectorConfiguration.PostProcessingModeEnum.Copy)
{
@ -723,7 +598,7 @@ public class FileRead : Properties.IFileRead
}
}
protected void Shared0413(DateTime dateTime, bool isDummyRun, string successDirectory, string duplicateDirectory, List<Tuple<Properties.IScopeInfo, string>> tuples, string duplicateFile)
protected void WaitForFileConsumption(DateTime dateTime, bool isDummyRun, string successDirectory, string duplicateDirectory, List<(Properties.IScopeInfo, string)> tuples, string duplicateFile)
{
if (!isDummyRun && _IsEAFHosted)
WaitForFileConsumption(_FileConnectorConfiguration.SourceDirectoryCloaking, _Logistics, dateTime, successDirectory, duplicateDirectory, duplicateFile, tuples);
@ -739,75 +614,50 @@ public class FileRead : Properties.IFileRead
}
}
protected static void Shared0607(string reportFullPath, string duplicateDirectory, string logisticsSequence, string destinationDirectory)
internal static string GetJobIdParentDirectory(string directory)
{
if (destinationDirectory == duplicateDirectory)
throw new Exception("Check Target File Folder for %LotIDWithLogisticsSequence%_in process on CI (not Duplicator)");
if (destinationDirectory.EndsWith(logisticsSequence))
destinationDirectory = Path.GetDirectoryName(destinationDirectory);
string[] deleteFiles = Directory.GetFiles(destinationDirectory, "*", SearchOption.AllDirectories);
if (deleteFiles.Length > 250)
throw new Exception("Safety net!");
foreach (string file in deleteFiles)
File.Delete(file);
Directory.Delete(destinationDirectory, recursive: true);
File.Delete(reportFullPath);
string result;
if (!string.IsNullOrEmpty(Path.GetFileName(directory)))
result = Path.GetFullPath(GetParentParent(directory));
else
result = Path.GetFullPath(GetParentParent(Path.GetDirectoryName(directory)));
if (!Directory.Exists(result))
_ = Directory.CreateDirectory(result);
return result;
}
protected string[] Shared1567(string reportFullPath, List<Tuple<Properties.IScopeInfo, string>> tuples)
internal string[] GetInProcessDirectory(string jobIdDirectory)
{
string[] results;
string historicalText;
string logisticsSequence = _Logistics.Sequence.ToString();
string jobIdDirectory = string.Concat(Path.GetDirectoryName(Path.GetDirectoryName(_FileConnectorConfiguration.TargetFileLocation)), @"\", _Logistics.JobID);
if (!Directory.Exists(jobIdDirectory))
_ = Directory.CreateDirectory(jobIdDirectory);
string[] matchDirectories;
if (!_IsEAFHosted)
matchDirectories = new string[] { Path.GetDirectoryName(Path.GetDirectoryName(reportFullPath)) };
results = new string[] { jobIdDirectory };
else
matchDirectories = Directory.GetDirectories(jobIdDirectory, string.Concat(_Logistics.MID, '*', logisticsSequence, '*'), SearchOption.TopDirectoryOnly);
if ((matchDirectories is null) || matchDirectories.Length != 1)
throw new Exception("Didn't find directory by logistics sequence");
string fileName = Path.GetFileNameWithoutExtension(reportFullPath);
string sequenceDirectory = string.Concat(matchDirectories[0], @"\", logisticsSequence);
if (!Directory.Exists(sequenceDirectory))
_ = Directory.CreateDirectory(sequenceDirectory);
foreach (Tuple<Properties.IScopeInfo, string> tuple in tuples)
{
fileName = string.Concat(sequenceDirectory, @"\", fileName, "_", tuple.Item1.FileNameWithoutExtension, ".pdsfc");
if (_IsEAFHosted)
File.WriteAllText(fileName, tuple.Item2);
else
{
if (File.Exists(fileName))
{
historicalText = File.ReadAllText(fileName);
if (tuple.Item2 != historicalText)
throw new Exception("File doesn't match historical!");
}
}
string logisticsSequence = _Logistics.Sequence.ToString();
results = Directory.GetDirectories(jobIdDirectory, string.Concat(_Logistics.MID, '*', logisticsSequence, '*'), SearchOption.TopDirectoryOnly);
}
results = matchDirectories;
if ((results is null) || results.Length != 1)
throw new Exception("Didn't find directory by logistics sequence");
return results;
}
protected void Shared1277(string reportFullPath, string destinationDirectory, string logisticsSequence, string jobIdDirectory, string json)
internal static string GetFileNameAfterUnderscoreSplit(string reportFullPath)
{
string ecCharacterizationSi = Path.GetDirectoryName(Path.GetDirectoryName(jobIdDirectory));
string destinationJobIdDirectory = string.Concat(ecCharacterizationSi, @"\Processed\", _Logistics.JobID);
if (!Directory.Exists(destinationJobIdDirectory))
_ = Directory.CreateDirectory(destinationJobIdDirectory);
destinationJobIdDirectory = string.Concat(destinationJobIdDirectory, @"\", Path.GetFileName(destinationDirectory).Split(new string[] { logisticsSequence }, StringSplitOptions.None)[0], _Logistics.DateTimeFromSequence.ToString("yyyy-MM-dd_hh;mm_tt_"), DateTime.Now.Ticks - _Logistics.Sequence);
string sequenceDirectory = string.Concat(destinationJobIdDirectory, @"\", logisticsSequence);
string jsonFileName = string.Concat(sequenceDirectory, @"\", Path.GetFileNameWithoutExtension(reportFullPath), ".json");
Directory.Move(destinationDirectory, destinationJobIdDirectory);
if (!Directory.Exists(sequenceDirectory))
_ = Directory.CreateDirectory(sequenceDirectory);
File.Copy(reportFullPath, string.Concat(sequenceDirectory, @"\", Path.GetFileName(reportFullPath)), overwrite: true);
File.WriteAllText(jsonFileName, json);
string result;
string[] segments = Path.GetFileNameWithoutExtension(reportFullPath).Split('_');
if (segments.Length <= 2)
result = segments[0];
else
result = string.Concat(segments[0], segments[2]);
return result;
}
internal static string GetParentParent(string value)
{
string result = Path.GetDirectoryName(Path.GetDirectoryName(value));
return result;
}
}
// 2022-02-14 -> Shared - FileRead
// 2022-06-08 -> Shared - FileRead

View File

@ -9,34 +9,46 @@ namespace Adaptation.Shared;
public class Logistics : ILogistics
{
public object NullData { get; private set; }
public string JobID { get; private set; } //CellName
public long Sequence { get; private set; } //Ticks
public DateTime DateTimeFromSequence { get; private set; }
public double TotalSecondsSinceLastWriteTimeFromSequence { get; private set; }
public string MesEntity { get; private set; } //SPC
public string ReportFullPath { get; private set; } //Extract file
public string ProcessJobID { get; set; } //Reactor (duplicate but I want it in the logistics)
public string MID { get; set; } //Lot & Pocket || Lot
public List<string> Tags { get; set; }
public List<string> Logistics1 { get; set; }
public List<Logistics2> Logistics2 { get; set; }
protected readonly DateTime _DateTimeFromSequence;
protected readonly FileInfo _FileInfo;
protected readonly string _JobID;
protected readonly List<string> _Logistics1;
protected readonly List<Logistics2> _Logistics2;
protected string _MID;
protected readonly string _MesEntity;
protected readonly object _NullData;
protected string _ProcessJobID;
protected readonly string _ReportFullPath;
protected readonly long _Sequence;
protected readonly double _TotalSecondsSinceLastWriteTimeFromSequence;
public DateTime DateTimeFromSequence => _DateTimeFromSequence;
public FileInfo FileInfo => _FileInfo;
public string JobID => _JobID;
public List<string> Logistics1 => _Logistics1;
public List<Logistics2> Logistics2 => _Logistics2;
public string MID => _MID;
public string MesEntity => _MesEntity;
public object NullData => _NullData;
public string ProcessJobID => _ProcessJobID;
public string ReportFullPath => _ReportFullPath;
public long Sequence => _Sequence;
public double TotalSecondsSinceLastWriteTimeFromSequence => _TotalSecondsSinceLastWriteTimeFromSequence;
public Logistics(IFileRead fileRead)
{
DateTime dateTime = DateTime.Now;
NullData = null;
Sequence = dateTime.Ticks;
DateTimeFromSequence = dateTime;
JobID = fileRead.CellInstanceName;
TotalSecondsSinceLastWriteTimeFromSequence = (DateTime.Now - dateTime).TotalSeconds;
MesEntity = DefaultMesEntity(dateTime);
ReportFullPath = string.Empty;
ProcessJobID = nameof(ProcessJobID);
MID = nameof(MID);
Tags = new List<string>();
Logistics1 = new string[] { string.Concat("LOGISTICS_1", '\t', "A_JOBID=", JobID, ";A_MES_ENTITY=", MesEntity, ";") }.ToList();
Logistics2 = new List<Logistics2>();
_NullData = null;
_Sequence = dateTime.Ticks;
_DateTimeFromSequence = dateTime;
_JobID = fileRead.CellInstanceName;
_TotalSecondsSinceLastWriteTimeFromSequence = (DateTime.Now - dateTime).TotalSeconds;
_MesEntity = DefaultMesEntity(dateTime);
_ReportFullPath = string.Empty;
_ProcessJobID = nameof(ProcessJobID);
_MID = nameof(MID);
_Logistics1 = new string[] { string.Concat("LOGISTICS_1", '\t', "A_JOBID=", JobID, ";A_MES_ENTITY=", MesEntity, ";") }.ToList();
_Logistics2 = new List<Logistics2>();
}
public Logistics(IFileRead fileRead, string reportFullPath, bool useSplitForMID, int? fileInfoLength = null)
@ -45,19 +57,19 @@ public class Logistics : ILogistics
throw new Exception();
if (string.IsNullOrEmpty(fileRead.MesEntity))
throw new Exception();
NullData = fileRead.NullData;
FileInfo fileInfo = new(reportFullPath);
DateTime dateTime = fileInfo.LastWriteTime;
if (fileInfoLength.HasValue && fileInfo.Length < fileInfoLength.Value)
_NullData = fileRead.NullData;
_FileInfo = new(reportFullPath);
DateTime dateTime = _FileInfo.LastWriteTime;
if (fileInfoLength.HasValue && _FileInfo.Length < fileInfoLength.Value)
dateTime = dateTime.AddTicks(-1);
JobID = fileRead.CellInstanceName;
Sequence = dateTime.Ticks;
DateTimeFromSequence = dateTime;
TotalSecondsSinceLastWriteTimeFromSequence = (DateTime.Now - dateTime).TotalSeconds;
MesEntity = fileRead.MesEntity;
ReportFullPath = fileInfo.FullName;
ProcessJobID = nameof(ProcessJobID);
string fileNameWithoutExtension = Path.GetFileNameWithoutExtension(fileInfo.FullName);
_JobID = fileRead.CellInstanceName;
_Sequence = dateTime.Ticks;
_DateTimeFromSequence = dateTime;
_TotalSecondsSinceLastWriteTimeFromSequence = (DateTime.Now - dateTime).TotalSeconds;
_MesEntity = fileRead.MesEntity;
_ReportFullPath = _FileInfo.FullName;
_ProcessJobID = nameof(ProcessJobID);
string fileNameWithoutExtension = Path.GetFileNameWithoutExtension(_FileInfo.FullName);
if (useSplitForMID)
{
if (fileNameWithoutExtension.IndexOf(".") > -1)
@ -67,10 +79,9 @@ public class Logistics : ILogistics
if (fileNameWithoutExtension.IndexOf("-") > -1)
fileNameWithoutExtension = fileNameWithoutExtension.Split('-')[0].Trim();
}
MID = string.Concat(fileNameWithoutExtension.Substring(0, 1).ToUpper(), fileNameWithoutExtension.Substring(1).ToLower());
Tags = new List<string>();
Logistics1 = new string[] { string.Concat("LOGISTICS_1", '\t', "A_JOBID=", JobID, ";A_MES_ENTITY=", MesEntity, ";") }.ToList();
Logistics2 = new List<Logistics2>();
_MID = string.Concat(fileNameWithoutExtension.Substring(0, 1).ToUpper(), fileNameWithoutExtension.Substring(1).ToLower());
_Logistics1 = new string[] { string.Concat("LOGISTICS_1", '\t', "A_JOBID=", JobID, ";A_MES_ENTITY=", MesEntity, ";") }.ToList();
_Logistics2 = new List<Logistics2>();
}
public Logistics(string reportFullPath, string logistics)
@ -78,57 +89,57 @@ public class Logistics : ILogistics
string key;
DateTime dateTime;
string[] segments;
Logistics1 = logistics.Split(new string[] { Environment.NewLine }, StringSplitOptions.RemoveEmptyEntries).ToList();
_FileInfo = new(reportFullPath);
_Logistics1 = logistics.Split(new string[] { Environment.NewLine }, StringSplitOptions.RemoveEmptyEntries).ToList();
if (!Logistics1.Any() || !Logistics1[0].StartsWith("LOGISTICS_1"))
{
NullData = null;
JobID = "null";
dateTime = new FileInfo(reportFullPath).LastWriteTime;
Sequence = dateTime.Ticks;
DateTimeFromSequence = dateTime;
TotalSecondsSinceLastWriteTimeFromSequence = (DateTime.Now - dateTime).TotalSeconds;
MesEntity = DefaultMesEntity(dateTime);
ReportFullPath = reportFullPath;
ProcessJobID = "R##";
MID = "null";
Tags = new List<string>();
Logistics1 = new string[] { string.Concat("LOGISTICS_1", '\t', "A_JOBID=", JobID, ";A_MES_ENTITY=", MesEntity, ";") }.ToList();
Logistics2 = new List<Logistics2>();
_NullData = null;
_JobID = "null";
dateTime = _FileInfo.LastWriteTime;
_Sequence = dateTime.Ticks;
_DateTimeFromSequence = dateTime;
_TotalSecondsSinceLastWriteTimeFromSequence = (DateTime.Now - dateTime).TotalSeconds;
_MesEntity = DefaultMesEntity(dateTime);
_ReportFullPath = reportFullPath;
_ProcessJobID = "R##";
_MID = "null";
_Logistics1 = new string[] { string.Concat("LOGISTICS_1", '\t', "A_JOBID=", JobID, ";A_MES_ENTITY=", MesEntity, ";") }.ToList();
_Logistics2 = new List<Logistics2>();
}
else
{
string logistics1Line1 = Logistics1[0];
key = "NULL_DATA=";
if (!logistics1Line1.Contains(key))
NullData = null;
_NullData = null;
else
{
segments = logistics1Line1.Split(new string[] { key }, StringSplitOptions.RemoveEmptyEntries);
NullData = segments[1].Split(';')[0];
_NullData = segments[1].Split(';')[0];
}
key = "JOBID=";
if (!logistics1Line1.Contains(key))
JobID = "null";
_JobID = "null";
else
{
segments = logistics1Line1.Split(new string[] { key }, StringSplitOptions.RemoveEmptyEntries);
JobID = segments[1].Split(';')[0];
_JobID = segments[1].Split(';')[0];
}
key = "SEQUENCE=";
if (!logistics1Line1.Contains(key))
dateTime = new FileInfo(reportFullPath).LastWriteTime;
dateTime = _FileInfo.LastWriteTime;
else
{
segments = logistics1Line1.Split(new string[] { key }, StringSplitOptions.RemoveEmptyEntries);
if (!long.TryParse(segments[1].Split(';')[0].Split('.')[0], out long sequence) || sequence < new DateTime(1999, 1, 1).Ticks)
dateTime = new FileInfo(reportFullPath).LastWriteTime;
dateTime = _FileInfo.LastWriteTime;
else
dateTime = new DateTime(sequence);
}
Sequence = dateTime.Ticks;
DateTimeFromSequence = dateTime;
TotalSecondsSinceLastWriteTimeFromSequence = (DateTime.Now - dateTime).TotalSeconds;
DateTime lastWriteTime = new FileInfo(reportFullPath).LastWriteTime;
_Sequence = dateTime.Ticks;
_DateTimeFromSequence = dateTime;
_TotalSecondsSinceLastWriteTimeFromSequence = (DateTime.Now - dateTime).TotalSeconds;
DateTime lastWriteTime = _FileInfo.LastWriteTime;
if (TotalSecondsSinceLastWriteTimeFromSequence > 600)
{
if (lastWriteTime != dateTime)
@ -138,33 +149,32 @@ public class Logistics : ILogistics
}
key = "MES_ENTITY=";
if (!logistics1Line1.Contains(key))
MesEntity = DefaultMesEntity(dateTime);
_MesEntity = DefaultMesEntity(dateTime);
else
{
segments = logistics1Line1.Split(new string[] { key }, StringSplitOptions.RemoveEmptyEntries);
MesEntity = segments[1].Split(';')[0];
_MesEntity = segments[1].Split(';')[0];
}
ReportFullPath = reportFullPath;
_ReportFullPath = reportFullPath;
key = "PROCESS_JOBID=";
if (!logistics1Line1.Contains(key))
ProcessJobID = "R##";
_ProcessJobID = "R##";
else
{
segments = logistics1Line1.Split(new string[] { key }, StringSplitOptions.RemoveEmptyEntries);
ProcessJobID = segments[1].Split(';')[0];
_ProcessJobID = segments[1].Split(';')[0];
}
key = "MID=";
if (!logistics1Line1.Contains(key))
MID = "null";
_MID = "null";
else
{
segments = logistics1Line1.Split(new string[] { key }, StringSplitOptions.RemoveEmptyEntries);
MID = segments[1].Split(';')[0];
_MID = segments[1].Split(';')[0];
}
}
Logistics2 logistics2;
Tags = new List<string>();
Logistics2 = new List<Logistics2>();
_Logistics2 = new List<Logistics2>();
for (int i = 1; i < Logistics1.Count; i++)
{
if (Logistics1[i].StartsWith("LOGISTICS_2"))
@ -180,29 +190,12 @@ public class Logistics : ILogistics
}
}
public Logistics ShallowCopy() => (Logistics)MemberwiseClone();
private static string DefaultMesEntity(DateTime dateTime) => string.Concat(dateTime.Ticks, "_MES_ENTITY");
internal string GetLotViaMostCommonMethod() => MID.Substring(0, MID.Length - 2);
internal string GetPocketNumberViaMostCommonMethod() => MID.Substring(MID.Length - 2);
internal void Update(string dateTime, string processJobID, string mid)
internal void Update(string mid, string processJobID)
{
if (!DateTime.TryParse(dateTime, out DateTime dateTimeCasted))
dateTimeCasted = DateTime.Now;
NullData = null;
//JobID = Description.GetCellName();
Sequence = dateTimeCasted.Ticks;
DateTimeFromSequence = dateTimeCasted;
TotalSecondsSinceLastWriteTimeFromSequence = (DateTime.Now - dateTimeCasted).TotalSeconds;
//MesEntity = DefaultMesEntity(dateTime);
//ReportFullPath = string.Empty;
ProcessJobID = processJobID;
MID = mid;
Tags = new List<string>();
Logistics1 = new string[] { string.Concat("LOGISTICS_1", '\t', "A_JOBID=", JobID, ";A_MES_ENTITY=", MesEntity, ";") }.ToList();
Logistics2 = new List<Logistics2>();
_MID = mid;
_ProcessJobID = processJobID;
}
}

View File

@ -12,7 +12,6 @@ public interface IFileRead : Properties.IFileRead
JsonProperty[] GetDefault();
string GetEventDescription();
List<string> GetHeaderNames();
void CheckTests(Test[] tests, bool extra);
Dictionary<string, string> GetDisplayNamesJsonElement();
Tuple<string, Test[], JsonElement[], List<FileInfo>> ReExtract();
List<IDescription> GetDescriptions(IFileRead fileRead, List<Test> tests, IProcessData processData);

View File

@ -0,0 +1,300 @@
using System;
using System.IO;
namespace Adaptation.Shared.Metrology;
public class ScopeInfo : Properties.IScopeInfo
{
public Test Test { get; private set; }
public Enum Enum { get; private set; }
public string HTML { get; private set; }
public string Title { get; private set; }
public string FileName { get; private set; }
public int TestValue { get; private set; }
public string Header { get; private set; }
public string QueryFilter { get; private set; }
public string FileNameWithoutExtension { get; private set; }
public ScopeInfo(Test test, string fileName, string queryFilter = "", string title = "", string html = "")
{
Enum = test;
Test = test;
HTML = html;
Title = title;
FileName = fileName;
TestValue = (int)test;
Header = string.Empty;
QueryFilter = queryFilter;
FileNameWithoutExtension = Path.GetFileNameWithoutExtension(fileName);
}
public ScopeInfo(Test test)
{
Enum = test;
Test = test;
TestValue = (int)test;
switch (Test)
{
case Test.AFMRoughness:
FileNameWithoutExtension = "afm_iqs_01";
Header = string.Empty;
QueryFilter = "AFM Roughness";
Title = "AFM";
HTML = @"GaN Epi Data\10 - afm.html";
break;
case Test.BreakdownVoltageCenter:
FileNameWithoutExtension = "bv_iqs_01";
Header = "Reactor;fDate;fRecipeName;Lot;fPocketNumber;g4Scribe;BV Position;BV Value;Tool";
QueryFilter = "Breakdown Voltage";
Title = "Breakdown Voltage-Center";
HTML = @"GaN Epi Data\03 - bv-production.html";
break;
case Test.BreakdownVoltageEdge:
FileNameWithoutExtension = "bv_iqs_01_Edge";
Header = "Reactor;fDate;fRecipeName;Lot;fPocketNumber;g4Scribe;BV Position;BV Value;Tool";
QueryFilter = "Breakdown Voltage - Edge";
Title = "Breakdown Voltage-Edge";
HTML = @"GaN Epi Data\03 - bv-production.html";
break;
case Test.BreakdownVoltageMiddle8in:
FileNameWithoutExtension = "bv_iqs_01_Middle";
Header = "Reactor;fDate;fRecipeName;Lot;fPocketNumber;g4Scribe;BV Position;BV Value;Tool";
QueryFilter = "Breakdown Voltage - Middle";
Title = "Breakdown Voltage-Middle (8 in)";
HTML = @"GaN Epi Data\03 - bv-production.html";
break;
case Test.CV:
FileNameWithoutExtension = "cv_iqs_01";
Header = "Reactor;fDate;fPart;Lot;pocketNumber;g4Scribe;Position;Vp;NdMin;Tool ID;CV Ns;CV Cap";
QueryFilter = "CV_Ns";
Title = "CV";
HTML = @"GaN Epi Data\05 - cv.html";
break;
case Test.MonthlyCV:
FileNameWithoutExtension = "cv_iqs_01";
Header = "Reactor;fDate;fPart;Lot;pocketNumber;g4Scribe;Position;Vp;NdMin;Tool ID;CV Ns;CV Cap";
QueryFilter = "CV_Ns";
Title = "CV Monthly Verification";
HTML = @"Metrology\07 - cv_verif_monthly.html";
break;
case Test.WeeklyCV:
FileNameWithoutExtension = "cv_iqs_01";
Header = "Reactor;fDate;fPart;Lot;pocketNumber;g4Scribe;Position;Vp;NdMin;Tool ID;CV Ns;CV Cap";
QueryFilter = "CV_Ns";
Title = "CV Weekly Verification";
HTML = @"Metrology\16 - cv_verif_weekly.html";
break;
case Test.CandelaKlarfDC:
FileNameWithoutExtension = "candela_iqs_01";
Header = "LotID;OperatorID;RecipeName;CandelaRecipe;WaferID;PocketNumber;RunDate;Epi;SlipLines;Cracks;EpiDef;HazeSpot;SmallLpd;MediumLpd;LargeLpd;Cracks_A;Spirals;Craters;8620 Small;Pits;Tool ID;Defect Count";
QueryFilter = "Candela Cracking";
Title = "Candela";
HTML = @"GaN Epi Data\12 - candela.html";
break;
case Test.CandelaLaser:
FileNameWithoutExtension = "candela_iqs_01";
Header = "LotID;OperatorID;RecipeName;CandelaRecipe;WaferID;PocketNumber;RunDate;Epi;SlipLines;Cracks;EpiDef;HazeSpot;SmallLpd;MediumLpd;LargeLpd;Cracks_A;Spirals;Craters;Pits;Tool ID;Defect Count";
QueryFilter = "Candela Cracking";
Title = "Candela";
HTML = @"GaN Epi Data\12 - candela.html";
break;
case Test.CandelaVerify:
FileNameWithoutExtension = "candela_iqs_01";
Header = string.Concat("LotID;OperatorID;RecipeName;CandelaRecipe;WaferID;PocketNumber;RunDate;RunID;Reactor;", "Slip Lines;Cracks;Epi Def;Haze Spot;Small LPD;Medium LPD;Large LPD;Cracks_A;Spirals;Craters;8620 Small;Pits;Tool ID;Defect Count");
QueryFilter = "Candela Cracking";
Title = "Candela";
HTML = @"GaN Epi Data\12 - candela.html";
break;
case Test.CandelaPSL:
FileNameWithoutExtension = "candela_iqs_01";
Header = string.Empty;
QueryFilter = "102-83nm";
Title = "Candela";
HTML = @"GaN Epi Data\12 - candela.html";
break;
case Test.CandelaProdU:
FileNameWithoutExtension = "candela_iqs_01";
Header = string.Empty;
QueryFilter = "SPE verification";
Title = "Candela";
HTML = @"GaN Epi Data\12 - candela.html";
break;
case Test.Denton:
FileNameWithoutExtension = "denton_iqs_01";
Header = "Tool;fDate;Run;Recipe;Operator;Name;Value";
QueryFilter = "Denton_Voltage_AVG";
Title = "Denton Data";
HTML = @"Support Process\03 - ebeam02_denton_v1.html";
break;
case Test.Hall:
FileNameWithoutExtension = "hall_iqs_01";
Header = "Lot;Tool;TimeDate;RunDate;RunID;Part;Reactor;Scribe;PocketNumber;Tool ID;Name;Value";
QueryFilter = "Hall Rs";
Title = "Hall Data";
HTML = @"GaN Epi Data\04 - hall.html";
break;
case Test.MonthlyHall:
FileNameWithoutExtension = "hall_iqs_01";
Header = "Lot;Tool;TimeDate;RunDate;RunID;Part;Reactor;Scribe;PocketNumber;Tool ID;Name;Value";
QueryFilter = "Hall Rs";
Title = "Hall Monthly Verification";
HTML = @"Metrology\06 - hall_verif_monthly.html";
break;
case Test.WeeklyHall:
FileNameWithoutExtension = "hall_iqs_01";
Header = "Lot;Tool;TimeDate;RunDate;RunID;Part;Reactor;Scribe;PocketNumber;Tool ID;Name;Value";
QueryFilter = "Hall Rs";
Title = "Hall Weekly Verification";
HTML = @"Metrology\15 - hall_verif_weekly.html";
break;
case Test.Lehighton:
FileNameWithoutExtension = "lei_iqs_01";
Header = "Reactor;Date;Recipe;Lot;Pocket;Scribe;Tool;Name;Value";
QueryFilter = "LEI RS Average value";
Title = "Lehighton";
HTML = @"GaN Epi Data\13 - lehighton.html";
break;
case Test.VerificationLehighton:
FileNameWithoutExtension = "___";
Header = "Reactor;Date;Recipe;Lot;Pocket;Scribe;Tool;Name;Value";
QueryFilter = "___";
Title = "LEI Weekly Verification 2 Ohm cm";
HTML = @"Metrology\14 - lei_verif_weekly.html.html";
break;
case Test.Microscope:
FileNameWithoutExtension = string.Empty;
Header = string.Empty;
QueryFilter = "Microscope Center 5x";
Title = "Total Microscope Defects";
HTML = string.Empty;
break;
case Test.RPMXY:
FileNameWithoutExtension = "RPM_Data";
Header = "Lot;Date;Recipe;Reactor;Scribe;Pocket;Tool;Name;Value";
QueryFilter = "Barrier_Composition_RPM_XY";
Title = "RPM XY Data ***&*** View Data";
HTML = @"GaN Epi Data\09 - rpm --- 08 - photoluminescence.html";
break;
case Test.RPMAverage:
FileNameWithoutExtension = "RPMdata-short";
Header = "fProductId;fDate;average;stdDev;fRecipeName;Reactor;g4Scribe;Pocket Number;Tool ID;Recipe From Rpm File";
QueryFilter = "Epi Thickness Mean";
Title = "RPM Average Data";
HTML = @"GaN Epi Data\09 - rpm.html";
break;
case Test.RPMPLRatio:
FileNameWithoutExtension = "PHOTOLUMINESCENCE_data-short";
Header = "fProductId;fDate;g4Scribe;fRecipeName;bandEdge_nm;bandEdge_V;yellowBand_Pmw;yellowBand_nm;yellowBand_V;Reactor;Pocket Number;Tool ID";
QueryFilter = "PL Ratio";
Title = "Photoluminescence: PL Ratio";
HTML = @"GaN Epi Data\08 - photoluminescence.html";
break;
case Test.DailyRPMXY:
FileNameWithoutExtension = "RPM_Data";
Header = "Lot;Date;Recipe;Reactor;Scribe;Pocket;Tool;Name;Value";
QueryFilter = "Barrier_Composition_RPM_XY";
Title = "";
HTML = @"Metrology\?";
break;
case Test.DailyRPMAverage:
FileNameWithoutExtension = "RPMdata-short";
Header = "fProductId;fDate;average;stdDev;fRecipeName;Reactor;g4Scribe;Pocket Number;Tool ID;Recipe From Rpm File";
QueryFilter = "Epi Thickness Mean";
Title = "";
HTML = @"Metrology\?";
break;
case Test.DailyRPMPLRatio:
FileNameWithoutExtension = "PHOTOLUMINESCENCE_data-short";
Header = "fProductId;fDate;g4Scribe;fRecipeName;bandEdge_nm;bandEdge_V;yellowBand_Pmw;yellowBand_nm;yellowBand_V;Reactor;Pocket Number;Tool ID";
QueryFilter = "PL Ratio";
Title = "RPM Daily Verification";
HTML = @"Metrology\17 - rpm_verif_daily.html";
break;
case Test.VerificationRPM:
FileNameWithoutExtension = "PhotoLuminescence_Ver";
Header = "Part;Process;Date;Test;Value";
QueryFilter = "PL Edge Wavelength";
Title = "PL Daily Verification - [PL Edge Wavelength]";
HTML = @"Metrology\18 - photoluminescence_verif_daily.html";
break;
case Test.Photoreflectance:
FileNameWithoutExtension = "photoreflect_iqs_01";
Header = "Lot;Date;Part;Reactor;Scribe;Pocket;Tool;Point;WaferPosition_PR;PR_Peak";
QueryFilter = "PR Barrier Composition";
Title = "Photoreflectance 6 in, Photoreflectance 8 in";
HTML = @"GaN Epi Data\07 - photoreflectance.html";
break;
case Test.UV:
FileNameWithoutExtension = "uv_iqs_01";
Header = string.Empty;
QueryFilter = "UV Broken";
Title = "UV";
HTML = @"GaN Epi Data\15 - uv 2.1.html";
break;
case Test.VpdIcpmsAnalyte:
FileNameWithoutExtension = "VPD_iqs_01";
Header = "Reactor;RunID;RunDate;PartNumber;PocketNumber;WaferScribe;Analyte;Value";
QueryFilter = "Mg";
Title = "VpdIcpmsAnalyteData";
HTML = @"";
break;
case Test.WarpAndBow:
FileNameWithoutExtension = "warp_iqs_01";
Header = "fDate;fRecipeName;fProductId;g4Scribe;warp;bow;tool;Reactor;Pocket ID;bow_range;BowX;BowY;CenterBow";
QueryFilter = "BowCenter";
Title = "Warp and Bow";
HTML = @"GaN Epi Data\14 - warp.html";
break;
case Test.VerificationWarpAndBow:
FileNameWithoutExtension = "warp_ver_iqs_01";
Header = "Part;Process;Date;WaferScribe;totWarp;bow";
QueryFilter = "Bow Calibration";
Title = "6 Inch Warp/Bow Daily Verification, 8 Inch Warp/Bow Daily Verification";
HTML = @"Metrology\19 - warp_cal_daily.html";
break;
case Test.XRDXY:
FileNameWithoutExtension = "xrd_iqs_NEW_01";
Header = "Reactor;fDate;fRecipeName;Lot;pocketNumber;g4Scribe;ToolID;Name;Value;Group";
QueryFilter = "SL Period";
Title = "XRD XY Raw Data Viewer";
HTML = @"GaN Epi Data\11 - xrd.html";
break;
case Test.XRDWeightedAverage:
FileNameWithoutExtension = "xrd_iqs_NEW_01_WtAVG";
Header = "Reactor;fDate;fRecipeName;Lot;pocketNumber;g4Scribe;Name;Value;Group";
//QueryFilter = "Al% Barrier WTAVG";
QueryFilter = "SL Period WTAVG";
Title = "XRD Weighted Average Data";
HTML = @"GaN Epi Data\11 - xrd.html";
break;
case Test.MonthlyXRD:
FileNameWithoutExtension = "xrd_monthly_ver_iqs_01";
Header = "Part;Process;Date;TestName;Value";
QueryFilter = "XRD 2-Theta Position";
Title = "XRD Monthly Verification";
HTML = @"Metrology\03 - xrd_verif_monthly.html";
break;
case Test.WeeklyXRD:
FileNameWithoutExtension = "xrd_weekly_ver_iqs_01";
Header = "Part;Process;Lot;Date;TestName;Value";
QueryFilter = "XRD Weekly AL% Center";
Title = "XRD Weekly Verification";
HTML = @"Metrology\12 - xrd_verif_weekly.html";
break;
case Test.JVXRD:
FileNameWithoutExtension = "xrd_iqs_NEW_01";
Header = "Reactor;fDate;fRecipeName;Lot;pocketNumber;g4Scribe;ToolID;Name;Value;Group";
QueryFilter = "SL Period";
Title = "XRD XY Raw Data Viewer";
HTML = @"GaN Epi Data\11 - xrd.html";
break;
default:
throw new Exception();
}
FileName = string.Concat(FileNameWithoutExtension, ".txt");
}
public ScopeInfo ShallowCopy() => (ScopeInfo)MemberwiseClone();
}

View File

@ -0,0 +1,21 @@
namespace Adaptation.Shared.Metrology;
public partial class WS
{
public class Attachment
{
public string UniqueId { get; set; }
public string DestinationFileName { get; set; }
public string SourceFileName { get; set; }
public Attachment(string uniqueId, string destinationFileName, string sourceFileName)
{
UniqueId = uniqueId;
DestinationFileName = destinationFileName;
SourceFileName = sourceFileName;
}
}
}

View File

@ -0,0 +1,27 @@
using System.Collections.Generic;
using System.Text.Json;
namespace Adaptation.Shared.Metrology;
public partial class WS
{
// this class represents the response from the Inbound API endpoint
public class Results
{
// true or false if data was written to the database
public bool Success { get; set; }
// if true, contains ID of the Header record in the database
public long HeaderID { get; set; }
// if false, this collection will contain a list of errors
public List<string> Errors { get; set; }
// this collection will contain a list of warnings; they will not prevent data from being saved
public List<string> Warnings { get; set; }
// this is just a helper function to make displaying the results easier
public override string ToString() => JsonSerializer.Serialize(this, GetType());
}
}
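As an illustrative sketch only (not part of this commit), the comments above suggest how a caller might inspect a Results payload after posting to the Inbound API; the sample JSON string and the ResultsSketch type are hypothetical, not taken from this repository.
using System;
using System.Text.Json;
using Adaptation.Shared.Metrology;
internal static class ResultsSketch
{
    internal static void Check()
    {
        // Hypothetical response body; a real one is returned by the Inbound API POST.
        string resultsJson = "{\"Success\":false,\"HeaderID\":0,\"Errors\":[\"example error\"],\"Warnings\":[]}";
        WS.Results results = JsonSerializer.Deserialize<WS.Results>(resultsJson);
        if (results.Success)
            Console.WriteLine($"Saved as header {results.HeaderID}"); // HeaderID identifies the Header record that was written
        else
            Console.WriteLine(string.Join(Environment.NewLine, results.Errors)); // Errors is populated when Success is false
    }
}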

View File

@ -0,0 +1,123 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Net.Http;
using System.Text;
using System.Text.Json;
namespace Adaptation.Shared.Metrology;
public partial class WS
{
public static (string, Results) SendData(string url, object payload, int timeoutSeconds = 120)
{
Results results = new();
string resultsJson = string.Empty;
try
{
string json = JsonSerializer.Serialize(payload, payload.GetType());
if (string.IsNullOrEmpty(url) || !url.Contains(":") || !url.Contains("."))
throw new Exception("Invalid URL");
using (HttpClient httpClient = new())
{
httpClient.Timeout = new TimeSpan(0, 0, 0, timeoutSeconds, 0);
HttpRequestMessage httpRequestMessage = new()
{
RequestUri = new Uri(url),
Method = HttpMethod.Post,
Content = new StringContent(json, Encoding.UTF8, "application/json")
};
HttpResponseMessage httpResponseMessage = httpClient.SendAsync(httpRequestMessage, HttpCompletionOption.ResponseContentRead).Result;
resultsJson = httpResponseMessage.Content.ReadAsStringAsync().Result;
results = JsonSerializer.Deserialize<Results>(resultsJson);
}
if (!results.Success)
results.Errors.Add(results.ToString());
}
catch (Exception e)
{
Exception exception = e;
StringBuilder stringBuilder = new();
while (exception is not null)
{
_ = stringBuilder.AppendLine(exception.Message);
exception = exception.InnerException;
}
if (results.Errors is null)
results.Errors = new List<string>();
results.Errors.Add(stringBuilder.ToString());
}
return new(resultsJson, results);
}
// this method is a wrapper for attaching a file to either a header or data record
// URL is the same URL used for SendData, ex: http://localhost/api/inbound/CDE
// attachToHeaderId is the ID returned by SendData
// attachToDataUniqueId is the string unique ID for the data record, aka the Title of the Sharepoint list entry
// fileContents is a byte array with the contents of the file
// fileName identifies which attachment this is: image.pdf, data.pdf, data.txt, header.pdf, etc.
// timeoutSeconds is configured as the request timeout
// this method will either succeed or throw an exception
// also, this has been made synchronous
public static void AttachFile(string url, long attachToHeaderId, string attachToDataUniqueId, byte[] fileContents, string fileName, int timeoutSeconds = 60)
{
using HttpClient httpClient = new();
string requestUrl = url + "/attachment?headerid=" + attachToHeaderId.ToString();
if (!string.IsNullOrWhiteSpace(attachToDataUniqueId))
{
requestUrl += "&datauniqueid=";
requestUrl += System.Net.WebUtility.UrlEncode(attachToDataUniqueId);
}
requestUrl += "&filename="; // this is just so the web server log shows the filename
requestUrl += System.Net.WebUtility.UrlEncode(fileName);
httpClient.Timeout = new TimeSpan(0, 0, 0, timeoutSeconds, 0);
MultipartFormDataContent multipartFormDataContent = new();
ByteArrayContent byteArrayContent = new(fileContents);
byteArrayContent.Headers.ContentType = new System.Net.Http.Headers.MediaTypeHeaderValue("application/octet-stream");
multipartFormDataContent.Add(byteArrayContent, "attachment", fileName);
HttpResponseMessage httpResponseMessage = httpClient.PostAsync(requestUrl, multipartFormDataContent).Result;
if (httpResponseMessage.IsSuccessStatusCode)
return;
string resultBody = httpResponseMessage.Content.ReadAsStringAsync().Result;
throw new Exception("Attachment failed: " + resultBody);
}
public static void AttachFiles(string url, long headerID, List<Attachment> headerAttachments = null, List<Attachment> dataAttachments = null)
{
try
{
if (headerAttachments is not null)
{
foreach (Attachment attachment in headerAttachments)
AttachFile(url, headerID, "", File.ReadAllBytes(attachment.SourceFileName), attachment.DestinationFileName);
}
if (dataAttachments is not null)
{
foreach (Attachment attachment in dataAttachments)
AttachFile(url, headerID, attachment.UniqueId, File.ReadAllBytes(attachment.SourceFileName), attachment.DestinationFileName);
}
//MessageBox.Show(r.ToString());
}
catch (Exception e)
{
Exception exception = e;
StringBuilder stringBuilder = new();
while (exception is not null)
{
_ = stringBuilder.AppendLine(exception.Message);
exception = exception.InnerException;
}
//MessageBox.Show(msgs.ToString(), "Exception", //MessageBoxButtons.OK, //MessageBoxIcon.Error);
throw new Exception(stringBuilder.ToString());
}
}
}
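As an illustrative sketch only (not part of this commit), the comments above describe the intended call order: SendData first, then attach files against the returned HeaderID. The endpoint URL echoes the example in the comment; the payload shape, unique ID, file paths, and the InboundSketch type are placeholders.
using System;
using System.Collections.Generic;
using Adaptation.Shared.Metrology;
internal static class InboundSketch
{
    internal static void Send()
    {
        string url = "http://localhost/api/inbound/CDE"; // example endpoint from the comment above
        var payload = new { CellName = "EXAMPLE", Runs = new[] { new { UniqueId = "RUN-1", Value = 1.23 } } }; // placeholder payload shape
        (_, WS.Results results) = WS.SendData(url, payload);
        if (!results.Success)
            throw new Exception(string.Join(";", results.Errors));
        // Header-level attachments ignore the unique ID; data-level attachments need the data record's unique ID.
        List<WS.Attachment> headerAttachments = new() { new WS.Attachment(string.Empty, "header.pdf", @"C:\Temp\header.pdf") };
        List<WS.Attachment> dataAttachments = new() { new WS.Attachment("RUN-1", "data.pdf", @"C:\Temp\data.pdf") };
        WS.AttachFiles(url, results.HeaderID, headerAttachments, dataAttachments);
    }
}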

View File

@ -12,8 +12,6 @@ namespace Adaptation.Shared;
public class ProcessDataStandardFormat
{
public const string RecordStart = "RECORD_START";
public enum SearchFor
{
EquipmentIntegration = 1,
@ -348,8 +346,8 @@ public class ProcessDataStandardFormat
int startsAt = 0;
string[] segments;
int? currentGroup = null;
char inputSeperator = '\t';
char outputSeperator = '\t';
char inputSeparator = '\t';
char outputSeparator = '\t';
List<int> vs = new();
List<int[]> groups = new();
string[] lines = File.ReadAllLines(reportFullPath);
@ -358,7 +356,7 @@ public class ProcessDataStandardFormat
{
if (string.IsNullOrEmpty(lines[i]))
continue;
segments = lines[i].Split(inputSeperator);
segments = lines[i].Split(inputSeparator);
if (currentGroup is null)
currentGroup = segments.Length;
if (segments.Length != currentGroup)
@ -368,20 +366,20 @@ public class ProcessDataStandardFormat
startsAt = i;
}
}
if (startsAt == lines.Length - 1 && lines[0].Split(inputSeperator).Length != currentGroup)
if (startsAt == lines.Length - 1 && lines[0].Split(inputSeparator).Length != currentGroup)
groups.Add(new int[] { lines.Length - 1, lines.Length - 1 });
for (int g = 0; g < groups.Count; g++)
{
vs.Clear();
group = groups[g];
line = lines[group[0]];
segments = line.Split(inputSeperator);
segments = line.Split(inputSeparator);
for (int s = 0; s < segments.Length; s++)
vs.Add(segments[s].Length);
for (int i = group[0]; i <= group[1]; i++)
{
line = lines[i];
segments = line.Split(inputSeperator);
segments = line.Split(inputSeparator);
for (int s = 0; s < segments.Length; s++)
{
if (vs[s] < segments[s].Length)
@ -390,16 +388,16 @@ public class ProcessDataStandardFormat
}
_ = stringBuilder.Clear();
for (int s = 0; s < segments.Length; s++)
_ = stringBuilder.Append((s + 1).ToString().PadLeft(vs[s], ' ')).Append(outputSeperator);
_ = stringBuilder.Append((s + 1).ToString().PadLeft(vs[s], ' ')).Append(outputSeparator);
_ = stringBuilder.Remove(stringBuilder.Length - 1, 1);
results.Add(stringBuilder.ToString());
for (int i = group[0]; i <= group[1]; i++)
{
line = lines[i];
_ = stringBuilder.Clear();
segments = line.Split(inputSeperator);
segments = line.Split(inputSeparator);
for (int s = 0; s < segments.Length; s++)
_ = stringBuilder.Append(segments[s].PadLeft(vs[s], ' ')).Append(outputSeperator);
_ = stringBuilder.Append(segments[s].PadLeft(vs[s], ' ')).Append(outputSeparator);
_ = stringBuilder.Remove(stringBuilder.Length - 1, 1);
results.Add(stringBuilder.ToString());
}

View File

@ -1,22 +1,23 @@
using System;
using System.Collections.Generic;
using System.IO;
namespace Adaptation.Shared.Properties;
public interface ILogistics
{
public object NullData { get; }
public string JobID { get; } //CellName
public long Sequence { get; } //Ticks
public DateTime DateTimeFromSequence { get; }
public FileInfo FileInfo { get; }
public string JobID { get; }
public List<string> Logistics1 { get; }
public List<Logistics2> Logistics2 { get; }
public string MID { get; }
public string MesEntity { get; }
public object NullData { get; }
public string ProcessJobID { get; }
public string ReportFullPath { get; }
public long Sequence { get; }
public double TotalSecondsSinceLastWriteTimeFromSequence { get; }
public string MesEntity { get; } //SPC
public string ReportFullPath { get; } //Extract file
public string ProcessJobID { get; set; } //Reactor (duplicate but I want it in the logistics)
public string MID { get; set; } //Lot & Pocket || Lot
public List<string> Tags { get; set; }
public List<string> Logistics1 { get; set; }
public List<Logistics2> Logistics2 { get; set; }
}