Not Tested

json in process-data-standard-format

process-data-standard-format with HeaderId and SubgroupId

process-data-standard-format with pipes

UniqueId replacement for attachments

Write input PDSF in output after EOF

ProcessDataStandardFormat over Tuple

MoveMatchingFiles to use ProcessDataStandardFormatMapping
Mike Phares 2025-05-22 09:38:11 -07:00
parent fd853ddcd4
commit 525d54f392
15 changed files with 1843 additions and 1230 deletions

View File

@@ -60,6 +60,26 @@
"command": "code ../EC.csproj",
"problemMatcher": []
},
{
"label": "Readme",
"type": "shell",
"command": "code ../README.md",
"problemMatcher": []
},
{
"label": "File-Folder-Helper AOT s X Day-Helper-2025-03-20",
"type": "shell",
"command": "L:/DevOps/Mesa_FI/File-Folder-Helper/bin/Release/net8.0/win-x64/publish/File-Folder-Helper.exe",
"args": [
"s",
"X",
"L:/DevOps/EAF-Mesa-Integration/EC",
"Day-Helper-2025-03-20",
"false",
"4"
],
"problemMatcher": []
},
{
"label": "Git Config",
"type": "shell",

View File

@@ -67,7 +67,7 @@
<PackageReference Include="System.Text.Json" Version="9.0.0" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="Infineon.Mesa.PDF.Text.Stripper" Version="4.8.0.1"><NoWarn>NU1701</NoWarn></PackageReference>
<PackageReference Include="Infineon.Mesa.PDF.Text.Stripper" Version="4.8.0.2"><NoWarn>NU1701</NoWarn></PackageReference>
</ItemGroup>
<ItemGroup>
<PackageReference Include="Infineon.Yoda.DotNetCore" Version="5.4.3" />

View File

@@ -9,7 +9,7 @@ namespace Adaptation.FileHandlers;
public class CellInstanceConnectionName
{
internal static IFileRead Get(ISMTP smtp, Dictionary<string, string> fileParameter, string cellInstanceName, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList<ModelObjectParameterDefinition> modelObjectParameters, string equipmentDictionaryName, Dictionary<string, List<long>> dummyRuns, Dictionary<long, List<string>> staticRuns, bool useCyclicalForDescription, int? connectionCount)
internal static IFileRead Get(ISMTP smtp, Dictionary<string, string> fileParameter, string cellInstanceName, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList<ModelObjectParameterDefinition> modelObjectParameters, string equipmentDictionaryName, Dictionary<string, List<long>> dummyRuns, Dictionary<long, List<Shared.Metrology.WS.Results>> staticRuns, bool useCyclicalForDescription, int? connectionCount)
{
IFileRead result = cellInstanceConnectionName switch
{

View File

@@ -18,7 +18,7 @@ public class FileRead : Shared.FileRead, IFileRead
private readonly AppSettings _AppSettings;
public FileRead(ISMTP smtp, Dictionary<string, string> fileParameter, string cellInstanceName, int? connectionCount, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList<ModelObjectParameterDefinition> modelObjectParameters, string equipmentDictionaryName, Dictionary<string, List<long>> dummyRuns, Dictionary<long, List<string>> staticRuns, bool useCyclicalForDescription, bool isEAFHosted) :
public FileRead(ISMTP smtp, Dictionary<string, string> fileParameter, string cellInstanceName, int? connectionCount, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList<ModelObjectParameterDefinition> modelObjectParameters, string equipmentDictionaryName, Dictionary<string, List<long>> dummyRuns, Dictionary<long, List<Shared.Metrology.WS.Results>> staticRuns, bool useCyclicalForDescription, bool isEAFHosted) :
base(new Description(), true, smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null)
{
_MinFileLength = 10;

View File

@@ -18,7 +18,7 @@ public class FileRead : Shared.FileRead, IFileRead
private readonly AppSettings _AppSettings;
public FileRead(ISMTP smtp, Dictionary<string, string> fileParameter, string cellInstanceName, int? connectionCount, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList<ModelObjectParameterDefinition> modelObjectParameters, string equipmentDictionaryName, Dictionary<string, List<long>> dummyRuns, Dictionary<long, List<string>> staticRuns, bool useCyclicalForDescription, bool isEAFHosted) :
public FileRead(ISMTP smtp, Dictionary<string, string> fileParameter, string cellInstanceName, int? connectionCount, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList<ModelObjectParameterDefinition> modelObjectParameters, string equipmentDictionaryName, Dictionary<string, List<long>> dummyRuns, Dictionary<long, List<Shared.Metrology.WS.Results>> staticRuns, bool useCyclicalForDescription, bool isEAFHosted) :
base(new Description(), true, smtp, fileParameter, cellInstanceName, connectionCount, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted: connectionCount is null)
{
_MinFileLength = 10;

View File

@@ -44,9 +44,9 @@ public class FileRead : Properties.IFileRead
protected readonly string _CellInstanceConnectionNameBase;
protected readonly Dictionary<string, List<long>> _DummyRuns;
protected readonly Dictionary<string, string> _FileParameter;
protected readonly Dictionary<long, List<string>> _StaticRuns;
protected readonly string _ParameterizedModelObjectDefinitionType;
protected readonly FileConnectorConfiguration _FileConnectorConfiguration;
protected readonly Dictionary<long, List<Metrology.WS.Results>> _StaticRuns;
protected readonly IList<ModelObjectParameterDefinition> _ModelObjectParameterDefinitions;
bool Properties.IFileRead.IsEvent => _IsEvent;
@@ -63,7 +63,147 @@ public class FileRead : Properties.IFileRead
string Properties.IFileRead.CellInstanceConnectionName => _CellInstanceConnectionName;
string Properties.IFileRead.ParameterizedModelObjectDefinitionType => _ParameterizedModelObjectDefinitionType;
public FileRead(IDescription description, bool isEvent, ISMTP smtp, Dictionary<string, string> fileParameter, string cellInstanceName, int? connectionCount, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList<ModelObjectParameterDefinition> modelObjectParameters, string equipmentDictionaryName, Dictionary<string, List<long>> dummyRuns, Dictionary<long, List<string>> staticRuns, bool useCyclicalForDescription, bool isEAFHosted)
protected static string GetTupleFile<T>(Logistics logistics, List<T> descriptions, Properties.IScopeInfo scopeInfo, string duplicateDirectory, string duplicateFile) where T : Properties.IDescription
{
string result;
string rds;
string fileName;
string dateValue;
string rdsPlaceholder = "%RDS%";
string mesEntityPlaceholder = "%MesEntity%";
if (descriptions.Count == 0 || string.IsNullOrEmpty(descriptions[0].RDS))
rds = logistics.MID;
else
rds = descriptions[0].RDS;
string[] segments = scopeInfo.FileName.Split(new string[] { "DateTime:" }, StringSplitOptions.RemoveEmptyEntries);
if (segments.Length == 0)
result = string.Concat(duplicateDirectory, @"\", scopeInfo.FileNameWithoutExtension.Replace(rdsPlaceholder, rds).Replace(mesEntityPlaceholder, logistics.MesEntity));
else
{
segments = segments[1].Split('%');
string datePlaceholder = "%DateTime%";
dateValue = logistics.DateTimeFromSequence.ToString(segments[0]);
foreach (string segment in scopeInfo.FileName.Split('%'))
{
if (!segment.Contains(segments[0]))
continue;
datePlaceholder = string.Concat('%', segment, '%');
}
fileName = scopeInfo.FileName.Replace(rdsPlaceholder, rds).Replace(mesEntityPlaceholder, logistics.MesEntity).Replace(datePlaceholder, dateValue);
if (!duplicateFile.Contains("Viewer"))
result = Path.Combine(duplicateDirectory, fileName);
else
result = Path.Combine(duplicateDirectory, $"Viewer_{fileName}");
}
if (result.Contains('%'))
throw new Exception("Placeholder exists!");
return result;
}
protected void WaitForFileConsumption<T>(string sourceDirectoryCloaking, Logistics logistics, DateTime dateTime, List<T> descriptions, string successDirectory, string duplicateDirectory, string duplicateFile, List<(Properties.IScopeInfo, string)> collection) where T : Properties.IDescription
{
bool check;
long preWait;
string tupleFile;
string tupleFileName = string.Empty;
List<string> duplicateFiles = new();
StringBuilder stringBuilder = new();
List<int> consumedFileIndices = new();
bool moreThanAnHour = _BreakAfterSeconds > 3600;
long breakAfter = dateTime.AddSeconds(_BreakAfterSeconds).Ticks;
if (_FileConnectorConfiguration?.FileHandleWaitTime is null)
preWait = dateTime.AddMilliseconds(1234).Ticks;
else
preWait = dateTime.AddMilliseconds(_FileConnectorConfiguration.FileHandleWaitTime.Value).Ticks;
if (collection.Count == 0)
duplicateFiles.Add(duplicateFile);
string fileName = Path.GetFileNameWithoutExtension(logistics.ReportFullPath);
string successFile = string.Concat(successDirectory, @"\", Path.GetFileName(logistics.ReportFullPath));
foreach ((Properties.IScopeInfo scopeInfo, string text) in collection)
{
if (scopeInfo.FileName.StartsWith(@"\"))
tupleFile = scopeInfo.FileName;
else if (!scopeInfo.FileName.Contains('%'))
tupleFile = string.Concat(duplicateDirectory, @"\", fileName, "_", scopeInfo.FileNameWithoutExtension, ".pdsfc");
else
tupleFile = GetTupleFile(logistics, descriptions, scopeInfo, duplicateDirectory, duplicateFile);
tupleFileName = Path.GetFileNameWithoutExtension(tupleFile).Split('.')[0];
duplicateFiles.Add(tupleFile);
if (_IsEAFHosted)
File.WriteAllText(tupleFile, text);
}
for (short i = 0; i < short.MaxValue; i++)
{
if (DateTime.Now.Ticks > preWait)
break;
Thread.Sleep(100);
}
if (!moreThanAnHour)
{
for (short z = 0; z < short.MaxValue; z++)
{
try
{
check = string.IsNullOrEmpty(successDirectory) || File.Exists(successFile);
if (check)
{
consumedFileIndices.Clear();
for (int i = 0; i < duplicateFiles.Count; i++)
{
if (!File.Exists(duplicateFiles[i]))
{
if (string.IsNullOrEmpty(tupleFileName))
consumedFileIndices.Add(i);
else if (duplicateFiles.All(l => Path.GetFileNameWithoutExtension(l).Split('.')[0] == tupleFileName))
{
for (int j = 0; j < duplicateFiles.Count; j++)
consumedFileIndices.Add(j);
}
else
consumedFileIndices.Add(i);
}
}
if (consumedFileIndices.Count == duplicateFiles.Count)
break;
}
}
catch (Exception) { }
if (DateTime.Now.Ticks > breakAfter)
{
for (int i = 0; i < duplicateFiles.Count; i++)
{
if (File.Exists(duplicateFiles[i]))
{
try
{ File.Delete(duplicateFiles[i]); }
catch (Exception) { }
_ = stringBuilder.Append('<').Append(duplicateFiles[i]).Append("> ");
}
}
throw new Exception(string.Concat("After {", _BreakAfterSeconds, "} seconds, right side of {", sourceDirectoryCloaking, "} didn't consume file(s) ", stringBuilder));
}
Thread.Sleep(250);
}
}
}
protected void WaitForFileConsumption<T>(DateTime dateTime, List<T> descriptions, bool isDummyRun, string successDirectory, string duplicateDirectory, List<(Properties.IScopeInfo, string)> collection, string duplicateFile) where T : Properties.IDescription
{
if (!isDummyRun && _IsEAFHosted)
WaitForFileConsumption(_FileConnectorConfiguration.SourceDirectoryCloaking, _Logistics, dateTime, descriptions, successDirectory, duplicateDirectory, duplicateFile, collection);
else
{
long breakAfter = DateTime.Now.AddSeconds(_FileConnectorConfiguration.FileHandleWaitTime.Value).Ticks;
for (short i = 0; i < short.MaxValue; i++)
{
if (!_IsEAFHosted || DateTime.Now.Ticks > breakAfter)
break;
Thread.Sleep(500);
}
}
}
public FileRead(IDescription description, bool isEvent, ISMTP smtp, Dictionary<string, string> fileParameter, string cellInstanceName, int? connectionCount, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList<ModelObjectParameterDefinition> modelObjectParameters, string equipmentDictionaryName, Dictionary<string, List<long>> dummyRuns, Dictionary<long, List<Metrology.WS.Results>> staticRuns, bool useCyclicalForDescription, bool isEAFHosted)
{
_SMTP = smtp;
_IsEvent = isEvent;
@@ -160,10 +300,161 @@ public class FileRead : Properties.IFileRead
return result;
}
protected void UpdateLastTicksDuration(long ticksDuration)
{
if (ticksDuration < 50000000)
ticksDuration = 50000000;
_LastTicksDuration = (long)Math.Ceiling(ticksDuration * .667);
_Log.Info($"{new TimeSpan(ticksDuration).TotalMilliseconds} TotalMillisecond(s) to process{Environment.NewLine}{_CellInstanceConnectionName}{Environment.NewLine}<{_ReportFullPath}>");
}
internal static string GetParentParent(string value)
{
string result = Path.GetDirectoryName(Path.GetDirectoryName(value));
return result;
}
internal static List<string> GetDirectoryNames(string directory)
{
#nullable enable
List<string> results = new();
string? fileName;
string? checkDirectory = directory;
string? pathRoot = Path.GetPathRoot(directory);
string extension = Path.GetExtension(directory);
if (string.IsNullOrEmpty(pathRoot))
throw new NullReferenceException(nameof(pathRoot));
if (Directory.Exists(directory))
{
fileName = Path.GetFileName(directory);
if (!string.IsNullOrEmpty(fileName))
results.Add(fileName);
}
else if ((string.IsNullOrEmpty(extension) || extension.Length > 3) && !File.Exists(directory))
{
fileName = Path.GetFileName(directory);
if (!string.IsNullOrEmpty(fileName))
results.Add(fileName);
}
for (int i = 0; i < int.MaxValue; i++)
{
checkDirectory = Path.GetDirectoryName(checkDirectory);
if (string.IsNullOrEmpty(checkDirectory) || checkDirectory == pathRoot)
break;
fileName = Path.GetFileName(checkDirectory);
if (string.IsNullOrEmpty(fileName))
continue;
results.Add(fileName);
}
results.Add(pathRoot);
results.Reverse();
return results;
#nullable disable
}
internal static string GetJobIdParentDirectory(string directory)
{
string result;
if (!string.IsNullOrEmpty(Path.GetFileName(directory)))
result = Path.GetFullPath(GetParentParent(directory));
else
result = Path.GetFullPath(GetParentParent(Path.GetDirectoryName(directory)));
if (!Directory.Exists(result))
_ = Directory.CreateDirectory(result);
return result;
}
internal static string GetFileNameAfterUnderscoreSplit(string reportFullPath)
{
string result;
string[] segments = Path.GetFileNameWithoutExtension(reportFullPath).Split('_');
if (segments.Length <= 2)
result = segments[0];
else
result = string.Concat(segments[0], segments[2]);
return result;
}
internal string[] GetInProcessDirectory(string jobIdDirectory)
{
List<string> results = new();
if (!_IsEAFHosted)
results = new string[] { jobIdDirectory }.ToList();
else
{
string[] files;
string logisticsSequence = _Logistics.Sequence.ToString();
string[] directories = Directory.GetDirectories(jobIdDirectory, $"*{logisticsSequence}*", SearchOption.TopDirectoryOnly);
foreach (string directory in directories)
{
files = Directory.GetFiles(directory, "*", SearchOption.TopDirectoryOnly);
if (files.Length == 0)
continue;
results.Add(directory);
}
}
if ((results is null) || results.Count != 1)
throw new Exception("Didn't find directory by logistics sequence");
return results.ToArray();
}
protected static string[] GetMatches(FileConnectorConfiguration fileConnectorConfiguration)
{
string[] segments;
string[] results = null;
foreach (string subSourceFileFilter in fileConnectorConfiguration.SourceFileFilters)
{
segments = subSourceFileFilter.Split('\\');
if (fileConnectorConfiguration.IncludeSubDirectories.Value)
results = Directory.GetFiles(fileConnectorConfiguration.SourceFileLocation, segments.Last(), SearchOption.AllDirectories);
else
results = Directory.GetFiles(fileConnectorConfiguration.SourceFileLocation, segments.Last(), SearchOption.TopDirectoryOnly);
if (results.Length != 0)
break;
}
return results;
}
protected static void NestExistingFiles(FileConnectorConfiguration fileConnectorConfiguration)
{
// if (!fileConnectorConfiguration.IncludeSubDirectories.Value && fileConnectorConfiguration.TriggerOnCreated is not null && fileConnectorConfiguration.TriggerOnCreated.Value)
if (!fileConnectorConfiguration.IncludeSubDirectories.Value)
{
string[] matches = GetMatches(fileConnectorConfiguration);
if (matches is not null && matches.Length > 0)
{
string fileName;
string nestedDirectory = Path.Combine(fileConnectorConfiguration.SourceFileLocation, DateTime.Now.Ticks.ToString());
if (!Directory.Exists(nestedDirectory))
_ = Directory.CreateDirectory(nestedDirectory);
foreach (string match in matches)
{
fileName = Path.GetFileName(match);
File.Move(match, Path.Combine(nestedDirectory, fileName));
}
}
}
}
protected static List<Properties.IDescription> GetDuplicatorDescriptions(JsonElement[] jsonElements)
{
List<Properties.IDescription> results = new();
Duplicator.Description description;
JsonSerializerOptions jsonSerializerOptions = new() { NumberHandling = JsonNumberHandling.AllowReadingFromString | JsonNumberHandling.WriteAsString };
foreach (JsonElement jsonElement in jsonElements)
{
if (jsonElement.ValueKind != JsonValueKind.Object)
throw new Exception();
description = JsonSerializer.Deserialize<Duplicator.Description>(jsonElement.ToString(), jsonSerializerOptions);
results.Add(description);
}
return results;
}
protected static ModelObjectParameterDefinition[] GetProperties(string cellInstanceConnectionName, IList<ModelObjectParameterDefinition> modelObjectParameters, string propertyNamePrefix)
{
ModelObjectParameterDefinition[] results = (from l in modelObjectParameters where l.Name.StartsWith(propertyNamePrefix) select l).ToArray();
if (!results.Any())
if (results.Length == 0)
throw new Exception(cellInstanceConnectionName);
return results;
}
@@ -171,17 +462,43 @@ public class FileRead : Properties.IFileRead
protected static ModelObjectParameterDefinition[] GetProperties(string cellInstanceConnectionName, IList<ModelObjectParameterDefinition> modelObjectParameters, string propertyNamePrefix, string propertyNameSuffix)
{
ModelObjectParameterDefinition[] results = (from l in modelObjectParameters where l.Name.StartsWith(propertyNamePrefix) && l.Name.EndsWith(propertyNameSuffix) select l).ToArray();
if (!results.Any())
if (results.Length == 0)
throw new Exception(cellInstanceConnectionName);
return results;
}
protected void UpdateLastTicksDuration(long ticksDuration)
protected void SetFileParameter(string key, string value)
{
if (ticksDuration < 50000000)
ticksDuration = 50000000;
_LastTicksDuration = (long)Math.Ceiling(ticksDuration * .667);
_Log.Info($"{new TimeSpan(ticksDuration).TotalMilliseconds} TotalMillisecond(s) to process{Environment.NewLine}{_CellInstanceConnectionName}{Environment.NewLine}<{_ReportFullPath}>");
if (_FileConnectorConfiguration is null || _FileConnectorConfiguration.TargetFileLocation.Contains(string.Concat("%", key, "%")) || _FileConnectorConfiguration.ErrorTargetFileLocation.Contains(string.Concat("%", key, "%")) || _FileConnectorConfiguration.TargetFileName.Contains(string.Concat("%", key, "%")) || _FileConnectorConfiguration.ErrorTargetFileName.Contains(string.Concat("%", key, "%")))
{
if (_FileParameter.ContainsKey(key))
_FileParameter[key] = value;
else
_FileParameter.Add(key, value);
}
}
protected void WritePDSF(IFileRead fileRead, JsonElement[] jsonElements)
{
string directory;
string day = $"{_Logistics.DateTimeFromSequence:yyyy-MM-dd}";
string weekOfYear = _Calendar.GetWeekOfYear(_Logistics.DateTimeFromSequence, CalendarWeekRule.FirstDay, DayOfWeek.Sunday).ToString("00");
string weekDirectory = $"{_Logistics.DateTimeFromSequence:yyyy}_Week_{weekOfYear}";
if (!_CellInstanceConnectionName.StartsWith(_CellInstanceName) && _CellInstanceConnectionNameBase == _EquipmentType)
directory = Path.Combine(_TracePath, _EquipmentType, "Target", weekDirectory, day, _CellInstanceName, _CellInstanceConnectionName);
else
directory = Path.Combine(_TracePath, _EquipmentType, "Source", weekDirectory, day, _CellInstanceName, _CellInstanceConnectionName);
if (!Directory.Exists(directory))
_ = Directory.CreateDirectory(directory);
string file = Path.Combine(directory, string.Concat(_Logistics.MesEntity, "_", _Logistics.Sequence, ".ipdsf"));
string lines = ProcessDataStandardFormat.GetPDSFText(fileRead, _Logistics, jsonElements, logisticsText: string.Empty);
File.WriteAllText(file, lines);
if (_Logistics.TotalSecondsSinceLastWriteTimeFromSequence > 600)
{
try
{ File.SetLastWriteTime(file, _Logistics.DateTimeFromSequence); }
catch (Exception) { }
}
}
protected void WaitForThread(Thread thread, List<Exception> threadExceptions)
@@ -203,7 +520,7 @@ public class FileRead : Properties.IFileRead
}
lock (threadExceptions)
{
if (threadExceptions.Any())
if (threadExceptions.Count != 0)
{
foreach (Exception item in threadExceptions)
_Log.Error(string.Concat(item.Message, Environment.NewLine, Environment.NewLine, item.StackTrace));
@@ -215,230 +532,6 @@ public class FileRead : Properties.IFileRead
}
}
private void WriteAllLines(string to, string[] exceptionLines)
{
string fileName = string.Concat(to, @"\readme.txt");
try
{
if (!Directory.Exists(to))
_ = Directory.CreateDirectory(to);
File.WriteAllLines(fileName, exceptionLines);
}
catch (Exception ex) { _Log.Error(ex.Message); }
}
protected string[] Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, string to, string from, string resolvedFileLocation, Exception exception)
{
string[] results;
bool isErrorFile = exception is not null;
if (!to.EndsWith(@"\"))
_ = string.Concat(to, @"\");
if (!isErrorFile)
results = Array.Empty<string>();
else
{
results = new string[] { _Logistics.Sequence.ToString(), _Logistics.ReportFullPath, from, resolvedFileLocation, to, string.Empty, string.Empty, exception.Message, string.Empty, string.Empty, exception.StackTrace };
if (!_IsDuplicator)
WriteAllLines(to, results);
}
if (extractResults is not null && extractResults.Item4 is not null && extractResults.Item4.Any())
{
string itemFile;
List<string> directories = new();
foreach (FileInfo sourceFile in extractResults.Item4)
{
if (sourceFile.FullName != _Logistics.ReportFullPath)
{
itemFile = sourceFile.FullName.Replace(from, to);
Shared1880(itemFile, directories, sourceFile, isErrorFile);
}
else if (!isErrorFile && _Logistics is not null)
Shared1811(to, sourceFile);
}
Shared0231(directories);
}
return results;
}
protected static string GetTupleFile<T>(Logistics logistics, List<T> descriptions, Properties.IScopeInfo scopeInfo, string duplicateDirectory, string duplicateFile) where T : Properties.IDescription
{
string result;
string rds;
string fileName;
string dateValue;
string rdsPlaceholder = "%RDS%";
string mesEntityPlaceholder = "%MesEntity%";
if (!descriptions.Any() || string.IsNullOrEmpty(descriptions[0].RDS))
rds = logistics.MID;
else
rds = descriptions[0].RDS;
string[] segments = scopeInfo.FileName.Split(new string[] { "DateTime:" }, StringSplitOptions.RemoveEmptyEntries);
if (segments.Length == 0)
result = string.Concat(duplicateDirectory, @"\", scopeInfo.FileNameWithoutExtension.Replace(rdsPlaceholder, rds).Replace(mesEntityPlaceholder, logistics.MesEntity));
else
{
segments = segments[1].Split('%');
string datePlaceholder = "%DateTime%";
dateValue = logistics.DateTimeFromSequence.ToString(segments[0]);
foreach (string segment in scopeInfo.FileName.Split('%'))
{
if (!segment.Contains(segments[0]))
continue;
datePlaceholder = string.Concat('%', segment, '%');
}
fileName = scopeInfo.FileName.Replace(rdsPlaceholder, rds).Replace(mesEntityPlaceholder, logistics.MesEntity).Replace(datePlaceholder, dateValue);
if (!duplicateFile.Contains("Viewer"))
result = Path.Combine(duplicateDirectory, fileName);
else
result = Path.Combine(duplicateDirectory, $"Viewer_{fileName}");
}
if (result.Contains('%'))
throw new Exception("Placeholder exists!");
return result;
}
protected void WaitForFileConsumption<T>(string sourceDirectoryCloaking, Logistics logistics, DateTime dateTime, List<T> descriptions, string successDirectory, string duplicateDirectory, string duplicateFile, List<(Properties.IScopeInfo, string)> collection) where T : Properties.IDescription
{
bool check;
long preWait;
string tupleFile;
string tupleFileName = string.Empty;
List<string> duplicateFiles = new();
StringBuilder stringBuilder = new();
List<int> consumedFileIndices = new();
bool moreThanAnHour = _BreakAfterSeconds > 3600;
long breakAfter = dateTime.AddSeconds(_BreakAfterSeconds).Ticks;
if (_FileConnectorConfiguration?.FileHandleWaitTime is null)
preWait = dateTime.AddMilliseconds(1234).Ticks;
else
preWait = dateTime.AddMilliseconds(_FileConnectorConfiguration.FileHandleWaitTime.Value).Ticks;
if (!collection.Any())
duplicateFiles.Add(duplicateFile);
string fileName = Path.GetFileNameWithoutExtension(logistics.ReportFullPath);
string successFile = string.Concat(successDirectory, @"\", Path.GetFileName(logistics.ReportFullPath));
foreach ((Properties.IScopeInfo scopeInfo, string text) in collection)
{
if (scopeInfo.FileName.StartsWith(@"\"))
tupleFile = scopeInfo.FileName;
else if (!scopeInfo.FileName.Contains('%'))
tupleFile = string.Concat(duplicateDirectory, @"\", fileName, "_", scopeInfo.FileNameWithoutExtension, ".pdsfc");
else
tupleFile = GetTupleFile(logistics, descriptions, scopeInfo, duplicateDirectory, duplicateFile);
tupleFileName = Path.GetFileNameWithoutExtension(tupleFile).Split('.')[0];
duplicateFiles.Add(tupleFile);
if (_IsEAFHosted)
File.WriteAllText(tupleFile, text);
}
for (short i = 0; i < short.MaxValue; i++)
{
if (DateTime.Now.Ticks > preWait)
break;
Thread.Sleep(100);
}
if (!moreThanAnHour)
{
for (short z = 0; z < short.MaxValue; z++)
{
try
{
check = string.IsNullOrEmpty(successDirectory) || File.Exists(successFile);
if (check)
{
consumedFileIndices.Clear();
for (int i = 0; i < duplicateFiles.Count; i++)
{
if (!File.Exists(duplicateFiles[i]))
{
if (string.IsNullOrEmpty(tupleFileName))
consumedFileIndices.Add(i);
else if (duplicateFiles.All(l => Path.GetFileNameWithoutExtension(l).Split('.')[0] == tupleFileName))
{
for (int j = 0; j < duplicateFiles.Count; j++)
consumedFileIndices.Add(j);
}
else
consumedFileIndices.Add(i);
}
}
if (consumedFileIndices.Count == duplicateFiles.Count)
break;
}
}
catch (Exception) { }
if (DateTime.Now.Ticks > breakAfter)
{
for (int i = 0; i < duplicateFiles.Count; i++)
{
if (File.Exists(duplicateFiles[i]))
{
try
{ File.Delete(duplicateFiles[i]); }
catch (Exception) { }
_ = stringBuilder.Append('<').Append(duplicateFiles[i]).Append("> ");
}
}
throw new Exception(string.Concat("After {", _BreakAfterSeconds, "} seconds, right side of {", sourceDirectoryCloaking, "} didn't consume file(s) ", stringBuilder));
}
Thread.Sleep(250);
}
}
}
protected void SetFileParameter(string key, string value)
{
if (_FileConnectorConfiguration is null || _FileConnectorConfiguration.TargetFileLocation.Contains(string.Concat("%", key, "%")) || _FileConnectorConfiguration.ErrorTargetFileLocation.Contains(string.Concat("%", key, "%")) || _FileConnectorConfiguration.TargetFileName.Contains(string.Concat("%", key, "%")) || _FileConnectorConfiguration.ErrorTargetFileName.Contains(string.Concat("%", key, "%")))
{
if (_FileParameter.ContainsKey(key))
_FileParameter[key] = value;
else
_FileParameter.Add(key, value);
}
}
protected void SetFileParameterLotIDToLogisticsMID(bool includeLogisticsSequence = true)
{
string key;
if (!includeLogisticsSequence)
key = "LotID";
else
key = "LotIDWithLogisticsSequence";
string value = string.Concat(_Logistics.MID, "_", _Logistics.Sequence, "_", DateTime.Now.Ticks - _Logistics.Sequence);
SetFileParameter(key, value);
}
protected void SetFileParameterLotID(string value, bool includeLogisticsSequence = true)
{
string key;
if (!includeLogisticsSequence)
key = "LotID";
else
{
key = "LotIDWithLogisticsSequence";
value = string.Concat(value, "_", _Logistics.Sequence, "_", DateTime.Now.Ticks - _Logistics.Sequence);
}
SetFileParameter(key, value);
}
protected void WritePDSF(IFileRead fileRead, JsonElement[] jsonElements)
{
string directory;
if (!_CellInstanceConnectionName.StartsWith(_CellInstanceName) && _CellInstanceConnectionNameBase == _EquipmentType)
directory = Path.Combine(_TracePath, _EquipmentType, "Target", _CellInstanceName, _CellInstanceConnectionName);
else
directory = Path.Combine(_TracePath, _EquipmentType, "Source", _CellInstanceName, _CellInstanceConnectionName);
if (!Directory.Exists(directory))
_ = Directory.CreateDirectory(directory);
string file = Path.Combine(directory, string.Concat(_Logistics.MesEntity, "_", _Logistics.Sequence, ".ipdsf"));
string lines = ProcessDataStandardFormat.GetPDSFText(fileRead, _Logistics, jsonElements, logisticsText: string.Empty);
File.WriteAllText(file, lines);
if (_Logistics.TotalSecondsSinceLastWriteTimeFromSequence > 600)
{
try
{ File.SetLastWriteTime(file, _Logistics.DateTimeFromSequence); }
catch (Exception) { }
}
}
protected void Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults)
{
if (!_IsEAFHosted)
@@ -457,80 +550,49 @@ public class FileRead : Properties.IFileRead
}
}
protected void TriggerEvents(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, List<string> headerNames, Dictionary<string, string> keyValuePairs)
protected string[] Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, string to, string from, string resolvedFileLocation, Exception exception)
{
object value;
string description;
List<object[]> list;
for (int i = 0; i < extractResults.Item3.Length; i++)
{
_Log.Debug(string.Concat("TriggerEvent - {", _Logistics.ReportFullPath, "} ", i, " of ", extractResults.Item3.Length));
foreach (JsonProperty jsonProperty in extractResults.Item3[i].EnumerateObject())
{
if (jsonProperty.Value.ValueKind != JsonValueKind.String || !keyValuePairs.ContainsKey(jsonProperty.Name))
description = string.Empty;
else
description = keyValuePairs[jsonProperty.Name].Split('|')[0];
if (!_UseCyclicalForDescription || headerNames.Contains(jsonProperty.Name))
value = jsonProperty.Value.ToString();
else
{
list = new List<object[]>();
for (int z = 0; z < extractResults.Item3.Length; z++)
list.Add(new object[] { z, extractResults.Item3[z].GetProperty(jsonProperty.Name).ToString() });
value = list;
}
}
if (_UseCyclicalForDescription)
break;
}
}
protected Tuple<string, Test[], JsonElement[], List<FileInfo>> ReExtract(IFileRead fileRead, List<string> headerNames, Dictionary<string, string> keyValuePairs)
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
if (!Directory.Exists(_FileConnectorConfiguration.SourceFileLocation))
results = null;
string[] results;
bool isErrorFile = exception is not null;
if (!to.EndsWith(@"\"))
_ = string.Concat(to, @"\");
if (!isErrorFile)
results = Array.Empty<string>();
else
{
string[] segments;
string[] matches = null;
foreach (string subSourceFileFilter in _FileConnectorConfiguration.SourceFileFilters)
results = new string[] { _Logistics.Sequence.ToString(), _Logistics.ReportFullPath, from, resolvedFileLocation, to, string.Empty, string.Empty, exception.Message, string.Empty, string.Empty, exception.StackTrace };
if (!_IsDuplicator)
WriteAllLines(to, results);
}
if (extractResults is not null && extractResults.Item4 is not null && extractResults.Item4.Count != 0)
{
string itemFile;
List<string> directories = new();
foreach (FileInfo sourceFile in extractResults.Item4)
{
segments = subSourceFileFilter.Split('\\');
if (_FileConnectorConfiguration.IncludeSubDirectories.Value)
matches = Directory.GetFiles(_FileConnectorConfiguration.SourceFileLocation, segments.Last(), SearchOption.AllDirectories);
else
matches = Directory.GetFiles(_FileConnectorConfiguration.SourceFileLocation, segments.Last(), SearchOption.TopDirectoryOnly);
if (matches.Any())
break;
}
if (matches is null || !matches.Any())
results = null;
else
{
_ReportFullPath = matches[0];
results = fileRead.GetExtractResult(_ReportFullPath, _EventName);
if (!_IsEAFHosted)
TriggerEvents(results, headerNames, keyValuePairs);
if (sourceFile.FullName != _Logistics.ReportFullPath)
{
itemFile = sourceFile.FullName.Replace(from, to);
Shared1880(itemFile, directories, sourceFile, isErrorFile);
}
else if (!isErrorFile && _Logistics is not null)
Shared1811(to, sourceFile);
}
Shared0231(directories);
}
return results;
}
protected static List<Properties.IDescription> GetDuplicatorDescriptions(JsonElement[] jsonElements)
private void WriteAllLines(string to, string[] exceptionLines)
{
List<Properties.IDescription> results = new();
Duplicator.Description description;
JsonSerializerOptions jsonSerializerOptions = new() { NumberHandling = JsonNumberHandling.AllowReadingFromString | JsonNumberHandling.WriteAsString };
foreach (JsonElement jsonElement in jsonElements)
string fileName = string.Concat(to, @"\readme.txt");
try
{
if (jsonElement.ValueKind != JsonValueKind.Object)
throw new Exception();
description = JsonSerializer.Deserialize<Duplicator.Description>(jsonElement.ToString(), jsonSerializerOptions);
results.Add(description);
if (!Directory.Exists(to))
_ = Directory.CreateDirectory(to);
File.WriteAllLines(fileName, exceptionLines);
}
return results;
catch (Exception ex) { _Log.Error(ex.Message); }
}
private void Shared1880(string itemFile, List<string> directories, FileInfo sourceFile, bool isErrorFile)
@@ -562,6 +624,9 @@ public class FileRead : Properties.IFileRead
case FileConnectorConfiguration.PostProcessingModeEnum.Delete:
File.Delete(sourceFile.FullName);
break;
case FileConnectorConfiguration.PostProcessingModeEnum.None:
File.Move(sourceFile.FullName, itemFile);
break;
default:
throw new Exception();
}
@@ -573,11 +638,12 @@ public class FileRead : Properties.IFileRead
if (!_IsDuplicator && _FileConnectorConfiguration.SourceFileFilter != "*" && sourceFile.Exists && sourceFile.Length < _MinFileLength)
{
string directoryName = Path.GetFileName(to);
string jobIdDirectory = Path.GetDirectoryName(to);
string jobIdDirectory = GetJobIdDirectory(to);
DateTime dateTime = DateTime.Now.AddMinutes(-15);
string day = $"{_Logistics.DateTimeFromSequence:yyyy-MM-dd}";
string weekOfYear = _Calendar.GetWeekOfYear(_Logistics.DateTimeFromSequence, CalendarWeekRule.FirstDay, DayOfWeek.Sunday).ToString("00");
string weekDirectory = $"{_Logistics.DateTimeFromSequence:yyyy}_Week_{weekOfYear}{@"\"}{_Logistics.DateTimeFromSequence:yyyy-MM-dd}";
string destinationDirectory = string.Concat(jobIdDirectory, @"\_ Ignore 100 bytes\", weekDirectory, @"\", directoryName);
string weekDirectory = $"{_Logistics.DateTimeFromSequence:yyyy}_Week_{weekOfYear}";
string destinationDirectory = Path.Combine(jobIdDirectory, "_ Ignore 100 bytes", weekDirectory, day, directoryName);
if (!Directory.Exists(destinationDirectory))
_ = Directory.CreateDirectory(destinationDirectory);
File.Move(sourceFile.FullName, string.Concat(destinationDirectory, @"\", sourceFile.Name));
@@ -588,13 +654,13 @@ public class FileRead : Properties.IFileRead
{
if (!checkDirectory.Contains('_'))
continue;
if (Directory.GetDirectories(checkDirectory, "*", SearchOption.TopDirectoryOnly).Any())
if (Directory.GetDirectories(checkDirectory, "*", SearchOption.TopDirectoryOnly).Length != 0)
continue;
if (Directory.GetFiles(checkDirectory, "*", SearchOption.TopDirectoryOnly).Any())
if (Directory.GetFiles(checkDirectory, "*", SearchOption.TopDirectoryOnly).Length != 0)
continue;
if (Directory.GetDirectories(checkDirectory, "*", SearchOption.AllDirectories).Any())
if (Directory.GetDirectories(checkDirectory, "*", SearchOption.AllDirectories).Length != 0)
continue;
if (Directory.GetFiles(checkDirectory, "*", SearchOption.AllDirectories).Any())
if (Directory.GetFiles(checkDirectory, "*", SearchOption.AllDirectories).Length != 0)
continue;
if (new DirectoryInfo(checkDirectory).CreationTime > dateTime)
continue;
@@ -602,6 +668,52 @@ public class FileRead : Properties.IFileRead
}
}
catch (Exception) { throw; }
DeleteEmptyTopDirectories(jobIdDirectory);
}
}
private string GetJobIdDirectory(string path)
{
string result;
List<string> directoryNames = GetDirectoryNames(path);
if (!directoryNames.Contains(_Logistics.JobID))
result = Path.GetDirectoryName(path);
else
{
result = string.Empty;
foreach (string directoryName in directoryNames)
{
result = Path.Combine(result, directoryName);
if (directoryName == _Logistics.JobID)
break;
}
}
return result;
}
private static void DeleteEmptyTopDirectories(string rootDirectory)
{
if (Directory.Exists(rootDirectory))
{
string[] files;
string[] directories;
string[] subDirectories = Directory.GetDirectories(rootDirectory, "*", SearchOption.TopDirectoryOnly);
foreach (string subDirectory in subDirectories)
{
files = Directory.GetFiles(subDirectory, "*", SearchOption.AllDirectories);
if (files.Length > 0)
continue;
directories = Directory.GetDirectories(subDirectory, "*", SearchOption.TopDirectoryOnly);
if (directories.Length > 0)
continue;
try
{ Directory.Delete(subDirectory); }
catch (UnauthorizedAccessException)
{
new DirectoryInfo(subDirectory).Attributes = FileAttributes.Normal;
Directory.Delete(subDirectory);
}
}
}
}
@@ -611,72 +723,87 @@ public class FileRead : Properties.IFileRead
{
foreach (string directory in (from l in directories orderby l.Split('\\').Length descending select l).Distinct())
{
if (Directory.Exists(directory) && !Directory.GetFiles(directory).Any())
if (Directory.Exists(directory) && Directory.GetFiles(directory).Length == 0)
Directory.Delete(directory);
}
}
}
protected void WaitForFileConsumption<T>(DateTime dateTime, List<T> descriptions, bool isDummyRun, string successDirectory, string duplicateDirectory, List<(Properties.IScopeInfo, string)> collection, string duplicateFile) where T : Properties.IDescription
protected void SetFileParameterLotID(string value, bool includeLogisticsSequence = true)
{
if (!isDummyRun && _IsEAFHosted)
WaitForFileConsumption(_FileConnectorConfiguration.SourceDirectoryCloaking, _Logistics, dateTime, descriptions, successDirectory, duplicateDirectory, duplicateFile, collection);
string key;
if (!includeLogisticsSequence)
key = "LotID";
else
{
long breakAfter = DateTime.Now.AddSeconds(_FileConnectorConfiguration.FileHandleWaitTime.Value).Ticks;
for (short i = 0; i < short.MaxValue; i++)
key = "LotIDWithLogisticsSequence";
value = string.Concat(value, "_", _Logistics.Sequence, "_", DateTime.Now.Ticks - _Logistics.Sequence);
}
SetFileParameter(key, value);
}
protected void SetFileParameterLotIDToLogisticsMID(bool includeLogisticsSequence = true)
{
string key;
if (!includeLogisticsSequence)
key = "LotID";
else
key = "LotIDWithLogisticsSequence";
string value = string.Concat(_Logistics.MID, "_", _Logistics.Sequence, "_", DateTime.Now.Ticks - _Logistics.Sequence);
SetFileParameter(key, value);
}
protected Tuple<string, Test[], JsonElement[], List<FileInfo>> ReExtract(IFileRead fileRead, List<string> headerNames, Dictionary<string, string> keyValuePairs)
{
Tuple<string, Test[], JsonElement[], List<FileInfo>> results;
if (!Directory.Exists(_FileConnectorConfiguration.SourceFileLocation))
results = null;
else
{
string[] matches = GetMatches(_FileConnectorConfiguration);
if (matches is null || matches.Length == 0)
results = null;
else
{
if (!_IsEAFHosted || DateTime.Now.Ticks > breakAfter)
break;
Thread.Sleep(500);
_ReportFullPath = matches[0];
results = fileRead.GetExtractResult(_ReportFullPath, _EventName);
if (!_IsEAFHosted)
TriggerEvents(results, headerNames, keyValuePairs);
}
}
}
internal static string GetJobIdParentDirectory(string directory)
{
string result;
if (!string.IsNullOrEmpty(Path.GetFileName(directory)))
result = Path.GetFullPath(GetParentParent(directory));
else
result = Path.GetFullPath(GetParentParent(Path.GetDirectoryName(directory)));
if (!Directory.Exists(result))
_ = Directory.CreateDirectory(result);
return result;
}
internal string[] GetInProcessDirectory(string jobIdDirectory)
{
string[] results;
if (!_IsEAFHosted)
results = new string[] { jobIdDirectory };
else
{
string logisticsSequence = _Logistics.Sequence.ToString();
results = Directory.GetDirectories(jobIdDirectory, string.Concat(_Logistics.MID, '*', logisticsSequence, '*'), SearchOption.TopDirectoryOnly);
}
if ((results is null) || results.Length != 1)
throw new Exception("Didn't find directory by logistics sequence");
return results;
}
internal static string GetFileNameAfterUnderscoreSplit(string reportFullPath)
protected void TriggerEvents(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, List<string> headerNames, Dictionary<string, string> keyValuePairs)
{
string result;
string[] segments = Path.GetFileNameWithoutExtension(reportFullPath).Split('_');
if (segments.Length <= 2)
result = segments[0];
else
result = string.Concat(segments[0], segments[2]);
return result;
}
internal static string GetParentParent(string value)
{
string result = Path.GetDirectoryName(Path.GetDirectoryName(value));
return result;
object value;
string segments;
string description;
List<object[]> list;
for (int i = 0; i < extractResults.Item3.Length; i++)
{
_Log.Debug(string.Concat("TriggerEvent - {", _Logistics.ReportFullPath, "} ", i, " of ", extractResults.Item3.Length));
foreach (JsonProperty jsonProperty in extractResults.Item3[i].EnumerateObject())
{
if (jsonProperty.Value.ValueKind != JsonValueKind.String || !keyValuePairs.TryGetValue(jsonProperty.Name, out segments))
description = string.Empty;
else
description = segments.Split('|')[0];
if (!_UseCyclicalForDescription || headerNames.Contains(jsonProperty.Name))
value = jsonProperty.Value.ToString();
else
{
list = new List<object[]>();
for (int z = 0; z < extractResults.Item3.Length; z++)
list.Add(new object[] { z, extractResults.Item3[z].GetProperty(jsonProperty.Name).ToString() });
value = list;
}
}
if (_UseCyclicalForDescription)
break;
}
}
}
// 2022-06-08 -> Shared - FileRead
// 2025-03-25 -> Shared - FileRead

View File

@@ -35,6 +35,9 @@ public class Logistics : ILogistics
public long Sequence => _Sequence;
public double TotalSecondsSinceLastWriteTimeFromSequence => _TotalSecondsSinceLastWriteTimeFromSequence;
private static string DefaultMesEntity(DateTime dateTime) =>
string.Concat(dateTime.Ticks, "_MES_ENTITY");
public Logistics(IFileRead fileRead)
{
DateTime dateTime = DateTime.Now;
@@ -84,14 +87,14 @@ public class Logistics : ILogistics
_Logistics2 = new List<Logistics2>();
}
public Logistics(string reportFullPath, string logistics)
internal Logistics(string reportFullPath, ProcessDataStandardFormat processDataStandardFormat)
{
string key;
DateTime dateTime;
string[] segments;
_FileInfo = new(reportFullPath);
_Logistics1 = logistics.Split(new string[] { Environment.NewLine }, StringSplitOptions.RemoveEmptyEntries).ToList();
if (!Logistics1.Any() || !Logistics1[0].StartsWith("LOGISTICS_1"))
_Logistics1 = processDataStandardFormat.Logistics.ToList();
if (Logistics1.Count == 0 || !Logistics1[0].StartsWith("LOGISTICS_1"))
{
_NullData = null;
_JobID = "null";
@@ -190,8 +193,6 @@ public class Logistics : ILogistics
}
}
private static string DefaultMesEntity(DateTime dateTime) => string.Concat(dateTime.Ticks, "_MES_ENTITY");
internal void Update(string mid, string processJobID)
{
_MID = mid;

View File

@@ -2,18 +2,29 @@
public partial class WS
{
public class Attachment
{
public string UniqueId { get; set; }
public string DestinationFileName { get; set; }
public string SourceFileName { get; set; }
#nullable enable
public Attachment(string uniqueId, string destinationFileName, string sourceFileName)
public long HeaderId { get; set; }
public string UniqueId { get; set; }
public string SubGroupId { get; set; }
public string AttachmentId { get; set; }
public string SourceFileName { get; set; }
public string HeaderIdDirectory { get; set; }
public string DestinationFileName { get; set; }
public Attachment(Results? results, string headerIdDirectory, string uniqueId, string destinationFileName, string sourceFileName)
{
UniqueId = uniqueId;
DestinationFileName = destinationFileName;
SourceFileName = sourceFileName;
HeaderIdDirectory = headerIdDirectory;
DestinationFileName = destinationFileName;
AttachmentId = System.Guid.NewGuid().ToString();
HeaderId = results?.HeaderId is null ? -1 : results.HeaderId.Value;
SubGroupId = results?.SubgroupId is null ? string.Empty : results.SubgroupId.Value.ToString();
}
}

View File

@@ -1,27 +1,75 @@
using System.Collections.Generic;
using System;
using System.Collections.Generic;
using System.Text;
using System.Text.Json;
using System.Text.Json.Serialization;
namespace Adaptation.Shared.Metrology;
public partial class WS
{
// this class represents the response from the Inbound API endpoint
public class Results
{
// true or false if data was written to the database
public bool Success { get; set; }
// if true, contains ID of the Header record in the database
public long HeaderID { get; set; }
#nullable enable
// if false, this collection will contain a list of errors
public List<string> Errors { get; set; }
[JsonConstructor]
public Results(List<string>? errors,
long? headerId,
long? subgroupId,
bool? success,
List<string>? warnings)
{
Errors = errors;
Success = success;
HeaderId = headerId;
Warnings = warnings;
SubgroupId = subgroupId;
}
// this collection will contain a list of warnings, they will not prevent data from being saved
public List<string> Warnings { get; set; }
[JsonPropertyName("errors")] public List<string>? Errors { get; set; }
[JsonPropertyName("headerID")] public long? HeaderId { get; set; }
[JsonPropertyName("subgroupId")] public long? SubgroupId { get; set; }
[JsonPropertyName("success")] public bool? Success { get; set; }
[JsonPropertyName("warnings")] public List<string>? Warnings { get; set; }
public override string ToString()
{
string result = JsonSerializer.Serialize(this, ResultsSourceGenerationContext.Default.Results);
return result;
}
internal static Results Get(Results results, long? subgroupId) =>
new(results.Errors, results.HeaderId, subgroupId, results.Success, results.Warnings);
internal static Results Get(string resultsJson, Exception e)
{
Results results;
Exception? exception = e;
List<string> errors = new();
StringBuilder stringBuilder = new();
while (exception is not null)
{
_ = stringBuilder.AppendLine(exception.Message);
exception = exception.InnerException;
}
errors.Add(resultsJson);
errors.Add(stringBuilder.ToString());
results = new(errors: errors,
headerId: null,
subgroupId: null,
success: false,
warnings: new());
return results;
}
// this is just a helper function to make displaying the results easier
public override string ToString() => JsonSerializer.Serialize(this, GetType());
}
}
[JsonSourceGenerationOptions(WriteIndented = true)]
[JsonSerializable(typeof(WS.Results))]
internal partial class ResultsSourceGenerationContext : JsonSerializerContext
{
}
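A minimal usage sketch (not part of this commit) for the source-generated serializer context added above, assuming code in the same assembly; the sample JSON payload is hypothetical and only mirrors the JsonPropertyName attributes on Results:

using System;
using System.Collections.Generic;
using System.Text.Json;
using Adaptation.Shared.Metrology;

// Hypothetical inbound API response body.
string resultsJson = "{\"success\":true,\"headerID\":511,\"subgroupId\":4,\"errors\":[],\"warnings\":[]}";
// Deserialize with the generated metadata instead of reflection-based JsonSerializerOptions.
WS.Results? wsResults = JsonSerializer.Deserialize(resultsJson, ResultsSourceGenerationContext.Default.Results);
if (wsResults?.Success is null || !wsResults.Success.Value)
    throw new Exception(string.Join(Environment.NewLine, wsResults?.Errors ?? new List<string>()));

The same call pattern appears in SendData in the next file.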

View File

@@ -10,9 +10,11 @@ namespace Adaptation.Shared.Metrology;
public partial class WS
{
public static (string, Results) SendData(string url, object payload, int timeoutSeconds = 120)
#nullable enable
public static (string, Results) SendData(string url, long sequence, string directory, object payload, int timeoutSeconds = 120)
{
Results results = new();
Results? wsResults = null;
string resultsJson = string.Empty;
try
{
@@ -30,85 +32,83 @@ public partial class WS
};
HttpResponseMessage httpResponseMessage = httpClient.SendAsync(httpRequestMessage, HttpCompletionOption.ResponseContentRead).Result;
resultsJson = httpResponseMessage.Content.ReadAsStringAsync().Result;
results = JsonSerializer.Deserialize<Results>(resultsJson, new JsonSerializerOptions { PropertyNameCaseInsensitive = true });
wsResults = JsonSerializer.Deserialize(resultsJson, ResultsSourceGenerationContext.Default.Results);
if (wsResults is null)
throw new NullReferenceException(nameof(wsResults));
string checkDirectory = Path.Combine(directory, $"-{wsResults.HeaderId}");
if (!Directory.Exists(checkDirectory))
_ = Directory.CreateDirectory(checkDirectory);
File.WriteAllText(Path.Combine(checkDirectory, $"{sequence}.json"), json);
}
if (!results.Success)
results.Errors.Add(results.ToString());
if (wsResults.Success is null || !wsResults.Success.Value)
wsResults.Errors?.Add(wsResults.ToString());
}
catch (Exception e)
{
Exception exception = e;
StringBuilder stringBuilder = new();
while (exception is not null)
{
_ = stringBuilder.AppendLine(exception.Message);
exception = exception.InnerException;
}
results.Errors ??= new List<string>();
results.Errors.Add(resultsJson);
results.Errors.Add(stringBuilder.ToString());
}
return new(resultsJson, results);
{ wsResults ??= Results.Get(resultsJson, e); }
return new(resultsJson, wsResults);
}
// this method is a wrapper for attaching a file to either a header or data record
// URL is the same URL used for SendData, ex: http://localhost/api/inbound/CDE
// attachToHeaderId is the ID returned by SendData
// attachToDataUniqueId is the string unique ID for the data record, aka the Title of the Sharepoint list entry
// fileContents is a byte array with the contents of the file
// fileName is which attachment this is, image.pdf, data.pdf, data.txt, header.pdf, etc
// timeoutSeconds is configured as the request timeout
// this method will either succeed or throw an exception
// also, this has been made synchronous
public static void AttachFile(string url, long attachToHeaderId, string attachToDataUniqueId, byte[] fileContents, string fileName, int timeoutSeconds = 60)
public static void AttachFile(string url, Attachment attachment, int timeoutSeconds = 60)
{
using HttpClient httpClient = new();
string requestUrl = url + "/attachment?headerid=" + attachToHeaderId.ToString();
if (!string.IsNullOrWhiteSpace(attachToDataUniqueId))
{
requestUrl += "&datauniqueid=";
requestUrl += System.Net.WebUtility.UrlEncode(attachToDataUniqueId);
}
requestUrl += "&filename="; // this is just so the web server log shows the filename
requestUrl += System.Net.WebUtility.UrlEncode(fileName);
string json = JsonSerializer.Serialize(attachment);
httpClient.Timeout = new TimeSpan(0, 0, 0, timeoutSeconds, 0);
MultipartFormDataContent multipartFormDataContent = new();
ByteArrayContent byteArrayContent = new(fileContents);
byteArrayContent.Headers.ContentType = new System.Net.Http.Headers.MediaTypeHeaderValue("application/octet-stream");
multipartFormDataContent.Add(byteArrayContent, "attachment", fileName);
HttpResponseMessage httpResponseMessage = httpClient.PostAsync(requestUrl, multipartFormDataContent).Result;
if (httpResponseMessage.IsSuccessStatusCode)
return;
string resultBody = httpResponseMessage.Content.ReadAsStringAsync().Result;
throw new Exception("Attachment failed: " + resultBody);
StringContent httpContent = new(json, Encoding.UTF8, "application/json");
HttpResponseMessage httpResponseMessage = httpClient.PostAsync($"{url}/attachment", httpContent).Result;
if (!httpResponseMessage.IsSuccessStatusCode)
{
string resultBody = httpResponseMessage.Content.ReadAsStringAsync().Result;
throw new Exception($"Attachment failed: {resultBody}");
}
}
public static void AttachFiles(string url, long headerID, List<Attachment> headerAttachments = null, List<Attachment> dataAttachments = null)
public static void AttachFiles(string url, List<Attachment>? headerAttachments = null, List<Attachment>? dataAttachments = null)
{
string directory;
try
{
string? directoryName;
if (headerAttachments is not null)
{
foreach (Attachment attachment in headerAttachments)
AttachFile(url, headerID, "", File.ReadAllBytes(attachment.SourceFileName), attachment.DestinationFileName);
{
directoryName = Path.GetDirectoryName(attachment.HeaderIdDirectory);
if (string.IsNullOrEmpty(directoryName))
continue;
directory = Path.Combine(directoryName, attachment.AttachmentId) ?? throw new Exception();
if (!Directory.Exists(directory))
_ = Directory.CreateDirectory(directory);
File.Copy(attachment.SourceFileName, Path.Combine(directory, attachment.DestinationFileName), overwrite: true);
}
}
if (dataAttachments is not null)
{
foreach (Attachment attachment in dataAttachments)
AttachFile(url, headerID, attachment.UniqueId, File.ReadAllBytes(attachment.SourceFileName), attachment.DestinationFileName);
{
directoryName = Path.GetDirectoryName(attachment.HeaderIdDirectory.Replace("Header", "Data"));
if (string.IsNullOrEmpty(directoryName))
continue;
directory = Path.Combine(directoryName, attachment.AttachmentId) ?? throw new Exception();
if (!Directory.Exists(directory))
_ = Directory.CreateDirectory(directory);
File.Copy(attachment.SourceFileName, Path.Combine(directory, attachment.DestinationFileName), overwrite: true);
}
}
if (headerAttachments is not null)
{
foreach (Attachment attachment in headerAttachments)
AttachFile(url, attachment);
}
if (dataAttachments is not null)
{
foreach (Attachment attachment in dataAttachments)
AttachFile(url, attachment);
}
//MessageBox.Show(r.ToString());
}
catch (Exception e)
{
Exception exception = e;
Exception? exception = e;
StringBuilder stringBuilder = new();
while (exception is not null)
{

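A minimal sketch (not part of this commit) of the reworked attachment flow above: an Attachment now carries HeaderId/SubGroupId taken from a Results instance, AttachFiles copies the source file under an AttachmentId folder, and AttachFile posts the attachment metadata as JSON. The URL, directory, and file names below are hypothetical placeholders:

using System.Collections.Generic;
using Adaptation.Shared.Metrology;

// Hypothetical Results, as would normally be returned by WS.SendData.
WS.Results wsResults = new(errors: new List<string>(), headerId: 511, subgroupId: null, success: true, warnings: new List<string>());
string url = "http://localhost/api/inbound/CDE"; // hypothetical endpoint
string headerIdDirectory = @"\\server\share\API\Header\-511"; // hypothetical share
WS.Attachment attachment = new(wsResults, headerIdDirectory, uniqueId: "B44_20250522", destinationFileName: "image.pdf", sourceFileName: @"C:\Temp\image.pdf");
// Copies the source file into an AttachmentId directory, then posts the attachment JSON.
WS.AttachFiles(url, headerAttachments: new List<WS.Attachment> { attachment }, dataAttachments: null);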
File diff suppressed because it is too large

View File

@@ -0,0 +1,33 @@
using System.Collections.ObjectModel;
namespace Adaptation.Shared;
public class ProcessDataStandardFormatMapping
{
public ReadOnlyCollection<string> BackfillColumns { get; private set; }
public ReadOnlyCollection<int> ColumnIndices { get; private set; }
public ReadOnlyCollection<string> IgnoreColumns { get; private set; }
public ReadOnlyCollection<string> IndexOnlyColumns { get; private set; }
public ReadOnlyDictionary<string, string> KeyValuePairs { get; private set; }
public ReadOnlyCollection<string> NewColumnNames { get; private set; }
public ReadOnlyCollection<string> OldColumnNames { get; private set; }
public ProcessDataStandardFormatMapping(ReadOnlyCollection<string> backfillColumns,
ReadOnlyCollection<int> columnIndices,
ReadOnlyCollection<string> ignoreColumns,
ReadOnlyCollection<string> indexOnlyColumns,
ReadOnlyDictionary<string, string> keyValuePairs,
ReadOnlyCollection<string> newColumnNames,
ReadOnlyCollection<string> oldColumnNames)
{
BackfillColumns = backfillColumns;
ColumnIndices = columnIndices;
IgnoreColumns = ignoreColumns;
IndexOnlyColumns = indexOnlyColumns;
KeyValuePairs = keyValuePairs;
NewColumnNames = newColumnNames;
OldColumnNames = oldColumnNames;
}
}
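A minimal construction sketch (not part of this commit) for the new mapping type that MoveMatchingFiles consumes; every column name and index below is a hypothetical placeholder:

using System.Collections.Generic;
using System.Collections.ObjectModel;
using Adaptation.Shared;

ProcessDataStandardFormatMapping mapping = new(
    backfillColumns: new ReadOnlyCollection<string>(new List<string>()),
    columnIndices: new ReadOnlyCollection<int>(new List<int> { 0, 1 }),
    ignoreColumns: new ReadOnlyCollection<string>(new List<string> { "Comments" }),           // hypothetical
    indexOnlyColumns: new ReadOnlyCollection<string>(new List<string>()),
    keyValuePairs: new ReadOnlyDictionary<string, string>(new Dictionary<string, string>()),
    newColumnNames: new ReadOnlyCollection<string>(new List<string> { "LotID", "RecipeName" }), // hypothetical
    oldColumnNames: new ReadOnlyCollection<string>(new List<string> { "Lot", "Recipe" }));      // hypothetical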

File diff suppressed because it is too large

View File

@@ -140,6 +140,7 @@
<Compile Include="Adaptation\Shared\Logistics.cs" />
<Compile Include="Adaptation\Shared\Logistics2.cs" />
<Compile Include="Adaptation\Shared\ProcessDataStandardFormat.cs" />
<Compile Include="Adaptation\Shared\ProcessDataStandardFormatMapping.cs" />
<Compile Include="Adaptation\Shared\Properties\IProcessData.cs" />
<Compile Include="Adaptation\Shared\RijndaelEncryption.cs" />
<Compile Include="Adaptation\Shared\Test.cs" />

View File

@@ -37,7 +37,7 @@ public partial class FileRead : FileReaderHandler, ISMTP
private FilePathGenerator _FilePathGeneratorForTarget;
private readonly List<EquipmentParameter> _EquipmentParameters;
private static readonly Dictionary<string, List<long>> _DummyRuns;
private static readonly Dictionary<long, List<string>> _StaticRuns;
private static readonly Dictionary<long, List<Adaptation.Shared.Metrology.WS.Results>> _StaticRuns;
static FileRead()
{