Removed Infineon.Mesa.gpcl6 and added Includes

Bug in yml
dotnet tool
PackageReference arrangement
RDS Blank
Change int dotnet test
nuget ^
[spcepiworld].[dbo].[evnt_inf]
Assembly Version
WS Result bug fix and Nuget bump, PSN, Reactor and Extra RDS rule
OpenInsightApi and testRunTitle
editorconfig bugs
Fix Type
serializerValue
RDS oversight
PropertyNameCaseInsensitive
Save check for file already present
NoWaitDirectory
MoveArchive allow empty directory and continueOnError for clean files
CreatePointerFile and more on NoWaitDirectory
2023-03-20 14:19:21 -07:00
parent 7248a46452
commit c507cd4028
48 changed files with 857 additions and 650 deletions

View File

@@ -115,7 +115,7 @@ public class FileRead : Shared.FileRead, IFileRead
}
}
private void MoveArchive(DateTime dateTime)
private void MoveArchive(string reportFullPath, DateTime dateTime)
{
if (dateTime == DateTime.MinValue)
{ }
@@ -128,12 +128,17 @@ public class FileRead : Shared.FileRead, IFileRead
string jobIdDirectory = Path.Combine(_JobIdParentDirectory, _Logistics.JobID);
if (!Directory.Exists(jobIdDirectory))
_ = Directory.CreateDirectory(jobIdDirectory);
string[] matchDirectories = new string[] { GetDirectoriesRecursively(jobIdDirectory, logisticsSequence).FirstOrDefault() };
if ((matchDirectories is null) || matchDirectories.Length != 1)
throw new Exception("Didn't find directory by logistics sequence");
string sourceDirectory = Path.GetDirectoryName(matchDirectories[0]);
destinationArchiveDirectory = Path.Combine(destinationArchiveDirectory, Path.GetFileName(sourceDirectory));
Directory.Move(sourceDirectory, destinationArchiveDirectory);
if (!Directory.GetDirectories(jobIdDirectory).Any())
File.Copy(reportFullPath, Path.Combine(destinationArchiveDirectory, Path.GetFileName(reportFullPath)));
else
{
string[] matchDirectories = GetDirectoriesRecursively(jobIdDirectory, logisticsSequence).ToArray();
if (matchDirectories.Length != 1)
throw new Exception("Didn't find directory by logistics sequence");
string sourceDirectory = Path.GetDirectoryName(matchDirectories[0]);
destinationArchiveDirectory = Path.Combine(destinationArchiveDirectory, Path.GetFileName(sourceDirectory));
Directory.Move(sourceDirectory, destinationArchiveDirectory);
}
}
private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
@@ -146,7 +151,7 @@ public class FileRead : Shared.FileRead, IFileRead
List<Shared.Properties.IDescription> descriptions = GetDuplicatorDescriptions(jsonElements);
Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
MoveArchive(dateTime);
MoveArchive(reportFullPath, dateTime);
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(pdsf.Item1, tests, jsonElements, new List<FileInfo>());
return results;
}

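For context on the hunks above: MoveArchive now receives reportFullPath so that, when the job-id directory holds no run subdirectories, the report file itself is copied into the archive instead of the move failing. GetDirectoriesRecursively is a shared helper assumed here to enumerate subdirectories whose paths contain the logistics sequence; a minimal sketch of such a helper (illustrative only, not the project's actual implementation):

// Illustrative sketch only: enumerate every subdirectory under root whose
// path contains the logistics sequence (assumed behaviour of the shared helper).
private static IEnumerable<string> GetDirectoriesRecursively(string root, string logisticsSequence) =>
    Directory.EnumerateDirectories(root, "*", SearchOption.AllDirectories)
        .Where(directory => directory.Contains(logisticsSequence));
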
View File

@@ -7,6 +7,7 @@ using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Text.Json;
using System.Threading;
@@ -153,18 +154,36 @@ public class FileRead : Shared.FileRead, IFileRead
return results;
}
private void MoveCollection(DateTime dateTime, List<(string matchingFile, string checkFile)> collection)
private static List<(string, string, string, string, string)> GetCollection(List<(string matchingFile, string checkFile)> collection)
{
long preWait;
List<(string, string, string, string, string)> results = new();
string errFile;
string checkDirectory;
string noWaitDirectory;
foreach ((string matchingFile, string checkFile) in collection)
{
errFile = string.Concat(checkFile, ".err");
checkDirectory = Path.GetDirectoryName(checkFile);
if (!Directory.Exists(checkDirectory))
_ = Directory.CreateDirectory(checkDirectory);
noWaitDirectory = Path.Combine(checkDirectory, "NoWaitDirectory");
results.Add(new(matchingFile, checkFile, errFile, checkDirectory, noWaitDirectory));
}
return results;
}
private void MoveCollection(DateTime dateTime, List<(string matchingFile, string checkFile)> collection)
{
long preWait;
List<(string checkFile, string errFile)> postCollection = new();
foreach ((string matchingFile, string checkFile, string errFile, string checkDirectory, string noWaitDirectory) in GetCollection(collection))
{
File.Move(matchingFile, checkFile);
if (Directory.Exists(noWaitDirectory))
{
postCollection.Add(new(checkFile, errFile));
continue;
}
if (_FileConnectorConfiguration?.FileHandleWaitTime is null)
preWait = DateTime.Now.AddMilliseconds(1234).Ticks;
else
@@ -182,10 +201,45 @@ public class FileRead : Shared.FileRead, IFileRead
if (!File.Exists(checkFile))
break;
if (new TimeSpan(DateTime.Now.Ticks - dateTime.Ticks).TotalSeconds > _BreakAfterSeconds)
throw new Exception($"Not all files were consumned after {_BreakAfterSeconds} second(s)!");
throw new Exception($"Not all files were consumed after {_BreakAfterSeconds} second(s)!");
Thread.Sleep(500);
}
}
if (postCollection.Any())
{
Thread.Sleep(500);
StringBuilder stringBuilder = new();
foreach ((string checkFile, string errFile) in postCollection)
{
if (File.Exists(errFile))
_ = stringBuilder.AppendLine(File.ReadAllText(errFile));
if (File.Exists(checkFile))
_ = stringBuilder.AppendLine($"<{checkFile}> was not consumed by the end!");
}
if (stringBuilder.Length > 0)
throw new Exception(stringBuilder.ToString());
}
}
private static void CreatePointerFile(int numberLength, string parentDirectory, List<string> matchingFiles)
{
#nullable enable
string checkFile;
string writeFile;
string? directoryName;
int parentDirectoryLength = parentDirectory.Length;
foreach (string matchingFile in matchingFiles)
{
directoryName = Path.GetDirectoryName(matchingFile);
if (directoryName is null)
continue;
checkFile = $"{matchingFile[0]}{directoryName.Substring(parentDirectoryLength + numberLength + 1)}";
writeFile = Path.Combine(parentDirectory, $"{directoryName.Substring(parentDirectory.Length + 1, numberLength)}.txt");
if (File.Exists(writeFile))
continue;
File.AppendAllLines(writeFile, new string[] { parentDirectory, matchingFile, directoryName, checkFile });
}
#nullable disable
}
private Tuple<string, Test[], JsonElement[], List<FileInfo>> GetExtractResult(string reportFullPath, DateTime dateTime)
@@ -201,6 +255,9 @@ public class FileRead : Shared.FileRead, IFileRead
List<string> matchingFiles = GetMatchingFiles(ticks, reportFullPath, searchDirectories);
if (matchingFiles.Count != searchDirectories.Count)
throw new Exception($"Didn't find all files after {_BreakAfterSeconds} second(s)!");
try
{ CreatePointerFile(numberLength, parentParentDirectory, matchingFiles); }
catch (Exception) { }
List<(string matchingFile, string checkFile)> collection = GetCollection(numberLength, parentParentDirectory, matchingFiles);
MoveCollection(dateTime, collection);
return results;

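The NoWaitDirectory change is a marker convention: when a directory named NoWaitDirectory exists beside the check file, MoveCollection skips the per-file wait loop and instead verifies consumption (and any .err files) once, after the whole collection has been moved. CreatePointerFile, in turn, writes a <number>.txt pointer file into the parent directory and is wrapped in try/catch so a failure there never blocks the extract. A minimal sketch of opting a consumer directory into the no-wait path (the path below is hypothetical):

// Illustrative only: create the NoWaitDirectory marker so MoveCollection takes
// the deferred-check branch added above instead of waiting on each file.
string checkDirectory = @"D:\Tmp\Check"; // hypothetical consumer directory
string noWaitDirectory = Path.Combine(checkDirectory, "NoWaitDirectory");
if (!Directory.Exists(noWaitDirectory))
    _ = Directory.CreateDirectory(noWaitDirectory);
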
View File

@@ -6,6 +6,7 @@ using Adaptation.Shared.Methods;
using Adaptation.Shared.Metrology;
using System;
using System.Collections.Generic;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Text;
@@ -18,6 +19,8 @@ public class FileRead : Shared.FileRead, IFileRead
private readonly string _IqsConnectionString;
private readonly string _OpenInsightFilePattern;
private readonly string _OpenInsightApiECDirectory;
private readonly string _OpenInsightApiIFXDirectory;
public FileRead(ISMTP smtp, Dictionary<string, string> fileParameter, string cellInstanceName, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, string parameterizedModelObjectDefinitionType, IList<ModelObjectParameterDefinition> modelObjectParameters, string equipmentDictionaryName, Dictionary<string, List<long>> dummyRuns, Dictionary<long, List<string>> staticRuns, bool useCyclicalForDescription, bool isEAFHosted) :
base(new Description(), false, smtp, fileParameter, cellInstanceName, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, modelObjectParameters, equipmentDictionaryName, dummyRuns, staticRuns, useCyclicalForDescription, isEAFHosted)
@@ -31,6 +34,8 @@ public class FileRead : Shared.FileRead, IFileRead
throw new Exception(cellInstanceConnectionName);
if (!_IsDuplicator)
throw new Exception(cellInstanceConnectionName);
_OpenInsightApiIFXDirectory = @"\\messdv002.na.infineon.com\Candela\Archive\API";
_OpenInsightApiECDirectory = @"\\messv02ecc1.ec.local\EC_Metrology_Si\Archive\API";
_IqsConnectionString = GetPropertyValue(cellInstanceConnectionName, modelObjectParameters, "IQS.ConnectionString");
_OpenInsightFilePattern = GetPropertyValue(cellInstanceConnectionName, modelObjectParameters, "OpenInsight.FilePattern");
}
@@ -230,7 +235,7 @@ public class FileRead : Shared.FileRead, IFileRead
return result.ToString();
}
private void SaveOpenInsightFile(string reportFullPath, DateTime dateTime, List<pcl.Description> descriptions, Test[] tests)
private void SaveOpenInsightFile(string reportFullPath, DateTime dateTime, string logistics, List<pcl.Description> descriptions, Test[] tests)
{
bool isDummyRun = false;
List<(Shared.Properties.IScopeInfo, string)> collection = new();
@@ -247,17 +252,22 @@ public class FileRead : Shared.FileRead, IFileRead
string lines = GetLines(_Logistics, descriptions);
if (!string.IsNullOrEmpty(lines))
{
int? count;
long? subGroupId;
long breakAfter = dateTime.AddSeconds(_BreakAfterSeconds).Ticks;
long preWait = _FileConnectorConfiguration?.FileHandleWaitTime is null ? dateTime.AddMilliseconds(1234).Ticks : dateTime.AddMilliseconds(_FileConnectorConfiguration.FileHandleWaitTime.Value).Ticks;
if (string.IsNullOrEmpty(descriptions[0].Reactor) || string.IsNullOrEmpty(descriptions[0].PSN) || string.IsNullOrEmpty(descriptions[0].RDS))
subGroupId = null;
if (string.IsNullOrEmpty(descriptions[0].Reactor) || string.IsNullOrEmpty(descriptions[0].PSN))
(subGroupId, count) = (null, null);
else
(subGroupId, string _) = FromIQS.GetCommandText(_IqsConnectionString, _Logistics, descriptions[0], breakAfter, preWait);
(subGroupId, count, string _) = FromIQS.GetCommandText(_IqsConnectionString, _Logistics, descriptions[0], breakAfter, preWait);
if (subGroupId is null)
collection.Add(new(new ScopeInfo(tests[0], _OpenInsightFilePattern), lines));
else
else if (count is null)
collection.Add(new(new ScopeInfo(tests[0], $"{subGroupId.Value} {_OpenInsightFilePattern}"), lines));
else
collection.Add(new(new ScopeInfo(tests[0], $"{subGroupId.Value} E{count.Value} {_OpenInsightFilePattern}"), lines));
string weekOfYear = _Calendar.GetWeekOfYear(_Logistics.DateTimeFromSequence, CalendarWeekRule.FirstDay, DayOfWeek.Sunday).ToString("00");
FromIQS.Save(_OpenInsightApiECDirectory, _OpenInsightApiIFXDirectory, _Logistics, reportFullPath, logistics, descriptions.First(), lines, subGroupId, weekOfYear);
}
}
if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
@@ -274,7 +284,7 @@ public class FileRead : Shared.FileRead, IFileRead
List<pcl.Description> descriptions = pcl.ProcessData.GetDescriptions(jsonElements);
Test[] tests = (from l in descriptions select (Test)l.Test).ToArray();
if (_IsEAFHosted && _FileConnectorConfiguration.FileScanningIntervalInSeconds > 0)
SaveOpenInsightFile(reportFullPath, dateTime, descriptions, tests);
SaveOpenInsightFile(reportFullPath, dateTime, pdsf.Item1, descriptions, tests);
results = new Tuple<string, Test[], JsonElement[], List<FileInfo>>(pdsf.Item1, tests, jsonElements, new List<FileInfo>());
return results;
}

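GetCommandText now also returns the event count from IQS, and the OpenInsight scope name is built from whichever of subGroupId and count are available; the week-of-year passed to FromIQS.Save is zero-padded via _Calendar.GetWeekOfYear, which is why System.Globalization joins the usings. An illustrative helper showing the scope-name mapping (not part of the source; openInsightFilePattern stands in for the configured pattern):

// Illustrative only: how (subGroupId, count) select the OpenInsight scope name.
private static string GetScopeName(long? subGroupId, int? count, string openInsightFilePattern) =>
    subGroupId is null ? openInsightFilePattern
        : count is null ? $"{subGroupId.Value} {openInsightFilePattern}"
        : $"{subGroupId.Value} E{count.Value} {openInsightFilePattern}";
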
View File

@@ -3,6 +3,7 @@ using System;
using System.Data;
using System.Data.SqlClient;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Text;
using System.Text.Json;
@@ -29,7 +30,12 @@ public class FromIQS
.AppendLine(" pl.f_name pl_name, ")
.AppendLine(" pd.f_name pd_name, ")
.AppendLine(" td.f_test td_test, ")
.AppendLine(" td.f_name td_name ")
.AppendLine(" td.f_name td_name, ")
.AppendLine(" (select count(ev.f_evnt) ")
.AppendLine(" from [spcepiworld].[dbo].[evnt_inf] ev ")
.AppendLine(" where ev.f_prcs = rd.f_prcs ")
.AppendLine(" and ev.f_part = pd.f_part ")
.AppendLine(" and ev.f_sgtm = se.f_sgtm) ev_count ")
.AppendLine(" from [spcepiworld].[dbo].[sgrp_ext] se ")
.AppendLine(" join [spcepiworld].[dbo].[prcs_dat] rd ")
.AppendLine(" on se.f_prcs = rd.f_prcs ")
@@ -44,12 +50,13 @@ public class FromIQS
.AppendLine(" where se.f_flag = 0 ");
if (subGroupId is not null)
_ = result.Append(" and se.f_sgrp = ").Append(subGroupId).AppendLine(" ");
if (!string.IsNullOrEmpty(description.RDS))
_ = result.Append(" and pl.f_name = '").Append(description.RDS).AppendLine("' ");
_ = result
.Append(" and rd.f_name = '").Append(description.Reactor).AppendLine("' ")
.Append(" and pd.f_name = '").Append(description.PSN).AppendLine("' ")
.AppendLine(" and jd.f_name in ('TENCOR1', 'TENCOR2', 'TENCOR3') ")
.Append(" and jd.f_name = '").Append(logistics.MesEntity).AppendLine("' ")
.Append(" and pl.f_name = '").Append(description.RDS).AppendLine("' ")
.Append(" and dateadd(HH, -7, (dateadd(SS, convert(bigint, se.f_sgtm), '19700101'))) = '").Append(dateTime).AppendLine("' ")
.AppendLine(" for json path ");
return result.ToString();
@@ -67,9 +74,10 @@ public class FromIQS
return stringBuilder;
}
internal static (long?, string) GetCommandText(string connectionString, Logistics logistics, pcl.Description description, long breakAfter, long preWait)
internal static (long?, int?, string) GetCommandText(string connectionString, Logistics logistics, pcl.Description description, long breakAfter, long preWait)
{
string dateTime;
int? count = null;
string commandText;
long? result = null;
string dateFormat = pcl.Description.GetDateFormat();
@@ -113,10 +121,99 @@ public class FromIQS
{
result = subGroupId;
commandText = GetCommandText(logistics, description, dateTime, subGroupId);
if (jsonProperties.Any() && jsonProperties[10].Name == "ev_count" && int.TryParse(jsonProperties[10].Value.ToString(), out int evCount))
count = evCount;
}
}
}
return new(result, commandText);
return new(result, count, commandText);
}
private static string GetJson(Logistics logistics, string logisticLines, pcl.Description description)
{
string result;
StringBuilder stringBuilder = new();
var @object = new
{
description.MesEntity,
description.Employee,
// description.Layer,
description.PSN,
description.RDS,
description.Reactor,
description.Recipe,
// description.Zone,
logistics.DateTimeFromSequence.Ticks
};
string[] pair;
string safeValue;
string[] segments;
string serializerValue;
foreach (string line in logisticLines.Split(new string[] { Environment.NewLine }, StringSplitOptions.None))
{
segments = line.Split('\t');
if (segments.Length < 2)
continue;
segments = segments[1].Split(';');
_ = stringBuilder.Append('{');
foreach (string segment in segments)
{
pair = segment.Split('=');
if (pair.Length != 2 || pair[0].Length < 3)
continue;
serializerValue = JsonSerializer.Serialize(pair[1]);
safeValue = serializerValue.Substring(1, serializerValue.Length - 2);
_ = stringBuilder.Append('"').Append(pair[0].Substring(2)).Append('"').Append(':').Append('"').Append(safeValue).Append('"').Append(',');
}
if (stringBuilder.Length > 0)
_ = stringBuilder.Remove(stringBuilder.Length - 1, 1);
_ = stringBuilder.Append('}').Append(',');
}
if (stringBuilder.Length > 0)
_ = stringBuilder.Remove(stringBuilder.Length - 1, 1);
_ = stringBuilder.Append(']').Append('}');
_ = stringBuilder.Insert(0, ",\"Logistics\":[");
string json = JsonSerializer.Serialize(@object);
_ = stringBuilder.Insert(0, json.Substring(0, json.Length - 1));
JsonElement? jsonElement = JsonSerializer.Deserialize<JsonElement>(stringBuilder.ToString());
result = jsonElement is null ? "{}" : JsonSerializer.Serialize(jsonElement, new JsonSerializerOptions { WriteIndented = true });
return result;
}
internal static void Save(string openInsightApiECDirectory, string openInsightApiIFXDirectory, Logistics logistics, string reportFullPath, string logisticLines, pcl.Description description, string lines, long? subGroupId, string weekOfYear)
{
string checkFile;
string fileName = Path.GetFileName(reportFullPath);
string json = GetJson(logistics, logisticLines, description);
string? ecPathRoot = Path.GetPathRoot(openInsightApiECDirectory);
string? ifxPathRoot = Path.GetPathRoot(openInsightApiIFXDirectory);
bool ecExists = ecPathRoot is not null && Directory.Exists(ecPathRoot);
bool ifxExists = ifxPathRoot is not null && Directory.Exists(ifxPathRoot);
string weekYear = $"{logistics.DateTimeFromSequence:yyyy}_Week_{weekOfYear}";
string ecDirectory = Path.Combine(openInsightApiECDirectory, weekYear, $"-{description.PSN}", $"-{description.Reactor}", $"-{description.RDS}", $"-{subGroupId}");
string ifxDirectory = Path.Combine(openInsightApiIFXDirectory, weekYear, $"-{description.PSN}", $"-{description.Reactor}", $"-{description.RDS}", $"-{subGroupId}");
if (ecExists && !Directory.Exists(ecDirectory))
_ = Directory.CreateDirectory(ecDirectory);
if (ifxExists && !Directory.Exists(ifxDirectory))
_ = Directory.CreateDirectory(ifxDirectory);
checkFile = Path.Combine(ecDirectory, fileName);
if (ecExists && !File.Exists(checkFile))
File.Copy(reportFullPath, checkFile);
checkFile = Path.Combine(ifxDirectory, fileName);
if (ifxExists && !File.Exists(checkFile))
File.Copy(reportFullPath, checkFile);
checkFile = Path.Combine(ecDirectory, $"{logistics.DateTimeFromSequence.Ticks}.txt");
if (ecExists && !File.Exists(checkFile))
File.WriteAllText(checkFile, lines);
checkFile = Path.Combine(ifxDirectory, $"{logistics.DateTimeFromSequence.Ticks}.txt");
if (ifxExists && !File.Exists(checkFile))
File.WriteAllText(checkFile, lines);
checkFile = Path.Combine(ecDirectory, $"{logistics.DateTimeFromSequence.Ticks}.json");
if (ecExists && !File.Exists(checkFile))
File.WriteAllText(checkFile, json);
checkFile = Path.Combine(ifxDirectory, $"{logistics.DateTimeFromSequence.Ticks}.json");
if (ifxExists && !File.Exists(checkFile))
File.WriteAllText(checkFile, json);
}
#nullable disable

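Two points worth spelling out for the FromIQS changes: the new ev_count subquery counts evnt_inf rows that share the process, part, and subgroup time of the matched row, and GetJson expects each logistics line to be a tab-delimited key/record pair whose record is a semicolon-separated list of NAME=value fields, with the first two characters of each name stripped. Save then mirrors the raw report, the lines, and this JSON into both the EC and IFX archive trees, skipping whichever share root is unreachable. A hypothetical input line and the fragment GetJson would build from it (field names invented for illustration):

// Illustrative only: hypothetical logistics line and the JSON fragment GetJson builds from it.
string logisticLines = "LOGISTICS_1\tA_JOBID=TENCOR1;A_MES_ENTITY=TENCOR1;A_RDS=123456";
// Appended to the serialized description as:
//   "Logistics":[{"JOBID":"TENCOR1","MES_ENTITY":"TENCOR1","RDS":"123456"}]
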
View File

@@ -30,9 +30,6 @@ public class FileRead : Shared.FileRead, IFileRead
if (!_IsDuplicator)
throw new Exception(cellInstanceConnectionName);
_OpenInsightMetrologyViewerAPI = GetPropertyValue(cellInstanceConnectionName, modelObjectParameters, "OpenInsight.MetrologyViewerAPI");
string barcode = TestMe.GetBarcode("192.168.0.121");
if (string.IsNullOrEmpty(barcode))
{ }
}
void IFileRead.Move(Tuple<string, Test[], JsonElement[], List<FileInfo>> extractResults, Exception exception)

View File

@@ -1,105 +0,0 @@
using System;
using System.Collections.Generic;
using System.Globalization;
using System.Net.Http;
using System.Text.Json;
namespace Adaptation.FileHandlers.OpenInsightMetrologyViewer;
public class TestMe
{
private static List<string> GetURLCollection(string barcodeServerIP)
{
List<string> results = new();
int weekOfYear;
string checkURL;
DateTime dateTime;
string weekDirectory;
string weekOfYearPadded;
string lastURL = string.Empty;
Calendar calendar = new CultureInfo("en-US").Calendar;
for (int i = 1; i < 3; i++)
{
if (i == 1)
dateTime = DateTime.Now;
else
dateTime = DateTime.Now.AddHours(-4);
weekOfYear = calendar.GetWeekOfYear(dateTime, CalendarWeekRule.FirstDay, DayOfWeek.Sunday) - 1;
weekOfYearPadded = weekOfYear.ToString("00");
weekDirectory = $"{dateTime:yyyy}_Week_{weekOfYearPadded}/{dateTime:yyyy-MM-dd}";
checkURL = string.Concat("http://", barcodeServerIP, '/', weekDirectory);
if (i == 1 || checkURL != lastURL)
{
results.Add(string.Concat(checkURL, "/A"));
results.Add(string.Concat(checkURL, "/B"));
}
lastURL = checkURL;
}
return results;
}
private static List<string> GetURLPossible(HttpClient httpClient, List<string> urlCollection, JsonSerializerOptions propertyNameCaseInsensitiveJsonSerializerOptions)
{
List<string> results = new();
string json;
NginxFileSystem[] nginxFileSystemCollection;
DateTime minimumDateTime = DateTime.Now.AddHours(-4);
string nginxFormat = "ddd, dd MMM yyyy HH:mm:ss zzz";
foreach (string url in urlCollection)
{
try
{
json = httpClient.GetStringAsync(url).Result;
nginxFileSystemCollection = JsonSerializer.Deserialize<NginxFileSystem[]>(json, propertyNameCaseInsensitiveJsonSerializerOptions);
foreach (NginxFileSystem nginxFileSystem in nginxFileSystemCollection)
{
if (!DateTime.TryParseExact(nginxFileSystem.MTime.Replace("GMT", "+00:00"), nginxFormat, CultureInfo.InvariantCulture, DateTimeStyles.None, out DateTime dateTime))
continue;
if (dateTime < minimumDateTime)
continue;
results.Add(string.Concat(url, '/', nginxFileSystem.Name));
}
}
catch
{ }
}
return results;
}
private static List<(string, BarcodeRecord)> GetBarcodePossible(HttpClient httpClient, JsonSerializerOptions propertyNameCaseInsensitiveJsonSerializerOptions, List<string> possibleURLCollection)
{
List<(string, BarcodeRecord)> results = new();
string json;
BarcodeRecord barcodeRecord;
foreach (string possibleURL in possibleURLCollection)
{
try
{
json = httpClient.GetStringAsync(possibleURL).Result;
barcodeRecord = JsonSerializer.Deserialize<BarcodeRecord>(json, propertyNameCaseInsensitiveJsonSerializerOptions);
results.Add(new(possibleURL, barcodeRecord));
}
catch
{ }
}
return results;
}
public static string GetBarcode(string barcodeServerIP)
{
string result = string.Empty;
using HttpClient httpClient = new();
List<string> urlCollection = GetURLCollection(barcodeServerIP);
JsonSerializerOptions propertyNameCaseInsensitiveJsonSerializerOptions = new() { PropertyNameCaseInsensitive = true };
List<string> possibleURLCollection = GetURLPossible(httpClient, urlCollection, propertyNameCaseInsensitiveJsonSerializerOptions);
List<(string, BarcodeRecord)> possibleBarcodeCollection = GetBarcodePossible(httpClient, propertyNameCaseInsensitiveJsonSerializerOptions, possibleURLCollection);
foreach ((string url, BarcodeRecord barcodeRecord) in possibleBarcodeCollection)
{
if (string.IsNullOrEmpty(url) || string.IsNullOrEmpty(barcodeRecord.Barcode))
continue;
}
return result;
}
}

View File

@@ -298,7 +298,7 @@ public class WSRequest
if (!wsResults.Success)
throw new Exception(wsResults.ToString());
}
WS.Results metrologyWSRequest = JsonSerializer.Deserialize<WS.Results>(json);
WS.Results metrologyWSRequest = JsonSerializer.Deserialize<WS.Results>(json, new JsonSerializerOptions { PropertyNameCaseInsensitive = true });
long wsResultsHeaderID = metrologyWSRequest.HeaderID;
string[] pclFiles = Directory.GetFiles(matchDirectory, "*.pcl", SearchOption.TopDirectoryOnly);
if (pclFiles.Length != 1)

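System.Text.Json matches property names case-sensitively by default, so a camelCase payload would leave the PascalCase HeaderID unset without the option added above. A minimal standalone sketch (the Results type here is a stand-in for WS.Results):

// Illustrative only: case-insensitive matching lets "headerId" bind to HeaderID.
using System.Text.Json;

JsonSerializerOptions options = new() { PropertyNameCaseInsensitive = true };
Results results = JsonSerializer.Deserialize<Results>("{\"headerId\":42,\"success\":true}", options);
// results.HeaderID == 42, results.Success == true

public class Results { public long HeaderID { get; set; } public bool Success { get; set; } }
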
View File

@@ -118,7 +118,7 @@ public class FileRead : Shared.FileRead, IFileRead
if (!Directory.Exists(jobIdDirectory))
_ = Directory.CreateDirectory(jobIdDirectory);
string[] matchDirectories = GetInProcessDirectory(jobIdDirectory);
if ((matchDirectories is null) || matchDirectories.Length != 1)
if (matchDirectories.Length != 1)
throw new Exception("Didn't find directory by logistics sequence");
if (fileInfo.Exists && fileInfo.LastWriteTime < fileInfo.CreationTime)
File.SetLastWriteTime(reportFullPath, fileInfo.CreationTime);

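The dropped null guard is safe because Directory.GetDirectories returns an empty array, never null, when nothing matches, so only the length check carries information (assuming GetInProcessDirectory simply forwards that result). Sketch of the underlying call:

// Illustrative only: with no matches this returns string[0], not null,
// so the Length check alone is sufficient.
string[] matchDirectories = Directory.GetDirectories(jobIdDirectory, "*", SearchOption.TopDirectoryOnly);
if (matchDirectories.Length != 1)
    throw new Exception("Didn't find directory by logistics sequence");
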
View File

@@ -250,11 +250,11 @@ public class ProcessData : IProcessData
return result;
}
private static (string, string) GetReactorAndRDS(string defaultReactor, string defaultRDS, string text, string formattedText, string[] segments)
private static (string, string) GetReactorAndRDS(string defaultReactor, string defaultRDS, string text, string formattedText, string[] segments, bool hasRDS)
{
string rds;
string reactor;
if (string.IsNullOrEmpty(text) || segments.Length == 0 || string.IsNullOrEmpty(formattedText))
if (string.IsNullOrEmpty(text) || segments.Length == 0 || string.IsNullOrEmpty(formattedText) || (segments.Length > 1 && !hasRDS))
reactor = defaultReactor;
else
reactor = segments[0];
@@ -270,11 +270,11 @@ public class ProcessData : IProcessData
return new(reactor, rds);
}
private static (string, string) GetLayerAndPSN(string defaultLayer, string defaultPSN, string[] segments)
private static (string, string) GetLayerAndPSN(string defaultLayer, string defaultPSN, string[] segments, bool hasRDS)
{
string psn;
string layer;
if (segments.Length <= 2)
if (segments.Length <= 2 || (segments.Length > 1 && !hasRDS))
{
psn = defaultPSN;
layer = defaultLayer;
@@ -356,8 +356,9 @@ public class ProcessData : IProcessData
if (lot.Length > 2 && lot[0] == '1' && (lot[1] == 'T' || lot[1] == 't'))
lot = lot.Substring(2);
string[] segments = lot.Split('-');
(reactor, rds) = GetReactorAndRDS(defaultReactor, defaultRDS, text, lot, segments);
(layer, psn) = GetLayerAndPSN(defaultLayer, defaultPSN, segments);
bool hasRDS = Regex.IsMatch(lot, "[-]?[0-9]{5,}[-]?");
(reactor, rds) = GetReactorAndRDS(defaultReactor, defaultRDS, text, lot, segments, hasRDS);
(layer, psn) = GetLayerAndPSN(defaultLayer, defaultPSN, segments, hasRDS);
zone = GetZone(segments);
employee = defaultEmployee;
}
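
The new hasRDS flag treats a run of five or more digits in the lot as an RDS number; dashed lots without such a run now keep the default reactor, layer, and PSN. Two hypothetical lot strings against the same pattern:

// Illustrative only (hypothetical lot strings; requires System.Text.RegularExpressions).
bool withRds = Regex.IsMatch("79-123456-1T0", "[-]?[0-9]{5,}[-]?");  // true: contains "123456"
bool withoutRds = Regex.IsMatch("79-EPP-1T0", "[-]?[0-9]{5,}[-]?");  // false: no 5+ digit run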