Match TFS Changeset 303335
This commit is contained in:
49
Adaptation/Helpers/ConfigData.Level.cs
Normal file
49
Adaptation/Helpers/ConfigData.Level.cs
Normal file
@@ -0,0 +1,49 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
|
||||
namespace Adaptation.Helpers
{

    public partial class ConfigData
    {

        /// <summary>
        /// Routing level of a duplicator connection; each member selects a distinct
        /// downstream destination for the duplicated data.
        /// </summary>
        public enum Level
        {
            IsXToOpenInsightMetrologyViewer, //MetrologyWS.SendData(logic, string.Concat("http://", serverName, "/api/inbound/Tencor"), headerAttachments, detailAttachments);
            IsXToIQSSi, //bool WriteFileSPC(Dictionary
            IsXToIQSGaN, //GAN_PPTST
            IsXToOpenInsight, //bool WriteFileOpenInsight(Dictionary
            IsXToOpenInsightMetrologyViewerAttachments, //Site-Two
            IsXToAPC,
            IsXToSPaCe,
            IsXToArchive,
            IsArchive,
            IsDummy,
            IsManualOIEntry,
            IsTIBCO,
            IsNaEDA
        }

        /// <summary>
        /// Static (interval, level, path-template) triples for the duplicator levels.
        /// The '~' in each path is presumably a placeholder substituted by the caller,
        /// and the int looks like a retention/scan figure — TODO confirm at call sites.
        /// </summary>
        public static List<Tuple<int, Enum, string>> LevelTuples =>
            new List<Tuple<int, Enum, string>>
            {
                new Tuple<int, Enum, string>(0, Level.IsNaEDA, @"\EC_EDA\Staging\Traces\~\Source"),
                new Tuple<int, Enum, string>(15, Level.IsXToOpenInsightMetrologyViewer, @"\EC_EAFLog\TracesMES\~\Source"),
                new Tuple<int, Enum, string>(36, Level.IsXToIQSSi, @"\EC_SPC_Si\Traces\~\PollPath"),
                new Tuple<int, Enum, string>(36, Level.IsXToIQSGaN, @"\EC_SPC_GaN\Traces\~\PollPath"),
                new Tuple<int, Enum, string>(36, Level.IsXToOpenInsight, @"\\messa01ec.ec.local\APPS\Metrology\~\Source"),
                new Tuple<int, Enum, string>(36, Level.IsXToOpenInsightMetrologyViewerAttachments, @"\EC_Characterization_Si\In Process\~\Source"),
                new Tuple<int, Enum, string>(360, Level.IsXToAPC, @"\EC_APC\Staging\Traces\~\PollPath"),
                new Tuple<int, Enum, string>(-36, Level.IsXToSPaCe, @"\EC_SPC_Si\Traces\~\Source"),
                new Tuple<int, Enum, string>(180, Level.IsXToArchive, @"\EC_EAFLog\TracesArchive\~\Source"),
                new Tuple<int, Enum, string>(36, Level.IsArchive, @"\EC_Characterization_Si\Processed")
                //new Tuple<int, Enum, string>("IsDummy"
            };

    }

}
|
614
Adaptation/Helpers/ConfigData.cs
Normal file
614
Adaptation/Helpers/ConfigData.cs
Normal file
@@ -0,0 +1,614 @@
|
||||
using Adaptation.Eaf.Management.ConfigurationData.CellAutomation;
|
||||
using Adaptation.Ifx.Eaf.EquipmentConnector.File.Configuration;
|
||||
using Adaptation.Shared;
|
||||
using Adaptation.Shared.Metrology;
|
||||
using Infineon.Monitoring.MonA;
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Diagnostics;
|
||||
using System.Globalization;
|
||||
using System.IO;
|
||||
using System.IO.Compression;
|
||||
using System.Linq;
|
||||
using System.Text.Json;
|
||||
using System.Threading;
|
||||
|
||||
namespace Adaptation.Helpers
|
||||
{
|
||||
|
||||
public partial class ConfigData : ConfigDataBase
{

    // Sentinel for callers that need an explicitly-typed null payload.
    internal const object NullData = null;
    // Minimum acceptable report length; presumably bytes — TODO confirm at call sites.
    internal const int MinFileLength = 100;

    public string IqsFile { get; private set; }                       // IQS section, "FILE" entry.
    public string TracePath { get; private set; }                     // PATH section, "TRACE" entry (UNC path).
    public Level? Duplicator { get; private set; }                    // Routing level decoded from the connection name; null when not a duplicator connection.
    public string IfxChannel { get; private set; }                    // TIBCO section, "IFX_CHANNEL" entry.
    public string IfxSubject { get; private set; }                    // TIBCO section, "IFX_SUBJECT" entry.
    public string MemoryPath { get; private set; }                    // PATH section, "Memory" entry (UNC path).
    public string NoWaferMap { get; private set; }                    // COMMON section, "NoWaferMap" entry (file path).
    public string VillachPath { get; private set; }                   // PATH section, "VILLACH" entry (UNC path).
    public string ProgressPath { get; private set; }                  // PATH section, "Progress" entry (UNC path).
    public string IqsQueryFilter { get; private set; }                // IQS section, "QUERY" entry.
    public string IfxSubjectPrefix { get; private set; }              // TIBCO section, "IFX_SUBJECT_PREFIX" entry.
    public string GhostPCLFileName { get; private set; }              // Full path of gpcl6win64.exe next to the entry assembly.
    public string OpenInsightSiViewer { get; private set; }           // OpenInsight section, "SiViewer" entry (must contain ':' and '.').
    public string OpenInsightFilePattern { get; private set; }        // OpenInsight section, "FilePattern" entry.
    public string ConnectionStringLSL2SQL { get; private set; }       // CONNECTION STRINGS section, "LSL2SQL" entry.
    public string OIContextDataSearchPath { get; private set; }       // PATH section, "OIContextDataSearch" entry (UNC path).
    public string OIContextDataResultsPath { get; private set; }      // PATH section, "OIContextDataResults" entry (UNC path).
    public string OIContextDataPendingPath { get; private set; }      // PATH section, "OIContextDataPending" entry (UNC path).
    public string IfxConfigurationLocation { get; private set; }      // TIBCO section, "IFX_CONFIGURATION_LOCATION" entry.
    // NOTE(review): "Metrogy" is a typo for "Metrology"; kept because renaming would break callers.
    public string OpenInsightMetrogyViewerAPI { get; private set; }   // OpenInsight section, "MetrologyViewerAPI" entry (must contain ':' and '.').
    public List<string> TransportSetupMessages { get; private set; }  // Messages returned by Si.Transport.Setup for TIBCO connections.
    public string IfxConfigurationLocationLocalCopy { get; private set; } // TIBCO section, "IFX_CONFIGURATION_LOCATION_LOCAL_COPY" entry.
    // Shared across all instances: dummy-run sequence numbers already seen, keyed by MonA resource.
    public static Dictionary<string, List<long>> DummyRuns { get; private set; }

    private Timer _Timer;                    // Self-rescheduling timer that drives Callback.
    private int _LastDummyRunIndex;          // Round-robin cursor over dummy-run candidates (-1 before the first run).
    private readonly Calendar _Calendar;     // en-US calendar used for week-of-year computations.
    private readonly string _ReportFullPath; // Trigger/report file rewritten by Callback in non-duplicator mode.
|
||||
|
||||
/// <summary>
/// Builds the cell configuration from the EAF model-object parameter definitions,
/// validates the IQS / PATH / COMMON / CONNECTION STRINGS / OpenInsight / TIBCO
/// sections, and — for source-timer, database-export and dummy connections —
/// starts the self-rescheduling <see cref="Callback(object)"/> timer.
/// </summary>
/// <param name="logic">Host logic forwarded to VerifyProcessDataDescription / WriteExportAliases.</param>
/// <param name="cellName">Cell name; also used to detect duplicator connections.</param>
/// <param name="cellInstanceConnectionName">Connection name; its '-' count encodes the duplicator <see cref="Level"/>.</param>
/// <param name="fileConnectorConfiguration">File connector settings (source filter, locations, scan interval).</param>
/// <param name="equipmentTypeName">Forwarded to the base constructor.</param>
/// <param name="modelObjectParameterDefinitions">Dotted-name configuration entries ("SECTION.KEY").</param>
/// <param name="parameterizedModelObjectDefinitionType">Forwarded to the base constructor.</param>
/// <param name="isEAFHosted">Forwarded to the base constructor.</param>
/// <exception cref="Exception">Thrown on any missing or invalid configuration entry.</exception>
public ConfigData(ILogic logic, string cellName, string cellInstanceConnectionName, FileConnectorConfiguration fileConnectorConfiguration, string equipmentTypeName, IList<ModelObjectParameterDefinition> modelObjectParameterDefinitions, string parameterizedModelObjectDefinitionType, bool isEAFHosted) :
    base(cellName, cellInstanceConnectionName, fileConnectorConfiguration, equipmentTypeName, parameterizedModelObjectDefinitionType, isEAFHosted)
{
    _LastDummyRunIndex = -1;
    UseCyclicalForDescription = false;
    CultureInfo cultureInfo = new CultureInfo("en-US");
    _Calendar = cultureInfo.Calendar;
    // Only the first '|'-separated filter is used for the report path at the bottom.
    string firstSourceFileFilter = fileConnectorConfiguration.SourceFileFilter.Split('|')[0];
    if (DummyRuns is null)
        DummyRuns = new Dictionary<string, List<long>>();
    // A duplicator connection is one whose connection name starts with the cell name;
    // the number of '-' characters in the name encodes the duplicator Level value.
    bool isDuplicator = cellInstanceConnectionName.StartsWith(cellName);
    int level = (cellInstanceConnectionName.Length - cellInstanceConnectionName.Replace("-", string.Empty).Length);
    if (!isDuplicator)
        Duplicator = null;
    else
    {
        CellNames.Add(cellName, cellName);
        MesEntities.Add(cellName, cellName);
        Duplicator = (Level)level;
    }
    if (isDuplicator)
        ProcessDataDescription = new Duplicator.Description();
    else
        ProcessDataDescription = new ProcessData.FileRead.Description();
    if (!isDuplicator)
    {
        //Verify(fileConnectorConfiguration, cellInstanceConnectionName);
        if (!EafHosted)
            VerifyProcessDataDescription(logic);
        else
            WriteExportAliases(logic, cellName, cellInstanceConnectionName);
    }
    // Ghost PCL executable is required when hosted and duplicating MetrologyViewer attachments.
    GhostPCLFileName = string.Concat(Path.GetDirectoryName(System.Reflection.Assembly.GetEntryAssembly().Location), @"\gpcl6win64.exe");
    if (EafHosted && Duplicator.HasValue && Duplicator.Value == Level.IsXToOpenInsightMetrologyViewerAttachments && !File.Exists(GhostPCLFileName))
        throw new Exception("Ghost PCL FileName doesn't Exist!");
    //LincPDFCFileName = string.Concat(Path.GetDirectoryName(System.Reflection.Assembly.GetEntryAssembly().Location), @"\LincPDFC.exe");
    if (!modelObjectParameterDefinitions.Any())
    {
        // No configuration supplied: register placeholder cell/MES entries.
        CellNames.Add(cellName, "****");
        MesEntities.Add(cellName, "****");
    }
    else
    {
        int index;
        string key;
        string variable = string.Empty;
        // Bucket every "SECTION.KEY" parameter into its section dictionary.
        Dictionary<string, string> iqsSection = new Dictionary<string, string>();
        Dictionary<string, string> pathSection = new Dictionary<string, string>();
        Dictionary<string, string> tibcoSection = new Dictionary<string, string>();
        Dictionary<string, string> commonSection = new Dictionary<string, string>();
        Dictionary<string, string> openInsightSection = new Dictionary<string, string>();
        Dictionary<string, string> connectionStringsSection = new Dictionary<string, string>();
        foreach (ModelObjectParameterDefinition modelObjectParameterDefinition in modelObjectParameterDefinitions)
        {
            if (!modelObjectParameterDefinition.Name.Contains('.'))
                continue;
            // Per-event description entries are handled elsewhere; skip them here.
            else if (modelObjectParameterDefinition.Name.StartsWith("Description.") && (modelObjectParameterDefinition.Name.EndsWith(".EventName") || modelObjectParameterDefinition.Name.EndsWith(".EquipmentType")))
                continue;
            index = modelObjectParameterDefinition.Name.IndexOf(".");
            // NOTE(review): unreachable defensively — Contains('.') above guarantees index >= 0.
            if (index <= -1)
                continue;
            key = modelObjectParameterDefinition.Name.Substring(0, index);
            variable = modelObjectParameterDefinition.Name.Substring(index + 1);
            if (key == "COMMON")
                commonSection.Add(variable, modelObjectParameterDefinition.Value);
            else if (key == "CONNECTION STRINGS")
                connectionStringsSection.Add(variable, modelObjectParameterDefinition.Value);
            else if (key == "IQS")
                iqsSection.Add(variable, modelObjectParameterDefinition.Value);
            else if (key == "OpenInsight")
                openInsightSection.Add(variable, modelObjectParameterDefinition.Value);
            else if (key == "PATH")
                pathSection.Add(variable, modelObjectParameterDefinition.Value);
            //else if (key == "REACTOR")
            //    reactorTuples.Add(new Tuple<string, string>(variable, modelObjectParameterDefinition.Value));
            else if (key == "TIBCO")
                tibcoSection.Add(variable, modelObjectParameterDefinition.Value);
            else
                throw new Exception(); // unknown section prefix
        }
        // --- IQS section: FILE and QUERY are mandatory and non-empty. ---
        if (!iqsSection.Any())
            throw new Exception("IQS section is missing from configuration");
        else
        {
            key = "FILE";
            if (iqsSection.ContainsKey(key))
                IqsFile = iqsSection[key];
            else
                throw new Exception(string.Concat("Missing IQS Configuration entry for ", key));
            if (string.IsNullOrEmpty(IqsFile))
                throw new Exception(string.Format("IQS key {0} is empty", key));
            key = "QUERY";
            if (iqsSection.ContainsKey(key))
                IqsQueryFilter = iqsSection[key];
            else
                throw new Exception(string.Concat("Missing IQS Configuration entry for ", key));
            if (string.IsNullOrEmpty(IqsQueryFilter))
                throw new Exception(string.Format("IQS key {0} is empty", key));
        }
        // --- PATH section: optional UNC paths; each accepted entry is created on disk if absent. ---
        if (!pathSection.Any())
            throw new Exception("Path section is missing from configuration");
        else
        {
            key = "TRACE";
            if (pathSection.ContainsKey(key) && pathSection[key].StartsWith(@"\\"))
                TracePath = pathSection[key];
            if (!string.IsNullOrEmpty(TracePath) && !Directory.Exists(TracePath))
                Directory.CreateDirectory(TracePath);
            key = "VILLACH";
            if (pathSection.ContainsKey(key) && pathSection[key].StartsWith(@"\\"))
                VillachPath = pathSection[key];
            if (!string.IsNullOrEmpty(VillachPath) && !Directory.Exists(VillachPath))
                Directory.CreateDirectory(VillachPath);
            key = "Progress";
            if (pathSection.ContainsKey(key) && pathSection[key].StartsWith(@"\\"))
                ProgressPath = pathSection[key];
            // Progress additionally requires its drive/share root to exist before creating.
            if (!string.IsNullOrEmpty(ProgressPath) && Directory.Exists(Path.GetPathRoot(ProgressPath)) && !Directory.Exists(ProgressPath))
                Directory.CreateDirectory(ProgressPath);
            key = "Memory";
            if (pathSection.ContainsKey(key) && pathSection[key].StartsWith(@"\\"))
                MemoryPath = pathSection[key];
            if (!string.IsNullOrEmpty(MemoryPath) && !Directory.Exists(MemoryPath))
                Directory.CreateDirectory(MemoryPath);
            key = "OIContextDataSearch";
            if (pathSection.ContainsKey(key) && pathSection[key].StartsWith(@"\\"))
                OIContextDataSearchPath = pathSection[key];
            if (!string.IsNullOrEmpty(OIContextDataSearchPath) && !Directory.Exists(OIContextDataSearchPath))
                Directory.CreateDirectory(OIContextDataSearchPath);
            key = "OIContextDataPending";
            if (pathSection.ContainsKey(key) && pathSection[key].StartsWith(@"\\"))
                OIContextDataPendingPath = pathSection[key];
            if (!string.IsNullOrEmpty(OIContextDataPendingPath) && !Directory.Exists(OIContextDataPendingPath))
                Directory.CreateDirectory(OIContextDataPendingPath);
            key = "OIContextDataResults";
            if (pathSection.ContainsKey(key) && pathSection[key].StartsWith(@"\\"))
                OIContextDataResultsPath = pathSection[key];
            if (!string.IsNullOrEmpty(OIContextDataResultsPath) && !Directory.Exists(OIContextDataResultsPath))
                Directory.CreateDirectory(OIContextDataResultsPath);
        }
        // --- COMMON section: cell-name and MES-entity maps ("key:value;key:value") plus NoWaferMap. ---
        if (!commonSection.Any())
            throw new Exception("Common section is missing from configuration");
        else
        {
            key = "CELL_NAMES";
            if (!commonSection.ContainsKey(key) || !commonSection[key].Contains(';') || !commonSection[key].Contains(':'))
                throw new Exception();
            else
            {
                string[] segments;
                string[] cellNames = commonSection[key].Split(new string[] { ";" }, StringSplitOptions.RemoveEmptyEntries);
                foreach (string item in cellNames)
                {
                    segments = item.Split(':');
                    CellNames.Add(segments[0].Trim(), segments[1].Trim());
                }
            }
            if (!string.IsNullOrEmpty(cellName) && !CellNames.ContainsKey(cellName))
                throw new Exception();
            key = "MES_ENTITIES";
            if (!commonSection.ContainsKey(key) || !commonSection[key].Contains(';') || !commonSection[key].Contains(':'))
                throw new Exception();
            else
            {
                string[] segments;
                string[] mesEntity = commonSection[key].Split(new string[] { ";" }, StringSplitOptions.RemoveEmptyEntries);
                foreach (string item in mesEntity)
                {
                    segments = item.Split(':');
                    MesEntities.Add(segments[0].Trim(), segments[1].Trim());
                }
            }
            if (!string.IsNullOrEmpty(cellName) && !MesEntities.ContainsKey(cellName))
                throw new Exception();
            key = "NoWaferMap";
            if (commonSection.ContainsKey(key))
                NoWaferMap = commonSection[key];
            if (string.IsNullOrEmpty(NoWaferMap))
                throw new Exception(string.Format("Common key {0} is empty", key));
            // NOTE(review): message below reads "is doesn't exist" — grammar typo kept byte-for-byte.
            if (Duplicator.HasValue && Duplicator.Value == Level.IsXToOpenInsightMetrologyViewerAttachments && !File.Exists(NoWaferMap))
                throw new Exception(string.Format("Common key {0} is doesn't exist!", key));
        }
        // --- CONNECTION STRINGS section: LSL2SQL is mandatory and non-empty. ---
        if (!connectionStringsSection.Any())
            throw new Exception("Connection Strings section is missing from configuration");
        else
        {
            key = "LSL2SQL";
            if (connectionStringsSection.ContainsKey(key))
                ConnectionStringLSL2SQL = connectionStringsSection[key];
            else
                throw new Exception(string.Format("{0} is missing from Equipment Type Configuration", key));
            if (string.IsNullOrEmpty(ConnectionStringLSL2SQL))
                throw new Exception(string.Format("Connection String key {0} is empty", key));
        }
        // --- OpenInsight section: two URL-like entries plus a file pattern. ---
        if (!openInsightSection.Any())
            throw new Exception("OpenInsight section is missing from configuration");
        else
        {
            key = "MetrologyViewerAPI";
            if (openInsightSection.ContainsKey(key))
                OpenInsightMetrogyViewerAPI = openInsightSection[key];
            if (string.IsNullOrEmpty(OpenInsightMetrogyViewerAPI))
                throw new Exception(string.Format("OpenInsight key {0} is empty", key));
            // Sanity check that the value looks like a host/URL (has ':' and '.').
            if (!OpenInsightMetrogyViewerAPI.Contains(":") || !OpenInsightMetrogyViewerAPI.Contains("."))
                throw new Exception(string.Format("OpenInsight key {0} is invalid", key));
            key = "SiViewer";
            if (openInsightSection.ContainsKey(key))
                OpenInsightSiViewer = openInsightSection[key];
            if (string.IsNullOrEmpty(OpenInsightSiViewer))
                throw new Exception(string.Format("OpenInsight key {0} is empty", key));
            if (!OpenInsightSiViewer.Contains(":") || !OpenInsightSiViewer.Contains("."))
                throw new Exception(string.Format("OpenInsight key {0} is invalid", key));
            key = "FilePattern";
            if (openInsightSection.ContainsKey(key))
                OpenInsightFilePattern = openInsightSection[key];
            else
                throw new Exception(string.Concat("Missing OpenInsight Configuration entry for ", key));
            if (string.IsNullOrEmpty(OpenInsightFilePattern))
                throw new Exception(string.Format("OpenInsight key {0} is empty", key));
        }
        if (!MesEntities.Any())
            throw new Exception();
        // --- TIBCO section: IFX transport settings; initializes Si.Transport for IsTIBCO duplicators. ---
        if (!tibcoSection.Any())
            throw new Exception("TIBCO section is missing from configuration");
        else
        {
            key = "IFX_CHANNEL";
            if (tibcoSection.ContainsKey(key))
                IfxChannel = tibcoSection[key];
            else
                throw new Exception("IFX_CHANNEL is missing from Equipment Type Configuration");
            if (string.IsNullOrEmpty(IfxChannel))
                throw new Exception(string.Format("TIBCO section key {0} is empty", key));
            key = "IFX_SUBJECT_PREFIX";
            if (tibcoSection.ContainsKey(key))
                IfxSubjectPrefix = tibcoSection[key];
            else
                throw new Exception("IFX_SUBJECT_PREFIX is missing from Equipment Type Configuration");
            if (string.IsNullOrEmpty(IfxSubjectPrefix))
                throw new Exception(string.Format("TIBCO section key {0} is empty", key));
            key = "IFX_CONFIGURATION_LOCATION";
            if (tibcoSection.ContainsKey(key))
                IfxConfigurationLocation = tibcoSection[key];
            else
                throw new Exception("IFX_CONFIGURATION_LOCATION is missing from Equipment Type Configuration");
            if (string.IsNullOrEmpty(IfxConfigurationLocation))
                throw new Exception(string.Format("TIBCO section key {0} is empty", key));
            key = "IFX_CONFIGURATION_LOCATION_LOCAL_COPY";
            if (tibcoSection.ContainsKey(key))
                IfxConfigurationLocationLocalCopy = tibcoSection[key];
            else
                throw new Exception("IFX_CONFIGURATION_LOCATION_LOCAL_COPY is missing from Equipment Type Configuration");
            if (string.IsNullOrEmpty(IfxConfigurationLocationLocalCopy))
                throw new Exception(string.Format("TIBCO section key {0} is empty", key));
            key = "IFX_SUBJECT";
            if (tibcoSection.ContainsKey(key))
                IfxSubject = tibcoSection[key];
            else
                throw new Exception("IFX_SUBJECT KEY is missing from Equipment Type Configuration");
            if (string.IsNullOrEmpty(IfxSubject))
                throw new Exception(string.Format("TIBCO section key {0} is empty", key));
            if (Duplicator.HasValue && Duplicator.Value == Level.IsTIBCO)
            {
                Si.Transport.Initialize(this);
                // A configured source location selects the full (sleep + IFX) setup path.
                if (!string.IsNullOrEmpty(fileConnectorConfiguration.SourceFileLocation))
                    TransportSetupMessages = Si.Transport.Setup(useSleep: true, setIfxTransport: true);
                else
                    TransportSetupMessages = Si.Transport.Setup(useSleep: false, setIfxTransport: false);
            }
        }
        // --- Self-triggering timer for source-timer, database-export and dummy connections. ---
        if (IsSourceTimer || IsDatabaseExportToIPDSF || (Duplicator.HasValue && Duplicator.Value == Level.IsDummy))
        {
            if (!Directory.Exists(fileConnectorConfiguration.SourceFileLocation))
                Directory.CreateDirectory(fileConnectorConfiguration.SourceFileLocation);
            _ReportFullPath = string.Concat(fileConnectorConfiguration.SourceFileLocation, firstSourceFileFilter.Replace("*", @"\"));
            // Under a debugger (or explicit Process mode) run once synchronously instead of arming the timer.
            if (Debugger.IsAttached || fileConnectorConfiguration.PreProcessingMode == FileConnectorConfiguration.PreProcessingModeEnum.Process)
                Callback(null);
            else
            {
                int milliSeconds;
                milliSeconds = (int)((fileConnectorConfiguration.FileScanningIntervalInSeconds * 1000) / 2);
                _Timer = new Timer(Callback, null, milliSeconds, Timeout.Infinite);
                milliSeconds += 2000; // NOTE(review): dead store — the value is never read after this point.
            }
        }
    }
}
|
||||
|
||||
/// <summary>
/// Ensures <c>ProcessDataDescription</c> is a <c>ProcessData.FileRead.Description</c>
/// for every <c>Test.Tencor</c> key in <paramref name="results"/>; any other test
/// type is rejected.
/// </summary>
/// <param name="results">Duplicator results keyed by test type; only the keys are inspected.</param>
/// <param name="extra">Unused; kept for interface compatibility with existing callers.</param>
/// <exception cref="Exception">Thrown when a key other than <c>Test.Tencor</c> is present.</exception>
public void CheckProcessDataDescription(Dictionary<Test, List<Duplicator.Description>> results, bool extra)
{
    foreach (Test test in results.Keys)
    {
        if (test == Test.Tencor)
        {
            // Lazily swap in the Tencor file-read description if a different
            // description type is currently active.
            if (!(ProcessDataDescription is ProcessData.FileRead.Description))
                ProcessDataDescription = new ProcessData.FileRead.Description();
        }
        else
            // Was a bare "throw new Exception();" — include the offending key for diagnosis.
            throw new Exception(string.Concat("Unsupported test type <", test, ">"));
    }
}
|
||||
|
||||
/// <summary>
/// Replays archived "dummy" runs: for each candidate tuple it reports a MonIn
/// warning (with a ~5.5 s stall) when work is pending, extracts the source zip
/// into an in-process directory, back-dates the extracted files to the archive's
/// sequence ticks, and moves them into the check directory; outcome (Ok/Critical)
/// is appended to <paramref name="traceDummyFile"/> and sent to MonIn.
/// </summary>
/// <param name="traceDummyFile">Trace log file that receives one status block per tuple.</param>
/// <param name="tuples">(monAResource, sourceArchiveFile, inProcessDirectory, checkDirectory, fileCount) work items.</param>
/// <param name="fileConnectorConfigurationIncludeSubDirectories">When true, files are enumerated and moved recursively.</param>
/// <param name="includeSubDirectoriesExtra">When true (with the flag above), the check directory gains a per-sequence subfolder.</param>
private void CallbackIsDummy(string traceDummyFile, List<Tuple<string, string, string, string, int>> tuples, bool fileConnectorConfigurationIncludeSubDirectories, bool includeSubDirectoriesExtra)
{
    int fileCount;
    string[] files;
    string monARessource;
    string checkDirectory;
    string sourceArchiveFile;
    string inProcessDirectory;
    const string site = "sjc";
    const string monInURL = "http://moninhttp.sjc.infineon.com/input/text";
    MonIn monIn = MonIn.GetInstance(monInURL);
    string stateName = string.Concat("Dummy_", _EventName);
    foreach (Tuple<string, string, string, string, int> item in tuples)
    {
        monARessource = item.Item1;
        sourceArchiveFile = item.Item2;
        inProcessDirectory = item.Item3;
        checkDirectory = item.Item4;
        fileCount = item.Item5;
        try
        {
            if (fileCount > 0 || string.IsNullOrEmpty(checkDirectory))
            {
                // Target still holds files (or has no check directory): warn and stall
                // ~5.5 s (11 x 500 ms) before proceeding with the extraction below.
                File.AppendAllLines(traceDummyFile, new string[] { site, monARessource, stateName, State.Warning.ToString() });
                monIn.SendStatus(site, monARessource, stateName, State.Warning);
                for (int i = 1; i < 12; i++)
                    Thread.Sleep(500);
            }
            else if (inProcessDirectory == checkDirectory)
                continue;
            if (!EafHosted)
                continue;
            if (!File.Exists(sourceArchiveFile))
                continue;
            // The archive's numeric file name (minus any 'x' characters) is the run's tick sequence.
            if (!long.TryParse(Path.GetFileNameWithoutExtension(sourceArchiveFile).Replace("x", string.Empty), out long sequence))
                continue;
            ZipFile.ExtractToDirectory(sourceArchiveFile, inProcessDirectory);
            if (fileConnectorConfigurationIncludeSubDirectories && includeSubDirectoriesExtra)
            {
                // Per-sequence subfolder layout differs by event type.
                if (_EventName == EventName.FileRead)
                    checkDirectory = string.Concat(checkDirectory, @"\", sequence);
                else if (_EventName == EventName.FileReadDaily)
                    checkDirectory = string.Concat(checkDirectory, @"\Source\", sequence);
                else
                    throw new Exception();
            }
            if (fileConnectorConfigurationIncludeSubDirectories)
                files = Directory.GetFiles(inProcessDirectory, "*", SearchOption.AllDirectories);
            else
                files = Directory.GetFiles(inProcessDirectory, "*", SearchOption.TopDirectoryOnly);
            // Guard against replaying an unexpectedly large archive.
            if (files.Length > 250)
                throw new Exception("Safety net!");
            // Back-date every extracted file to the run's sequence ticks.
            foreach (string file in files)
                File.SetLastWriteTime(file, new DateTime(sequence));
            if (!fileConnectorConfigurationIncludeSubDirectories)
            {
                foreach (string file in files)
                    File.Move(file, string.Concat(checkDirectory, @"\", Path.GetFileName(file)));
            }
            else
            {
                // Recreate the extracted directory tree under checkDirectory, then move files preserving relative paths.
                string[] directories = Directory.GetDirectories(inProcessDirectory, "*", SearchOption.AllDirectories);
                foreach (string directory in directories)
                    Directory.CreateDirectory(string.Concat(checkDirectory, directory.Substring(inProcessDirectory.Length)));
                foreach (string file in files)
                    File.Move(file, string.Concat(checkDirectory, file.Substring(inProcessDirectory.Length)));
            }
            File.AppendAllLines(traceDummyFile, new string[] { site, monARessource, stateName, State.Ok.ToString() });
            monIn.SendStatus(site, monARessource, stateName, State.Ok);
        }
        catch (Exception exception)
        {
            // Record the failure in the trace file and MonIn, then best-effort email it.
            File.AppendAllLines(traceDummyFile, new string[] { site, monARessource, stateName, State.Critical.ToString(), exception.Message, exception.StackTrace });
            monIn.SendStatus(site, monARessource, stateName, State.Critical);
            try
            {
                Eaf.Core.Smtp.ISmtp smtp = Eaf.Core.Backbone.Instance.GetBackboneComponentsOfType<Eaf.Core.Smtp.ISmtp>().SingleOrDefault();
                Eaf.Core.Smtp.EmailMessage emailMessage = new Eaf.Core.Smtp.EmailMessage(string.Concat("Exception:", EquipmentElementName), string.Concat(exception.Message, Environment.NewLine, Environment.NewLine, exception.StackTrace), Eaf.Core.Smtp.MailPriority.High);
                smtp.Send(emailMessage);
            }
            catch (Exception) { } // email delivery is best-effort only
        }
    }
}
|
||||
|
||||
/// <summary>
/// Timer-driven dummy-run scheduler. During working hours (08:00–17:59, Mon–Sat..
/// actually Mon–Fri: weekends excluded) it scans the configured cell locations
/// for archive files, picks the next candidate round-robin via
/// <c>_LastDummyRunIndex</c>, prepares its in-process directory, and hands the
/// resulting work items to the parameterized <c>CallbackIsDummy</c> overload.
/// </summary>
private void CallbackIsDummy()
{
    DateTime dateTime = DateTime.Now;
    // Only run 08:00–17:59 on weekdays.
    bool check = (dateTime.Hour > 7 && dateTime.Hour < 18 && dateTime.DayOfWeek != DayOfWeek.Sunday && dateTime.DayOfWeek != DayOfWeek.Saturday);
    if (check)
    {
        int fileCount;
        string[] files;
        string monARessource;
        string checkDirectory;
        string sourceArchiveFile;
        string sourceFileLocation;
        string targetFileLocation;
        string inProcessDirectory;
        // Trace file lives under <root>\TracesDummy\<cell>\Source\<year>___Week_<ww>.
        string weekOfYear = _Calendar.GetWeekOfYear(dateTime, CalendarWeekRule.FirstDay, DayOfWeek.Sunday).ToString("00");
        string traceDummyDirectory = string.Concat(Path.GetPathRoot(TracePath), @"\TracesDummy\", CellName, @"\Source\", dateTime.ToString("yyyy"), "___Week_", weekOfYear);
        if (!Directory.Exists(traceDummyDirectory))
            Directory.CreateDirectory(traceDummyDirectory);
        string traceDummyFile = string.Concat(traceDummyDirectory, @"\", dateTime.Ticks, " - ", CellName, ".txt");
        File.AppendAllText(traceDummyFile, string.Empty);
        // Collect every existing archive matching each source filter per cell entry.
        List<Tuple<string, string, string, string, int>> tuples = new List<Tuple<string, string, string, string, int>>();
        foreach (var keyValuePair in CellNames)
        {
            monARessource = keyValuePair.Key;
            // Only cell entries whose value is a path fragment participate.
            if (!keyValuePair.Value.Contains(@"\"))
                continue;
            foreach (string sourceFileFilter in FileConnectorConfiguration.SourceFileFilter.Split('|'))
            {
                // Resolve the source location: the configured location itself when the
                // filter or location already targets this cell, otherwise a cell subfolder.
                if (sourceFileFilter.ToLower().StartsWith(keyValuePair.Value.Replace(@"\", string.Empty)))
                    sourceFileLocation = Path.GetFullPath(FileConnectorConfiguration.SourceFileLocation);
                else if (FileConnectorConfiguration.SourceFileLocation.ToLower().EndsWith(keyValuePair.Value))
                    sourceFileLocation = Path.GetFullPath(FileConnectorConfiguration.SourceFileLocation);
                else
                    sourceFileLocation = Path.GetFullPath(string.Concat(FileConnectorConfiguration.SourceFileLocation, @"\", keyValuePair.Value));
                sourceArchiveFile = Path.GetFullPath(string.Concat(sourceFileLocation, @"\", sourceFileFilter));
                targetFileLocation = Path.GetFullPath(string.Concat(FileConnectorConfiguration.TargetFileLocation, @"\", keyValuePair.Value));
                if (!File.Exists(sourceArchiveFile))
                    continue;
                if (!DummyRuns.ContainsKey(monARessource))
                    DummyRuns.Add(monARessource, new List<long>());
                tuples.Add(new Tuple<string, string, string, string, int>(monARessource, sourceFileLocation, targetFileLocation, sourceArchiveFile, 0));
            }
        }
        File.AppendAllLines(traceDummyFile, from l in tuples select l.Item4);
        if (tuples.Any())
        {
            // Round-robin: advance the cursor, wrapping at the candidate count.
            _LastDummyRunIndex += 1;
            if (_LastDummyRunIndex >= tuples.Count)
                _LastDummyRunIndex = 0;
            monARessource = tuples[_LastDummyRunIndex].Item1;
            sourceFileLocation = tuples[_LastDummyRunIndex].Item2;
            targetFileLocation = tuples[_LastDummyRunIndex].Item3;
            sourceArchiveFile = tuples[_LastDummyRunIndex].Item4;
            //fileCount = tuples[_LastDummyRunIndex].Item5;
            // Reuse the list for the (at most two) work items handed to the overload below.
            tuples.Clear();
            if (long.TryParse(Path.GetFileNameWithoutExtension(sourceArchiveFile).Replace("x", string.Empty), out long sequence))
            {
                if (!DummyRuns[monARessource].Contains(sequence))
                    DummyRuns[monARessource].Add(sequence);
                inProcessDirectory = string.Concat(ProgressPath, @"\", monARessource, @"\Dummy_in process\", sequence);
                checkDirectory = inProcessDirectory;
                if (!Directory.Exists(checkDirectory))
                    Directory.CreateDirectory(checkDirectory);
                // Clear out any leftovers from a previous attempt (best effort, capped at 250 files).
                files = Directory.GetFiles(checkDirectory, "*", SearchOption.AllDirectories);
                fileCount = files.Length;
                if (files.Any())
                {
                    if (files.Length > 250)
                        throw new Exception("Safety net!");
                    try
                    {
                        foreach (string file in files)
                            File.Delete(file);
                    }
                    catch (Exception) { }
                }
                // Work item 1: the in-process directory itself.
                tuples.Add(new Tuple<string, string, string, string, int>(monARessource, sourceArchiveFile, inProcessDirectory, checkDirectory, fileCount));
                // Work item 2: the target location, counting files already carrying this sequence.
                checkDirectory = targetFileLocation;
                files = Directory.GetFiles(checkDirectory, string.Concat("*", sequence, "*"), SearchOption.TopDirectoryOnly);
                fileCount = files.Length;
                tuples.Add(new Tuple<string, string, string, string, int>(monARessource, sourceArchiveFile, inProcessDirectory, checkDirectory, fileCount));
            }
        }
        if (tuples.Any())
            CallbackIsDummy(traceDummyFile, tuples, FileConnectorConfiguration.IncludeSubDirectories.Value, includeSubDirectoriesExtra: false);
    }
}
|
||||
|
||||
/// <summary>
/// Timer callback. In non-duplicator mode it rewrites the (empty) report trigger
/// file; for the dummy duplicator it runs <see cref="CallbackIsDummy()"/>. It then
/// re-arms <c>_Timer</c> with a mode-specific interval. All failures are reported
/// by best-effort email and never propagate out of the callback.
/// </summary>
/// <param name="state">Unused timer state (always null here).</param>
private void Callback(object state)
{
    try
    {
        if (Duplicator is null)
        {
            // Source-timer / database-export mode: recreate the trigger file empty
            // so the file connector picks it up as a fresh report.
            if (File.Exists(_ReportFullPath))
                File.Delete(_ReportFullPath);
            File.WriteAllText(_ReportFullPath, string.Empty);
        }
        else if (Duplicator.Value == Level.IsDummy)
            CallbackIsDummy();
        else
            // Was a bare "throw new Exception();" — include the offending level for diagnosis.
            throw new Exception(string.Concat("Callback not supported for duplicator level <", Duplicator.Value, ">"));
    }
    catch (Exception exception)
    {
        TrySendExceptionEmail(exception);
    }
    try
    {
        // Choose the next firing interval by connection mode, then re-arm the timer
        // (one-shot: dueTime set, period Timeout.Infinite).
        TimeSpan timeSpan;
        if (IsDatabaseExportToIPDSF)
            timeSpan = new TimeSpan(DateTime.Now.AddMinutes(1).Ticks - DateTime.Now.Ticks);
        else if (IsSourceTimer)
            timeSpan = new TimeSpan(DateTime.Now.AddMinutes(15).Ticks - DateTime.Now.Ticks);
        else if (Duplicator.HasValue && Duplicator.Value == Level.IsDummy)
            timeSpan = new TimeSpan(DateTime.Now.AddSeconds(FileConnectorConfiguration.FileScanningIntervalInSeconds.Value).Ticks - DateTime.Now.Ticks);
        else if (Duplicator.HasValue)
            timeSpan = new TimeSpan(DateTime.Now.AddSeconds(30).Ticks - DateTime.Now.Ticks);
        else
            timeSpan = new TimeSpan(DateTime.Now.AddDays(.5).Ticks - DateTime.Now.Ticks);
        if (!(_Timer is null))
            _Timer.Change((long)timeSpan.TotalMilliseconds, Timeout.Infinite);
        else
            _Timer = new Timer(Callback, null, (long)timeSpan.TotalMilliseconds, Timeout.Infinite);
    }
    catch (Exception exception)
    {
        TrySendExceptionEmail(exception);
    }
}

/// <summary>
/// Best-effort exception notification: emails the message and stack trace via the
/// EAF backbone SMTP component. Swallows all failures — notification must never
/// crash the timer callback. (Extracted from two identical inline catch bodies.)
/// </summary>
/// <param name="exception">The exception to report.</param>
private void TrySendExceptionEmail(Exception exception)
{
    try
    {
        Eaf.Core.Smtp.ISmtp smtp = Eaf.Core.Backbone.Instance.GetBackboneComponentsOfType<Eaf.Core.Smtp.ISmtp>().SingleOrDefault();
        Eaf.Core.Smtp.EmailMessage emailMessage = new Eaf.Core.Smtp.EmailMessage(string.Concat("Exception:", EquipmentElementName), string.Concat(exception.Message, Environment.NewLine, Environment.NewLine, exception.StackTrace), Eaf.Core.Smtp.MailPriority.High);
        smtp.Send(emailMessage);
    }
    catch (Exception) { }
}
|
||||
|
||||
/// <summary>
/// Reads the JSON report at <paramref name="reportFullPath"/>, extracts the
/// optional "Equipment" and "Description" fields, and (when EAF-hosted) sends a
/// MonIn MANUAL_OI_ENTRY warning for the equipment (falling back to the cell
/// name). Returns the parsed document with an empty string and empty file list.
/// </summary>
/// <param name="reportFullPath">Path of the JSON report file to read.</param>
/// <returns>(empty string, parsed JSON element, empty FileInfo list).</returns>
public Tuple<string, JsonElement?, List<FileInfo>> IsManualOIEntry(string reportFullPath)
{
    const string site = "sjc";
    const string stateName = "MANUAL_OI_ENTRY";
    const string monInURL = "http://moninhttp.sjc.infineon.com/input/text";
    string equipment = string.Empty;
    string description = string.Empty;
    // Parse the report into a generic JSON document.
    JsonElement jsonElement = JsonSerializer.Deserialize<JsonElement>(File.ReadAllText(reportFullPath));
    Tuple<string, JsonElement?, List<FileInfo>> results = new Tuple<string, JsonElement?, List<FileInfo>>(string.Empty, jsonElement, new List<FileInfo>());
    // Pull the two fields of interest from the top-level object.
    foreach (JsonProperty jsonProperty in jsonElement.EnumerateObject())
    {
        switch (jsonProperty.Name)
        {
            case "Equipment":
                equipment = jsonProperty.Value.ToString();
                break;
            case "Description":
                description = jsonProperty.Value.ToString();
                break;
        }
    }
    // Fall back to the cell name when the report names no equipment.
    string monARessource = string.IsNullOrEmpty(equipment) ? CellName : equipment;
    MonIn monIn = MonIn.GetInstance(monInURL);
    if (EafHosted)
        monIn.SendStatus(site, monARessource, stateName, State.Warning, description);
    return results;
}
|
||||
|
||||
}
|
||||
|
||||
}
|
53
Adaptation/Helpers/ProcessData.DataFile.cs
Normal file
53
Adaptation/Helpers/ProcessData.DataFile.cs
Normal file
@ -0,0 +1,53 @@
|
||||
namespace Adaptation.Helpers
|
||||
{
|
||||
|
||||
public partial class ProcessData
|
||||
{
|
||||
|
||||
/// <summary>
/// Plain data holder for a single detail record; WSRequest builds one of these
/// per FileRead.Description and collects them in its Details list.
/// All measurement values are carried as raw strings.
/// </summary>
public class DataFile
{

    public long Id { get; set; }
    public string AreaCount { get; set; }
    public string AreaTotal { get; set; }
    public string Bin1 { get; set; }
    public string Bin2 { get; set; }
    public string Bin3 { get; set; }
    public string Bin4 { get; set; }
    public string Bin5 { get; set; }
    public string Bin6 { get; set; }
    public string Bin7 { get; set; }
    public string Bin8 { get; set; }
    public string Comments { get; set; }
    public string Date { get; set; }
    public string Diameter { get; set; }
    public string Exclusion { get; set; }
    public string Gain { get; set; }
    public string HazeAverage { get; set; }
    public string HazePeak { get; set; }
    public string HazeRegion { get; set; }
    public string HazeRng { get; set; }
    public string HeaderUniqueId { get; set; }
    public string LPDCM2 { get; set; }
    public string LPDCount { get; set; }
    public string Laser { get; set; }
    public string Mean { get; set; }
    public string Recipe { get; set; }
    public string ScratchCount { get; set; }
    public string ScratchTotal { get; set; }
    public string Slot { get; set; }
    public string Sort { get; set; }
    public string StdDev { get; set; }
    public string SumOfDefects { get; set; }
    public string Thresh { get; set; }
    public string Thruput { get; set; }
    public string Title { get; set; }
    public string UniqueId { get; set; }
    public string Data { get; set; }
    // NOTE(review): lower-case property name violates C# conventions, but it is
    // presumably part of a serialized payload contract — confirm before renaming.
    public int i { get; set; }

}
|
||||
|
||||
}
|
||||
|
||||
}
|
473
Adaptation/Helpers/ProcessData.FileRead.Description.cs
Normal file
473
Adaptation/Helpers/ProcessData.FileRead.Description.cs
Normal file
@ -0,0 +1,473 @@
|
||||
using Adaptation.Shared;
|
||||
using Adaptation.Shared.Metrology;
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using System.Text.Json;
|
||||
|
||||
namespace Adaptation.Helpers
|
||||
{
|
||||
|
||||
public partial class ProcessData
|
||||
{
|
||||
|
||||
public class FileRead
|
||||
{
|
||||
|
||||
public class Description : IProcessDataDescription
|
||||
{
|
||||
|
||||
public int Test { get; set; }
|
||||
public int Count { get; set; }
|
||||
public int Index { get; set; }
|
||||
//
|
||||
public string EventName { get; set; }
|
||||
public string NullData { get; set; }
|
||||
public string JobID { get; set; }
|
||||
public string Sequence { get; set; }
|
||||
public string MesEntity { get; set; }
|
||||
public string ReportFullPath { get; set; }
|
||||
public string ProcessJobID { get; set; }
|
||||
public string MID { get; set; }
|
||||
//
|
||||
public string Date { get; set; }
|
||||
public string Employee { get; set; }
|
||||
public string Lot { get; set; }
|
||||
public string PSN { get; set; }
|
||||
public string Reactor { get; set; }
|
||||
public string Recipe { get; set; }
|
||||
//
|
||||
public string Comments { get; set; }
|
||||
public string Diameter { get; set; }
|
||||
public string Exclusion { get; set; }
|
||||
public string Gain { get; set; }
|
||||
public string HeaderUniqueId { get; set; }
|
||||
public string Laser { get; set; }
|
||||
public string ParseErrorText { get; set; }
|
||||
public string RDS { get; set; }
|
||||
public string Slot { get; set; }
|
||||
public string UniqueId { get; set; }
|
||||
//
|
||||
public string AreaCount { get; set; }
|
||||
public string AreaCountAvg { get; set; }
|
||||
public string AreaCountMax { get; set; }
|
||||
public string AreaCountMin { get; set; }
|
||||
public string AreaCountStdDev { get; set; }
|
||||
public string AreaTotal { get; set; }
|
||||
public string AreaTotalAvg { get; set; }
|
||||
public string AreaTotalMax { get; set; }
|
||||
public string AreaTotalMin { get; set; }
|
||||
public string AreaTotalStdDev { get; set; }
|
||||
public string Bin1 { get; set; }
|
||||
public string Bin2 { get; set; }
|
||||
public string Bin3 { get; set; }
|
||||
public string Bin4 { get; set; }
|
||||
public string Bin5 { get; set; }
|
||||
public string Bin6 { get; set; }
|
||||
public string Bin7 { get; set; }
|
||||
public string Bin8 { get; set; }
|
||||
public string HazeAverage { get; set; }
|
||||
public string HazeAverageAvg { get; set; }
|
||||
public string HazeAverageMax { get; set; }
|
||||
public string HazeAverageMin { get; set; }
|
||||
public string HazeAverageStdDev { get; set; }
|
||||
public string HazePeak { get; set; }
|
||||
public string HazeRegion { get; set; }
|
||||
public string HazeRegionAvg { get; set; }
|
||||
public string HazeRegionMax { get; set; }
|
||||
public string HazeRegionMin { get; set; }
|
||||
public string HazeRegionStdDev { get; set; }
|
||||
public string HazeRng { get; set; }
|
||||
public string LPDCM2 { get; set; }
|
||||
public string LPDCM2Avg { get; set; }
|
||||
public string LPDCM2Max { get; set; }
|
||||
public string LPDCM2Min { get; set; }
|
||||
public string LPDCM2StdDev { get; set; }
|
||||
public string LPDCount { get; set; }
|
||||
public string LPDCountAvg { get; set; }
|
||||
public string LPDCountMax { get; set; }
|
||||
public string LPDCountMin { get; set; }
|
||||
public string LPDCountStdDev { get; set; }
|
||||
public string Mean { get; set; }
|
||||
public string ScratchCount { get; set; }
|
||||
public string ScratchCountAvg { get; set; }
|
||||
public string ScratchCountMax { get; set; }
|
||||
public string ScratchCountMin { get; set; }
|
||||
public string ScratchCountStdDev { get; set; }
|
||||
public string ScratchTotal { get; set; }
|
||||
public string ScratchTotalAvg { get; set; }
|
||||
public string ScratchTotalMax { get; set; }
|
||||
public string ScratchTotalMin { get; set; }
|
||||
public string ScratchTotalStdDev { get; set; }
|
||||
public string Sort { get; set; }
|
||||
public string StdDev { get; set; }
|
||||
public string SumOfDefects { get; set; }
|
||||
public string SumOfDefectsAvg { get; set; }
|
||||
public string SumOfDefectsMax { get; set; }
|
||||
public string SumOfDefectsMin { get; set; }
|
||||
public string SumOfDefectsStdDev { get; set; }
|
||||
public string Thresh { get; set; }
|
||||
public string Thruput { get; set; }
|
||||
//
|
||||
public object Data { get; set; }
|
||||
public object Parameters { get; set; }
|
||||
|
||||
public string GetEventName() { return nameof(FileRead); }
|
||||
public string GetEventDescription() { return "File Has been read and parsed"; }
|
||||
|
||||
/// <summary>
/// Names of the header-level fields for this description (logic/config are unused here).
/// </summary>
public List<string> GetHeaderNames(ILogic logic, ConfigDataBase configDataBase)
{
    string[] headerNames = new string[]
    {
        nameof(Date),
        nameof(Employee),
        nameof(Lot),
        nameof(PSN),
        nameof(Reactor),
        nameof(Recipe)
    };
    return new List<string>(headerNames);
}
|
||||
|
||||
/// <summary>
/// Names of the per-detail (per-record) fields for this description (logic/config are unused here).
/// </summary>
public List<string> GetDetailNames(ILogic logic, ConfigDataBase configDataBase)
{
    string[] detailNames = new string[]
    {
        nameof(Comments),
        nameof(Diameter),
        nameof(Exclusion),
        nameof(Gain),
        nameof(HeaderUniqueId),
        nameof(Laser),
        nameof(ParseErrorText),
        nameof(RDS),
        nameof(Slot),
        nameof(UniqueId)
    };
    return new List<string>(detailNames);
}
|
||||
|
||||
/// <summary>
/// Names of the measured parameter fields (per-wafer values plus their
/// Avg/Max/Min/StdDev lot-level statistics); logic/config are unused here.
/// </summary>
public List<string> GetParameterNames(ILogic logic, ConfigDataBase configDataBase)
{
    List<string> results = new List<string>
    {
        nameof(AreaCount),
        nameof(AreaCountAvg),
        nameof(AreaCountMax),
        nameof(AreaCountMin),
        nameof(AreaCountStdDev),
        nameof(AreaTotal),
        nameof(AreaTotalAvg),
        nameof(AreaTotalMax),
        nameof(AreaTotalMin),
        nameof(AreaTotalStdDev),
        nameof(Bin1),
        nameof(Bin2),
        nameof(Bin3),
        nameof(Bin4),
        nameof(Bin5),
        nameof(Bin6),
        nameof(Bin7),
        nameof(Bin8),
        nameof(HazeAverage),
        nameof(HazeAverageAvg),
        nameof(HazeAverageMax),
        nameof(HazeAverageMin),
        nameof(HazeAverageStdDev),
        nameof(HazePeak),
        nameof(HazeRegion),
        nameof(HazeRegionAvg),
        nameof(HazeRegionMax),
        nameof(HazeRegionMin),
        nameof(HazeRegionStdDev),
        nameof(HazeRng),
        nameof(LPDCM2),
        nameof(LPDCM2Avg),
        nameof(LPDCM2Max),
        nameof(LPDCM2Min),
        nameof(LPDCM2StdDev),
        nameof(LPDCount),
        nameof(LPDCountAvg),
        nameof(LPDCountMax),
        nameof(LPDCountMin),
        nameof(LPDCountStdDev),
        nameof(Mean),
        nameof(ScratchCount),
        nameof(ScratchCountAvg),
        nameof(ScratchCountMax),
        nameof(ScratchCountMin),
        nameof(ScratchCountStdDev),
        nameof(ScratchTotal),
        nameof(ScratchTotalAvg),
        nameof(ScratchTotalMax),
        nameof(ScratchTotalMin),
        nameof(ScratchTotalStdDev),
        nameof(Sort),
        nameof(StdDev),
        nameof(SumOfDefects),
        nameof(SumOfDefectsAvg),
        nameof(SumOfDefectsMax),
        nameof(SumOfDefectsMin),
        nameof(SumOfDefectsStdDev),
        nameof(Thresh),
        nameof(Thruput)
    };
    return results;
}
|
||||
|
||||
/// <summary>
/// No paired parameters are defined for this file type; always returns an empty list.
/// </summary>
public List<string> GetPairedParameterNames(ILogic logic, ConfigDataBase configDataBase)
{
    return new List<string>();
}
|
||||
|
||||
/// <summary>
/// No parameters are ignored for any test; always returns an empty list.
/// </summary>
public List<string> GetIgnoreParameterNames(ILogic logic, ConfigDataBase configDataBase, Test test)
{
    return new List<string>();
}
|
||||
|
||||
/// <summary>
/// Returns all property names of the description by round-tripping a default
/// instance through JSON and enumerating the resulting object's properties.
/// </summary>
/// <exception cref="Exception">The serialized default did not deserialize to a JSON object.</exception>
public List<string> GetNames(ILogic logic, ConfigDataBase configDataBase)
{
    List<string> results = new List<string>();
    IProcessDataDescription processDataDescription = GetDefault(logic, configDataBase);
    // Serialize with the runtime type so derived-type properties are included.
    string json = JsonSerializer.Serialize(processDataDescription, processDataDescription.GetType());
    object @object = JsonSerializer.Deserialize<object>(json);
    if (!(@object is JsonElement jsonElement))
        throw new Exception(string.Concat("Could not round-trip <", processDataDescription.GetType().FullName, "> through JSON!"));
    foreach (JsonProperty jsonProperty in jsonElement.EnumerateObject())
        results.Add(jsonProperty.Name);
    return results;
}
|
||||
|
||||
/// <summary>
/// Display names match the property names, so a bare default instance suffices.
/// </summary>
public IProcessDataDescription GetDisplayNames(ILogic logic, ConfigDataBase configDataBase)
{
    return new Description();
}
|
||||
|
||||
/// <summary>
/// Builds the default description: logistics fields are copied from the current
/// run, and every data field is filled with its own property name (via nameof),
/// which GetNames relies on when enumerating properties.
/// </summary>
public IProcessDataDescription GetDefault(ILogic logic, ConfigDataBase configDataBase)
{
    Description result = new Description
    {
        // Sentinel values: no test selected, empty set.
        Test = -1,
        Count = 0,
        Index = -1,
        // --- logistics (taken from the live run) ---
        EventName = GetEventName(),
        NullData = string.Empty,
        JobID = logic.Logistics.JobID,
        Sequence = logic.Logistics.Sequence.ToString(),
        MesEntity = logic.Logistics.MesEntity,
        ReportFullPath = logic.Logistics.ReportFullPath,
        ProcessJobID = logic.Logistics.ProcessJobID,
        MID = logic.Logistics.MID,
        // --- header fields (placeholder = own name) ---
        Date = nameof(Date),
        Employee = nameof(Employee),
        Lot = nameof(Lot),
        PSN = nameof(PSN),
        Reactor = nameof(Reactor),
        Recipe = nameof(Recipe),
        // --- detail fields (placeholder = own name) ---
        Comments = nameof(Comments),
        Diameter = nameof(Diameter),
        Exclusion = nameof(Exclusion),
        Gain = nameof(Gain),
        HeaderUniqueId = nameof(HeaderUniqueId),
        Laser = nameof(Laser),
        ParseErrorText = nameof(ParseErrorText),
        RDS = nameof(RDS),
        Slot = nameof(Slot),
        UniqueId = nameof(UniqueId),
        // --- parameter fields (placeholder = own name) ---
        AreaCount = nameof(AreaCount),
        AreaCountAvg = nameof(AreaCountAvg),
        AreaCountMax = nameof(AreaCountMax),
        AreaCountMin = nameof(AreaCountMin),
        AreaCountStdDev = nameof(AreaCountStdDev),
        AreaTotal = nameof(AreaTotal),
        AreaTotalAvg = nameof(AreaTotalAvg),
        AreaTotalMax = nameof(AreaTotalMax),
        AreaTotalMin = nameof(AreaTotalMin),
        AreaTotalStdDev = nameof(AreaTotalStdDev),
        Bin1 = nameof(Bin1),
        Bin2 = nameof(Bin2),
        Bin3 = nameof(Bin3),
        Bin4 = nameof(Bin4),
        Bin5 = nameof(Bin5),
        Bin6 = nameof(Bin6),
        Bin7 = nameof(Bin7),
        Bin8 = nameof(Bin8),
        HazeAverage = nameof(HazeAverage),
        HazeAverageAvg = nameof(HazeAverageAvg),
        HazeAverageMax = nameof(HazeAverageMax),
        HazeAverageMin = nameof(HazeAverageMin),
        HazeAverageStdDev = nameof(HazeAverageStdDev),
        HazePeak = nameof(HazePeak),
        HazeRegion = nameof(HazeRegion),
        HazeRegionAvg = nameof(HazeRegionAvg),
        HazeRegionMax = nameof(HazeRegionMax),
        HazeRegionMin = nameof(HazeRegionMin),
        HazeRegionStdDev = nameof(HazeRegionStdDev),
        HazeRng = nameof(HazeRng),
        LPDCM2 = nameof(LPDCM2),
        LPDCM2Avg = nameof(LPDCM2Avg),
        LPDCM2Max = nameof(LPDCM2Max),
        LPDCM2Min = nameof(LPDCM2Min),
        LPDCM2StdDev = nameof(LPDCM2StdDev),
        LPDCount = nameof(LPDCount),
        LPDCountAvg = nameof(LPDCountAvg),
        LPDCountMax = nameof(LPDCountMax),
        LPDCountMin = nameof(LPDCountMin),
        LPDCountStdDev = nameof(LPDCountStdDev),
        Mean = nameof(Mean),
        ScratchCount = nameof(ScratchCount),
        ScratchCountAvg = nameof(ScratchCountAvg),
        ScratchCountMax = nameof(ScratchCountMax),
        ScratchCountMin = nameof(ScratchCountMin),
        ScratchCountStdDev = nameof(ScratchCountStdDev),
        ScratchTotal = nameof(ScratchTotal),
        ScratchTotalAvg = nameof(ScratchTotalAvg),
        ScratchTotalMax = nameof(ScratchTotalMax),
        ScratchTotalMin = nameof(ScratchTotalMin),
        ScratchTotalStdDev = nameof(ScratchTotalStdDev),
        Sort = nameof(Sort),
        StdDev = nameof(StdDev),
        SumOfDefects = nameof(SumOfDefects),
        SumOfDefectsAvg = nameof(SumOfDefectsAvg),
        SumOfDefectsMax = nameof(SumOfDefectsMax),
        SumOfDefectsMin = nameof(SumOfDefectsMin),
        SumOfDefectsStdDev = nameof(SumOfDefectsStdDev),
        Thresh = nameof(Thresh),
        Thruput = nameof(Thruput),
        // --- passthrough payload slots ---
        Data = nameof(Data),
        Parameters = nameof(Parameters)
    };
    return result;
}
|
||||
|
||||
/// <summary>
/// Flattens a parsed ProcessData into one Description per detail record.
/// Header-level statistics (Avg/Max/Min/StdDev) are repeated on every row;
/// per-wafer values come from Details[i]. Falls back to a single default
/// description when the process data is missing, headerless, or empty.
/// </summary>
public List<IProcessDataDescription> GetDescription(ILogic logic, ConfigDataBase configDataBase, List<Test> tests, IProcessData iProcessData)
{
    List<IProcessDataDescription> results = new List<IProcessDataDescription>();
    if (iProcessData is null || !(iProcessData is ProcessData processData) || processData.Header is null || !processData.Details.Any())
        results.Add(GetDefault(logic, configDataBase));
    else
    {
        string nullData;
        Description description;
        object configDataNullData = ConfigData.NullData;
        if (configDataNullData is null)
            nullData = string.Empty;
        else
            nullData = configDataNullData.ToString();
        // NOTE(review): assumes tests has at least Details.Count entries — TODO confirm at call sites.
        for (int i = 0; i < processData.Details.Count; i++)
        {
            description = new Description
            {
                Test = (int)tests[i],
                Count = tests.Count,
                Index = i,
                // --- logistics ---
                EventName = GetEventName(),
                NullData = nullData,
                JobID = logic.Logistics.JobID,
                Sequence = logic.Logistics.Sequence.ToString(),
                MesEntity = logic.Logistics.MesEntity,
                ReportFullPath = logic.Logistics.ReportFullPath,
                ProcessJobID = logic.Logistics.ProcessJobID,
                MID = logic.Logistics.MID,
                // --- header fields ---
                Date = processData.Header.Date,
                // NOTE(review): Employee is populated from Header.PSN (HeaderFile has
                // no Employee property) — confirm this mapping is intentional.
                Employee = processData.Header.PSN,
                Lot = processData.Header.Lot,
                PSN = processData.Header.PSN,
                Reactor = processData.Header.Reactor,
                Recipe = processData.Header.Recipe,
                // --- detail fields ---
                Comments = processData.Details[i].Comments,
                Diameter = processData.Details[i].Diameter,
                Exclusion = processData.Details[i].Exclusion,
                Gain = processData.Details[i].Gain,
                HeaderUniqueId = processData.Details[i].HeaderUniqueId,
                Laser = processData.Details[i].Laser,
                ParseErrorText = processData.Header.ParseErrorText,
                RDS = processData.Header.RDS,
                Slot = processData.Details[i].Slot,
                UniqueId = processData.Details[i].UniqueId,
                // --- parameters: per-wafer value from Details[i], statistics from Header ---
                AreaCount = processData.Details[i].AreaCount,
                AreaCountAvg = processData.Header.AreaCountAvg,
                AreaCountMax = processData.Header.AreaCountMax,
                AreaCountMin = processData.Header.AreaCountMin,
                AreaCountStdDev = processData.Header.AreaCountStdDev,
                AreaTotal = processData.Details[i].AreaTotal,
                AreaTotalAvg = processData.Header.AreaTotalAvg,
                AreaTotalMax = processData.Header.AreaTotalMax,
                AreaTotalMin = processData.Header.AreaTotalMin,
                AreaTotalStdDev = processData.Header.AreaTotalStdDev,
                Bin1 = processData.Details[i].Bin1,
                Bin2 = processData.Details[i].Bin2,
                Bin3 = processData.Details[i].Bin3,
                Bin4 = processData.Details[i].Bin4,
                Bin5 = processData.Details[i].Bin5,
                Bin6 = processData.Details[i].Bin6,
                Bin7 = processData.Details[i].Bin7,
                Bin8 = processData.Details[i].Bin8,
                HazeAverage = processData.Details[i].HazeAverage,
                HazeAverageAvg = processData.Header.HazeAverageAvg,
                HazeAverageMax = processData.Header.HazeAverageMax,
                HazeAverageMin = processData.Header.HazeAverageMin,
                HazeAverageStdDev = processData.Header.HazeAverageStdDev,
                HazePeak = processData.Details[i].HazePeak,
                HazeRegion = processData.Details[i].HazeRegion,
                HazeRegionAvg = processData.Header.HazeRegionAvg,
                HazeRegionMax = processData.Header.HazeRegionMax,
                HazeRegionMin = processData.Header.HazeRegionMin,
                HazeRegionStdDev = processData.Header.HazeRegionStdDev,
                HazeRng = processData.Details[i].HazeRng,
                LPDCM2 = processData.Details[i].LPDCM2,
                LPDCM2Avg = processData.Header.LPDCM2Avg,
                LPDCM2Max = processData.Header.LPDCM2Max,
                LPDCM2Min = processData.Header.LPDCM2Min,
                LPDCM2StdDev = processData.Header.LPDCM2StdDev,
                LPDCount = processData.Details[i].LPDCount,
                LPDCountAvg = processData.Header.LPDCountAvg,
                LPDCountMax = processData.Header.LPDCountMax,
                LPDCountMin = processData.Header.LPDCountMin,
                LPDCountStdDev = processData.Header.LPDCountStdDev,
                Mean = processData.Details[i].Mean,
                ScratchCount = processData.Details[i].ScratchCount,
                ScratchCountAvg = processData.Header.ScratchCountAvg,
                ScratchCountMax = processData.Header.ScratchCountMax,
                ScratchCountMin = processData.Header.ScratchCountMin,
                ScratchCountStdDev = processData.Header.ScratchCountStdDev,
                ScratchTotal = processData.Details[i].ScratchTotal,
                ScratchTotalAvg = processData.Header.ScratchTotalAvg,
                ScratchTotalMax = processData.Header.ScratchTotalMax,
                ScratchTotalMin = processData.Header.ScratchTotalMin,
                ScratchTotalStdDev = processData.Header.ScratchTotalStdDev,
                Sort = processData.Details[i].Sort,
                StdDev = processData.Details[i].StdDev,
                SumOfDefects = processData.Details[i].SumOfDefects,
                SumOfDefectsAvg = processData.Header.SumOfDefectsAvg,
                SumOfDefectsMax = processData.Header.SumOfDefectsMax,
                SumOfDefectsMin = processData.Header.SumOfDefectsMin,
                SumOfDefectsStdDev = processData.Header.SumOfDefectsStdDev,
                Thresh = processData.Details[i].Thresh,
                Thruput = processData.Details[i].Thruput
            };
            results.Add(description);
        }
    }
    return results;
}
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
}
|
61
Adaptation/Helpers/ProcessData.HeaderFile.cs
Normal file
61
Adaptation/Helpers/ProcessData.HeaderFile.cs
Normal file
@ -0,0 +1,61 @@
|
||||
namespace Adaptation.Helpers
|
||||
{
|
||||
|
||||
public partial class ProcessData
|
||||
{
|
||||
|
||||
/// <summary>
/// Plain data holder for the lot/run-level header produced by parsing:
/// logistics identifiers plus Avg/Max/Min/StdDev statistics for each
/// measured parameter. All values are carried as raw strings.
/// </summary>
public class HeaderFile
{

    public string JobID { get; set; }
    public string MesEntity { get; set; }
    public string AreaCountAvg { get; set; }
    public string AreaCountMax { get; set; }
    public string AreaCountMin { get; set; }
    public string AreaCountStdDev { get; set; }
    public string AreaTotalAvg { get; set; }
    public string AreaTotalMax { get; set; }
    public string AreaTotalMin { get; set; }
    public string AreaTotalStdDev { get; set; }
    public string Date { get; set; }
    public string HazeAverageAvg { get; set; }
    public string HazeAverageMax { get; set; }
    public string HazeAverageMin { get; set; }
    public string HazeAverageStdDev { get; set; }
    public string HazeRegionAvg { get; set; }
    public string HazeRegionMax { get; set; }
    public string HazeRegionMin { get; set; }
    public string HazeRegionStdDev { get; set; }
    public string LPDCM2Avg { get; set; }
    public string LPDCM2Max { get; set; }
    public string LPDCM2Min { get; set; }
    public string LPDCM2StdDev { get; set; }
    public string LPDCountAvg { get; set; }
    public string LPDCountMax { get; set; }
    public string LPDCountMin { get; set; }
    public string LPDCountStdDev { get; set; }
    public string Lot { get; set; }
    public string ParseErrorText { get; set; }
    public string PSN { get; set; }
    public string RDS { get; set; }
    public string Reactor { get; set; }
    public string Recipe { get; set; }
    public string ScratchCountAvg { get; set; }
    public string ScratchCountMax { get; set; }
    public string ScratchCountMin { get; set; }
    public string ScratchCountStdDev { get; set; }
    public string ScratchTotalAvg { get; set; }
    public string ScratchTotalMax { get; set; }
    public string ScratchTotalMin { get; set; }
    public string ScratchTotalStdDev { get; set; }
    public string SumOfDefectsAvg { get; set; }
    public string SumOfDefectsMax { get; set; }
    public string SumOfDefectsMin { get; set; }
    public string SumOfDefectsStdDev { get; set; }
    public string UniqueId { get; set; }

}
|
||||
|
||||
}
|
||||
|
||||
}
|
203
Adaptation/Helpers/ProcessData.WSRequest.cs
Normal file
203
Adaptation/Helpers/ProcessData.WSRequest.cs
Normal file
@ -0,0 +1,203 @@
|
||||
using Adaptation.Shared.Metrology;
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
|
||||
namespace Adaptation.Helpers
|
||||
{
|
||||
|
||||
public partial class ProcessData
|
||||
{
|
||||
/// <summary>
/// Request payload assembled from a list of FileRead.Description rows:
/// header-level statistics are copied from the first description, and one
/// DataFile detail record is built per description.
/// </summary>
public class WSRequest
{

    public long Id { get; set; }
    public string AreaCountAvg { get; set; }
    public string AreaCountMax { get; set; }
    public string AreaCountMin { get; set; }
    public string AreaCountStdDev { get; set; }
    public string AreaTotalAvg { get; set; }
    public string AreaTotalMax { get; set; }
    public string AreaTotalMin { get; set; }
    public string AreaTotalStdDev { get; set; }
    public string Date { get; set; }
    public string HazeAverageAvg { get; set; }
    public string HazeAverageMax { get; set; }
    public string HazeAverageMin { get; set; }
    public string HazeAverageStdDev { get; set; }
    public string HazeRegionAvg { get; set; }
    public string HazeRegionMax { get; set; }
    public string HazeRegionMin { get; set; }
    public string HazeRegionStdDev { get; set; }
    public string Layer { get; set; }
    public string LotID { get; set; }
    public string LPDCM2Avg { get; set; }
    public string LPDCM2Max { get; set; }
    public string LPDCM2Min { get; set; }
    public string LPDCM2StdDev { get; set; }
    public string LPDCountAvg { get; set; }
    public string LPDCountMax { get; set; }
    public string LPDCountMin { get; set; }
    public string LPDCountStdDev { get; set; }
    // NOTE(review): Operator is never assigned in the constructor below — it
    // stays null unless set elsewhere (e.g. by deserialization); confirm.
    public string Operator { get; set; }
    public string ParseErrorText { get; set; }
    public string PSN { get; set; }
    public string RDS { get; set; }
    public string Reactor { get; set; }
    public string Recipe { get; set; }
    public string ScratchCountAvg { get; set; }
    public string ScratchCountMax { get; set; }
    public string ScratchCountMin { get; set; }
    public string ScratchCountStdDev { get; set; }
    public string ScratchTotalAvg { get; set; }
    public string ScratchTotalMax { get; set; }
    public string ScratchTotalMin { get; set; }
    public string ScratchTotalStdDev { get; set; }
    public string SumOfDefectsAvg { get; set; }
    public string SumOfDefectsMax { get; set; }
    public string SumOfDefectsMin { get; set; }
    public string SumOfDefectsStdDev { get; set; }
    public string Title { get; set; }
    public string UniqueId { get; set; }
    public string Zone { get; set; }

    public string CellName { get; set; }
    public string Data { get; set; }
    // NOTE(review): lower-case name kept, presumably for payload compatibility.
    public int i { get; set; }
    public List<DataFile> Details { get; protected set; }

    // Parameterless constructor reserved for JSON deserialization only.
    [Obsolete("For json")] public WSRequest() { }

    /// <summary>
    /// Builds the request from parsed descriptions. Requires at least one
    /// description: descriptions[0] supplies all header-level statistics
    /// (an empty list throws ArgumentOutOfRangeException on the indexer).
    /// </summary>
    internal WSRequest(ILogic logic, List<FileRead.Description> descriptions)
    {
        i = -1;
        Id = 0;
        Zone = null;
        Layer = null;
        Title = null;
        Data = "*Data*";
        Details = new List<DataFile>();
        CellName = logic.Logistics.MesEntity;
        FileRead.Description x = descriptions[0];
        //Header: lot-level statistics come from the first description.
        {
            AreaCountAvg = x.AreaCountAvg;
            AreaCountMax = x.AreaCountMax;
            AreaCountMin = x.AreaCountMin;
            AreaCountStdDev = x.AreaCountStdDev;
            AreaTotalAvg = x.AreaTotalAvg;
            AreaTotalMax = x.AreaTotalMax;
            AreaTotalMin = x.AreaTotalMin;
            AreaTotalStdDev = x.AreaTotalStdDev;
            Date = x.Date;
            HazeAverageAvg = x.HazeAverageAvg;
            HazeAverageMax = x.HazeAverageMax;
            HazeAverageMin = x.HazeAverageMin;
            HazeAverageStdDev = x.HazeAverageStdDev;
            HazeRegionAvg = x.HazeRegionAvg;
            HazeRegionMax = x.HazeRegionMax;
            HazeRegionMin = x.HazeRegionMin;
            HazeRegionStdDev = x.HazeRegionStdDev;
            LotID = x.Lot;
            LPDCM2Avg = x.LPDCM2Avg;
            LPDCM2Max = x.LPDCM2Max;
            LPDCM2Min = x.LPDCM2Min;
            LPDCM2StdDev = x.LPDCM2StdDev;
            LPDCountAvg = x.LPDCountAvg;
            LPDCountMax = x.LPDCountMax;
            LPDCountMin = x.LPDCountMin;
            LPDCountStdDev = x.LPDCountStdDev;
            ParseErrorText = x.ParseErrorText;
            PSN = x.PSN;
            RDS = x.RDS;
            Reactor = x.Reactor;
            Recipe = x.Recipe;
            ScratchCountAvg = x.ScratchCountAvg;
            ScratchCountMax = x.ScratchCountMax;
            ScratchCountMin = x.ScratchCountMin;
            ScratchCountStdDev = x.ScratchCountStdDev;
            ScratchTotalAvg = x.ScratchTotalAvg;
            ScratchTotalMax = x.ScratchTotalMax;
            ScratchTotalMin = x.ScratchTotalMin;
            ScratchTotalStdDev = x.ScratchTotalStdDev;
            SumOfDefectsAvg = x.SumOfDefectsAvg;
            SumOfDefectsMax = x.SumOfDefectsMax;
            SumOfDefectsMin = x.SumOfDefectsMin;
            SumOfDefectsStdDev = x.SumOfDefectsStdDev;
            UniqueId = x.UniqueId;
        }
        // One detail record per description (per-wafer values).
        DataFile dataFile;
        foreach (FileRead.Description description in descriptions)
        {
            dataFile = new DataFile
            {
                Data = "*Data*",
                i = -1,
                Id = 0, //item.Id,
                AreaCount = description.AreaCount,
                AreaTotal = description.AreaTotal,
                Bin1 = description.Bin1,
                Bin2 = description.Bin2,
                Bin3 = description.Bin3,
                Bin4 = description.Bin4,
                Bin5 = description.Bin5,
                Bin6 = description.Bin6,
                Bin7 = description.Bin7,
                Bin8 = description.Bin8,
                Comments = description.Comments,
                Date = description.Date,
                Diameter = description.Diameter,
                Exclusion = description.Exclusion,
                Gain = description.Gain,
                HazeAverage = description.HazeAverage,
                HazePeak = description.HazePeak,
                HazeRegion = description.HazeRegion,
                HazeRng = description.HazeRng,
                HeaderUniqueId = description.HeaderUniqueId,
                LPDCM2 = description.LPDCM2,
                LPDCount = description.LPDCount,
                Laser = description.Laser,
                Mean = description.Mean,
                Recipe = description.Recipe,
                ScratchCount = description.ScratchCount,
                ScratchTotal = description.ScratchTotal,
                Slot = description.Slot,
                Sort = description.Sort,
                StdDev = description.StdDev,
                SumOfDefects = description.SumOfDefects,
                Thresh = description.Thresh,
                Thruput = description.Thruput,
                Title = null,
                UniqueId = description.UniqueId
            };
            Details.Add(dataFile);
        }
        // NOTE(review): Date is overwritten with the sequence timestamp using the
        // current culture's ToString() — confirm culture-invariance is not required.
        Date = logic.Logistics.DateTimeFromSequence.ToString();
        if (UniqueId is null && Details.Any())
            UniqueId = Details[0].HeaderUniqueId;
        // Normalize empty bin strings to null so they are omitted/NULL downstream.
        for (int i = 0; i < Details.Count; i++)
        {
            if (string.IsNullOrEmpty(Details[i].Bin1))
                Details[i].Bin1 = null;
            if (string.IsNullOrEmpty(Details[i].Bin2))
                Details[i].Bin2 = null;
            if (string.IsNullOrEmpty(Details[i].Bin3))
                Details[i].Bin3 = null;
            if (string.IsNullOrEmpty(Details[i].Bin4))
                Details[i].Bin4 = null;
            if (string.IsNullOrEmpty(Details[i].Bin5))
                Details[i].Bin5 = null;
            if (string.IsNullOrEmpty(Details[i].Bin6))
                Details[i].Bin6 = null;
            if (string.IsNullOrEmpty(Details[i].Bin7))
                Details[i].Bin7 = null;
            if (string.IsNullOrEmpty(Details[i].Bin8))
                Details[i].Bin8 = null;
        }
    }

}
|
||||
|
||||
}
|
||||
|
||||
}
|
912
Adaptation/Helpers/ProcessData.cs
Normal file
912
Adaptation/Helpers/ProcessData.cs
Normal file
@ -0,0 +1,912 @@
|
||||
using Adaptation.Shared;
|
||||
using Adaptation.Shared.Metrology;
|
||||
using log4net;
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Diagnostics;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Text;
|
||||
using System.Text.Json;
|
||||
using System.Text.RegularExpressions;
|
||||
|
||||
namespace Adaptation.Helpers
|
||||
{
|
||||
|
||||
public partial class ProcessData : IProcessData
|
||||
{
|
||||
|
||||
public HeaderFile Header { get; private set; }
|
||||
public List<DataFile> Details { get; private set; }
|
||||
|
||||
private int _I;
|
||||
private readonly ILog _Log;
|
||||
private string _Data;
|
||||
|
||||
/// <summary>
/// Parses the run's files into a HeaderFile and its DataFile details.
/// Clears the supplied fileInfoCollection first; Parse is expected to
/// repopulate it as a side effect.
/// </summary>
public ProcessData(ILogic logic, ConfigData configData, List<FileInfo> fileInfoCollection)
{
    Header = null;
    fileInfoCollection.Clear();
    _I = 0;
    _Data = string.Empty;
    Details = new List<DataFile>();
    _Log = LogManager.GetLogger(typeof(ProcessData));
    Tuple<HeaderFile, List<DataFile>> tuple = Parse(logic, configData, fileInfoCollection);
    Details.AddRange(tuple.Item2);
    Header = tuple.Item1;
}
|
||||
|
||||
/// <summary>
/// Builds the results payload: serializes the per-test descriptions to JSON and
/// returns them as a <see cref="JsonElement"/> alongside the logistics line and files.
/// </summary>
/// <exception cref="Exception">
/// Thrown when the config is not a <see cref="ConfigData"/>, the event is not a
/// FileRead with details, or tests/descriptions disagree in count or Test value.
/// </exception>
public Tuple<string, JsonElement?, List<FileInfo>> GetResults(ILogic logic, ConfigDataBase configDataBase, List<FileInfo> fileInfoCollection)
{
    Tuple<string, JsonElement?, List<FileInfo>> results;
    if (!(configDataBase is ConfigData configData))
        throw new Exception();
    List<Test> tests = new List<Test>();
    List<IProcessDataDescription> descriptions;
    EventName eventName = configData.GetEventNameValue();
    if (eventName == EventName.FileRead && Details.Any())
    {
        // One Tencor test per parsed wafer detail (the detail itself isn't consumed here;
        // the original used an unused foreach variable).
        for (int d = 0; d < Details.Count; d++)
            tests.Add(Test.Tencor);
        descriptions = configData.GetDescription(logic, tests, this);
    }
    else
        throw new Exception();
    if (!configData.EafHosted)
    {
        // Outside EAF hosting this is invoked for its side effects only; the
        // return value is intentionally discarded.
        new FileRead.Description().GetDescription(logic, configData, tests, this);
    }
    if (tests.Count != descriptions.Count)
        throw new Exception();
    for (int i = 0; i < tests.Count; i++)
    {
        if (descriptions[i].Test != (int)tests[i])
            throw new Exception();
    }
    string json;
    if (descriptions[0] is Duplicator.Description)
    {
        List<Duplicator.Description> duplicatorDescriptions = (from l in descriptions select (Duplicator.Description)l).ToList();
        json = JsonSerializer.Serialize(duplicatorDescriptions, duplicatorDescriptions.GetType());
    }
    else if (descriptions[0] is FileRead.Description)
    {
        List<FileRead.Description> fileReadDescriptions = (from l in descriptions select (FileRead.Description)l).ToList();
        json = JsonSerializer.Serialize(fileReadDescriptions, fileReadDescriptions.GetType());
    }
    else
        throw new Exception();
    // Round-trip through the serializer so the payload is exposed as a JsonElement.
    object @object = JsonSerializer.Deserialize<object>(json);
    if (!(@object is JsonElement jsonElement))
        throw new Exception();
    results = new Tuple<string, JsonElement?, List<FileInfo>>(logic.Logistics.Logistics1[0], jsonElement, fileInfoCollection);
    return results;
}
/// <summary>
/// Groups duplicator descriptions by test via the config and validates the grouping.
/// NOTE(review): <paramref name="jsonElement"/> is not used here — confirm callers rely on the signature only.
/// </summary>
public static Dictionary<Test, List<Duplicator.Description>> GetKeyValuePairs(ConfigData configData, JsonElement jsonElement, List<Duplicator.Description> processDataDescriptions, bool extra = false)
{
    Dictionary<Test, List<Duplicator.Description>> keyValuePairs = configData.GetKeyValuePairs(processDataDescriptions);
    configData.CheckProcessDataDescription(keyValuePairs, extra);
    return keyValuePairs;
}
/// <summary>
/// Extracts only the <see cref="FileRead.Description"/> entries from the
/// descriptions deserialized out of <paramref name="jsonElement"/>.
/// </summary>
public static List<FileRead.Description> GetProcessDataFileReadDescriptions(ConfigData configData, JsonElement jsonElement)
{
    List<FileRead.Description> descriptions = new List<FileRead.Description>();
    foreach (IProcessDataDescription item in configData.GetIProcessDataDescriptions(jsonElement))
    {
        // Any description that is not the FileRead flavor is skipped.
        if (item is FileRead.Description description)
            descriptions.Add(description);
    }
    return descriptions;
}
/// <summary>
/// Serializes descriptions into the downstream flat-file format: the GAN_PPTST
/// semicolon layout (one row per wafer, padded to exactly eight samples) when
/// <paramref name="ganPPTST"/> is set, otherwise one tab-delimited record built
/// from the first description plus the MES entity.
/// </summary>
/// <param name="logic">Supplies the MES entity for the tab-delimited record.</param>
/// <param name="descriptions">Parsed wafer descriptions; index 0 supplies lot-level values.</param>
/// <param name="ganPPTST">True for the GAN_PPTST format.</param>
/// <exception cref="Exception">GAN_PPTST output did not contain exactly eight sample rows.</exception>
public static string GetLines(ILogic logic, List<FileRead.Description> descriptions, bool ganPPTST)
{
    StringBuilder result = new StringBuilder();
    FileRead.Description x = descriptions[0];
    if (ganPPTST)
    {
        string slot;
        string reactor;
        const int eight = 8;
        DateTime dateTime = DateTime.Parse(x.Date);
        // Normalize the lot: strip the 69-/71- site prefixes and remaining dashes.
        string lot = x.Lot.ToLower().Replace("69-", string.Empty).Replace("71-", string.Empty).Replace("-", string.Empty);
        if (string.IsNullOrEmpty(x.Lot) || x.Lot.Length < 2)
            reactor = "R";
        else
            reactor = string.Concat("R", x.Lot.Substring(0, 2));
        result.Append(nameof(x.Date)).Append(";").
            Append("Part").Append(";").
            Append(nameof(x.Reactor)).Append(";").
            Append("Lot").Append(";").
            Append(nameof(DataFile.Slot)).Append(";").
            Append(nameof(DataFile.Bin1)).Append(";").
            Append(nameof(DataFile.Bin2)).Append(";").
            Append(nameof(DataFile.Bin3)).Append(";").
            Append(nameof(DataFile.Bin4)).Append(";").
            Append(nameof(DataFile.Bin5)).Append(";").
            Append(nameof(DataFile.Bin6)).Append(";").
            Append("Bin9").
            AppendLine();
        foreach (FileRead.Description description in descriptions)
        {
            slot = description.Slot.Replace("*", string.Empty);
            result.Append("!").Append(dateTime.ToString("MM/dd/yyyy HH:mm:ss")).Append(";").
                Append("Particle Adder;").
                Append(reactor).Append(";").
                Append(lot).Append(";").
                Append(slot).Append(";").
                Append(description.Bin1).Append(";").
                Append(description.Bin2).Append(";").
                Append(description.Bin3).Append(";").
                Append(description.Bin4).Append(";").
                Append(description.Bin5).Append(";").
                Append(description.Bin6).Append(";").
                Append(description.AreaCount).
                AppendLine();
        }
        if (descriptions.Count != eight)
        {
            // Pad with placeholder rows so the output always carries eight samples.
            string negativeTenThousand = "-10000";
            for (int i = descriptions.Count; i < eight; i++)
            {
                result.Append("!").Append(dateTime.ToString("MM/dd/yyyy HH:mm:ss")).Append(";").
                    Append("Particle Adder;").
                    Append(reactor).Append(";").
                    Append(lot).Append(";").
                    Append(negativeTenThousand).Append(";").
                    Append(negativeTenThousand).Append(";").
                    Append(negativeTenThousand).Append(";").
                    Append(negativeTenThousand).Append(";").
                    Append(negativeTenThousand).Append(";").
                    Append(negativeTenThousand).Append(";").
                    Append(negativeTenThousand).Append(";").
                    Append(negativeTenThousand).
                    AppendLine();
            }
        }
        if (result.ToString().Split('\n').Length != (eight + 2))
            throw new Exception(string.Concat("Must have ", eight, " samples"));
    }
    else
    {
        char del = '\t';
        result.Append(x.AreaCountAvg).Append(del). // 001 - AreaCountAvg
            Append(x.AreaCountMax).Append(del). // 002 - AreaCountMax
            Append(x.AreaCountMin).Append(del). // 003 - AreaCountMin
            Append(x.AreaCountStdDev).Append(del). // 004 - AreaCountStdDev
            Append(x.AreaTotalAvg).Append(del). // 005 - AreaTotalAvg
            Append(x.AreaTotalMax).Append(del). // 006 - AreaTotalMax
            Append(x.AreaTotalMin).Append(del). // 007 - AreaTotalMin
            Append(x.AreaTotalStdDev).Append(del). // 008 - AreaTotalStdDev
            Append(x.Date).Append(del). // 009 -
            Append(x.HazeAverageAvg).Append(del). // 010 - Haze Average
            Append(x.HazeAverageMax).Append(del). // 011 -
            Append(x.HazeAverageMin).Append(del). // 012 -
            Append(x.HazeAverageStdDev).Append(del). // 013 -
            Append(x.HazeRegionAvg).Append(del). // 014 -
            Append(x.HazeRegionMax).Append(del). // 015 -
            Append(x.HazeRegionMin).Append(del). // 016 -
            Append(x.HazeRegionStdDev).Append(del). // 017 -
            Append(x.Lot).Append(del). // 018 -
            Append(x.LPDCM2Avg).Append(del). // 019 -
            Append(x.LPDCM2Max).Append(del). // 020 -
            Append(x.LPDCM2Min).Append(del). // 021 -
            Append(x.LPDCM2StdDev).Append(del). // 022 -
            Append(x.LPDCountAvg).Append(del). // 023 -
            Append(x.LPDCountMax).Append(del). // 024 -
            Append(x.LPDCountMin).Append(del). // 025 - FIX: was LPDCM2Min (copy/paste; column 021 already carries it)
            Append(x.LPDCountStdDev).Append(del). // 026 -
            Append(x.Employee).Append(del). // 027 -
            Append(x.RDS).Append(del). // 028 - Lot
            Append(x.Reactor).Append(del). // 029 - Process
            Append(x.Recipe.Replace(";", string.Empty)).Append(del). // 030 - Part
            Append(x.ScratchCountAvg).Append(del). // 031 - Scratch Count
            Append(x.ScratchCountMax).Append(del). // 032 -
            Append(x.ScratchCountMin).Append(del). // 033 -
            Append(x.ScratchCountStdDev).Append(del). // 034 - FIX: was ScratchTotalStdDev (copy/paste; column 038 already carries it)
            Append(x.ScratchTotalAvg).Append(del). // 035 - Scratch Length
            Append(x.ScratchTotalMax).Append(del). // 036 -
            Append(x.ScratchTotalMin).Append(del). // 037 -
            Append(x.ScratchTotalStdDev).Append(del). // 038 -
            Append(x.SumOfDefectsAvg).Append(del). // 039 - Average Sum of Defects
            Append(x.SumOfDefectsMax).Append(del). // 040 - Max Sum of Defects
            Append(x.SumOfDefectsMin).Append(del). // 041 - Min Sum of Defects
            Append(x.SumOfDefectsStdDev).Append(del). // 042 - SumOfDefectsStdDev
            Append(logic.Logistics.MesEntity).Append(del). // 043 -
            AppendLine();
    }
    return result.ToString();
}
/// <summary>
/// Rewrites text tokens inside the data PDF so each "[...]" text array is suffixed
/// with the reactor/load-lock taken from the first description's Comments, saving
/// the document in place when anything was updated. Uses the IKVM-hosted PDFBox API.
/// </summary>
/// <param name="descriptions">descriptions[0].Comments supplies the reactor/load-lock text.</param>
/// <param name="checkFileName">Path of the PDF to update in place.</param>
private static void UpdateDataPDF(List<FileRead.Description> descriptions, string checkFileName)
{
    string value;
    object possiblePage;
    object possibleString;
    object possibleCOSArray;
    java.util.List tokenList;
    java.util.List arrayList;
    java.io.OutputStream outputStream;
    java.util.ListIterator tokenIterator;
    java.util.ListIterator arrayIterator;
    List<string> updateValues = new List<string>();
    string reactorLoadLock = descriptions[0].Comments;
    StringBuilder stringBuilder = new StringBuilder();
    java.io.File file = new java.io.File(checkFileName);
    org.apache.pdfbox.pdmodel.common.PDStream pdStream;
    org.apache.pdfbox.pdmodel.common.PDStream updatedStream;
    org.apache.pdfbox.pdfparser.PDFStreamParser pdfStreamParser;
    org.apache.pdfbox.pdfwriter.ContentStreamWriter contentStreamWriter;
    org.apache.pdfbox.pdmodel.PDDocument pdDocument = org.apache.pdfbox.pdmodel.PDDocument.load(file);
    org.apache.pdfbox.pdmodel.PDDocumentCatalog pdDocumentCatalog = pdDocument.getDocumentCatalog();
    java.util.List pagesList = pdDocumentCatalog.getAllPages();
    java.util.ListIterator pageIterator = pagesList.listIterator();
    // The counters only bound the loops; termination is driven by hasNext().
    for (short i = 1; i < short.MaxValue; i++)
    {
        if (!pageIterator.hasNext())
            break;
        possiblePage = pageIterator.next();
        if (!(possiblePage is org.apache.pdfbox.pdmodel.PDPage page))
            continue;
        pdStream = page.getContents();
        pdfStreamParser = new org.apache.pdfbox.pdfparser.PDFStreamParser(pdStream);
        pdfStreamParser.parse();
        tokenList = pdfStreamParser.getTokens();
        tokenIterator = tokenList.listIterator();
        for (short t = 1; t < short.MaxValue; t++) // FIX: condition tested outer counter i
        {
            if (!tokenIterator.hasNext())
                break;
            possibleCOSArray = tokenIterator.next();
            if (!(possibleCOSArray is org.apache.pdfbox.cos.COSArray cossArray))
                continue;
            stringBuilder.Clear();
            arrayList = cossArray.toList();
            arrayIterator = arrayList.listIterator();
            for (short a = 1; a < short.MaxValue; a++) // FIX: condition tested outer counter i
            {
                if (!arrayIterator.hasNext())
                    break;
                possibleString = arrayIterator.next();
                if (!(possibleString is org.apache.pdfbox.cos.COSString cossString))
                    continue;
                value = cossString.getString();
                stringBuilder.Append(value);
                // Only act when the closing bracket of the text array is reached.
                if (value != "]")
                    continue;
                updateValues.Add(value);
                value = stringBuilder.ToString();
                if (value.Contains("[]"))
                    cossArray.setString(a - 1, string.Concat("*", reactorLoadLock, "]"));
                else
                    cossArray.setString(a - 1, string.Concat(" {*", reactorLoadLock, "}]"));
            }
        }
        if (updateValues.Any())
        {
            // Write the modified token stream back onto the page.
            updatedStream = new org.apache.pdfbox.pdmodel.common.PDStream(pdDocument);
            outputStream = updatedStream.createOutputStream();
            contentStreamWriter = new org.apache.pdfbox.pdfwriter.ContentStreamWriter(outputStream);
            contentStreamWriter.writeTokens(tokenList);
            outputStream.close();
            page.setContents(updatedStream);
        }
    }
    if (updateValues.Any())
        pdDocument.save(checkFileName);
    pdDocument.close();
}
/// <summary>
/// Posts the lot-level data PDF (header attachment) and per-wafer image/data PDFs
/// (data attachments) for a previously sent WS.Results record to the OpenInsight
/// Metrology Viewer API.
/// </summary>
/// <param name="log">Debug logger for non-fatal conditions.</param>
/// <param name="configData">Supplies the Metrology Viewer API endpoint.</param>
/// <param name="logistics">NOTE(review): not referenced in this body — confirm needed by callers.</param>
/// <param name="dateTime">NOTE(review): not referenced in this body — confirm needed by callers.</param>
/// <param name="logisticsSequenceMemoryDirectory">Directory holding the persisted Results.json memory file.</param>
/// <param name="descriptions">Parsed wafer descriptions supplying the unique ids.</param>
/// <param name="matchDirectory">Directory containing exactly one .pcl and its derived PDFs.</param>
internal static void PostOpenInsightMetrologyViewerAttachments(ILog log, ConfigData configData, Logistics logistics, DateTime dateTime, string logisticsSequenceMemoryDirectory, List<FileRead.Description> descriptions, string matchDirectory)
{
    string checkFileName;
    string[] pclFiles = Directory.GetFiles(matchDirectory, "*.pcl", SearchOption.TopDirectoryOnly);
    if (pclFiles.Length != 1)
        throw new Exception("Invalid source file count!");
    string sourceFileNameNoExt = Path.GetFileNameWithoutExtension(pclFiles[0]);
    // The HeaderID of the originally posted results is recovered from the memory file.
    string wsResultsMemoryFile = string.Concat(logisticsSequenceMemoryDirectory, @"\", nameof(WS.Results), ".json");
    if (!File.Exists(wsResultsMemoryFile))
        throw new Exception(string.Concat("Memory file <", wsResultsMemoryFile, "> doesn't exist!"));
    string json = File.ReadAllText(wsResultsMemoryFile);
    WS.Results metrologyWSRequest = JsonSerializer.Deserialize<WS.Results>(json);
    long wsResultsHeaderID = metrologyWSRequest.HeaderID;
    List<WS.Attachment> dataAttachments = new List<WS.Attachment>();
    List<WS.Attachment> headerAttachments = new List<WS.Attachment>();
    // Lot-level data PDF becomes the header attachment (after stamping the reactor/load-lock).
    checkFileName = string.Concat(matchDirectory, @"\", sourceFileNameNoExt, "_data.pdf");
    if (!File.Exists(checkFileName))
        log.Debug("Header file doesn't exist!"); // NOTE(review): message says "Header" but this is the _data.pdf — confirm wording
    else
    {
        UpdateDataPDF(descriptions, checkFileName);
        headerAttachments.Add(new WS.Attachment(descriptions[0].HeaderUniqueId, "Data.pdf", checkFileName));
    }
    // Per-wafer image/data PDFs (slot '*' mapped to 's' in the file names).
    foreach (FileRead.Description description in descriptions)
    {
        checkFileName = string.Concat(matchDirectory, @"\", sourceFileNameNoExt, "_", description.Slot.Replace('*', 's'), "_image.pdf");
        if (File.Exists(checkFileName))
            dataAttachments.Add(new WS.Attachment(description.UniqueId, "Image.pdf", checkFileName));
        checkFileName = string.Concat(matchDirectory, @"\", sourceFileNameNoExt, "_", description.Slot.Replace('*', 's'), "_data.pdf");
        if (File.Exists(checkFileName))
            dataAttachments.Add(new WS.Attachment(description.UniqueId, "Data.pdf", checkFileName));
    }
    // Mismatch is logged but not fatal; the upload still proceeds.
    if (dataAttachments.Count == 0 || dataAttachments.Count != descriptions.Count)
        log.Debug("Invalid attachment count!");
    WS.AttachFiles(configData.OpenInsightMetrogyViewerAPI, wsResultsHeaderID, headerAttachments, dataAttachments);
}
/// <summary>
/// Convert the raw data file to parsable file format - in this case from PCL to PDF
/// using the GhostPCL executable configured in <paramref name="configData"/>.
/// The conversion is skipped when the PDF already exists.
/// </summary>
/// <param name="configData">Supplies the GhostPCL executable path.</param>
/// <param name="sourceFile">source file to be converted to PDF</param>
/// <returns>Path of the PDF corresponding to <paramref name="sourceFile"/>.</returns>
/// <exception cref="Exception">The converter did not produce the PDF (including on timeout).</exception>
private static string ConvertSourceFileToPdf(ConfigData configData, string sourceFile)
{
    string result = Path.ChangeExtension(sourceFile, ".pdf");
    if (!File.Exists(result))
    {
        string arguments = string.Concat("-dSAFER -dBATCH -dNOPAUSE -sOutputFile=\"", result, "\" -sDEVICE=pdfwrite \"", sourceFile, "\"");
        // FIX: the Process handle was never disposed; wrap it so the OS handle is released.
        // A timeout (WaitForExit returning false) falls through to the existence check below.
        using (Process process = Process.Start(configData.GhostPCLFileName, arguments))
            process.WaitForExit(30000);
        if (!File.Exists(result))
            throw new Exception("PDF file wasn't created");
    }
    return result;
}
/// <summary>
/// Test and fix a data line from the Lot Summary page if there are two values that are merged.
/// When the split line has fewer than nine columns, over-wide cells are split using the
/// known fixed column widths until nine values remain.
/// </summary>
/// <param name="toEol">data line from Lot Summary; replaced in place with the fixed array</param>
private void FixToEolArray(ref string[] toEol)
{
    const int MAX_COLUMNS = 9;
    // Fixed print widths of the nine statistics columns on the Lot Summary page.
    int[] mColumnWidths = new int[MAX_COLUMNS] { 8, 6, 6, 6, 6, 7, 7, 5, 7 };
    // is it short at least one data point
    if (toEol.Length < MAX_COLUMNS)
    {
        _Log.Debug($"****FixToEolArray - Starting array:");
        _Log.Debug(toEol);
        _Log.Debug($"****FixToEolArray - Column widths:");
        _Log.Debug(mColumnWidths);
        string leftVal, rightVal;

        // size up and assign a working list
        List<string> toEolList = new List<string>(toEol);
        if (string.IsNullOrEmpty(toEolList[toEolList.Count - 1]))
            toEolList.RemoveAt(toEolList.Count - 1); // removes a null element at end
        _Log.Debug($"****FixToEolArray - New toEolList:");
        _Log.Debug(toEolList);
        // Left-pad with empty cells so the list is exactly MAX_COLUMNS long.
        for (int i = toEolList.Count; i < MAX_COLUMNS; i++)
            toEolList.Insert(0, ""); // insert to top of list
        _Log.Debug(toEolList);

        // start at the end
        for (int i = MAX_COLUMNS - 1; i >= 0; i--)
        {
            // test for a bad value - does it have too many characters
            _Log.Debug($"****FixToEolArray - toEolList[i].Length: {toEolList[i].Length}, mColumnWidths[i]: {mColumnWidths[i]}");
            if (toEolList[i].Length > mColumnWidths[i])
            {
                // split it up into its two parts: the overflow belongs to the column on the left
                leftVal = toEolList[i].Substring(0, toEolList[i].Length - mColumnWidths[i]);
                rightVal = toEolList[i].Substring(leftVal.Length);
                _Log.Debug($"****FixToEolArray - Split leftVal: {leftVal}");
                _Log.Debug($"****FixToEolArray - Split rightVal: {rightVal}");

                // insert new value
                toEolList[i] = rightVal;
                toEolList.Insert(i, leftVal);
                if (string.IsNullOrEmpty(toEolList[0]))
                    toEolList.RemoveAt(0); // removes a null element at end
                _Log.Debug($"****FixToEolArray - Fixed toEolList:");
                _Log.Debug(toEolList);
            }
        }
        toEol = toEolList.ToArray();
        _Log.Debug($"****FixToEolArray - Ending array:");
        _Log.Debug(toEol);
    }
}
/// <summary>
/// Advances the scan cursor to the first character after <paramref name="text"/>;
/// if the text is not found, the cursor moves to the end of the data.
/// </summary>
private void ScanPast(string text)
{
    int index = _Data.IndexOf(text, _I);
    _I = index < 0 ? _Data.Length : index + text.Length;
}
/// <summary>
/// Returns the trimmed text between the cursor and the next occurrence of
/// <paramref name="text"/> (or to end-of-data if absent), consuming the marker.
/// </summary>
private string GetBefore(string text)
{
    string captured;
    int index = _Data.IndexOf(text, _I);
    if (index < 0)
    {
        captured = _Data.Substring(_I);
        _I = _Data.Length;
    }
    else
    {
        captured = _Data.Substring(_I, index - _I);
        _I = index + text.Length;
    }
    return captured.Trim();
}
/// <summary>
/// Returns the text between the cursor and the next occurrence of
/// <paramref name="text"/> (or to end-of-data if absent), consuming the marker.
/// Trimming is applied only when <paramref name="trim"/> is true.
/// </summary>
private string GetBefore(string text, bool trim)
{
    // FIX: the original duplicated the scan logic across the trim/no-trim paths
    // (delegating to GetBefore(text) for one of them); scan once, trim conditionally.
    string result;
    int num = _Data.IndexOf(text, _I);
    if (num > -1)
    {
        result = _Data.Substring(_I, num - _I);
        _I = num + text.Length;
    }
    else
    {
        result = _Data.Substring(_I);
        _I = _Data.Length;
    }
    return trim ? result.Trim() : result;
}
/// <summary>
/// True when every character of <paramref name="text"/> is whitespace (including
/// the empty string). NOTE(review): despite the name, a null argument would throw.
/// </summary>
private bool IsNullOrWhiteSpace(string text)
{
    foreach (char c in text)
    {
        if (!char.IsWhiteSpace(c))
            return false;
    }
    return true;
}
/// <summary>
/// True when the remainder of the current line (cursor to next newline, or to
/// end-of-data) is empty or all whitespace. Does not advance the cursor.
/// </summary>
private bool IsBlankLine()
{
    int newline = _Data.IndexOf("\n", _I);
    string rest = newline < 0 ? _Data.Substring(_I) : _Data.Substring(_I, newline - _I);
    return IsNullOrWhiteSpace(rest);
}
// Consumes through the next newline and returns the trimmed line text.
private string GetToEOL() => GetBefore("\n");
// Consumes through the next newline; trims the returned text only when requested.
private string GetToEOL(bool trim) => trim ? GetToEOL() : GetBefore("\n", false);
/// <summary>
/// Returns the trimmed text between the cursor and the next occurrence of
/// <paramref name="text"/> WITHOUT advancing the cursor.
/// NOTE(review): if the marker is absent, IndexOf returns -1 and Substring throws — confirm callers guarantee presence.
/// </summary>
private string GetToText(string text)
{
    int index = _Data.IndexOf(text, _I);
    return _Data.Substring(_I, index - _I).Trim();
}
/// <summary>
/// Skips leading whitespace, then consumes and returns the next run of
/// non-whitespace characters (empty string at end-of-data).
/// </summary>
private string GetToken()
{
    // FIX: the original classified each character via IsNullOrWhiteSpace(Substring(i, 1)),
    // allocating a one-char string per character; char.IsWhiteSpace is equivalent
    // for a single character and allocation-free.
    while (_I < _Data.Length && char.IsWhiteSpace(_Data[_I]))
        ++_I;
    int end = _I;
    while (end < _Data.Length && !char.IsWhiteSpace(_Data[end]))
        ++end;
    string token = _Data.Substring(_I, end - _I);
    _I = end;
    // Trim kept for parity with the original (token already has no whitespace).
    return token.Trim();
}
/// <summary>
/// Returns the next line (trimmed) without moving the scan cursor.
/// </summary>
private string PeekNextLine()
{
    int saved = _I;
    string line = GetToEOL();
    _I = saved; // restore so the line can be consumed by the caller
    return line;
}
/// <summary>
/// Parses the Lot Summary page text: date, recipe, lot id, the per-slot '*' markers,
/// and the Min/Max/Average/Std Dev statistics rows. Scanning is strictly sequential,
/// so the ScanPast/GetBefore/GetToEOL calls must stay in page order.
/// </summary>
/// <param name="logic">Supplies JobID, MesEntity and the report path used for UniqueId.</param>
/// <param name="headerFileName">Key of the lot-summary page within <paramref name="pages"/>.</param>
/// <param name="pages">Map of page PDF file name to extracted page text.</param>
/// <param name="slots">Receives an empty detail list per slot marker found; filled by the caller.</param>
/// <exception cref="Exception">The lot-summary page is missing from <paramref name="pages"/>.</exception>
private HeaderFile ParseLotSummary(ILogic logic, string headerFileName, Dictionary<string, string> pages, Dictionary<string, List<DataFile>> slots)
{
    HeaderFile result = new HeaderFile { JobID = logic.Logistics.JobID, MesEntity = logic.Logistics.MesEntity, Date = DateTime.Now.ToString() };
    _I = 0;
    //string headerText;
    //string altHeaderFileName = Path.ChangeExtension(headerFileName, ".txt");
    //if (File.Exists(altHeaderFileName))
    //    headerText = File.ReadAllText(altHeaderFileName);
    //else
    //{
    //    //Pdfbox, IKVM.AWT.WinForms
    //    org.apache.pdfbox.pdmodel.PDDocument pdfDocument = org.apache.pdfbox.pdmodel.PDDocument.load(headerFileName);
    //    org.apache.pdfbox.util.PDFTextStripper stripper = new org.apache.pdfbox.util.PDFTextStripper();
    //    headerText = stripper.getText(pdfDocument);
    //    pdfDocument.close();
    //    File.AppendAllText(altHeaderFileName, headerText);
    //}
    //result.Id = h;
    //result.Title = h;
    //result.Zone = h;
    //result.PSN = h;
    //result.Layer = h;
    result.ParseErrorText = string.Empty;
    if (!pages.ContainsKey(headerFileName))
        throw new Exception();
    _I = 0;
    _Data = pages[headerFileName];
    ScanPast("Date:");
    result.Date = GetToEOL();
    ScanPast("Recipe ID:");
    result.Recipe = GetBefore("LotID:");
    result.Recipe = result.Recipe.Replace(";", "");
    // The lot id is terminated by a bracketed annotation whose exact form varies.
    if (_Data.Contains("[]"))
        result.Lot = GetBefore("[]");
    else if (_Data.Contains("[7]"))
        result.Lot = GetBefore("[7]");
    else
        result.Lot = GetBefore("[");

    // Remove illegal characters \/:*?"<>| found in the Lot.
    result.Lot = Regex.Replace(result.Lot, @"[\\,\/,\:,\*,\?,\"",\<,\>,\|]", "_").Split('\r')[0].Split('\n')[0];

    // determine number of wafers and their slot numbers
    _Log.Debug(_Data.Substring(_I));
    string slot;
    string toEOL;
    // Each wafer row on the summary is introduced by a '*' marker.
    int slotCount = _Data.Substring(_I).Split('*').Length - 1;
    _Log.Debug($"****HeaderFile - Slot Count: {slotCount}.");
    for (int i = 0; i < slotCount; i++)
    {
        ScanPast("*");
        toEOL = GetToEOL(false);
        // First two characters after '*' are the slot number.
        slot = string.Concat("*", toEOL.Substring(0, 2));
        if (!slots.ContainsKey(slot))
            slots.Add(slot, new List<DataFile>());
    }
    _Log.Debug($"****HeaderFile - Slots:");
    _Log.Debug(slots);

    // The four statistics rows each carry nine fixed-width columns; FixToEolArray
    // repairs cells that the text extraction merged together.
    ScanPast("Min:");

    string[] toEol1 = GetToEOL(false).Trim().Split(' ');
    _Log.Debug($"****HeaderFile - toEol1 Count: {toEol1.Length}.");
    FixToEolArray(ref toEol1);
    result.LPDCountMin = toEol1[0].Trim();
    result.LPDCM2Min = toEol1[1].Trim();
    result.AreaCountMin = toEol1[2].Trim();
    result.AreaTotalMin = toEol1[3].Trim();
    result.ScratchCountMin = toEol1[4].Trim();
    result.ScratchTotalMin = toEol1[5].Trim();
    result.SumOfDefectsMin = toEol1[6].Trim();
    result.HazeRegionMin = toEol1[7].Trim();
    result.HazeAverageMin = toEol1[8].Trim();
    ScanPast("Max:");

    string[] toEol2 = GetToEOL(false).Trim().Split(' ');
    _Log.Debug($"****HeaderFile - toEol2 Count: {toEol2.Length}.");
    FixToEolArray(ref toEol2);
    result.LPDCountMax = toEol2[0].Trim();
    result.LPDCM2Max = toEol2[1].Trim();
    result.AreaCountMax = toEol2[2].Trim();
    result.AreaTotalMax = toEol2[3].Trim();
    result.ScratchCountMax = toEol2[4].Trim();
    result.ScratchTotalMax = toEol2[5].Trim();
    result.SumOfDefectsMax = toEol2[6].Trim();
    result.HazeRegionMax = toEol2[7].Trim();
    result.HazeAverageMax = toEol2[8].Trim();
    ScanPast("Average:");

    string[] toEol3 = GetToEOL(false).Trim().Split(' ');
    _Log.Debug($"****HeaderFile - toEol3 Count: {toEol3.Length}.");
    FixToEolArray(ref toEol3);
    result.LPDCountAvg = toEol3[0].Trim();
    result.LPDCM2Avg = toEol3[1].Trim();
    result.AreaCountAvg = toEol3[2].Trim();
    result.AreaTotalAvg = toEol3[3].Trim();
    result.ScratchCountAvg = toEol3[4].Trim();
    result.ScratchTotalAvg = toEol3[5].Trim();
    result.SumOfDefectsAvg = toEol3[6].Trim();
    result.HazeRegionAvg = toEol3[7].Trim();
    result.HazeAverageAvg = toEol3[8].Trim();
    ScanPast("Std Dev:");

    string[] toEol4 = GetToEOL(false).Trim().Split(' ');
    _Log.Debug($"****HeaderFile - toEol4 Count: {toEol4.Length}.");
    FixToEolArray(ref toEol4);
    result.LPDCountStdDev = toEol4[0].Trim();
    result.LPDCM2StdDev = toEol4[1].Trim();
    result.AreaCountStdDev = toEol4[2].Trim();
    result.AreaTotalStdDev = toEol4[3].Trim();
    result.ScratchCountStdDev = toEol4[4].Trim();
    result.ScratchTotalStdDev = toEol4[5].Trim();
    result.SumOfDefectsStdDev = toEol4[6].Trim();
    result.HazeRegionStdDev = toEol4[7].Trim();
    result.HazeAverageStdDev = toEol4[8].Trim();

    // Lot format is Reactor-RDS-PSN; missing segments are simply left unset.
    string[] segments = result.Lot.Split('-');
    if (segments.Length > 0)
        result.Reactor = segments[0];
    if (segments.Length > 1)
        result.RDS = segments[1];
    if (segments.Length > 2)
        result.PSN = segments[2];
    // Example of header.UniqueId is TENCOR1_33-289217-4693_201901300556533336
    result.UniqueId = string.Format("{0}_{1}_{2}", logic.Logistics.JobID, result.Lot, Path.GetFileNameWithoutExtension(logic.Logistics.ReportFullPath));
    return result;
}
/// <summary>
/// Parses one wafer-summary page into a DataFile: slot, comments, LPD/area/scratch/
/// haze statistics, up to eight Bin values, and instrument settings. Scanning is
/// strictly sequential, so the calls must stay in page order.
/// </summary>
/// <param name="headerFile">Parsed lot header; supplies HeaderUniqueId and the UniqueId prefix.</param>
/// <param name="waferFileName">Key of this wafer page within <paramref name="pages"/>.</param>
/// <param name="pages">Map of page PDF file name to extracted page text.</param>
/// <exception cref="Exception">The wafer page is missing from <paramref name="pages"/>.</exception>
private DataFile ParseWaferSummary(HeaderFile headerFile, string waferFileName, Dictionary<string, string> pages)
{
    DataFile result = new DataFile { Data = "*Data*", i = -1, };
    _I = 0;
    //string waferText;
    //string altWaferFileName = Path.ChangeExtension(waferFileName, ".txt");
    //if (File.Exists(altWaferFileName))
    //    waferText = File.ReadAllText(altWaferFileName);
    //else
    //{
    //    //Pdfbox, IKVM.AWT.WinForms
    //    org.apache.pdfbox.pdmodel.PDDocument pdfDocument = org.apache.pdfbox.pdmodel.PDDocument.load(waferFileName);
    //    org.apache.pdfbox.util.PDFTextStripper dataStripper = new org.apache.pdfbox.util.PDFTextStripper();
    //    waferText = dataStripper.getText(pdfDocument);
    //    pdfDocument.close();
    //    File.AppendAllText(altWaferFileName, waferText);
    //}
    List<string> stringList = new List<string>();
    result.HeaderUniqueId = headerFile.UniqueId;
    result.Id = 0;
    result.Title = null;
    if (!pages.ContainsKey(waferFileName))
        throw new Exception();
    _I = 0;
    _Data = pages[waferFileName];
    ScanPast("Date:");
    result.Date = GetToEOL();
    ScanPast("ID#");
    result.Slot = GetToEOL();
    // Over-long slot values are truncated to a recognizable marker form.
    if (result.Slot.Length > 5)
        result.Slot = string.Concat(result.Slot.Substring(0, 5), "... - ***");
    //result.Slot = result.Slot.Replace("*", "");
    ScanPast("Comments:");
    result.Comments = GetToEOL();
    ScanPast("Sort:");
    result.Sort = GetToEOL();
    ScanPast("LPD Count:");
    result.LPDCount = GetToEOL();
    ScanPast("LPD / cm2:");
    result.LPDCM2 = GetToEOL();
    // Collect every "Bin n:" row; the loop also consumes the first non-Bin label,
    // whose value is read as Mean below.
    while (GetBefore(":").Contains("Bin"))
        stringList.Add(GetToEOL());
    if (stringList.Count >= 1)
        result.Bin1 = stringList[0];
    if (stringList.Count >= 2)
        result.Bin2 = stringList[1];
    if (stringList.Count >= 3)
        result.Bin3 = stringList[2];
    if (stringList.Count >= 4)
        result.Bin4 = stringList[3];
    if (stringList.Count >= 5)
        result.Bin5 = stringList[4];
    if (stringList.Count >= 6)
        result.Bin6 = stringList[5];
    if (stringList.Count >= 7)
        result.Bin7 = stringList[6];
    if (stringList.Count >= 8)
        result.Bin8 = stringList[7];
    result.Mean = GetToEOL();
    ScanPast("Std Dev:");
    result.StdDev = GetToEOL();
    ScanPast("Area Count:");
    result.AreaCount = GetToEOL();
    ScanPast("Area Total:");
    result.AreaTotal = GetToEOL();
    ScanPast("Scratch Count:");
    result.ScratchCount = GetToEOL();
    ScanPast("Scratch Total:");
    result.ScratchTotal = GetToEOL();
    ScanPast("Sum of All Defects:");
    result.SumOfDefects = GetToEOL();
    ScanPast("Haze Region:");
    result.HazeRegion = GetToEOL();
    ScanPast("Haze Average:");
    result.HazeAverage = GetToEOL();
    ScanPast("Haze Peak:");
    result.HazePeak = GetToEOL();
    // Instrument settings share lines, so GetBefore splits on the following label.
    ScanPast("Laser:");
    result.Laser = GetBefore("Gain:");
    result.Gain = GetBefore("Diameter:");
    result.Diameter = GetToEOL();
    ScanPast("Thresh:");
    result.Thresh = GetBefore("Exclusion:");
    result.Exclusion = GetToEOL();
    ScanPast("Haze Rng:");
    result.HazeRng = GetBefore("Thruput:");
    result.Thruput = GetToEOL();
    ScanPast("Recipe ID:");
    result.Recipe = GetToEOL();
    // e.g. <header UniqueId>_<slot without '*' and leading zeros>
    result.UniqueId = string.Format("{0}_{1}", headerFile.UniqueId, result.Slot.Replace("*", string.Empty).TrimStart('0'));
    return result;
}
private Tuple<HeaderFile, List<DataFile>> Parse(ILogic logic, ConfigData configData, List<FileInfo> fileInfoCollection)
|
||||
{
|
||||
Tuple<HeaderFile, List<DataFile>> result;
|
||||
object item;
|
||||
string pageText;
|
||||
string pagePDFFile;
|
||||
string pageTextFile;
|
||||
List<string> sourceFiles = new List<string>();
|
||||
List<string> missingSlots = new List<string>();
|
||||
List<DataFile> dataFiles = new List<DataFile>();
|
||||
Dictionary<string, string> pages = new Dictionary<string, string>();
|
||||
string sourcePath = Path.GetDirectoryName(logic.Logistics.ReportFullPath);
|
||||
Dictionary<string, List<DataFile>> slots = new Dictionary<string, List<DataFile>>();
|
||||
string sourceFileNamePdf = ConvertSourceFileToPdf(configData, logic.Logistics.ReportFullPath);
|
||||
sourceFiles.Add(sourceFileNamePdf);
|
||||
string sourceFileNameNoExt = Path.GetFileNameWithoutExtension(logic.Logistics.ReportFullPath);
|
||||
////PdfSharp open pdf
|
||||
//using (PdfSharp.Pdf.PdfDocument sourceDocument = PdfSharp.Pdf.IO.PdfReader.Open(sourceFileNamePdf, PdfSharp.Pdf.IO.PdfDocumentOpenMode.Import))
|
||||
//{
|
||||
// for (int idxPage = 0; idxPage < sourceDocument.PageCount; idxPage++)
|
||||
// {
|
||||
// // split the pdf into seperate pages. Odd pages are wafer image, even are wafer summary. Last page is Lot Summary.
|
||||
// _Log.Debug($"****ParseData - Splitting page: {idxPage}, sourceDocument: {sourceDocument.FullPath}, sourcePathFileNoExt: {sourcePathFileNoExt}");
|
||||
// //SplitPage(sourceDocument, sourcePathFileNoExt, idxPage);
|
||||
// pageNum = idxPage + 1;
|
||||
// pageFile = string.Format("{0}_{1}.pdf", sourcePathFileNoExt, pageNum);
|
||||
// _Log.Debug($"****SplitPage - Page {pageNum} Source file: {sourceDocument.FullPath}");
|
||||
// _Log.Debug($"****SplitPage - Page {pageNum} Output file: {pageFile}");
|
||||
// //PdfSharp Create new document
|
||||
// PdfSharp.Pdf.PdfDocument outputDocument = new PdfSharp.Pdf.PdfDocument { Version = sourceDocument.Version };
|
||||
// outputDocument.Info.Title = string.Format("Page {0} of {1}", pageNum, sourceDocument.Info.Title);
|
||||
// outputDocument.Info.Creator = sourceDocument.Info.Creator;
|
||||
// outputDocument.AddPage(sourceDocument.Pages[idxPage]);
|
||||
// outputDocument.Pages[0].CropBox = new PdfSharp.Pdf.PdfRectangle(new PdfSharp.Drawing.XRect(0, 100, 700, 700));
|
||||
// outputDocument.Save(pageFile);
|
||||
// }
|
||||
// sourceDocumentPageCount = sourceDocument.PageCount;
|
||||
// sourceDocument.Close();
|
||||
//}
|
||||
java.io.File file = new java.io.File(sourceFileNamePdf);
|
||||
org.apache.pdfbox.util.Splitter splitter = new org.apache.pdfbox.util.Splitter();
|
||||
org.apache.pdfbox.pdmodel.PDDocument pdDocument = org.apache.pdfbox.pdmodel.PDDocument.load(file);
|
||||
java.util.List list = splitter.split(pdDocument);
|
||||
java.util.ListIterator iterator = list.listIterator();
|
||||
org.apache.pdfbox.util.PDFTextStripper dataStripper = new org.apache.pdfbox.util.PDFTextStripper();
|
||||
for (short i = 1; i < short.MaxValue; i++)
|
||||
{
|
||||
if (!iterator.hasNext())
|
||||
break;
|
||||
item = iterator.next();
|
||||
pagePDFFile = string.Concat(sourcePath, @"\", sourceFileNameNoExt, "_", i, ".pdf");
|
||||
pageTextFile = Path.ChangeExtension(pagePDFFile, ".txt");
|
||||
if (File.Exists(pageTextFile))
|
||||
{
|
||||
pageText = File.ReadAllText(pageTextFile);
|
||||
sourceFiles.Add(pageTextFile);
|
||||
if (!(item is org.apache.pdfbox.pdmodel.PDDocument pd))
|
||||
continue;
|
||||
pd.close();
|
||||
}
|
||||
else if (File.Exists(pagePDFFile))
|
||||
{
|
||||
org.apache.pdfbox.pdmodel.PDDocument document = org.apache.pdfbox.pdmodel.PDDocument.load(pagePDFFile);
|
||||
pageText = dataStripper.getText(document);
|
||||
document.close();
|
||||
sourceFiles.Add(pagePDFFile);
|
||||
if (!(item is org.apache.pdfbox.pdmodel.PDDocument pd))
|
||||
continue;
|
||||
pd.close();
|
||||
}
|
||||
else
|
||||
{
|
||||
if (!(item is org.apache.pdfbox.pdmodel.PDDocument pd))
|
||||
continue;
|
||||
pageText = dataStripper.getText(pd);
|
||||
pd.save(pagePDFFile);
|
||||
sourceFiles.Add(pagePDFFile);
|
||||
pd.close();
|
||||
File.WriteAllText(pageTextFile, pageText);
|
||||
sourceFiles.Add(pageTextFile);
|
||||
}
|
||||
pages.Add(pagePDFFile, pageText);
|
||||
}
|
||||
pdDocument.close();
|
||||
// parse lot summary
|
||||
_Log.Debug($"****ParseData - Parsing lot summary");
|
||||
List<Tuple<string, string>> pageMapping = new List<Tuple<string, string>>();
|
||||
string headerFileName = string.Concat(sourcePath, @"\", sourceFileNameNoExt, "_", pages.Count, ".pdf");
|
||||
HeaderFile headerFile = ParseLotSummary(logic, headerFileName, pages, slots);
|
||||
foreach (KeyValuePair<string, string> keyValuePair in pages)
|
||||
{
|
||||
if (keyValuePair.Key == headerFileName)
|
||||
continue;
|
||||
if (string.IsNullOrEmpty(keyValuePair.Value.Trim()))
|
||||
{
|
||||
pageMapping.Add(new Tuple<string, string>(keyValuePair.Key, string.Empty));
|
||||
continue;
|
||||
}
|
||||
if (!pages.ContainsKey(keyValuePair.Key))
|
||||
throw new Exception();
|
||||
DataFile dataFile = ParseWaferSummary(headerFile, keyValuePair.Key, pages);
|
||||
if (string.IsNullOrEmpty(dataFile.Recipe) || dataFile.Recipe != headerFile.Recipe)
|
||||
{
|
||||
missingSlots.Add(keyValuePair.Key);
|
||||
pageMapping.Add(new Tuple<string, string>(keyValuePair.Key, string.Empty));
|
||||
continue;
|
||||
}
|
||||
if (!slots.ContainsKey(dataFile.Slot))
|
||||
{
|
||||
missingSlots.Add(keyValuePair.Key);
|
||||
pageMapping.Add(new Tuple<string, string>(keyValuePair.Key, string.Empty));
|
||||
continue;
|
||||
}
|
||||
pageMapping.Add(new Tuple<string, string>(keyValuePair.Key, string.Concat(sourcePath, @"\", sourceFileNameNoExt, "_", dataFile.Slot.Replace('*', 's'), "_data.pdf")));
|
||||
slots[dataFile.Slot].Add(dataFile);
|
||||
}
|
||||
string checkFileName = string.Concat(sourcePath, @"\", sourceFileNameNoExt, "_data.pdf");
|
||||
if (!File.Exists(checkFileName))
|
||||
{
|
||||
File.Move(headerFileName, checkFileName);
|
||||
sourceFiles.Remove(headerFileName);
|
||||
sourceFiles.Add(checkFileName);
|
||||
}
|
||||
checkFileName = string.Empty;
|
||||
for (int i = pageMapping.Count - 1; i > -1; i--)
|
||||
{
|
||||
if (!string.IsNullOrEmpty(pageMapping[i].Item2))
|
||||
{
|
||||
checkFileName = pageMapping[i].Item2;
|
||||
if (!File.Exists(checkFileName))
|
||||
{
|
||||
File.Move(pageMapping[i].Item1, checkFileName);
|
||||
sourceFiles.Remove(pageMapping[i].Item1);
|
||||
sourceFiles.Add(checkFileName);
|
||||
}
|
||||
}
|
||||
else if (!string.IsNullOrEmpty(checkFileName))
|
||||
{
|
||||
//if (i == 0 || !string.IsNullOrEmpty(pageMapping[i - 1].Item2))
|
||||
//{
|
||||
checkFileName = checkFileName.Replace("_data.pdf", "_image.pdf");
|
||||
if (!File.Exists(checkFileName))
|
||||
{
|
||||
File.Move(pageMapping[i].Item1, checkFileName);
|
||||
sourceFiles.Remove(pageMapping[i].Item1);
|
||||
sourceFiles.Add(checkFileName);
|
||||
}
|
||||
//}
|
||||
checkFileName = string.Empty;
|
||||
}
|
||||
}
|
||||
foreach (KeyValuePair<string, List<DataFile>> keyValuePair in slots)
|
||||
{
|
||||
if (!keyValuePair.Value.Any() || keyValuePair.Value[0] is null)
|
||||
missingSlots.Add(string.Concat("Slot ", keyValuePair.Key, ") is missing."));
|
||||
else
|
||||
{
|
||||
foreach (DataFile data in keyValuePair.Value)
|
||||
dataFiles.Add(data);
|
||||
}
|
||||
}
|
||||
if (missingSlots.Any())
|
||||
{
|
||||
string missingSlotsFile = string.Concat(sourcePath, @"\", sourceFileNameNoExt, "_MissingSlots.txt");
|
||||
File.WriteAllLines(missingSlotsFile, missingSlots);
|
||||
sourceFiles.Add(missingSlotsFile);
|
||||
}
|
||||
headerFile.Date = DateTime.Parse(headerFile.Date).ToString();
|
||||
//Equipment data is wrong!!!
|
||||
headerFile.Date = DateTime.Now.ToString();
|
||||
//Equipment data is wrong!!!
|
||||
//for (int i = 0; i < dataFiles.Count; i++)
|
||||
// dataFiles[i].Date = DateTime.Parse(dataFiles[i].Date).ToString();
|
||||
foreach (string sourceFile in sourceFiles)
|
||||
fileInfoCollection.Add(new FileInfo(sourceFile));
|
||||
fileInfoCollection.Add(new FileInfo(logic.Logistics.ReportFullPath));
|
||||
result = new Tuple<HeaderFile, List<DataFile>>(headerFile, dataFiles);
|
||||
return result;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
}
|
19
Adaptation/Helpers/Si/Transport.Input.cs
Normal file
19
Adaptation/Helpers/Si/Transport.Input.cs
Normal file
@ -0,0 +1,19 @@
|
||||
namespace Adaptation.Si
{

    internal partial class Transport
    {

        // Payload deserialized from the equipment MID JSON (see Job's constructor:
        // JsonSerializer.Deserialize<Input>(mid)); property names must match the JSON keys.
        public class Input
        {
            public string Sequence { get; set; }      // tick count as text; parsed with long.TryParse in Job
            public string Area { get; set; }          // processing area; "Si" marks the jobs this transport handles
            public string EquipmentType { get; set; } // becomes Job.StateModel
            public string MesEntity { get; set; }     // equipment name in MES; becomes Job.Equipment
            public string MID { get; set; }           // lot identifier "xx-yyyyyy-zzzz" (hyphens expected at positions 2 and 9)
            public string Recipe { get; set; }        // becomes Job.RecipeName
        }

    }

}
|
18
Adaptation/Helpers/Si/Transport.Item.cs
Normal file
18
Adaptation/Helpers/Si/Transport.Item.cs
Normal file
@ -0,0 +1,18 @@
|
||||
namespace Adaptation.Si
{

    internal partial class Transport
    {

        // One wafer/pocket entry carried by a Job and serialized into the "Items"
        // array of the GETJOBS reply document (see GetJobsReply). The trailing
        // comments map each field to its MES / reply-document counterparts.
        public class Item
        {
            public string Name { get; set; } //WaferLot //UniqueID
            public string Type { get; set; } //SatelliteGroup //Sort
            public string Number { get; set; } //PocketNumber //Slot
            public string Qty { get; set; } //1
            public string CarrierName { get; set; } //PROCESS_GROUP
        }

    }

}
|
198
Adaptation/Helpers/Si/Transport.Job.cs
Normal file
198
Adaptation/Helpers/Si/Transport.Job.cs
Normal file
@ -0,0 +1,198 @@
|
||||
using Adaptation.Helpers;
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Data.SqlClient;
|
||||
using System.Globalization;
|
||||
using System.IO;
|
||||
using System.Text;
|
||||
using System.Text.Json;
|
||||
|
||||
namespace Adaptation.Si
{

    internal partial class Transport
    {

        /// <summary>
        /// Snapshot of one equipment job, built from the MID payload of a GETJOBS
        /// request. A MID that is not a JSON object mentioning "Si" yields
        /// <see cref="IsAreaSi"/> == false and leaves the remaining properties unset.
        /// </summary>
        public class Job
        {

            public string AutomationMode { get; }
            public string BasicType { get; }
            public string Equipment { get; }
            public string JobName { get; }
            public string LotName { get; }
            public string PackageName { get; }
            public string ProcessSpecName { get; }
            public string ProcessType { get; }
            public string ProductName { get; }
            public string Qty { get; }
            public string RecipeName { get; }
            public string StateModel { get; }
            //
            public bool IsAreaSi { get; }
            public DateTime DateTime { get; }
            public List<Item> Items { get; }

            /// <summary>
            /// Parses <paramref name="mid"/> into job fields and, for Si-area jobs,
            /// triggers best-effort housekeeping of the OI context-data directories.
            /// </summary>
            /// <param name="configData">Configuration (connection strings, context-data paths).</param>
            /// <param name="mid">Raw MID string from the GETJOBS envelope; expected JSON for Si jobs.</param>
            public Job(ConfigData configData, string mid)
            {
                Items = new List<Item>();
                // Cheap pre-check before attempting deserialization: must look like a
                // JSON object and mention the Si area at all.
                if (mid[0] != '{' || mid[mid.Length - 1] != '}' || !mid.Contains("\"Si\""))
                    IsAreaSi = false;
                else
                {
                    string[] segments;
                    const string hypen = "-";
                    Input input = JsonSerializer.Deserialize<Input>(mid);
                    IsAreaSi = input.Area == "Si";
                    // Sequence is expressed in ticks; fall back to "now" when unparsable.
                    if (!long.TryParse(input.Sequence, out long sequence))
                        DateTime = DateTime.Now;
                    else
                        DateTime = new DateTime(sequence);
                    // MID layout "xx-yyyyyy-zzzz": hyphens expected at fixed positions 2 and 9;
                    // otherwise every segment degrades to "-".
                    if (!string.IsNullOrEmpty(input.MID) && input.MID.Length > 9 && input.MID[2] == hypen[0] && input.MID[9] == hypen[0])
                        segments = input.MID.Split(hypen[0]);
                    else
                        segments = new string[] { hypen, hypen, hypen };
                    //
                    AutomationMode = string.Concat(DateTime.Ticks, ".", input.MesEntity);
                    if (segments[1] == hypen)
                        BasicType = hypen;
                    else
                        BasicType = GetBasicType(configData, hypen, segments[1]);
                    Equipment = input.MesEntity;
                    JobName = DateTime.Ticks.ToString();
                    if (segments[0] == hypen)
                        LotName = input.MID;
                    else
                        LotName = segments[1]; // NOTE(review): uses the middle (RDS) segment, not the full MID — confirm intended
                    PackageName = hypen; //WAFER_ID WaferLot
                    ProcessSpecName = hypen; //WAFER_POS PocketNumber
                    ProcessType = segments[0];
                    ProductName = segments[2].Split('.')[0];
                    Qty = "1";
                    RecipeName = input.Recipe;
                    StateModel = input.EquipmentType;
                    Items.Add(new Item { Name = "0", Type = "NA", Number = (0 + 1).ToString(), Qty = "1", CarrierName = hypen });
                    MoveOldFiles(configData);
                }
            }

            /// <summary>
            /// Looks up the load-lock description ("Left - …" / "Right - …" + reactor type)
            /// for an RDS number in the LSL2SQL REACT_RUN table.
            /// </summary>
            /// <param name="configData">Supplies the LSL2SQL connection string.</param>
            /// <param name="hypen">Fallback value returned when the lookup fails or yields no row.</param>
            /// <param name="rds">RDS number extracted from the MID payload (untrusted input).</param>
            /// <returns>The load-lock text, or <paramref name="hypen"/> on any failure.</returns>
            public string GetBasicType(ConfigData configData, string hypen, string rds)
            {
                string result;
                object scalar = null;
                // rds originates from an externally supplied MID, so it is bound as a
                // SQL parameter rather than interpolated into the statement.
                StringBuilder sql = new StringBuilder();
                sql.Append(" SELECT ").
                    Append(" CASE ").
                    Append(" WHEN LOAD_LOCK_SIDE = 'L' THEN 'Left - ' ").
                    Append(" WHEN LOAD_LOCK_SIDE = 'R' THEN 'Right - ' ").
                    Append(" ELSE LOAD_LOCK_SIDE ").
                    Append(" END + REACTOR_TYPE AS LOAD_LOCK ").
                    Append(" FROM [LSL2SQL].[dbo].[REACT_RUN] ").
                    Append(" WHERE RDS_NO = @rds ");
                //Append(" AND LOAD_SIG != '' ");
                try
                {
                    using (SqlConnection sqlConnection = new SqlConnection(configData.ConnectionStringLSL2SQL))
                    {
                        sqlConnection.Open();
                        using (SqlCommand sqlCommand = new SqlCommand(sql.ToString(), sqlConnection))
                        {
                            sqlCommand.Parameters.AddWithValue("@rds", rds);
                            scalar = sqlCommand.ExecuteScalar();
                        }
                        sqlConnection.Close();
                    }
                }
                catch (Exception)
                {
                    // Best-effort lookup: any SQL failure falls through to the default below.
                }
                if (scalar is null)
                    result = hypen;
                else
                    result = scalar.ToString();
                return result;
            }

            /// <summary>
            /// Archives files older than two days from the OI context-data search /
            /// pending / results directories into per-week subfolders ("yyyy___Week_ww").
            /// All failures are swallowed so housekeeping can never break job processing.
            /// </summary>
            private void MoveOldFiles(ConfigData configData)
            {
                string yearWeek;
                string[] oldFiles;
                FileInfo fileInfo;
                string weekOfYear;
                string moveDirectory;
                DateTime daysOld = DateTime.Now.AddDays(-2);
                CultureInfo cultureInfo = new CultureInfo("en-US");
                Calendar calendar = cultureInfo.Calendar;
                string[] directories = new string[] { configData.OIContextDataSearchPath, configData.OIContextDataPendingPath, configData.OIContextDataResultsPath };
                foreach (string directory in directories)
                {
                    try
                    {
                        oldFiles = Directory.GetFiles(directory, "*", SearchOption.TopDirectoryOnly);
                        foreach (string oldFile in oldFiles)
                        {
                            fileInfo = new FileInfo(oldFile);
                            if (!fileInfo.Exists || fileInfo.LastWriteTime > daysOld)
                                continue;
                            // Week folder is derived from the file's own timestamp, not "now".
                            weekOfYear = calendar.GetWeekOfYear(fileInfo.LastWriteTime, CalendarWeekRule.FirstDay, DayOfWeek.Sunday).ToString("00");
                            yearWeek = string.Concat(fileInfo.LastWriteTime.ToString("yyyy"), "___Week_", weekOfYear);
                            moveDirectory = Path.Combine(fileInfo.DirectoryName, yearWeek);
                            if (!Directory.Exists(moveDirectory))
                                Directory.CreateDirectory(moveDirectory);
                            try
                            { File.Move(oldFile, Path.Combine(moveDirectory, fileInfo.Name)); }
                            catch (Exception) { } // e.g. name collision in the archive folder
                        }
                    }
                    catch (Exception)
                    {
                        // Directory may not exist or be reachable; skip it.
                    }
                }
            }

        }

    }

}
|
77
Adaptation/Helpers/Si/Transport.Logistics.cs
Normal file
77
Adaptation/Helpers/Si/Transport.Logistics.cs
Normal file
@ -0,0 +1,77 @@
|
||||
namespace Adaptation.Si
{

    internal partial class Transport
    {
        /// <summary>
        /// EDA-Configurator.pdf
        /// CDS Namespace Reply Variables
        /// (Property names intentionally mirror the CDS variable names.)
        /// </summary>
        private class Logistics
        {

            /// <summary>
            /// Basic Type
            /// </summary>
            public string BASIC_TYPE { get; set; }

            /// <summary>
            /// Text for additional information
            /// </summary>
            public string INFO { get; set; }

            /// <summary>
            /// Unique assignment of lot processing to the basic cell (Dresden)
            /// </summary>
            public string JOBID { get; set; }

            /// <summary>
            /// Equipment name used in MES
            /// </summary>
            public string MES_ENTITY { get; set; }

            /// <summary>
            /// Lot number, name for a lot
            /// </summary>
            public string MID { get; set; }

            /// <summary>
            /// Recipe (Process Program ID)
            /// </summary>
            public string PPID { get; set; }

            /// <summary>
            /// Process group (e.g. C5PR)
            /// </summary>
            public string PROCESS_GROUP { get; set; }

            /// <summary>
            /// Product name
            /// </summary>
            public string PRODUCT { get; set; }

            /// <summary>
            /// Total number of wafers in lot
            /// </summary>
            public string TOTAL_NUMBER_OF_WAFERS { get; set; }

            /// <summary>
            /// Equipment sequence number
            /// </summary>
            public string SEQUENCE { get; set; }

            /// <summary>
            /// Unique wafer number (barcode, OCR)
            /// </summary>
            public string WAFER_ID { get; set; }

            /// <summary>
            /// Wafer position in a tube (Furnace)
            /// </summary>
            public string WAFER_POS { get; set; }

        }

    }

}
|
218
Adaptation/Helpers/Si/Transport.cs
Normal file
218
Adaptation/Helpers/Si/Transport.cs
Normal file
@ -0,0 +1,218 @@
|
||||
using Adaptation.Helpers;
|
||||
using Infineon.Yoda;
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Globalization;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using System.Threading;
|
||||
|
||||
namespace Adaptation.Si
{

    internal partial class Transport
    {

        // Shared Ifx transport instance, stored as object so the Infineon.Yoda type is
        // only touched inside Setup; set by Setup when setIfxTransport is true.
        private static object _IfxTransport;
        private static ConfigData _ConfigData;

        /// <summary>
        /// Stores the configuration and clears any previous transport.
        /// Must be called before <see cref="Setup"/>.
        /// </summary>
        internal static void Initialize(ConfigData configData)
        {
            _IfxTransport = null;
            _ConfigData = configData;
        }

        /// <summary>
        /// Optionally creates the Ifx transport, then subscribes to every configured
        /// subject and attaches the reliable-message handler. Returns status lines for
        /// logging. The Thread.Sleep loops pace the create/subscribe phases —
        /// NOTE(review): timing-based settling; confirm the delays are required.
        /// </summary>
        /// <param name="useSleep">Insert settle delays before create and after subscribe.</param>
        /// <param name="setIfxTransport">Create a new transport; otherwise reuse the existing one.</param>
        /// <exception cref="Exception">Thrown when no transport exists to subscribe with.</exception>
        internal static List<string> Setup(bool useSleep, bool setIfxTransport)
        {
            List<string> results = new List<string>();
            if (useSleep)
            {
                for (int i = 1; i < 4; i++)
                    Thread.Sleep(500);
            }
            if (setIfxTransport)
            {
                results.Add(string.Concat("IfxTransport Subject: ", _ConfigData.IfxSubject));
                // Build the transport configuration document from ConfigData.
                IfxDoc ifxDoc = new IfxDoc();
                ifxDoc.Add(IfxConst.SUBJECT_PREFIX, _ConfigData.IfxSubjectPrefix);
                ifxDoc.Add(IfxConst.IFX_CHANNEL, _ConfigData.IfxChannel);
                ifxDoc.Add(IfxConst.IFX_CONFIGURATION_LOCATION, _ConfigData.IfxConfigurationLocation);
                ifxDoc.Add(IfxConst.IFX_CONFIGURATION_LOCATION_LOCAL_COPY, _ConfigData.IfxConfigurationLocationLocalCopy);
                results.Add(string.Concat("IfxTransport Config: ", ifxDoc));
                _IfxTransport = new IfxTransport();
                IfxTransport ifxTransport = (IfxTransport)_IfxTransport;
                ifxTransport.Create(ifxDoc);
                if (useSleep)
                {
                    // Longer settle after Create so daemon/network info is populated below.
                    for (int i = 1; i < 10; i++)
                        Thread.Sleep(500);
                }
                results.Add(string.Concat("IfxTransport Current Daemon: ", ifxTransport.CurrentDaemon));
                results.Add(string.Concat("IfxTransport Current Network: ", ifxTransport.CurrentNetwork));
                results.Add(string.Concat("IfxTransport Current Service: ", ifxTransport.CurrentService));
                results.Add(string.Concat("IfxTransport Current PoolName: ", ifxTransport.CurrentPoolName));
            }
            for (int i = 1; i < 3; i++)
                Thread.Sleep(500);
            if (_IfxTransport is null)
                throw new Exception();
            else
            {
                IfxTransport ifxTransport = (IfxTransport)_IfxTransport;
                // IfxSubject is a '|'-separated list; each entry is subscribed under the prefix.
                string[] subjects = _ConfigData.IfxSubject.Split('|');
                foreach (string subject in subjects)
                    ifxTransport.Subscribe(string.Concat(_ConfigData.IfxSubjectPrefix, ".", subject));
                ifxTransport.ReliableMessage += MainTransport_ReliableMessage;
                for (int i = 1; i < 3; i++)
                    Thread.Sleep(500);
            }
            return results;
        }

        /// <summary>
        /// Moves every source file except the logistics file itself into a
        /// "_ Logistics Archive\yyyy_Week_ww" sibling folder keyed by the file's
        /// last-write time. Individual move failures are ignored (best-effort).
        /// </summary>
        private static void MoveSourceFiles(string[] sourceFiles, string pdsfFileLogistics, Calendar calendar)
        {
            DateTime dateTime;
            string weekOfYear;
            string checkDirectory;
            foreach (string pdsfFile in sourceFiles)
            {
                if (pdsfFile == pdsfFileLogistics)
                    continue;
                dateTime = new FileInfo(pdsfFile).LastWriteTime;
                weekOfYear = calendar.GetWeekOfYear(dateTime, CalendarWeekRule.FirstDay, DayOfWeek.Sunday).ToString("00");
                checkDirectory = string.Concat(Path.GetDirectoryName(pdsfFile), @"\_ Logistics Archive\", dateTime.ToString("yyyy"), "_Week_", weekOfYear);
                if (!Directory.Exists(checkDirectory))
                    Directory.CreateDirectory(checkDirectory);
                try
                { File.Move(pdsfFile, string.Concat(checkDirectory, @"\", Path.GetFileName(pdsfFile))); }
                catch (Exception) { }
            }
        }

        /// <summary>
        /// Extracts the MID from the envelope's "LotName" field; empty string when
        /// the document is null or the field is absent.
        /// </summary>
        private static string GetJobsMID(IfxDoc envelopeDocument)
        {
            string mid;
            if (envelopeDocument is null || !envelopeDocument.FieldExists("LotName"))
                mid = string.Empty;
            else
                mid = envelopeDocument.GetFieldByName("LotName").ToString();
            return mid;
        }

        /// <summary>
        /// Builds the GETJOBS reply document: one FAJobs entry containing the job,
        /// a single lot with its Items array, and the recipe attached to the job.
        /// Literal "-" values mark fields this transport does not populate.
        /// </summary>
        private static IfxDoc GetJobsReply(Job job)
        {
            IfxDoc result = new IfxDoc();
            IfxDoc itemDoc;
            IfxDoc jobDoc = new IfxDoc();
            IfxDoc lotDoc = new IfxDoc();
            IfxDoc recipeDoc = new IfxDoc();
            List<IfxDoc> itemDocs = new List<IfxDoc>();
            jobDoc.Add("AutomationMode", job.AutomationMode);
            jobDoc.Add("CreationTimestamp", job.DateTime);
            jobDoc.Add("CreationUser", "-");
            jobDoc.Add("CurrentState", true);
            jobDoc.Add("Equipment", job.Equipment);
            jobDoc.Add("JobName", job.JobName);
            jobDoc.Add("LastUpdateTimestamp", job.DateTime);
            jobDoc.Add("LastUpdateUser", "-");
            jobDoc.Add("ProcessType", job.ProcessType);
            jobDoc.Add("StateModel", job.StateModel);
            jobDoc.Add("Status", "-");
            lotDoc.Add("BasicType", job.BasicType);
            lotDoc.Add("IsActive", true);
            lotDoc.Add("LotName", job.LotName);
            lotDoc.Add("LotState", "-");
            lotDoc.Add("PackageName", job.PackageName);
            lotDoc.Add("ProcessSpecName", job.ProcessSpecName);
            lotDoc.Add("ProductName", job.ProductName);
            lotDoc.Add("Qty", job.Qty);
            lotDoc.Add("Qty2", "-");
            recipeDoc.Add("RecipeName", job.RecipeName);
            lotDoc.Add("SpecName", "-");
            foreach (Item item in job.Items)
            {
                itemDoc = new IfxDoc();
                itemDoc.Add("Name", item.Name);
                itemDoc.Add("Type", item.Type);
                itemDoc.Add("Number", item.Number);
                itemDoc.Add("Qty", item.Qty);
                itemDoc.Add("CarrierName", item.CarrierName);
                itemDocs.Add(itemDoc);
            }
            jobDoc.Add("Recipe", recipeDoc);
            lotDoc.Add("Items", itemDocs.ToArray());
            jobDoc.Add("Lots", new IfxDoc[] { lotDoc });
            result.Add("FAJobs", new IfxDoc[] { jobDoc });
            result.Add("IFX_ECD", "0");
            result.Add("IFX_ETX", 0);
            return result;
        }

        /// <summary>
        /// Reliable-message handler: archives the inbound envelope as XML, and for
        /// GETJOBS subjects builds a Job from the MID and — when it is a Si-area job —
        /// sends and archives the reply. Any failure is emailed (best-effort) and
        /// written to the error target location.
        /// </summary>
        private static void MainTransport_ReliableMessage(string subject, string replySubject, IfxEnvelope ifxEnvelope)
        {
            try
            {
                string mid = string.Empty;
                string[] sourceFiles = null;
                DateTime dateTime = DateTime.Now;
                string pdsfFileLogistics = string.Empty;
                IfxDoc envelopeDocument = ifxEnvelope.ExtractDocument();
                CultureInfo cultureInfo = new CultureInfo("en-US");
                Calendar calendar = cultureInfo.Calendar;
                string weekOfYear = calendar.GetWeekOfYear(dateTime, CalendarWeekRule.FirstDay, DayOfWeek.Sunday).ToString("00");
                string weekOfYearSegment = string.Concat(@"\", dateTime.ToString("yyyy"), "_Week_", weekOfYear, @"\", dateTime.ToString("yyyy-MM-dd"));
                // Archive the raw inbound envelope under SourceFileLocation\yyyy_Week_ww\yyyy-MM-dd.
                if (!string.IsNullOrEmpty(_ConfigData.FileConnectorConfiguration.SourceFileLocation))
                {
                    string directory = string.Concat(_ConfigData.FileConnectorConfiguration.SourceFileLocation, weekOfYearSegment);
                    if (!Directory.Exists(directory))
                        Directory.CreateDirectory(directory);
                    string fileName = string.Concat(directory, @"\", subject.Replace(".", "~"), " - ", DateTime.Now.Ticks, ".xml");
                    try
                    { envelopeDocument.SaveAsXml(fileName); }
                    catch (Exception) { }
                }
                // Anything other than GETJOBS routes to the error handler below.
                if (!subject.EndsWith("GETJOBS"))
                    throw new Exception();
                mid = GetJobsMID(envelopeDocument);
                Job job = new Job(_ConfigData, mid);
                if (job.IsAreaSi)
                {
                    IfxDoc sendReply = GetJobsReply(job);
                    ifxEnvelope.Transport.SendReply(ifxEnvelope, sendReply);
                    // Archive the outbound reply under TargetFileLocation, mirroring the inbound layout.
                    if (!string.IsNullOrEmpty(_ConfigData.FileConnectorConfiguration.TargetFileLocation))
                    {
                        string directory = string.Concat(_ConfigData.FileConnectorConfiguration.TargetFileLocation, weekOfYearSegment);
                        if (!Directory.Exists(directory))
                            Directory.CreateDirectory(directory);
                        string fileName = string.Concat(directory, @"\", subject.Replace(".", "~"), " - ", DateTime.Now.Ticks, ".xml");
                        try
                        { sendReply.SaveAsXml(fileName); }
                        catch (Exception) { }
                    }
                }
                // NOTE(review): sourceFiles stays null and pdsfFileLogistics stays empty in
                // this method, so this call is currently unreachable — confirm whether the
                // PDSF move logic is pending wiring or dead.
                if (!(sourceFiles is null) && !string.IsNullOrEmpty(pdsfFileLogistics))
                    MoveSourceFiles(sourceFiles, pdsfFileLogistics, calendar);
            }
            catch (Exception exception)
            {
                // Best-effort email notification; failures here must not mask the write below.
                try
                {
                    Eaf.Core.Smtp.ISmtp smtp = Eaf.Core.Backbone.Instance.GetBackboneComponentsOfType<Eaf.Core.Smtp.ISmtp>().SingleOrDefault();
                    Eaf.Core.Smtp.EmailMessage emailMessage = new Eaf.Core.Smtp.EmailMessage(string.Concat("Exception:", _ConfigData.EquipmentElementName, ":MainTransport_ReliableMessage"), string.Concat(exception.Message, Environment.NewLine, Environment.NewLine, exception.StackTrace), Eaf.Core.Smtp.MailPriority.High);
                    smtp.Send(emailMessage);
                }
                catch (Exception) { }
                // Persist the exception details to the error target location for diagnosis.
                string directory = _ConfigData.FileConnectorConfiguration.ErrorTargetFileLocation;
                if (!string.IsNullOrEmpty(directory) && Directory.Exists(directory))
                {
                    string fileName = string.Concat(directory, @"\", subject.Replace(".", "~"), " - ", DateTime.Now.Ticks, ".txt");
                    try
                    { File.WriteAllLines(fileName, new string[] { exception.Message, string.Empty, string.Empty, exception.StackTrace }); }
                    catch (Exception) { }
                }
            }
        }

    }

}
|
Reference in New Issue
Block a user